language stringclasses 1 value | repo stringclasses 133 values | path stringlengths 13 229 | class_span dict | source stringlengths 14 2.92M | target stringlengths 1 153 |
|---|---|---|---|---|---|
csharp | unoplatform__uno | src/Uno.UI/UI/Xaml/Controls/ScrollContentPresenter/IScrollContentPresenter.cs | {
"start": 490,
"end": 783
} | internal partial interface ____
{
// NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
//
// This | IScrollContentPresenter |
csharp | JoshClose__CsvHelper | tests/CsvHelper.Tests/TypeConversion/BigIntegerConverterTests.cs | {
"start": 588,
"end": 1423
} | public class ____
{
[Fact]
public void RoundTripMaxValueTest()
{
var converter = new BigIntegerConverter();
var s = converter.ConvertToString((BigInteger)long.MaxValue + 1, null!, new MemberMapData(null));
var bi = converter.ConvertFromString(s, null!, new MemberMapData(null));
Assert.Equal((BigInteger)long.MaxValue + 1, bi);
}
[Fact]
public void RoundTripMinValueTest()
{
var converter = new BigIntegerConverter();
var s = converter.ConvertToString((BigInteger)long.MinValue - 1, null!, new MemberMapData(null));
var bi = converter.ConvertFromString(s, null!, new MemberMapData(null));
Assert.Equal((BigInteger)long.MinValue - 1, bi);
}
}
}
| BigIntegerConverterTests |
csharp | files-community__Files | src/Files.Core.SourceGenerator/Generators/VTableFunctionGenerator.cs | {
"start": 1012,
"end": 5006
} | struct
____ (node.Parent is not TypeDeclarationSyntax { Keyword.RawKind: (int)SyntaxKind.StructKeyword, Modifiers: { } modifiers } ||
!modifiers.Any(SyntaxKind.PartialKeyword))
return false;
return true;
},
static (context, token) =>
{
token.ThrowIfCancellationRequested();
var fullyQualifiedParentTypeName = context.TargetSymbol.ContainingType.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat);
var structNamespace = context.TargetSymbol.ContainingType.ContainingNamespace.ToString();
var structName = context.TargetSymbol.ContainingType.Name;
var methodSymbol = (IMethodSymbol)context.TargetSymbol;
var isReturnTypeVoid = methodSymbol.ReturnsVoid;
var functionName = methodSymbol.Name;
var returnTypeName = methodSymbol.ReturnType.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat);
var parameters = methodSymbol.Parameters.Select(x => new ParameterTypeNamePair(x.Type.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat), x.Name));
var index = (int)context.Attributes[0].NamedArguments.FirstOrDefault(x => x.Key.Equals("Index")).Value.Value!;
return new VTableFunctionInfo(fullyQualifiedParentTypeName, structNamespace, structName, isReturnTypeVoid, functionName, returnTypeName, index, new(parameters.ToImmutableArray()));
})
.Where(static item => item is not null)
.Collect()
.Select((items, token) =>
{
token.ThrowIfCancellationRequested();
return items.GroupBy(source => source.FullyQualifiedParentTypeName, StringComparer.OrdinalIgnoreCase).ToImmutableArray();
});
context.RegisterSourceOutput(sources, (context, sources) =>
{
foreach (var source in sources)
{
var fileName = $"{source.ToImmutableArray().ElementAt(0).ParentTypeNamespace}.{source.ToImmutableArray().ElementAt(0).ParentTypeName}_VTableFunctions.g.cs";
var generatedCSharpCode = GenerateVtableFunctionsForStruct(source.ToImmutableArray());
context.AddSource(fileName, generatedCSharpCode);
}
});
}
private string GenerateVtableFunctionsForStruct(ImmutableArray<VTableFunctionInfo> sources)
{
StringBuilder builder = new();
builder.AppendLine($"// <auto-generated/>");
builder.AppendLine();
builder.AppendLine($"using global::System.Runtime.CompilerServices;");
builder.AppendLine();
builder.AppendLine($"#pragma warning disable");
builder.AppendLine();
builder.AppendLine($"namespace {sources.ElementAt(0).ParentTypeNamespace};");
builder.AppendLine();
builder.AppendLine($"public unsafe partial struct {sources.ElementAt(0).ParentTypeName}");
builder.AppendLine($"{{");
builder.AppendLine($" private void** lpVtbl;");
builder.AppendLine();
var sourceIndex = 0;
var sourceCount = sources.Count();
foreach (var source in sources)
{
var returnTypeName = source.IsReturnTypeVoid ? "void" : "int";
builder.AppendLine($" [global::System.Runtime.CompilerServices.MethodImpl(global::System.Runtime.CompilerServices.MethodImplOptions.AggressiveInlining)]");
builder.AppendLine($" public partial {source.ReturnTypeName} {source.Name}({string.Join(", ", source.Parameters.Select(x => $"{x.FullyQualifiedTypeName} {x.ValueName}"))})");
builder.AppendLine($" {{");
builder.AppendLine($" return ({source.ReturnTypeName})((delegate* unmanaged[MemberFunction]<{sources.ElementAt(0).FullyQualifiedParentTypeName}*, {string.Join(", ", source.Parameters.Select(x => $"{x.FullyQualifiedTypeName}"))}, {returnTypeName}>)(lpVtbl[{source.Index}]))");
builder.AppendLine($" (({sources.ElementAt(0).FullyQualifiedParentTypeName}*)global::System.Runtime.CompilerServices.Unsafe.AsPointer(ref this), {string.Join(", ", source.Parameters.Select(x => $"{x.ValueName}"))});");
builder.AppendLine($" }}");
if (sourceIndex < sourceCount - 1)
builder.AppendLine();
sourceIndex++;
}
builder.AppendLine($"}}");
return builder.ToString();
}
}
}
| if |
csharp | unoplatform__uno | src/SamplesApp/UITests.Shared/Windows_UI_Xaml_Media/ImageBrushTests/RectanglemaskingRectangleGrid.xaml.cs | {
"start": 602,
"end": 749
} | partial class ____ : UserControl
{
public RectanglemaskingRectangleGrid()
{
this.InitializeComponent();
}
}
}
| RectanglemaskingRectangleGrid |
csharp | graphql-dotnet__graphql-dotnet | src/GraphQL/Execution/Errors/DocumentError.cs | {
"start": 163,
"end": 306
} | public abstract class ____ : ExecutionError
{
/// <summary>
/// Initializes a new instance of the <see cref="DocumentError"/> | DocumentError |
csharp | microsoft__semantic-kernel | dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_Simple.cs | {
"start": 649,
"end": 3741
} | public class ____(ITestOutputHelper output) : BaseTest(output)
{
[Fact]
public async Task ExampleAsync()
{
// Create an embedding generation service.
var embeddingGenerator = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAIEmbeddings.Endpoint), new AzureCliCredential())
.GetEmbeddingClient(TestConfiguration.AzureOpenAIEmbeddings.DeploymentName)
.AsIEmbeddingGenerator(1536);
// Construct an InMemory vector store.
var vectorStore = new InMemoryVectorStore();
// Get and create collection if it doesn't exist.
var collection = vectorStore.GetCollection<ulong, Glossary>("skglossary");
await collection.EnsureCollectionExistsAsync();
// Create glossary entries and generate embeddings for them.
var glossaryEntries = CreateGlossaryEntries().ToList();
var tasks = glossaryEntries.Select(entry => Task.Run(async () =>
{
entry.DefinitionEmbedding = (await embeddingGenerator.GenerateAsync(entry.Definition)).Vector;
}));
await Task.WhenAll(tasks);
// Upsert the glossary entries into the collection and return their keys.
await collection.UpsertAsync(glossaryEntries);
// Search the collection using a vector search.
var searchString = "What is an Application Programming Interface";
var searchVector = (await embeddingGenerator.GenerateAsync(searchString)).Vector;
var resultRecords = await collection.SearchAsync(searchVector, top: 1).ToListAsync();
Console.WriteLine("Search string: " + searchString);
Console.WriteLine("Result: " + resultRecords.First().Record.Definition);
Console.WriteLine();
// Search the collection using a vector search.
searchString = "What is Retrieval Augmented Generation";
searchVector = (await embeddingGenerator.GenerateAsync(searchString)).Vector;
resultRecords = await collection.SearchAsync(searchVector, top: 1).ToListAsync();
Console.WriteLine("Search string: " + searchString);
Console.WriteLine("Result: " + resultRecords.First().Record.Definition);
Console.WriteLine();
// Search the collection using a vector search with pre-filtering.
searchString = "What is Retrieval Augmented Generation";
searchVector = (await embeddingGenerator.GenerateAsync(searchString)).Vector;
resultRecords = await collection.SearchAsync(searchVector, top: 3, new() { Filter = g => g.Category == "External Definitions" }).ToListAsync();
Console.WriteLine("Search string: " + searchString);
Console.WriteLine("Number of results: " + resultRecords.Count);
Console.WriteLine("Result 1 Score: " + resultRecords[0].Score);
Console.WriteLine("Result 1: " + resultRecords[0].Record.Definition);
Console.WriteLine("Result 2 Score: " + resultRecords[1].Score);
Console.WriteLine("Result 2: " + resultRecords[1].Record.Definition);
}
/// <summary>
/// Sample model | VectorStore_VectorSearch_Simple |
csharp | dotnet__BenchmarkDotNet | tests/BenchmarkDotNet.Tests/GenericBuilderTests.cs | {
"start": 2144,
"end": 3139
} | public class ____<T1, T2, T3>
{
[Benchmark] public T1 CreateT1() => Activator.CreateInstance<T1>();
[Benchmark] public T2 CreateT2() => Activator.CreateInstance<T2>();
[Benchmark] public T3 CreateT3() => Activator.CreateInstance<T3>();
}
[Fact]
public void TestBuildGenericWithWrongAttributes()
{
var types = GenericBenchmarksBuilder.GetRunnableBenchmarks(new[] {typeof(GenericBenchmarkWithWrongAttribute<,>)});
Assert.Equal(2, types.Length);
Assert.Single(types, typeof(GenericBenchmarkWithWrongAttribute<int, char>));
Assert.Single(types, typeof(GenericBenchmarkWithWrongAttribute<char, string>));
}
[GenericTypeArguments(typeof(int), typeof(char))]
[GenericTypeArguments(typeof(char), typeof(string))]
#pragma warning disable BDN1102
[GenericTypeArguments(typeof(char))]
#pragma warning restore BDN1102
| ThreeArgGenericBenchmark |
csharp | microsoft__PowerToys | src/modules/AdvancedPaste/AdvancedPaste/Helpers/IUserSettings.cs | {
"start": 379,
"end": 939
} | public interface ____
{
public bool IsAIEnabled { get; }
public bool ShowCustomPreview { get; }
public bool CloseAfterLosingFocus { get; }
public bool EnableClipboardPreview { get; }
public IReadOnlyList<AdvancedPasteCustomAction> CustomActions { get; }
public IReadOnlyList<PasteFormats> AdditionalActions { get; }
public PasteAIConfiguration PasteAIConfiguration { get; }
public event EventHandler Changed;
Task SetActiveAIProviderAsync(string providerId);
}
}
| IUserSettings |
csharp | GtkSharp__GtkSharp | Source/Libs/AtkSharp/Object.cs | {
"start": 915,
"end": 1182
} | public partial class ____ {
protected void EmitChildrenChanged (ChildrenChangedDetail detail, uint child_index, Atk.Object child)
{
GLib.Signal.Emit (this,
"children-changed::" + detail.ToString ().ToLower (),
child_index, child.Handle);
}
| Object |
csharp | AutoMapper__AutoMapper | src/AutoMapper/Mappers/ConvertMapper.cs | {
"start": 40,
"end": 880
} | public sealed class ____ : IObjectMapper
{
public static bool IsPrimitive(Type type) => type.IsPrimitive || type == typeof(string) || type == typeof(decimal);
public bool IsMatch(TypePair types) => (types.SourceType == typeof(string) && types.DestinationType == typeof(DateTime)) ||
(IsPrimitive(types.SourceType) && IsPrimitive(types.DestinationType));
public Expression MapExpression(IGlobalConfiguration configuration, ProfileMap profileMap,
MemberMap memberMap, Expression sourceExpression, Expression destExpression)
{
var convertMethod = typeof(Convert).GetMethod("To" + destExpression.Type.Name, [sourceExpression.Type]);
return Call(convertMethod, sourceExpression);
}
#if FULL_OR_STANDARD
public TypePair? GetAssociatedTypes(TypePair initialTypes) => null;
#endif
} | ConvertMapper |
csharp | AutoFixture__AutoFixture | Src/IdiomsUnitTest/EqualsNullAssertionTest.cs | {
"start": 3078,
"end": 5197
} | private class ____ : MethodInfo
{
public override Type ReflectedType
{
get { return null; }
}
public override Type DeclaringType
{
get { return null; }
}
public override MethodInfo GetBaseDefinition()
{
throw new NotImplementedException();
}
public override ICustomAttributeProvider ReturnTypeCustomAttributes
{
get { throw new NotImplementedException(); }
}
public override MethodAttributes Attributes
{
get { throw new NotImplementedException(); }
}
public override MethodImplAttributes GetMethodImplementationFlags()
{
throw new NotImplementedException();
}
public override ParameterInfo[] GetParameters()
{
throw new NotImplementedException();
}
public override object Invoke(object obj, BindingFlags invokeAttr, Binder binder, object[] parameters, System.Globalization.CultureInfo culture)
{
throw new NotImplementedException();
}
public override RuntimeMethodHandle MethodHandle
{
get { throw new NotImplementedException(); }
}
public override object[] GetCustomAttributes(Type attributeType, bool inherit)
{
throw new NotImplementedException();
}
public override object[] GetCustomAttributes(bool inherit)
{
throw new NotImplementedException();
}
public override bool IsDefined(Type attributeType, bool inherit)
{
throw new NotImplementedException();
}
public override string Name
{
get { throw new NotImplementedException(); }
}
}
#pragma warning disable 659
| MethodInfoWithNullDeclaringAndReflectedType |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/ModelBuilding/GiantModel.cs | {
"start": 325837,
"end": 326057
} | public class ____
{
public int Id { get; set; }
public RelatedEntity1495 ParentEntity { get; set; }
public IEnumerable<RelatedEntity1497> ChildEntities { get; set; }
}
| RelatedEntity1496 |
csharp | reactiveui__ReactiveUI | src/ReactiveUI.Tests/Commands/CreatesCommandBindingTests.cs | {
"start": 402,
"end": 1451
} | public class ____
{
/// <summary>
/// Test that makes sure events binder binds to explicit event.
/// </summary>
[Test]
public void EventBinderBindsToExplicitEvent()
{
var input = new TestFixture();
var fixture = new CreatesCommandBindingViaEvent();
var wasCalled = false;
var cmd = ReactiveCommand.Create<int>(_ => wasCalled = true);
using (Assert.EnterMultipleScope())
{
Assert.That(fixture.GetAffinityForObject(input.GetType(), true), Is.GreaterThan(0));
Assert.That(fixture.GetAffinityForObject(input.GetType(), false), Is.LessThanOrEqualTo(0));
}
var disposable = fixture.BindCommandToObject<PropertyChangedEventArgs>(cmd, input, Observable.Return((object)5), "PropertyChanged");
input.IsNotNullString = "Foo";
Assert.That(wasCalled, Is.True);
wasCalled = false;
disposable?.Dispose();
input.IsNotNullString = "Bar";
Assert.That(wasCalled, Is.False);
}
}
| CreatesCommandBindingTests |
csharp | EventStore__EventStore | src/KurrentDB.Core.Tests/Transforms/WithHeader/WithHeaderChunkTransform.cs | {
"start": 274,
"end": 560
} | public class ____(int transformHeaderSize) : IChunkTransform {
public IChunkReadTransform Read { get; } = new WithHeaderChunkReadTransform(transformHeaderSize);
public IChunkWriteTransform Write { get; } = new WithHeaderChunkWriteTransform(transformHeaderSize);
}
| WithHeaderChunkTransform |
csharp | OrchardCMS__OrchardCore | src/OrchardCore/OrchardCore.Indexing.Abstractions/Models/IndexProfileResetContext.cs | {
"start": 40,
"end": 306
} | public sealed class ____
{
public IndexProfile IndexProfile { get; }
public IndexProfileResetContext(IndexProfile indexProfile)
{
ArgumentNullException.ThrowIfNull(indexProfile);
IndexProfile = indexProfile;
}
}
| IndexProfileResetContext |
csharp | microsoft__garnet | libs/server/Resp/RespCommandDataProvider.cs | {
"start": 7266,
"end": 7408
} | public enum ____
{
// Default file format (JSON serialized array of data objects)
Default = 0,
}
} | RespCommandsDataFileType |
csharp | unoplatform__uno | src/Uno.UI.Tests/Windows_UI_Xaml_Data/xBindTests/Controls/Binding_TypeMismatch_DataTemplate.xaml.cs | {
"start": 871,
"end": 1291
} | public class ____ : System.ComponentModel.INotifyPropertyChanged
{
public event System.ComponentModel.PropertyChangedEventHandler PropertyChanged;
private int _myProperty;
public int MyInteger
{
get => _myProperty;
set
{
_myProperty = value;
PropertyChanged?.Invoke(this, new System.ComponentModel.PropertyChangedEventArgs(nameof(MyInteger)));
}
}
}
}
| Binding_TypeMismatch_DataTemplate_Data |
csharp | dotnetcore__FreeSql | FreeSql.Tests/FreeSql.Tests/DataAnnotations/Navigate/ManyToManyTest.cs | {
"start": 4881,
"end": 5207
} | public class ____
{
public Guid id { get; set; }
public string username { get; set; }
public DateTime createtime { get; set; }
[Navigate(ManyToMany = typeof(user_role_nav1))]
public virtual List<mtm_role_nav1> roles { get; set; }
}
| mtm_user_nav1 |
csharp | AvaloniaUI__Avalonia | src/Avalonia.X11/X11Enums.cs | {
"start": 45,
"end": 1582
} | internal enum ____
{
Success = 0, /* everything's okay */
BadRequest = 1, /* bad request code */
BadValue = 2, /* int parameter out of range */
BadWindow = 3, /* parameter not a Window */
BadPixmap = 4, /* parameter not a Pixmap */
BadAtom = 5, /* parameter not an Atom */
BadCursor = 6, /* parameter not a Cursor */
BadFont = 7, /* parameter not a Font */
BadMatch = 8, /* parameter mismatch */
BadDrawable = 9, /* parameter not a Pixmap or Window */
BadAccess = 10, /* depending on context:
- key/button already grabbed
- attempt to free an illegal
cmap entry
- attempt to store into a read-only
color map entry.
- attempt to modify the access control
list from other than the local host.
*/
BadAlloc = 11, /* insufficient resources */
BadColor = 12, /* no such colormap */
BadGC = 13, /* parameter not a GC */
BadIDChoice = 14, /* choice not in range or already used */
BadName = 15, /* font or color name doesn't exist */
BadLength = 16, /* Request length incorrect */
BadImplementation = 17, /* server is defective */
FirstExtensionError = 128,
LastExtensionError = 255,
}
[Flags]
| Status |
csharp | dotnet__aspnetcore | src/Http/Routing/src/DecisionTree/DecisionTreeBuilder.cs | {
"start": 7222,
"end": 7957
} | private sealed class ____
{
public TreeBuilderContext()
{
CurrentCriteria = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
MatchedItems = new HashSet<ItemDescriptor<TItem>>();
}
public TreeBuilderContext(TreeBuilderContext other)
{
CurrentCriteria = new HashSet<string>(other.CurrentCriteria, StringComparer.OrdinalIgnoreCase);
MatchedItems = new HashSet<ItemDescriptor<TItem>>();
}
public HashSet<string> CurrentCriteria { get; private set; }
public HashSet<ItemDescriptor<TItem>> MatchedItems { get; private set; }
}
// Subclass just to give a logical name to a mess of generics
| TreeBuilderContext |
csharp | restsharp__RestSharp | src/RestSharp/Serializers/Xml/IXmlSerializer.cs | {
"start": 667,
"end": 752
} | public interface ____ : ISerializer {
string? Namespace { get; set; }
} | IXmlSerializer |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/Query/AdHocAdvancedMappingsQueryTestBase.cs | {
"start": 26690,
"end": 26866
} | public class ____
{
public virtual DateTime Normalize(DateTime dateTime)
=> dateTime.Date;
}
}
#endregion
}
| MyDatetimeConverter |
csharp | dotnet__aspnetcore | src/Mvc/Mvc.Core/test/Routing/ActionEndpointFactoryTest.cs | {
"start": 20597,
"end": 20816
} | private class ____ : IOutboundParameterTransformer
{
public string TransformOutbound(object value)
{
return value?.ToString().ToUpperInvariant();
}
}
}
| UpperCaseParameterTransform |
csharp | unoplatform__uno | src/Uno.UI/UI/Xaml/Shapes/Shape.wasm.cs | {
"start": 1023,
"end": 12364
} | partial class ____
{
private protected string? _bboxCacheKey;
private readonly SerialDisposable _fillBrushSubscription = new SerialDisposable();
private readonly SerialDisposable _strokeBrushSubscription = new SerialDisposable();
private DefsSvgElement? _defs;
private protected readonly SvgElement _mainSvgElement;
private protected bool _shouldUpdateNative = !FeatureConfiguration.Shape.WasmDelayUpdateUntilFirstArrange;
private UpdateRenderPropertiesHashes _lastRenderHashes = new(null, null, StrokeWidth: 0d.GetHashCode(), null);
protected Shape() : base("svg", isSvg: true)
{
// This constructor shouldn't be used. It exists to match WinUI API surface.
throw new InvalidOperationException("This constructor shouldn't be used.");
}
private protected Shape(string mainSvgElementTag) : base("svg", isSvg: true)
{
_mainSvgElement = new SvgElement(mainSvgElementTag);
AddChild(_mainSvgElement);
}
private protected void UpdateRender()
{
// We can delay the setting of these property until first arrange pass,
// so to prevent double-updates from both OnPropertyChanged & ArrangeOverride.
// These updates are a little costly as we need to cross the cs-js bridge.
_shouldUpdateNative = true;
// Regarding to caching of GetPathBoundingBox(js: getBBox) result:
// On shapes that depends on it, so: Line, Path, Polygon, Polyline
// all the properties below (at the time of written) has no effect on getBBox:
// > Note that the values of the opacity, visibility, fill, fill-opacity, fill-rule, stroke-dasharray and stroke-dashoffset properties on an element have no effect on the bounding box of an element.
// > -- https://svgwg.org/svg2-draft/coords.html#BoundingBoxes
// while not mentioned, stroke-width doesnt affect getBBox neither (for the 4 classes of shape mentioned above).
// StrokeThickness can alter getBBox on Ellipse and Rectangle, but we dont use getBBox in these two.
// nested subscriptions of SolidColorBrush::{ Color, Opacity } for Fill and Stroke
// are done in OnFillChanged/OnStrokeChanged which in turns calls OnFillBrushChanged/OnStrokeBrushChanged
// on brush changes and on nested properties changes.
var hashes = new UpdateRenderPropertiesHashes
(
Fill: GetHashOfInterestFor(GetActualFill()),
Stroke: GetHashOfInterestFor(Stroke),
StrokeWidth: ActualStrokeThickness.GetHashCode(),
StrokeDashArray: GetHashOfInterestFor(StrokeDashArray)
);
switch (
_lastRenderHashes.Fill != hashes.Fill,
_lastRenderHashes.Stroke != hashes.Stroke,
_lastRenderHashes.StrokeWidth != hashes.StrokeWidth,
_lastRenderHashes.StrokeDashArray != hashes.StrokeDashArray
)
{
case (true, false, false, false): UpdateSvgFill(); break;
case (false, true, false, false): UpdateSvgStroke(); break;
case (false, false, true, false): UpdateSvgStrokeWidth(); break;
case (false, false, false, true): UpdateSvgStrokeDashArray(); break;
case (true, true, false, false): UpdateSvgFillAndStroke(); break;
case (false, false, false, false): return;
default: UpdateSvgEverything(); break;
}
_lastRenderHashes = hashes;
// todo@xy: we need to ensure dp-of-interests guarantees an arrange call if changed
}
private void OnFillBrushChanged()
{
if (!_shouldUpdateNative) return;
var hash = GetHashOfInterestFor(GetActualFill());
if (hash != _lastRenderHashes.Fill)
{
UpdateSvgFill();
_lastRenderHashes = _lastRenderHashes with { Fill = hash };
}
}
private void OnStrokeBrushChanged()
{
if (!_shouldUpdateNative) return;
var hash = GetHashOfInterestFor(Stroke);
if (hash != _lastRenderHashes.Stroke)
{
UpdateSvgStroke();
_lastRenderHashes = _lastRenderHashes with { Stroke = hash };
}
}
private void UpdateSvgFill()
{
if (!_shouldUpdateNative) return;
UpdateHitTestVisibility();
var (color, def) = GetBrushImpl(GetActualFill());
_fillBrushSubscription.Disposable = TryAppendBrushDef(def);
WindowManagerInterop.SetShapeFillStyle(_mainSvgElement.HtmlId, color?.ToCssIntegerAsInt(), def?.Def.HtmlId);
}
private void UpdateSvgStroke()
{
if (!_shouldUpdateNative) return;
var (color, def) = GetBrushImpl(Stroke);
_strokeBrushSubscription.Disposable = TryAppendBrushDef(def);
WindowManagerInterop.SetShapeStrokeStyle(_mainSvgElement.HtmlId, color?.ToCssIntegerAsInt(), def?.Def.HtmlId);
}
private void UpdateSvgStrokeWidth()
{
if (!_shouldUpdateNative) return;
WindowManagerInterop.SetShapeStrokeWidthStyle(_mainSvgElement.HtmlId, ActualStrokeThickness);
}
private void UpdateSvgStrokeDashArray()
{
if (!_shouldUpdateNative) return;
WindowManagerInterop.SetShapeStrokeDashArrayStyle(_mainSvgElement.HtmlId, StrokeDashArray?.ToArray() ?? Array.Empty<double>());
}
private void UpdateSvgFillAndStroke()
{
if (!_shouldUpdateNative) return;
var fillImpl = GetBrushImpl(GetActualFill());
var strokeImpl = GetBrushImpl(Stroke);
_fillBrushSubscription.Disposable = TryAppendBrushDef(fillImpl.Def);
_strokeBrushSubscription.Disposable = TryAppendBrushDef(strokeImpl.Def);
WindowManagerInterop.SetShapeStylesFast1(
_mainSvgElement.HtmlId,
fillImpl.Color?.ToCssIntegerAsInt(), fillImpl.Def?.Def.HtmlId,
strokeImpl.Color?.ToCssIntegerAsInt(), strokeImpl.Def?.Def.HtmlId
);
}
private void UpdateSvgEverything()
{
if (!_shouldUpdateNative) return;
var fillImpl = GetBrushImpl(GetActualFill());
var strokeImpl = GetBrushImpl(Stroke);
_fillBrushSubscription.Disposable = TryAppendBrushDef(fillImpl.Def);
_strokeBrushSubscription.Disposable = TryAppendBrushDef(strokeImpl.Def);
WindowManagerInterop.SetShapeStylesFast2(
_mainSvgElement.HtmlId,
fillImpl.Color?.ToCssIntegerAsInt(), fillImpl.Def?.Def.HtmlId,
strokeImpl.Color?.ToCssIntegerAsInt(), strokeImpl.Def?.Def.HtmlId, ActualStrokeThickness, StrokeDashArray?.ToArray() ?? Array.Empty<double>()
);
}
private void UpdateHitTestVisibility()
{
// We don't request an update of the HitTest (UpdateHitTest()) since this element is never expected to be hit testable.
// Note: We also enforce that the default hit test == false is not altered in the OnHitTestVisibilityChanged.
// Instead we explicitly set the IsHitTestVisible on each child SvgElement
var fill = Fill;
// Known issue: The hit test is only linked to the Fill, but should also take in consideration the Stroke and the StrokeThickness.
// Note: _mainSvgElement and _defs are internal elements, so it's legit to alter the IsHitTestVisible here.
_mainSvgElement.IsHitTestVisible = fill != null;
if (_defs is not null)
{
_defs.IsHitTestVisible = fill != null;
}
}
/// <summary>
/// Gets host for non-visual elements
/// </summary>
private UIElementCollection GetDefs()
{
if (_defs == null)
{
_defs = new DefsSvgElement();
AddChild(_defs);
}
return _defs.Defs;
}
private static Rect GetPathBoundingBox(Shape shape)
{
if (FeatureConfiguration.Shape.WasmCacheBBoxCalculationResult)
{
var key = shape.GetBBoxCacheKey();
if (!string.IsNullOrEmpty(key))
{
if (!_bboxCache.TryGetValue(key, out var rect))
{
_bboxCache[key] = rect = shape._mainSvgElement.GetBBox();
}
return rect;
}
}
var result = shape._mainSvgElement.GetBBox();
return result;
}
private protected void Render(Shape? shape, Size? size = null, double scaleX = 1d, double scaleY = 1d, double renderOriginX = 0d, double renderOriginY = 0d)
{
Debug.Assert(shape == this);
var scale = Matrix3x2.CreateScale((float)scaleX, (float)scaleY);
var translate = Matrix3x2.CreateTranslation((float)renderOriginX, (float)renderOriginY);
var matrix = scale * translate;
_mainSvgElement.SetNativeTransform(matrix);
}
internal override bool HitTest(Point relativePosition)
{
var considerFill = Fill != null;
// TODO: Verify if this should also consider StrokeThickness (likely it should)
var considerStroke = Stroke != null;
return (considerFill || considerStroke) &&
WindowManagerInterop.ContainsPoint(_mainSvgElement.HtmlId, relativePosition.X, relativePosition.Y, considerFill, considerStroke);
}
// lazy impl, and _cacheKey can be invalidated by setting to null
private string? GetBBoxCacheKey() => _bboxCacheKey ?? (_bboxCacheKey = GetBBoxCacheKeyImpl());
// note: perf is of concern here. avoid $"string interpolation" and current-culture .ToString, and use string.concat and ToStringInvariant
private protected abstract string? GetBBoxCacheKeyImpl();
private Brush GetActualFill()
{
// The default is black if the style is not set in Web's' SVG. So if the Fill property is not set,
// we explicitly set the style to transparent in order to match the UWP behavior.
return Fill ?? SolidColorBrushHelper.Transparent;
}
private (Color? Color, BrushDef? Def) GetBrushImpl(Brush brush) => brush switch // todo@xy: fix the name...
{
SolidColorBrush scb => (scb.ColorWithOpacity, null),
ImageBrush ib => (null, ib.ToSvgElement(this)),
AcrylicBrush ab => (ab.FallbackColorWithOpacity, null),
LinearGradientBrush lgb => (null, (lgb.ToSvgElement(), null)),
RadialGradientBrush rgb => (null, (rgb.ToSvgElement(), null)),
// The default is black if the style is not set in Web's' SVG. So if the Fill property is not set,
// we explicitly set the style to transparent in order to match the UWP behavior.
null => (null, null),
_ => default,
};
private IDisposable? TryAppendBrushDef(BrushDef? def)
{
if (def is not { } d) return null;
GetDefs().Add(d.Def);
return new DisposableAction(() =>
{
GetDefs().Remove(d.Def);
d.InnerSubscription?.Dispose();
});
}
private static int? GetHashOfInterestFor(Brush brush)
{
int GetLGBHash(LinearGradientBrush lgb)
{
var hash = new HashCode();
hash.Add(lgb.StartPoint);
hash.Add(lgb.EndPoint);
if (lgb.GradientStops is { Count: > 0 })
{
foreach (var stop in lgb.GradientStops)
{
hash.Add(stop);
}
}
return hash.ToHashCode();
}
int GetRGBHash(RadialGradientBrush rgb)
{
var hash = new HashCode();
hash.Add(rgb.Center);
hash.Add(rgb.RadiusX);
hash.Add(rgb.RadiusX);
if (rgb.GradientStops is { Count: > 0 })
{
foreach (var stop in rgb.GradientStops)
{
hash.Add(stop);
}
}
return hash.ToHashCode();
}
return brush switch
{
SolidColorBrush scb => scb.ColorWithOpacity.GetHashCode(),
// We don't care about the nested properties of ImageBrush,
// because their changes will be updated through ImageBrush::ToSvgElement subscriptions.
// So an object's reference hash is good here.
ImageBrush ib => ib.GetHashCode(),
LinearGradientBrush lgb => GetLGBHash(lgb),
RadialGradientBrush rgb => GetRGBHash(rgb),
AcrylicBrush ab => ab.FallbackColorWithOpacity.GetHashCode(),
_ => null,
};
}
private static int? GetHashOfInterestFor(DoubleCollection doubles)
{
if (doubles is not { Count: > 0 })
{
return null;
}
var hash = new HashCode();
foreach (var item in doubles)
{
hash.Add(item);
}
return hash.ToHashCode();
}
}
}
| Shape |
csharp | unoplatform__uno | src/Uno.UWP/Collections/HashtableEx.cs | {
"start": 36614,
"end": 37044
} | internal sealed class ____
{
private readonly HashtableEx _hashtable;
public HashtableDebugView(HashtableEx hashtable)
{
if (hashtable == null)
{
throw new ArgumentNullException(nameof(hashtable));
}
_hashtable = hashtable;
}
[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
public KeyValuePairs[] Items => _hashtable.ToKeyValuePairsArray();
}
}
internal static | HashtableDebugView |
csharp | dotnet__efcore | src/EFCore.Relational/Query/SqlExpressions/SelectExpression.Helper.cs | {
"start": 8524,
"end": 8971
} | private sealed class ____ : IEqualityComparer<(ColumnExpression Column, ValueComparer Comparer)>
{
public bool Equals((ColumnExpression Column, ValueComparer Comparer) x, (ColumnExpression Column, ValueComparer Comparer) y)
=> x.Column.Equals(y.Column);
public int GetHashCode((ColumnExpression Column, ValueComparer Comparer) obj)
=> obj.Column.GetHashCode();
}
private readonly | IdentifierComparer |
csharp | dotnet__machinelearning | src/Microsoft.Data.Analysis/PrimitiveDataFrameColumn.BinaryOperationAPIs.ExplodedColumns.cs | {
"start": 443234,
"end": 443789
} | public partial class ____
{
public BooleanDataFrameColumn Or(BooleanDataFrameColumn column, bool inPlace = false)
{
if (column.Length != Length)
{
throw new ArgumentException(Strings.MismatchedColumnLengths, nameof(column));
}
BooleanDataFrameColumn retColumn = inPlace ? this : CloneAsBooleanColumn();
retColumn.ColumnContainer.HandleOperation(BinaryOperation.Or, column.ColumnContainer);
return retColumn;
}
}
| BooleanDataFrameColumn |
csharp | dotnet__aspnetcore | src/Mvc/Mvc.RazorPages/src/Diagnostics/MvcDiagnostics.cs | {
"start": 21201,
"end": 23254
} | public sealed class ____ : EventData
{
/// <summary>
/// Name of the event.
/// </summary>
public const string EventName = EventNamespace + "BeforeOnPageHandlerSelected";
/// <summary>
/// Initializes a new instance of <see cref="BeforePageFilterOnPageHandlerSelectedEventData"/>.
/// </summary>
/// <param name="actionDescriptor">The <see cref="CompiledPageActionDescriptor"/>.</param>
/// <param name="handlerSelectedContext">The <see cref="PageHandlerSelectedContext"/>.</param>
/// <param name="filter">The <see cref="IPageFilter"/>.</param>
public BeforePageFilterOnPageHandlerSelectedEventData(CompiledPageActionDescriptor actionDescriptor, PageHandlerSelectedContext handlerSelectedContext, IPageFilter filter)
{
ActionDescriptor = actionDescriptor;
HandlerSelectedContext = handlerSelectedContext;
Filter = filter;
}
/// <summary>
/// The <see cref="CompiledPageActionDescriptor"/>.
/// </summary>
public CompiledPageActionDescriptor ActionDescriptor { get; }
/// <summary>
/// The <see cref="PageHandlerSelectedContext"/>.
/// </summary>
public PageHandlerSelectedContext HandlerSelectedContext { get; }
/// <summary>
/// The <see cref="IPageFilter"/>.
/// </summary>
public IPageFilter Filter { get; }
/// <inheritdoc/>
protected override int Count => 3;
/// <inheritdoc/>
protected override KeyValuePair<string, object> this[int index] => index switch
{
0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
1 => new KeyValuePair<string, object>(nameof(HandlerSelectedContext), HandlerSelectedContext),
2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
_ => throw new ArgumentOutOfRangeException(nameof(index))
};
}
/// <summary>
/// An <see cref="EventData"/> that occurs after <see cref="IPageFilter.OnPageHandlerSelected(PageHandlerSelectedContext)"/>.
/// </summary>
| BeforePageFilterOnPageHandlerSelectedEventData |
csharp | dotnet__reactive | AsyncRx.NET/System.Reactive.Async/Joins/AsyncPlan.Generated.cs | {
"start": 30451,
"end": 31409
} | internal sealed class ____<TSource1, TSource2, TSource3, TSource4, TSource5, TSource6, TSource7, TSource8, TResult> : AsyncPlanBase<TSource1, TSource2, TSource3, TSource4, TSource5, TSource6, TSource7, TSource8, TResult>
{
private readonly Func<TSource1, TSource2, TSource3, TSource4, TSource5, TSource6, TSource7, TSource8, TResult> _selector;
internal AsyncPlan(AsyncPattern<TSource1, TSource2, TSource3, TSource4, TSource5, TSource6, TSource7, TSource8> expression, Func<TSource1, TSource2, TSource3, TSource4, TSource5, TSource6, TSource7, TSource8, TResult> selector)
: base(expression)
{
_selector = selector;
}
protected override ValueTask<TResult> EvalAsync(TSource1 arg1, TSource2 arg2, TSource3 arg3, TSource4 arg4, TSource5 arg5, TSource6 arg6, TSource7 arg7, TSource8 arg8) => new ValueTask<TResult>(_selector(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8));
}
| AsyncPlan |
csharp | abpframework__abp | test/DistEvents/DistDemoApp.Shared/DemoService.cs | {
"start": 146,
"end": 799
} | public class ____ : ITransientDependency
{
private readonly IRepository<TodoItem, Guid> _todoItemRepository;
public DemoService(IRepository<TodoItem, Guid> todoItemRepository)
{
_todoItemRepository = todoItemRepository;
}
public async Task CreateTodoItemAsync()
{
var todoItem = await _todoItemRepository.InsertAsync(
new TodoItem
{
Text = "todo item " + DateTime.Now.Ticks
}
);
Console.WriteLine("Created a new todo item: " + todoItem);
}
}
} | DemoService |
csharp | dotnet__machinelearning | src/Microsoft.ML.Data/Transforms/Hashing.cs | {
"start": 86966,
"end": 98398
} | public sealed class ____
{
/// <summary>
/// Name of the column resulting from the transformation of <see cref="InputColumnName"/>.
/// </summary>
public string Name { get; set; }
/// <summary> Name of column to transform.</summary>
public string InputColumnName { get; set; }
/// <summary> Number of bits to hash into. Must be between 1 and 31, inclusive.</summary>
public int NumberOfBits { get; set; }
/// <summary> Hashing seed.</summary>
public uint Seed { get; set; }
/// <summary> Whether the position of each term should be included in the hash, only applies to inputs of vector type.</summary>
public bool UseOrderedHashing { get; set; }
/// <summary>
/// During hashing we constuct mappings between original values and the produced hash values.
/// Text representation of original values are stored in the key names of the annotations for the new column. Hashing, as such, can map many initial values to one.
/// <see cref="MaximumNumberOfInverts"/> specifies the upper bound of the number of distinct input values mapping to a hash that should be retained.
/// <value>0</value> does not retain any input values. <value>-1</value> retains all input values mapping to each hash.
/// </summary>
public int MaximumNumberOfInverts { get; set; }
/// <summary>
/// Whether the slots of a vector column should be hashed into a single value.
/// </summary>
public bool Combine { get; set; }
private const uint VersionNoCombineOption = 0x00010003;
/// <summary>
/// Describes how the transformer handles one column pair.
/// </summary>
/// <param name="name">Name of the column resulting from the transformation of <paramref name="inputColumnName"/>.</param>
/// <param name="inputColumnName">Name of column to transform. If set to <see langword="null"/>, the value of the <paramref name="name"/> will be used as source.</param>
/// <param name="numberOfBits">Number of bits to hash into. Must be between 1 and 31, inclusive.</param>
/// <param name="seed">Hashing seed.</param>
/// <param name="useOrderedHashing">Whether the position of each term should be included in the hash, only applies to inputs of vector type.</param>
/// <param name="maximumNumberOfInverts">During hashing we construct mappings between original values and the produced hash values.
/// Text representation of original values are stored in the slot names of the annotations for the new column.Hashing, as such, can map many initial values to one.
/// <paramref name="maximumNumberOfInverts"/> specifies the upper bound of the number of distinct input values mapping to a hash that should be retained.
/// <value>0</value> does not retain any input values. <value>-1</value> retains all input values mapping to each hash.</param>
/// <param name="combine">Whether the slots of a vector column should be hashed into a single value.</param>
public ColumnOptions(string name,
string inputColumnName = null,
int numberOfBits = Defaults.NumberOfBits,
uint seed = Defaults.Seed,
bool useOrderedHashing = Defaults.UseOrderedHashing,
int maximumNumberOfInverts = Defaults.MaximumNumberOfInverts,
bool combine = Defaults.Combine)
{
if (maximumNumberOfInverts < -1)
throw Contracts.ExceptParam(nameof(maximumNumberOfInverts), "Value too small, must be -1 or larger");
if (maximumNumberOfInverts != 0 && numberOfBits >= 31)
throw Contracts.ExceptParam(nameof(numberOfBits), $"Cannot support maximumNumberOfInverts for a {0} bit hash. 30 is the maximum possible.", numberOfBits);
Contracts.CheckNonWhiteSpace(name, nameof(name));
Name = name;
InputColumnName = inputColumnName ?? name;
NumberOfBits = numberOfBits;
Seed = seed;
UseOrderedHashing = useOrderedHashing;
MaximumNumberOfInverts = maximumNumberOfInverts;
Combine = combine;
}
internal ColumnOptions(string name, string inputColumnName, ModelLoadContext ctx)
{
Name = name;
InputColumnName = inputColumnName;
// *** Binary format ***
// int: NumberOfBits
// uint: HashSeed
// byte: Ordered
// byte: Combine
NumberOfBits = ctx.Reader.ReadInt32();
Contracts.CheckDecode(NumBitsMin <= NumberOfBits && NumberOfBits < NumBitsLim);
Seed = ctx.Reader.ReadUInt32();
UseOrderedHashing = ctx.Reader.ReadBoolByte();
if (ctx.Header.ModelVerWritten > VersionNoCombineOption)
Combine = ctx.Reader.ReadBoolByte();
}
internal void Save(ModelSaveContext ctx)
{
// *** Binary format ***
// int: NumberOfBits
// uint: HashSeed
// byte: Ordered
// byte: Combine
Contracts.Assert(NumBitsMin <= NumberOfBits && NumberOfBits < NumBitsLim);
ctx.Writer.Write(NumberOfBits);
ctx.Writer.Write(Seed);
ctx.Writer.WriteBoolByte(UseOrderedHashing);
ctx.Writer.WriteBoolByte(Combine);
}
}
private readonly IHost _host;
private readonly ColumnOptions[] _columns;
internal static bool IsColumnTypeValid(DataViewType type)
{
var itemType = type.GetItemType();
return itemType is TextDataViewType || itemType is KeyDataViewType || itemType is NumberDataViewType ||
itemType is BooleanDataViewType || itemType is RowIdDataViewType;
}
internal const string ExpectedColumnType = "Expected Text, Key, numeric, Boolean or DataViewRowId item type";
/// <summary>
/// Initializes a new instance of <see cref="HashingEstimator"/>.
/// </summary>
/// <param name="env">Host Environment.</param>
/// <param name="outputColumnName">Name of the column resulting from the transformation of <paramref name="inputColumnName"/>.</param>
/// <param name="inputColumnName">Name of the column to transform.
/// If set to <see langword="null"/>, the value of the <paramref name="outputColumnName"/> will be used as source.</param>
/// <param name="numberOfBits">Number of bits to hash into. Must be between 1 and 31, inclusive.</param>
/// <param name="maximumNumberOfInverts">During hashing we construct mappings between original values and the produced hash values.
/// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one.
/// <paramref name="maximumNumberOfInverts"/> specifies the upper bound of the number of distinct input values mapping to a hash that should be retained.
/// <value>0</value> does not retain any input values. <value>-1</value> retains all input values mapping to each hash.</param>
/// <param name="useOrderedHashing">Whether the position of each term should be included in the hash, only applies to inputs of vector type.</param>
/// <param name="combine">Whether the slots of a vector column should be hashed into a single value.</param>
internal HashingEstimator(IHostEnvironment env, string outputColumnName, string inputColumnName = null,
int numberOfBits = Defaults.NumberOfBits, int maximumNumberOfInverts = Defaults.MaximumNumberOfInverts,
bool useOrderedHashing = Defaults.UseOrderedHashing, bool combine = Defaults.Combine)
: this(env, new ColumnOptions(outputColumnName, inputColumnName ?? outputColumnName,
numberOfBits: numberOfBits, useOrderedHashing: useOrderedHashing, maximumNumberOfInverts: maximumNumberOfInverts, combine: combine))
{
}
/// <summary>
/// Initializes a new instance of <see cref="HashingEstimator"/>.
/// </summary>
/// <param name="env">Host Environment.</param>
/// <param name="columns">Description of dataset columns and how to process them.</param>
[BestFriend]
internal HashingEstimator(IHostEnvironment env, params ColumnOptions[] columns)
{
Contracts.CheckValue(env, nameof(env));
_host = env.Register(nameof(HashingEstimator));
_columns = columns;
// Validate the options.
foreach (var columnOptions in _columns)
{
if (columnOptions.Combine && columnOptions.MaximumNumberOfInverts != 0)
throw _host.ExceptParam(nameof(ColumnOptions.Combine), "When the 'Combine' option is specified, invert hashes are not supported.");
if (columnOptions.Combine && columnOptions.UseOrderedHashing)
throw _host.ExceptParam(nameof(ColumnOptions.Combine), "When the 'Combine' option is specified, ordered hashing is not supported.");
}
}
/// <summary>
/// Trains and returns a <see cref="HashingTransformer"/>.
/// </summary>
public HashingTransformer Fit(IDataView input) => new HashingTransformer(_host, input, _columns);
/// <summary>
/// Returns the <see cref="SchemaShape"/> of the schema which will be produced by the transformer.
/// Used for schema propagation and verification in a pipeline.
/// </summary>
public SchemaShape GetOutputSchema(SchemaShape inputSchema)
{
_host.CheckValue(inputSchema, nameof(inputSchema));
var result = inputSchema.ToDictionary(x => x.Name);
foreach (var colInfo in _columns)
{
if (!inputSchema.TryFindColumn(colInfo.InputColumnName, out var col))
throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", colInfo.InputColumnName);
if (!IsColumnTypeValid(col.ItemType))
throw _host.ExceptParam(nameof(inputSchema), ExpectedColumnType);
var metadata = new List<SchemaShape.Column>();
if (!colInfo.Combine && col.Annotations.TryFindColumn(AnnotationUtils.Kinds.SlotNames, out var slotMeta))
metadata.Add(slotMeta);
if (colInfo.MaximumNumberOfInverts != 0)
metadata.Add(new SchemaShape.Column(AnnotationUtils.Kinds.KeyValues, SchemaShape.Column.VectorKind.Vector, TextDataViewType.Instance, false));
result[colInfo.Name] = new SchemaShape.Column(colInfo.Name, colInfo.Combine ? SchemaShape.Column.VectorKind.Scalar : col.Kind,
NumberDataViewType.UInt32, true, new SchemaShape(metadata));
}
return new SchemaShape(result.Values);
}
}
}
| ColumnOptions |
csharp | EventStore__EventStore | src/KurrentDB.Core/Messages/ReplicationMessage.cs | {
"start": 574,
"end": 654
} | partial class ____ {
[DerivedMessage(CoreMessage.Replication)]
| ReplicationMessage |
csharp | dotnet__aspnetcore | src/Mvc/Mvc.Api.Analyzers/test/TestFiles/AddResponseTypeAttributeCodeFixProviderIntegrationTest/CodeFixAddsStatusCodesFromConstructorParameters.Output.cs | {
"start": 294,
"end": 1417
} | public class ____ : ControllerBase
{
private const int FieldStatusCode = 201;
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status201Created)]
[ProducesResponseType(StatusCodes.Status202Accepted)]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status422UnprocessableEntity)]
[ProducesDefaultResponseType]
public IActionResult GetItem(int id)
{
if (id == 0)
{
return new StatusCodeResult(422);
}
if (id == 1)
{
return new StatusCodeResult(StatusCodes.Status202Accepted);
}
if (id == 2)
{
const int localStatusCode = 204;
return new StatusCodeResult(localStatusCode);
}
if (id == 3)
{
return new StatusCodeResult(FieldStatusCode);
}
return Ok(new object());
}
}
}
| CodeFixAddsStatusCodesFromConstructorParametersController |
csharp | dotnet__efcore | test/EFCore.Tests/Metadata/Conventions/RelationshipDiscoveryConventionTest.cs | {
"start": 61881,
"end": 62040
} | private class ____
{
public int Id { get; set; }
public IEnumerable<ManyToManyFirst> ManyToManyFirsts { get; set; }
}
| ManyToManySecond |
csharp | fluentassertions__fluentassertions | Src/FluentAssertions/Equivalency/IAssertionContext.cs | {
"start": 250,
"end": 1329
} | public interface ____<TSubject>
{
/// <summary>
/// Gets the <see cref="IMember"/> of the member that returned the current object, or <see langword="null"/> if the current
/// object represents the root object.
/// </summary>
INode SelectedNode { get; }
/// <summary>
/// Gets the value of the <see cref="SelectedNode" />
/// </summary>
TSubject Subject { get; }
/// <summary>
/// Gets the value of the expectation object that was matched with the subject using a <see cref="IMemberMatchingRule"/>.
/// </summary>
TSubject Expectation { get; }
/// <summary>
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])"/> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </summary>
string Because { get; set; }
/// <summary>
/// Zero or more objects to format using the placeholders in <see cref="Because" />.
/// </summary>
object[] BecauseArgs { get; set; }
}
| IAssertionContext |
csharp | ServiceStack__ServiceStack | ServiceStack.OrmLite/tests/ServiceStack.OrmLite.Oracle.Tests/OracleParamTests.cs | {
"start": 28831,
"end": 29077
} | public class ____ : Param<ParamRaw>
{
public int Raw { get; set; }
public override void SetValue(int value) { Raw = value; }
public override ParamRaw GetObject() { return this; }
}
| ParamRaw |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Core/src/Types.Analyzers/Errors.cs | {
"start": 4866,
"end": 7209
} | class ____ be declared as partial to allow source generation",
category: "TypeSystem",
DiagnosticSeverity.Info,
isEnabledByDefault: true);
public static readonly DiagnosticDescriptor NodeResolverIdAttributeNotAllowed =
new(
id: "HC0092",
title: "ID Attribute Not Allowed",
messageFormat: "The [ID] attribute should not be used on node resolver parameters as the NodeResolver attribute already declares the parameter as an ID type",
category: "TypeSystem",
DiagnosticSeverity.Error,
isEnabledByDefault: true);
public static readonly DiagnosticDescriptor NodeResolverMustBePublic =
new(
id: "HC0093",
title: "Node Resolver Must Be Public",
messageFormat: "A node resolver method must be public",
category: "TypeSystem",
DiagnosticSeverity.Error,
isEnabledByDefault: true);
public static readonly DiagnosticDescriptor BindMemberNotFound =
new(
id: "HC0094",
title: "Bind Member Not Found",
messageFormat: "The member '{0}' does not exist on type '{1}'",
category: "TypeSystem",
DiagnosticSeverity.Error,
isEnabledByDefault: true);
public static readonly DiagnosticDescriptor BindMemberTypeMismatch =
new(
id: "HC0095",
title: "Bind Member Type Mismatch",
messageFormat: "The type '{0}' in nameof expression does not match the ObjectType type '{1}'",
category: "TypeSystem",
DiagnosticSeverity.Error,
isEnabledByDefault: true);
public static readonly DiagnosticDescriptor ExtendObjectTypeShouldBeUpgraded =
new(
id: "HC0096",
title: "ExtendObjectType Should Be Upgraded",
messageFormat: "Consider upgrading [ExtendObjectType<{0}>] to [ObjectType<{0}>]",
category: "TypeSystem",
DiagnosticSeverity.Info,
isEnabledByDefault: true);
public static readonly DiagnosticDescriptor ParentAttributeTypeMismatch =
new(
id: "HC0097",
title: "Parent Attribute Type Mismatch",
messageFormat: "The parameter type '{0}' must be '{1}' or a base type/ | should |
csharp | dotnet__aspnetcore | src/Mvc/Mvc.Core/src/ProducesResponseTypeOfTAttribute.cs | {
"start": 309,
"end": 1255
} | public class ____<T> : ProducesResponseTypeAttribute
{
/// <summary>
/// Initializes an instance of <see cref="ProducesResponseTypeAttribute"/>.
/// </summary>
/// <param name="statusCode">The HTTP response status code.</param>
public ProducesResponseTypeAttribute(int statusCode) : base(typeof(T), statusCode) { }
/// <summary>
/// Initializes an instance of <see cref="ProducesResponseTypeAttribute"/>.
/// </summary>
/// <param name="statusCode">The HTTP response status code.</param>
/// <param name="contentType">The content type associated with the response.</param>
/// <param name="additionalContentTypes">Additional content types supported by the response.</param>
public ProducesResponseTypeAttribute(int statusCode, string contentType, params string[] additionalContentTypes)
: base(typeof(T), statusCode, contentType, additionalContentTypes) { }
}
| ProducesResponseTypeAttribute |
csharp | dotnet__aspnetcore | src/Http/Headers/src/ContentRangeHeaderValue.cs | {
"start": 428,
"end": 13203
} | public class ____
{
private static readonly HttpHeaderParser<ContentRangeHeaderValue> Parser
= new GenericHeaderParser<ContentRangeHeaderValue>(false, GetContentRangeLength);
private StringSegment _unit;
private ContentRangeHeaderValue()
{
// Used by the parser to create a new instance of this type.
}
/// <summary>
/// Initializes a new instance of <see cref="ContentRangeHeaderValue"/>.
/// </summary>
/// <param name="from">The start of the range.</param>
/// <param name="to">The end of the range.</param>
/// <param name="length">The total size of the document in bytes.</param>
public ContentRangeHeaderValue(long from, long to, long length)
{
// Scenario: "Content-Range: bytes 12-34/5678"
ArgumentOutOfRangeException.ThrowIfNegative(length);
// "To" is inclusive. Per RFC 7233:
// A Content-Range field value is invalid if it contains a byte-range-resp that has a
// last-byte-pos value less than its first-byte-pos value, or a complete-length value
// less than or equal to its last-byte-pos value.
if ((to < 0) || (length <= to))
{
throw new ArgumentOutOfRangeException(nameof(to));
}
if ((from < 0) || (from > to))
{
throw new ArgumentOutOfRangeException(nameof(from));
}
From = from;
To = to;
Length = length;
_unit = HeaderUtilities.BytesUnit;
}
/// <summary>
/// Initializes a new instance of <see cref="ContentRangeHeaderValue"/>.
/// </summary>
/// <param name="length">The total size of the document in bytes.</param>
public ContentRangeHeaderValue(long length)
{
// Scenario: "Content-Range: bytes */1234"
ArgumentOutOfRangeException.ThrowIfNegative(length);
Length = length;
_unit = HeaderUtilities.BytesUnit;
}
/// <summary>
/// Initializes a new instance of <see cref="ContentRangeHeaderValue"/>.
/// </summary>
/// <param name="from">The start of the range.</param>
/// <param name="to">The end of the range.</param>
public ContentRangeHeaderValue(long from, long to)
{
// Scenario: "Content-Range: bytes 12-34/*"
ArgumentOutOfRangeException.ThrowIfNegative(to);
if ((from < 0) || (from > to))
{
throw new ArgumentOutOfRangeException(nameof(@from));
}
From = from;
To = to;
_unit = HeaderUtilities.BytesUnit;
}
/// <summary>
/// Gets or sets the unit in which ranges are specified.
/// </summary>
/// <value>Defaults to <c>bytes</c>.</value>
public StringSegment Unit
{
get { return _unit; }
set
{
HeaderUtilities.CheckValidToken(value, nameof(value));
_unit = value;
}
}
/// <summary>
/// Gets the start of the range.
/// </summary>
public long? From { get; private set; }
/// <summary>
/// Gets the end of the range.
/// </summary>
public long? To { get; private set; }
/// <summary>
/// Gets the total size of the document.
/// </summary>
[NotNullIfNotNull(nameof(Length))]
public long? Length { get; private set; }
/// <summary>
/// Gets a value that determines if <see cref="Length"/> has been specified.
/// </summary>
[MemberNotNullWhen(true, nameof(Length))]
public bool HasLength // e.g. "Content-Range: bytes 12-34/*"
{
get { return Length != null; }
}
/// <summary>
/// Gets a value that determines if <see cref="From"/> and <see cref="To"/> have been specified.
/// </summary>
[MemberNotNullWhen(true, nameof(From), nameof(To))]
public bool HasRange // e.g. "Content-Range: bytes */1234"
{
get { return From != null && To != null; }
}
/// <inheritdoc/>
public override bool Equals(object? obj)
{
var other = obj as ContentRangeHeaderValue;
if (other == null)
{
return false;
}
return ((From == other.From) && (To == other.To) && (Length == other.Length) &&
StringSegment.Equals(Unit, other.Unit, StringComparison.OrdinalIgnoreCase));
}
/// <inheritdoc/>
public override int GetHashCode()
{
var result = StringSegmentComparer.OrdinalIgnoreCase.GetHashCode(Unit);
if (HasRange)
{
result = result ^ From.GetHashCode() ^ To.GetHashCode();
}
if (HasLength)
{
result = result ^ Length.GetHashCode();
}
return result;
}
/// <inheritdoc/>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append(Unit.AsSpan());
sb.Append(' ');
if (HasRange)
{
sb.Append(From.GetValueOrDefault().ToString(NumberFormatInfo.InvariantInfo));
sb.Append('-');
sb.Append(To.GetValueOrDefault().ToString(NumberFormatInfo.InvariantInfo));
}
else
{
sb.Append('*');
}
sb.Append('/');
if (HasLength)
{
sb.Append(Length.GetValueOrDefault().ToString(NumberFormatInfo.InvariantInfo));
}
else
{
sb.Append('*');
}
return sb.ToString();
}
/// <summary>
/// Parses <paramref name="input"/> as a <see cref="ContentRangeHeaderValue"/> value.
/// </summary>
/// <param name="input">The values to parse.</param>
/// <returns>The parsed values.</returns>
public static ContentRangeHeaderValue Parse(StringSegment input)
{
var index = 0;
return Parser.ParseValue(input, ref index)!;
}
/// <summary>
/// Attempts to parse the specified <paramref name="input"/> as a <see cref="ContentRangeHeaderValue"/>.
/// </summary>
/// <param name="input">The value to parse.</param>
/// <param name="parsedValue">The parsed value.</param>
/// <returns><see langword="true"/> if input is a valid <see cref="ContentRangeHeaderValue"/>, otherwise <see langword="false"/>.</returns>
public static bool TryParse(StringSegment input, [NotNullWhen(true)] out ContentRangeHeaderValue? parsedValue)
{
var index = 0;
return Parser.TryParseValue(input, ref index, out parsedValue);
}
private static int GetContentRangeLength(StringSegment input, int startIndex, out ContentRangeHeaderValue? parsedValue)
{
Contract.Requires(startIndex >= 0);
parsedValue = null;
if (StringSegment.IsNullOrEmpty(input) || (startIndex >= input.Length))
{
return 0;
}
// Parse the unit string: <unit> in '<unit> <from>-<to>/<length>'
var unitLength = HttpRuleParser.GetTokenLength(input, startIndex);
if (unitLength == 0)
{
return 0;
}
var unit = input.Subsegment(startIndex, unitLength);
var current = startIndex + unitLength;
var separatorLength = HttpRuleParser.GetWhitespaceLength(input, current);
if (separatorLength == 0)
{
return 0;
}
current = current + separatorLength;
if (current == input.Length)
{
return 0;
}
// Read range values <from> and <to> in '<unit> <from>-<to>/<length>'
var fromStartIndex = current;
if (!TryGetRangeLength(input, ref current, out var fromLength, out var toStartIndex, out var toLength))
{
return 0;
}
// After the range is read we expect the length separator '/'
if ((current == input.Length) || (input[current] != '/'))
{
return 0;
}
current++; // Skip '/' separator
current = current + HttpRuleParser.GetWhitespaceLength(input, current);
if (current == input.Length)
{
return 0;
}
// We may not have a length (e.g. 'bytes 1-2/*'). But if we do, parse the length now.
var lengthStartIndex = current;
if (!TryGetLengthLength(input, ref current, out var lengthLength))
{
return 0;
}
if (!TryCreateContentRange(input, unit, fromStartIndex, fromLength, toStartIndex, toLength,
lengthStartIndex, lengthLength, out parsedValue))
{
return 0;
}
return current - startIndex;
}
private static bool TryGetLengthLength(StringSegment input, ref int current, out int lengthLength)
{
lengthLength = 0;
if (input[current] == '*')
{
current++;
}
else
{
// Parse length value: <length> in '<unit> <from>-<to>/<length>'
lengthLength = HttpRuleParser.GetNumberLength(input, current, false);
if ((lengthLength == 0) || (lengthLength > HttpRuleParser.MaxInt64Digits))
{
return false;
}
current = current + lengthLength;
}
current = current + HttpRuleParser.GetWhitespaceLength(input, current);
return true;
}
private static bool TryGetRangeLength(StringSegment input, ref int current, out int fromLength, out int toStartIndex, out int toLength)
{
fromLength = 0;
toStartIndex = 0;
toLength = 0;
// Check if we have a value like 'bytes */133'. If yes, skip the range part and continue parsing the
// length separator '/'.
if (input[current] == '*')
{
current++;
}
else
{
// Parse first range value: <from> in '<unit> <from>-<to>/<length>'
fromLength = HttpRuleParser.GetNumberLength(input, current, false);
if ((fromLength == 0) || (fromLength > HttpRuleParser.MaxInt64Digits))
{
return false;
}
current = current + fromLength;
current = current + HttpRuleParser.GetWhitespaceLength(input, current);
// After the first value, the '-' character must follow.
if ((current == input.Length) || (input[current] != '-'))
{
// We need a '-' character otherwise this can't be a valid range.
return false;
}
current++; // skip the '-' character
current = current + HttpRuleParser.GetWhitespaceLength(input, current);
if (current == input.Length)
{
return false;
}
// Parse second range value: <to> in '<unit> <from>-<to>/<length>'
toStartIndex = current;
toLength = HttpRuleParser.GetNumberLength(input, current, false);
if ((toLength == 0) || (toLength > HttpRuleParser.MaxInt64Digits))
{
return false;
}
current = current + toLength;
}
current = current + HttpRuleParser.GetWhitespaceLength(input, current);
return true;
}
private static bool TryCreateContentRange(
StringSegment input,
StringSegment unit,
int fromStartIndex,
int fromLength,
int toStartIndex,
int toLength,
int lengthStartIndex,
int lengthLength,
[NotNullWhen(true)] out ContentRangeHeaderValue? parsedValue)
{
parsedValue = null;
long from = 0;
if ((fromLength > 0) && !HeaderUtilities.TryParseNonNegativeInt64(input.Subsegment(fromStartIndex, fromLength), out from))
{
return false;
}
long to = 0;
if ((toLength > 0) && !HeaderUtilities.TryParseNonNegativeInt64(input.Subsegment(toStartIndex, toLength), out to))
{
return false;
}
// 'from' must not be greater than 'to'
if ((fromLength > 0) && (toLength > 0) && (from > to))
{
return false;
}
long length = 0;
if ((lengthLength > 0) && !HeaderUtilities.TryParseNonNegativeInt64(input.Subsegment(lengthStartIndex, lengthLength),
out length))
{
return false;
}
// 'from' and 'to' must be less than 'length'
if ((toLength > 0) && (lengthLength > 0) && (to >= length))
{
return false;
}
var result = new ContentRangeHeaderValue();
result._unit = unit;
if (fromLength > 0)
{
result.From = from;
result.To = to;
}
if (lengthLength > 0)
{
result.Length = length;
}
parsedValue = result;
return true;
}
}
| ContentRangeHeaderValue |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/TestModels/ComplexTypeModel/Model.cs | {
"start": 1013,
"end": 1190
} | public class ____
{
public int Id { get; set; }
public required Customer RequiredCustomer { get; set; }
public Customer? OptionalCustomer { get; set; }
}
| CustomerGroup |
csharp | SixLabors__ImageSharp | src/ImageSharp/PixelFormats/PixelImplementations/PixelOperations/Rgb48.PixelOperations.cs | {
"start": 336,
"end": 403
} | internal partial class ____ : PixelOperations<Rgb48>;
}
| PixelOperations |
csharp | ChilliCream__graphql-platform | src/Nitro/CommandLine/src/CommandLine.Cloud/Generated/ApiClient.Client.cs | {
"start": 2279362,
"end": 2279729
} | public partial interface ____ : IOnSchemaVersionValidationUpdated_OnSchemaVersionValidationUpdate_Errors_Changes_Changes_3, IInterfaceImplementationAdded
{
}
[global::System.CodeDom.Compiler.GeneratedCode("StrawberryShake", "15.1.8.0")]
| IOnSchemaVersionValidationUpdated_OnSchemaVersionValidationUpdate_Errors_Changes_Changes_InterfaceImplementationAdded |
csharp | unoplatform__uno | src/Uno.UI/Controls/BindableSwitchCompat.Android.cs | {
"start": 365,
"end": 848
} | public partial class ____ : AndroidX.AppCompat.Widget.SwitchCompat, DependencyObject
{
public BindableSwitchCompat()
: base(ContextHelper.Current)
{
InitializeBinder();
CheckedChange += OnCheckedChange;
TextChanged += OnTextChange;
// TextOn and TextOff properties must be set to an empty string or the following error will happen because the properties are null.
// E / AndroidRuntime(6313): java.lang.NullPointerException: Attempt to invoke | BindableSwitchCompat |
csharp | microsoft__garnet | test/Garnet.test.cluster/ClusterTestUtils.cs | {
"start": 20288,
"end": 123271
} | partial class ____
{
static readonly TimeSpan backoff = TimeSpan.FromSeconds(0.1);
static readonly byte[] bresp_OK = Encoding.ASCII.GetBytes("+OK\r\n");
static readonly byte[] ascii_chars = Encoding.ASCII.GetBytes("abcdefghijklmnopqrstvuwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789");
public Random r;
ConnectionMultiplexer redis = null;
GarnetClientSession[] gcsConnections = null;
readonly EndPointCollection endpoints;
string[] nodeIds;
TextWriter textWriter;
readonly bool allowAdmin;
readonly bool disablePubSub;
readonly bool useTLS;
readonly string authUsername;
readonly string authPassword;
readonly X509CertificateCollection certificates;
readonly ClusterTestContext context;
public ClusterTestUtils(
EndPointCollection endpoints,
ClusterTestContext context = null,
TextWriter textWriter = null,
bool UseTLS = false,
string authUsername = null,
string authPassword = null,
X509CertificateCollection certificates = null)
{
r = new Random(674386);
this.context = context;
this.useTLS = UseTLS;
this.allowAdmin = true;
this.disablePubSub = true;
this.authUsername = authUsername;
this.authPassword = authPassword;
this.textWriter = textWriter;
this.endpoints = endpoints;
this.certificates = certificates;
}
public int HashSlot(RedisKey key)
=> redis.HashSlot(key);
public static void BackOff(TimeSpan timeSpan = default) => Thread.Sleep(timeSpan == default ? backoff : timeSpan);
public static void BackOff(CancellationToken cancellationToken, TimeSpan timeSpan = default, string msg = null)
{
if (cancellationToken.IsCancellationRequested)
ClassicAssert.Fail(msg ?? "Cancellation Requested");
Thread.Sleep(timeSpan == default ? backoff : timeSpan);
}
public void Connect(bool cluster = true, ILogger logger = null)
{
InitMultiplexer(GetRedisConfig(endpoints), textWriter, logger: logger);
if (cluster)
this.nodeIds = GetNodeIds(logger: logger);
}
// Creates the ConnectionMultiplexer; on failure logs and (when failAssert) fails the test.
private void InitMultiplexer(ConfigurationOptions redisConfig, TextWriter textWriter, bool failAssert = true, ILogger logger = null)
{
try
{
redis = ConnectionMultiplexer.Connect(redisConfig, null);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error occurred at InitMultiplexer");
if (failAssert)
Assert.Fail(ex.Message);
}
}
// Builds the StackExchange.Redis configuration from this instance's captured settings.
public ConfigurationOptions GetRedisConfig(EndPointCollection endpoints)
{
return TestUtils.GetConfig(
endpoints,
allowAdmin: allowAdmin,
useTLS: useTLS,
disablePubSub: disablePubSub,
authUsername: authUsername,
authPassword: authPassword,
certificates: certificates);
}
public void Dispose()
{
CloseConnections();
}
// Closes the multiplexer and any cached GarnetClientSession connections.
public void CloseConnections()
{
redis?.Close(false);
redis?.Dispose();
if (gcsConnections != null)
{
foreach (var gcs in gcsConnections)
gcs?.Dispose();
}
}
// Collects each node's id (via NodesMyself), either for all endpoints or a subset.
// The returned array is always endpoints.Count long; unqueried slots stay null.
public string[] GetNodeIds(List<int> nodes = null, ILogger logger = null)
{
string[] nodeIds = new string[endpoints.Count];
if (nodes == null)
{
for (int i = 0; i < nodeIds.Length; i++)
nodeIds[i] = NodesMyself((IPEndPoint)endpoints[i], ClusterInfoTag.NODEID, logger: logger);
}
else
{
for (int i = 0; i < nodes.Count; i++)
{
var j = nodes[i];
nodeIds[j] = NodesMyself((IPEndPoint)endpoints[j], ClusterInfoTag.NODEID, logger: logger);
}
}
return nodeIds;
}
// Tears down existing connections and reconnects, optionally to a subset of the nodes.
public void Reconnect(List<int> nodes = null, TextWriter textWriter = null, ILogger logger = null)
{
CloseConnections();
EndPointCollection endPoints = endpoints;
if (nodes != null)
{
endPoints = new EndPointCollection();
foreach (var nodeIndex in nodes)
{
var endpoint = (IPEndPoint)endpoints[nodeIndex];
endPoints.Add(endpoint.Address, endpoint.Port);
}
}
var connOpts = GetRedisConfig(endPoints);
InitMultiplexer(connOpts, textWriter, logger: logger);
nodeIds = GetNodeIds(nodes, logger);
}
public EndPointCollection GetEndPoints() => endpoints;
public ConnectionMultiplexer GetMultiplexer() => redis;
public IDatabase GetDatabase() => redis.GetDatabase(0);
// Returns the cached GarnetClientSession for a node, creating and connecting it on first use.
public GarnetClientSession GetGarnetClientSession(int nodeIndex, bool useTLS = false)
{
gcsConnections ??= new GarnetClientSession[endpoints.Count];
if (gcsConnections[nodeIndex] == null)
{
SslClientAuthenticationOptions sslOptions = null;
if (useTLS)
{
sslOptions = new SslClientAuthenticationOptions
{
ClientCertificates = [CertificateUtils.GetMachineCertificateByFile(certFile, certPassword)],
TargetHost = "GarnetTest",
AllowRenegotiation = false,
RemoteCertificateValidationCallback = TestUtils.ValidateServerCertificate,
};
}
gcsConnections[nodeIndex] = new GarnetClientSession(GetEndPoint(nodeIndex), new(), tlsOptions: sslOptions);
gcsConnections[nodeIndex].Connect();
}
return gcsConnections[nodeIndex];
}
// Test certificate used for TLS connections.
public const string certFile = "testcert.pfx";
public const string certPassword = "placeholder";
// Creates a fresh (uncached, unconnected) GarnetClientSession for a node.
public GarnetClientSession CreateGarnetClientSession(int nodeIndex, bool useTLS = false)
{
SslClientAuthenticationOptions sslOptions = null;
if (useTLS)
{
sslOptions = new SslClientAuthenticationOptions
{
ClientCertificates = [CertificateUtils.GetMachineCertificateByFile(certFile, certPassword)],
TargetHost = "GarnetTest",
AllowRenegotiation = false,
RemoteCertificateValidationCallback = TestUtils.ValidateServerCertificate,
};
}
return new(endpoints[nodeIndex], new(), tlsOptions: sslOptions);
}
// Endpoint/port lookup helpers over the endpoints collection.
public IServer GetServer(int nodeIndex) => redis.GetServer(GetEndPoint(nodeIndex));
public IServer GetServer(IPEndPoint endPoint) => redis.GetServer(endPoint);
public string GetAddressFromNodeIndex(int nodeIndex) => ((IPEndPoint)endpoints[nodeIndex]).Address.ToString();
public int GetPortFromNodeIndex(int nodeIndex) => ((IPEndPoint)endpoints[nodeIndex]).Port;
// Returns -1 when no endpoint uses the given port.
public int GetNodeIndexFromPort(int port)
{
for (int i = 0; i < endpoints.Count; i++)
if (GetPortFromNodeIndex(i) == port)
return i;
return -1;
}
public IPEndPoint GetEndPoint(int nodeIndex) => (IPEndPoint)endpoints[nodeIndex];
// Throws (First) when no endpoint matches the port.
public IPEndPoint GetEndPointFromPort(int Port) => endpoints.Select(x => (IPEndPoint)x).First(x => x.Port == Port);
// Rerolls random bytes until the resulting key hashes to the requested slot.
// NOTE(review): unbounded loop; terminates probabilistically — fine for 16384 slots.
public void RandomBytesRestrictedToSlot(ref byte[] data, int slot, int startOffset = -1, int endOffset = -1)
{
RandomBytes(ref data, startOffset, endOffset);
while (HashSlot(data) != slot) RandomBytes(ref data, startOffset, endOffset);
}
// Same as above but uses the caller-supplied RNG for the rerolls.
public void RandomBytesRestrictedToSlot(ref Random r, ref byte[] data, int slot, int startOffset = -1, int endOffset = -1)
{
RandomBytes(ref data, startOffset, endOffset);
while (HashSlot(data) != slot) RandomBytes(ref r, ref data, startOffset, endOffset);
}
// Reseeds the shared RNG for reproducible sequences.
public void InitRandom(int seed)
{
r = new Random(seed);
}
// Fills data (or a sub-range) with random printable characters using the shared RNG.
public void RandomBytes(ref byte[] data, int startOffset = -1, int endOffset = -1)
=> RandomBytes(ref r, ref data, startOffset, endOffset);
// Fills data[startOffset, endOffset) with random printable characters from ascii_chars.
// A -1 sentinel for either bound means "start of buffer" / "end of buffer" respectively.
public static void RandomBytes(ref Random r, ref byte[] data, int startOffset = -1, int endOffset = -1)
{
var begin = startOffset == -1 ? 0 : startOffset;
var end = endOffset == -1 ? data.Length : endOffset;
while (begin < end)
data[begin++] = ascii_chars[r.Next(ascii_chars.Length)];
}
// Returns a randomized copy of data, leaving the input untouched.
public byte[] RandomBytes(byte[] data, int startOffset = -1, int endOffset = -1)
{
byte[] newData = new byte[data.Length];
Array.Copy(data, 0, newData, 0, data.Length);
RandomBytes(ref newData, startOffset, endOffset);
return newData;
}
// Random list of up to count-1 values in [0, maxVal); duplicates allowed.
public List<int> RandomList(int count, int maxVal)
{
List<int> list = new List<int>();
var size = r.Next(1, count);
for (int i = 0; i < size; i++)
{
list.Add(r.Next(0, maxVal));
}
return list;
}
// Random set of distinct values in [0, maxVal); result may be smaller than the drawn size.
public List<int> RandomHset(int count, int maxVal)
{
HashSet<int> hset = new HashSet<int>();
var size = r.Next(1, count);
for (int i = 0; i < size; i++)
{
hset.Add(r.Next(0, maxVal));
}
return [.. hset];
}
// Random printable string of the given length.
public string RandomStr(int length, int startOffset = -1, int endOffset = -1)
{
byte[] data = new byte[length];
RandomBytes(ref data, startOffset, endOffset);
return Encoding.ASCII.GetString(data);
}
// Cluster hash slot for a raw key (no multiplexer needed).
public static ushort HashSlot(byte[] key)
{
fixed (byte* ptr = key)
{
byte* keyPtr = ptr;
return HashSlotUtils.HashSlot(keyPtr, key.Length);
}
}
// Random node index different from sourceNodeIndex.
public int GetRandomTargetNodeIndex(ref LightClientRequest[] connections, int sourceNodeIndex)
{
int targetNodeIndex = r.Next(0, connections.Length);
while (targetNodeIndex == sourceNodeIndex) targetNodeIndex = r.Next(0, connections.Length);
return targetNodeIndex;
}
// Light parsing callback for LightClientRequest: consumes one RESP reply of any
// top-level type from buf and returns (bytesRead, repliesParsed). Returns (0, 0)
// when the buffer does not yet hold a complete reply so the caller reads more data.
// NOTE(review): reports bytesRead rather than (ptr - buf) as consumed — assumes a
// single in-flight reply per read; confirm against LightClientRequest's contract.
public static (int, int) LightReceive(byte* buf, int bytesRead, int opType)
{
string result = null;
byte* ptr = buf;
string[] resultArray = null;
int count = 0;
switch (*buf)
{
case (byte)'+': // simple string
if (!RespReadResponseUtils.TryReadSimpleString(out result, ref ptr, buf + bytesRead))
return (0, 0);
count++;
break;
case (byte)':': // integer
if (!RespReadResponseUtils.TryReadIntegerAsString(out result, ref ptr, buf + bytesRead))
return (0, 0);
count++;
break;
case (byte)'-': // error
if (!RespReadResponseUtils.TryReadErrorAsString(out result, ref ptr, buf + bytesRead))
return (0, 0);
count++;
break;
case (byte)'$': // bulk string
if (!RespReadResponseUtils.TryReadStringWithLengthHeader(out result, ref ptr, buf + bytesRead))
return (0, 0);
count++;
break;
case (byte)'*': // array
if (!RespReadResponseUtils.TryReadStringArrayWithLengthHeader(out resultArray, ref ptr, buf + bytesRead))
return (0, 0);
count++;
break;
default:
// Fixed: message previously closed a ']' it never opened.
throw new Exception("Unexpected response: [" + Encoding.ASCII.GetString(new Span<byte>(buf, bytesRead)).Replace("\n", "|").Replace("\r", "") + "]");
}
return (bytesRead, count);
}
// Converts a raw RESP reply into a string; for '*' replies the elements are returned
// via resultArray and the method's return value stays null. A RESP nil bulk string
// ($-1\r\n) yields the literal "(empty)".
public static string ParseRespToString(byte[] data, out string[] resultArray)
{
resultArray = null;
string result = null;
fixed (byte* buf = data)
{
byte* ptr = buf;
// RESP nil bulk string.
if (buf[0] == '$' && buf[1] == '-' && buf[2] == '1' && buf[3] == '\r' && buf[4] == '\n')
return "(empty)";
switch (*buf)
{
case (byte)'+': // simple string
RespReadResponseUtils.TryReadSimpleString(out result, ref ptr, buf + data.Length);
break;
case (byte)':': // integer
RespReadResponseUtils.TryReadIntegerAsString(out result, ref ptr, buf + data.Length);
break;
case (byte)'-': // error
RespReadResponseUtils.TryReadErrorAsString(out result, ref ptr, buf + data.Length);
break;
case (byte)'$': // bulk string
RespReadResponseUtils.TryReadStringWithLengthHeader(out result, ref ptr, buf + data.Length);
break;
case (byte)'*': // array
RespReadResponseUtils.TryReadStringArrayWithLengthHeader(out resultArray, ref ptr, buf + data.Length);
break;
default:
// Fixed: message previously closed a ']' it never opened.
throw new Exception("Unexpected response: [" + Encoding.ASCII.GetString(new Span<byte>(buf, data.Length)).Replace("\n", "|").Replace("\r", "") + "]");
}
}
return result;
}
// RESP error prefixes recognized when classifying a reply (see ParseResponseState).
public static ReadOnlySpan<byte> MOVED => "-MOVED"u8;
public static ReadOnlySpan<byte> ASK => "-ASK"u8;
public static ReadOnlySpan<byte> MIGRATING => "-MIGRATING"u8;
public static ReadOnlySpan<byte> CROSSSLOT => "-CROSSSLOT"u8;
public static ReadOnlySpan<byte> CLUSTERDOWN => "-CLUSTERDOWN"u8;
// Classifies a raw RESP reply into a ResponseState. On success the decoded payload
// is returned via returnValue/returnValueArray; for -MOVED/-ASK the redirection
// slot and endpoint are extracted. Asserts (NONE) on an unrecognized reply.
public static ResponseState ParseResponseState(
byte[] result,
out int slot,
out IPEndPoint endpoint,
out string returnValue,
out string[] returnValueArray)
{
returnValue = null;
returnValueArray = null;
slot = default;
endpoint = null;
if (result[0] == (byte)'+' || result[0] == (byte)':' || result[0] == '*' || result[0] == '$')
{
returnValue = ParseRespToString(result, out returnValueArray);
return ResponseState.OK;
}
else if (result.AsSpan().StartsWith(MOVED))
{
GetEndPointFromResponse(result, out slot, out endpoint);
return ResponseState.MOVED;
}
else if (result.AsSpan().StartsWith(ASK))
{
GetEndPointFromResponse(result, out slot, out endpoint);
return ResponseState.ASK;
}
else if (result.AsSpan().StartsWith(MIGRATING))
{
return ResponseState.MIGRATING;
}
else if (result.AsSpan().StartsWith(CROSSSLOT))
{
return ResponseState.CROSSSLOT;
}
else if (result.AsSpan().StartsWith(CLUSTERDOWN))
{
return ResponseState.CLUSTERDOWN;
}
else
ClassicAssert.IsFalse(true);
return ResponseState.NONE;
}
// Classifies a raw RESP reply from a LightClientRequest. On success the reply's
// payload, the responding endpoint, and the key's hash slot are returned.
// Fixed: prefix checks now use StartsWith like the sibling overload — the previous
// exact-length slices ([..MOVED.Length] etc.) threw ArgumentOutOfRangeException on
// replies shorter than the prefix; likewise the assert message no longer slices a
// fixed 32 bytes out of a possibly shorter reply.
public static ResponseState ParseResponseState(
ref LightClientRequest node,
byte[] key,
byte[] result,
out int slot,
out IPEndPoint endpoint,
out byte[] value,
out string[] values)
{
value = null;
values = null;
slot = -1;
endpoint = null;
if (result[0] == (byte)'+' || result[0] == (byte)':' || result[0] == '*' || result[0] == '$')
{
endpoint = node.EndPoint as IPEndPoint;
slot = HashSlot(key);
var strValue = ParseRespToString(result, out values);
if (strValue != null)
value = Encoding.ASCII.GetBytes(strValue);
return ResponseState.OK;
}
else if (result.AsSpan().StartsWith(MOVED))
{
GetEndPointFromResponse(result, out slot, out endpoint);
return ResponseState.MOVED;
}
else if (result.AsSpan().StartsWith(ASK))
{
GetEndPointFromResponse(result, out slot, out endpoint);
return ResponseState.ASK;
}
else if (result.AsSpan().StartsWith(MIGRATING))
{
return ResponseState.MIGRATING;
}
else if (result.AsSpan().StartsWith(CROSSSLOT))
{
return ResponseState.CROSSSLOT;
}
else if (result.AsSpan().StartsWith(CLUSTERDOWN))
{
return ResponseState.CLUSTERDOWN;
}
else
ClassicAssert.IsFalse(true, Encoding.ASCII.GetString(result.AsSpan()[..Math.Min(32, result.Length)]));
return ResponseState.NONE;
}
// Opens one loopback LightClientRequest per port, wired to LightReceive.
public static LightClientRequest[] CreateLightRequestConnections(int[] Ports)
{
LightClientRequest[] lightClientRequests = new LightClientRequest[Ports.Length];
for (int i = 0; i < Ports.Length; i++)
{
lightClientRequests[i] = new LightClientRequest(new IPEndPoint(IPAddress.Loopback, Ports[i]), 0, LightReceive);
}
return lightClientRequests;
}
// Maps every slot owned by the node (addressed by index) to that node's port.
public Dictionary<ushort, int> GetSlotPortMapFromNode(int nodeIndex, ILogger logger)
{
var slots = GetOwnedSlotsFromNode(nodeIndex, logger);
int Port = ((IPEndPoint)endpoints[nodeIndex]).Port;
return slots.Select(slot => new KeyValuePair<ushort, int>((ushort)slot, Port)).ToDictionary(x => x.Key, x => x.Value);
}
// Maps every slot owned by the node (addressed by port) to that port.
public Dictionary<ushort, int> GetSlotPortMapFromServer(int Port, ILogger logger)
{
var endPoint = GetEndPointFromPort(Port);
var slots = GetOwnedSlotsFromNode(endPoint, logger);
return slots.Select(slot => new KeyValuePair<ushort, int>((ushort)slot, Port)).ToDictionary(x => x.Key, x => x.Value);
}
// Merges b into a (in place), asserting the slot sets are disjoint.
public static Dictionary<ushort, int> MergeSlotPortMap(Dictionary<ushort, int> a, Dictionary<ushort, int> b)
{
foreach (var pair in b)
{
ClassicAssert.IsTrue(!a.ContainsKey(pair.Key));
a.Add(pair.Key, pair.Value);
}
return a;
}
// Issues CLUSTER ADDSLOTSRANGE with the given (start, end) pairs.
public string AddSlotsRange(int nodeIndex, List<(int, int)> ranges, ILogger logger)
=> (string)AddSlotsRange((IPEndPoint)endpoints[nodeIndex], ranges, logger);
public RedisResult AddSlotsRange(IPEndPoint endPoint, List<(int, int)> ranges, ILogger logger)
{
ICollection<object> args = new List<object>() { "addslotsrange" };
foreach (var range in ranges)
{
args.Add(range.Item1);
args.Add(range.Item2);
}
return Execute(endPoint, "cluster", args, logger: logger);
}
// CLUSTER SET-CONFIG-EPOCH; fails the test if the node rejects it.
public void SetConfigEpoch(int sourceNodeIndex, long epoch, ILogger logger = null)
=> SetConfigEpoch((IPEndPoint)endpoints[sourceNodeIndex], epoch, logger);
public void SetConfigEpoch(IPEndPoint endPoint, long epoch, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
var resp = server.Execute("cluster", "set-config-epoch", $"{epoch}");
ClassicAssert.AreEqual((string)resp, "OK");
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occured");
Assert.Fail(ex.Message);
}
}
// CLUSTER BUMPEPOCH, optionally blocking until all nodes agree on the epoch.
public void BumpEpoch(int nodeIndex, bool waitForSync = false, ILogger logger = null)
=> BumpEpoch((IPEndPoint)endpoints[nodeIndex], waitForSync: waitForSync, logger);
public void BumpEpoch(IPEndPoint endPoint, bool waitForSync = false, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
var resp = server.Execute("cluster", "bumpepoch");
ClassicAssert.AreEqual((string)resp, "OK");
if (waitForSync)
WaitForEpochSync(endPoint).GetAwaiter().GetResult();
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occured");
Assert.Fail(ex.Message);
}
}
// Blocks until every node in `nodes` (default: all) reports a cluster config matching
// fromNode's. The reference config is captured once up front; if any node mismatches,
// the whole scan restarts after a backoff (goto retry) until the test's token cancels.
public void WaitForConfigPropagation(int fromNode, List<int> nodes = null, ILogger logger = null)
{
if (nodes == null)
nodes = [.. Enumerable.Range(0, endpoints.Count)];
var fromNodeConfig = ClusterNodes(fromNode, logger: logger);
while (true)
{
retry:
foreach (var nodeIndex in nodes)
{
if (nodeIndex == fromNode)
continue;
var nodeConfig = ClusterNodes(nodeIndex, logger: logger);
if (!MatchConfig(fromNodeConfig, nodeConfig))
{
BackOff(cancellationToken: context.cts.Token);
goto retry;
}
}
break;
}
}
// Returns true when configB agrees with configA: every node in A appears in B with
// the same role, parent, endpoint, and an identical ordered list of slot ranges.
// (Extra nodes present only in B are not checked.)
public static bool MatchConfig(ClusterConfiguration configA, ClusterConfiguration configB)
{
foreach (var nodeA in configA.Nodes)
{
var matched = false;
foreach (var nodeB in configB.Nodes)
{
if (!nodeA.NodeId.Equals(nodeB.NodeId))
continue;
matched = true;
// Node-level attributes must agree.
if (nodeA.IsReplica != nodeB.IsReplica ||
nodeA.Parent != nodeB.Parent ||
!nodeA.EndPoint.Equals(nodeB.EndPoint))
return false;
// Slot ranges must agree element-wise, in order.
var rangesA = nodeA.Slots.ToArray();
var rangesB = nodeB.Slots.ToArray();
if (rangesA.Length != rangesB.Length)
return false;
for (var k = 0; k < rangesA.Length; k++)
{
if (rangesA[k].From != rangesB[k].From || rangesA[k].To != rangesB[k].To)
return false;
}
}
if (!matched)
return false;
}
return true;
}
// Sends AUTH to a node; fails the test on error.
public void Authenticate(int sourceNodeIndex, string username, string password, ILogger logger = null)
=> Authenticate((IPEndPoint)endpoints[sourceNodeIndex], username, password, logger);
public void Authenticate(IPEndPoint source, string username, string password, ILogger logger = null)
{
try
{
var server = redis.GetServer(source);
server.Execute("auth", username, password);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred");
Assert.Fail(ex.Message);
}
}
// CLUSTER MEET target from source, addressed by IP.
public void Meet(int sourceNodeIndex, int meetNodeIndex, ILogger logger = null)
=> Meet((IPEndPoint)endpoints[sourceNodeIndex], (IPEndPoint)endpoints[meetNodeIndex], logger);
public void Meet(IPEndPoint source, IPEndPoint target, ILogger logger = null)
{
try
{
var server = redis.GetServer(source);
var resp = server.Execute("cluster", "meet", $"{target.Address}", $"{target.Port}");
ClassicAssert.AreEqual((string)resp, "OK");
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred");
Assert.Fail(ex.Message);
}
}
// CLUSTER MEET variant addressing the target by hostname instead of IP.
public void Meet(int sourceNodeIndex, int meetNodeIndex, string hostname, ILogger logger = null)
=> Meet((IPEndPoint)endpoints[sourceNodeIndex], (IPEndPoint)endpoints[meetNodeIndex], hostname, logger);
public void Meet(IPEndPoint source, IPEndPoint target, string hostname, ILogger logger = null)
{
try
{
var server = redis.GetServer(source);
var resp = server.Execute("cluster", "meet", $"{hostname}", $"{target.Port}");
ClassicAssert.AreEqual((string)resp, "OK");
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred");
Assert.Fail(ex.Message);
}
}
// CLUSTER BANLIST: returns the node's current ban list; fails the test on error.
public string[] BanList(int sourceNodeIndex, ILogger logger = null)
=> BanList((IPEndPoint)endpoints[sourceNodeIndex], logger);
public string[] BanList(IPEndPoint source, ILogger logger = null)
{
try
{
var server = redis.GetServer(source);
var resp = server.Execute("cluster", "banlist");
return (string[])resp;
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred");
Assert.Fail(ex.Message);
return null;
}
}
// Raw CLUSTER NODES via a light client; skips the reply's first line (bulk-string
// header) and collects lines until the terminating "\r".
public static List<string> Nodes(ref LightClientRequest lightClientRequest)
{
var result = lightClientRequest.SendCommand($"cluster nodes");
var strResult = Encoding.ASCII.GetString(result);
var data = strResult.Split('\n');
List<string> nodeConfig = new();
for (int i = 1; ; i++)
{
if (data[i].Equals("\r"))
break;
nodeConfig.Add(data[i]);
}
return nodeConfig;
}
// CLUSTER NODES via the multiplexer: one raw config line per known node.
public List<string> Nodes(int nodeIndex, ILogger logger)
{
return Nodes((IPEndPoint)endpoints[nodeIndex], logger: logger);
}
public List<string> Nodes(IPEndPoint endPoint, ILogger logger)
{
try
{
var server = redis.GetServer(endPoint);
var strResult = (string)server.Execute("cluster", "nodes");
var data = strResult.Split('\n');
List<string> nodeConfig = new();
for (int i = 0; ; i++)
{
if (data[i] == "")
break;
nodeConfig.Add(data[i]);
}
return nodeConfig;
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occured");
Assert.Fail(ex.Message);
return null;
}
}
// CLUSTER NODES parsed into nodeid -> { field tag -> value }. Field positions
// follow the CLUSTER NODES line format as indexed by ClusterInfoTag; the SLOT
// field is optional (absent for nodes owning no slots).
public Dictionary<string, Dictionary<ClusterInfoTag, string>> NodesDict(int nodeIndex, ILogger logger)
{
return NodesDict((IPEndPoint)endpoints[nodeIndex], logger);
}
public Dictionary<string, Dictionary<ClusterInfoTag, string>> NodesDict(IPEndPoint endPoint, ILogger logger)
{
try
{
var server = redis.GetServer(endPoint);
var strResult = (string)server.Execute("cluster", "nodes");
var data = strResult.Split('\n');
Dictionary<string, Dictionary<ClusterInfoTag, string>> nodeConfig = new();
for (int i = 0; i < data.Length; i++)
{
if (data[i] == "") continue;
var properties = data[i].Split(' ');
var nodeid = properties[0].Trim();
nodeConfig.Add(nodeid, new());
nodeConfig[nodeid].Add(ClusterInfoTag.ADDRESS, properties[(int)ClusterInfoTag.ADDRESS].Trim());
nodeConfig[nodeid].Add(ClusterInfoTag.FLAGS, properties[(int)ClusterInfoTag.FLAGS].Trim());
nodeConfig[nodeid].Add(ClusterInfoTag.PRIMARY, properties[(int)ClusterInfoTag.PRIMARY].Trim());
nodeConfig[nodeid].Add(ClusterInfoTag.PING_SENT, properties[(int)ClusterInfoTag.PING_SENT].Trim());
nodeConfig[nodeid].Add(ClusterInfoTag.PONG_RECEIVED, properties[(int)ClusterInfoTag.PONG_RECEIVED].Trim());
nodeConfig[nodeid].Add(ClusterInfoTag.CONFIG_EPOCH, properties[(int)ClusterInfoTag.CONFIG_EPOCH].Trim());
nodeConfig[nodeid].Add(ClusterInfoTag.LINK_STATE, properties[(int)ClusterInfoTag.LINK_STATE].Trim());
if (properties.Length > (int)ClusterInfoTag.SLOT)
nodeConfig[nodeid].Add(ClusterInfoTag.SLOT, properties[(int)ClusterInfoTag.SLOT].Trim());
}
return nodeConfig;
}
catch (Exception ex)
{
logger?.LogError(ex, "An error occurred");
Assert.Fail(ex.Message);
return null;
}
}
// Config epoch of nodeidOfConfigEpoch as reported by the node at nodeIndex.
public string GetConfigEpochOfNodeFromNodeIndex(int nodeIndex, string nodeidOfConfigEpoch, ILogger logger)
{
var dict = NodesDict(nodeIndex, logger);
return dict[nodeidOfConfigEpoch][ClusterInfoTag.CONFIG_EPOCH];
}
// A node's own config epoch (relies on nodeIds populated by Connect/Reconnect).
public long GetConfigEpoch(int nodeIndex, ILogger logger)
{
var dict = NodesDict(nodeIndex, logger);
return long.Parse(dict[nodeIds[nodeIndex]][ClusterInfoTag.CONFIG_EPOCH]);
}
// Blocks until every node's CLUSTER NODES output lists all endpoints.
public void WaitAll(ILogger logger)
{
foreach (var endPoint in endpoints)
{
while (Nodes((IPEndPoint)endPoint, logger).Count != endpoints.Count)
{
BackOff(cancellationToken: context.cts.Token);
}
}
}
// True when nodeIndex's config contains knownNodeIndex's node-id.
public bool IsKnown(int nodeIndex, int knownNodeIndex, ILogger logger = null)
{
var toKnowNodeId = ClusterNodes(knownNodeIndex).Nodes.First().NodeId;
var nodeConfig = ClusterNodes(nodeIndex);
return nodeConfig.Nodes.Any(x => x.NodeId.Equals(toKnowNodeId, StringComparison.OrdinalIgnoreCase));
}
// Blocks until nodeIndex's config includes toKnowNode's node-id.
public void WaitUntilNodeIsKnown(int nodeIndex, int toKnowNode, ILogger logger = null)
{
var toKnowNodeId = ClusterNodes(toKnowNode).Nodes.First().NodeId;
WaitUntilNodeIdIsKnown(nodeIndex, toKnowNodeId, logger);
}
// Blocks until every OTHER node's config lists nodeIndex's node-id;
// bounded by a retry counter to avoid hanging the test forever.
public void WaitUntilNodeIsKnownByAllNodes(int nodeIndex, ILogger logger = null)
{
var c = ClusterNodes(nodeIndex, logger);
var nodeId = c.Nodes.First().NodeId;
var retry = 0;
while (true)
{
var configs = new List<ClusterConfiguration>();
for (var i = 0; i < endpoints.Count; i++)
{
if (i == nodeIndex) continue;
configs.Add(ClusterNodes(i, logger));
}
var count = 0;
foreach (var config in configs)
foreach (var node in config.Nodes)
if (nodeId.Equals(node.NodeId)) count++;
if (count == endpoints.Count - 1) break;
if (retry++ > 1_000_000) Assert.Fail("retry config sync at WaitUntilNodeIsKnownByAllNodes reached");
BackOff(cancellationToken: context.cts.Token);
}
}
// Blocks until nodeIndex's config lists the given node-id.
public void WaitUntilNodeIdIsKnown(int nodeIndex, string nodeId, ILogger logger = null)
{
while (true)
{
var nodes = ClusterNodes(nodeIndex, logger).Nodes;
var found = false;
foreach (var node in nodes)
if (node.NodeId.Equals(nodeId))
found = true;
if (found) break;
BackOff(cancellationToken: context.cts.Token);
}
}
// CLUSTER COUNTKEYSINSLOT via the default database; out-of-range slots are expected
// to error and yield 0 rather than fail the test (used for negative testing).
public int CountKeysInSlot(int slot, ILogger logger = null)
{
var db = redis.GetDatabase(0);
RedisResult result;
try
{
result = db.Execute("cluster", "countkeysinslot", slot.ToString());
}
catch (Exception ex)
{
logger?.LogInformation(ex, "Exception occurred in CountKeysInSlot");
if (slot < 0 || slot > ushort.MaxValue - 1)
{
ClassicAssert.AreEqual("ERR Slot out of range", ex.Message);
return 0;
}
return 0;
}
return ResultType.Integer == result.Resp2Type ? int.Parse(result.ToString()) : 0;
}
// Same command against a specific node; returns -1 on any error.
public int CountKeysInSlot(int nodeIndex, int slot, ILogger logger = null)
{
var server = redis.GetServer((IPEndPoint)endpoints[nodeIndex]);
RedisResult resp;
try
{
resp = server.Execute("cluster", "countkeysinslot", $"{slot}");
}
catch (Exception ex)
{
logger?.LogWarning(ex, "Exception occurred in CountKeysInSlot");
return -1;
}
return (int)resp;
}
// CLUSTER GETKEYSINSLOT via the default database; out-of-range slots are expected to error.
public RedisResult[] GetKeysInSlot(int slot, int count, ILogger logger = null)
{
var db = redis.GetDatabase(0);
RedisResult result = null;
try
{
result = db.Execute("cluster", "getkeysinslot", slot.ToString(), count.ToString());
}
catch (Exception ex)
{
logger?.LogWarning(ex, "Exception occurred in GetKeysInSlot");
if (slot < 0 || slot > ushort.MaxValue - 1)
{
ClassicAssert.AreEqual(ex.Message, "ERR Slot out of range");
return null;
}
}
return (RedisResult[])result;
}
// Same command against a specific node, returning raw key bytes; fails the test on error.
public List<byte[]> GetKeysInSlot(int nodeIndex, int slot, int keyCount, ILogger logger = null)
{
try
{
var server = redis.GetServer((IPEndPoint)endpoints[nodeIndex]);
var resp = server.Execute("cluster", "getkeysinslot", $"{slot}", $"{keyCount}");
return [.. ((RedisResult[])resp).Select(x => Encoding.ASCII.GetBytes((string)x))];
}
catch (Exception ex)
{
logger?.LogError("GetKeysInsSlot {msg}", ex.Message);
Assert.Fail(ex.Message);
return null;
}
}
// Expands the node's own CLUSTER NODES line (first line = "myself") into the flat
// list of slots it owns, expanding "a-b" ranges and single-slot entries.
public List<int> GetOwnedSlotsFromNode(int nodeIndex, ILogger logger)
{
var endPoint = (IPEndPoint)endpoints[nodeIndex];
return GetOwnedSlotsFromNode(endPoint, logger);
}
public List<int> GetOwnedSlotsFromNode(IPEndPoint endPoint, ILogger logger)
{
var nodeConfig = Nodes(endPoint, logger);
var nodeInfo = nodeConfig[0].Split(' ');
var slots = new List<int>();
if (nodeInfo.Length >= (int)ClusterInfoTag.SLOT)
{
for (int i = (int)ClusterInfoTag.SLOT; i < nodeInfo.Length; i++)
{
var range = nodeInfo[i].Split('-');
ushort slotStart = ushort.Parse(range[0]);
ushort slotEnd;
if (range.Length > 1)
{
slotEnd = ushort.Parse(range[1]);
slots.AddRange(Enumerable.Range(slotStart, slotEnd - slotStart + 1));
}
else
slots.Add(slotStart);
}
}
return slots;
}
// Node-id from the node's own CLUSTER NODES line (light-client variant).
public static string GetNodeIdFromNode(ref LightClientRequest sourceNode)
{
var nodeConfig = Nodes(ref sourceNode);
var nodeInfo = nodeConfig[0].Split(' ');
return nodeInfo[(int)ClusterInfoTag.NODEID];
}
// Node-id from the node's own CLUSTER NODES line (multiplexer variant).
public string GetNodeIdFromNode(int nodeIndex, ILogger logger)
{
var nodeConfig = Nodes(nodeIndex, logger: logger);
var nodeInfo = nodeConfig[0].Split(' ');
return nodeInfo[(int)ClusterInfoTag.NODEID];
}
// Scans the cluster config (as seen from the node at `port`) for the owner of `slot`
// and returns that owner's port, or -1 if no node claims the slot.
public int GetMovedAddress(int port, ushort slot, ILogger logger)
{
var nodeConfig = Nodes(GetEndPointFromPort(port), logger);
foreach (var configLine in nodeConfig)
{
var nodeInfo = configLine.Split(' ');
for (int i = (int)ClusterInfoTag.SLOT; i < nodeInfo.Length; i++)
{
var range = nodeInfo[i].Split('-');
ushort slotStart = ushort.Parse(range[0]);
ushort slotEnd;
if (range.Length > 1)
{
slotEnd = ushort.Parse(range[1]);
if (slot >= slotStart && slot <= slotEnd)
{
// Address field is "ip:port@cport" -> take the port.
var portStr = nodeInfo[(int)ClusterInfoTag.ADDRESS].Split('@')[0].Split(':')[1];
return int.Parse(portStr);
}
}
else
{
if (slot == slotStart)
{
var portStr = nodeInfo[(int)ClusterInfoTag.ADDRESS].Split('@')[0].Split(':')[1];
return int.Parse(portStr);
}
}
}
}
return -1;
}
// Index into endpoints for the given port, or -1 if not present.
public int GetEndPointIndexFromPort(int port)
{
for (int i = 0; i < endpoints.Count; i++)
{
if (((IPEndPoint)endpoints[i]).Port == port)
return i;
}
return -1;
}
// Returns the index of the connection whose own config line ("myself") claims the
// given slot, or -1 if no connected node owns it (light-client variant).
public static int GetSourceNodeIndexFromSlot(ref LightClientRequest[] connections, ushort slot)
{
for (int j = 0; j < connections.Length; j++)
{
var nodeConfig = Nodes(ref connections[j])[0];
var nodeInfo = nodeConfig.Split(' ');
for (var i = (int)ClusterInfoTag.SLOT; i < nodeInfo.Length; i++)
{
var range = nodeInfo[i].Split('-');
var slotStart = ushort.Parse(range[0]);
int slotEnd;
if (range.Length > 1)
{
slotEnd = ushort.Parse(range[1]);
if (slot >= slotStart && slot <= slotEnd)
{
return j;
}
}
else
{
if (slot == slotStart)
{
return j;
}
}
}
}
return -1;
}
// Same as above but over this instance's endpoints via the multiplexer.
public int GetSourceNodeIndexFromSlot(ushort slot, ILogger logger)
{
for (var j = 0; j < endpoints.Count; j++)
{
var nodeConfig = Nodes((IPEndPoint)endpoints[j], logger)[0];
var nodeInfo = nodeConfig.Split(' ');
for (var i = (int)ClusterInfoTag.SLOT; i < nodeInfo.Length; i++)
{
var range = nodeInfo[i].Split('-');
var slotStart = ushort.Parse(range[0]);
ushort slotEnd;
if (range.Length > 1)
{
slotEnd = ushort.Parse(range[1]);
if (slot >= slotStart && slot <= slotEnd)
{
return j;
}
}
else
{
if (slot == slotStart)
{
return j;
}
}
}
}
return -1;
}
// Parses a "-MOVED <slot> <ip>:<port>\r\n" or "-ASK <slot> <ip>:<port>\r\n" RESP
// redirection into its slot number and target endpoint.
public static void GetEndPointFromResponse(byte[] resp, out int slot, out IPEndPoint endpoint)
{
var tokens = Encoding.ASCII.GetString(resp).Split(' ');
slot = int.Parse(tokens[1]);
var addressParts = tokens[2].Split(':');
var host = IPAddress.Parse(addressParts[0]);
var port = int.Parse(addressParts[1].Split('\r')[0]); // strip trailing \r\n
endpoint = new IPEndPoint(host, port);
}
// CLUSTER ADDSLOTS/DELSLOTS with explicit slot list. Errors are returned as the
// message (not asserted) because callers deliberately test failure responses.
public string AddDelSlots(int nodeIndex, List<int> slots, bool addslot, ILogger logger = null)
{
var endPoint = ((IPEndPoint)endpoints[nodeIndex]);
var server = redis.GetServer(endPoint);
var objects = slots.Select(x => (object)x).ToList();
objects.Insert(0, addslot ? "addslots" : "delslots");
try
{
return (string)server.Execute("cluster", [.. objects]);
}
catch (Exception e)
{
logger?.LogError("AddDelSlots error {msg}", e.Message);
// No fail because testing failing responses
return e.Message;
}
}
// CLUSTER ADDSLOTSRANGE/DELSLOTSRANGE with (start, end) pairs; same error contract.
public string AddDelSlotsRange(int nodeIndex, List<(int, int)> ranges, bool addslot, ILogger logger = null)
{
var endPoint = (IPEndPoint)endpoints[nodeIndex];
var server = redis.GetServer(endPoint);
var objects = ranges.SelectMany(x => new List<object> { x.Item1, x.Item2 }).ToList();
objects.Insert(0, addslot ? "addslotsrange" : "delslotsrange");
try
{
return (string)server.Execute("cluster", [.. objects]);
}
catch (Exception e)
{
logger?.LogError("AddDelSlotsRange error {msg}", e.Message);
// No fail testing resp response
return e.Message;
}
}
// CLUSTER SETSLOT via a light client; nodeid is optional (empty string omits it,
// e.g. for the STABLE state).
public static string SetSlot(ref LightClientRequest node, int slot, string state, string nodeid)
{
byte[] resp;
if (nodeid != "")
{
resp = node.SendCommand($"cluster setslot {slot} {state} {nodeid}");
}
else
{
resp = node.SendCommand($"cluster setslot {slot} {state}");
}
return ParseRespToString(resp, out _);
}
public string SetSlot(int nodeIndex, int slot, string state, string nodeid, ILogger logger = null)
{
var endPoint = GetEndPoint(nodeIndex);
return SetSlot(endPoint, slot, state, nodeid, logger);
}
// CLUSTER SETSLOT via the multiplexer. Timeouts fail the test; other errors are
// returned as the message because callers deliberately test failure responses.
public string SetSlot(IPEndPoint endPoint, int slot, string state, string nodeid, ILogger logger = null)
{
var server = GetServer(endPoint);
try
{
string ret;
if (nodeid != "")
{
ret = (string)server.Execute("cluster", "setslot", $"{slot}", $"{state}", $"{nodeid}");
}
else
{
ret = (string)server.Execute("cluster", "setslot", $"{slot}", $"{state}");
}
return ret;
}
catch (RedisTimeoutException tex)
{
logger?.LogError(tex, "Timeout exception");
Assert.Fail(tex.Message);
return tex.Message;
}
catch (Exception e)
{
// No fail because testing responses
return e.Message;
}
}
// CLUSTER SLOTSTATE: returns the node's space-separated state info for a slot;
// fails the test on error.
public string[] SlotState(int nodeIndex, int slot, ILogger logger = null)
=> SlotState(GetEndPoint(nodeIndex), slot, logger);
public string[] SlotState(IPEndPoint endpoint, int slot, ILogger logger = null)
{
try
{
var server = GetServer(endpoint);
var resp = (string)server.Execute("cluster", "slotstate", $"{slot}");
return resp.Split(" ");
}
catch (Exception ex)
{
logger?.LogError(ex, "SlotState");
Assert.Fail(ex.Message);
}
return null;
}
// MIGRATE ... SLOTS/SLOTSRANGE from one node to another, addressed by node index.
public void MigrateSlotsIndex(int sourceNodeIndex, int targetNodeIndex, List<int> slots, bool range = false, string authPassword = null, ILogger logger = null)
{
var srcPort = GetPortFromNodeIndex(sourceNodeIndex);
var dstPort = GetPortFromNodeIndex(targetNodeIndex);
MigrateSlots(srcPort, dstPort, slots, range, authPassword, logger);
}
// Same, addressed by port.
public void MigrateSlots(int sourcePort, int targetPort, List<int> slots, bool range = false, string authPassword = null, ILogger logger = null)
{
var sourceEndPoint = GetEndPointFromPort(sourcePort);
var targetEndPoint = GetEndPointFromPort(targetPort);
MigrateSlots(sourceEndPoint, targetEndPoint, slots, range, authPassword, logger);
}
// Builds and issues the MIGRATE command (empty key, db 0, infinite timeout -1);
// `range` selects the SLOTSRANGE form, where `slots` holds start/end pairs.
public void MigrateSlots(IPEndPoint source, IPEndPoint target, List<int> slots, bool range = false, string authPassword = null, ILogger logger = null)
{
// MIGRATE host port <key | ""> destination-db timeout [COPY] [REPLACE] [[AUTH password] | [AUTH2 username password]] [KEYS key [key...]]
var server = redis.GetServer(source);
ICollection<object> args = new List<object>
{
target.Address.ToString(),
target.Port,
"",
0,
-1
};
if (authPassword != null)
{
args.Add("AUTH");
args.Add(authPassword);
}
args.Add(range ? "SLOTSRANGE" : "SLOTS");
foreach (var slot in slots)
args.Add(slot);
try
{
var resp = server.Execute("migrate", args);
ClassicAssert.AreEqual((string)resp, "OK");
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred");
Assert.Fail(ex.Message);
}
}
// MIGRATE ... KEYS key [key ...] from source to target; fails the test on error,
// logging how long the command ran before failing (useful for timeout diagnosis).
// Fixed: the elapsed time was computed as TimeSpan.FromTicks(stopwatch-tick delta),
// but Stopwatch ticks are not TimeSpan ticks — Stopwatch.GetElapsedTime performs
// the correct Stopwatch.Frequency-based conversion.
public void MigrateKeys(IPEndPoint source, IPEndPoint target, List<byte[]> keys, ILogger logger)
{
var server = redis.GetServer(source);
ICollection<object> args = new List<object>()
{
target.Address.ToString(),
target.Port,
$"",
0,
-1,
"KEYS"
};
foreach (var key in keys)
args.Add(Encoding.ASCII.GetString(key));
var start = Stopwatch.GetTimestamp();
try
{
var resp = server.Execute("migrate", args);
ClassicAssert.AreEqual((string)resp, "OK");
}
catch (Exception ex)
{
var elapsed = Stopwatch.GetElapsedTime(start);
logger?.LogError(ex, "An error has occurred");
logger?.LogError("timeoutSpan: {elapsed}", elapsed);
Assert.Fail(ex.Message);
}
}
// CLUSTER MTASKS: number of in-flight migrate tasks on a node; fails the test on error.
public int MigrateTasks(IPEndPoint endPoint, ILogger logger)
{
var server = redis.GetServer(endPoint);
try
{
var result = server.Execute("cluster", "MTASKS");
return int.Parse((string)result);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred");
Assert.Fail(ex.Message);
return -1;
}
}
// Blocks until a node (or every node) reports zero outstanding migrate tasks.
public void WaitForMigrationCleanup(int nodeIndex, ILogger logger = null)
=> WaitForMigrationCleanup(endpoints[nodeIndex].ToIPEndPoint(), logger);
public void WaitForMigrationCleanup(IPEndPoint endPoint, ILogger logger)
{
while (MigrateTasks(endPoint, logger) > 0) { BackOff(cancellationToken: context.cts.Token); }
}
public void WaitForMigrationCleanup(ILogger logger)
{
foreach (var endPoint in endpoints)
WaitForMigrationCleanup((IPEndPoint)endPoint, logger);
}
public static void Asking(ref LightClientRequest sourceNode)
{
var result = sourceNode.SendCommand($"ASKING");
ClassicAssert.IsTrue(result.AsSpan()[..bresp_OK.Length].SequenceEqual(bresp_OK));
}
/// <summary>
/// Pings every node in the cluster, retrying each node until it answers PONG.
/// Fails the running test on any error.
/// </summary>
public void PingAll(ILogger logger)
{
    try
    {
        foreach (var ep in endpoints)
        {
            var server = redis.GetServer((IPEndPoint)ep);
            // Keep pinging until the node responds with PONG.
            while ((string)server.Execute("PING") != "PONG") { }
        }
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred");
        Assert.Fail(ex.Message);
    }
}
/// <summary>Runs CLUSTER SLOTS against the node at <paramref name="nodeIndex"/>.</summary>
public List<SlotItem> ClusterSlots(int nodeIndex, ILogger logger = null)
    => ClusterSlots((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>
/// Runs CLUSTER SLOTS against the given node and parses the reply into a list of
/// <see cref="SlotItem"/> (slot range plus owning primary and its replicas).
/// Returns null if the server reply is null; fails the test on error.
/// </summary>
public List<SlotItem> ClusterSlots(IPEndPoint endPoint, ILogger logger = null)
{
    List<SlotItem> slotItems = new();
    try
    {
        var server = redis.GetServer(endPoint);
        var result = server.Execute("cluster", "slots");
        if (result.IsNull)
            return null;
        var slotRanges = (RedisResult[])result;
        foreach (var slotRange in slotRanges)
        {
            SlotItem slotItem = default;
            // Each entry is: [startSlot, endSlot, primary-info, replica-info...]
            var info = (RedisResult[])slotRange;
            var (startSlot, endSlot) = ((int)info[0], (int)info[1]);
            ClassicAssert.IsTrue(startSlot >= 0 && startSlot <= 16383);
            ClassicAssert.IsTrue(endSlot >= 0 && endSlot <= 16383);
            slotItem.startSlot = (ushort)startSlot;
            slotItem.endSlot = (ushort)endSlot;
            slotItem.nnInfo = new NodeNetInfo[info.Length - 2];
            for (int i = 2; i < info.Length; i++)
            {
                // Node entry: [address, port, nodeid, [<"hostname">, hostname]]
                var nodeInfo = (RedisResult[])info[i];
                var address = (string)nodeInfo[0];
                var port = (int)nodeInfo[1];
                var nodeid = (string)nodeInfo[2];
                var hostNameInfo = ((RedisResult[])nodeInfo[3]);
                var hostname = (string)hostNameInfo[1];
                slotItem.nnInfo[i - 2].address = address;
                slotItem.nnInfo[i - 2].port = port;
                slotItem.nnInfo[i - 2].nodeid = nodeid;
                slotItem.nnInfo[i - 2].hostname = hostname;
                // By convention the first node entry (index 2) is the primary.
                slotItem.nnInfo[i - 2].isPrimary = (i == 2);
            }
            slotItems.Add(slotItem);
        }
        return slotItems;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterSlots");
        Assert.Fail(ex.Message);
        return null;
    }
}
/// <summary>Makes <paramref name="replicaNodeIndex"/> a replica of <paramref name="primaryNodeIndex"/> (resolves the primary's node-id first).</summary>
public string ClusterReplicate(int replicaNodeIndex, int primaryNodeIndex, bool async = false, bool failEx = true, ILogger logger = null)
{
    var primaryId = ClusterMyId(primaryNodeIndex, logger: logger);
    return ClusterReplicate(replicaNodeIndex, primaryId, async: async, failEx: failEx, logger: logger);
}

/// <summary>Index-based overload of <see cref="ClusterReplicate(IPEndPoint, string, bool, bool, ILogger)"/>.</summary>
public string ClusterReplicate(int sourceNodeIndex, string primaryNodeId, bool async = false, bool failEx = true, ILogger logger = null)
    => ClusterReplicate((IPEndPoint)endpoints[sourceNodeIndex], primaryNodeId, async: async, failEx: failEx, logger);

/// <summary>
/// Runs CLUSTER REPLICATE (sync or async) on the given node.
/// Returns "OK" on success; on error either fails the test (when <paramref name="failEx"/>)
/// or returns the exception message.
/// </summary>
public string ClusterReplicate(IPEndPoint endPoint, string primaryNodeId, bool async = false, bool failEx = true, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        List<object> args = async ? ["replicate", primaryNodeId, "async"] : ["replicate", primaryNodeId, "sync"];
        var result = (string)server.Execute("cluster", args);
        ClassicAssert.AreEqual("OK", result);
        return result;
    }
    catch (Exception ex)
    {
        if (failEx)
        {
            logger?.LogError(ex, "An error has occured; ClusterReplicate");
            Assert.Fail(ex.Message);
        }
        return ex.Message;
    }
}

/// <summary>Index-based overload of <see cref="ClusterFailover(IPEndPoint, string, ILogger)"/>.</summary>
public string ClusterFailover(int sourceNodeIndex, string option = null, ILogger logger = null)
    => ClusterFailover((IPEndPoint)endpoints[sourceNodeIndex], option, logger);

/// <summary>
/// Runs CLUSTER FAILOVER on the given node, with an optional modifier (e.g. FORCE/TAKEOVER).
/// Asserts an "OK" reply; fails the test on error.
/// </summary>
public string ClusterFailover(IPEndPoint endPoint, string option = null, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        List<object> args = option == null ? ["failover"] : ["failover", option];
        var result = (string)server.Execute("cluster", args);
        ClassicAssert.AreEqual("OK", result);
        return result;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterFailover");
        Assert.Fail(ex.Message);
        return ex.Message;
    }
}
/// <summary>Index-based overload; a negative <paramref name="primaryNodeIndex"/> means REPLICAOF NO ONE.</summary>
public string ReplicaOf(int replicaNodeIndex, int primaryNodeIndex = -1, bool failEx = true, ILogger logger = null)
    => ReplicaOf((IPEndPoint)endpoints[replicaNodeIndex],
        primaryNodeIndex >= 0 ? (IPEndPoint)endpoints[primaryNodeIndex] : null, failEx: failEx, logger);

/// <summary>
/// Runs REPLICAOF on <paramref name="replicaNode"/>; a null <paramref name="primaryNode"/>
/// issues REPLICAOF NO ONE (detach). Returns "OK" on success; on error either fails the
/// test (when <paramref name="failEx"/>) or returns the exception message.
/// </summary>
public string ReplicaOf(IPEndPoint replicaNode, IPEndPoint primaryNode = null, bool failEx = true, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(replicaNode);
        // REPLICAOF <host> <port> or REPLICAOF NO ONE when detaching.
        var args = new List<object>() {
            primaryNode == null ? "NO" : primaryNode.Address.ToString(),
            primaryNode == null ? "ONE" : primaryNode.Port.ToString()
        };
        var result = (string)server.Execute("replicaof", args);
        ClassicAssert.AreEqual("OK", result);
        return result;
    }
    catch (Exception ex)
    {
        if (failEx)
        {
            logger?.LogError("An error has occurred; ReplicaOf {msg}", ex.Message);
            Assert.Fail(ex.Message);
        }
        return ex.Message;
    }
}
/// <summary>Index-based overload of <see cref="ClusterForget(IPEndPoint, string, int, ILogger)"/>.</summary>
public string ClusterForget(int nodeIndex, string nodeid, int expirySeconds, ILogger logger = null)
    => ClusterForget((IPEndPoint)endpoints[nodeIndex], nodeid, expirySeconds, logger);

/// <summary>
/// Runs CLUSTER FORGET for <paramref name="nodeId"/> on the given node with a ban expiry.
/// Asserts an "OK" reply; fails the test on error.
/// </summary>
public string ClusterForget(IPEndPoint endPoint, string nodeId, int expirySeconds, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        var args = new List<object>() {
            "forget",
            Encoding.ASCII.GetBytes(nodeId),
            Encoding.ASCII.GetBytes(expirySeconds.ToString())
        };
        var result = (string)server.Execute("cluster", args);
        ClassicAssert.AreEqual("OK", result);
        return result;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterForget");
        Assert.Fail(ex.Message);
        return ex.Message;
    }
}

/// <summary>Index-based overload of <see cref="ClusterReset(IPEndPoint, bool, int, ILogger)"/>.</summary>
public string ClusterReset(int nodeIndex, bool soft = true, int expiry = 60, ILogger logger = null)
    => ClusterReset((IPEndPoint)endpoints[nodeIndex], soft, expiry, logger);

/// <summary>
/// Runs CLUSTER RESET (soft or hard) on the given node with the given expiry.
/// Asserts an "OK" reply; fails the test on error.
/// </summary>
public string ClusterReset(IPEndPoint endPoint, bool soft = true, int expiry = 60, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        var args = new List<object>() {
            "reset",
            soft ? "soft" : "hard",
            expiry.ToString()
        };
        var result = (string)server.Execute("cluster", args);
        ClassicAssert.AreEqual("OK", result);
        return result;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterReset");
        Assert.Fail(ex.Message);
        return ex.Message;
    }
}
/// <summary>Index-based overload of <see cref="ClusterKeySlot(IPEndPoint, string, ILogger)"/>.</summary>
public int ClusterKeySlot(int nodeIndex, string key, ILogger logger = null)
    => ClusterKeySlot((IPEndPoint)endpoints[nodeIndex], key, logger);

/// <summary>
/// Runs CLUSTER KEYSLOT for <paramref name="key"/> on the given node and returns the slot,
/// or fails the test (returning -1) on error.
/// </summary>
public int ClusterKeySlot(IPEndPoint endPoint, string key, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        var args = new List<object>() {
            "keyslot",
            Encoding.ASCII.GetBytes(key)
        };
        var result = (string)server.Execute("cluster", args);
        return int.Parse(result);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterKeySlot");
        // Fix: include the exception message in the failure, consistent with every
        // sibling helper (the original called Assert.Fail() with no diagnostic).
        Assert.Fail(ex.Message);
        return -1;
    }
}
/// <summary>Index-based overload of <see cref="FlushAll(IPEndPoint, ILogger)"/>.</summary>
public void FlushAll(int nodeIndex, ILogger logger = null)
    => FlushAll((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Flushes all databases on the given node; fails the test on error.</summary>
public void FlushAll(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        server.FlushAllDatabases();
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; FlushAllDatabases");
        Assert.Fail();
    }
}

/// <summary>
/// Builds a human-readable dump of CLUSTER NODES output for the given node indices,
/// one section per node, for diagnostic logging.
/// </summary>
public string ClusterStatus(int[] nodeIndices)
{
    var clusterStatus = "";
    foreach (var index in nodeIndices)
        clusterStatus += $"{GetNodeInfo(ClusterNodes(index))}\n";
    // Formats one node's view of the cluster: origin header plus raw config lines.
    static string GetNodeInfo(ClusterConfiguration nodeConfig)
    {
        var output = $"[{nodeConfig.Origin}]";
        foreach (var node in nodeConfig.Nodes)
            output += $"\n\t{node.Raw}";
        return output;
    }
    return clusterStatus;
}
/// <summary>Index-based overload of <see cref="ClusterNodes(IPEndPoint, ILogger)"/>.</summary>
public ClusterConfiguration ClusterNodes(int nodeIndex, ILogger logger = null)
    => ClusterNodes((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>
/// Returns the CLUSTER NODES view from the given node, or fails the test
/// (returning null) on error.
/// </summary>
public ClusterConfiguration ClusterNodes(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        return redis.GetServer(endPoint).ClusterNodes();
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterNodes");
        Assert.Fail();
        return null;
    }
}
/// <summary>
/// Waits until the node at <paramref name="syncOnNodeIndex"/> sees <paramref name="count"/>
/// cluster nodes, then waits until every other node's CLUSTER NODES view matches that
/// node's view exactly (same nodes, node-for-node equality).
/// </summary>
public void WaitClusterNodesSync(int syncOnNodeIndex, int count, ILogger logger)
{
    // Phase 1: wait for the reference node to see the expected node count.
    while (true)
    {
        var config = ClusterNodes(syncOnNodeIndex, logger);
        if (config.Nodes.Count == count)
            break;
        BackOff(cancellationToken: context.cts.Token);
    }
    // Phase 2: re-read the reference view and compare every other node against it;
    // any mismatch restarts the comparison from a fresh reference snapshot.
    retrySync:
    var configNodes = ClusterNodes(syncOnNodeIndex, logger).Nodes.ToList();
    // Sort by node-id so views from different nodes can be compared positionally.
    configNodes.Sort((x, y) => x.NodeId.CompareTo(y.NodeId));
    for (var i = 0; i < endpoints.Count; i++)
    {
        if (i == syncOnNodeIndex) continue;
        var otherConfigNodes = ClusterNodes(i, logger).Nodes.ToList();
        otherConfigNodes.Sort((x, y) => x.NodeId.CompareTo(y.NodeId));
        if (configNodes.Count != otherConfigNodes.Count) goto retrySync;
        for (var j = 0; j < configNodes.Count; j++)
        {
            if (!configNodes[j].Equals(otherConfigNodes[j]))
            {
                BackOff(cancellationToken: context.cts.Token);
                goto retrySync;
            }
        }
    }
}
/// <summary>Index-based overload of <see cref="ClusterShards(IPEndPoint, ILogger)"/>.</summary>
public List<ShardInfo> ClusterShards(int nodeIndex, ILogger logger = null)
    => ClusterShards((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>
/// Runs CLUSTER SHARDS against the given node and parses the reply into
/// <see cref="ShardInfo"/> entries (slot ranges plus member nodes).
/// Returns null if the reply is null; fails the test on error.
/// </summary>
public List<ShardInfo> ClusterShards(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        var result = server.Execute("cluster", "shards");
        if (result.IsNull)
            return null;
        List<ShardInfo> shards = [];
        var shardArray = (RedisResult[])result;
        foreach (var shard in shardArray.Select(v => (RedisResult[])v))
        {
            // Each shard entry: ["slots", <slot pairs>, "nodes", <node entries>]
            ClassicAssert.AreEqual(4, shard.Length);
            var slots = (RedisResult[])shard[1];
            var nodes = (RedisResult[])shard[3];
            ShardInfo shardInfo = new()
            {
                slotRanges = []
            };
            // Slots come in (start, end) pairs.
            for (var i = 0; i < slots.Length; i += 2)
                shardInfo.slotRanges.Add(((int)slots[i], (int)slots[i + 1]));
            shardInfo.nodes = [];
            foreach (var node in nodes.Select(v => (RedisResult[])v))
            {
                // Node entry is a flat field-name/value list of 6 pairs (12 elements);
                // odd indices hold the values for id/port/endpoint/role/repl-offset.
                ClassicAssert.AreEqual(12, node.Length);
                NodeInfo nodeInfo = new()
                {
                    nodeIndex = GetNodeIndexFromPort((int)node[3]),
                    nodeid = (string)node[1],
                    port = (int)node[3],
                    address = (string)node[5],
                    role = Enum.Parse<NodeRole>((string)node[7]),
                    replicationOffset = (long)node[9]
                };
                shardInfo.nodes.Add(nodeInfo);
            }
            shards.Add(shardInfo);
        }
        return shards;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; ClusterShards");
        Assert.Fail(ex.Message);
        return null;
    }
}
/// <summary>Index-based overload of <see cref="SetKey(IPEndPoint, byte[], byte[], out int, out IPEndPoint, bool, int, ILogger)"/>.</summary>
public ResponseState SetKey(int nodeIndex, byte[] key, byte[] value, out int slot, out IPEndPoint actualEndpoint, bool asking = false, int expiry = -1, ILogger logger = null)
{
    var endPoint = GetEndPoint(nodeIndex);
    return SetKey(endPoint, key, value, out slot, out actualEndpoint, asking, expiry, logger);
}

/// <summary>
/// Issues SET (or SETEX when <paramref name="expiry"/> >= 0) against the given node without
/// client-side redirection, optionally preceded by ASKING. Parses MOVED/ASK/CLUSTERDOWN/
/// MIGRATING/WERR error replies into a <see cref="ResponseState"/>, reporting the redirect
/// target via <paramref name="actualEndpoint"/> and <paramref name="slot"/>.
/// </summary>
public ResponseState SetKey(IPEndPoint endPoint, byte[] key, byte[] value, out int slot, out IPEndPoint actualEndpoint, bool asking = false, int expiry = -1, ILogger logger = null)
{
    var server = GetServer(endPoint);
    slot = -1;
    actualEndpoint = endPoint;
    if (asking)
    {
        try
        {
            var resp = (string)server.Execute("ASKING");
            ClassicAssert.AreEqual("OK", resp);
        }
        catch (Exception ex)
        {
            logger?.LogError(ex, $"{nameof(SetKey)}");
            Assert.Fail(ex.Message);
        }
    }
    try
    {
        if (expiry == -1)
        {
            ICollection<object> args = [key, value];
            var resp = (string)server.Execute("set", args, CommandFlags.NoRedirect);
            ClassicAssert.AreEqual("OK", resp);
            return ResponseState.OK;
        }
        else
        {
            ICollection<object> args = [key, expiry, value];
            var resp = (string)server.Execute("setex", args, CommandFlags.NoRedirect);
            ClassicAssert.AreEqual("OK", resp);
            return ResponseState.OK;
        }
    }
    catch (Exception e)
    {
        // Redirections surface as exceptions; pick them apart from the message text.
        var tokens = e.Message.Split(' ');
        if (tokens.Length > 10 && tokens[2].Equals("MOVED"))
        {
            // "... MOVED ... <host:port> ... <slot> ..." — permanent redirect.
            var endpointSplit = tokens[5].Split(':');
            actualEndpoint = new IPEndPoint(IPAddress.Parse(endpointSplit[0]), int.Parse(endpointSplit[1]));
            slot = int.Parse(tokens[8]);
            return ResponseState.MOVED;
        }
        else if (tokens.Length > 10 && tokens[0].Equals("Endpoint"))
        {
            // "Endpoint <host:port> ... <slot> ..." — ASK (temporary) redirect.
            var endpointSplit = tokens[1].Split(':');
            actualEndpoint = new IPEndPoint(IPAddress.Parse(endpointSplit[0]), int.Parse(endpointSplit[1]));
            slot = int.Parse(tokens[4]);
            return ResponseState.ASK;
        }
        else if (e.Message.StartsWith("CLUSTERDOWN"))
        {
            return ResponseState.CLUSTERDOWN;
        }
        else if (e.Message.StartsWith("MIGRATING"))
        {
            return ResponseState.MIGRATING;
        }
        else if (e.Message.StartsWith("WERR"))
        {
            return ResponseState.REPLICA_WERR;
        }
        logger?.LogError(e, "Unexpected exception");
        Assert.Fail(e.Message);
        return ResponseState.NONE;
    }
}

/// <summary>Index-based overload of <see cref="GetKey(IPEndPoint, byte[], out int, out IPEndPoint, out ResponseState, bool, ILogger)"/>.</summary>
public string GetKey(int nodeIndex, byte[] key, out int slot, out IPEndPoint actualEndpoint, out ResponseState responseState, bool asking = false, ILogger logger = null)
{
    var endPoint = GetEndPoint(nodeIndex);
    return GetKey(endPoint, key, out slot, out actualEndpoint, out responseState, asking, logger);
}
/// <summary>
/// Issues GET against the given node without client-side redirection, optionally preceded
/// by ASKING. On success returns the value and sets <paramref name="responseState"/> to OK;
/// MOVED/ASK/CLUSTERDOWN error replies are parsed into <paramref name="responseState"/>,
/// with the redirect target reported via <paramref name="actualEndpoint"/> and <paramref name="slot"/>.
/// </summary>
public string GetKey(IPEndPoint endPoint, byte[] key, out int slot, out IPEndPoint actualEndpoint, out ResponseState responseState, bool asking = false, ILogger logger = null)
{
    slot = -1;
    actualEndpoint = endPoint;
    responseState = ResponseState.NONE;
    var server = GetServer(endPoint);
    string result;
    if (asking)
    {
        try
        {
            var resp = (string)server.Execute("ASKING");
            ClassicAssert.AreEqual("OK", resp);
        }
        catch (Exception ex)
        {
            // Fix: log under this method's name (original copy-pasted nameof(SetKey)).
            logger?.LogError(ex, $"{nameof(GetKey)}");
            Assert.Fail(ex.Message);
        }
    }
    try
    {
        ICollection<object> args = new List<object>() { (object)key };
        result = (string)server.Execute("get", args, CommandFlags.NoRedirect);
        slot = HashSlot(key);
        responseState = ResponseState.OK;
        return result;
    }
    catch (Exception e)
    {
        // Redirections surface as exceptions; pick them apart from the message text.
        var tokens = e.Message.Split(' ');
        if (tokens.Length > 10 && tokens[2].Equals("MOVED"))
        {
            var endpointSplit = tokens[5].Split(':');
            actualEndpoint = new IPEndPoint(IPAddress.Parse(endpointSplit[0]), int.Parse(endpointSplit[1]));
            slot = int.Parse(tokens[8]);
            responseState = ResponseState.MOVED;
            return "MOVED";
        }
        else if (tokens.Length > 10 && tokens[0].Equals("Endpoint"))
        {
            var endpointSplit = tokens[1].Split(':');
            actualEndpoint = new IPEndPoint(IPAddress.Parse(endpointSplit[0]), int.Parse(endpointSplit[1]));
            slot = int.Parse(tokens[4]);
            responseState = ResponseState.ASK;
            return "ASK";
        }
        else if (tokens[0].Equals("ASK"))
        {
            // Raw "ASK <slot> <host:port>" form.
            var endpointSplit = tokens[2].Split(':');
            actualEndpoint = new IPEndPoint(IPAddress.Parse(endpointSplit[0]), int.Parse(endpointSplit[1]));
            slot = int.Parse(tokens[1]);
            responseState = ResponseState.ASK;
            return "ASK";
        }
        else if (e.Message.StartsWith("CLUSTERDOWN"))
        {
            responseState = ResponseState.CLUSTERDOWN;
            return "CLUSTERDOWN";
        }
        logger?.LogError(e, "Unexpected exception");
        Assert.Fail(e.Message);
        return e.Message;
    }
}
/// <summary>
/// Issues MSET for parallel key/value lists without client-side redirection.
/// Returns the server reply, or "MOVED"/"ASK"/"CROSSSLOT" parsed from the error,
/// reporting the redirect target via <paramref name="address"/>/<paramref name="port"/>/<paramref name="slot"/>.
/// </summary>
public string SetMultiKey(int nodeIndex, List<byte[]> keys, List<byte[]> values, out int slot, out string address, out int port)
{
    slot = -1;
    address = null;
    port = -1;
    var server = GetServer(nodeIndex);
    // Interleave keys and values: MSET k1 v1 k2 v2 ...
    ICollection<object> args = new List<object>();
    for (var i = 0; i < keys.Count; i++)
    {
        args.Add(keys[i]);
        args.Add(values[i]);
    }
    try
    {
        return (string)server.Execute("mset", args, CommandFlags.NoRedirect);
    }
    catch (Exception e)
    {
        // Redirections surface as exceptions; pick them apart from the message text.
        var tokens = e.Message.Split(' ');
        if (tokens.Length > 10 && tokens[2].Equals("MOVED"))
        {
            address = tokens[5].Split(':')[0];
            port = int.Parse(tokens[5].Split(':')[1]);
            slot = int.Parse(tokens[8]);
            return "MOVED";
        }
        else if (tokens.Length > 10 && tokens[0].Equals("Endpoint"))
        {
            address = tokens[1].Split(':')[0];
            port = int.Parse(tokens[1].Split(':')[1]);
            slot = int.Parse(tokens[4]);
            return "ASK";
        }
        if (e.Message.StartsWith("CROSSSLOT"))
        {
            return "CROSSSLOT";
        }
        Assert.Fail(e.Message);
        return e.Message;
    }
}

/// <summary>
/// Issues MGET for a key list without client-side redirection. On success returns "OK"
/// with the values in <paramref name="getResult"/>; otherwise returns "MOVED"/"ASK"/
/// "CROSSSLOT" parsed from the error reply.
/// </summary>
public string GetMultiKey(int nodeIndex, List<byte[]> keys, out List<byte[]> getResult, out int slot, out string address, out int port)
{
    getResult = null;
    slot = -1;
    address = null;
    port = -1;
    var server = GetServer(nodeIndex);
    ICollection<object> args = new List<object>();
    for (int i = 0; i < keys.Count; i++)
    {
        args.Add(keys[i]);
    }
    try
    {
        var result = server.Execute("mget", args, CommandFlags.NoRedirect);
        getResult = [.. ((RedisResult[])result).Select(x => (byte[])x)];
        return "OK";
    }
    catch (Exception e)
    {
        var tokens = e.Message.Split(' ');
        if (tokens.Length > 10 && tokens[2].Equals("MOVED"))
        {
            address = tokens[5].Split(':')[0];
            port = int.Parse(tokens[5].Split(':')[1]);
            slot = int.Parse(tokens[8]);
            return "MOVED";
        }
        else if (tokens.Length > 10 && tokens[0].Equals("Endpoint"))
        {
            address = tokens[1].Split(':')[0];
            port = int.Parse(tokens[1].Split(':')[1]);
            slot = int.Parse(tokens[4]);
            return "ASK";
        }
        if (e.Message.StartsWith("CROSSSLOT"))
        {
            return "CROSSSLOT";
        }
        Assert.Fail(e.Message);
        return e.Message;
    }
}
/// <summary>
/// LPUSHes <paramref name="elements"/> onto <paramref name="key"/> (pushed in reverse so the
/// stored list order matches the input) and asserts the reported length. Fails the test on error.
/// </summary>
public int Lpush(int nodeIndex, string key, List<int> elements, ILogger logger = null)
{
    try
    {
        var server = GetServer(nodeIndex);
        var args = new List<object>() { key };
        // Push in reverse: LPUSH prepends, so this preserves the original element order.
        for (int i = elements.Count - 1; i >= 0; i--) args.Add(elements[i]);
        var result = (int)server.Execute("LPUSH", args);
        ClassicAssert.AreEqual(elements.Count, result);
        return result;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "lpush error");
        Assert.Fail(ex.Message);
        return -1;
    }
}

/// <summary>Returns the full list stored at <paramref name="key"/> (LRANGE key 0 -1); fails the test on error.</summary>
public List<int> Lrange(int nodeIndex, string key, ILogger logger = null)
{
    try
    {
        var server = GetServer(nodeIndex);
        var args = new List<object>() { key, "0", "-1" };
        var result = server.Execute("LRANGE", args);
        return [.. ((int[])result)];
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "lrange error");
        Assert.Fail(ex.Message);
        return null;
    }
}

/// <summary>
/// SADDs <paramref name="elements"/> into the set at <paramref name="key"/> and asserts all
/// were newly added. Fails the test on error.
/// </summary>
public void Sadd(int nodeIndex, string key, List<int> elements, ILogger logger = null)
{
    try
    {
        var server = GetServer(nodeIndex);
        var args = new List<object>() { key };
        // Insertion order is irrelevant for a set; reverse iteration mirrors Lpush above.
        for (int i = elements.Count - 1; i >= 0; i--) args.Add(elements[i]);
        var result = (int)server.Execute("SADD", args);
        ClassicAssert.AreEqual(elements.Count, result);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "sadd error");
        Assert.Fail(ex.Message);
    }
}

/// <summary>Returns all members of the set at <paramref name="key"/>; fails the test on error.</summary>
public List<int> Smembers(int nodeIndex, string key, ILogger logger = null)
{
    try
    {
        var server = GetServer(nodeIndex);
        var args = new List<object>() { key };
        var result = server.Execute("SMEMBERS", args);
        return [.. ((int[])result)];
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "smembers error");
        Assert.Fail(ex.Message);
        return null;
    }
}
/// <summary>Index-based overload of <see cref="GetStoreCurrentAofAddress(IPEndPoint, ILogger)"/>.</summary>
public long GetStoreCurrentAofAddress(int nodeIndex, ILogger logger = null)
    => GetStoreCurrentAofAddress((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the main store's current safe AOF address from INFO replication; fails the test on error.</summary>
public long GetStoreCurrentAofAddress(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var storeCurrentSafeAofAddress = GetReplicationInfo(endPoint, [ReplicationInfoItem.STORE_CURRENT_SAFE_AOF_ADDRESS], logger)[0].Item2;
        return long.Parse(storeCurrentSafeAofAddress);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetStoreCurrentAofAddress");
        Assert.Fail(ex.Message);
        return 0;
    }
}

/// <summary>Index-based overload of <see cref="GetStoreRecoveredAofAddress(IPEndPoint, ILogger)"/>.</summary>
public long GetStoreRecoveredAofAddress(int nodeIndex, ILogger logger = null)
    => GetStoreRecoveredAofAddress((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the main store's recovered safe AOF address from INFO replication; fails the test on error.</summary>
public long GetStoreRecoveredAofAddress(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var storeRecoveredSafeAofAddress = GetReplicationInfo(endPoint, [ReplicationInfoItem.STORE_RECOVERED_SAFE_AOF_ADDRESS], logger)[0].Item2;
        return long.Parse(storeRecoveredSafeAofAddress);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occured; GetStoreRecoveredAofAddress");
        Assert.Fail(ex.Message);
        return 0;
    }
}

/// <summary>Index-based overload of <see cref="GetObjectStoreCurrentAofAddress(IPEndPoint, ILogger)"/>.</summary>
public long GetObjectStoreCurrentAofAddress(int nodeIndex, ILogger logger = null)
    => GetObjectStoreCurrentAofAddress((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the object store's current safe AOF address from INFO replication; fails the test on error.</summary>
public long GetObjectStoreCurrentAofAddress(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var objectStoreCurrentSafeAofAddress = GetReplicationInfo(endPoint, [ReplicationInfoItem.OBJECT_STORE_CURRENT_SAFE_AOF_ADDRESS], logger)[0].Item2;
        return long.Parse(objectStoreCurrentSafeAofAddress);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occured; GetObjectStoreCurrentAofAddress");
        Assert.Fail(ex.Message);
        return 0;
    }
}

/// <summary>Index-based overload of <see cref="GetObjectStoreRecoveredAofAddress(IPEndPoint, ILogger)"/>.</summary>
public long GetObjectStoreRecoveredAofAddress(int nodeIndex, ILogger logger = null)
    => GetObjectStoreRecoveredAofAddress((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the object store's recovered safe AOF address from INFO replication; fails the test on error.</summary>
public long GetObjectStoreRecoveredAofAddress(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var objectStoreRecoveredSafeAofAddress = GetReplicationInfo(endPoint, [ReplicationInfoItem.OBJECT_STORE_RECOVERED_SAFE_AOF_ADDRESS], logger)[0].Item2;
        return long.Parse(objectStoreRecoveredSafeAofAddress);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetObjectStoreRecoveredAofAddress");
        Assert.Fail(ex.Message);
        return 0;
    }
}

/// <summary>Index-based overload of <see cref="GetConnectedReplicas(IPEndPoint, ILogger)"/>.</summary>
public long GetConnectedReplicas(int nodeIndex, ILogger logger = null)
    => GetConnectedReplicas((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the connected-replica count from INFO replication; fails the test on error.</summary>
public long GetConnectedReplicas(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var replicaCount = GetReplicationInfo(endPoint, [ReplicationInfoItem.CONNECTED_REPLICAS], logger)[0].Item2;
        return long.Parse(replicaCount);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetConnectedReplicas");
        Assert.Fail(ex.Message);
        return 0;
    }
}
/// <summary>Index-based overload of <see cref="RoleCommand(IPEndPoint, ILogger)"/>.</summary>
public Role RoleCommand(int nodeIndex, ILogger logger = null)
    => RoleCommand(endpoints[nodeIndex].ToIPEndPoint(), logger);

/// <summary>
/// Issues the ROLE command against the given node, or fails the test
/// (returning null) on error.
/// </summary>
public Role RoleCommand(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        return redis.GetServer(endPoint).Role();
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "{command}", nameof(NodeRole));
        Assert.Fail(ex.Message);
        return null;
    }
}
/// <summary>Index-based overload of <see cref="GetReplicationRole(IPEndPoint, ILogger)"/>.</summary>
public string GetReplicationRole(int nodeIndex, ILogger logger = null)
    => GetReplicationRole((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the node's role ("master"/"slave") from INFO replication; fails the test on error.</summary>
public string GetReplicationRole(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        return GetReplicationInfo(endPoint, [ReplicationInfoItem.ROLE], logger)[0].Item2;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetReplicationRole");
        Assert.Fail(ex.Message);
        return null;
    }
}

/// <summary>Index-based overload of <see cref="GetReplicationOffset(IPEndPoint, ILogger)"/>.</summary>
public long GetReplicationOffset(int nodeIndex, ILogger logger = null)
    => GetReplicationOffset((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the replication offset from INFO replication; fails the test on error.</summary>
public long GetReplicationOffset(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var offset = GetReplicationInfo(endPoint, [ReplicationInfoItem.REPLICATION_OFFSET], logger)[0].Item2;
        return long.Parse(offset);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetReplicationOffset");
        Assert.Fail(ex.Message);
        return 0;
    }
}

/// <summary>Index-based overload of <see cref="GetReplicationSyncStatus(IPEndPoint, ILogger)"/>.</summary>
public bool GetReplicationSyncStatus(int nodeIndex, ILogger logger = null)
    => GetReplicationSyncStatus((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads whether a primary sync is in progress from INFO replication; fails the test on error.</summary>
public bool GetReplicationSyncStatus(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var sync = GetReplicationInfo(endPoint, [ReplicationInfoItem.PRIMARY_SYNC_IN_PROGRESS], logger)[0].Item2;
        return bool.Parse(sync);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetReplicationSyncStatus");
        Assert.Fail(ex.Message);
        return false;
    }
}

/// <summary>Index-based overload of <see cref="GetFailoverState(IPEndPoint, ILogger)"/>.</summary>
public string GetFailoverState(int nodeIndex, ILogger logger = null)
    => GetFailoverState((IPEndPoint)endpoints[nodeIndex], logger);

/// <summary>Reads the primary failover state from INFO replication; fails the test on error.</summary>
public string GetFailoverState(IPEndPoint endPoint, ILogger logger = null)
{
    try
    {
        var failoverState = GetReplicationInfo(endPoint, [ReplicationInfoItem.PRIMARY_FAILOVER_STATE], logger)[0].Item2;
        return failoverState;
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetFailoverState");
        Assert.Fail(ex.Message);
        return null;
    }
}
/// <summary>Index-based overload of the IPEndPoint version below.</summary>
public List<(ReplicationInfoItem, string)> GetReplicationInfo(int nodeIndex, ReplicationInfoItem[] infoItems, ILogger logger = null)
    => GetReplicationInfo((IPEndPoint)endpoints[nodeIndex], infoItems, logger);

/// <summary>
/// Fetches the raw INFO "replication" section from the given node and extracts the
/// requested items via <see cref="ProcessReplicationInfo"/>. Fails the test on error.
/// </summary>
private List<(ReplicationInfoItem, string)> GetReplicationInfo(IPEndPoint endPoint, ReplicationInfoItem[] infoItems, ILogger logger = null)
{
    var server = redis.GetServer(endPoint);
    try
    {
        var result = server.InfoRawAsync("replication").Result;
        return ProcessReplicationInfo(result, infoItems);
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occured; GetReplicationInfo");
        Assert.Fail(ex.Message);
    }
    return null;
}
/// <summary>
/// Extracts the requested items from the raw INFO "replication" section text.
/// For each requested item, every line with the matching prefix contributes one
/// (item, value) tuple. Fails the test on an unsupported item or when nothing matched.
/// </summary>
private static List<(ReplicationInfoItem, string)> ProcessReplicationInfo(string infoSection, ReplicationInfoItem[] infoItem)
{
    // Map each enum member to the line prefix used by the INFO replication section.
    // (Replaces the original per-line switch, which repeated the mapping for every line.)
    static string PrefixOf(ReplicationInfoItem item) => item switch
    {
        ReplicationInfoItem.ROLE => "role:",
        ReplicationInfoItem.CONNECTED_REPLICAS => "connected_slaves:",
        ReplicationInfoItem.PRIMARY_REPLID => "master_replid:",
        ReplicationInfoItem.REPLICATION_OFFSET => "master_repl_offset:",
        ReplicationInfoItem.STORE_CURRENT_SAFE_AOF_ADDRESS => "store_current_safe_aof_address:",
        ReplicationInfoItem.STORE_RECOVERED_SAFE_AOF_ADDRESS => "store_recovered_safe_aof_address:",
        ReplicationInfoItem.OBJECT_STORE_CURRENT_SAFE_AOF_ADDRESS => "object_store_current_safe_aof_address:",
        ReplicationInfoItem.OBJECT_STORE_RECOVERED_SAFE_AOF_ADDRESS => "object_store_recovered_safe_aof_address:",
        ReplicationInfoItem.PRIMARY_SYNC_IN_PROGRESS => "master_sync_in_progress:",
        ReplicationInfoItem.PRIMARY_FAILOVER_STATE => "master_failover_state:",
        ReplicationInfoItem.RECOVER_STATUS => "recover_status:",
        ReplicationInfoItem.LAST_FAILOVER_STATE => "last_failover_state:",
        _ => null,
    };

    var items = new List<(ReplicationInfoItem, string)>();
    var data = infoSection.Split('\n');
    foreach (var ii in infoItem)
    {
        var startsWith = PrefixOf(ii);
        if (startsWith == null)
        {
            // Fix: report the offending item; the original interpolated the whole
            // array (infoItem), which prints the array type name, not the value.
            Assert.Fail($"type {ii} not supported!");
            return null;
        }
        foreach (var item in data)
        {
            if (item.StartsWith(startsWith))
                items.Add((ii, item.Split(startsWith)[1].Trim()));
        }
    }
    // Same array-interpolation fix for the "nothing matched" failure message.
    if (items.Count == 0) Assert.Fail($"Getting replication info for items {string.Join(",", infoItem)} \n {infoSection} \n");
    return items;
}
/// <summary>Reads the current checkpoint version of the main or object store from INFO.</summary>
public int GetStoreCurrentVersion(int nodeIndex, bool isMainStore, ILogger logger = null)
{
    var result = GetStoreInfo(endpoints[nodeIndex].ToIPEndPoint(), [StoreInfoItem.CurrentVersion], isMainStore, logger);
    ClassicAssert.AreEqual(1, result.Count);
    return int.Parse(result[0].Item2);
}

/// <summary>Index-based overload of the IPEndPoint version below.</summary>
public List<(StoreInfoItem, string)> GetStoreInfo(int nodeIndex, HashSet<StoreInfoItem> infoItems, bool isMainStore, ILogger logger = null)
    => GetStoreInfo(endpoints[nodeIndex].ToIPEndPoint(), infoItems, isMainStore, logger);

/// <summary>
/// Fetches the raw INFO "store" or "objectstore" section and returns the key:value
/// lines whose key parses to one of the requested <see cref="StoreInfoItem"/>s.
/// Fails the test on error.
/// </summary>
private List<(StoreInfoItem, string)> GetStoreInfo(IPEndPoint endPoint, HashSet<StoreInfoItem> infoItems, bool isMainStore, ILogger logger = null)
{
    var fields = new List<(StoreInfoItem, string)>();
    try
    {
        var server = redis.GetServer(endPoint);
        var result = server.InfoRawAsync(isMainStore ? "store" : "objectstore").Result;
        var data = result.Split('\n');
        foreach (var line in data)
        {
            // Skip section headers (e.g. "# Store").
            if (line.StartsWith('#'))
                continue;
            var field = line.Trim().Split(':');
            // Ignore lines whose key is not a known StoreInfoItem.
            if (!Enum.TryParse(field[0], ignoreCase: true, out StoreInfoItem type))
                continue;
            if (infoItems.Contains(type))
                fields.Add((type, field[1]));
        }
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; GetReplicationInfo");
        Assert.Fail(ex.Message);
    }
    return fields;
}
/// <summary>Index-based overload of <see cref="GetInfo(IPEndPoint, string, string, ILogger)"/>.</summary>
public string GetInfo(int nodeIndex, string section, string segment, ILogger logger = null)
    => GetInfo(endpoints[nodeIndex].ToIPEndPoint(), section, segment, logger);

/// <summary>
/// Returns the value of <paramref name="segment"/> within the given INFO
/// <paramref name="section"/>; fails the test if the section or segment is missing
/// or an error occurs.
/// </summary>
public string GetInfo(IPEndPoint endPoint, string section, string segment, ILogger logger = null)
{
    try
    {
        var server = redis.GetServer(endPoint);
        var result = server.Info(section);
        ClassicAssert.AreEqual(1, result.Length, "section does not exist");
        foreach (var item in result[0])
            if (item.Key.Equals(segment))
                return item.Value;
        Assert.Fail($"Segment not available for {section} section");
        return "";
    }
    catch (Exception ex)
    {
        // Fix: log under this method's name (original copy-pasted "GetFailoverState").
        logger?.LogError(ex, "An error has occurred; GetInfo");
        Assert.Fail(ex.Message);
        return null;
    }
}
/// <summary>
/// Waits (with backoff) until the replica's replication offset catches up exactly
/// to the primary's, logging the store versions on every mismatch.
/// </summary>
public void WaitForReplicaAofSync(int primaryIndex, int secondaryIndex, ILogger logger = null, CancellationToken cancellation = default)
{
    long primaryReplicationOffset;
    long secondaryReplicationOffset1;
    while (true)
    {
        cancellation.ThrowIfCancellationRequested();
        primaryReplicationOffset = GetReplicationOffset(primaryIndex, logger);
        secondaryReplicationOffset1 = GetReplicationOffset(secondaryIndex, logger);
        if (primaryReplicationOffset == secondaryReplicationOffset1)
            break;
        var primaryMainStoreVersion = context.clusterTestUtils.GetStoreCurrentVersion(primaryIndex, isMainStore: true, logger);
        var replicaMainStoreVersion = context.clusterTestUtils.GetStoreCurrentVersion(secondaryIndex, isMainStore: true, logger);
        BackOff(cancellationToken: context.cts.Token, msg: $"[{endpoints[primaryIndex]}]: {primaryMainStoreVersion},{primaryReplicationOffset} != [{endpoints[secondaryIndex]}]: {replicaMainStoreVersion},{secondaryReplicationOffset1}");
    }
    // Fix: the original message template contained an unbalanced brace
    // ("{endpoints[secondaryEndpoint}"), which is an invalid structured-logging template.
    logger?.LogInformation("[{primaryEndpoint}]{primaryReplicationOffset} ?? [{secondaryEndpoint}]{secondaryReplicationOffset1}", endpoints[primaryIndex], primaryReplicationOffset, endpoints[secondaryIndex], secondaryReplicationOffset1);
}
/// <summary>
/// Waits (with backoff) until the primary reports exactly <paramref name="minCount"/>
/// connected replicas, asserting the node's role stays "master" throughout.
/// </summary>
public void WaitForConnectedReplicaCount(int primaryIndex, long minCount, ILogger logger = null)
{
    while (true)
    {
        var items = GetReplicationInfo(primaryIndex, [ReplicationInfoItem.ROLE, ReplicationInfoItem.CONNECTED_REPLICAS], logger);
        var role = items[0].Item2;
        // Fix: expected value comes first in ClassicAssert.AreEqual (the original had the
        // arguments swapped, which garbles the failure message).
        ClassicAssert.AreEqual("master", role);
        try
        {
            var count = long.Parse(items[1].Item2);
            // NOTE(review): despite the parameter name, this waits for an exact match,
            // not ">= minCount" — confirm callers rely on exact equality before changing.
            if (count == minCount) break;
        }
        catch (Exception ex)
        {
            logger?.LogError(ex, "An error occurred at WaitForConnectedReplicaCount");
            Assert.Fail(ex.Message);
        }
        BackOff(cancellationToken: context.cts.Token);
    }
}
/// <summary>
/// Waits until the node reports role "slave" with no primary sync in progress
/// (i.e. replica recovery has completed).
/// </summary>
public void WaitForReplicaRecovery(int nodeIndex, ILogger logger = null)
{
    while (true)
    {
        var items = GetReplicationInfo(nodeIndex, [ReplicationInfoItem.ROLE, ReplicationInfoItem.PRIMARY_SYNC_IN_PROGRESS], logger);
        var role = items[0].Item2;
        if (role.Equals("slave"))
        {
            try
            {
                var syncInProgress = bool.Parse(items[1].Item2);
                if (!syncInProgress) break;
            }
            catch (Exception ex)
            {
                // Fix: identify this method in the log (original copy-pasted
                // "WaitForConnectedReplicaCount").
                logger?.LogError(ex, "An error occurred at WaitForReplicaRecovery");
                Assert.Fail(ex.Message);
            }
        }
        // Fix: back off on every iteration. The original only backed off while the role
        // was already "slave", busy-spinning the server at full speed until the role flipped.
        BackOff(cancellationToken: context.cts.Token);
    }
}
/// <summary>Waits (with backoff) until the node's failover state reads "no-failover".</summary>
public void WaitForNoFailover(int nodeIndex, ILogger logger = null)
{
    while (true)
    {
        var failoverState = GetFailoverState(nodeIndex, logger);
        if (failoverState.Equals("no-failover")) break;
        BackOff(cancellationToken: context.cts.Token);
    }
}

/// <summary>Waits (with backoff) until the node's last failover state reads "failover-completed".</summary>
public void WaitForFailoverCompleted(int nodeIndex, ILogger logger = null)
{
    while (true)
    {
        var infoItem = context.clusterTestUtils.GetReplicationInfo(nodeIndex, [ReplicationInfoItem.LAST_FAILOVER_STATE], logger: context.logger);
        if (infoItem[0].Item2.Equals("failover-completed"))
            break;
        BackOff(cancellationToken: context.cts.Token, msg: nameof(WaitForFailoverCompleted));
    }
}

/// <summary>Waits (with backoff) until the node's ROLE reply reports "master".</summary>
public void WaitForPrimaryRole(int nodeIndex, ILogger logger = null)
{
    while (true)
    {
        var role = RoleCommand(nodeIndex, logger);
        if (role.Value.Equals("master")) break;
        BackOff(cancellationToken: context.cts.Token);
    }
}
/// <summary>Index-based overload of <see cref="Checkpoint(IPEndPoint, ILogger)"/>.</summary>
public void Checkpoint(int nodeIndex, ILogger logger = null)
    => Checkpoint((IPEndPoint)endpoints[nodeIndex], logger: logger);

/// <summary>
/// Triggers a foreground checkpoint (SAVE) on the given node; fails the test on error.
/// The previous LASTSAVE timestamp is captured for the (currently disabled) spin-wait below.
/// </summary>
public void Checkpoint(IPEndPoint endPoint, ILogger logger = null)
{
    var server = redis.GetServer(endPoint);
    try
    {
        var previousSaveTicks = (long)server.Execute("LASTSAVE");
#pragma warning disable CS0618 // Type or member is obsolete
        // Foreground save blocks until the checkpoint completes, so no wait is needed.
        server.Save(SaveType.ForegroundSave);
#pragma warning restore CS0618 // Type or member is obsolete
        //// Spin wait for checkpoint to complete
        //while (true)
        //{
        //    var lastSaveTicks = (long)server.Execute("LASTSAVE");
        //    if (previousSaveTicks < lastSaveTicks) break;
        //    BackOff(TimeSpan.FromSeconds(1));
        //}
    }
    catch (Exception ex)
    {
        logger?.LogError(ex, "An error has occurred; StoreWrapper.Checkpoint");
        Assert.Fail();
    }
}
        /// <summary>Returns the LASTSAVE time of the node at <paramref name="nodeIndex"/>; see the endpoint overload.</summary>
        public DateTime LastSave(int nodeIndex, ILogger logger = null)
            => LastSave((IPEndPoint)endpoints[nodeIndex], logger: logger);
public DateTime LastSave(IPEndPoint endPoint, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
return server.LastSave();
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred; WaitCheckpoint");
Assert.Fail();
}
return default;
}
        /// <summary>Blocks until the node at <paramref name="nodeIndex"/> reports a LASTSAVE at or after <paramref name="time"/>; see the endpoint overload.</summary>
        public void WaitCheckpoint(int nodeIndex, DateTime time, ILogger logger = null)
            => WaitCheckpoint((IPEndPoint)endpoints[nodeIndex], time: time, logger: logger);
public void WaitCheckpoint(IPEndPoint endPoint, DateTime time, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
while (true)
{
var lastSaveTime = server.LastSave();
if (lastSaveTime >= time)
break;
BackOff(cancellationToken: context.cts.Token);
}
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred; WaitCheckpoint");
Assert.Fail();
}
}
        /// <summary>Executes INCRBY <paramref name="key"/> <paramref name="value"/> on the node at <paramref name="nodeIndex"/>; see the endpoint overload.</summary>
        public int IncrBy(int nodeIndex, string key, long value, ILogger logger = null)
            => IncrBy((IPEndPoint)endpoints[nodeIndex], key, value, logger);
public int IncrBy(IPEndPoint endPoint, string key, long value, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
return (int)server.Execute("incrby", key, value);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occured; IncrBy");
Assert.Fail();
}
return -1;
}
        /// <summary>Runs CONFIG SET <paramref name="parameter"/> <paramref name="value"/> on the node at <paramref name="nodeIndex"/>; see the endpoint overload.</summary>
        public void ConfigSet(int nodeIndex, string parameter, string value, ILogger logger = null)
            => ConfigSet((IPEndPoint)endpoints[nodeIndex], parameter, value, logger);
public void ConfigSet(IPEndPoint endPoint, string parameter, string value, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
var resp = (string)server.Execute("config", "set", parameter, value);
ClassicAssert.AreEqual("OK", resp);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred; ConfigSet");
Assert.Fail();
}
}
        /// <summary>Runs a single CONFIG SET with multiple parameter/value pairs on the node at <paramref name="nodeIndex"/>; see the endpoint overload.</summary>
        public void ConfigSet(int nodeIndex, string[] parameter, string[] value, ILogger logger = null)
            => ConfigSet((IPEndPoint)endpoints[nodeIndex], parameter, value, logger);
public void ConfigSet(IPEndPoint endPoint, string[] parameter, string[] value, ILogger logger = null)
{
try
{
ClassicAssert.AreEqual(parameter.Length, value.Length, $"set config parameter/value length missmatch {parameter.Length} != {value.Length}");
ICollection<object> args = new List<object>() { "set" };
for (int i = 0; i < parameter.Length; i++)
{
args.Add(parameter[i]);
args.Add(value[i]);
}
var server = redis.GetServer(endPoint);
var resp = (string)server.Execute("config", args);
ClassicAssert.AreEqual("OK", resp);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred; ConfigSet");
Assert.Fail();
}
}
        /// <summary>Executes ACL LOAD on the node at <paramref name="nodeIndex"/>; see the endpoint overload.</summary>
        public void AclLoad(int nodeIndex, ILogger logger = null)
            => AclLoad((IPEndPoint)endpoints[nodeIndex], logger);
public void AclLoad(IPEndPoint endPoint, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
var resp = (string)server.Execute("ACL", "LOAD");
ClassicAssert.AreEqual("OK", resp);
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred; AclLoad");
Assert.Fail();
}
}
public ClusterNode GetAnyOtherNode(IPEndPoint endPoint, ILogger logger = null)
{
var config = ClusterNodes(endPoint, logger);
foreach (var node in config.Nodes)
{
if (!node.EndPoint.ToIPEndPoint().Equals(endPoint))
return node;
}
Assert.Fail("Single node cluster");
return null;
}
        /// <summary>Returns DBSIZE for the node at <paramref name="nodeIndex"/>; see the endpoint overload.</summary>
        public int DBSize(int nodeIndex, ILogger logger = null)
            => DBSize(endpoints[nodeIndex].ToIPEndPoint(), logger);
public int DBSize(IPEndPoint endPoint, ILogger logger = null)
{
try
{
var server = redis.GetServer(endPoint);
var count = (int)server.Execute("DBSIZE");
return count;
}
catch (Exception ex)
{
logger?.LogError(ex, "An error has occurred; DBSize");
Assert.Fail();
return -1;
}
}
}
} | ClusterTestUtils |
csharp | AvaloniaUI__Avalonia | tests/Avalonia.RenderTests.WpfCompare/CrossFact.cs | {
"start": 120,
"end": 184
} | public class ____ : StaTheoryAttribute
{
}
| CrossTheoryAttribute |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Core/test/Types.CursorPagination.Tests/InferenceTests.cs | {
"start": 1068,
"end": 1259
} | public class ____
{
[Helper1]
[UsePaging]
public Task<Connection<ProductBase>> GetProductsAsync()
=> throw new NotImplementedException();
}
| Query1 |
csharp | ServiceStack__ServiceStack | ServiceStack.Stripe/src/ServiceStack.Stripe/Types/StripeTypes.cs | {
"start": 2924,
"end": 3761
} | public class ____ : StripeId, IStripeProduct
{
public bool Active { get; set; }
public string[] Attributes { get; set; }
public string Caption { get; set; }
public DateTime? Created { get; set; }
public string[] DeactivateOn { get; set; }
public string Description { get; set; }
public string[] Images { get; set; }
public bool Livemode { get; set; }
public Dictionary<string, string> Metadata { get; set; }
public string Name { get; set; }
public StripePackageDimensions PackageDimensions { get; set; }
public bool Shippable { get; set; }
public StripeCollection<StripeSku> Skus { get; set; }
public string StatementDescriptor { get; set; }
public StripeProductType Type { get; set; }
public DateTime? Updated { get; set; }
public string Url { get; set; }
}
| StripeProduct |
csharp | bitwarden__server | test/Common/AutoFixture/Attributes/RequiredEnvironmentTheoryAttribute.cs | {
"start": 324,
"end": 1123
} | public class ____ : TheoryAttribute
{
private readonly string[] _environmentVariableNames;
public RequiredEnvironmentTheoryAttribute(params string[] environmentVariableNames)
{
_environmentVariableNames = environmentVariableNames;
if (!HasRequiredEnvironmentVariables())
{
Skip = $"Missing one or more required environment variables. ({string.Join(", ", _environmentVariableNames)})";
}
}
private bool HasRequiredEnvironmentVariables()
{
foreach (var env in _environmentVariableNames)
{
var value = Environment.GetEnvironmentVariable(env);
if (value == null)
{
return false;
}
}
return true;
}
}
| RequiredEnvironmentTheoryAttribute |
csharp | unoplatform__uno | src/Uno.UWP/Generated/3.0.0.0/Windows.Media.Core/AudioStreamDescriptor.cs | {
"start": 293,
"end": 7767
} | public partial class ____ : global::Windows.Media.Core.IMediaStreamDescriptor, global::Windows.Media.Core.IMediaStreamDescriptor2
{
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Media.MediaProperties.AudioEncodingProperties EncodingProperties
{
get
{
throw new global::System.NotImplementedException("The member AudioEncodingProperties AudioStreamDescriptor.EncodingProperties is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=AudioEncodingProperties%20AudioStreamDescriptor.EncodingProperties");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public uint? TrailingEncoderPadding
{
get
{
throw new global::System.NotImplementedException("The member uint? AudioStreamDescriptor.TrailingEncoderPadding is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=uint%3F%20AudioStreamDescriptor.TrailingEncoderPadding");
}
set
{
global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.Media.Core.AudioStreamDescriptor", "uint? AudioStreamDescriptor.TrailingEncoderPadding");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public uint? LeadingEncoderPadding
{
get
{
throw new global::System.NotImplementedException("The member uint? AudioStreamDescriptor.LeadingEncoderPadding is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=uint%3F%20AudioStreamDescriptor.LeadingEncoderPadding");
}
set
{
global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.Media.Core.AudioStreamDescriptor", "uint? AudioStreamDescriptor.LeadingEncoderPadding");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public string Name
{
get
{
throw new global::System.NotImplementedException("The member string AudioStreamDescriptor.Name is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=string%20AudioStreamDescriptor.Name");
}
set
{
global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.Media.Core.AudioStreamDescriptor", "string AudioStreamDescriptor.Name");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public string Language
{
get
{
throw new global::System.NotImplementedException("The member string AudioStreamDescriptor.Language is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=string%20AudioStreamDescriptor.Language");
}
set
{
global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.Media.Core.AudioStreamDescriptor", "string AudioStreamDescriptor.Language");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public bool IsSelected
{
get
{
throw new global::System.NotImplementedException("The member bool AudioStreamDescriptor.IsSelected is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=bool%20AudioStreamDescriptor.IsSelected");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public string Label
{
get
{
throw new global::System.NotImplementedException("The member string AudioStreamDescriptor.Label is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=string%20AudioStreamDescriptor.Label");
}
set
{
global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.Media.Core.AudioStreamDescriptor", "string AudioStreamDescriptor.Label");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public AudioStreamDescriptor(global::Windows.Media.MediaProperties.AudioEncodingProperties encodingProperties)
{
global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.Media.Core.AudioStreamDescriptor", "AudioStreamDescriptor.AudioStreamDescriptor(AudioEncodingProperties encodingProperties)");
}
#endif
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.AudioStreamDescriptor(Windows.Media.MediaProperties.AudioEncodingProperties)
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.EncodingProperties.get
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.IsSelected.get
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.Name.set
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.Name.get
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.Language.set
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.Language.get
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.LeadingEncoderPadding.set
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.LeadingEncoderPadding.get
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.TrailingEncoderPadding.set
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.TrailingEncoderPadding.get
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.Label.set
// Forced skipping of method Windows.Media.Core.AudioStreamDescriptor.Label.get
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Media.Core.AudioStreamDescriptor Copy()
{
throw new global::System.NotImplementedException("The member AudioStreamDescriptor AudioStreamDescriptor.Copy() is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=AudioStreamDescriptor%20AudioStreamDescriptor.Copy%28%29");
}
#endif
// Processing: Windows.Media.Core.IMediaStreamDescriptor
// Processing: Windows.Media.Core.IMediaStreamDescriptor2
}
}
| AudioStreamDescriptor |
csharp | ServiceStack__ServiceStack | ServiceStack/src/ServiceStack.Interfaces/FormatAttribute.cs | {
"start": 801,
"end": 979
} | public class ____ : FormatAttribute
{
public FormatEnumFlags(string type) : base(FormatMethods.EnumFlags)
{
Options = "{type:'" + type + "'}";
}
}
| FormatEnumFlags |
csharp | AvaloniaUI__Avalonia | tests/Avalonia.Base.UnitTests/AvaloniaPropertyTests.cs | {
"start": 8948,
"end": 9007
} | private class ____ : Class1
{
}
| Class2 |
csharp | icsharpcode__ILSpy | ILSpy.AddIn.Shared/Commands/NuGetReferenceForILSpy.cs | {
"start": 354,
"end": 1886
} | class ____
{
ProjectItem projectItem;
NuGetReferenceForILSpy(ProjectItem projectItem)
{
this.projectItem = projectItem;
}
/// <summary>
/// Detects whether the given selected item represents a supported project.
/// </summary>
/// <param name="itemData">Data object of selected item to check.</param>
/// <returns><see cref="NuGetReferenceForILSpy"/> instance or <c>null</c>, if item is not a supported project.</returns>
public static NuGetReferenceForILSpy Detect(object itemData)
{
ThreadHelper.ThrowIfNotOnUIThread();
if (itemData is ProjectItem projectItem)
{
var properties = Utils.GetProperties(projectItem.Properties, "Type", "ExtenderCATID");
if (((properties[0] as string) == "Package") || ((properties[1] as string) == PrjBrowseObjectCATID.prjCATIDCSharpReferenceBrowseObject))
{
return new NuGetReferenceForILSpy(projectItem);
}
}
return null;
}
/// <summary>
/// If possible retrieves parameters to use for launching ILSpy instance.
/// </summary>
/// <returns>Parameters object or <c>null, if not applicable.</c></returns>
public ILSpyParameters GetILSpyParameters()
{
ThreadHelper.ThrowIfNotOnUIThread();
var properties = Utils.GetProperties(projectItem.Properties, "Name", "Version", "Path");
if (properties[0] != null && properties[1] != null && properties[2] != null)
{
return new ILSpyParameters(new[] { $"{properties[2]}\\{properties[0]}.{properties[1]}.nupkg" });
}
return null;
}
}
}
| NuGetReferenceForILSpy |
csharp | Cysharp__MemoryPack | src/MemoryPack.Core/MemoryPackReaderOptionalState.cs | {
"start": 664,
"end": 1839
} | public sealed class ____ : IDisposable
{
readonly Dictionary<uint, object> refToObject;
public MemoryPackSerializerOptions Options { get; private set; }
internal MemoryPackReaderOptionalState()
{
refToObject = new Dictionary<uint, object>();
Options = null!;
}
internal void Init(MemoryPackSerializerOptions? options)
{
Options = options ?? MemoryPackSerializerOptions.Default;
}
public object GetObjectReference(uint id)
{
if (refToObject.TryGetValue(id, out var value))
{
return value;
}
MemoryPackSerializationException.ThrowMessage("Object is not found in this reference id:" + id);
return null!;
}
public void AddObjectReference(uint id, object value)
{
if (!refToObject.TryAdd(id, value))
{
MemoryPackSerializationException.ThrowMessage("Object is already added, id:" + id);
}
}
public void Reset()
{
refToObject.Clear();
Options = null!;
}
void IDisposable.Dispose()
{
MemoryPackReaderOptionalStatePool.Return(this);
}
}
| MemoryPackReaderOptionalState |
csharp | unoplatform__uno | src/Uno.UWP/ApplicationModel/DataTransfer/DataRequestDeferral.cs | {
"start": 79,
"end": 320
} | public partial class ____
{
private readonly DeferralCompletedHandler _handler;
internal DataRequestDeferral(DeferralCompletedHandler handler) => _handler = handler;
public void Complete() => _handler?.Invoke();
}
}
| DataRequestDeferral |
csharp | dotnet__efcore | src/EFCore.Relational/Migrations/Migration.cs | {
"start": 259,
"end": 470
} | class ____ by each EF Core migration.
/// </summary>
/// <remarks>
/// See <see href="https://aka.ms/efcore-docs-migrations">Database migrations</see> for more information and examples.
/// </remarks>
| inherited |
csharp | unoplatform__uno | src/Uno.UWP/Generated/3.0.0.0/Windows.Networking.Sockets/SocketActivityKind.cs | {
"start": 264,
"end": 836
} | public enum ____
{
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
None = 0,
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
StreamSocketListener = 1,
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
DatagramSocket = 2,
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
StreamSocket = 3,
#endif
}
#endif
}
| SocketActivityKind |
csharp | bitwarden__server | test/Core.Test/Vault/Services/CipherServiceTests.cs | {
"start": 1024,
"end": 52802
} | public class ____
{
[Theory, BitAutoData]
public async Task SaveAsync_WrongRevisionDate_Throws(SutProvider<CipherService> sutProvider, Cipher cipher)
{
var lastKnownRevisionDate = cipher.RevisionDate.AddDays(-1);
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.SaveAsync(cipher, cipher.UserId.Value, lastKnownRevisionDate));
Assert.Contains("out of date", exception.Message);
}
[Theory, BitAutoData]
public async Task SaveDetailsAsync_WrongRevisionDate_Throws(SutProvider<CipherService> sutProvider,
CipherDetails cipherDetails)
{
var lastKnownRevisionDate = cipherDetails.RevisionDate.AddDays(-1);
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.SaveDetailsAsync(cipherDetails, cipherDetails.UserId.Value, lastKnownRevisionDate));
Assert.Contains("out of date", exception.Message);
}
[Theory, BitAutoData]
public async Task ShareAsync_WrongRevisionDate_Throws(SutProvider<CipherService> sutProvider, Cipher cipher,
Organization organization, List<Guid> collectionIds)
{
sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id).Returns(organization);
var lastKnownRevisionDate = cipher.RevisionDate.AddDays(-1);
cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
{
[Guid.NewGuid().ToString()] = new CipherAttachment.MetaData { }
});
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.ShareAsync(cipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
lastKnownRevisionDate));
Assert.Contains("out of date", exception.Message);
}
[Theory, BitAutoData]
public async Task ShareManyAsync_WrongRevisionDate_Throws(SutProvider<CipherService> sutProvider,
IEnumerable<CipherDetails> ciphers, Guid organizationId, List<Guid> collectionIds)
{
sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organizationId)
.Returns(new Organization
{
PlanType = PlanType.EnterpriseAnnually,
MaxStorageGb = 100
});
var cipherInfos = ciphers.Select(c => (c, (DateTime?)c.RevisionDate.AddDays(-1)));
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.ShareManyAsync(cipherInfos, organizationId, collectionIds, ciphers.First().UserId.Value));
Assert.Contains("out of date", exception.Message);
}
[Theory]
[BitAutoData("")]
[BitAutoData("Correct Time")]
public async Task SaveAsync_CorrectRevisionDate_Passes(string revisionDateString,
SutProvider<CipherService> sutProvider, Cipher cipher)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
await sutProvider.Sut.SaveAsync(cipher, cipher.UserId.Value, lastKnownRevisionDate);
await sutProvider.GetDependency<ICipherRepository>().Received(1).ReplaceAsync(cipher);
}
[Theory]
[BitAutoData("")]
[BitAutoData("Correct Time")]
public async Task SaveDetailsAsync_CorrectRevisionDate_Passes(string revisionDateString,
SutProvider<CipherService> sutProvider, CipherDetails cipherDetails)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipherDetails.RevisionDate;
await sutProvider.Sut.SaveDetailsAsync(cipherDetails, cipherDetails.UserId.Value, lastKnownRevisionDate);
await sutProvider.GetDependency<ICipherRepository>().Received(1).ReplaceAsync(cipherDetails);
}
[Theory, BitAutoData]
public async Task CreateAttachmentAsync_WrongRevisionDate_Throws(SutProvider<CipherService> sutProvider, Cipher cipher, Guid savingUserId)
{
var lastKnownRevisionDate = cipher.RevisionDate.AddDays(-1);
var stream = new MemoryStream();
var fileName = "test.txt";
var key = "test-key";
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.CreateAttachmentAsync(cipher, stream, fileName, key, 100, savingUserId, false, lastKnownRevisionDate));
Assert.Contains("out of date", exception.Message);
}
[Theory]
[BitAutoData("")]
[BitAutoData("Correct Time")]
public async Task CreateAttachmentAsync_CorrectRevisionDate_DoesNotThrow(string revisionDateString,
SutProvider<CipherService> sutProvider, CipherDetails cipher, Guid savingUserId)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
var stream = new MemoryStream(new byte[100]);
var fileName = "test.txt";
var key = "test-key";
// Setup cipher with user ownership
cipher.UserId = savingUserId;
cipher.OrganizationId = null;
// Mock user storage and premium access
var user = new User { Id = savingUserId, MaxStorageGb = 1 };
sutProvider.GetDependency<IUserRepository>()
.GetByIdAsync(savingUserId)
.Returns(user);
sutProvider.GetDependency<IUserService>()
.CanAccessPremium(user)
.Returns(true);
sutProvider.GetDependency<IAttachmentStorageService>()
.UploadNewAttachmentAsync(Arg.Any<Stream>(), cipher, Arg.Any<CipherAttachment.MetaData>())
.Returns(Task.CompletedTask);
sutProvider.GetDependency<IAttachmentStorageService>()
.ValidateFileAsync(cipher, Arg.Any<CipherAttachment.MetaData>(), Arg.Any<long>())
.Returns((true, 100L));
sutProvider.GetDependency<ICipherRepository>()
.UpdateAttachmentAsync(Arg.Any<CipherAttachment>())
.Returns(Task.CompletedTask);
sutProvider.GetDependency<ICipherRepository>()
.ReplaceAsync(Arg.Any<CipherDetails>())
.Returns(Task.CompletedTask);
await sutProvider.Sut.CreateAttachmentAsync(cipher, stream, fileName, key, 100, savingUserId, false, lastKnownRevisionDate);
await sutProvider.GetDependency<IAttachmentStorageService>().Received(1)
.UploadNewAttachmentAsync(Arg.Any<Stream>(), cipher, Arg.Any<CipherAttachment.MetaData>());
}
[Theory, BitAutoData]
public async Task CreateAttachmentForDelayedUploadAsync_WrongRevisionDate_Throws(SutProvider<CipherService> sutProvider, Cipher cipher, Guid savingUserId)
{
var lastKnownRevisionDate = cipher.RevisionDate.AddDays(-1);
var key = "test-key";
var fileName = "test.txt";
var fileSize = 100L;
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.CreateAttachmentForDelayedUploadAsync(cipher, key, fileName, fileSize, false, savingUserId, lastKnownRevisionDate));
Assert.Contains("out of date", exception.Message);
}
[Theory]
[BitAutoData("")]
[BitAutoData("Correct Time")]
public async Task CreateAttachmentForDelayedUploadAsync_CorrectRevisionDate_DoesNotThrow(string revisionDateString,
SutProvider<CipherService> sutProvider, CipherDetails cipher, Guid savingUserId)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
var key = "test-key";
var fileName = "test.txt";
var fileSize = 100L;
// Setup cipher with user ownership
cipher.UserId = savingUserId;
cipher.OrganizationId = null;
// Mock user storage and premium access
var user = new User { Id = savingUserId, MaxStorageGb = 1 };
sutProvider.GetDependency<IUserRepository>()
.GetByIdAsync(savingUserId)
.Returns(user);
sutProvider.GetDependency<IUserService>()
.CanAccessPremium(user)
.Returns(true);
sutProvider.GetDependency<IAttachmentStorageService>()
.GetAttachmentUploadUrlAsync(cipher, Arg.Any<CipherAttachment.MetaData>())
.Returns("https://example.com/upload");
sutProvider.GetDependency<ICipherRepository>()
.UpdateAttachmentAsync(Arg.Any<CipherAttachment>())
.Returns(Task.CompletedTask);
var result = await sutProvider.Sut.CreateAttachmentForDelayedUploadAsync(cipher, key, fileName, fileSize, false, savingUserId, lastKnownRevisionDate);
Assert.NotNull(result.attachmentId);
Assert.NotNull(result.uploadUrl);
}
[Theory]
[BitAutoData]
public async Task SaveDetailsAsync_PersonalVault_WithOrganizationDataOwnershipPolicyEnabled_Throws(
SutProvider<CipherService> sutProvider,
CipherDetails cipher,
Guid savingUserId)
{
cipher.Id = default;
cipher.UserId = savingUserId;
cipher.OrganizationId = null;
sutProvider.GetDependency<IPolicyService>()
.AnyPoliciesApplicableToUserAsync(savingUserId, PolicyType.OrganizationDataOwnership)
.Returns(true);
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.SaveDetailsAsync(cipher, savingUserId, null));
Assert.Contains("restricted from saving items to your personal vault", exception.Message);
}
[Theory]
[BitAutoData]
public async Task SaveDetailsAsync_PersonalVault_WithOrganizationDataOwnershipPolicyDisabled_Succeeds(
SutProvider<CipherService> sutProvider,
CipherDetails cipher,
Guid savingUserId)
{
cipher.Id = default;
cipher.UserId = savingUserId;
cipher.OrganizationId = null;
sutProvider.GetDependency<IPolicyService>()
.AnyPoliciesApplicableToUserAsync(savingUserId, PolicyType.OrganizationDataOwnership)
.Returns(false);
await sutProvider.Sut.SaveDetailsAsync(cipher, savingUserId, null);
await sutProvider.GetDependency<ICipherRepository>()
.Received(1)
.CreateAsync(cipher);
}
[Theory]
[BitAutoData]
public async Task SaveDetailsAsync_PersonalVault_WithPolicyRequirementsEnabled_WithOrganizationDataOwnershipPolicyEnabled_Throws(
SutProvider<CipherService> sutProvider,
CipherDetails cipher,
Guid savingUserId)
{
cipher.Id = default;
cipher.UserId = savingUserId;
cipher.OrganizationId = null;
sutProvider.GetDependency<IFeatureService>()
.IsEnabled(FeatureFlagKeys.PolicyRequirements)
.Returns(true);
sutProvider.GetDependency<IPolicyRequirementQuery>()
.GetAsync<OrganizationDataOwnershipPolicyRequirement>(savingUserId)
.Returns(new OrganizationDataOwnershipPolicyRequirement(
OrganizationDataOwnershipState.Enabled,
[new PolicyDetails()]));
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.SaveDetailsAsync(cipher, savingUserId, null));
Assert.Contains("restricted from saving items to your personal vault", exception.Message);
}
[Theory]
[BitAutoData]
public async Task SaveDetailsAsync_PersonalVault_WithPolicyRequirementsEnabled_WithOrganizationDataOwnershipPolicyDisabled_Succeeds(
SutProvider<CipherService> sutProvider,
CipherDetails cipher,
Guid savingUserId)
{
cipher.Id = default;
cipher.UserId = savingUserId;
cipher.OrganizationId = null;
sutProvider.GetDependency<IFeatureService>()
.IsEnabled(FeatureFlagKeys.PolicyRequirements)
.Returns(true);
sutProvider.GetDependency<IPolicyRequirementQuery>()
.GetAsync<OrganizationDataOwnershipPolicyRequirement>(savingUserId)
.Returns(new OrganizationDataOwnershipPolicyRequirement(
OrganizationDataOwnershipState.Disabled,
[]));
await sutProvider.Sut.SaveDetailsAsync(cipher, savingUserId, null);
await sutProvider.GetDependency<ICipherRepository>()
.Received(1)
.CreateAsync(cipher);
}
[Theory]
[BitAutoData("")]
[BitAutoData("Correct Time")]
public async Task ShareAsync_CorrectRevisionDate_Passes(string revisionDateString,
SutProvider<CipherService> sutProvider, Cipher cipher, Organization organization, List<Guid> collectionIds)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
var cipherRepository = sutProvider.GetDependency<ICipherRepository>();
cipherRepository.ReplaceAsync(cipher, collectionIds).Returns(true);
sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id).Returns(organization);
cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
{
[Guid.NewGuid().ToString()] = new CipherAttachment.MetaData { }
});
await sutProvider.Sut.ShareAsync(cipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
lastKnownRevisionDate);
await cipherRepository.Received(1).ReplaceAsync(cipher, collectionIds);
}
[Theory]
[BitAutoData("Correct Time")]
public async Task ShareAsync_FailReplace_Throws(string revisionDateString,
SutProvider<CipherService> sutProvider, Cipher cipher, Organization organization, List<Guid> collectionIds)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
var cipherRepository = sutProvider.GetDependency<ICipherRepository>();
cipherRepository.ReplaceAsync(cipher, collectionIds).Returns(false);
sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id).Returns(organization);
cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
{
[Guid.NewGuid().ToString()] = new CipherAttachment.MetaData { }
});
var exception = await Assert.ThrowsAsync<BadRequestException>(
() => sutProvider.Sut.ShareAsync(cipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
lastKnownRevisionDate));
Assert.Contains("Unable to save", exception.Message);
}
[Theory]
[BitAutoData("Correct Time")]
public async Task ShareAsync_HasV0Attachments_ReplaceAttachmentMetadataWithNewOneBeforeSavingCipher(string revisionDateString,
SutProvider<CipherService> sutProvider, Cipher cipher, Organization organization, List<Guid> collectionIds)
{
var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
var originalCipher = CoreHelpers.CloneObject(cipher);
var cipherRepository = sutProvider.GetDependency<ICipherRepository>();
cipherRepository.ReplaceAsync(cipher, collectionIds).Returns(true);
sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id).Returns(organization);
var pushNotificationService = sutProvider.GetDependency<IPushNotificationService>();
var v0AttachmentId = Guid.NewGuid().ToString();
var anotherAttachmentId = Guid.NewGuid().ToString();
cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
{
[v0AttachmentId] = new CipherAttachment.MetaData
{
AttachmentId = v0AttachmentId,
ContainerName = "attachments",
FileName = "AFileNameEncrypted"
},
[anotherAttachmentId] = new CipherAttachment.MetaData
{
AttachmentId = anotherAttachmentId,
Key = "AwesomeKey",
FileName = "AnotherFilename",
ContainerName = "attachments",
Size = 300,
Validated = true
}
});
originalCipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
{
[v0AttachmentId] = new CipherAttachment.MetaData
{
AttachmentId = v0AttachmentId,
ContainerName = "attachments",
FileName = "AFileNameEncrypted",
TempMetadata = new CipherAttachment.MetaData
{
AttachmentId = v0AttachmentId,
ContainerName = "attachments",
FileName = "AFileNameRe-EncryptedWithOrgKey",
Key = "NewAttachmentKey"
}
},
[anotherAttachmentId] = new CipherAttachment.MetaData
{
AttachmentId = anotherAttachmentId,
Key = "AwesomeKey",
FileName = "AnotherFilename",
ContainerName = "attachments",
Size = 300,
Validated = true
}
});
await sutProvider.Sut.ShareAsync(originalCipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
lastKnownRevisionDate);
await cipherRepository.Received().ReplaceAsync(Arg.Is<Cipher>(c =>
c.GetAttachments()[v0AttachmentId].Key == "NewAttachmentKey"
&&
c.GetAttachments()[v0AttachmentId].FileName == "AFileNameRe-EncryptedWithOrgKey")
, collectionIds);
await pushNotificationService.Received(1).PushSyncCipherUpdateAsync(cipher, collectionIds);
}
// Sharing a cipher that still carries v0 (pre-key) attachments must migrate them to the
// organization: only the attachment that has TempMetadata (the re-encrypted copy) is
// re-shared, already-keyed attachments are left alone, and temp blobs are cleaned up.
[Theory]
[BitAutoData("Correct Time")]
public async Task ShareAsync_HasV0Attachments_StartSharingThoseAttachments(string revisionDateString,
    SutProvider<CipherService> sutProvider, Cipher cipher, Organization organization, List<Guid> collectionIds)
{
    // Arrange
    var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
    var originalCipher = CoreHelpers.CloneObject(cipher);
    var cipherRepository = sutProvider.GetDependency<ICipherRepository>();
    cipherRepository.ReplaceAsync(cipher, collectionIds).Returns(true);
    sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id).Returns(organization);
    var attachmentStorageService = sutProvider.GetDependency<IAttachmentStorageService>();
    var v0AttachmentId = Guid.NewGuid().ToString();
    var anotherAttachmentId = Guid.NewGuid().ToString();
    // One v0 attachment (no Key, TempMetadata holds the org-key re-encrypted copy)
    // and one already-keyed attachment that must NOT be re-shared.
    cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
    {
        [v0AttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey",
                Key = "NewAttachmentKey"
            }
        },
        [anotherAttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = anotherAttachmentId,
            Key = "AwesomeKey",
            FileName = "AnotherFilename",
            ContainerName = "attachments",
            Size = 300,
            Validated = true
        }
    });
    originalCipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
    {
        [v0AttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey",
                Key = "NewAttachmentKey"
            }
        },
        [anotherAttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = anotherAttachmentId,
            Key = "AwesomeKey",
            FileName = "AnotherFilename",
            ContainerName = "attachments",
            Size = 300,
            Validated = true
        }
    });
    // Act
    await sutProvider.Sut.ShareAsync(originalCipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
        lastKnownRevisionDate);
    // Assert: only the v0 attachment's re-encrypted copy was shared, then temp data cleaned up.
    await attachmentStorageService.Received().StartShareAttachmentAsync(cipher.Id,
        organization.Id,
        Arg.Is<CipherAttachment.MetaData>(m => m.Key == "NewAttachmentKey" && m.FileName == "AFileNameRe-EncryptedWithOrgKey"));
    await attachmentStorageService.Received(0).StartShareAttachmentAsync(cipher.Id,
        organization.Id,
        Arg.Is<CipherAttachment.MetaData>(m => m.Key == "AwesomeKey" && m.FileName == "AnotherFilename"));
    await attachmentStorageService.Received().CleanupAsync(cipher.Id);
}
// When StartShareAttachmentAsync fails for a v0 attachment, ShareAsync must roll back:
// restore the previous collection assignments, persist the cipher with the original
// (un-migrated) attachment metadata, and rethrow the storage exception.
[Theory]
[BitAutoData("Correct Time")]
public async Task ShareAsync_HasV0Attachments_StartShareThrows_PerformsRollback_Rethrows(string revisionDateString,
    SutProvider<CipherService> sutProvider, Cipher cipher, Organization organization, List<Guid> collectionIds)
{
    // Arrange
    var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
    var originalCipher = CoreHelpers.CloneObject(cipher);
    var cipherRepository = sutProvider.GetDependency<ICipherRepository>();
    cipherRepository.ReplaceAsync(cipher, collectionIds).Returns(true);
    sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id).Returns(organization);
    var attachmentStorageService = sutProvider.GetDependency<IAttachmentStorageService>();
    var collectionCipherRepository = sutProvider.GetDependency<ICollectionCipherRepository>();
    // Pre-existing collection links: one that matches collectionIds[0] and one unrelated,
    // so the rollback assertion below can tell them apart.
    collectionCipherRepository.GetManyByUserIdCipherIdAsync(cipher.UserId.Value, cipher.Id).Returns(
        Task.FromResult((ICollection<CollectionCipher>)new List<CollectionCipher>
        {
            new CollectionCipher
            {
                CipherId = cipher.Id,
                CollectionId = collectionIds[0]
            },
            new CollectionCipher
            {
                CipherId = cipher.Id,
                CollectionId = Guid.NewGuid()
            }
        }));
    var v0AttachmentId = Guid.NewGuid().ToString();
    var anotherAttachmentId = Guid.NewGuid().ToString();
    cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
    {
        [v0AttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey",
                Key = "NewAttachmentKey"
            }
        },
        [anotherAttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = anotherAttachmentId,
            Key = "AwesomeKey",
            FileName = "AnotherFilename",
            ContainerName = "attachments",
            Size = 300,
            Validated = true
        }
    });
    originalCipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
    {
        [v0AttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey",
                Key = "NewAttachmentKey"
            }
        },
        [anotherAttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = anotherAttachmentId,
            Key = "AwesomeKey",
            FileName = "AnotherFilename",
            ContainerName = "attachments",
            Size = 300,
            Validated = true
        }
    });
    // Fail the share of the v0 attachment to force the rollback path.
    attachmentStorageService.StartShareAttachmentAsync(cipher.Id,
        organization.Id,
        Arg.Is<CipherAttachment.MetaData>(m => m.AttachmentId == v0AttachmentId))
        .Returns(Task.FromException(new InvalidOperationException("ex from StartShareAttachmentAsync")));
    // Act
    // NOTE(review): `cipher` is passed as both the original and the updated cipher even
    // though `originalCipher` is prepared above — confirm whether originalCipher was intended.
    var exception = await Assert.ThrowsAsync<InvalidOperationException>(
        () => sutProvider.Sut.ShareAsync(cipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
            lastKnownRevisionDate));
    // Assert: the storage exception surfaced unchanged.
    Assert.Contains("ex from StartShareAttachmentAsync", exception.Message);
    // Collections were rolled back to the pre-share set (everything except collectionIds[0]).
    await collectionCipherRepository.Received().UpdateCollectionsAsync(cipher.Id, cipher.UserId.Value,
        Arg.Is<List<Guid>>(ids => ids.Count == 1 && ids[0] != collectionIds[0]));
    // The cipher was persisted with the v0 attachment reverted: no key, original file name,
    // temp metadata discarded.
    await cipherRepository.Received().ReplaceAsync(Arg.Is<Cipher>(c =>
        c.GetAttachments()[v0AttachmentId].Key == null
        &&
        c.GetAttachments()[v0AttachmentId].FileName == "AFileNameEncrypted"
        &&
        c.GetAttachments()[v0AttachmentId].TempMetadata == null)
    );
}
// When StartShareAttachmentAsync fails on the SECOND of several v0 attachments, ShareAsync
// must roll back everything already done: revert collection assignments, persist the cipher
// with both v0 attachments un-migrated, refresh user/org storage totals, undo the share of
// the first attachment, clean up temp blobs, and rethrow.
[Theory]
[BitAutoData("Correct Time")]
public async Task ShareAsync_HasSeveralV0Attachments_StartShareThrowsOnSecondOne_PerformsRollback_Rethrows(string revisionDateString,
    SutProvider<CipherService> sutProvider, Cipher cipher, Organization organization, List<Guid> collectionIds)
{
    // Arrange
    var lastKnownRevisionDate = string.IsNullOrEmpty(revisionDateString) ? (DateTime?)null : cipher.RevisionDate;
    var originalCipher = CoreHelpers.CloneObject(cipher);
    var cipherRepository = sutProvider.GetDependency<ICipherRepository>();
    cipherRepository.ReplaceAsync(cipher, collectionIds).Returns(true);
    var organizationRepository = sutProvider.GetDependency<IOrganizationRepository>();
    organizationRepository.GetByIdAsync(organization.Id).Returns(organization);
    var attachmentStorageService = sutProvider.GetDependency<IAttachmentStorageService>();
    var userRepository = sutProvider.GetDependency<IUserRepository>();
    var collectionCipherRepository = sutProvider.GetDependency<ICollectionCipherRepository>();
    // Pre-existing collection links: one matching collectionIds[0] and one unrelated,
    // so the rollback assertion below can tell them apart.
    collectionCipherRepository.GetManyByUserIdCipherIdAsync(cipher.UserId.Value, cipher.Id).Returns(
        Task.FromResult((ICollection<CollectionCipher>)new List<CollectionCipher>
        {
            new CollectionCipher
            {
                CipherId = cipher.Id,
                CollectionId = collectionIds[0]
            },
            new CollectionCipher
            {
                CipherId = cipher.Id,
                CollectionId = Guid.NewGuid()
            }
        }));
    var v0AttachmentId1 = Guid.NewGuid().ToString();
    var v0AttachmentId2 = Guid.NewGuid().ToString();
    var anotherAttachmentId = Guid.NewGuid().ToString();
    // Two v0 attachments pending migration plus one already-keyed attachment.
    cipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
    {
        [v0AttachmentId1] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId1,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId1,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey",
                Key = "NewAttachmentKey"
            }
        },
        [v0AttachmentId2] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId2,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted2",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId2,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey2",
                Key = "NewAttachmentKey2"
            }
        },
        [anotherAttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = anotherAttachmentId,
            Key = "AwesomeKey",
            FileName = "AnotherFilename",
            ContainerName = "attachments",
            Size = 300,
            Validated = true
        }
    });
    originalCipher.SetAttachments(new Dictionary<string, CipherAttachment.MetaData>
    {
        [v0AttachmentId1] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId1,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId1,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey",
                Key = "NewAttachmentKey"
            }
        },
        [v0AttachmentId2] = new CipherAttachment.MetaData
        {
            AttachmentId = v0AttachmentId2,
            ContainerName = "attachments",
            FileName = "AFileNameEncrypted2",
            TempMetadata = new CipherAttachment.MetaData
            {
                AttachmentId = v0AttachmentId2,
                ContainerName = "attachments",
                FileName = "AFileNameRe-EncryptedWithOrgKey2",
                Key = "NewAttachmentKey2"
            }
        },
        [anotherAttachmentId] = new CipherAttachment.MetaData
        {
            AttachmentId = anotherAttachmentId,
            Key = "AwesomeKey",
            FileName = "AnotherFilename",
            ContainerName = "attachments",
            Size = 300,
            Validated = true
        }
    });
    // Fail the share of the SECOND v0 attachment only, so the first must be rolled back.
    attachmentStorageService.StartShareAttachmentAsync(cipher.Id,
        organization.Id,
        Arg.Is<CipherAttachment.MetaData>(m => m.AttachmentId == v0AttachmentId2))
        .Returns(Task.FromException(new InvalidOperationException("ex from StartShareAttachmentAsync")));
    // Act
    // NOTE(review): `cipher` is passed as both the original and the updated cipher even
    // though `originalCipher` is prepared above — confirm whether originalCipher was intended.
    var exception = await Assert.ThrowsAsync<InvalidOperationException>(
        () => sutProvider.Sut.ShareAsync(cipher, cipher, organization.Id, collectionIds, cipher.UserId.Value,
            lastKnownRevisionDate));
    // Assert
    Assert.Contains("ex from StartShareAttachmentAsync", exception.Message);
    // Collections were rolled back to the pre-share set (everything except collectionIds[0]).
    await collectionCipherRepository.Received().UpdateCollectionsAsync(cipher.Id, cipher.UserId.Value,
        Arg.Is<List<Guid>>(ids => ids.Count == 1 && ids[0] != collectionIds[0]));
    // Both v0 attachments were persisted back in their un-migrated form.
    await cipherRepository.Received().ReplaceAsync(Arg.Is<Cipher>(c =>
        c.GetAttachments()[v0AttachmentId1].Key == null
        &&
        c.GetAttachments()[v0AttachmentId1].FileName == "AFileNameEncrypted"
        &&
        c.GetAttachments()[v0AttachmentId1].TempMetadata == null)
    );
    await cipherRepository.Received().ReplaceAsync(Arg.Is<Cipher>(c =>
        c.GetAttachments()[v0AttachmentId2].Key == null
        &&
        c.GetAttachments()[v0AttachmentId2].FileName == "AFileNameEncrypted2"
        &&
        c.GetAttachments()[v0AttachmentId2].TempMetadata == null)
    );
    // FIX: these were bare awaits on the substitutes, which merely INVOKE the mock and
    // assert nothing; they are now Received() verifications of the storage refresh.
    await userRepository.Received().UpdateStorageAsync(cipher.UserId.Value);
    await organizationRepository.Received().UpdateStorageAsync(organization.Id);
    // The already-shared first attachment was rolled back and temp blobs cleaned up.
    await attachmentStorageService.Received().RollbackShareAttachmentAsync(cipher.Id, organization.Id,
        Arg.Is<CipherAttachment.MetaData>(m => m.AttachmentId == v0AttachmentId1), Arg.Any<string>());
    await attachmentStorageService.Received().CleanupAsync(cipher.Id);
}
// ShareManyAsync accepts either a null last-known revision date ("") or the cipher's
// actual revision date; in both cases every cipher is handed to UpdateCiphersAsync.
[Theory]
[BitAutoData("")]
[BitAutoData("Correct Time")]
public async Task ShareManyAsync_CorrectRevisionDate_Passes(string revisionDateString,
    SutProvider<CipherService> sutProvider, IEnumerable<CipherDetails> ciphers, Organization organization, List<Guid> collectionIds)
{
    // Arrange: a paid plan with storage so attachments/storage checks pass.
    sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organization.Id)
        .Returns(new Organization
        {
            PlanType = PlanType.EnterpriseAnnually,
            MaxStorageGb = 100
        });
    var cipherInfos = ciphers.Select(c => (c,
        string.IsNullOrEmpty(revisionDateString) ? null : (DateTime?)c.RevisionDate));
    var sharingUserId = ciphers.First().UserId.Value;
    // Act
    await sutProvider.Sut.ShareManyAsync(cipherInfos, organization.Id, collectionIds, sharingUserId);
    // Assert: every input cipher (no more, no fewer) was persisted for the sharing user.
    await sutProvider.GetDependency<ICipherRepository>().Received(1).UpdateCiphersAsync(sharingUserId,
        Arg.Is<IEnumerable<Cipher>>(arg => !arg.Except(ciphers).Any()));
}
// Restoring a soft-deleted personal cipher clears DeletedDate and bumps RevisionDate.
[Theory]
[BitAutoData]
public async Task RestoreAsync_UpdatesUserCipher(Guid restoringUserId, CipherDetails cipher, SutProvider<CipherService> sutProvider)
{
    // Arrange: a personal cipher owned by the restoring user, soft-deleted at the epoch.
    var staleDate = new DateTime(1970, 1, 1, 0, 0, 0);
    cipher.UserId = restoringUserId;
    cipher.OrganizationId = null;
    cipher.DeletedDate = staleDate;
    cipher.RevisionDate = staleDate;
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(new User { Id = restoringUserId });

    // Act
    await sutProvider.Sut.RestoreAsync(cipher, restoringUserId);

    // Assert: the deletion mark is gone and the revision date moved forward.
    Assert.Null(cipher.DeletedDate);
    Assert.NotEqual(staleDate, cipher.RevisionDate);
}
// Restoring an organization cipher succeeds when the user has Manage permission under an
// org that limits item deletion; DeletedDate is cleared and RevisionDate bumped.
[Theory]
[OrganizationCipherCustomize]
[BitAutoData]
public async Task RestoreAsync_UpdatesOrganizationCipher(Guid restoringUserId, CipherDetails cipher, User user, SutProvider<CipherService> sutProvider)
{
    // Arrange: Manage (not Edit) permission under LimitItemDeletion.
    cipher.OrganizationId = Guid.NewGuid();
    cipher.Edit = false;
    cipher.Manage = true;
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(user);
    sutProvider.GetDependency<IApplicationCacheService>()
        .GetOrganizationAbilityAsync(cipher.OrganizationId.Value)
        .Returns(new OrganizationAbility
        {
            Id = cipher.OrganizationId.Value,
            LimitItemDeletion = true
        });
    var initialRevisionDate = new DateTime(1970, 1, 1, 0, 0, 0);
    cipher.DeletedDate = initialRevisionDate;
    cipher.RevisionDate = initialRevisionDate;
    // Act
    await sutProvider.Sut.RestoreAsync(cipher, restoringUserId);
    // Assert
    Assert.Null(cipher.DeletedDate);
    Assert.NotEqual(initialRevisionDate, cipher.RevisionDate);
}
// A cipher that is not soft-deleted (DeletedDate == null) is a no-op: nothing is
// persisted, logged, or pushed.
[Theory]
[BitAutoData]
public async Task RestoreAsync_WithAlreadyRestoredCipher_SkipsOperation(
    Guid restoringUserId, CipherDetails cipherDetails, SutProvider<CipherService> sutProvider)
{
    cipherDetails.DeletedDate = null;
    await sutProvider.Sut.RestoreAsync(cipherDetails, restoringUserId, true);
    await sutProvider.GetDependency<ICipherRepository>().DidNotReceiveWithAnyArgs().UpsertAsync(default);
    await sutProvider.GetDependency<IEventService>().DidNotReceiveWithAnyArgs().LogCipherEventAsync(default, default);
    await sutProvider.GetDependency<IPushNotificationService>().DidNotReceiveWithAnyArgs().PushSyncCipherUpdateAsync(default, default);
}
// A personal cipher owned by someone other than the restoring user must be rejected with
// BadRequestException and cause no persistence, event, or push side effects.
[Theory]
[BitAutoData]
public async Task RestoreAsync_WithPersonalCipherBelongingToDifferentUser_ThrowsBadRequestException(
    Guid restoringUserId, CipherDetails cipherDetails, SutProvider<CipherService> sutProvider)
{
    // Arrange: cipher belongs to a different user, not attached to an organization.
    cipherDetails.UserId = Guid.NewGuid();
    cipherDetails.OrganizationId = null;
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(new User
        {
            Id = restoringUserId,
        });
    // Act / Assert
    var exception = await Assert.ThrowsAsync<BadRequestException>(
        () => sutProvider.Sut.RestoreAsync(cipherDetails, restoringUserId));
    Assert.Contains("do not have permissions", exception.Message);
    await sutProvider.GetDependency<ICipherRepository>().DidNotReceiveWithAnyArgs().UpsertAsync(default);
    await sutProvider.GetDependency<IEventService>().DidNotReceiveWithAnyArgs().LogCipherEventAsync(default, default);
    await sutProvider.GetDependency<IPushNotificationService>().DidNotReceiveWithAnyArgs().PushSyncCipherUpdateAsync(default, default);
}
// The orgAdmin override (third argument true) bypasses the permission checks entirely and
// restores the cipher, persisting it and emitting the event/push notifications.
[Theory]
[OrganizationCipherCustomize]
[BitAutoData]
public async Task RestoreAsync_WithOrgAdminOverride_RestoresCipher(
    Guid restoringUserId, CipherDetails cipherDetails, SutProvider<CipherService> sutProvider)
{
    cipherDetails.DeletedDate = DateTime.UtcNow;
    await sutProvider.Sut.RestoreAsync(cipherDetails, restoringUserId, true);
    Assert.Null(cipherDetails.DeletedDate);
    Assert.NotEqual(DateTime.UtcNow, cipherDetails.RevisionDate);
    await sutProvider.GetDependency<ICipherRepository>().Received(1).UpsertAsync(cipherDetails);
    await sutProvider.GetDependency<IEventService>().Received(1).LogCipherEventAsync(cipherDetails, EventType.Cipher_Restored);
    await sutProvider.GetDependency<IPushNotificationService>().Received(1).PushSyncCipherUpdateAsync(cipherDetails, null);
}
// Under an org with LimitItemDeletion, Manage permission (even without Edit) is enough to
// restore: the cipher is persisted and event/push notifications are emitted once each.
[Theory]
[OrganizationCipherCustomize]
[BitAutoData]
public async Task RestoreAsync_WithManagePermission_RestoresCipher(
    Guid restoringUserId, CipherDetails cipherDetails, User user, SutProvider<CipherService> sutProvider)
{
    // Arrange
    cipherDetails.OrganizationId = Guid.NewGuid();
    cipherDetails.DeletedDate = DateTime.UtcNow;
    cipherDetails.Edit = false;
    cipherDetails.Manage = true;
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(user);
    sutProvider.GetDependency<IApplicationCacheService>()
        .GetOrganizationAbilityAsync(cipherDetails.OrganizationId.Value)
        .Returns(new OrganizationAbility
        {
            Id = cipherDetails.OrganizationId.Value,
            LimitItemDeletion = true
        });
    // Act
    await sutProvider.Sut.RestoreAsync(cipherDetails, restoringUserId);
    // Assert
    Assert.Null(cipherDetails.DeletedDate);
    Assert.NotEqual(DateTime.UtcNow, cipherDetails.RevisionDate);
    await sutProvider.GetDependency<ICipherRepository>().Received(1).UpsertAsync(cipherDetails);
    await sutProvider.GetDependency<IEventService>().Received(1).LogCipherEventAsync(cipherDetails, EventType.Cipher_Restored);
    await sutProvider.GetDependency<IPushNotificationService>().Received(1).PushSyncCipherUpdateAsync(cipherDetails, null);
}
// Under an org with LimitItemDeletion, Edit permission WITHOUT Manage is not enough:
// restore is rejected with BadRequestException and no side effects occur.
[Theory]
[OrganizationCipherCustomize]
[BitAutoData]
public async Task RestoreAsync_WithoutManagePermission_ThrowsBadRequestException(
    Guid restoringUserId, CipherDetails cipherDetails, User user, SutProvider<CipherService> sutProvider)
{
    // Arrange: Edit=true but Manage=false — insufficient when LimitItemDeletion is on.
    cipherDetails.OrganizationId = Guid.NewGuid();
    cipherDetails.DeletedDate = DateTime.UtcNow;
    cipherDetails.Edit = true;
    cipherDetails.Manage = false;
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(user);
    sutProvider.GetDependency<IApplicationCacheService>()
        .GetOrganizationAbilityAsync(cipherDetails.OrganizationId.Value)
        .Returns(new OrganizationAbility
        {
            Id = cipherDetails.OrganizationId.Value,
            LimitItemDeletion = true
        });
    // Act / Assert
    var exception = await Assert.ThrowsAsync<BadRequestException>(
        () => sutProvider.Sut.RestoreAsync(cipherDetails, restoringUserId));
    Assert.Contains("do not have permissions", exception.Message);
    await sutProvider.GetDependency<ICipherRepository>().DidNotReceiveWithAnyArgs().UpsertAsync(default);
    await sutProvider.GetDependency<IEventService>().DidNotReceiveWithAnyArgs().LogCipherEventAsync(default, default);
    await sutProvider.GetDependency<IPushNotificationService>().DidNotReceiveWithAnyArgs().PushSyncCipherUpdateAsync(default, default);
}
// Bulk restore as org admin: all requested ciphers get the repository-supplied revision
// date, DeletedDate cleared, one batched event log, and one push sync.
[Theory]
[BitAutoData]
public async Task RestoreManyAsync_WithOrgAdmin_UpdatesCiphers(Guid organizationId, ICollection<CipherOrganizationDetails> ciphers,
    SutProvider<CipherService> sutProvider)
{
    // Arrange
    var cipherIds = ciphers.Select(c => c.Id).ToArray();
    var restoringUserId = ciphers.First().UserId.Value;
    var previousRevisionDate = DateTime.UtcNow;
    foreach (var cipher in ciphers)
    {
        cipher.RevisionDate = previousRevisionDate;
        cipher.OrganizationId = organizationId;
    }
    sutProvider.GetDependency<ICipherRepository>().GetManyOrganizationDetailsByOrganizationIdAsync(organizationId).Returns(ciphers);
    // The repository reports the restore time; every cipher should adopt it.
    var revisionDate = previousRevisionDate + TimeSpan.FromMinutes(1);
    sutProvider.GetDependency<ICipherRepository>().RestoreByIdsOrganizationIdAsync(Arg.Is<IEnumerable<Guid>>(ids => ids.All(i => cipherIds.Contains(i))), organizationId).Returns(revisionDate);
    // Act
    await sutProvider.Sut.RestoreManyAsync(cipherIds, restoringUserId, organizationId, true);
    // Assert
    foreach (var cipher in ciphers)
    {
        Assert.Null(cipher.DeletedDate);
        Assert.Equal(revisionDate, cipher.RevisionDate);
    }
    await sutProvider.GetDependency<IEventService>().Received(1).LogCipherEventsAsync(Arg.Is<IEnumerable<Tuple<Cipher, EventType, DateTime?>>>(events => events.All(e => cipherIds.Contains(e.Item1.Id))));
    await sutProvider.GetDependency<IPushNotificationService>().Received(1).PushSyncCiphersAsync(restoringUserId);
}
// An empty id set is a complete no-op: no repository, event, or push calls are made.
[Theory]
[BitAutoData]
public async Task RestoreManyAsync_WithEmptyCipherIdsArray_DoesNothing(Guid restoringUserId,
    SutProvider<CipherService> sutProvider)
{
    await sutProvider.Sut.RestoreManyAsync(Array.Empty<Guid>(), restoringUserId);

    await AssertNoActionsAsync(sutProvider);
}
// A null id collection is treated like an empty one: nothing is touched.
[Theory]
[BitAutoData]
public async Task RestoreManyAsync_WithNullCipherIdsArray_DoesNothing(Guid restoringUserId,
    SutProvider<CipherService> sutProvider)
{
    IEnumerable<Guid> cipherIds = null;
    await sutProvider.Sut.RestoreManyAsync(cipherIds, restoringUserId);

    await AssertNoActionsAsync(sutProvider);
}
// Bulk restore ignores ciphers the user cannot see: with none of the requested ciphers in
// the user's accessible set, the restore runs with an empty id list, logs no events, but
// still pushes a sync (matching the service's unconditional push).
[Theory]
[BitAutoData]
public async Task RestoreManyAsync_WithPersonalCipherBelongingToDifferentUser_DoesNotRestoreCiphers(
    Guid restoringUserId, List<CipherDetails> ciphers, SutProvider<CipherService> sutProvider)
{
    // Arrange: every cipher belongs to someone else; the user's own list is empty.
    var cipherIds = ciphers.Select(c => c.Id).ToArray();
    var differentUserId = Guid.NewGuid();
    foreach (var cipher in ciphers)
    {
        cipher.UserId = differentUserId;
        cipher.OrganizationId = null;
        cipher.DeletedDate = DateTime.UtcNow;
    }
    sutProvider.GetDependency<ICipherRepository>()
        .GetManyByUserIdAsync(restoringUserId)
        .Returns(new List<CipherDetails>());
    // Act
    var result = await sutProvider.Sut.RestoreManyAsync(cipherIds, restoringUserId);
    // Assert
    Assert.Empty(result);
    await sutProvider.GetDependency<ICipherRepository>()
        .Received(1)
        .RestoreAsync(Arg.Is<IEnumerable<Guid>>(ids => !ids.Any()), restoringUserId);
    await sutProvider.GetDependency<IEventService>()
        .DidNotReceiveWithAnyArgs()
        .LogCipherEventsAsync(Arg.Any<IEnumerable<Tuple<Cipher, EventType, DateTime?>>>());
    await sutProvider.GetDependency<IPushNotificationService>()
        .Received(1)
        .PushSyncCiphersAsync(restoringUserId);
}
// Bulk restore under LimitItemDeletion: Manage permission (without Edit) allows every
// cipher through; all adopt the repository's restore revision date, and events/push fire.
[Theory]
[OrganizationCipherCustomize]
[BitAutoData]
public async Task RestoreManyAsync_WithManagePermission_RestoresCiphers(
    Guid restoringUserId, List<CipherDetails> ciphers, User user, SutProvider<CipherService> sutProvider)
{
    // Arrange
    var organizationId = Guid.NewGuid();
    var cipherIds = ciphers.Select(c => c.Id).ToArray();
    var previousRevisionDate = DateTime.UtcNow;
    foreach (var cipher in ciphers)
    {
        cipher.OrganizationId = organizationId;
        cipher.Edit = false;
        cipher.Manage = true;
        cipher.DeletedDate = DateTime.UtcNow;
        cipher.RevisionDate = previousRevisionDate;
    }
    sutProvider.GetDependency<ICipherRepository>()
        .GetManyByUserIdAsync(restoringUserId)
        .Returns(ciphers);
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(user);
    sutProvider.GetDependency<IApplicationCacheService>()
        .GetOrganizationAbilitiesAsync()
        .Returns(new Dictionary<Guid, OrganizationAbility>
        {
            {
                organizationId, new OrganizationAbility
                {
                    Id = organizationId,
                    LimitItemDeletion = true
                }
            }
        });
    var revisionDate = previousRevisionDate + TimeSpan.FromMinutes(1);
    sutProvider.GetDependency<ICipherRepository>()
        .RestoreAsync(Arg.Any<IEnumerable<Guid>>(), restoringUserId)
        .Returns(revisionDate);
    // Act
    var result = await sutProvider.Sut.RestoreManyAsync(cipherIds, restoringUserId);
    // Assert: every cipher was restored and stamped with the repository's revision date.
    Assert.Equal(ciphers.Count, result.Count);
    foreach (var cipher in result)
    {
        Assert.Null(cipher.DeletedDate);
        Assert.Equal(revisionDate, cipher.RevisionDate);
    }
    await sutProvider.GetDependency<ICipherRepository>()
        .Received(1)
        .RestoreAsync(Arg.Is<IEnumerable<Guid>>(ids => ids.Count() == cipherIds.Count() &&
            ids.All(id => cipherIds.Contains(id))), restoringUserId);
    await sutProvider.GetDependency<IEventService>()
        .Received(1)
        .LogCipherEventsAsync(Arg.Any<IEnumerable<Tuple<Cipher, EventType, DateTime?>>>());
    await sutProvider.GetDependency<IPushNotificationService>()
        .Received(1)
        .PushSyncCiphersAsync(restoringUserId);
}
// Bulk restore under LimitItemDeletion: Edit permission WITHOUT Manage filters every
// cipher out — RestoreAsync is called with an empty id list, no events are logged, but
// the sync push still fires (matching the service's unconditional push).
[Theory]
[OrganizationCipherCustomize]
[BitAutoData]
public async Task RestoreManyAsync_WithoutManagePermission_DoesNotRestoreCiphers(
    Guid restoringUserId, List<CipherDetails> ciphers, User user, SutProvider<CipherService> sutProvider)
{
    // Arrange
    var organizationId = Guid.NewGuid();
    var cipherIds = ciphers.Select(c => c.Id).ToArray();
    foreach (var cipher in ciphers)
    {
        cipher.OrganizationId = organizationId;
        cipher.Edit = true;
        cipher.Manage = false;
        cipher.DeletedDate = DateTime.UtcNow;
    }
    sutProvider.GetDependency<ICipherRepository>()
        .GetManyByUserIdAsync(restoringUserId)
        .Returns(ciphers);
    sutProvider.GetDependency<IUserService>()
        .GetUserByIdAsync(restoringUserId)
        .Returns(user);
    sutProvider.GetDependency<IApplicationCacheService>()
        .GetOrganizationAbilitiesAsync()
        .Returns(new Dictionary<Guid, OrganizationAbility>
        {
            {
                organizationId, new OrganizationAbility
                {
                    Id = organizationId,
                    LimitItemDeletion = true
                }
            }
        });
    // Act
    var result = await sutProvider.Sut.RestoreManyAsync(cipherIds, restoringUserId);
    // Assert
    Assert.Empty(result);
    await sutProvider.GetDependency<ICipherRepository>()
        .Received(1)
        .RestoreAsync(Arg.Is<IEnumerable<Guid>>(ids => !ids.Any()), restoringUserId);
    await sutProvider.GetDependency<IEventService>()
        .DidNotReceiveWithAnyArgs()
        .LogCipherEventsAsync(Arg.Any<IEnumerable<Tuple<Cipher, EventType, DateTime?>>>());
    await sutProvider.GetDependency<IPushNotificationService>()
        .Received(1)
        .PushSyncCiphersAsync(restoringUserId);
}
// Sharing into a Free-plan organization is rejected when any cipher carries an attachment.
[Theory, BitAutoData]
public async Task ShareManyAsync_FreeOrgWithAttachment_Throws(SutProvider<CipherService> sutProvider,
    IEnumerable<CipherDetails> ciphers, Guid organizationId, List<Guid> collectionIds)
{
    // Arrange: Free plan (no attachment support) and one cipher with attachment JSON.
    sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organizationId).Returns(new Organization
    {
        PlanType = PlanType.Free
    });
    ciphers.FirstOrDefault().Attachments =
        "{\"attachment1\":{\"Size\":\"250\",\"FileName\":\"superCoolFile\","
        + "\"Key\":\"superCoolFile\",\"ContainerName\":\"testContainer\",\"Validated\":false}}";
    var cipherInfos = ciphers.Select(c => (c,
        (DateTime?)c.RevisionDate));
    var sharingUserId = ciphers.First().UserId.Value;
    // Act / Assert
    var exception = await Assert.ThrowsAsync<BadRequestException>(
        () => sutProvider.Sut.ShareManyAsync(cipherInfos, organizationId, collectionIds, sharingUserId));
    Assert.Contains("This organization cannot use attachments", exception.Message);
}
// Sharing into a paid organization with storage succeeds even when a cipher carries an
// attachment; all ciphers are handed to UpdateCiphersAsync exactly once.
[Theory, BitAutoData]
public async Task ShareManyAsync_PaidOrgWithAttachment_Passes(SutProvider<CipherService> sutProvider,
    IEnumerable<CipherDetails> ciphers, Guid organizationId, List<Guid> collectionIds)
{
    // Arrange: paid plan with storage, and one cipher that has attachment JSON.
    sutProvider.GetDependency<IOrganizationRepository>().GetByIdAsync(organizationId)
        .Returns(new Organization
        {
            PlanType = PlanType.EnterpriseAnnually,
            MaxStorageGb = 100
        });
    ciphers.FirstOrDefault().Attachments =
        "{\"attachment1\":{\"Size\":\"250\",\"FileName\":\"superCoolFile\","
        + "\"Key\":\"superCoolFile\",\"ContainerName\":\"testContainer\",\"Validated\":false}}";
    var cipherInfos = ciphers.Select(c => (c,
        (DateTime?)c.RevisionDate));
    var sharingUserId = ciphers.First().UserId.Value;
    // Act
    await sutProvider.Sut.ShareManyAsync(cipherInfos, organizationId, collectionIds, sharingUserId);
    // Assert
    await sutProvider.GetDependency<ICipherRepository>().Received(1).UpdateCiphersAsync(sharingUserId,
        Arg.Is<IEnumerable<Cipher>>(arg => !arg.Except(ciphers).Any()));
}
| CipherServiceTests |
csharp | icsharpcode__AvalonEdit | ICSharpCode.AvalonEdit/Highlighting/HighlightingColorizer.cs | {
"start": 1483,
"end": 16024
} | public class ____ : DocumentColorizingTransformer
{
readonly IHighlightingDefinition definition;
TextView textView;
IHighlighter highlighter;
bool isFixedHighlighter;
/// <summary>
/// Creates a new HighlightingColorizer instance.
/// </summary>
/// <param name="definition">The highlighting definition.</param>
/// <exception cref="ArgumentNullException"><paramref name="definition"/> is null.</exception>
public HighlightingColorizer(IHighlightingDefinition definition)
{
    if (definition == null)
        throw new ArgumentNullException("definition");
    this.definition = definition;
}
/// <summary>
/// Creates a new HighlightingColorizer instance that uses a fixed highlighter instance.
/// The colorizer can only be used with text views that show the document for which
/// the highlighter was created.
/// </summary>
/// <param name="highlighter">The highlighter to be used.</param>
/// <exception cref="ArgumentNullException"><paramref name="highlighter"/> is null.</exception>
public HighlightingColorizer(IHighlighter highlighter)
{
    if (highlighter == null)
        throw new ArgumentNullException("highlighter");
    this.highlighter = highlighter;
    // A fixed highlighter is caller-owned: it is never disposed or re-created by this class.
    this.isFixedHighlighter = true;
}
/// <summary>
/// Creates a new HighlightingColorizer instance.
/// Derived classes using this constructor must override the <see cref="CreateHighlighter"/> method.
/// </summary>
/// <remarks>
/// No definition or highlighter is stored; <see cref="CreateHighlighter"/> supplies one
/// per text view when the colorizer is attached.
/// </remarks>
protected HighlightingColorizer()
{
}
// Re-wires the highlighter services whenever the text view's document is swapped.
void textView_DocumentChanged(object sender, EventArgs e)
{
    var view = (TextView)sender;
    DeregisterServices(view);
    RegisterServices(view);
}
/// <summary>
/// This method is called when a text view is removed from this HighlightingColorizer,
/// and also when the TextDocument on any associated text view changes.
/// </summary>
protected virtual void DeregisterServices(TextView textView)
{
    if (highlighter != null) {
        // Close any highlighting group that is still open (e.g. left open after an
        // exception during visual line construction) before tearing down.
        if (isInHighlightingGroup) {
            highlighter.EndHighlighting();
            isInHighlightingGroup = false;
        }
        highlighter.HighlightingStateChanged -= OnHighlightStateChanged;
        // remove highlighter if it is registered
        if (textView.Services.GetService(typeof(IHighlighter)) == highlighter)
            textView.Services.RemoveService(typeof(IHighlighter));
        // Highlighters created by this colorizer are owned by it and must be disposed;
        // a fixed highlighter is caller-owned and is kept for later re-registration.
        if (!isFixedHighlighter) {
            if (highlighter != null)
                highlighter.Dispose();
            highlighter = null;
        }
    }
}
/// <summary>
/// This method is called when a new text view is added to this HighlightingColorizer,
/// and also when the TextDocument on any associated text view changes.
/// </summary>
protected virtual void RegisterServices(TextView textView)
{
    if (textView.Document != null) {
        if (!isFixedHighlighter)
            highlighter = CreateHighlighter(textView, textView.Document); // Document is known non-null here
        if (highlighter != null && highlighter.Document == textView.Document) {
            // add service only if it doesn't already exist
            if (textView.Services.GetService(typeof(IHighlighter)) == null) {
                textView.Services.AddService(typeof(IHighlighter), highlighter);
            }
            highlighter.HighlightingStateChanged += OnHighlightStateChanged;
        }
    }
}
/// <summary>
/// Creates the IHighlighter instance for the specified text document.
/// </summary>
protected virtual IHighlighter CreateHighlighter(TextView textView, TextDocument document)
{
    // Without a definition there is nothing to build from; derived classes must override.
    if (definition == null)
        throw new NotSupportedException("Cannot create a highlighter because no IHighlightingDefinition was specified, and the CreateHighlighter() method was not overridden.");
    return new DocumentHighlighter(document, definition);
}
/// <inheritdoc/>
protected override void OnAddToTextView(TextView textView)
{
    // A colorizer carries per-view state (highlighter, group flag), so one instance
    // cannot serve two text views at once.
    if (this.textView != null) {
        throw new InvalidOperationException("Cannot use a HighlightingColorizer instance in multiple text views. Please create a separate instance for each text view.");
    }
    base.OnAddToTextView(textView);
    this.textView = textView;
    textView.DocumentChanged += textView_DocumentChanged;
    textView.VisualLineConstructionStarting += textView_VisualLineConstructionStarting;
    textView.VisualLinesChanged += textView_VisualLinesChanged;
    RegisterServices(textView);
}
/// <inheritdoc/>
protected override void OnRemoveFromTextView(TextView textView)
{
    // Tear down in reverse order of OnAddToTextView: services first, then the event
    // subscriptions, then the base class, and finally forget the view.
    DeregisterServices(textView);
    textView.DocumentChanged -= textView_DocumentChanged;
    textView.VisualLineConstructionStarting -= textView_VisualLineConstructionStarting;
    textView.VisualLinesChanged -= textView_VisualLinesChanged;
    base.OnRemoveFromTextView(textView);
    this.textView = null;
}
// True while a highlighting group opened via BeginHighlighting() is still awaiting EndHighlighting().
bool isInHighlightingGroup;
void textView_VisualLineConstructionStarting(object sender, VisualLineConstructionStartEventArgs e)
{
    if (highlighter != null) {
        // Force update of highlighting state up to the position where we start generating visual lines.
        // This is necessary in case the document gets modified above the FirstLineInView so that the highlighting state changes.
        // We need to detect this case and issue a redraw (through OnHighlightStateChanged)
        // before the visual line construction reuses existing lines that were built using the invalid highlighting state.
        lineNumberBeingColorized = e.FirstLineInView.LineNumber - 1;
        if (!isInHighlightingGroup) {
            // avoid opening group twice if there was an exception during the previous visual line construction
            // (not ideal, but better than throwing InvalidOperationException "group already open"
            // without any way of recovering)
            highlighter.BeginHighlighting();
            isInHighlightingGroup = true;
        }
        highlighter.UpdateHighlightingState(lineNumberBeingColorized);
        lineNumberBeingColorized = 0;
    }
}
void textView_VisualLinesChanged(object sender, EventArgs e)
{
if (highlighter != null && isInHighlightingGroup) {
highlighter.EndHighlighting();
isInHighlightingGroup = false;
}
}
DocumentLine lastColorizedLine;
/// <inheritdoc/>
protected override void Colorize(ITextRunConstructionContext context)
{
this.lastColorizedLine = null;
base.Colorize(context);
if (this.lastColorizedLine != context.VisualLine.LastDocumentLine) {
if (highlighter != null) {
// In some cases, it is possible that we didn't highlight the last document line within the visual line
// (e.g. when the line ends with a fold marker).
// But even if we didn't highlight it, we'll have to update the highlighting state for it so that the
// proof inside TextViewDocumentHighlighter.OnHighlightStateChanged holds.
lineNumberBeingColorized = context.VisualLine.LastDocumentLine.LineNumber;
highlighter.UpdateHighlightingState(lineNumberBeingColorized);
lineNumberBeingColorized = 0;
}
}
this.lastColorizedLine = null;
}
int lineNumberBeingColorized;
/// <inheritdoc/>
protected override void ColorizeLine(DocumentLine line)
{
if (highlighter != null) {
lineNumberBeingColorized = line.LineNumber;
HighlightedLine hl = highlighter.HighlightLine(lineNumberBeingColorized);
lineNumberBeingColorized = 0;
foreach (HighlightedSection section in hl.Sections) {
if (IsEmptyColor(section.Color))
continue;
ChangeLinePart(section.Offset, section.Offset + section.Length,
visualLineElement => ApplyColorToElement(visualLineElement, section.Color));
}
}
this.lastColorizedLine = line;
}
/// <summary>
/// Gets whether the color is empty (has no effect on a VisualLineTextElement).
/// For example, the C# "Punctuation" is an empty color.
/// </summary>
internal static bool IsEmptyColor(HighlightingColor color)
{
if (color == null)
return true;
return color.Background == null && color.Foreground == null
&& color.FontStyle == null && color.FontWeight == null
&& color.Underline == null && color.Strikethrough == null;
}
/// <summary>
/// Applies a highlighting color to a visual line element.
/// </summary>
protected virtual void ApplyColorToElement(VisualLineElement element, HighlightingColor color)
{
ApplyColorToElement(element, color, CurrentContext);
}
internal static void ApplyColorToElement(VisualLineElement element, HighlightingColor color, ITextRunConstructionContext context)
{
if (color.Foreground != null) {
Brush b = color.Foreground.GetBrush(context);
if (b != null)
element.TextRunProperties.SetForegroundBrush(b);
}
if (color.Background != null) {
Brush b = color.Background.GetBrush(context);
if (b != null)
element.BackgroundBrush = b;
}
if (color.FontStyle != null || color.FontWeight != null || color.FontFamily != null) {
Typeface tf = element.TextRunProperties.Typeface;
element.TextRunProperties.SetTypeface(new Typeface(
color.FontFamily ?? tf.FontFamily,
color.FontStyle ?? tf.Style,
color.FontWeight ?? tf.Weight,
tf.Stretch
));
}
if (color.Underline ?? false)
element.TextRunProperties.SetTextDecorations(TextDecorations.Underline);
if (color.Strikethrough ?? false)
element.TextRunProperties.SetTextDecorations(TextDecorations.Strikethrough);
if (color.FontSize.HasValue)
element.TextRunProperties.SetFontRenderingEmSize(color.FontSize.Value);
}
/// <summary>
/// This method is responsible for telling the TextView to redraw lines when the highlighting state has changed.
/// </summary>
/// <remarks>
/// Creation of a VisualLine triggers the syntax highlighter (which works on-demand), so it says:
/// Hey, the user typed "/*". Don't just recreate that line, but also the next one
/// because my highlighting state (at end of line) changed!
/// </remarks>
void OnHighlightStateChanged(int fromLineNumber, int toLineNumber)
{
if (lineNumberBeingColorized != 0) {
// Ignore notifications for any line except the one we're interested in.
// This improves the performance as Redraw() can take quite some time when called repeatedly
// while scanning the document (above the visible area) for highlighting changes.
if (toLineNumber <= lineNumberBeingColorized) {
return;
}
}
// The user may have inserted "/*" into the current line, and so far only that line got redrawn.
// So when the highlighting state is changed, we issue a redraw for the line immediately below.
// If the highlighting state change applies to the lines below, too, the construction of each line
// will invalidate the next line, and the construction pass will regenerate all lines.
Debug.WriteLine(string.Format("OnHighlightStateChanged forces redraw of lines {0} to {1}", fromLineNumber, toLineNumber));
// If the VisualLine construction is in progress, we have to avoid sending redraw commands for
// anything above the line currently being constructed.
// It takes some explanation to see why this cannot happen.
// VisualLines always get constructed from top to bottom.
// Each VisualLine construction calls into the highlighter and thus forces an update of the
// highlighting state for all lines up to the one being constructed.
// To guarantee that we don't redraw lines we just constructed, we need to show that when
// a VisualLine is being reused, the highlighting state at that location is still up-to-date.
// This isn't exactly trivial and the initial implementation was incorrect in the presence of external document changes
// (e.g. split view).
// For the first line in the view, the TextView.VisualLineConstructionStarting event is used to check that the
// highlighting state is up-to-date. If it isn't, this method will be executed, and it'll mark the first line
// in the view as requiring a redraw. This is safely possible because that event occurs before any lines are reused.
// Once we take care of the first visual line, we won't get in trouble with other lines due to the top-to-bottom
// construction process.
// We'll prove that: if line N is being reused, then the highlighting state is up-to-date until (end of) line N-1.
// Start of induction: the first line in view is reused only if the highlighting state was up-to-date
// until line N-1 (no change detected in VisualLineConstructionStarting event).
// Induction step:
// If another line N+1 is being reused, then either
// a) the previous line (the visual line containing document line N) was newly constructed
// or b) the previous line was reused
// In case a, the construction updated the highlighting state. This means the stack at end of line N is up-to-date.
// In case b, the highlighting state at N-1 was up-to-date, and the text of line N was not changed.
// (if the text was changed, the line could not have been reused).
// From this follows that the highlighting state at N is still up-to-date.
// The above proof holds even in the presence of folding: folding only ever hides text in the middle of a visual line.
// Our Colorize-override ensures that the highlighting state is always updated for the LastDocumentLine,
// so it will always invalidate the next visual line when a folded line is constructed
// and the highlighting stack has changed.
if (fromLineNumber == toLineNumber) {
textView.Redraw(textView.Document.GetLineByNumber(fromLineNumber));
} else {
// If there are multiple lines marked as changed; only the first one really matters
// for the highlighting during rendering.
// However this callback is also called outside of the rendering process, e.g. when a highlighter
// decides to re-highlight some section based on external feedback (e.g. semantic highlighting).
var fromLine = textView.Document.GetLineByNumber(fromLineNumber);
var toLine = textView.Document.GetLineByNumber(toLineNumber);
int startOffset = fromLine.Offset;
textView.Redraw(startOffset, toLine.EndOffset - startOffset);
}
/*
* Meta-comment: "why does this have to be so complicated?"
*
* The problem is that I want to re-highlight only on-demand and incrementally;
* and at the same time only repaint changed lines.
* So the highlighter and the VisualLine construction both have to run in a single pass.
* The highlighter must take care that it never touches already constructed visual lines;
* if it detects that something must be redrawn because the highlighting state changed,
* it must do so early enough in the construction process.
* But doing it too early means it doesn't have the information necessary to re-highlight and redraw only the desired parts.
*/
}
}
}
| HighlightingColorizer |
csharp | MassTransit__MassTransit | tests/MassTransit.Tests/SagaStateMachineTests/Respond_Specs.cs | {
"start": 3597,
"end": 3872
} | public class ____ :
CorrelatedBy<Guid>
{
public StatusRequested(Guid correlationId)
{
CorrelationId = correlationId;
}
public Guid CorrelationId { get; set; }
}
| StatusRequested |
csharp | dotnet__extensions | src/Libraries/Microsoft.Extensions.ServiceDiscovery.Dns/Resolver/DnsResolver.cs | {
"start": 28920,
"end": 29089
} | record ____ the answer section to which this TTL can be
// applied, the TTL must be carried by another method. This is done by
// including the SOA | in |
csharp | dotnet__efcore | src/EFCore.Relational/Migrations/Operations/ColumnOperation.cs | {
"start": 546,
"end": 4392
} | public abstract class ____ : MigrationOperation, ITableMigrationOperation
{
/// <summary>
/// The name of the column.
/// </summary>
public virtual string Name { get; set; } = null!;
/// <summary>
/// The schema that contains the table, or <see langword="null" /> if the default schema should be used.
/// </summary>
public virtual string? Schema { get; set; }
/// <summary>
/// The table which contains the column.
/// </summary>
public virtual string Table { get; set; } = null!;
/// <summary>
/// The CLR <see cref="Type" /> of the property or properties mapped to the column.
/// </summary>
public virtual Type ClrType { get; set; } = null!;
/// <summary>
/// The store type of the column--for example, 'nvarchar(max)'.
/// </summary>
public virtual string? ColumnType { get; set; }
/// <summary>
/// Indicates whether or not the column can contain Unicode data, or <see langword="null" /> if this is not specified or does
/// not apply to this column type.
/// </summary>
public virtual bool? IsUnicode { get; set; }
/// <summary>
/// Indicates whether or not the column is constrained to fixed-length data.
/// </summary>
public virtual bool? IsFixedLength { get; set; }
/// <summary>
/// The maximum amount of data that the column can store, or <see langword="null" /> if this is not specified or does
/// not apply to this column type.
/// </summary>
public virtual int? MaxLength { get; set; }
/// <summary>
/// The maximum number of digits that the column can store, or <see langword="null" />
/// if this is not specified or does not apply to this column type.
/// </summary>
public virtual int? Precision { get; set; }
/// <summary>
/// The maximum number of decimal places that the column can store, or <see langword="null" />
/// if this is not specified or does not apply to this column type.
/// </summary>
public virtual int? Scale { get; set; }
/// <summary>
/// Indicates whether or not this column acts as an automatic concurrency token in the same vein
/// as 'rowversion'/'timestamp' columns on SQL Server.
/// </summary>
public virtual bool IsRowVersion { get; set; }
/// <summary>
/// Indicates whether or not th column can store <see langword="null" /> values.
/// </summary>
public virtual bool IsNullable { get; set; }
/// <summary>
/// The default value for rows inserted without an explicit value for this column, or
/// <see langword="null" /> if there is no default.
/// </summary>
public virtual object? DefaultValue { get; set; }
/// <summary>
/// The SQL expression to use as the default constraint when creating the column,
/// or <see langword="null" /> if there is no default constraint.
/// </summary>
public virtual string? DefaultValueSql { get; set; }
/// <summary>
/// The SQL expression to use to compute the column value, <see langword="null" /> if the column
/// is not computed.
/// </summary>
public virtual string? ComputedColumnSql { get; set; }
/// <summary>
/// Whether the value of the computed column this property is mapped to is stored in the database, or calculated when
/// it is read.
/// </summary>
public virtual bool? IsStored { get; set; }
/// <summary>
/// Comment for this column
/// </summary>
public virtual string? Comment { get; set; }
/// <summary>
/// The collation for this column, or <see langword="null" /> if one hasn't been explicitly configured.
/// </summary>
public virtual string? Collation { get; set; }
}
| ColumnOperation |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Data/test/Data.Filters.InMemory.Tests/QueryableFilterVisitorStringTests.cs | {
"start": 20633,
"end": 20753
} | public class ____
{
public int Id { get; set; }
public string Bar { get; set; } = null!;
}
| Foo |
csharp | cake-build__cake | src/Cake.Common/Tools/XUnit/XUnitRunnerUtilities.cs | {
"start": 300,
"end": 819
} | internal static class ____
{
internal static FilePath GetReportFileName(IReadOnlyList<FilePath> assemblyPaths, XUnit2Settings settings)
{
if (string.IsNullOrEmpty(settings.ReportName))
{
return assemblyPaths.Count == 1
? assemblyPaths[0].GetFilename()
: new FilePath("TestResults");
}
else
{
return settings.ReportName;
}
}
}
} | XUnitRunnerUtilities |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/ModelBuilding101OneToOneTestBase.cs | {
"start": 22326,
"end": 23121
} | public class ____
{
public int Id { get; set; }
[ForeignKey("Blog"), Required]
public int BlogId { get; set; }
[ForeignKey("BlogId"), Required]
public Blog Blog { get; set; }
}
public DbSet<Blog> Blogs
=> Set<Blog>();
public DbSet<BlogHeader> BlogHeaders
=> Set<BlogHeader>();
protected override void OnModelCreating(ModelBuilder modelBuilder)
=> modelBuilder.Entity<BlogHeader>()
.HasOne(e => e.Blog)
.WithOne();
}
}
[ConditionalFact]
public virtual void OneToOneOptionalNoNavigationToDependentsTest()
=> Model101Test();
| BlogHeader |
csharp | dotnet__maui | src/Controls/samples/Controls.Sample.Embedding/Platforms/Windows/MainWindow.xaml.cs | {
"start": 78,
"end": 1472
} | partial class ____ : Microsoft.UI.Xaml.Window
{
EmbeddingScenarios.IScenario? _scenario;
MyMauiContent? _mauiView;
FrameworkElement? _nativeView;
public MainWindow()
{
InitializeComponent();
}
private async void OnRootLayoutLoaded(object? sender, RoutedEventArgs e)
{
// Sometimes Loaded fires twice...
if (_nativeView is not null)
return;
await Task.Yield();
// Uncomment the scenario to test:
//_scenario = new EmbeddingScenarios.Scenario1_Basic();
//_scenario = new EmbeddingScenarios.Scenario2_Scoped();
_scenario = new EmbeddingScenarios.Scenario3_Correct();
// create the view and (maybe) the window
(_mauiView, _nativeView) = _scenario.Embed(this);
// add the new view to the UI
RootLayout.Children.Insert(1, _nativeView);
}
private void OnWindowClosed(object? sender, WindowEventArgs args)
{
// Remove the view from the UI
RootLayout.Children.Remove(_nativeView);
// If we used a window, then clean that up
if (_mauiView?.Window is IWindow window)
window.Destroying();
}
private async void OnMagicClicked(object? sender, RoutedEventArgs e)
{
if (_mauiView?.DotNetBot is not Image bot)
return;
await bot.RotateToAsync(360, 1000);
bot.Rotation = 0;
bot.HeightRequest = 90;
}
private void OnNewWindowClicked(object? sender, RoutedEventArgs e)
{
var window = new MainWindow();
window.Activate();
}
}
| MainWindow |
csharp | npgsql__npgsql | src/Npgsql/NpgsqlFactory.cs | {
"start": 165,
"end": 2945
} | public sealed class ____ : DbProviderFactory, IServiceProvider
{
/// <summary>
/// Gets an instance of the <see cref="NpgsqlFactory"/>.
/// This can be used to retrieve strongly typed data objects.
/// </summary>
public static readonly NpgsqlFactory Instance = new();
NpgsqlFactory() {}
/// <summary>
/// Returns a strongly typed <see cref="DbCommand"/> instance.
/// </summary>
public override DbCommand CreateCommand() => new NpgsqlCommand();
/// <summary>
/// Returns a strongly typed <see cref="DbConnection"/> instance.
/// </summary>
public override DbConnection CreateConnection() => new NpgsqlConnection();
/// <summary>
/// Returns a strongly typed <see cref="DbParameter"/> instance.
/// </summary>
public override DbParameter CreateParameter() => new NpgsqlParameter();
/// <summary>
/// Returns a strongly typed <see cref="DbConnectionStringBuilder"/> instance.
/// </summary>
public override DbConnectionStringBuilder CreateConnectionStringBuilder() => new NpgsqlConnectionStringBuilder();
/// <summary>
/// Returns a strongly typed <see cref="DbCommandBuilder"/> instance.
/// </summary>
public override DbCommandBuilder CreateCommandBuilder() => new NpgsqlCommandBuilder();
/// <summary>
/// Returns a strongly typed <see cref="DbDataAdapter"/> instance.
/// </summary>
public override DbDataAdapter CreateDataAdapter() => new NpgsqlDataAdapter();
/// <summary>
/// Specifies whether the specific <see cref="DbProviderFactory"/> supports the <see cref="DbDataAdapter"/> class.
/// </summary>
public override bool CanCreateDataAdapter => true;
/// <summary>
/// Specifies whether the specific <see cref="DbProviderFactory"/> supports the <see cref="DbCommandBuilder"/> class.
/// </summary>
public override bool CanCreateCommandBuilder => true;
/// <inheritdoc/>
public override bool CanCreateBatch => true;
/// <inheritdoc/>
public override DbBatch CreateBatch() => new NpgsqlBatch();
/// <inheritdoc/>
public override DbBatchCommand CreateBatchCommand() => new NpgsqlBatchCommand();
/// <inheritdoc/>
public override DbDataSource CreateDataSource(string connectionString)
=> NpgsqlDataSource.Create(connectionString);
#region IServiceProvider Members
/// <summary>
/// Gets the service object of the specified type.
/// </summary>
/// <param name="serviceType">An object that specifies the type of service object to get.</param>
/// <returns>A service object of type serviceType, or null if there is no service object of type serviceType.</returns>
public object? GetService(Type serviceType) => null;
#endregion
}
| NpgsqlFactory |
csharp | AvaloniaUI__Avalonia | src/Avalonia.Base/Media/Immutable/ImmutableLinearGradientBrush.cs | {
"start": 165,
"end": 2155
} | public class ____ : ImmutableGradientBrush, ILinearGradientBrush
{
/// <summary>
/// Initializes a new instance of the <see cref="ImmutableLinearGradientBrush"/> class.
/// </summary>
/// <param name="gradientStops">The gradient stops.</param>
/// <param name="opacity">The opacity of the brush.</param>
/// <param name="transform">The transform of the brush.</param>
/// <param name="transformOrigin">The transform origin of the brush</param>
/// <param name="spreadMethod">The spread method.</param>
/// <param name="startPoint">The start point for the gradient.</param>
/// <param name="endPoint">The end point for the gradient.</param>
public ImmutableLinearGradientBrush(
IReadOnlyList<ImmutableGradientStop> gradientStops,
double opacity = 1,
ImmutableTransform? transform = null,
RelativePoint? transformOrigin = null,
GradientSpreadMethod spreadMethod = GradientSpreadMethod.Pad,
RelativePoint? startPoint = null,
RelativePoint? endPoint = null)
: base(gradientStops, opacity, transform, transformOrigin, spreadMethod)
{
StartPoint = startPoint ?? RelativePoint.TopLeft;
EndPoint = endPoint ?? RelativePoint.BottomRight;
}
/// <summary>
/// Initializes a new instance of the <see cref="ImmutableLinearGradientBrush"/> class.
/// </summary>
/// <param name="source">The brush from which this brush's properties should be copied.</param>
public ImmutableLinearGradientBrush(LinearGradientBrush source)
: base(source)
{
StartPoint = source.StartPoint;
EndPoint = source.EndPoint;
}
/// <inheritdoc/>
public RelativePoint StartPoint { get; }
/// <inheritdoc/>
public RelativePoint EndPoint { get; }
}
}
| ImmutableLinearGradientBrush |
csharp | louthy__language-ext | LanguageExt.Core/Units of Measure/Accel.cs | {
"start": 471,
"end": 3809
} | struct ____ :
IComparable<Accel>,
IEquatable<Accel>,
IComparable
{
readonly double Value;
internal Accel(double value) =>
Value = value;
public override string ToString() =>
Value + " m/s²";
public bool Equals(Accel other) =>
Value.Equals(other.Value);
public bool Equals(Accel other, double epsilon) =>
Math.Abs(other.Value - Value) < epsilon;
public override bool Equals(object? obj) =>
obj is Accel accel && Equals(accel);
public override int GetHashCode() =>
Value.GetHashCode();
public int CompareTo(object? obj) =>
obj switch
{
null => 1,
Accel other => CompareTo(other),
_ => throw new ArgumentException($"must be of type {nameof(Accel)}")
};
public int CompareTo(Accel other) =>
Value.CompareTo(other.Value);
public Accel Add(Accel rhs) =>
new (Value + rhs.Value);
public Accel Subtract(Accel rhs) =>
new (Value - rhs.Value);
public Accel Multiply(double rhs) =>
new (Value * rhs);
public Accel Divide(double rhs) =>
new (Value / rhs);
public static Accel operator *(Accel lhs, double rhs) =>
lhs.Multiply(rhs);
public static Accel operator *(double lhs, Accel rhs) =>
rhs.Multiply(lhs);
public static Velocity operator *(Accel lhs, Time rhs) =>
new (lhs.Value * rhs.Seconds);
public static Velocity operator *(Time lhs, Accel rhs) =>
new (lhs.Seconds * rhs.Value);
public static VelocitySq operator *(Accel lhs, Length rhs) =>
new (lhs.Value * rhs.Metres);
public static VelocitySq operator *(Length lhs, Accel rhs) =>
new (rhs.Value * lhs.Metres);
public static Length operator *(Accel lhs, TimeSq rhs) =>
new (lhs.Value * rhs.Seconds2);
public static Length operator *(TimeSq lhs, Accel rhs) =>
new (rhs.Value * lhs.Seconds2);
public static Accel operator +(Accel lhs, Accel rhs) =>
lhs.Add(rhs);
public static Accel operator -(Accel lhs, Accel rhs) =>
lhs.Subtract(rhs);
public static Accel operator /(Accel lhs, double rhs) =>
lhs.Divide(rhs);
public static double operator /(Accel lhs, Accel rhs) =>
lhs.Value / rhs.Value;
public static bool operator ==(Accel lhs, Accel rhs) =>
lhs.Equals(rhs);
public static bool operator !=(Accel lhs, Accel rhs) =>
!lhs.Equals(rhs);
public static bool operator >(Accel lhs, Accel rhs) =>
lhs.Value > rhs.Value;
public static bool operator <(Accel lhs, Accel rhs) =>
lhs.Value < rhs.Value;
public static bool operator >=(Accel lhs, Accel rhs) =>
lhs.Value >= rhs.Value;
public static bool operator <=(Accel lhs, Accel rhs) =>
lhs.Value <= rhs.Value;
public Accel Pow(double power) =>
new (Math.Pow(Value,power));
public Accel Round() =>
new (Math.Round(Value));
public Accel Sqrt() =>
new (Math.Sqrt(Value));
public Accel Abs() =>
new (Math.Abs(Value));
public Accel Min(Accel rhs) =>
new (Math.Min(Value, rhs.Value));
public Accel Max(Accel rhs) =>
new (Math.Max(Value, rhs.Value));
public double MetresPerSecond2 => Value;
}
| Accel |
csharp | dotnet__efcore | src/EFCore/ChangeTracking/Internal/Snapshot.cs | {
"start": 210633,
"end": 215173
} | public sealed class ____<T0, T1, T2, T3, T4, T5, T6, T7>
: ISnapshot
{
private static readonly Delegate[] ValueReaders =
[
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value0,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value1,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value2,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value3,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value4,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value5,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value6,
(Snapshot<T0, T1, T2, T3, T4, T5, T6, T7> e) => e._value7
];
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
public Snapshot(
T0 value0,
T1 value1,
T2 value2,
T3 value3,
T4 value4,
T5 value5,
T6 value6,
T7 value7)
{
_value0 = value0;
_value1 = value1;
_value2 = value2;
_value3 = value3;
_value4 = value4;
_value5 = value5;
_value6 = value6;
_value7 = value7;
}
private T0 _value0;
private T1 _value1;
private T2 _value2;
private T3 _value3;
private T4 _value4;
private T5 _value5;
private T6 _value6;
private T7 _value7;
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
public T GetValue<T>(int index)
=> ((Func<Snapshot<T0, T1, T2, T3, T4, T5, T6, T7>, T>)ValueReaders[index])(this);
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
public object? this[int index]
{
get => index switch
{
0 => _value0,
1 => _value1,
2 => _value2,
3 => _value3,
4 => _value4,
5 => _value5,
6 => _value6,
7 => _value7,
_ => throw new IndexOutOfRangeException()
};
set
{
switch (index)
{
case 0:
_value0 = (T0)value!;
break;
case 1:
_value1 = (T1)value!;
break;
case 2:
_value2 = (T2)value!;
break;
case 3:
_value3 = (T3)value!;
break;
case 4:
_value4 = (T4)value!;
break;
case 5:
_value5 = (T5)value!;
break;
case 6:
_value6 = (T6)value!;
break;
case 7:
_value7 = (T7)value!;
break;
default:
throw new IndexOutOfRangeException();
}
}
}
}
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
| Snapshot |
csharp | dotnet__machinelearning | src/Microsoft.ML.Tokenizers/Model/SentencePieceBpeModel.cs | {
"start": 50027,
"end": 51246
} | private struct ____ : IEquatable<SymbolPair>, IComparable<SymbolPair>
{
public int Left { get; set; }
public int Right { get; set; }
public int Length { get; set; }
public float Score { get; set; }
public int Id { get; set; }
public SymbolPair(int left, int right, float score, int length, int id)
{
Left = left;
Right = right;
Score = score;
Length = length;
Id = id;
}
public int CompareTo(SymbolPair other)
{
if (Score != other.Score)
{
return other.Score.CompareTo(Score);
}
return other.Left.CompareTo(Left);
}
public override int GetHashCode()
{
int hashCode = 23;
hashCode = (hashCode * 37) + Score.GetHashCode();
hashCode = (hashCode * 37) + Left.GetHashCode();
return hashCode;
}
public bool Equals(SymbolPair other) => Left == other.Left && Score == other.Score;
}
| SymbolPair |
csharp | icsharpcode__AvalonEdit | ICSharpCode.AvalonEdit/Highlighting/HighlightedInlineBuilder.cs | {
"start": 1863,
"end": 6617
} | public sealed class ____
{
static HighlightingBrush MakeBrush(Brush b)
{
SolidColorBrush scb = b as SolidColorBrush;
if (scb != null)
return new SimpleHighlightingBrush(scb);
else
return null;
}
readonly string text;
List<int> stateChangeOffsets = new List<int>();
List<HighlightingColor> stateChanges = new List<HighlightingColor>();
int GetIndexForOffset(int offset)
{
if (offset < 0 || offset > text.Length)
throw new ArgumentOutOfRangeException("offset");
int index = stateChangeOffsets.BinarySearch(offset);
if (index < 0) {
index = ~index;
if (offset < text.Length) {
stateChanges.Insert(index, stateChanges[index - 1].Clone());
stateChangeOffsets.Insert(index, offset);
}
}
return index;
}
/// <summary>
/// Creates a new HighlightedInlineBuilder instance.
/// </summary>
public HighlightedInlineBuilder(string text)
{
if (text == null)
throw new ArgumentNullException("text");
this.text = text;
stateChangeOffsets.Add(0);
stateChanges.Add(new HighlightingColor());
}
/// <summary>
/// Creates a new HighlightedInlineBuilder instance.
/// </summary>
public HighlightedInlineBuilder(RichText text)
{
if (text == null)
throw new ArgumentNullException("text");
this.text = text.Text;
stateChangeOffsets.AddRange(text.stateChangeOffsets);
stateChanges.AddRange(text.stateChanges);
}
HighlightedInlineBuilder(string text, List<int> offsets, List<HighlightingColor> states)
{
this.text = text;
stateChangeOffsets = offsets;
stateChanges = states;
}
/// <summary>
/// Gets the text.
/// </summary>
public string Text {
get { return text; }
}
/// <summary>
/// Applies the properties from the HighlightingColor to the specified text segment.
/// </summary>
public void SetHighlighting(int offset, int length, HighlightingColor color)
{
if (color == null)
throw new ArgumentNullException("color");
if (color.Foreground == null && color.Background == null && color.FontStyle == null && color.FontWeight == null && color.Underline == null) {
// Optimization: don't split the HighlightingState when we're not changing
// any property. For example, the "Punctuation" color in C# is
// empty by default.
return;
}
int startIndex = GetIndexForOffset(offset);
int endIndex = GetIndexForOffset(offset + length);
for (int i = startIndex; i < endIndex; i++) {
stateChanges[i].MergeWith(color);
}
}
/// <summary>
/// Sets the foreground brush on the specified text segment.
/// </summary>
public void SetForeground(int offset, int length, Brush brush)
{
int startIndex = GetIndexForOffset(offset);
int endIndex = GetIndexForOffset(offset + length);
var hbrush = MakeBrush(brush);
for (int i = startIndex; i < endIndex; i++) {
stateChanges[i].Foreground = hbrush;
}
}
/// <summary>
/// Sets the background brush on the specified text segment.
/// </summary>
public void SetBackground(int offset, int length, Brush brush)
{
int startIndex = GetIndexForOffset(offset);
int endIndex = GetIndexForOffset(offset + length);
var hbrush = MakeBrush(brush);
for (int i = startIndex; i < endIndex; i++) {
stateChanges[i].Background = hbrush;
}
}
/// <summary>
/// Sets the font weight on the specified text segment.
/// </summary>
public void SetFontWeight(int offset, int length, FontWeight weight)
{
int startIndex = GetIndexForOffset(offset);
int endIndex = GetIndexForOffset(offset + length);
for (int i = startIndex; i < endIndex; i++) {
stateChanges[i].FontWeight = weight;
}
}
/// <summary>
/// Sets the font style on the specified text segment.
/// </summary>
public void SetFontStyle(int offset, int length, FontStyle style)
{
int startIndex = GetIndexForOffset(offset);
int endIndex = GetIndexForOffset(offset + length);
for (int i = startIndex; i < endIndex; i++) {
stateChanges[i].FontStyle = style;
}
}
/// <summary>
/// Creates WPF Run instances that can be used for TextBlock.Inlines.
/// </summary>
public Run[] CreateRuns()
{
return ToRichText().CreateRuns();
}
/// <summary>
/// Creates a RichText instance.
/// </summary>
public RichText ToRichText()
{
return new RichText(text, stateChangeOffsets.ToArray(), stateChanges.Select(FreezableHelper.GetFrozenClone).ToArray());
}
/// <summary>
/// Clones this HighlightedInlineBuilder.
/// </summary>
public HighlightedInlineBuilder Clone()
{
return new HighlightedInlineBuilder(this.text,
stateChangeOffsets.ToList(),
stateChanges.Select(sc => sc.Clone()).ToList());
}
}
}
| HighlightedInlineBuilder |
csharp | ServiceStack__ServiceStack | ServiceStack.Redis/src/ServiceStack.Redis/Support/Locking/DisposableDistributedLock.cs | {
"start": 97,
"end": 182
} | class ____ follows the Resource Allocation Is Initialization pattern
/// </summary>
| that |
csharp | dotnet__machinelearning | src/Microsoft.ML.FastTree/Utils/LinqExtensions.cs | {
"start": 320,
"end": 8308
} | internal static class ____
{
public static int ArgMin<T>(this T[] arr) where T : IComparable<T>
{
if (arr.Length == 0)
return -1;
int argMin = 0;
for (int i = 1; i < arr.Length; i++)
{
if (arr[i].CompareTo(arr[argMin]) < 0)
argMin = i;
}
return argMin;
}
public static int ArgMax<T>(this ReadOnlySpan<T> span) where T : IComparable<T>
{
if (span.Length == 0)
return -1;
int argMax = 0;
for (int i = 1; i < span.Length; i++)
{
if (span[i].CompareTo(span[argMax]) > 0)
argMax = i;
}
return argMax;
}
public static int ArgMin<T>(this T[] arr, int prefix) where T : IComparable<T>
{
int length = arr.Length < prefix ? arr.Length : prefix;
if (length == 0)
return -1;
int argMin = 0;
for (int i = 1; i < length; i++)
{
if (arr[i].CompareTo(arr[argMin]) < 0)
argMin = i;
}
return argMin;
}
public static int ArgMax<T>(this T[] arr, int prefix) where T : IComparable<T>
{
int length = arr.Length < prefix ? arr.Length : prefix;
if (length == 0)
return -1;
int argMax = 0;
for (int i = 1; i < length; i++)
{
if (arr[i].CompareTo(arr[argMax]) > 0)
argMax = i;
}
return argMax;
}
public static int ArgMax<T>(this IEnumerable<T> e) where T : IComparable<T>
{
T max = e.First();
int argMax = 0;
int i = 1;
foreach (T d in e.Skip(1))
{
if (d.CompareTo(max) > 0)
{
argMax = i;
max = d;
}
++i;
}
return argMax;
}
public static int ArgMaxRand<T>(this IEnumerable<T> e, Random rnd, double fraction) where T : IComparable<T>
{
T max = e.First();
int argMax = 0;
int i = 1;
foreach (T d in e.Skip(1))
{
if (d.CompareTo(max) > 0 && rnd.NextDouble() < fraction)
{
argMax = i;
max = d;
}
++i;
}
return argMax;
}
public static int ArgMax<T>(this IEnumerable<T> e, int prefix) where T : IComparable<T>
{
if (prefix <= 1)
return 0;
T max = e.First();
int argMax = 0;
int i = 0;
foreach (T d in e)
{
if (i == prefix)
break;
if (d.CompareTo(max) > 0)
{
argMax = i;
max = d;
}
++i;
}
return argMax;
}
public static int ArgMaxRand<T>(this IEnumerable<T> e, int prefix, Random rnd, double fraction) where T : IComparable<T>
{
if (prefix <= 1)
return 0;
T max = e.First();
int argMax = 0;
int i = 0;
foreach (T d in e)
{
if (i == prefix)
break;
if (d.CompareTo(max) > 0 && rnd.NextDouble() < fraction)
{
argMax = i;
max = d;
}
++i;
}
return argMax;
}
public static int ArgMin<T>(this IEnumerable<T> e) where T : IComparable<T>
{
T max = e.First();
int argMin = 0;
int i = 0;
foreach (T d in e)
{
if (d.CompareTo(max) < 0)
{
argMin = i;
max = d;
}
++i;
}
return argMin;
}
public static int ArgMin<T>(this IEnumerable<T> e, int prefix) where T : IComparable<T>
{
if (prefix <= 1)
return 0;
T max = e.First();
int argMin = 0;
int i = 0;
foreach (T d in e)
{
if (i == prefix)
break;
if (d.CompareTo(max) < 0)
{
argMin = i;
max = d;
}
++i;
}
return argMin;
}
// More efficient ToArray pre-allocates the length of array necessary
// Will truncate the IEnumerable at the given length.
public static T[] ToArray<T>(this IEnumerable<T> me, int length)
{
T[] items = new T[length];
int itemsIndex = 0;
foreach (T item in me)
{
items[itemsIndex++] = item;
if (itemsIndex >= length) // OPTIMIZE: Could have a separate routine that doesn't do this, for efficiency
break;
}
return items;
}
/// <summary>
/// RunningLength. Converts sequence like 1, 2, 3, 4
/// to 1, 3, 6, 10
/// </summary>
public static IEnumerable<int> CumulativeSum<T>(this IEnumerable<int> s)
{
int sum = 0;
foreach (var x in s)
{
sum = sum + x;
yield return sum;
}
}
//Merges 2 sorted lists in an ascending order
public static IEnumerable<T> MergeSortedList<T>(this IEnumerable<T> s1, IEnumerable<T> s2) where T : IComparable<T>
{
var e1 = s1.GetEnumerator();
var e2 = s2.GetEnumerator();
bool moreE1 = e1.MoveNext();
bool moreE2 = e2.MoveNext();
while (moreE1 && moreE2)
{
if (e1.Current.CompareTo(e2.Current) <= 0)
{
yield return e1.Current;
moreE1 = e1.MoveNext();
}
else
{
yield return e2.Current;
moreE2 = e2.MoveNext();
}
}
while (moreE1)
{
yield return e1.Current;
moreE1 = e1.MoveNext();
}
while (moreE2)
{
yield return e2.Current;
moreE2 = e2.MoveNext();
}
}
public static int SoftArgMax(this IEnumerable<double> values, Random rand)
{
int len = 0;
double max = double.NegativeInfinity;
foreach (double value in values)
{
++len;
if (value > max)
max = value;
}
if (len == 0)
return -1;
else if (double.IsNegativeInfinity(max))
{
lock (rand)
return rand.Next(len);
}
double total = values.Sum(value => Math.Exp(value - max));
// Loop just in case due to roundoff we don't choose anything in first pass -- very unlikely.
for (; ; )
{
double r;
lock (rand)
r = rand.NextDouble() * total;
int i = 0;
foreach (double value in values)
{
r -= Math.Exp(value - max);
if (r <= 0)
return i;
++i;
}
}
}
}
}
| LinqExtensions |
csharp | Cysharp__UniTask | src/UniTask/Assets/Plugins/UniTask/Runtime/Linq/Return.cs | {
"start": 719,
"end": 1631
} | class ____ : IUniTaskAsyncEnumerator<TValue>
{
readonly TValue value;
CancellationToken cancellationToken;
bool called;
public _Return(TValue value, CancellationToken cancellationToken)
{
this.value = value;
this.cancellationToken = cancellationToken;
this.called = false;
}
public TValue Current => value;
public UniTask<bool> MoveNextAsync()
{
cancellationToken.ThrowIfCancellationRequested();
if (!called)
{
called = true;
return CompletedTasks.True;
}
return CompletedTasks.False;
}
public UniTask DisposeAsync()
{
return default;
}
}
}
} | _Return |
csharp | unoplatform__uno | src/SamplesApp/UITests.Shared/Windows_UI_Xaml_Controls/ListView/ListViewVariableHeightComplexTemplate.xaml.cs | {
"start": 289,
"end": 452
} | partial class ____ : UserControl
{
public ListViewVariableHeightComplexTemplate()
{
this.InitializeComponent();
}
}
}
| ListViewVariableHeightComplexTemplate |
csharp | dotnet__maui | src/Controls/src/Core/Items/CarouselLayoutTypeConverter.cs | {
"start": 287,
"end": 1536
} | public class ____ : TypeConverter
{
public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType)
=> sourceType == typeof(string);
public override bool CanConvertTo(ITypeDescriptorContext? context, Type? destinationType)
=> destinationType == typeof(string);
public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value)
{
var strValue = value?.ToString();
if (strValue == "HorizontalList")
{
return LinearItemsLayout.CreateCarouselHorizontalDefault();
}
if (strValue == "VerticalList")
{
return LinearItemsLayout.CreateCarouselVerticalDefault();
}
throw new InvalidOperationException($"Cannot convert \"{strValue}\" into {typeof(LinearItemsLayout)}");
}
public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType)
{
if (value is not LinearItemsLayout lil)
{
throw new NotSupportedException();
}
if (lil == LinearItemsLayout.CarouselDefault)
{
return "HorizontalList";
}
if (lil == LinearItemsLayout.CarouselVertical)
{
return "VerticalList";
}
throw new NotSupportedException();
}
}
}
| CarouselLayoutTypeConverter |
csharp | LibreHardwareMonitor__LibreHardwareMonitor | LibreHardwareMonitorLib/Hardware/IHardwareChanged.cs | {
"start": 323,
"end": 456
} | internal interface ____
{
event HardwareEventHandler HardwareAdded;
event HardwareEventHandler HardwareRemoved;
} | IHardwareChanged |
csharp | unoplatform__uno | src/Uno.UWP/Generated/3.0.0.0/Windows.System/DiagnosticAccessStatus.cs | {
"start": 252,
"end": 809
} | public enum ____
{
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
Unspecified = 0,
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
Denied = 1,
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
Limited = 2,
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
Allowed = 3,
#endif
}
#endif
}
| DiagnosticAccessStatus |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Core/src/Types/Types/Scalars/IntType.cs | {
"start": 394,
"end": 1572
} | public class ____ : IntegerTypeBase<int>
{
/// <summary>
/// Initializes a new instance of the <see cref="IntType"/> class.
/// </summary>
public IntType(int min, int max)
: this(
ScalarNames.Int,
TypeResources.IntType_Description,
min,
max,
BindingBehavior.Implicit)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="IntType"/> class.
/// </summary>
public IntType(
string name,
string? description = null,
int min = int.MinValue,
int max = int.MaxValue,
BindingBehavior bind = BindingBehavior.Explicit)
: base(name, min, max, bind)
{
Description = description;
}
/// <summary>
/// Initializes a new instance of the <see cref="IntType"/> class.
/// </summary>
[ActivatorUtilitiesConstructor]
public IntType()
: this(int.MinValue, int.MaxValue)
{
}
protected override int ParseLiteral(IntValueNode valueSyntax)
=> valueSyntax.ToInt32();
protected override IntValueNode ParseValue(int runtimeValue)
=> new(runtimeValue);
}
| IntType |
csharp | dotnet__aspire | src/Aspire.Dashboard/ServiceClient/IDashboardClient.cs | {
"start": 405,
"end": 3029
} | public interface ____ : IAsyncDisposable
{
Task WhenConnected { get; }
/// <summary>
/// Gets whether the client object is enabled for use.
/// </summary>
/// <remarks>
/// Users of <see cref="IDashboardClient"/> client should check <see cref="IsEnabled"/> before calling
/// any other members of this interface, to avoid exceptions.
/// </remarks>
bool IsEnabled { get; }
/// <summary>
/// Gets the application name advertised by the server.
/// </summary>
/// <remarks>
/// Intended for display in the UI.
/// </remarks>
string ApplicationName { get; }
/// <summary>
/// Gets the current set of resources and a stream of updates.
/// </summary>
/// <remarks>
/// The returned subscription will not complete on its own.
/// Callers are required to manage the lifetime of the subscription,
/// using cancellation during enumeration.
/// </remarks>
Task<ResourceViewModelSubscription> SubscribeResourcesAsync(CancellationToken cancellationToken);
/// <summary>
/// Gets a resource matching the specified name. This resource won't be updated with changes after it is fetched.
/// </summary>
ResourceViewModel? GetResource(string resourceName);
/// <summary>
/// Get the current resources.
/// </summary>
/// <returns></returns>
IReadOnlyList<ResourceViewModel> GetResources();
IAsyncEnumerable<WatchInteractionsResponseUpdate> SubscribeInteractionsAsync(CancellationToken cancellationToken);
Task SendInteractionRequestAsync(WatchInteractionsRequestUpdate request, CancellationToken cancellationToken);
/// <summary>
/// Gets a stream of console log messages for the specified resource.
/// Includes messages logged both before and after this method call.
/// </summary>
/// <remarks>
/// <para>The returned sequence may end when the resource terminates.
/// It is up to the implementation.</para>
/// </remarks>
/// <para>It is important that callers trigger <paramref name="cancellationToken"/>
/// so that resources owned by the sequence and its consumers can be freed.</para>
IAsyncEnumerable<IReadOnlyList<ResourceLogLine>> SubscribeConsoleLogs(string resourceName, CancellationToken cancellationToken);
IAsyncEnumerable<IReadOnlyList<ResourceLogLine>> GetConsoleLogs(string resourceName, CancellationToken cancellationToken);
Task<ResourceCommandResponseViewModel> ExecuteResourceCommandAsync(string resourceName, string resourceType, CommandViewModel command, CancellationToken cancellationToken);
}
| IDashboardClient |
csharp | ShareX__ShareX | ShareX/Forms/InspectWindowForm.cs | {
"start": 1240,
"end": 7928
} | public partial class ____ : Form
{
public WindowInfo SelectedWindow { get; private set; }
public bool IsWindow { get; private set; }
private bool updating;
public InspectWindowForm()
{
InitializeComponent();
rtbInfo.AddContextMenu();
ShareXResources.ApplyTheme(this, true);
SelectHandle(true);
}
private void UpdateWindowListMenu()
{
cmsWindowList.Items.Clear();
WindowsList windowsList = new WindowsList();
List<WindowInfo> windows = windowsList.GetVisibleWindowsList();
if (windows != null && windows.Count > 0)
{
List<ToolStripMenuItem> items = new List<ToolStripMenuItem>();
foreach (WindowInfo window in windows)
{
try
{
string title = window.Text;
string shortTitle = title.Truncate(50, "...");
ToolStripMenuItem tsmi = new ToolStripMenuItem(shortTitle);
tsmi.Click += (sender, e) => SelectWindow(window.Handle, true);
using (Icon icon = window.Icon)
{
if (icon != null && icon.Width > 0 && icon.Height > 0)
{
tsmi.Image = icon.ToBitmap();
}
}
items.Add(tsmi);
}
catch (Exception e)
{
DebugHelper.WriteException(e);
}
}
cmsWindowList.Items.AddRange(items.OrderBy(x => x.Text).ToArray());
}
}
private void SelectWindow(IntPtr handle, bool isWindow)
{
SelectedWindow = new WindowInfo(handle);
IsWindow = isWindow;
UpdateWindowInfo();
}
private bool SelectHandle(bool isWindow)
{
RegionCaptureOptions options = new RegionCaptureOptions()
{
DetectControls = !isWindow
};
SelectedWindow = null;
SimpleWindowInfo simpleWindowInfo = RegionCaptureTasks.GetWindowInfo(options);
if (simpleWindowInfo != null)
{
SelectWindow(simpleWindowInfo.Handle, isWindow);
return true;
}
UpdateWindowInfo();
return false;
}
private void UpdateWindowInfo()
{
updating = true;
btnRefresh.Enabled = SelectedWindow != null;
if (SelectedWindow != null && IsWindow)
{
cbTopMost.Visible = true;
cbTopMost.Checked = SelectedWindow.TopMost;
nudOpacity.Visible = true;
nudOpacity.SetValue((int)Math.Round(SelectedWindow.Opacity / 255.0 * 100));
lblOpacity.Visible = true;
lblOpacityTip.Visible = true;
}
else
{
cbTopMost.Visible = false;
nudOpacity.Visible = false;
lblOpacity.Visible = false;
lblOpacityTip.Visible = false;
}
rtbInfo.ResetText();
if (SelectedWindow != null)
{
try
{
AddInfo(Resources.InspectWindow_WindowHandle, SelectedWindow.Handle.ToString("X8"));
AddInfo(Resources.InspectWindow_WindowTitle, SelectedWindow.Text);
AddInfo(Resources.InspectWindow_ClassName, SelectedWindow.ClassName);
AddInfo(Resources.InspectWindow_ProcessName, SelectedWindow.ProcessName);
AddInfo(Resources.InspectWindow_ProcessFileName, SelectedWindow.ProcessFileName);
AddInfo(Resources.InspectWindow_ProcessIdentifier, SelectedWindow.ProcessId.ToString());
AddInfo(Resources.InspectWindow_WindowRectangle, SelectedWindow.Rectangle.ToStringProper());
AddInfo(Resources.InspectWindow_ClientRectangle, SelectedWindow.ClientRectangle.ToStringProper());
AddInfo(Resources.InspectWindow_WindowStyles, SelectedWindow.Style.ToString().Replace(", ", "\r\n"));
AddInfo(Resources.InspectWindow_ExtendedWindowStyles, SelectedWindow.ExStyle.ToString().Replace(", ", "\r\n"));
}
catch
{
}
}
updating = false;
}
private void AddInfo(string name, string value)
{
if (!string.IsNullOrEmpty(value))
{
if (rtbInfo.TextLength > 0)
{
rtbInfo.AppendLine();
rtbInfo.AppendLine();
}
rtbInfo.SetFontBold();
rtbInfo.AppendLine(name);
rtbInfo.SetFontRegular();
rtbInfo.AppendText(value);
}
}
private void mbWindowList_MouseDown(object sender, MouseEventArgs e)
{
UpdateWindowListMenu();
}
private void btnInspectWindow_Click(object sender, EventArgs e)
{
SelectHandle(true);
}
private void btnInspectControl_Click(object sender, EventArgs e)
{
SelectHandle(false);
}
private void btnRefresh_Click(object sender, EventArgs e)
{
UpdateWindowInfo();
}
private void cbTopMost_CheckedChanged(object sender, EventArgs e)
{
if (!updating && SelectedWindow != null)
{
try
{
WindowInfo windowInfo = new WindowInfo(SelectedWindow.Handle);
windowInfo.TopMost = cbTopMost.Checked;
UpdateWindowInfo();
}
catch
{
}
}
}
private void nudOpacity_ValueChanged(object sender, EventArgs e)
{
if (!updating && SelectedWindow != null)
{
try
{
WindowInfo windowInfo = new WindowInfo(SelectedWindow.Handle);
windowInfo.Opacity = (byte)Math.Round(nudOpacity.Value / 100 * 255);
UpdateWindowInfo();
}
catch
{
}
}
}
}
} | InspectWindowForm |
csharp | ServiceStack__ServiceStack | ServiceStack/src/ServiceStack/ServiceCollectionExtensions.cs | {
"start": 638,
"end": 819
} | public static class ____
{
public const int AutoQueryDataFeature = 10;
public const int AutoQueryFeature = 20;
public const int ValidationFeature = 100;
}
| ConfigurePriority |
csharp | icsharpcode__ILSpy | ICSharpCode.Decompiler/IL/Instructions.cs | {
"start": 67620,
"end": 68185
} | partial class ____ : CallInstruction
{
public CallVirt(IMethod method) : base(OpCode.CallVirt, method)
{
}
public override void AcceptVisitor(ILVisitor visitor)
{
visitor.VisitCallVirt(this);
}
public override T AcceptVisitor<T>(ILVisitor<T> visitor)
{
return visitor.VisitCallVirt(this);
}
public override T AcceptVisitor<C, T>(ILVisitor<C, T> visitor, C context)
{
return visitor.VisitCallVirt(this, context);
}
}
}
namespace ICSharpCode.Decompiler.IL
{
/// <summary>Unsafe function pointer call.</summary>
public sealed | CallVirt |
csharp | JoshClose__CsvHelper | src/CsvHelper/IObjectResolver.cs | {
"start": 390,
"end": 456
} | class ____ creates objects
/// from a given type.
/// </summary>
| that |
csharp | OrchardCMS__OrchardCore | src/OrchardCore/OrchardCore.Mvc.Core/LocationExpander/IViewLocationExpanderProvider.cs | {
"start": 84,
"end": 185
} | public interface ____ : IViewLocationExpander
{
int Priority { get; }
}
| IViewLocationExpanderProvider |
csharp | Antaris__RazorEngine | src/source/RazorEngine.Core/Templating/DelegateAppDomainFactory.cs | {
"start": 246,
"end": 1119
} | internal class ____ : IAppDomainFactory
{
#region Fields
private readonly Func<AppDomain> _factory;
#endregion
#region Constructor
/// <summary>
/// Initialises a new instance of <see cref="DelegateAppDomainFactory"/>.
/// </summary>
/// <param name="factory">The factory delegate.</param>
public DelegateAppDomainFactory(Func<AppDomain> factory)
{
Contract.Requires(factory != null);
_factory = factory;
}
#endregion
#region Methods
/// <summary>
/// Creates the <see cref="AppDomain"/>.
/// </summary>
/// <returns>The <see cref="AppDomain"/> instance.</returns>
public AppDomain CreateAppDomain()
{
return _factory();
}
#endregion
}
}
| DelegateAppDomainFactory |
csharp | npgsql__npgsql | src/Npgsql/PostgresDatabaseInfo.cs | {
"start": 9323,
"end": 22019
} | enum ____
SELECT typ.oid, enumlabel
FROM pg_enum
JOIN pg_type AS typ ON typ.oid = enumtypid
JOIN pg_namespace AS ns ON ns.oid = typ.typnamespace
{(schemaListSqlFragment is not null ? $"WHERE (ns.nspname IN ({schemaListSqlFragment}))" : "")}
ORDER BY oid{(withEnumSortOrder ? ", enumsortorder" : "")};";
/// <summary>
/// Loads type information from the backend specified by <paramref name="conn"/>.
/// </summary>
/// <param name="conn">The database connection.</param>
/// <param name="timeout">The timeout while loading types from the backend.</param>
/// <param name="async">True to load types asynchronously.</param>
/// <returns>
/// A collection of types loaded from the backend.
/// </returns>
/// <exception cref="TimeoutException" />
/// <exception cref="ArgumentOutOfRangeException">Unknown typtype for type '{internalName}' in pg_type: {typeChar}.</exception>
internal async Task<List<PostgresType>> LoadBackendTypes(NpgsqlConnector conn, NpgsqlTimeout timeout, bool async)
{
var versionQuery = "SELECT version();";
var typeLoading = conn.DataSource.Configuration.TypeLoading;
var loadTableComposites = typeLoading.LoadTableComposites;
// Escape the schemas configured by the user, we need these as literals to be used in an IN() operator, and we cannot use parameters.
// Add an opening quote, escape any quotes in the schema, and add a closing quote.
string? schemaListSqlFragment = null;
if (typeLoading.TypeLoadingSchemas is not null)
{
var builder = new StringBuilder(BuiltinSchemaListSqlFragment);
for (var i = 0; i < typeLoading.TypeLoadingSchemas.Length; i++)
{
builder.Append(", ");
var schema = typeLoading.TypeLoadingSchemas[i];
builder.Append('\'');
builder.Append(EscapeLiteral(schema));
builder.Append('\'');
}
schemaListSqlFragment = builder.ToString();
}
var loadTypesQuery = GenerateLoadTypesQuery(SupportsRangeTypes, SupportsMultirangeTypes, loadTableComposites, schemaListSqlFragment, HasTypeCategory);
var loadCompositeTypesQuery = GenerateLoadCompositeTypesQuery(loadTableComposites, schemaListSqlFragment);
var loadEnumFieldsQuery = SupportsEnumTypes
? GenerateLoadEnumFieldsQuery(HasEnumSortOrder, schemaListSqlFragment)
: string.Empty;
timeout.CheckAndApply(conn);
// The Lexer (https://github.com/postgres/postgres/blob/master/src/backend/replication/repl_scanner.l)
// and Parser (https://github.com/postgres/postgres/blob/master/src/backend/replication/repl_gram.y)
// for replication connections are pretty picky and somewhat flawed.
// Currently (2022-01-22) they do not support
// - SQL batches containing multiple commands
// - The <CR> ('\r') in Windows or Mac newlines
// - Comments
// For this reason we need clean up our type loading queries for replication connections and execute
// them individually instead of batched.
// Theoretically we cold even use the extended protocol + batching for regular (non-replication)
// connections but that would branch our code even more for very little gain.
var isReplicationConnection = conn.Settings.ReplicationMode != ReplicationMode.Off;
if (isReplicationConnection)
{
await conn.WriteQuery(versionQuery, async).ConfigureAwait(false);
await conn.WriteQuery(SanitizeForReplicationConnection(loadTypesQuery), async).ConfigureAwait(false);
await conn.WriteQuery(SanitizeForReplicationConnection(loadCompositeTypesQuery), async).ConfigureAwait(false);
if (SupportsEnumTypes)
await conn.WriteQuery(SanitizeForReplicationConnection(loadEnumFieldsQuery), async).ConfigureAwait(false);
static string SanitizeForReplicationConnection(string str)
{
var sb = new StringBuilder(str.Length);
using var c = str.GetEnumerator();
while (c.MoveNext())
{
switch (c.Current)
{
case '\r':
sb.Append('\n');
// Check for a \n after the \r
// and swallow it if it exists
if (c.MoveNext())
{
if (c.Current == '-')
goto case '-';
if (c.Current != '\n')
sb.Append(c.Current);
}
break;
case '-':
// Check if there is a second dash
if (c.MoveNext())
{
if (c.Current == '\r')
{
sb.Append('-');
goto case '\r';
}
if (c.Current != '-')
{
sb.Append('-');
sb.Append(c.Current);
break;
}
// Comment mode
// Swallow everything until we find a newline
while (c.MoveNext())
{
if (c.Current == '\r')
goto case '\r';
if (c.Current == '\n')
{
sb.Append('\n');
break;
}
}
}
break;
default:
sb.Append(c.Current);
break;
}
}
return sb.ToString();
}
}
else
{
var batchQuery = new StringBuilder(
versionQuery.Length +
loadTypesQuery.Length +
loadCompositeTypesQuery.Length +
(SupportsEnumTypes
? loadEnumFieldsQuery.Length
: 0))
.AppendLine(versionQuery)
.AppendLine(loadTypesQuery)
.AppendLine(loadCompositeTypesQuery);
if (SupportsEnumTypes)
batchQuery.AppendLine(loadEnumFieldsQuery);
await conn.WriteQuery(batchQuery.ToString(), async).ConfigureAwait(false);
}
await conn.Flush(async).ConfigureAwait(false);
var byOID = new Dictionary<uint, PostgresType>();
// First read the PostgreSQL version
Expect<RowDescriptionMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
// We read the message in non-sequential mode which buffers the whole message.
// There is no need to ensure data within the message boundaries
Expect<DataRowMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
// Note that here and below we don't assign ReadBuffer to a variable
// because we might allocate oversize buffer
conn.ReadBuffer.Skip(2); // Column count
LongVersion = ReadNonNullableString(conn.ReadBuffer);
Expect<CommandCompleteMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
if (isReplicationConnection)
Expect<ReadyForQueryMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
// Then load the types
Expect<RowDescriptionMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
IBackendMessage msg;
var unknownPostgresTypes = new List<PostgresTypeDefinition>();
while (true)
{
msg = await conn.ReadMessage(async).ConfigureAwait(false);
if (msg is not DataRowMessage)
break;
conn.ReadBuffer.Skip(2); // Column count
var nspname = ReadNonNullableString(conn.ReadBuffer);
var oid = uint.Parse(ReadNonNullableString(conn.ReadBuffer), NumberFormatInfo.InvariantInfo);
Debug.Assert(oid != 0);
var typname = ReadNonNullableString(conn.ReadBuffer);
var typtype = ReadNonNullableString(conn.ReadBuffer)[0];
var typnotnull = ReadNonNullableString(conn.ReadBuffer)[0] == 't';
var len = conn.ReadBuffer.ReadInt32();
var elemtypoid = len == -1 ? 0 : uint.Parse(conn.ReadBuffer.ReadString(len), NumberFormatInfo.InvariantInfo);
var postgresTypeDefinition = new PostgresTypeDefinition(nspname, oid, typname, typtype, typnotnull, elemtypoid);
if (!TryAddPostgresType(postgresTypeDefinition, byOID))
unknownPostgresTypes.Add(postgresTypeDefinition);
}
while (unknownPostgresTypes.Count > 0)
{
var hasChanges = false;
for (var i = unknownPostgresTypes.Count - 1; i >= 0; i--)
{
var unknownPostgresType = unknownPostgresTypes[i];
if (TryAddPostgresType(unknownPostgresType, byOID))
{
unknownPostgresTypes.RemoveAt(i);
hasChanges = true;
}
}
if (!hasChanges)
{
_connectionLogger.LogWarning("Unable to load '{UnknownTypeCount}' Postgres types while loading database info.",
unknownPostgresTypes.Count);
break;
}
}
Expect<CommandCompleteMessage>(msg, conn);
if (isReplicationConnection)
Expect<ReadyForQueryMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
// Then load the composite type fields
Expect<RowDescriptionMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
var currentOID = uint.MaxValue;
PostgresCompositeType? currentComposite = null;
var skipCurrent = false;
while (true)
{
msg = await conn.ReadMessage(async).ConfigureAwait(false);
if (msg is not DataRowMessage)
break;
conn.ReadBuffer.Skip(2); // Column count
var oid = uint.Parse(ReadNonNullableString(conn.ReadBuffer), NumberFormatInfo.InvariantInfo);
var attname = ReadNonNullableString(conn.ReadBuffer);
var atttypid = uint.Parse(ReadNonNullableString(conn.ReadBuffer), NumberFormatInfo.InvariantInfo);
if (oid != currentOID)
{
currentOID = oid;
if (!byOID.TryGetValue(oid, out var type)) // See #2020
{
_connectionLogger.LogWarning("Skipping composite type with OID {CompositeTypeOID} which was not found in pg_type", oid);
byOID.Remove(oid);
skipCurrent = true;
continue;
}
currentComposite = type as PostgresCompositeType;
if (currentComposite == null)
{
_connectionLogger.LogWarning("Type {TypeName} was referenced as a composite type but is a {type}", type.Name, type.GetType());
byOID.Remove(oid);
skipCurrent = true;
continue;
}
skipCurrent = false;
}
if (skipCurrent)
continue;
if (!byOID.TryGetValue(atttypid, out var fieldType)) // See #2020
{
_connectionLogger.LogWarning("Skipping composite type '{CompositeTypeName}' with field '{fieldName}' with type OID '{FieldTypeOID}', which could not be resolved to a PostgreSQL type.",
currentComposite!.DisplayName, attname, atttypid);
byOID.Remove(oid);
skipCurrent = true;
continue;
}
currentComposite!.MutableFields.Add(new PostgresCompositeType.Field(attname, fieldType));
}
Expect<CommandCompleteMessage>(msg, conn);
if (isReplicationConnection)
Expect<ReadyForQueryMessage>(await conn.ReadMessage(async).ConfigureAwait(false), conn);
if (SupportsEnumTypes)
{
// Then load the | fields |
csharp | xunit__xunit | src/xunit.v3.core/Utility/DelegatingMessageSink.cs | {
"start": 421,
"end": 1424
} | public class ____(
IMessageSink innerSink,
Action<IMessageSinkMessage>? callback = null) :
IMessageSink
{
readonly Action<IMessageSinkMessage>? callback = callback;
readonly IMessageSink innerSink = Guard.ArgumentNotNull(innerSink);
/// <inheritdoc/>
public virtual bool OnMessage(IMessageSinkMessage message)
{
callback?.Invoke(message);
return innerSink.OnMessage(message);
}
}
/// <summary>
/// Implementation of <see cref="IMessageSink" /> that delegates to another implementation of
/// <see cref="IMessageSink" /> while calling into an optional callback for each message. In addition,
/// it issues a <see cref="Finished" /> event when a message of the type <typeparamref name="TFinalMessage"/>
/// is seen and records the final message for later retrieval.
/// </summary>
/// <typeparam name="TFinalMessage">The type of the T final message.</typeparam>
/// <param name="innerSink">The inner message sink.</param>
/// <param name="callback">The callback.</param>
| DelegatingMessageSink |
csharp | unoplatform__uno | src/Uno.UI/Generated/3.0.0.0/Microsoft.UI.Input/CrossSlidingEventArgs.cs | {
"start": 293,
"end": 2562
} | public partial class ____
{
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
internal CrossSlidingEventArgs()
{
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Microsoft.UI.Input.CrossSlidingState CrossSlidingState
{
get
{
throw new global::System.NotImplementedException("The member CrossSlidingState CrossSlidingEventArgs.CrossSlidingState is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=CrossSlidingState%20CrossSlidingEventArgs.CrossSlidingState");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Microsoft.UI.Input.PointerDeviceType PointerDeviceType
{
get
{
throw new global::System.NotImplementedException("The member PointerDeviceType CrossSlidingEventArgs.PointerDeviceType is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=PointerDeviceType%20CrossSlidingEventArgs.PointerDeviceType");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Foundation.Point Position
{
get
{
throw new global::System.NotImplementedException("The member Point CrossSlidingEventArgs.Position is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=Point%20CrossSlidingEventArgs.Position");
}
}
#endif
// Forced skipping of method Microsoft.UI.Input.CrossSlidingEventArgs.CrossSlidingState.get
// Forced skipping of method Microsoft.UI.Input.CrossSlidingEventArgs.PointerDeviceType.get
// Forced skipping of method Microsoft.UI.Input.CrossSlidingEventArgs.Position.get
}
}
| CrossSlidingEventArgs |
csharp | dotnet__maui | src/Compatibility/Material/src/Android/MaterialDatePickerRenderer.cs | {
"start": 312,
"end": 1720
} | public class ____ : DatePickerRendererBase<MaterialPickerTextInputLayout>, ITabStop
{
MaterialPickerTextInputLayout _textInputLayout;
MaterialPickerEditText _textInputEditText;
public MaterialDatePickerRenderer(Context context) : base(MaterialContextThemeWrapper.Create(context))
{
}
protected override AView ControlUsedForAutomation => EditText;
protected override EditText EditText => _textInputEditText;
protected override MaterialPickerTextInputLayout CreateNativeControl()
{
LayoutInflater inflater = LayoutInflater.FromContext(Context);
var view = inflater.Inflate(Resource.Layout.MaterialPickerTextInput, null);
_textInputLayout = (MaterialPickerTextInputLayout)view;
_textInputEditText = _textInputLayout.FindViewById<MaterialPickerEditText>(Resource.Id.materialformsedittext);
return _textInputLayout;
}
protected override void OnElementChanged(ElementChangedEventArgs<DatePicker> e)
{
base.OnElementChanged(e);
_textInputLayout.SetHint(string.Empty, Element);
UpdateBackgroundColor();
}
protected override void UpdateBackgroundColor() =>
_textInputLayout?.ApplyBackgroundColor(Element.BackgroundColor, Element.TextColor);
protected override void UpdateTextColor() => ApplyTheme();
void ApplyTheme() => _textInputLayout?.ApplyTheme(Element.TextColor, Color.Default);
AView ITabStop.TabStop => EditText;
}
} | MaterialDatePickerRenderer |
csharp | OrchardCMS__OrchardCore | src/OrchardCore.Modules/OrchardCore.ContentPreview/Settings/PreviewPartSettingsDisplayDriver.cs | {
"start": 384,
"end": 2143
} | public sealed class ____ : ContentTypePartDefinitionDisplayDriver<PreviewPart>
{
private readonly ILiquidTemplateManager _templateManager;
internal readonly IStringLocalizer S;
public PreviewPartSettingsDisplayDriver(
ILiquidTemplateManager templateManager,
IStringLocalizer<PreviewPartSettingsDisplayDriver> localizer)
{
_templateManager = templateManager;
S = localizer;
}
public override IDisplayResult Edit(ContentTypePartDefinition contentTypePartDefinition, BuildEditorContext context)
{
return Initialize<PreviewPartSettingsViewModel>("PreviewPartSettings_Edit", model =>
{
var settings = contentTypePartDefinition.GetSettings<PreviewPartSettings>();
model.Pattern = settings.Pattern;
model.PreviewPartSettings = settings;
}).Location("Content");
}
public override async Task<IDisplayResult> UpdateAsync(ContentTypePartDefinition contentTypePartDefinition, UpdateTypePartEditorContext context)
{
var model = new PreviewPartSettingsViewModel();
await context.Updater.TryUpdateModelAsync(model, Prefix,
m => m.Pattern);
if (!string.IsNullOrEmpty(model.Pattern) && !_templateManager.Validate(model.Pattern, out var errors))
{
context.Updater.ModelState.AddModelError(nameof(model.Pattern), S["Pattern doesn't contain a valid Liquid expression. Details: {0}", string.Join(" ", errors)]);
}
else
{
context.Builder.WithSettings(new PreviewPartSettings
{
Pattern = model.Pattern,
});
}
return Edit(contentTypePartDefinition, context);
}
}
| PreviewPartSettingsDisplayDriver |
csharp | mongodb__mongo-csharp-driver | tests/MongoDB.Driver.Tests/Linq/Linq3ImplementationWithLinq2Tests/Translators/LegacyPredicateTranslatorTests.cs | {
"start": 37494,
"end": 39280
} | private class ____
{
public ObjectId Id { get; set; }
[BsonElement("x")]
public int X { get; set; }
[BsonElement("lx")]
public long LX { get; set; }
[BsonElement("y")]
public int Y { get; set; }
[BsonElement("d")]
public D D { get; set; }
[BsonElement("da")]
public List<D> DA { get; set; }
[BsonElement("s")]
[BsonIgnoreIfNull]
public string S { get; set; }
[BsonElement("a")]
[BsonIgnoreIfNull]
public int[] A { get; set; }
[BsonElement("b")]
public bool B { get; set; }
[BsonElement("l")]
[BsonIgnoreIfNull]
public List<int> L { get; set; }
[BsonElement("dbref")]
[BsonIgnoreIfNull]
public MongoDBRef DBRef { get; set; }
[BsonElement("e")]
[BsonIgnoreIfDefault]
[BsonRepresentation(BsonType.String)]
public E E { get; set; }
[BsonElement("en")]
[BsonRepresentation(BsonType.String)]
public E? ENullable { get; set; }
[BsonElement("ea")]
[BsonIgnoreIfNull]
public E[] EA { get; set; }
[BsonElement("f")]
public F F { get; set; }
[BsonElement("sa")]
[BsonIgnoreIfNull]
public string[] SA { get; set; }
[BsonElement("ba")]
[BsonIgnoreIfNull]
public bool[] BA { get; set; }
[BsonElement("date")]
public DateTime Date { get; set; }
[BsonElement("nuldub")]
public double? NullableDouble { get; set; }
}
| C |
csharp | MassTransit__MassTransit | src/MassTransit.Abstractions/JobService/Contracts/JobService/JobAttemptCompleted.cs | {
"start": 107,
"end": 450
} | public interface ____
{
Guid JobId { get; }
Guid AttemptId { get; }
int RetryAttempt { get; }
DateTime Timestamp { get; }
TimeSpan Duration { get; }
Dictionary<string, object>? InstanceProperties { get; }
Dictionary<string, object>? JobTypeProperties { get; }
}
}
| JobAttemptCompleted |
csharp | unoplatform__uno | src/SamplesApp/UITests.Shared/Windows_UI_Xaml/UIElementTests/UIElement_Clipping.cs | {
"start": 167,
"end": 292
} | partial class ____ : UserControl
{
public UIElement_Clipping()
{
this.InitializeComponent();
}
}
}
| UIElement_Clipping |
csharp | nopSolutions__nopCommerce | src/Plugins/Nop.Plugin.Shipping.UPS/API/Rates/RateClient.cs | {
"start": 164776,
"end": 165354
} | public partial class ____
{
/// <summary>
/// Identifies the Chemcial Record. Required if SubVersion is greater than or equal to 1701.
/// </summary>
[Newtonsoft.Json.JsonProperty("ChemicalRecordIdentifier", Required = Newtonsoft.Json.Required.DisallowNull, NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore)]
[System.ComponentModel.DataAnnotations.StringLength(3, MinimumLength = 1)]
public string ChemicalRecordIdentifier { get; set; }
/// <summary>
/// This is the hazard | HazMat_HazMatChemicalRecord |
csharp | neuecc__MessagePack-CSharp | tests/MessagePack.SourceGenerator.Tests/Resources/InterfaceUnion/Formatters.MessagePack.GeneratedMessagePackResolver.UnionContainerFormatter.g.cs | {
"start": 314,
"end": 1643
} | internal sealed class ____ : MsgPack::Formatters.IMessagePackFormatter<global::UnionContainer>
{
public void Serialize(ref MsgPack::MessagePackWriter writer, global::UnionContainer value, MsgPack::MessagePackSerializerOptions options)
{
if (value == null)
{
writer.WriteNil();
return;
}
MsgPack::IFormatterResolver formatterResolver = options.Resolver;
writer.WriteArrayHeader(1);
MsgPack::FormatterResolverExtensions.GetFormatterWithVerify<global::IMyType>(formatterResolver).Serialize(ref writer, value.Value, options);
}
public global::UnionContainer Deserialize(ref MsgPack::MessagePackReader reader, MsgPack::MessagePackSerializerOptions options)
{
if (reader.TryReadNil())
{
return null;
}
options.Security.DepthStep(ref reader);
MsgPack::IFormatterResolver formatterResolver = options.Resolver;
var length = reader.ReadArrayHeader();
var ____result = new global::UnionContainer();
for (int i = 0; i < length; i++)
{
switch (i)
{
case 0:
____result.Value = MsgPack::FormatterResolverExtensions.GetFormatterWithVerify<global::IMyType>(formatterResolver).Deserialize(ref reader, options);
break;
default:
reader.Skip();
break;
}
}
reader.Depth--;
return ____result;
}
}
}
}
| UnionContainerFormatter |
csharp | dotnet__orleans | test/TesterInternal/PlacementFilterTests/SiloMetadataPlacementFilterTests.cs | {
"start": 11283,
"end": 11667
} | public class ____(ILocalSiloDetails localSiloDetails) : Grain, IPreferredMatchMultipleFilteredGrain
{
public Task<SiloAddress> GetHostingSilo() => Task.FromResult(localSiloDetails.SiloAddress);
}
#pragma warning disable ORLEANSEXP004
[PreferredMatchSiloMetadataPlacementFilter(["not.there"]), RandomPlacement]
#pragma warning restore ORLEANSEXP004
| PreferredMatchMultipleFilteredGrain |
csharp | MassTransit__MassTransit | tests/MassTransit.EntityFrameworkIntegration.Tests/UsingEntityFramework_Specs.cs | {
"start": 469,
"end": 4364
} | public class ____ :
InMemoryTestFixture
{
[Test]
[Explicit]
public async Task Should_handle_the_big_load()
{
var tasks = new List<Task>();
var sagaIds = new Guid[200];
for (var i = 0; i < 200; i++)
{
var correlationId = Guid.NewGuid();
tasks.Add(InputQueueSendEndpoint.Send(new GirlfriendYelling { CorrelationId = correlationId }));
sagaIds[i] = correlationId;
}
await Task.WhenAll(tasks);
for (var i = 0; i < 200; i++)
{
Guid? sagaId = await _repository.Value.ShouldContainSaga(sagaIds[i], TestTimeout);
Assert.That(sagaId.HasValue, Is.True);
}
}
[Test]
public async Task Should_have_removed_the_state_machine()
{
var correlationId = Guid.NewGuid();
await InputQueueSendEndpoint.Send(new GirlfriendYelling { CorrelationId = correlationId });
Guid? sagaId = await _repository.Value.ShouldContainSaga(correlationId, TestTimeout);
Assert.That(sagaId.HasValue, Is.True);
await InputQueueSendEndpoint.Send(new SodOff { CorrelationId = correlationId });
sagaId = await _repository.Value.ShouldNotContainSaga(correlationId, TestTimeout);
Assert.That(sagaId.HasValue, Is.False);
}
[Test]
public async Task Should_have_the_state_machine()
{
var correlationId = Guid.NewGuid();
await InputQueueSendEndpoint.Send(new GirlfriendYelling { CorrelationId = correlationId });
Guid? sagaId = await _repository.Value.ShouldContainSaga(correlationId, TestTimeout);
Assert.That(sagaId.HasValue, Is.True);
await InputQueueSendEndpoint.Send(new GotHitByACar { CorrelationId = correlationId });
sagaId = await _repository.Value.ShouldContainSagaInState(correlationId, _machine, _machine.Dead, TestTimeout);
Assert.That(sagaId.HasValue, Is.True);
var instance = await GetSaga(correlationId);
Assert.That(instance.Screwed, Is.True);
}
SuperShopper _machine;
readonly ISagaDbContextFactory<ShoppingChore> _sagaDbContextFactory;
readonly Lazy<ISagaRepository<ShoppingChore>> _repository;
protected override void ConfigureInMemoryReceiveEndpoint(IInMemoryReceiveEndpointConfigurator configurator)
{
_machine = new SuperShopper();
configurator.UseMessageRetry(x =>
{
x.Handle<DbUpdateException>();
x.Immediate(5);
});
configurator.StateMachineSaga(_machine, _repository.Value);
}
public When_using_EntityFramework()
{
_sagaDbContextFactory = new DelegateSagaDbContextFactory<ShoppingChore>(
() => new ShoppingChoreSagaDbContext(LocalDbConnectionStringProvider.GetLocalDbConnectionString()));
_repository = new Lazy<ISagaRepository<ShoppingChore>>(() => EntityFrameworkSagaRepository<ShoppingChore>.CreatePessimistic(_sagaDbContextFactory));
}
[OneTimeTearDown]
public void Teardown()
{
}
async Task<ShoppingChore> GetSaga(Guid id)
{
using (var dbContext = _sagaDbContextFactory.Create())
{
var sagaInstance = dbContext.Set<ShoppingChore>().SingleOrDefault(x => x.CorrelationId == id);
return sagaInstance;
}
}
protected override void ConfigureInMemoryBus(IInMemoryBusFactoryConfigurator configurator)
{
base.ConfigureInMemoryBus(configurator);
configurator.ConcurrentMessageLimit = 16;
}
}
| When_using_EntityFramework |
csharp | dotnet__tye | src/Microsoft.Tye.Core/DockerCompose/DockerComposeParser.cs | {
"start": 424,
"end": 16179
} | public class ____ : IDisposable
{
private YamlStream _yamlStream;
private FileInfo? _fileInfo;
private TextReader _reader;
public DockerComposeParser(string yamlContent, FileInfo? fileInfo = null)
: this(new StringReader(yamlContent), fileInfo)
{
}
public DockerComposeParser(FileInfo fileInfo)
: this(fileInfo.OpenText(), fileInfo)
{
}
internal DockerComposeParser(TextReader reader, FileInfo? fileInfo = null)
{
_reader = reader;
_yamlStream = new YamlStream();
_fileInfo = fileInfo;
}
public ConfigApplication ParseConfigApplication()
{
try
{
_yamlStream.Load(_reader);
}
catch (YamlException ex)
{
throw new TyeYamlException(ex.Start, "Unable to parse tye.yaml. See inner exception.", ex);
}
var app = new ConfigApplication();
// TODO assuming first document.
var document = _yamlStream.Documents[0];
var node = document.RootNode;
ThrowIfNotYamlMapping(node);
app.Source = _fileInfo!;
Parse((YamlMappingNode)node, app);
app.Name ??= NameInferer.InferApplicationName(_fileInfo!);
// TODO confirm if these are ever null.
foreach (var service in app.Services)
{
service.Bindings ??= new List<ConfigServiceBinding>();
service.Configuration ??= new List<ConfigConfigurationSource>();
service.Volumes ??= new List<ConfigVolume>();
service.Tags ??= new List<string>();
}
foreach (var ingress in app.Ingress)
{
ingress.Bindings ??= new List<ConfigIngressBinding>();
ingress.Rules ??= new List<ConfigIngressRule>();
ingress.Tags ??= new List<string>();
}
return app;
}
public static string GetScalarValue(YamlNode node)
{
if (node.NodeType != YamlNodeType.Scalar)
{
throw new TyeYamlException(node.Start,
CoreStrings.FormatUnexpectedType(YamlNodeType.Scalar.ToString(), node.NodeType.ToString()));
}
return ((YamlScalarNode)node).Value!;
}
public static string GetScalarValue(string key, YamlNode node)
{
if (node.NodeType != YamlNodeType.Scalar)
{
throw new TyeYamlException(node.Start, CoreStrings.FormatExpectedYamlScalar(key));
}
return ((YamlScalarNode)node).Value!;
}
public static void ThrowIfNotYamlSequence(string key, YamlNode node)
{
if (node.NodeType != YamlNodeType.Sequence)
{
throw new TyeYamlException(node.Start, CoreStrings.FormatExpectedYamlSequence(key));
}
}
public static void ThrowIfNotYamlMapping(YamlNode node)
{
if (node.NodeType != YamlNodeType.Mapping)
{
throw new TyeYamlException(node.Start,
CoreStrings.FormatUnexpectedType(YamlNodeType.Mapping.ToString(), node.NodeType.ToString()));
}
}
public void Dispose()
{
_reader.Dispose();
}
internal static void Parse(YamlMappingNode node, ConfigApplication app)
{
foreach (var child in node.Children)
{
var key = YamlParser.GetScalarValue(child.Key);
switch (key)
{
case "version":
break;
case "volumes":
break;
case "services":
ParseServiceList((child.Value as YamlMappingNode)!, app);
break;
case "networks":
break;
case "configs":
break;
case "secrets":
break;
default:
throw new TyeYamlException(child.Key.Start, CoreStrings.FormatUnrecognizedKey(key));
}
}
}
private static void ParseServiceList(YamlMappingNode node, ConfigApplication app)
{
foreach (var child in node.Children)
{
var service = new ConfigService();
service.Name = YamlParser.GetScalarValue(child.Key);
ParseService((child.Value as YamlMappingNode)!, service);
app.Services.Add(service);
}
}
private static void ParseService(YamlMappingNode node, ConfigService service)
{
foreach (var child in node.Children)
{
var key = YamlParser.GetScalarValue(child.Key);
switch (key)
{
case "build":
ParseBuild((child.Value as YamlMappingNode)!, service);
break;
case "cap_add":
break;
case "cap_drop":
break;
case "cgroup_parent":
break;
case "command":
break;
case "configs":
break;
case "container_name":
break;
case "credential_spec":
break;
case "depends_on":
break;
case "deploy":
break;
case "devices":
break;
case "dns":
break;
case "dns_search":
break;
case "endpoint":
break;
case "env_file":
break;
case "environment":
ParseEnvironment(child.Value, service);
break;
case "expose":
break;
case "external_links":
break;
case "extra_hosts":
break;
case "healthcheck":
break;
case "image":
service.Image = YamlParser.GetScalarValue(child.Value);
break;
case "init":
break;
case "isolation":
break;
case "labels":
break;
case "links":
break;
case "logging":
break;
case "network_mode":
break;
case "networks":
break;
case "pid":
break;
case "ports":
ParsePortSequence((child.Value as YamlSequenceNode)!, service);
break;
case "restart":
break;
case "secrets":
break;
case "security_opt":
break;
case "stop_grace_period":
break;
case "stop_signal":
break;
case "sysctls":
break;
case "tmpfs":
break;
case "ulimits":
break;
case "userns_mode":
break;
case "volumes":
break;
case "user":
break;
case "working_dir":
break;
case "domainname":
break;
case "hostname":
break;
case "ipc":
break;
case "mac_address":
break;
case "privileged":
break;
case "read_only":
break;
case "shm_size":
break;
case "stdin_open":
break;
case "tty":
break;
default:
throw new TyeYamlException(child.Key.Start, CoreStrings.FormatUnrecognizedKey(key));
}
}
}
private static void ParseEnvironment(YamlNode node, ConfigService service)
{
if (node is YamlSequenceNode sequenceNode)
{
foreach (var arg in sequenceNode)
{
var configItem = new ConfigConfigurationSource();
var argString = YamlParser.GetScalarValue(arg);
if (argString.Contains('='))
{
var parts = argString.Split('=');
configItem.Name = parts[0];
configItem.Value = parts[1];
}
else
{
configItem.Name = argString;
}
service.Configuration.Add(configItem);
}
}
else
{
var mappingNode = (node as YamlMappingNode)!;
foreach (var arg in mappingNode)
{
var configItem = new ConfigConfigurationSource();
configItem.Name = YamlParser.GetScalarValue(arg.Key);
configItem.Value = YamlParser.GetScalarValue(arg.Value);
service.Configuration.Add(configItem);
}
}
}
private static void ParsePortSequence(YamlSequenceNode portSequence, ConfigService service)
{
foreach (var port in portSequence)
{
var portString = YamlParser.GetScalarValue(port);
var binding = new ConfigServiceBinding();
if (portString.Contains(':'))
{
var ports = portString.Split(':');
binding.Port = int.Parse(ports[0]);
binding.ContainerPort = int.Parse(ports[1]);
}
else
{
binding.Port = int.Parse(portString);
binding.ContainerPort = int.Parse(portString);
}
// TODO how to specify protocol with docker compose. Using http for now.
binding.Protocol = "http";
service.Bindings.Add(binding);
}
}
private static void ParseVolumes(YamlMappingNode node, ConfigApplication app)
{
foreach (var child in node.Children)
{
var key = YamlParser.GetScalarValue(child.Key);
switch (key)
{
case "driver":
break;
case "driver_opts":
break;
case "external":
break;
case "labels":
break;
case "name":
break;
default:
throw new TyeYamlException(child.Key.Start, CoreStrings.FormatUnrecognizedKey(key));
}
}
}
private static void ParseNetworks(YamlMappingNode node, ConfigApplication app)
{
foreach (var child in node.Children)
{
var key = YamlParser.GetScalarValue(child.Key);
switch (key)
{
case "driver":
break;
case "driver_opts":
break;
case "attachable":
break;
case "enable_ipv6":
break;
case "ipam":
break;
case "internal":
break;
case "external":
break;
case "labels":
break;
case "name":
break;
default:
throw new TyeYamlException(child.Key.Start, CoreStrings.FormatUnrecognizedKey(key));
}
}
}
private static readonly string[] FileFormats = new[] { "*.csproj", "*.fsproj" };
// Build seems like it would just work, context would just point to the csproj if no dockerfile is present.
private static void ParseBuild(YamlMappingNode node, ConfigService service)
{
foreach (var child in node.Children)
{
var key = YamlParser.GetScalarValue(child.Key);
switch (key)
{
case "context":
// Potentially find project or context based on that.
// could potentially specify a project here instead?
var folder = new DirectoryInfo(YamlParser.GetScalarValue(child.Value));
foreach (var format in FileFormats)
{
var projs = Directory.GetFiles(folder.FullName, format);
if (projs.Length == 1)
{
service.Project = projs[0];
break;
}
if (projs.Length > 1)
{
throw new TyeYamlException("Multiple proj files found in directory, have only a single proj file in the context directory.");
}
}
// check if folder has proj file, and use that.
break;
case "dockerfile":
service.DockerFile = YamlParser.GetScalarValue(child.Value);
break;
case "args":
//service.Configuration = ParseDockerBuildArgs((child.Value as YamlSequenceNode)!, service);
break;
case "cache_from":
break;
case "labels":
break;
case "network":
break;
case "shm_size":
break;
case "target":
break;
default:
throw new TyeYamlException(child.Key.Start, CoreStrings.FormatUnrecognizedKey(key));
}
}
}
//private static List<ConfigConfigurationSource> ParseDockerBuildArgs(YamlSequenceNode node, ConfigService service)
//{
//}
}
}
| DockerComposeParser |
csharp | grandnode__grandnode2 | src/Modules/Grand.Module.Api/Commands/Models/Catalog/DeleteProductAttributeMappingCommand.cs | {
"start": 107,
"end": 279
} | public class ____ : IRequest<bool>
{
public ProductDto Product { get; set; }
public ProductAttributeMappingDto Model { get; set; }
} | DeleteProductAttributeMappingCommand |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.