content stringlengths 5 1.04M | avg_line_length float64 1.75 12.9k | max_line_length int64 2 244k | alphanum_fraction float64 0 0.98 | licenses list | repository_name stringlengths 7 92 | path stringlengths 3 249 | size int64 5 1.04M | lang stringclasses 2
values |
|---|---|---|---|---|---|---|---|---|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Elasticsearch.Net;
namespace Nest
{
/// <summary>
/// Observable that indexes a collection of documents into Elasticsearch by partitioning the
/// collection into pages and sending each page through the _bulk API, with configurable
/// parallelism and a back off/retry policy for documents that fail to index.
/// One <see cref="IBulkAllResponse"/> is pushed to the observer per successfully indexed page.
/// </summary>
public class BulkAllObservable<T> : IDisposable, IObservable<IBulkAllResponse> where T : class
{
    private readonly int _backOffRetries;
    private readonly TimeSpan _backOffTime;
    private readonly int _bulkSize;
    private readonly IElasticClient _client;
    private readonly CancellationToken _compositeCancelToken;
    private readonly CancellationTokenSource _compositeCancelTokenSource;
    private readonly Action<IBulkResponseItem, T> _droppedDocumentCallBack;
    private readonly int _maxDegreeOfParallelism;
    private readonly IBulkAllRequest<T> _partitionedBulkRequest;
    private readonly Func<IBulkResponseItem, T, bool> _retryPredicate;

    // No-op callbacks until Subscribe(BulkAllObserver) wires up the observer's counters.
    private Action _incrementFailed = () => { };
    private Action _incrementRetries = () => { };

    public BulkAllObservable(
        IElasticClient client,
        IBulkAllRequest<T> partitionedBulkRequest,
        CancellationToken cancellationToken = default(CancellationToken)
    )
    {
        _client = client;
        _partitionedBulkRequest = partitionedBulkRequest;
        _backOffRetries = _partitionedBulkRequest.BackOffRetries.GetValueOrDefault(CoordinatedRequestDefaults.BulkAllBackOffRetriesDefault);
        _backOffTime = _partitionedBulkRequest?.BackOffTime?.ToTimeSpan() ?? CoordinatedRequestDefaults.BulkAllBackOffTimeDefault;
        _bulkSize = _partitionedBulkRequest.Size ?? CoordinatedRequestDefaults.BulkAllSizeDefault;
        _retryPredicate = _partitionedBulkRequest.RetryDocumentPredicate ?? RetryBulkActionPredicate;
        _droppedDocumentCallBack = _partitionedBulkRequest.DroppedDocumentCallback ?? DroppedDocumentCallbackDefault;
        _maxDegreeOfParallelism =
            _partitionedBulkRequest.MaxDegreeOfParallelism ?? CoordinatedRequestDefaults.BulkAllMaxDegreeOfParallelismDefault;
        // Linked token source so that both external cancellation and Dispose() stop the operation.
        _compositeCancelTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        _compositeCancelToken = _compositeCancelTokenSource.Token;
    }

    public bool IsDisposed { get; private set; }

    /// <summary>Cancels any in-flight bulk requests and releases the linked token source.</summary>
    public void Dispose()
    {
        IsDisposed = true;
        _compositeCancelTokenSource?.Cancel();
        _compositeCancelTokenSource?.Dispose();
    }

    /// <summary>Starts the bulk operation, pushing one response per indexed page to <paramref name="observer"/>.</summary>
    public IDisposable Subscribe(IObserver<IBulkAllResponse> observer)
    {
        observer.ThrowIfNull(nameof(observer));
        BulkAll(observer);
        return this;
    }

    /// <summary>Starts the bulk operation and wires the observer's failed-buffer/retry counters.</summary>
    public IDisposable Subscribe(BulkAllObserver observer)
    {
        _incrementFailed = observer.IncrementTotalNumberOfFailedBuffers;
        _incrementRetries = observer.IncrementTotalNumberOfRetries;
        return Subscribe((IObserver<IBulkAllResponse>)observer);
    }

    private void BulkAll(IObserver<IBulkAllResponse> observer)
    {
        var documents = _partitionedBulkRequest.Documents;
        var partitioned = new PartitionHelper<T>(documents, _bulkSize);
        // Deliberately not awaited: results and errors flow back through the observer callbacks.
#pragma warning disable 4014
        partitioned.ForEachAsync(
#pragma warning restore 4014
            (buffer, page) => BulkAsync(buffer, page, 0),
            (buffer, response) => observer.OnNext(response),
            ex => OnCompleted(ex, observer),
            _maxDegreeOfParallelism
        );
    }

    private void OnCompleted(Exception exception, IObserver<IBulkAllResponse> observer)
    {
        if (exception != null)
            observer.OnError(exception);
        else
        {
            try
            {
                RefreshOnCompleted();
                observer.OnCompleted();
            }
            catch (Exception e)
            {
                observer.OnError(e);
            }
        }
    }

    /// <summary>Optionally refreshes the target indices once every document has been indexed.</summary>
    private void RefreshOnCompleted()
    {
        if (!_partitionedBulkRequest.RefreshOnCompleted) return;
        var indices = _partitionedBulkRequest.RefreshIndices ?? _partitionedBulkRequest.Index;
        if (indices == null) return;
        var refresh = _client.Refresh(indices);
        if (!refresh.IsValid) throw Throw("Refreshing after all documents have indexed failed", refresh.ApiCall);
    }

    private async Task<IBulkAllResponse> BulkAsync(IList<T> buffer, long page, int backOffRetries)
    {
        _compositeCancelToken.ThrowIfCancellationRequested();
        var r = _partitionedBulkRequest;
        var response = await _client.BulkAsync(s =>
        {
            s.Index(r.Index).Type(r.Type);
            if (r.BufferToBulk != null) r.BufferToBulk(s, buffer);
            else s.IndexMany(buffer);
            if (!string.IsNullOrEmpty(r.Pipeline)) s.Pipeline(r.Pipeline);
#pragma warning disable 618
            if (r.Refresh.HasValue) s.Refresh(r.Refresh.Value);
#pragma warning restore 618
            if (r.Routing != null) s.Routing(r.Routing);
            if (r.WaitForActiveShards.HasValue) s.WaitForActiveShards(r.WaitForActiveShards.ToString());
            return s;
        }, _compositeCancelToken)
        .ConfigureAwait(false);
        _compositeCancelToken.ThrowIfCancellationRequested();
        // Transport-level failure: decide between retrying the whole buffer and halting.
        if (!response.ApiCall.Success)
            return await HandleBulkRequest(buffer, page, backOffRetries, response).ConfigureAwait(false);
        var documentsWithResponse = response.Items.Zip(buffer, Tuple.Create).ToList();
        HandleDroppedDocuments(documentsWithResponse, response);
        // Documents the retry predicate says are worth another attempt (default: HTTP 429).
        var retryDocuments = documentsWithResponse
            .Where(x => !x.Item1.IsValid && _retryPredicate(x.Item1, x.Item2))
            .Select(x => x.Item2)
            .ToList();
        if (retryDocuments.Count > 0 && backOffRetries < _backOffRetries)
            return await RetryDocuments(page, ++backOffRetries, retryDocuments).ConfigureAwait(false);
        if (retryDocuments.Count > 0)
            throw ThrowOnBadBulk(response, $"Bulk indexing failed and after retrying {backOffRetries} times");
        _partitionedBulkRequest.BackPressure?.Release();
        return new BulkAllResponse { Retries = backOffRetries, Page = page };
    }

    /// <summary>Invokes the dropped-document callback for unretryable failures and optionally halts.</summary>
    private void HandleDroppedDocuments(List<Tuple<IBulkResponseItem, T>> documentsWithResponse, IBulkResponse response)
    {
        var droppedDocuments = documentsWithResponse
            .Where(x => !x.Item1.IsValid && !_retryPredicate(x.Item1, x.Item2))
            .ToList();
        if (droppedDocuments.Count <= 0) return;
        foreach (var dropped in droppedDocuments) _droppedDocumentCallBack(dropped.Item1, dropped.Item2);
        if (!_partitionedBulkRequest.ContinueAfterDroppedDocuments)
            throw ThrowOnBadBulk(response, "BulkAll halted after receiving failures that can not be retried from _bulk");
    }

    /// <summary>Classifies a failed transport call: retry the buffer or halt the whole operation.</summary>
    private async Task<IBulkAllResponse> HandleBulkRequest(IList<T> buffer, long page, int backOffRetries, IBulkResponse response)
    {
        var clientException = response.ApiCall.OriginalException as ElasticsearchClientException;
        //TODO expose this on IAPiCallDetails as RetryLater in 7.0?
        var failureReason = clientException?.FailureReason.GetValueOrDefault(PipelineFailure.Unexpected);
        switch (failureReason)
        {
            case PipelineFailure.MaxRetriesReached:
                //TODO move this to its own PipelineFailure classification in 7.0
                if (response.ApiCall.AuditTrail.Last().Event == AuditEvent.FailedOverAllNodes)
                    throw ThrowOnBadBulk(response, "BulkAll halted after attempted bulk failed over all the active nodes");
                return await RetryDocuments(page, ++backOffRetries, buffer).ConfigureAwait(false);
            case PipelineFailure.CouldNotStartSniffOnStartup:
            case PipelineFailure.BadAuthentication:
            case PipelineFailure.NoNodesAttempted:
            case PipelineFailure.SniffFailure:
            case PipelineFailure.Unexpected:
                // Unrecoverable pipeline failures: do not retry.
                throw ThrowOnBadBulk(response,
                    $"BulkAll halted after {nameof(PipelineFailure)}{failureReason.GetStringValue()} from _bulk");
            default:
                return await RetryDocuments(page, ++backOffRetries, buffer).ConfigureAwait(false);
        }
    }

    /// <summary>Waits out the back off period, then re-sends the given documents as a new bulk request.</summary>
    private async Task<IBulkAllResponse> RetryDocuments(long page, int backOffRetries, IList<T> retryDocuments)
    {
        _incrementRetries();
        await Task.Delay(_backOffTime, _compositeCancelToken).ConfigureAwait(false);
        return await BulkAsync(retryDocuments, page, backOffRetries).ConfigureAwait(false);
    }

    private Exception ThrowOnBadBulk(IElasticsearchResponse response, string message)
    {
        _incrementFailed();
        _partitionedBulkRequest.BackPressure?.Release();
        return Throw(message, response.ApiCall);
    }

    private static ElasticsearchClientException Throw(string message, IApiCallDetails details) =>
        new ElasticsearchClientException(PipelineFailure.BadResponse, message, details);

    // Default retry policy: retry documents rejected with HTTP 429 (Too Many Requests).
    private static bool RetryBulkActionPredicate(IBulkResponseItem bulkResponseItem, T d) => bulkResponseItem.Status == 429;

    private static void DroppedDocumentCallbackDefault(IBulkResponseItem bulkResponseItem, T d) { }
}
}
| 38.186047 | 135 | 0.770889 | [
"Apache-2.0"
] | Henr1k80/elasticsearch-net | src/Nest/Document/Multiple/BulkAll/BulkAllObservable.cs | 8,212 | C# |
// <copyright file="AppConfigurationClientTest.cs" company="3M">
// Copyright (c) 3M. All rights reserved.
// </copyright>
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Azure;
using Azure.Data.AppConfiguration;
using Mmm.Iot.Common.Services.Config;
using Mmm.Iot.Common.Services.External.AppConfiguration;
using Mmm.Iot.Common.Services.Models;
using Mmm.Iot.Common.TestHelpers;
using Moq;
using Xunit;
namespace Mmm.Iot.Common.Services.Test
{
/// <summary>
/// Unit tests for <see cref="AppConfigurationClient"/>, using a mocked
/// <see cref="ConfigurationClient"/> injected through <see cref="IConfigurationClientFactory"/>.
/// </summary>
public class AppConfigurationClientTest
{
    private const string MockConnectionString = @"Endpoint=https://abc.azconfig.io;Id=1:/1;Secret=1234";
    private readonly Mock<ConfigurationClient> client;
    private readonly Mock<AppConfig> mockConfig;
    private readonly Mock<Response> mockResponse;
    private readonly AppConfigurationClient appConfigClient;
    private readonly Mock<IConfigurationClientFactory> mockFactory;
    private readonly Random rand;

    public AppConfigurationClientTest()
    {
        this.mockConfig = new Mock<AppConfig>();
        this.mockConfig.Object.AppConfigurationConnectionString = MockConnectionString;
        this.client = new Mock<ConfigurationClient>(MockConnectionString);
        this.mockResponse = new Mock<Response>();
        this.rand = new Random();
        // The client under test resolves its ConfigurationClient through the factory,
        // which lets these tests substitute the mock above.
        this.mockFactory = new Mock<IConfigurationClientFactory>();
        this.mockFactory
            .Setup(x => x.Create())
            .Returns(this.client.Object);
        this.appConfigClient = new AppConfigurationClient(this.mockConfig.Object, this.mockFactory.Object);
    }

    [Fact]
    public async Task SetAppConfigByKeyAndValueTest()
    {
        string key = this.rand.NextString();
        string value = this.rand.NextString();
        Response<ConfigurationSetting> response = Response.FromValue(ConfigurationModelFactory.ConfigurationSetting("test", "test"), this.mockResponse.Object);
        this.client.Setup(c => c.SetConfigurationSettingAsync(It.IsAny<ConfigurationSetting>(), true, It.IsAny<CancellationToken>()))
            .Returns(Task.FromResult(response));

        // Should complete without throwing.
        await this.appConfigClient.SetValueAsync(key, value);
        Assert.True(true);
    }

    [Fact]
    public void GetAppConfigValueByKeyTest()
    {
        string key = this.rand.NextString();
        Response<ConfigurationSetting> response = Response.FromValue(ConfigurationModelFactory.ConfigurationSetting("test", "test"), this.mockResponse.Object);
        this.client.Setup(c => c.GetConfigurationSetting(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .Returns(response);

        string result = this.appConfigClient.GetValue(key);

        Assert.Equal(result, response.Value.Value);
    }

    [Fact]
    public async Task GetAppConfigStatusReturnsHealthyTest()
    {
        // The status check reads the configured location and the "test" key.
        this.mockConfig.Setup(x => x.Global.Location).Returns("eastus");
        Response<ConfigurationSetting> response = Response.FromValue(ConfigurationModelFactory.ConfigurationSetting("test", "test"), this.mockResponse.Object);
        this.client.Setup(c => c.GetConfigurationSettingAsync("test", It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(response);

        StatusResultServiceModel result = await this.appConfigClient.StatusAsync();

        Assert.True(result.IsHealthy);
    }

    [Fact]
    public async Task GetAppConfigStatusReturnsUnhealthyOnExceptionTest()
    {
        // An empty setting/key makes the status probe fail, which must surface as unhealthy.
        Response<ConfigurationSetting> response = Response.FromValue(ConfigurationModelFactory.ConfigurationSetting(string.Empty, string.Empty), this.mockResponse.Object);
        this.client.Setup(c => c.GetConfigurationSettingAsync(string.Empty, It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(response);

        StatusResultServiceModel result = await this.appConfigClient.StatusAsync();

        Assert.False(result.IsHealthy);
    }

    [Fact]
    public async Task DeleteAppConfigKeyAsyncTest()
    {
        string key = this.rand.NextString();
        this.client
            .Setup(x => x.DeleteConfigurationSettingAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .Returns(Task.FromResult(this.mockResponse.Object));

        // Should complete without throwing.
        await this.appConfigClient.DeleteKeyAsync(key);
        Assert.True(true);
    }
}
} | 45.763636 | 175 | 0.676798 | [
"MIT"
] | JonathanAsbury-ACS/azure-iot-platform-dotnet | test/services/common/Services.Test/AppConfigurationClientTest.cs | 5,034 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace DesktopMagicPluginAPI.Inputs
{
/// <summary>
/// Identifies a physical mouse button.
/// </summary>
public enum MouseButton
{
    /// <summary>
    /// The left mouse button.
    /// </summary>
    Left = 0,

    /// <summary>
    /// The middle mouse button.
    /// </summary>
    Middle = 1,

    /// <summary>
    /// The right mouse button.
    /// </summary>
    Right = 2,
}
} | 18.965517 | 38 | 0.530909 | [
"MIT"
] | Stone-Red-Code/DesktopMagic | src/DesktopMagicPluginAPI/Inputs/MouseButton.cs | 552 | C# |
//////////////////////////////////////////////////////////////////////////////////
//
// Author: Sase
// Email: sase@stilsoft.net
//
// This software may be modified and distributed under the terms
// of the MIT license. See the LICENSE file for details.
//
//////////////////////////////////////////////////////////////////////////////////
using CasparCg.AmcpClient.Commands.Query.Common.Response;
namespace CasparCg.AmcpClient.Commands.Query
{
/// <summary>
/// AMCP query that retrieves the current delay of a channel.
/// Wire format: INFO [video_channel:int] DELAY
/// </summary>
public class InfoChannelDelayCommand : AbstractInfoChannelCommandWithSubCommand<InfoChannelDelayCommandResponse>
{
    internal override string SubCommandName { get; } = "DELAY";

    /// <summary>
    /// Creates the command.
    /// </summary>
    /// <param name="channel">Optional channel number to query.</param>
    public InfoChannelDelayCommand(int? channel = null) => Channel = channel;
}
} | 26.228571 | 116 | 0.530501 | [
"MIT"
] | StilSoft/CasparCG.AmcpClient | CasparCG.AmcpClient/Commands/Query/InfoChannelDelayCommand.cs | 920 | C# |
using PaintDotNet;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Text;
namespace PdnBench
{
// Benchmark that repeatedly draws a masked surface onto a destination surface
// through a matrix transform, using either bilinear or nearest-neighbor resampling.
class TransformBenchmark
    : Benchmark
{
    public const int Iterations = 30;

    private Surface dst;
    private MaskedSurface src;
    private Matrix transform;
    private bool highQuality;

    public TransformBenchmark(string name, Surface dst, MaskedSurface src, Matrix transform, bool highQuality)
        : base(name)
    {
        this.dst = dst;
        this.src = src;
        // Clone so the benchmark owns its own copy of the transform.
        this.transform = transform.Clone();
        this.highQuality = highQuality;
    }

    protected override void OnExecute()
    {
        // The algorithm choice is invariant across iterations, so resolve it once.
        ResamplingAlgorithm algorithm = this.highQuality
            ? ResamplingAlgorithm.Bilinear
            : ResamplingAlgorithm.NearestNeighbor;

        for (int iteration = 0; iteration < Iterations; ++iteration)
        {
            this.src.Draw(this.dst, this.transform, algorithm);
        }
    }
}
}
| 27.837838 | 144 | 0.585437 | [
"MIT"
] | Geo3gamer/OpenPDN | extras/PdnBench/TransformBenchmark.cs | 1,030 | C# |
using Sample.Shared.Interfaces;
using Sample.Shared.Repositories;
using Sample.Shared.Services;
using Microsoft.Extensions.DependencyInjection;
namespace Sample.Shared.Configuration
{
public static class AzureSearchtStartupExtensions
{
    /// <summary>
    /// Registers the dependencies required by the AzureSearchFeedService, so the same
    /// wiring can be shared by the .NET Core web app and the CMS app.
    /// </summary>
    /// <param name="services">The service collection to register into.</param>
    /// <returns>The same service collection, to allow further chaining.</returns>
    public static IServiceCollection AddAzureSearchServices(this IServiceCollection services)
    {
        return services
            .AddSingleton<IPageRepository, PageRepository>()
            .AddTransient<IAzureSearchFeedService, AzureSearchFeedService>();
    }
}
}
| 34.583333 | 97 | 0.683133 | [
"MIT"
] | heywills/xperience-cross-platform-service | Sample.Shared/Configuration/AzureSearchtStartupExtensions.cs | 832 | C# |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using System;
using System.Collections.Generic;
using Aliyun.Acs.Core.Transform;
using Aliyun.Acs.Dds.Model.V20151201;
namespace Aliyun.Acs.Dds.Transform.V20151201
{
public class CheckRecoveryConditionResponseUnmarshaller
{
    /// <summary>
    /// Maps the raw service payload held by the unmarshaller context onto a
    /// <see cref="CheckRecoveryConditionResponse"/>.
    /// </summary>
    public static CheckRecoveryConditionResponse Unmarshall(UnmarshallerContext _ctx)
    {
        var checkRecoveryConditionResponse = new CheckRecoveryConditionResponse
        {
            HttpResponse = _ctx.HttpResponse,
            RequestId = _ctx.StringValue("CheckRecoveryCondition.RequestId"),
            DBInstanceName = _ctx.StringValue("CheckRecoveryCondition.DBInstanceName"),
            IsValid = _ctx.BooleanValue("CheckRecoveryCondition.IsValid")
        };

        return checkRecoveryConditionResponse;
    }
}
}
| 40.47619 | 110 | 0.777059 | [
"Apache-2.0"
] | AxiosCros/aliyun-openapi-net-sdk | aliyun-net-sdk-dds/Dds/Transform/V20151201/CheckRecoveryConditionResponseUnmarshaller.cs | 1,700 | C# |
namespace CyclopsSpeedUpgrades
{
using MoreCyclopsUpgrades.API;
using MoreCyclopsUpgrades.API.Upgrades;
using SMLHelper.V2.Crafting;
using SMLHelper.V2.Handlers;
using UnityEngine;
// Craftable Cyclops upgrade module that boosts engine speed; stacking boosters
// trades higher energy consumption for higher speed (see SpeedHandler).
internal class CyclopsSpeedModule : CyclopsUpgrade
{
    // Localization key for the "maximum rating reached" message.
    private const string MaxRatingKey = "CySpeedMaxed";

    // NOTE(review): "Achived" typo kept — renaming this public member would break external references.
    public static string MaxRatingAchived => Language.main.Get(MaxRatingKey);

    // Localization key for the current speed rating message ({0} = booster count, {1} = percent).
    private const string SpeedRatingKey = "CySpeedCurrent";

    // Formats the localized speed rating line for the given booster count and speed multiplier.
    public static string SpeedRatingText(int boosterCount, float multiplier)
    {
        return Language.main.GetFormat(SpeedRatingKey, boosterCount, Mathf.RoundToInt(multiplier * 100f));
    }

    public CyclopsSpeedModule()
        : base("CyclopsSpeedModule",
            "Cyclops Speed Boost Module",
            "Increases the drive power of the cyclops engines, adding greater speeds at the cost of higher energy consumption.\n" +
            $"Can stack up to {SpeedHandler.MaxSpeedBoosters} boosters for maximum effect at highest cost.")
    {
        // Register localization lines once the module itself has been patched into the game.
        OnFinishedPatching += () =>
        {
            LanguageHandler.SetLanguageLine(MaxRatingKey, "Maximum speed rating reached");
            LanguageHandler.SetLanguageLine(SpeedRatingKey, "Speed rating is now at +{0} ({1}%).");
        };
    }

    // Crafted at the Cyclops fabricator, under the Cyclops modules tab.
    public override CraftTree.Type FabricatorType { get; } = CraftTree.Type.CyclopsFabricator;
    public override string AssetsFolder { get; } = "CyclopsSpeedUpgrades/Assets";
    public override string[] StepsToFabricatorTab { get; } = MCUServices.CrossMod.StepsToCyclopsModulesTabInCyclopsFabricator;

    // Crafting recipe: aerogel + magnetite + lubricant.
    protected override TechData GetBlueprintRecipe()
    {
        return new TechData()
        {
            craftAmount = 1,
            Ingredients =
            {
                new Ingredient(TechType.Aerogel, 1),
                new Ingredient(TechType.Magnetite, 1),
                new Ingredient(TechType.Lubricant, 1),
            }
        };
    }

    // Factory for the per-Cyclops handler that applies the speed/power effects.
    internal SpeedHandler CreateSpeedUpgradeHandler(SubRoot cyclops)
    {
        return new SpeedHandler(this, cyclops);
    }

    // Factory for the HUD overlay drawn on this module's inventory icon.
    internal SpeedOverlay CreateSpeedIconOverlay(uGUI_ItemIcon icon, InventoryItem upgradeModule)
    {
        return new SpeedOverlay(icon, upgradeModule, this);
    }
}
}
| 39.387097 | 138 | 0.620803 | [
"MIT"
] | Denkkar/PrimeSonicSubnauticaMods | CyclopsSpeedUpgrades/CyclopsSpeedModule.cs | 2,444 | C# |
//-----------------------------------------------------------------------
// <copyright file="QueueSinkSpec.cs" company="Akka.NET Project">
// Copyright (C) 2009-2018 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2018 .NET Foundation <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Akka.Actor;
using Akka.Pattern;
using Akka.Streams.Dsl;
using Akka.Streams.TestKit;
using Akka.Streams.TestKit.Tests;
using Akka.Streams.Util;
using Akka.TestKit;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;
namespace Akka.Streams.Tests.Dsl
{
// Specs for Sink.Queue<T>: a sink that materializes into a queue whose PullAsync()
// hands out stream elements one future at a time (Option<T>.None signals completion).
public class QueueSinkSpec : AkkaSpec
{
    private readonly ActorMaterializer _materializer;

    // Grace period used both as a "nothing should arrive within this window" check
    // and as a short wait for futures that are expected to complete promptly.
    private readonly TimeSpan _pause = TimeSpan.FromMilliseconds(300);

    // Canonical failure injected into streams under test.
    private static TestException TestException()
    {
        return new TestException("boom");
    }

    public QueueSinkSpec(ITestOutputHelper output) : base(output)
    {
        _materializer = Sys.Materializer();
    }

    [Fact]
    public void QueueSink_should_send_the_elements_as_result_of_future()
    {
        this.AssertAllStagesStopped(() =>
        {
            // Three elements followed by None, which marks stream completion.
            var expected = new List<Option<int>>
            {
                new Option<int>(1),
                new Option<int>(2),
                new Option<int>(3),
                new Option<int>()
            };
            var queue = Source.From(expected.Where(o => o.HasValue).Select(o => o.Value))
                .RunWith(Sink.Queue<int>(), _materializer);
            expected.ForEach(v =>
            {
                queue.PullAsync().PipeTo(TestActor);
                ExpectMsg(v);
            });
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_allow_to_have_only_one_future_waiting_for_result_in_each_point_in_time()
    {
        this.AssertAllStagesStopped(() =>
        {
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(Sink.Queue<int>(), _materializer);
            var sub = probe.ExpectSubscription();
            var future = queue.PullAsync();
            // A second concurrent pull must be rejected while the first is outstanding.
            var future2 = queue.PullAsync();
            future2.Invoking(t => t.Wait(RemainingOrDefault)).ShouldThrow<IllegalStateException>();
            sub.SendNext(1);
            future.PipeTo(TestActor);
            ExpectMsg(new Option<int>(1));
            sub.SendComplete();
            queue.PullAsync();
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_wait_for_next_element_from_upstream()
    {
        this.AssertAllStagesStopped(() =>
        {
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(Sink.Queue<int>(), _materializer);
            var sub = probe.ExpectSubscription();
            queue.PullAsync().PipeTo(TestActor);
            // The pull stays pending until upstream actually emits.
            ExpectNoMsg(_pause);
            sub.SendNext(1);
            ExpectMsg(new Option<int>(1));
            sub.SendComplete();
            queue.PullAsync();
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_fail_future_on_stream_failure()
    {
        this.AssertAllStagesStopped(() =>
        {
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(Sink.Queue<int>(), _materializer);
            var sub = probe.ExpectSubscription();
            queue.PullAsync().PipeTo(TestActor);
            ExpectNoMsg(_pause);
            sub.SendError(TestException());
            // PipeTo delivers the task failure as Status.Failure wrapping an AggregateException.
            ExpectMsg<Status.Failure>(
                f => f.Cause is AggregateException && f.Cause.InnerException.Equals(TestException()));
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_fail_future_when_stream_failed()
    {
        this.AssertAllStagesStopped(() =>
        {
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(Sink.Queue<int>(), _materializer);
            var sub = probe.ExpectSubscription();
            // Stream already failed before the pull is issued.
            sub.SendError(TestException());
            queue.Invoking(q => q.PullAsync().Wait(RemainingOrDefault))
                .ShouldThrow<TestException>();
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_timeout_future_when_stream_cannot_provide_data()
    {
        // NOTE(review): this body is identical to the "wait for next element" spec above
        // and does not exercise a timeout — confirm the intended scenario.
        this.AssertAllStagesStopped(() =>
        {
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(Sink.Queue<int>(), _materializer);
            var sub = probe.ExpectSubscription();
            queue.PullAsync().PipeTo(TestActor);
            ExpectNoMsg(_pause);
            sub.SendNext(1);
            ExpectMsg(new Option<int>(1));
            sub.SendComplete();
            queue.PullAsync();
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_fail_pull_future_when_stream_is_completed()
    {
        this.AssertAllStagesStopped(() =>
        {
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(Sink.Queue<int>(), _materializer);
            var sub = probe.ExpectSubscription();
            queue.PullAsync().PipeTo(TestActor);
            sub.SendNext(1);
            ExpectMsg(new Option<int>(1));
            sub.SendComplete();
            // First pull after completion yields None...
            var future = queue.PullAsync();
            future.Wait(_pause).Should().BeTrue();
            future.Result.Should().Be(Option<int>.None);
            // ...and any further pull faults with IllegalStateException.
            ((Task)queue.PullAsync()).ContinueWith(t =>
            {
                t.Exception.InnerException.Should().BeOfType<IllegalStateException>();
            }, TaskContinuationOptions.OnlyOnFaulted).Wait(TimeSpan.FromMilliseconds(300));
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_keep_on_sending_even_after_the_buffer_has_been_full()
    {
        this.AssertAllStagesStopped(() =>
        {
            const int bufferSize = 16;
            const int streamElementCount = bufferSize + 4;
            var sink = Sink.Queue<int>().WithAttributes(Attributes.CreateInputBuffer(bufferSize, bufferSize));
            // Side branch takes exactly bufferSize elements so we can detect when the
            // sink's internal buffer has been filled, before any pull happens.
            var tuple = Source.From(Enumerable.Range(1, streamElementCount))
                .AlsoToMaterialized(
                    Flow.Create<int>().Take(bufferSize).WatchTermination(Keep.Right).To(Sink.Ignore<int>()),
                    Keep.Right)
                .ToMaterialized(sink, Keep.Both)
                .Run(_materializer);
            var probe = tuple.Item1;
            var queue = tuple.Item2;
            probe.Wait(TimeSpan.FromMilliseconds(300)).Should().BeTrue();
            // All elements — including those beyond the buffer — must still be deliverable.
            for (var i = 1; i <= streamElementCount; i++)
            {
                queue.PullAsync().PipeTo(TestActor);
                ExpectMsg(new Option<int>(i));
            }
            queue.PullAsync().PipeTo(TestActor);
            ExpectMsg(Option<int>.None);
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_work_with_one_element_buffer()
    {
        this.AssertAllStagesStopped(() =>
        {
            var sink = Sink.Queue<int>().WithAttributes(Attributes.CreateInputBuffer(1, 1));
            var probe = this.CreateManualPublisherProbe<int>();
            var queue = Source.FromPublisher(probe).RunWith(sink, _materializer);
            var sub = probe.ExpectSubscription();
            queue.PullAsync().PipeTo(TestActor);
            sub.SendNext(1); // should pull next element
            ExpectMsg(new Option<int>(1));
            queue.PullAsync().PipeTo(TestActor);
            ExpectNoMsg(); // element requested but buffer empty
            sub.SendNext(2);
            ExpectMsg(new Option<int>(2));
            sub.SendComplete();
            var future = queue.PullAsync();
            future.Wait(_pause).Should().BeTrue();
            future.Result.Should().Be(Option<int>.None);
        }, _materializer);
    }

    [Fact]
    public void QueueSink_should_fail_to_materialize_with_zero_sized_input_buffer()
    {
        Source.Single(1)
            .Invoking(
                s => s.RunWith(Sink.Queue<int>().WithAttributes(Attributes.CreateInputBuffer(0, 0)), _materializer))
            .ShouldThrow<ArgumentException>();
    }
}
| 37.70082 | 120 | 0.540928 | [
"Apache-2.0"
] | EajksEajks/Akka.NET | src/core/Akka.Streams.Tests/Dsl/QueueSinkSpec.cs | 9,201 | C# |
using System;
using System.IO;
using System.Linq;
using Shouldly;
using Versionize.Tests.TestSupport;
using Xunit;
namespace Versionize.Tests
{
public class ProjectsTests
{
    [Fact]
    public void ShouldDiscoverAllProjects()
    {
        // Arrange: a working directory containing two csproj files.
        var workingDirectory = TempDir.Create();
        TempCsProject.Create(Path.Join(workingDirectory, "project1"));
        TempCsProject.Create(Path.Join(workingDirectory, "project2"));

        // Act
        var discovered = Projects.Discover(workingDirectory);

        // Assert
        discovered.GetProjectFiles().Count().ShouldBe(2);
    }

    [Fact]
    public void ShouldDetectInconsistentVersions()
    {
        // Arrange: two projects carrying different version numbers.
        var workingDirectory = TempDir.Create();
        TempCsProject.Create(Path.Join(workingDirectory, "project1"), "2.0.0");
        TempCsProject.Create(Path.Join(workingDirectory, "project2"), "1.1.1");

        // Act
        var discovered = Projects.Discover(workingDirectory);

        // Assert
        discovered.HasInconsistentVersioning().ShouldBeTrue();
    }

    [Fact]
    public void ShouldDetectConsistentVersions()
    {
        // Arrange: two projects created with the default (identical) version.
        var workingDirectory = TempDir.Create();
        TempCsProject.Create(Path.Join(workingDirectory, "project1"));
        TempCsProject.Create(Path.Join(workingDirectory, "project2"));

        // Act
        var discovered = Projects.Discover(workingDirectory);

        // Assert
        discovered.HasInconsistentVersioning().ShouldBeFalse();
    }

    [Fact]
    public void ShouldWriteAllVersionsToProjectFiles()
    {
        // Arrange: two projects at version 1.1.1.
        var workingDirectory = TempDir.Create();
        TempCsProject.Create(Path.Join(workingDirectory, "project1"), "1.1.1");
        TempCsProject.Create(Path.Join(workingDirectory, "project2"), "1.1.1");

        // Act: bump every discovered project to 2.0.0, then re-discover from disk.
        var discovered = Projects.Discover(workingDirectory);
        discovered.WriteVersion(new Version(2, 0, 0));
        var updated = Projects.Discover(workingDirectory);

        // Assert
        updated.Version.ShouldBe(new Version("2.0.0"));
    }
}
}
| 31.266667 | 74 | 0.59968 | [
"MIT"
] | benedict1986/versionize | Versionize.Tests/ProjectsTests.cs | 1,878 | C# |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Commands.Test.TrafficManager.Endpoints
{
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Microsoft.WindowsAzure.Commands.Test.Utilities.Common;
using Microsoft.WindowsAzure.Commands.TrafficManager.Endpoint;
using Microsoft.WindowsAzure.Commands.TrafficManager.Models;
using Microsoft.WindowsAzure.Management.TrafficManager.Models;
using System;
using System.Collections.Generic;
using System.Linq;
[TestClass]
public class AddTrafficManagerEndpointTests : TestBase
{
// Fixture data shared by every test: a profile identity, an endpoint domain,
// the endpoint type strings accepted by the cmdlet, and default status/weight.
private const string ProfileName = "my-profile";
private const string ProfileDomainName = "my.profile.trafficmanager.net";
private const LoadBalancingMethod DefaultLoadBalancingMethod = LoadBalancingMethod.Failover;
private const string DomainName = "www.example.com";
private const string CloudServiceType = "CloudService";
private const string AzureWebsiteType = "AzureWebsite";
private const string AnyType = "Any";
private const EndpointStatus Status = EndpointStatus.Enabled;
private const int Weight = 3;
// Recreated per test by TestSetup; captures the cmdlet's output pipeline.
private MockCommandRuntime mockCommandRuntime;
// Cmdlet under test; constructed inside each test method.
private AddAzureTrafficManagerEndpoint cmdlet;
[TestInitialize]
public void TestSetup()
{
    // Fresh command runtime per test so OutputPipeline assertions stay isolated.
    mockCommandRuntime = new MockCommandRuntime();
}
[TestMethod]
public void AddTrafficManagerEndpointCloudService()
{
    // Arrange: a profile without endpoints and a cmdlet adding a CloudService endpoint.
    ProfileWithDefinition originalProfile = GetProfileWithDefinition();
    cmdlet = new AddAzureTrafficManagerEndpoint
    {
        DomainName = DomainName,
        Type = CloudServiceType,
        Weight = Weight,
        Status = Status.ToString(),
        TrafficManagerProfile = originalProfile,
        CommandRuntime = mockCommandRuntime
    };

    // Act
    cmdlet.ExecuteCmdlet();

    // Assert: only the endpoint collection changed...
    var updatedProfile = mockCommandRuntime.OutputPipeline[0] as ProfileWithDefinition;
    AssertAllProfilePropertiesDontChangeExceptEndpoints(originalProfile, updatedProfile);

    // ...and it now contains an endpoint for the new domain name.
    Assert.IsTrue(updatedProfile.Endpoints.Any(e => e.DomainName == DomainName));
}
[TestMethod]
public void AddTrafficManagerEndpointWebsite()
{
    // Arrange: add an AzureWebsite endpoint to a profile that has none.
    ProfileWithDefinition originalProfile = GetProfileWithDefinition();
    cmdlet = new AddAzureTrafficManagerEndpoint
    {
        DomainName = DomainName,
        Type = AzureWebsiteType,
        Weight = Weight,
        TrafficManagerProfile = originalProfile,
        CommandRuntime = mockCommandRuntime,
        Status = "Enabled"
    };

    // Act
    cmdlet.ExecuteCmdlet();

    // Assert: everything except the endpoint collection is untouched...
    var updatedProfile = mockCommandRuntime.OutputPipeline[0] as ProfileWithDefinition;
    AssertAllProfilePropertiesDontChangeExceptEndpoints(originalProfile, updatedProfile);

    // ...and the new domain now appears among the endpoints.
    Assert.IsTrue(updatedProfile.Endpoints.Any(e => e.DomainName == DomainName));
}
[TestMethod]
public void AddTrafficManagerEndpointAny()
{
    // Arrange: add an endpoint of type "Any".
    ProfileWithDefinition originalProfile = GetProfileWithDefinition();
    cmdlet = new AddAzureTrafficManagerEndpoint
    {
        DomainName = DomainName,
        Type = AnyType,
        Weight = Weight,
        TrafficManagerProfile = originalProfile,
        CommandRuntime = mockCommandRuntime,
        Status = "Enabled"
    };

    // Act
    cmdlet.ExecuteCmdlet();

    // Assert: only the endpoint collection changed...
    var updatedProfile = mockCommandRuntime.OutputPipeline[0] as ProfileWithDefinition;
    AssertAllProfilePropertiesDontChangeExceptEndpoints(originalProfile, updatedProfile);

    // ...and it now contains an endpoint for the new domain name.
    Assert.IsTrue(updatedProfile.Endpoints.Any(e => e.DomainName == DomainName));
}
[TestMethod]
public void AddTrafficManagerEndpointAlreadyExistsFails()
{
    // Setup: seed the profile with an endpoint that already uses the
    // domain name the cmdlet is about to add.
    ProfileWithDefinition original = GetProfileWithDefinition();
    original.Endpoints.Add(new TrafficManagerEndpoint
    {
        DomainName = DomainName,
        Type = EndpointType.Any,
        Status = EndpointStatus.Enabled
    });

    cmdlet = new AddAzureTrafficManagerEndpoint
    {
        DomainName = DomainName,
        Type = AnyType,
        TrafficManagerProfile = original,
        CommandRuntime = mockCommandRuntime
    };

    // Action + Assert: adding a duplicate endpoint must throw.
    Testing.AssertThrows<Exception>(() => cmdlet.ExecuteCmdlet());
}
[TestMethod]
public void AddTrafficManagerEndpointNoWeightNoLocation()
{
    // Setup: no Weight and no Location supplied to the cmdlet.
    ProfileWithDefinition original = GetProfileWithDefinition();
    cmdlet = new AddAzureTrafficManagerEndpoint
    {
        DomainName = DomainName,
        Type = AnyType,
        TrafficManagerProfile = original,
        CommandRuntime = mockCommandRuntime,
        Status = "Enabled"
    };

    // Action
    cmdlet.ExecuteCmdlet();

    // Assert
    var actual = mockCommandRuntime.OutputPipeline[0] as ProfileWithDefinition;
    // Guard against a missing or wrongly-typed pipeline object.
    Assert.IsNotNull(actual);
    // All the properties stay the same except the endpoints.
    AssertAllProfilePropertiesDontChangeExceptEndpoints(original, actual);
    // There is a new endpoint with the new domain name in "actual" but not in "original".
    Assert.IsTrue(actual.Endpoints.Any(e => e.DomainName == DomainName));

    // Omitted values fall back to defaults: weight 1, no location.
    TrafficManagerEndpoint endpoint = actual.Endpoints.First(e => e.DomainName == DomainName);
    Assert.AreEqual(1, endpoint.Weight);
    Assert.IsNull(endpoint.Location);
}
// Builds a fresh profile with sensible defaults and an empty endpoint list,
// used as the starting point for every test in this class.
private ProfileWithDefinition GetProfileWithDefinition()
{
    var profile = new ProfileWithDefinition
    {
        DomainName = ProfileDomainName,
        Name = ProfileName,
        Endpoints = new List<TrafficManagerEndpoint>(),
        LoadBalancingMethod = DefaultLoadBalancingMethod,
        MonitorPort = 80,
        Status = ProfileDefinitionStatus.Enabled,
        MonitorRelativePath = "/",
        TimeToLiveInSeconds = 30
    };
    return profile;
}
// Verifies that the cmdlet's output profile has the same scalar properties
// as the input profile; endpoint changes are asserted separately by callers.
private void AssertAllProfilePropertiesDontChangeExceptEndpoints(
    ProfileWithDefinition original,
    ProfileWithDefinition actual)
{
    Assert.AreEqual(original.DomainName, actual.DomainName);
    Assert.AreEqual(original.Name, actual.Name);
    Assert.AreEqual(original.LoadBalancingMethod, actual.LoadBalancingMethod);
    Assert.AreEqual(original.MonitorPort, actual.MonitorPort);
    Assert.AreEqual(original.Status, actual.Status);
    Assert.AreEqual(original.MonitorRelativePath, actual.MonitorRelativePath);
    Assert.AreEqual(original.TimeToLiveInSeconds, actual.TimeToLiveInSeconds);
}
}
}
| 38.831111 | 103 | 0.593911 | [
"MIT"
] | stankovski/azure-sdk-tools | src/ServiceManagement/TrafficManager/Commands.TrafficManager.Test/Endpoints/AddTrafficManagerEndpointTests.cs | 8,515 | C# |
using System;
using System.Reflection;
using System.Runtime.InteropServices;
// Cyotek Color Picker controls library
// Copyright © 2013-2015 Cyotek Ltd.
// http://cyotek.com/blog/tag/colorpicker
// Licensed under the MIT License. See license.txt for the full text.
// If you use this code in your applications, donations or attribution are welcome
[assembly: AssemblyTitle("Cyotek Color Picker Controls")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Cyotek Color Picker Controls")]
[assembly: AssemblyCopyright("Copyright © 2013-2015 Cyotek Ltd.")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
[assembly: ComVisible(false)]
[assembly: Guid("de546619-59b3-438e-8b5a-3d149e146b22")]
[assembly: CLSCompliant(true)]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.15318.9")]
| 35.192308 | 82 | 0.762842 | [
"MIT"
] | sraboy/skaa_editor | Cyotek.Windows.Forms.ColorPicker/Properties/AssemblyInfo.cs | 919 | C# |
// <auto-generated />
namespace AdminLteMvc.Migrations
{
using System.CodeDom.Compiler;
using System.Data.Entity.Migrations;
using System.Data.Entity.Migrations.Infrastructure;
using System.Resources;
// Auto-generated EF6 migration metadata — do not edit by hand.
[GeneratedCode("EntityFramework.Migrations", "6.1.3-40302")]
public sealed partial class Truckers : IMigrationMetadata
{
    // Embedded resources generated alongside this migration; they hold the
    // serialized target model snapshot.
    private readonly ResourceManager Resources = new ResourceManager(typeof(Truckers));

    // Migration id (timestamp prefix + name) used for ordering in __MigrationHistory.
    string IMigrationMetadata.Id
    {
        get { return "202012020205137_Truckers"; }
    }

    // No source model snapshot is stored for this migration.
    string IMigrationMetadata.Source
    {
        get { return null; }
    }

    // Serialized target model, read from the generated resource file.
    string IMigrationMetadata.Target
    {
        get { return Resources.GetString("Target"); }
    }
}
}
| 26.9 | 91 | 0.615861 | [
"MIT"
] | ataharasystemsolutions/KTI_TEST | AdminLteMvc/AdminLteMvc/Migrations/202012020205137_Truckers.Designer.cs | 807 | C# |
using PropertyCross.Presenter;
using System;
namespace PropertyCross
{
public class MarshalInvokeService : IMarshalInvokeService
{
    /// <summary>
    /// Runs <paramref name="action"/> synchronously on the calling thread.
    /// On Windows Phone the caller is already on the UI thread, so no
    /// cross-thread marshalling is required.
    /// </summary>
    public void Invoke(Action action)
    {
        action();
    }
}
}
| 18.8 | 72 | 0.698582 | [
"MIT"
] | ColinEberhardt/PropertyCross | xamarin/windowsphone/PropertyCross/MarshalInvokeService.cs | 284 | C# |
/**
* 封装MultiMap,用于重用
*/
namespace ET
{
// Entity wrapper around a MultiMap so the map can be pooled and reused
// through the entity lifecycle.
public class MultiMapComponent<T, K> : Entity
{
    // The wrapped multimap; cleared (not replaced) on dispose so the
    // allocation can be reused.
    public MultiMap<T, K> MultiMap = new MultiMap<T, K>();

    public override void Dispose()
    {
        // Guard against double-dispose.
        if (IsDisposed)
            return;

        base.Dispose();
        MultiMap.Clear();
    }
}
} | 16 | 62 | 0.44837 | [
"MIT"
] | Noname-Studio/ET | Unity/Assets/Model/Core/MultiMapComponent.cs | 382 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Xml.Linq;
using IOPath = System.IO.Path;
namespace TreeSlides
{
/// <summary>
/// Interaction logic for MainWindow.xaml.
/// Presents a tree of topics loaded from "topics.xml" as an animated,
/// keyboard-driven slide show (arrows navigate, F5 reloads, F11 toggles
/// full screen).
/// </summary>
public partial class MainWindow : Window
{
    Node tree;                 // root of the topic tree
    Node currentNode;          // node currently centred / highlighted
    double inactiveVsActiveScale = 1;   // scale applied to every rendered node
    double nestingShiftFraction = 0.1;  // horizontal indent per nesting level (fraction of canvas width)
    double lineFraction = 0.1;          // font size and line spacing as a fraction of canvas height
    Size? canvasSize;                   // cached logical canvas size; null forces a re-measure
    TimeSpan transitionDuration = TimeSpan.FromSeconds(0.4);

    public MainWindow()
    {
        InitializeComponent();
        LoadBackground();
        SnapsToDevicePixels = true;
        Loaded += (o, e) => Load();
    }

    // Loads "bg.png" from the application directory as the background image.
    void LoadBackground()
    {
        var path = IOPath.Combine(IOPath.GetDirectoryName(Assembly.GetEntryAssembly().Location), "bg.png");
        image.Source = new BitmapImage(new Uri(path));
    }

    // Toggles between a borderless maximized window and a normal one.
    void ToggleFullScreen()
    {
        if (this.WindowStyle == WindowStyle.None)
        {
            this.WindowStyle = WindowStyle.ThreeDBorderWindow;
            WindowState = WindowState.Normal;
        }
        else
        {
            this.WindowStyle = WindowStyle.None;
            WindowState = WindowState.Maximized;
        }
    }

    // (Re)loads the topic tree, flattens it into a Previous/Next chain in
    // display order, adds each node's UI element to the canvas, and
    // activates the root node.
    void Load()
    {
        presentationCanvas.Children.Clear();
        canvasSize = null;
        LoadTree();

        int displayIndex = 0;
        Action<Node, Node, int> add = null;
        Node previous = null;
        add = (n, parent, level) =>
        {
            n.ContentUI.SetValue(TextBlock.TextWrappingProperty, TextWrapping.Wrap);
            n.ContentUI.Opacity = 0;
            n.Parent = parent;
            n.Index = displayIndex++;
            n.Previous = previous;
            if (previous != null)
                previous.Next = n;
            previous = n;
            n.Level = level;
            var ui = n.ContentUI;
            presentationCanvas.Children.Add(ui);
            // Each element gets its own mutable transform that the
            // activation animations drive.
            var trans = new MatrixTransform(Matrix.Identity);
            ui.RenderTransform = trans;
            for (int i = 0; i < n.Nodes.Count; i++)
            {
                var child = n.Nodes[i];
                add(child, n, level + 1);
            }
        };
        add(tree, null, 0);
        ActivateNode(tree);
    }

    // Parses "topics.xml" (nested cell/grid/row elements) into the Node tree.
    void LoadTree()
    {
        var filename = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(GetType().Assembly.Location), "topics.xml");
        // Fix: the previous code passed a FileStream that was never disposed,
        // leaking a file handle on every reload (F5). XDocument.Load(string)
        // opens and closes the file itself.
        var x = XDocument.Load(filename);
        var list = new List<Node>();
        Action<List<Node>, XElement> cellReader = null;
        cellReader = (collection, cell) =>
        {
            // The cell's own text becomes the node content; escaped \n / \t
            // sequences in the XML are expanded here.
            var textNode = cell.Nodes().FirstOrDefault(n => n is XText) as XText;
            string text = textNode == null ? null : textNode.Value?.Trim().Replace("\\n", Environment.NewLine).Replace("\\t", " ");
            var node = new Node(text);
            collection.Add(node);
            // A nested <grid> holds the children: one per <row>, read from
            // the row's first <cell>.
            var innerGrid = cell.Element("grid");
            if (innerGrid != null)
            {
                foreach (var row in innerGrid.Elements("row"))
                {
                    var firstCell = row.Element("cell");
                    if (firstCell != null)
                    {
                        cellReader(node.Nodes, firstCell);
                    }
                }
            }
        };
        cellReader(list, x.Element("cell"));
        tree = new Node("Topics") { Nodes = list[0].Nodes };
    }

    // Keyboard navigation: arrows move through the flattened chain or
    // between siblings, Escape goes to the parent, Home to the root.
    protected override void OnPreviewKeyDown(KeyEventArgs e)
    {
        base.OnPreviewKeyDown(e);
        Node nextNode = null;
        switch (e.Key)
        {
            case Key.Right:
                nextNode = currentNode.Next;
                break;
            case Key.Left:
                nextNode = currentNode.Previous;
                break;
            case Key.Escape:
                nextNode = currentNode.Parent;
                break;
            case Key.Up:
                {
                    var parent = currentNode.Parent;
                    if (parent != null)
                    {
                        var i = parent.Nodes.IndexOf(currentNode) - 1;
                        nextNode = i >= 0 ? parent.Nodes[i] : null;
                    }
                }
                break;
            case Key.Down:
                {
                    var parent = currentNode.Parent;
                    if (parent != null)
                    {
                        var i = parent.Nodes.IndexOf(currentNode) + 1;
                        if (i < parent.Nodes.Count)
                            nextNode = parent.Nodes[i];
                    }
                }
                break;
            case Key.Home:
                nextNode = tree;
                break;
            case Key.F11:
                ToggleFullScreen();
                break;
            case Key.F5:
                Load();
                break;
        }
        if (nextNode != null)
            ActivateNode(nextNode);
    }

    protected override void OnRenderSizeChanged(SizeChangedInfo sizeInfo)
    {
        base.OnRenderSizeChanged(sizeInfo);
        // Re-layout around the current node when the window is resized.
        if (currentNode != null)
            ActivateNode(currentNode);
    }

    // Returns the logical canvas size (70% of the window) and, when it
    // changed, re-measures every node's text at the new font size.
    Size GetCanvasSize()
    {
        var size = presentationCanvas.RenderSize;
        var width = size.Width;
        var height = size.Height;
        var canvasSize = new Size(width * 0.7, height * 0.7);
        if (canvasSize != this.canvasSize)
        {
            //remeasure
            var fontSize = canvasSize.Height * lineFraction;
            var node = tree;
            while (node != null)
            {
                node.ContentUI.SetValue(TextBlock.FontSizeProperty, fontSize);
                node.ContentUI.SetValue(FrameworkElement.MaxWidthProperty, canvasSize.Width);
                node.ContentUI.Measure(canvasSize);
                node = node.Next;
            }
            this.canvasSize = canvasSize;
        }
        return canvasSize;
    }

    // Makes newNode the focused node: only its ancestors' subtrees stay
    // visible, everything is vertically stacked, and the focused node is
    // animated to the vertical centre of the window.
    void ActivateNode(Node newNode)
    {
        currentNode = newNode;
        var canvasSize = GetCanvasSize();

        // Collect newNode and all of its ancestors; only children of these
        // nodes remain visible.
        var ancestors = new HashSet<Node>() { newNode };
        var parent = newNode.Parent;
        while (parent != null)
        {
            ancestors.Add(parent);
            parent = parent.Parent;
        }

        // First pass: assign a stacked y position to every visible node
        // (null means hidden). "folding" records whether anything that was
        // visible is about to disappear, so movements can be delayed until
        // the fade-out finishes.
        Dictionary<Node, double?> yPositions = new Dictionary<Node, double?>(presentationCanvas.Children.Count);
        Action<Node, bool> yDeterminer = null;
        double y = 0;
        bool folding = false;
        yDeterminer = (n, visible) =>
        {
            if (!visible)
            {
                folding |= (double)n.ContentUI.GetValue(UIElement.OpacityProperty) > 0;
                yPositions[n] = null;
            }
            else
            {
                yPositions[n] = y;
                y += n.ContentUI.DesiredSize.Height + lineFraction * canvasSize.Height;
            }
            foreach (var child in n.Nodes)
            {
                var childVisible = visible && ancestors.Contains(n);
                yDeterminer(child, childVisible);
            }
        };
        yDeterminer(tree, true);

        // Second pass: animate opacity and position of every node so the
        // focused node ends up vertically centred.
        var containerSize = presentationCanvas.RenderSize;
        var canvasLeft = (containerSize.Width - canvasSize.Width) / 2;
        var canvasYCenter = containerSize.Height / 2;
        var offset = yPositions[newNode].Value + newNode.ContentUI.DesiredSize.Height / 2 - canvasYCenter;
        var transitDelay = folding ? TimeSpan.FromSeconds(0.3) : TimeSpan.Zero;
        var node = tree;
        while (node != null)
        {
            var position = yPositions[node];
            if (position == null)
            {
                ChangeElementOpacity(node.ContentUI, 0);
            }
            else
            {
                // The focused node is fully opaque; nearby relatives are
                // dimmed less than unrelated later nodes.
                var isSiblingOrChild = node.Parent == newNode || node.Parent == newNode.Parent;
                var opacity = node == newNode ? 1 : (node.Index > newNode.Index && !isSiblingOrChild ? 0.2 : 0.4);
                var opacityDelay = transitDelay;
                if ((double)node.ContentUI.GetValue(UIElement.OpacityProperty) == 0)
                    opacityDelay += TimeSpan.FromSeconds(0.4);
                ChangeElementOpacity(node.ContentUI, opacity, opacityDelay);
                var x = canvasLeft + (node.Level - newNode.Level) * canvasSize.Width * nestingShiftFraction;
                var elementY = position.Value - offset;
                var to = CalcTransform(node.ContentUI, x, elementY, inactiveVsActiveScale);
                // Newly placed elements jump straight to their target so the
                // transition animation does not start from the origin.
                if ((node.ContentUI.RenderTransform as MatrixTransform).Matrix == Matrix.Identity)
                    (node.ContentUI.RenderTransform as MatrixTransform).Matrix = to;
                TransitElement(node.ContentUI, to, transitDelay);
            }
            node = node.Next;
        }
    }

    // Fades an element to the given opacity over 0.3s after an optional delay.
    void ChangeElementOpacity(UIElement element, double opacity, TimeSpan delay = default(TimeSpan))
    {
        element.BeginAnimation(UIElement.OpacityProperty, new DoubleAnimation(opacity, TimeSpan.FromSeconds(0.3)) { BeginTime = delay });
    }

    // Animates an element's transform matrix to the target with ease-out.
    void TransitElement(UIElement element, Matrix to, TimeSpan delay, Matrix? from = null)
    {
        var anim = new MatrixAnimation(from, to, transitionDuration) { EasingFunction = new CubicEase { EasingMode = EasingMode.EaseOut } };
        anim.BeginTime = delay;
        element.RenderTransform.BeginAnimation(MatrixTransform.MatrixProperty, anim);
    }

    // Builds a scale+translate matrix; identity for not-yet-measured elements.
    Matrix CalcTransform(UIElement element, double leftX, double topY, double scale)
    {
        var measured = element.DesiredSize;
        if (measured.Width == 0 || measured.Height == 0)
            return Matrix.Identity;
        var result = Matrix.Identity;
        result.Scale(scale, scale);
        result.Translate(leftX, topY);
        return result;
    }
}
// One topic in the presentation: tree links (Parent/Nodes), a flattened
// display-order chain (Previous/Next/Index), nesting depth (Level) and the
// WPF element that renders the content.
class Node
{
    public object Content;
    public UIElement ContentUI;
    public List<Node> Nodes = new List<Node>();
    public Node Parent;
    public Node Next;
    public Node Previous;
    public int Level;
    public int Index;

    public Node() { }

    // Creates a node whose UI is a white, wrapped text block.
    public Node(string content)
    {
        Content = content;
        ContentUI = new TextBlock { Text = content, Foreground = Brushes.White };
    }

    public override string ToString()
    {
        return (Content ?? string.Empty).ToString();
    }
}
| 30.745501 | 144 | 0.490635 | [
"MIT"
] | JakoDav/TreeSlides | TreeSlides/MainWindow.xaml.cs | 11,962 | C# |
using ServiceCommon;
using Microsoft.EntityFrameworkCore;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text;
namespace SystemService.Domain
{
/// <summary>
/// A system resource arranged in a self-referencing tree via
/// <see cref="ParentResourceID"/>; resources are shared across tenants
/// (no tenant column on the base entity).
/// </summary>
[Table("Resource")]
public class Resource : BaseEntityWithNoTenant
{
    /// <summary>Unique code identifying the resource.</summary>
    [Required]
    public string ResourceCode { get; set; }

    /// <summary>Display name of the resource.</summary>
    [Required]
    public string ResourceName { get; set; }

    /// <summary>Category of the resource; semantics defined by <see cref="EnumResourceType"/>.</summary>
    [Required]
    public EnumResourceType ResourceType { get; set; }

    /// <summary>Id of the parent resource, forming the tree structure.</summary>
    [Required]
    public Guid ParentResourceID { get; set; }

    /// <summary>Navigation property to the parent resource.</summary>
    public virtual Resource ParentResource { get; set; }

    /// <summary>Navigation property to the child resources.</summary>
    public virtual ICollection<Resource> ChildrenResources { get; set; }

    /// <summary>Sort order among sibling resources.</summary>
    [Required]
    public int SortNO { get; set; }

    /// <summary>Optional free-text description.</summary>
    public string ResourceDesc { get; set; }
}
/// <summary>
/// A tenant of the multi-tenant system, with its public URL and branding.
/// </summary>
[Table("Tenant")]
public class Tenant : BaseEntityWithNoTenant
{
    /// <summary>Unique code identifying the tenant.</summary>
    [Required]
    public string TenantCode { get; set; }

    /// <summary>Display name of the tenant.</summary>
    [Required]
    public string TenantName { get; set; }

    /// <summary>The tenant's URL.</summary>
    [Required]
    public string TenantUrl { get; set; }

    /// <summary>The tenant's logo (path or identifier — format not evident here).</summary>
    [Required]
    public string TenantLogo { get; set; }

    /// <summary>Sort order among tenants.</summary>
    [Required]
    public int SortNO { get; set; }

    /// <summary>Optional free-text description.</summary>
    public string TenantDesc { get; set; }
}
}
| 24.981481 | 76 | 0.624907 | [
"MIT"
] | 281902112/MicroserviceDemo | BasicServices/SystemService/SystemService.Domain/Entities/AllEnitities.cs | 1,351 | C# |
using SistemaBiomedicinaCsharp25042019.Apresentacao;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace SistemaBiomedicinaCsharp25042019.Apresentacao
{
/// <summary>
/// Login form: validates the entered credentials against the "login"
/// stored procedure and, on success, opens the main menu window.
/// </summary>
public partial class frm_Login : Form
{
    public frm_Login()
    {
        InitializeComponent();
    }

    private void btnEntrar_Click(object sender, EventArgs e)
    {
        string usuario = txtUsuario.Text;
        string senha = txtSenha.Text;

        // Reject empty user name or password before touching the database.
        if (string.IsNullOrEmpty(usuario) || string.IsNullOrEmpty(senha))
        {
            MessageBox.Show("Preencha os campos vazios!!");
            return;
        }

        // Call the "login" stored procedure on the shared connection.
        // "using" disposes the command; previously it was never disposed.
        using (SqlCommand cmd = new SqlCommand("login", Modelo.Estaticos.con))
        {
            try
            {
                Modelo.Estaticos.abrir();
                // Named enum members instead of the raw casts used before
                // ((CommandType)4 == StoredProcedure, (ParameterDirection)2 == Output).
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@nome", usuario);
                cmd.Parameters.AddWithValue("@senha", senha);
                cmd.Parameters.Add("@msg", SqlDbType.VarChar, 100).Direction = ParameterDirection.Output;

                // Execute the procedure; the result comes back in @msg.
                cmd.ExecuteNonQuery();
                Modelo.Estaticos.usuarioNome = txtUsuario.Text;

                string msg = System.Convert.ToString(cmd.Parameters["@msg"].Value);
                MessageBox.Show(msg);

                // The procedure signals invalid credentials via this exact message.
                if (msg == "Dados Incorretos")
                {
                    this.txtUsuario.Clear();
                    this.txtSenha.Clear();
                    this.txtUsuario.Focus();
                }
                else
                {
                    // Successful login: hand over to the main menu window.
                    frm_Menu frmMenu = new frm_Menu();
                    this.Hide();
                    frmMenu.ShowDialog();
                    this.Close();
                    this.Visible = false;
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show("Erro ao fazer login " + ex.Message, "Erro");
                // NOTE(review): the connection is only closed on failure; the
                // success path seems to rely on the shared static connection
                // staying open for later forms — confirm before moving this
                // into a finally block.
                Modelo.Estaticos.fechar();
            }
        }
    }
}
}
| 32.892857 | 121 | 0.498371 | [
"MIT"
] | lucasestevan/SistemaBiomedica | Codigo Fonte C#/SistemaBiomedicinaCsharp/SistemaBiomedicinaCsharp25042019/Apresentacao/frm_Login.cs | 2,765 | C# |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading.Tasks;
using EasyNetQ;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using Newtonsoft.Json;
using Yunyong.EventBus.Events;
namespace Yunyong.EventBus.EasyNetQ
{
/// <summary>
/// <see cref="IEventBus"/> implementation backed by RabbitMQ via EasyNetQ.
/// Every publish/request is optionally audited to MongoDB (success and
/// error records) when event logging is enabled.
/// </summary>
/// <seealso cref="IEventBus" />
/// <seealso cref="IDisposable" />
internal class EventBusEasyNetQ : IEventBus, IDisposable
{
    // MongoDB database used to persist event publish / error audit records.
    private readonly IMongoDatabase _db = MongoDbContext.GetMongoDB();

    public EventBusEasyNetQ(IBus bus, IServiceProvider provider, string prefix, ILoggerFactory loggerFactory, bool enableEventLog = true)
    {
        Bus = bus;
        Provider = provider;
        Prefix = prefix;
        EnableEventLog = enableEventLog;
        Logger = loggerFactory.CreateLogger<EventBusEasyNetQ>();
    }

    private ILogger Logger { get; }

    private IBus Bus { get; }

    // Root service provider; a fresh scope is created per delivered message
    // so handlers get scoped dependencies.
    private IServiceProvider Provider { get; }

    // Prefix that namespaces subscription ids / event keys per application.
    private string Prefix { get; }

    // When true, publish/request activity is written to MongoDB.
    private bool EnableEventLog { get; }

    // Live subscriptions keyed by prefixed event type; disposed on shutdown.
    private Dictionary<string, ISubscriptionResult> Subscriptions { get; } =
        new Dictionary<string, ISubscriptionResult>();

    public void Dispose()
    {
        // Dispose every live subscription and broadcast an unsubscribe
        // notification so interested parties can clean up the queue.
        foreach (var key in Subscriptions.Keys)
        {
            var queueName = Subscriptions[key].Queue.Name;
            Subscriptions[key].Dispose();
            Publish(new EventUnsubscribeEvent {EventType = key, QueueName = queueName, Prefix = Prefix});
        }
    }

    /// <summary>
    /// Publishes an event asynchronously (fire-and-forget, anonymous sender).
    /// Failures are swallowed after being written to the error audit log.
    /// </summary>
    /// <typeparam name="TEvent">The type of the event.</typeparam>
    /// <param name="e">The event instance.</param>
    /// <param name="topic">The topic.</param>
    public void Publish<TEvent>(TEvent e, string topic = null) where TEvent : class, IEvent
    {
        var sw = new Stopwatch();
        sw.Start();
        try
        {
            if (string.IsNullOrEmpty(topic))
            {
                Bus.Publish(e);
            }
            else
            {
                Bus.Publish(e, topic);
            }

            // EventUnsubscribeEvent is infrastructure traffic (sent from
            // Dispose) and is deliberately excluded from the audit log.
            if (!(e is EventUnsubscribeEvent))
            {
                TraceEventPublishRecordLog(new EventPublishRecord
                {
                    Id = e.Id,
                    Token = e.Token,
                    EventType = GetEventKey<TEvent>(),
                    EventJson = JsonConvert.SerializeObject(e,
                        new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                    Topic = topic,
                    SendType = "Publish",
                    Time = DateTime.Now.ToUniversalTime()
                });
            }
        }
        catch (Exception ex)
        {
            // Publish failures are not rethrown; they are recorded as error
            // audit records instead.
            if (!(e is EventUnsubscribeEvent))
            {
                TraceEventPublishRecordLog(new EventPublishErrorRecord
                {
                    Id = e.Id,
                    Token = e.Token,
                    EventType = GetEventKey<TEvent>(),
                    EventJson = JsonConvert.SerializeObject(e,
                        new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                    Topic = topic,
                    SendType = "Publish",
                    Error = ex.ToString(),
                    Time = DateTime.Now.ToUniversalTime()
                });
            }
        }
        finally
        {
            // Timing trace for every publish.
            sw.Stop();
            Logger.LogWarning($"{sw.Elapsed}\t{GetType().Name}.Publish({e.GetType().Name} e, string topic = {topic})");
        }
    }

    /// <summary>
    /// Subscribes a handler type to an event type, optionally filtered by
    /// topic, and announces the subscription via an
    /// <see cref="EventSubscribeEvent"/>.
    /// </summary>
    public void Subscribe<TEvent, TEventHandler>(string topic = null) where TEvent : class, IEvent
        where TEventHandler : class, IEventHandler<TEvent>
    {
        ISubscriptionResult result;
        try
        {
            if (string.IsNullOrEmpty(topic))
            {
                result = Bus.Subscribe<TEvent>(GetEventKey<TEvent>(),
                    e =>
                    {
                        try
                        {
                            // Scope per delivered message so the handler gets
                            // scoped services.
                            using (var scope = Provider.CreateScope())
                            {
                                var handler = scope.ServiceProvider.GetService<TEventHandler>();
                                Logger.LogWarning($"{handler.GetType().Name}.Handle({e.GetType().Name} e)");
                                handler?.Handle(e);
                            }
                        }
                        catch (Exception ex)
                        {
                            // Handler failures are logged and swallowed so the
                            // consumer keeps running.
                            Logger.LogError(ex, ex.Message);
                            // ignore
                        }
                    });
            }
            else
            {
                result = Bus.Subscribe<TEvent>(GetEventKey<TEvent>(),
                    e =>
                    {
                        try
                        {
                            using (var scope = Provider.CreateScope())
                            {
                                var handler = scope.ServiceProvider.GetService<TEventHandler>();
                                Logger.LogWarning($"{handler.GetType().Name}.Handle({e.GetType().Name} e)");
                                handler?.Handle(e);
                            }
                        }
                        catch
                        {
                            // ignore
                        }
                    },
                    x => x.WithTopic(topic));
            }
        }
        catch (Exception e)
        {
            // Subscription setup failures are fatal for the caller.
            Logger.LogError(e, e.Message);
            throw;
        }

        if (result != null)
        {
            Subscriptions[GetEventKey<TEvent>()] = result;
            // Avoid announcing the announcement event itself (would recurse).
            if (typeof(TEvent) != typeof(EventSubscribeEvent))
            {
                Publish(new EventSubscribeEvent
                {
                    EventType = GetEventKey<TEvent>(),
                    QueueName = result.Queue.Name,
                    Prefix = Prefix,
                    Handler = typeof(TEventHandler).FullName
                });
            }
        }
    }

    // Intentionally a no-op: unsubscription happens in Dispose.
    public void Unsubscribe<TEvent, TEventHandler>() where TEvent : class, IEvent
        where TEventHandler : class, IEventHandler<TEvent>, new()
    {
    }

    /// <summary>
    /// Synchronous request/response (anonymous sender). Returns null and
    /// writes an error audit record on failure.
    /// </summary>
    /// <typeparam name="TRequest">The type of the request.</typeparam>
    /// <typeparam name="TResponse">The type of the response.</typeparam>
    /// <param name="request">The request.</param>
    /// <returns>The response, or null if the request failed.</returns>
    public TResponse Request<TRequest, TResponse>(TRequest request)
        where TRequest : EventRequest where TResponse : EventResponse
    {
        try
        {
            var response = Bus.Request<TRequest, TResponse>(request);
            TraceEventPublishRecordLog(new EventPublishRecord
            {
                Id = request.Id,
                Token = request.Token,
                EventType = typeof(TRequest).FullName,
                EventJson = JsonConvert.SerializeObject(request,
                    new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                SendType = "Request",
                Time = DateTime.Now.ToUniversalTime()
            });
            return response;
        }
        catch (Exception ex)
        {
            TraceEventPublishRecordLog(new EventPublishErrorRecord
            {
                Id = request.Id,
                Token = request.Token,
                EventType = typeof(TRequest).FullName,
                EventJson = JsonConvert.SerializeObject(request,
                    new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                SendType = "Request",
                Error = ex.ToString(),
                Time = DateTime.Now.ToUniversalTime()
            });
            return null;
        }
    }

    /// <summary>
    /// Asynchronous request/response. Returns null and writes an error audit
    /// record on failure.
    /// </summary>
    public async Task<TResponse> RequestAsync<TRequest, TResponse>(TRequest request)
        where TRequest : EventRequest where TResponse : EventResponse
    {
        try
        {
            var response = await Bus.RequestAsync<TRequest, TResponse>(request);
            TraceEventPublishRecordLog(new EventPublishRecord
            {
                Id = request.Id,
                Token = request.Token,
                EventType = typeof(TRequest).FullName,
                EventJson = JsonConvert.SerializeObject(request,
                    new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                SendType = "RequestAsync",
                Time = DateTime.Now.ToUniversalTime()
            });
            return response;
        }
        catch (AggregateException ae)
        {
            // Mark every inner exception as handled and report failure as null.
            // NOTE(review): unlike the general catch below, this path writes
            // no error audit record — confirm that is intentional.
            ae.Flatten().Handle(it => true);
            return null;
        }
        catch (Exception ex)
        {
            TraceEventPublishRecordLog(new EventPublishErrorRecord
            {
                Id = request.Id,
                Token = request.Token,
                EventType = typeof(TRequest).FullName,
                EventJson = JsonConvert.SerializeObject(request,
                    new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                SendType = "RequestAsync",
                Error = ex.ToString(),
                Time = DateTime.Now.ToUniversalTime()
            });
            return null;
        }
    }

    /// <summary>
    /// Registers a synchronous responder for a request type; each incoming
    /// request is handled in its own DI scope and timed.
    /// </summary>
    public void Respond<TRequest, TResponse, TRequestHandler>() where TRequest : EventRequest
        where TResponse : EventResponse
        where TRequestHandler : IRequestHandler<TRequest, TResponse>
    {
        try
        {
            Bus.Respond<TRequest, TResponse>(request =>
            {
                using (var scope = Provider.CreateScope())
                {
                    var handler = scope.ServiceProvider.GetService<TRequestHandler>();
                    var sw = new Stopwatch();
                    sw.Start();
                    try
                    {
                        return handler.Handle(request);
                    }
                    finally
                    {
                        sw.Stop();
                        Logger.LogWarning($"{sw.Elapsed}\t{handler.GetType().Name}.Handle({request.GetType().Name} request)");
                    }
                }
            });
            // Announce the responder registration.
            Publish(new EventSubscribeEvent
            {
                EventType = GetEventKey<TRequest>(),
                Prefix = Prefix,
                Handler = typeof(TRequestHandler).FullName
            });
        }
        catch (Exception e)
        {
            // Registration failures are logged and swallowed.
            Logger.LogError(e, e.Message);
            // ignored
        }
    }

    /// <summary>
    /// Sends an event directly to a named queue (point-to-point), auditing
    /// success or failure. Failures are swallowed.
    /// </summary>
    public void Send<TEvent>(string queue, TEvent e) where TEvent : class, IEvent
    {
        try
        {
            Bus.Send(queue, e);
            TraceEventPublishRecordLog(new EventPublishRecord
            {
                Id = e.Id,
                Token = e.Token,
                EventType = typeof(TEvent).FullName,
                EventJson = JsonConvert.SerializeObject(e,
                    new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                SendType = "Send",
                Time = DateTime.Now.ToUniversalTime()
            });
        }
        catch (Exception ex)
        {
            TraceEventPublishRecordLog(new EventPublishErrorRecord
            {
                Id = e.Id,
                Token = e.Token,
                EventType = GetEventKey<TEvent>(),
                EventJson = JsonConvert.SerializeObject(e,
                    new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}),
                SendType = "Send",
                Error = ex.ToString(),
                Time = DateTime.Now.ToUniversalTime()
            });
        }
    }

    /// <summary>
    /// Receives events from a named queue with the given handler instance.
    /// </summary>
    public void Receive<TEvent, TEventHandler>(string queue, TEventHandler handler) where TEvent : class, IEvent
        where TEventHandler : IEventHandler<TEvent>
    {
        Bus.Receive<TEvent>(queue, handler.Handle);
    }

    /// <summary>
    /// Registers an asynchronous responder for a request type; each incoming
    /// request is handled in its own DI scope and timed.
    /// </summary>
    public void RespondAsync<TRequest, TResponse, TRequestHandler>()
        where TRequest : EventRequest
        where TResponse : EventResponse
        where TRequestHandler : IAsyncRequestHandler<TRequest, TResponse>
    {
        try
        {
            Bus.RespondAsync<TRequest, TResponse>(request =>
            {
                using (var scope = Provider.CreateScope())
                {
                    var handler = scope.ServiceProvider.GetService<TRequestHandler>();
                    var sw = new Stopwatch();
                    sw.Start();
                    try
                    {
                        return handler.Handle(request);
                    }
                    finally
                    {
                        sw.Stop();
                        Logger.LogWarning($"{sw.Elapsed}\t{handler.GetType().Name}.Handle({request.GetType().Name} request)");
                    }
                }
            });
        }
        catch (Exception e)
        {
            Logger.LogError(e, e.Message);
            // ignored
        }
    }

    // Writes an audit record to the MongoDB collection named after the
    // record type, and logs it, when event logging is enabled.
    private void TraceEventPublishRecordLog<TEventPublishRecord>(TEventPublishRecord record)
    {
        if (EnableEventLog)
        {
            var recordCollection = _db?.GetCollection<TEventPublishRecord>(typeof(TEventPublishRecord).Name);
            Logger.LogInformation(JsonConvert.SerializeObject(record, Formatting.Indented,
                new JsonSerializerSettings {ReferenceLoopHandling = ReferenceLoopHandling.Ignore}));
            recordCollection?.InsertOne(record);
        }
    }

    // Builds the prefixed key that namespaces an event type for this app.
    private string GetEventKey<T>()
    {
        return $"{Prefix}.{typeof(T).FullName}";
    }
}
} | 39.505769 | 141 | 0.457966 | [
"Apache-2.0"
] | yunyongtech/Yunyong | src/Yunyong/EventBus/Yunyong.EventBus.EasyNetQ/EventBusEasyNetQ.cs | 20,707 | C# |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Text;
namespace PdfiumViewerHB
{
/// <summary>
/// Describes all links on a page.
/// </summary>
public class PdfPageLinks
{
    /// <summary>
    /// All links of the page (read-only view).
    /// </summary>
    public IList<PdfPageLink> Links { get; private set; }

    internal PdfPageLinks(IList<PdfPageLink> links)
    {
        // nameof keeps the parameter name refactor-safe (was a string literal).
        if (links == null)
            throw new ArgumentNullException(nameof(links));

        // Wrap in a read-only collection so callers cannot mutate the list.
        Links = new ReadOnlyCollection<PdfPageLink>(links);
    }
}
}
| 23.222222 | 63 | 0.591707 | [
"Apache-2.0"
] | mosheb/PdfiumViewerHB | PdfiumViewerHB/PdfPageLinks.cs | 629 | C# |
using System;
using System.Buffers;
using System.Diagnostics;
using DefaultEcs.Internal.System;
using DefaultEcs.Threading;
namespace DefaultEcs.System
{
/// <summary>
/// Represents a base class to process updates on a given <see cref="EntitySet"/> instance.
/// Only <see cref="Entity.Get{T}()"/>, <see cref="Entity.Set{T}(in T)"/> and <see cref="Entity.SetSameAs{T}(in Entity)"/> operation on already present component type are safe.
/// </summary>
/// <typeparam name="T">The type of the object used as state to update the system.</typeparam>
public abstract class AEntitySetSystem<T> : ISystem<T>
{
#region Types
// Parallel work item that partitions the system's EntitySet across runner
// indices; reused across updates (state is mutated before each run).
private class Runnable : IParallelRunnable
{
    private readonly AEntitySetSystem<T> _system;

    // Set by the owning system before each parallel update.
    public T CurrentState;
    public int EntitiesPerIndex;

    public Runnable(AEntitySetSystem<T> system)
    {
        _system = system;
    }

    public void Run(int index, int maxIndex)
    {
        // Each index processes a contiguous slice; the last index takes the
        // remainder so every entity is covered exactly once.
        int start = index * EntitiesPerIndex;
        _system.Update(CurrentState, _system.Set.GetEntities(start, index == maxIndex ? _system.Set.Count - start : EntitiesPerIndex));
    }
}
#endregion
#region Fields
private readonly bool _useBuffer;
private readonly IParallelRunner _runner;
private readonly Runnable _runnable;
private readonly int _minEntityCountByRunnerIndex;
#endregion
#region Properties
/// <summary>
/// Gets the <see cref="EntitySet"/> instance on which this system operates.
/// </summary>
public EntitySet Set { get; }
/// <summary>
/// Gets the <see cref="DefaultEcs.World"/> instance on which this system operates.
/// </summary>
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
public World World { get; }
#endregion
#region Initialisation
// Core constructor: resolves the set from a factory (receiving this
// instance) and configures the parallel runner.
private AEntitySetSystem(Func<object, EntitySet> factory, IParallelRunner runner, int minEntityCountByRunnerIndex)
{
    Set = factory(this);
    World = Set.World;
    _runner = runner ?? DefaultParallelRunner.Default;
    _runnable = new Runnable(this);
    // With a single-threaded runner, int.MaxValue disables parallel
    // dispatch entirely.
    _minEntityCountByRunnerIndex = _runner.DegreeOfParallelism > 1 ? minEntityCountByRunnerIndex : int.MaxValue;
}

/// <summary>
/// Initialise a new instance of the <see cref="AEntitySetSystem{T}"/> class with the given <see cref="EntitySet"/> and <see cref="IParallelRunner"/>.
/// </summary>
/// <param name="set">The <see cref="EntitySet"/> on which to process the update.</param>
/// <param name="runner">The <see cref="IParallelRunner"/> used to process the update in parallel if not null.</param>
/// <param name="minEntityCountByRunnerIndex">The minimum number of <see cref="Entity"/> per runner index to use the given <paramref name="runner"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="set"/> is null.</exception>
protected AEntitySetSystem(EntitySet set, IParallelRunner runner, int minEntityCountByRunnerIndex = 0)
    : this(set is null ? throw new ArgumentNullException(nameof(set)) : _ => set, runner, minEntityCountByRunnerIndex)
{ }

/// <summary>
/// Initialise a new instance of the <see cref="AEntitySetSystem{T}"/> class with the given <see cref="EntitySet"/>.
/// </summary>
/// <param name="set">The <see cref="EntitySet"/> on which to process the update.</param>
/// <param name="useBuffer">Whether the entities should be copied before being processed.</param>
/// <exception cref="ArgumentNullException"><paramref name="set"/> is null.</exception>
protected AEntitySetSystem(EntitySet set, bool useBuffer = false)
    : this(set, null)
{
    _useBuffer = useBuffer;
}
/// <summary>
/// Initialise a new instance of the <see cref="AEntitySetSystem{T}"/> class with the given <see cref="DefaultEcs.World"/> and factory.
/// The current instance will be passed as the first parameter of the factory.
/// </summary>
/// <param name="world">The <see cref="DefaultEcs.World"/> from which to get the <see cref="Entity"/> instances to process the update.</param>
/// <param name="factory">The factory used to create the <see cref="EntitySet"/>.</param>
/// <param name="runner">The <see cref="IParallelRunner"/> used to process the update in parallel if not null.</param>
/// <param name="minEntityCountByRunnerIndex">The minimum number of <see cref="Entity"/> per runner index to use the given <paramref name="runner"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="world"/> is null.</exception>
/// <exception cref="ArgumentNullException"><paramref name="factory"/> is null.</exception>
protected AEntitySetSystem(World world, Func<object, World, EntitySet> factory, IParallelRunner runner, int minEntityCountByRunnerIndex)
: this(world is null ? throw new ArgumentNullException(nameof(world)) : factory is null ? throw new ArgumentNullException(nameof(factory)) : o => factory(o, world), runner, minEntityCountByRunnerIndex)
{ }
/// <summary>
/// Initialise a new instance of the <see cref="AEntitySetSystem{T}"/> class with the given <see cref="DefaultEcs.World"/>.
/// To create the inner <see cref="EntitySet"/>, <see cref="WithAttribute"/> and <see cref="WithoutAttribute"/> attributes will be used.
/// </summary>
/// <param name="world">The <see cref="DefaultEcs.World"/> from which to get the <see cref="Entity"/> instances to process the update.</param>
/// <param name="runner">The <see cref="IParallelRunner"/> used to process the update in parallel if not null.</param>
/// <param name="minEntityCountByRunnerIndex">The minimum number of <see cref="Entity"/> per runner index to use the given <paramref name="runner"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="world"/> is null.</exception>
protected AEntitySetSystem(World world, IParallelRunner runner, int minEntityCountByRunnerIndex = 0)
: this(world, DefaultFactory, runner, minEntityCountByRunnerIndex)
{ }
/// <summary>
/// Initialise a new instance of the <see cref="AEntitySetSystem{T}"/> class with the given <see cref="DefaultEcs.World"/>.
/// To create the inner <see cref="EntitySet"/>, <see cref="WithAttribute"/> and <see cref="WithoutAttribute"/> attributes will be used.
/// </summary>
/// <param name="world">The <see cref="DefaultEcs.World"/> from which to get the <see cref="Entity"/> instances to process the update.</param>
/// <param name="factory">The factory used to create the <see cref="EntitySet"/>.</param>
/// <param name="useBuffer">Whether the entities should be copied before being processed.</param>
/// <exception cref="ArgumentNullException"><paramref name="world"/> is null.</exception>
/// <exception cref="ArgumentNullException"><paramref name="factory"/> is null.</exception>
protected AEntitySetSystem(World world, Func<object, World, EntitySet> factory, bool useBuffer)
: this(world, factory, null, 0)
{
_useBuffer = useBuffer;
}
/// <summary>
/// Initialise a new instance of the <see cref="AEntitySetSystem{T}"/> class with the given <see cref="DefaultEcs.World"/>.
/// To create the inner <see cref="EntitySet"/>, <see cref="WithAttribute"/> and <see cref="WithoutAttribute"/> attributes will be used.
/// </summary>
/// <param name="world">The <see cref="DefaultEcs.World"/> from which to get the <see cref="Entity"/> instances to process the update.</param>
/// <param name="useBuffer">Whether the entities should be copied before being processed.</param>
/// <exception cref="ArgumentNullException"><paramref name="world"/> is null.</exception>
protected AEntitySetSystem(World world, bool useBuffer = false)
: this(world, DefaultFactory, useBuffer)
{ }
#endregion
#region Methods
private static EntitySet DefaultFactory(object o, World w) => EntityRuleBuilderFactory.Create(o.GetType())(o, w).AsSet();
/// <summary>
/// Performs a pre-update treatment.
/// </summary>
/// <param name="state">The state to use.</param>
protected virtual void PreUpdate(T state) { }
/// <summary>
/// Performs a post-update treatment.
/// </summary>
/// <param name="state">The state to use.</param>
protected virtual void PostUpdate(T state) { }
/// <summary>
/// Update the given <see cref="Entity"/> instance once.
/// </summary>
/// <param name="state">The state to use.</param>
/// <param name="entity">The <see cref="Entity"/> instance to update.</param>
protected virtual void Update(T state, in Entity entity) { }
/// <summary>
/// Update the given <see cref="Entity"/> instances once.
/// </summary>
/// <param name="state">The state to use.</param>
/// <param name="entities">The <see cref="Entity"/> instances to update.</param>
protected virtual void Update(T state, ReadOnlySpan<Entity> entities)
{
foreach (ref readonly Entity entity in entities)
{
Update(state, entity);
}
}
#endregion
#region ISystem
/// <summary>
/// Gets or sets whether the current <see cref="AEntitySetSystem{T}"/> instance should update or not.
/// </summary>
public bool IsEnabled { get; set; } = true;
/// <summary>
/// Updates the system once.
/// Does nothing if <see cref="IsEnabled"/> is false or if the inner <see cref="EntitySet"/> is empty.
/// </summary>
/// <param name="state">The state to use.</param>
public void Update(T state)
{
if (IsEnabled && Set.Count > 0)
{
PreUpdate(state);
if (_useBuffer)
{
Entity[] buffer = ArrayPool<Entity>.Shared.Rent(Set.Count);
Set.GetEntities().CopyTo(buffer);
Update(state, new ReadOnlySpan<Entity>(buffer, 0, Set.Count));
ArrayPool<Entity>.Shared.Return(buffer);
}
else
{
_runnable.EntitiesPerIndex = Set.Count / _runner.DegreeOfParallelism;
if (_runnable.EntitiesPerIndex < _minEntityCountByRunnerIndex)
{
Update(state, Set.GetEntities());
}
else
{
_runnable.CurrentState = state;
_runner.Run(_runnable);
}
}
Set.Complete();
PostUpdate(state);
}
}
#endregion
#region IDisposable
/// <summary>
/// Disposes of the inner <see cref="EntitySet"/> instance.
/// </summary>
public virtual void Dispose()
{
GC.SuppressFinalize(this);
Set.Dispose();
}
#endregion
}
}
| 45.558594 | 213 | 0.604647 | [
"MIT-0"
] | DagobertDev/DefaultEcs | source/DefaultEcs/System/AEntitySetSystem.cs | 11,665 | C# |
// <copyright file="XmpIptcProvider.cs" company="Taasss">Copyright (c) 2009 All Right Reserved</copyright>
// <author>Ben Vincent</author>
// <date>2010-02-18</date>
// <summary>XmpIptcProvider Class</summary>
namespace Fotofly.MetadataProviders
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Text;
using System.Windows.Media.Imaging;
using Fotofly.BitmapMetadataTools;
using Fotofly.MetadataQueries;
public class XmpIptcProvider : BaseProvider
{
    public XmpIptcProvider(BitmapMetadata bitmapMetadata)
        : base(bitmapMetadata)
    {
    }

    /// <summary>
    /// Address assembled from the "location shown" XMP IPTC fields
    /// (country, city, region, sublocation). Returns an empty <see cref="Address"/>
    /// when the stored fields do not form a valid address.
    /// </summary>
    public Address AddressOfLocationShown
    {
        get
        {
            Address address = new Address();
            address.Country = this.LocationShownCountry;
            address.City = this.LocationShownCity;
            address.Region = this.LocationShownRegion;
            address.AddressLine = this.LocationShownSubLocation;

            if (address.IsValidAddress)
            {
                return address;
            }
            else
            {
                return new Address();
            }
        }

        set
        {
            this.LocationShownCountry = value.Country;
            this.LocationShownCity = value.City;
            this.LocationShownRegion = value.Region;
            this.LocationShownSubLocation = value.AddressLine;
        }
    }

    /// <summary>
    /// City
    /// </summary>
    public string LocationShownCity
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationShownCity.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationShownCity.Query, this.LocationShownCity, value); }
    }

    /// <summary>
    /// Country
    /// </summary>
    public string LocationShownCountry
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationShownCountryName.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationShownCountryName.Query, this.LocationShownCountry, value); }
    }

    /// <summary>
    /// Region, also used for State, County or Province
    /// </summary>
    public string LocationShownRegion
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationShownProvinceState.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationShownProvinceState.Query, this.LocationShownRegion, value); }
    }

    /// <summary>
    /// Sublocation
    /// </summary>
    public string LocationShownSubLocation
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationShownSublocation.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationShownSublocation.Query, this.LocationShownSubLocation, value); }
    }

    /// <summary>
    /// Address assembled from the "location created" XMP IPTC fields
    /// (country, city, region, sublocation). Returns an empty <see cref="Address"/>
    /// when the stored fields do not form a valid address.
    /// </summary>
    public Address AddressOfLocationCreated
    {
        get
        {
            Address address = new Address();
            address.Country = this.LocationCreatedCountry;
            address.City = this.LocationCreatedCity;
            address.Region = this.LocationCreatedRegion;
            address.AddressLine = this.LocationCreatedSubLocation;

            if (address.IsValidAddress)
            {
                return address;
            }
            else
            {
                return new Address();
            }
        }

        set
        {
            this.LocationCreatedCountry = value.Country;
            this.LocationCreatedCity = value.City;
            this.LocationCreatedRegion = value.Region;
            this.LocationCreatedSubLocation = value.AddressLine;
        }
    }

    /// <summary>
    /// City
    /// </summary>
    public string LocationCreatedCity
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationCreatedCity.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationCreatedCity.Query, this.LocationCreatedCity, value); }
    }

    /// <summary>
    /// Country
    /// </summary>
    public string LocationCreatedCountry
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationCreatedCountryName.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationCreatedCountryName.Query, this.LocationCreatedCountry, value); }
    }

    /// <summary>
    /// Region, also used for State, County or Province
    /// </summary>
    public string LocationCreatedRegion
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationCreatedProvinceState.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationCreatedProvinceState.Query, this.LocationCreatedRegion, value); }
    }

    /// <summary>
    /// Sublocation
    /// </summary>
    public string LocationCreatedSubLocation
    {
        get { return this.GetStringValue(XmpIptcQueries.LocationCreatedSublocation.Query); }
        set { this.SetStringValue(XmpIptcQueries.LocationCreatedSublocation.Query, this.LocationCreatedSubLocation, value); }
    }

    /// <summary>
    /// Reads a string metadata value, normalising null to <see cref="string.Empty"/>.
    /// Shared by every string property above (replaces eight identical getter bodies).
    /// </summary>
    /// <param name="query">The metadata query path to read.</param>
    /// <returns>The stored value, or an empty string when absent or empty.</returns>
    private string GetStringValue(string query)
    {
        string returnValue = this.BitmapMetadata.GetQuery<string>(query);

        if (string.IsNullOrEmpty(returnValue))
        {
            return string.Empty;
        }
        else
        {
            return returnValue;
        }
    }

    /// <summary>
    /// Writes a string metadata value only when it has changed; a null or empty
    /// new value removes the query instead of storing an empty string.
    /// Shared by every string property above (replaces eight identical setter bodies).
    /// </summary>
    /// <param name="query">The metadata query path to write.</param>
    /// <param name="currentValue">The value currently stored for the query.</param>
    /// <param name="newValue">The value to store.</param>
    private void SetStringValue(string query, string currentValue, string newValue)
    {
        if (this.ValueHasChanged(newValue, currentValue))
        {
            if (string.IsNullOrEmpty(newValue))
            {
                this.BitmapMetadata.RemoveQuery(query);
            }
            else
            {
                this.BitmapMetadata.SetQuery(query, newValue);
            }
        }
    }
}
}
| 30.434066 | 125 | 0.449088 | [
"MIT"
] | Azure-Samples/AIDeveloperResources | AIBlog-IntelligentTravelJournal/FotoFlyPorted/MetadataProviders/XmpIptcProvider.cs | 11,078 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Threading.Tasks;
using ITSMSkill.Responses.Knowledge;
using ITSMSkill.Responses.Main;
using ITSMSkill.Responses.Shared;
using ITSMSkill.Responses.Ticket;
using ITSMSkill.Tests.API.Fakes;
using ITSMSkill.Tests.Flow.Strings;
using ITSMSkill.Tests.Flow.Utterances;
using ITSMSkill.Utilities;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace ITSMSkill.Tests.Flow
{
[TestClass]
[TestCategory("UnitTests")]
// Conversation-flow tests for the "close ticket" skill dialog.
// Each test scripts an exact send/assert exchange; the ordering of the fluent
// calls IS the expected conversation, so statements must not be reordered.
public class TicketCloseFlowTests : SkillTestBase
{
    /// <summary>
    /// Happy path via the standalone bot: user asks to close a ticket with no
    /// details, so the dialog prompts for the ticket number and then the reason
    /// before confirming the ticket was closed.
    /// </summary>
    [TestMethod]
    public async Task CloseTest()
    {
        await this.GetTestFlow()
            .Send(StartActivity)
            .AssertReply(AssertContains(MainResponses.WelcomeMessage))
            .AssertReply(AssertContains(MainResponses.FirstPromptMessage))
            .Send(TicketCloseUtterances.Close)
            .AssertReply(ShowAuth())
            .Send(MagicCode)  // completes the OAuth prompt
            .AssertReply(AssertContains(SharedResponses.InputTicketNumber))
            .Send(MockData.CloseTicketNumber)
            .AssertReply(AssertContains(TicketResponses.TicketTarget, null, CardStrings.Ticket))
            .AssertReply(AssertContains(SharedResponses.InputReason))
            .Send(MockData.CloseTicketReason)
            .AssertReply(AssertContains(TicketResponses.TicketClosed, null, CardStrings.TicketUpdate))
            .AssertReply(ActionEndMessage())
            .StartTestAsync();
    }

    /// <summary>
    /// Standalone bot, single-utterance path: the utterance already carries the
    /// ticket number and reason, so the dialog skips the prompts and only asks
    /// the user to confirm the extracted reason.
    /// </summary>
    [TestMethod]
    public async Task CloseWithNumberReasonTest()
    {
        // Token values expected inside the ConfirmReason response template.
        var confirmReason = new Dictionary<string, object>
        {
            { "Reason", MockData.CloseTicketReason }
        };

        await this.GetTestFlow()
            .Send(StartActivity)
            .AssertReply(AssertContains(MainResponses.WelcomeMessage))
            .AssertReply(AssertContains(MainResponses.FirstPromptMessage))
            .Send(TicketCloseUtterances.CloseWithNumberReason)
            .AssertReply(ShowAuth())
            .Send(MagicCode)  // completes the OAuth prompt
            .AssertReply(AssertContains(TicketResponses.TicketTarget, null, CardStrings.Ticket))
            .AssertReply(AssertStartsWith(SharedResponses.ConfirmReason, confirmReason))
            .Send(NonLuisUtterances.Yes)
            .AssertReply(AssertContains(TicketResponses.TicketClosed, null, CardStrings.TicketUpdate))
            .AssertReply(ActionEndMessage())
            .StartTestAsync();
    }

    /// <summary>
    /// Same happy path as <see cref="CloseTest"/>, but invoked as a skill action
    /// (no welcome exchange; ends with a skill action-result message).
    /// </summary>
    [TestMethod]
    public async Task CloseTestAction()
    {
        await this.GetSkillTestFlow()
            .Send(TicketCloseUtterances.CloseAction)
            .AssertReply(ShowAuth())
            .Send(MagicCode)  // completes the OAuth prompt
            .AssertReply(AssertContains(SharedResponses.InputTicketNumber))
            .Send(MockData.CloseTicketNumber)
            .AssertReply(AssertContains(TicketResponses.TicketTarget, null, CardStrings.Ticket))
            .AssertReply(AssertContains(SharedResponses.InputReason))
            .Send(MockData.CloseTicketReason)
            .AssertReply(AssertContains(TicketResponses.TicketClosed, null, CardStrings.TicketUpdate))
            .AssertReply(SkillActionEndMessage(true))
            .StartTestAsync();
    }

    /// <summary>
    /// Same single-utterance path as <see cref="CloseWithNumberReasonTest"/>,
    /// but invoked as a skill action.
    /// </summary>
    [TestMethod]
    public async Task CloseWithNumberReasonActionTest()
    {
        // Token values expected inside the ConfirmReason response template.
        var confirmReason = new Dictionary<string, object>
        {
            { "Reason", MockData.CloseTicketReason }
        };

        await this.GetSkillTestFlow()
            .Send(TicketCloseUtterances.CloseWithNumberReasonAction)
            .AssertReply(ShowAuth())
            .Send(MagicCode)  // completes the OAuth prompt
            .AssertReply(AssertContains(TicketResponses.TicketTarget, null, CardStrings.Ticket))
            .AssertReply(AssertStartsWith(SharedResponses.ConfirmReason, confirmReason))
            .Send(NonLuisUtterances.Yes)
            .AssertReply(AssertContains(TicketResponses.TicketClosed, null, CardStrings.TicketUpdate))
            .AssertReply(SkillActionEndMessage(true))
            .StartTestAsync();
    }
}
}
| 42.221154 | 106 | 0.63744 | [
"MIT"
] | ConnectionMaster/botframework-components | skills/csharp/tests/itsmskill.tests/Flow/TicketCloseFlowTests.cs | 4,393 | C# |
using System;
using System.Net;
using System.Text;
using System.IO;
using System.Text.Json;
using System.Security.Cryptography;
using System.Threading.Tasks;
using System.Threading;
using System.Collections.Generic;
using HeyRed.Mime;
using System.Linq;
namespace BackblazeUploader
{
/// <summary>
/// Does all the work for multi part uploads
/// </summary>
public class MultiPartUpload
{
#region Properties and variables
/// <summary>
/// Pivate string containing the path to the file being uploaded.
/// </summary>
private string mpathToFile;
/// <summary>
/// The path to the file being uploaded.
/// </summary>
public string pathToFile { get {
return mpathToFile;
}
set
{
mpathToFile = value;
fileMime = MimeTypesMap.GetMimeType(value);
}
}
/// <summary>
/// mime type fype for the file
/// </summary>
string fileMime;
/// <summary>
/// <see cref="FileInfo"/> object for the file being uploaded.
/// </summary>
FileInfo fileInfo;
/// <summary>
/// Instance of <see cref="UploadDetails"/> holding the info about upload progress. Should only be updated once a lock on <see cref="uploadDetailsLock"/> has been obtained.
/// </summary>
UploadDetails uploadDetails;
/// <summary>
/// Size of local file in bytes
/// </summary>
long localFileSize;
/// <summary>
/// Lock object for <see cref="uploadDetails"/>
/// </summary>
object uploadDetailsLock = new object();
/// <summary>
/// Name of the file being uploaded
/// </summary>
String fileName;
/// <summary>
/// Contains the fileDetails about the file being uploaded.
/// </summary>
public FileDetails fileDetails;
/// <summary>
/// Flag to let the thread manager know there are no remaining parts.
/// </summary>
private bool noMoreThreads = false;
/// <summary>
/// Max number of threads specified by the cli arguments, default is 20.
/// </summary>
int maxThreads;
/// <summary>
/// Used to monitor bandwidth performance
/// </summary>
BandwidthMonitor bandwidthMonitor = new BandwidthMonitor();
/// <summary>
/// List of all the threads we have created, access required by multiple parts of the page.
/// </summary>
List<Thread> AllThreads = new List<Thread>();
#endregion
#region Constructor
/// <summary>
/// Constructor sets various items based on the passed in data.
/// </summary>
/// <param name="pathToFile">Path of the file to be uploaded</param>
public MultiPartUpload(string pathToFile)
{
//Set filePath from the incoming because it generates the mime type
this.pathToFile = pathToFile;
//Create fileInfo object
fileInfo = new FileInfo(pathToFile);
//Set local file size
localFileSize = fileInfo.Length;
//Get maximum number of threads from parsed options
maxThreads = Singletons.options.Threads;
//Sets filename from fileInfo
fileName = fileInfo.Name;
}
#endregion
#region Methods
#region Controllers, Starters & Finishers
/// <summary>
/// Manages every aspect of the upload from start to finish.
/// </summary>
public void UploadFile()
{
//Create a timestamp for logging time spent
DateTime Start = DateTime.Now;
//Create a new upload details to hold the details of the upload
uploadDetails = new UploadDetails();
//Start large file upload
StartLargeFile();
//Upload the parts
RunUploadWorkers();
//Finish Large File upload
FinishLargeFile();
#region Output final status message.
//Get end datetime
DateTime End = DateTime.Now;
//Get the difference between the two as a string
string diffInSeconds = Math.Round((End - Start).TotalSeconds, 1).ToString();
//Get Mbps
//First get MB
var MBs = fileInfo.Length * 0.00000095367432;
//Then mb
var Mbs = MBs * 8;
//Then calculate Mbps
var Mbps = Mbs / (End - Start).TotalSeconds;
StaticHelpers.DebugLogger($"Operation Finished. Operation Took: {diffInSeconds} seconds and transferred {Math.Round(MBs, 2)}MBs at a speed of {Math.Round(Mbps, 2)}Mbps", DebugLevel.Info);
#endregion
}
/// <summary>
/// Calls b2_start_large_file and gets the required idents to begin upload.
/// </summary>
public async void StartLargeFile()
{
// Setup JSON to post.
String startLargeFileJsonStr = "{\"bucketId\":\"" + Singletons.authenticationDetails.bucketId + "\",\"fileName\":\"" + fileName + "\",\"contentType\":\"" + fileMime + "\"}";
byte[] jsonData = Encoding.UTF8.GetBytes(startLargeFileJsonStr);
// Send over the wire
HttpWebRequest startLargeFileRequest = (HttpWebRequest)WebRequest.Create(Singletons.authenticationDetails.apiUrl + "/b2api/v2/b2_start_large_file");
startLargeFileRequest.Method = "POST";
startLargeFileRequest.Headers.Add("Authorization", Singletons.authenticationDetails.authorizationToken);
startLargeFileRequest.ContentType = "application/json; charset=utf-8";
startLargeFileRequest.ContentLength = jsonData.Length;
using (Stream stream = startLargeFileRequest.GetRequestStream())
{
stream.Write(jsonData, 0, jsonData.Length);
stream.Close();
}
// Handle the response and print the json
try
{
HttpWebResponse startLargeFileResponse = (HttpWebResponse)startLargeFileRequest.GetResponse();
//Trying swapping this so we get a stream for deserialization
//using (StringReader responseReader = new StringReader(new StreamReader(startLargeFileResponse.GetResponseStream()).ReadToEnd()))
using (Stream responseStream = startLargeFileResponse.GetResponseStream())
{
fileDetails = await JsonSerializer.DeserializeAsync<FileDetails>(responseStream);
}
startLargeFileResponse.Close();
}
catch (WebException e)
{
using (HttpWebResponse errorResponse = (HttpWebResponse)e.Response)
{
StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {errorResponse.StatusCode}. Retrying....", DebugLevel.Warn );
using (StreamReader reader = new StreamReader(errorResponse.GetResponseStream()))
{
String text = reader.ReadToEnd();
StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {text}. Retrying....", DebugLevel.Warn );
}
}
}
}
/// <summary>
/// Finishes the file upload by calling b2_finish_large_file to combine all parts.
/// </summary>
public void FinishLargeFile()
{
// Create a request object and copy it to the memory stream.
B2FinishLargeFileRequest finishLargeFileData = new B2FinishLargeFileRequest
{
fileId = fileDetails.fileId,
partSha1Array = uploadDetails.partSha1Array
};
//So instead of using the old json serialized things going to use the new one...
string FinishLargeFileInfoJsonString = JsonSerializer.Serialize<B2FinishLargeFileRequest>(finishLargeFileData);
//Convert the string to a memory stream
byte[] byteArray = Encoding.UTF8.GetBytes(FinishLargeFileInfoJsonString);
MemoryStream finishLargeFileMemStream = new MemoryStream(byteArray);
HttpWebRequest finishLargeFileRequest = (HttpWebRequest)WebRequest.Create(Singletons.authenticationDetails.apiUrl + "/b2api/v2/b2_finish_large_file");
finishLargeFileRequest.Method = "POST";
finishLargeFileRequest.Headers.Add("Authorization", Singletons.authenticationDetails.authorizationToken);
finishLargeFileRequest.ContentType = "application/json; charset=utf-8";
finishLargeFileRequest.ContentLength = finishLargeFileMemStream.Length;
finishLargeFileMemStream.WriteTo(finishLargeFileRequest.GetRequestStream());
HttpWebResponse finishLargeFileResponse;
try
{
finishLargeFileResponse = (HttpWebResponse)finishLargeFileRequest.GetResponse();
}
catch (WebException e)
{
using (WebResponse r = e.Response)
{
HttpWebResponse httpResponse = (HttpWebResponse)r;
StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {httpResponse.StatusCode}. Retrying....", DebugLevel.Warn );
using (Stream dataE = r.GetResponseStream())
using (var reader = new StreamReader(dataE))
{
string text = reader.ReadToEnd();
StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {text}. Retrying....", DebugLevel.Warn );
}
}
}
}
#endregion
#region Thread Management
/// <summary>
/// Runs & Manages the upload workers.
/// </summary>
public void RunUploadWorkers()
{
//Start the bandwidth monitor
bandwidthMonitor.startMonitoring();
//Set WorkFinished to false
bool WorkFinished = false;
//Put up a blank status summary
StaticHelpers.UpdateSummary($"Progress: 0%");
//Start a continious loop until we have finished the work
while (WorkFinished == false)
{
//If we are under max threads and bandwidth monitor doesn't forbid it and we haven't already started all parts
if (maxThreads > AllThreads.Count() && bandwidthMonitor.CanIncrease == true && noMoreThreads == false)
{
//Create thread to run StartUploadWorker
Thread thread = new Thread(StartUploadWorker);
//Start the thread
thread.Start();
//Put the thread in our list of threads
AllThreads.Add(thread);
}
//Recalculate number of active threads by removing inactive
AllThreads.RemoveAll(thread => thread.ThreadState == System.Threading.ThreadState.Stopped);
//Output a debug message
StaticHelpers.DebugLogger($"Current number of threads = {AllThreads.Count}", DebugLevel.FullDebug);
//If whole file has been uploaded and all threads are stopped
if (uploadDetails.totalBytesSent >= localFileSize && AllThreads.Count() == 0)
{
//We have reached the end of parts to upload so tell the system not to upload anymore
WorkFinished = true;
}
//Add a wait so we aren't too aggressive in adding threads
Thread.Sleep(3000);
}
bandwidthMonitor.StopMonitoring = true;
StaticHelpers.DebugLogger("Upload has finished.", DebugLevel.Verbose);
}
#endregion
#region Worker
/// <summary>
/// Starts and is the upload worker that does the actual uploads.
/// </summary>
public async void StartUploadWorker()
{
StaticHelpers.DebugLogger("Starting an internal upload worker", DebugLevel.Verbose);
//Get our object containing our authorisation URLs
UploadPartsUrlDetails uploadPartsUrlDetails = await GetUploadPartUrl();
#region Loop where work is done
//While there are bytes still be sent
while (uploadDetails.totalBytesSent < localFileSize)
{
#region Check Bandwidth Monitor
//If the bandwidth monitor requires a reduction in usage
if (bandwidthMonitor.reduceUsage || bandwidthMonitor.urgentReduceUsage)
{
//Check thread count is greater than 1
if (AllThreads.Count(thread => thread.ThreadState != System.Threading.ThreadState.Stopped) > 1)
{
//Log to debug
StaticHelpers.DebugLogger("Received Kill Request from Bandwidth Monitor. Killing self....", DebugLevel.Verbose);
//Set reduceUsage to false
bandwidthMonitor.reduceUsage = false;
//Kill this thread
break;
} else
{
StaticHelpers.DebugLogger("Received Kill Request from Bandwidth Monitor HOWEVER as the only remaining thread I am ignoring.", DebugLevel.Verbose);
}
}
#endregion
#region Get details from uploadDetails & Data from file
//Create variables outside of the lock so we can set it inside but still access it outside
//For a snapshot of uploadDetails
UploadDetails uploadDetailsSnapshot = new UploadDetails();
//Create the byte array for the data we are going to use
byte[] data = new byte[Singletons.options.PartSize * (1000 * 1000)];
//Lock the uploadDetails
lock (uploadDetailsLock)
{
#region Check if we are on the last part or even if there are no parts left
//If there is nothing left to upload
if ((localFileSize - uploadDetails.totalBytesSent) == 0)
{
//Break out of the loop as there is no more work to do
break;
}
//If the remaining bytes are less the minimum part size
if ((localFileSize - uploadDetails.totalBytesSent) <= uploadDetails.minimumPartSize)
{
//Changes the bytes sent for part to the remaining number of bytes
uploadDetails.bytesSentForPart = (localFileSize - uploadDetails.totalBytesSent);
}
#endregion
#region Read & hash File
// Generate SHA1 Chunk
// Open stream of the file
FileStream f = File.OpenRead(pathToFile);
//Seek to the location in the file we are currently up to
f.Seek(uploadDetails.totalBytesSent, SeekOrigin.Begin);
//Read the data from the file that we are going to use this time
f.Read(data, 0, (int)uploadDetails.bytesSentForPart);
//Create a blank SHA1 hash
SHA1 sha1 = SHA1.Create();
//Hash the bytes in our current data and keep the hash in hashData
byte[] hashData = sha1.ComputeHash(data, 0, (int)uploadDetails.bytesSentForPart);
//Dispose of the hash
sha1.Dispose();
//Create a string builder to manipulate the hash
StringBuilder sb = new StringBuilder();
//Add data to every byte in the range
foreach (byte b in hashData)
{
sb.Append(b.ToString("x2"));
}
//Close the file read because we now have the data
f.Close();
//Add the hash to the hash array
uploadDetails.partSha1Array.Add(sb.ToString());
#endregion
#region Finalise Operations on uploadDetails so we can release lock
//Get all the values we might need to use internally. OR just make a snapshot of Upload Details? (Yes this should work!)
uploadDetailsSnapshot = uploadDetails.CloneMe();
//Update the actual uploadDetails with what we intend to do.
//Increment the partNo
uploadDetails.partNo++;
//Increment the totalBytesSent
uploadDetails.totalBytesSent = uploadDetails.totalBytesSent + uploadDetails.bytesSentForPart;
#endregion
}
#endregion
//To count number of failed attempts
int WebRequestAttempt = 1;
//To allow retry of the failed request
RetryPartUpload:
//Output urls for debugging
//StaticHelpers.DebugLogger("UploadPartsURL is: " + uploadPartsUrlDetails.uploadUrl + ". Key is: " + uploadPartsUrlDetails.authorizationToken, DebugLevel.FullDebug);
//Start a new web request
HttpWebRequest uploadPartRequest = (HttpWebRequest)WebRequest.Create(uploadPartsUrlDetails.uploadUrl);
//Set to post
uploadPartRequest.Method = "POST";
//Set the request timeout to 5 minutes
uploadPartRequest.Timeout = 5*60*1000;
//Set authorization token (using the one for the current uploadPartUrl)
//Intentionally generating error:
//uploadPartRequest.Headers.Add("Authorization", uploadPartsUrlDetails.authorizationToken + "r");
uploadPartRequest.Headers.Add("Authorization", uploadPartsUrlDetails.authorizationToken);
//Set the part number
uploadPartRequest.Headers.Add("X-Bz-Part-Number", uploadDetailsSnapshot.partNo.ToString());
//Set the sha1 hash from the array (minus one on the part number because 0-index array
uploadPartRequest.Headers.Add("X-Bz-Content-Sha1", (String)uploadDetailsSnapshot.partSha1Array[(uploadDetailsSnapshot.partNo - 1)]);
//Set content type to json
uploadPartRequest.ContentType = "application/json; charset=utf-8";
//Set the content length to the bytes sent for the part
uploadPartRequest.ContentLength = uploadDetailsSnapshot.bytesSentForPart;
//Create a stream to use for the uploadPartRequest (this may be the one to change to a filestream)
using (Stream stream = uploadPartRequest.GetRequestStream())
{
//Write the data (through the stream?) to the uploadPartRequest
stream.Write(data, 0, (int)uploadDetailsSnapshot.bytesSentForPart);
//Close the stream
stream.Close();
}
//Set upload response to null
HttpWebResponse uploadPartResponse = null;
//Verbose message
StaticHelpers.DebugLogger("Starting upload of part " + uploadDetailsSnapshot.partNo, DebugLevel.Verbose);
//Try the upload
try
{
//Try the upload and set the upload part response to the response
uploadPartResponse = (HttpWebResponse)uploadPartRequest.GetResponse();
}
//If theres an exception catch and output it
catch (WebException e)
{
if (e.Response == null)
{
StaticHelpers.DebugLogger("Upload has failed with error: " + e.Message, DebugLevel.Warn);
}
else
{
using (WebResponse r = e.Response)
{
HttpWebResponse httpResponse = (HttpWebResponse)r;
StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {httpResponse.StatusCode}. Retrying....", DebugLevel.Warn );
using (Stream dataE = r.GetResponseStream())
using (var reader = new StreamReader(dataE))
{
string text = reader.ReadToEnd();
StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {text}. Retrying....", DebugLevel.Warn );
}
}
}
//If we have failed less than 5 times
if (WebRequestAttempt < 5)
{
//Log a message
StaticHelpers.DebugLogger("Upload has failed, getting fresh uploadparturl and retrying....", DebugLevel.Verbose);
//Get our object containing our authorisation URLs
uploadPartsUrlDetails = await GetUploadPartUrl();
//Output fresh url for debugging
StaticHelpers.DebugLogger("Fresh UploadPartsURL is: " + uploadPartsUrlDetails.uploadUrl + ". Key is: " + uploadPartsUrlDetails.authorizationToken, DebugLevel.FullDebug);
//Wait a while
int secToWait = WebRequestAttempt * 2;
Thread.Sleep(secToWait * 1000);
//Increment counter
WebRequestAttempt++;
//Retry
goto RetryPartUpload;
}
}
//Close the upload part response
uploadPartResponse.Close();
//Lock so we can work on uploadDetails
lock (uploadDetailsLock)
{
//Update uploadDetails with the fact this part has been completed
uploadDetails.BytesConfirmedSent = uploadDetails.BytesConfirmedSent + uploadDetailsSnapshot.bytesSentForPart;
//Calculate the decimal amount completed
double decimalPercentage = (double)uploadDetails.BytesConfirmedSent / (double)localFileSize;
//Calculate the percentage completed
int percentage = (int)(decimalPercentage * 100);
//Output to the console the percentage completed
StaticHelpers.UpdateSummary($"Progress: {percentage}%");
}
//Log to the debugger what part we've just done
StaticHelpers.DebugLogger("Uploaded Part " + uploadDetailsSnapshot.partNo, DebugLevel.Verbose);
}
#endregion
//Check whether we have finished or if just this thread being killed:
if (uploadDetails.totalBytesSent >= localFileSize)
{
//We have reached the end of parts to upload so tell the system not to upload anymore
noMoreThreads = true;
}
StaticHelpers.DebugLogger("Internal upload worker is dead.", DebugLevel.Verbose);
}
#endregion
#region Helper Methods
/// <summary>
/// Gets a fresh upload-part URL (and matching authorization token) for a worker function
/// by POSTing to the Backblaze B2 b2_get_upload_part_url endpoint.
/// Retries up to 5 times with a linear backoff (2s, 4s, 6s, 8s) on failure.
/// </summary>
/// <returns>
/// The populated <see cref="UploadPartsUrlDetails"/>. If all attempts fail the returned
/// object is left with its default (empty) values, matching the original behaviour.
/// </returns>
public async Task<UploadPartsUrlDetails> GetUploadPartUrl()
{
    //Taken from https://www.backblaze.com/b2/docs/b2_get_upload_part_url.html with edits
    //Create an UploadPartsUrlDetails data object to hold the data
    UploadPartsUrlDetails uploadPartsUrlDetails = new UploadPartsUrlDetails();
    //Request body: the file id whose part-upload URL we want.
    String getUploadUrlJsonStr = "{\"fileId\":\"" + fileDetails.fileId + "\"}";
    byte[] getUploadUrlJsonData = Encoding.UTF8.GetBytes(getUploadUrlJsonStr);
    //Up to 5 attempts in total; loop replaces the previous goto-based retry.
    for (int webRequestAttempt = 1; webRequestAttempt <= 5; webRequestAttempt++)
    {
        //Build a fresh request each attempt (HttpWebRequest instances are single-use).
        HttpWebRequest getUploadUrlRequest = (HttpWebRequest)WebRequest.Create(Singletons.authenticationDetails.apiUrl + "/b2api/v2/b2_get_upload_part_url");
        getUploadUrlRequest.Method = "POST";
        getUploadUrlRequest.Headers.Add("Authorization", Singletons.authenticationDetails.authorizationToken);
        getUploadUrlRequest.ContentType = "application/json; charset=utf-8";
        getUploadUrlRequest.ContentLength = getUploadUrlJsonData.Length;
        using (Stream stream = getUploadUrlRequest.GetRequestStream())
        {
            stream.Write(getUploadUrlJsonData, 0, getUploadUrlJsonData.Length);
        }
        // Handle the response and extract the upload URL + authorization token.
        try
        {
            HttpWebResponse getUploadUrlResponse = (HttpWebResponse)getUploadUrlRequest.GetResponse();
            using (Stream responseStream = getUploadUrlResponse.GetResponseStream())
            {
                UploadPartUrlResponse uploadPartUrlResponse = await JsonSerializer.DeserializeAsync<UploadPartUrlResponse>(responseStream);
                uploadPartsUrlDetails.authorizationToken = uploadPartUrlResponse.authorizationToken;
                uploadPartsUrlDetails.uploadUrl = uploadPartUrlResponse.uploadUrl;
            }
            getUploadUrlResponse.Close();
            //Success - stop retrying.
            break;
        }
        catch (WebException e)
        {
            //BUG FIX: e.Response is null for transport-level failures (timeout, DNS,
            //connection refused). The previous code dereferenced it unconditionally and
            //threw a NullReferenceException instead of retrying.
            if (e.Response == null)
            {
                StaticHelpers.DebugLogger("Getting upload part URL failed with error: " + e.Message, DebugLevel.Warn);
            }
            else
            {
                //Print error to console before retrying
                using (HttpWebResponse errorResponse = (HttpWebResponse)e.Response)
                {
                    StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {errorResponse.StatusCode}. Retrying....", DebugLevel.Warn );
                    using (StreamReader reader = new StreamReader(errorResponse.GetResponseStream()))
                    {
                        String text = reader.ReadToEnd();
                        StaticHelpers.DebugLogger($"Internal Worker Error with API.Error code: {text}. Retrying....", DebugLevel.Warn );
                    }
                }
            }
            //If we have attempts left, wait with a linear backoff before retrying.
            if (webRequestAttempt < 5)
            {
                StaticHelpers.DebugLogger("We have failed to get a part upload URL, retrying....", DebugLevel.Verbose);
                int secToWait = webRequestAttempt * 2;
                //Task.Delay instead of Thread.Sleep: don't block a thread-pool thread in an async method.
                await Task.Delay(secToWait * 1000);
            }
        }
    }
    return uploadPartsUrlDetails;
}
#endregion
#endregion
}
}
| 49.740484 | 200 | 0.555339 | [
"MIT"
] | genericitperson/BackblazeUploader | src/BackblazeUploader/MultiPartUpload.cs | 28,750 | C# |
using System.Net;
namespace Twilio.Http
{
/// <summary>
/// Twilio response
/// </summary>
public class Response
{
/// <summary>
/// HTTP status code
/// </summary>
public HttpStatusCode StatusCode { get; }
/// <summary>
/// Content string
/// </summary>
public string Content { get; }
/// <summary>
/// Create a new Response
/// </summary>
/// <param name="statusCode">HTTP status code</param>
/// <param name="content">Content string</param>
public Response (HttpStatusCode statusCode, string content)
{
StatusCode = statusCode;
Content = content;
}
}
}
| 23 | 67 | 0.514946 | [
"MIT"
] | FMV1491/twilio-csharp | src/Twilio/Http/Response.cs | 736 | C# |
namespace FileHelpers
{
/// <summary>Indicate the method used to calculate the current progress</summary>
public enum ProgressMode
{
/// <summary>Notify the percent completed.</summary>
NotifyPercent,
/// <summary>Notify the Record completed.</summary>
NotifyRecords,
/// <summary>Notify the bytes read so far</summary>
NotifyBytes,
/// <summary>Don't call to the progress handler.</summary>
DontNotify = 0
}
} | 23.789474 | 83 | 0.685841 | [
"MIT"
] | mcavigelli/FileHelpers | FileHelpers/Enums/ProgressMode.cs | 452 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
namespace Microsoft.DotNet.DarcLib
{
    /// <summary>
    /// Abstraction over a remote git hosting provider (e.g. GitHub, Azure DevOps):
    /// repository queries, branch management, file commits, and pull request operations.
    /// Pull requests are identified by their URL throughout this interface.
    /// </summary>
    public interface IGitRepo
    {
        /// <summary>
        ///     Checks that a repository exists
        /// </summary>
        /// <param name="repoUri">Repository uri</param>
        /// <returns>True if the repository exists, false otherwise.</returns>
        Task<bool> RepoExistsAsync(string repoUri);
        /// <summary>
        ///     Create a new branch in a repository
        /// </summary>
        /// <param name="repoUri">Repo to create a branch in</param>
        /// <param name="newBranch">New branch name</param>
        /// <param name="baseBranch">Base of new branch</param>
        Task CreateBranchAsync(string repoUri, string newBranch, string baseBranch);
        /// <summary>
        ///     Delete a branch from a repository
        /// </summary>
        /// <param name="repoUri">Repository where the branch lives</param>
        /// <param name="branch">The branch to delete</param>
        Task DeleteBranchAsync(string repoUri, string branch);
        /// <summary>
        ///     Commit or update a set of files to a repo
        /// </summary>
        /// <param name="filesToCommit">Files to commit</param>
        /// <param name="repoUri">Remote repository URI</param>
        /// <param name="branch">Branch to push to</param>
        /// <param name="commitMessage">Commit message</param>
        /// <returns>A task that completes when the files have been committed.</returns>
        Task CommitFilesAsync(List<GitFile> filesToCommit, string repoUri, string branch, string commitMessage);
        /// <summary>
        ///     Search pull requests matching the specified criteria
        /// </summary>
        /// <param name="repoUri">URI of repo containing the pull request</param>
        /// <param name="pullRequestBranch">Source branch for PR</param>
        /// <param name="status">Current PR status</param>
        /// <param name="keyword">Keyword</param>
        /// <param name="author">Author</param>
        /// <returns>Ids of pull requests matching the specified criteria</returns>
        Task<IEnumerable<int>> SearchPullRequestsAsync(
            string repoUri,
            string pullRequestBranch,
            PrStatus status,
            string keyword = null,
            string author = null);
        /// <summary>
        ///     Get the status of a pull request
        /// </summary>
        /// <param name="pullRequestUrl">URI of pull request</param>
        /// <returns>Pull request status</returns>
        Task<PrStatus> GetPullRequestStatusAsync(string pullRequestUrl);
        /// <summary>
        ///     Retrieve information on a specific pull request
        /// </summary>
        /// <param name="pullRequestUrl">Uri of the pull request</param>
        /// <returns>Information on the pull request.</returns>
        Task<PullRequest> GetPullRequestAsync(string pullRequestUrl);
        /// <summary>
        ///     Create a new pull request for a repository
        /// </summary>
        /// <param name="repoUri">Repo to create the pull request for.</param>
        /// <param name="pullRequest">Pull request data</param>
        /// <returns>URL of the created pull request.</returns>
        Task<string> CreatePullRequestAsync(string repoUri, PullRequest pullRequest);
        /// <summary>
        ///     Update a pull request with new information
        /// </summary>
        /// <param name="pullRequestUri">Uri of pull request to update</param>
        /// <param name="pullRequest">Pull request info to update</param>
        /// <returns>A task that completes when the pull request has been updated.</returns>
        Task UpdatePullRequestAsync(string pullRequestUri, PullRequest pullRequest);
        /// <summary>
        ///     Merge a pull request
        /// </summary>
        /// <param name="pullRequestUrl">Uri of pull request to merge</param>
        /// <param name="parameters">Settings for merge</param>
        /// <returns>A task that completes when the pull request has been merged.</returns>
        Task MergePullRequestAsync(string pullRequestUrl, MergePullRequestParameters parameters);
        /// <summary>
        ///     Create a new comment, or update the last comment with an updated message,
        ///     if that comment was created by Darc.
        /// </summary>
        /// <param name="pullRequestUrl">Url of pull request</param>
        /// <param name="message">Message to post</param>
        Task CreateOrUpdatePullRequestCommentAsync(string pullRequestUrl, string message);
        /// <summary>
        ///     Retrieve a set of file under a specific path at a commit
        /// </summary>
        /// <param name="repoUri">Repository URI</param>
        /// <param name="commit">Commit to get files at</param>
        /// <param name="path">Path to retrieve files from</param>
        /// <returns>Set of files under <paramref name="path"/> at <paramref name="commit"/></returns>
        Task<List<GitFile>> GetFilesAtCommitAsync(string repoUri, string commit, string path);
        /// <summary>
        ///     Retrieve the contents of a repository file as a string
        /// </summary>
        /// <param name="filePath">Path to file</param>
        /// <param name="repoUri">Repository URI</param>
        /// <param name="branch">Branch to get file contents from</param>
        /// <returns>File contents.</returns>
        Task<string> GetFileContentsAsync(string filePath, string repoUri, string branch);
        /// <summary>
        ///     Get the latest commit in a repo on the specific branch
        /// </summary>
        /// <param name="repoUri">Repository uri</param>
        /// <param name="branch">Branch to retrieve the latest sha for</param>
        /// <returns>Latest sha. Null if no commits were found.</returns>
        Task<string> GetLastCommitShaAsync(string repoUri, string branch);
        /// <summary>
        ///     Retrieve the list of status checks on a PR.
        /// </summary>
        /// <param name="pullRequestUrl">Uri of pull request</param>
        /// <returns>List of status checks.</returns>
        Task<IList<Check>> GetPullRequestChecksAsync(string pullRequestUrl);
        /// <summary>
        ///     Retrieve the list of reviews on a PR.
        /// </summary>
        /// <param name="pullRequestUrl">Uri of pull request</param>
        /// <returns>List of pull request reviews.</returns>
        Task<IList<Review>> GetPullRequestReviewsAsync(string pullRequestUrl);
        /// <summary>
        ///     Diff two commits in a repository and return information about them.
        /// </summary>
        /// <param name="repoUri">Repository uri</param>
        /// <param name="baseVersion">Base version</param>
        /// <param name="targetVersion">Target version</param>
        /// <returns>Diff information</returns>
        Task<GitDiff> GitDiffAsync(string repoUri, string baseVersion, string targetVersion);
        /// <summary>
        ///     Clone a remote repository. Synchronous, unlike the remote-API methods above.
        /// </summary>
        /// <param name="repoUri">Repository uri</param>
        /// <param name="commit">Branch, commit, or tag to checkout</param>
        /// <param name="targetDirectory">Directory to clone to</param>
        /// <param name="gitDirectory">Location for .git directory, or null for default</param>
        void Clone(string repoUri, string commit, string targetDirectory, string gitDirectory);
        /// <summary>
        ///     Checkout the repository to a given state.
        /// </summary>
        /// <param name="repoPath">Path to the local repository</param>
        /// <param name="commit">Tag, branch, or commit to checkout</param>
        /// <param name="force">True to force the checkout (loses work)</param>
        void Checkout(string repoPath, string commit, bool force);
        /// <summary>
        ///     Add a remote to local repo if it does not already exist, and attempt to fetch commits.
        /// </summary>
        /// <param name="repoDir">The local repo directory</param>
        /// <param name="repoUrl">The remote URL to add</param>
        void AddRemoteIfMissing(string repoDir, string repoUrl);
    }
    /// <summary>
    /// Basic data describing a pull request, used when creating or updating one
    /// via <see cref="IGitRepo"/>.
    /// </summary>
    public class PullRequest
    {
        /// <summary>Title of the pull request.</summary>
        public string Title { get; set; }
        /// <summary>Body/description text of the pull request.</summary>
        public string Description { get; set; }
        /// <summary>Base branch, i.e. the branch the pull request targets.</summary>
        public string BaseBranch { get; set; }
        /// <summary>Head branch, i.e. the branch containing the proposed changes.</summary>
        public string HeadBranch { get; set; }
    }
}
| 44.978947 | 112 | 0.613035 | [
"MIT"
] | jonfortescue/arcade-services | src/Microsoft.DotNet.Darc/src/DarcLib/IGitRepo.cs | 8,546 | C# |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Runtime.CompilerServices;
using Microsoft.AspNetCore.Html;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.Extensions.Options;
namespace OrchardCore.ResourceManagement
{
public class ResourceManager : IResourceManager
{
private readonly Dictionary<ResourceTypeName, RequireSettings> _required = new Dictionary<ResourceTypeName, RequireSettings>();
private readonly Dictionary<string, ResourceRequiredContext[]> _builtResources;
private readonly IEnumerable<IResourceManifestProvider> _providers;
private readonly IFileVersionProvider _fileVersionProvider;
private ResourceManifest _dynamicManifest;
private List<LinkEntry> _links;
private Dictionary<string, MetaEntry> _metas;
private List<IHtmlContent> _headScripts;
private List<IHtmlContent> _footScripts;
private List<IHtmlContent> _styles;
private HashSet<string> _localScripts;
private HashSet<string> _localStyles;
private readonly IResourceManifestState _resourceManifestState;
private readonly ResourceManagementOptions _options;
public ResourceManager(
IEnumerable<IResourceManifestProvider> resourceProviders,
IResourceManifestState resourceManifestState,
IOptions<ResourceManagementOptions> options,
IFileVersionProvider fileVersionProvider)
{
_resourceManifestState = resourceManifestState;
_options = options.Value;
_providers = resourceProviders;
_fileVersionProvider = fileVersionProvider;
_builtResources = new Dictionary<string, ResourceRequiredContext[]>(StringComparer.OrdinalIgnoreCase);
}
public IEnumerable<ResourceManifest> ResourceManifests
{
get
{
if (_resourceManifestState.ResourceManifests == null)
{
var builder = new ResourceManifestBuilder();
foreach (var provider in _providers)
{
provider.BuildManifests(builder);
}
_resourceManifestState.ResourceManifests = builder.ResourceManifests;
}
return _resourceManifestState.ResourceManifests;
}
}
public ResourceManifest InlineManifest => _dynamicManifest ?? (_dynamicManifest = new ResourceManifest());
public RequireSettings RegisterResource(string resourceType, string resourceName)
{
if (resourceType == null)
{
return ThrowArgumentNullException<RequireSettings>(nameof(resourceType));
}
if (resourceName == null)
{
return ThrowArgumentNullException<RequireSettings>(nameof(resourceName));
}
var key = new ResourceTypeName(resourceType, resourceName);
if (!_required.TryGetValue(key, out var settings))
{
settings = new RequireSettings(_options)
{
Type = resourceType,
Name = resourceName
};
_required[key] = settings;
}
_builtResources[resourceType] = null;
return settings;
}
public RequireSettings RegisterUrl(string resourceType, string resourcePath, string resourceDebugPath)
{
if (resourceType == null)
{
return ThrowArgumentNullException<RequireSettings>(nameof(resourceType));
}
if (resourcePath == null)
{
return ThrowArgumentNullException<RequireSettings>(nameof(resourcePath));
}
// ~/ ==> convert to absolute path (e.g. /orchard/..)
if (resourcePath.StartsWith("~/", StringComparison.Ordinal))
{
resourcePath = _options.ContentBasePath + resourcePath.Substring(1);
}
if (resourceDebugPath != null && resourceDebugPath.StartsWith("~/", StringComparison.Ordinal))
{
resourceDebugPath = _options.ContentBasePath + resourceDebugPath.Substring(1);
}
return RegisterResource(
resourceType,
GetResourceKey(resourcePath, resourceDebugPath)).Define(d => d.SetUrl(resourcePath, resourceDebugPath));
}
public void RegisterHeadScript(IHtmlContent script)
{
if (_headScripts == null)
{
_headScripts = new List<IHtmlContent>();
}
_headScripts.Add(script);
}
public void RegisterFootScript(IHtmlContent script)
{
if (_footScripts == null)
{
_footScripts = new List<IHtmlContent>();
}
_footScripts.Add(script);
}
public void RegisterStyle(IHtmlContent style)
{
if (_styles == null)
{
_styles = new List<IHtmlContent>();
}
_styles.Add(style);
}
public void NotRequired(string resourceType, string resourceName)
{
if (resourceType == null)
{
ThrowArgumentNullException(nameof(resourceType));
return;
}
if (resourceName == null)
{
ThrowArgumentNullException(nameof(resourceName));
return;
}
var key = new ResourceTypeName(resourceType, resourceName);
_builtResources[resourceType] = null;
_required.Remove(key);
}
public ResourceDefinition FindResource(RequireSettings settings)
{
return FindResource(settings, true);
}
private ResourceDefinition FindResource(RequireSettings settings, bool resolveInlineDefinitions)
{
// find the resource with the given type and name
// that has at least the given version number. If multiple,
// return the resource with the greatest version number.
// If not found and an inlineDefinition is given, define the resource on the fly
// using the action.
var name = settings.Name ?? "";
var type = settings.Type;
var stream = ResourceManifests.SelectMany(x => x.GetResources(type));
var resource = FindMatchingResource(stream, settings, name);
if (resource == null && _dynamicManifest != null)
{
stream = _dynamicManifest.GetResources(type);
resource = FindMatchingResource(stream, settings, name);
}
if (resolveInlineDefinitions && resource == null)
{
// Does not seem to exist, but it's possible it is being
// defined by a Define() from a RequireSettings somewhere.
if (ResolveInlineDefinitions(settings.Type))
{
// if any were defined, now try to find it
resource = FindResource(settings, false);
}
}
return resource;
}
private ResourceDefinition FindMatchingResource(
IEnumerable<KeyValuePair<string, IList<ResourceDefinition>>> stream,
RequireSettings settings,
string name)
{
Version lower = null;
Version upper = null;
if (!String.IsNullOrEmpty(settings.Version))
{
// Specific version, filter
lower = GetLowerBoundVersion(settings.Version);
upper = GetUpperBoundVersion(settings.Version);
}
ResourceDefinition resource = null;
foreach (var r in stream)
{
if (String.Equals(r.Key, name, StringComparison.OrdinalIgnoreCase))
{
foreach (var resourceDefinition in r.Value)
{
var version = resourceDefinition.Version != null
? new Version(resourceDefinition.Version)
: null;
if (lower != null)
{
if (lower > version || version >= upper)
{
continue;
}
}
// Use the highest version of all matches
if (resource == null
|| (resourceDefinition.Version != null && new Version(resource.Version) < version))
{
resource = resourceDefinition;
}
}
}
}
return resource;
}
/// <summary>
/// Returns the upper bound value of a required version number.
/// For instance, 3.1.0 returns 3.1.1, 4 returns 5.0.0, 6.1 returns 6.2.0
/// </summary>
private Version GetUpperBoundVersion(string minimumVersion)
{
if (!Version.TryParse(minimumVersion, out var version))
{
// Is is a single number?
if (int.TryParse(minimumVersion, out var major))
{
return new Version(major + 1, 0, 0);
}
}
if (version.Build != -1)
{
return new Version(version.Major, version.Minor, version.Build + 1);
}
if (version.Minor != -1)
{
return new Version(version.Major, version.Minor + 1, 0);
}
return version;
}
/// <summary>
/// Returns the lower bound value of a required version number.
/// For instance, 3.1.0 returns 3.1.0, 4 returns 4.0.0, 6.1 returns 6.1.0
/// </summary>
private Version GetLowerBoundVersion(string minimumVersion)
{
if (!Version.TryParse(minimumVersion, out var version))
{
// Is is a single number?
if (int.TryParse(minimumVersion, out var major))
{
return new Version(major, 0, 0);
}
}
return version;
}
private bool ResolveInlineDefinitions(string resourceType)
{
bool anyWereDefined = false;
foreach (var settings in ResolveRequiredResources(resourceType))
{
if (settings.InlineDefinition == null)
{
continue;
}
// defining it on the fly
var resource = FindResource(settings, false);
if (resource == null)
{
// does not already exist, so define it
resource = InlineManifest.DefineResource(resourceType, settings.Name).SetBasePath(settings.BasePath);
anyWereDefined = true;
}
settings.InlineDefinition(resource);
settings.InlineDefinition = null;
}
return anyWereDefined;
}
private IEnumerable<RequireSettings> ResolveRequiredResources(string resourceType)
{
foreach (var (key, value) in _required)
{
if (key.Type == resourceType)
{
yield return value;
}
}
}
public IEnumerable<LinkEntry> GetRegisteredLinks() => DoGetRegisteredLinks();
private List<LinkEntry> DoGetRegisteredLinks()
{
return _links ?? EmptyList<LinkEntry>.Instance;
}
public IEnumerable<MetaEntry> GetRegisteredMetas() => DoGetRegisteredMetas();
private Dictionary<string, MetaEntry>.ValueCollection DoGetRegisteredMetas()
{
return _metas?.Values ?? EmptyValueCollection<MetaEntry>.Instance;
}
public IEnumerable<IHtmlContent> GetRegisteredHeadScripts() => DoGetRegisteredHeadScripts();
public List<IHtmlContent> DoGetRegisteredHeadScripts()
{
return _headScripts ?? EmptyList<IHtmlContent>.Instance;
}
public IEnumerable<IHtmlContent> GetRegisteredFootScripts() => DoGetRegisteredFootScripts();
public List<IHtmlContent> DoGetRegisteredFootScripts()
{
return _footScripts ?? EmptyList<IHtmlContent>.Instance;
}
public IEnumerable<IHtmlContent> GetRegisteredStyles() => DoGetRegisteredStyles();
public List<IHtmlContent> DoGetRegisteredStyles()
{
return _styles ?? EmptyList<IHtmlContent>.Instance;
}
public IEnumerable<ResourceRequiredContext> GetRequiredResources(string resourceType)
=> DoGetRequiredResources(resourceType);
private ResourceRequiredContext[] DoGetRequiredResources(string resourceType)
{
if (_builtResources.TryGetValue(resourceType, out var requiredResources) && requiredResources != null)
{
return requiredResources;
}
var allResources = new OrderedDictionary();
foreach (var settings in ResolveRequiredResources(resourceType))
{
var resource = FindResource(settings);
if (resource == null)
{
throw new InvalidOperationException($"Could not find a resource of type '{settings.Type}' named '{settings.Name}' with version '{settings.Version ?? "any"}'.");
}
ExpandDependencies(resource, settings, allResources);
}
requiredResources = new ResourceRequiredContext[allResources.Count];
var i = 0;
foreach (DictionaryEntry entry in allResources)
{
requiredResources[i++] = new ResourceRequiredContext
{
Resource = (ResourceDefinition)entry.Key,
Settings = (RequireSettings)entry.Value,
FileVersionProvider = _fileVersionProvider
};
}
_builtResources[resourceType] = requiredResources;
return requiredResources;
}
protected virtual void ExpandDependencies(
ResourceDefinition resource,
RequireSettings settings,
OrderedDictionary allResources)
{
if (resource == null)
{
return;
}
// Use any additional dependencies from the settings without mutating the resource that is held in a singleton collection.
List<string> dependencies = null;
if (resource.Dependencies != null)
{
dependencies = new List<string>(resource.Dependencies);
if (settings.Dependencies != null)
{
dependencies.AddRange(settings.Dependencies);
}
}
else if (settings.Dependencies != null)
{
dependencies = new List<string>(settings.Dependencies);
}
// Settings is given so they can cascade down into dependencies. For example, if Foo depends on Bar, and Foo's required
// location is Head, so too should Bar's location.
// forge the effective require settings for this resource
// (1) If a require exists for the resource, combine with it. Last settings in gets preference for its specified values.
// (2) If no require already exists, form a new settings object based on the given one but with its own type/name.
settings = allResources.Contains(resource)
? ((RequireSettings)allResources[resource]).Combine(settings)
: new RequireSettings(_options) { Type = resource.Type, Name = resource.Name }.Combine(settings);
if (dependencies != null)
{
// share search instance
var tempSettings = new RequireSettings();
for (var i = 0; i < dependencies.Count; i++)
{
var d = dependencies[i];
var idx = d.IndexOf(':');
var name = d;
string version = null;
if (idx != -1)
{
name = d.Substring(0, idx);
version = d.Substring(idx + 1);
}
tempSettings.Type = resource.Type;
tempSettings.Name = name;
tempSettings.Version = version;
var dependency = FindResource(tempSettings);
if (dependency == null)
{
continue;
}
ExpandDependencies(dependency, settings, allResources);
}
}
allResources[resource] = settings;
}
public void RegisterLink(LinkEntry link)
{
if (_links == null)
{
_links = new List<LinkEntry>();
}
var href = link.Href;
if (href != null && href.StartsWith("~/", StringComparison.Ordinal))
{
link.Href = _options.ContentBasePath + href.Substring(1);
}
if (link.AppendVersion)
{
link.Href = _fileVersionProvider.AddFileVersionToPath(_options.ContentBasePath, link.Href);
}
_links.Add(link);
}
public void RegisterMeta(MetaEntry meta)
{
if (meta == null)
{
return;
}
if (_metas == null)
{
_metas = new Dictionary<string, MetaEntry>();
}
var index = meta.Name ?? meta.Property ?? meta.HttpEquiv ?? "charset";
_metas[index] = meta;
}
public void AppendMeta(MetaEntry meta, string contentSeparator)
{
if (meta == null)
{
return;
}
var index = meta.Name ?? meta.Property ?? meta.HttpEquiv;
if (String.IsNullOrEmpty(index))
{
return;
}
if (_metas == null)
{
_metas = new Dictionary<string, MetaEntry>();
}
if (_metas.TryGetValue(index, out var existingMeta))
{
meta = MetaEntry.Combine(existingMeta, meta, contentSeparator);
}
_metas[index] = meta;
}
public void RenderMeta(IHtmlContentBuilder builder)
{
var first = true;
foreach (var meta in DoGetRegisteredMetas())
{
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(meta.GetTag());
}
}
public void RenderHeadLink(IHtmlContentBuilder builder)
{
var first = true;
var registeredLinks = DoGetRegisteredLinks();
for (var i = 0; i < registeredLinks.Count; i++)
{
var link = registeredLinks[i];
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(link.GetTag());
}
}
public void RenderStylesheet(IHtmlContentBuilder builder)
{
var first = true;
var styleSheets = DoGetRequiredResources("stylesheet");
foreach (var context in styleSheets)
{
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context.GetHtmlContent(_options.ContentBasePath));
}
var registeredStyles = DoGetRegisteredStyles();
for (var i = 0; i < registeredStyles.Count; i++)
{
var context = registeredStyles[i];
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context);
}
}
public void RenderHeadScript(IHtmlContentBuilder builder)
{
var headScripts = DoGetRequiredResources("script");
var first = true;
foreach (var context in headScripts)
{
if (context.Settings.Location != ResourceLocation.Head)
{
continue;
}
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context.GetHtmlContent(_options.ContentBasePath));
}
var registeredHeadScripts = DoGetRegisteredHeadScripts();
for (var i = 0; i < registeredHeadScripts.Count; i++)
{
var context = registeredHeadScripts[i];
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context);
}
}
public void RenderFootScript(IHtmlContentBuilder builder)
{
var footScripts = DoGetRequiredResources("script");
var first = true;
foreach (var context in footScripts)
{
if (context.Settings.Location != ResourceLocation.Foot)
{
continue;
}
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context.GetHtmlContent(_options.ContentBasePath));
}
var registeredFootScripts = DoGetRegisteredFootScripts();
for (var i = 0; i < registeredFootScripts.Count; i++)
{
var context = registeredFootScripts[i];
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context);
}
}
public void RenderLocalScript(RequireSettings settings, IHtmlContentBuilder builder)
{
var localScripts = DoGetRequiredResources("script");
_localScripts ??= new HashSet<string>();
var first = true;
foreach (var context in localScripts)
{
if ((context.Settings.Location == ResourceLocation.Unspecified || context.Settings.Location == ResourceLocation.Inline) &&
(_localScripts.Add(context.Settings.Name) || context.Settings.Name == settings.Name))
{
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context.GetHtmlContent(_options.ContentBasePath));
}
}
}
public void RenderLocalStyle(RequireSettings settings, IHtmlContentBuilder builder)
{
var localStyles = DoGetRequiredResources("stylesheet");
_localStyles ??= new HashSet<string>();
var first = true;
foreach (var context in localStyles)
{
if (context.Settings.Location == ResourceLocation.Inline &&
(_localStyles.Add(context.Settings.Name) || context.Settings.Name == settings.Name))
{
if (!first)
{
builder.AppendHtml(System.Environment.NewLine);
}
first = false;
builder.AppendHtml(context.GetHtmlContent(_options.ContentBasePath));
}
}
}
private readonly struct ResourceTypeName : IEquatable<ResourceTypeName>
{
public readonly string Type;
public readonly string Name;
public ResourceTypeName(string resourceType, string resourceName)
{
Type = resourceType;
Name = resourceName;
}
public bool Equals(ResourceTypeName other)
{
return Type == other.Type && Name == other.Name;
}
public override int GetHashCode()
{
return HashCode.Combine(Type, Name);
}
public override string ToString() => "(" + Type + ", " + Name + ")";
}
private string GetResourceKey(string releasePath, string debugPath)
{
if (_options.DebugMode && !string.IsNullOrWhiteSpace(debugPath))
{
return debugPath;
}
else
{
return releasePath;
}
}
        // Caches one shared empty List<T> per element type so callers can hand out
        // an empty list without allocating each time. The instance is mutable by
        // design of List<T>; callers are expected not to add to it.
        private static class EmptyList<T>
        {
            public static readonly List<T> Instance = new List<T>();
        }
        // Caches one shared empty Dictionary<string, T>.ValueCollection per value
        // type, so callers can return a typed empty value collection without
        // building a new dictionary on every call.
        private static class EmptyValueCollection<T>
        {
            public static readonly Dictionary<string, T>.ValueCollection Instance = new Dictionary<string, T>.ValueCollection(new Dictionary<string, T>());
        }
        // Out-of-line throw helper (NoInlining) so callers that validate arguments
        // stay small enough for the JIT to inline the hot path.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void ThrowArgumentNullException(string paramName)
        {
            ThrowArgumentNullException<object>(paramName);
        }
        // Generic variant: the T return type lets this be used in expression
        // positions (e.g. "x ?? ThrowArgumentNullException<T>(...)"). It never
        // actually returns; it always throws.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static T ThrowArgumentNullException<T>(string paramName)
        {
            throw new ArgumentNullException(paramName);
        }
}
}
| 33.734513 | 180 | 0.527807 | [
"BSD-3-Clause"
] | mysterious1223/OrchardCore | src/OrchardCore/OrchardCore.ResourceManagement/ResourceManager.cs | 26,684 | C# |
// Copyright (c) 2016, David Aramant
// Distributed under the 3-clause BSD license. For full terms see the file LICENSE.
using System;
using System.Diagnostics;
namespace SectorDirector.Core.FormatModels.Wad
{
[DebuggerDisplay("{ToString()}")]
public struct LumpInfo : IEquatable<LumpInfo>
{
public readonly int Position;
public readonly int Size;
public readonly LumpName Name;
public LumpInfo(int position, int size, LumpName name)
{
Position = position;
Size = size;
Name = name;
}
public override string ToString() => $"{Name} (offset: {Position}, size: {Size})";
#region Equality
public bool Equals(LumpInfo other)
{
return Position == other.Position && Size == other.Size && Name.Equals(other.Name);
}
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj)) return false;
return obj is LumpInfo other && Equals(other);
}
public override int GetHashCode()
{
unchecked
{
var hashCode = Position;
hashCode = (hashCode * 397) ^ Size;
hashCode = (hashCode * 397) ^ Name.GetHashCode();
return hashCode;
}
}
public static bool operator ==(LumpInfo left, LumpInfo right)
{
return left.Equals(right);
}
public static bool operator !=(LumpInfo left, LumpInfo right)
{
return !left.Equals(right);
}
#endregion
}
}
| 27.366667 | 95 | 0.554202 | [
"BSD-3-Clause"
] | davidaramant/sector-director | src/Core/FormatModels/Wad/LumpInfo.cs | 1,642 | C# |
using System.Collections.Generic;
using DevComponents.AdvTree;
using PostSharp.Patterns.Contracts;
using ThreatsManager.AutoGenRules.Engine;
using ThreatsManager.Interfaces.ObjectModel.Properties;
namespace ThreatsManager.Utilities.WinForms.Rules
{
internal class PropertyTypeItemContext : ButtonItemContext
{
public PropertyTypeItemContext([NotNull] IPropertyType propertyType, Scope scope) : base(scope)
{
PropertyType = propertyType;
if (propertyType is IArrayPropertyType)
ContextType = PropertyTypeItemContextType.EnumValue;
else if (propertyType is IBoolPropertyType)
ContextType = PropertyTypeItemContextType.Boolean;
else if (propertyType is IDecimalPropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
else if (propertyType is IIdentityReferencePropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
else if (propertyType is IIntegerPropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
else if (propertyType is IJsonSerializableObjectPropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
else if (propertyType is IListMultiPropertyType)
ContextType = PropertyTypeItemContextType.EnumValue;
else if (propertyType is IListPropertyType)
ContextType = PropertyTypeItemContextType.EnumValue;
else if (propertyType is ISingleLineStringPropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
else if (propertyType is IStringPropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
else if (propertyType is ITokensPropertyType)
ContextType = PropertyTypeItemContextType.Comparison;
}
public PropertyTypeItemContext([NotNull] IPropertyType propertyType,
Scope scope,
PropertyTypeItemContextType type) : this(propertyType, scope)
{
ContextType = type;
}
public IPropertyType PropertyType { get; private set; }
public PropertyTypeItemContextType ContextType { get; private set; }
public override SelectionRuleNode CreateNode([NotNull] Node node, params object[] parameters)
{
SelectionRuleNode result = null;
if (parameters != null && parameters.Length >= 4 &&
parameters[0] is PropertyTypeItemContextType contextType &&
parameters[1] is string schemaNs &&
parameters[2] is string schemaName)
{
switch (contextType)
{
case PropertyTypeItemContextType.Boolean:
if (parameters[3] is bool boolValue)
result = new BooleanRuleNode(node.Text, schemaNs, schemaName, boolValue) { Scope = Scope };
break;
case PropertyTypeItemContextType.Comparison:
if (parameters[3] is ComparisonOperator op &&
parameters[4] is string textValue)
result = new ComparisonRuleNode(node.Text, schemaNs, schemaName, op, textValue) { Scope = Scope };
break;
case PropertyTypeItemContextType.EnumValue:
if (parameters[3] is IEnumerable<string> values &&
parameters[4] is string value)
result = new EnumValueRuleNode(node.Text, schemaNs, schemaName, values, value) { Scope = Scope };
break;
}
}
return result;
}
}
    /// <summary>
    /// Kind of rule editor / rule node a property type maps to.
    /// </summary>
    internal enum PropertyTypeItemContextType
    {
        Boolean,
        Comparison,
        EnumValue
    }
} | 44.863636 | 126 | 0.617781 | [
"MIT"
] | simonec73/threatsmanager | Studio/ThreatsManager.Utilities.WinForms/Rules/PropertyTypeItemContext.cs | 3,950 | C# |
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.PixelFormats;
using DevBlog.Application.Enums;
using System;
using System.IO;
namespace DevBlog.Application.Extensions
{
public static class ImageExtensions
{
public static byte[] ToByteArray(this Image<Rgba32> image, ImageFormat format)
{
using var memoryStream = new MemoryStream();
switch (format)
{
case ImageFormat.Jpg:
image.SaveAsJpeg(memoryStream, new JpegEncoder { Quality = 90, Subsample = JpegSubsample.Ratio444 });
break;
case ImageFormat.Png:
image.SaveAsPng(memoryStream, new PngEncoder { CompressionLevel = PngCompressionLevel.Level6 });
break;
default:
throw new ArgumentOutOfRangeException(nameof(format), format, null);
}
memoryStream.Position = 0;
return memoryStream.ToArray();
}
}
} | 26.705882 | 106 | 0.745595 | [
"MIT"
] | Pastafarian/DevBlog | server/DevBlog.Application/Extensions/ImageExtensions.cs | 910 | C# |
using Files.Shared.Enums;
using Files.Helpers;
using Files.Extensions;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Windows.Storage;
namespace Files.Filesystem.FilesystemHistory
{
    /// <summary>
    /// Replays (Redo) and reverses (Undo) recorded file system operations.
    /// Each branch maps an operation type to the call that re-applies it or to
    /// its logical opposite; operations with no opposite (Delete, Extract)
    /// simply report success.
    /// </summary>
    public class StorageHistoryOperations : IStorageHistoryOperations
    {
        #region Private Members

        private IFilesystemOperations filesystemOperations;

        private IFilesystemHelpers filesystemHelpers;

        private IShellPage associatedInstance;

        private readonly CancellationToken cancellationToken;

        #endregion Private Members

        #region Constructor

        public StorageHistoryOperations(IShellPage associatedInstance, CancellationToken cancellationToken)
        {
            this.associatedInstance = associatedInstance;
            this.cancellationToken = cancellationToken;
            filesystemOperations = new ShellFilesystemOperations(associatedInstance);
            filesystemHelpers = this.associatedInstance.FilesystemHelpers;
        }

        #endregion Constructor

        #region IStorageHistoryOperations

        /// <summary>
        /// Re-applies <paramref name="history"/>. Returns the aggregated status;
        /// branches that delegate to <see cref="filesystemHelpers"/> return its
        /// result directly, the others report progress through the errorCode callback.
        /// </summary>
        public async Task<ReturnResult> Redo(IStorageHistory history)
        {
            ReturnResult returnStatus = ReturnResult.InProgress;
            Progress<FileSystemStatusCode> errorCode = new Progress<FileSystemStatusCode>();
            errorCode.ProgressChanged += (s, e) => { returnStatus = e.ToStatus(); };
            switch (history.OperationType)
            {
                case FileOperationType.CreateNew: // CreateNew PASS
                    {
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        for (int i = 0; i < history.Source.Count; i++)
                        {
                            await filesystemOperations.CreateAsync(history.Source[i], errorCode, cancellationToken);
                        }
                        break;
                    }
                case FileOperationType.CreateLink: // CreateLink PASS
                    {
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        await filesystemOperations.CreateShortcutItemsAsync(history.Source, await history.Destination.Select(item => item.Path).ToListAsync(), null, errorCode, cancellationToken);
                        break;
                    }
                case FileOperationType.Rename: // Rename PASS
                    {
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        NameCollisionOption collision = NameCollisionOption.GenerateUniqueName;
                        for (int i = 0; i < history.Source.Count; i++)
                        {
                            await filesystemOperations.RenameAsync(
                                history.Source[i],
                                Path.GetFileName(history.Destination[i].Path),
                                collision,
                                errorCode,
                                cancellationToken);
                        }
                        break;
                    }
                case FileOperationType.Copy: // Copy PASS
                    {
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        return await filesystemHelpers.CopyItemsAsync(history.Source, history.Destination.Select(item => item.Path), false, false);
                    }
                case FileOperationType.Move: // Move PASS
                    {
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        return await filesystemHelpers.MoveItemsAsync(history.Source, history.Destination.Select(item => item.Path), false, false);
                    }
                case FileOperationType.Extract: // Extract PASS
                    {
                        // Archive extraction is not replayed; report success.
                        returnStatus = ReturnResult.Success;
                        Debugger.Break();
                        break;
                    }
                case FileOperationType.Recycle: // Recycle PASS
                    {
                        if (IsHistoryNull(history.Destination))
                        {
                            break;
                        }
                        var newHistory = await filesystemOperations.DeleteItemsAsync(history.Source, null, errorCode, false, cancellationToken);
                        if (newHistory != null)
                        {
                            // We need to change the recycled item paths (since IDs are different) - for Undo() to work
                            App.HistoryWrapper.ModifyCurrentHistory(newHistory);
                        }
                        else
                        {
                            App.HistoryWrapper.RemoveHistory(history, true);
                        }
                        break;
                    }
                case FileOperationType.Restore: // Restore PASS
                    {
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        await filesystemHelpers.RestoreItemsFromTrashAsync(history.Source, history.Destination.Select(item => item.Path), false);
                        break;
                    }
                case FileOperationType.Delete: // Delete PASS
                    {
                        // Permanent deletion is not replayed; report success.
                        returnStatus = ReturnResult.Success;
                        break;
                    }
            }
            return returnStatus;
        }

        /// <summary>
        /// Reverses <paramref name="history"/>: deletes what was created/copied,
        /// restores what was recycled, moves items back, and restores original names.
        /// </summary>
        public async Task<ReturnResult> Undo(IStorageHistory history)
        {
            ReturnResult returnStatus = ReturnResult.InProgress;
            Progress<FileSystemStatusCode> errorCode = new Progress<FileSystemStatusCode>();
            errorCode.ProgressChanged += (s, e) => returnStatus = e.ToStatus();
            switch (history.OperationType)
            {
                case FileOperationType.CreateNew: // CreateNew PASS
                    {
                        // Opposite: Delete created items
                        if (IsHistoryNull(history.Source))
                        {
                            break;
                        }
                        return await filesystemHelpers.DeleteItemsAsync(history.Source, false, true, false);
                    }
                case FileOperationType.CreateLink: // CreateLink PASS
                    {
                        // Opposite: Delete created items
                        if (IsHistoryNull(history.Destination))
                        {
                            break;
                        }
                        return await filesystemHelpers.DeleteItemsAsync(history.Destination, false, true, false);
                    }
                case FileOperationType.Rename: // Rename PASS
                    {
                        // Opposite: Restore original item names
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        NameCollisionOption collision = NameCollisionOption.GenerateUniqueName;
                        for (int i = 0; i < history.Destination.Count(); i++)
                        {
                            await filesystemOperations.RenameAsync(
                                history.Destination[i],
                                Path.GetFileName(history.Source[i].Path),
                                collision,
                                errorCode,
                                cancellationToken);
                        }
                        break;
                    }
                case FileOperationType.Copy: // Copy PASS
                    {
                        // Opposite: Delete copied items
                        if (IsHistoryNull(history.Destination))
                        {
                            break;
                        }
                        return await filesystemHelpers.DeleteItemsAsync(history.Destination, false, true, false);
                    }
                case FileOperationType.Move: // Move PASS
                    {
                        // Opposite: Move the items to original directory
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        return await filesystemHelpers.MoveItemsAsync(history.Destination, history.Source.Select(item => item.Path), false, false);
                    }
                case FileOperationType.Extract: // Extract PASS
                    {
                        // Opposite: No opposite for archive extraction
                        returnStatus = ReturnResult.Success;
                        Debugger.Break();
                        break;
                    }
                case FileOperationType.Recycle: // Recycle PASS
                    {
                        // Opposite: Restore recycled items
                        if (IsHistoryNull(history))
                        {
                            break;
                        }
                        returnStatus = await filesystemHelpers.RestoreItemsFromTrashAsync(history.Destination, history.Source.Select(item => item.Path), false);
                        if (returnStatus == ReturnResult.IntegrityCheckFailed) // Not found, corrupted
                        {
                            App.HistoryWrapper.RemoveHistory(history, false);
                        }
                        break;
                    }
                case FileOperationType.Restore: // Restore PASS
                    {
                        // Opposite: Move restored items to Recycle Bin
                        if (IsHistoryNull(history.Destination))
                        {
                            break;
                        }
                        var newHistory = await filesystemOperations.DeleteItemsAsync(history.Destination, null, errorCode, false, cancellationToken);
                        if (newHistory != null)
                        {
                            // We need to change the recycled item paths (since IDs are different) - for Redo() to work
                            App.HistoryWrapper.ModifyCurrentHistory(newHistory);
                        }
                        else
                        {
                            App.HistoryWrapper.RemoveHistory(history, false);
                        }
                        break;
                    }
                case FileOperationType.Delete: // Delete PASS
                    {
                        // Opposite: No opposite for pernament deletion
                        returnStatus = ReturnResult.Success;
                        break;
                    }
            }
            return returnStatus;
        }

        #endregion IStorageHistoryOperations

        #region Private Helpers

        // history.Destination is null with CreateNew
        // Returns true when the history is unusable: any source entry (or, when a
        // destination exists, any destination entry) is null or has a blank path.
        private bool IsHistoryNull(IStorageHistory history) =>
            !(history.Source.All((item) => item != null && !string.IsNullOrWhiteSpace(item.Path))
                && (history.Destination == null || history.Destination.All((item) => item != null && !string.IsNullOrWhiteSpace(item.Path))));

        // Returns true when any entry of the given list is null or has a blank path.
        private bool IsHistoryNull(IEnumerable<IStorageItemWithPath> source) =>
            !source.All((item) => item != null && !string.IsNullOrWhiteSpace(item.Path));

        #endregion Private Helpers

        #region IDisposable

        // NOTE(review): fields are nulled after disposal but there is no disposed
        // guard on Redo/Undo — calling them after Dispose would NRE; confirm callers.
        public void Dispose()
        {
            filesystemOperations?.Dispose();
            filesystemHelpers?.Dispose();
            associatedInstance = null;
            filesystemOperations = null;
            filesystemHelpers = null;
        }

        #endregion IDisposable
    }
} | 36.12828 | 195 | 0.46062 | [
"MIT"
] | Creperi/Files | src/Files.Uwp/Filesystem/StorageHistory/StorageHistoryOperations.cs | 12,394 | C# |
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Windows.Threading;
using Dragablz;
using TCC.Data;
using TCC.Data.Chat;
using TCC.Settings;
namespace TCC.ViewModels
{
    /// <summary>
    /// View model for a chat window: manages the tab collection, the auto-hide
    /// timer driven by <see cref="ChatWindowSettings.HideTimeout"/>, and raises
    /// <see cref="VisibilityChanged"/> when the window should show or hide.
    /// </summary>
    public class ChatViewModel : INotifyPropertyChanged
    {
        private SynchronizedObservableCollection<HeaderedItemViewModel> _tabVMs;
        private bool _paused;
        //private bool _lfgOn;
        //private double _backgroundOpacity = 0.3;
        // Created lazily in the WindowSettings setter; null until settings are assigned.
        private DispatcherTimer _hideTimer;
        private ChatWindowSettings _windowSettings;
        // Raised with true when the chat should become visible, false when it should hide.
        public event Action<bool> VisibilityChanged;
        public bool Paused
        {
            get => _paused;
            set
            {
                if (_paused == value) return;
                _paused = value;
                NPC();
            }
        }
        //public bool IsTeraOnTop
        //{
        //    get => WindowManager.IsTccVisible;
        //}
        public SynchronizedObservableCollection<HeaderedItemViewModel> TabVMs
        {
            get => _tabVMs;
            set
            {
                if (_tabVMs == value) return;
                _tabVMs = value;
                NPC(nameof(TabVMs));
            }
        }
        public IInterTabClient InterTabClient { get; } = new ChatTabClient();
        // Snapshot of the Tab content of every tab view model (rebuilt on each access).
        public List<Tab> Tabs
        {
            get
            {
                var ret = new List<Tab>();
                TabVMs.ToList().ForEach(x => ret.Add(x.Content as Tab));
                return ret;
            }
        }
        public SynchronizedObservableCollection<LFG> LFGs => ChatWindowManager.Instance.LFGs;
        public Tab CurrentTab { get; set; }
        //public double ChatWindowOpacity => Settings.Settings.ChatWindowOpacity;
        // Factory used by the tab control to create a new, empty "NEW TAB"; keeps the
        // header in sync with the tab's editable name.
        public Func<HeaderedItemViewModel> AddNewTabCommand
        {
            get
            {
                return
                    () =>
                    {
                        var t = new HeaderedItemViewModel();
                        var content = new Tab("NEW TAB", new ChatChannel[] { }, new ChatChannel[] { }, new string[] { },
                            new string[] { });
                        content.PropertyChanged += (_, __) =>
                        {
                            if (__.PropertyName == nameof(Tab.TabName)) t.Header = content.TabName;
                        };
                        t.Content = content;
                        return t;
                    };
            }
        }
        //public bool LfgOn
        //{
        //    get => _lfgOn; set
        //    {
        //        if (_lfgOn == value) return;
        //        _lfgOn = value;
        //        NPC();
        //    }
        //}
        //public double BackgroundOpacity
        //{
        //    get => _backgroundOpacity; set
        //    {
        //        if (_backgroundOpacity == value) return;
        //        _backgroundOpacity = value;
        //        NPC();
        //    }
        //}
        // Assigning settings (re)creates the hide timer and re-subscribes the timeout handler.
        // NOTE(review): the previous timer's Tick handler is not detached when settings are
        // replaced — the old timer is only dropped, not stopped; confirm this cannot leak.
        public ChatWindowSettings WindowSettings
        {
            get => _windowSettings;
            set
            {
                if (_windowSettings == value) return;
                if (_windowSettings != null)
                {
                    _windowSettings.TimeoutChanged -= ChangeTimerInterval;
                }
                _windowSettings = value;
                if (_windowSettings != null)
                {
                    _hideTimer = new DispatcherTimer { Interval = TimeSpan.FromSeconds(WindowSettings.HideTimeout) };
                    _hideTimer.Tick += OnHideTimerTick;
                    _windowSettings.TimeoutChanged += ChangeTimerInterval;
                }
            }
        }
        private void ChangeTimerInterval()
        {
            _hideTimer.Interval = TimeSpan.FromSeconds(WindowSettings.HideTimeout);
            _hideTimer.Refresh();
        }
        // Restarts the countdown that eventually hides the chat.
        public void RefreshHideTimer()
        {
            _hideTimer.Refresh();
        }
        // Cancels auto-hide and forces the chat visible.
        public void StopHideTimer()
        {
            _hideTimer.Stop();
            VisibilityChanged?.Invoke(true);// IsChatVisible = true;
        }
        //public void NotifyOpacityChange()
        //{
        //    NPC(nameof(ChatWindowOpacity));
        //}
        public ChatViewModel()
        {
            TabVMs = new SynchronizedObservableCollection<HeaderedItemViewModel>();
            ChatWindowManager.Instance.NewMessage += CheckAttention;
            TabVMs.CollectionChanged += TabVMs_CollectionChanged;
        }
        private void OnHideTimerTick(object sender, EventArgs e)
        {
            VisibilityChanged?.Invoke(false); //IsChatVisible = false;
            _hideTimer.Stop();
        }
        private void TabVMs_CollectionChanged(object sender, System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
        {
            //if (!canUpdate) return;
            //var w = ChatWindowManager.Instance.FindMyWindow(this);
            //w.UpdateSettings();
        }
        /// <summary>
        /// Populates the tab list from saved settings; when nothing was saved,
        /// creates the default ALL/GUILD/GROUP/WHISPERS/SYSTEM tabs.
        /// </summary>
        public void LoadTabs(IEnumerable<Tab> tabs = null)
        {
            if (tabs != null)
            {
                foreach (var chatTabsSetting in tabs)
                {
                    TabVMs.Add(new HeaderedItemViewModel(chatTabsSetting.TabName, chatTabsSetting));
                }
            }
            if (TabVMs.Count != 0) return;
            var all = new Tab("ALL", new ChatChannel[] { }, new ChatChannel[] { }, new string[] { }, new[] { "System" });
            var guild = new Tab("GUILD", new[] { ChatChannel.Guild, ChatChannel.GuildNotice, }, new ChatChannel[] { }, new string[] { }, new string[] { });
            var group = new Tab("GROUP", new[]{ChatChannel.Party, ChatChannel.PartyNotice,
                ChatChannel.RaidLeader, ChatChannel.RaidNotice,
                ChatChannel.Raid, ChatChannel.Ress,ChatChannel.Death,
                ChatChannel.Group, ChatChannel.GroupAlerts }, new ChatChannel[] { }, new string[] { }, new string[] { });
            var w = new Tab("WHISPERS", new[] { ChatChannel.ReceivedWhisper, ChatChannel.SentWhisper, }, new ChatChannel[] { }, new string[] { }, new string[] { });
            var sys = new Tab("SYSTEM", new ChatChannel[] { }, new ChatChannel[] { }, new[] { "System" }, new string[] { });
            TabVMs.Add(new HeaderedItemViewModel(all.TabName, all));
            TabVMs.Add(new HeaderedItemViewModel(guild.TabName, guild));
            TabVMs.Add(new HeaderedItemViewModel(group.TabName, group));
            TabVMs.Add(new HeaderedItemViewModel(w.TabName, w));
            TabVMs.Add(new HeaderedItemViewModel(sys.TabName, sys));
            CurrentTab = TabVMs[0].Content as Tab;
            //ChatWindowManager.Instance.FindMyWindow(this).UpdateSettings();
        }
        // Flags every tab that contains the message as needing attention when the
        // message mentions the player or is a received whisper.
        private void CheckAttention(ChatMessage chatMessage)
        {
            //chatMessage.Animate = false; //set animate to false if the message is not going in the active tab
            if (chatMessage.ContainsPlayerName || chatMessage.Channel == ChatChannel.ReceivedWhisper)
            {
                //var tabs = TabVMs.Where(x => ((Tab)x.Content).Channels.Contains(chatMessage.Channel)).ToList();
                //tabs.ForEach(tab =>
                //{
                //    ((Tab) tab.Content).Attention = true;
                //});
                //if (tabs.Count != 0) return;
                //tabs = TabVMs.Where(x => !((Tab)x.Content).ExcludedChannels.Contains(chatMessage.Channel)).ToList();
                //tabs.ForEach(tab =>
                //{
                //    ((Tab)tab.Content).Attention = true;
                //});
                TabVMs.Where(x => ((Tab)x.Content).GetDispatcher().Invoke(() => ((Tab)x.Content).Messages.Contains(chatMessage))).ToList().ForEach(t => ((Tab)t.Content).Attention = true);
                //var t = TabVMs.FirstOrDefault(x => ((Tab)x.Content).Channels.Contains(chatMessage.Channel));
                //if (t != null)
                //{
                //    ((Tab)t.Content).Attention = true;
                //}
                //else
                //{
                //    t = TabVMs.FirstOrDefault(x => !((Tab)x.Content).ExcludedChannels.Contains(chatMessage.Channel));
                //    if (t != null) ((Tab)t.Content).Attention = true;
                //}
            }
        }
        // Raises PropertyChanged for the calling member (or the given property name).
        private void NPC([CallerMemberName] string prop = null)
        {
            PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(prop));
        }
        internal void RemoveTab(Tab dc)
        {
            var t = TabVMs.FirstOrDefault(x => x.Content == dc);
            if (t != null) TabVMs.Remove(t);
        }
        public event PropertyChangedEventHandler PropertyChanged;
        // Shows the chat (and restarts the hide countdown) when the newly added
        // message passes the current tab's filter.
        public void CheckVisibility(IList newItems)
        {
            if (CurrentTab == null)
            {
                CurrentTab = TabVMs[0].Content as Tab;
            }
            if (!CurrentTab.Filter(newItems[0] as ChatMessage)) return;
            RefreshHideTimer();
            VisibilityChanged?.Invoke(true); // IsChatVisible = true;
        }
    }
} | 37.21371 | 187 | 0.522483 | [
"MIT"
] | Seyuna/Tera-custom-cooldowns | TCC.Core/ViewModels/ChatViewModel.cs | 9,231 | C# |
#region -- License Terms --
//
// MessagePack for CLI
//
// Copyright (C) 2017 FUJIWARA, Yusuke
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion -- License Terms --
using System;
using System.Collections.Generic;
#if !MSTEST
using NUnit.Framework;
#else
using TestFixtureAttribute = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute;
using TestAttribute = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute;
using TimeoutAttribute = NUnit.Framework.TimeoutAttribute;
using Assert = NUnit.Framework.Assert;
using Is = NUnit.Framework.Is;
#endif
namespace MsgPack
{
[TestFixture]
[Timeout( 30000 )]
public class FastByteArrayUnpackerTest : ByteArrayUnpackerTest
{
protected override bool ShouldCheckSubtreeUnpacker
{
get { return false; }
}
protected override ByteArrayUnpacker CreateUnpacker( byte[] source, int offset )
{
return Unpacker.Create( source, offset, new UnpackerOptions { ValidationLevel = UnpackerValidationLevel.None } );
}
}
}
| 31.28 | 116 | 0.752558 | [
"Apache-2.0"
] | KKL1982/msgpack-cli | test/MsgPack.UnitTest/FastByteArrayUnpackerTest.cs | 1,564 | C# |
#if !NETSTANDARD13
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the rds-2014-10-31.normal.json service model.
*/
using Amazon.Runtime;
namespace Amazon.RDS.Model
{
    /// <summary>
    /// Paginator for the DescribeEvents operation
    /// </summary>
    public interface IDescribeEventsPaginator
    {
        /// <summary>
        /// Enumerable containing all full responses for the operation
        /// </summary>
        IPaginatedEnumerable<DescribeEventsResponse> Responses { get; }

        /// <summary>
        /// Enumerable containing all of the Events
        /// </summary>
        IPaginatedEnumerable<Event> Events { get; }
    }
}
#endif | 31.25 | 101 | 0.6816 | [
"Apache-2.0"
] | PureKrome/aws-sdk-net | sdk/src/Services/RDS/Generated/Model/_bcl45+netstandard/IDescribeEventsPaginator.cs | 1,250 | C# |
//{[{
using Windows.System;
using Windows.UI.Core;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Navigation;
using Param_RootNamespace.ViewModels;
//}]}
namespace Param_RootNamespace.Services
{
internal class ActivationService
{
//^^
//{[{
public static NavigationServiceEx NavigationService => ViewModelLocator.Current.NavigationService;
public static readonly KeyboardAccelerator AltLeftKeyboardAccelerator = BuildKeyboardAccelerator(VirtualKey.Left, VirtualKeyModifiers.Menu);
public static readonly KeyboardAccelerator BackKeyboardAccelerator = BuildKeyboardAccelerator(VirtualKey.GoBack);
//}]}
public ActivationService(App app, Type defaultNavItem, Lazy<UIElement> shell = null)
{
}
public async Task ActivateAsync(object activationArgs)
{
if (Window.Current.Content == null)
{
Window.Current.Content = _shell?.Value ?? new Frame();
//{[{
NavigationService.NavigationFailed += (sender, e) =>
{
throw e.Exception;
};
NavigationService.Navigated += Frame_Navigated;
if (SystemNavigationManager.GetForCurrentView() != null)
{
SystemNavigationManager.GetForCurrentView().BackRequested += ActivationService_BackRequested;
}
//}]}
}
}
//{[{
private void Frame_Navigated(object sender, NavigationEventArgs e)
{
SystemNavigationManager.GetForCurrentView().AppViewBackButtonVisibility = NavigationService.CanGoBack ?
AppViewBackButtonVisibility.Visible : AppViewBackButtonVisibility.Collapsed;
}
private static KeyboardAccelerator BuildKeyboardAccelerator(VirtualKey key, VirtualKeyModifiers? modifiers = null)
{
var keyboardAccelerator = new KeyboardAccelerator() { Key = key };
if (modifiers.HasValue)
{
keyboardAccelerator.Modifiers = modifiers.Value;
}
keyboardAccelerator.Invoked += OnKeyboardAcceleratorInvoked;
return keyboardAccelerator;
}
private static void OnKeyboardAcceleratorInvoked(KeyboardAccelerator sender, KeyboardAcceleratorInvokedEventArgs args)
{
var result = NavigationService.GoBack();
args.Handled = result;
}
private void ActivationService_BackRequested(object sender, BackRequestedEventArgs e)
{
var result = NavigationService.GoBack();
e.Handled = result;
}
//}]}
}
}
| 35.324675 | 148 | 0.626471 | [
"MIT"
] | ammogcoder/WindowsTemplateStudio | templates/Uwp/_composition/MVVMLight/Project.BlankTabbedPivot.AddSystemBackButton/Services/ActivationService_postaction.cs | 2,722 | C# |
using System.Text.Json.Serialization;
namespace SimplyCast.Models.ContactManager
{
    /// <summary>
    /// A contact-manager metadata column as serialized to/from the SimplyCast
    /// JSON API (property names follow the API's camelCase keys).
    /// </summary>
    public class MetadataColumn
    {
        // Nullable: the API may omit the id (e.g. for a column not yet created).
        [JsonPropertyName("id")]
        public int? Id { get; set; }

        [JsonPropertyName("name")]
        public string Name { get; set; }

        [JsonPropertyName("type")]
        public string Type { get; set; }

        // NOTE(review): the API models these flags as integers — presumably 0/1; confirm.
        [JsonPropertyName("userDefined")]
        public int UserDefined { get; set; }

        [JsonPropertyName("visible")]
        public int Visible { get; set; }

        [JsonPropertyName("editable")]
        public int Editable { get; set; }

        [JsonPropertyName("valueColumn")]
        public string ValueColumn { get; set; }

        [JsonPropertyName("links")]
        public Link[] Links { get; set; }
    }
| 22.823529 | 47 | 0.590206 | [
"MIT"
] | stevedorries/csharp-wrapper | Models/ContactManager/MetadataColumn.cs | 776 | C# |
using DG.Tweening;
using Json;
namespace JTween.Image {
public class JTweenImageFillAmount : JTweenBase {
private float m_beginAmount = 0;
private float m_toAmount = 0;
private UnityEngine.UI.Image m_Image;
public JTweenImageFillAmount() {
m_tweenType = (int)JTweenImage.FillAmount;
m_tweenElement = JTweenElement.Image;
}
public float BeginAmount {
get {
return m_beginAmount;
}
set {
m_beginAmount = value;
}
}
public float ToAmount {
get {
return m_toAmount;
}
set {
m_toAmount = value;
}
}
protected override void Init() {
if (null == m_target) return;
// end if
m_Image = m_target.GetComponent<UnityEngine.UI.Image>();
if (null == m_Image) return;
// end if
m_beginAmount = m_Image.fillAmount;
}
protected override Tween DOPlay() {
if (null == m_Image) return null;
// end if
return m_Image.DOFillAmount(m_toAmount, m_duration);
}
public override void Restore() {
if (null == m_Image) return;
// end if
m_Image.fillAmount = m_beginAmount;
}
protected override void JsonTo(IJsonNode json) {
if (json.Contains("beginAmount")) BeginAmount = json.GetFloat("beginAmount");
// end if
if (json.Contains("amount")) m_toAmount = json.GetFloat("beginAamountmount");
// end if
Restore();
}
protected override void ToJson(ref IJsonNode json) {
json.SetFloat("beginAmount", m_beginAmount);
json.SetFloat("amount", m_toAmount);
}
protected override bool CheckValid(out string errorInfo) {
if (null == m_Image) {
errorInfo = GetType().FullName + " GetComponent<Image> is null";
return false;
} // end if
errorInfo = string.Empty;
return true;
}
}
}
| 28.805195 | 89 | 0.517583 | [
"Unlicense"
] | HelloWindows/AccountBook | client/framework/GameFramework-master/JTween/JTween/Image/JTweenImageFillAmount.cs | 2,220 | C# |
#if GRIFFIN
namespace Pinwheel.Griffin.TextureTool
{
public enum GTextureFilterType
{
Curve, Blur, Invert, Step, Warp
}
}
#endif
| 15.8 | 40 | 0.632911 | [
"MIT"
] | Abelark/Project-3---Rocket-Punch | Assets/Polaris - Low Poly Ecosystem/Polaris - Low Poly Terrain Engine/Runtime/Scripts/TextureTool/Filters/GTextureFilterType.cs | 158 | C# |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Listas.Classes
{
public class Curso
{
// Tornando nossa lista em uma coleção de apenas leitura...
public List<Aula> Aulas { get; set; }
public string Nome { get; set; }
public string Instrutor { get; set; }
public Curso()
{
}
public Curso( string nome, string instrutor)
{
Nome = nome;
}
public override string ToString()
{
return $" - [Curso: {Nome}] - \n";
}
}
}
| 18.621622 | 67 | 0.557329 | [
"MIT"
] | mattheustoscano/Csharp | Collections/Listas/Classes/Curso.cs | 693 | C# |
// Copyright 2021 Jon Skeet. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using Commons.Music.Midi;
using System.Threading.Tasks;
namespace XTouchMini.Model
{
/// <summary>
/// Controller for working with an X-Touch Mini in Standard mode.
/// </summary>
public class XTouchMiniStandardController : XTouchMiniController
{
private XTouchMiniStandardController(string portName) : base(portName)
{
}
/// <summary>
/// Connects to an X-Touch Mini and sets it to Standard mode.
/// </summary>
/// <param name="name">The MIDI name of the input/output ports.</param>
public static Task<XTouchMiniStandardController> ConnectAsync(string portName) =>
ConnectAsync(new XTouchMiniStandardController(portName));
public override async Task<bool> MaybeReconnect()
{
var result = await base.MaybeReconnect().ConfigureAwait(false);
if (result)
{
SetOperationMode(OperationMode.Standard);
}
return result;
}
protected override void HandleMidiMessage(byte[] data)
{
switch (data[0])
{
case 0xba:
// Sliders: 0x09 for layer A, 0x0a for layer B
if (data[1] == 0x09 || data[1] == 0x0a)
{
OnFaderMoved(data[1] == 0x09 ? Layer.LayerA : Layer.LayerB, data[2]);
}
// Knobs, 0x01-0x08 for layer A, 0x0b-0x12 for layer B
else
{
OnKnobTurned(data[1] % 0xa, data[1] < 0x0b ? Layer.LayerA : Layer.LayerB, data[2]);
}
break;
case 0x8a:
case 0x9a:
byte note = data[1];
bool down = data[0] == 0x9a;
if (note < 8)
{
// Map 0x00-0x07 to knobs 1-8 (layer A)
OnKnobPressRelease(note + 1, Layer.LayerA, down);
}
else if (note < 0x18)
{
// Map 0x08-0x17 to buttons 1-16 (layer A)
OnButtonPressRelease(note - 7, Layer.LayerA, down);
}
else if (note < 0x20)
{
// Map 0x18-0x1f to knobs 1-8 (layer B)
OnKnobPressRelease(note - 0x17, Layer.LayerB, down);
}
else
{
// Map 0x20-0x2f to buttons 1-16 (layer B)
OnButtonPressRelease(note - 0x1f, Layer.LayerB, down);
}
break;
}
}
public void SetActiveLayer(Layer layer) =>
SendMidiMessage(0xc0, (byte) (layer - 1));
public void SetKnobPosition(int knob, int position) =>
SendMidiMessage(0xba, (byte) knob, (byte) position);
public void SetKnobRingStyle(int knob, KnobRingStyle style) =>
SendMidiMessage(0xb0, (byte) knob, (byte) style);
/// <summary>
/// Sets the ring lights for a knob.
/// </summary>
/// <param name="knob">The knob to set the lights for</param>
/// <param name="state">The overall state: off, on, or blinking</param>
/// <param name="value">The individual value (0 for off, </param>
public void SetKnobRingLights(int knob, LedState state, int value)
{
byte midiValue = (state, value) switch
{
(LedState.Off, _) => 0,
(_, 0) => 0,
(LedState.On, 14) => 27,
(LedState.Blinking, 14) => 28,
(LedState.On, >= 1 and <= 13) => (byte) value,
(LedState.Blinking, >= 1 and <= 13) => (byte) (value + 13),
_ => 0
};
SendMidiMessage(0xb0, (byte) (knob + 8), midiValue);
}
/// <summary>
/// Sets a button's LED state. Buttons are 1-based in code; the note byte is 0-based.
/// </summary>
public void SetButtonState(int button, LedState state)
{
    SendMidiMessage(0x90, (byte) (button - 1), (byte) state);
}
}
}
| 38.078947 | 107 | 0.48376 | [
"Apache-2.0"
] | jskeet/DemoCode | XTouchMini/XTouchMini.Model/XTouchMiniStandardController.cs | 4,343 | C# |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Globalization;
using Xunit;
namespace System.Globalization.CalendarsTests
{
// Tests for System.Globalization.ThaiBuddhistCalendar.AddYears(DateTime, System.Int32).
// Thai Buddhist years are Gregorian years offset by +543.
public class ThaiBuddhistCalendarAddYears
{
    private int _errorNo = 0;
    // Days per month (index 0 unused) for leap / non-leap Gregorian years.
    private readonly int[] _DAYS_PER_MONTHS_IN_LEAP_YEAR = new int[13]
    {
        0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31
    };
    private readonly int[] _DAYS_PER_MONTHS_IN_NO_LEAP_YEAR = new int[13]
    {
        0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31
    };
    #region Positive Tests
    // PosTest1: Verify adding a positive number of years
    [Fact]
    public void PosTest1()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        Random rand = new Random(-55);
        int year = rand.Next(tbc.MinSupportedDateTime.Year + 543, tbc.MaxSupportedDateTime.Year + 543);
        // Bug fix: Random.Next's upper bound is exclusive; use 13 so December is reachable.
        int month = rand.Next(1, 13);
        int day = GetRandomDayOfMonth(rand, year, month);
        DateTime dt = tbc.ToDateTime(year, month, day, 0, 0, 0, 0);
        int addvalue = rand.Next(1, (tbc.MaxSupportedDateTime.Year - year + 543));
        VerificationHelper(dt, addvalue);
    }
    // PosTest2: Verify adding a negative number of years
    [Fact]
    public void PosTest2()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        Random rand = new Random(-55);
        int year = rand.Next(tbc.MinSupportedDateTime.Year + 543, tbc.MaxSupportedDateTime.Year + 543);
        int month = rand.Next(1, 13);
        int day = GetRandomDayOfMonth(rand, year, month);
        DateTime dt = tbc.ToDateTime(year, month, day, 0, 0, 0, 0);
        int addvalue = rand.Next((tbc.MinSupportedDateTime.Year - year + 543), 0);
        VerificationHelper(dt, addvalue);
    }
    // PosTest3: Verify adding zero years to MaxSupportedDateTime
    [Fact]
    public void PosTest3()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        VerificationHelper(tbc.MaxSupportedDateTime, 0);
    }
    // PosTest4: Verify adding zero years to MinSupportedDateTime
    [Fact]
    public void PosTest4()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        VerificationHelper(tbc.MinSupportedDateTime, 0);
    }
    // PosTest5: Verify adding zero years to an arbitrary date
    [Fact]
    public void PosTest5()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        Random rand = new Random(-55);
        int year = rand.Next(tbc.MinSupportedDateTime.Year + 543, tbc.MaxSupportedDateTime.Year + 543);
        int month = rand.Next(1, 13);
        int day = GetRandomDayOfMonth(rand, year, month);
        DateTime dt = tbc.ToDateTime(year, month, day, 0, 0, 0, 0);
        VerificationHelper(dt, 0);
    }
    #endregion
    #region Negative tests
    // NegTest1: The resulting DateTime is greater than the supported range
    [Fact]
    public void NegTest1()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        Random rand = new Random(-55);
        int year = rand.Next(tbc.MinSupportedDateTime.Year + 543, tbc.MaxSupportedDateTime.Year + 543);
        int month = rand.Next(1, 13);
        int day = GetRandomDayOfMonth(rand, year, month);
        DateTime dt = tbc.ToDateTime(year, month, day, 0, 0, 0, 0);
        // Bug fix: adding exactly (max year - year) still lands inside the
        // supported range, so start one year past that boundary.
        int addValue = rand.Next(tbc.MaxSupportedDateTime.Year - year + 543 + 1, Int32.MaxValue);
        // Bug fix: the method under test is AddYears (this previously called AddMonths).
        Assert.Throws<ArgumentOutOfRangeException>(() =>
        {
            tbc.AddYears(dt, addValue);
        });
    }
    // NegTest2: The resulting DateTime is less than the supported range
    [Fact]
    public void NegTest2()
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        Random rand = new Random(-55);
        int year = rand.Next(tbc.MinSupportedDateTime.Year + 543, tbc.MaxSupportedDateTime.Year + 543);
        int month = rand.Next(1, 13);
        int day = GetRandomDayOfMonth(rand, year, month);
        DateTime dt = tbc.ToDateTime(year, month, day, 0, 0, 0, 0);
        // The exclusive upper bound already lands at least one year below the minimum.
        int addValue = rand.Next(Int32.MinValue, tbc.MinSupportedDateTime.Year - year + 543);
        // Bug fix: the method under test is AddYears (this previously called AddMonths).
        Assert.Throws<ArgumentOutOfRangeException>(() =>
        {
            tbc.AddYears(dt, addValue);
        });
    }
    #endregion
    #region Helper Methods
    // Picks a random valid day for the given Thai Buddhist year and month.
    private int GetRandomDayOfMonth(Random rand, int year, int month)
    {
        int[] daysPerMonth = IsLeapYear(year) ? _DAYS_PER_MONTHS_IN_LEAP_YEAR : _DAYS_PER_MONTHS_IN_NO_LEAP_YEAR;
        return rand.Next(1, daysPerMonth[month] + 1);
    }
    // Gregorian leap-year rule applied to a Thai Buddhist year (Gregorian + 543).
    private bool IsLeapYear(int i)
    {
        int year = i - 543;
        // Bug fix: a year divisible by 400 IS a leap year; the original negated
        // the century exception and classified e.g. 2000 as a common year.
        return (year % 4 == 0) && ((year % 100 != 0) || (year % 400 == 0));
    }
    // Asserts that AddYears shifts the year by addValue, keeps month/day, and
    // clamps Feb 29 to Feb 28 when the target year is not a leap year.
    private void VerificationHelper(DateTime value, int addValue)
    {
        System.Globalization.Calendar tbc = new ThaiBuddhistCalendar();
        DateTime newDate = tbc.AddYears(value, addValue);
        _errorNo++;
        // Bug fix: Assert.Equal takes the expected value first.
        Assert.Equal(value.Year + addValue, newDate.Year);
        Assert.Equal(value.Month, newDate.Month);
        if (value.Month == 2 && value.Day == 29)
        {
            // Bug fix: only a Feb 29 source day depends on the target year's
            // leapness (the original also expected Feb 28 to become Feb 29).
            // DateTime.IsLeapYear takes the Gregorian year directly, avoiding
            // the Thai-year offset confusion of the private helper.
            Assert.Equal(DateTime.IsLeapYear(newDate.Year) ? 29 : 28, newDate.Day);
        }
        else
        {
            Assert.Equal(value.Day, newDate.Day);
        }
    }
    #endregion
}
} | 35.221106 | 112 | 0.522756 | [
"MIT"
] | 690486439/corefx | src/System.Globalization.Calendars/tests/ThaiBuddhistCalendar/ThaiBuddhistCalendarAddYears.cs | 7,009 | C# |
using System.Runtime.Serialization;
namespace SpotifyWebHelperAPI.Models
{
/// <summary>
/// Data contract for the status payload returned by the Spotify web helper;
/// each member mirrors the JSON property named in its DataMember attribute.
/// </summary>
[DataContract(Name = "Status")]
public class StatusDto
{
    // Error details - presumably only populated on failed requests; TODO confirm.
    [DataMember(Name = "error")]
    public ErrorDto Error { get; set; }
    // Protocol/API version reported by the helper.
    [DataMember(Name = "version")]
    public int Version { get; set; }
    // Version string of the local Spotify client.
    [DataMember(Name = "client_version")]
    public string ClientVersion { get; set; }
    // Whether playback is currently active.
    [DataMember(Name = "playing")]
    public bool Playing { get; set; }
    [DataMember(Name = "shuffle")]
    public bool Shuffle { get; set; }
    [DataMember(Name = "repeat")]
    public bool Repeat { get; set; }
    [DataMember(Name = "play_enabled")]
    public bool PlayEnabled { get; set; }
    [DataMember(Name = "prev_enabled")]
    public bool PrevEnabled { get; set; }
    // The track the client currently has loaded.
    [DataMember(Name = "track")]
    public TrackDto Track { get; set; }
    // Playback position - assumed to be seconds; TODO confirm against the helper API.
    [DataMember(Name = "playing_position")]
    public double PlayingPosition { get; set; }
    [DataMember(Name = "server_time")]
    public int ServerTime { get; set; }
    [DataMember(Name = "volume")]
    public double Volume { get; set; }
    [DataMember(Name = "online")]
    public bool Online { get; set; }
    [DataMember(Name = "open_graph_state")]
    public OpenGraphStateDto OpenGraphState { get; set; }
    // Whether the local Spotify client is running.
    [DataMember(Name = "running")]
    public bool Running { get; set; }
}
} | 27.622642 | 61 | 0.579918 | [
"MIT"
] | joakimskoog/SpotifyWebHelperAPI | SpotifyWebHelperAPI/Models/StatusDto.cs | 1,466 | C# |
namespace Microsoft.Azure.PowerShell.Cmdlets.Kusto.Models.Api20200215
{
    using static Microsoft.Azure.PowerShell.Cmdlets.Kusto.Runtime.Extensions;
    // NOTE(review): this file follows the AutoRest-generated model pattern
    // (backing field + Origin/Info attributes per property); if it is generated,
    // change the generator input rather than hand-editing this file.
    /// <summary>A class that contains virtual network definition.</summary>
    public partial class VirtualNetworkConfiguration :
        Microsoft.Azure.PowerShell.Cmdlets.Kusto.Models.Api20200215.IVirtualNetworkConfiguration,
        Microsoft.Azure.PowerShell.Cmdlets.Kusto.Models.Api20200215.IVirtualNetworkConfigurationInternal
    {
        /// <summary>Backing field for <see cref="DataManagementPublicIPId" /> property.</summary>
        private string _dataManagementPublicIPId;
        /// <summary>Data management's service public IP address resource id.</summary>
        [Microsoft.Azure.PowerShell.Cmdlets.Kusto.Origin(Microsoft.Azure.PowerShell.Cmdlets.Kusto.PropertyOrigin.Owned)]
        public string DataManagementPublicIPId { get => this._dataManagementPublicIPId; set => this._dataManagementPublicIPId = value; }
        /// <summary>Backing field for <see cref="EnginePublicIPId" /> property.</summary>
        private string _enginePublicIPId;
        /// <summary>Engine service's public IP address resource id.</summary>
        [Microsoft.Azure.PowerShell.Cmdlets.Kusto.Origin(Microsoft.Azure.PowerShell.Cmdlets.Kusto.PropertyOrigin.Owned)]
        public string EnginePublicIPId { get => this._enginePublicIPId; set => this._enginePublicIPId = value; }
        /// <summary>Backing field for <see cref="SubnetId" /> property.</summary>
        private string _subnetId;
        /// <summary>The subnet resource id.</summary>
        [Microsoft.Azure.PowerShell.Cmdlets.Kusto.Origin(Microsoft.Azure.PowerShell.Cmdlets.Kusto.PropertyOrigin.Owned)]
        public string SubnetId { get => this._subnetId; set => this._subnetId = value; }
        /// <summary>Creates an new <see cref="VirtualNetworkConfiguration" /> instance.</summary>
        public VirtualNetworkConfiguration()
        {
        }
    }
    /// A class that contains virtual network definition.
    public partial interface IVirtualNetworkConfiguration :
        Microsoft.Azure.PowerShell.Cmdlets.Kusto.Runtime.IJsonSerializable
    {
        /// <summary>Data management's service public IP address resource id.</summary>
        [Microsoft.Azure.PowerShell.Cmdlets.Kusto.Runtime.Info(
        Required = true,
        ReadOnly = false,
        Description = @"Data management's service public IP address resource id.",
        SerializedName = @"dataManagementPublicIpId",
        PossibleTypes = new [] { typeof(string) })]
        string DataManagementPublicIPId { get; set; }
        /// <summary>Engine service's public IP address resource id.</summary>
        [Microsoft.Azure.PowerShell.Cmdlets.Kusto.Runtime.Info(
        Required = true,
        ReadOnly = false,
        Description = @"Engine service's public IP address resource id.",
        SerializedName = @"enginePublicIpId",
        PossibleTypes = new [] { typeof(string) })]
        string EnginePublicIPId { get; set; }
        /// <summary>The subnet resource id.</summary>
        [Microsoft.Azure.PowerShell.Cmdlets.Kusto.Runtime.Info(
        Required = true,
        ReadOnly = false,
        Description = @"The subnet resource id.",
        SerializedName = @"subnetId",
        PossibleTypes = new [] { typeof(string) })]
        string SubnetId { get; set; }
    }
    /// A class that contains virtual network definition.
    internal partial interface IVirtualNetworkConfigurationInternal
    {
        /// <summary>Data management's service public IP address resource id.</summary>
        string DataManagementPublicIPId { get; set; }
        /// <summary>Engine service's public IP address resource id.</summary>
        string EnginePublicIPId { get; set; }
        /// <summary>The subnet resource id.</summary>
        string SubnetId { get; set; }
    }
}
"MIT"
] | Arsasana/azure-powershell | src/Kusto/generated/api/Models/Api20200215/VirtualNetworkConfiguration.cs | 3,906 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using Inbox.Common.Model;
using Newtonsoft.Json.Linq;
namespace Inbox.Business
{
/// <summary>
/// Wraps a mail <see cref="Client"/>: loads the plugin assemblies named in the
/// settings, instantiates and configures the client type, then connects and
/// authenticates it.
/// </summary>
public class Mailbox
{
    public Folder All => client.All;
    public Folder Inbox => client.Inbox;
    public Folder Archive => client.Archive;
    public Folder Drafts => client.Drafts;
    public Folder Junk => client.Junk;
    public Folder Sent => client.Sent;
    public Folder Trash => client.Trash;

    // NOTE(review): these are never assigned anywhere in this class, so they
    // are always null - confirm whether they are populated elsewhere or dead.
    public IEnumerable<Bundle> Bundles { get; }
    public IEnumerable<Reminder> Reminders { get; }

    private Client client;

    public Mailbox(MailboxSettings settings)
    {
        LoadPlugins(settings);

        client = CreateClient(settings);
        ApplyClientSettings(settings);

        // Connect and authenticate
        client.Connect();
        client.Authenticate();
    }

    // Ensures every assembly listed in settings.Plugins is loaded into the AppDomain.
    private static void LoadPlugins(MailboxSettings settings)
    {
        foreach (string plugin in settings.Plugins)
        {
            if (FindLoadedAssembly(plugin) != null)
                continue;
            if (TryLoadFromAbsolutePath(plugin))
                continue;
            if (TryLoadFromSearchPaths(plugin))
                continue;
            throw new FileNotFoundException($"Could not find plugin {plugin} in any known directory");
        }
    }

    // Returns an already-loaded assembly whose full name or file name matches, or null.
    private static Assembly FindLoadedAssembly(string plugin)
    {
        return AppDomain.CurrentDomain.GetAssemblies()
            .FirstOrDefault(a =>
            {
                if (string.Equals(a.FullName, plugin, StringComparison.InvariantCultureIgnoreCase))
                    return true;
                if (!a.IsDynamic)
                {
                    string assemblyFile = Path.GetFileNameWithoutExtension(a.Location);
                    if (string.Equals(assemblyFile, plugin, StringComparison.InvariantCultureIgnoreCase))
                        return true;
                }
                return false;
            });
    }

    // Best-effort load when the plugin is given as an absolute file path.
    private static bool TryLoadFromAbsolutePath(string plugin)
    {
        if (!Path.IsPathRooted(plugin) || !File.Exists(plugin))
            return false;
        try
        {
            Assembly.LoadFrom(plugin);
            return true;
        }
        catch
        {
            // Best-effort: fall through to search-path probing.
            return false;
        }
    }

    // Probes well-known directories for the plugin file; best-effort.
    private static bool TryLoadFromSearchPaths(string plugin)
    {
        string pluginFile = plugin;
        if (!pluginFile.EndsWith(".exe") && !pluginFile.EndsWith(".dll"))
            pluginFile += ".dll";

        Assembly currentAssembly = Assembly.GetEntryAssembly();
        string[] searchPaths = new[]
        {
            Environment.CurrentDirectory,
            AppDomain.CurrentDomain.BaseDirectory,
            // Bug fix: the original condition was inverted - it read the Location
            // of *dynamic* assemblies (which have none) and skipped the normal
            // case. Also guard against GetEntryAssembly() returning null
            // (e.g. when hosted from unmanaged code).
            currentAssembly != null && !currentAssembly.IsDynamic
                ? Path.GetDirectoryName(currentAssembly.Location)
                : null
        };

        foreach (string searchPath in searchPaths)
        {
            if (searchPath == null)
                continue;

            string pluginPath = Path.Combine(searchPath, pluginFile);
            if (File.Exists(pluginPath))
            {
                try
                {
                    Assembly.LoadFrom(pluginPath);
                    return true;
                }
                catch
                {
                    // Best-effort: try the next search path.
                }
            }
        }
        return false;
    }

    // Resolves settings.ClientType in the loaded assemblies and instantiates it.
    private static Client CreateClient(MailboxSettings settings)
    {
        Type clientType = AppDomain.CurrentDomain.GetAssemblies()
            .Select(a => a.GetType(settings.ClientType, false))
            .FirstOrDefault(t => t != null);
        if (clientType == null)
            throw new Exception($"Could not find type {settings.ClientType} in any loaded assemblies");

        Client result = Activator.CreateInstance(clientType) as Client;
        if (result == null)
            throw new Exception($"Type {settings.ClientType} is not a valid client type");
        return result;
    }

    // Copies each JSON property of settings.ClientSettings onto the matching
    // public property of the client instance.
    private void ApplyClientSettings(MailboxSettings settings)
    {
        if (settings.ClientSettings == null)
            return;

        Type clientType = client.GetType();
        foreach (JProperty property in settings.ClientSettings.Properties())
        {
            PropertyInfo propertyInfo = clientType.GetProperty(property.Name);
            if (propertyInfo == null)
                throw new Exception($"Property {property.Name} was not found on client type {settings.ClientType}");

            object value = property.Value.ToObject(propertyInfo.PropertyType);
            propertyInfo.SetValue(client, value);
        }
    }
}
}
| 34.294118 | 124 | 0.488636 | [
"MIT"
] | jbatonnet/inbox | Inbox.Business/Mailbox.cs | 4,666 | C# |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Linq.Expressions;
using JetBrains.Annotations;
using Remotion.Linq;
using Remotion.Linq.Clauses;
using Remotion.Linq.Clauses.ResultOperators;
using Remotion.Linq.Clauses.StreamedData;
namespace Microsoft.EntityFrameworkCore.Query.ResultOperators.Internal
{
/// <summary>
/// Query annotation/result operator recording a FromSql call: carries the raw
/// SQL string and its argument expression through the query model without
/// changing the sequence (see <see cref="ExecuteInMemory{T}" />).
/// </summary>
public class FromSqlResultOperator : SequenceTypePreservingResultOperatorBase, IQueryAnnotation
{
    public FromSqlResultOperator([NotNull] string sql, [NotNull] Expression arguments)
    {
        Sql = sql;
        Arguments = arguments;
    }
    // IQueryAnnotation: the query source and model this annotation is attached to.
    public virtual IQuerySource QuerySource { get; [NotNull] set; }
    public virtual QueryModel QueryModel { get; set; }
    // The raw SQL text passed to FromSql.
    public virtual string Sql { get; }
    // Expression producing the SQL parameter values.
    public virtual Expression Arguments { get; }
    public override string ToString() => $"FromSql('{Sql}')";
    // Clones only Sql/Arguments; QuerySource/QueryModel are presumably
    // re-attached by the annotation machinery - TODO confirm.
    public override ResultOperatorBase Clone([NotNull] CloneContext cloneContext)
        => new FromSqlResultOperator(Sql, Arguments);
    // No nested expressions to transform.
    public override void TransformExpressions([NotNull] Func<Expression, Expression> transformation)
    {
    }
    // Pass-through: FromSql does not alter the in-memory sequence.
    public override StreamedSequence ExecuteInMemory<T>([NotNull] StreamedSequence input) => input;
}
}
| 34.365854 | 111 | 0.712562 | [
"Apache-2.0"
] | davidroth/EntityFrameworkCore | src/Microsoft.EntityFrameworkCore.Relational/Query/ResultOperators/Internal/FromSqlResultOperator.cs | 1,409 | C# |
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.IO;
using System.Text;
using Microsoft.CodeAnalysis.Text;
using Xunit;
namespace Microsoft.CodeAnalysis.UnitTests
{
/// <summary>
/// Runs the default StringText tests against input encoded as UTF-8 without a
/// byte-order mark.
/// </summary>
public class StringTextTest_UTF8NoBOM : StringTextTest_Default
{
    protected override SourceText Create(string source)
    {
        // Encode without emitting a BOM so the decoder gets no identifier bytes.
        var encoding = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false);
        byte[] bytes = GetBytes(encoding, source);
        using (var stream = new MemoryStream(bytes, 0, bytes.Length, writable: false, publiclyVisible: true))
        {
            return EncodedStringText.Create(stream);
        }
    }
}
}
| 34.608696 | 184 | 0.694724 | [
"Apache-2.0"
] | enginekit/copy_of_roslyn | Src/Compilers/Core/CodeAnalysisTest/Text/StringTextTest_UTF8NoBOM.cs | 798 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using ParcInfo.ucControls;
using System.Data.Entity;
namespace ParcInfo.frmList
{
/// <summary>
/// Dashboard view: shows request/intervention counters, the five most urgent
/// rows of each list, and the average request-to-intervention delay.
/// </summary>
public partial class Dashboard : UserControl
{
    // Lower-cased names of the current user's "consult" roles.
    // NOTE(review): GetRolesName() is the only writer of these fields and is
    // never called from this class, so both are null while the constructor
    // runs - confirm whether the constructor should call it before the checks.
    public string RoleI;
    public string RoleR;

    /// <summary>Maps a status label to the colour shown in the dashboard lists.</summary>
    public Color GetColor(string statut)
    {
        Color color = Color.Transparent;
        switch (statut)
        {
            case "en attente":
                color = Color.FromArgb(255, 192, 0);
                break;
            case "en retard":
                color = Color.FromArgb(238, 82, 83);
                break;
            case "en cours":
                color = Color.FromArgb(241, 196, 15);
                break;
            case "terminer":
                color = Color.FromArgb(46, 204, 113);
                break;
            default:
                break;
        }
        return color;
    }

    /// <summary>
    /// Extracts the current user's "consult requests" / "consult interventions"
    /// role names (lower-cased) into RoleR / RoleI.
    /// </summary>
    public void GetRolesName()
    {
        var t = GlobVars.cuUser.RoleUtilisateurs1.Where(x => x.IdUtilisateur == GlobVars.cuUser.Id && x.IsDeleted == 0);
        foreach (var v in t)
            if (v.Nom.ToLower().Contains("Consulter".ToLower()) && v.Nom.ToLower().Contains("demandes".ToLower()) && v.IsDeleted != 1)
                RoleR = v.Nom.ToLower();
        foreach (var v in t)
            if (v.Nom.ToLower().Contains("Consulter".ToLower()) && v.Nom.ToLower().Contains("interventions".ToLower()) && v.IsDeleted != 1)
                RoleI = v.Nom.ToLower();
    }

    public Dashboard()
    {
        InitializeComponent();
        if (GlobVars.cuUser != null)
        {
            var u = GlobVars.cuUser;
            using (ParcInformatiqueEntities context = new ParcInformatiqueEntities())
            {
                if (u.isAdmin == 1)
                {
                    GetDashAdmin(true, true);
                    Dem(context.GetRequestbyStatut(), context.GetInterventionBystatut());
                }
                else
                {
                    if (RoleI == "Consulter tous les interventions".ToLower() && RoleR == "Consulter tous les demandes".ToLower())
                    {
                        GetDashAdmin(true, true);
                        Dem(context.GetRequestbyStatut(), context.GetInterventionBystatut());
                    }
                    else
                    {
                        GetDashUser();
                        Dem(context.GetAssignedRequestbyStatut(), context.GetInterventionBystatut());
                    }
                }
            }
        }
    }

    // Adds one dashboard row control for a request ("dem") or intervention ("int").
    public void CreateLblDash(string t, int id, string Code, string client, string date, string statut, Color color, Control c)
    {
        lblDashTablecs lbl = new lblDashTablecs();
        lbl.Name = t;
        lbl.ID = id.ToString();
        lbl.LblID = Code;
        lbl.LblClient = client;
        lbl.LblDate = date;
        lbl.LblStatut = statut;
        lbl.lblStatutColor = color;
        c.Controls.Add(lbl);
    }

    /// <summary>
    /// Fills the dashboard for a user allowed to see ALL requests (hasallr)
    /// and/or ALL interventions (hasalli): counters plus the five most urgent rows.
    /// </summary>
    public void GetDashAdmin(bool hasallr, bool hasalli)
    {
        using (ParcInformatiqueEntities context = new ParcInformatiqueEntities())
        {
            // Sort key: most urgent status first, then oldest date.
            var listOrder = new List<string> { "en retard", "en attente", "en cours", "terminer" };
            if (hasallr == true)
            {
                //demande
                lblDemEncours.Text = context.GetRequestCours.Count().ToString();
                lblDemEnRetard.Text = context.GetRequestRetard.Count().ToString();
                lblTotalDem.Text = context.GetRequestbyStatut().Count.ToString();
                var ls = (from c in context.GetRequestbyStatut()
                          select new
                          {
                              c.IdReq,
                              c.Id,
                              c.Employee.Client.Nom,
                              c.Datedemande,
                              c.Getstatut,
                              color = GetColor(c.Getstatut)
                          }).OrderBy(i => listOrder.IndexOf(i.Getstatut)).ThenBy(d => d.Datedemande).Take(5).ToList();
                foreach (var item in ls)
                {
                    CreateLblDash("dem", item.Id, item.IdReq, item.Nom, item.Datedemande.ToString(), item.Getstatut, item.color, pnlDemande);
                }
            }
            if (hasalli == true)
            {
                //intervention
                lblIntEnCours.Text = context.GetIntervEncours.Count().ToString();
                lblIntEnRetard.Text = context.GetIntervenretard.Count().ToString();
                lblTotalInterv.Text = context.GetInterventionBystatut().Count.ToString();
                var lsx = (from c in context.GetInterventionBystatut()
                           select new
                           {
                               c.IdIntrv,
                               c.Id,
                               c.Client.Nom,
                               c.DateIntervention,
                               c.Getstatut,
                               color = GetColor(c.Getstatut)
                           }).OrderBy(i => listOrder.IndexOf(i.Getstatut)).ThenBy(d => d.DateIntervention).Take(5).ToList();
                foreach (var item in lsx)
                {
                    CreateLblDash("int", item.Id, item.IdIntrv, item.Nom, item.DateIntervention.ToString(), item.Getstatut, item.color, pnlIntervention);
                }
            }
        }
    }

    /// <summary>
    /// Fills the dashboard for a restricted user: full lists where the user's
    /// roles allow it, otherwise only the items assigned to them.
    /// </summary>
    public void GetDashUser()
    {
        using (ParcInformatiqueEntities context = new ParcInformatiqueEntities())
        {
            var listOrder = new List<string> { "en retard", "en attente", "en cours", "terminer" };
            bool hasallr = false;
            bool hasalli = false;
            if (RoleR == "Consulter tous les demandes".ToLower())
                hasallr = true;
            if (RoleI == "Consulter tous les interventions".ToLower())
                hasalli = true;
            if (hasalli == true)
                GetDashAdmin(false, true);
            if (hasallr == true)
                GetDashAdmin(true, false);
            if (hasallr == false)
            {
                //demande
                lblDemEncours.Text = context.GetAssignedRequestCours.Count().ToString();
                lblDemEnRetard.Text = context.GetAssignedRequestRetard.Count().ToString();
                lblTotalDem.Text = context.GetAssignedRequestbyStatut().Count.ToString();
                var ls = (from c in context.GetAssignedRequestbyStatut()
                          select new
                          {
                              c.IdReq,
                              c.Id,
                              c.Employee.Client.Nom,
                              c.Datedemande,
                              c.Getstatut,
                              color = GetColor(c.Getstatut)
                          }).OrderBy(i => listOrder.IndexOf(i.Getstatut)).ThenBy(d => d.Datedemande).Take(5).ToList();
                foreach (var item in ls)
                {
                    CreateLblDash("dem", item.Id, item.IdReq, item.Nom, item.Datedemande.ToString(), item.Getstatut, item.color, pnlDemande);
                }
            }
            if (hasalli == false)
            {
                //intervention
                lblIntEnCours.Text = context.GeAssignedtIntervEncours.Count().ToString();
                // Bug fix: the overdue count previously overwrote lblIntEnCours
                // a second time; it belongs in lblIntEnRetard (see GetDashAdmin).
                lblIntEnRetard.Text = context.GetAssignedIntervenretard.Count().ToString();
                lblTotalInterv.Text = context.GetAssignedInterventionBystatut().Count.ToString();
                var lsx = (from c in context.GetAssignedInterventionBystatut()
                           select new
                           {
                               c.IdIntrv,
                               c.Id,
                               c.Client.Nom,
                               c.DateIntervention,
                               c.Getstatut,
                               color = GetColor(c.Getstatut)
                           }).OrderBy(i => listOrder.IndexOf(i.Getstatut)).ThenBy(d => d.DateIntervention).Take(5).ToList();
                foreach (var item in lsx)
                {
                    CreateLblDash("int", item.Id, item.IdIntrv, item.Nom, item.DateIntervention.ToString(), item.Getstatut, item.color, pnlIntervention);
                }
            }
        }
    }

    // Debug/test handler (still wired in the designer): shows the minutes in a day.
    private void button1_Click(object sender, EventArgs e)
    {
        var d = DateTime.Now;
        var d2 = DateTime.Now.AddDays(1);
        MessageBox.Show((d2 - d).TotalMinutes.ToString());
    }

    /// <summary>
    /// Returns the "0" prefix needed to zero-pad <paramref name="n"/> to two digits.
    /// </summary>
    public string GetNum(int n)
    {
        // Bug fix: 9 also needs a leading zero (the original tested n < 9).
        return n < 10 ? "0" : "";
    }

    /// <summary>
    /// Shows the average request-to-intervention delay as days:hours:minutes.
    /// NOTE(review): listD/listI are ignored - the method queries the context
    /// directly; confirm whether the parameters should be used instead.
    /// </summary>
    public void Dem(ICollection<Demande> listD, ICollection<Intervention> listI)
    {
        using (ParcInformatiqueEntities context = new ParcInformatiqueEntities())
        {
            var dc = (from c in context.Demandes
                      join i in context.Interventions on c.Id equals i.IdDemande
                      select new
                      { ad = DbFunctions.DiffMinutes(c.Datedemande, i.DateIntervention) }).ToList();
            if (dc.Count > 0)
            {
                // Average delay in minutes over all request/intervention pairs.
                var d = dc.Sum(dx => dx.ad) / dc.Count;
                var days = d / 1440;
                // Bug fix: hours must be the remainder within a day; the original
                // displayed the TOTAL hours (d / 60) next to the day count.
                var hours = (d % 1440) / 60;
                var min = d % 60;
                string varx = string.Format("{0}:{1}:{2}",
                    GetNum((int)days) + days,
                    GetNum((int)hours) + hours,
                    GetNum((int)min) + min);
                lbltest.Text = varx;
            }
            else
            {
                lbltest.Text = "00:00:00";
            }
        }
    }
}
}
| 40.785185 | 157 | 0.448329 | [
"MIT"
] | driwand/ParcInfo-Dev | ParcInfo/frmList/Dashboard.cs | 11,014 | C# |
// <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.AzureStack.Management.Gallery.Admin.Models
{
/// <summary>
/// Defines values for GalleryItemMenuReferenceType.
/// </summary>
// Generated code (see the AutoRest header at the top of this file);
// prefer regenerating over hand edits.
public static class GalleryItemMenuReferenceType
{
    /// <summary>A reference to a single gallery item.</summary>
    public const string GalleryItem = "GalleryItem";
    /// <summary>A reference to a group of items.</summary>
    public const string ItemGroup = "ItemGroup";
}
}
| 30 | 74 | 0.717391 | [
"MIT"
] | 0rland0Wats0n/azure-sdk-for-net | sdk/azurestack/Microsoft.AzureStack.Management.Gallery.Admin/src/Generated/Models/GalleryItemMenuReferenceType.cs | 690 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class GenerateProjectile : MonoBehaviour {

    // Projectile prefab; assigned via the Unity Inspector.
    public GameObject projectile;

    // The empty Start()/Update() stubs were removed: Unity invokes these magic
    // methods via reflection when present, so empty bodies only add per-frame
    // overhead without changing behavior.
}
| 13.904762 | 49 | 0.69863 | [
"MIT"
] | gbaelen/RunStickmanRun | Assets/Ressource/Scripts/Bad Guys/Canon.cs | 294 | C# |
using PropertyWriter.Annotation;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PropertyWriter.DocSample.Readability4
{
// Root document for the PropertyWriter sample project.
[PwProject]
public class Project
{
    // Player master list, edited under the "プレイヤー" (player) view.
    [PwMaster("プレイヤー")]
    public Player[] Players { get; set; }
}
/// <summary>
/// One player entry; each member is editable under the label given in its
/// PwMember attribute.
/// </summary>
public class Player
{
    [PwMember("プレイヤーID")]
    public int Id { get; set; }

    [PwMember("名前")]
    public string Name { get; set; }

    [PwMember("体力")]
    public int Hp { get; set; }

    [PwMember("攻撃力")]
    public int Attack { get; set; }

    /// <summary>Shown in editor lists as "Id: Name".</summary>
    public override string ToString()
    {
        return $"{Id}: {Name}";
    }
}
}
| 19.419355 | 55 | 0.67608 | [
"MIT"
] | NumAniCloud/PropertyWriter | Dev/PropertyWriter.DocSample/Readability4.cs | 638 | C# |
using System.Threading.Tasks;
using JetBrains.Annotations;
using Lykke.Service.ConfirmationCodes.Core.Services;
using StackExchange.Redis;
namespace Lykke.Service.ConfirmationCodes.Services
{
/// <summary>
/// Tracks failed Google 2FA attempts per client in Redis and reports a client
/// as blocked once the failure count reaches the configured maximum.
/// </summary>
[UsedImplicitly]
public class Google2FaBlacklistService : IGoogle2FaBlacklistService
{
    private readonly IDatabase _redisDb;
    // Failure count at which a client becomes blocked.
    private readonly int _maxTries;
    // Lua script run on success: deletes the failure counter only while the
    // count is still below the limit (ARGV[1] = max tries), so a client that
    // is already blocked is NOT unblocked by a later correct code.
    private const string SuccessScript = @"
local num=redis.call('get', KEYS[1])
if(num ~= false)
then
if(tonumber(num) < tonumber(ARGV[1]))
then
redis.call('del', KEYS[1])
end
end";
    public Google2FaBlacklistService(
        IDatabase redisDb,
        int maxTries)
    {
        _redisDb = redisDb;
        _maxTries = maxTries;
    }
    /// <summary>Increments the client's failure counter.</summary>
    public Task ClientFailedAsync(string clientId)
    {
        return _redisDb.StringIncrementAsync(GetCounterKeyForClient(clientId));
    }
    /// <summary>
    /// Clears the failure counter unconditionally (also unblocks a blocked client).
    /// NOTE(review): the doubled "AsyncAsync" suffix comes from the interface.
    /// </summary>
    public Task ResetAsyncAsync(string clientId)
    {
        return _redisDb.KeyDeleteAsync(GetCounterKeyForClient(clientId));
    }
    /// <summary>True when the failure count has reached the maximum.
    /// A missing counter converts to 0, i.e. not blocked.</summary>
    public async Task<bool> IsClientBlockedAsync(string clientId)
    {
        return (int) await _redisDb.StringGetAsync(GetCounterKeyForClient(clientId)) >= _maxTries;
    }
    /// <summary>
    /// Records a successful attempt: resets the counter unless the client is
    /// already blocked (see SuccessScript).
    /// </summary>
    public async Task ClientSucceededAsync(string clientId)
    {
        var clientKey = GetCounterKeyForClient(clientId);
        await _redisDb.ScriptEvaluateAsync(SuccessScript, new[] {(RedisKey)clientKey}, new[] {(RedisValue)_maxTries});
    }
    // Redis key holding the failure count for one client.
    private static string GetCounterKeyForClient(string clientId)
    {
        return $"Google2FaFailsCounter:{clientId}";
    }
}
}
| 30.533333 | 122 | 0.606441 | [
"MIT"
] | LykkeCity/Lykke.Service.ConfirmationCodes | src/Lykke.Service.ConfirmationCodes.Services/Google2FaBlacklistService.cs | 1,834 | C# |
using CoreFrame.DataRepository;
using CoreFrame.Entity.Base_SysManage;
using System.Collections.Generic;
using System.Linq;
namespace CoreFrame.Business.Cache
{
// Caches, per user id, the role ids assigned to that user
// (non-deleted Base_UserRoleMap rows).
class UserRoleCache : BaseCache<List<string>>
{
    public UserRoleCache()
        : base("UserRoleCache", userId =>
        {
            // Value factory invoked on a cache miss: load the user's role ids.
            var list = DbFactory.GetRepository()
                .GetIQueryable<Base_UserRoleMap>()
                .Where(x => x.UserId == userId)
                .Select(x => x.RoleId)
                .ToList();
            return list;
        })
    {
    }
}
}
| 24.68 | 54 | 0.523501 | [
"MIT"
] | cityjoy/NetCoreFrame.Admin | CoreFrame.Business/Cache/UserRoleCache.cs | 619 | C# |
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
#if !SILVERLIGHT
using System.Runtime.Serialization;
#endif
#if SERVERFX
using OpenRiaServices.DomainServices.Server;
#else
using OpenRiaServices.DomainServices.Client;
#endif
namespace OpenRiaServices.DomainServices
{
internal static class TypeUtility
{
/// <summary>
/// List of public key tokens used for System assemblies
/// </summary>
private static string[] systemAssemblyPublicKeyTokens =
{
    "b77a5c561934e089", // mscorlib, System, System.ComponentModel.Composition, and System.Core
    "31bf3856ad364e35", // OpenRiaServices.DomainServices.*, System.ComponentModel.DataAnnotations
    "b03f5f7f11d50a3a", // Microsoft.VisualBasic, Microsoft.CSharp, System.Configuration
    "7cec85d7bea7798e" // Silverlight system assemblies
};
#if !WIZARD
// list of "simple" types we will always accept for
// serialization, inclusion from entities, etc.
// Primitive types are not here -- test for them via Type.IsPrimitive
// (IsPredefinedSimpleType also strips Nullable<T> before consulting this set).
private static HashSet<Type> predefinedTypes = new HashSet<Type>
{
    typeof(string),
    typeof(decimal),
    typeof(DateTime),
    typeof(DateTimeOffset),
    typeof(TimeSpan),
    typeof(Guid),
    typeof(Uri)
};
/// <summary>
/// Returns <c>true</c> if the given type is a constructed <see cref="Nullable{T}"/>.
/// </summary>
/// <param name="type">The type to test.</param>
/// <returns><c>true</c> if the given type is a nullable value type.</returns>
public static bool IsNullableType(Type type)
{
    if (!type.IsGenericType)
    {
        return false;
    }
    return type.GetGenericTypeDefinition() == typeof(Nullable<>);
}
/// <summary>
/// If the given type is <see cref="Nullable{T}"/>, returns its underlying type;
/// otherwise returns <paramref name="type"/> unchanged.
/// </summary>
/// <param name="type">The type to test that may or may not be Nullable.</param>
/// <returns>Either the input type or, if it was Nullable, its element type.</returns>
public static Type GetNonNullableType(Type type)
{
    if (IsNullableType(type))
    {
        return type.GetGenericArguments()[0];
    }
    return type;
}
/// <summary>
/// Returns <c>true</c> if the given type is a primitive type, one of the
/// standard acceptable simple types (<see cref="String"/>, <see cref="Guid"/>, ...),
/// or one of the standard generic shapes (list / dictionary) whose arguments
/// are themselves simple.
/// </summary>
/// <param name="type">The type to test.</param>
/// <returns><c>true</c> if the type is a primitive or standard acceptable type.</returns>
public static bool IsPredefinedType(Type type)
{
    if (IsPredefinedSimpleType(type))
    {
        return true;
    }
    if (IsPredefinedListType(type))
    {
        return true;
    }
    return IsPredefinedDictionaryType(type);
}
/// <summary>
/// Returns <c>true</c> if the given type is a supported collection shape
/// (array, <c>IEnumerable&lt;T&gt;</c>, or default-constructible <c>IList</c>)
/// whose element type is a predefined simple type.
/// </summary>
/// <param name="type">The type to test.</param>
/// <returns><c>true</c> for supported lists of simple elements.</returns>
public static bool IsPredefinedListType(Type type)
{
    if (!IsSupportedCollectionType(type))
    {
        return false;
    }
    return IsPredefinedSimpleType(GetElementType(type));
}
/// <summary>
/// Returns <c>true</c> if the specified type is a supported collection Type.
/// Only the collection shape is checked, not the element type.
/// </summary>
/// <param name="type">The type to test.</param>
/// <returns><c>true</c> if the type is a supported collection Type.</returns>
public static bool IsSupportedCollectionType(Type type)
{
    if (type.IsArray)
    {
        return true;
    }
    // Exactly IEnumerable<T> itself (not types merely implementing it).
    if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(IEnumerable<>))
    {
        return true;
    }
    // Concrete IList implementations must expose a default constructor.
    return typeof(IList).IsAssignableFrom(type) && type.GetConstructor(Type.EmptyTypes) != null;
}
/// <summary>
/// Returns <c>true</c> if <paramref name="type"/> implements
/// <c>IDictionary&lt;TKey,TValue&gt;</c> and both generic arguments are
/// acceptable predefined simple types.
/// </summary>
/// <param name="type">The type to check.</param>
/// <returns><c>true</c> for supported dictionaries of simple key/value types.</returns>
public static bool IsPredefinedDictionaryType(Type type)
{
    Type closedDictionaryType;
    if (!typeof(IDictionary<,>).DefinitionIsAssignableFrom(type, out closedDictionaryType))
    {
        return false;
    }
    foreach (Type argument in closedDictionaryType.GetGenericArguments())
    {
        if (!IsPredefinedSimpleType(argument))
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Returns <c>true</c> if the given type (or its Nullable underlying type) is
/// primitive or one of the standard acceptable simple types such as
/// <see cref="String"/>, <see cref="Guid"/>, enums, or binary.
/// </summary>
/// <param name="type">The type to test.</param>
/// <returns><c>true</c> if the type is a primitive or standard acceptable type.</returns>
public static bool IsPredefinedSimpleType(Type type)
{
    type = GetNonNullableType(type);

    // All primitives are accepted except the pointer-sized integers.
    if (type.IsPrimitive)
    {
        return type != typeof(IntPtr) && type != typeof(UIntPtr);
    }
    if (type.IsEnum)
    {
        return true;
    }
    if (predefinedTypes.Contains(type))
    {
        return true;
    }
    if (BinaryTypeUtility.IsTypeBinary(type))
    {
        return true;
    }
    // Compared by full name so the client framework assembly can avoid
    // taking an assembly reference to System.Xml.Linq.
    return string.Compare(type.FullName, "System.Xml.Linq.XElement", StringComparison.Ordinal) == 0;
}
        /// <summary>
        /// This method determines if the specified Type should be treated as a
        /// complex type by the framework.
        /// </summary>
        /// <remarks>
        /// The client build relies on derivation from ComplexObject; the server
        /// (SERVERFX) build instead applies a series of structural checks.
        /// </remarks>
        /// <param name="type">The type to check.</param>
        /// <returns>True if the type is a complex type, false otherwise.</returns>
        public static bool IsComplexType(Type type)
        {
#if !SERVERFX
            // Client side we can rely on derivation from ComplexObject
            if (!typeof(ComplexObject).IsAssignableFrom(type))
            {
                return false;
            }
#else
            // Must be a visible, non-generic, non-abstract type.
            if (!type.IsVisible || type.IsGenericType || type.IsAbstract)
            {
                return false;
            }
            // Collections are never complex types.
            if (typeof(IEnumerable).IsAssignableFrom(type))
            {
                return false;
            }
            // Must be a class (not a struct or interface).
            if (!type.IsClass)
            {
                return false;
            }
            // Must be instantiable via a public parameterless constructor.
            if (type.GetConstructor(Type.EmptyTypes) == null)
            {
                return false;
            }
            // Predefined simple/known types are handled elsewhere.
            if (TypeUtility.IsPredefinedType(type))
            {
                return false;
            }
            // can't be a framework type
            if (IsSystemAssembly(type.Assembly))
            {
                return false;
            }
            // server side only checks
            // can't be an entity: any type exposing a [Key]-attributed property
            // is treated as an entity rather than a complex type
            if (TypeDescriptor.GetProperties(type).Cast<PropertyDescriptor>().Any(p => p.Attributes[typeof(KeyAttribute)] != null))
            {
                return false;
            }
#endif
            return true;
        }
/// <summary>
/// Determines whether the specified type is one of the supported collection types
/// with a complex element type.
/// </summary>
/// <param name="type">The type to check.</param>
/// <returns>True if the type is a supported complex collection type, false otherwise.</returns>
public static bool IsComplexTypeCollection(Type type)
{
// This check doesn't include dictionary types, since dictionaries of CTs aren't supported currently
return TypeUtility.IsSupportedCollectionType(type) && TypeUtility.IsComplexType(TypeUtility.GetElementType(type));
}
/// <summary>
/// Determines whether the specified type is a complex type or a collection of
/// complex types.
/// </summary>
/// <param name="type">The type to check.</param>
/// <returns>True if the specified type is a complex type or a collection of
/// complex types, false otherwise.</returns>
public static bool IsSupportedComplexType(Type type)
{
return TypeUtility.IsComplexType(type) || TypeUtility.IsComplexTypeCollection(type);
}
/// <summary>
/// Returns the underlying element type starting from a given type.
/// </summary>
/// <remarks>
/// Simple types simply return the input type.
/// If the given type is an array, this method returns the array's
/// element type.
/// If the type is a generic type of <see cref="IEnumerable"/>,
/// or <see cref="Nullable"/>, this method returns the element
/// type of the generic parameter
/// </remarks>
/// <param name="type"><see cref="Type"/> to examine.</param>
/// <returns>The underlying element type starting from the given type</returns>
public static Type GetElementType(Type type)
{
// Any simple type has no element type -- it is the element type itself
if (IsPredefinedSimpleType(type))
{
return type;
}
// Array, pointers, etc.
if (type.HasElementType)
{
return type.GetElementType();
}
// IEnumerable<T> returns T
Type ienum = FindIEnumerable(type);
if (ienum != null)
{
Type genericArg = ienum.GetGenericArguments()[0];
return genericArg;
}
return type;
}
/// <summary>
/// Determines whether the generic type definition is assignable from the derived type.
/// </summary>
/// <remarks>
/// This behaves just like <see cref="Type.IsAssignableFrom"/> except that it determines
/// whether any generic type that can be made from the <paramref name="genericTypeDefinition"/>
/// is assignable from <paramref name="derivedType"/>.
/// </remarks>
/// <param name="genericTypeDefinition">The generic type definition</param>
/// <param name="derivedType">The type to determine assignability from</param>
/// <returns>Whether the type definition is assignable from the derived type</returns>
internal static bool DefinitionIsAssignableFrom(this Type genericTypeDefinition, Type derivedType)
{
Type genericType = null;
return DefinitionIsAssignableFrom(genericTypeDefinition, derivedType, out genericType);
}
        /// <summary>
        /// Determines whether the generic type definition is assignable from the derived type.
        /// </summary>
        /// <remarks>
        /// This behaves just like <see cref="Type.IsAssignableFrom"/> except that it determines
        /// whether any generic type that can be made from the <paramref name="genericTypeDefinition"/>
        /// is assignable from <paramref name="derivedType"/>.
        /// </remarks>
        /// <param name="genericTypeDefinition">The generic type definition</param>
        /// <param name="derivedType">The type to determine assignability from</param>
        /// <param name="genericType">The generic base class or interface implemented by the derived
        /// type that can be made from the <paramref name="genericTypeDefinition"/>. This value is
        /// null when the method return false.
        /// </param>
        /// <returns>Whether the type definition is assignable from the derived type</returns>
        internal static bool DefinitionIsAssignableFrom(this Type genericTypeDefinition, Type derivedType, out Type genericType)
        {
            // Walk the inheritance chain starting at derivedType. The loop exits
            // via break when a match is found, or with genericType == null once the
            // chain is exhausted -- the final null check doubles as the result.
            genericType = derivedType;
            while (genericType != null)
            {
                if (genericTypeDefinition.IsInterface)
                {
                    bool interfaceMatched = false;
                    // Scan the implemented interfaces; derivedType itself is appended
                    // so that passing a closed form of the interface directly matches too.
                    foreach (Type interfaceType in genericType.GetInterfaces().Concat(new[] { derivedType }))
                    {
                        if (interfaceType.IsGenericType &&
                            genericTypeDefinition == interfaceType.GetGenericTypeDefinition())
                        {
                            interfaceMatched = true;
                            genericType = interfaceType;
                            break;
                        }
                    }
                    if (interfaceMatched)
                    {
                        break;
                    }
                }
                else
                {
                    // For class definitions, test the current level of the chain itself.
                    if (genericType.IsGenericType &&
                        genericTypeDefinition == genericType.GetGenericTypeDefinition())
                    {
                        break;
                    }
                }
                // No match at this level; move up to the base class.
                genericType = genericType.BaseType;
            }
            return genericType != null;
        }
internal static Type FindIEnumerable(Type seqType)
{
if (seqType == null || seqType == typeof(string))
{
return null;
}
if (seqType.IsArray)
{
return typeof(IEnumerable<>).MakeGenericType(seqType.GetElementType());
}
if (seqType.IsGenericType)
{
foreach (Type arg in seqType.GetGenericArguments())
{
Type ienum = typeof(IEnumerable<>).MakeGenericType(arg);
if (ienum.IsAssignableFrom(seqType))
{
return ienum;
}
}
}
Type[] ifaces = seqType.GetInterfaces();
if (ifaces != null && ifaces.Length > 0)
{
foreach (Type iface in ifaces)
{
Type ienum = FindIEnumerable(iface);
if (ienum != null)
{
return ienum;
}
}
}
if (seqType.BaseType != null && seqType.BaseType != typeof(object))
{
return FindIEnumerable(seqType.BaseType);
}
return null;
}
#endif
/// <summary>
/// Performs a check against an assembly to determine if it's a known
/// System assembly.
/// </summary>
/// <param name="assembly">The assembly to check.</param>
/// <returns><c>true</c> if the assembly is known to be a system assembly, otherwise <c>false</c>.</returns>
internal static bool IsSystemAssembly(this Assembly assembly)
{
return IsSystemAssembly(assembly.FullName);
}
/// <summary>
/// Performs a check against an <see cref="AssemblyName"/> to determine if it's a known
/// System assembly.
/// </summary>
/// <param name="assemblyName">The assembly name to check.</param>
/// <returns><c>true</c> if the assembly is known to be a system assembly, otherwise <c>false</c>.</returns>
internal static bool IsSystemAssembly(this AssemblyName assemblyName)
{
return IsSystemAssembly(assemblyName.FullName);
}
/// <summary>
/// Performs a check against an assembly's full name to determine if it's a known
/// System assembly.
/// </summary>
/// <remarks>
/// We can't use Assembly.GetName().GetPublicKeyToken() since that requires FileIOPermissions.
/// </remarks>
/// <param name="assemblyFullName">The <see cref="AssemblyName.FullName"/> to check.</param>
/// <returns><c>true</c> if the assembly is known to be a system assembly, otherwise <c>false</c>.</returns>
internal static bool IsSystemAssembly(string assemblyFullName)
{
// parse the public key token
int idx = assemblyFullName.IndexOf("PublicKeyToken=", StringComparison.OrdinalIgnoreCase);
if (idx == 0)
{
return false;
}
string publicKeyToken = assemblyFullName.Substring(idx + 15);
return systemAssemblyPublicKeyTokens.Any(p => p.Equals(publicKeyToken, StringComparison.OrdinalIgnoreCase));
}
}
} | 39.423841 | 143 | 0.55871 | [
"Apache-2.0"
] | jeffhandley/OpenRiaServices | OpenRiaServices.DomainServices.Client/Framework/Portable/Data/TypeUtility.cs | 17,861 | C# |
using System.Threading.Tasks;
using Avalonia.Input.Platform;
using UIKit;
namespace Avalonia.iOS
{
public class Clipboard : IClipboard
{
public Task<string> GetTextAsync()
{
return Task.FromResult(UIPasteboard.General.String);
}
public Task SetTextAsync(string text)
{
UIPasteboard.General.String = text;
return Task.FromResult(0);
}
public Task ClearAsync()
{
UIPasteboard.General.String = "";
return Task.FromResult(0);
}
}
} | 22.076923 | 64 | 0.578397 | [
"MIT"
] | AvtRikki/Avalonia | src/iOS/Avalonia.iOS/Clipboard.cs | 574 | C# |
// Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE.md file in the project root for more information.
using System.ComponentModel.Composition;
namespace Microsoft.VisualStudio.ProjectSystem.VS.UI
{
/// <summary>
/// Provides an abstraction over dialogs to make them unit testable. Each dialog will have its own abstraction which
/// can be retrieved from this service.
/// </summary>
[Export(typeof(IDialogServices))]
[AppliesTo(ProjectCapability.DotNet)]
internal class DialogServices : IDialogServices
{
private readonly IUserNotificationServices _userNotificationServices;
[ImportingConstructor]
public DialogServices(IUserNotificationServices userNotificationServices)
{
_userNotificationServices = userNotificationServices;
}
public bool DontShowAgainMessageBox(string caption, string message, string? checkboxText, bool initialStateOfCheckbox, string learnMoreText, string learnMoreUrl)
{
var dlg = new DontShowAgainMessageBox(caption, message, checkboxText, initialStateOfCheckbox, learnMoreText, learnMoreUrl, _userNotificationServices);
bool? result = dlg.ShowModal();
if (result == true)
{
return dlg.CheckboxState;
}
return false;
}
}
}
| 41.305556 | 201 | 0.685272 | [
"MIT"
] | 77-A/.Net-Project | src/Microsoft.VisualStudio.ProjectSystem.Managed.VS/ProjectSystem/VS/UI/DialogServices.cs | 1,454 | C# |
using System.ComponentModel;
namespace SiteWatcher.Domain.Enums;
/// <summary>
/// Supported display languages. The <see cref="Description"/> attribute on each
/// member carries the language's two-letter code (presumably ISO 639-1 -- confirm
/// against how callers consume the description).
/// </summary>
public enum ELanguage
{
    [Description("pt")]
    BrazilianPortuguese = 1,
    [Description("en")]
    English = 2,
    [Description("es")]
    Spanish = 3
}
"MIT"
] | xilapa/SiteWatcher | src/Domain/Enums/ELanguage.cs | 228 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ds-2015-04-16.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using System.Net;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.DirectoryService.Model
{
    /// <summary>
    /// This is the response object from the CancelSchemaExtension operation.
    /// </summary>
    /// <remarks>
    /// Generated code (see the file header). The response carries no payload beyond
    /// the members inherited from <see cref="AmazonWebServiceResponse"/>.
    /// </remarks>
    public partial class CancelSchemaExtensionResponse : AmazonWebServiceResponse
    {
    }
} | 29.710526 | 100 | 0.736935 | [
"Apache-2.0"
] | ChristopherButtars/aws-sdk-net | sdk/src/Services/DirectoryService/Generated/Model/CancelSchemaExtensionResponse.cs | 1,129 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace System.Reflection
{
    internal static class MdConstant
    {
        /// <summary>
        /// Reads a metadata default-value constant for the given token and converts
        /// it to a managed object of the declared type.
        /// </summary>
        /// <param name="scope">The metadata import scope to read from.</param>
        /// <param name="token">The metadata token whose default value is requested.</param>
        /// <param name="fieldTypeHandle">Handle of the declared type of the constant.</param>
        /// <param name="raw">When true, enum-typed constants are returned as their
        /// underlying primitive value instead of a boxed enum.</param>
        /// <returns>The boxed constant value; <see cref="DBNull.Value"/> when the
        /// metadata records no value (ELEMENT_TYPE_VOID); null for null reference
        /// constants (ELEMENT_TYPE_CLASS).</returns>
        public static unsafe object? GetValue(MetadataImport scope, int token, RuntimeTypeHandle fieldTypeHandle, bool raw)
        {
            // 'buffer' holds the raw constant bits; 'length' is unused here.
            string? stringVal = scope.GetDefaultValue(token, out long buffer, out int length, out CorElementType corElementType);
            RuntimeType fieldType = fieldTypeHandle.GetRuntimeType();
            if (fieldType.IsEnum && !raw)
            {
                // NOTE: Unlike in `TypeBuilder.SetConstantValue`, if `fieldType` describes
                // a nullable enum type `Nullable<TEnum>`, we do not unpack it to `TEnum` to
                // successfully enter this `if` clause. Default values of `TEnum?`-typed
                // parameters have been reported as values of the underlying type, changing
                // this now might be a breaking change.
                // Widen the raw bits to long via a pointer reinterpretation of the
                // element type, then box the result as the enum type.
                long defaultValue = 0;
                switch (corElementType)
                {
                    #region Switch
                    case CorElementType.ELEMENT_TYPE_VOID:
                        return DBNull.Value;
                    case CorElementType.ELEMENT_TYPE_CHAR:
                        defaultValue = *(char*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_I1:
                        defaultValue = *(sbyte*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_U1:
                        defaultValue = *(byte*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_I2:
                        defaultValue = *(short*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_U2:
                        defaultValue = *(ushort*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_I4:
                        defaultValue = *(int*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_U4:
                        defaultValue = *(uint*)&buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_I8:
                        defaultValue = buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_U8:
                        defaultValue = buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_CLASS:
                        return null;
                    default:
                        throw new FormatException(SR.Arg_BadLiteralFormat);
                        #endregion
                }
                return RuntimeType.CreateEnum(fieldType, defaultValue);
            }
            else if (fieldType == typeof(DateTime))
            {
                // DateTime constants are stored as their 64-bit tick count.
                long defaultValue = 0;
                switch (corElementType)
                {
                    #region Switch
                    case CorElementType.ELEMENT_TYPE_VOID:
                        return DBNull.Value;
                    case CorElementType.ELEMENT_TYPE_I8:
                        defaultValue = buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_U8:
                        defaultValue = buffer;
                        break;
                    case CorElementType.ELEMENT_TYPE_CLASS:
                        return null;
                    default:
                        throw new FormatException(SR.Arg_BadLiteralFormat);
                        #endregion
                }
                return new DateTime(defaultValue);
            }
            else
            {
                // All other constants: reinterpret the raw bits per element type
                // and box the resulting primitive (or return the string/null).
                return corElementType switch
                {
                    CorElementType.ELEMENT_TYPE_VOID => DBNull.Value,
                    CorElementType.ELEMENT_TYPE_CHAR => *(char*)&buffer,
                    CorElementType.ELEMENT_TYPE_I1 => *(sbyte*)&buffer,
                    CorElementType.ELEMENT_TYPE_U1 => *(byte*)&buffer,
                    CorElementType.ELEMENT_TYPE_I2 => *(short*)&buffer,
                    CorElementType.ELEMENT_TYPE_U2 => *(ushort*)&buffer,
                    CorElementType.ELEMENT_TYPE_I4 => *(int*)&buffer,
                    CorElementType.ELEMENT_TYPE_U4 => *(uint*)&buffer,
                    CorElementType.ELEMENT_TYPE_I8 => buffer,
                    CorElementType.ELEMENT_TYPE_U8 => (ulong)buffer,
                    CorElementType.ELEMENT_TYPE_BOOLEAN => (*(int*)&buffer != 0),
                    CorElementType.ELEMENT_TYPE_R4 => *(float*)&buffer,
                    CorElementType.ELEMENT_TYPE_R8 => *(double*)&buffer,
                    CorElementType.ELEMENT_TYPE_STRING => stringVal ?? string.Empty,
                    CorElementType.ELEMENT_TYPE_CLASS => null,
                    _ => throw new FormatException(SR.Arg_BadLiteralFormat),
                };
            }
        }
    }
}
| 38.923664 | 129 | 0.497156 | [
"MIT"
] | 2m0nd/runtime | src/coreclr/src/System.Private.CoreLib/src/System/Reflection/MdConstant.cs | 5,099 | C# |
using System.Collections.Generic;
using Microsoft.Health.Fhir.Anonymizer.Api;
using Presidio.Model;
using Xunit;
namespace Fhir.Anonymizer.Core.UnitTests.Api
{
public class PresidioApiHandlerTests
{
[Fact]
public void GivenAnalyzeCalled_WhenAnalyzerApiCalled_PresidioTextAnalysisRecognizerResultsShouldBeReturned()
{
var presidioApiHandler = new PresidioApiHandler("en", new AnalyzerApiMock(), new AnonymizerApiMock());
var recognizerResults = presidioApiHandler.Analyze("Text For Analysis");
var recognizerResult = recognizerResults[0];
Assert.Equal(AnalyzerApiMock.DefaultStart, recognizerResult.Start);
Assert.Equal(AnalyzerApiMock.DefaultEnd, recognizerResult.End);
Assert.Equal(AnalyzerApiMock.DefaultScore, recognizerResult.Score);
Assert.Equal(AnalyzerApiMock.EntityType, recognizerResult.EntityType);
}
[Fact]
public void GivenAnonymizeCalled_WhenAnonymizerApiCalled_PresidioAnonymizeResponseShouldBeReturned()
{
var presidioApiHandler = new PresidioApiHandler("en", new AnalyzerApiMock(), new AnonymizerApiMock());
var anonymizeResponse = presidioApiHandler.Anonymize("Text For Analysis", new List<RecognizerResult>());
Assert.Equal(AnonymizerApiMock.DefaultText, anonymizeResponse.Text);
Assert.Same(AnonymizerApiMock.s_defaultOperatorEntities, anonymizeResponse.Items);
}
}
} | 41.162162 | 116 | 0.715693 | [
"MIT"
] | itye-msft/FHIR-Tools-for-Anonymization | src/Microsoft.Health.Fhir.Anonymizer.Shared.Core.UnitTests/Api/PresidioApiHandlerTests.cs | 1,525 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Simple player controller: tracks HP, updates the HP label, and fires bullets
/// on Space with a cooldown between shots.
/// </summary>
public class Player : MonoBehaviour
{
    // Inspector-tunable fields (public names kept stable for Unity serialization).
    public float maxHP = 100;
    public float timeBetweenShoots = 0.5f;
    public GameObject bulletPrefab;
    public Transform bulletOrigin;
    public Text hpText;

    // Runtime state (not serialized).
    private float _currentHP;
    private float _lastShotTime;

    private void Start()
    {
        _currentHP = maxHP;
        hpText.text = "HP: " + _currentHP;
    }

    private void Update()
    {
        // Fire on Space, but only once the cooldown has elapsed.
        if (Input.GetKeyDown(KeyCode.Space) && Time.time > _lastShotTime + timeBetweenShoots)
        {
            Shoot();
        }
    }

    /// <summary>Applies damage, refreshes the label, and destroys the player at zero HP.</summary>
    public void Damage(float amount)
    {
        _currentHP -= amount;
        hpText.text = "HP: " + _currentHP;
        if (_currentHP <= 0f)
        {
            Debug.Log("Game Over");
            Destroy(this.gameObject);
        }
    }

    // Spawns a bullet at the muzzle and records the shot time for the cooldown.
    private void Shoot()
    {
        Instantiate(bulletPrefab, bulletOrigin.position, bulletOrigin.rotation);
        _lastShotTime = Time.time;
    }
}
"MIT"
] | JesusFerniz/Parcial_SpaceShooter | Assets/SpaceShooter/Scripts/Player.cs | 1,084 | C# |
// <auto-generated />
using Abp.Authorization;
using Abp.BackgroundJobs;
using Abp.Events.Bus.Entities;
using Abp.Notifications;
using Lesson.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage;
using Microsoft.EntityFrameworkCore.Storage.Internal;
using Microsoft.EntityFrameworkCore.ValueGeneration;
using System;
namespace Lesson.Migrations
{
[DbContext(typeof(LessonDbContext))]
[Migration("20180201051646_Upgraded_To_Abp_v3.4.0")]
partial class Upgraded_To_Abp_v340
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "2.0.1-rtm-125")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
modelBuilder.Entity("Abp.Application.Editions.Edition", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("int");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<string>("DisplayName")
.IsRequired()
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.HasKey("Id");
b.ToTable("AbpEditions");
});
modelBuilder.Entity("Abp.Application.Features.FeatureSetting", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<string>("Discriminator")
.IsRequired()
.HasColumnType("nvarchar(max)");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<string>("Value")
.IsRequired()
.HasColumnType("nvarchar(2000)")
.HasMaxLength(2000);
b.HasKey("Id");
b.ToTable("AbpFeatures");
b.HasDiscriminator<string>("Discriminator").HasValue("FeatureSetting");
});
modelBuilder.Entity("Abp.Auditing.AuditLog", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("BrowserInfo")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("ClientIpAddress")
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<string>("ClientName")
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<string>("CustomData")
.HasColumnType("nvarchar(2000)")
.HasMaxLength(2000);
b.Property<string>("Exception")
.HasColumnType("nvarchar(2000)")
.HasMaxLength(2000);
b.Property<int>("ExecutionDuration")
.HasColumnType("int");
b.Property<DateTime>("ExecutionTime")
.HasColumnType("datetime2");
b.Property<int?>("ImpersonatorTenantId")
.HasColumnType("int");
b.Property<long?>("ImpersonatorUserId")
.HasColumnType("bigint");
b.Property<string>("MethodName")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("Parameters")
.HasColumnType("nvarchar(1024)")
.HasMaxLength(1024);
b.Property<string>("ServiceName")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long?>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("TenantId", "ExecutionDuration");
b.HasIndex("TenantId", "ExecutionTime");
b.HasIndex("TenantId", "UserId");
b.ToTable("AbpAuditLogs");
});
modelBuilder.Entity("Abp.Authorization.PermissionSetting", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<string>("Discriminator")
.IsRequired()
.HasColumnType("nvarchar(max)");
b.Property<bool>("IsGranted")
.HasColumnType("bit");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("TenantId", "Name");
b.ToTable("AbpPermissions");
b.HasDiscriminator<string>("Discriminator").HasValue("PermissionSetting");
});
modelBuilder.Entity("Abp.Authorization.Roles.RoleClaim", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("ClaimType")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("ClaimValue")
.HasColumnType("nvarchar(max)");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<int>("RoleId")
.HasColumnType("int");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("RoleId");
b.HasIndex("TenantId", "ClaimType");
b.ToTable("AbpRoleClaims");
});
modelBuilder.Entity("Abp.Authorization.Users.UserAccount", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<string>("EmailAddress")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<DateTime?>("LastLoginTime")
.HasColumnType("datetime2");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.Property<long?>("UserLinkId")
.HasColumnType("bigint");
b.Property<string>("UserName")
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.HasKey("Id");
b.HasIndex("EmailAddress");
b.HasIndex("UserName");
b.HasIndex("TenantId", "EmailAddress");
b.HasIndex("TenantId", "UserId");
b.HasIndex("TenantId", "UserName");
b.ToTable("AbpUserAccounts");
});
modelBuilder.Entity("Abp.Authorization.Users.UserClaim", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("ClaimType")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("ClaimValue")
.HasColumnType("nvarchar(max)");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("UserId");
b.HasIndex("TenantId", "ClaimType");
b.ToTable("AbpUserClaims");
});
modelBuilder.Entity("Abp.Authorization.Users.UserLogin", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("LoginProvider")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<string>("ProviderKey")
.IsRequired()
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("UserId");
b.HasIndex("TenantId", "UserId");
b.HasIndex("TenantId", "LoginProvider", "ProviderKey");
b.ToTable("AbpUserLogins");
});
modelBuilder.Entity("Abp.Authorization.Users.UserLoginAttempt", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("BrowserInfo")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("ClientIpAddress")
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<string>("ClientName")
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<byte>("Result")
.HasColumnType("tinyint");
b.Property<string>("TenancyName")
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long?>("UserId")
.HasColumnType("bigint");
b.Property<string>("UserNameOrEmailAddress")
.HasColumnType("nvarchar(255)")
.HasMaxLength(255);
b.HasKey("Id");
b.HasIndex("UserId", "TenantId");
b.HasIndex("TenancyName", "UserNameOrEmailAddress", "Result");
b.ToTable("AbpUserLoginAttempts");
});
modelBuilder.Entity("Abp.Authorization.Users.UserOrganizationUnit", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<long>("OrganizationUnitId")
.HasColumnType("bigint");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("TenantId", "OrganizationUnitId");
b.HasIndex("TenantId", "UserId");
b.ToTable("AbpUserOrganizationUnits");
});
modelBuilder.Entity("Abp.Authorization.Users.UserRole", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<int>("RoleId")
.HasColumnType("int");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("UserId");
b.HasIndex("TenantId", "RoleId");
b.HasIndex("TenantId", "UserId");
b.ToTable("AbpUserRoles");
});
modelBuilder.Entity("Abp.Authorization.Users.UserToken", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("LoginProvider")
.HasColumnType("nvarchar(max)");
b.Property<string>("Name")
.HasColumnType("nvarchar(max)");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.Property<string>("Value")
.HasColumnType("nvarchar(max)");
b.HasKey("Id");
b.HasIndex("UserId");
b.HasIndex("TenantId", "UserId");
b.ToTable("AbpUserTokens");
});
modelBuilder.Entity("Abp.BackgroundJobs.BackgroundJobInfo", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<bool>("IsAbandoned")
.HasColumnType("bit");
b.Property<string>("JobArgs")
.IsRequired()
.HasColumnType("nvarchar(max)")
.HasMaxLength(1048576);
b.Property<string>("JobType")
.IsRequired()
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<DateTime?>("LastTryTime")
.HasColumnType("datetime2");
b.Property<DateTime>("NextTryTime")
.HasColumnType("datetime2");
b.Property<byte>("Priority")
.HasColumnType("tinyint");
b.Property<short>("TryCount")
.HasColumnType("smallint");
b.HasKey("Id");
b.HasIndex("IsAbandoned", "NextTryTime");
b.ToTable("AbpBackgroundJobs");
});
modelBuilder.Entity("Abp.Configuration.Setting", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long?>("UserId")
.HasColumnType("bigint");
b.Property<string>("Value")
.HasColumnType("nvarchar(2000)")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("UserId");
b.HasIndex("TenantId", "Name");
b.ToTable("AbpSettings");
});
modelBuilder.Entity("Abp.EntityHistory.EntityChange", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("ChangeTime")
.HasColumnType("datetime2");
b.Property<byte>("ChangeType")
.HasColumnType("tinyint");
b.Property<long>("EntityChangeSetId")
.HasColumnType("bigint");
b.Property<string>("EntityId")
.HasColumnType("nvarchar(48)")
.HasMaxLength(48);
b.Property<string>("EntityTypeFullName")
.HasColumnType("nvarchar(192)")
.HasMaxLength(192);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("EntityChangeSetId");
b.HasIndex("EntityTypeFullName", "EntityId");
b.ToTable("AbpEntityChanges");
});
modelBuilder.Entity("Abp.EntityHistory.EntityChangeSet", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("BrowserInfo")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("ClientIpAddress")
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<string>("ClientName")
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<string>("ExtensionData")
.HasColumnType("nvarchar(max)");
b.Property<int?>("ImpersonatorTenantId")
.HasColumnType("int");
b.Property<long?>("ImpersonatorUserId")
.HasColumnType("bigint");
b.Property<string>("Reason")
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long?>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("TenantId", "CreationTime");
b.HasIndex("TenantId", "Reason");
b.HasIndex("TenantId", "UserId");
b.ToTable("AbpEntityChangeSets");
});
modelBuilder.Entity("Abp.EntityHistory.EntityPropertyChange", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<long>("EntityChangeId")
.HasColumnType("bigint");
b.Property<string>("NewValue")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("OriginalValue")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("PropertyName")
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<string>("PropertyTypeFullName")
.HasColumnType("nvarchar(192)")
.HasMaxLength(192);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("EntityChangeId");
b.ToTable("AbpEntityPropertyChanges");
});
modelBuilder.Entity("Abp.Localization.ApplicationLanguage", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("int");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<string>("DisplayName")
.IsRequired()
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<string>("Icon")
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<bool>("IsDisabled")
.HasColumnType("bit");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(10)")
.HasMaxLength(10);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("TenantId", "Name");
b.ToTable("AbpLanguages");
});
modelBuilder.Entity("Abp.Localization.ApplicationLanguageText", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("LanguageName")
.IsRequired()
.HasColumnType("nvarchar(10)")
.HasMaxLength(10);
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<string>("Source")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<string>("Value")
.IsRequired()
.HasColumnType("nvarchar(max)")
.HasMaxLength(67108864);
b.HasKey("Id");
b.HasIndex("TenantId", "Source", "LanguageName", "Key");
b.ToTable("AbpLanguageTexts");
});
modelBuilder.Entity("Abp.Notifications.NotificationInfo", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<string>("Data")
.HasColumnType("nvarchar(max)")
.HasMaxLength(1048576);
b.Property<string>("DataTypeName")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("EntityId")
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<string>("EntityTypeAssemblyQualifiedName")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("EntityTypeName")
.HasColumnType("nvarchar(250)")
.HasMaxLength(250);
b.Property<string>("ExcludedUserIds")
.HasColumnType("nvarchar(max)")
.HasMaxLength(131072);
b.Property<string>("NotificationName")
.IsRequired()
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<byte>("Severity")
.HasColumnType("tinyint");
b.Property<string>("TenantIds")
.HasColumnType("nvarchar(max)")
.HasMaxLength(131072);
b.Property<string>("UserIds")
.HasColumnType("nvarchar(max)")
.HasMaxLength(131072);
b.HasKey("Id");
b.ToTable("AbpNotifications");
});
modelBuilder.Entity("Abp.Notifications.NotificationSubscriptionInfo", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<string>("EntityId")
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<string>("EntityTypeAssemblyQualifiedName")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("EntityTypeName")
.HasColumnType("nvarchar(250)")
.HasMaxLength(250);
b.Property<string>("NotificationName")
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("NotificationName", "EntityTypeName", "EntityId", "UserId");
b.HasIndex("TenantId", "NotificationName", "EntityTypeName", "EntityId", "UserId");
b.ToTable("AbpNotificationSubscriptions");
});
modelBuilder.Entity("Abp.Notifications.TenantNotificationInfo", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<string>("Data")
.HasColumnType("nvarchar(max)")
.HasMaxLength(1048576);
b.Property<string>("DataTypeName")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("EntityId")
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<string>("EntityTypeAssemblyQualifiedName")
.HasColumnType("nvarchar(512)")
.HasMaxLength(512);
b.Property<string>("EntityTypeName")
.HasColumnType("nvarchar(250)")
.HasMaxLength(250);
b.Property<string>("NotificationName")
.IsRequired()
.HasColumnType("nvarchar(96)")
.HasMaxLength(96);
b.Property<byte>("Severity")
.HasColumnType("tinyint");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("TenantId");
b.ToTable("AbpTenantNotifications");
});
modelBuilder.Entity("Abp.Notifications.UserNotificationInfo", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<int>("State")
.HasColumnType("int");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<Guid>("TenantNotificationId")
.HasColumnType("uniqueidentifier");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasKey("Id");
b.HasIndex("UserId", "State", "CreationTime");
b.ToTable("AbpUserNotifications");
});
modelBuilder.Entity("Abp.Organizations.OrganizationUnit", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<string>("Code")
.IsRequired()
.HasColumnType("nvarchar(95)")
.HasMaxLength(95);
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<string>("DisplayName")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<long?>("ParentId")
.HasColumnType("bigint");
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("ParentId");
b.HasIndex("TenantId", "Code");
b.ToTable("AbpOrganizationUnits");
});
modelBuilder.Entity("Lesson.Authorization.Roles.Role", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("int");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("nvarchar(max)");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<string>("Description")
.HasColumnType("nvarchar(max)")
.HasMaxLength(5000);
b.Property<string>("DisplayName")
.IsRequired()
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<bool>("IsDefault")
.HasColumnType("bit");
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<bool>("IsStatic")
.HasColumnType("bit");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.Property<string>("NormalizedName")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("CreatorUserId");
b.HasIndex("DeleterUserId");
b.HasIndex("LastModifierUserId");
b.HasIndex("TenantId", "NormalizedName");
b.ToTable("AbpRoles");
});
modelBuilder.Entity("Lesson.Authorization.Users.User", b =>
{
b.Property<long>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("bigint");
b.Property<int>("AccessFailedCount")
.HasColumnType("int");
b.Property<string>("AuthenticationSource")
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("nvarchar(max)");
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<string>("EmailAddress")
.IsRequired()
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("EmailConfirmationCode")
.HasColumnType("nvarchar(328)")
.HasMaxLength(328);
b.Property<bool>("IsActive")
.HasColumnType("bit");
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<bool>("IsEmailConfirmed")
.HasColumnType("bit");
b.Property<bool>("IsLockoutEnabled")
.HasColumnType("bit");
b.Property<bool>("IsPhoneNumberConfirmed")
.HasColumnType("bit");
b.Property<bool>("IsTwoFactorEnabled")
.HasColumnType("bit");
b.Property<DateTime?>("LastLoginTime")
.HasColumnType("datetime2");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("LockoutEndDateUtc")
.HasColumnType("datetime2");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.Property<string>("NormalizedEmailAddress")
.IsRequired()
.HasColumnType("nvarchar(256)")
.HasMaxLength(256);
b.Property<string>("NormalizedUserName")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.Property<string>("Password")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<string>("PasswordResetCode")
.HasColumnType("nvarchar(328)")
.HasMaxLength(328);
b.Property<string>("PhoneNumber")
.HasColumnType("nvarchar(max)");
b.Property<string>("SecurityStamp")
.HasColumnType("nvarchar(max)");
b.Property<string>("Surname")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.Property<int?>("TenantId")
.HasColumnType("int");
b.Property<string>("UserName")
.IsRequired()
.HasColumnType("nvarchar(32)")
.HasMaxLength(32);
b.HasKey("Id");
b.HasIndex("CreatorUserId");
b.HasIndex("DeleterUserId");
b.HasIndex("LastModifierUserId");
b.HasIndex("TenantId", "NormalizedEmailAddress");
b.HasIndex("TenantId", "NormalizedUserName");
b.ToTable("AbpUsers");
});
modelBuilder.Entity("Lesson.MultiTenancy.Tenant", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("int");
b.Property<string>("ConnectionString")
.HasColumnType("nvarchar(1024)")
.HasMaxLength(1024);
b.Property<DateTime>("CreationTime")
.HasColumnType("datetime2");
b.Property<long?>("CreatorUserId")
.HasColumnType("bigint");
b.Property<long?>("DeleterUserId")
.HasColumnType("bigint");
b.Property<DateTime?>("DeletionTime")
.HasColumnType("datetime2");
b.Property<int?>("EditionId")
.HasColumnType("int");
b.Property<bool>("IsActive")
.HasColumnType("bit");
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<DateTime?>("LastModificationTime")
.HasColumnType("datetime2");
b.Property<long?>("LastModifierUserId")
.HasColumnType("bigint");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("nvarchar(128)")
.HasMaxLength(128);
b.Property<string>("TenancyName")
.IsRequired()
.HasColumnType("nvarchar(64)")
.HasMaxLength(64);
b.HasKey("Id");
b.HasIndex("CreatorUserId");
b.HasIndex("DeleterUserId");
b.HasIndex("EditionId");
b.HasIndex("LastModifierUserId");
b.HasIndex("TenancyName");
b.ToTable("AbpTenants");
});
modelBuilder.Entity("Abp.Application.Features.EditionFeatureSetting", b =>
{
b.HasBaseType("Abp.Application.Features.FeatureSetting");
b.Property<int>("EditionId")
.HasColumnType("int");
b.HasIndex("EditionId", "Name");
b.ToTable("AbpFeatures");
b.HasDiscriminator().HasValue("EditionFeatureSetting");
});
modelBuilder.Entity("Abp.MultiTenancy.TenantFeatureSetting", b =>
{
b.HasBaseType("Abp.Application.Features.FeatureSetting");
b.HasIndex("TenantId", "Name");
b.ToTable("AbpFeatures");
b.HasDiscriminator().HasValue("TenantFeatureSetting");
});
modelBuilder.Entity("Abp.Authorization.Roles.RolePermissionSetting", b =>
{
b.HasBaseType("Abp.Authorization.PermissionSetting");
b.Property<int>("RoleId")
.HasColumnType("int");
b.HasIndex("RoleId");
b.ToTable("AbpPermissions");
b.HasDiscriminator().HasValue("RolePermissionSetting");
});
modelBuilder.Entity("Abp.Authorization.Users.UserPermissionSetting", b =>
{
b.HasBaseType("Abp.Authorization.PermissionSetting");
b.Property<long>("UserId")
.HasColumnType("bigint");
b.HasIndex("UserId");
b.ToTable("AbpPermissions");
b.HasDiscriminator().HasValue("UserPermissionSetting");
});
modelBuilder.Entity("Abp.Authorization.Roles.RoleClaim", b =>
{
b.HasOne("Lesson.Authorization.Roles.Role")
.WithMany("Claims")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Authorization.Users.UserClaim", b =>
{
b.HasOne("Lesson.Authorization.Users.User")
.WithMany("Claims")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Authorization.Users.UserLogin", b =>
{
b.HasOne("Lesson.Authorization.Users.User")
.WithMany("Logins")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Authorization.Users.UserRole", b =>
{
b.HasOne("Lesson.Authorization.Users.User")
.WithMany("Roles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Authorization.Users.UserToken", b =>
{
b.HasOne("Lesson.Authorization.Users.User")
.WithMany("Tokens")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Configuration.Setting", b =>
{
b.HasOne("Lesson.Authorization.Users.User")
.WithMany("Settings")
.HasForeignKey("UserId");
});
modelBuilder.Entity("Abp.EntityHistory.EntityChange", b =>
{
b.HasOne("Abp.EntityHistory.EntityChangeSet")
.WithMany("EntityChanges")
.HasForeignKey("EntityChangeSetId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.EntityHistory.EntityPropertyChange", b =>
{
b.HasOne("Abp.EntityHistory.EntityChange")
.WithMany("PropertyChanges")
.HasForeignKey("EntityChangeId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Organizations.OrganizationUnit", b =>
{
b.HasOne("Abp.Organizations.OrganizationUnit", "Parent")
.WithMany("Children")
.HasForeignKey("ParentId");
});
modelBuilder.Entity("Lesson.Authorization.Roles.Role", b =>
{
b.HasOne("Lesson.Authorization.Users.User", "CreatorUser")
.WithMany()
.HasForeignKey("CreatorUserId");
b.HasOne("Lesson.Authorization.Users.User", "DeleterUser")
.WithMany()
.HasForeignKey("DeleterUserId");
b.HasOne("Lesson.Authorization.Users.User", "LastModifierUser")
.WithMany()
.HasForeignKey("LastModifierUserId");
});
modelBuilder.Entity("Lesson.Authorization.Users.User", b =>
{
b.HasOne("Lesson.Authorization.Users.User", "CreatorUser")
.WithMany()
.HasForeignKey("CreatorUserId");
b.HasOne("Lesson.Authorization.Users.User", "DeleterUser")
.WithMany()
.HasForeignKey("DeleterUserId");
b.HasOne("Lesson.Authorization.Users.User", "LastModifierUser")
.WithMany()
.HasForeignKey("LastModifierUserId");
});
modelBuilder.Entity("Lesson.MultiTenancy.Tenant", b =>
{
b.HasOne("Lesson.Authorization.Users.User", "CreatorUser")
.WithMany()
.HasForeignKey("CreatorUserId");
b.HasOne("Lesson.Authorization.Users.User", "DeleterUser")
.WithMany()
.HasForeignKey("DeleterUserId");
b.HasOne("Abp.Application.Editions.Edition", "Edition")
.WithMany()
.HasForeignKey("EditionId");
b.HasOne("Lesson.Authorization.Users.User", "LastModifierUser")
.WithMany()
.HasForeignKey("LastModifierUserId");
});
modelBuilder.Entity("Abp.Application.Features.EditionFeatureSetting", b =>
{
b.HasOne("Abp.Application.Editions.Edition", "Edition")
.WithMany()
.HasForeignKey("EditionId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Authorization.Roles.RolePermissionSetting", b =>
{
b.HasOne("Lesson.Authorization.Roles.Role")
.WithMany("Permissions")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity("Abp.Authorization.Users.UserPermissionSetting", b =>
{
b.HasOne("Lesson.Authorization.Users.User")
.WithMany("Permissions")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade);
});
#pragma warning restore 612, 618
}
}
}
| 35.376486 | 117 | 0.431311 | [
"MIT"
] | bsogulcan/LessonCore | aspnet-core/src/Lesson.EntityFrameworkCore/Migrations/20180201051646_Upgraded_To_Abp_v3.4.0.Designer.cs | 53,560 | C# |
using Basket.API.Entities;
using Microsoft.Extensions.Caching.Distributed;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace Basket.API.Repositories
{
public class BasketRepository : IBasketRepository
{
    // Distributed cache (Redis, per the injected IDistributedCache) used as the
    // basket store; baskets are keyed by user name and stored as JSON strings.
    private readonly IDistributedCache _redisCache;

    public BasketRepository(IDistributedCache redisCache)
    {
        // Fail fast on a misconfigured DI container instead of surfacing a
        // NullReferenceException on the first cache access.
        _redisCache = redisCache ?? throw new ArgumentNullException(nameof(redisCache));
    }

    /// <summary>
    /// Removes the cached basket for <paramref name="userName"/>.
    /// Removing a key that is not present is a no-op.
    /// </summary>
    public async Task DeleteBasket(string userName)
    {
        await _redisCache.RemoveAsync(userName);
    }

    /// <summary>
    /// Retrieves and deserializes the basket stored for <paramref name="userName"/>.
    /// </summary>
    /// <returns>The basket, or <c>null</c> when no basket is cached.</returns>
    public async Task<ShoppingCart> GetBasket(string userName)
    {
        var basket = await _redisCache.GetStringAsync(userName);
        if (string.IsNullOrEmpty(basket))
        {
            return null;
        }
        return JsonConvert.DeserializeObject<ShoppingCart>(basket);
    }

    /// <summary>
    /// Serializes <paramref name="basket"/> and stores it under its user name,
    /// then reads it back so the caller receives the cached representation.
    /// </summary>
    public async Task<ShoppingCart> UpdateBasket(ShoppingCart basket)
    {
        await _redisCache.SetStringAsync(basket.UserName, JsonConvert.SerializeObject(basket));
        return await GetBasket(basket.UserName);
    }
}
| 27.651163 | 99 | 0.655172 | [
"MIT"
] | hpslogics/AspnetMicroservices | src/Services/Basket/Basket.API/Repositories/BasketRepository.cs | 1,191 | C# |
// Assembly metadata for the 04.ReplaceATag project. These attributes are
// embedded into the compiled assembly and are visible via file properties
// and reflection (Assembly.GetCustomAttributes).
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("04.ReplaceATag")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("04.ReplaceATag")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM.
[assembly: Guid("38a4dec7-efdb-465e-badd-8b5d8fdf2fc8")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 37.72973 | 84 | 0.747135 | [
"MIT"
] | drunin89/SoftUni | Programming with C#/5.C#Advanced/7.Regular Expressions/2.Exercises/Regular Expressions - Exercise/04.ReplaceATag/Properties/AssemblyInfo.cs | 1,399 | C# |
/*
* LUSID API
*
* # Introduction This page documents the [LUSID APIs](https://www.lusid.com/api/swagger), which allows authorised clients to query and update their data within the LUSID platform. SDKs to interact with the LUSID APIs are available in the following languages and frameworks: * [C#](https://github.com/finbourne/lusid-sdk-csharp) * [Java](https://github.com/finbourne/lusid-sdk-java) * [JavaScript](https://github.com/finbourne/lusid-sdk-js) * [Python](https://github.com/finbourne/lusid-sdk-python) * [Angular](https://github.com/finbourne/lusid-sdk-angular) # Data Model The LUSID API has a relatively lightweight but extremely powerful data model. One of the goals of LUSID was not to enforce on clients a single rigid data model but rather to provide a flexible foundation onto which clients can map their own data models. The core entities in LUSID provide a minimal structure and set of relationships, and the data model can be extended using Properties. The LUSID data model is exposed through the LUSID APIs. The APIs provide access to both business objects and the meta data used to configure the systems behaviours. The key business entities are: - * **Portfolios** A portfolio is a container for transactions and holdings (a **Transaction Portfolio**) or constituents (a **Reference Portfolio**). * **Derived Portfolios**. Derived Portfolios allow Portfolios to be created based on other Portfolios, by overriding or adding specific items. * **Holdings** A Holding is a quantity of an Instrument or a balance of cash within a Portfolio. Holdings can only be adjusted via Transactions. * **Transactions** A Transaction is an economic event that occurs in a Portfolio, causing its holdings to change. * **Corporate Actions** A corporate action is a market event which occurs to an Instrument and thus applies to all portfolios which holding the instrument. Examples are stock splits or mergers. 
* **Constituents** A constituent is a record in a Reference Portfolio containing an Instrument and an associated weight. * **Instruments** An instrument represents a currency, tradable instrument or OTC contract that is attached to a transaction and a holding. * **Properties** All major entities allow additional user defined properties to be associated with them. For example, a Portfolio manager may be associated with a portfolio. Meta data includes: - * **Transaction Types** Transactions are booked with a specific transaction type. The types are client defined and are used to map the Transaction to a series of movements which update the portfolio holdings. * **Properties Types** Types of user defined properties used within the system. ## Scope All data in LUSID is segregated at the client level. Entities in LUSID are identifiable by a unique code. Every entity lives within a logical data partition known as a Scope. Scope is an identity namespace allowing two entities with the same unique code to co-exist within individual address spaces. For example, prices for equities from different vendors may be uploaded into different scopes such as `client/vendor1` and `client/vendor2`. A portfolio may then be valued using either of the price sources by referencing the appropriate scope. LUSID Clients cannot access scopes of other clients. ## Instruments LUSID has its own built-in instrument master which you can use to master your own instrument universe. Every instrument must be created with one or more unique market identifiers, such as [FIGI](https://openfigi.com/). For any non-listed instruments (eg OTCs), you can upload an instrument against a custom ID of your choosing. In addition, LUSID will allocate each instrument a unique 'LUSID instrument identifier'. The LUSID instrument identifier is what is used when uploading transactions, holdings, prices, etc. 
The API exposes an `instrument/lookup` endpoint which can be used to lookup these LUSID identifiers using their market identifiers. Cash can be referenced using the ISO currency code prefixed with \"`CCY_`\" e.g. `CCY_GBP` ## Instrument Data Instrument data can be uploaded to the system using the [Instrument Properties](#operation/UpsertInstrumentsProperties) endpoint. | Field|Type|Description | | - --|- --|- -- | | Key|propertykey|The key of the property. This takes the format {domain}/{scope}/{code} e.g. 'Instrument/system/Name' or 'Transaction/strategy/quantsignal'. | | Value|string|The value of the property. | | EffectiveFrom|datetimeoffset|The effective datetime from which the property is valid. | | EffectiveUntil|datetimeoffset|The effective datetime until which the property is valid. If not supplied this will be valid indefinitely, or until the next 'effectiveFrom' datetime of the property. | ## Transaction Portfolios Portfolios are the top-level entity containers within LUSID, containing transactions, corporate actions and holdings. The transactions build up the portfolio holdings on which valuations, analytics profit & loss and risk can be calculated. Properties can be associated with Portfolios to add in additional data. Portfolio properties can be changed over time, for example to allow a Portfolio Manager to be linked with a Portfolio. Additionally, portfolios can be securitised and held by other portfolios, allowing LUSID to perform \"drill-through\" into underlying fund holdings ### Derived Portfolios LUSID also allows for a portfolio to be composed of another portfolio via derived portfolios. A derived portfolio can contain its own transactions and also inherits any transactions from its parent portfolio. Any changes made to the parent portfolio are automatically reflected in derived portfolio. Derived portfolios in conjunction with scopes are a powerful construct. 
For example, to do pre-trade what-if analysis, a derived portfolio could be created a new namespace linked to the underlying live (parent) portfolio. Analysis can then be undertaken on the derived portfolio without affecting the live portfolio. ### Transactions A transaction represents an economic activity against a Portfolio. Transactions are processed according to a configuration. This will tell the LUSID engine how to interpret the transaction and correctly update the holdings. LUSID comes with a set of transaction types you can use out of the box, or you can configure your own set(s) of transactions. For more details see the [LUSID Getting Started Guide for transaction configuration.](https://support.lusid.com/configuring-transaction-types) | Field|Type|Description | | - --|- --|- -- | | TransactionId|string|The unique identifier for the transaction. | | Type|string|The type of the transaction e.g. 'Buy', 'Sell'. The transaction type should have been pre-configured via the System Configuration API endpoint. If it hasn't been pre-configured the transaction will still be updated or inserted however you will be unable to generate the resultant holdings for the portfolio that contains this transaction as LUSID does not know how to process it. | | InstrumentIdentifiers|map|A set of instrument identifiers to use to resolve the transaction to a unique instrument. | | TransactionDate|dateorcutlabel|The date of the transaction. | | SettlementDate|dateorcutlabel|The settlement date of the transaction. | | Units|decimal|The number of units transacted in the associated instrument. | | TransactionPrice|transactionprice|The price for each unit of the transacted instrument in the transaction currency. | | TotalConsideration|currencyandamount|The total value of the transaction in the settlement currency. | | ExchangeRate|decimal|The exchange rate between the transaction and settlement currency (settlement currency being represented by the TotalConsideration.Currency). 
For example if the transaction currency is in USD and the settlement currency is in GBP this this the USD/GBP rate. | | TransactionCurrency|currency|The transaction currency. | | Properties|map|Set of unique transaction properties and associated values to store with the transaction. Each property must be from the 'Transaction' domain. | | CounterpartyId|string|The identifier for the counterparty of the transaction. | | Source|string|The source of the transaction. This is used to look up the appropriate transaction group set in the transaction type configuration. | From these fields, the following values can be calculated * **Transaction value in Transaction currency**: TotalConsideration / ExchangeRate * **Transaction value in Portfolio currency**: Transaction value in Transaction currency * TradeToPortfolioRate #### Example Transactions ##### A Common Purchase Example Three example transactions are shown in the table below. They represent a purchase of USD denominated IBM shares within a Sterling denominated portfolio. * The first two transactions are for separate buy and fx trades * Buying 500 IBM shares for $71,480.00 * A spot foreign exchange conversion to fund the IBM purchase. (Buy $71,480.00 for £54,846.60) * The third transaction is an alternate version of the above trades. Buying 500 IBM shares and settling directly in Sterling. 
| Column | Buy Trade | Fx Trade | Buy Trade with foreign Settlement | | - -- -- | - -- -- | - -- -- | - -- -- | | TransactionId | FBN00001 | FBN00002 | FBN00003 | | Type | Buy | FxBuy | Buy | | InstrumentIdentifiers | { \"figi\", \"BBG000BLNNH6\" } | { \"CCY\", \"CCY_USD\" } | { \"figi\", \"BBG000BLNNH6\" } | | TransactionDate | 2018-08-02 | 2018-08-02 | 2018-08-02 | | SettlementDate | 2018-08-06 | 2018-08-06 | 2018-08-06 | | Units | 500 | 71480 | 500 | | TransactionPrice | 142.96 | 1 | 142.96 | | TradeCurrency | USD | USD | USD | | ExchangeRate | 1 | 0.7673 | 0.7673 | | TotalConsideration.Amount | 71480.00 | 54846.60 | 54846.60 | | TotalConsideration.Currency | USD | GBP | GBP | | Trade/default/TradeToPortfolioRate* | 0.7673 | 0.7673 | 0.7673 | [* This is a property field] ##### A Forward FX Example LUSID has a flexible transaction modelling system, meaning there are a number of different ways of modelling forward fx trades. The default LUSID transaction types are FwdFxBuy and FwdFxSell. Using these transaction types, LUSID will generate two holdings for each Forward FX trade, one for each currency in the trade. 
An example Forward Fx trade to sell GBP for USD in a JPY-denominated portfolio is shown below: | Column | Forward 'Sell' Trade | Notes | | - -- -- | - -- -- | - -- - | | TransactionId | FBN00004 | | | Type | FwdFxSell | | | InstrumentIdentifiers | { \"Instrument/default/Currency\", \"GBP\" } | | | TransactionDate | 2018-08-02 | | | SettlementDate | 2019-02-06 | Six month forward | | Units | 10000.00 | Units of GBP | | TransactionPrice | 1 | | | TradeCurrency | GBP | Currency being sold | | ExchangeRate | 1.3142 | Agreed rate between GBP and USD | | TotalConsideration.Amount | 13142.00 | Amount in the settlement currency, USD | | TotalConsideration.Currency | USD | Settlement currency | | Trade/default/TradeToPortfolioRate | 142.88 | Rate between trade currency, GBP and portfolio base currency, JPY | Please note that exactly the same economic behaviour could be modelled using the FwdFxBuy Transaction Type with the amounts and rates reversed. ### Holdings A holding represents a position in an instrument or cash on a given date. | Field|Type|Description | | - --|- --|- -- | | InstrumentUid|string|The unqiue Lusid Instrument Id (LUID) of the instrument that the holding is in. | | SubHoldingKeys|map|The sub-holding properties which identify the holding. Each property will be from the 'Transaction' domain. These are configured when a transaction portfolio is created. | | Properties|map|The properties which have been requested to be decorated onto the holding. These will be from the 'Instrument' or 'Holding' domain. | | HoldingType|string|The type of the holding e.g. Position, Balance, CashCommitment, Receivable, ForwardFX etc. | | Units|decimal|The total number of units of the holding. | | SettledUnits|decimal|The total number of settled units of the holding. | | Cost|currencyandamount|The total cost of the holding in the transaction currency. | | CostPortfolioCcy|currencyandamount|The total cost of the holding in the portfolio currency. 
| | Transaction|transaction|The transaction associated with an unsettled holding. | | Currency|currency|The holding currency. | ## Corporate Actions Corporate actions are represented within LUSID in terms of a set of instrument-specific 'transitions'. These transitions are used to specify the participants of the corporate action, and the effect that the corporate action will have on holdings in those participants. ### Corporate Action | Field|Type|Description | | - --|- --|- -- | | CorporateActionCode|code|The unique identifier of this corporate action | | Description|string| | | AnnouncementDate|datetimeoffset|The announcement date of the corporate action | | ExDate|datetimeoffset|The ex date of the corporate action | | RecordDate|datetimeoffset|The record date of the corporate action | | PaymentDate|datetimeoffset|The payment date of the corporate action | | Transitions|corporateactiontransition[]|The transitions that result from this corporate action | ### Transition | Field|Type|Description | | - --|- --|- -- | | InputTransition|corporateactiontransitioncomponent|Indicating the basis of the corporate action - which security and how many units | | OutputTransitions|corporateactiontransitioncomponent[]|What will be generated relative to the input transition | ### Example Corporate Action Transitions #### A Dividend Action Transition In this example, for each share of IBM, 0.20 units (or 20 pence) of GBP are generated. | Column | Input Transition | Output Transition | | - -- -- | - -- -- | - -- -- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"ccy\" : \"CCY_GBP\" } | | Units Factor | 1 | 0.20 | | Cost Factor | 1 | 0 | #### A Split Action Transition In this example, for each share of IBM, we end up with 2 units (2 shares) of IBM, with total value unchanged. 
| Column | Input Transition | Output Transition | | - -- -- | - -- -- | - -- -- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | | Units Factor | 1 | 2 | | Cost Factor | 1 | 1 | #### A Spinoff Action Transition In this example, for each share of IBM, we end up with 1 unit (1 share) of IBM and 3 units (3 shares) of Celestica, with 85% of the value remaining on the IBM share, and 5% in each Celestica share (15% total). | Column | Input Transition | Output Transition 1 | Output Transition 2 | | - -- -- | - -- -- | - -- -- | - -- -- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000HBGRF3\" } | | Units Factor | 1 | 1 | 3 | | Cost Factor | 1 | 0.85 | 0.15 | ## Reference Portfolios Reference portfolios are portfolios that contain constituents with weights. They are designed to represent entities such as indices and benchmarks. ### Constituents | Field|Type|Description | | - --|- --|- -- | | InstrumentIdentifiers|map|Unique instrument identifiers | | InstrumentUid|string|LUSID's internal unique instrument identifier, resolved from the instrument identifiers | | Currency|decimal| | | Weight|decimal| | | FloatingWeight|decimal| | ## Portfolio Groups Portfolio groups allow the construction of a hierarchy from portfolios and groups. Portfolio operations on the group are executed on an aggregated set of portfolios in the hierarchy. For example: * Global Portfolios _(group)_ * APAC _(group)_ * Hong Kong _(portfolio)_ * Japan _(portfolio)_ * Europe _(group)_ * France _(portfolio)_ * Germany _(portfolio)_ * UK _(portfolio)_ In this example **Global Portfolios** is a group that consists of an aggregate of **Hong Kong**, **Japan**, **France**, **Germany** and **UK** portfolios. ## Properties Properties are key-value pairs that can be applied to any entity within a domain (where a domain is `trade`, `portfolio`, `security` etc). 
Properties must be defined before use with a `PropertyDefinition` and can then subsequently be added to entities. ## Schema A detailed description of the entities used by the API and parameters for endpoints which take a JSON document can be retrieved via the `schema` endpoint. ## Meta data The following headers are returned on all responses from LUSID | Name | Purpose | | - -- | - -- | | lusid-meta-duration | Duration of the request | | lusid-meta-success | Whether or not LUSID considered the request to be successful | | lusid-meta-requestId | The unique identifier for the request | | lusid-schema-url | Url of the schema for the data being returned | | lusid-property-schema-url | Url of the schema for any properties | # Error Codes | Code|Name|Description | | - --|- --|- -- | | <a name=\"-10\">-10</a>|Server Configuration Error| | | <a name=\"-1\">-1</a>|Unknown error|An unexpected error was encountered on our side. | | <a name=\"102\">102</a>|Version Not Found| | | <a name=\"103\">103</a>|Api Rate Limit Violation| | | <a name=\"104\">104</a>|Instrument Not Found| | | <a name=\"105\">105</a>|Property Not Found| | | <a name=\"106\">106</a>|Portfolio Recursion Depth| | | <a name=\"108\">108</a>|Group Not Found| | | <a name=\"109\">109</a>|Portfolio Not Found| | | <a name=\"110\">110</a>|Property Schema Not Found| | | <a name=\"111\">111</a>|Portfolio Ancestry Not Found| | | <a name=\"112\">112</a>|Portfolio With Id Already Exists| | | <a name=\"113\">113</a>|Orphaned Portfolio| | | <a name=\"119\">119</a>|Missing Base Claims| | | <a name=\"121\">121</a>|Property Not Defined| | | <a name=\"122\">122</a>|Cannot Delete System Property| | | <a name=\"123\">123</a>|Cannot Modify Immutable Property Field| | | <a name=\"124\">124</a>|Property Already Exists| | | <a name=\"125\">125</a>|Invalid Property Life Time| | | <a name=\"126\">126</a>|Property Constraint Style Excludes Properties| | | <a name=\"127\">127</a>|Cannot Modify Default Data Type| | | <a 
name=\"128\">128</a>|Group Already Exists| | | <a name=\"129\">129</a>|No Such Data Type| | | <a name=\"130\">130</a>|Undefined Value For Data Type| | | <a name=\"131\">131</a>|Unsupported Value Type Defined On Data Type| | | <a name=\"132\">132</a>|Validation Error| | | <a name=\"133\">133</a>|Loop Detected In Group Hierarchy| | | <a name=\"134\">134</a>|Undefined Acceptable Values| | | <a name=\"135\">135</a>|Sub Group Already Exists| | | <a name=\"138\">138</a>|Price Source Not Found| | | <a name=\"139\">139</a>|Analytic Store Not Found| | | <a name=\"141\">141</a>|Analytic Store Already Exists| | | <a name=\"143\">143</a>|Client Instrument Already Exists| | | <a name=\"144\">144</a>|Duplicate In Parameter Set| | | <a name=\"147\">147</a>|Results Not Found| | | <a name=\"148\">148</a>|Order Field Not In Result Set| | | <a name=\"149\">149</a>|Operation Failed| | | <a name=\"150\">150</a>|Elastic Search Error| | | <a name=\"151\">151</a>|Invalid Parameter Value| | | <a name=\"153\">153</a>|Command Processing Failure| | | <a name=\"154\">154</a>|Entity State Construction Failure| | | <a name=\"155\">155</a>|Entity Timeline Does Not Exist| | | <a name=\"156\">156</a>|Concurrency Conflict Failure| | | <a name=\"157\">157</a>|Invalid Request| | | <a name=\"158\">158</a>|Event Publish Unknown| | | <a name=\"159\">159</a>|Event Query Failure| | | <a name=\"160\">160</a>|Blob Did Not Exist| | | <a name=\"162\">162</a>|Sub System Request Failure| | | <a name=\"163\">163</a>|Sub System Configuration Failure| | | <a name=\"165\">165</a>|Failed To Delete| | | <a name=\"166\">166</a>|Upsert Client Instrument Failure| | | <a name=\"167\">167</a>|Illegal As At Interval| | | <a name=\"168\">168</a>|Illegal Bitemporal Query| | | <a name=\"169\">169</a>|Invalid Alternate Id| | | <a name=\"170\">170</a>|Cannot Add Source Portfolio Property Explicitly| | | <a name=\"171\">171</a>|Entity Already Exists In Group| | | <a name=\"173\">173</a>|Entity With Id Already Exists| | | <a 
name=\"174\">174</a>|Derived Portfolio Details Do Not Exist| | | <a name=\"176\">176</a>|Portfolio With Name Already Exists| | | <a name=\"177\">177</a>|Invalid Transactions| | | <a name=\"178\">178</a>|Reference Portfolio Not Found| | | <a name=\"179\">179</a>|Duplicate Id| | | <a name=\"180\">180</a>|Command Retrieval Failure| | | <a name=\"181\">181</a>|Data Filter Application Failure| | | <a name=\"182\">182</a>|Search Failed| | | <a name=\"183\">183</a>|Movements Engine Configuration Key Failure| | | <a name=\"184\">184</a>|Fx Rate Source Not Found| | | <a name=\"185\">185</a>|Accrual Source Not Found| | | <a name=\"186\">186</a>|Access Denied| | | <a name=\"187\">187</a>|Invalid Identity Token| | | <a name=\"188\">188</a>|Invalid Request Headers| | | <a name=\"189\">189</a>|Price Not Found| | | <a name=\"190\">190</a>|Invalid Sub Holding Keys Provided| | | <a name=\"191\">191</a>|Duplicate Sub Holding Keys Provided| | | <a name=\"192\">192</a>|Cut Definition Not Found| | | <a name=\"193\">193</a>|Cut Definition Invalid| | | <a name=\"194\">194</a>|Time Variant Property Deletion Date Unspecified| | | <a name=\"195\">195</a>|Perpetual Property Deletion Date Specified| | | <a name=\"196\">196</a>|Time Variant Property Upsert Date Unspecified| | | <a name=\"197\">197</a>|Perpetual Property Upsert Date Specified| | | <a name=\"200\">200</a>|Invalid Unit For Data Type| | | <a name=\"201\">201</a>|Invalid Type For Data Type| | | <a name=\"202\">202</a>|Invalid Value For Data Type| | | <a name=\"203\">203</a>|Unit Not Defined For Data Type| | | <a name=\"204\">204</a>|Units Not Supported On Data Type| | | <a name=\"205\">205</a>|Cannot Specify Units On Data Type| | | <a name=\"206\">206</a>|Unit Schema Inconsistent With Data Type| | | <a name=\"207\">207</a>|Unit Definition Not Specified| | | <a name=\"208\">208</a>|Duplicate Unit Definitions Specified| | | <a name=\"209\">209</a>|Invalid Units Definition| | | <a name=\"210\">210</a>|Invalid Instrument Identifier 
Unit| | | <a name=\"211\">211</a>|Holdings Adjustment Does Not Exist| | | <a name=\"212\">212</a>|Could Not Build Excel Url| | | <a name=\"213\">213</a>|Could Not Get Excel Version| | | <a name=\"214\">214</a>|Instrument By Code Not Found| | | <a name=\"215\">215</a>|Entity Schema Does Not Exist| | | <a name=\"216\">216</a>|Feature Not Supported On Portfolio Type| | | <a name=\"217\">217</a>|Quote Not Found| | | <a name=\"218\">218</a>|Invalid Quote Identifier| | | <a name=\"219\">219</a>|Invalid Metric For Data Type| | | <a name=\"220\">220</a>|Invalid Instrument Definition| | | <a name=\"221\">221</a>|Instrument Upsert Failure| | | <a name=\"222\">222</a>|Reference Portfolio Request Not Supported| | | <a name=\"223\">223</a>|Transaction Portfolio Request Not Supported| | | <a name=\"224\">224</a>|Invalid Property Value Assignment| | | <a name=\"230\">230</a>|Transaction Type Not Found| | | <a name=\"231\">231</a>|Transaction Type Duplication| | | <a name=\"232\">232</a>|Portfolio Does Not Exist At Given Date| | | <a name=\"233\">233</a>|Query Parser Failure| | | <a name=\"234\">234</a>|Duplicate Constituent| | | <a name=\"235\">235</a>|Unresolved Instrument Constituent| | | <a name=\"236\">236</a>|Unresolved Instrument In Transition| | | <a name=\"237\">237</a>|Missing Side Definitions| | | <a name=\"299\">299</a>|Invalid Recipe| | | <a name=\"300\">300</a>|Missing Recipe| | | <a name=\"301\">301</a>|Dependencies| | | <a name=\"304\">304</a>|Portfolio Preprocess Failure| | | <a name=\"310\">310</a>|Valuation Engine Failure| | | <a name=\"311\">311</a>|Task Factory Failure| | | <a name=\"312\">312</a>|Task Evaluation Failure| | | <a name=\"313\">313</a>|Task Generation Failure| | | <a name=\"314\">314</a>|Engine Configuration Failure| | | <a name=\"315\">315</a>|Model Specification Failure| | | <a name=\"320\">320</a>|Market Data Key Failure| | | <a name=\"321\">321</a>|Market Resolver Failure| | | <a name=\"322\">322</a>|Market Data Failure| | | <a 
name=\"330\">330</a>|Curve Failure| | | <a name=\"331\">331</a>|Volatility Surface Failure| | | <a name=\"332\">332</a>|Volatility Cube Failure| | | <a name=\"350\">350</a>|Instrument Failure| | | <a name=\"351\">351</a>|Cash Flows Failure| | | <a name=\"352\">352</a>|Reference Data Failure| | | <a name=\"360\">360</a>|Aggregation Failure| | | <a name=\"361\">361</a>|Aggregation Measure Failure| | | <a name=\"370\">370</a>|Result Retrieval Failure| | | <a name=\"371\">371</a>|Result Processing Failure| | | <a name=\"372\">372</a>|Vendor Result Processing Failure| | | <a name=\"373\">373</a>|Vendor Result Mapping Failure| | | <a name=\"374\">374</a>|Vendor Library Unauthorised| | | <a name=\"375\">375</a>|Vendor Connectivity Error| | | <a name=\"376\">376</a>|Vendor Interface Error| | | <a name=\"377\">377</a>|Vendor Pricing Failure| | | <a name=\"378\">378</a>|Vendor Translation Failure| | | <a name=\"379\">379</a>|Vendor Key Mapping Failure| | | <a name=\"380\">380</a>|Vendor Reflection Failure| | | <a name=\"390\">390</a>|Attempt To Upsert Duplicate Quotes| | | <a name=\"391\">391</a>|Corporate Action Source Does Not Exist| | | <a name=\"392\">392</a>|Corporate Action Source Already Exists| | | <a name=\"393\">393</a>|Instrument Identifier Already In Use| | | <a name=\"394\">394</a>|Properties Not Found| | | <a name=\"395\">395</a>|Batch Operation Aborted| | | <a name=\"400\">400</a>|Invalid Iso4217 Currency Code| | | <a name=\"401\">401</a>|Cannot Assign Instrument Identifier To Currency| | | <a name=\"402\">402</a>|Cannot Assign Currency Identifier To Non Currency| | | <a name=\"403\">403</a>|Currency Instrument Cannot Be Deleted| | | <a name=\"404\">404</a>|Currency Instrument Cannot Have Economic Definition| | | <a name=\"405\">405</a>|Currency Instrument Cannot Have Lookthrough Portfolio| | | <a name=\"406\">406</a>|Cannot Create Currency Instrument With Multiple Identifiers| | | <a name=\"407\">407</a>|Specified Currency Is Undefined| | | <a 
name=\"410\">410</a>|Index Does Not Exist| | | <a name=\"411\">411</a>|Sort Field Does Not Exist| | | <a name=\"413\">413</a>|Negative Pagination Parameters| | | <a name=\"414\">414</a>|Invalid Search Syntax| | | <a name=\"415\">415</a>|Filter Execution Timeout| | | <a name=\"420\">420</a>|Side Definition Inconsistent| | | <a name=\"450\">450</a>|Invalid Quote Access Metadata Rule| | | <a name=\"451\">451</a>|Access Metadata Not Found| | | <a name=\"452\">452</a>|Invalid Access Metadata Identifier| | | <a name=\"460\">460</a>|Standard Resource Not Found| | | <a name=\"461\">461</a>|Standard Resource Conflict| | | <a name=\"462\">462</a>|Calendar Not Found| | | <a name=\"463\">463</a>|Date In A Calendar Not Found| | | <a name=\"464\">464</a>|Invalid Date Source Data| | | <a name=\"465\">465</a>|Invalid Timezone| | | <a name=\"601\">601</a>|Person Identifier Already In Use| | | <a name=\"602\">602</a>|Person Not Found| | | <a name=\"603\">603</a>|Cannot Set Identifier| | | <a name=\"617\">617</a>|Invalid Recipe Specification In Request| | | <a name=\"618\">618</a>|Inline Recipe Deserialisation Failure| | | <a name=\"619\">619</a>|Identifier Types Not Set For Entity| | | <a name=\"620\">620</a>|Cannot Delete All Client Defined Identifiers| | | <a name=\"650\">650</a>|The Order requested was not found.| | | <a name=\"654\">654</a>|The Allocation requested was not found.| | | <a name=\"655\">655</a>|Cannot build the fx forward target with the given holdings.| | | <a name=\"656\">656</a>|Group does not contain expected entities.| | | <a name=\"667\">667</a>|Relation definition already exists| | | <a name=\"673\">673</a>|Missing entitlements for entities in Group| | | <a name=\"674\">674</a>|Next Best Action not found| | | <a name=\"676\">676</a>|Relation definition not defined| | | <a name=\"677\">677</a>|Invalid entity identifier for relation| | | <a name=\"681\">681</a>|Sorting by specified field not supported|One or more of the provided fields to order by were either 
invalid or not supported. | | <a name=\"682\">682</a>|Too many fields to sort by|The number of fields to sort the data by exceeds the number allowed by the endpoint | | <a name=\"684\">684</a>|Sequence Not Found| | | <a name=\"685\">685</a>|Sequence Already Exists| | | <a name=\"686\">686</a>|Non-cycling sequence has been exhausted| | | <a name=\"687\">687</a>|Legal Entity Identifier Already In Use| | | <a name=\"688\">688</a>|Legal Entity Not Found| | | <a name=\"689\">689</a>|The supplied pagination token is invalid| | | <a name=\"690\">690</a>|Property Type Is Not Supported| | | <a name=\"691\">691</a>|Multiple Tax-lots For Currency Type Is Not Supported| | | <a name=\"692\">692</a>|This endpoint does not support impersonation| | | <a name=\"693\">693</a>|Entity type is not supported for Relationship| | | <a name=\"694\">694</a>|Relationship Validation Failure| | | <a name=\"695\">695</a>|Relationship Not Found| | | <a name=\"697\">697</a>|Derived Property Formula No Longer Valid| | | <a name=\"698\">698</a>|Story is not available| | | <a name=\"703\">703</a>|Corporate Action Does Not Exist| |
*
* The version of the OpenAPI document: 0.11.2810
* Contact: info@finbourne.com
* Generated by: https://github.com/openapitools/openapi-generator.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using OpenAPIDateConverter = Lusid.Sdk.Client.OpenAPIDateConverter;
namespace Lusid.Sdk.Model
{
/// <summary>
/// A paged list of <see cref="Relationship"/> resources together with
/// navigation links to the surrounding pages.
/// </summary>
[DataContract]
public partial class ResourceListOfRelationship : IEquatable<ResourceListOfRelationship>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ResourceListOfRelationship" /> class.
    /// Used by the JSON deserializer only.
    /// </summary>
    [JsonConstructorAttribute]
    protected ResourceListOfRelationship() { }

    /// <summary>
    /// Initializes a new instance of the <see cref="ResourceListOfRelationship" /> class.
    /// </summary>
    /// <param name="values">values (required).</param>
    /// <param name="href">href.</param>
    /// <param name="links">links.</param>
    /// <param name="nextPage">nextPage.</param>
    /// <param name="previousPage">previousPage.</param>
    /// <exception cref="InvalidDataException">Thrown when <paramref name="values"/> is null.</exception>
    public ResourceListOfRelationship(List<Relationship> values = default(List<Relationship>), string href = default(string), List<Link> links = default(List<Link>), string nextPage = default(string), string previousPage = default(string))
    {
        // "values" is the only required property and must not be null.
        if (values == null)
        {
            throw new InvalidDataException("values is a required property for ResourceListOfRelationship and cannot be null");
        }
        this.Values = values;

        // Fix: the generated constructor previously assigned each of the four
        // optional properties twice; a single assignment is sufficient.
        this.Href = href;
        this.Links = links;
        this.NextPage = nextPage;
        this.PreviousPage = previousPage;
    }

    /// <summary>
    /// Gets or Sets Values
    /// </summary>
    [DataMember(Name="values", EmitDefaultValue=false)]
    public List<Relationship> Values { get; set; }

    /// <summary>
    /// Gets or Sets Href
    /// </summary>
    [DataMember(Name="href", EmitDefaultValue=true)]
    public string Href { get; set; }

    /// <summary>
    /// Gets or Sets Links
    /// </summary>
    [DataMember(Name="links", EmitDefaultValue=true)]
    public List<Link> Links { get; set; }

    /// <summary>
    /// Gets or Sets NextPage
    /// </summary>
    [DataMember(Name="nextPage", EmitDefaultValue=true)]
    public string NextPage { get; set; }

    /// <summary>
    /// Gets or Sets PreviousPage
    /// </summary>
    [DataMember(Name="previousPage", EmitDefaultValue=true)]
    public string PreviousPage { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class ResourceListOfRelationship {\n");
        sb.Append(" Values: ").Append(Values).Append("\n");
        sb.Append(" Href: ").Append(Href).Append("\n");
        sb.Append(" Links: ").Append(Links).Append("\n");
        sb.Append(" NextPage: ").Append(NextPage).Append("\n");
        sb.Append(" PreviousPage: ").Append(PreviousPage).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public virtual string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="input">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object input)
    {
        return this.Equals(input as ResourceListOfRelationship);
    }

    /// <summary>
    /// Returns true if ResourceListOfRelationship instances are equal
    /// </summary>
    /// <param name="input">Instance of ResourceListOfRelationship to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(ResourceListOfRelationship input)
    {
        if (input == null)
            return false;

        // Collections compare element-wise; scalars compare by Equals.
        return
            (
                this.Values == input.Values ||
                this.Values != null &&
                input.Values != null &&
                this.Values.SequenceEqual(input.Values)
            ) &&
            (
                this.Href == input.Href ||
                (this.Href != null &&
                this.Href.Equals(input.Href))
            ) &&
            (
                this.Links == input.Links ||
                this.Links != null &&
                input.Links != null &&
                this.Links.SequenceEqual(input.Links)
            ) &&
            (
                this.NextPage == input.NextPage ||
                (this.NextPage != null &&
                this.NextPage.Equals(input.NextPage))
            ) &&
            (
                this.PreviousPage == input.PreviousPage ||
                (this.PreviousPage != null &&
                this.PreviousPage.Equals(input.PreviousPage))
            );
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            int hashCode = 41;
            if (this.Values != null)
                hashCode = hashCode * 59 + this.Values.GetHashCode();
            if (this.Href != null)
                hashCode = hashCode * 59 + this.Href.GetHashCode();
            if (this.Links != null)
                hashCode = hashCode * 59 + this.Links.GetHashCode();
            if (this.NextPage != null)
                hashCode = hashCode * 59 + this.NextPage.GetHashCode();
            if (this.PreviousPage != null)
                hashCode = hashCode * 59 + this.PreviousPage.GetHashCode();
            return hashCode;
        }
    }
}
}
| 184.401015 | 29,340 | 0.66964 | [
"MIT"
] | SamuelColvinFinbourne/lusid-sdk-csharp-preview | sdk/Lusid.Sdk/Model/ResourceListOfRelationship.cs | 36,327 | C# |
namespace Burls.Windows.Constants
{
    /// <summary>
    /// Well-known region names used when registering views with the shell.
    /// </summary>
    public class Regions
    {
        // Name of the region that hosts the application's main content.
        // NOTE(review): presumably consumed by region-manager registrations — confirm against the views.
        public const string Main = "MainRegion";
    }
}
| 15.625 | 48 | 0.632 | [
"MIT"
] | BEzGumpTion/Burls | Burls.Windows/Constants/Regions.cs | 127 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using VersOne.Epub.Schema;
using VersOne.Epub.Utils;
namespace VersOne.Epub.Internal
{
internal static class BookCoverReader
{
public static EpubByteContentFileRef ReadBookCover(EpubSchema epubSchema, Dictionary<string, EpubByteContentFileRef> imageContentRefs)
{
List<EpubMetadataMeta> metaItems = epubSchema.Package.Metadata.MetaItems;
if (metaItems == null || !metaItems.Any())
{
return null;
}
EpubMetadataMeta coverMetaItem = metaItems.FirstOrDefault(metaItem => metaItem.Name.CompareOrdinalIgnoreCase("cover"));
if (coverMetaItem == null)
{
return null;
}
if (String.IsNullOrEmpty(coverMetaItem.Content))
{
throw new Exception("Incorrect EPUB metadata: cover item content is missing.");
}
EpubByteContentFileRef coverImageContentFileRef;
EpubManifestItem coverManifestItem = epubSchema.Package.Manifest.FirstOrDefault(manifestItem => manifestItem.Id.CompareOrdinalIgnoreCase(coverMetaItem.Content));
if (null != coverManifestItem?.Href && imageContentRefs.TryGetValue(coverManifestItem.Href, out coverImageContentFileRef))
{
return coverImageContentFileRef;
}
// For non-standard ebooks, we try several other ways...
if (null != coverManifestItem) // we have found the item but there was no corresponding image ...
{
// some ebooks seem to contain more than one item with Id="cover"
// thus we test if there is a second item, and whether that is an image....
coverManifestItem = epubSchema.Package.Manifest.Where(manifestItem => manifestItem.Id.CompareOrdinalIgnoreCase(coverMetaItem.Content)).Skip(1).FirstOrDefault(); ;
if (null != coverManifestItem?.Href && imageContentRefs.TryGetValue(coverManifestItem.Href, out coverImageContentFileRef))
{
return coverImageContentFileRef;
}
}
// we have still not found the item
// 2019-08-20 Hotfix: if coverManifestItem is not found by its Id, then try it with its Href - some ebooks refer to the image directly!
coverManifestItem = epubSchema.Package.Manifest.FirstOrDefault(manifestItem => manifestItem.Href.CompareOrdinalIgnoreCase(coverMetaItem.Content));
if (null != coverManifestItem?.Href && imageContentRefs.TryGetValue(coverManifestItem.Href, out coverImageContentFileRef))
{
return coverImageContentFileRef;
}
// 2019-08-24 if it is still not found, then try to find an Id named cover
coverManifestItem = epubSchema.Package.Manifest.FirstOrDefault(manifestItem => manifestItem.Id.CompareOrdinalIgnoreCase(coverMetaItem.Name));
if (null != coverManifestItem?.Href && imageContentRefs.TryGetValue(coverManifestItem.Href, out coverImageContentFileRef))
{
return coverImageContentFileRef;
}
// 2019-08-24 if it is still not found, then try to find it in the guide
var guideItem = epubSchema.Package.Guide.FirstOrDefault(reference => reference.Title.CompareOrdinalIgnoreCase(coverMetaItem.Name));
if (null != guideItem?.Href && imageContentRefs.TryGetValue(guideItem.Href, out coverImageContentFileRef))
{
return coverImageContentFileRef;
}
throw new Exception($"Incorrect EPUB manifest: item with ID = \"{coverMetaItem.Content}\" is missing or no corresponding image was found.");
}
}
}
| 53.111111 | 178 | 0.65455 | [
"Unlicense"
] | FrankDrebin893/EpubReader | Source/VersOne.Epub/Readers/BookCoverReader.cs | 3,826 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class FocusCamera : MonoBehaviour
{
    // Target transform exposed in the Inspector.
    // NOTE(review): not read by this script — focus is passed directly to
    // MoveToTarget; confirm whether this field is still needed.
    public Transform targetPosition;

    // Fix: the empty Start()/Update() stubs were removed. Unity invokes these
    // event functions whenever they are declared, so empty bodies add per-frame
    // overhead without any effect.

    /// <summary>
    /// Points the orbiting main camera at the given object.
    /// </summary>
    /// <param name="focus">Object the camera should focus on.</param>
    public void MoveToTarget(GameObject focus)
    {
        // NOTE(review): GameObject.Find runs on every call; consider caching
        // the camera reference if this is invoked frequently.
        GameObject.Find("MainCamera").GetComponent<MouseOrbit>().ChangeFocus(focus.transform);
    }
}
| 19.52 | 94 | 0.659836 | [
"MIT"
] | ubcemergingmedialab/3DMetabolism-Unity | UnityProject2018/Assets/Scripts/FocusCamera.cs | 490 | C# |
using System.Linq;
using System.Threading.Tasks;
using PlayerTrade.Mail;
namespace TradeServer.Commands
{
public class CommandSendLetter : Command
{
public override string Name => "sendletter";
public override string Usage => "<target> <title> (body)";
public override async Task Execute(Caller caller, string[] args)
{
CommandUtility.AdminRequired(caller);
if (args.Length < 1)
throw new CommandUsageException(this);
string title = "";
if (args.Length >= 2)
title = args[1];
string body = "";
if (args.Length >= 3)
body = string.Join(" ", args.Skip(2).ToArray());
foreach (Client client in CommandUtility.GetClientsFromInput(args[0]))
{
client.SendPacket(new PacketMail
{
For = client.Player.Guid,
From = "Server",
Title = title,
Body = body
});
}
caller.Output("Letter sent");
}
}
}
| 27.166667 | 82 | 0.501315 | [
"MIT"
] | mitchfizz05/RimLink | Source/PlayerTrade/TradeServer/Commands/CommandSendLetter.cs | 1,143 | C# |
using Microsoft.EntityFrameworkCore;
using NadekoBot.Services.Database.Models;
using System;
using System.Collections.Generic;
using System.Linq;
namespace NadekoBot.Services.Database.Repositories
{
    /// <summary>
    /// Repository for per-guild configuration entities.
    /// </summary>
    public interface IGuildConfigRepository : IRepository<GuildConfig>
    {
        /// <summary>
        /// Gets the config for the given guild; <paramref name="includes"/> lets
        /// callers shape the query (e.g. eager-load related data).
        /// </summary>
        GuildConfig For(ulong guildId, Func<DbSet<GuildConfig>, IQueryable<GuildConfig>> includes = null);

        /// <summary>
        /// Gets the config for the given guild with log settings loaded.
        /// </summary>
        GuildConfig LogSettingsFor(ulong guildId);

        /// <summary>
        /// Returns configs using the old (pre-v2) permission system.
        /// </summary>
        IEnumerable<GuildConfig> OldPermissionsForAll();

        /// <summary>
        /// Returns configs for the guilds listed in <paramref name="availableGuilds"/>.
        /// </summary>
        IEnumerable<GuildConfig> GetAllGuildConfigs(List<long> availableGuilds);

        /// <summary>
        /// Returns the followed streams configured for the included guilds.
        /// </summary>
        IEnumerable<FollowedStream> GetAllFollowedStreams(List<long> included);

        /// <summary>
        /// Enables or disables cleverbot for the given guild.
        /// </summary>
        void SetCleverbotEnabled(ulong id, bool cleverbotEnabled);

        /// <summary>
        /// Returns configs with v2 permissions for the included guilds.
        /// </summary>
        IEnumerable<GuildConfig> Permissionsv2ForAll(List<long> include);

        /// <summary>
        /// Gets the config for the given guild with v2 permissions loaded.
        /// </summary>
        GuildConfig GcWithPermissionsv2For(ulong guildId);

        /// <summary>
        /// Gets the XP settings for the given guild.
        /// </summary>
        XpSettings XpSettingsFor(ulong guildId);
    }
}
| 41.409091 | 106 | 0.766191 | [
"MIT"
] | 2UNIEK/tunes-of-turmoil-radio-bot | src/NadekoBot/Services/Database/Repositories/IGuildConfigRepository.cs | 913 | C# |
using System.Security;
using static MicaWPF.Interop.InteropValues;
namespace MicaWPF.Interop;
internal class InteropMethods
{
    // P/Invoke declarations for the DWM, user32, gdi32 and ntdll functions used
    // by MicaWPF. Every import restricts the DLL search path to System32, which
    // mitigates DLL-planting attacks.

    // Sets a DWM window attribute; wrapped by SetWindowAttribute below.
    [DllImport(ExternDll.DwmApi)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    private static extern int DwmSetWindowAttribute(IntPtr hwnd, DWMWINDOWATTRIBUTE dwAttribute, ref int pvAttribute, int cbAttribute);

    // Extends the window frame into the client area by the given margins.
    [DllImport(ExternDll.DwmApi)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern int DwmExtendFrameIntoClientArea(IntPtr hwnd, ref MARGINS pMarInset);

    // Undocumented DWM export (imported by ordinal #127) that returns the
    // current colorization parameters; PreserveSig=false turns a failure
    // HRESULT into an exception.
    [DllImport(ExternDll.DwmApi, EntryPoint = "#127", PreserveSig = false, CharSet = CharSet.Unicode)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern void DwmGetColorizationParameters(out DWMCOLORIZATIONPARAMS dwParameters);

    // Reads the OS version directly from ntdll (not subject to the
    // compatibility shims that affect the managed Environment APIs).
    [SecurityCritical]
    [DllImport(ExternDll.NTdll, SetLastError = true, CharSet = CharSet.Unicode)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    internal static extern int RtlGetVersion(ref OSVERSIONINFOEX versionInfo);

    // Retrieves a device context for the given window handle.
    [DllImport(ExternDll.User32, CharSet = CharSet.Auto)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern IntPtr GetDC(IntPtr ptr);

    // Releases a device context obtained via GetDC.
    [DllImport(ExternDll.User32, SetLastError = true)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern int ReleaseDC(IntPtr window, IntPtr dc);

    // Queries a capability value of the device backing the DC.
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, CharSet = CharSet.Auto)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern int GetDeviceCaps(IntPtr hdc, int nIndex);

    // Overload taking a HandleRef so the owning object stays alive for the
    // duration of the native call.
    [SecurityCritical]
    [SuppressUnmanagedCodeSecurity]
    [DllImport(ExternDll.Gdi32, SetLastError = true, ExactSpelling = true, CharSet = CharSet.Auto)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern int GetDeviceCaps(HandleRef hDC, int nIndex);

    // Returns the monitor associated with the given window handle.
    [DllImport(ExternDll.User32)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern IntPtr MonitorFromWindow(IntPtr handle, uint flags);

    // Fills a MONITORINFO structure for the given monitor handle.
    [DllImport(ExternDll.User32)]
    [DefaultDllImportSearchPaths(DllImportSearchPath.System32)]
    public static extern bool GetMonitorInfo(IntPtr hMonitor, ref MONITORINFO lpmi);

    /// <summary>
    /// Managed convenience wrapper around <see cref="DwmSetWindowAttribute"/>
    /// for int-sized attribute values; returns the native HRESULT.
    /// </summary>
    public static int SetWindowAttribute(IntPtr hwnd, DWMWINDOWATTRIBUTE attribute, int parameter)
    {
        return DwmSetWindowAttribute(hwnd, attribute, ref parameter, Marshal.SizeOf<int>());
    }
}
| 45.160714 | 135 | 0.784895 | [
"MIT"
] | Simnico99/MicaWPF | src/MicaWPF/Interop/InteropMethods.cs | 2,531 | C# |
using System;
using System.Collections.Generic;
namespace IpLi.Core.Entities
{
public class Page<T>
{
public Page()
{
}
public Page(List<T> items,
Int32 totalCount)
{
Items = items;
TotalCount = totalCount;
}
public List<T> Items { get; set; }
public Int32 TotalCount { get; set; }
}
} | 18.545455 | 45 | 0.502451 | [
"MIT"
] | d2funlife/ipli | src/IpLi.Core/Entities/Page.cs | 408 | C# |
// ==========================================================================
// Squidex Headless CMS
// ==========================================================================
// Copyright (c) Squidex UG (haftungsbeschränkt)
// All rights reserved. Licensed under the MIT license.
// ==========================================================================
using System.Collections.Generic;
using System.Threading.Tasks;
using Squidex.Infrastructure;
using Squidex.Infrastructure.EventSourcing;
namespace Squidex.Domain.Apps.Entities.History
{
public abstract class HistoryEventsCreatorBase : IHistoryEventsCreator
{
private readonly Dictionary<string, string> texts = new Dictionary<string, string>();
private readonly TypeNameRegistry typeNameRegistry;
public IReadOnlyDictionary<string, string> Texts
{
get { return texts; }
}
protected HistoryEventsCreatorBase(TypeNameRegistry typeNameRegistry)
{
Guard.NotNull(typeNameRegistry, nameof(typeNameRegistry));
this.typeNameRegistry = typeNameRegistry;
}
protected void AddEventMessage<TEvent>(string message) where TEvent : IEvent
{
Guard.NotNullOrEmpty(message, nameof(message));
texts[typeNameRegistry.GetName<TEvent>()] = message;
}
protected void AddEventMessage(string type, string message)
{
Guard.NotNullOrEmpty(message, nameof(message));
texts[type] = message;
}
protected bool HasEventText(IEvent @event)
{
var message = typeNameRegistry.GetName(@event.GetType());
return texts.ContainsKey(message);
}
protected HistoryEventToStore ForEvent(IEvent @event, string channel)
{
var message = typeNameRegistry.GetName(@event.GetType());
return new HistoryEventToStore(channel, message);
}
public Task<HistoryEventToStore> CreateEventAsync(Envelope<IEvent> @event)
{
if (HasEventText(@event.Payload))
{
return CreateEventCoreAsync(@event);
}
return Task.FromResult<HistoryEventToStore>(null);
}
protected abstract Task<HistoryEventToStore> CreateEventCoreAsync(Envelope<IEvent> @event);
}
}
| 32.479452 | 99 | 0.587516 | [
"MIT"
] | BtrJay/squidex | src/Squidex.Domain.Apps.Entities/History/HistoryEventsCreatorBase.cs | 2,374 | C# |
using ProjetoModeloDDD.Domain.Entities;
using System.Data.Entity.ModelConfiguration;
namespace ProjetoModeloDDD.Infra.Data.EntityConfig
{
    /// <summary>
    /// Entity Framework mapping configuration for the <see cref="Produto"/> entity.
    /// </summary>
    public class ProdutoConfiguration : EntityTypeConfiguration<Produto>
    {
        public ProdutoConfiguration()
        {
            // Primary key.
            HasKey(p => p.ProdutoId);

            // Nome is mandatory and limited to 250 characters.
            Property(p => p.Nome).IsRequired().HasMaxLength(250);

            // Valor is mandatory.
            Property(p => p.Valor).IsRequired();

            // Every Produto must reference a Cliente.
            // NOTE(review): the FK property is named ClientId (not ClienteId) —
            // confirm this matches the property on the Produto entity.
            HasRequired(p => p.Cliente).WithMany().HasForeignKey(p => p.ClientId);
        }
    }
}
| 28.473684 | 83 | 0.626617 | [
"Unlicense"
] | swatboss93/ProjetoModeloDDD | ProjetoModeloDDD.Infra.Data/EntityConfig/ProdutoConfiguration.cs | 543 | C# |
using System;
namespace SimpleRepository
{
    /// <summary>
    /// Minimal contract for entities managed by the repository: a GUID identity.
    /// </summary>
    public interface IEntity
    {
        /// <summary>
        /// The unique identifier of the entity.
        /// </summary>
        Guid Id { get; set; }
    }
} | 13 | 29 | 0.598291 | [
"MIT-0"
] | kamabery/Slackers.Services.Bus | Samples/SimpleRepository/IEntity.cs | 119 | C# |
using System;
using System.Xml.Serialization;
namespace Alipay.AopSdk.Domain
{
/// <summary>
/// KbAdvertContentPasswordModify Data Structure.
/// </summary>
	[Serializable]
	public class KbAdvertContentPasswordModify : AopObject
	{
		/// <summary>
		/// Django ID of the password red packet background image.
		/// </summary>
		[XmlElement("background_img_id")]
		public string BackgroundImgId { get; set; }

		/// <summary>
		/// Brand name of the password red packet (must not exceed 20 characters).
		/// </summary>
		[XmlElement("brand_name")]
		public string BrandName { get; set; }

		/// <summary>
		/// Red packet password (at most 20 characters; only a mix of Chinese
		/// characters, English letters and digits; cannot be digits only).
		/// </summary>
		[XmlElement("password")]
		public string Password { get; set; }

		/// <summary>
		/// Django ID of the password red packet voucher logo.
		/// </summary>
		[XmlElement("voucher_logo_id")]
		public string VoucherLogoId { get; set; }
	}
}
| 25.27027 | 58 | 0.565775 | [
"MIT"
] | ArcherTrister/LeXun.Alipay.AopSdk | src/Alipay.AopSdk/Domain/KbAdvertContentPasswordModify.cs | 1,069 | C# |
#region Apache License Version 2.0
/*----------------------------------------------------------------
Copyright 2017 Yang Chen (cy2000@gmail.com)
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
Detail: https://github.com/etechi/ServiceFramework/blob/master/license.md
----------------------------------------------------------------*/
#endregion Apache License Version 2.0
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SF.Utils
{
public class CSVFile
{
public static IEnumerable<List<string>> ParseLines(string Content)
{
var re = new List<string>();
foreach (var value in ParseValues(Content))
{
if (value == null)
{
yield return re;
re.Clear();
}
else
re.Add(value);
}
if (re.Count > 0)
yield return re;
}
public static IEnumerable<string> ParseValues(string Content)
{
var i = 0;
var e = Content.Length;
var inString = false;
var strBuilder = new StringBuilder();
var valueReturned=false;
while (i < e)
{
if (inString)
{
var t = Content.IndexOf('"', i);
if (t != -1 && t <= e - 1 && Content[t + 1] == '"')
{
strBuilder.Append(Content, i, t - i + 1);
i = t + 2;
continue;
}
var te = t == -1 ? e : t;
if (strBuilder.Length > 0)
{
strBuilder.Append(Content, i, te - i);
yield return strBuilder.ToString();
strBuilder.Clear();
}
else
yield return Content.Substring(i, te - i);
inString = false;
i = te + 1;
valueReturned = true;
continue;
}
var c = Content[i];
switch (c)
{
case ',':
if (valueReturned)
valueReturned = false;
else
yield return "";
i++;
continue;
case '"':
i++;
inString = true;
continue;
default:
if (c == '\n')
{
i++;
if (valueReturned)
valueReturned = false;
else
yield return "";
yield return null;
continue;
}
if (char.IsWhiteSpace(c))
{
i++;
continue;
}
var t = Content.IndexOfAny(ValueStopChars, i);
var te = t == -1 ? e : t;
yield return Content.Substring(i, te - i);
i = te ;
valueReturned = true;
break;
}
}
yield return null;
}
static readonly char[] ValueStopChars = new[] { '\r', '\n', ',' };
string Content { get; }
int BodyBegin { get; }
public string[] Columns { get; }
IEnumerator<List<string>> _LineParser;
public CSVFile(string Content,bool WithHeaders)
{
_LineParser = ParseLines(Content).GetEnumerator();
if (WithHeaders)
{
if (_LineParser.MoveNext())
Columns = _LineParser.Current.ToArray();
}
}
public IEnumerable<IReadOnlyList<string>> Rows
{
get
{
while (_LineParser.MoveNext())
yield return _LineParser.Current;
}
}
static char[] EscChars { get; } = new[] { ' ', '\n', '\r' };
public static async Task Write(System.IO.TextWriter writer,IEnumerable<string> Row)
{
var first = true;
foreach (var v in Row)
{
if (first)
first = false;
else
await writer.WriteAsync(',');
if (v == null)
{ }
else if (v.IndexOfAny(EscChars) == -1)
await writer.WriteAsync(v);
else
{
await writer.WriteAsync('\"');
var i = 0;
for (; ; )
{
var t = v.IndexOf('"');
var l = t == -1 ? v.Length - i : t - i;
await writer.WriteAsync(v.Substring(i, l));
if (t == -1) break;
await writer.WriteAsync("\"\"");
i = t + 1;
}
await writer.WriteAsync('\"');
}
}
await writer.WriteLineAsync();
}
}
}
| 23.130435 | 90 | 0.566494 | [
"Apache-2.0"
] | etechi/ServiceFramework | Projects/Server/Utils/SF.Utils.CSVFile/CSVFile.cs | 4,258 | C# |
using System;
using BrawlLib.Imaging;
using System.Drawing;
using System.ComponentModel;
using System.Drawing.Drawing2D;
namespace System.Windows.Forms
{
    /// <summary>
    /// Dialog for picking a two-color linear gradient: a preview panel, start/end
    /// color editors, and OK/Cancel buttons.
    /// NOTE(review): this class mixes WinForms with WPF-only APIs
    /// (MouseLeftButtonDown, GetPosition, RenderTransform, TranslateTransform) and
    /// references controls (lblStartText, lblStartColor, lblEndText, lblEndColor)
    /// that are never declared here — it does not compile as-is; confirm intended
    /// framework before reworking.
    /// </summary>
    public class GradientDialog2 : Form
    {
        #region Designer
        // Designer-generated control fields and layout.
        private Button btnOkay;
        private Button btnCancel;
        private Panel startArrow;
        private Panel endArrow;
        private Panel panel3;
        private BufferedPanel pnlPreview;
        private void InitializeComponent()
        {
            this.btnOkay = new System.Windows.Forms.Button();
            this.btnCancel = new System.Windows.Forms.Button();
            this.pnlPreview = new System.Windows.Forms.BufferedPanel();
            this.startArrow = new System.Windows.Forms.Panel();
            this.endArrow = new System.Windows.Forms.Panel();
            this.panel3 = new System.Windows.Forms.Panel();
            this.panel3.SuspendLayout();
            this.SuspendLayout();
            //
            // btnOkay
            //
            this.btnOkay.Anchor = System.Windows.Forms.AnchorStyles.Bottom;
            this.btnOkay.Location = new System.Drawing.Point(193, 105);
            this.btnOkay.Name = "btnOkay";
            this.btnOkay.Size = new System.Drawing.Size(65, 23);
            this.btnOkay.TabIndex = 6;
            this.btnOkay.Text = "Okay";
            this.btnOkay.UseVisualStyleBackColor = true;
            this.btnOkay.Click += new System.EventHandler(this.btnOkay_Click);
            //
            // btnCancel
            //
            this.btnCancel.Anchor = System.Windows.Forms.AnchorStyles.Bottom;
            this.btnCancel.Location = new System.Drawing.Point(264, 105);
            this.btnCancel.Name = "btnCancel";
            this.btnCancel.Size = new System.Drawing.Size(65, 23);
            this.btnCancel.TabIndex = 7;
            this.btnCancel.Text = "Cancel";
            this.btnCancel.UseVisualStyleBackColor = true;
            this.btnCancel.Click += new System.EventHandler(this.btnCancel_Click);
            //
            // pnlPreview
            //
            this.pnlPreview.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
                        | System.Windows.Forms.AnchorStyles.Right)));
            this.pnlPreview.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
            this.pnlPreview.Location = new System.Drawing.Point(12, 12);
            this.pnlPreview.Name = "pnlPreview";
            this.pnlPreview.Size = new System.Drawing.Size(499, 61);
            this.pnlPreview.TabIndex = 8;
            this.pnlPreview.Paint += new System.Windows.Forms.PaintEventHandler(this.pnlPreview_Paint);
            //
            // startArrow
            //
            this.startArrow.Location = new System.Drawing.Point(0, 0);
            this.startArrow.Name = "startArrow";
            this.startArrow.Size = new System.Drawing.Size(24, 20);
            this.startArrow.TabIndex = 9;
            //
            // endArrow
            //
            this.endArrow.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
            this.endArrow.Location = new System.Drawing.Point(475, 0);
            this.endArrow.Name = "endArrow";
            this.endArrow.Size = new System.Drawing.Size(24, 20);
            this.endArrow.TabIndex = 10;
            //
            // panel3
            //
            this.panel3.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
                        | System.Windows.Forms.AnchorStyles.Right)));
            this.panel3.Controls.Add(this.startArrow);
            this.panel3.Controls.Add(this.endArrow);
            this.panel3.Location = new System.Drawing.Point(12, 79);
            this.panel3.Name = "panel3";
            this.panel3.Size = new System.Drawing.Size(499, 20);
            this.panel3.TabIndex = 11;
            this.panel3.DoubleClick += new System.EventHandler(this.panel3_DoubleClick);
            //
            // GradientDialog2
            //
            this.ClientSize = new System.Drawing.Size(523, 140);
            this.Controls.Add(this.panel3);
            this.Controls.Add(this.pnlPreview);
            this.Controls.Add(this.btnCancel);
            this.Controls.Add(this.btnOkay);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedToolWindow;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.MinimumSize = new System.Drawing.Size(164, 164);
            this.Name = "GradientDialog2";
            this.ShowIcon = false;
            this.ShowInTaskbar = false;
            this.Text = "Gradient Fill";
            this.panel3.ResumeLayout(false);
            this.ResumeLayout(false);
        }
        #endregion

        // Drag-tracking state for the arrow handles.
        protected bool isDragging;
        private Point clickPosition;

        // Gradient start color; setter refreshes labels and the preview brush.
        private Color _startColor;
        [Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public Color StartColor
        {
            get { return _startColor; }
            set { _startColor = value; UpdateStart(); }
        }

        // Gradient end color; setter refreshes labels and the preview brush.
        private Color _endColor;
        [Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public Color EndColor
        {
            get { return _endColor; }
            set { _endColor = value; UpdateEnd(); }
        }

        //private int _startIndex;
        //[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        //public int StartIndex
        //{
        //    get { return _startIndex; }
        //    set { numStart.Value = _startIndex = value; }
        //}

        //private int _endIndex;
        //[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        //public int EndIndex
        //{
        //    get { return _endIndex; }
        //    set { numStart.Value = _endIndex = value; }
        //}

        //private int _maxIndex;
        //[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        //public int MaxIndex
        //{
        //    get { return _maxIndex; }
        //    set { numStart.Maximum = numEnd.Maximum = _maxIndex = value; }
        //}

        private GoodColorDialog _dlgColor;
        private LinearGradientBrush _gradBrush;

        public GradientDialog2()
        {
            InitializeComponent();

            // NOTE(review): MouseLeftButtonDown/MouseLeftButtonUp are WPF events;
            // they do not exist on a WinForms Form — confirm intended framework.
            this.MouseLeftButtonDown += new MouseButtonEventHandler(Control_MouseLeftButtonDown);
            this.MouseLeftButtonUp += new MouseButtonEventHandler(Control_MouseLeftButtonUp);
            this.MouseMove += new MouseEventHandler(Control_MouseMove);

            _dlgColor = new GoodColorDialog();
            _gradBrush = new LinearGradientBrush(new Rectangle(0, 0, pnlPreview.ClientRectangle.Width, pnlPreview.ClientRectangle.Height), Color.White, Color.Black, LinearGradientMode.Horizontal);
        }

        // NOTE(review): lblStartText/lblStartColor are not declared in this class —
        // presumably missing designer fields; verify before use.
        private void UpdateStart()
        {
            lblStartText.Text = ((ARGBPixel)_startColor).ToString();
            lblStartColor.BackColor = Color.FromArgb(_startColor.R, _startColor.G, _startColor.B);
            UpdateBrush();
        }
        private void UpdateEnd()
        {
            lblEndText.Text = ((ARGBPixel)_endColor).ToString();
            lblEndColor.BackColor = Color.FromArgb(_endColor.R, _endColor.G, _endColor.B);
            UpdateBrush();
        }

        // Re-applies the current start/end colors to the brush and repaints the preview.
        private void UpdateBrush()
        {
            _gradBrush.LinearColors = new Color[] { _startColor, _endColor };
            pnlPreview.Invalidate();
        }

        private void numStart_ValueChanged(object sender, EventArgs e) {}// _startIndex = (int)numStart.Value; }
        private void lblStartText_Click(object sender, EventArgs e)
        {
            _dlgColor.Color = _startColor;
            if (_dlgColor.ShowDialog(this) == DialogResult.OK)
            {
                _startColor = _dlgColor.Color;
                UpdateStart();
            }
        }

        private void numEnd_ValueChanged(object sender, EventArgs e) {}// _endIndex = (int)numEnd.Value; }
        private void lblEndText_Click(object sender, EventArgs e)
        {
            _dlgColor.Color = _endColor;
            if (_dlgColor.ShowDialog(this) == DialogResult.OK)
            {
                _endColor = _dlgColor.Color;
                UpdateEnd();
            }
        }

        // Copies the start color to the end color.
        private void btnCopy_Click(object sender, EventArgs e)
        {
            _endColor = _startColor;
            UpdateEnd();
        }

        private void btnOkay_Click(object sender, EventArgs e)
        {
            DialogResult = DialogResult.OK;
            Close();
        }

        private void btnCancel_Click(object sender, EventArgs e)
        {
            DialogResult = DialogResult.Cancel;
            Close();
        }

        // Draws the checkerboard background, then the gradient on top.
        private void pnlPreview_Paint(object sender, PaintEventArgs e)
        {
            Graphics g = e.Graphics;
            g.FillRectangle(GoodPictureBox._brush, pnlPreview.ClientRectangle);
            g.FillRectangle(_gradBrush, pnlPreview.ClientRectangle);
        }

        // NOTE(review): the three handlers below use WPF types (MouseButtonEventArgs,
        // UserControl.CaptureMouse, GetPosition, TranslateTransform) inside a
        // WinForms class — confirm intended framework.
        private void Control_MouseLeftButtonDown(object sender, MouseButtonEventArgs e)
        {
            isDragging = true;
            var draggableControl = sender as UserControl;
            clickPosition = e.GetPosition(this);
            draggableControl.CaptureMouse();
        }

        private void Control_MouseLeftButtonUp(object sender, MouseButtonEventArgs e)
        {
            isDragging = false;
            var draggable = sender as UserControl;
            draggable.ReleaseMouseCapture();
        }

        private void Control_MouseMove(object sender, MouseEventArgs e)
        {
            var draggableControl = sender as UserControl;

            if (isDragging && draggableControl != null)
            {
                Point currentPosition = e.GetPosition(this.Parent as UIElement);

                var transform = draggableControl.RenderTransform as TranslateTransform;
                if (transform == null)
                {
                    transform = new TranslateTransform();
                    draggableControl.RenderTransform = transform;
                }

                transform.X = currentPosition.X - clickPosition.X;
                transform.Y = currentPosition.Y - clickPosition.Y;
            }
        }

        private void panel3_DoubleClick(object sender, EventArgs e)
        {

        }
    }
| 38.346429 | 196 | 0.592065 | [
"MIT"
] | Birdthulu/Legacy-Costume-Manager | brawltools/BrawlLib/System/Windows/Forms/GradientDialog2.cs | 10,739 | C# |
using System.Collections.Generic;
using FubuMVC.Core.Runtime;
namespace FubuMVC.Core.Resources.Media
{
    /// <summary>
    /// Writes a resource of type <typeparamref name="T"/> to an output stream
    /// for the mime types it advertises.
    /// </summary>
    public interface IMediaWriter<T>
    {
        // Mime types this writer can produce.
        IEnumerable<string> Mimetypes { get; }
        void Write(IValues<T> source, IOutputWriter writer);
        void Write(T source, IOutputWriter writer);
    }
} | 27.083333 | 61 | 0.676923 | [
"Apache-2.0"
] | uluhonolulu/fubumvc | src/FubuMVC.Core/Resources/Media/IMediaWriter.cs | 325 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
/// <summary>
/// Code-behind for the assignMentor user control. Page_Load is currently a
/// no-op placeholder.
/// </summary>
public partial class DesktopModules_HRMS_CaM_assignMentor : System.Web.UI.UserControl
{
    protected void Page_Load(object sender, EventArgs e)
    {
    }
}
"Apache-2.0",
"BSD-3-Clause"
] | deyeni2001/HRMSDEV60 | DesktopModules/HRMS/CaM/assignMentor.ascx.cs | 302 | C# |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using Lucene.Net.Documents;
using Lucene.Net.Support;
using NUnit.Framework;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using FieldSelector = Lucene.Net.Documents.FieldSelector;
using SetBasedFieldSelector = Lucene.Net.Documents.SetBasedFieldSelector;
using FieldOption = Lucene.Net.Index.IndexReader.FieldOption;
using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
using Directory = Lucene.Net.Store.Directory;
using FSDirectory = Lucene.Net.Store.FSDirectory;
using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException;
using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
using NoSuchDirectoryException = Lucene.Net.Store.NoSuchDirectoryException;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using FieldCache = Lucene.Net.Search.FieldCache;
using IndexSearcher = Lucene.Net.Search.IndexSearcher;
using ScoreDoc = Lucene.Net.Search.ScoreDoc;
using TermQuery = Lucene.Net.Search.TermQuery;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using _TestUtil = Lucene.Net.Util._TestUtil;
namespace Lucene.Net.Index
{
[TestFixture]
public class TestIndexReader:LuceneTestCase
{
#if !NETCOREAPP
/// <summary>Main for running test case by itself. </summary>
		[STAThread]
		public static void Main(System.String[] args)
		{
			// NOTE(review): body intentionally empty — the JUnit-era runner calls
			// below are kept only as porting references.
			// TestRunner.run(new TestSuite(typeof(TestIndexReader))); // {{Aroush-2.9}} how is this done in NUnit?
			// TestRunner.run (new TestIndexReader("testBasicDelete"));
			// TestRunner.run (new TestIndexReader("testDeleteReaderWriterConflict"));
			// TestRunner.run (new TestIndexReader("testDeleteReaderReaderConflict"));
			// TestRunner.run (new TestIndexReader("testFilesOpenClose"));
		}
#endif
		/// <summary>Creates the fixture with an explicit test name.</summary>
		public TestIndexReader(System.String name):base(name)
		{
		}
		/// <summary>Default constructor used by the NUnit runner.</summary>
		public TestIndexReader(): base("")
		{
		}
		/// <summary>
		/// Verifies that user data passed to IndexReader.Flush is stored on the
		/// commit, that the commit matches the current segments file, and that
		/// IsOptimized reflects subsequent index changes and Optimize().
		/// </summary>
		[Test]
		public virtual void TestCommitUserData()
		{
			RAMDirectory d = new MockRAMDirectory();

			System.Collections.Generic.IDictionary<string, string> commitUserData = new System.Collections.Generic.Dictionary<string,string>();
			commitUserData["foo"] = "fighters";

			// set up writer
			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
			writer.SetMaxBufferedDocs(2);
			for (int i = 0; i < 27; i++)
				AddDocumentWithFields(writer);
			writer.Close();

			IndexReader r = IndexReader.Open((Directory) d, false, null);
			r.DeleteDocument(5, null);
			// Flush with explicit commit user data.
			r.Flush(commitUserData, null);
			r.Close();

			SegmentInfos sis = new SegmentInfos();
			sis.Read(d, null);
			IndexReader r2 = IndexReader.Open((Directory) d, false, null);
			IndexCommit c = r.IndexCommit(null);
			Assert.AreEqual(c.UserData, commitUserData);

			Assert.AreEqual(sis.GetCurrentSegmentFileName(), c.SegmentsFileName);

			Assert.IsTrue(c.Equals(r.IndexCommit(null)));

			// Change the index
			writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
			writer.SetMaxBufferedDocs(2);
			for (int i = 0; i < 7; i++)
				AddDocumentWithFields(writer);
			writer.Close();

			IndexReader r3 = r2.Reopen(null);
			Assert.IsFalse(c.Equals(r3.IndexCommit(null)));
			Assert.IsFalse(r2.IndexCommit(null).IsOptimized);
			r3.Close();

			writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
			writer.Optimize(null);
			writer.Close();

			r3 = r2.Reopen(null);
			Assert.IsTrue(r3.IndexCommit(null).IsOptimized);
			r2.Close();
			r3.Close();
			d.Close();
		}
		/// <summary>
		/// Verifies that IndexReader.IsCurrent becomes false after the index is
		/// modified or re-created by another writer.
		/// </summary>
		[Test]
		public virtual void TestIsCurrent()
		{
			RAMDirectory d = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
			AddDocumentWithFields(writer);
			writer.Close();
			// set up reader:
			IndexReader reader = IndexReader.Open((Directory) d, false, null);
			Assert.IsTrue(reader.IsCurrent(null));
			// modify index by adding another document:
			writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
			AddDocumentWithFields(writer);
			writer.Close();
			Assert.IsFalse(reader.IsCurrent(null));
			// re-create index:
			writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
			AddDocumentWithFields(writer);
			writer.Close();
			Assert.IsFalse(reader.IsCurrent(null));
			reader.Close();
			d.Close();
		}
/// <summary> Tests the IndexReader.getFieldNames implementation</summary>
/// <throws> Exception on error </throws>
		[Test]
		public virtual void TestGetFieldNames()
		{
			RAMDirectory d = new MockRAMDirectory();
			// set up writer
			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
			AddDocumentWithFields(writer);
			writer.Close();
			// set up reader
			IndexReader reader = IndexReader.Open((Directory) d, true, null);
			System.Collections.Generic.ICollection<string> fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "keyword"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "text"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unindexed"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unstored"));
			reader.Close();
			// add more documents
			writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
			// want to get some more segments here
			for (int i = 0; i < 5 * writer.MergeFactor; i++)
			{
				AddDocumentWithFields(writer);
			}
			// new fields are in some different segments (we hope)
			for (int i = 0; i < 5 * writer.MergeFactor; i++)
			{
				AddDocumentWithDifferentFields(writer);
			}
			// new termvector fields
			for (int i = 0; i < 5 * writer.MergeFactor; i++)
			{
				AddDocumentWithTermVectorFields(writer);
			}
			writer.Close();
			// verify fields again
			reader = IndexReader.Open((Directory) d, true, null);
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
			Assert.AreEqual(13, fieldNames.Count); // the following fields
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "keyword"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "text"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unindexed"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unstored"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "keyword2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "text2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unindexed2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unstored2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvnot"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "termvector"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvposition"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvoffset"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvpositionoffset"));

			// verify that only indexed fields were returned
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.INDEXED);
			Assert.AreEqual(11, fieldNames.Count); // 6 original + the 5 termvector fields
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "keyword"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "text"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unstored"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "keyword2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "text2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unstored2"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvnot"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "termvector"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvposition"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvoffset"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvpositionoffset"));

			// verify that only unindexed fields were returned
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.UNINDEXED);
			Assert.AreEqual(2, fieldNames.Count); // the following fields
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unindexed"));
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "unindexed2"));

			// verify index term vector fields
			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR);
			Assert.AreEqual(1, fieldNames.Count); // 1 field has term vector only
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "termvector"));

			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
			Assert.AreEqual(1, fieldNames.Count); // 4 fields are indexed with term vectors
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvposition"));

			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET);
			Assert.AreEqual(1, fieldNames.Count); // 4 fields are indexed with term vectors
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvoffset"));

			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
			Assert.AreEqual(1, fieldNames.Count); // 4 fields are indexed with term vectors
			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "tvpositionoffset"));
			reader.Close();
			d.Close();
		}
		/// <summary>
		/// Indexes documents with each term-vector option and verifies that a
		/// FieldSortedTermVectorMapper collects entries for the 4 vectorized fields.
		/// </summary>
		[Test]
		public virtual void TestTermVectors()
		{
			RAMDirectory d = new MockRAMDirectory();
			// set up writer
			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
			// want to get some more segments here
			// new termvector fields
			for (int i = 0; i < 5 * writer.MergeFactor; i++)
			{
				Document doc = new Document();
				doc.Add(new Field("tvnot", "one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
				doc.Add(new Field("termvector", "one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
				doc.Add(new Field("tvoffset", "one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
				doc.Add(new Field("tvposition", "one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
				doc.Add(new Field("tvpositionoffset", "one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

				writer.AddDocument(doc, null);
			}
			writer.Close();
			IndexReader reader = IndexReader.Open((Directory) d, false, null);
			FieldSortedTermVectorMapper mapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
			reader.GetTermFreqVector(0, mapper, null);
			var map = mapper.FieldToTerms;
			Assert.IsTrue(map != null, "map is null and it shouldn't be");
			Assert.IsTrue(map.Count == 4, "map Size: " + map.Count + " is not: " + 4);
			var set_Renamed = map["termvector"];
			foreach (var item in set_Renamed)
			{
				TermVectorEntry entry = (TermVectorEntry)item;
				Assert.IsTrue(entry != null, "entry is null and it shouldn't be");
				System.Console.Out.WriteLine("Entry: " + entry);
			}
		}
private void AssertTermDocsCount(System.String msg, IndexReader reader, Term term, int expected)
{
TermDocs tdocs = null;
try
{
tdocs = reader.TermDocs(term, null);
Assert.IsNotNull(tdocs, msg + ", null TermDocs");
int count = 0;
while (tdocs.Next(null))
{
count++;
}
Assert.AreEqual(expected, count, msg + ", count mismatch");
}
finally
{
if (tdocs != null)
tdocs.Close();
}
}
		/// <summary>
		/// Deletes all documents matching a term through an IndexReader and checks
		/// that docFreq still reports 100 (deletes don't change docFreq) while
		/// TermDocs skips the deleted documents, including across reader reopens.
		/// </summary>
		[Test]
		public virtual void TestBasicDelete()
		{
			Directory dir = new MockRAMDirectory();

			IndexWriter writer = null;
			IndexReader reader = null;
			Term searchTerm = new Term("content", "aaa");

			// add 100 documents with term : aaa
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
			for (int i = 0; i < 100; i++)
			{
				AddDoc(writer, searchTerm.Text);
			}
			writer.Close();

			// OPEN READER AT THIS POINT - this should fix the view of the
			// index at the point of having 100 "aaa" documents and 0 "bbb"
			reader = IndexReader.Open(dir, false, null);
			Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "first docFreq");
			AssertTermDocsCount("first reader", reader, searchTerm, 100);
			reader.Close();

			// DELETE DOCUMENTS CONTAINING TERM: aaa
			int deleted = 0;
			reader = IndexReader.Open(dir, false, null);
			deleted = reader.DeleteDocuments(searchTerm, null);
			Assert.AreEqual(100, deleted, "deleted count");
			Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "deleted docFreq");
			AssertTermDocsCount("deleted termDocs", reader, searchTerm, 0);

			// open a 2nd reader to make sure first reader can
			// commit its changes (.del) while second reader
			// is open:
			IndexReader reader2 = IndexReader.Open(dir, false, null);
			reader.Close();

			// CREATE A NEW READER and re-test
			reader = IndexReader.Open(dir, false, null);
			Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "deleted docFreq");
			AssertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
			reader.Close();
			reader2.Close();
			dir.Close();
		}
		/// <summary>
		/// Stores a binary field and verifies its bytes survive retrieval via a
		/// plain Document load, a lazy FieldSelector load, and an optimize.
		/// </summary>
		[Test]
		public virtual void TestBinaryFields()
		{
			Directory dir = new RAMDirectory();
			byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED, null);

			for (int i = 0; i < 10; i++)
			{
				AddDoc(writer, "document number " + (i + 1));
				AddDocumentWithFields(writer);
				AddDocumentWithDifferentFields(writer);
				AddDocumentWithTermVectorFields(writer);
			}
			writer.Close();
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED, null);
			Document doc = new Document();
			doc.Add(new Field("bin1", bin, Field.Store.YES));
			doc.Add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
			writer.AddDocument(doc, null);
			writer.Close();
			IndexReader reader = IndexReader.Open(dir, false, null);
			doc = reader.Document(reader.MaxDoc - 1, null);
			Field[] fields = doc.GetFields("bin1");
			Assert.IsNotNull(fields);
			Assert.AreEqual(1, fields.Length);
			Field b1 = fields[0];
			Assert.IsTrue(b1.IsBinary);
			byte[] data1 = b1.GetBinaryValue(null);
			Assert.AreEqual(bin.Length, b1.BinaryLength);
			for (int i = 0; i < bin.Length; i++)
			{
				Assert.AreEqual(bin[i], data1[i + b1.BinaryOffset]);
			}
			// Re-read the same document lazily through a FieldSelector.
			var lazyFields = Support.Compatibility.SetFactory.CreateHashSet<string>();
			lazyFields.Add("bin1");
			FieldSelector sel = new SetBasedFieldSelector(Support.Compatibility.SetFactory.CreateHashSet<string>(), lazyFields);
			doc = reader.Document(reader.MaxDoc - 1, sel, null);
			IFieldable[] fieldables = doc.GetFieldables("bin1");
			Assert.IsNotNull(fieldables);
			Assert.AreEqual(1, fieldables.Length);
			IFieldable fb1 = fieldables[0];
			Assert.IsTrue(fb1.IsBinary);
			Assert.AreEqual(bin.Length, fb1.BinaryLength);
			data1 = fb1.GetBinaryValue(null);
			Assert.AreEqual(bin.Length, fb1.BinaryLength);
			for (int i = 0; i < bin.Length; i++)
			{
				Assert.AreEqual(bin[i], data1[i + fb1.BinaryOffset]);
			}
			reader.Close();
			// force optimize

			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED, null);
			writer.Optimize(null);
			writer.Close();
			reader = IndexReader.Open(dir, false, null);
			doc = reader.Document(reader.MaxDoc - 1, null);
			fields = doc.GetFields("bin1");
			Assert.IsNotNull(fields);
			Assert.AreEqual(1, fields.Length);
			b1 = fields[0];
			Assert.IsTrue(b1.IsBinary);
			data1 = b1.GetBinaryValue(null);
			Assert.AreEqual(bin.Length, b1.BinaryLength);
			for (int i = 0; i < bin.Length; i++)
			{
				Assert.AreEqual(bin[i], data1[i + b1.BinaryOffset]);
			}
			reader.Close();
		}
// Make sure attempts to make changes after reader is
// closed throws IOException:
		/// <summary>
		/// Verifies that mutating operations on a closed IndexReader throw
		/// AlreadyClosedException.
		/// </summary>
		[Test]
		public virtual void TestChangesAfterClose()
		{
			Directory dir = new RAMDirectory();

			IndexWriter writer = null;
			IndexReader reader = null;
			Term searchTerm = new Term("content", "aaa");

			// add 11 documents with term : aaa
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
			for (int i = 0; i < 11; i++)
			{
				AddDoc(writer, searchTerm.Text);
			}
			writer.Close();

			reader = IndexReader.Open(dir, false, null);

			// Close reader:
			reader.Close();

			// Then, try to make changes:
			Assert.Throws<AlreadyClosedException>(() => reader.DeleteDocument(4, null), "deleteDocument after close failed to throw IOException");
			Assert.Throws<AlreadyClosedException>(() => reader.SetNorm(5, "aaa", 2.0f, null), "setNorm after close failed to throw IOException");
			Assert.Throws<AlreadyClosedException>(() => reader.UndeleteAll(null), "undeleteAll after close failed to throw IOException");
		}
// Make sure we get lock obtain failed exception with 2 writers:
		/// <summary>
		/// While an IndexWriter holds the write lock, mutating operations on a
		/// reader over the same directory must fail with LockObtainFailedException.
		/// </summary>
		[Test]
		public virtual void TestLockObtainFailed()
		{
			Directory dir = new RAMDirectory();

			IndexWriter writer = null;
			IndexReader reader = null;
			Term searchTerm = new Term("content", "aaa");

			// add 11 documents with term : aaa
			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
			for (int i = 0; i < 11; i++)
			{
				AddDoc(writer, searchTerm.Text);
			}

			// Create reader:
			reader = IndexReader.Open(dir, false, null);

			// Try to make changes
			Assert.Throws<LockObtainFailedException>(() => reader.DeleteDocument(4, null), "deleteDocument should have hit LockObtainFailedException");
			Assert.Throws<LockObtainFailedException>(() =>reader.SetNorm(5, "aaa", 2.0f, null), "setNorm should have hit LockObtainFailedException");
			Assert.Throws<LockObtainFailedException>(() => reader.UndeleteAll(null), "undeleteAll should have hit LockObtainFailedException");
			writer.Close();
			reader.Close();
		}
// Setting norms and committing must work even while another reader is
// open against the same on-disk index; the write lock must be released
// by both Commit() and Close():
[Test]
public virtual void TestWritingNorms()
{
System.String tempDir = AppSettings.Get("tempDir", Path.GetTempPath());
if (tempDir == null)
throw new System.IO.IOException("tempDir undefined, cannot run test");
System.IO.DirectoryInfo indexDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(tempDir, "lucenetestnormwriter"));
Directory store = FSDirectory.Open(indexDir);
Term searchTerm = new Term("content", "aaa");
// add 1 documents with term : aaa
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDoc(indexWriter, searchTerm.Text);
indexWriter.Close();
// now open reader & set norm for doc 0
IndexReader indexReader = IndexReader.Open(store, false, null);
indexReader.SetNorm(0, "content", 2.0f, null);
// setting a norm acquires the write lock:
Assert.IsTrue(IndexWriter.IsLocked(store), "locked");
indexReader.Commit(null);
// committing releases the write lock:
Assert.IsFalse(IndexWriter.IsLocked(store), "not locked");
// open a 2nd reader:
IndexReader secondReader = IndexReader.Open(store, false, null);
// set norm again for doc 0
indexReader.SetNorm(0, "content", 3.0f, null);
Assert.IsTrue(IndexWriter.IsLocked(store), "locked");
indexReader.Close();
// closing the reader also releases the write lock:
Assert.IsFalse(IndexWriter.IsLocked(store), "not locked");
secondReader.Close();
store.Close();
RmDir(indexDir);
}
// Writing a second generation of norms (_0_2.s0) must remove the first
// generation norms file (_0_1.s0) — no stale norms files may remain:
[Test]
public virtual void TestWritingNormsNoReader()
{
Directory store = new MockRAMDirectory();
Term searchTerm = new Term("content", "aaa");
// add 1 documents with term : aaa
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
indexWriter.UseCompoundFile = false;
AddDoc(indexWriter, searchTerm.Text);
indexWriter.Close();
// now open reader & set norm for doc 0 (writes to
// _0_1.s0)
IndexReader indexReader = IndexReader.Open(store, false, null);
indexReader.SetNorm(0, "content", 2.0f, null);
indexReader.Close();
// now open reader again & set norm for doc 0 (writes to _0_2.s0)
indexReader = IndexReader.Open(store, false, null);
indexReader.SetNorm(0, "content", 2.0f, null);
indexReader.Close();
Assert.IsFalse(store.FileExists("_0_1.s0", null), "failed to remove first generation norms file on writing second generation");
store.Close();
}
[Test]
public virtual void TestDeleteReaderWriterConflictUnoptimized()
{
// Exercise the reader/writer stale-segment conflict without an optimize.
DeleteReaderWriterConflict(false);
}
//[Test]
//public virtual void TestOpenEmptyDirectory()
//{
// System.String dirName = "test.empty";
// System.IO.FileInfo fileDirName = new System.IO.FileInfo(dirName);
// bool tmpBool;
// if (System.IO.File.Exists(fileDirName.FullName))
// tmpBool = true;
// else
// tmpBool = System.IO.Directory.Exists(fileDirName.FullName);
// if (!tmpBool)
// {
// System.IO.Directory.CreateDirectory(fileDirName.FullName);
// }
// try
// {
// IndexReader.Open(fileDirName);
// Assert.Fail("opening IndexReader on empty directory failed to produce FileNotFoundException");
// }
// catch (System.IO.FileNotFoundException e)
// {
// // GOOD
// }
// RmDir(fileDirName);
//}
[Test]
public virtual void TestDeleteReaderWriterConflictOptimized()
{
// Exercise the reader/writer stale-segment conflict with an optimize.
DeleteReaderWriterConflict(true);
}
/// <summary>Scenario helper: a reader opened before a writer commits new
/// segments becomes stale; deletes through it must throw
/// StaleReaderException, and succeed only after reopening. When
/// <paramref name="optimize"/> is true the writer also optimizes, which
/// replaces all segments and likewise staleness the reader.</summary>
private void DeleteReaderWriterConflict(bool optimize)
{
//Directory dir = new RAMDirectory();
Directory dir = GetDirectory();
Term searchTerm = new Term("content", "aaa");
Term searchTerm2 = new Term("content", "bbb");
// add 100 documents with term : aaa
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
for (int i = 0; i < 100; i++)
{
AddDoc(writer, searchTerm.Text);
}
writer.Close();
// OPEN READER AT THIS POINT - this should fix the view of the
// index at the point of having 100 "aaa" documents and 0 "bbb"
IndexReader reader = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "first docFreq");
Assert.AreEqual(0, reader.DocFreq(searchTerm2, null), "first docFreq");
AssertTermDocsCount("first reader", reader, searchTerm, 100);
AssertTermDocsCount("first reader", reader, searchTerm2, 0);
// add 100 documents with term : bbb
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED, null);
for (int i = 0; i < 100; i++)
{
AddDoc(writer, searchTerm2.Text);
}
// REQUEST OPTIMIZATION
// This causes a new segment to become current for all subsequent
// searchers. Because of this, deletions made via a previously open
// reader, which would be applied to that reader's segment, are lost
// for subsequent searchers/readers
if (optimize)
writer.Optimize(null);
writer.Close();
// The reader should not see the new data
Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "first docFreq");
Assert.AreEqual(0, reader.DocFreq(searchTerm2, null), "first docFreq");
AssertTermDocsCount("first reader", reader, searchTerm, 100);
AssertTermDocsCount("first reader", reader, searchTerm2, 0);
// DELETE DOCUMENTS CONTAINING TERM: aaa
// NOTE: the reader was created when only "aaa" documents were in
int deleted = 0;
// The stale reader must refuse the delete rather than corrupt the index:
Assert.Throws<StaleReaderException>(() => reader.DeleteDocuments(searchTerm, null),
"Delete allowed on an index reader with stale segment information");
// Re-open index reader and try again. This time it should see
// the new data.
reader.Close();
reader = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "first docFreq");
Assert.AreEqual(100, reader.DocFreq(searchTerm2, null), "first docFreq");
AssertTermDocsCount("first reader", reader, searchTerm, 100);
AssertTermDocsCount("first reader", reader, searchTerm2, 100);
deleted = reader.DeleteDocuments(searchTerm, null);
Assert.AreEqual(100, deleted, "deleted count");
// docFreq still reports 100 after deletion: deletes do not update
// term statistics until segments are merged.
Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "deleted docFreq");
Assert.AreEqual(100, reader.DocFreq(searchTerm2, null), "deleted docFreq");
// ...but TermDocs enumeration does skip deleted docs:
AssertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
AssertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
reader.Close();
// CREATE A NEW READER and re-test
reader = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader.DocFreq(searchTerm, null), "deleted docFreq");
Assert.AreEqual(100, reader.DocFreq(searchTerm2, null), "deleted docFreq");
AssertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
AssertTermDocsCount("deleted termDocs", reader, searchTerm2, 100);
reader.Close();
}
// Opens an FSDirectory rooted at "<tempDir>/testIndex".
private Directory GetDirectory()
{
System.String root = AppSettings.Get("tempDir", Path.GetTempPath());
System.IO.DirectoryInfo indexPath = new System.IO.DirectoryInfo(System.IO.Path.Combine(root, "testIndex"));
return FSDirectory.Open(indexPath);
}
[Test]
public virtual void TestFilesOpenClose()
{
// Build an initial index on disk.
System.IO.DirectoryInfo dirFile = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", Path.GetTempPath()), "testIndex"));
Directory store = GetDirectory();
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDoc(indexWriter, "test");
indexWriter.Close();
store.Close();
// Erasing the data proves the writer released every file handle.
_TestUtil.RmDir(dirFile);
// Recreate the same data set.
store = GetDirectory();
indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDoc(indexWriter, "test");
indexWriter.Close();
store.Close();
// Open and close a reader over the existing index.
store = GetDirectory();
IndexReader indexReader = IndexReader.Open(store, false, null);
indexReader.Close();
store.Close();
// This delete fails if the reader left any file open.
_TestUtil.RmDir(dirFile);
}
/// <summary>Checks IndexReader.LastModified: stable across repeated reads
/// of an unchanged index, and non-decreasing after the index is modified.
/// Iteration 0 uses a RAM directory, iteration 1 an on-disk directory
/// (which is removed in the finally block).</summary>
[Test]
public virtual void TestLastModified()
{
System.IO.DirectoryInfo fileDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", Path.GetTempPath()), "testIndex"));
for (int i = 0; i < 2; i++)
{
try
{
Directory dir;
// i == 0: in-memory directory; i == 1: filesystem directory.
if (0 == i)
dir = new MockRAMDirectory();
else
dir = GetDirectory();
Assert.IsFalse(IndexReader.IndexExists(dir, null));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(writer);
Assert.IsTrue(IndexWriter.IsLocked(dir)); // writer open, so dir is locked
writer.Close();
Assert.IsTrue(IndexReader.IndexExists(dir, null));
IndexReader reader = IndexReader.Open(dir, false, null);
Assert.IsFalse(IndexWriter.IsLocked(dir)); // reader only, no lock
long version = IndexReader.LastModified(dir, null);
if (i == 1)
{
// On disk, two reads of an unchanged index must agree.
long version2 = IndexReader.LastModified(dir, null);
Assert.AreEqual(version, version2);
}
reader.Close();
// modify index and check version has been
// incremented:
// Sleep ~1 second (10,000,000 ticks) so filesystem timestamp
// granularity cannot mask the change.
System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1000));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(writer);
writer.Close();
reader = IndexReader.Open(dir, false, null);
Assert.IsTrue(version <= IndexReader.LastModified(dir, null), "old lastModified is " + version + "; new lastModified is " + IndexReader.LastModified(dir, null));
reader.Close();
dir.Close();
}
finally
{
// Clean up the on-disk directory from iteration 1.
if (i == 1)
_TestUtil.RmDir(fileDir);
}
}
}
// GetCurrentVersion must strictly increase when the index is modified.
[Test]
public virtual void TestVersion()
{
Directory store = new MockRAMDirectory();
Assert.IsFalse(IndexReader.IndexExists(store, null));
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(indexWriter);
// writer open, so dir is locked
Assert.IsTrue(IndexWriter.IsLocked(store));
indexWriter.Close();
Assert.IsTrue(IndexReader.IndexExists(store, null));
IndexReader indexReader = IndexReader.Open(store, false, null);
// reader only, no lock
Assert.IsFalse(IndexWriter.IsLocked(store));
long version = IndexReader.GetCurrentVersion(store, null);
indexReader.Close();
// modify index and check version has been
// incremented:
indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(indexWriter);
indexWriter.Close();
indexReader = IndexReader.Open(store, false, null);
Assert.IsTrue(version < IndexReader.GetCurrentVersion(store, null), "old version is " + version + "; new version is " + IndexReader.GetCurrentVersion(store, null));
indexReader.Close();
store.Close();
}
// Reader deletes are blocked while a writer holds the write lock, and
// succeed after a (never-do-this-in-production) forced Unlock.
[Test]
public virtual void TestLock()
{
Directory store = new MockRAMDirectory();
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(indexWriter);
indexWriter.Close();
// Re-open the writer so it holds the write lock again.
indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED, null);
IndexReader indexReader = IndexReader.Open(store, false, null);
Assert.Throws<LockObtainFailedException>(() => indexReader.DeleteDocument(0, null), "expected lock");
IndexWriter.Unlock(store); // this should not be done in the real world!
indexReader.DeleteDocument(0, null);
indexReader.Close();
indexWriter.Close();
store.Close();
}
// UndeleteAll before closing the reader must restore every deleted doc.
[Test]
public virtual void TestUndeleteAll()
{
Directory store = new MockRAMDirectory();
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(indexWriter);
AddDocumentWithFields(indexWriter);
indexWriter.Close();
// Delete both docs, then undelete within the same reader session.
IndexReader indexReader = IndexReader.Open(store, false, null);
indexReader.DeleteDocument(0, null);
indexReader.DeleteDocument(1, null);
indexReader.UndeleteAll(null);
indexReader.Close();
indexReader = IndexReader.Open(store, false, null);
Assert.AreEqual(2, indexReader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
indexReader.Close();
store.Close();
}
// UndeleteAll from a fresh reader must restore deletes committed by a
// previous, already-closed reader.
[Test]
public virtual void TestUndeleteAllAfterClose()
{
Directory store = new MockRAMDirectory();
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(indexWriter);
AddDocumentWithFields(indexWriter);
indexWriter.Close();
// Delete both docs and commit the deletes by closing the reader.
IndexReader indexReader = IndexReader.Open(store, false, null);
indexReader.DeleteDocument(0, null);
indexReader.DeleteDocument(1, null);
indexReader.Close();
indexReader = IndexReader.Open(store, false, null);
indexReader.UndeleteAll(null);
Assert.AreEqual(2, indexReader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
indexReader.Close();
store.Close();
}
// UndeleteAll must survive a close/reopen cycle: a third reader still
// sees both documents.
[Test]
public virtual void TestUndeleteAllAfterCloseThenReopen()
{
Directory store = new MockRAMDirectory();
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDocumentWithFields(indexWriter);
AddDocumentWithFields(indexWriter);
indexWriter.Close();
// Delete both docs and commit the deletes by closing the reader.
IndexReader indexReader = IndexReader.Open(store, false, null);
indexReader.DeleteDocument(0, null);
indexReader.DeleteDocument(1, null);
indexReader.Close();
// Undelete in a second reader, commit by closing it.
indexReader = IndexReader.Open(store, false, null);
indexReader.UndeleteAll(null);
indexReader.Close();
// A third reader must see the restored docs.
indexReader = IndexReader.Open(store, false, null);
Assert.AreEqual(2, indexReader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
indexReader.Close();
store.Close();
}
[Test]
public virtual void TestDeleteReaderReaderConflictUnoptimized()
{
// Exercise the reader/reader stale-segment conflict without an optimize.
DeleteReaderReaderConflict(false);
}
[Test]
public virtual void TestDeleteReaderReaderConflictOptimized()
{
// Exercise the reader/reader stale-segment conflict with an optimize.
DeleteReaderReaderConflict(true);
}
/// <summary> Make sure if reader tries to commit but hits disk
/// full that reader remains consistent and usable.
/// Strategy: clone a seed index into a MockRAMDirectory whose free
/// space and random-IOException rate are constrained, attempt
/// deletes/setNorms + close, then retry the same reader with unlimited
/// space; after every cycle verify no garbage files were left behind and
/// the index obeys transactional semantics (all or none of the changes).
/// </summary>
[Test]
public virtual void TestDiskFull()
{
bool debug = false;
Term searchTerm = new Term("content", "aaa");
// 157 docs initially; the delete loop below removes 13 of them.
int START_COUNT = 157;
int END_COUNT = 144;
// First build up a starting index:
RAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
for (int i = 0; i < 157; i++)
{
Document d = new Document();
d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
writer.AddDocument(d, null);
}
writer.Close();
long diskUsage = startDir.SizeInBytes();
// Start just barely above current usage so the first cycles hit disk full.
long diskFree = diskUsage + 100;
System.IO.IOException err = null;
bool done = false;
// Iterate w/ ever increasing free disk space:
while (!done)
{
MockRAMDirectory dir = new MockRAMDirectory(startDir);
// If IndexReader hits disk full, it can write to
// the same files again.
dir.SetPreventDoubleWrite(false);
IndexReader reader = IndexReader.Open((Directory) dir, false, null);
// For each disk size, first try to commit against
// dir that will hit random IOExceptions & disk
// full; after, give it infinite disk space & turn
// off random IOExceptions & retry w/ same reader:
bool success = false;
for (int x = 0; x < 2; x++)
{
double rate = 0.05;
double diskRatio = ((double) diskFree) / diskUsage;
long thisDiskFree;
System.String testName;
if (0 == x)
{
// Pass 0: constrained disk; lower the random-IOException
// rate as free space grows so later cycles can finish.
thisDiskFree = diskFree;
if (diskRatio >= 2.0)
{
rate /= 2;
}
if (diskRatio >= 4.0)
{
rate /= 2;
}
if (diskRatio >= 6.0)
{
rate = 0.0;
}
if (debug)
{
System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
}
testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
}
else
{
// Pass 1: unlimited disk, no random IOExceptions — the same
// reader must now be able to finish cleanly.
thisDiskFree = 0;
rate = 0.0;
if (debug)
{
System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
}
testName = "reader re-use after disk full";
}
dir.SetMaxSizeInBytes(thisDiskFree);
dir.SetRandomIOExceptionRate(rate, diskFree);
try
{
if (0 == x)
{
// Delete + setNorm every 12th doc (13 docs total).
int docId = 12;
for (int i = 0; i < 13; i++)
{
reader.DeleteDocument(docId, null);
reader.SetNorm(docId, "contents", (float) 2.0, null);
docId += 12;
}
}
reader.Close();
success = true;
if (0 == x)
{
done = true;
}
}
catch (System.IO.IOException e)
{
if (debug)
{
System.Console.Out.WriteLine(" hit IOException: " + e);
System.Console.Out.WriteLine(e.StackTrace);
}
err = e;
// Pass 1 has unlimited disk: any IOException here is a failure.
if (1 == x)
{
System.Console.Error.WriteLine(e.StackTrace);
Assert.Fail(testName + " hit IOException after disk space was freed up");
}
}
// Whether we succeeded or failed, check that all
// un-referenced files were in fact deleted (ie,
// we did not create garbage). Just create a
// new IndexFileDeleter, have it delete
// unreferenced files, then verify that in fact
// no files were deleted:
System.String[] startFiles = dir.ListAll(null);
SegmentInfos infos = new SegmentInfos();
infos.Read(dir, null);
new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null,null, null);
System.String[] endFiles = dir.ListAll(null);
System.Array.Sort(startFiles);
System.Array.Sort(endFiles);
//for(int i=0;i<startFiles.length;i++) {
// System.out.println(" startFiles: " + i + ": " + startFiles[i]);
//}
if (!CollectionsHelper.Equals(startFiles, endFiles))
{
System.String successStr;
if (success)
{
successStr = "success";
}
else
{
successStr = "IOException";
System.Console.Error.WriteLine(err.StackTrace);
}
Assert.Fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n " + ArrayToString(startFiles) + "\n after delete:\n " + ArrayToString(endFiles));
}
// Finally, verify index is not corrupt, and, if
// we succeeded, we see all docs changed, and if
// we failed, we see either all docs or no docs
// changed (transactional semantics):
IndexReader newReader = null;
try
{
newReader = IndexReader.Open((Directory) dir, false, null);
}
catch (System.IO.IOException e)
{
System.Console.Error.WriteLine(e.StackTrace);
Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
}
/*
int result = newReader.docFreq(searchTerm);
if (success) {
if (result != END_COUNT) {
fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
}
} else {
// On hitting exception we still may have added
// all docs:
if (result != START_COUNT && result != END_COUNT) {
err.printStackTrace();
fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
}
}
*/
IndexSearcher searcher = new IndexSearcher(newReader);
ScoreDoc[] hits = null;
try
{
hits = searcher.Search(new TermQuery(searchTerm), null, 1000, null).ScoreDocs;
}
catch (System.IO.IOException e)
{
System.Console.Error.WriteLine(e.StackTrace);
Assert.Fail(testName + ": exception when searching: " + e);
}
int result2 = hits.Length;
if (success)
{
// All 13 deletes must be visible.
if (result2 != END_COUNT)
{
Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
}
}
else
{
// On hitting exception we still may have added
// all docs:
if (result2 != START_COUNT && result2 != END_COUNT)
{
System.Console.Error.WriteLine(err.StackTrace);
Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
}
}
searcher.Close();
newReader.Close();
if (result2 == END_COUNT)
{
break;
}
}
dir.Close();
// Try again with 10 more bytes of free space:
diskFree += 10;
}
startDir.Close();
}
/// <summary>LUCENE-140 regression: deleting an out-of-bounds docId that
/// still falls within the final byte of the deletions BitVector must hit
/// an exception and must not corrupt the index (a later Optimize would
/// otherwise fail with "docs out of order").</summary>
[Test]
public virtual void TestDocsOutOfOrderJIRA140()
{
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
for (int i = 0; i < 11; i++)
{
AddDoc(writer, "aaa");
}
writer.Close();
IndexReader reader = IndexReader.Open(dir, false, null);
// Try to delete an invalid docId, yet, within range
// of the final bits of the BitVector:
bool gotException = false;
try
{
reader.DeleteDocument(11, null);
}
// Exceptionless catch clause: the exception object itself is not
// needed (the original declared an unused variable, warning CS0168).
catch (System.IndexOutOfRangeException)
{
gotException = true;
}
reader.Close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED, null);
// We must add more docs to get a new segment written
for (int i = 0; i < 11; i++)
{
AddDoc(writer, "aaa");
}
// Without the fix for LUCENE-140 this call will
// [incorrectly] hit a "docs out of order"
// IllegalStateException because above out-of-bounds
// deleteDocument corrupted the index:
writer.Optimize(null);
writer.Close();
if (!gotException)
{
Assert.Fail("delete of out-of-bounds doc number failed to hit exception");
}
dir.Close();
}
// LUCENE-768 regression: when DeleteDocument/SetNorm throws for an
// invalid doc number, closing the reader must still release the write lock.
[Test]
public virtual void TestExceptionReleaseWriteLockJIRA768()
{
Directory store = new MockRAMDirectory();
IndexWriter indexWriter = new IndexWriter(store, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
AddDoc(indexWriter, "aaa");
indexWriter.Close();
IndexReader indexReader = IndexReader.Open(store, false, null);
Assert.Throws<IndexOutOfRangeException>(() => indexReader.DeleteDocument(1, null),
"did not hit exception when deleting an invalid doc number");
indexReader.Close();
Assert.IsFalse(IndexWriter.IsLocked(store), "write lock is still held after close");
indexReader = IndexReader.Open(store, false, null);
Assert.Throws<IndexOutOfRangeException>(() => indexReader.SetNorm(1, "content", 2.0f, null),
"did not hit exception when calling setNorm on an invalid doc number");
indexReader.Close();
Assert.IsFalse(IndexWriter.IsLocked(store), "write lock is still held after close");
store.Close();
}
/// <summary>Joins the given strings with "\n " separators for use in
/// failure messages. Returns "" for an empty array.</summary>
/// <param name="l">array of strings to join; must be non-null</param>
private System.String ArrayToString(System.String[] l)
{
// string.Join replaces the original hand-rolled O(n^2) concatenation
// loop with a single linear-time, single-allocation call.
return string.Join("\n ", l);
}
/// <summary>Opening a reader on a non-existent directory must throw
/// NoSuchDirectoryException both before and after the underlying path is
/// removed (and never an NPE).</summary>
[Test]
public virtual void TestOpenReaderAfterDelete()
{
System.IO.DirectoryInfo dirFile = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", Path.GetTempPath()), "deletetest"));
Directory dir = FSDirectory.Open(dirFile);
Assert.Throws<NoSuchDirectoryException>(() => IndexReader.Open(dir, false, null), "expected FileNotFoundException");
// Remove the underlying path, whichever form it exists in. (This
// replaces dead tmpBool/generatedAux residue from the automated
// Java-to-C# conversion; the deletes themselves are unchanged.)
if (System.IO.File.Exists(dirFile.FullName))
{
System.IO.File.Delete(dirFile.FullName);
}
else if (System.IO.Directory.Exists(dirFile.FullName))
{
System.IO.Directory.Delete(dirFile.FullName);
}
// Make sure we still get a CorruptIndexException (not NPE):
Assert.Throws<NoSuchDirectoryException>(() => IndexReader.Open(dir, false, null), "expected FileNotFoundException");
dir.Close();
}
/// <summary>Scenario helper: two readers open on the same index; the
/// second deletes docs and closes (committing its deletes), which makes
/// the first reader stale. The stale reader must refuse further deletes
/// with StaleReaderException until it is reopened. When
/// <paramref name="optimize"/> is true the index is optimized to a single
/// segment before the readers are opened.</summary>
private void DeleteReaderReaderConflict(bool optimize)
{
Directory dir = GetDirectory();
Term searchTerm1 = new Term("content", "aaa");
Term searchTerm2 = new Term("content", "bbb");
Term searchTerm3 = new Term("content", "ccc");
// add 100 documents with term : aaa
// add 100 documents with term : bbb
// add 100 documents with term : ccc
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
for (int i = 0; i < 100; i++)
{
AddDoc(writer, searchTerm1.Text);
AddDoc(writer, searchTerm2.Text);
AddDoc(writer, searchTerm3.Text);
}
if (optimize)
writer.Optimize(null);
writer.Close();
// OPEN TWO READERS
// Both readers get segment info as exists at this time
IndexReader reader1 = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader1.DocFreq(searchTerm1, null), "first opened");
Assert.AreEqual(100, reader1.DocFreq(searchTerm2, null), "first opened");
Assert.AreEqual(100, reader1.DocFreq(searchTerm3, null), "first opened");
AssertTermDocsCount("first opened", reader1, searchTerm1, 100);
AssertTermDocsCount("first opened", reader1, searchTerm2, 100);
AssertTermDocsCount("first opened", reader1, searchTerm3, 100);
IndexReader reader2 = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader2.DocFreq(searchTerm1, null), "first opened");
Assert.AreEqual(100, reader2.DocFreq(searchTerm2, null), "first opened");
Assert.AreEqual(100, reader2.DocFreq(searchTerm3, null), "first opened");
AssertTermDocsCount("first opened", reader2, searchTerm1, 100);
AssertTermDocsCount("first opened", reader2, searchTerm2, 100);
AssertTermDocsCount("first opened", reader2, searchTerm3, 100);
// DELETE DOCS FROM READER 2 and CLOSE IT
// delete documents containing term: aaa
// when the reader is closed, the segment info is updated and
// the first reader is now stale
reader2.DeleteDocuments(searchTerm1, null);
// docFreq still reports 100: deletes do not update term statistics,
// but TermDocs enumeration does skip the deleted docs.
Assert.AreEqual(100, reader2.DocFreq(searchTerm1, null), "after delete 1");
Assert.AreEqual(100, reader2.DocFreq(searchTerm2, null), "after delete 1");
Assert.AreEqual(100, reader2.DocFreq(searchTerm3, null), "after delete 1");
AssertTermDocsCount("after delete 1", reader2, searchTerm1, 0);
AssertTermDocsCount("after delete 1", reader2, searchTerm2, 100);
AssertTermDocsCount("after delete 1", reader2, searchTerm3, 100);
reader2.Close();
// Make sure reader 1 is unchanged since it was open earlier
Assert.AreEqual(100, reader1.DocFreq(searchTerm1, null), "after delete 1");
Assert.AreEqual(100, reader1.DocFreq(searchTerm2, null), "after delete 1");
Assert.AreEqual(100, reader1.DocFreq(searchTerm3, null), "after delete 1");
AssertTermDocsCount("after delete 1", reader1, searchTerm1, 100);
AssertTermDocsCount("after delete 1", reader1, searchTerm2, 100);
AssertTermDocsCount("after delete 1", reader1, searchTerm3, 100);
// ATTEMPT TO DELETE FROM STALE READER
// delete documents containing term: bbb
Assert.Throws<StaleReaderException>(() => reader1.DeleteDocuments(searchTerm2, null),
"Delete allowed from a stale index reader");
// RECREATE READER AND TRY AGAIN
reader1.Close();
reader1 = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader1.DocFreq(searchTerm1, null), "reopened");
Assert.AreEqual(100, reader1.DocFreq(searchTerm2, null), "reopened");
Assert.AreEqual(100, reader1.DocFreq(searchTerm3, null), "reopened");
AssertTermDocsCount("reopened", reader1, searchTerm1, 0);
AssertTermDocsCount("reopened", reader1, searchTerm2, 100);
AssertTermDocsCount("reopened", reader1, searchTerm3, 100);
reader1.DeleteDocuments(searchTerm2, null);
Assert.AreEqual(100, reader1.DocFreq(searchTerm1, null), "deleted 2");
Assert.AreEqual(100, reader1.DocFreq(searchTerm2, null), "deleted 2");
Assert.AreEqual(100, reader1.DocFreq(searchTerm3, null), "deleted 2");
AssertTermDocsCount("deleted 2", reader1, searchTerm1, 0);
AssertTermDocsCount("deleted 2", reader1, searchTerm2, 0);
AssertTermDocsCount("deleted 2", reader1, searchTerm3, 100);
reader1.Close();
// Open another reader to confirm that everything is deleted
reader2 = IndexReader.Open(dir, false, null);
Assert.AreEqual(100, reader2.DocFreq(searchTerm1, null), "reopened 2");
Assert.AreEqual(100, reader2.DocFreq(searchTerm2, null), "reopened 2");
Assert.AreEqual(100, reader2.DocFreq(searchTerm3, null), "reopened 2");
AssertTermDocsCount("reopened 2", reader2, searchTerm1, 0);
AssertTermDocsCount("reopened 2", reader2, searchTerm2, 0);
AssertTermDocsCount("reopened 2", reader2, searchTerm3, 100);
reader2.Close();
dir.Close();
}
// Adds one document exercising the four store/index combinations under
// the well-known field names "keyword", "text", "unindexed", "unstored".
private void AddDocumentWithFields(IndexWriter writer)
{
Document document = new Document();
document.Add(new Field("keyword", "test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
document.Add(new Field("text", "test1", Field.Store.YES, Field.Index.ANALYZED));
document.Add(new Field("unindexed", "test1", Field.Store.YES, Field.Index.NO));
document.Add(new Field("unstored", "test1", Field.Store.NO, Field.Index.ANALYZED));
writer.AddDocument(document, null);
}
// Same field layout as AddDocumentWithFields but under "-2"-suffixed
// field names, so the two docs contribute disjoint field sets.
private void AddDocumentWithDifferentFields(IndexWriter writer)
{
Document document = new Document();
document.Add(new Field("keyword2", "test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
document.Add(new Field("text2", "test1", Field.Store.YES, Field.Index.ANALYZED));
document.Add(new Field("unindexed2", "test1", Field.Store.YES, Field.Index.NO));
document.Add(new Field("unstored2", "test1", Field.Store.NO, Field.Index.ANALYZED));
writer.AddDocument(document, null);
}
// Adds one document covering every Field.TermVector mode, one mode per
// field, with the field name matching the mode.
private void AddDocumentWithTermVectorFields(IndexWriter writer)
{
Document document = new Document();
document.Add(new Field("tvnot", "tvnot", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
document.Add(new Field("termvector", "termvector", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
document.Add(new Field("tvoffset", "tvoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));
document.Add(new Field("tvposition", "tvposition", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
document.Add(new Field("tvpositionoffset", "tvpositionoffset", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
writer.AddDocument(document, null);
}
// Adds a single document whose "content" field holds the given value.
private void AddDoc(IndexWriter writer, System.String value_Renamed)
{
Document document = new Document();
document.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.ANALYZED));
writer.AddDocument(document, null);
}
/// <summary>Removes a test directory: deletes each entry (file or
/// subdirectory, whichever form it exists in), then the directory path
/// itself. Entries are enumerated via FileSupport.GetFiles, matching the
/// original behavior; silently does nothing for paths that do not exist.</summary>
/// <param name="dir">directory to remove</param>
private void RmDir(System.IO.DirectoryInfo dir)
{
// The original carried dead tmpBool/generatedAux variables from the
// automated Java-to-C# conversion; the delete logic is unchanged.
System.IO.FileInfo[] files = FileSupport.GetFiles(dir);
for (int i = 0; i < files.Length; i++)
{
System.String path = files[i].FullName;
if (System.IO.File.Exists(path))
{
System.IO.File.Delete(path);
}
else if (System.IO.Directory.Exists(path))
{
System.IO.Directory.Delete(path);
}
}
if (System.IO.File.Exists(dir.FullName))
{
System.IO.File.Delete(dir.FullName);
}
else if (System.IO.Directory.Exists(dir.FullName))
{
System.IO.Directory.Delete(dir.FullName);
}
}
/// <summary>Asserts that two IndexReaders expose identical indexes:
/// doc counts, field names, norms, deletions, stored fields, and the
/// term dictionary with full postings/positions.</summary>
/// <param name="index1">first reader</param>
/// <param name="index2">second reader, compared against the first</param>
public static void AssertIndexEquals(IndexReader index1, IndexReader index2)
{
Assert.AreEqual(index1.NumDocs(), index2.NumDocs(), "IndexReaders have different values for numDocs.");
Assert.AreEqual(index1.MaxDoc, index2.MaxDoc, "IndexReaders have different values for maxDoc.");
Assert.AreEqual(index1.HasDeletions, index2.HasDeletions, "Only one IndexReader has deletions.");
Assert.AreEqual(index1.IsOptimized(), index2.IsOptimized(), "Only one index is optimized.");
// check field names
System.Collections.Generic.ICollection<string> fieldsNames1 = index1.GetFieldNames(FieldOption.ALL);
// BUGFIX: the original read index1.GetFieldNames twice, so index2's
// field names were never actually compared.
System.Collections.Generic.ICollection<string> fieldsNames2 = index2.GetFieldNames(FieldOption.ALL);
System.Collections.Generic.ICollection<IFieldable> fields1 = null;
System.Collections.Generic.ICollection<IFieldable> fields2 = null;
Assert.AreEqual(fieldsNames1.Count, fieldsNames2.Count, "IndexReaders have different numbers of fields.");
System.Collections.IEnumerator it1 = fieldsNames1.GetEnumerator();
System.Collections.IEnumerator it2 = fieldsNames2.GetEnumerator();
while (it1.MoveNext() && it2.MoveNext())
{
Assert.AreEqual((System.String) it1.Current, (System.String) it2.Current, "Different field names.");
}
// check norms: byte-for-byte equal when both present, or both absent
it1 = fieldsNames1.GetEnumerator();
while (it1.MoveNext())
{
System.String curField = (System.String) it1.Current;
byte[] norms1 = index1.Norms(curField, null);
byte[] norms2 = index2.Norms(curField, null);
if (norms1 != null && norms2 != null)
{
Assert.AreEqual(norms1.Length, norms2.Length);
for (int i = 0; i < norms1.Length; i++)
{
Assert.AreEqual(norms1[i], norms2[i], "Norm different for doc " + i + " and field '" + curField + "'.");
}
}
else
{
// Both must be null (no norms for this field in either index).
Assert.AreSame(norms1, norms2);
}
}
// check deletions
for (int i = 0; i < index1.MaxDoc; i++)
{
Assert.AreEqual(index1.IsDeleted(i), index2.IsDeleted(i), "Doc " + i + " only deleted in one index.");
}
// check stored fields of all live (non-deleted) docs
for (int i = 0; i < index1.MaxDoc; i++)
{
if (!index1.IsDeleted(i))
{
Document doc1 = index1.Document(i, null);
Document doc2 = index2.Document(i, null);
fields1 = doc1.GetFields();
fields2 = doc2.GetFields();
Assert.AreEqual(fields1.Count, fields2.Count, "Different numbers of fields for doc " + i + ".");
it1 = fields1.GetEnumerator();
it2 = fields2.GetEnumerator();
while (it1.MoveNext() && it2.MoveNext())
{
Field curField1 = (Field) it1.Current;
Field curField2 = (Field) it2.Current;
Assert.AreEqual(curField1.Name, curField2.Name, "Different fields names for doc " + i + ".");
Assert.AreEqual(curField1.StringValue(null), curField2.StringValue(null), "Different field values for doc " + i + ".");
}
}
}
// check dictionary and posting lists
TermEnum enum1 = index1.Terms(null);
TermEnum enum2 = index2.Terms(null);
TermPositions tp1 = index1.TermPositions(null);
TermPositions tp2 = index2.TermPositions(null);
while (enum1.Next(null))
{
Assert.IsTrue(enum2.Next(null));
Assert.AreEqual(enum1.Term, enum2.Term, "Different term in dictionary.");
tp1.Seek(enum1.Term, null);
tp2.Seek(enum1.Term, null);
while (tp1.Next(null))
{
Assert.IsTrue(tp2.Next(null));
Assert.AreEqual(tp1.Doc, tp2.Doc, "Different doc id in postinglist of term " + enum1.Term + ".");
Assert.AreEqual(tp1.Freq, tp2.Freq, "Different term frequence in postinglist of term " + enum1.Term + ".");
for (int i = 0; i < tp1.Freq; i++)
{
Assert.AreEqual(tp1.NextPosition(null), tp2.NextPosition(null), "Different positions in postinglist of term " + enum1.Term + ".");
}
}
}
}
[Test]
public virtual void TestGetIndexCommit()
{
    // Verifies that IndexReader.IndexCommit reflects the commit the reader was
    // opened on, changes after the index is modified and the reader reopened,
    // and reports IsOptimized only after an Optimize().
    RAMDirectory d = new MockRAMDirectory();
    // set up writer
    IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
    writer.SetMaxBufferedDocs(2);
    for (int i = 0; i < 27; i++)
        AddDocumentWithFields(writer);
    writer.Close();
    SegmentInfos sis = new SegmentInfos();
    sis.Read(d, null);
    IndexReader r = IndexReader.Open((Directory) d, false, null);
    IndexCommit c = r.IndexCommit(null);
    // Reader's commit must match the current segments file on disk.
    Assert.AreEqual(sis.GetCurrentSegmentFileName(), c.SegmentsFileName);
    Assert.IsTrue(c.Equals(r.IndexCommit(null)));
    // Change the index
    writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
    writer.SetMaxBufferedDocs(2);
    for (int i = 0; i < 7; i++)
        AddDocumentWithFields(writer);
    writer.Close();
    // After new docs were committed, a reopened reader must see a different commit.
    IndexReader r2 = r.Reopen(null);
    Assert.IsFalse(c.Equals(r2.IndexCommit(null)));
    Assert.IsFalse(r2.IndexCommit(null).IsOptimized);
    r2.Close();
    writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
    writer.Optimize(null);
    writer.Close();
    // A fully optimized index must report IsOptimized on its commit.
    r2 = r.Reopen(null);
    Assert.IsTrue(r2.IndexCommit(null).IsOptimized);
    r.Close();
    r2.Close();
    d.Close();
}
[Test]
public virtual void TestReadOnly()
{
    // Verifies that a read-only reader -- and every reader produced from it
    // via Reopen -- rejects modification attempts, and that a read-only
    // reader never holds the write lock.
    RAMDirectory d = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
    AddDocumentWithFields(writer);
    writer.Commit(null);
    AddDocumentWithFields(writer);
    writer.Close();
    IndexReader r = IndexReader.Open((Directory) d, true, null);
    Assert.Throws<System.NotSupportedException>(() => r.DeleteDocument(0, null));
    writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
    AddDocumentWithFields(writer);
    writer.Close();
    // Make sure reopen is still readonly:
    IndexReader r2 = r.Reopen(null);
    r.Close();
    Assert.IsFalse(r == r2);
    Assert.Throws<System.NotSupportedException>(() => r2.DeleteDocument(0, null));
    writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
    writer.Optimize(null);
    writer.Close();
    // Make sure reopen to a single segment is still readonly:
    IndexReader r3 = r2.Reopen(null);
    r2.Close();
    // BUGFIX: this previously re-asserted "r == r2" (copy/paste from the
    // earlier check), which never exercised the r2 -> r3 reopen. The index
    // changed (Optimize), so the reopen must yield a new reader instance.
    Assert.IsFalse(r2 == r3);
    Assert.Throws<System.NotSupportedException>(() => r3.DeleteDocument(0, null));
    // Make sure write lock isn't held
    writer = new IndexWriter(d, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED, null);
    writer.Close();
    r3.Close();
}
// LUCENE-1474
[Test]
public virtual void TestIndexReader_Rename()
{
    // Index three docs, delete through a writable reader with an intermediate
    // Flush, then verify a fresh read-only open still succeeds.
    Directory directory = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(directory, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED, null);
    foreach (string id in new[] { "a", "b", "c" })
    {
        indexWriter.AddDocument(CreateDocument(id), null);
    }
    indexWriter.Close();
    IndexReader modifyingReader = IndexReader.Open(directory, false, null);
    modifyingReader.DeleteDocuments(new Term("id", "a"), null);
    modifyingReader.Flush(null);
    modifyingReader.DeleteDocuments(new Term("id", "b"), null);
    modifyingReader.Close();
    IndexReader.Open(directory, true, null).Close();
}
// LUCENE-1647
[Test]
public virtual void TestIndexReaderUnDeleteAll()
{
    MockRAMDirectory dir = new MockRAMDirectory();
    // NOTE(review): double-write protection is disabled here, presumably
    // because UndeleteAll re-writes a file already persisted by Flush()
    // below -- confirm.
    dir.SetPreventDoubleWrite(false);
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED, null);
    writer.AddDocument(CreateDocument("a"), null);
    writer.AddDocument(CreateDocument("b"), null);
    writer.AddDocument(CreateDocument("c"), null);
    writer.Close();
    IndexReader reader = IndexReader.Open((Directory) dir, false, null);
    reader.DeleteDocuments(new Term("id", "a"), null);
    // Persist the first delete before reverting everything.
    reader.Flush(null);
    reader.DeleteDocuments(new Term("id", "b"), null);
    reader.UndeleteAll(null);
    // Deleting again after UndeleteAll must still work.
    reader.DeleteDocuments(new Term("id", "b"), null);
    reader.Close();
    // The index must still open cleanly after the delete/undelete sequence.
    IndexReader.Open((Directory) dir, false, null).Close();
    dir.Close();
}
// Builds a one-field document whose "id" keyword field carries the supplied id.
private Document CreateDocument(System.String id)
{
    var document = new Document();
    document.Add(new Field("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
    return document;
}
// LUCENE-1468 -- make sure on attempting to open an
// IndexReader on a non-existent directory, you get a
// good exception
[Test]
public virtual void TestNoDir()
{
    Directory missingDir = FSDirectory.Open(_TestUtil.GetTempDir("doesnotexist"));
    Assert.Throws<NoSuchDirectoryException>(() => IndexReader.Open(missingDir, true, null), "did not hit expected exception");
    missingDir.Close();
}
// LUCENE-1509
[Test]
public virtual void TestNoDupCommitFileNames()
{
    // Every commit must list each file name at most once.
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED, null);
    writer.SetMaxBufferedDocs(2);
    writer.AddDocument(CreateDocument("a"), null);
    writer.AddDocument(CreateDocument("a"), null);
    writer.AddDocument(CreateDocument("a"), null);
    writer.Close();
    var commits = IndexReader.ListCommits(dir, null);
    foreach (IndexCommit commit in commits)
    {
        // HashSet<string> replaces the obsolete non-generic Hashtable that
        // was previously used as a poor-man's set; semantics are unchanged.
        var seen = new System.Collections.Generic.HashSet<string>();
        foreach (string fileName in commit.FileNames)
        {
            Assert.IsTrue(!seen.Contains(fileName), "file " + fileName + " was duplicated");
            seen.Add(fileName);
        }
    }
    dir.Close();
}
// LUCENE-1579: Ensure that on a cloned reader, segments
// reuse the doc values arrays in FieldCache
[Test]
public virtual void TestFieldCacheReuseAfterClone()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    Document doc = new Document();
    doc.Add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc, null);
    writer.Close();
    // Open reader
    IndexReader r = SegmentReader.GetOnlySegmentReader(dir, null);
    int[] ints = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(r, "number", null);
    Assert.AreEqual(1, ints.Length);
    Assert.AreEqual(17, ints[0]);
    // Clone reader
    IndexReader r2 = (IndexReader) r.Clone(null);
    r.Close();
    Assert.IsTrue(r2 != r);
    int[] ints2 = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(r2, "number", null);
    r2.Close();
    Assert.AreEqual(1, ints2.Length);
    Assert.AreEqual(17, ints2[0]);
    // Reference equality: the clone must hand back the very same cached array.
    Assert.IsTrue(ints == ints2);
    dir.Close();
}
// LUCENE-1579: Ensure that on a reopened reader, that any
// shared segments reuse the doc values arrays in
// FieldCache
[Test]
public virtual void TestFieldCacheReuseAfterReopen()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    Document doc = new Document();
    doc.Add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc, null);
    writer.Commit(null);
    // Open reader1
    IndexReader r = IndexReader.Open(dir, false, null);
    IndexReader r1 = SegmentReader.GetOnlySegmentReader(r);
    int[] ints = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(r1, "number", null);
    Assert.AreEqual(1, ints.Length);
    Assert.AreEqual(17, ints[0]);
    // Add new segment
    writer.AddDocument(doc, null);
    writer.Commit(null);
    // Reopen reader1 --> reader2
    IndexReader r2 = r.Reopen(null);
    r.Close();
    // Sub-reader 0 corresponds to the segment shared with the old reader.
    IndexReader sub0 = r2.GetSequentialSubReaders()[0];
    int[] ints2 = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(sub0, "number", null);
    r2.Close();
    // Reference equality: the shared segment must hand back the same cached array.
    Assert.IsTrue(ints == ints2);
    dir.Close();
}
// LUCENE-1579: Make sure all SegmentReaders are new when
// reopen switches readOnly
[Test]
public virtual void TestReopenChangeReadonly()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    Document doc = new Document();
    doc.Add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc, null);
    writer.Commit(null);
    // Open reader1 (writable)
    IndexReader r = IndexReader.Open(dir, false, null);
    Assert.IsTrue(r is DirectoryReader);
    IndexReader r1 = SegmentReader.GetOnlySegmentReader(r);
    int[] ints = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(r1, "number", null);
    Assert.AreEqual(1, ints.Length);
    Assert.AreEqual(17, ints[0]);
    // Reopen to readonly w/ no changes
    IndexReader r3 = r.Reopen(true, null);
    Assert.IsTrue(r3 is ReadOnlyDirectoryReader);
    r3.Close();
    // Add new segment
    writer.AddDocument(doc, null);
    writer.Commit(null);
    // Reopen reader1 --> reader2, switching to readonly
    IndexReader r2 = r.Reopen(true, null);
    r.Close();
    Assert.IsTrue(r2 is ReadOnlyDirectoryReader);
    IndexReader[] subs = r2.GetSequentialSubReaders();
    int[] ints2 = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts(subs[0], "number", null);
    r2.Close();
    // Both sub-readers must be read-only instances...
    Assert.IsTrue(subs[0] is ReadOnlySegmentReader);
    Assert.IsTrue(subs[1] is ReadOnlySegmentReader);
    // ...yet the unchanged segment still reuses the same cached array.
    Assert.IsTrue(ints == ints2);
    dir.Close();
}
// LUCENE-1586: getUniqueTermCount
[Test]
public virtual void TestUniqueTermCount()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    Document doc = new Document();
    // 26 letters + 10 digits = 36 unique terms per segment.
    doc.Add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
    writer.AddDocument(doc, null);
    writer.AddDocument(doc, null);
    writer.Commit(null);
    IndexReader r = IndexReader.Open(dir, false, null);
    IndexReader r1 = SegmentReader.GetOnlySegmentReader(r);
    Assert.AreEqual(36, r1.UniqueTermCount);
    // Force a second segment so the reopened top-level reader is multi-segment.
    writer.AddDocument(doc, null);
    writer.Commit(null);
    IndexReader r2 = r.Reopen(null);
    r.Close();
    // UniqueTermCount is unsupported on a multi-segment reader...
    Assert.Throws<NotSupportedException>(() => { var tc = r2.UniqueTermCount; }, "expected exception");
    // ...but each individual segment reader still reports it.
    IndexReader[] subs = r2.GetSequentialSubReaders();
    for (int i = 0; i < subs.Length; i++)
    {
        Assert.AreEqual(36, subs[i].UniqueTermCount);
    }
    r2.Close();
    writer.Close();
    dir.Close();
}
// LUCENE-1609: don't load terms index
[Test]
public virtual void TestNoTermsIndex()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    Document doc = new Document();
    doc.Add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
    writer.AddDocument(doc, null);
    writer.AddDocument(doc, null);
    writer.Close();
    // termInfosIndexDivisor of -1 opens the reader without loading the terms index.
    IndexReader r = IndexReader.Open(dir, null, true, - 1, null);
    // Term lookups need the terms index, so DocFreq must fail.
    Assert.Throws<SystemException>(() => r.DocFreq(new Term("field", "f"), null), "did not hit expected exception");
    Assert.IsFalse(((SegmentReader) r.GetSequentialSubReaders()[0]).TermsIndexLoaded());
    Assert.AreEqual(-1, (r.GetSequentialSubReaders()[0]).TermInfosIndexDivisor);
    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    writer.AddDocument(doc, null);
    writer.Close();
    // LUCENE-1718: ensure re-open carries over no terms index:
    IndexReader r2 = r.Reopen(null);
    r.Close();
    IndexReader[] subReaders = r2.GetSequentialSubReaders();
    Assert.AreEqual(2, subReaders.Length);
    for (int i = 0; i < 2; i++)
    {
        Assert.IsFalse(((SegmentReader) subReaders[i]).TermsIndexLoaded());
    }
    r2.Close();
    dir.Close();
}
// LUCENE-2046
[Test]
public void TestPrepareCommitIsCurrent()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
    Document doc = new Document();
    writer.AddDocument(doc, null);
    IndexReader r = IndexReader.Open(dir, true, null);
    Assert.IsTrue(r.IsCurrent(null));
    writer.AddDocument(doc, null);
    // PrepareCommit alone must not make the reader stale...
    writer.PrepareCommit(null);
    Assert.IsTrue(r.IsCurrent(null));
    // ...so Reopen hands back the very same reader instance.
    IndexReader r2 = r.Reopen(null);
    Assert.IsTrue(r == r2);
    // Only the actual Commit makes the reader non-current.
    writer.Commit(null);
    Assert.IsFalse(r.IsCurrent(null));
    writer.Close();
    r.Close();
    dir.Close();
}
}
} | 37.254632 | 226 | 0.676855 | [
"Apache-2.0"
] | grisha-kotler/lucenenet | test/Lucene.Net.Test/Index/TestIndexReader.cs | 70,374 | C# |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using StarkPlatform.CodeAnalysis.LanguageServices;
using StarkPlatform.CodeAnalysis.Shared.Extensions;
namespace StarkPlatform.CodeAnalysis.AddImport
{
internal abstract partial class AbstractAddImportFeatureService<TSimpleNameSyntax>
{
private struct SearchResult
{
    // Dotted name pieces of the namespace/type that matched.
    public readonly IReadOnlyList<string> NameParts;

    // Match quality; 0 is a perfect match, larger numbers are worse.
    public readonly double Weight;

    // Replacement text to show when the match came from spell checking.
    public readonly string DesiredName;

    // The simple-name node whose text would be rewritten to DesiredName.
    public readonly TSimpleNameSyntax NameNode;

    public SearchResult(SymbolResult<INamespaceOrTypeSymbol> result)
    {
        DesiredName = result.DesiredName;
        NameNode = result.NameNode;
        NameParts = INamespaceOrTypeSymbolExtensions.GetNameParts(result.Symbol);
        Weight = result.Weight;
    }

    public SearchResult(string desiredName, TSimpleNameSyntax nameNode, IReadOnlyList<string> nameParts, double weight)
    {
        NameParts = nameParts;
        Weight = weight;
        DesiredName = desiredName;
        NameNode = nameNode;
    }

    public bool DesiredNameDiffersFromSourceName()
    {
        // No rename is involved when there is no desired name or no node.
        if (string.IsNullOrEmpty(DesiredName) || NameNode == null)
        {
            return false;
        }

        return NameNode.GetFirstToken().ValueText != DesiredName;
    }

    public bool DesiredNameDiffersFromSourceNameOnlyByCase()
    {
        Debug.Assert(DesiredNameDiffersFromSourceName());
        var sourceText = NameNode.GetFirstToken().ValueText;
        return StringComparer.OrdinalIgnoreCase.Equals(sourceText, DesiredName);
    }

    public bool DesiredNameMatchesSourceName(Document document)
    {
        if (!DesiredNameDiffersFromSourceName())
        {
            // Identical names match in any language.
            return true;
        }

        // Names differ; in a case-insensitive language a case-only
        // difference still counts as a match.
        var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>();
        return !syntaxFacts.IsCaseSensitive && DesiredNameDiffersFromSourceNameOnlyByCase();
    }
}
private struct SymbolResult<T> where T : ISymbol
{
    // The symbol that matched the string being searched for.
    public readonly T Symbol;

    // Match quality; 0 is a perfect match, larger numbers are worse.
    public readonly double Weight;

    // Replacement text to show when the match came from spell checking.
    public readonly string DesiredName;

    // The simple-name node whose text would be rewritten to DesiredName.
    public readonly TSimpleNameSyntax NameNode;

    public SymbolResult(string desiredName, TSimpleNameSyntax nameNode, T symbol, double weight)
    {
        Symbol = symbol;
        Weight = weight;
        DesiredName = desiredName;
        NameNode = nameNode;
    }

    // Copy of this result carrying a differently-typed symbol.
    public SymbolResult<T2> WithSymbol<T2>(T2 symbol) where T2 : ISymbol
    {
        return new SymbolResult<T2>(DesiredName, NameNode, symbol, Weight);
    }

    // Copy of this result carrying a different desired name.
    internal SymbolResult<T> WithDesiredName(string desiredName)
    {
        return new SymbolResult<T>(desiredName, NameNode, Symbol, Weight);
    }
}
// Non-generic holder for the Create factory, letting callers rely on
// generic type inference instead of spelling out T.
private struct SymbolResult
{
    public static SymbolResult<T> Create<T>(string desiredName, TSimpleNameSyntax nameNode, T symbol, double weight) where T : ISymbol
    {
        return new SymbolResult<T>(desiredName, nameNode, symbol, weight);
    }
}
}
}
| 38.067227 | 161 | 0.592494 | [
"Apache-2.0"
] | stark-lang/stark-roslyn | src/Features/Core/Portable/AddImport/SymbolResult.cs | 4,532 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace CodeStack.Examples.eDrawings.Properties {
using System;
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
// NOTE: auto-generated resource accessor (see file header); edit the
// corresponding .resx file rather than this class.
internal class Resources {

    private static global::System.Resources.ResourceManager resourceMan;

    private static global::System.Globalization.CultureInfo resourceCulture;

    [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
    internal Resources() {
    }

    /// <summary>
    ///   Returns the cached ResourceManager instance used by this class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Resources.ResourceManager ResourceManager {
        get {
            // Lazily created on first access; subsequent calls reuse the instance.
            if (object.ReferenceEquals(resourceMan, null)) {
                global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("CodeStack.Examples.eDrawings.Properties.Resources", typeof(Resources).Assembly);
                resourceMan = temp;
            }
            return resourceMan;
        }
    }

    /// <summary>
    ///   Overrides the current thread's CurrentUICulture property for all
    ///   resource lookups using this strongly typed resource class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Globalization.CultureInfo Culture {
        get {
            return resourceCulture;
        }
        set {
            resourceCulture = value;
        }
    }
}
}
| 44.046875 | 194 | 0.616531 | [
"MIT"
] | benisahardworker/solidworks-api-examples | edrawings-api/eDrawingsWpfHost/eDrawingsWpfHost/Properties/Resources.Designer.cs | 2,821 | C# |
// Copyright 2004-2021 Castle Project - http://www.castleproject.org/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Castle.Components.DictionaryAdapter.Tests
{
using System.Collections.Generic;
/// <summary>
/// Test behavior that records every dictionary key fetched through the
/// adapter while returning stored values unchanged.
/// </summary>
public class CustomGetter : DictionaryBehaviorAttribute, IDictionaryPropertyGetter
{
    // readonly: Reset() clears the list in place; the instance itself is
    // never replaced after construction.
    private readonly List<string> propertiesFetched = new List<string>();

    /// <summary>
    /// Keys fetched since construction or the last <see cref="Reset"/>.
    /// </summary>
    public IList<string> PropertiesFetched
    {
        get { return propertiesFetched; }
    }

    /// <summary>
    /// Clears the record of fetched keys.
    /// </summary>
    public void Reset()
    {
        propertiesFetched.Clear();
    }

    /// <summary>
    /// Records <paramref name="key"/> and returns
    /// <paramref name="storedValue"/> untouched.
    /// </summary>
    public object GetPropertyValue(
        IDictionaryAdapter dictionaryAdapter,
        string key,
        object storedValue,
        PropertyDescriptor property,
        bool ifExists
        )
    {
        propertiesFetched.Add(key);
        return storedValue;
    }
}
}
| 30.673913 | 86 | 0.659816 | [
"Apache-2.0"
] | belav/Core | src/Castle.Core.Tests/Components.DictionaryAdapter.Tests/CustomGetter.cs | 1,413 | C# |
#region Copyright
////////////////////////////////////////////////////////////////////////////////
// The following FIT Protocol software provided may be used with FIT protocol
// devices only and remains the copyrighted property of Garmin Canada Inc.
// The software is being provided on an "as-is" basis and as an accommodation,
// and therefore all warranties, representations, or guarantees of any kind
// (whether express, implied or statutory) including, without limitation,
// warranties of merchantability, non-infringement, or fitness for a particular
// purpose, are specifically disclaimed.
//
// Copyright 2020 Garmin Canada Inc.
////////////////////////////////////////////////////////////////////////////////
// ****WARNING**** This file is auto-generated! Do NOT edit this file.
// Profile Version = 21.30Release
// Tag = production/akw/21.30.00-0-g324900c
////////////////////////////////////////////////////////////////////////////////
#endregion
namespace Dynastream.Fit
{
/// <summary>
/// Implements the profile LocalDateTime type as a class
/// </summary>
public static class LocalDateTime
{
    // NOTE: auto-generated from the FIT profile (see file header); edits
    // will be lost on regeneration.
    public const uint Min = 0x10000000; // if date_time is < 0x10000000 then it is system time (seconds from device power on)
    public const uint Invalid = (uint)0xFFFFFFFF;
}
}
| 40.176471 | 130 | 0.580527 | [
"MIT"
] | epvanhouten/PowerToSpeed | PowerToSpeed/Dynastream/Fit/Profile/Types/LocalDateTime.cs | 1,366 | C# |
using System;
using System.Collections.Generic;
using Windows.ApplicationModel;
using Windows.ApplicationModel.Activation;
using Windows.ApplicationModel.Store;
using Windows.Storage;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;
using AppStudio.Common;
using VSauceSupreme.Services;
namespace VSauceSupreme
{
/// <summary>
/// Provides application-specific behavior to supplement the default Application class.
/// </summary>
sealed partial class App : Windows.UI.Xaml.Application
{
    // NOTE(review): appears to be this app's own (development-time) id;
    // ValidateStoreId treats a CurrentApp.AppId equal to this value as
    // "no real store-assigned id" -- confirm.
    private Guid APP_ID = new Guid("871d6893-8af7-4842-84f8-e6472879b9a7");

    /// <summary>
    /// Initializes the singleton application object. This is the first line of authored code
    /// executed, and as such is the logical equivalent of main() or WinMain().
    /// </summary>
    public App()
    {
        this.InitializeComponent();
        this.Suspending += OnSuspending;
    }

    /// <summary>
    /// Invoked when the application is launched normally by the end user. Other entry points
    /// will be used such as when the application is launched to open a specific file.
    /// </summary>
    /// <param name="e">Details about the launch request and process.</param>
    protected override void OnLaunched(LaunchActivatedEventArgs e)
    {
#if DEBUG
        if (System.Diagnostics.Debugger.IsAttached)
        {
            this.DebugSettings.EnableFrameRateCounter = true;
        }
#endif
        // Record device/store info in local settings and, on first run only,
        // create the app's tile.
        GetAppData();
        UpdateAppTiles();
        Frame rootFrame = Window.Current.Content as Frame;
        // Do not repeat app initialization when the Window already has content,
        // just ensure that the window is active
        if (rootFrame == null)
        {
            // Create a Frame to act as the navigation context and navigate to the first page
            rootFrame = new Frame();
            // Set the default language
            rootFrame.Language = Windows.Globalization.ApplicationLanguages.Languages[0];
            rootFrame.NavigationFailed += OnNavigationFailed;
            if (e.PreviousExecutionState == ApplicationExecutionState.Terminated)
            {
                //TODO: Load state from previously suspended application
            }
            // Place the frame in the current Window
            Window.Current.Content = rootFrame;
        }
        if (rootFrame.Content == null)
        {
            // When the navigation stack isn't restored navigate to the first page,
            // configuring the new page by passing required information as a navigation
            // parameter
            rootFrame.Navigate(typeof(Shell), e.Arguments);
        }
        // Ensure the current window is active
        Window.Current.Activate();
    }

    /// <summary>
    /// Invoked when Navigation to a certain page fails
    /// </summary>
    /// <param name="sender">The Frame which failed navigation</param>
    /// <param name="e">Details about the navigation failure</param>
    void OnNavigationFailed(object sender, NavigationFailedEventArgs e)
    {
        throw new Exception("Failed to load Page " + e.SourcePageType.FullName);
    }

    /// <summary>
    /// Persists the device family value and the validated store id into
    /// local settings.
    /// </summary>
    private void GetAppData()
    {
        string deviceType = IsOnPhoneExecution() ? LocalSettingNames.PhoneValue : LocalSettingNames.WindowsValue;
        ApplicationData.Current.LocalSettings.Values[LocalSettingNames.DeviceType] = deviceType;
        ApplicationData.Current.LocalSettings.Values[LocalSettingNames.StoreId] = ValidateStoreId();
    }

    /// <summary>
    /// True when the resource context reports the "Mobile" device family.
    /// </summary>
    private bool IsOnPhoneExecution()
    {
        var qualifiers = Windows.ApplicationModel.Resources.Core.ResourceContext.GetForCurrentView().QualifierValues;
        return (qualifiers.ContainsKey("DeviceFamily") && qualifiers["DeviceFamily"] == "Mobile");
    }

    /// <summary>
    /// Returns CurrentApp.AppId when it is non-empty and differs from APP_ID;
    /// otherwise returns Guid.Empty. Any exception from the lookup is also
    /// mapped to Guid.Empty.
    /// </summary>
    private Guid ValidateStoreId()
    {
        try
        {
            Guid storeId = CurrentApp.AppId;
            if (storeId != Guid.Empty && storeId != APP_ID)
            {
                return storeId;
            }
            return Guid.Empty;
        }
        catch (Exception)
        {
            // Deliberate best-effort: any failure is treated as "no store id".
            return Guid.Empty;
        }
    }

    /// <summary>
    /// Creates the app's flip tile once; a local-settings flag prevents it
    /// from being re-created on subsequent launches.
    /// </summary>
    private void UpdateAppTiles()
    {
        var init = ApplicationData.Current.LocalSettings.Values[LocalSettingNames.TilesInitialized];
        if (init == null || (init is bool && !(bool)init))
        {
            TileServices.CreateFlipTile(@"VSauce Supreme", @"All the VSauces in one app!");
            ApplicationData.Current.LocalSettings.Values[LocalSettingNames.TilesInitialized] = true;
        }
    }

    /// <summary>
    /// Invoked when application execution is being suspended. Application state is saved
    /// without knowing whether the application will be terminated or resumed with the contents
    /// of memory still intact.
    /// </summary>
    /// <param name="sender">The source of the suspend request.</param>
    /// <param name="e">Details about the suspend request.</param>
    private void OnSuspending(object sender, SuspendingEventArgs e)
    {
        var deferral = e.SuspendingOperation.GetDeferral();
        //TODO: Save application state and stop any background activity
        deferral.Complete();
    }
}
}
| 37.885906 | 121 | 0.601594 | [
"MIT"
] | LanceMcCarthy/Lancelot.VSauceSupreme | src/VSauce Supreme/VSauceSupreme.W10/App.xaml.cs | 5,645 | C# |
//---------------------------------------------------------
// <auto-generated>
// This code was generated by a tool. Changes to this
// file may cause incorrect behavior and will be lost
// if the code is regenerated.
//
// Generated on 2020 October 09 04:58:54 UTC
// </auto-generated>
//---------------------------------------------------------
using System.CodeDom.Compiler;
using System.Runtime.CompilerServices;
#nullable enable
namespace go
{
public static partial class flag_package
{
// NOTE: auto-generated by go2cs (see file header); edits will be lost on
// regeneration.
[GeneratedCode("go2cs", "0.1.0.0")]
private partial struct float64Value
{
    // Value of the float64Value struct
    private readonly double m_value;

    public float64Value(double value) => m_value = value;

    // Enable implicit conversions between double and float64Value struct
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static implicit operator float64Value(double value) => new float64Value(value);

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static implicit operator double(float64Value value) => value.m_value;

    // Enable comparisons between nil and float64Value struct.
    // Go's nil maps onto default(float64Value) here (see the NilType
    // conversion below), so "value == nil" holds exactly when the struct
    // equals its default.
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static bool operator ==(float64Value value, NilType nil) => value.Equals(default(float64Value));

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static bool operator !=(float64Value value, NilType nil) => !(value == nil);

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static bool operator ==(NilType nil, float64Value value) => value == nil;

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static bool operator !=(NilType nil, float64Value value) => value != nil;

    // Assigning nil produces the default (zero) value.
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public static implicit operator float64Value(NilType nil) => default(float64Value);
}
}
}
| 40.403846 | 115 | 0.62732 | [
"MIT"
] | GridProtectionAlliance/go2cs | src/go-src-converted/flag/flag_float64ValueStructOf(double).cs | 2,101 | C# |
using Microsoft.EntityFrameworkCore.Storage;
using Mix.Cms.Lib.Models.Cms;
using Mix.Domain.Data.ViewModels;
using Newtonsoft.Json;
using System;
namespace Mix.Cms.Lib.ViewModels.MixPageModules
{
/// <summary>
/// Read-only MVC view model for a page/module association.
/// </summary>
public class ReadMvcViewModel
    : ViewModelBase<MixCmsContext, MixPageModule, ReadMvcViewModel>
{
    public ReadMvcViewModel(MixPageModule model, MixCmsContext _context = null, IDbContextTransaction _transaction = null)
        : base(model, _context, _transaction)
    {
    }

    public ReadMvcViewModel() : base()
    {
    }

    #region Models

    [JsonProperty("id")]
    public int Id { get; set; }

    [JsonProperty("specificulture")]
    public string Specificulture { get; set; }

    [JsonProperty("moduleId")]
    public int ModuleId { get; set; }

    [JsonProperty("pageId")]
    public int PageId { get; set; }

    [JsonProperty("isActived")]
    public bool IsActived { get; set; }

    [JsonProperty("image")]
    public string Image { get; set; }

    [JsonProperty("description")]
    public string Description { get; set; }

    [JsonProperty("createdBy")]
    public string CreatedBy { get; set; }

    [JsonProperty("createdDateTime")]
    public DateTime CreatedDateTime { get; set; }

    [JsonProperty("modifiedBy")]
    public string ModifiedBy { get; set; }

    [JsonProperty("lastModified")]
    public DateTime? LastModified { get; set; }

    [JsonProperty("priority")]
    public int Priority { get; set; }

    [JsonProperty("status")]
    public MixEnums.MixContentStatus Status { get; set; }

    #endregion

    #region Views

    // Expanded module details, populated by ExpandView when found.
    [JsonProperty("module")]
    public MixModules.ReadMvcViewModel Module { get; set; }

    #endregion Views

    #region overrides

    /// <summary>
    /// Maps this view model back to its entity. New records (Id == 0) are
    /// assigned the next id as current max + 1.
    /// </summary>
    public override MixPageModule ParseModel(MixCmsContext _context = null, IDbContextTransaction _transaction = null)
    {
        if (Id == 0)
        {
            Id = Repository.Max(m => m.Id, _context, _transaction).Data + 1;
        }
        return base.ParseModel(_context, _transaction);
    }

    /// <summary>
    /// Loads the referenced module (matched by ModuleId and culture) into
    /// <see cref="Module"/>; leaves it null when the lookup fails.
    /// </summary>
    public override void ExpandView(MixCmsContext _context = null, IDbContextTransaction _transaction = null)
    {
        var getModule = MixModules.ReadMvcViewModel.Repository.GetSingleModel(p => p.Id == ModuleId && p.Specificulture == Specificulture
            , _context: _context, _transaction: _transaction
            );
        if (getModule.IsSucceed)
        {
            Module = getModule.Data;
            //Module.LoadData();
        }
    }

    #endregion overrides
}
} | 32.951807 | 141 | 0.601463 | [
"MIT"
] | Noriffik/mix.core | src/Mix.Cms.Lib/ViewModels/MixPageModules/ReadMvcViewModel.cs | 2,737 | C# |
using CityBreaks.Models;
using CityBreaks.Services;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
using System.ComponentModel.DataAnnotations;
namespace CityBreaks.Pages;
public class CityModel : PageModel
{
    private readonly ICityService _cityService;
    private readonly IPropertyService _propertyService;

    public CityModel(ICityService cityService, IPropertyService propertyService)
    {
        _cityService = cityService;
        _propertyService = propertyService;
    }

    // Bound from the route/query string on GET requests.
    [BindProperty(SupportsGet = true)]
    public string Name { get; set; }

    public City City { get; set; }

    /// <summary>
    /// Loads the city by name; returns 404 when no such city exists.
    /// </summary>
    public async Task<IActionResult> OnGetAsync()
    {
        City = await _cityService.GetByNameAsync(Name);
        if (City == null)
        {
            return NotFound();
        }
        return Page();
    }

    /// <summary>
    /// AJAX handler returning the property-details partial for one property.
    /// </summary>
    public async Task<PartialViewResult> OnGetPropertyDetails(int id)
    {
        var property = await _propertyService.FindAsync(id);
        var model = new BookingInputModel { Property = property };
        return Partial("_PropertyDetailsPartial", model);
    }

    /// <summary>
    /// AJAX handler computing the total cost of a stay as
    /// days * day rate * guests.
    /// NOTE(review): StartDate/EndDate are nullable but dereferenced with
    /// .Value without validation -- a payload missing either date throws
    /// InvalidOperationException. Confirm client-side validation guarantees both.
    /// </summary>
    public JsonResult OnPostBooking([FromBody]BookingInputModel model)
    {
        // The (int) cast truncates any fractional part of the day difference.
        var numberOfDays = (int)(model.EndDate.Value - model.StartDate.Value).TotalDays;
        var totalCost = numberOfDays * model.Property.DayRate * model.NumberOfGuests;
        var result = new { TotalCost = totalCost };
        return new JsonResult(result);
    }

    /// <summary>
    /// JSON payload shape for booking requests.
    /// </summary>
    public class BookingInputModel
    {
        public Property Property { get; set; }

        [Display(Name = "No. of guests")]
        public int NumberOfGuests { get; set; }

        [DataType(DataType.Date), Display(Name = "Arrival")]
        public DateTime? StartDate { get; set; }

        [DataType(DataType.Date), Display(Name = "Departure")]
        public DateTime? EndDate { get; set; }
    }
}
| 30.754098 | 88 | 0.668977 | [
"MIT"
] | mikebrind/Razor-Pages-In-Action | Chapter11/CssIsolation/CityBreaks/Pages/City.cshtml.cs | 1,876 | C# |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using PetStore.Core.Helper;
using PetStore.Core.Infrastructure.Logging;
namespace PetStore.Infrastructure.Foundation.Logging
{
/// <summary>
/// <see cref="ILogger"/> implementation that writes entries to the Windows
/// Event Log, creating the event source on first use. All failures while
/// writing are swallowed so that logging can never crash the application.
/// </summary>
public class EventViewerLogger : ILogger
{
    #region Fields

    // Guards one-time creation of the event source. Declared readonly so the
    // lock target can never be reassigned (a mutable lock field defeats the
    // mutual exclusion it is supposed to provide).
    private static readonly object _sync = new object();

    #endregion

    #region Properties

    /// <summary>Event source name entries are written under.</summary>
    public string Source { get; set; }

    /// <summary>Name of the event log written to (e.g. "Application").</summary>
    public string Log { get; set; }

    #endregion

    #region Constructors

    /// <summary>Initializes the logger with the default source and log names.</summary>
    public EventViewerLogger()
    {
        Source = "TimeSpender";
        Log = "Application";
    }

    #endregion

    #region ILogger Members

    /// <summary>Writes an informational entry.</summary>
    public void Info(string message)
    {
        Write(message, EventLogEntryType.Information);
    }

    /// <summary>Writes a warning entry.</summary>
    public void Warning(string message)
    {
        Write(message, EventLogEntryType.Warning);
    }

    /// <summary>Writes a success-audit entry.</summary>
    public void Success(string message)
    {
        Write(message, EventLogEntryType.SuccessAudit);
    }

    /// <summary>Writes an error entry.</summary>
    public void Error(string message)
    {
        Write(message, EventLogEntryType.Error);
    }

    /// <summary>Writes an error entry containing the formatted exception.</summary>
    public void Exception(Exception exception)
    {
        Write(ExceptionManager.GetExceptionAsString(exception), EventLogEntryType.Error);
    }

    /// <summary>
    /// Writes an error entry with a caller message followed by a blank line
    /// and the formatted exception.
    /// </summary>
    public void Exception(Exception exception, string message)
    {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.AppendLine(message);
        stringBuilder.AppendLine();
        stringBuilder.Append(ExceptionManager.GetExceptionAsString(exception));
        Write(stringBuilder.ToString(), EventLogEntryType.Error);
    }

    #endregion

    #region Private Methods

    /// <summary>
    /// Writes a single entry, creating the event source under a lock the
    /// first time it is needed. Any exception (e.g. missing permissions to
    /// create a source) is deliberately ignored — logging is best-effort.
    /// </summary>
    private void Write(string message, EventLogEntryType entryType)
    {
        try
        {
            lock (_sync)
            {
                if (!EventLog.SourceExists(Source))
                {
                    EventLog.CreateEventSource(Source, Log);
                }
            }

            EventLog.WriteEntry(Source, message, entryType);
        }
        catch (Exception)
        {
            // Intentionally swallowed by design; see summary.
        }
    }

    #endregion
}
}
| 23.42 | 93 | 0.561059 | [
"MIT"
] | claudineinobrega/DDD-Avanade-DIO | src/PetStore.Infrastructure.Foundation/Logging/EventViewerLogger.cs | 2,344 | C# |
using System;
using System.Diagnostics;
namespace AtleX.CommandLineArguments.Validators
{
/// <summary>
/// Immutable description of a single validation failure: which command line
/// argument failed, which <see cref="ArgumentValidator"/> reported it, and
/// the optional error message.
/// </summary>
[DebuggerDisplay("Argument: {ArgumentName}, validator: {ValidatorName}")]
public sealed class ValidationError
{
    /// <summary>
    /// Creates a validation error for the given argument and validator.
    /// </summary>
    /// <param name="argumentName">Name of the command line argument; must be non-blank.</param>
    /// <param name="validatorName">Name of the reporting validator; must be non-blank.</param>
    /// <param name="validationErrorMessage">Optional error message; may be null.</param>
    /// <exception cref="ArgumentNullException">
    /// <paramref name="argumentName"/> or <paramref name="validatorName"/> is
    /// null, empty or whitespace.
    /// </exception>
    public ValidationError(string argumentName, string validatorName, string validationErrorMessage)
    {
        ArgumentName = !string.IsNullOrWhiteSpace(argumentName)
            ? argumentName
            : throw new ArgumentNullException(nameof(argumentName));
        ValidatorName = !string.IsNullOrWhiteSpace(validatorName)
            ? validatorName
            : throw new ArgumentNullException(nameof(validatorName));
        ErrorMessage = validationErrorMessage;
    }

    /// <summary>Gets the name of the command line argument.</summary>
    public string ArgumentName { get; }

    /// <summary>Gets the name of the validator that produced this error.</summary>
    public string ValidatorName { get; }

    /// <summary>Gets the validation error message, if any.</summary>
    public string ErrorMessage { get; }
}
}
| 29.354839 | 102 | 0.656593 | [
"MIT"
] | akamsteeg/AtleX.CommandLineArguments | src/AtleX.CommandLineArguments/Validators/ValidationError.cs | 1,822 | C# |
using System;
using System.Collections.Generic;
using System.Globalization;
namespace Orckestra.Composer.Cart.Parameters
{
/// <summary>
/// Parameter object carrying everything needed to initialize a payment on a
/// cart: cart identity, customer, payment, scope, culture and optional
/// provider-specific data.
/// </summary>
public class InitializePaymentParam
{
    /// <summary>
    /// Name of the cart to update.
    /// </summary>
    public string CartName { get; set; }

    /// <summary>
    /// ID of the customer.
    /// </summary>
    public Guid CustomerId { get; set; }

    /// <summary>
    /// Id of the payment that will be initialized.
    /// </summary>
    public Guid PaymentId { get; set; }

    /// <summary>
    /// Type of the payment that will be initialized.
    /// </summary>
    public string PaymentType { get; set; }

    /// <summary>
    /// Scope in which the cart is.
    /// </summary>
    public string Scope { get; set; }

    /// <summary>
    /// Culture of the request.
    /// </summary>
    public CultureInfo CultureInfo { get; set; }

    /// <summary>
    /// Additional data that may be used by the Overture Payment Provider to initialize the payment.
    /// This is optional and may be null.
    /// </summary>
    public Dictionary<string, object> AdditionalData { get; set; }

    /// <summary>
    /// Options that may be used to override default behaviors of the Overture Payment Provider.
    /// This is optional and may be null.
    /// </summary>
    public Dictionary<string, object> Options { get; set; }
}
}
| 29.117647 | 104 | 0.557576 | [
"MIT"
] | InnaBoitsun/BetterRetailGroceryTest | src/Orckestra.Composer.Cart/Parameters/InitializePaymentParam.cs | 1,487 | C# |
namespace IBatisNetWinformDemo
{
partial class FormProblemSolutions
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.dgvDataShow = new System.Windows.Forms.DataGridView();
this.btnQuery = new System.Windows.Forms.Button();
this.btnQuery2 = new System.Windows.Forms.Button();
this.button1 = new System.Windows.Forms.Button();
((System.ComponentModel.ISupportInitialize)(this.dgvDataShow)).BeginInit();
this.SuspendLayout();
//
// dgvDataShow
//
this.dgvDataShow.AllowUserToAddRows = false;
this.dgvDataShow.AllowUserToDeleteRows = false;
this.dgvDataShow.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
this.dgvDataShow.Location = new System.Drawing.Point(33, 41);
this.dgvDataShow.Name = "dgvDataShow";
this.dgvDataShow.ReadOnly = true;
this.dgvDataShow.RowTemplate.Height = 23;
this.dgvDataShow.Size = new System.Drawing.Size(525, 242);
this.dgvDataShow.TabIndex = 4;
//
// btnQuery
//
this.btnQuery.Location = new System.Drawing.Point(33, 12);
this.btnQuery.Name = "btnQuery";
this.btnQuery.Size = new System.Drawing.Size(128, 23);
this.btnQuery.TabIndex = 3;
this.btnQuery.Text = "查询(TypeHandler)";
this.btnQuery.UseVisualStyleBackColor = true;
this.btnQuery.Click += new System.EventHandler(this.btnQuery_Click);
//
// btnQuery2
//
this.btnQuery2.Location = new System.Drawing.Point(167, 12);
this.btnQuery2.Name = "btnQuery2";
this.btnQuery2.Size = new System.Drawing.Size(178, 23);
this.btnQuery2.TabIndex = 5;
this.btnQuery2.Text = "查询(N+1 Select Lists)";
this.btnQuery2.UseVisualStyleBackColor = true;
this.btnQuery2.Click += new System.EventHandler(this.btnQuery2_Click);
//
// button1
//
this.button1.Location = new System.Drawing.Point(351, 12);
this.button1.Name = "button1";
this.button1.Size = new System.Drawing.Size(207, 23);
this.button1.TabIndex = 6;
this.button1.Text = "查询(composite key)";
this.button1.UseVisualStyleBackColor = true;
this.button1.Click += new System.EventHandler(this.button1_Click);
//
// FormProblemSolutions
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(591, 294);
this.Controls.Add(this.button1);
this.Controls.Add(this.btnQuery2);
this.Controls.Add(this.dgvDataShow);
this.Controls.Add(this.btnQuery);
this.Name = "FormProblemSolutions";
this.Text = "FormProblemSolutions";
((System.ComponentModel.ISupportInitialize)(this.dgvDataShow)).EndInit();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.DataGridView dgvDataShow;
private System.Windows.Forms.Button btnQuery;
private System.Windows.Forms.Button btnQuery2;
private System.Windows.Forms.Button button1;
}
} | 42.15534 | 129 | 0.58982 | [
"MIT"
] | shenhx/DotNetAll | 08Demos/Orms/IBatisNetWinformDemo/IBatisNetWinformDemo/FormProblemSolutions.Designer.cs | 4,356 | C# |
namespace Host.UnitTests.IO
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Crest.Host.IO;
using FluentAssertions;
using NSubstitute;
using Xunit;
public class BlockStreamTests
{
private readonly BlockStreamPool pool;
private readonly BlockStream stream;
public BlockStreamTests()
{
this.pool = Substitute.For<BlockStreamPool>();
this.pool.GetBlock().Returns(_ => new byte[BlockStreamPool.DefaultBlockSize]);
this.stream = new BlockStream(this.pool);
}
public sealed class CanRead : BlockStreamTests
{
[Fact]
public void ShouldReturnTrue()
{
this.stream.CanRead.Should().BeTrue();
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => { _ = s.CanRead; })
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class CanSeek : BlockStreamTests
{
[Fact]
public void ShouldReturnTrue()
{
this.stream.CanSeek.Should().BeTrue();
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => { _ = s.CanSeek; })
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class CanWrite : BlockStreamTests
{
[Fact]
public void ShouldReturnTrue()
{
this.stream.CanWrite.Should().BeTrue();
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => { _ = s.CanWrite; })
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class CopyToAsync : BlockStreamTests
{
[Fact]
public async Task ShouldCopyFromTheCurrentPosition()
{
byte[] data = new byte[BlockStreamPool.DefaultBlockSize + 1];
data[data.Length - 2] = 1;
data[data.Length - 1] = 2;
this.stream.Write(data, 0, data.Length);
this.stream.Position = data.Length - 2;
using (var destination = new MemoryStream())
{
await this.stream.CopyToAsync(destination);
destination.ToArray().Should().Equal(1, 2);
}
}
}
public sealed class Dispose : BlockStreamTests
{
[Fact]
public void ShouldReleaseTheMemoryToThePool()
{
byte[] block = new byte[BlockStreamPool.DefaultBlockSize];
this.pool.GetBlock().Returns(block);
// Write something to force it to grab a block
this.stream.Write(new byte[10], 0, 10);
this.stream.Dispose();
this.pool.Received().ReturnBlocks(
Arg.Do<IReadOnlyCollection<byte[]>>(a => a.Should().ContainSingle().Which.Should().BeSameAs(block)));
}
}
public sealed class Flush : BlockStreamTests
{
[Fact]
public void ShouldNotThrowAnException()
{
this.stream.Invoking(s => s.Flush())
.Should().NotThrow();
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => s.Flush())
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class Length : BlockStreamTests
{
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => { _ = s.Length; })
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class Read : BlockStreamTests
{
[Fact]
public void ShouldReadAllTheBytes()
{
byte[] data = new byte[BlockStreamPool.DefaultBlockSize + 1];
for (int i = 0; i < data.Length; i++)
{
data[i] = 1;
}
this.stream.Write(data, 0, data.Length);
this.stream.Position = 0;
byte[] buffer = new byte[data.Length + 2];
int read = this.stream.Read(buffer, 1, data.Length);
read.Should().Be(data.Length);
buffer.Should().HaveElementAt(0, 0);
buffer.Should().HaveElementAt(1, 1);
buffer.Should().HaveElementAt(buffer.Length - 2, 1);
buffer.Should().HaveElementAt(buffer.Length - 1, 0);
}
[Fact]
public void ShouldReturnZeroForEmptyStreams()
{
byte[] buffer = new byte[1];
int result = this.stream.Read(buffer, 0, 1);
result.Should().Be(0);
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
byte[] buffer = new byte[1];
this.stream.Invoking(s => s.Read(buffer, 0, 1))
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class ReadByte : BlockStreamTests
{
[Fact]
public void ShouldReturnMinusOneWhenAtTheEndOfTheStream()
{
int result = this.stream.ReadByte();
result.Should().Be(-1);
}
[Fact]
public void ShouldReturnTheByteAtTheCurrentPosition()
{
byte[] data = new byte[BlockStreamPool.DefaultBlockSize + 1];
data[data.Length - 1] = 1;
this.stream.Write(data, 0, data.Length);
this.stream.Position = data.Length - 1;
int result = this.stream.ReadByte();
result.Should().Be(1);
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => { _ = s.ReadByte(); })
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class Seek : BlockStreamTests
{
[Fact]
public void ShouldSetThePositionFromTheCurrentPosition()
{
this.stream.Position = 1;
this.stream.Seek(2, SeekOrigin.Current);
this.stream.Position.Should().Be(3);
}
[Fact]
public void ShouldSetThePositionFromTheEnd()
{
this.stream.SetLength(1);
this.stream.Seek(2, SeekOrigin.End);
this.stream.Position.Should().Be(3);
}
[Fact]
public void ShouldSetThePositionFromTheStart()
{
this.stream.Position = 1;
this.stream.Seek(2, SeekOrigin.Begin);
this.stream.Position.Should().Be(2);
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => s.Seek(0, SeekOrigin.Begin))
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class SetLength : BlockStreamTests
{
[Fact]
public void ShouldEnsurePositionIsSmaller()
{
this.stream.Position = 12;
this.stream.SetLength(5);
this.stream.Position.Should().Be(5);
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
this.stream.Invoking(s => s.SetLength(0))
.Should().Throw<ObjectDisposedException>();
}
[Fact]
public void ShouldUpdateTheLength()
{
this.stream.SetLength(3);
this.stream.Length.Should().Be(3);
}
}
public sealed class Write : BlockStreamTests
{
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
byte[] buffer = new byte[1];
this.stream.Invoking(s => s.Write(buffer, 0, 1))
.Should().Throw<ObjectDisposedException>();
}
}
public sealed class WriteAsync : BlockStreamTests
{
[Fact]
public void ShouldReturnCanceledIfTheTokenIsAlreadyCanceled()
{
byte[] buffer = new byte[1];
var token = new CancellationToken(canceled: true);
Task result = this.stream.WriteAsync(buffer, 0, 1, token);
result.IsCanceled.Should().BeTrue();
}
[Fact]
public void ShouldThrowIfDisposed()
{
this.stream.Dispose();
byte[] buffer = new byte[1];
this.stream.Awaiting(s => s.WriteAsync(buffer, 0, 1))
.Should().Throw<ObjectDisposedException>();
}
[Fact]
public void ShouldWriteTheDataSyncronously()
{
Task result = this.stream.WriteAsync(new byte[] { 1, 2 }, 0, 2);
result.IsCompleted.Should().BeTrue();
byte[] buffer = new byte[2];
this.stream.Position = 0;
this.stream.Read(buffer, 0, 2);
buffer.Should().Equal(1, 2);
}
}
}
}
| 29.335244 | 121 | 0.478609 | [
"MIT"
] | samcragg/Crest | test/Host.UnitTests/IO/BlockStreamTests.cs | 10,240 | C# |
using UnityEngine;
using Mirror;
namespace WeaverSyncListTests.SyncList
{
// Minimal NetworkBehaviour fixture used by the Weaver tests: declares a plain
// SyncListInt field so the test can verify the Mirror weaver processes it.
class SyncList : NetworkBehaviour
{
    public SyncListInt Foo;  // intentionally uninitialized; the weaver handles it
}
}
| 14.454545 | 38 | 0.704403 | [
"MIT"
] | exiledgamesstudio/Mirror | Assets/Mirror/Tests/Editor/Weaver/WeaverSyncListTests~/SyncList.cs | 159 | C# |
using Backrole.Core.Abstractions;
namespace Backrole.Http.Abstractions
{
/// <summary>
/// Marker interface for a dependency container scoped to the HTTP pipeline.
/// Inherits all members from <see cref="IContainer"/> and adds none of its
/// own; it exists so HTTP components can be resolved against a distinct
/// container type.
/// </summary>
public interface IHttpContainer : IContainer
{
}
}
| 15.384615 | 48 | 0.64 | [
"MIT"
] | neurnn/backrole | http/src/Backrole.Http.Abstractions/IHttpContainer.cs | 202 | C# |
/*
* Copyright (C) Sportradar AG. See LICENSE for full license governing this code
*/
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using Dawn;
using System.Linq;
using Common.Logging;
using Sportradar.OddsFeed.SDK.Common;
using Sportradar.OddsFeed.SDK.Common.Internal;
using Sportradar.OddsFeed.SDK.Entities;
using Sportradar.OddsFeed.SDK.Messages.Internal;
namespace Sportradar.OddsFeed.SDK.API.Internal
{
/// <summary>
/// Class used to track the progress of a single recovery operation
/// </summary>
/// <remarks>
/// The class is not thread safe
/// </remarks>
public class RecoveryOperation : IRecoveryOperation
{
/// <summary>
/// A <see cref="ILog"/> used for execution logging
/// </summary>
private static readonly ILog ExecutionLog = SdkLoggerFactory.GetLoggerForExecution(typeof(RecoveryOperation));
/// <summary>
/// The producer whose recovery is being tracked by current instance
/// </summary>
private readonly Producer _producer;
/// <summary>
/// A <see cref="IRecoveryRequestIssuer"/> used to start recovery requests
/// </summary>
private readonly IRecoveryRequestIssuer _recoveryRequestIssuer;
/// <summary>
/// The node id of the current feed instance or a null reference
/// </summary>
private readonly int _nodeId;
/// <summary>
/// A <see cref="MessageInterest"/> containing interests on all sessions
/// </summary>
private readonly List<MessageInterest> _allInterests;
/// <summary>
/// A <see cref="MessageInterest"/> of all sessions on which the snapshot was received
/// </summary>
private readonly List<MessageInterest> _snapshotReceivedSessions = new List<MessageInterest>();
/// <summary>
/// The current request id
/// </summary>
private long _requestId;
/// <summary>
/// The <see cref="DateTime"/> specifying when the current operation started
/// </summary>
private DateTime _startTime;
/// <summary>
/// The <see cref="DateTime"/> specifying the first time the recovery was interrupted
/// </summary>
internal DateTime? InterruptionTime;
/// <summary>
/// Gets a value indication whether the recovery operation is currently running.
/// </summary>
public bool IsRunning { get; private set; }
/// <summary>
/// Gets a request Id of the current recovery operation or a null reference if recovery is not running
/// </summary>
public long? RequestId => IsRunning ? (long?)_requestId : null;
/// <summary>
/// Gets the start time of the last issued recovery request.
/// </summary>
public DateTime LastStartTime => _startTime;
/// <summary>
/// Gets a value indicating whether [adjusted after age]
/// </summary>
/// <value><c>true</c> if [adjusted after age]; otherwise, <c>false</c></value>
private readonly bool _adjustedAfterAge;
/// <summary>
/// Initializes a new instance of the <see cref="RecoveryOperation"/> class
/// </summary>
/// <param name="producer">The producer whose recovery is being tracked by current instance</param>
/// <param name="recoveryRequestIssuer">A <see cref="IRecoveryRequestIssuer"/> used to start recovery requests</param>
/// <param name="allInterests">A <see cref="MessageInterest"/> containing interests on all sessions</param>
/// <param name="nodeId">The node id of the current feed instance</param>
/// <param name="adjustAfterAge">The value indicating whether the after age should be enforced before executing recovery request</param>
[SuppressMessage("ReSharper", "PossibleMultipleEnumeration")]
public RecoveryOperation(Producer producer, IRecoveryRequestIssuer recoveryRequestIssuer, IEnumerable<MessageInterest> allInterests, int nodeId, bool adjustAfterAge)
{
Guard.Argument(producer, nameof(producer)).NotNull();
Guard.Argument(recoveryRequestIssuer, nameof(recoveryRequestIssuer)).NotNull();
Guard.Argument(allInterests, nameof(allInterests)).NotNull();//.NotEmpty();
if (!allInterests.Any())
{
throw new ArgumentOutOfRangeException(nameof(allInterests));
}
_producer = producer;
_recoveryRequestIssuer = recoveryRequestIssuer;
_allInterests = allInterests as List<MessageInterest> ?? new List<MessageInterest>(allInterests);
_nodeId = nodeId;
_adjustedAfterAge = adjustAfterAge;
}
/// <summary>
/// Determines whether the snapshot was received on all required sessions
/// </summary>
/// <param name="requiredInterests"><see cref="MessageInterest"/>s on which the snapshot is expected</param>
/// <returns>True if the snapshot was received on all required sessions; False otherwise</returns>
private bool WereSnapshotsReceivedOnSessions(params MessageInterest[] requiredInterests)
{
return requiredInterests.Length <= _snapshotReceivedSessions.Count && requiredInterests.All(_snapshotReceivedSessions.Contains);
}
/// <summary>
/// Determines whether the snapshot was received on all required sessions
/// </summary>
/// <returns>True if the snapshot was received on all required sessions; False otherwise</returns>
private bool IsRecoveryDone()
{
bool done;
// if there is only one session, only snapshot from that session is needed :)
if (_allInterests.Count == 1)
{
done = WereSnapshotsReceivedOnSessions(_allInterests[0]);
}
// if there are hi & low priority sessions, the snapshot from high priority session is needed
else if (_allInterests.Count == 2
&& _allInterests.Contains(MessageInterest.LowPriorityMessages)
&& _allInterests.Contains(MessageInterest.HighPriorityMessages))
{
done = WereSnapshotsReceivedOnSessions(MessageInterest.HighPriorityMessages);
}
// if all interests are a combination of different message scopes, use the producer
// scopes to determine whether all snapshots were received
else if (_allInterests.Count <= MessageInterest.MessageScopes.Length && _allInterests.All(MessageInterest.MessageScopes.Contains))
{
done = _producer.Scope
.Select(MessageInterest.FromScope)
.All(interest => _snapshotReceivedSessions.Contains(interest));
}
else
{
throw new InvalidOperationException("The combination of all interests is not supported");
}
if (done)
{
_snapshotReceivedSessions.Clear();
}
return done;
}
/// <summary>
/// Attempts to start a recovery operation
/// </summary>
/// <returns>True if the operation was successfully started; False otherwise</returns>
/// <exception cref="InvalidOperationException">The recovery operation is already running</exception>
/// <exception cref="RecoveryInitiationException">The after parameter is to far in the past</exception>
public bool Start()
{
if (IsRunning)
{
ExecutionLog.Error($"{_producer.Name}: trying started recovery which is already in progress.");
return false;
}
var after = _producer.LastTimestampBeforeDisconnect;
try
{
if (after == DateTime.MinValue)
{
_requestId = _recoveryRequestIssuer.RequestFullOddsRecoveryAsync(_producer, _nodeId).Result;
}
else
{
if (TimeProviderAccessor.Current.Now > after + _producer.MaxAfterAge())
{
if (_adjustedAfterAge)
{
ExecutionLog.Info($"{_producer.Name}: After time {after} is adjusted.");
after = TimeProviderAccessor.Current.Now - _producer.MaxAfterAge() + TimeSpan.FromMinutes(1);
}
else
{
throw new RecoveryInitiationException("The after parameter is to far in the past", after);
}
}
_requestId = _recoveryRequestIssuer.RequestRecoveryAfterTimestampAsync(_producer, after, _nodeId).Result;
}
}
catch (Exception ex)
{
var actualException = ex.InnerException ?? ex;
ExecutionLog.Error($"{_producer.Name} There was an error requesting recovery. Exception: {actualException.Message}");
if (ex is RecoveryInitiationException)
{
throw;
}
return false;
}
IsRunning = true;
_startTime = TimeProviderAccessor.Current.Now;
InterruptionTime = null;
return true;
}
/// <summary>
/// Stores the time when the operation was interrupted if this is the fist interruption.
/// Otherwise it does nothing
/// </summary>
/// <param name="interruptionTime">A <see cref="DateTime"/> specifying to when to set the interruption time</param>
/// <exception cref="InvalidOperationException">The recovery operation is not running</exception>
public void Interrupt(DateTime interruptionTime)
{
if (!IsRunning)
{
ExecutionLog.Error($"{_producer.Name}: trying to interrupt recovery which is not running.");
return;
}
if (InterruptionTime.HasValue)
{
return;
}
InterruptionTime = interruptionTime;
}
/// <summary>
/// Determines whether the current operation has timed-out
/// </summary>
/// <returns>True if the operation timed-out; Otherwise false</returns>
/// <exception cref="InvalidOperationException">The recovery operation is not running</exception>
public bool HasTimedOut()
{
if (!IsRunning)
{
ExecutionLog.Error($"{_producer.Name}: trying started recovery which is not running.");
return false;
}
return (TimeProviderAccessor.Current.Now - _startTime).TotalSeconds > _producer.MaxRecoveryTime;
}
/// <summary>
/// Stops the recovery operation if all snapshots were received
/// </summary>
/// <param name="interest">The <see cref="MessageInterest"/> of the session which received the snapshot message</param>
/// <param name="result">If the operation was successfully completed, it contains the results of the completed recovery</param>
/// <returns>True if the recovery operation could be completed; False otherwise</returns>
/// <exception cref="InvalidOperationException">The recovery operation is not running</exception>
public bool TryComplete(MessageInterest interest, out RecoveryResult result)
{
result = null;
if (!IsRunning)
{
ExecutionLog.Error($"{_producer.Name}: trying to complete recovery which is not running.");
return false;
}
_snapshotReceivedSessions.Add(interest);
if (IsRecoveryDone())
{
result = RecoveryResult.ForSuccess(_requestId, _startTime, InterruptionTime);
IsRunning = false;
return true;
}
if (HasTimedOut())
{
result = RecoveryResult.ForTimeOut(_requestId, _startTime);
IsRunning = false;
return true;
}
return false;
}
/// <summary>
/// Completes the timed-out recovery operation
/// </summary>
/// <returns>A <see cref="RecoveryResult"/> containing recovery info</returns>
/// <exception cref="InvalidOperationException">The recovery operation is not running or it has not timed-out</exception>
public RecoveryResult CompleteTimedOut()
{
if (!IsRunning)
{
ExecutionLog.Error($"{_producer.Name}: trying to CompleteTimedOut recovery which is not running.");
return null;
}
if (!HasTimedOut())
{
ExecutionLog.Error($"{_producer.Name}: trying to CompleteTimedOut recovery which is not timed-out.");
return null;
}
IsRunning = false;
return RecoveryResult.ForTimeOut(_requestId, _startTime);
}
/// <summary>
/// Resets the operation to it's default (not started) state. If operation is already not started, it does nothing.
/// </summary>
public void Reset()
{
IsRunning = false;
}
}
}
| 42.061728 | 173 | 0.593851 | [
"Apache-2.0"
] | sportradar/UnifiedOddsSdkNet | src/Sportradar.OddsFeed.SDK.API/Internal/RecoveryOperation.cs | 13,630 | C# |
// Copyright (c) Peter Palotas
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
namespace Alphaleonis.Reflection
{
/// <summary>
/// Compares two <see cref="Type"/> instances for equality, hashing by the
/// underlying system type so that <c>Type</c> and <c>TypeInfo</c> wrappers of
/// the same type hash identically.
/// </summary>
[Serializable]
public sealed class TypeEqualityComparer : IEqualityComparer<Type>
{
    private static readonly TypeEqualityComparer s_default = new TypeEqualityComparer();

    /// <summary>Gets the shared default instance of this comparer.</summary>
    public static IEqualityComparer<Type> Default => s_default;

    /// <summary>
    /// Returns true when both types are null, or when <paramref name="x"/>
    /// reports equality with <paramref name="y"/>.
    /// </summary>
    /// <param name="x">First type to compare; may be null.</param>
    /// <param name="y">Second type to compare; may be null.</param>
    public bool Equals(Type x, Type y) => x is null ? y is null : x.Equals(y);

    /// <summary>
    /// Returns 0 for null; otherwise the hash code of the underlying system
    /// type (Type and TypeInfo would otherwise hash differently).
    /// </summary>
    /// <param name="obj">Type to hash; may be null.</param>
    public int GetHashCode(Type obj) => obj is null ? 0 : obj.UnderlyingSystemType.GetHashCode();
}
}
| 35.545455 | 144 | 0.650639 | [
"MIT"
] | alphaleonis/Alphaleonis.Reflection | src/Alphaleonis.Reflection/Utilities/TypeEqualityComparer.cs | 1,955 | C# |
// ======================================================================
//
// filename : ExporterHeaderAttribute.cs
// description :
//
// created by 雪雁 at 2019-09-11 13:51
// 文档官网:https://docs.xin-lai.com
// 公众号教程:麦扣聊技术
// QQ群:85318032(编程交流)
// Blog:http://www.cnblogs.com/codelove/
//
// ======================================================================
using System;
namespace Magicodes.ExporterAndImporter.Core
{
/// <summary>
/// Declares how a property is rendered when exported: header text, font,
/// cell format, sizing, alignment and column ordering.
/// </summary>
[AttributeUsage(AttributeTargets.Property)]
public class ExporterHeaderAttribute : Attribute
{
    /// <summary>
    /// Creates the attribute with the given header settings.
    /// </summary>
    /// <param name="displayName">Column header text; null keeps the default.</param>
    /// <param name="fontSize">Header font size (default 11).</param>
    /// <param name="format">Cell format string, e.g. a date or number format.</param>
    /// <param name="isBold">Whether the header text is bold.</param>
    /// <param name="isAutoFit">Whether the column width auto-fits its content.</param>
    /// <param name="autoCenterColumn">Whether cell content is centered.</param>
    /// <param name="width">Fixed column width; 0 means unspecified.</param>
    public ExporterHeaderAttribute(string displayName = null, float fontSize = 11, string format = null,
        bool isBold = true, bool isAutoFit = true, bool autoCenterColumn = false, int width = 0)
    {
        DisplayName = displayName;
        Format = format;
        FontSize = fontSize;
        IsBold = isBold;
        IsAutoFit = isAutoFit;
        AutoCenterColumn = autoCenterColumn;
        Width = width;
    }

    /// <summary>Column header text shown in the exported file.</summary>
    public string DisplayName { get; set; }

    /// <summary>Header font size; always set by the constructor.</summary>
    public float? FontSize { get; set; }

    /// <summary>Whether the header text is rendered bold.</summary>
    public bool IsBold { get; set; }

    /// <summary>Cell value format string.</summary>
    public string Format { get; set; }

    /// <summary>Whether the column auto-fits its width.</summary>
    public bool IsAutoFit { get; set; }

    /// <summary>Whether cell content is horizontally centered.</summary>
    public bool AutoCenterColumn { get; set; }

    /// <summary>Excludes the property from the export when true.</summary>
    public bool IsIgnore { get; set; }

    /// <summary>Fixed column width; ignored when 0.</summary>
    public int Width { get; set; }

    /// <summary>Column ordering key; lower values appear first (default 10000).</summary>
    public int ColumnIndex { get; set; } = 10000;

    /// <summary>Whether cell text wraps.</summary>
    public bool WrapText { get; set; }

    /// <summary>Hides the column when true.</summary>
    public bool Hidden { get; set; }
}
} | 25.645161 | 108 | 0.440252 | [
"MIT"
] | AaronCore/Magicodes.IE | src/Magicodes.ExporterAndImporter.Core/ExporterHeaderAttribute.cs | 2,525 | C# |
/*
* Copyright(c) 2019-2021 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System.ComponentModel;
namespace Tizen.NUI.BaseComponents
{
/// <summary>
/// The View layout Direction type.
/// </summary>
/// <since_tizen> 4 </since_tizen>
public enum ViewLayoutDirectionType
{
/// <summary>
/// Left to right.
/// </summary>
/// <since_tizen> 4 </since_tizen>
LTR,
/// <summary>
/// Right to left.
/// </summary>
/// <since_tizen> 4 </since_tizen>
RTL
}
/// <summary>
/// [Draft] Available policies for layout parameters.
/// These are sentinel values (negative, so they cannot collide with real pixel sizes)
/// used in place of an explicit width/height specification.
/// </summary>
/// This will be public opened in tizen_5.5 after ACR done. Before ACR, need to be hidden as inhouse API.
[EditorBrowsable(EditorBrowsableState.Never)]
public static class LayoutParamPolicies
{
/// <summary>
/// Constant which indicates child size should match parent size
/// </summary>
/// This will be public opened in tizen_5.5 after ACR done. Before ACR, need to be hidden as inhouse API.
[EditorBrowsable(EditorBrowsableState.Never)]
public const int MatchParent = -1;
/// <summary>
/// Constant which indicates parent should take the smallest size possible to wrap its children with their desired size
/// </summary>
/// This will be public opened in tizen_5.5 after ACR done. Before ACR, need to be hidden as inhouse API.
[EditorBrowsable(EditorBrowsableState.Never)]
public const int WrapContent = -2;
}
// Status of a resource loading operation. Internal use only.
// NOTE(review): values appear to mirror the native-side loading states — confirm against the DALi enum.
internal enum ResourceLoadingStatusType
{
Invalid = -1,  // no valid loading state (e.g. nothing requested)
Preparing = 0, // loading is in progress
Ready,         // resource loaded successfully
Failed,        // loading finished with an error
}
/// <summary>
/// View is the base class for all views.
/// </summary>
/// <since_tizen> 3 </since_tizen>
public partial class View
{
/// <summary>
/// Enumeration for describing the states of the view.
/// The <see cref="DescriptionAttribute"/> on each member carries the
/// corresponding native state name string.
/// </summary>
/// <since_tizen> 3 </since_tizen>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1717:Only FlagsAttribute enums should have plural names")]
public enum States
{
/// <summary>
/// The normal state.
/// </summary>
[Description("NORMAL")]
Normal,
/// <summary>
/// The focused state.
/// </summary>
[Description("FOCUSED")]
Focused,
/// <summary>
/// The disabled state.
/// </summary>
[Description("DISABLED")]
Disabled
}
/// <summary>
/// Describes the direction to move the keyboard focus towards.
/// </summary>
/// <since_tizen> 3 </since_tizen>
public enum FocusDirection
{
/// <summary>
/// Move keyboard focus towards the left direction.
/// </summary>
/// <since_tizen> 3 </since_tizen>
Left,
/// <summary>
/// Move keyboard focus towards the right direction.
/// </summary>
/// <since_tizen> 3 </since_tizen>
Right,
/// <summary>
/// Move keyboard focus towards the up direction.
/// </summary>
/// <since_tizen> 3 </since_tizen>
Up,
/// <summary>
/// Move keyboard focus towards the down direction.
/// </summary>
/// <since_tizen> 3 </since_tizen>
Down,
/// <summary>
/// Move keyboard focus towards the previous page direction.
/// </summary>
/// <since_tizen> 3 </since_tizen>
PageUp,
/// <summary>
/// Move keyboard focus towards the next page direction.
/// </summary>
/// <since_tizen> 3 </since_tizen>
PageDown
}
// Index range reserved for control properties registered by View.
// The control range starts at the shared registration start index and
// spans 1000 slots. Internal use only.
internal enum PropertyRange
{
PROPERTY_START_INDEX = PropertyRanges.PROPERTY_REGISTRATION_START_INDEX,
CONTROL_PROPERTY_START_INDEX = PROPERTY_START_INDEX,
CONTROL_PROPERTY_END_INDEX = CONTROL_PROPERTY_START_INDEX + 1000
}
// Caches the native property indices used by View. Each field is resolved once,
// at type initialization, through the corresponding Interop.ViewProperty /
// Interop.ActorProperty getter. The mixed UPPER_CASE / PascalCase naming mirrors
// the existing convention in this file; renaming would break other files of this
// partial class that reference these fields. Internal use only.
internal class Property
{
internal static readonly int TOOLTIP = Interop.ViewProperty.TooltipGet();
internal static readonly int STATE = Interop.ViewProperty.StateGet();
internal static readonly int SubState = Interop.ViewProperty.SubStateGet();
internal static readonly int LeftFocusableViewId = Interop.ViewProperty.LeftFocusableActorIdGet();
internal static readonly int RightFocusableViewId = Interop.ViewProperty.RightFocusableActorIdGet();
internal static readonly int UpFocusableViewId = Interop.ViewProperty.UpFocusableActorIdGet();
internal static readonly int DownFocusableViewId = Interop.ViewProperty.DownFocusableActorIdGet();
internal static readonly int StyleName = Interop.ViewProperty.StyleNameGet();
internal static readonly int KeyInputFocus = Interop.ViewProperty.KeyInputFocusGet();
internal static readonly int BACKGROUND = Interop.ViewProperty.BackgroundGet();
// Actor-level (transform / visibility / hierarchy) property indices.
internal static readonly int SiblingOrder = Interop.ActorProperty.SiblingOrderGet();
internal static readonly int OPACITY = Interop.ActorProperty.OpacityGet();
internal static readonly int ScreenPosition = Interop.ActorProperty.ScreenPositionGet();
internal static readonly int PositionUsesAnchorPoint = Interop.ActorProperty.PositionUsesAnchorPointGet();
internal static readonly int ParentOrigin = Interop.ActorProperty.ParentOriginGet();
internal static readonly int ParentOriginX = Interop.ActorProperty.ParentOriginXGet();
internal static readonly int ParentOriginY = Interop.ActorProperty.ParentOriginYGet();
internal static readonly int ParentOriginZ = Interop.ActorProperty.ParentOriginZGet();
internal static readonly int AnchorPoint = Interop.ActorProperty.AnchorPointGet();
internal static readonly int AnchorPointX = Interop.ActorProperty.AnchorPointXGet();
internal static readonly int AnchorPointY = Interop.ActorProperty.AnchorPointYGet();
internal static readonly int AnchorPointZ = Interop.ActorProperty.AnchorPointZGet();
internal static readonly int SIZE = Interop.ActorProperty.SizeGet();
internal static readonly int SizeWidth = Interop.ActorProperty.SizeWidthGet();
internal static readonly int SizeHeight = Interop.ActorProperty.SizeHeightGet();
internal static readonly int SizeDepth = Interop.ActorProperty.SizeDepthGet();
internal static readonly int POSITION = Interop.ActorProperty.PositionGet();
internal static readonly int PositionX = Interop.ActorProperty.PositionXGet();
internal static readonly int PositionY = Interop.ActorProperty.PositionYGet();
internal static readonly int PositionZ = Interop.ActorProperty.PositionZGet();
internal static readonly int WorldPosition = Interop.ActorProperty.WorldPositionGet();
internal static readonly int WorldPositionX = Interop.ActorProperty.WorldPositionXGet();
internal static readonly int WorldPositionY = Interop.ActorProperty.WorldPositionYGet();
internal static readonly int WorldPositionZ = Interop.ActorProperty.WorldPositionZGet();
internal static readonly int ORIENTATION = Interop.ActorProperty.OrientationGet();
internal static readonly int WorldOrientation = Interop.ActorProperty.WorldOrientationGet();
internal static readonly int SCALE = Interop.ActorProperty.ScaleGet();
internal static readonly int ScaleX = Interop.ActorProperty.ScaleXGet();
internal static readonly int ScaleY = Interop.ActorProperty.ScaleYGet();
internal static readonly int ScaleZ = Interop.ActorProperty.ScaleZGet();
internal static readonly int WorldScale = Interop.ActorProperty.WorldScaleGet();
internal static readonly int VISIBLE = Interop.ActorProperty.VisibleGet();
internal static readonly int WorldColor = Interop.ActorProperty.WorldColorGet();
internal static readonly int WorldMatrix = Interop.ActorProperty.WorldMatrixGet();
internal static readonly int NAME = Interop.ActorProperty.NameGet();
internal static readonly int SENSITIVE = Interop.ActorProperty.SensitiveGet();
internal static readonly int LeaveRequired = Interop.ActorProperty.LeaveRequiredGet();
internal static readonly int InheritOrientation = Interop.ActorProperty.InheritOrientationGet();
internal static readonly int InheritScale = Interop.ActorProperty.InheritScaleGet();
internal static readonly int DrawMode = Interop.ActorProperty.DrawModeGet();
internal static readonly int SizeModeFactor = Interop.ActorProperty.SizeModeFactorGet();
internal static readonly int WidthResizePolicy = Interop.ActorProperty.WidthResizePolicyGet();
internal static readonly int HeightResizePolicy = Interop.ActorProperty.HeightResizePolicyGet();
internal static readonly int SizeScalePolicy = Interop.ActorProperty.SizeScalePolicyGet();
internal static readonly int WidthForHeight = Interop.ActorProperty.WidthForHeightGet();
internal static readonly int HeightForWidth = Interop.ActorProperty.HeightForWidthGet();
internal static readonly int MinimumSize = Interop.ActorProperty.MinimumSizeGet();
internal static readonly int MaximumSize = Interop.ActorProperty.MaximumSizeGet();
internal static readonly int InheritPosition = Interop.ActorProperty.InheritPositionGet();
internal static readonly int ClippingMode = Interop.ActorProperty.ClippingModeGet();
internal static readonly int InheritLayoutDirection = Interop.ActorProperty.InheritLayoutDirectionGet();
internal static readonly int LayoutDirection = Interop.ActorProperty.LayoutDirectionGet();
// Back to View-level (styling) property indices.
internal static readonly int MARGIN = Interop.ViewProperty.MarginGet();
internal static readonly int PADDING = Interop.ViewProperty.PaddingGet();
internal static readonly int SHADOW = Interop.ViewProperty.ShadowGet();
internal static readonly int CaptureAllTouchAfterStart = Interop.ActorProperty.CaptureAllTouchAfterStartGet();
internal static readonly int BlendEquation = Interop.ActorProperty.BlendEquationGet();
internal static readonly int Culled = Interop.ActorProperty.CulledGet();
// Accessibility property indices.
internal static readonly int AccessibilityName = Interop.ViewProperty.AccessibilityNameGet();
internal static readonly int AccessibilityDescription = Interop.ViewProperty.AccessibilityDescriptionGet();
internal static readonly int AccessibilityTranslationDomain = Interop.ViewProperty.AccessibilityTranslationDomainGet();
internal static readonly int AccessibilityRole = Interop.ViewProperty.AccessibilityRoleGet();
internal static readonly int AccessibilityHighlightable = Interop.ViewProperty.AccessibilityHighlightableGet();
internal static readonly int AccessibilityAttributes = Interop.ViewProperty.AccessibilityAttributesGet();
internal static readonly int AccessibilityAnimated = Interop.ViewProperty.AccessibilityAnimatedGet();
internal static readonly int TouchArea = Interop.ActorProperty.TouchAreaGet();
}
}
}
| 54.72 | 131 | 0.672677 | [
"Apache-2.0",
"MIT"
] | AchoWang/TizenFX | src/Tizen.NUI/src/public/BaseComponents/ViewEnum.cs | 12,314 | C# |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.