content
stringlengths
23
1.05M
// Developed by Softeq Development Corporation
// http://www.softeq.com

using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Input;
using Softeq.XToolkit.Chat.Interfaces;
using Softeq.XToolkit.Chat.Models;
using Softeq.XToolkit.Chat.Models.Queries;
using Softeq.XToolkit.Common;
using Softeq.XToolkit.Common.Collections;
using Softeq.XToolkit.Common.Commands;
using Softeq.XToolkit.Common.Extensions;

namespace Softeq.XToolkit.Chat.ViewModels
{
    /// <summary>
    /// View model for a single chat's message list. Messages are kept in a grouped,
    /// sorted collection keyed by calendar day. While visible (between OnAppearing and
    /// OnDisappearing) it subscribes to manager events for add/edit/delete/read updates,
    /// filters them by this chat's id, and marks incoming messages as read.
    /// </summary>
    public class ChatMessagesListViewModel : ObservableObject
    {
        // Page size for the first load vs. subsequent "scroll up" history loads.
        private const int InitialReadMessagesBatchCount = 20;
        private const int OlderMessagesBatchCount = 50;

        private readonly string _chatId;
        private readonly IChatMessagesManager _chatManager;
        // Callback invoked whenever messages are appended (e.g. so the view can scroll).
        private readonly Action _messageAdded;
        private readonly IList<IDisposable> _subscriptions = new List<IDisposable>();

        // True once the initial batch has been requested; later appearances refresh instead.
        private bool _areLatestMessagesLoaded;
        // Set when the backend returns an empty page, i.e. full history is loaded.
        private bool _areOlderMessagesLoaded;

        public ChatMessagesListViewModel(
            string chatId,
            IChatMessagesManager chatManager,
            Action messageAdded)
        {
            _chatId = chatId;
            _chatManager = chatManager;
            _messageAdded = messageAdded;

            LoadOlderMessagesCommand = new RelayCommand(() => LoadOlderMessagesAsync().FireAndForget());
        }

        /// <summary>Bound by the view to request the next (older) history page.</summary>
        public ICommand LoadOlderMessagesCommand { get; }

        /// <summary>
        /// Messages grouped by day (DateTime.Date) and sorted by timestamp within each group.
        /// </summary>
        public ObservableKeyGroupsCollection<DateTimeOffset, ChatMessageViewModel> Messages { get; }
            = new ObservableKeyGroupsCollection<DateTimeOffset, ChatMessageViewModel>(
                message => message.DateTime.Date,
                (x, y) => x.CompareTo(y),
                (x, y) => x.DateTime.CompareTo(y.DateTime));

        /// <summary>
        /// Hooks up manager event subscriptions and loads (first time) or refreshes
        /// (subsequent times) the message list. Call when the view becomes visible.
        /// </summary>
        public void OnAppearing()
        {
            Subscribe(_chatManager.MessageAdded, OnMessageReceived);
            Subscribe(_chatManager.MessageEdited, OnMessageEdited);
            Subscribe(_chatManager.MessageDeleted, OnMessageDeleted);
            Subscribe(_chatManager.MessagesBatchAdded, OnMessagesBatchReceived);
            Subscribe(_chatManager.MessagesBatchUpdated, OnMessagesBatchUpdated);
            Subscribe(_chatManager.MessagesBatchDeleted, OnMessagesBatchDeleted);
            Subscribe(_chatManager.ChatRead, OnChatRead);

            if (!_areLatestMessagesLoaded)
            {
                LoadInitialMessagesAsync().FireAndForget();
                _areLatestMessagesLoaded = true;
            }
            else
            {
                RefreshMessagesAsync().FireAndForget();
            }

            Messages.ItemsChanged += OnMessagesAddedToCollection;

            MarkMessagesAsRead();
        }

        /// <summary>Tears down subscriptions. Call when the view is hidden.</summary>
        public void OnDisappearing()
        {
            RemoveAllSubscriptions();
            Messages.ItemsChanged -= OnMessagesAddedToCollection;
        }

        private void OnMessageDeleted(string deletedMessageId)
        {
            DeleteAllMessages(x => x.Id == deletedMessageId);
        }

        private void OnMessageReceived(ChatMessageViewModel messageViewModel)
        {
            // Events arrive for all chats; only take messages addressed to this one.
            if (messageViewModel.ChatId == _chatId)
            {
                AddNewMessages(new List<ChatMessageViewModel> { messageViewModel });
            }
        }

        private void OnMessagesBatchReceived(IList<ChatMessageViewModel> messages)
        {
            var messagesToAdd = messages.Where(x => x.ChatId == _chatId).ToList();
            AddNewMessages(messagesToAdd);
        }

        private void OnMessageEdited(ChatMessageModel messageModel)
        {
            bool WasUpdated(ChatMessageViewModel x) => x.Id == messageModel.Id;

            // Find the edited message (by id) across all day groups and refresh its model.
            Messages.Where(x => x.Any(WasUpdated))
                    .SelectMany(x => x)
                    .Where(WasUpdated)
                    .Apply(x => x.UpdateMessageModel(messageModel));
        }

        private void OnMessagesBatchUpdated(IList<ChatMessageModel> messagesModels)
        {
            foreach (var m in messagesModels)
            {
                OnMessageEdited(m);
            }
        }

        private void OnMessagesBatchDeleted(IList<string> deletedMessagesIds)
        {
            DeleteAllMessages(x => deletedMessagesIds.Contains(x.Id));
        }

        private void OnChatRead(string chatId)
        {
            if (chatId != _chatId)
            {
                return;
            }

            // The other party read the chat: flag all of our own messages as read.
            Messages.SelectMany(x => x)
                    .Where(x => x.IsMine)
                    .Apply(x => x.MarkAsRead());
        }

        private void AddNewMessages(IList<ChatMessageViewModel> messages)
        {
            Messages.AddRangeToGroupsSorted(messages);
            _messageAdded.Invoke();
            MarkMessagesAsRead();
        }

        // TODO YP: check frequency of call this method
        private async Task LoadOlderMessagesAsync()
        {
            if (_areOlderMessagesLoaded)
            {
                return;
            }

            var oldestMessage = Messages.FirstOrDefaultValue();
            if (oldestMessage == null)
            {
                // Nothing loaded yet — fall back to the initial load.
                await LoadInitialMessagesAsync();
                return;
            }

            // Page backwards from the oldest message currently displayed.
            var query = new MessagesQuery
            {
                ChannelId = _chatId,
                FromId = oldestMessage.Id,
                FromDateTime = oldestMessage.DateTime,
                Count = OlderMessagesBatchCount
            };

            var olderMessages = await _chatManager.LoadOlderMessagesAsync(query);
            if (olderMessages.Any())
            {
                AddMessages(olderMessages);
            }
            else
            {
                // empty list = no old messages
                _areOlderMessagesLoaded = true;
            }
        }

        private async Task LoadInitialMessagesAsync()
        {
            var messages = await _chatManager.LoadInitialMessagesAsync(_chatId, InitialReadMessagesBatchCount);
            AddMessages(messages);
        }

        // Re-fetches the latest page and merges it into the existing collection,
        // so messages that arrived while the view was hidden are picked up.
        private async Task RefreshMessagesAsync()
        {
            _areOlderMessagesLoaded = false;

            var lastMessages = await _chatManager.LoadInitialMessagesAsync(_chatId, InitialReadMessagesBatchCount);

            var lastMessagesSortedGroup = new ObservableKeyGroupsCollection<DateTimeOffset, ChatMessageViewModel>(
                message => message.DateTime.Date,
                (x, y) => x.CompareTo(y),
                (x, y) => x.DateTime.CompareTo(y.DateTime));
            lastMessagesSortedGroup.AddRangeToGroupsSorted(lastMessages);

            Messages.UnionSortedGroups(lastMessagesSortedGroup, new MessagesGroupComparer());

            _messageAdded.Invoke();
        }

        private void OnMessagesAddedToCollection(object sender, NotifyKeyGroupsCollectionChangedEventArgs e)
        {
            if (e.Action == NotifyCollectionChangedAction.Add
                || e.Action == NotifyCollectionChangedAction.Reset)
            {
                MarkMessagesAsRead();
            }
        }

        // async void: fire-and-forget event-style handler; exceptions here are unobservable.
        private async void MarkMessagesAsRead()
        {
            if (Messages.Count == 0)
            {
                return;
            }

            // Only the newest incoming (not ours, not yet read) message needs the read receipt.
            var lastMessage = Messages.SelectMany(x => x).Last();
            if (!lastMessage.IsRead && !lastMessage.IsMine)
            {
                await _chatManager.MarkMessageAsReadAsync(_chatId, lastMessage.Id).ConfigureAwait(false);
            }
        }

        private void AddMessages(IList<ChatMessageViewModel> messages)
        {
            // Skip messages that are already displayed (dedup by collection equality).
            var messagesForAdd = messages.Except(Messages.Values).ToList();
            Messages.AddRangeToGroupsSorted(messagesForAdd);
        }

        // TODO YP: unused
        //private void ClearMessages()
        //{
        //    _areLatestMessagesLoaded = false;
        //    Messages.ItemsChanged -= OnMessagesAddedToCollection;
        //    Messages.ClearAll();
        //}

        private void DeleteAllMessages(Func<ChatMessageViewModel, bool> predicate)
        {
            // Materialize before removal so we do not mutate while enumerating.
            var messagesToDelete = Messages
                .Where(x => x.Any(predicate))
                .SelectMany(x => x)
                .Where(predicate)
                .ToList();
            Messages.RemoveAllFromGroups(messagesToDelete);
        }

        private void Subscribe<T>(IObservable<T> observer, Action<T> handler)
        {
            _subscriptions.Add(observer.Subscribe(handler));
        }

        private void RemoveAllSubscriptions()
        {
            _subscriptions.Apply(x => x.Dispose());
        }

        // Equality by the view models' own Equals/GetHashCode, used when merging refreshed pages.
        private class MessagesGroupComparer : IEqualityComparer<ChatMessageViewModel>
        {
            public bool Equals(ChatMessageViewModel x, ChatMessageViewModel y)
            {
                return x.Equals(y);
            }

            public int GetHashCode(ChatMessageViewModel obj)
            {
                return obj.GetHashCode();
            }
        }
    }
}
using System;

namespace Presentacion1.modelo
{
    /// <summary>
    /// Basic user entity holding a numeric identifier and a display name.
    /// </summary>
    public class Usuario
    {
        // Backing fields are protected so derived entities can reach them directly.
        protected int id;
        protected String nombre;

        /// <summary>Creates a user whose name has not been assigned yet.</summary>
        public Usuario()
        {
            nombre = null;
        }

        /// <summary>Numeric identifier of the user.</summary>
        public int ID
        {
            get => id;
            set => id = value;
        }

        /// <summary>Display name of the user; may be null.</summary>
        public String Nombre
        {
            get => nombre;
            set => nombre = value;
        }
    }
}
using System;

namespace Voxels.Networking.Serverside
{
    /// <summary>
    /// Base class for server-side dynamic (moving) entities. Owns a mover that is
    /// updated every Tick; subclasses override the serialization hooks to ship
    /// state to clients.
    /// </summary>
    public abstract class DynamicEntityServerside
    {
        // Unique identifier of this entity instance.
        public uint UID;

        // Type discriminator; base returns null — subclasses are expected to override.
        public virtual string EntityType { get; }

        // Movement driver for this entity; created once in the constructor.
        public VoxelMoverServerside Mover { get; }

        public DynamicEntityServerside()
        {
            Mover = new VoxelMoverServerside(this);
        }

        // One-time setup hook; must be provided by concrete entities.
        public abstract void Init();

        // State (de)serialization hooks; base implementations carry no state.
        public virtual void DeserializeState(byte[] state) { }

        public virtual byte[] SerializeState() { return Array.Empty<byte>(); }

        public virtual byte[] SerializeViewState() { return Array.Empty<byte>(); }

        // NOTE(review): these client-RPC helpers are currently empty no-ops —
        // confirm whether the implementation lives elsewhere or is still pending.
        public void CallMethodOnClient(string methodName) { }

        public void SetPropOnClient(string propName, object value) { }

        // Per-simulation-tick update: advances the mover.
        public virtual void Tick() { Mover.Update(); }

        // Per-network-tick update: checks whether a position update should be sent.
        public virtual void NetTick() { if ( Mover.NeedSendUpdate ) { //TODO: send position update
            } }
    }
}
namespace Stripe.Client.Sdk.Models
{
    /// <summary>
    /// Outcome of a Stripe API call: either a deserialized model or the error
    /// reported by the service.
    /// </summary>
    public class StripeResponse<TModel>
    {
        /// <summary>Deserialized payload; meaningful only when <see cref="Success"/> is true.</summary>
        public TModel Model { get; set; }

        /// <summary>Error details returned by Stripe, or null when the call succeeded.</summary>
        public StripeError Error { get; set; }

        /// <summary>True when no error was reported by the service.</summary>
        public bool Success
        {
            get { return Error == null; }
        }
    }
}
//Stella

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// responsible for starting screw guide placement and handling its events
/// </summary>
public class ScrewGuidePlacer : MonoBehaviour
{
    // Switch the shared collection into placement mode as soon as this object starts.
    private void Start()
    {
        ScrewGuideCollection.Instance.enterPhase(ScrewGuideCollection.Phase.Placement);
    }

    //called via global speech command on this gameObject
    public void newGuide()
    {
        //Create only if no screw is being adjusted
        //Currently we only have one type of screw that can be placed
        if (ScrewGuideCollection.Instance.focusedScrewGuide == null)
            ScrewGuideCollection.Instance.createScrewGuide("Screw");
    }

    //called via global speech command on this gameObject
    public void placeGuide()
    {
        // Finalize placement of the guide currently in focus, if any.
        if (ScrewGuideCollection.Instance.focusedScrewGuide != null)
            ScrewGuideCollection.Instance.focusedScrewGuide.placement.place();
    }
}
using System.ComponentModel.DataAnnotations;

namespace BooksApiMongoDb.Models
{
    /// <summary>
    /// Incoming representation of a book used by the API for create/update requests.
    /// </summary>
    public class ViewBook
    {
        [Required]
        public string BookName { get; set; }

        [Required]
        public string Author { get; set; }

        public string Editor { get; set; }

        public string Language { get; set; }

        // ISBN-10 / ISBN-13 are 10 and 13 characters long. The previous upper bound
        // of 14 let a 14-character value pass validation even though the error
        // message promised a 10-13 range.
        [Required]
        [StringLength(13, MinimumLength = 10, ErrorMessage = "ISBN must be between 10 and 13")]
        public string ISBN { get; set; }

        public string Dimensions { get; set; }

        [Required]
        public decimal Price { get; set; }
    }
}
using System.Collections.Generic;
using HoneydewCore.Logging;
using HoneydewExtractors.CSharp.Metrics.Extraction.Class.Relations;
using HoneydewExtractors.Processors;
using HoneydewModels.CSharp;
using HoneydewModels.Types;
using Moq;
using Xunit;

namespace HoneydewExtractorsTests.Processors
{
    // Tests for FullNameModelProcessor's dependency-name resolution: metric
    // dictionaries keyed by short class names should be rewritten to fully
    // qualified names only when the referenced class can be located in the
    // solution (via the class's using/import list).
    public class FullNameDependencyProcessorTests
    {
        private readonly FullNameModelProcessor _sut;
        private readonly Mock<ILogger> _loggerMock = new();
        private readonly Mock<ILogger> _ambiguousClassLoggerMock = new();
        private readonly Mock<IProgressLogger> _progressLoggerMock = new();
        private readonly Mock<IProgressLoggerBar> _progressLoggerBarMock = new();

        public FullNameDependencyProcessorTests()
        {
            _sut = new FullNameModelProcessor(_loggerMock.Object, _ambiguousClassLoggerMock.Object, _progressLoggerMock.Object, false);
        }

        [Fact]
        public void GetFunction_ShouldReturnTheSameClassNames_WhenGivenClassNamesThatCouldNotBeLocatedInSolution()
        {
            var solutionModel = new SolutionModel();

            // Arrange: five classes whose metrics reference "Dependency1"/"Dependency2" —
            // names that do not exist anywhere in the solution.
            ClassModel classModel1 = new()
            {
                Name = "Models.Class1",
                FilePath = "path/Models/Class1.cs"
            };
            classModel1.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Dependency1", 1 }
                }
            });

            ClassModel classModel2 = new()
            {
                Name = "Services.Class2",
                FilePath = "path/Services/Class2.cs"
            };
            classModel2.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Dependency1", 2 }
                }
            });

            ClassModel classModel3 = new()
            {
                Name = "Controllers.Class3",
                FilePath = "path/Controllers/Class3.cs"
            };
            classModel3.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Dependency1", 6 },
                    { "Dependency2", 2 }
                }
            });

            ClassModel classModel4 = new()
            {
                Name = "Domain.Data.Class4",
                FilePath = "path/Domain/Data/Class4.cs"
            };
            classModel4.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Dependency2", 2 }
                }
            });

            ClassModel classModel5 = new()
            {
                Name = "Controllers.Class5",
                FilePath = "path/Controllers/Class5.cs"
            };
            classModel5.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>()
            });

            // One compilation unit per class.
            var projectModel = new ProjectModel();
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel1 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel2 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel3 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel4 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel5 }
            });

            var repositoryModel = new RepositoryModel();
            repositoryModel.Solutions.Add(solutionModel);
            repositoryModel.Projects.Add(projectModel);

            // Stub the progress bars the processor requests while resolving names.
            _progressLoggerMock.Setup(logger => logger.CreateProgressLogger(5, "Resolving Class Names"))
                .Returns(_progressLoggerBarMock.Object);
            _progressLoggerMock.Setup(logger => logger.CreateProgressLogger(5, "Resolving Using Statements for Each Class"))
                .Returns(_progressLoggerBarMock.Object);
            _progressLoggerMock.Setup(logger =>
                    logger.CreateProgressLogger(5, "Resolving Class Elements (Fields, Methods, Properties,...)"))
                .Returns(_progressLoggerBarMock.Object);

            // Act.
            var processedRepositoryModel = _sut.Process(repositoryModel);

            var processedProjectModel = processedRepositoryModel.Projects[0];

            // Assert: no dictionary gained a fully-qualified key for the unresolvable names.
            Assert.False(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[0].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Full.Path.Dependency1", out _));
            Assert.False(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[1].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Full.Path.Dependency1", out _));
            Assert.False(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[2].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Full.Path.Dependency1", out _));
            Assert.False(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[2].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Full.Path.Dependency2", out _));
            Assert.False(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[3].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Full.Path.Dependency2", out _));
        }

        [Fact]
        public void GetFunction_ShouldReturnTheFullClassNames_WhenGivenClassNamesThatCanBeLocatedInSolution()
        {
            var solutionModel = new SolutionModel();

            // Arrange: classes referencing each other by short name, with the using
            // lists needed to resolve those names to fully qualified ones.
            ClassModel classModel1 = new()
            {
                Name = "Models.Class1",
                FilePath = "path/Models/Class1.cs"
            };
            classModel1.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>()
            });

            ClassModel classModel2 = new()
            {
                Name = "Services.Class2",
                FilePath = "path/Services/Class2.cs",
                Imports = new List<IImportType>
                {
                    new UsingModel { Name = "Models" }
                }
            };
            classModel2.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Class1", 2 }
                }
            });

            ClassModel classModel3 = new()
            {
                Name = "Controllers.Class3",
                FilePath = "path/Controllers/Class3.cs",
                Imports = new List<IImportType>
                {
                    new UsingModel { Name = "Models" },
                    new UsingModel { Name = "Services" }
                }
            };
            classModel3.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Class1", 6 },
                    { "Class2", 2 }
                }
            });

            ClassModel classModel4 = new()
            {
                Name = "Domain.Data.Class4",
                FilePath = "path/Domain/Data/Class4.cs",
                Imports = new List<IImportType>
                {
                    new UsingModel { Name = "Models" },
                    new UsingModel { Name = "Controllers" }
                }
            };
            classModel4.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>
                {
                    { "Class3", 4 },
                    { "Class5", 1 },
                }
            });

            ClassModel classModel5 = new()
            {
                Name = "Controllers.Class5",
                FilePath = "path/Controllers/Class5.cs"
            };
            classModel5.Metrics.Add(new MetricModel
            {
                ExtractorName = typeof(ParameterRelationVisitor).FullName,
                ValueType = typeof(Dictionary<string, int>).FullName,
                Value = new Dictionary<string, int>()
            });

            var projectModel = new ProjectModel();
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel1 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel2 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel3 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel4 }
            });
            projectModel.Add(new CompilationUnitModel
            {
                ClassTypes = new List<IClassType> { classModel5 }
            });

            var repositoryModel = new RepositoryModel();
            repositoryModel.Solutions.Add(solutionModel);
            repositoryModel.Projects.Add(projectModel);

            _progressLoggerMock.Setup(logger => logger.CreateProgressLogger(5, "Resolving Class Names"))
                .Returns(_progressLoggerBarMock.Object);
            _progressLoggerMock.Setup(logger => logger.CreateProgressLogger(5, "Resolving Using Statements for Each Class"))
                .Returns(_progressLoggerBarMock.Object);
            _progressLoggerMock.Setup(logger =>
                    logger.CreateProgressLogger(5, "Resolving Class Elements (Fields, Methods, Properties,...)"))
                .Returns(_progressLoggerBarMock.Object);

            // Act.
            var processedRepositoryModel = _sut.Process(repositoryModel);

            var processedProjectModel = processedRepositoryModel.Projects[0];

            // Assert: every short name was rewritten to its namespace-qualified form
            // and the occurrence counts were preserved.
            Assert.Empty(
                (Dictionary<string, int>)processedProjectModel.CompilationUnits[0].ClassTypes[0].Metrics[0].Value);

            Assert.True(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[1].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Models.Class1", out var depCount1));
            Assert.Equal(2, depCount1);

            Assert.True(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[2].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Models.Class1", out var depCount2));
            Assert.Equal(6, depCount2);
            Assert.True(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[2].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Services.Class2", out var depCount3));
            Assert.Equal(2, depCount3);

            Assert.True(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[3].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Controllers.Class3", out var depCount4));
            Assert.Equal(4, depCount4);
            Assert.True(
                ((Dictionary<string, int>)processedProjectModel.CompilationUnits[3].ClassTypes[0].Metrics[0].Value)
                .TryGetValue("Controllers.Class5", out var depCount5));
            Assert.Equal(1, depCount5);
        }
    }
}
using System.Text.Json.Serialization;
using BeeSharp.ApiComponents.ApiModels.JsonConverter.Annotations;
using BeeSharp.HiveEngine.ApiComponents.ApiModels.JsonConverter.Annotations;

namespace BeeSharp.HiveEngine.ApiComponents.ApiModels.BroadcastOps.CustomJson.HiveEngine.Contracts.NftMarket
{
    /// <summary>
    /// Immutable payload for the Hive Engine "nftmarket" contract's
    /// "setMarketParams" action. Property names map to the contract's JSON fields
    /// via the JsonPropertyName attributes.
    /// </summary>
    [HiveEngineContract("nftmarket", "setMarketParams")]
    public class HiveEngineNftMarketSetMarketParamsModel : HiveEngineOperation
    {
        // NFT symbol the market parameters apply to.
        [JsonPropertyName("symbol")]
        public string Symbol { get; }

        // Account designated as the official market.
        [JsonPropertyName("officialMarket")]
        public string OfficialMarket { get; }

        // Agent's cut; unit (e.g. basis points vs. percent) is defined by the
        // contract — not visible here.
        [JsonPropertyName("agentCut")]
        public int AgentCut { get; }

        // Minimum fee accepted by the market; unit defined by the contract.
        [JsonPropertyName("minFee")]
        public int MinFee { get; }

        public HiveEngineNftMarketSetMarketParamsModel(string symbol, string officialMarket, int agentCut, int minFee)
        {
            Symbol = symbol;
            OfficialMarket = officialMarket;
            AgentCut = agentCut;
            MinFee = minFee;
        }
    }
}
/*
 * Created by SharpDevelop.
 * User: TheSON
 * Date: 2012-09-01
 * Time: 7:39 PM
 *
 * To change this template use Tools | Options | Coding | Edit Standard Headers.
 */
using System;

namespace Artn.Ilhwa.Model
{
    /// <summary>
    /// Kinds of material-information tasks handled by the application.
    /// </summary>
    public enum TaskType{
        MaterialSkelpInfo,
        MaterialProdInfo
    }

    /// <summary>
    /// Kinds of searches supported by the application.
    /// </summary>
    public enum SearchType{
        TaskOrderList,
        BarcodeList
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using Google;
using Microsoft.Bot.Solutions.Skills;

namespace CalendarSkill.Services.GoogleAPI
{
    /// <summary>
    /// Holds Google API application credentials, with helpers to build the client
    /// description from bot settings and to translate Google API exceptions into
    /// skill exceptions.
    /// </summary>
    public class GoogleClient
    {
        // Message Google returns when the token lacks the required scopes.
        private const string APIErrorAccessDenied = "insufficient permission";

        public string ApplicationName { get; set; }

        public string ClientId { get; set; }

        public string ClientSecret { get; set; }

        public string[] Scopes { get; set; }

        /// <summary>
        /// Builds a <see cref="GoogleClient"/> from the bot's Google configuration.
        /// </summary>
        /// <param name="settings">Bot settings carrying the Google app values.</param>
        /// <returns>A populated client description.</returns>
        /// <exception cref="ArgumentNullException">Thrown when settings is null.</exception>
        public static GoogleClient GetGoogleClient(BotSettings settings)
        {
            if (settings == null)
            {
                throw new ArgumentNullException(nameof(settings));
            }

            var appName = settings.GoogleAppName;
            var clientId = settings.GoogleClientId;
            var clientSecret = settings.GoogleClientSecret;
            var scopes = settings.GoogleScopes;

            var googleClient = new GoogleClient
            {
                ApplicationName = appName,
                ClientId = clientId,
                ClientSecret = clientSecret,

                // Guard against a missing scopes setting; a null value previously
                // caused a NullReferenceException on Split.
                Scopes = scopes?.Split(",") ?? Array.Empty<string>(),
            };

            return googleClient;
        }

        /// <summary>
        /// Maps a <see cref="GoogleApiException"/> onto the matching skill exception type.
        /// </summary>
        public static SkillException HandleGoogleAPIException(GoogleApiException ex)
        {
            var skillExceptionType = SkillExceptionType.Other;

            // ex.Error can be null (e.g. non-API transport failures), so compare via
            // the static string.Equals with a null-conditional instead of
            // dereferencing ex.Error.Message directly.
            if (string.Equals(ex.Error?.Message, APIErrorAccessDenied, StringComparison.InvariantCultureIgnoreCase))
            {
                skillExceptionType = SkillExceptionType.APIAccessDenied;
            }
            else if (ex.HttpStatusCode == System.Net.HttpStatusCode.Unauthorized)
            {
                skillExceptionType = SkillExceptionType.APIUnauthorized;
            }
            else if (ex.HttpStatusCode == System.Net.HttpStatusCode.Forbidden)
            {
                skillExceptionType = SkillExceptionType.APIForbidden;
            }
            else if (ex.HttpStatusCode == System.Net.HttpStatusCode.BadRequest)
            {
                skillExceptionType = SkillExceptionType.APIBadRequest;
            }

            return new SkillException(skillExceptionType, ex.Message, ex);
        }
    }
}
using Microsoft.AspNetCore.Mvc.RazorPages;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace RazorPagesWebApplication.Pages
{
    /// <summary>
    /// Page model backing the Contact Razor page.
    /// </summary>
    public class ContactModel : PageModel
    {
        // GET handler: stores the message the view renders.
        public void OnGet() => ViewData["Message"] = "Your contact page.";
    }
}
namespace Oliviann.Data.Tests
{
    #region Usings

    using System.Collections.Generic;
    using System.Linq;
    using Xunit;

    #endregion Usings

    /// <summary>
    /// Tests for the <c>FilterByName</c> extension on collections of
    /// <c>IDatabase</c>: null/empty collections and unmatched names yield null;
    /// a matching name yields that database.
    /// </summary>
    [Trait("Category", "CI")]
    public class IDatabaseExtensionsTests
    {
        /// <summary>
        /// Verifies a null collection returns a null object.
        /// </summary>
        [Fact]
        public void FilterByNameTest_NullCollection()
        {
            IEnumerable<IDatabase> dbs = null;

            IDatabase result = dbs.FilterByName("PMDB");
            Assert.Null(result);
        }

        /// <summary>
        /// Verifies an empty collection returns a null object.
        /// </summary>
        [Fact]
        public void FilterByNameTest_EmptyCollection()
        {
            IEnumerable<IDatabase> dbs = Enumerable.Empty<IDatabase>();

            IDatabase result = dbs.FilterByName("PMDB");
            Assert.Null(result);
        }

        /// <summary>
        /// Verifies an invalid database name string returns a null instance.
        /// </summary>
        [Theory]
        [InlineData(null)]
        [InlineData("")]
        [InlineData("Taco Bell")]
        [InlineData("JKBD8y7348u02388rnd9sacs79ctoelrjs 8odsjg ne4q86u503416t om4lg")]
        public void FilterByNameTest_InvalidStrings(string input)
        {
            IEnumerable<IDatabase> dbs = new List<IDatabase>
                {
                    new InternalDatabase { Name = "PMDB" },
                    new InternalDatabase { Name = "ColorMat" },
                    new InternalDatabase { Name = "Stuxnet" }
                };

            IDatabase result = dbs.FilterByName(input);
            Assert.Null(result);
        }

        /// <summary>
        /// Verifies a valid string returns the correct object.
        /// </summary>
        /// <param name="input">The input.</param>
        [Theory]
        [InlineData("PMDB")]
        public void FilterByNameTest_ValidString(string input)
        {
            IEnumerable<IDatabase> dbs = new List<IDatabase>
                {
                    new InternalDatabase { Name = "PMDB" },
                    new InternalDatabase { Name = "ColorMat" },
                    new InternalDatabase { Name = "Stuxnet" }
                };

            IDatabase result = dbs.FilterByName(input);
            Assert.NotNull(result);
            Assert.Equal(input, result.Name);
        }
    }
}
@model IEnumerable<Time.Data.EntityModels.TimeMFG.TicketProject>

@{
    //ViewBag.Title = "Support Home";
    @* NOTE(review): 'location' is assigned but never referenced in this view — confirm whether it is still needed. *@
    var location = ViewBag.Location;
}

@*<h2>@ViewBag.Title</h2>*@

@* Ticket list page: sidebar on the left; title, count, search/sort controls and the ticket feed on the right. *@
<div class="container">
    <div class="row">
        @Html.Action("SideBar")
        <div class="col-md-8 col-sm-8 col-xs-8">
            <div class="row">
                <div class="col-md-5"><h4>@ViewBag.Title</h4></div>
                <div class="col-md-5"><h4>@ViewBag.SubTitle</h4></div>
            </div>
            <div class="row">
                <div class="col-md-5"><i>@Model.Count() tickets</i></div>
            </div>
            <hr />
            @Html.Partial("_TicketSearch")
            @Html.Partial("_TicketSort")
            <hr />
        </div>
        <div class="col-md-8 col-sm-8 col-xs-8">
            @* One partial per ticket in the model. *@
            @foreach (var item in Model)
            {
                @Html.Partial("_Ticket", item)
            }
        </div>
    </div>
</div>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Player weapon: spawns a PlayerShot prefab at the gun's position, rate-limited
/// by a simple time-based cooldown.
/// </summary>
public class PlayerGun : MonoBehaviour
{
    public PlayerShot shotPrefab;
    // Firing direction in degrees; converted to radians when assigned to the shot.
    public float shotAngleDeg;
    // Seconds that must elapse between consecutive shots.
    public float cooldown;
    // NOTE(review): declared but never played anywhere in this class — confirm
    // whether Fire() is supposed to trigger it.
    public AudioSource shotSound;

    // Absolute time (Time.time) at which firing becomes allowed again.
    private float cooldownEndTime = 0f;

    // Fires a shot if the cooldown from the previous shot has ended.
    public void Fire()
    {
        if (this.cooldownEndTime > Time.time)
        {
            // Cooldown ends in the future. Do nothing.
            return;
        }

        // Fire shot.
        PlayerShot shot = Instantiate<PlayerShot>(this.shotPrefab, this.transform.position, Quaternion.identity);
        shot.angle = this.shotAngleDeg * Mathf.Deg2Rad;
        this.cooldownEndTime = Time.time + this.cooldown;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

namespace Microsoft.IIS.Administration.Security
{
    using Microsoft.IIS.Administration.Core.Security;
    using System.Threading;
    using System.Threading.Tasks;
    using System.Collections.Generic;
    using System;
    using System.IO;
    using Microsoft.IIS.Administration.Core.Utils;
    using System.Security.Cryptography;
    using Newtonsoft.Json;
    using System.Linq;

    /* JSON file format:
       {
           "keys": [
               {
                   "id": "...........",
                   "purpose": "My app key",
                   "created_on": "1999-01-01T00:00:00Z",
                   "expires_on": "1999-02-01T00:00:00Z",
                   "token_hash": "....................."
               },
               ...
           ]
       }
    */

    /// <summary>
    /// File-backed API key store. Keys are cached in memory, keyed by token hash.
    /// Every mutation acquires the semaphore, re-reads the file, writes a uniquely
    /// named temp file, swaps it over the original, and then refreshes the cache.
    /// </summary>
    class ApiKeyFileStorage : IApiKeyStorage
    {
        private string _filePath;
        private SemaphoreSlim _lock = new SemaphoreSlim(1); // All writes are sequential
        private IDictionary<string, ApiKey> _keys; // Read-only cache, keyed by token hash

        public ApiKeyFileStorage(string filePath)
        {
            if (string.IsNullOrEmpty(filePath))
            {
                throw new ArgumentNullException(nameof(filePath));
            }

            _filePath = filePath;
        }

        public async Task<IEnumerable<ApiKey>> GetAllKeys()
        {
            await EnsureInit();

            return _keys.Values;
        }

        public async Task<ApiKey> GetKeyByHash(string hash)
        {
            await EnsureInit();

            ApiKey apiKey = null;
            if (_keys.TryGetValue(hash, out apiKey))
            {
                return apiKey;
            }

            return null;
        }

        public async Task<ApiKey> GetKeyById(string id)
        {
            await EnsureInit();

            return _keys.Values.Where(k => k.Id == id).FirstOrDefault();
        }

        /// <summary>
        /// Stores the key, replacing any existing key that has the same id.
        /// </summary>
        public async Task SaveKey(ApiKey key)
        {
            //
            // Sequential access
            await _lock.WaitAsync();

            try
            {
                // Load a fresh copy
                var keys = await LoadFile();

                // Remove an existing key with the same id.
                // FIX: search the freshly loaded copy instead of the in-memory cache;
                // the cache can be stale, and it is null when SaveKey is the first
                // call on this instance (the cache is only built by EnsureInit).
                var existing = keys.Where(kv => kv.Value.Id == key.Id).FirstOrDefault();
                if (existing.Key != null)
                {
                    keys.Remove(existing.Key);
                }

                // Add
                keys.Add(key.TokenHash, key);

                // Save
                await UpdateFile(keys.Values);

                // Update the cache
                Interlocked.Exchange(ref _keys, keys); // To avoid volatile
            }
            finally
            {
                _lock.Release();
            }
        }

        /// <summary>
        /// Removes the key by token hash. Returns false if it was not present.
        /// </summary>
        public async Task<bool> RemoveKey(ApiKey key)
        {
            if (await GetKeyByHash(key.TokenHash) == null)
            {
                return false;
            }

            //
            // Sequential access
            await _lock.WaitAsync();

            try
            {
                var keys = await LoadFile();

                if (keys.Remove(key.TokenHash))
                {
                    await UpdateFile(keys.Values);

                    // Update the cache
                    Interlocked.Exchange(ref _keys, keys); // To avoid volatile
                }
            }
            finally
            {
                _lock.Release();
            }

            return true;
        }

        // Lazily builds the in-memory cache from the backing file.
        private async Task EnsureInit()
        {
            if (_keys != null)
            {
                return;
            }

            //
            // Load keys from file
            await _lock.WaitAsync();

            try
            {
                _keys = await LoadFile();
            }
            finally
            {
                _lock.Release();
            }
        }

        // Reads the backing file into a hash->key dictionary.
        // A missing file yields an empty dictionary.
        private async Task<IDictionary<string, ApiKey>> LoadFile()
        {
            var result = new Dictionary<string, ApiKey>();
            dynamic obj = null;

            if (File.Exists(_filePath))
            {
                using (var fs = new FileStream(_filePath, FileMode.Open, FileAccess.Read))
                using (var sr = new StreamReader(fs))
                {
                    obj = JsonConvert.DeserializeObject(await sr.ReadToEndAsync());
                }
            }

            if (obj != null && obj.keys != null)
            {
                foreach (var k in obj.keys)
                {
                    ApiKey key = FromJson(k);
                    result[key.TokenHash] = key;
                }
            }

            return result;
        }

        // Serializes the keys into a uniquely named temp file, then swaps it
        // over the original so a crash mid-write cannot corrupt the store.
        private async Task UpdateFile(IEnumerable<ApiKey> keys)
        {
            if (keys == null)
            {
                throw new ArgumentNullException(nameof(keys));
            }

            //
            // Write into temp file to avoid corruption

            //
            // Define unique temp name
            string filePath = null;
            do
            {
                filePath = Path.Combine(new FileInfo(_filePath).DirectoryName, Base64.Encode(GenerateRandom(4)) + ".api-keys.json.tmp");
            } while (File.Exists(filePath));

            //
            // Write to file
            using (var sw = File.AppendText(filePath))
            {
                FileInfo fi = new FileInfo(filePath);
                fi.Attributes = FileAttributes.Temporary;

                await sw.WriteAsync("{\r\n \"keys\": [");

                //
                // Write each key
                for (int i = 0; i < keys.Count(); ++i)
                {
                    var key = ToJson(keys.ElementAt(i));

                    string obj = String.Format("\r\n {0}{1}", JsonConvert.SerializeObject(key, Formatting.Indented).Replace("\n", "\n "), i < keys.Count() - 1 ? "," : "");

                    await sw.WriteAsync(obj);
                }

                await sw.WriteAsync("\r\n ]\r\n}");
                await sw.FlushAsync();
            }

            //
            // Swap the original file
            File.Delete(_filePath);
            File.Move(filePath, _filePath);
        }

        // Projects an ApiKey onto the anonymous shape written to disk.
        private static object ToJson(ApiKey key)
        {
            return new
            {
                id = key.Id,
                purpose = key.Purpose ?? "",
                created_on = key.CreatedOn,
                last_modified = key.LastModified,
                expires_on = (object)key.ExpiresOn ?? string.Empty,
                token_hash = key.TokenHash,
                token_type = key.TokenType
            };
        }

        // Rebuilds an ApiKey from a deserialized JSON object, filling defaults
        // for missing fields. "hash" is accepted as a legacy alias of "token_hash".
        private static ApiKey FromJson(dynamic key)
        {
            string tokenHash = DynamicHelper.Value(key.token_hash) ?? DynamicHelper.Value(key.hash);
            string tokenType = DynamicHelper.Value(key.token_type) ?? "SWT";
            DateTime createdOn = DynamicHelper.To<DateTime>(key.created_on) ?? DateTime.UtcNow;

            return new ApiKey(tokenHash, tokenType)
            {
                Id = DynamicHelper.Value(key.id) ?? GenerateId(),
                Purpose = DynamicHelper.Value(key.purpose) ?? string.Empty,
                CreatedOn = createdOn,
                ExpiresOn = DynamicHelper.To<DateTime>(key.expires_on),
                LastModified = DynamicHelper.To<DateTime>(key.last_modified) ?? createdOn
            };
        }

        // Cryptographically secure random bytes for temp-file names and ids.
        private static byte[] GenerateRandom(int bytesLen)
        {
            byte[] bytes = new byte[bytesLen];

            using (var rng = RandomNumberGenerator.Create())
            {
                rng.GetBytes(bytes);
            }

            return bytes;
        }

        private static string GenerateId()
        {
            return Base64.Encode(GenerateRandom(16));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace MightyElk.MiscExt
{
    /// <summary>
    /// Convenience extensions for <see cref="ICollection{T}"/>.
    /// </summary>
    public static class ICollectionExt
    {
        /// <summary>
        /// Appends <paramref name="item"/> to <paramref name="list"/> and hands the
        /// same item back, allowing fluent add-then-use call sites.
        /// </summary>
        /// <param name="list">Collection to append to.</param>
        /// <param name="item">Item to add and return.</param>
        /// <returns>The item that was just added.</returns>
        public static T AddAndReturn<T>(this ICollection<T> list, T item)
        {
            list.Add(item);

            return item;
        }
    }
}
using Sorter.Core;
using Xunit;

namespace Sorter.UnitTests
{
    /// <summary>
    /// Behavioral tests for <c>DefaultComparer</c>: null/empty tolerance, the
    /// numeric-prefix ordering rule, and plain string comparison.
    /// </summary>
    public class DefaultComparerTests
    {
        [Fact]
        public void Compare_ForNullArguments_ThrowsNoExceptions()
        {
            var comparer = new DefaultComparer();

            var difference = comparer.Compare(null, null);

            Assert.Equal(0, difference);
        }

        [Fact]
        public void Compare_ForEmptyArguments_ThrowsNoExceptions()
        {
            var comparer = new DefaultComparer();

            var difference = comparer.Compare(string.Empty, string.Empty);

            Assert.Equal(0, difference);
        }

        // A line with a numeric prefix sorts after the same line without one.
        [Fact]
        public void Compare_ForLineWithoutStringButNumber_TakesNumberForZero()
        {
            var comparer = new DefaultComparer();

            var difference = comparer.Compare("3. Hello World", " Hello World");

            Assert.Equal(1, difference);
        }

        // Symmetric case: the un-prefixed line sorts before the prefixed one.
        [Fact]
        public void Compare_ForLineWithoutStringButNumber_TakesNumberForZero2()
        {
            var comparer = new DefaultComparer();

            var difference = comparer.Compare(" Hello World", "3. Hello World");

            Assert.Equal(-1, difference);
        }

        // With no numeric prefixes, ordering falls back to string comparison.
        [Fact]
        public void Compare_ForLineWithoutNumber_ComparesStrings()
        {
            var comparer = new DefaultComparer();

            var difference = comparer.Compare("Beaver", "Chipmunk");

            Assert.Equal(-1, difference);
        }
    }
}
using Order = Masa.EShop.Services.Ordering.Entities.Order;

namespace Masa.EShop.Services.Ordering.Dto;

/// <summary>
/// Flattened, serialization-friendly view of an order. Property names are
/// lowercase to match the existing wire contract — do not rename.
/// </summary>
public class OrderDto
{
    public int ordernumber { get; set; }

    public DateTime date { get; set; }

    public string status { get; set; } = default!;

    public string description { get; set; } = default!;

    // Shipping address, flattened from the entity's Address value.
    public string street { get; set; } = default!;

    public string city { get; set; } = default!;

    public string zipcode { get; set; } = default!;

    public string country { get; set; } = default!;

    public List<OrderItemDto> orderitems { get; set; } = default!;

    public decimal subtotal { get; set; }

    public decimal total { get; set; }

    /// <summary>Maps a domain <see cref="Order"/> entity onto this DTO.</summary>
    public static OrderDto FromOrder(Order order)
    {
        return new OrderDto
        {
            ordernumber = order.OrderNumber,
            date = order.OrderDate,
            status = order.OrderStatus,
            // Null description is normalized to empty string for the wire.
            description = order.Description ?? "",
            street = order.Address.Street,
            city = order.Address.City,
            zipcode = order.Address.ZipCode,
            country = order.Address.Country,
            orderitems = order.OrderItems
                .Select(OrderItemDto.FromOrderItem)
                .ToList(),
            // NOTE(review): subtotal and total are both taken from GetTotal(), so
            // they are always equal — confirm whether subtotal should come from a
            // separate pre-discount/pre-shipping calculation.
            subtotal = order.GetTotal(),
            total = order.GetTotal()
        };
    }
}
using FS.Query.Scripts.SelectionScripts.Sources;
using FS.Query.Settings;

namespace FS.Query.Scripts.SelectionScripts.Combinations
{
    /// <summary>
    /// Base class for a pairwise combination of two selection sources.
    /// </summary>
    public abstract class Combination
    {
        /// <summary>First operand of the combination.</summary>
        public ISource FirstSource { get; }

        /// <summary>Second operand of the combination.</summary>
        public ISource SecondSource { get; }

        public Combination(ISource firstSource, ISource secondSource)
        {
            SecondSource = secondSource;
            FirstSource = firstSource;
        }

        /// <summary>Builds the combination's script fragment for the given settings.</summary>
        public abstract object Build(DbSettings dbSettings);
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;

namespace CppCompiler
{
    /// <summary>
    /// Launcher window: each button opens the corresponding tool window
    /// non-modally (Show keeps this menu usable behind it).
    /// </summary>
    public partial class MainMenu : Form
    {
        public MainMenu()
        {
            InitializeComponent();
        }

        private void btnLexicalAnalysis_Click(object sender, EventArgs e)
        {
            new LexicalAnalysisForm().Show();
        }

        private void btnLexerGenerator_Click(object sender, EventArgs e)
        {
            new LexerGeneratorForm().Show();
        }

        private void btnTinySyntaxAnalysis_Click(object sender, EventArgs e)
        {
            new TinySyntaxAnalysisForm().Show();
        }

        private void btnLL1Analysis_Click(object sender, EventArgs e)
        {
            new LL1AnalysisForm().Show();
        }

        private void btnAbout_Click(object sender, EventArgs e)
        {
            new AboutForm().Show();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Runtime.Serialization;

namespace LeagueOfLegendsLibrary
{
    /// <summary>
    /// Aggregated player statistics. Backing fields are populated through
    /// DataContract deserialization; the computed "Average*" properties divide
    /// totals by the number of sessions played (0 when no sessions exist).
    /// </summary>
    [DataContract]
    public class AggregatedStats
    {
        // Derived: integer average of assists per session.
        public int AverageAssists
        {
            get { return _totalSessionsPlayed > 0 ? _totalAssists / _totalSessionsPlayed : 0; }
        }

        // Derived: integer average of champion kills per session.
        public int AverageChampionsKilled
        {
            get { return _totalSessionsPlayed > 0 ? _totalChampionKills / _totalSessionsPlayed : 0; }
        }

        [DataMember(Name = "averageCombatPlayerScore")]
        private int _averageCombatPlayerScore;
        // NOTE(review): returns the raw deserialized value (marked "fix this" by the
        // original author) — confirm whether it should be derived from totals instead.
        public int AverageCombatPlayerScore { get { return _averageCombatPlayerScore; } }

        [DataMember(Name = "averageNodeCapture")]
        private int _averageNodeCapture;
        // Derived from totals; the deserialized _averageNodeCapture field is kept
        // for the wire contract but intentionally unused here (original behavior).
        public int AverageNodeCapture
        {
            get { return _totalSessionsPlayed > 0 ? _totalNodeCapture / _totalSessionsPlayed : 0; }
        }

        [DataMember(Name = "averageNodeCaptureAssist")]
        private int _averageNodeCaptureAssist;
        public int AverageNodeCaptureAssist { get { return _averageNodeCaptureAssist; } }

        [DataMember(Name = "averageNodeNeutralize")]
        private int _averageNodeNeutralize;
        public int AverageNodeNeutralize
        {
            get { return _totalSessionsPlayed > 0 ? _totalNodeNeutralize / _totalSessionsPlayed : 0; }
        }

        [DataMember(Name = "averageNodeNeutralizeAssist")]
        private int _averageNodeNeutralizeAssist;
        public int AverageNodeNeutralizeAssist { get { return _averageNodeNeutralizeAssist; } }

        [DataMember(Name = "averageNumDeaths")]
        private int _averageNumDeaths;
        public int AverageNumberOfDeaths { get { return _averageNumDeaths; } }

        [DataMember(Name = "averageObjectivePlayerScore")]
        private int _averageObjectivePlayerScore;
        public int AverageObjectivePlayerScore { get { return _averageObjectivePlayerScore; } }

        [DataMember(Name = "averageTeamObjective")]
        private int _averageTeamObjective;
        public int AverageTeamObjective { get { return _averageTeamObjective; } }

        [DataMember(Name = "averageTotalPlayerScore")]
        private int _averageTotalPlayerScore;
        public int AverageTotalPlayerScore { get { return _averageTotalPlayerScore; } }

        [DataMember(Name = "botGamesPlayed")]
        private int _botGamesPlayed;
        // BUG FIX: previously returned _totalSessionsPlayed (copy/paste error),
        // so bot-game counts were silently wrong.
        public int BotGamesPlayed { get { return _botGamesPlayed; } }

        [DataMember(Name = "killingSpree")]
        private int _killingSpree;
        public int KillingSpree { get { return _killingSpree; } }

        [DataMember(Name = "maxAssists")]
        private int _maxAssists;
        public int MaxAssists { get { return _maxAssists; } }

        [DataMember(Name = "maxChampionsKilled")]
        private int _maxChampionsKilled;
        public int MaxChampionsKilled { get { return _maxChampionsKilled; } }

        [DataMember(Name = "maxCombatPlayerScore")]
        private int _maxCombatPlayerScore;
        public int MaxCombatPlayerScore { get { return _maxCombatPlayerScore; } }

        [DataMember(Name = "maxLargestCriticalStrike")]
        private int _maxLargestCriticalStrike;
        public int MaxLargestCriticalStrike { get { return _maxLargestCriticalStrike; } }

        [DataMember(Name = "maxLargestKillingSpree")]
        private int _maxLargestKillingSpree;
        public int MaxLargestKillingSpree { get { return _maxLargestKillingSpree; } }

        [DataMember(Name = "maxNodeCapture")]
        private int _maxNodeCapture;
        public int MaxNodeCapture { get { return _maxNodeCapture; } }

        [DataMember(Name = "maxNodeCaptureAssist")]
        private int _maxNodeCaptureAssist;
        public int MaxNodeCaptureAssist { get { return _maxNodeCaptureAssist; } }

        [DataMember(Name = "maxNodeNeutralize")]
        private int _maxNodeNeutralize;
        public int MaxNodeNeutralize { get { return _maxNodeNeutralize; } }

        [DataMember(Name = "maxNodeNeutralizeAssist")]
        private int _maxNodeNeutralizeAssist;
        public int MaxNodeNeutralizeAssist { get { return _maxNodeNeutralizeAssist; } }

        [DataMember(Name = "maxObjectivePlayerScore")]
        private int _maxObjectivePlayerScore;
        public int MaxObjectivePlayerScore { get { return _maxObjectivePlayerScore; } }

        [DataMember(Name = "maxTeamObjective")]
        private int _maxTeamObjective;
        public int MaxTeamObjective { get { return _maxTeamObjective; } }

        [DataMember(Name = "maxTimePlayed")]
        private int _maxTimePlayed;
        public int MaxTimePlayed { get { return _maxTimePlayed; } }

        [DataMember(Name = "maxTimeSpentLiving")]
        private int _maxTimeSpentLiving;
        public int MaxTimeSpentLiving { get { return _maxTimeSpentLiving; } }

        [DataMember(Name = "maxTotalPlayerScore")]
        private int _maxTotalPlayerScore;
        public int MaxTotalPlayerScore { get { return _maxTotalPlayerScore; } }

        [DataMember(Name = "mostChampionKillsPerSession")]
        private int _mostChampionKillsPerSession;
        public int MostChampionKillsPerSession { get { return _mostChampionKillsPerSession; } }

        [DataMember(Name = "mostSpellsCast")]
        private int _mostSpellsCast;
        public int MostSpellsCast { get { return _mostSpellsCast; } }

        [DataMember(Name = "normalGamesPlayed")]
        private int _normalGamesPlayed;
        public int NormalGamesPlayed { get { return _normalGamesPlayed; } }

        [DataMember(Name = "rankedPremadeGamesPlayed")]
        private int _rankedPremadeGamesPlayed;
        public int RankedPremadeGamesPlayed { get { return _rankedPremadeGamesPlayed; } }

        [DataMember(Name = "rankedSoloGamesPlayed")]
        private int _rankedSoloGamesPlayed;
        public int RankedSoloGamesPlayed { get { return _rankedSoloGamesPlayed; } }

        [DataMember(Name = "totalAssists")]
        private int _totalAssists;
        public int TotalAssists { get { return _totalAssists; } }

        [DataMember(Name = "totalChampionKills")]
        private int _totalChampionKills;
        public int TotalChampionKills { get { return _totalChampionKills; } }

        [DataMember(Name = "totalDamageDealt")]
        private int _totalDamageDealt;
        public int TotalDamageDealt { get { return _totalDamageDealt; } }

        [DataMember(Name = "totalDamageTaken")]
        private int _totalDamageTaken;
        public int TotalDamageTaken { get { return _totalDamageTaken; } }

        [DataMember(Name = "totalDoubleKills")]
        private int _totalDoubleKills;
        public int TotalDoubleKills { get { return _totalDoubleKills; } }

        [DataMember(Name = "totalFirstBlood")]
        private int _totalFirstBlood;
        public int TotalFirstBlood { get { return _totalFirstBlood; } }

        [DataMember(Name = "totalGoldEarned")]
        private int _totalGoldEarned;
        public int TotalGoldEarned { get { return _totalGoldEarned; } }

        [DataMember(Name = "totalHeal")]
        private int _totalHeal;
        public int TotalHeal { get { return _totalHeal; } }

        [DataMember(Name = "totalMagicDamageDealt")]
        private int _totalMagicDamageDealt;
        public int TotalMagicDamageDealt { get { return _totalMagicDamageDealt; } }

        [DataMember(Name = "totalMinionKills")]
        private int _totalMinionKills;
        public int TotalMinionKills { get { return _totalMinionKills; } }

        [DataMember(Name = "totalNeutralMinionsKilled")]
        private int _totalNeutralMinionsKilled;
        public int TotalNeutralMinionsKilled { get { return _totalNeutralMinionsKilled; } }

        [DataMember(Name = "totalNodeCapture")]
        private int _totalNodeCapture;
        public int TotalNodeCapture { get { return _totalNodeCapture; } }

        [DataMember(Name = "totalNodeNeutralize")]
        private int _totalNodeNeutralize;
        public int TotalNodeNeutralize { get { return _totalNodeNeutralize; } }

        [DataMember(Name = "totalPentaKills")]
        private int _totalPentaKills;
        public int TotalPentaKills { get { return _totalPentaKills; } }

        [DataMember(Name = "totalPhysicalDamageDealt")]
        private int _totalPhysicalDamageDealt;
        public int TotalPhysicalDamageDealt { get { return _totalPhysicalDamageDealt; } }

        [DataMember(Name = "totalQuadraKills")]
        private int _totalQuadraKills;
        public int TotalQuadraKills { get { return _totalQuadraKills; } }

        [DataMember(Name = "totalSessionsLost")]
        private int _totalSessionsLost;
        public int TotalSessionsLost { get { return _totalSessionsLost; } }

        [DataMember(Name = "_totalSessionsPlayed")]
        private int __totalSessionsPlayed;
        // NOTE(review): public member with a field-style name; kept verbatim for
        // source compatibility with existing callers.
        public int _totalSessionsPlayed { get { return __totalSessionsPlayed; } }

        [DataMember(Name = "totalSessionsWon")]
        private int _totalSessionsWon;
        public int TotalSessionsWon { get { return _totalSessionsWon; } }

        [DataMember(Name = "totalTripleKills")]
        private int _totalTripleKills;
        public int TotalTripleKills { get { return _totalTripleKills; } }

        [DataMember(Name = "totalTurretsKilled")]
        private int _totalTurretsKilled;
        public int TotalTurretsKilled { get { return _totalTurretsKilled; } }

        [DataMember(Name = "totalUnrealKills")]
        private int _totalUnrealKills;
        public int TotalUnrealKills { get { return _totalUnrealKills; } }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data;
using Newtonsoft.Json;

namespace CloudFilesMonitor
{
    /// <summary>
    /// A monitored site: compares the files in a cloud container against the MD5
    /// hashes recorded in the database and can run a restore command when they differ.
    /// </summary>
    class Site
    {
        public string Name { get; set; }

        public string ContainerName { get; set; }

        // Command line executed by Restore().
        public string RestoreCommand { get; set; }

        /// <summary>
        /// Compares the files on the server to the existing records in the database.
        /// </summary>
        /// <returns>A set of which files are different on the server.</returns>
        public MD5Result[] Compare()
        {
            // SECURITY(review): Name is interpolated directly into SQL. It appears to
            // come from configuration rather than user input, but the Database.Helper
            // API should grow a parameterized variant — flagged, not changed here.
            var existingQuery = string.Format("SELECT * FROM cfm_files WHERE SiteName = '{0}'", this.Name);
            var existing = Database.Helper.CurrentHelper.GetDataTable(existingQuery);

            var filesOnServer = Provider.GetFiles(ContainerName);

            // First run: no baseline exists yet, so record the current state
            // as valid and report no changes.
            if (existing.Rows.Count == 0)
            {
                SetCurrentAsValid(filesOnServer);
                return new MD5Result[0];
            }

            List<MD5Result> changes = new List<MD5Result>();
            foreach (var file in filesOnServer)
            {
                // A file counts as unchanged only when exactly one DB row matches
                // both its cloud path and its MD5 hash.
                var hasChanged = existing.AsEnumerable().Count(x =>
                        x.Field<string>("CloudPath") == file.CloudPath &&
                        x.Field<string>("MD5Hash") == file.MD5) != 1;

                if (hasChanged)
                {
                    changes.Add(file);
                }
            }

            return changes.ToArray();
        }

        /// <summary>
        /// Runs the configured restore command and blocks until it exits.
        /// </summary>
        public void Restore()
        {
            var startInfo = new System.Diagnostics.ProcessStartInfo(this.RestoreCommand)
            {
                UseShellExecute = false,
                RedirectStandardOutput = true
            };

            using (var proc = System.Diagnostics.Process.Start(startInfo))
            {
                // Drain stdout before waiting: a redirected stream that is never read
                // lets a chatty child fill the pipe and deadlock both processes.
                // This also replaces the previous Sleep(1) busy-wait polling loop.
                proc.StandardOutput.ReadToEnd();
                proc.WaitForExit();
            }
        }

        /// <summary>
        /// Records the current server state as the valid baseline.
        /// </summary>
        public void SetCurrentAsValid()
        {
            var filesOnServer = Provider.GetFiles(ContainerName);
            SetCurrentAsValid(filesOnServer);
        }

        /// <summary>
        /// Replaces this site's DB records with the supplied file list.
        /// </summary>
        public void SetCurrentAsValid(IEnumerable<MD5Result> files)
        {
            // Delete any existing files (same SQL-building caveat as Compare()).
            Database.Helper.CurrentHelper.Delete("cfm_files", string.Format("SiteName = '{0}'", this.Name));

            // Add the files back to the DB.
            foreach (var file in files)
            {
                Dictionary<string, string> columns = new Dictionary<string, string>()
                {
                    {"SiteName", this.Name},
                    {"CloudPath", file.CloudPath},
                    {"MD5Hash", file.MD5}
                };
                Database.Helper.CurrentHelper.Insert("cfm_files", columns);
            }
        }

        // Serialized with full type info so the concrete provider round-trips.
        [JsonProperty(TypeNameHandling=TypeNameHandling.All)]
        public ICloudProvider Provider { get; set; }
    }
}
namespace Stumps
{
    using Stumps.Server;

    /// <summary>
    /// An interface used to abstract the starting environment of a Stumps server.
    /// Implementations receive their configuration and message writer before
    /// <see cref="RunInstance"/> is invoked.
    /// </summary>
    public interface IStartup
    {
        /// <summary>
        /// Gets or sets the configuration for the Stumps server.
        /// </summary>
        /// <value>
        /// The configuration for the Stumps server.
        /// </value>
        StumpsConfiguration Configuration { get; set; }

        /// <summary>
        /// Gets or sets the <see cref="IMessageWriter"/> used to record startup messages.
        /// </summary>
        /// <value>
        /// The <see cref="IMessageWriter"/> used to record startup messages.
        /// </value>
        IMessageWriter MessageWriter { get; set; }

        /// <summary>
        /// Runs the instance of the Stumps server.
        /// </summary>
        void RunInstance();
    }
}
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.Collections.Generic;
using DARTS.ViewModel;
using DARTS.Data;
using DARTS.Data.DataObjects;
using System.ComponentModel;
using DARTS.Data.DataBase;

namespace DARTS_UnitTests.ViewModel
{
    /// <summary>
    /// Verifies that setting MatchesOverviewViewModel.FilterText filters the
    /// displayed matches, including case-insensitive and non-ASCII input.
    /// </summary>
    [TestClass]
    public class MatchesOverviewViewModel_UnitTest_RightCharacter
    {
        private MatchesOverviewViewModel overview;

        [ClassInitialize]
        public static void ClassInitialize(TestContext testContext)
        {
            // Arrange: seed the shared test database once for the whole class.
            Mock_DatabaseTestData.AddDatabaseTestData();
        }

        [TestInitialize]
        public void TestInitialize()
        {
            // Arrange: fresh view model per test so filters don't leak between cases.
            overview = new MatchesOverviewViewModel();
        }

        // Expected counts depend on the rows seeded by Mock_DatabaseTestData.
        [TestMethod]
        [DataRow("a", 3)]
        [DataRow("x", 0)]
        [DataRow("PLAYER", 3)]
        [DataRow("0", 2)]
        [DataRow("草", 0)]
        public void Set_FilterText_Should_Return_Filtered_Matches(string filterInput, int expectedAmount)
        {
            // Act
            overview.FilterText = filterInput;

            // Assert
            Assert.AreEqual(expectedAmount, overview.DisplayedMatches.Count);
        }

        [ClassCleanup]
        public static void TestCleanup()
        {
            // Release the shared database connection opened for the test data.
            DataBaseProvider.Instance.Dispose();
        }
    }
}
using System.Net.Http;
using Cake.Core.Diagnostics;

namespace Cake.Alive.Tests.Builders
{
    /// <summary>
    /// Test builder for <see cref="Alive"/> with a no-op logger and a
    /// replaceable HttpClient.
    /// </summary>
    internal class AliveBuilder
    {
        private readonly ICakeLog _log = new NullLog();

        // Default client comes from HttpClientBuilder via its implicit
        // conversion to HttpClient.
        private HttpClient _httpClient = new HttpClientBuilder();

        // Fluent override for the HTTP client used by the built Alive instance.
        public AliveBuilder With(HttpClient httpClient)
        {
            _httpClient = httpClient;
            return this;
        }

        public Alive Build()
        {
            return new Alive(_log, _httpClient);
        }

        // Lets tests use an AliveBuilder wherever an Alive is expected.
        public static implicit operator Alive(AliveBuilder builder)
        {
            return builder.Build();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;

namespace NW55.Integration.RuneScape.Api
{
    /// <summary>
    /// Typed RuneScape API endpoint: builds the request URI for a parameter and
    /// parses the raw response body into a result.
    /// </summary>
    public abstract class RuneScapeApi<TParameter, TResult> : RuneScapeApi
    {
        /// <summary>Builds the endpoint URI for the given parameter.</summary>
        public abstract string GetUri(TParameter parameter);

        /// <summary>Parses the raw response body into a <typeparamref name="TResult"/>.</summary>
        public abstract TResult ParseResult(TParameter parameter, string rawResponse);
    }
}
using System.Web.OData.Builder;
using Microsoft.OData.Edm;

namespace ODataOpenTypeSample
{
    /// <summary>
    /// Builds the EDM model exposed by this sample service.
    /// </summary>
    public class ODataModels
    {
        /// <summary>
        /// Creates a convention-based model with a single "Accounts" entity set,
        /// using the Account type's namespace as the model namespace.
        /// </summary>
        public static IEdmModel GetModel()
        {
            var modelBuilder = new ODataConventionModelBuilder();
            modelBuilder.EntitySet<Account>("Accounts");
            modelBuilder.Namespace = typeof(Account).Namespace;
            return modelBuilder.GetEdmModel();
        }
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace DissProject.Models
{
    /// <summary>
    /// A student record extending <see cref="AbstractStudent"/> with internship,
    /// thesis and graduation details.
    /// </summary>
    public class Student : AbstractStudent
    {
        // Evaluation of the internship the student has taken.
        public String InternshipRating { get; set; }

        // Navigation properties are virtual so EF can lazy-load them.
        public virtual Thesis CurrentThesis { get; set; }

        public virtual Internship CurrentInternship { get; set; }

        [Required]
        public int FacultyNumber { get; set; }

        [Required]
        public int GraduationYear { get; set; }

        // Company where the student currently works, if any.
        public String WorkCompany { get; set; }

        public Student()
            :base()
        {
        }
    }
}
using System.Runtime.CompilerServices;
using System.Threading.Tasks;

// Expose internals to the test assemblies; DynamicProxyGenAssembly2 is the
// Castle DynamicProxy assembly used by mocking frameworks.
[assembly: InternalsVisibleTo("DnDGen.TreasureGen.Tests.Unit")]
[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]
[assembly: InternalsVisibleTo("DnDGen.TreasureGen.Tests.Integration")]
[assembly: InternalsVisibleTo("DnDGen.TreasureGen.Tests.Integration.IoC")]
[assembly: InternalsVisibleTo("DnDGen.TreasureGen.Tests.Integration.Tables")]

namespace DnDGen.TreasureGen.Generators
{
    /// <summary>
    /// Generates treasure appropriate for a given level.
    /// </summary>
    public interface ITreasureGenerator
    {
        /// <summary>Generates treasure for the given level.</summary>
        Treasure GenerateAtLevel(int level);

        /// <summary>Asynchronously generates treasure for the given level.</summary>
        Task<Treasure> GenerateAtLevelAsync(int level);
    }
}
using UnityEngine;
using System.Collections;
using System;

/// <summary>
/// <c>DateTimeProvider</c> implementation backed by the real system clock.
/// </summary>
public class SystemTimeProvider : DateTimeProvider
{
    // Returns local time (DateTime.Now). NOTE(review): if these values are
    // persisted or compared across time zones, UtcNow would be safer — confirm intent.
    public DateTime GetCurrent ()
    {
        return DateTime.Now;
    }
}
using System.Text.Json.Serialization;
using Essensoft.AspNetCore.Payment.Alipay.Domain;

namespace Essensoft.AspNetCore.Payment.Alipay.Response
{
    /// <summary>
    /// MybankCreditSupplychainCreditpayTradeQueryResponse.
    /// Response payload for the supply-chain credit-pay trade query.
    /// </summary>
    public class MybankCreditSupplychainCreditpayTradeQueryResponse : AlipayResponse
    {
        /// <summary>
        /// Buyer information.
        /// </summary>
        [JsonPropertyName("buyer")]
        public Member Buyer { get; set; }

        /// <summary>
        /// 1688 buyer ID.
        /// </summary>
        [JsonPropertyName("buyer_scene_id")]
        public string BuyerSceneId { get; set; }

        /// <summary>
        /// Confirmed-receipt amount.
        /// </summary>
        [JsonPropertyName("confirm_amt")]
        public string ConfirmAmt { get; set; }

        /// <summary>
        /// Creation date.
        /// </summary>
        [JsonPropertyName("create_date")]
        public string CreateDate { get; set; }

        /// <summary>
        /// Effective date.
        /// </summary>
        [JsonPropertyName("effect_date")]
        public string EffectDate { get; set; }

        /// <summary>
        /// External order number.
        /// </summary>
        [JsonPropertyName("out_order_no")]
        public string OutOrderNo { get; set; }

        /// <summary>
        /// Refund amount, in cents.
        /// </summary>
        [JsonPropertyName("refund_amt")]
        public string RefundAmt { get; set; }

        /// <summary>
        /// Seller information.
        /// </summary>
        [JsonPropertyName("seller")]
        public Member Seller { get; set; }

        /// <summary>
        /// 1688 seller ID.
        /// </summary>
        [JsonPropertyName("seller_scene_id")]
        public string SellerSceneId { get; set; }

        /// <summary>
        /// Source order ID (the 1688 order ID).
        /// </summary>
        [JsonPropertyName("source_order_no")]
        public string SourceOrderNo { get; set; }

        /// <summary>
        /// Order status: VALID/INVALID/INIT/CANCEL.
        /// </summary>
        [JsonPropertyName("status")]
        public string Status { get; set; }

        /// <summary>
        /// MYbank log trace ID.
        /// </summary>
        [JsonPropertyName("trace_id")]
        public string TraceId { get; set; }

        /// <summary>
        /// Order amount, in cents.
        /// </summary>
        [JsonPropertyName("trade_amt")]
        public string TradeAmt { get; set; }

        /// <summary>
        /// Internal order number.
        /// </summary>
        [JsonPropertyName("trade_no")]
        public string TradeNo { get; set; }
    }
}
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

namespace SFCCTools.Core.HTTP
{
    /// <summary>
    /// Logs the request and response bodies to Trace if they look
    /// like textual content
    ///
    /// HttpClients default trace logging only logs headers so this adds
    /// additional information
    /// </summary>
    public class HttpLoggingHandler : DelegatingHandler
    {
        // Content subtypes considered textual and therefore safe to log.
        readonly string[] types = new[] {"html", "text", "xml", "json", "txt", "x-www-form-urlencoded"};

        private readonly ILogger<HttpLoggingHandler> _logger;

        public HttpLoggingHandler(ILogger<HttpLoggingHandler> logger) : base()
        {
            _logger = logger;
        }

        /// <summary>
        /// Traces the request body, forwards the call, then traces the response body.
        /// Bodies are only read when the content looks textual.
        /// </summary>
        protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            if (request.Content != null && ShouldLogContent(request.Content, request.Headers))
            {
                var requestBody = await request.Content.ReadAsStringAsync();
                _logger.LogTrace("Request Body:\n{RequestBody}\n", requestBody);
            }

            var response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);

            if (response.Content != null && ShouldLogContent(response.Content, response.Headers))
            {
                var responseBody = await response.Content.ReadAsStringAsync();
                // Fixed structured-logging property name: previously reused {RequestBody}.
                _logger.LogTrace("Response Body:\n{ResponseBody}\n", responseBody);
            }

            return response;
        }

        // True when the content is a StringContent or either the message or
        // content headers declare a text-based Content-Type.
        bool ShouldLogContent(HttpContent content, HttpHeaders messageHeaders)
        {
            return content is StringContent
                   || this.IsTextBasedContentType(messageHeaders)
                   || this.IsTextBasedContentType(content.Headers);
        }

        // Checks the Content-Type header against the known textual subtypes.
        bool IsTextBasedContentType(HttpHeaders headers)
        {
            IEnumerable<string> values;

            if (!headers.TryGetValues("Content-Type", out values))
                return false;

            var header = string.Join(" ", values).ToLowerInvariant();

            return types.Any(t => header.Contains(t));
        }
    }
}
using System;
using System.Threading.Tasks;
using ResultFunctional.Models.Implementations.Results;
using ResultFunctional.Models.Interfaces.Errors.Base;
using ResultFunctional.Models.Interfaces.Results;

namespace ResultFunctional.FunctionalExtensions.Async.ResultExtension.ResultErrors
{
    /// <summary>
    /// Async extension methods that execute an action and capture any exception
    /// into a result-error response.
    /// </summary>
    public static class ResultErrorTryAsyncExtensions
    {
        /// <summary>
        /// Executes the async action; returns an empty (success) result, or a result
        /// carrying <paramref name="errorType"/> with the caught exception appended.
        /// </summary>
        public static async Task<IResultError> ResultErrorTryAsync(Func<Task> action, IErrorResult errorType)
        {
            try
            {
                await action.Invoke();
            }
            catch (Exception ex)
            {
                return new ResultError(errorType.AppendException(ex));
            }

            return new ResultError();
        }
    }
}
using System;
using System.Collections.Generic;
using Microsoft.Diagnostics.Tracing;
using Microsoft.Diagnostics.Tracing.Parsers;

namespace Quadrant.UITest.Framework
{
    /// <summary>
    /// A measured scenario delimited by a start and an end ETW event.
    /// Subclasses react to the two events and report results through
    /// <see cref="LogResultInternal"/>.
    /// </summary>
    public abstract class Scenario
    {
        // Event source the callbacks were registered on; null until Register is called.
        private TraceEventDispatcher _source;
        private readonly List<Counter> _counters = new List<Counter>();

        public Scenario(
            string name,
            string startEventProvider,
            string startEvent,
            string endEventProvider,
            string endEvent)
        {
            Name = name ?? throw new ArgumentNullException(nameof(name));
            StartEventProvider = startEventProvider ?? throw new ArgumentNullException(nameof(startEventProvider));
            StartEvent = startEvent ?? throw new ArgumentNullException(nameof(startEvent));
            EndEventProvider = endEventProvider ?? throw new ArgumentNullException(nameof(endEventProvider));
            EndEvent = endEvent ?? throw new ArgumentNullException(nameof(endEvent));
        }

        public string Name { get; }

        public string StartEventProvider { get; }

        public string StartEvent { get; }

        public string EndEventProvider { get; }

        public string EndEvent { get; }

        public IReadOnlyList<Counter> Counters
        {
            get => _counters;
        }

        /// <summary>
        /// Subscribes OnStart/OnEnd to the configured provider events on the source.
        /// </summary>
        public void Register(TraceEventDispatcher source)
        {
            _source = source;
            DynamicTraceEventParser dynamic = source.Dynamic;
            dynamic.AddCallbackForProviderEvent(StartEventProvider, StartEvent, OnStart);
            dynamic.AddCallbackForProviderEvent(EndEventProvider, EndEvent, OnEnd);
        }

        /// <summary>
        /// Removes the callbacks registered by <see cref="Register"/>; safe to call
        /// when Register was never invoked.
        /// </summary>
        public void Unregister()
        {
            if (_source != null)
            {
                DynamicTraceEventParser dynamic = _source.Dynamic;
                dynamic.RemoveCallback<TraceEvent>(OnStart);
                dynamic.RemoveCallback<TraceEvent>(OnEnd);
            }
        }

        public void AddCounter(Counter counter) => _counters.Add(counter);

        /// <summary>
        /// Logs the subclass-specific result followed by each counter's total.
        /// </summary>
        public void LogResult(PerformanceTestContext context)
        {
            LogResultInternal(context);

            foreach (Counter counter in Counters)
            {
                context.LogMessage($"\t{counter.Name}: {counter.Count}");
            }
        }

        protected abstract void LogResultInternal(PerformanceTestContext context);

        /// <summary>Whether the given trace timestamp falls inside this scenario.</summary>
        public abstract bool Contains(double timeStamp);

        protected abstract void OnStart(TraceEvent startEvent);

        protected abstract void OnEnd(TraceEvent endEvent);
    }
}
using Abp.AutoMapper;
using Localink.Platform.Authorization.Roles.Dto;
using Localink.Platform.Web.Areas.Mpa.Models.Common;

namespace Localink.Platform.Web.Areas.Mpa.Models.Roles
{
    /// <summary>
    /// View model for the create/edit role modal; inherits the edit-output DTO
    /// so the modal binds directly to its members.
    /// </summary>
    [AutoMapFrom(typeof(GetRoleForEditOutput))]
    public class CreateOrEditRoleModalViewModel : GetRoleForEditOutput, IPermissionsEditViewModel
    {
        // Editing an existing role when it already has an Id; creating otherwise.
        public bool IsEditMode
        {
            get { return Role.Id.HasValue; }
        }

        public CreateOrEditRoleModalViewModel(GetRoleForEditOutput output)
        {
            // Copy all mapped members from the DTO onto this view model (ABP AutoMapper).
            output.MapTo(this);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Sico.iResponder.App.Common;
using Sico.iResponder.Data.Models;

namespace Sico.iResponder.App.Pages.QuestionResult
{
    /// <summary>
    /// Interaction logic for QuestionResult.xaml.
    /// Lays out up to four participant answer panels centered on a 1920x1080
    /// canvas; a mouse click or Enter/Space raises <see cref="Next"/>.
    /// </summary>
    public partial class QuestionResultPage : Page, INotifyKeyDown
    {
        // Raised when the operator advances past this result screen.
        public event Action Next;

        public QuestionResultPage(IList<CompetitionAnswer> answers)
        {
            InitializeComponent();
            SetAnswers(answers);
            this.MouseDown += (sender, e) =>
            {
                Next?.Invoke();
            };
        }

        // Sizes and positions the participant panels so they fit the fixed
        // 1920x1080 canvas with even spacing; hides unused panels.
        private void SetAnswers(IList<CompetitionAnswer> answers)
        {
            // NOTE(review): stray empty statement after ToList() — harmless, kept verbatim.
            answers = answers.OrderBy(t => t.CompetitionParticipant.No).ToList(); ;
            var count = answers.Count;
            var participants = new[] { Participant1, Participant2, Participant3, Participant4 };

            double paddingLeftRight = 20;
            double paddingTopBottom = 50;
            // Panel aspect ratio: height = width * (900 / 600).
            var scale = (double)900 / 600;
            var width = (1920 - paddingLeftRight * 2 - paddingLeftRight * (count - 1)) / count;
            var height = width * scale;
            // Clamp to the available vertical space and recompute width from the ratio.
            if (height > 1080 - paddingTopBottom * 2)
            {
                height = 1080 - paddingTopBottom * 2;
                width = (int)(height / scale);
            }
            // Recenter with the final panel size.
            paddingTopBottom = ((1080 - height) / 2);
            paddingLeftRight = (1920 - width * count) / (count + 1);

            for (var i = 0; i < participants.Length; i++)
            {
                var participant = participants[i];
                // NOTE(review): Skip(i).FirstOrDefault() == ElementAtOrDefault(i);
                // yields null for panels beyond the answer count.
                var answer = answers.Skip(i).FirstOrDefault();
                if (answer == null)
                {
                    participant.Visibility = Visibility.Hidden;
                    continue;
                }
                participant.SetValue(Canvas.LeftProperty, (double)(paddingLeftRight + (paddingLeftRight + width) * i));
                participant.SetValue(Canvas.TopProperty, (double)paddingTopBottom);
                participant.Width = width;
                participant.Height = height;
                participant.SetCompetitionAnswer(answer);
            }
        }

        // Enter or Space also advances to the next screen.
        public void OnKeyDown(object sender, KeyEventArgs e)
        {
            if (e.Key == Key.Enter || e.Key == Key.Space)
            {
                Next?.Invoke();
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;

namespace SuperSQLInjection.model
{
    /// <summary>
    /// User-facing (Chinese) error messages for unsupported injection scenarios.
    /// The string values are displayed to the user — do not translate them in code.
    /// </summary>
    public static class ErrorMessage
    {
        // "Sorry, MySQL 4 databases do not support error-based injection."
        public static String mysql4_no_error_inject_info = "抱歉MySQL4数据库,不支持错误显示注入!";

        // "Sorry, Access databases do not support error-based injection."
        public static String access_no_error_inject_info = "抱歉Access数据库,不支持错误显示注入!";

        // "Access needs a keyword to help blind-guess table names, so please fill in the keyword."
        public static String access_no_key = "Access数据库需要关键字协助盲猜表明,所以大侠请你填写好关键字!";
    }
}
using ITGlobal.CommandLine.Parsing.Impl;
using Xunit;

namespace ITGlobal.CommandLine.Parsing
{
    /// <summary>
    /// Verifies <see cref="LevenshteinDistance.Calculate"/> over identity,
    /// case-insensitive, empty-string and classic textbook cases.
    /// </summary>
    public class LevenshteinDistanceTest
    {
        [Theory]
        [InlineData("test", "test", 0)]
        [InlineData("test", "tEST", 0)]
        [InlineData("", "A", 1)]
        [InlineData("A", "", 1)]
        [InlineData("ABC", "def", 3)]
        [InlineData("kitten", "sitting", 3)]
        public void Calculate_distance(string left, string right, int expected)
        {
            Assert.Equal(expected, LevenshteinDistance.Calculate(left, right));
        }
    }
}
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using Microsoft.Extensions.Internal;

namespace Microsoft.AspNetCore.Razor.Language
{
    /// <summary>
    /// Equality comparer for <see cref="DirectiveTokenDescriptor"/> based on
    /// Kind and Optional.
    /// </summary>
    internal class DirectiveTokenDescriptorComparer : IEqualityComparer<DirectiveTokenDescriptor>
    {
        public static readonly DirectiveTokenDescriptorComparer Default = new DirectiveTokenDescriptorComparer();

        protected DirectiveTokenDescriptorComparer()
        {
        }

        public bool Equals(DirectiveTokenDescriptor descriptorX, DirectiveTokenDescriptor descriptorY)
        {
            if (descriptorX == descriptorY)
            {
                return true;
            }

            // BUG FIX: previously dereferenced descriptorY without a null check,
            // throwing NullReferenceException for (non-null, null) pairs.
            return descriptorX != null &&
                descriptorY != null &&
                descriptorX.Kind == descriptorY.Kind &&
                descriptorX.Optional == descriptorY.Optional;
        }

        public int GetHashCode(DirectiveTokenDescriptor descriptor)
        {
            if (descriptor == null)
            {
                throw new ArgumentNullException(nameof(descriptor));
            }

            var hashCodeCombiner = HashCodeCombiner.Start();
            hashCodeCombiner.Add(descriptor.Kind);
            hashCodeCombiner.Add(descriptor.Optional ? 1 : 0);

            return hashCodeCombiner.CombinedHash;
        }
    }
}

namespace Minduca.Domain.Core.Events
{
    /// <summary>
    /// Domain event marker interface — intentionally empty; implementations
    /// are identified by type.
    /// </summary>
    public interface IDomainEvent
    {
    }
}
using System;
using System.ComponentModel.DataAnnotations;
using AwkwardPresentation.Models.Properties;
using EPiServer.Core;
using EPiServer.DataAbstraction;
using EPiServer.DataAnnotations;
using EPiServer.SpecializedProperties;

namespace AwkwardPresentation.Models.Pages
{
    /// <summary>
    /// EPiServer page type holding an image reference (URL, title, text).
    /// </summary>
    [ContentType(DisplayName = "ImageModel", GUID = "44aba054-372e-4641-bd70-99a0f515b081", Description = "")]
    public class ImageModel : PageData
    {
        [Display(
            Name = "Image URL",
            Description = "",
            GroupName = SystemTabNames.Content,
            Order = 100)]
        public virtual string Url { get; set; }

        [Display(
            Name = "Image title",
            Description = "",
            GroupName = SystemTabNames.Content,
            Order = 110)]
        public virtual string Title { get; set; }

        [Display(
            Name = "Image text",
            Description = "",
            GroupName = SystemTabNames.Content,
            Order = 120)]
        public virtual string Text { get; set; }
    }

    /// <summary>
    /// Plain DTO counterpart of <see cref="ImageModel"/> for presentation code.
    /// </summary>
    public class SimpleImageModel
    {
        public int Id { get; set; }

        public string Url { get; set; }

        public string Title { get; set; }

        public string Text { get; set; }

        public ClickerModel ClickerModel { get; set; }
    }
}
namespace RMS.Repositories.Contracts
{
    using RMS.Data.Entities;

    /// <summary>
    /// Contract for repository responsible for dealing with Teacher entities.
    /// Inherits all members from <see cref="IRepository{T}"/>; no extra members yet.
    /// </summary>
    public interface ITeacherRepository : IRepository<Teacher>
    {
    }
}
using BluePointLilac.Controls;
using BluePointLilac.Methods;
using ContextMenuManager.Methods;
using System.Drawing;
using System.Windows.Forms;

namespace ContextMenuManager.Controls
{
    /// <summary>
    /// Modal-style dialog hosting a list of sub-items with a status bar that
    /// shows the hovered item's file/registry path.
    /// </summary>
    sealed class SubItemsForm : Form
    {
        public SubItemsForm()
        {
            this.SuspendLayout();
            this.StartPosition = FormStartPosition.CenterParent;
            this.ShowInTaskbar = this.MaximizeBox = this.MinimizeBox = false;
            // DpiZoom scales the fixed design size for the current display DPI.
            this.MinimumSize = this.Size = new Size(646, 419).DpiZoom();
            this.Controls.AddRange(new Control[] { listBox, statusBar });
            statusBar.CanMoveForm();
            this.AddEscapeButton();
            this.ResumeLayout();
        }

        readonly MyListBox listBox = new MyListBox { Dock = DockStyle.Fill };
        readonly MyStatusBar statusBar = new MyStatusBar();

        /// <summary>
        /// Attaches a list to the dialog and wires hover tracking to the status bar.
        /// </summary>
        public void AddList(MyList myList)
        {
            myList.Owner = listBox;
            myList.HoveredItemChanged += (sender, e) =>
            {
                if(!AppConfig.ShowFilePath) return;
                MyListItem item = myList.HoveredItem;
                // Items expose their location through differently named properties
                // depending on concrete type, so probe them via reflection and show
                // the first non-empty one.
                foreach(string prop in new[] { "ItemFilePath", "RegPath", "GroupPath" })
                {
                    string path = item.GetType().GetProperty(prop)?.GetValue(item, null)?.ToString();
                    if(!path.IsNullOrWhiteSpace())
                    {
                        statusBar.Text = path;
                        return;
                    }
                }
                // Fallback: no path property found — show the item's caption.
                statusBar.Text = item.Text;
            };
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
using Dapper;
using EZChat.Master.Database;
using Microsoft.AspNetCore.DataProtection.Repositories;
using SqlKata;

namespace EZChat.Master.DataProtection
{
    /// <summary>
    /// Persists ASP.NET Core Data Protection key XML in a database table so
    /// keys survive restarts and can be shared between instances.
    /// </summary>
    public class DataProtectionXmlRepository : IXmlRepository
    {
        private const string TableName = "data_protection_keys";

        private readonly IDbConnectionFactory _factory;

        public DataProtectionXmlRepository(IDbConnectionFactory factory)
        {
            _factory = factory;
        }

        /// <summary>
        /// Loads every stored key element from the table.
        /// </summary>
        public IReadOnlyCollection<XElement> GetAllElements()
        {
            var sql = new Query(TableName).CompileQuery();

            using (var connection = _factory.Open())
            {
                // Dapper returns dynamic rows; the "xml" column holds the serialized element.
                return connection.Query(sql)
                                 .Select(x => XElement.Parse((string) x.xml))
                                 .ToArray();
            }
        }

        /// <summary>
        /// Inserts a new key element (keys are append-only; rows are never updated).
        /// </summary>
        public void StoreElement(XElement element, string friendlyName)
        {
            var data = new Dictionary<string, object>
            {
                ["name"] = friendlyName,
                ["xml"] = element.ToString(SaveOptions.DisableFormatting)
            };

            var sql = new Query(TableName)
                .AsInsert(data)
                .CompileQuery();

            using (var connection = _factory.Open())
            {
                connection.Execute(sql);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace TextEngine
{
    /// <summary>
    /// Produces an increasing series of integers. Each read of <see cref="Next"/>
    /// pre-increments the current value; once the end value has been returned,
    /// the counter resets to the start value (so the series wraps to start + 1).
    /// </summary>
    public class Sequence
    {
        private int _current;
        private int _first;
        private int _last;

        public Sequence() : this(-1)
        {
        }

        public Sequence(int start) : this(start, Int32.MaxValue)
        {
        }

        public Sequence(int start, int end)
        {
            _current = start;
            _first = start;
            _last = end;
        }

        /// <summary>
        /// The next value in the series. Note: this getter mutates state,
        /// so every read advances the sequence.
        /// </summary>
        public int Next
        {
            get
            {
                // Wrap when the end (or Int32.MaxValue) was the last value returned.
                if (_current == Int32.MaxValue || _current == _last)
                    _current = _first;
                return ++_current;
            }
        }
    }
}
/* Copyright 2017 Cimpress

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

using System;
using System.Threading.Tasks;

namespace VP.FF.PT.Common.WpfInfrastructure.Threading
{
    /// <summary>
    /// Dispatches an action or func to the UI thread.
    /// </summary>
    public interface IDispatcher
    {
        /// <summary>
        /// Execute an action that needs to run on the UI thread.
        /// </summary>
        /// <param name="actionToDispatch">The action to execute.</param>
        void Dispatch(Action actionToDispatch);

        /// <summary>
        /// Execute a func that needs to run on the UI thread.
        /// </summary>
        /// <param name="functionToDispatch">The func to execute.</param>
        /// <returns>The task of functionToDispatch.</returns>
        Task Dispatch(Func<Task> functionToDispatch);

        /// <summary>
        /// Execute a func that needs to run on the UI thread.
        /// </summary>
        /// <typeparam name="TResult">Result type produced by the dispatched func.</typeparam>
        /// <param name="functionToDispatch">The func to execute.</param>
        /// <returns>The task of functionToDispatch.</returns>
        Task<TResult> Dispatch<TResult>(Func<Task<TResult>> functionToDispatch);
    }
}
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using DIS = DataInfluenceSystem;
using CLS = CommonLogicSystem;
using CS = ConditionSystem;
using System;

/// <summary>
/// Singleton that resolves the card type for the current turn from the scene
/// map and draws a matching card from the card pool into the data system.
/// </summary>
public class CardPoolLogic : SingletonBehaviour<CardPoolLogic>
{
    // NOTE(review): never read or written anywhere in this class — presumably a
    // per-day card cache; confirm before removing.
    List<Card> dayCards = new List<Card>();

    void Awake()
    {
    }

    // Start is called before the first frame update
    void Start()
    {
    }

    // Update is called once per frame
    void Update()
    {
    }

    public void Init()
    {
    }

    /// <summary>
    /// Derives the card type for the current turn from the scene map entry at
    /// the current distance and stores it in DataType.TurnCardType. Falls back
    /// to CardType.None when the map is missing or the distance is out of range.
    /// </summary>
    public void UpdateCardType()
    {
        // Look up the scene map: a list of card types indexed by distance.
        var sceneMap = DataSystem.I.CopyAttrDataWithInfluenceByType<List<CardType>>(DataType.SceneMap);
        if (sceneMap == null)
        {
            DataSystem.I.SetDataByType<int>(DataType.TurnCardType, (int)CardType.None);
            return;
        }
        var distance = DataSystem.I.CopyAttrDataWithInfluenceByType<int>(DataType.Distance);
        if (distance < 0 || distance >= sceneMap.Count)
        {
            // Distance does not address a map entry: no card this turn.
            DataSystem.I.SetDataByType<int>(DataType.TurnCardType, (int)CardType.None);
            return;
        }
        var cardType = sceneMap[distance];
        DataSystem.I.SetDataByType<int>(DataType.TurnCardType, (int)cardType);
    }

    /// <summary>
    /// Draws a random card of the current turn's card type and stores its
    /// quality and id in the data system. Stores card id 0 ("no card") when
    /// the type is blank or when no card satisfies its fill condition.
    /// </summary>
    public void RerollTurnCard()
    {
        // Get the card type chosen for this turn.
        var cardType = (CardType)DataSystem.I.GetDataByType<int>(DataType.TurnCardType);
        // Handle blank cards: id 0 means "no card drawn".
        if (cardType == CardType.Blank)
        {
            DataSystem.I.SetDataByType(DataType.TurnCardId, 0);
            return;
        }
        // Fetch the pool of cards for this card type.
        var cardList = CardLogic.I.GetCardListByType(cardType);
        if (cardList.Count > 0)
        {
            // Keep only the cards whose fill condition is currently met.
            cardList = cardList.Where((c) => ConditionSystem.I.IsConditionMet(c.FillCondition)).ToList();
        }
        // Nothing drawable: record an empty draw and warn.
        if (cardList.Count <= 0)
        {
            DataSystem.I.SetDataByType(DataType.TurnCardId, 0);
            Debug.LogWarning($"今天没抽到卡, 卡牌类型: {cardType.ToString()}");
            return;
        }
        // Draw one card at random (removes it from the filtered list copy).
        var randomCard = GameUtil.RandomRemoveFromList(cardList);
        // Persist the drawn card's quality and id for the turn.
        DataSystem.I.SetDataByType(DataType.TurnCardQuality, randomCard.Quality);
        DataSystem.I.SetDataByType(DataType.TurnCardId, randomCard.Id);
    }

    /// <summary>Returns the shared Card definition for the current turn's card id.</summary>
    public Card GetTurnCardRaw()
    {
        var cardId = DataSystem.I.CopyAttrDataWithInfluenceByType<int>(DataType.TurnCardId);
        return CardLogic.I.GetCardById(cardId);
    }

    /// <summary>Returns a fresh Card instance created from the current turn's card id.</summary>
    public Card GetTurnCardInstance()
    {
        var cardId = DataSystem.I.CopyAttrDataWithInfluenceByType<int>(DataType.TurnCardId);
        return CardLogic.I.InstantiateTurnCard(cardId);
    }
}
using System;
using System.Collections.Generic;

namespace Rebus.Config
{
    /// <summary>
    /// Allows for fluently configuring RabbitMQ input queue options
    /// </summary>
    public class RabbitMqQueueOptionsBuilder
    {
        /// <summary>
        /// Set the durability of the input queue
        /// </summary>
        public RabbitMqQueueOptionsBuilder SetDurable(bool durable)
        {
            Durable = durable;
            return this;
        }

        /// <summary>
        /// Set exclusiveness of the input queue
        /// </summary>
        public RabbitMqQueueOptionsBuilder SetExclusive(bool exclusive)
        {
            Exclusive = exclusive;
            return this;
        }

        /// <summary>
        /// Set auto-delete property when declaring the queue
        /// </summary>
        /// <param name="autoDelete">Whether the queue should be deleted when the last consumer unsubscribes</param>
        public RabbitMqQueueOptionsBuilder SetAutoDelete(bool autoDelete)
        {
            AutoDelete = autoDelete;
            return this;
        }

        /// <summary>
        /// Configure for how long a queue can be unused before it is automatically deleted by setting the x-expires argument
        /// </summary>
        /// <param name="ttlInMs">Expiration period in milliseconds</param>
        /// <exception cref="ArgumentException">If the argument value is 0 or less</exception>
        public RabbitMqQueueOptionsBuilder SetQueueTTL(long ttlInMs)
        {
            if (ttlInMs <= 0)
            {
                throw new ArgumentException("Time must be in milliseconds and greater than 0", nameof(ttlInMs));
            }

            // Use the indexer instead of Add so calling SetQueueTTL more than once
            // (directly, or via the SetAutoDelete overload) overwrites the previous
            // value instead of throwing on a duplicate "x-expires" key.
            Arguments["x-expires"] = ttlInMs;
            return this;
        }

        /// <summary>
        /// Set auto delete, when the last consumer disconnects and/or how long the queue can stay unused until it is deleted as expired.
        /// Zero or negative values of ttlInMs are ignored (no queue expiration).
        /// </summary>
        /// <param name="autoDelete">Whether the queue should be deleted</param>
        /// <param name="ttlInMs">Time to live (in milliseconds) after the last subscriber disconnects</param>
        public RabbitMqQueueOptionsBuilder SetAutoDelete(bool autoDelete, long ttlInMs = 0)
        {
            SetAutoDelete(autoDelete);

            if (ttlInMs > 0)
            {
                SetQueueTTL(ttlInMs);
            }

            return this;
        }

        /// <summary>
        /// Set the arguments of the input queue (replaces the current argument dictionary)
        /// </summary>
        public RabbitMqQueueOptionsBuilder SetArguments(Dictionary<string, object> arguments)
        {
            Arguments = arguments;
            return this;
        }

        /// <summary>
        /// Add input queue arguments to the default settings
        /// </summary>
        /// <exception cref="ArgumentException">If an argument with the same key was already added</exception>
        public RabbitMqQueueOptionsBuilder AddArgument(string key, object val)
        {
            Arguments.Add(key, val);
            return this;
        }

        internal bool Durable { get; private set; } = true;

        internal bool Exclusive { get; private set; } = false;

        internal bool AutoDelete { get; private set; } = false;

        // Default arguments applied to the input queue; "x-ha-policy" mirrors the original defaults.
        internal Dictionary<string, object> Arguments { get; private set; } = new Dictionary<string, object>
        {
            {"x-ha-policy", "all"}
        };
    }
}
using System;
using System.ComponentModel;
using Caliburn.Micro;
using Katarai.Wpf.Extensions;

namespace Katarai.Wpf.ViewModels
{
    /// <summary>
    /// Base view model for single-instance windows: closing the window is
    /// vetoed and the window is hidden instead, so it can be shown again later.
    /// </summary>
    public abstract class SingleInstanceViewModel : Screen, IViewModel
    {
        public virtual void OnClose(object sender, CancelEventArgs eventArgs)
        {
            // Veto the close; we hide rather than destroy the window.
            eventArgs.Cancel = true;

            try
            {
                var window = this.GetWindow();
                window.Hide();
            }
            catch (Exception ex)
            {
                // Hiding may fail (e.g. no window attached); log and carry on.
                System.Diagnostics.Debug.WriteLine("Can't hide: " + ex.Message);
            }
        }
    }
}
namespace System.Windows.Forms
{
    /// <summary>
    /// Represents the method that will handle an event that carries
    /// <see cref="DateBoldEventArgs"/> data (used by calendar controls to ask
    /// which dates should be displayed in bold).
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">A <see cref="DateBoldEventArgs"/> that contains the event data.</param>
    public delegate void DateBoldEventHandler(object sender, DateBoldEventArgs e);
}
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. using System; using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Diagnostics.ContractsLight; #pragma warning disable CS1591 // Missing XML comment for publicly visible type or member #pragma warning disable CS3003 #pragma warning disable CS3008 #pragma warning disable SA1000 #pragma warning disable SA1002 #pragma warning disable SA1003 #pragma warning disable SA1021 #pragma warning disable SA1023 #pragma warning disable SA1025 #pragma warning disable SA1028 #pragma warning disable SA1106 #pragma warning disable SA1107 #pragma warning disable SA1108 #pragma warning disable SA1119 #pragma warning disable SA1120 #pragma warning disable SA1131 #pragma warning disable SA1121 // Use built-in type alias #pragma warning disable SA1210 #pragma warning disable SA1300 // Element must begin with upper-case letter #pragma warning disable SA1303 // Const field names must begin with upper-case letter #pragma warning disable SA1308 // Variable names must not be prefixed #pragma warning disable SA1310 // Field names must not contain underscore #pragma warning disable SA1312 #pragma warning disable SA1311 // Static readonly fields must begin with upper-case letter #pragma warning disable SA1400 // Access modifier must be declared #pragma warning disable SA1503 #pragma warning disable SA1507 #pragma warning disable SA1508 #pragma warning disable SA1512 #pragma warning disable SA1513 #pragma warning disable SA1515 #pragma warning disable SA1516 #pragma warning disable SA1600 // Elements must be documented #pragma warning disable SA1602 #pragma warning disable IDE0040 #pragma warning disable IDE0011 #pragma warning disable IDE1006 // Naming rules are different here. 
namespace BuildXL.Cache.ContentStore.Hashing.Chunking { using BYTE = System.Byte; using DWORD = System.UInt32; using HashValueT = System.UInt64; using OffsetT = System.Int64; using size_t = System.UInt64; //[GeneratedCode("Copied from Windows Sources", "1.0")] // Adapted from https://microsoft.visualstudio.com/OS/_git/os?path=%2Fservercommon%2Fbase%2Ffs%2Fdedup%2Fmodules%2Fchunk%2FRegressionChunking.cpp&version=GBofficial%2Frsmaster&_a=contents public sealed class RegressionChunking { const DWORD g_dwChunkingHashMatchValue = 0x55555555; // Sliding window size (in bytes) for Rabin hashing public const size_t m_nWindowSize = 16; // Number of past offset slots to be remembered for regression const size_t m_nRegressSize = 4; // Polynomial values const int PolynomialsLength = 256; // Parameters private readonly size_t m_nMinChunkSize; private readonly size_t m_nMaxChunkSize; private readonly size_t m_nAverageChunkSize; private DWORD m_dwInitialChunkingHashMatchValue => g_dwChunkingHashMatchValue; private readonly HashMaskMatch _smallestMask; // State maintained across multiple FindRabinChunkBoundariesInternal() calls private HashValueT m_hash; // Last hash value private readonly BYTE[] m_history; // Last bytes from the previous chunk. Used to reinitialize the state machine for chunking (size = window size) private readonly OffsetT[] m_regressChunkLen; private OffsetT m_lastNonZeroChunkLen; private OffsetT m_numZeroRun; // Size of continuous zeros after last chunk. // >= 0, a run is counted (# of consecutive zeros) // < 0, a run has been interrupted, i.e., encounter at least one none zero values. 
// Regress hash values private readonly HashMaskMatch[] _regressionMasks; private size_t previouslyProcessedBytesAcrossCalls; private size_t lastChunkAbsoluteOffsetAcrossCalls; private readonly List<DedupBasicChunkInfo> outOffsetsVector = new List<DedupBasicChunkInfo>(); private readonly Action<DedupBasicChunkInfo> chunkCallback; private bool m_pushBufferCalled = false; public IReadOnlyList<DedupBasicChunkInfo> Chunks => outOffsetsVector; /// <summary>A mask + match value to compare a current hash value to. Colocating mask and match yields better cache locality.</summary> private readonly struct HashMaskMatch { public HashMaskMatch(uint truncateMask, uint matchValue) => (_truncateMask, _matchValue) = (truncateMask, matchValue); private readonly uint _truncateMask; // 0b0000...1111, where number of trailing 1's is the mask length. private readonly uint _matchValue; // 0b0000...xxxx, where x is a bit in the hash match value. public bool IsMatch(ulong hash) => _matchValue == (hash & _truncateMask); } public RegressionChunking(ChunkerConfiguration configuration, Action<DedupBasicChunkInfo> chunkCallback) { m_nMinChunkSize = (size_t)configuration.MinChunkSize; m_nAverageChunkSize = (size_t)configuration.AvgChunkSize; m_nMaxChunkSize = (size_t)configuration.MaxChunkSize; this.chunkCallback = chunkCallback; m_history = new BYTE[m_nWindowSize]; m_regressChunkLen = new OffsetT[m_nRegressSize]; _regressionMasks = new HashMaskMatch[m_nRegressSize]; previouslyProcessedBytesAcrossCalls = 0; lastChunkAbsoluteOffsetAcrossCalls = 0; m_hash = 0; for (int i = 0; i < m_regressChunkLen.Length; i++) { m_regressChunkLen[i] = -1; } m_numZeroRun = 0; m_lastNonZeroChunkLen = -1; // Initialize // The maximum value of N in the comparison above (default = 16 bits) DWORD dwChunkingTruncateMask = (DWORD)(m_nAverageChunkSize - 1); // This is the value we are using for chunking: if the least significant N bytes from the Rabin hash are equal to this value, we declare a "cut" (where N depends on 
the context) DWORD dwChunkingHashMatchValue = m_dwInitialChunkingHashMatchValue & dwChunkingTruncateMask; // Initialize a set of mask & match value, each has one bit less than previous mask. for (size_t regressIndex = 0; regressIndex < m_nRegressSize; regressIndex++) { _regressionMasks[regressIndex] = new HashMaskMatch(dwChunkingTruncateMask, dwChunkingHashMatchValue); dwChunkingTruncateMask >>= 1; dwChunkingHashMatchValue &= dwChunkingTruncateMask; } _smallestMask = new HashMaskMatch(dwChunkingTruncateMask, dwChunkingHashMatchValue); } public void PushBuffer( BYTE[] buffer ) { PushBuffer(new ArraySegment<BYTE>(buffer)); } public void PushBuffer( ArraySegment<BYTE> buffer ) { // we do not trust successive calls, so ensure we only call it once Contract.Assert(!m_pushBufferCalled); m_pushBufferCalled = true; size_t size = (size_t)buffer.Count; bool bNoMoreData = true; if (size == 0) { return; } else if (size < m_nWindowSize) { previouslyProcessedBytesAcrossCalls += size; return; } unsafe { fixed (byte* p = &buffer.Array![buffer.Offset]) // byte * p = buffer.GetContinuousBuffer(iSizeDone, size); { FindRabinChunkBoundariesInternal( p, size, bNoMoreData, ref previouslyProcessedBytesAcrossCalls, ref lastChunkAbsoluteOffsetAcrossCalls); } } } public void Complete() { AddChunkInfo( new DedupBasicChunkInfo( lastChunkAbsoluteOffsetAcrossCalls, previouslyProcessedBytesAcrossCalls, DedupChunkCutType.DDP_CCT_EndReached)); } [MethodImpl(MethodImplOptions.AggressiveInlining)] private static void NT_ASSERT(bool expression) { // Making this method inline friendly by moving the 'throw' statement // into a separate method. That's because JIT won't inline a method with 'throw' instruction in it. 
if (!expression) { throwInvalidOperation(); } static void throwInvalidOperation() { throw new InvalidOperationException(); } } private static unsafe void DDP_BUFFER_RANGE_ASSERT( byte * pTestedPointer, byte * pStartBuffer, byte * pEndBuffer) { NT_ASSERT(pTestedPointer != null); NT_ASSERT(pTestedPointer >= pStartBuffer); NT_ASSERT(pTestedPointer < pEndBuffer); } private static unsafe void DDP_ASSERT_VALID_ARRAY_INDEX(long nArrayIndex, byte[] arrValues) { NT_ASSERT(nArrayIndex < arrValues.Length); NT_ASSERT(nArrayIndex >= 0); } private static unsafe void DDP_ASSERT_VALID_ARRAY_INDEX(ulong nArrayIndex, long[] arrValues) { NT_ASSERT((OffsetT)(nArrayIndex) < arrValues.Length); NT_ASSERT((OffsetT)(nArrayIndex) >= 0); } // This method is called a lot in a hot paths, making it static speeds up the overall throughput. [MethodImpl(MethodImplOptions.AggressiveInlining)] static unsafe bool DDP_IS_VALID_POINTER(byte* pTestedPointer, byte* pStartBuffer, byte* pEndBuffer) { return pTestedPointer >= pStartBuffer && pTestedPointer < pEndBuffer; } private void AddChunkInfo(DedupBasicChunkInfo chunkInfo) { if (chunkInfo.m_nChunkLength != 0) { outOffsetsVector.Add(chunkInfo); chunkCallback(chunkInfo); } } private unsafe void FindRabinChunkBoundariesInternal( byte* pStartBuffer, // Pointer to the BYTE buffer of data to be chunked in a sequence of FindRabinChunkBoundariesInternal() calls size_t cbLen, // Length of the data to be chunked in a sequence of FindRabinChunkBoundariesInternal() calls bool bNoMoreData, // If TRUE, this is the last call in the sequence of FindRabinChunkBoundariesInternal() calls on this data ref size_t previouslyProcessedBytesParam, // Temporary state between calls FindRabinChunkBoundariesInternal(). Amount of previously processed bytes since the last recorded chunk ref size_t lastChunkAbsoluteOffsetParam // Temporary state between calls FindRabinChunkBoundariesInternal(). 
Offset of the last inserted chunk, relative to the overall buffer in FindRabinChunkBoundaries() ) { HashValueT[] g_arrPolynomialsTD = Rabin64Table.g_arrPolynomialsTD; HashValueT[] g_arrPolynomialsTU = Rabin64Table.g_arrPolynomialsTU; unchecked { NT_ASSERT(cbLen > 0); NT_ASSERT(pStartBuffer != null); NT_ASSERT(previouslyProcessedBytesParam <= m_nMaxChunkSize); // // Buffer validation support // // Used to define the end of buffer (the first byte beyond the addressable pStartBuffer) // Used only for DDP_ASSERT_VALID_XXX asserts byte* pEndBuffer = pStartBuffer + cbLen; NT_ASSERT(pStartBuffer < pEndBuffer); // Ensure that we don't have arithmetic overrun // Using local functions instead of delegates, because the local functions are slightly more efficient // because they can avoid creating a closure at runtime. void DDP_ASSERT_VALID_BUFFER_POINTER(byte* pTestedPointer) { unchecked { DDP_BUFFER_RANGE_ASSERT(pTestedPointer, pStartBuffer, pEndBuffer); } } void DDP_ASSERT_VALID_BUFFER_END(byte* pTestedPointer) { unchecked { DDP_BUFFER_RANGE_ASSERT((byte*)pTestedPointer, pStartBuffer + 1, pEndBuffer + 1); } } void DDP_ASSERT_VALID_START_POINTER(byte* pTestedPointer) { unchecked { DDP_BUFFER_RANGE_ASSERT((byte*)pTestedPointer + m_nWindowSize, pStartBuffer, pEndBuffer + 1); } } void DDP_ASSERT_VALID_END_POINTER(byte* pTestedPointer) { unchecked { DDP_BUFFER_RANGE_ASSERT((byte*)pTestedPointer, pStartBuffer, pEndBuffer + 1); } } //bool DDP_IS_VALID_POINTER(IntPtr pTestedPointer) => // unchecked((((byte*)pTestedPointer >= pStartBuffer) && ((byte*)pTestedPointer < pEndBuffer))); // // Local state variables (for this call) // // During chunking, this is the index where we start the "look-ahead". This offset is relative to the beginning of the current buffer // Note: this value should always be positive OffsetT startOffset = 0; // Amount of bytes available in the buffer for chunking/analysis from startOffset to the end of the buffer. 
Equal or smaller than the size of the buffer // Note: this value should be always positive OffsetT remainingBytes = (OffsetT)cbLen; // Amount of previously processed bytes (since we recorded the last chunk) until the startOffset. Represents bytes from the previous call, in the initial chunking iteration size_t previouslyProcessedBytes = previouslyProcessedBytesParam; // Offset of the last inserted chunk relative to the file stream (i.e. to the beginning of the virtual buffer spanning the sequence of calls) size_t lastChunkAbsoluteOffset = lastChunkAbsoluteOffsetParam; // Holds the currently computed Rabin hash value over the current window. At the end of thus call, we will move this value back to m_hash HashValueT hash = m_hash; /* // Per-instance state members (passed across calls) LONGLONG m_numZeroRun - // Size of continuous zeros after last chunk up to the current offset. // >= 0, a run is counted (# of consecutive zeros) // < 0, a run has been interrupted, i.e., encounter at least one none zero values. HashValueT m_hash; // Last hash value BYTE m_history[m_nWindowSize]; // Last bytes from the previous chunk. Used to reinitialize the state machine for chunking (size = window size) OffsetT m_regressChunkLen[m_nRegressSize]; OffsetT m_lastNonZeroChunkLen; TODO:365262 - simplify the code by allocating a larger buffer and copying the data to ensure a continuous operating buffer */ HashMaskMatch smallestMask = _smallestMask; // If the least significant N bytes from the Rabin hash are equal to this value, we declare a "cut" (N depends on the context) HashMaskMatch[] regressionMasks = _regressionMasks; // Same as above, but for each regression step. // // Chunking loop // while (remainingBytes > 0) { NT_ASSERT(startOffset >= 0); // // Check to see if the available bytes (remaining + previous) are insufficient to create ore "real" (i.e. 
non-minimum) chunks // // If the remaining bytes plus the previous "partial chunk" are less than the minimum chunk size, wrap up and exit size_t remainingBytesToBeReported = (size_t)(remainingBytes) + previouslyProcessedBytes; NT_ASSERT(remainingBytesToBeReported > 0); if (remainingBytesToBeReported < m_nMinChunkSize) { // If we had a zero run previously, check to see if all the remaining bytes are all zeros // If yes, m_numZeroRun will be the size of the last "zero chunk" if (m_numZeroRun >= 0) { NT_ASSERT(m_numZeroRun == (OffsetT)(previouslyProcessedBytes)); byte* pStartZeroTest = pStartBuffer + startOffset; DDP_ASSERT_VALID_BUFFER_POINTER(pStartZeroTest); // Check how many subsequent consecutive bytes are zeros OffsetT remainingNonZero = remainingBytes; // TODO:365262 - move this in a C++ utility routine // Note: we used DDP_IS_VALID_POINTER to "hide" OACR failures as Prefast can't keep up with the large list of asumptions (known bug) while (DDP_IS_VALID_POINTER(pStartZeroTest, pStartBuffer, pEndBuffer) && (remainingNonZero > 0) && ((*pStartZeroTest) == 0)) { remainingNonZero--; pStartZeroTest++; } // Check if we found a non-zero byte if (remainingNonZero > 0) { // A non-zero byte was encountered m_numZeroRun = -1; } else { NT_ASSERT(pStartZeroTest <= pEndBuffer); // All remaining bytes were zeros m_numZeroRun += remainingBytes; NT_ASSERT(m_numZeroRun == (OffsetT)(remainingBytesToBeReported)); } } // Check if this is the last chunk in the last call sequence if (bNoMoreData) { // Add the final chunk, as its size is smaller than the minimum chunk size // TODO:365262 - use here DDP_CCT_MinReached always (technically this is a MinReached) plus mix DDP_CCT_MinReached and DDP_CCT_All_Zero as flags // TODO:365262 - use AddChunk AddChunkInfo( new DedupBasicChunkInfo( lastChunkAbsoluteOffset, remainingBytesToBeReported, (m_numZeroRun >= 0) ? 
DedupChunkCutType.DDP_CCT_All_Zero : DedupChunkCutType.DDP_CCT_MinReached)); // // Reset all state related with the cross-call sequence // // TODO:365262 - move the state reset in a utility // TODO:365262 - reset other members such as m_numZeroRun, m_regressChunk, etc hash = 0; previouslyProcessedBytes = 0; lastChunkAbsoluteOffset += remainingBytesToBeReported; m_numZeroRun = 0; // TODO:365262 - assert that we reached the end of the buffer // TODO:365262 - cleanup alg (visible exit here for code clarity) } else { // This is a "partial" chunk - the remainder will be processed in the next call previouslyProcessedBytes += (size_t)(remainingBytes); } // Add remainingBytes to the processed data startOffset += remainingBytes; remainingBytes = 0; // Nothing left to process in this call. Exit the loop break; } // // Given the treatment above, available bytes (remaining + previous) is at least MinChunkSize. Chunking can now proceed // NT_ASSERT(remainingBytesToBeReported >= m_nMinChunkSize); // Calculate the amount of bytes that could be skipped (since we can skip in the hash calculation the first m_nMinChunkSize bytes) OffsetT bytesToBeSkipped = (OffsetT)(m_nMinChunkSize) - (OffsetT)(previouslyProcessedBytes); NT_ASSERT(remainingBytes >= bytesToBeSkipped); // should be always true given the "if" test above // // Calculate start window // OffsetT initialStartWindow = 0; // Beginning of the byte window for Rabin hash calculation. This offset is relative to the beginning of the buffer // Check if we need to perform a "jump" in the data since, if we just made a chunk cut earlier, we can skip the current offset beyond the m_nMinChunkSize since the last cut // Note - since both values are either zero or positive, this test essentially checks if at least one of the values is non-zero if (startOffset + bytesToBeSkipped > 0) { // End of the byte window for Rabin hash calculation. 
This offset is relative to the beginning of the buffer OffsetT initialEndWindow = startOffset + bytesToBeSkipped; // Note: this can end up slightly negative (but within the window size). This is OK. initialStartWindow = initialEndWindow - (OffsetT)(m_nWindowSize); // Add zero run detection up to the end of the window, including bytes to be skipped if (m_numZeroRun >= 0) { // Scan till end window, is it all zeros? byte* pStartZeroTest = pStartBuffer + startOffset; DDP_ASSERT_VALID_BUFFER_POINTER(pStartZeroTest); byte* pEndPosZeroTest = pStartBuffer + initialEndWindow; DDP_ASSERT_VALID_BUFFER_END(pEndPosZeroTest); // Note: we used DDP_IS_VALID_POINTER to "hide" OACR failures as Prefast can't keep up with the large list of asumptions (known bug) while (DDP_IS_VALID_POINTER(pStartZeroTest, pStartBuffer, pEndBuffer) && (pStartZeroTest != pEndPosZeroTest) && ((*pStartZeroTest) == 0)) { pStartZeroTest++; } // Note: here m_numZeroRun can go beyond previouslyProcessedBytes if (pStartZeroTest != pEndPosZeroTest) { DDP_ASSERT_VALID_BUFFER_POINTER(pStartZeroTest); m_numZeroRun = -1; } else { NT_ASSERT(initialEndWindow >= startOffset); m_numZeroRun += initialEndWindow - (OffsetT)(startOffset); } } // We have to make a jump so the previous hash context is lost. 
Recalculate the hash starting from the new position hash = 0; // Start the hash calculation from the history if the beginning of the window falls outside the current buffer // This could happen if m_nMinChunkSize is just a few bytes below previouslyProcessedBytes, and the content of these bytes were in the previous buffer OffsetT currentStartIndex = initialStartWindow; for (; currentStartIndex < 0; currentStartIndex++) { HashValueT origHash = hash; hash <<= 8; OffsetT nHistoryIndex = currentStartIndex + (long)m_nWindowSize; DDP_ASSERT_VALID_ARRAY_INDEX(nHistoryIndex, m_history); hash ^= m_history[nHistoryIndex]; hash ^= g_arrPolynomialsTD[(origHash >> 56) & 0xff]; } // Perform the Rabin hash calculation on the remaining bytes in the window NT_ASSERT(currentStartIndex >= 0); byte* pbMark = pStartBuffer + currentStartIndex; DDP_ASSERT_VALID_BUFFER_POINTER(pbMark); // Compute the hash for the remaining bytes (within the window) in the actual buffer for (; DDP_IS_VALID_POINTER(pbMark, pStartBuffer, pEndBuffer) && (currentStartIndex < initialEndWindow) && (pbMark < pEndBuffer); currentStartIndex++) { HashValueT origHash = hash; hash <<= 8; hash ^= *pbMark; hash ^= g_arrPolynomialsTD[(origHash >> 56) & 0xff]; pbMark++; } // Reset m_regressChunkLen array as we just did a jump // TODO:365262 - move this to a utility routine (note: also used in the constructor) for (size_t regressIndex = 0; regressIndex < m_nRegressSize; regressIndex++) { DDP_ASSERT_VALID_ARRAY_INDEX(regressIndex, m_regressChunkLen); m_regressChunkLen[regressIndex] = -1; } m_lastNonZeroChunkLen = -1; } else { initialStartWindow = -(OffsetT)(m_nWindowSize); } // // Get pointers to the beginning of the window start/end // // Pointer to the start of the window byte* pStartWindow = pStartBuffer + initialStartWindow; DDP_ASSERT_VALID_START_POINTER(pStartWindow); // Pointer to the end of the window byte* pEndWindow = pStartWindow + m_nWindowSize; DDP_ASSERT_VALID_END_POINTER(pEndWindow); // Pointer to the byte 
where the maximum chunk size is hit (or to the end of the current buffer, if the max is beyond reach) NT_ASSERT(m_nMaxChunkSize > previouslyProcessedBytes); OffsetT bytesUntilMax = (OffsetT)(m_nMaxChunkSize) - (OffsetT)(previouslyProcessedBytes); byte* pEndPosUntilMax = pStartBuffer + Math.Min((OffsetT)cbLen, startOffset + bytesUntilMax); DDP_ASSERT_VALID_BUFFER_END(pEndPosUntilMax); // Continue the zero run detection until pEndPosUntilMax if (m_numZeroRun >= 0) { // Note: m_numZeroRun can be larger if we performed a jump (due to zero detection during the jump) NT_ASSERT(m_numZeroRun >= (OffsetT)(previouslyProcessedBytes)); bool bDeclareChunk = false; // Find the first non-zero // TODO:365262 consider use utility routine // Note: we used DDP_IS_VALID_POINTER to "hide" OACR failures as Prefast can't keep up with the large list of asumptions (known bug) byte* pPreviousEndWindow = pEndWindow; while (DDP_IS_VALID_POINTER(pEndWindow, pStartBuffer, pEndBuffer) && (pEndWindow != pEndPosUntilMax) && ((*pEndWindow) == 0)) { pEndWindow++; } DDP_ASSERT_VALID_END_POINTER(pEndWindow); // Get the amount of zero bytes just discovered, and update m_numZeroRun NT_ASSERT(pEndWindow >= pPreviousEndWindow); size_t zeroScanned = (size_t)(pEndWindow - pPreviousEndWindow); m_numZeroRun += (OffsetT)zeroScanned; // Update the number of processed bytes // This includes the bytes discovered above, and the zero-run discovered by "jumping" OffsetT zeroBytes = m_numZeroRun - (OffsetT)(previouslyProcessedBytes); // Check if we need to record a new chunk if (pEndWindow == pEndPosUntilMax) { // All zeros in this run // Check if we reached the end of the buffer without reaching the maximum chunk size if (m_numZeroRun < (OffsetT)(m_nMaxChunkSize)) { // We reached the end of the buffer NT_ASSERT(pEndPosUntilMax == (pStartBuffer + cbLen)); if (bNoMoreData) { NT_ASSERT(m_numZeroRun > 0); bDeclareChunk = true; } else { // We need to exit as we are at the end of the buffer } } else { 
NT_ASSERT(m_numZeroRun >= (OffsetT)(m_nMaxChunkSize)); bDeclareChunk = true; } } else { DDP_ASSERT_VALID_END_POINTER(pEndWindow); NT_ASSERT(m_numZeroRun >= (OffsetT)(m_nMinChunkSize)); bDeclareChunk = true; } if (bDeclareChunk) { // TODO:365262 - use AddChunk NT_ASSERT(m_numZeroRun > 0); AddChunkInfo( new DedupBasicChunkInfo( lastChunkAbsoluteOffset, (size_t)(m_numZeroRun), DedupChunkCutType.DDP_CCT_All_Zero)); hash = 0; previouslyProcessedBytes = 0; lastChunkAbsoluteOffset += (size_t)(m_numZeroRun); m_numZeroRun = 0; startOffset += zeroBytes; remainingBytes -= zeroBytes; NT_ASSERT(remainingBytes >= 0); continue; } else { // TODO:365262 - exit here from the routine // No chunk cut yet as we reached the end of the buffer while counting zeros // Update state to incorporate the zeros we just found startOffset += zeroBytes; remainingBytes -= zeroBytes; NT_ASSERT(remainingBytes == 0); // We need to exit as we can't declare a chunk yet NT_ASSERT(m_numZeroRun >= 0); previouslyProcessedBytes = (size_t)(m_numZeroRun); continue; } } // // We are done with zero detection. // Perform the actual hasing + chunking. 
// bool bLoopForNextHashValue = true; while (bLoopForNextHashValue) { NT_ASSERT(pStartWindow < pEndWindow); // Advance hash calculation when the start is from the previous buffer, and the end is in the current buffer // TODO:365262 potential perf improvement while (!smallestMask.IsMatch(hash) && pEndWindow < pEndPosUntilMax && initialStartWindow < 0) { // use history // TODO:365262 - add index check for g_arrPolynomialsXX (static assert) DDP_ASSERT_VALID_ARRAY_INDEX(initialStartWindow + (OffsetT)m_nWindowSize, m_history); hash ^= g_arrPolynomialsTU[m_history[initialStartWindow + (OffsetT)m_nWindowSize]]; HashValueT origHash = hash; hash <<= 8; hash ^= *pEndWindow; hash ^= g_arrPolynomialsTD[(origHash >> 56) & 0xff]; pStartWindow++; pEndWindow++; initialStartWindow++; } DDP_ASSERT_VALID_START_POINTER(pStartWindow); DDP_ASSERT_VALID_END_POINTER(pEndWindow); // Advance calculation while both window ends are in the same buffer // TODO:365262 potential perf improvement while (pEndWindow < pEndPosUntilMax && !smallestMask.IsMatch(hash)) { // the main critical loop hash ^= g_arrPolynomialsTU[*pStartWindow]; HashValueT origHash = hash; hash <<= 8; hash ^= *pEndWindow; hash ^= g_arrPolynomialsTD[origHash >> 56]; pStartWindow++; pEndWindow++; // Note: we do need not to increment initialStartWindow anymore here (as this was related with the initial setup) } DDP_ASSERT_VALID_START_POINTER(pStartWindow); DDP_ASSERT_VALID_END_POINTER(pEndWindow); // Processed bytes starting from the current startOffset. 
Equal or smaller than the size of the buffer NT_ASSERT(pEndWindow >= pStartBuffer + startOffset); size_t processedBytes = (size_t)(pEndWindow - (pStartBuffer + startOffset)); // Length of the "potential chunk" we found size_t chunkLen = processedBytes + previouslyProcessedBytes; NT_ASSERT(chunkLen <= m_nMaxChunkSize); NT_ASSERT(chunkLen >= m_nMinChunkSize); // Check if a hash-driven chunk cut was made (using the smallest hash/mask) if (smallestMask.IsMatch(hash)) { // TODO:365262 use a utility routine OffsetT regressHashMismatchIndex = (OffsetT)m_nRegressSize - 1; for (; regressHashMismatchIndex >= 0; regressHashMismatchIndex--) { // Find the last mask close to m_nMaxChunkSize // TODO:365262 array index check // TODO:365262 Refactor to eliminate the confusing "offset by 1" difference between the two arrays m_regressChunkLen[regressHashMismatchIndex] = (OffsetT)chunkLen; if (!regressionMasks[regressHashMismatchIndex].IsMatch(hash)) break; } // If we had a match all the way it means that we encountered a match with the full-lenght hash value if (regressHashMismatchIndex < 0) { // Find full mask match // TODO:365262 - use AddChunk AddChunkInfo( new DedupBasicChunkInfo(lastChunkAbsoluteOffset, chunkLen, DedupChunkCutType.DDP_CCT_Normal)); hash = 0; previouslyProcessedBytes = 0; m_numZeroRun = 0; lastChunkAbsoluteOffset += chunkLen; bLoopForNextHashValue = false; startOffset += (OffsetT)processedBytes; remainingBytes -= (OffsetT)processedBytes; break; } else { // Not a full mask match, repeat the logic in the main critical loop, // We will need to move the pointer forward by one byte, otherwise, it will stuck in the loop if (pEndWindow < pEndPosUntilMax) { if (initialStartWindow < 0) { DDP_ASSERT_VALID_ARRAY_INDEX(initialStartWindow + (OffsetT)m_nWindowSize, m_history); hash ^= g_arrPolynomialsTU[m_history[initialStartWindow + (OffsetT)m_nWindowSize]]; } else if (DDP_IS_VALID_POINTER(pStartWindow, pStartBuffer, pEndBuffer)) { hash ^= g_arrPolynomialsTU[*pStartWindow]; } 
else { NT_ASSERT(false); } if (DDP_IS_VALID_POINTER(pEndWindow, pStartBuffer, pEndBuffer)) { HashValueT origHash = hash; hash <<= 8; hash ^= *pEndWindow; hash ^= g_arrPolynomialsTD[(origHash >> 56) & 0xff]; } else { NT_ASSERT(false); } pStartWindow++; DDP_ASSERT_VALID_START_POINTER(pStartWindow); pEndWindow++; DDP_ASSERT_VALID_END_POINTER(pEndWindow); initialStartWindow++; continue; // To loop for next hash value } else { // We found a mismatch on a larger chunk but we also reach the end of the chunk. We need to continue with regression NT_ASSERT(pEndWindow == pEndPosUntilMax); } } }; // We reach pEndWindow == pEndPosUntilMax condition. NT_ASSERT(pEndWindow == pEndPosUntilMax); // Perform regression if (m_nMaxChunkSize == chunkLen) { // Run to m_nMaxChunkSize size_t curChunkLen = chunkLen; DedupChunkCutType cutType = DedupChunkCutType.DDP_CCT_Unknown; size_t lowestValidRegressionIndex; for (lowestValidRegressionIndex = 0; lowestValidRegressionIndex < m_nRegressSize; lowestValidRegressionIndex++) { if (m_regressChunkLen[lowestValidRegressionIndex] >= 0) break; } size_t potentiallyRegressedChunkLen = chunkLen; // If we found a length we can regress to, get the chunk length and type // Note: this should work if it regresses in Previous Bytes if (lowestValidRegressionIndex < m_nRegressSize) { // We find at least one chunk point with a partial mask match // TODO:365262 - add better type inference that works with a variable-length regression array switch (lowestValidRegressionIndex) { case 0: cutType = DedupChunkCutType.DDP_CCT_Regress_1_bit; break; case 1: cutType = DedupChunkCutType.DDP_CCT_Regress_2_bit; break; case 2: cutType = DedupChunkCutType.DDP_CCT_Regress_3_bit; break; case 3: cutType = DedupChunkCutType.DDP_CCT_Regress_4_bit; break; default: NT_ASSERT(false); break; } potentiallyRegressedChunkLen = (size_t)(m_regressChunkLen[lowestValidRegressionIndex]); m_regressChunkLen[lowestValidRegressionIndex] = -1; // Adjust the length of subsequent regressions size_t 
subsequentRegressionIndexes = lowestValidRegressionIndex + 1; for (; subsequentRegressionIndexes < m_nRegressSize; subsequentRegressionIndexes++) { // All regress point match less bit is adjusted here. NT_ASSERT(m_regressChunkLen[subsequentRegressionIndexes] >= (OffsetT)(potentiallyRegressedChunkLen)); m_regressChunkLen[subsequentRegressionIndexes] -= (OffsetT)potentiallyRegressedChunkLen; if (m_regressChunkLen[subsequentRegressionIndexes] < (OffsetT)(m_nMinChunkSize)) { m_regressChunkLen[subsequentRegressionIndexes] = -1; } } // TODO:365262 - clean up the algorithm if (m_lastNonZeroChunkLen != -1) { if (m_lastNonZeroChunkLen >= (OffsetT)(potentiallyRegressedChunkLen)) { m_lastNonZeroChunkLen -= (OffsetT)potentiallyRegressedChunkLen; if (m_lastNonZeroChunkLen < (OffsetT)(m_nMinChunkSize)) m_lastNonZeroChunkLen = -1; } else { m_lastNonZeroChunkLen = -1; } } } else { cutType = DedupChunkCutType.DDP_CCT_MaxReached; } NT_ASSERT(DedupChunkCutType.DDP_CCT_Unknown != cutType); // TODO:365262 - use AddChunk AddChunkInfo( new DedupBasicChunkInfo(lastChunkAbsoluteOffset, potentiallyRegressedChunkLen, cutType)); // Note: processedBytes is still relative to curChunkLen not potentiallyRegressedChunkLen // If this is the last call in teh dcall sequence, one more chunk will be added at the end previouslyProcessedBytes = (size_t)((OffsetT)(curChunkLen) - (OffsetT)(potentiallyRegressedChunkLen)); lastChunkAbsoluteOffset += potentiallyRegressedChunkLen; startOffset += (OffsetT)processedBytes; remainingBytes -= (OffsetT)processedBytes; // Recalculate m_numZeroRun // TODO:365262 cleanup algorithm if (previouslyProcessedBytes >= m_nWindowSize) { m_numZeroRun = -1; // This can't be part of a zero run, otherwise it will match the full 16 bit mask. 
} else { OffsetT nExaminePos = startOffset - 1; size_t prevBytesAdj = previouslyProcessedBytes; for (; prevBytesAdj > 0; prevBytesAdj--, nExaminePos--) { if (nExaminePos >= 0) { DDP_ASSERT_VALID_BUFFER_POINTER((pStartBuffer + nExaminePos)); if (pStartBuffer[nExaminePos] != 0) break; } else { DDP_ASSERT_VALID_ARRAY_INDEX(nExaminePos + (OffsetT)m_nWindowSize, m_history); if (m_history[nExaminePos + (OffsetT)m_nWindowSize] != 0) break; } } if (prevBytesAdj > 0) m_numZeroRun = -1; else m_numZeroRun = (OffsetT)previouslyProcessedBytes; } if (previouslyProcessedBytes >= m_nMinChunkSize) { // Loop can continue runs. bLoopForNextHashValue = true; // Update pEndPosUntilMax // TODO:365262 - cleanup algorithm pEndPosUntilMax = pStartBuffer + Math.Min((OffsetT)cbLen, startOffset + (OffsetT)(m_nMaxChunkSize) - (OffsetT)(previouslyProcessedBytes)); DDP_ASSERT_VALID_BUFFER_END(pEndPosUntilMax); } else { // Need to exit loop bLoopForNextHashValue = false; } } else if (bNoMoreData) { // We can't apply regression as we are reaching the end NT_ASSERT(chunkLen > 0); // TODO:365262 - use AddChunk // TODO:MSR - we should declare the "end" consistently (right now we use both MinReached and EndReached) AddChunkInfo( new DedupBasicChunkInfo(lastChunkAbsoluteOffset, chunkLen, DedupChunkCutType.DDP_CCT_EndReached)); hash = 0; previouslyProcessedBytes = 0; m_numZeroRun = 0; lastChunkAbsoluteOffset += chunkLen; startOffset += (OffsetT)processedBytes; remainingBytes -= (OffsetT)processedBytes; bLoopForNextHashValue = false; // TODO:365262 - add a more visible exit } else // more data but no regression { // Find first non zero position, make sure we don't run before pStartBuffer byte* pReverseScan = pEndWindow; DDP_ASSERT_VALID_END_POINTER(pReverseScan); byte* pReverseScanStop = pEndPosUntilMax - Math.Min((OffsetT)(chunkLen) - (OffsetT)(m_nMinChunkSize) + 1, pEndPosUntilMax - pStartBuffer); DDP_ASSERT_VALID_END_POINTER(pReverseScanStop); do { pReverseScan--; // 
DDP_ASSERT_VALID_BUFFER_POINTER((IntPtr)pReverseScan); } while (DDP_IS_VALID_POINTER(pReverseScan, pStartBuffer, pEndBuffer) && ((*pReverseScan) == 0) && (pReverseScan >= pReverseScanStop)); // Note: we used DDP_IS_VALID_POINTER to "hide" OACR failures as Prefast can't keep up with the large list of asumptions (known bug) if (pReverseScan >= pReverseScanStop) { // A first non zero position is found, we chunk at the first zero position afterwards. m_lastNonZeroChunkLen = (OffsetT)(chunkLen) - (pEndPosUntilMax - pReverseScan - 1); NT_ASSERT(m_lastNonZeroChunkLen >= (OffsetT)(m_nMinChunkSize)); NT_ASSERT(m_lastNonZeroChunkLen <= (OffsetT)(m_nMaxChunkSize)); }; previouslyProcessedBytes += processedBytes; startOffset += (OffsetT)processedBytes; remainingBytes -= (OffsetT)processedBytes; bLoopForNextHashValue = false; } } } NT_ASSERT(remainingBytes >= 0); // Add one last chunk, if this is the last call and the last regression run left some "left-over" buffer if ((previouslyProcessedBytes > 0) && bNoMoreData) { // End reached, no more data available DedupChunkCutType endChunkType = (m_numZeroRun >= 0) ? DedupChunkCutType.DDP_CCT_All_Zero : (previouslyProcessedBytes < m_nMinChunkSize) ? 
DedupChunkCutType.DDP_CCT_MinReached : DedupChunkCutType.DDP_CCT_EndReached; AddChunkInfo(new DedupBasicChunkInfo(lastChunkAbsoluteOffset, previouslyProcessedBytes, endChunkType)); hash = 0; lastChunkAbsoluteOffset += previouslyProcessedBytes; previouslyProcessedBytes = 0; m_numZeroRun = 0; } // // Save internal state for the next CDedupRegressionChunking::FindRabinChunkBoundariesInternal call // OffsetT bytesIndex = 0; for (; bytesIndex < Math.Min(0, (OffsetT)(m_nWindowSize) - (OffsetT)(cbLen)); bytesIndex++) { DDP_ASSERT_VALID_ARRAY_INDEX(bytesIndex, m_history); DDP_ASSERT_VALID_ARRAY_INDEX(bytesIndex + (OffsetT)cbLen, m_history); m_history[bytesIndex] = m_history[bytesIndex + (OffsetT)cbLen]; } for (; bytesIndex < (OffsetT)m_nWindowSize; bytesIndex++) { DDP_ASSERT_VALID_BUFFER_POINTER((pStartBuffer + cbLen - m_nWindowSize + bytesIndex)); DDP_ASSERT_VALID_ARRAY_INDEX(bytesIndex, m_history); m_history[bytesIndex] = pStartBuffer[(OffsetT)(cbLen) - (OffsetT)(m_nWindowSize) + bytesIndex]; } m_hash = hash; // Save the amount of unprocessed bytes previouslyProcessedBytesParam = previouslyProcessedBytes; // Save the last absolute chunk offset NT_ASSERT(lastChunkAbsoluteOffset >= lastChunkAbsoluteOffsetParam); lastChunkAbsoluteOffsetParam = lastChunkAbsoluteOffset; } } } }
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="TableBlock_Tests.cs" company="PicklesDoc">
//  Copyright 2018 Darren Comeau
//  Copyright 2018-present PicklesDoc team and community contributors
//
//
//  Licensed under the Apache License, Version 2.0 (the "License");
//  you may not use this file except in compliance with the License.
//  You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//  Unless required by applicable law or agreed to in writing, software
//  distributed under the License is distributed on an "AS IS" BASIS,
//  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//  See the License for the specific language governing permissions and
//  limitations under the License.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------

using NUnit.Framework;
using PicklesDoc.Pickles.DocumentationBuilders.Markdown.Blocks;
using PicklesDoc.Pickles.ObjectModel;
using System;

namespace PicklesDoc.Pickles.DocumentationBuilders.Markdown.UnitTests
{
    [TestFixture]
    public class TableBlock_Tests
    {
        // Splits the rendered block into lines; the renderer terminates the
        // table with a newline, so a 4-row table yields 5 array elements.
        private static string[] RenderToLines(TableBlock block)
        {
            return block.ToString().Split(new string[] { Environment.NewLine }, StringSplitOptions.None);
        }

        [Test]
        public void A_Table_Is_Formatted()
        {
            var stylist = new MockStylist();
            var table = new Table
            {
                HeaderRow = new TableRow(new[] { "Col1", "Col2" }),
                DataRows = new System.Collections.Generic.List<ObjectModel.TableRow>
                {
                    new TableRow(new[] { "Col1Row1", "Col2Row1" }),
                    new TableRow(new[] { "Col1Row2", "Col2Row2" }),
                },
            };

            var lines = RenderToLines(new TableBlock(table, stylist));

            Assert.AreEqual("> | Col1 | Col2 |", lines[0]);
            Assert.AreEqual("> | --- | --- |", lines[1]);
            Assert.AreEqual("> | Col1Row1 | Col2Row1 |", lines[2]);
            Assert.AreEqual("> | Col1Row2 | Col2Row2 |", lines[3]);
            Assert.AreEqual(5, lines.Length);
        }

        [Test]
        public void A_Table_Is_Formatted_With_Placeholders()
        {
            var stylist = new MockStylist();
            var table = new Table
            {
                HeaderRow = new TableRow(new[] { "Col1", "Col2" }),
                DataRows = new System.Collections.Generic.List<ObjectModel.TableRow>
                {
                    new TableRow(new[] { "Col1Row1", "<Col2Row1>" }),
                    new TableRow(new[] { "<Col1Row2>", "Col2Row2" }),
                },
            };

            var lines = RenderToLines(new TableBlock(table, stylist));

            // Angle brackets in cell values must be escaped so Markdown does not
            // treat them as HTML.
            Assert.AreEqual("> | Col1 | Col2 |", lines[0]);
            Assert.AreEqual("> | --- | --- |", lines[1]);
            Assert.AreEqual(@"> | Col1Row1 | \<Col2Row1\> |", lines[2]);
            Assert.AreEqual(@"> | \<Col1Row2\> | Col2Row2 |", lines[3]);
            Assert.AreEqual(5, lines.Length);
        }

        [Test]
        public void A_Table_Is_Formatted_With_Results()
        {
            var stylist = new MockStylist();
            var table = new Table
            {
                HeaderRow = new TableRow(new[] { "Col1", "Col2" }),
                DataRows = new System.Collections.Generic.List<ObjectModel.TableRow>(),
            };
            AddRowWithResult(table, new[] { "Col1Row1", "Col2Row1" }, TestResult.Passed);
            AddRowWithResult(table, new[] { "Col1Row2", "Col2Row2" }, TestResult.Failed);
            AddRowWithResult(table, new[] { "Col1Row3", "Col2Row3" }, TestResult.Inconclusive);
            AddRowWithResult(table, new[] { "Col1Row4", "Col2Row4" }, TestResult.NotProvided);

            // Third constructor argument enables the extra "Result" column.
            var lines = RenderToLines(new TableBlock(table, stylist, true));

            Assert.AreEqual("> | Col1 | Col2 | Result |", lines[0]);
            Assert.AreEqual("> | --- | --- | --- |", lines[1]);
            Assert.AreEqual("> | Col1Row1 | Col2Row1 | ![Passed](pass.png) |", lines[2]);
            Assert.AreEqual("> | Col1Row2 | Col2Row2 | ![Failed](fail.png) |", lines[3]);
            Assert.AreEqual("> | Col1Row3 | Col2Row3 | ![Inconclusive](inconclusive.png) |", lines[4]);
            Assert.AreEqual("> | Col1Row4 | Col2Row4 | |", lines[5]);
            Assert.AreEqual(7, lines.Length);
        }

        // Appends a data row carrying a test result to the table.
        private void AddRowWithResult(Table table, string[] data, TestResult result)
        {
            var rowWithResult = new TableRowWithTestResult(data) { Result = result };
            table.DataRows.Add(rowWithResult);
        }
    }
}
using System;
using System.Collections.Generic;
using our.orders.Helpers;

namespace our.orders.Statistics
{
    /// <summary>
    /// Aggregated statistics over a whole reporting window, sliced into
    /// per-period sub-reports (see <see cref="Report"/>).
    /// </summary>
    public class StatisticsReport
    {
        /// <summary>Currency the metrics are expressed in (presumably an ISO code — confirm against producer).</summary>
        public string Currency { get; set; }

        /// <summary>Start of the reporting window.</summary>
        public DateTime StartDate { get; set; }

        /// <summary>End of the reporting window.</summary>
        public DateTime EndDate { get; set; }

        /// <summary>Granularity used to split the window into periods.</summary>
        public TimeInterval Interval { get; set; }

        /// <summary>Metrics computed over the entire window.</summary>
        public StatisticMetric Global { get; set; }

        /// <summary>One entry per <see cref="Interval"/>-sized period inside the window.</summary>
        public List<StatisticsPeriodReport> Report { get; set; }
    }

    /// <summary>
    /// Statistics for a single period within a <see cref="StatisticsReport"/>.
    /// </summary>
    public class StatisticsPeriodReport
    {
        /// <summary>Start of this period.</summary>
        public DateTime StartDate { get; set; }

        /// <summary>End of this period.</summary>
        public DateTime EndDate { get; set; }

        /// <summary>Metrics computed over this period.</summary>
        public StatisticMetric Global { get; set; }

        /// <summary>Per-dimension metrics for this period, keyed by dimension value (key semantics defined by the producer — confirm).</summary>
        public IDictionary<string, StatisticMetric> Dimension { get; set; }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using AutoBogus;
using Bogus;
using NUnit.Framework;
using StirlingLabs.Utilities.Yaml;
using YamlDotNet.RepresentationModel;
using JsonSerializer = Newtonsoft.Json.JsonSerializer;

namespace StirlingLabs.Utilities.Tests;

[Parallelizable(ParallelScope.All)]
public class YamlTests
{
    private static readonly JsonSerializer JsonNetSerializer = JsonSerializer.CreateDefault();

    // Fake data source: one scalar double, 100 phrases, 500 doubles per object.
    private static readonly Faker<JsonMe> JsonMeFaker = new AutoFaker<JsonMe>()
        .RuleFor(f => f.number, GetActuallyRandomNumber)
        .RuleFor(f => f.texts, f => f.Make(100, _ => f.Hacker.Phrase()).ToArray())
        .RuleFor(f => f.numbers, f => f.Make(500, _ => GetActuallyRandomNumber()).ToArray());

#if NETSTANDARD2_0
    private static readonly RandomNumberGenerator Rng = RandomNumberGenerator.Create();
#endif

    /// <summary>
    /// Produces a double with a uniformly random bit pattern, redrawing until
    /// the value is finite (i.e. not NaN and not +/- infinity).
    /// </summary>
    public static unsafe double GetActuallyRandomNumber()
    {
        double number = 0;
#if !NETSTANDARD2_0
        var doubleSpan = MemoryMarshal.CreateSpan(ref number, 1);
        var bytesSpan = MemoryMarshal.AsBytes(doubleSpan);
        do RandomNumberGenerator.Fill(bytesSpan);
        while (!double.IsFinite(number));
#else
        var pNumber = &number;
        var buf = new byte[8];
        var bytesSpan = new Span<byte>(pNumber, 8);
        var longSpan = new Span<long>(pNumber, 1);
        do
        {
            Rng.GetBytes(buf);
            buf.CopyTo(bytesSpan);
        }
        // Fix: this previously compared with '<', which kept redrawing while the
        // value was FINITE and therefore only ever returned NaN/Infinity.
        // A binary64 double is non-finite exactly when its exponent bits are all
        // ones, i.e. (bits & 0x7FFFFFFFFFFFFFFF) >= 0x7FF0000000000000 — so loop
        // while that holds, matching !double.IsFinite in the modern branch above.
        while ((longSpan[0] & 0x7FFFFFFFFFFFFFFF) >= 0x7FF0000000000000);
#endif
        return number;
    }

    [Test]
    public void Test1()
    {
        JsonMeFaker.AssertConfigurationIsValid();
        var k = new { a = JsonMeFaker.Generate(2) };
        var yml = OnDemand.YamlSerializer.Serialize(k);

        var ys = new YamlStream();
        ys.Load(new StringReader(yml));

        // Time both YAML -> JSON conversion paths on the same stream.
        var sw = Stopwatch.StartNew();
        var expectedJson = ys.Serialize(OnDemand.JsonSerializer);
        var json1 = sw.ElapsedTicks;
        Assert.IsNotNull(expectedJson);

        sw.Restart();
        var actualJson = ys.ToJson();
        var json2 = sw.ElapsedTicks;
        Assert.IsNotNull(actualJson);

        // Deserialize both JSON strings through Json.NET into the anonymous
        // shape and compare structurally instead of comparing raw text.
        dynamic expected = JsonNetSerializer.Deserialize(new StringReader(expectedJson), k.GetType());
        dynamic actual = JsonNetSerializer.Deserialize(new StringReader(actualJson), k.GetType());
        Assert.AreEqual((IList<JsonMe>)expected.a, (IList<JsonMe>)actual.a);

        Console.WriteLine($"ToJson w/ Serializer: {json1}, ToJson w/ YamlToJsonVisitor: {json2}");
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using FreecraftCore.Serializer;

namespace Runescape.Cache.Structures
{
	[WireDataContract]
	public sealed class MapTileCollection
	{
		//The cache length-prefixes the collection
		/// <summary>
		/// Deserialized map tiles (populated by the serializer via the
		/// WireMember attribute — presumably through the backing field; confirm
		/// against FreecraftCore behavior).
		/// </summary>
		[WireMember(1)]
		[SendSize(SendSizeAttribute.SizeType.UShort)]
		private MapTile[] _MapTiles { get; }

		/// <summary>
		/// Collection of loaded map tiles.
		/// Fix: this was an unassigned get-only auto-property that always
		/// returned null; it now exposes the deserialized <see cref="_MapTiles"/>.
		/// </summary>
		public IEnumerable<MapTile> MapTiles => _MapTiles;
	}
}
namespace UniversitySystem.Data
{
    using System.Data.Entity;
    using Microsoft.AspNet.Identity.EntityFramework;
    using UniversitySystem.Data.Migrations;
    using UniversitySystem.Models;

    /// <summary>
    /// Entity Framework code-first context for the university system,
    /// combining ASP.NET Identity storage with the domain entity sets.
    /// </summary>
    public class UniversitySystemDbContext : IdentityDbContext<User>, IUniversitySystemDbContext
    {
        public UniversitySystemDbContext()
            : base("UniversitySystemDb", throwIfV1Schema: false)
        {
            // Automatically apply any pending code-first migrations on first use.
            Database.SetInitializer(new MigrateDatabaseToLatestVersion<UniversitySystemDbContext, Configuration>());
        }

        public IDbSet<Student> Students { get; set; }

        public IDbSet<Course> Courses { get; set; }

        public IDbSet<Lecture> Lectures { get; set; }

        public IDbSet<Homework> Homework { get; set; }

        public IDbSet<Lecturer> Lecturers { get; set; }

        public IDbSet<Department> Departments { get; set; }

        public IDbSet<IdentityUserRole> UserRoles { get; set; }

        /// <summary>Factory method used by OWIN per-request context creation.</summary>
        public static UniversitySystemDbContext Create()
        {
            return new UniversitySystemDbContext();
        }

        // NOTE(review): hides DbContext.SaveChanges (which returns the number of
        // affected rows) and discards that count — presumably to satisfy a void
        // signature on IUniversitySystemDbContext; confirm before changing.
        public new void SaveChanges()
        {
            base.SaveChanges();
        }
    }
}
/// <summary>
/// Austin Berquam
/// Created: 2019/02/23
///
/// This is a mock Data Accessor which implements IGuestTypeAccessor. This is for testing purposes only.
/// </summary>
///
using System;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using DataObjects;

namespace DataAccessLayer
{
    public class MockGuestTypeAccessor : IGuestTypeAccessor
    {
        // In-memory stand-in for the GuestType table.
        private List<GuestType> guestType;

        /// <summary>
        /// Author: Austin Berquam
        /// Created: 2019/02/23
        /// This constructor that sets up dummy data
        /// </summary>
        public MockGuestTypeAccessor()
        {
            guestType = new List<GuestType>
            {
                new GuestType {GuestTypeID = "GuestType1", Description = "guestType is a guestType"},
                new GuestType {GuestTypeID = "GuestType2", Description = "guestType is a guestType"},
                new GuestType {GuestTypeID = "GuestType3", Description = "guestType is a guestType"},
                new GuestType {GuestTypeID = "GuestType4", Description = "guestType is a guestType"}
            };
        }

        /// <summary>
        /// Adds a guest type to the mock store.
        /// </summary>
        /// <returns>1 when the add succeeded, otherwise 0 (mimics rows affected).</returns>
        public int InsertGuestType(GuestType newGuestType)
        {
            int listLength = guestType.Count;
            guestType.Add(newGuestType);
            return guestType.Count == listLength + 1 ? 1 : 0;
        }

        /// <summary>
        /// Removes the guest type(s) matching the given id.
        /// Fix: the original removed items from the list inside a foreach over the
        /// same list (which throws InvalidOperationException), and its post-remove
        /// count check was off by two so it always reported 0 rows deleted.
        /// RemoveAll mutates safely and returns the removed count directly.
        /// </summary>
        /// <returns>The number of rows deleted (mimics rows affected).</returns>
        public int DeleteGuestType(string guestTypeID)
        {
            return guestType.RemoveAll(type => type.GuestTypeID == guestTypeID);
        }

        /// <summary>Not supported by the mock.</summary>
        public List<string> SelectAllTypes()
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Returns all mock guest types. Note: the status filter is ignored,
        /// matching the original mock behavior.
        /// </summary>
        public List<GuestType> SelectGuestTypes(string status)
        {
            return guestType;
        }
    }
}
using System;

namespace Akka.Fluent.Config
{
    /// <summary>
    /// Fluent builder for the Akka.Persistence "snapshot-store" HOCON section.
    /// All methods return <c>this</c> so calls can be chained.
    /// </summary>
    public class SnapshotConfigBuilder : HoconConfigBuilder
    {
        // Roots every key written by this builder under "snapshot-store".
        public SnapshotConfigBuilder() : base("snapshot-store") { }

        /// <summary>Sets the "plugin" key (the snapshot-store plugin id).</summary>
        public SnapshotConfigBuilder Plugin(string plugin)
        {
            AddKeyValue("plugin", plugin);
            return this;
        }

        /// <summary>Configures the nested "mongodb" sub-section via the supplied callback.</summary>
        public SnapshotConfigBuilder MongoDB(Action<MongoDBConfigBuilder> action)
        {
            SectionBuilder("mongodb", action);
            return this;
        }
    }
}
namespace P08.RecursiveFibonacci
{
    using System;
    using System.Collections.Generic;

    /// <summary>
    /// Reads n from the console and prints the n-th Fibonacci number
    /// (fib(0) = 0, fib(1) = 1), computed recursively with memoization.
    /// </summary>
    public class RecursiveFibonacci
    {
        // Memoization cache: n -> fib(n), shared across recursive calls.
        private static Dictionary<int, long> memory = new Dictionary<int, long>();

        public static void Main(string[] args)
        {
            int n = int.Parse(Console.ReadLine());

            long nthFibonacci = GetFibonacci(n);

            Console.WriteLine(nthFibonacci);
        }

        /// <summary>
        /// Returns the number-th Fibonacci number, caching each computed value.
        /// </summary>
        private static long GetFibonacci(int number)
        {
            // Base cases: fib(0) = 0, fib(1) = 1.
            if (number == 0 || number == 1)
            {
                return number;
            }

            // TryGetValue avoids the ContainsKey + indexer double lookup.
            if (memory.TryGetValue(number, out long cached))
            {
                return cached;
            }

            long currentValue = GetFibonacci(number - 2) + GetFibonacci(number - 1);
            memory[number] = currentValue;

            return currentValue;
        }
    }
}
using System;
using EnvDTE;
using Microsoft.VisualStudio.Shell;
// Fix: the original file imported EnvDTE twice (compiler warning CS0105);
// the duplicate directive has been removed.

namespace JSEssentials.JSESettings
{
    /// <summary>
    /// Process-wide access to the Visual Studio automation (DTE) object and
    /// the IDE version it reports.
    /// </summary>
    public static class Global
    {
        // Static field initializer runs InitGlobals once; it also populates
        // IdeVersion as a side effect before DTE is assigned.
        public static DTE DTE = InitGlobals();
        public static Version IdeVersion;

        private static DTE InitGlobals()
        {
            DTE dte = Package.GetGlobalService(typeof(DTE)) as DTE;
            // Guard: GetGlobalService can return null (e.g. outside the IDE);
            // previously this threw a NullReferenceException during type init.
            if (dte != null)
            {
                IdeVersion = Version.Parse(dte.Version);
            }
            return dte;
        }
    }
}
// https://leetcode.com/problems/longest-palindromic-substring/
//
// Given a string S, find the longest palindromic substring in S.
// You may assume that the maximum length of S is 1000, and there exists one unique longest palindromic substring.

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

public class Solution
{
    //
    // Submission Details
    // 88 / 88 test cases passed.
    // Status: Accepted
    // Runtime: 128 ms
    //
    // Submitted: 0 minutes ago
    //
    // Your runtime beats 95.71% of csharp submissions.

    /// <summary>
    /// Expand-around-center scan: for each index try an odd-length center (i, i)
    /// and an even-length center (i, i + 1), tracking the longest span seen.
    /// Fix: empty/null input previously crashed with Substring(-1, 0).
    /// </summary>
    public string LongestPalindrome(string s)
    {
        if (string.IsNullOrEmpty(s))
        {
            return string.Empty;
        }

        var start = 0;
        var length = 0;
        for (var i = 0; i < s.Length; i++)
        {
            var odd = CheckPalindrome(s, i, i);
            var even = CheckPalindrome(s, i, i + 1);
            if (odd * 2 - 1 > length)
            {
                length = odd * 2 - 1;
                start = i - odd + 1;
            }
            if (even * 2 > length)
            {
                length = even * 2;
                start = i - even + 1;
            }
        }
        return s.Substring(start, length);
    }

    /// <summary>
    /// Parallel variant of <see cref="LongestPalindrome(string)"/>.
    /// Fix: the shared (start, length) pair is now updated under a lock —
    /// the original mutated it from multiple threads without synchronization,
    /// which could produce an inconsistent result.
    /// </summary>
    public string LongestPalindrome2(string s)
    {
        if (string.IsNullOrEmpty(s))
        {
            return string.Empty;
        }

        var start = 0;
        var length = 0;
        var gate = new object();
        Parallel.ForEach(Enumerable.Range(0, s.Length), i =>
        {
            // Expansion itself is read-only on s and safe to run concurrently.
            var odd = CheckPalindrome(s, i, i);
            var even = CheckPalindrome(s, i, i + 1);
            lock (gate)
            {
                if (odd * 2 - 1 > length)
                {
                    length = odd * 2 - 1;
                    start = i - odd + 1;
                }
                if (even * 2 > length)
                {
                    length = even * 2;
                    start = i - even + 1;
                }
            }
        });
        return s.Substring(start, length);
    }

    /// <summary>
    /// Expands outward from the (left, right) center while characters match and
    /// returns the number of successful expansion steps. For an odd center
    /// (l == r) the palindrome length is 2n - 1; for an even center, 2n.
    /// </summary>
    public int CheckPalindrome(String s, int left, int right)
    {
        var length = 0;
        while (left >= 0 && right < s.Length && s[left] == s[right])
        {
            left--;
            right++;
            length++;
        }
        return length;
    }

    static void Main()
    {
        Console.WriteLine(new Solution().LongestPalindrome("abcdcba234"));
        Console.WriteLine(new Solution().LongestPalindrome("abccba2"));
        Console.WriteLine(new Solution().LongestPalindrome2("abcdcba234"));
        Console.WriteLine(new Solution().LongestPalindrome2("abccba2"));
    }
}
using ApiGateway.Core.Contracts.Exposers;
using ApiGateway.Core.Exposers.Exceptions;
using Microsoft.AspNetCore.Mvc;
using System;
using System.IO;
using System.Text;
using System.Threading.Tasks;

namespace ApiGateway.Rest.Controllers
{
    /// <summary>
    /// Catch-all controller: forwards every incoming request to the exposer
    /// manager and relays the exposer's result as the HTTP response.
    /// </summary>
    [ApiController]
    public class ApiGatewayController : ControllerBase
    {
        private readonly IExposerManager _exposerManager;

        public ApiGatewayController(IExposerManager exposerManager)
        {
            _exposerManager = exposerManager;
        }

        /// <summary>
        /// Handles any path and method, dispatches to the matching exposer and
        /// writes its body back as JSON. Responds 404 when no exposer matches
        /// and 500 on any other failure.
        /// </summary>
        [Route("{*url}")]
        public async Task ProcessRequest()
        {
            try
            {
                ExecuteExposerDto dto = new ExecuteExposerDto();
                dto.Path = Request.Path;
                dto.HttpMethod = Request.Method;
                dto.RequestBody = await BodyToStringAsync(Request.Body);

                ExposerResultDto resultDto = _exposerManager.ExecuteExposer(dto);
                Response.StatusCode = resultDto.StatusCode;
                Response.ContentType = "application/json";

                // Fix: use the byte count of the UTF-8 payload, not the string
                // length — they differ for non-ASCII content and the old code
                // truncated such responses. The write is now awaited instead of
                // fire-and-forget, which could race request completion.
                byte[] payload = Encoding.UTF8.GetBytes(resultDto.Body);
                await Response.Body.WriteAsync(payload, 0, payload.Length);
            }
            catch (ExposerDoesNotExistException)
            {
                Response.StatusCode = 404;
            }
            catch (Exception)
            {
                Response.StatusCode = 500;
            }
        }

        // Reads the request body to the end. Awaited instead of the original
        // sync-over-async task.Result, which can deadlock/starve the pool.
        private async Task<string> BodyToStringAsync(Stream body)
        {
            using (var reader = new StreamReader(body, Encoding.UTF8))
            {
                return await reader.ReadToEndAsync();
            }
        }

        // Wraps a string as a UTF-8 memory stream (currently unused here).
        private Stream StringToBody(string str)
        {
            byte[] byteArray = Encoding.UTF8.GetBytes(str);
            return new MemoryStream(byteArray);
        }
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;

namespace FreedomPatch
{
    // Class to hold the Wizard controls
    public partial class Wizard : Form
    {
        // https://stackoverflow.com/questions/1592876/make-a-borderless-form-movable
        public const int WM_NCLBUTTONDOWN = 0xA1;
        public const int HT_CAPTION = 0x2;

        [DllImportAttribute("user32.dll")]
        public static extern int SendMessage(IntPtr hWnd, int Msg, int wParam, int lParam);

        [DllImportAttribute("user32.dll")]
        public static extern bool ReleaseCapture();

        public Wizard()
        {
            InitializeComponent();
        }

        private void form_wizard_Load(object sender, EventArgs e)
        {
        }

        // Make the borderless form draggable: pretend the click landed on the
        // title bar so Windows moves the window for us.
        private void form_wizard_MouseDown(object sender, MouseEventArgs e)
        {
            if (e.Button == MouseButtons.Left)
            {
                ReleaseCapture();
                SendMessage(Handle, WM_NCLBUTTONDOWN, HT_CAPTION, 0);
            }
        }

        // Page to the next page if we are not already at the end of the TabControl
        private void btn_next_Click(object sender, EventArgs e)
        {
            if (tabs_wizard.SelectedIndex < tabs_wizard.TabCount - 1)
            {
                tabs_wizard.SelectedIndex++;
                progress_wizard.Value = tabs_wizard.SelectedIndex;

                // Fix: derive the page count from the TabControl instead of the
                // hard-coded "12" so the label stays correct if pages are
                // added or removed in the designer.
                lbl_wizard_progress.Text = (tabs_wizard.SelectedIndex + 1) + " of " + tabs_wizard.TabCount;

                if (tabs_wizard.SelectedIndex == tabs_wizard.TabCount - 1)
                {
                    btn_next.Text = "Finish";
                }
                else
                {
                    btn_next.Text = "Next";
                }
            }
        }

        // Close this form
        private void btn_exit_Click(object sender, EventArgs e)
        {
            this.Close();
        }

        // Designer-wired handlers intentionally left empty; they are referenced
        // from the .Designer.cs file and must not be removed.
        private void label5_Click(object sender, EventArgs e)
        {
        }

        private void label4_Click(object sender, EventArgs e)
        {
        }

        private void radio_cortana_off_CheckedChanged(object sender, EventArgs e)
        {
        }

        // Page back if we are not already at the beginning
        private void btn_back_Click(object sender, EventArgs e)
        {
            if (tabs_wizard.SelectedIndex > 0)
            {
                tabs_wizard.SelectedIndex--;
                progress_wizard.Value = tabs_wizard.SelectedIndex;
                lbl_wizard_progress.Text = (tabs_wizard.SelectedIndex + 1) + " of " + tabs_wizard.TabCount;
                btn_next.Text = "Next";
            }
        }

        private void radioButton1_CheckedChanged(object sender, EventArgs e)
        {
        }
    }
}
using System;
using System.Buffers;
using System.Text.Json;
using A6k.Nats.Operations;
using Bedrock.Framework.Protocols;

namespace A6k.Nats.Protocol
{
    /// <summary>
    /// Serializes <see cref="NatsOperation"/> values into the text-based NATS
    /// client wire format (each operation line is terminated by CRLF).
    /// </summary>
    public class NatsOperationWriter : IMessageWriter<NatsOperation>
    {
        private const byte CR = (byte)'\r';
        private const byte LF = (byte)'\n';

        // Protocol line terminator shared by all operations.
        private static ReadOnlySpan<byte> CRLF => new byte[] { CR, LF };

        /// <summary>
        /// Writes one operation to the output buffer. Operation ids without a
        /// case here are silently dropped (nothing is written for them).
        /// </summary>
        public void WriteMessage(NatsOperation operation, IBufferWriter<byte> output)
        {
            var writer = new NatsWriter(output);

            switch (operation.OpId)
            {
                case NatsOperationId.PING:
                    writer.WriteString("PING\r\n");
                    break;
                case NatsOperationId.PONG:
                    writer.WriteString("PONG\r\n");
                    break;
                case NatsOperationId.PUB:
                    WritePub(ref writer, (PubOperation)operation.Op);
                    break;
                case NatsOperationId.SUB:
                    WriteSub(ref writer, (SubOperation)operation.Op);
                    break;
                case NatsOperationId.CONNECT:
                    WriteConnect(ref writer, (ConnectOperation)operation.Op);
                    break;
            }

            // Flush the accumulated bytes to the underlying buffer writer.
            writer.Commit();
        }

        // Wire shape: PUB <subject> [reply-to] <#bytes>\r\n<payload>\r\n
        private static void WritePub(ref NatsWriter writer, PubOperation op)
        {
            writer.WriteString($"PUB {op.Subject} ");
            if (!string.IsNullOrEmpty(op.ReplyTo))
            {
                writer.WriteString(op.ReplyTo);
                writer.WriteString(" ");
            }
            // Payload length prefix, then the raw payload bytes.
            writer.WriteInt(op.Data.Length);
            writer.Write(CRLF);
            writer.Write(op.Data.Span);
            writer.Write(CRLF);
        }

        // Wire shape: SUB <subject> [queue-group] <sid>\r\n
        private static void WriteSub(ref NatsWriter writer, SubOperation op)
        {
            writer.WriteString($"SUB {op.Subject}");
            if (!string.IsNullOrEmpty(op.QueueGroup))
            {
                writer.WriteString(" ");
                writer.WriteString(op.QueueGroup);
            }
            writer.WriteString(" ");
            writer.WriteString(op.Sid);
            writer.Write(CRLF);
        }

        // Wire shape: CONNECT <json-options>\r\n
        private static void WriteConnect(ref NatsWriter writer, ConnectOperation op)
        {
            writer.WriteString($"CONNECT ");
            writer.WriteJson(op);
            writer.Write(CRLF);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;

namespace TagCanvas.Services
{
    /// <summary>Severity of a log entry.</summary>
    public enum LogType
    {
        Info,
        Warning,
        Error
    }

    /// <summary>
    /// In-memory application logger with query and pruning support.
    /// </summary>
    public interface ILogger
    {
        /// <summary>
        /// Records a log entry. The calling member's name is captured
        /// automatically via <see cref="CallerMemberNameAttribute"/> unless
        /// explicitly supplied.
        /// </summary>
        void Log(string content, LogType type = LogType.Info, [CallerMemberName] string caller = "");

        /// <summary>All recorded log entries.</summary>
        IEnumerable<Models.Log> Logs { get; }

        /// <summary>Removes every log entry matching the predicate.</summary>
        void RemoveWhere(Predicate<Models.Log> predicate);
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using RTWTR.MVC.Models;

namespace RTWTR.MVC.Areas.Administration.Models
{
    /// <summary>
    /// View model for the administration page listing all tweets.
    /// </summary>
    public class ShowAllTweetsModel
    {
        // Fix: was a public field; ASP.NET MVC model binding and most Razor
        // helpers only work with properties, and public fields violate the
        // framework design guidelines. Source-compatible for normal access.
        public ICollection<TweetViewModel> Tweets { get; set; }
    }
}
namespace ObjectSerialization.UT.Helpers
{
    /// <summary>
    /// Unit-test helper interface (presumably used to exercise polymorphic
    /// serialization scenarios — see the UT.Helpers namespace).
    /// </summary>
    public interface IPoly
    {
        /// <summary>Sample integer value exposed by implementers.</summary>
        int Int { get; }
    }
}
namespace Gobie.Models;

using System;
using System.Collections.Generic;

/// <summary>
/// Immutable value identifying a class by its namespace and simple name.
/// </summary>
public readonly struct ClassIdentifier : IEquatable<ClassIdentifier>
{
    /// <summary>
    /// Creates an identifier. An empty namespace is allowed (global namespace).
    /// </summary>
    /// <exception cref="ArgumentNullException">Either argument is null.</exception>
    public ClassIdentifier(string classNamespace, string className)
    {
        NamespaceName = classNamespace ?? throw new ArgumentNullException(nameof(classNamespace));
        ClassName = className ?? throw new ArgumentNullException(nameof(className));
    }

    public string NamespaceName { get; }

    public string ClassName { get; }

    /// <summary>Namespace-qualified name; just the class name when the namespace is blank.</summary>
    public string FullName => $"{NamespaceName}{(string.IsNullOrWhiteSpace(NamespaceName) ? "" : ".")}{ClassName}";

    /// <summary>Fully qualified name with the C# "global::" alias prefix.</summary>
    public string GlobalName => $"global::{FullName}";

    // Added: == / != so operator syntax agrees with Equals/IEquatable, per the
    // framework design guidance for value types that define value equality.
    public static bool operator ==(ClassIdentifier left, ClassIdentifier right) => left.Equals(right);

    public static bool operator !=(ClassIdentifier left, ClassIdentifier right) => !left.Equals(right);

    public override bool Equals(object? obj)
    {
        return obj is ClassIdentifier identifier && Equals(identifier);
    }

    public bool Equals(ClassIdentifier other)
    {
        return NamespaceName == other.NamespaceName && ClassName == other.ClassName;
    }

    public override int GetHashCode()
    {
        // Algorithm intentionally unchanged so hash codes stay stable.
        int hashCode = 1117777763;
        hashCode = hashCode * -1521134295 + EqualityComparer<string>.Default.GetHashCode(NamespaceName);
        hashCode = hashCode * -1521134295 + EqualityComparer<string>.Default.GetHashCode(ClassName);
        return hashCode;
    }
}
using Arbiter.Core.Enums;
using Arbiter.Core.Interfaces;
using Arbiter.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;

namespace Arbiter.LiveSportsOdds.Converters
{
    /// <summary>
    /// Maps a live-sports-odds <see cref="Models.Game"/> into per-site
    /// <see cref="Odds"/>. The reverse direction is not supported.
    /// </summary>
    public class OddsConverter : IConverter<IEnumerable<Odds>, Models.Game>
    {
        public IEnumerable<Odds> Convert(Models.Game input)
        {
            // H2H prices are indexed by each team's position in Teams.
            var teams = input.Teams;
            var homeIndex = Array.IndexOf(teams, input.HomeTeam);
            var awayTeam = input.Teams.Single(t => t != input.HomeTeam);
            var awayIndex = Array.IndexOf(teams, awayTeam);

            return input.Sites.Select(site =>
            {
                var odds = new Odds()
                {
                    Site = site.SiteNice,
                    LastUpdate = site.LastUpdate,
                    OutcomeOdds = new Dictionary<OutcomeId, decimal>()
                    {
                        [OutcomeId.HomeWin] = site.Odds.H2H[homeIndex],
                        [OutcomeId.AwayWin] = site.Odds.H2H[awayIndex]
                    }
                };

                // A third H2H entry, when present, is the draw price.
                if (site.Odds.H2H.Length > 2)
                {
                    odds.OutcomeOdds[OutcomeId.Tie] = site.Odds.H2H[2];
                }

                return odds;
            });
        }

        /// <summary>Not supported: a game cannot be reconstructed from odds alone.</summary>
        public Models.Game Convert(IEnumerable<Odds> input)
        {
            throw new InvalidOperationException("Can't convert back to Game from a list of Odds");
        }
    }
}
using CommandLine;

namespace EfsTools.CommandLineOptions
{
    /// <summary>
    /// Command-line options for the "getLog" verb (diagnostic log capture).
    /// </summary>
    [Verb("getLog", HelpText = "Get log")]
    internal class GetLogsOptions
    {
        [Option('m', "messageMask", Required = false, HelpText = "Message packets mask")]
        public string MessageMask { get; set; }

        [Option('l', "logMask", Required = false, HelpText = "Log packets mask")]
        public string LogMask { get; set; }

        [Option('v', "verbose", Required = false, HelpText = "Verbose output")]
        public bool Verbose { get; set; }

        // The options below are deliberately disabled; kept for possible future use.
        /*[Option('e', "eventMask", Required = false, HelpText = "Event reports mask")]
        public string EventMask { get; set; }

        [Option('f', "fileName", Required = false, HelpText = "File name")]
        public string FileName { get; set; }

        [Option('a', "layout", Required = false, HelpText = "Log layout")]
        public string Layout { get; set; }

        [Option('c', "logConfigFile", Required = false, HelpText = "NLog config file")]
        public string LogConfigFile { get; set; }*/
    }
}
namespace Management.Storage.ScenarioTest.Util.Globalization
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text;

    /// <summary>
    /// Generates Unicode test data drawn from the Windows-1254 (Turkish) code page.
    /// </summary>
    public sealed class Windows1254Generator : SingleByteCodePageUnicodeGenerator
    {
        public Windows1254Generator(params char[] excludedCharacters)
            : base(1254, excludedCharacters)
        {
        }

        /// <summary>
        /// Inclusive byte ranges of code page 1254 used by the generator.
        /// </summary>
        protected override Tuple<byte, byte>[] GetRanges()
        {
            return new[]
            {
                Tuple.Create((byte)0x82, (byte)0x8C),
                Tuple.Create((byte)0x91, (byte)0x9C),
                Tuple.Create((byte)0x9F, (byte)0xFF),
            };
        }
    }
}
using System;

namespace System.Windows.Threading
{
    /// <summary>
    /// Base class for all event arguments associated with a <see cref="Dispatcher"/>.
    /// </summary>
    /// <ExternalAPI/>
    public class DispatcherEventArgs : EventArgs
    {
        private readonly Dispatcher _dispatcher;

        internal DispatcherEventArgs(Dispatcher dispatcher)
        {
            _dispatcher = dispatcher;
        }

        /// <summary>
        /// The <see cref="Dispatcher"/> associated with this event.
        /// </summary>
        /// <ExternalAPI/>
        public Dispatcher Dispatcher => _dispatcher;
    }
}
using System;
using System.Windows;
using System.Windows.Controls;

namespace Xamarin.Forms.Platform.WinPhone
{
    // Keeps a renderer's native Panel in sync with its element's logical children:
    // adds/removes child renderers and maintains the native z-order.
    public class VisualElementPackager
    {
        readonly Panel _panel;
        readonly IVisualElementRenderer _renderer;
        bool _loaded;

        public VisualElementPackager(IVisualElementRenderer renderer)
        {
            if (renderer == null)
                throw new ArgumentNullException("renderer");

            _panel = renderer.ContainerElement as Panel;
            if (_panel == null)
                throw new ArgumentException("Renderer's container element must be a Panel or Panel subclass");

            _renderer = renderer;
        }

        IElementController ElementController => _renderer.Element as IElementController;

        // Subscribes to child-collection events and packs any children that already
        // exist. Safe to call repeatedly; only the first call does work.
        public void Load()
        {
            if (_loaded)
                return;

            _loaded = true;
            _renderer.Element.ChildAdded += HandleChildAdded;
            _renderer.Element.ChildRemoved += HandleChildRemoved;
            _renderer.Element.ChildrenReordered += HandleChildrenReordered;

            foreach (Element child in ElementController.LogicalChildren)
                HandleChildAdded(_renderer.Element, new ElementEventArgs(child));
        }

        // Re-applies z-indices so native stacking matches logical child order (1-based).
        void EnsureZIndex()
        {
            for (var index = 0; index < ElementController.LogicalChildren.Count; index++)
            {
                var child = (VisualElement)ElementController.LogicalChildren[index];
                IVisualElementRenderer r = Platform.GetRenderer(child);
                if (r == null)
                    continue; // child has no renderer yet

                // Even though this attached property is defined on Canvas, it actually works on all Panels
                // Why? Microsoft.
                Canvas.SetZIndex(r.ContainerElement, index + 1);
            }
        }

        void HandleChildAdded(object sender, ElementEventArgs e)
        {
            var view = e.Element as VisualElement;
            if (view == null)
                return; // non-visual children need no native counterpart

            IVisualElementRenderer renderer;
            Platform.SetRenderer(view, renderer = Platform.CreateRenderer(view));
            _panel.Children.Add(renderer.ContainerElement);

            EnsureZIndex();
        }

        void HandleChildRemoved(object sender, ElementEventArgs e)
        {
            var view = e.Element as VisualElement;
            if (view == null)
                return;

            // NOTE(review): removal casts the renderer itself to UIElement, while add
            // inserted renderer.ContainerElement — presumably these are the same object
            // for WinPhone renderers; confirm, otherwise the Remove is a silent no-op.
            var renderer = Platform.GetRenderer(view) as UIElement;
            if (renderer != null)
                _panel.Children.Remove(renderer);

            EnsureZIndex();
        }

        void HandleChildrenReordered(object sender, EventArgs e)
        {
            EnsureZIndex();
        }
    }
}
using NeuralNet.Autodiff;

namespace NeuralNet.Optimizers
{
    /// <summary>
    /// Base class for gradient-based optimization algorithms.
    /// </summary>
    public abstract class Optimizer
    {
        /// <summary>
        /// The learning rate, stored as a scalar <see cref="NDimArray"/>.
        /// </summary>
        public NDimArray LearningRate { get; set; }

        /// <summary>
        /// Creates an optimizer with the given learning rate.
        /// </summary>
        /// <param name="lr">Learning rate value.</param>
        public Optimizer(double lr)
        {
            LearningRate = new NDimArray(lr);
        }

        /// <summary>
        /// Performs one parameter update based on the parameters' current gradients.
        /// </summary>
        /// <param name="module">The module whose parameters will be updated.</param>
        public abstract void Step(Module module);
    }
}
namespace UserService.Core.AuditPackage.AuditException
{
    /// <summary>
    /// Audit exception raised for an invalid password input.
    /// Mapped to the gRPC <c>PermissionDenied</c> status.
    /// </summary>
    [StatusCode(Grpc.Core.StatusCode.PermissionDenied)]
    public class PasswordInputException : AuditException
    {
        public PasswordInputException()
            : base(nameof(PasswordInputException))
        {
        }
    }
}
using System.Collections;
using System.Management.Automation;
using AzureAppConfigurationRetriever.Core.Interfaces;

namespace AzureAppConfigurationRetriever.PS.Commands
{
    /// <summary>
    /// Get-AzureAppConfiguration cmdlet: retrieves a configuration value
    /// (optionally scoped by label) from Azure App Configuration.
    /// </summary>
    [Cmdlet(VerbsCommon.Get, "AzureAppConfiguration")]
    [OutputType(typeof(string))]
    public class GetAzureAppConfiguration : BaseCmdlet
    {
        public GetAzureAppConfiguration()
        {
        }

        // Overload accepting explicit dependencies.
        public GetAzureAppConfiguration(CmdletDependencies cmdletDependencies)
            : base(cmdletDependencies)
        {
        }

        /// <summary>Name (key) of the configuration value to retrieve.</summary>
        [Parameter(Mandatory = true, ValueFromPipeline = true)]
        public string ValueName { get; set; }

        /// <summary>Optional label filter.</summary>
        [Parameter(Mandatory = false, ValueFromPipeline = true)]
        public string Label { get; set; }

        protected override void ProcessRecord()
        {
            var credentials = base.GetAzureAppConfigurationCredentials();
            IAzureAppConfigurationRetriever retriever =
                new Core.Implementations.AzureAppConfigurationRetriever(credentials);

            WriteObject(retriever.GetConfiguration(ValueName, Label));
        }
    }
}
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

/// <summary>
/// Quits the application after the exit control has been held long enough,
/// showing a fill-based progress indicator while it is held.
/// </summary>
public class ExitScript : MonoBehaviour
{
    // Progress added per physics tick while the exit control is held.
    // (Name kept for serialized-field compatibility; likely meant "step".)
    public float steep;
    public Image loadIcon;
    public GameObject layer;

    float progress = 0;

    void FixedUpdate()
    {
        if (Controll.GetExit())
        {
            if (!layer.gameObject.activeSelf)
                layer.gameObject.SetActive(true);

            progress = progress + steep;

            // Image.fillAmount expects [0, 1]; the old clamp to [0, 100] was a
            // no-op above 1, so clamp to the actual range.
            loadIcon.fillAmount = Mathf.Clamp01(progress);

            if (progress >= 1)
            {
                Application.Quit();
            }
        }
        else
        {
            // Exit released: reset and hide the overlay.
            if (layer.gameObject.activeSelf)
            {
                progress = 0;
                layer.gameObject.SetActive(false);
            }
        }
    }
}
using UnityEngine;

/// <summary>
/// Thin wrapper exposing an attribute label's displayed text.
/// </summary>
public class AttributeStats : MonoBehaviour
{
    [SerializeField]
    private TMPro.TextMeshProUGUI lblValue;

    /// <summary>Gets or sets the label text.</summary>
    public string Text
    {
        get => lblValue.text;
        set => lblValue.text = value;
    }
}
using System;
using System.IO;
using SharpChannels.Core.Messages;

namespace SharpChannels.Core.Serialization
{
    /// <summary>
    /// Writes framed binary messages (type, length, payload) to a stream.
    /// </summary>
    internal class BinaryMessageWriter
    {
        // Wraps the caller's stream; intentionally not disposed here so the
        // underlying stream stays open for further traffic.
        private readonly BinaryWriter _binaryWriter;

        public BinaryMessageWriter(Stream stream)
        {
            _binaryWriter = new BinaryWriter(stream);
        }

        public void Write(IBinaryMessageData message)
        {
            var messageType = (ushort)message.Type;
            var payloadLength = message.Data.Length;

            // NOTE(review): header fields are byte-swapped on big-endian hosts
            // before BinaryWriter emits them — presumably to match the wire
            // format the paired reader expects; confirm against the reader.
            if (!BitConverter.IsLittleEndian)
            {
                Endianness.Swap(ref messageType);
                Endianness.Swap(ref payloadLength);
            }

            _binaryWriter.Write(messageType);
            _binaryWriter.Write(payloadLength);
            _binaryWriter.Write(message.Data);
            _binaryWriter.Flush();
        }
    }
}
using Nancy;
using Nancy.Testing;
using Silverpop.Client.WebTester.Models;
using Silverpop.Core;
using System.Collections.Generic;
using System.Threading.Tasks;
using Xunit;

namespace Silverpop.Client.WebTester.Tests.Modules
{
    // Browser-level tests for the web tester's home module ("/" and "/send"),
    // wired against fake Silverpop components via a configurable bootstrapper.
    public class HomeModuleTests
    {
        private readonly Browser _browser;

        public HomeModuleTests()
        {
            var bootstrapper = GetConfigurableBootstrapper();
            _browser = new Browser(bootstrapper);
        }

#pragma warning disable UseAsyncSuffix
        public class GetRootTests : HomeModuleTests
        {
            private readonly BrowserResponse _response;

            public GetRootTests()
            {
                // NOTE(review): sync-over-async in a constructor; xUnit's
                // IAsyncLifetime would be the cleaner way to do async setup.
                _response = _browser.Get("/", with =>
                {
                    with.HttpRequest();
                }).ConfigureAwait(false).GetAwaiter().GetResult();
            }

            [Fact]
            public void ReturnsOk()
            {
                Assert.Equal(HttpStatusCode.OK, _response.StatusCode);
            }

            [Fact]
            public void ReturnsIndexView()
            {
                // NOTE(review): these locals are unused; only the model-type
                // assertion below verifies anything.
                var html = _response.Body.AsString();
                var moduleName = _response.GetModuleName();
                var modulePath = _response.GetModulePath();
                var viewName = _response.GetViewName();
                var contentType = _response.ContentType;

                Assert.IsType<IndexModel>(_response.GetModel<IndexModel>());
            }
        }

        public class PostSendTests : HomeModuleTests
        {
            [Fact]
            public async Task ReturnsBadRequestWhenNoModel()
            {
                var response = await _browser.Post("/send", with =>
                {
                    with.AjaxRequest();
                });

                Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
            }

            [Fact]
            public async Task ReturnsBadRequestWhenModelMissingCampaignId()
            {
                var response = await _browser.Post("/send", with =>
                {
                    with.AjaxRequest();
                    with.JsonBody<SendModel>(new SendModel()
                    {
                        CampaignId = null,
                        ToAddress = "test@example.com"
                    });
                });

                Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
                Assert.Contains("'Campaign Id' should not be empty.", response.Body.AsString());
            }

            [Fact]
            public async Task ReturnsBadRequestWhenMissingToAddress()
            {
                var response = await _browser.Post("/send", with =>
                {
                    with.AjaxRequest();
                    with.JsonBody<SendModel>(new SendModel()
                    {
                        CampaignId = "123",
                        ToAddress = null
                    });
                });

                Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
                Assert.Contains("'To Address' should not be empty.", response.Body.AsString());
            }

            [Fact]
            public async Task ReturnsInternalServerErrorForSendingError()
            {
                // Bootstrapper configured so the fake transact client reports an error.
                var testSpecificBootstrapper = GetConfigurableBootstrapper(
                    errorStringToUse: "Some error happened.");
                var testSpecificBrowser = new Browser(testSpecificBootstrapper);

                var response = await testSpecificBrowser.Post("/send", with =>
                {
                    with.AjaxRequest();
                    with.JsonBody<SendModel>(new SendModel()
                    {
                        CampaignId = "123",
                        ToAddress = "test@example.com"
                    });
                });

                Assert.Equal(HttpStatusCode.InternalServerError, response.StatusCode);
                Assert.Equal("Some error happened.", response.Body.AsString());
            }

            [Fact]
            public async Task ReturnsOkForSuccess()
            {
                var response = await _browser.Post("/send", with =>
                {
                    with.AjaxRequest();
                    with.JsonBody<SendModel>(new SendModel()
                    {
                        CampaignId = "123",
                        ToAddress = "test@example.com"
                    });
                });

                Assert.Equal(HttpStatusCode.OK, response.StatusCode);
            }
        }
#pragma warning restore UseAsyncSuffix

        // Builds a Nancy bootstrapper wired with fake Silverpop components.
        // When errorStringToUse is non-null the fake transact response carries
        // that error and a failure status; otherwise it reports success.
        private ConfigurableBootstrapper GetConfigurableBootstrapper(
            string errorStringToUse = null)
        {
            return new ConfigurableBootstrapper(with =>
            {
                with.ApplicationStartup((container, pipelines) =>
                {
                    container.Register<TransactClient>((x, y) => new TransactClient(
                        new TransactClientConfiguration()
                        {
                            PodNumber = 0,
                        },
                        new FakeTransactMessageEncoder("encodeOutput_data"),
                        new FakeTransactMessageResponseDecoder(new TransactMessageResponse()
                        {
                            Error = new KeyValuePair<int, string>(1, errorStringToUse),
                            Status = errorStringToUse == null
                                ? TransactMessageResponseStatus.NoErrorsAllRecipientsSent
                                : TransactMessageResponseStatus.EncounteredErrorsNoMessagesSent
                        }),
                        () => new FakeSilverpopCommunicationsClient()));
                });

                with.RootPathProvider<TestingRootPathProvider>();
                with.ViewFactory<TestingViewFactory>();
                with.AllDiscoveredModules();
            });
        }
    }
}
using Alexa.NET.Request;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Internal;
using System;
using System.IO;
using System.Text;
using System.Threading.Tasks;

namespace Alexa.NET.Security.Middleware
{
    /// <summary>
    /// An ASP.NET Core middleware for validating Alexa requests.
    /// </summary>
    public class AlexaRequestValidationMiddleware
    {
        private readonly RequestDelegate _next;

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="next">Next middleware in the pipeline.</param>
        public AlexaRequestValidationMiddleware(RequestDelegate next)
        {
            _next = next;
        }

        /// <summary>
        /// Validates that all parts needed for a valid Alexa request are present
        /// and passes them to the RequestVerification tool. Responds 400 and
        /// short-circuits the pipeline when anything is missing or invalid.
        /// </summary>
        /// <param name="context">Current HTTP context.</param>
        public async Task Invoke(HttpContext context)
        {
            // EnableRewind so the body can be read without causing issues to the request pipeline
            context.Request.EnableRewind();

            // Verify SignatureCertChainUrl is present
            context.Request.Headers.TryGetValue("SignatureCertChainUrl", out var signatureChainUrl);
            if (String.IsNullOrWhiteSpace(signatureChainUrl))
            {
                context.Response.StatusCode = StatusCodes.Status400BadRequest;
                return;
            }

            Uri certUrl;
            try
            {
                certUrl = new Uri(signatureChainUrl);
            }
            catch
            {
                context.Response.StatusCode = StatusCodes.Status400BadRequest;
                return;
            }

            // Verify the Signature header is present
            context.Request.Headers.TryGetValue("Signature", out var signature);
            if (String.IsNullOrWhiteSpace(signature))
            {
                context.Response.StatusCode = StatusCodes.Status400BadRequest;
                return;
            }

            // Read the body asynchronously (the original blocked on ReadToEnd),
            // leaving the stream open and rewinding it for downstream middleware.
            string body;
            using (var reader = new StreamReader(
                context.Request.Body, Encoding.UTF8,
                detectEncodingFromByteOrderMarks: true, bufferSize: 1024, leaveOpen: true))
            {
                body = await reader.ReadToEndAsync();
            }

            context.Request.Body.Position = 0;

            if (String.IsNullOrWhiteSpace(body))
            {
                context.Response.StatusCode = StatusCodes.Status400BadRequest;
                return;
            }

            var valid = await RequestVerification.Verify(signature, certUrl, body);
            if (!valid)
            {
                context.Response.StatusCode = StatusCodes.Status400BadRequest;
                return;
            }

            await _next(context);
        }
    }
}
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Patrolling enemy: walks between two limits, flipping its sprite at each
/// end, and damages the player on contact.
/// </summary>
public class Enemy : MonoBehaviour
{
    public Transform rightLimit;
    public Transform leftLimit;

    bool goRight = true;

    [SerializeField] float speed = 2;

    // Impulse components reported to the player via GiveImpulseToPlayer().
    float valueX = 1000;
    float valueY = 200;
    private float[] values;

    int damage = 1;

    private void Start()
    {
        // The patrol starts at the right limit.
        transform.position = rightLimit.position;
    }

    private void Update()
    {
        // Move in the current direction, then flip when a limit is passed.
        var step = speed * Time.deltaTime;
        transform.Translate((goRight ? Vector2.right : Vector2.left) * step);

        if (goRight && transform.position.x > rightLimit.position.x)
        {
            GetComponent<SpriteRenderer>().flipX = true;
            goRight = false;
        }
        else if (!goRight && transform.position.x < leftLimit.position.x)
        {
            GetComponent<SpriteRenderer>().flipX = false;
            goRight = true;
        }
    }

    private void OnCollisionEnter2D(Collision2D collision)
    {
        print("ENEMY toca: " + collision.gameObject.name);

        if (collision.gameObject.tag == "Player")
        {
            collision.gameObject.GetComponent<Player>().TakeDamage(damage);
            //collision.GetComponent<Rigidbody2D>().AddForce(Vector2.up * 650);
        }
    }

    /// <summary>Returns the (x, y) impulse the player should receive when hit.</summary>
    public float[] GiveImpulseToPlayer()
    {
        values = new float[] { valueX, valueY };
        return values;
    }
}
using TrialOfFortune.Classes;

namespace TrialOfFortune.Cards
{
    // Ability contract for the Juggernaut card.
    interface IAbilityJuggernaut
    {
        // Executes the Charge ability for the given player.
        void Charge(Player player);
    }
}
// Copyright(c) .NET Foundation.All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Generic;
using Xunit;

namespace NuGet.LibraryModel.Tests
{
    // Round-trip tests for FrameworkDependencyFlags <-> string conversion.
    public class FrameworkDependencyFlagsUtilsTests
    {
        [Fact]
        public void FrameworkDependencyFlagsUtils_GetFlagString_ReturnsExpectedString()
        {
            Assert.Equal("all", FrameworkDependencyFlagsUtils.GetFlagString(FrameworkDependencyFlags.All));
            Assert.Equal("none", FrameworkDependencyFlagsUtils.GetFlagString(FrameworkDependencyFlags.None));
        }

        // Parsing is case-insensitive, null maps to None, and combining
        // "all" with "none" resolves to All.
        [Fact]
        public void FrameworkDependencyFlagsUtils_GetFlagsFromString_ReturnsExpectedFlags()
        {
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags("all"));
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags("All"));
            Assert.Equal(FrameworkDependencyFlags.None, FrameworkDependencyFlagsUtils.GetFlags("None"));
            Assert.Equal(FrameworkDependencyFlags.None, FrameworkDependencyFlagsUtils.GetFlags("none"));
            Assert.Equal(FrameworkDependencyFlags.None, FrameworkDependencyFlagsUtils.GetFlags((string)null));
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags("none,all")); // Stupid to write this, but pointless to enforce that people don't :)
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags("all,none")); // Stupid to write this, but pointless to enforce that people don't :)
        }

        [Fact]
        public void FrameworkDependencyFlagsUtils_GetFlagsFromAnEnumerable_ReturnsExpectedFlags()
        {
            Assert.Equal(FrameworkDependencyFlags.None, FrameworkDependencyFlagsUtils.GetFlags((IEnumerable<string>)null));
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags(new string[] { "all" }));
            Assert.Equal(FrameworkDependencyFlags.None, FrameworkDependencyFlagsUtils.GetFlags(new string[] { "none" }));
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags(new string[] { "all", "none" }));
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags(new string[] { "none", "all" }));
            Assert.Equal(FrameworkDependencyFlags.All, FrameworkDependencyFlagsUtils.GetFlags(new string[] { "All", "None" }));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Skyreach.Util
{
    /// <summary>
    /// Locates points on a zero-based line partitioned into consecutive segments.
    /// </summary>
    public class SegmentScanner
    {
        /// <summary>
        /// Enumeration of segment lengths on a zero based line.
        /// </summary>
        private readonly IEnumerable<int> _segmentLengths;

        /// <summary>
        /// Creates a scanner over the given segment lengths.
        /// </summary>
        /// <param name="segmentLengths">List of segment lengths.</param>
        public SegmentScanner(IEnumerable<int> segmentLengths)
        {
            _segmentLengths = segmentLengths;
        }

        /// <summary>
        /// Finds the segment containing the given point location.
        /// </summary>
        /// <param name="loc">Zero-based point location; must be non-negative.</param>
        /// <returns>
        /// The segment index and the offset of the point inside the segment,
        /// or null when the location lies past the last segment.
        /// </returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when <paramref name="loc"/> is negative.
        /// </exception>
        public SegmentLocation Find(int loc)
        {
            if (loc < 0)
            {
                // More specific than the previous ArgumentException; it derives
                // from ArgumentException, so existing catch blocks keep working.
                throw new ArgumentOutOfRangeException(
                    nameof(loc),
                    "Cannot find a negative location, works only on zero-based locations");
            }

            int segIdx = 0;
            int segStart = 0;
            foreach (int length in _segmentLengths)
            {
                int segEnd = segStart + length;
                if (loc < segEnd)
                {
                    return new SegmentLocation(segIdx, loc - segStart);
                }

                // advance for next iteration
                segStart = segEnd;
                segIdx++;
            }

            return null;
        }
    }

    /// <summary>
    /// A point's position expressed as (segment index, offset within segment).
    /// </summary>
    public class SegmentLocation
    {
        /// <summary>Index of the containing segment.</summary>
        public int SegmentIdx { get; private set; }

        /// <summary>Zero-based offset of the point inside the segment.</summary>
        public int InSegmentIdx { get; private set; }

        public SegmentLocation(int segIdx, int inSegIdx)
        {
            SegmentIdx = segIdx;
            InSegmentIdx = inSegIdx;
        }
    }
}
using System.Collections.Generic;
using LewisFam.Stocks.Models;

namespace LewisFam.Stocks.ThirdParty.Cnbc.Models
{
    /// <summary>
    /// Real-time stock quote as returned by the CNBC quote service, extending
    /// <see cref="IStockQuote"/> with CNBC-specific fields. Many members are raw
    /// strings — presumably mirroring the upstream payload as-is; confirm.
    /// </summary>
    public interface ICnbcRealTimeStockQuote : IStockQuote
    {
        // --- numeric price/volume figures ---
        double Change { get; set; }
        new double Close { get; set; }
        double High { get; set; }
        double Last { get; set; }
        double Low { get; set; }
        double Open { get; set; }
        double Price { get; set; }
        long Volume { get; set; }

        // Nested quotes carried with this one (batch responses — presumably; confirm).
        IList<ICnbcRealTimeStockQuote> Datas { get; set; }

        // --- identification / metadata (raw string fields) ---
        string AltName { get; set; }
        string AltSymbol { get; set; }
        string AssetSubType { get; set; }
        string AssetType { get; set; }
        string CachedTime { get; set; }
        string CacheServed { get; set; }
        string ChangePct { get; set; }
        string Code { get; set; }
        //string Comments { get; set; }
        string CountryCode { get; set; }
        string Curmktstatus { get; set; }
        string CurrencyCode { get; set; }
        EventData EventData { get; set; }
        //string Exchange { get; set; }
        ExtendedMktQuote ExtendedMktQuote { get; set; }
        string FullVolume { get; set; }
        FundamentalData FundamentalData { get; set; }
        string IssuerId { get; set; }
        string Name { get; set; }
        string OnAirName { get; set; }
        // string Provider { get; set; }
        string RealTime { get; set; }
        string ResponseTime { get; set; }
        string ShortName { get; set; }
        string Source { get; set; }
        string Streamable { get; set; }
        string SymbolType { get; set; }
        string TimeZone { get; set; }
        string PreviousDayClosing { get; set; }
        string PrevPrevClosing { get; set; }
    }
}
using Serenity.Data;

namespace Serenity.Services
{
    /// <summary>
    /// Handler contract for delete operations on a row type with custom
    /// request/response types.
    /// </summary>
    public interface IDeleteHandler<TRow, TDeleteRequest, TDeleteResponse>
        : IRequestHandler<TRow, TDeleteRequest, TDeleteResponse>
        where TRow : class, IRow, new()
        where TDeleteRequest : DeleteRequest
        where TDeleteResponse : DeleteResponse, new()
    {
        /// <summary>Executes the delete within the given unit of work.</summary>
        TDeleteResponse Delete(IUnitOfWork uow, TDeleteRequest request);
    }

    /// <summary>
    /// Convenience variant using the default DeleteRequest/DeleteResponse types.
    /// </summary>
    public interface IDeleteHandler<TRow>
        : IDeleteHandler<TRow, DeleteRequest, DeleteResponse>
        where TRow : class, IRow, new()
    {
    }
}
namespace ET
{
    // Marker interface: a Unit component implementing this indicates the
    // component needs to be transferred along with the Unit.
    public interface ITransfer
    {
    }
}
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Base class for grid-moving units: raycast-checked movement with smooth
// interpolation toward the destination cell.
public abstract class MovingUnit : MonoBehaviour, Unit
{
    public float moveTime = .2f;        // seconds to traverse one cell
    public LayerMask blockingLayer;     // layers that block movement

    protected BoxCollider2D boxCollider;
    protected Rigidbody2D rb2D;

    private float inverseMoveTime;      // cached 1/moveTime used as MoveTowards speed

    // Use this for initialization
    protected virtual void Start()
    {
        boxCollider = GetComponent<BoxCollider2D>();
        rb2D = GetComponent<Rigidbody2D>();
        inverseMoveTime = 1f / moveTime;
    }

    // Attempts a one-cell move; returns true and starts the smooth move when the
    // path is clear, otherwise returns false with the blocking hit in `hit`.
    protected bool move(int xDir, int yDir, out RaycastHit2D hit)
    {
        Vector2 start = transform.position;
        Vector2 end = start + new Vector2(xDir, yDir);

        // Disable our own collider so the linecast cannot hit ourselves.
        boxCollider.enabled = false;
        hit = Physics2D.Linecast(start, end, blockingLayer);
        boxCollider.enabled = true;

        if (hit.transform == null)
        {
            StartCoroutine(smoothMovement(end));
            return true;
        }

        return false;
    }

    // Coroutine: moves the rigidbody a little toward `end` each frame,
    // invoking onStop() once the destination has been reached.
    protected IEnumerator smoothMovement(Vector3 end)
    {
        float sqrRemaningDistance = (transform.position - end).sqrMagnitude;
        while (sqrRemaningDistance > 0)
        {
            Vector3 newPosition = Vector3.MoveTowards(rb2D.position, end, inverseMoveTime * Time.deltaTime);
            rb2D.MovePosition(newPosition);
            sqrRemaningDistance = (transform.position - end).sqrMagnitude;
            if (!(sqrRemaningDistance > 0))
                onStop();
            yield return null;
        }
    }

    // Tries to move; when blocked by something carrying a component of type T,
    // notifies onCantMove with that component.
    protected virtual bool attemptMove<T>(int xDir, int yDir) where T : Unit
    {
        RaycastHit2D hit;
        bool canMove = move(xDir, yDir, out hit);

        if (hit.transform == null)
            return canMove;

        T hitComponenet = hit.transform.GetComponent<T>();

        if (!canMove && hitComponenet != null)
        {
            onCantMove(hitComponenet);
        }

        return canMove;
    }

    // Simple calculation between two objects. Reports, via `face`, which side of
    // this unit the other object is on, and returns the Euclidean distance.
    public virtual float distFrom(GameObject go, out EnumManager.Face face)
    {
        float myX = transform.position.x;
        float myY = transform.position.y;
        float otherX = go.transform.position.x;
        float otherY = go.transform.position.y;

        float dist = Mathf.Sqrt(Mathf.Pow(myX - otherX, 2) + Mathf.Pow(myY - otherY, 2));
        float xDist = myX - otherX; // Positive if to the left, negative if to the right
        float yDist = myY - otherY; // Positive if below, negative if above

        // The axis with the larger separation decides the facing.
        if (Mathf.Abs(xDist) > Mathf.Abs(yDist))
        {
            if (xDist > 0)
                face = EnumManager.Face.Left;
            else
                face = EnumManager.Face.Right;
        }
        else
        {
            if (yDist > 0)
                face = EnumManager.Face.Down;
            else
                face = EnumManager.Face.Up;
        }

        return dist;
    }

    // Called when a move is blocked by a T (e.g. attack the blocker).
    protected abstract void onCantMove<T>(T component) where T : Unit;

    // Called when a smooth move finishes.
    protected abstract void onStop();

    public int Health { get; set; }
    public int Defense { get; set; }
    public int Mana { get; set; }
    public int ViewRange { get; set; }

    public abstract void Hurt(int damage);
}
using System;
using UnityEngine;
using UnityEngine.Serialization;

namespace UniVRM10.FastSpringBones.Blittables
{
    /// <summary>
    /// Blittable collider (safe to copy into native/Burst-compatible memory).
    /// </summary>
    [Serializable]
    public struct BlittableCollider
    {
        // Collider shape; see BlittableColliderType for the variants.
        public BlittableColliderType colliderType;
        // Offset from the owning transform — presumably local space; confirm.
        public Vector3 offset;
        public float radius;
        // Second end point — presumably used only by capsule shapes; confirm.
        public Vector3 tail;
        // Index of the owning transform in the system's flattened transform buffer
        // (presumably; confirm against the consumer).
        public int transformIndex;
    }
}
using DataFileProcessor.Models;

namespace DataFileProcessor.Services
{
    /// <summary>
    /// Persistence operations for <see cref="Person"/> records.
    /// </summary>
    public interface IPersonRepository
    {
        /// <summary>Returns true when a matching record already exists.</summary>
        bool RecordExists(Person person);

        /// <summary>
        /// Commits pending changes; returns an int — presumably the number of
        /// affected records (EF convention); confirm with the implementation.
        /// </summary>
        int SaveChanges();

        /// <summary>Inserts the person, or updates the existing record.</summary>
        void UpsertPerson(Person person);
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Bridge.Contract;

namespace Bridge.Translator
{
    /// <summary>
    /// Options controlling a Bridge translation run (command line or MSBuild task).
    /// </summary>
    public class BridgeOptions
    {
        public string Name { get; set; }
        public ProjectProperties ProjectProperties { get; set; }
        public string ProjectLocation { get; set; }
        public string OutputLocation { get; set; }
        public string DefaultFileName { get; set; }
        public string BridgeLocation { get; set; }
        public bool Rebuild { get; set; }
        public bool ExtractCore { get; set; }
        public string Folder { get; set; }
        public bool Recursive { get; set; }
        public string Lib { get; set; }
        public bool Help { get; set; }
        public bool? NoTimeStamp { get; set; }
        public bool FromTask { get; set; }
        public bool NoLoggerSetUp { get; set; }
        public string Sources { get; set; }

        /// <summary>True when no project file was given and a folder is translated instead.</summary>
        public bool IsFolderMode => string.IsNullOrWhiteSpace(this.ProjectLocation);

        public BridgeOptions()
        {
            ExtractCore = true;
            Folder = Environment.CurrentDirectory;
        }

        /// <summary>Comma-separated "name:value" dump of every option.</summary>
        public override string ToString() =>
            string.Join(", ", GetValues().Select(pair => pair.Key + ":" + pair.Value));

        // Snapshot of all option values as display strings, keyed by property name.
        protected Dictionary<string, string> GetValues() => new Dictionary<string, string>()
        {
            { WrapProperty(nameof(Name)), GetString(this.Name) },
            { WrapProperty(nameof(ProjectProperties)), GetString(this.ProjectProperties) },
            { WrapProperty(nameof(ProjectLocation)), GetString(this.ProjectLocation) },
            { WrapProperty(nameof(OutputLocation)), GetString(this.OutputLocation) },
            { WrapProperty(nameof(DefaultFileName)), GetString(this.DefaultFileName) },
            { WrapProperty(nameof(BridgeLocation)), GetString(this.BridgeLocation) },
            { WrapProperty(nameof(Rebuild)), GetString(this.Rebuild) },
            { WrapProperty(nameof(ExtractCore)), GetString(this.ExtractCore) },
            { WrapProperty(nameof(Folder)), GetString(this.Folder) },
            { WrapProperty(nameof(Recursive)), GetString(this.Recursive) },
            { WrapProperty(nameof(Lib)), GetString(this.Lib) },
            { WrapProperty(nameof(Help)), GetString(this.Help) },
            { WrapProperty(nameof(NoTimeStamp)), GetString(this.NoTimeStamp) },
            { WrapProperty(nameof(FromTask)), GetString(this.FromTask) },
            { WrapProperty(nameof(NoLoggerSetUp)), GetString(this.NoLoggerSetUp) },
            { WrapProperty(nameof(Sources)), GetString(this.Sources) },
        };

        // Hook for decorating property names in the dump; identity for now.
        protected string WrapProperty(string name) => name;

        protected string GetString(string s) => s ?? "";

        protected string GetString(ProjectProperties p) => p != null ? p.ToString() : "";

        protected string GetString(bool? b) => b.HasValue ? GetString(b.Value) : GetString((string)null);

        protected string GetString(bool b) => b.ToString().ToLowerInvariant();
    }
}
namespace AuthorizationService
{
    /// <summary>
    /// Exchanges an OAuth authorization response for access tokens.
    /// </summary>
    public interface IAuthorizationService
    {
        /// <summary>
        /// Redeems the authorization <paramref name="code"/>, together with the
        /// provider-returned <paramref name="state"/> value, for OAuth access tokens.
        /// </summary>
        OAuthAccessTokens GetOAuthAccessTokens(string code, string state);
    }
}
namespace navdi2
{
    using UnityEngine;
    using System.Collections.Generic;

    [System.Serializable]
    /// <summary>twinteger: IntVector2 — inclusive integer rectangle [min, max].</summary>
    public struct twinrect
    {
        public twin min;  // inclusive lower corner
        public twin max;  // inclusive upper corner

        public twinrect(int x1, int y1, int x2, int y2)
        {
            this.min = new twin(x1, y1);
            this.max = new twin(x2, y2);
        }

        public twinrect(twin min, twin max)
        {
            this.min = min;
            this.max = max;
        }

        // Inclusive size: a rect with min == max has size (1,1).
        public twin size { get { return this.max - this.min + twin.one; } }

        public static twinrect operator +(twinrect p1, twinrect p2) { return new twinrect(p1.min + p2.min, p1.max + p2.max); }
        public static twinrect operator -(twinrect p1, twinrect p2) { return new twinrect(p1.min - p2.min, p1.max - p2.max); }
        // Translation of the whole rect by a point offset.
        public static twinrect operator +(twinrect r, twin t) { return new twinrect(r.min + t, r.max + t); }
        public static twinrect operator -(twinrect r, twin t) { return new twinrect(r.min - t, r.max - t); }

        // Inclusive containment; relies on twin's component-wise >= / <= operators.
        public bool Contains(twin point)
        {
            return point >= min && point <= max;
        }

        // Invokes func for every point in the rect, row by row (y outer, x inner).
        public void DoEach(System.Action<twin> func)
        {
            for (twin point = min; point.y <= max.y; point.x = min.x, point.y++)
            {
                for (; point.x <= max.x; point.x++)
                {
                    func(point);
                }
            }
        }

        // Area in cells; negative extents are folded to positive.
        public int GetArea()
        {
            twin size = max - min + twin.one;
            if (size.x < 0) size.x = -size.x;
            if (size.y < 0) size.y = -size.y;
            return size.x * size.y;
        }

        // All points in the rect, optionally shuffled in place.
        public twin[] GetAllPoints(bool shuffled = false)
        {
            List<twin> allPointsList = new List<twin>();
            DoEach((point) => { allPointsList.Add(point); });
            twin[] allPoints = allPointsList.ToArray();
            if (shuffled) Util.Shuffle<twin>(ref allPoints);
            return allPoints;
        }

        override public string ToString()
        {
            return string.Format("twinrect({0},{1},{2},{3})", min.x, min.y, max.x, max.y);
        }
    }
}
using BEDA.CMB.Contracts.Responses;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Serialization;

namespace BEDA.CMB.Contracts.Requests
{
    /// <summary>
    /// 12.4.5. Request body: payment institution handling an inward-remittance
    /// parent transaction.
    /// </summary>
    [XmlRoot("CMBSDKPGK")]
    public class RQ12_4_5 : CMBBase<RQINFO>, IRequest<RS12_4_5>
    {
        /// <summary>
        /// Function name for this request: NTIRAJZP.
        /// </summary>
        /// <returns></returns>
        public override string GetFUNNAM() => "NTIRAJZP";

        /// <summary>
        /// 12.4.5. Operator-mode section of the request content.
        /// </summary>
        public NTOPRMODX NTOPRMODX { get; set; }

        /// <summary>
        /// 12.4.5. Business section of the request content.
        /// </summary>
        public NTIRAJZPX NTIRAJZPX { get; set; }
    }

    /// <summary>
    /// 12.4.5. Content of the inward-remittance parent transaction request.
    /// </summary>
    public class NTIRAJZPX
    {
        /// <summary>
        /// Business reference number, C(30). The first three characters must follow
        /// this rule or the request may be rejected: 1. inward remittance: "IRT";
        /// 2. outward remittance: "ORT".
        /// </summary>
        public string YURREF { get; set; }

        /// <summary>
        /// Branch number, C(2). See appendix A.1.
        /// </summary>
        public string BBKNBR { get; set; }

        /// <summary>
        /// Customer number, C(10).
        /// </summary>
        public string CLTNBR { get; set; }

        /// <summary>
        /// Inward remittance number, C(16).
        /// </summary>
        public string IRMNBR { get; set; }

        /// <summary>
        /// Transaction currency, C(2).
        /// </summary>
        public string CCYNBR { get; set; }

        /// <summary>
        /// Centralized payment-institution flag, C(1). "1": centralized cross-border
        /// e-commerce FX collection/payment by a payment institution; any other
        /// value: not a payment-institution transaction.
        /// </summary>
        public string SPLFLG { get; set; }
    }
}
using Newtonsoft.Json;

namespace Tbus.Parser.NETStandard
{
    /// <summary>
    /// JSON-serializable pairing of a list index with an hour value.
    /// </summary>
    public class IndexedHour
    {
        [JsonProperty("index")]
        public int Index { get; set; }

        [JsonProperty("hour")]
        public int Hour { get; set; }

        /// <summary>Parameterless constructor for deserialization.</summary>
        public IndexedHour()
        {
        }

        public IndexedHour(int index, int hour)
        {
            Index = index;
            Hour = hour;
        }
    }
}
using QRest.Core.Terms;

namespace QRest.Semantics.OData.Parsing
{
    /// <summary>
    /// Composite OData term bundling the data query with an optional count query.
    /// </summary>
    public class ODataTermContainer : ITerm
    {
        /// <summary>Term producing the result values.</summary>
        public ITerm Data { get; set; }

        /// <summary>Optional term producing the total count.</summary>
        public ITerm Count { get; set; }

        /// <summary>"value=...;count=..." view; the count part is omitted when absent.</summary>
        public string SharedView =>
            Count == null
                ? $"value={Data.SharedView}"
                : $"value={Data.SharedView};count={Count.SharedView}";

        public string DebugView => SharedView;

        public string KeyView => SharedView;

        /// <summary>Cloning is not supported for this container.</summary>
        public ITerm Clone()
        {
            throw new System.NotSupportedException();
        }
    }
}
using System;
using System.Collections.Generic;
using Nebula;
using Nebula.Queue;
using Nebula.Queue.Implementation;
using SampleJob;

namespace SampleWorker
{
    // Console host for the sample Nebula worker: registers queues and a
    // processor, configures Mongo/Redis/Kafka, then runs until ENTER is pressed.
    internal class Program
    {
        private static void Main()
        {
            Console.WriteLine("Abaci.JobQueue.Worker worker service...");

            var nebulaContext = new NebulaContext();

            // Queue implementations available to the worker.
            nebulaContext.RegisterJobQueue(typeof(DelayedJobQueue<>), QueueType.Delayed);
            nebulaContext.RegisterJobQueue(typeof(KafkaJobQueue<>), QueueType.Kafka);

            // register processor by type
            // nebulaContext.RegisterJobProcessor(typeof(SampleJobProcessor),typeof(SampleJobStep));

            //register processor object
            nebulaContext.RegisterJobProcessor(new SampleJobProcessor(), typeof(SampleJobStep));

            nebulaContext.MongoConnectionString = "mongodb://localhost:27017/SampleJob";
            nebulaContext.RedisConnectionString = "localhost:6379";

            // Kafka settings — key names match librdkafka-style configuration
            // (presumably consumed by KafkaJobQueue; confirm).
            nebulaContext.KafkaConfig = new List<KeyValuePair<string, object>>
            {
                new KeyValuePair<string, object>("bootstrap.servers", "172.30.3.59:9101"),
                new KeyValuePair<string, object>("group.id", "testGroup"),
                new KeyValuePair<string, object>("auto.commit.interval.ms", 5000),
                new KeyValuePair<string, object>("enable.auto.commit", true),
                new KeyValuePair<string, object>("statistics.interval.ms", 60000),
                new KeyValuePair<string, object>("auto.offset.reset", "earliest"),
                new KeyValuePair<string, object>("queue.buffering.max.ms", 1),
                new KeyValuePair<string, object>("batch.num.messages", 1),
                new KeyValuePair<string, object>("fetch.wait.max.ms", 5000),
                new KeyValuePair<string, object>("fetch.min.bytes", 1),
            };

            nebulaContext.StartWorkerService();
            Console.WriteLine("Service started. Press ENTER to stop.");
            Console.ReadLine();

            // NOTE(review): "serivce" typo below is in a runtime string; left as-is here.
            Console.WriteLine("Stopping the serivce...");
            nebulaContext.StopWorkerService();
            Console.WriteLine("Service stopped, everything looks clean.");
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;

namespace SmartCode.Configuration.ConfigBuilders
{
    /// <summary>
    /// Base class for configuration builders: reads the file at
    /// <see cref="ConfigPath"/>, deserializes it into a <see cref="Project"/>
    /// (format-specific, see <see cref="Deserialize"/>), then fills in defaults.
    /// </summary>
    public abstract class ConfigBuilder : IConfigBuilder
    {
        public string ConfigPath { get; }
        public Project Project { get; set; }

        protected ConfigBuilder(string configPath)
        {
            ConfigPath = configPath;
        }

        /// <summary>
        /// Loads and deserializes the configuration, applies project-level and
        /// per-build-task defaults, and returns the resulting project.
        /// </summary>
        public Project Build()
        {
            using (StreamReader configStream = new StreamReader(ConfigPath))
            {
                var jsonConfigStr = configStream.ReadToEnd();
                Project = Deserialize(jsonConfigStr);
            }
            InitDefault();
            return Project;
        }

        /// <summary>
        /// Deserializes the raw configuration text into a <see cref="Project"/>.
        /// </summary>
        protected abstract Project Deserialize(string content);

        /// <summary>
        /// Applies fallbacks: project-level output/naming defaults first, then
        /// inherits project settings into each build task where the task left
        /// them unset.
        /// </summary>
        private void InitDefault()
        {
            if (Project.Output != null)
            {
                if (String.IsNullOrEmpty(Project.Output.Type))
                {
                    Project.Output.Type = "File";
                }
                // Project.Output is known non-null inside this branch; the
                // original's "Project.Output?.Mode" null-conditional was redundant.
                if (Project.Output.Mode == CreateMode.None)
                {
                    Project.Output.Mode = CreateMode.Incre;
                }
            }
            if (Project.NamingConverter == null)
            {
                Project.NamingConverter = NamingConverter.Default;
            }
            foreach (var buildTask in Project.BuildTasks.Values)
            {
                // Fix: only inherit output settings when the project actually
                // defines an Output. Previously this dereferenced Project.Output
                // unconditionally and threw a NullReferenceException when a task
                // had its own Output but the project had none.
                if (buildTask.Output != null && Project.Output != null)
                {
                    if (String.IsNullOrEmpty(buildTask.Output.Type))
                    {
                        buildTask.Output.Type = Project.Output.Type;
                    }
                    if (buildTask.Output.Mode == CreateMode.None)
                    {
                        buildTask.Output.Mode = Project.Output.Mode;
                    }
                }
                if (buildTask.TemplateEngine == null)
                {
                    buildTask.TemplateEngine = Project.TemplateEngine;
                }
                // Fix: guard against a missing project-level TemplateEngine before
                // inheriting individual fields from it (previously an unguarded
                // Project.TemplateEngine.Name access).
                else if (Project.TemplateEngine != null)
                {
                    if (String.IsNullOrEmpty(buildTask.TemplateEngine.Name))
                    {
                        buildTask.TemplateEngine.Name = Project.TemplateEngine.Name;
                    }
                    if (String.IsNullOrEmpty(buildTask.TemplateEngine.Root))
                    {
                        buildTask.TemplateEngine.Root = Project.TemplateEngine.Root;
                    }
                    if (String.IsNullOrEmpty(buildTask.TemplateEngine.Path))
                    {
                        buildTask.TemplateEngine.Path = Project.TemplateEngine.Path;
                    }
                }
                // Project.NamingConverter is guaranteed non-null here (defaulted above).
                if (buildTask.NamingConverter == null)
                {
                    buildTask.NamingConverter = Project.NamingConverter;
                }
                else
                {
                    if (buildTask.NamingConverter.Table == null)
                    {
                        buildTask.NamingConverter.Table = Project.NamingConverter.Table;
                    }
                    if (buildTask.NamingConverter.View == null)
                    {
                        buildTask.NamingConverter.View = Project.NamingConverter.View;
                    }
                    if (buildTask.NamingConverter.Column == null)
                    {
                        buildTask.NamingConverter.Column = Project.NamingConverter.Column;
                    }
                }
                if (Project.TableFilter != null)
                {
                    if (buildTask.IgnoreTables == null)
                    {
                        buildTask.IgnoreTables = Project.TableFilter.IgnoreTables;
                    }
                    if (buildTask.IncludeTables == null)
                    {
                        buildTask.IncludeTables = Project.TableFilter.IncludeTables;
                    }
                    if (!buildTask.IgnoreView.HasValue)
                    {
                        buildTask.IgnoreView = Project.TableFilter.IgnoreView;
                    }
                    if (!buildTask.IgnoreNoPKTable.HasValue)
                    {
                        buildTask.IgnoreNoPKTable = Project.TableFilter.IgnoreNoPKTable;
                    }
                }
            }
        }
    }
}
using SharpBucket.V2.Pocos;
using Shouldly;

namespace SharpBucketTests.V2.Pocos
{
    /// <summary>
    /// Shouldly-style assertion helpers for <see cref="RepositoryLinks"/> pocos.
    /// </summary>
    public static class RepositoryLinksAssertions
    {
        /// <summary>
        /// Asserts that the links object and every one of its individual links
        /// are filled, then returns the same instance so calls can be chained
        /// fluently. The first unfilled link encountered fails the test.
        /// </summary>
        public static RepositoryLinks ShouldBeFilled(this RepositoryLinks links)
        {
            links.ShouldNotBeNull();
            links.avatar.ShouldBeFilled();
            links.branches.ShouldBeFilled();
            // clone is a collection of links, hence the ShouldAllBeFilled variant.
            links.clone.ShouldAllBeFilled();
            links.commits.ShouldBeFilled();
            links.downloads.ShouldBeFilled();
            links.forks.ShouldBeFilled();
            links.hooks.ShouldBeFilled();
            links.html.ShouldBeFilled();
            links.pullrequests.ShouldBeFilled();
            links.self.ShouldBeFilled();
            links.source.ShouldBeFilled();
            links.tags.ShouldBeFilled();
            links.watchers.ShouldBeFilled();
            return links;
        }
    }
}