content
stringlengths
5
1.04M
avg_line_length
float64
1.75
12.9k
max_line_length
int64
2
244k
alphanum_fraction
float64
0
0.98
licenses
list
repository_name
stringlengths
7
92
path
stringlengths
3
249
size
int64
5
1.04M
lang
stringclasses
2 values
/// This code was generated by
/// \ / _    _  _|   _  _
///  | (_)\/(_)(_|\/| |(/_  v1.0.0
///       /       /

using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Converters;

namespace Twilio.Rest.Sync.V1.Service
{
    /// <summary>
    /// Options for fetching a single Sync Map resource.
    /// </summary>
    public class FetchSyncMapOptions : IOptions<SyncMapResource>
    {
        /// <summary>
        /// The SID of the Sync Service with the Sync Map resource to fetch
        /// </summary>
        public string PathServiceSid { get; }

        /// <summary>
        /// The SID of the Sync Map resource to fetch
        /// </summary>
        public string PathSid { get; }

        /// <summary>
        /// Construct a new FetchSyncMapOptions
        /// </summary>
        /// <param name="pathServiceSid"> The SID of the Sync Service with the Sync Map resource to fetch </param>
        /// <param name="pathSid"> The SID of the Sync Map resource to fetch </param>
        public FetchSyncMapOptions(string pathServiceSid, string pathSid)
        {
            PathServiceSid = pathServiceSid;
            PathSid = pathSid;
        }

        /// <summary>
        /// Generate the necessary parameters. Fetch takes no body parameters,
        /// so the list is always empty.
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams() => new List<KeyValuePair<string, string>>();
    }

    /// <summary>
    /// Options for deleting a Sync Map resource.
    /// </summary>
    public class DeleteSyncMapOptions : IOptions<SyncMapResource>
    {
        /// <summary>
        /// The SID of the Sync Service with the Sync Map resource to delete
        /// </summary>
        public string PathServiceSid { get; }

        /// <summary>
        /// The SID of the Sync Map resource to delete
        /// </summary>
        public string PathSid { get; }

        /// <summary>
        /// Construct a new DeleteSyncMapOptions
        /// </summary>
        /// <param name="pathServiceSid"> The SID of the Sync Service with the Sync Map resource to delete </param>
        /// <param name="pathSid"> The SID of the Sync Map resource to delete </param>
        public DeleteSyncMapOptions(string pathServiceSid, string pathSid)
        {
            PathServiceSid = pathServiceSid;
            PathSid = pathSid;
        }

        /// <summary>
        /// Generate the necessary parameters. Delete takes no body parameters,
        /// so the list is always empty.
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams() => new List<KeyValuePair<string, string>>();
    }

    /// <summary>
    /// Options for creating a Sync Map resource.
    /// </summary>
    public class CreateSyncMapOptions : IOptions<SyncMapResource>
    {
        /// <summary>
        /// The SID of the Sync Service to create the Sync Map in
        /// </summary>
        public string PathServiceSid { get; }

        /// <summary>
        /// An application-defined string that uniquely identifies the resource
        /// </summary>
        public string UniqueName { get; set; }

        /// <summary>
        /// An alias for collection_ttl
        /// </summary>
        public int? Ttl { get; set; }

        /// <summary>
        /// How long, in seconds, before the Sync Map expires and is deleted
        /// </summary>
        public int? CollectionTtl { get; set; }

        /// <summary>
        /// Construct a new CreateSyncMapOptions
        /// </summary>
        /// <param name="pathServiceSid"> The SID of the Sync Service to create the Sync Map in </param>
        public CreateSyncMapOptions(string pathServiceSid)
        {
            PathServiceSid = pathServiceSid;
        }

        /// <summary>
        /// Generate the necessary parameters. Only properties that were
        /// explicitly set (non-null) are serialized.
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams()
        {
            var parameters = new List<KeyValuePair<string, string>>();

            if (UniqueName != null)
            {
                parameters.Add(new KeyValuePair<string, string>("UniqueName", UniqueName));
            }

            if (Ttl != null)
            {
                parameters.Add(new KeyValuePair<string, string>("Ttl", Ttl.ToString()));
            }

            if (CollectionTtl != null)
            {
                parameters.Add(new KeyValuePair<string, string>("CollectionTtl", CollectionTtl.ToString()));
            }

            return parameters;
        }
    }

    /// <summary>
    /// Options for updating a Sync Map resource.
    /// </summary>
    public class UpdateSyncMapOptions : IOptions<SyncMapResource>
    {
        /// <summary>
        /// The SID of the Sync Service with the Sync Map resource to update
        /// </summary>
        public string PathServiceSid { get; }

        /// <summary>
        /// The SID of the Sync Map resource to update
        /// </summary>
        public string PathSid { get; }

        /// <summary>
        /// An alias for collection_ttl
        /// </summary>
        public int? Ttl { get; set; }

        /// <summary>
        /// How long, in seconds, before the Sync Map expires and is deleted
        /// </summary>
        public int? CollectionTtl { get; set; }

        /// <summary>
        /// Construct a new UpdateSyncMapOptions
        /// </summary>
        /// <param name="pathServiceSid"> The SID of the Sync Service with the Sync Map resource to update </param>
        /// <param name="pathSid"> The SID of the Sync Map resource to update </param>
        public UpdateSyncMapOptions(string pathServiceSid, string pathSid)
        {
            PathServiceSid = pathServiceSid;
            PathSid = pathSid;
        }

        /// <summary>
        /// Generate the necessary parameters. Only properties that were
        /// explicitly set (non-null) are serialized.
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams()
        {
            var parameters = new List<KeyValuePair<string, string>>();

            if (Ttl != null)
            {
                parameters.Add(new KeyValuePair<string, string>("Ttl", Ttl.ToString()));
            }

            if (CollectionTtl != null)
            {
                parameters.Add(new KeyValuePair<string, string>("CollectionTtl", CollectionTtl.ToString()));
            }

            return parameters;
        }
    }

    /// <summary>
    /// Options for reading pages of Sync Map resources.
    /// </summary>
    public class ReadSyncMapOptions : ReadOptions<SyncMapResource>
    {
        /// <summary>
        /// The SID of the Sync Service with the Sync Map resources to read
        /// </summary>
        public string PathServiceSid { get; }

        /// <summary>
        /// Construct a new ReadSyncMapOptions
        /// </summary>
        /// <param name="pathServiceSid"> The SID of the Sync Service with the Sync Map resources to read </param>
        public ReadSyncMapOptions(string pathServiceSid)
        {
            PathServiceSid = pathServiceSid;
        }

        /// <summary>
        /// Generate the necessary parameters. Only the paging size is
        /// serialized, and only when it was explicitly set.
        /// </summary>
        public override List<KeyValuePair<string, string>> GetParams()
        {
            var parameters = new List<KeyValuePair<string, string>>();

            if (PageSize != null)
            {
                parameters.Add(new KeyValuePair<string, string>("PageSize", PageSize.ToString()));
            }

            return parameters;
        }
    }
}
31.894273
115
0.545304
[ "MIT" ]
BrimmingDev/twilio-csharp
src/Twilio/Rest/Sync/V1/Service/SyncMapOptions.cs
7,240
C#
using MicroGarden.Settings.Core.Schemas.Models;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace MicroGarden.Settings.Core.Schemas.Services.Provider
{
    /// <summary>
    /// Read-only provider of settings schemas.
    /// </summary>
    public interface ISettingsSchemaProvider
    {
        /// <summary>
        /// Lists all available settings entities.
        /// </summary>
        Task<IReadOnlyList<SettingsEntity>> List();

        /// <summary>
        /// Gets the settings entity with the given identifier.
        /// </summary>
        /// <param name="id">Identifier of the schema to retrieve.</param>
        /// <exception cref="SchemaNotFoundException">Thrown when a schema with the given id was not found.</exception>
        Task<SettingsEntity> Get(string id);
    }
}
29.066667
101
0.75
[ "MIT" ]
chaliy/microgarden-settings
src/MicroGarden.Settings.Core/Schemas/Services/Provider/ISettingsSchemaProvider.cs
436
C#
namespace DevFreela.Application.ViewModels
{
    /// <summary>
    /// Input model carrying the data needed to create a project.
    /// </summary>
    public class CreateProjectViewModel
    {
        // Display title of the project.
        public string Title { get; set; }

        // Free-text description of the project.
        public string Description { get; set; }

        // Identifier of the client the project belongs to.
        public int IdClient { get; set; }

        // Identifier of the freelancer assigned to the project.
        public int IdFreelancer { get; set; }

        // Total cost of the project; decimal to avoid floating-point rounding of currency.
        public decimal TotalCost { get; set; }
    }
}
27.083333
47
0.621538
[ "MIT" ]
ClaudioSiqueira/DevFreela
DevFreela.Application/ViewModels/CreateProjectViewModel.cs
327
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;

/// <summary>
/// Door trigger: while the player is inside the trigger area, pressing E
/// loads the scene named in <see cref="nextScene"/>. The attached Animator's
/// "isClose" flag tracks whether the player is near the door.
/// </summary>
public class YggDoor : MonoBehaviour
{
    // Name of the scene to load when the door is used (set in the Inspector).
    public string nextScene;

    private bool isPlayerIn = false;

    // Cached Animator; avoids a GetComponent lookup on every trigger event.
    private Animator animator;

    void Awake()
    {
        animator = GetComponent<Animator>();
    }

    void Update()
    {
        if (Input.GetKeyDown(KeyCode.E) && isPlayerIn)
        {
            SceneManager.LoadScene(nextScene);
        }
    }

    void OnTriggerEnter2D(Collider2D other)
    {
        // CompareTag is the idiomatic Unity check: no string allocation,
        // and it raises an error if the tag is undefined (catches typos).
        if (other.CompareTag("Player"))
        {
            animator.SetBool("isClose", true);
            isPlayerIn = true;
        }
    }

    void OnTriggerExit2D(Collider2D other)
    {
        if (other.CompareTag("Player"))
        {
            animator.SetBool("isClose", false);
            isPlayerIn = false;
        }
    }
}
20.6
57
0.662968
[ "MIT" ]
yosonlineapp/blog2dgame
Assets/Scripts/Objects/YggDoor.cs
723
C#
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. using UnityEditor; using UnityEngine; namespace Microsoft.MixedReality.Toolkit.Utilities { /// <summary> /// A Grid Object Collection is simply a set of child objects organized with some /// layout parameters. The collection can be used to quickly create /// control panels or sets of prefab/objects. /// </summary> [HelpURL("https://docs.microsoft.com/windows/mixed-reality/mrtk-unity/features/ux-building-blocks/object-collection")] [AddComponentMenu("Scripts/MRTK/SDK/GridObjectCollection")] [ExecuteAlways] public partial class GridObjectCollection : BaseObjectCollection { [Tooltip("Type of surface to map the collection to")] [SerializeField] private ObjectOrientationSurfaceType surfaceType = ObjectOrientationSurfaceType.Plane; /// <summary> /// Type of surface to map the collection to. /// </summary> public ObjectOrientationSurfaceType SurfaceType { get { return surfaceType; } set { surfaceType = value; } } [Tooltip("Should the objects in the collection be rotated / how should they be rotated")] [SerializeField] private OrientationType orientType = OrientationType.None; /// <summary> /// Should the objects in the collection face the origin of the collection /// </summary> public OrientationType OrientType { get { return orientType; } set { orientType = value; } } [Tooltip("Specify direction in which children are laid out")] [SerializeField] private LayoutOrder layout = LayoutOrder.RowThenColumn; /// <summary> /// Specify direction in which children are laid out /// </summary> public LayoutOrder Layout { get { return layout; } set { layout = value; } } [SerializeField, Tooltip("Where the grid is anchored relative to local origin")] private LayoutAnchor anchor = LayoutAnchor.MiddleCenter; /// <summary> /// Where the grid is anchored relative to local origin /// </summary> public LayoutAnchor Anchor { get { return anchor; } set { anchor = value; } } [SerializeField, Tooltip("Whether anchoring 
occurs along an objects axis or not")] private bool anchorAlongAxis = false; /// <summary> /// Whether anchoring occurs along an objects axis or not /// </summary> public bool AnchorAlongAxis { get { return anchorAlongAxis; } set { anchorAlongAxis = value; } } [SerializeField, Tooltip("How the columns are aligned in the grid")] private LayoutHorizontalAlignment columnAlignment = LayoutHorizontalAlignment.Left; /// <summary> /// How the columns are aligned in the grid /// </summary> public LayoutHorizontalAlignment ColumnAlignment { get { return columnAlignment; } set { columnAlignment = value; } } [SerializeField, Tooltip("How the rows are aligned in the grid")] private LayoutVerticalAlignment rowAlignment = LayoutVerticalAlignment.Top; /// <summary> /// How the rows are aligned in the grid /// </summary> public LayoutVerticalAlignment RowAlignment { get { return rowAlignment; } set { rowAlignment = value; } } [Range(0.05f, 100.0f)] [Tooltip("Radius for the sphere or cylinder")] [SerializeField] private float radius = 2f; /// <summary> /// This is the radius of either the Cylinder or Sphere mapping and is ignored when using the plane mapping. /// </summary> public float Radius { get { return radius; } set { radius = value; } } [SerializeField] [Tooltip("Radial range for radial layout")] [Range(5f, 360f)] private float radialRange = 180f; /// <summary> /// This is the radial range for creating a radial fan layout. /// </summary> public float RadialRange { get { return radialRange; } set { radialRange = value; } } [SerializeField] [Tooltip("Distance for plane layout")] [Range(0f, 100f)] private float distance = 0f; /// <summary> /// This is the Distance for an offset for the Plane mapping and is ignored for the other mappings. 
/// </summary> public float Distance { get { return distance; } set { distance = value; } } private const int DefaultValueRowsCols = 3; [Tooltip("Number of rows per column")] [SerializeField] private int rows = DefaultValueRowsCols; /// <summary> /// Number of rows per column. Can only be assigned when layout type is /// RowsThenColumns /// </summary> public int Rows { get { return rows; } set { if (Layout == LayoutOrder.ColumnThenRow) { Debug.LogError("When using ColumnThenRow layout, assign Columns instead of Rows."); return; } rows = value; } } [Tooltip("Number of columns per row")] [SerializeField] private int columns = DefaultValueRowsCols; /// <summary> /// Number of columns per row. Can only be assigned when layout type is /// ColumnsThenRows /// </summary> public int Columns { get { return columns; } set { if (Layout == LayoutOrder.RowThenColumn) { Debug.LogError("When using RowThenColumn layout, assign Rows instead of Columns."); return; } columns = value; } } [Tooltip("Width of cell per object")] [SerializeField] private float cellWidth = 0.5f; /// <summary> /// Width of the cell per object in the collection. /// </summary> public float CellWidth { get { return cellWidth; } set { cellWidth = value; } } [Tooltip("Height of cell per object")] [SerializeField] private float cellHeight = 0.5f; /// <summary> /// Height of the cell per object in the collection. 
/// </summary> public float CellHeight { get { return cellHeight; } set { cellHeight = value; } } /// <summary> /// Total Width of collection /// </summary> public float Width => Columns * CellWidth; /// <summary> /// Total Height of collection /// </summary> public float Height => rows * CellHeight; /// <summary> /// Reference mesh to use for rendering the sphere layout /// </summary> public Mesh SphereMesh { get; set; } /// <summary> /// Reference mesh to use for rendering the cylinder layout /// </summary> public Mesh CylinderMesh { get; set; } protected Vector2 HalfCell; /// <summary> /// Overriding base function for laying out all the children when UpdateCollection is called. /// </summary> protected override void LayoutChildren() { var nodeGrid = new Vector3[NodeList.Count]; Vector3 newPos; // Now lets lay out the grid if (Layout == LayoutOrder.RowThenColumn) { columns = Mathf.CeilToInt((float)NodeList.Count / rows); } else if (Layout == LayoutOrder.ColumnThenRow) { rows = Mathf.CeilToInt((float)NodeList.Count / columns); } HalfCell = new Vector2(CellWidth * 0.5f, CellHeight * 0.5f); // First start with a grid then project onto surface ResolveGridLayout(nodeGrid, layout); switch (SurfaceType) { case ObjectOrientationSurfaceType.Plane: for (int i = 0; i < NodeList.Count; i++) { ObjectCollectionNode node = NodeList[i]; newPos = nodeGrid[i]; newPos.z = distance; node.Transform.localPosition = newPos; UpdateNodeFacing(node); NodeList[i] = node; } break; case ObjectOrientationSurfaceType.Cylinder: for (int i = 0; i < NodeList.Count; i++) { ObjectCollectionNode node = NodeList[i]; newPos = VectorExtensions.CylindricalMapping(nodeGrid[i], radius); node.Transform.localPosition = newPos; UpdateNodeFacing(node); NodeList[i] = node; } break; case ObjectOrientationSurfaceType.Sphere: for (int i = 0; i < NodeList.Count; i++) { ObjectCollectionNode node = NodeList[i]; newPos = VectorExtensions.SphericalMapping(nodeGrid[i], radius); node.Transform.localPosition = newPos; 
UpdateNodeFacing(node); NodeList[i] = node; } break; case ObjectOrientationSurfaceType.Radial: int curColumn = 0; int curRow = 1; for (int i = 0; i < NodeList.Count; i++) { ObjectCollectionNode node = NodeList[i]; newPos = VectorExtensions.RadialMapping(nodeGrid[i], radialRange, radius, curRow, rows, curColumn, Columns); if (curColumn == (Columns - 1)) { curColumn = 0; ++curRow; } else { ++curColumn; } node.Transform.localPosition = newPos; UpdateNodeFacing(node); NodeList[i] = node; } break; } } protected void ResolveGridLayout(Vector3[] grid, LayoutOrder order) { int cellCounter = 0; int xMax, yMax; switch (order) { case LayoutOrder.RowThenColumn: xMax = Columns; yMax = Rows; break; case LayoutOrder.ColumnThenRow: xMax = Columns; yMax = Rows; break; case LayoutOrder.Vertical: xMax = 1; yMax = NodeList.Count; break; case LayoutOrder.Horizontal: xMax = NodeList.Count; yMax = 1; break; default: xMax = Mathf.CeilToInt((float)NodeList.Count / rows); yMax = rows; break; } float startOffsetX = (xMax * 0.5f) * CellWidth; if (anchor == LayoutAnchor.BottomLeft || anchor == LayoutAnchor.UpperLeft || anchor == LayoutAnchor.MiddleLeft) { startOffsetX = anchorAlongAxis ? 0.5f * CellWidth : 0; } else if (anchor == LayoutAnchor.BottomRight || anchor == LayoutAnchor.UpperRight || anchor == LayoutAnchor.MiddleRight) { startOffsetX = anchorAlongAxis ? (xMax - 0.5f) * CellWidth : xMax * CellWidth; } float startOffsetY = (yMax * 0.5f) * CellHeight; if (anchor == LayoutAnchor.UpperLeft || anchor == LayoutAnchor.UpperCenter || anchor == LayoutAnchor.UpperRight) { startOffsetY = anchorAlongAxis ? 0.5f * CellHeight : 0; } else if (anchor == LayoutAnchor.BottomLeft || anchor == LayoutAnchor.BottomCenter || anchor == LayoutAnchor.BottomRight) { startOffsetY = anchorAlongAxis ? 
(yMax - 0.5f) * CellHeight : yMax * CellHeight; } float alignmentOffsetX = 0; float alignmentOffsetY = 0; if (layout == LayoutOrder.ColumnThenRow) { for (int y = 0; y < yMax; y++) { for (int x = 0; x < xMax; x++) { if (y == yMax - 1) { switch (ColumnAlignment) { case LayoutHorizontalAlignment.Left: alignmentOffsetX = 0; break; case LayoutHorizontalAlignment.Center: alignmentOffsetX = CellWidth * ((xMax - (NodeList.Count % xMax)) % xMax) * 0.5f; break; case LayoutHorizontalAlignment.Right: alignmentOffsetX = CellWidth * ((xMax - (NodeList.Count % xMax)) % xMax); break; } } if (cellCounter < NodeList.Count) { grid[cellCounter].Set((-startOffsetX + (x * CellWidth) + HalfCell.x) + NodeList[cellCounter].Offset.x + alignmentOffsetX, (startOffsetY - (y * CellHeight) - HalfCell.y) + NodeList[cellCounter].Offset.y + alignmentOffsetY, 0.0f); } cellCounter++; } } } else { for (int x = 0; x < xMax; x++) { for (int y = 0; y < yMax; y++) { if (x == xMax - 1) { switch (RowAlignment) { case LayoutVerticalAlignment.Top: alignmentOffsetY = 0; break; case LayoutVerticalAlignment.Middle: alignmentOffsetY = -CellHeight * ((yMax - (NodeList.Count % yMax)) % yMax) * 0.5f; break; case LayoutVerticalAlignment.Bottom: alignmentOffsetY = -CellHeight * ((yMax - (NodeList.Count % yMax)) % yMax); break; } } if (cellCounter < NodeList.Count) { grid[cellCounter].Set((-startOffsetX + (x * CellWidth) + HalfCell.x) + NodeList[cellCounter].Offset.x + alignmentOffsetX, (startOffsetY - (y * CellHeight) - HalfCell.y) + NodeList[cellCounter].Offset.y + alignmentOffsetY, 0.0f); } cellCounter++; } } } } /// <summary> /// Update the facing of a node given the nodes new position for facing origin with node and orientation type /// </summary> protected void UpdateNodeFacing(ObjectCollectionNode node) { Vector3 centerAxis; Vector3 pointOnAxisNearestNode; switch (OrientType) { case OrientationType.FaceOrigin: node.Transform.rotation = Quaternion.LookRotation(node.Transform.position - transform.position, 
transform.up); break; case OrientationType.FaceOriginReversed: node.Transform.rotation = Quaternion.LookRotation(transform.position - node.Transform.position, transform.up); break; case OrientationType.FaceCenterAxis: centerAxis = Vector3.Project(node.Transform.position - transform.position, transform.up); pointOnAxisNearestNode = transform.position + centerAxis; node.Transform.rotation = Quaternion.LookRotation(node.Transform.position - pointOnAxisNearestNode, transform.up); break; case OrientationType.FaceCenterAxisReversed: centerAxis = Vector3.Project(node.Transform.position - transform.position, transform.up); pointOnAxisNearestNode = transform.position + centerAxis; node.Transform.rotation = Quaternion.LookRotation(pointOnAxisNearestNode - node.Transform.position, transform.up); break; case OrientationType.FaceParentFoward: node.Transform.forward = transform.rotation * Vector3.forward; break; case OrientationType.FaceParentForwardReversed: node.Transform.forward = transform.rotation * Vector3.back; break; case OrientationType.FaceParentUp: node.Transform.forward = transform.rotation * Vector3.up; break; case OrientationType.FaceParentDown: node.Transform.forward = transform.rotation * Vector3.down; break; case OrientationType.None: break; default: Debug.LogWarning("OrientationType out of range"); break; } } // Gizmos to draw when the Collection is selected. 
protected virtual void OnDrawGizmosSelected() { Vector3 scale = (2f * radius) * Vector3.one; switch (surfaceType) { case ObjectOrientationSurfaceType.Plane: break; case ObjectOrientationSurfaceType.Cylinder: Gizmos.color = Color.green; Gizmos.DrawWireMesh(CylinderMesh, transform.position, transform.rotation, scale); break; case ObjectOrientationSurfaceType.Sphere: Gizmos.color = Color.green; Gizmos.DrawWireMesh(SphereMesh, transform.position, transform.rotation, scale); break; } } private void Awake() { #if UNITY_EDITOR if (!EditorApplication.isPlaying) { if (assetVersion != CurrentAssetVersion) { Undo.RecordObject(this, "version patching"); PerformVersionPatching(); } } #endif } #region asset version migration private const int CurrentAssetVersion = 1; [SerializeField] [HideInInspector] private int assetVersion = 0; private void PerformVersionPatching() { if (assetVersion == 0) { string friendlyName = GetUserFriendlyName(); // Migrate from version 0 to version 1 UpgradeAssetToVersion1(); assetVersion = 1; } assetVersion = CurrentAssetVersion; } /// <summary> /// Version 1 of GridObjectCollection introduced in MRTK 2.2 when /// incorrect semantics of "ColumnsThenRows" layout was fixed. /// See https://github.com/microsoft/MixedRealityToolkit-Unity/issues/6773#issuecomment-561918891 /// for details. /// </summary> private void UpgradeAssetToVersion1() { if (Layout == LayoutOrder.ColumnThenRow) { Layout = LayoutOrder.RowThenColumn; var friendlyName = GetUserFriendlyName(); Debug.Log($"[MRTK 2.2 asset upgrade] Changing LayoutOrder for {friendlyName} from ColumnThenRow to RowThenColumn. See https://github.com/microsoft/MixedRealityToolkit-Unity/issues/6773#issuecomment-561918891 for details."); } } private string GetUserFriendlyName() { string objectName = gameObject.name; if (gameObject.transform.parent != null) { objectName += " (parent " + transform.parent.gameObject.name + ")"; } return objectName; } #endregion } }
36.840753
239
0.497327
[ "MIT" ]
CDiaz-MS/MixedRealityToolkit-Unity
Assets/MRTK/SDK/Features/UX/Scripts/Collections/GridObjectCollection.cs
21,517
C#
using Microsoft.AspNetCore.Identity;
using Abp.Authorization;
using Abp.Authorization.Users;
using Abp.Configuration;
using Abp.Configuration.Startup;
using Abp.Dependency;
using Abp.Domain.Repositories;
using Abp.Domain.Uow;
using Abp.Zero.Configuration;
using Concise_CMS.Authorization.Roles;
using Concise_CMS.Authorization.Users;
using Concise_CMS.MultiTenancy;

namespace Concise_CMS.Authorization
{
    /// <summary>
    /// Application log-in manager. A pure pass-through specialization of ABP's
    /// <c>AbpLogInManager</c> closed over this app's Tenant, Role and User types;
    /// all dependencies are forwarded unchanged to the base constructor.
    /// </summary>
    public class LogInManager : AbpLogInManager<Tenant, Role, User>
    {
        public LogInManager(
            UserManager userManager,
            IMultiTenancyConfig multiTenancyConfig,
            IRepository<Tenant> tenantRepository,
            IUnitOfWorkManager unitOfWorkManager,
            ISettingManager settingManager,
            IRepository<UserLoginAttempt, long> userLoginAttemptRepository,
            IUserManagementConfig userManagementConfig,
            IIocResolver iocResolver,
            IPasswordHasher<User> passwordHasher,
            RoleManager roleManager,
            UserClaimsPrincipalFactory claimsPrincipalFactory)
            : base(
                userManager,
                multiTenancyConfig,
                tenantRepository,
                unitOfWorkManager,
                settingManager,
                userLoginAttemptRepository,
                userManagementConfig,
                iocResolver,
                passwordHasher,
                roleManager,
                claimsPrincipalFactory)
        {
        }
    }
}
33.108696
76
0.641497
[ "MIT" ]
xhl592576605/Concise_CMS
src/concise_cms-aspnet-core/src/Concise_CMS.Core/Authorization/LoginManager.cs
1,523
C#
/* Copyright (c) 2019, Lars Brubaker, John Lewin All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project. 
*/ using System; using System.Collections.Generic; using System.Linq; using Markdig.Agg; using MatterHackers.Agg; using MatterHackers.Agg.Image; using MatterHackers.Agg.UI; using MatterHackers.Agg.VertexSource; using MatterHackers.MatterControl.PartPreviewWindow; using MatterHackers.VectorMath; namespace MatterHackers.MatterControl.CustomWidgets { public class SettingsRow : FlowLayoutWidget { protected GuiWidget overrideIndicator; protected const bool debugLayout = false; protected ThemeConfig theme; private bool _fullRowSelect = false; protected bool mouseInBounds = false; private Color hoverColor; private Popover popoverBubble = null; private static Popover activePopover = null; private SystemWindow systemWindow = null; protected ImageWidget imageWidget; public GuiWidget ActionWidget { get; set; } public SettingsRow(string title, string helpText, ThemeConfig theme, ImageBuffer icon = null, bool enforceGutter = false, bool fullRowSelect = false) { using (this.LayoutLock()) { this.HelpText = helpText ?? 
""; this.theme = theme; this.FullRowSelect = fullRowSelect; this.HAnchor = HAnchor.Stretch; this.VAnchor = VAnchor.Fit; this.MinimumSize = new Vector2(0, theme.ButtonHeight); this.Border = new BorderDouble(bottom: 1); this.BorderColor = theme.RowBorder; hoverColor = theme.MinimalShade; if (icon != null) { this.AddChild( imageWidget = new ImageWidget(icon) { Margin = new BorderDouble(right: 6, left: 6), VAnchor = VAnchor.Center }); } else if (enforceGutter) { // Add an icon placeholder to get consistent label indenting on items lacking icons this.AddChild(new GuiWidget() { Width = 24 + 12, Height = 24, Margin = new BorderDouble(0) }); } else { this.AddChild(overrideIndicator = new GuiWidget() { VAnchor = VAnchor.Stretch, HAnchor = HAnchor.Absolute, Width = 3, Margin = new BorderDouble(right: 6), Name = $"{title} OverrideIndicator", }); } textLabel = SettingsRow.CreateSettingsLabel(title, helpText, theme.TextColor); this.AddChild(textLabel); textLabel.Selectable = false; this.spacer = this.AddChild(new HorizontalSpacer()); } this.PerformLayout(); } public SettingsRow SetTextRightMargin(SafeList<SettingsRow> rows) { var spacing = 11 * GuiWidget.DeviceScale; var maxTextWidth = 0.0; foreach (var row in rows) { maxTextWidth = Math.Max(maxTextWidth, row.textLabel.Width); } var newWidth = spacing + maxTextWidth; foreach (var row in rows) { row.spacer.HAnchor = HAnchor.Absolute; row.spacer.Width = Math.Max(0, newWidth - row.textLabel.Width); } return this; } public bool FullRowSelect { get => _fullRowSelect; set { if (_fullRowSelect != value) { _fullRowSelect = value; foreach (var child in Children) { child.Selectable = !_fullRowSelect; } this.Cursor = _fullRowSelect ? 
Cursors.Hand : Cursors.Default; } } } public SettingsRow(string title, string helpText, GuiWidget guiWidget, ThemeConfig theme) : this(title, helpText, theme) { this.Padding = new BorderDouble(right: theme.DefaultContainerPadding); guiWidget.VAnchor |= VAnchor.Center; this.AddChild(guiWidget); } public static GuiWidget CreateSettingsLabel(string label, string helpText, Color textColor) { return new TextWidget(label, textColor: textColor, pointSize: 10) { AutoExpandBoundsToText = true, VAnchor = VAnchor.Center, }; } public string HelpText { get; protected set; } public ArrowDirection ArrowDirection { get; set; } = ArrowDirection.Right; public override GuiWidget AddChild(GuiWidget childToAdd, int indexInChildrenList = -1) { childToAdd.Selectable = this.FullRowSelect == false; return base.AddChild(childToAdd, indexInChildrenList); } protected override void OnClick(MouseEventArgs mouseEvent) { if (ActionWidget != null && mouseEvent.Button == MouseButtons.Left) { ActionWidget.InvokeClick(); return; } base.OnClick(mouseEvent); } public override Color BackgroundColor { get { if (this.ContainsFirstUnderMouseRecursive()) { return hoverColor; } return base.BackgroundColor; } set => base.BackgroundColor = value; } public int BorderRadius { get; set; } = 3; public override void OnLoad(EventArgs args) { // The top level SystemWindow - due to single window implementation details, multiple SystemWindow parents may exist - proceed to the topmost one systemWindow = this.Parents<SystemWindow>().LastOrDefault(); base.OnLoad(args); } private static int popupCount; private bool popupScheduled = false; private GuiWidget spacer; private GuiWidget textLabel; public override void OnMouseEnterBounds(MouseEventArgs mouseEvent) { mouseInBounds = true; this.Invalidate(); if (!popupScheduled) { UiThread.RunOnIdle(() => { void Popover_Closed (object sender, EventArgs e) { popupCount--; if (sender is GuiWidget widget) { widget.Closed -= Popover_Closed; } } if (mouseInBounds) { popupCount++; 
this.ShowPopover(this); if (popoverBubble != null) { popoverBubble.Closed += Popover_Closed; } this.Invalidate(); } popupScheduled = false; }, popupCount > 0 ? ToolTipManager.ReshowDelay : ToolTipManager.InitialDelay); } popupScheduled = true; base.OnMouseEnterBounds(mouseEvent); } public override void OnMouseLeaveBounds(MouseEventArgs mouseEvent) { mouseInBounds = false; this.Invalidate(); if (popoverBubble != null) { // Allow a moment to elapse to determine if the mouse is within the bubble or has returned to this control, close otherwise UiThread.RunOnIdle(() => { // Skip close if we are FirstWidgetUnderMouse if (this.FirstWidgetUnderMouse) { // Often we get OnMouseLeaveBounds when the mouse is still within bounds (as child mouse events are processed) // If the mouse is in bounds of this widget, abort the popover close below return; } // Close the popover as long as it doesn't contain the mouse if (!popoverBubble.ContainsFirstUnderMouseRecursive() && !PopupWidget.DebugKeepOpen) { // Close any active popover bubble popoverBubble?.Close(); } }, 1); } base.OnMouseLeaveBounds(mouseEvent); } protected virtual void ExtendPopover(ClickablePopover popover) { } public override void OnDrawBackground(Graphics2D graphics2D) { if (this.BorderRadius > 0) { var rect = new RoundedRect(this.LocalBounds, this.BorderRadius); graphics2D.Render(rect, this.BackgroundColor); } else { base.OnDrawBackground(graphics2D); } } protected void ShowPopover(SettingsRow settingsRow) { // Only display popovers when we're the active widget, exit if we're not first under mouse if (systemWindow == null || !this.ContainsFirstUnderMouseRecursive() || string.IsNullOrEmpty(settingsRow.HelpText)) { return; } int arrowOffset = (int)(settingsRow.Height / 2); var popover = new ClickablePopover(this.ArrowDirection, new BorderDouble(15, 10), 7, arrowOffset) { HAnchor = HAnchor.Fit, VAnchor = VAnchor.Fit, TagColor = theme.ResolveColor(AppContext.Theme.BackgroundColor, 
AppContext.Theme.AccentMimimalOverlay.WithAlpha(50)), }; GuiWidget contentWidget; if (true) { popover.HAnchor = HAnchor.Absolute; popover.Width = 300 * GuiWidget.DeviceScale; var markdown = new MarkdownWidget(theme); markdown.HAnchor = HAnchor.Stretch; markdown.VAnchor = VAnchor.Fit; markdown.Markdown = settingsRow.HelpText; contentWidget = markdown; } else // this is what it was before { contentWidget = new WrappedTextWidget(settingsRow.HelpText, pointSize: theme.DefaultFontSize - 1, textColor: AppContext.Theme.TextColor) { Width = 300 * GuiWidget.DeviceScale, HAnchor = HAnchor.Fit, }; } popover.AddChild(contentWidget); bool alignLeft = this.ArrowDirection == ArrowDirection.Right; // after a certain amount of time make the popover close (just like a tool tip) double closeSeconds = Math.Max(1, settingsRow.HelpText.Length / 50.0) * 5; this.ExtendPopover(popover); activePopover?.Close(); activePopover = popover; systemWindow.ShowPopover( new MatePoint(settingsRow) { Mate = new MateOptions(alignLeft ? MateEdge.Left : MateEdge.Right, MateEdge.Top), AltMate = new MateOptions(alignLeft ? MateEdge.Right : MateEdge.Left, MateEdge.Bottom), Offset = new RectangleDouble(12, 0, 12, 0) }, new MatePoint(popover) { Mate = new MateOptions(alignLeft ? MateEdge.Right : MateEdge.Left, MateEdge.Top), AltMate = new MateOptions(alignLeft ? MateEdge.Left : MateEdge.Right, MateEdge.Bottom), // Offset = new RectangleDouble(12, 0, 12, 0) }, secondsToClose: closeSeconds); popoverBubble = popover; } } }
27.533163
151
0.702307
[ "BSD-2-Clause" ]
Bhalddin/MatterControl
MatterControlLib/SlicerConfiguration/SettingsRow.cs
10,795
C#
using Splaak.Core.Values; using Splaak.Core.Values.Types; using Xunit; namespace Splaak.Tests.Values.Types { public class PairVTests { private static Value _1 = new IntV(0); private static Value _2 = new IntV(1); private PairV _obj = new PairV(_1, _2); [Fact] public void ConstructorTest() { Assert.Equal(_obj.Left, _1); Assert.Equal(_obj.Right, _2); } [Fact] public void EqualsEqualTest() { Assert.True(_obj.Equals(new PairV(_1, _2))); } [Fact] public void EqualsNullTest() { Assert.False(_obj.Equals(null)); } [Fact] public void EqualsNotEqualValueTest() { Assert.False(_obj.Equals(new PairV(_1, _1))); } [Fact] public void EqualsNotEqualTypeTest() { Assert.False(_obj.Equals(new IntV(0))); } [Fact] public void HashCodeEqualTest() { Assert.Equal(_obj.GetHashCode(), new PairV(_1, _2).GetHashCode()); } [Fact] public void ToStringTest() { Assert.Equal(_obj.ToString(), $"PairV({_1}, {_2})"); } } }
22.192982
78
0.515415
[ "MIT" ]
CptWesley/Splaak
tests/src/Values/Types/PairVTests.cs
1,267
C#
using UnityEngine; using UnityAtoms.SceneMgmt; namespace UnityAtoms.SceneMgmt { /// <summary> /// Event of type `SceneFieldPair`. Inherits from `AtomEvent&lt;SceneFieldPair&gt;`. /// </summary> [EditorIcon("atom-icon-cherry")] [CreateAssetMenu(menuName = "Unity Atoms/Events/SceneFieldPair", fileName = "SceneFieldPairEvent")] public sealed class SceneFieldPairEvent : AtomEvent<SceneFieldPair> { } }
32.769231
103
0.725352
[ "MIT" ]
puschie286/unity-atoms
Packages/SceneMgmt/Runtime/Events/SceneFieldPairEvent.cs
426
C#
// <auto-generated> // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // </auto-generated> namespace Microsoft.Azure.Management.ProviderHub.Models { using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; public partial class SkuZoneDetail { /// <summary> /// Initializes a new instance of the SkuZoneDetail class. /// </summary> public SkuZoneDetail() { CustomInit(); } /// <summary> /// Initializes a new instance of the SkuZoneDetail class. /// </summary> public SkuZoneDetail(IList<string> name = default(IList<string>), IList<SkuCapability> capabilities = default(IList<SkuCapability>)) { Name = name; Capabilities = capabilities; CustomInit(); } /// <summary> /// An initialization method that performs custom operations like setting defaults /// </summary> partial void CustomInit(); /// <summary> /// </summary> [JsonProperty(PropertyName = "name")] public IList<string> Name { get; set; } /// <summary> /// </summary> [JsonProperty(PropertyName = "capabilities")] public IList<SkuCapability> Capabilities { get; set; } } }
29.345455
140
0.614002
[ "MIT" ]
93mishra/azure-sdk-for-net
sdk/providerhub/Microsoft.Azure.Management.ProviderHub/src/Generated/Models/SkuZoneDetail.cs
1,614
C#
using Microsoft.SqlServer.Server; using System; using System.Data.SqlTypes; using System.IO; namespace Microsoft.SqlServer.Types { /// <summary> /// The SqlGeography type represents data in a geodetic (round earth) coordinate system. /// </summary> [SqlUserDefinedType(Format.UserDefined, IsByteOrdered = false, MaxByteSize = -1, IsFixedLength = false)] public class SqlGeography : INullable, IBinarySerialize { private ShapeData _geometry; private int srid = 0; internal SqlGeography(bool isNull) { IsNull = isNull; } internal SqlGeography(ShapeData g, int srid) { this.srid = srid; this._geometry = g; } public SqlGeography() { _geometry = new ShapeData(); } /// <summary> /// Constructs a <see cref="SqlGeography"/> instance representing a Point instance from its x and y values and a spatial reference ID (SRID). /// </summary> /// <param name="latitude">A double that represents the latitude coordinate of the Point being generated.</param> /// <param name="longitude">A double that represents the longitude coordinate of the Point being generated.</param> /// <param name="srid">An int expression that represents the SRID of the geography instance you wish to return</param> /// <returns>A <see cref="SqlGeography"/> instance constructed from the specified latitude, longitude, and SRID values.</returns> [SqlMethod] public static SqlGeography Point(double latitude, double longitude, int srid) { if (!double.IsNaN(latitude) && !double.IsInfinity(latitude) && !double.IsNaN(longitude) && !double.IsInfinity(longitude)) { if (Math.Abs(latitude) > 90.0) throw new FormatException("latitude is not a valid value"); if (Math.Abs(longitude) > 15069.0) throw new FormatException("longitude is not a valid value"); return new SqlGeography(new ShapeData(latitude, longitude, null, null), srid); } throw new FormatException("Invalid coordinates"); } /// <summary> /// Returns the longitude property of the geography instance. 
/// </summary> /// <value>A SqlDouble value that specifies the longitude.</value> /// <remarks> /// In the OpenGIS model, Long is defined only on geography instances composed of a single point. /// This property will return NULL if geography instances contain more than a single point. This /// property is precise and read-only. /// </remarks> public SqlDouble Long { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => _geometry.Type == OGCGeometryType.Point && _geometry.NumPoints == 1 ? new SqlDouble(_geometry.Y) : SqlDouble.Null; } /// <summary> /// Returns the latitude property of the geography instance. /// </summary> /// <value>A SqlDouble value that specifies the latitude.</value> /// <remarks> /// In the OpenGIS model, Lat is defined only on geography instances composed of a single point. /// This property will return NULL if geography instances contain more than a single point. This property /// is precise and read-only. /// </remarks> public SqlDouble Lat { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => _geometry.Type == OGCGeometryType.Point && _geometry.NumPoints == 1 ? new SqlDouble(_geometry.X) : SqlDouble.Null; } /// <summary> /// Gets the Z (elevation) value of the instance. The semantics of the elevation value are user-defined. /// </summary> /// <value>true if at least one point in a spatial object contains value Z; otherwise false.</value> /// <remarks> /// <para>The value of this property is null if the geography instance is not a point, as well as for any Point instance for which it is not set.</para> /// <para>This property is read-only.</para> /// <para>Z-coordinates are not used in any calculations made by the library and are not carried through any library calculations.</para> /// </remarks> public SqlDouble Z { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => _geometry.Type == OGCGeometryType.Point && _geometry.NumPoints == 1 && _geometry.HasZ && !double.IsNaN(_geometry.Z) ? 
new SqlDouble(_geometry.Z) : SqlDouble.Null; } /// <summary> /// Returns the M (measure) value of the geography instance. /// </summary> /// <remarks> /// <para>The semantics of the measure value are user-defined but generally describe the distance along a linestring. For example, the measure value could be used to keep track of mileposts along a road.</para> /// <para>The value of this property is null if the geography instance is not a Point, as well as for any Point instance for which it is not set.</para> /// <para>This property is read-only.</para> /// <para>M values are not used in any calculations made by the library and will not be carried through any library calculations.</para> /// </remarks> public SqlDouble M { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => _geometry.Type == OGCGeometryType.Point && _geometry.NumPoints == 1 && _geometry.HasM && !double.IsNaN(_geometry.M) ? new SqlDouble(_geometry.M) : SqlDouble.Null; } /// <summary> /// Gets or sets id is an integer representing the Spatial Reference Identifier (SRID) of the instance. /// </summary> /// <value>A SqlInt32 that represents the SRID of the SqlGeography instance.</value> public SqlInt32 STSrid { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => new SqlInt32(srid); [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] set { if (value.IsNull) throw new System.ArgumentNullException(); if ((srid < 4120 || srid > 4999) && srid != 104001) throw new ArgumentOutOfRangeException(nameof(srid), "SRID must be between 4120 and 4999 (inclusive)"); srid = value.Value; } } /// <summary> /// Returns true if at least one point in a spatial object contains value Z; otherwise returns false. This property is read-only. 
/// </summary> /// <value>true if at least one point in a spatial object contains value Z; otherwise false.</value> public bool HasZ { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => _geometry.HasZ; } /// <summary> /// Returns true if at least one point in a spatial object contains value M; otherwise returns false. This property is read-only. /// </summary> /// <value>true if at least one point in a spatial object contains value M; otherwise false.</value> public bool HasM { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get => _geometry.HasM; } /// <summary> /// Determines whether the SqlGeography instance is null. /// </summary> /// <value>A bool value that specifies whether the SqlGeography instance is null. If true, the instance is null. Otherwise, false.</value> public bool IsNull { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get; } /// <summary> /// Returns a read-only property providing a null instance of the SqlGeography type. /// </summary> /// <value>A null instance of the SqlGeography class.</value> public static SqlGeography Null { [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] get; } = new SqlGeography(true); /// <summary> /// Returns the total number of rings in a Polygon instance. 
/// </summary> /// <returns> /// <para>A SqlInt32 value specifying the total number of rings.</para> /// <para>This method will return NULL if this is not a Polygon instance and will return 0 if the instance is empty.</para> /// </returns> /// <remarks>In the SQL Server geography type, external and internal rings are not distinguished, as any ring can be taken to be the external ring.</remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlInt32 NumRings() { if (IsNull || (_geometry.Type != OGCGeometryType.Polygon && _geometry.Type != OGCGeometryType.CurvePolygon)) { return SqlInt32.Null; } return this._geometry.NumRings; } /// <summary> /// Returns the Open Geospatial Consortium (OGC) type name represented by a geography instance. /// </summary> /// <returns>A SqlString value containing the OGC type name.</returns> /// <remarks> /// The OGC type names that can be returned by the STGeometryType method are Point, LineString, Polygon, GeometryCollection, MultiPoint, MultiLineString, and MultiPolygon. /// </remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlString STGeometryType() { if (IsNull) return SqlString.Null; return new SqlString(_geometry.Type.ToString()); } /// <summary> /// Returns the number of geometries that make up a SqlGeography instance. /// </summary> /// <returns>A SqlInt32 value that specifies the number of geometries that make up the <see cref="SqlGeography"/> instance. </returns> /// <remarks> /// This method returns 1 if the geography instance is not a MultiPoint, MultiLineString, MultiPolygon, or GeometryCollection instance, or 0 if the SqlGeography instance is empty. /// </remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlInt32 STNumGeometries() => IsNull ? 
SqlInt32.Null : _geometry.NumGeometries; /// <summary> /// Returns the total number of points in each of the figures in a SqlGeography instance /// </summary> /// <returns>A SqlInt32 value specifying the total number of points in each figure of the <see cref="SqlGeography"/> instance.</returns> /// <remarks> /// This method counts the points in the description of a SqlGeography instance. Duplicate points are counted. /// If this instance is a GeometryCollection, this method returns of the total number of points in each of the /// elements in the collection. /// </remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlInt32 STNumPoints() => IsNull ? SqlInt32.Null : _geometry.NumPoints; /// <summary> /// Returns the number of curves in a one-dimensional SqlGeography instance. /// </summary> /// <returns>The number of curves.</returns> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlInt32 STNumCurves() { if (IsNull || _geometry.IsEmpty) return SqlInt32.Null; if (_geometry.Type == OGCGeometryType.LineString) return _geometry.IsEmpty ? 0 : _geometry.NumPoints - 1; if (_geometry.Type == OGCGeometryType.CircularString) { if (_geometry.IsEmpty) return 0; return (_geometry.NumPoints - 1) / 2; } if(_geometry.Type == OGCGeometryType.Polygon) return _geometry.NumRings; if (_geometry.Type != OGCGeometryType.CompoundCurve) return SqlInt32.Null; return _geometry.NumRings; } /// <summary> /// Returns a specified geography element in a GeometryCollection or one of its subtypes. 
/// </summary> /// <param name="n">An int expression between 1 and the number of SqlGeography instances in the GeometryCollection.</param> /// <returns>A SqlGeography element from the specified instance in the GeometryCollection.</returns> /// <remarks> /// <para>When this method is used on a subtype of a GeometryCollection, such as MultiPoint or MultiLineString, this method returns the SqlGeography instance if called with N=1.</para> /// <para>This method returns null if the parameter is larger than the result of STNumGeometries and will throw an ArgumentOutOfRangeException if the expression parameter is less than 1.</para> /// </remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlGeography STGeometryN(int n) { if (n < 1) throw new ArgumentOutOfRangeException(nameof(n)); if (IsNull || n > STNumGeometries()) return SqlGeography.Null; return new SqlGeography(_geometry.GetGeometryN(n), srid); } /// <summary> /// Returns the curve specified from a SqlGeography instance that is a LineString, CircularString, or CompoundCurve. /// </summary> /// <param name="n">An integer between 1 and the number of curves in the SqlGeography instance.</param> /// <returns>The specified curve.</returns> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] public SqlGeography STCurveN(int n) { if (n < 1) { throw new ArgumentOutOfRangeException(nameof(n)); } SqlInt32 val = STNumCurves(); if (val.IsNull) return Null; return new SqlGeography(_geometry.GetRing(n - 1), srid); } /// <summary> /// Returns the specified point in a SqlGeography instance. 
/// </summary> /// <param name="n">An int expression between 1 and the number of points in the SqlGeography instance.</param> /// <returns>A <see cref="SqlGeography"/> representing the specified point in the calling instance.</returns> /// <remarks> /// <para>If a SqlGeography instance is user-created, the STPointN method returns the point specified by expression by ordering the points in the order in which they were originally input.</para> /// <para>If a SqlGeography instance is constructed by the system, STPointN returns the point specified by expression by ordering all the points in the same order they would be output: first by geography instance, then by ring within the instance(if appropriate), and then by point within the ring.This order is deterministic.</para> /// <para>If this method is called with a value less than 1, it throws an ArgumentOutOfRangeException.</para> /// <para>If this method is called with a value greater than the number of points in the instance, it returns null.</para> /// </remarks> public SqlGeography STPointN(int n) { if (n < 1) throw new ArgumentOutOfRangeException(nameof(n)); if (IsNull) return SqlGeography.Null; if (n > this._geometry.NumPoints) return SqlGeography.Null; var p = _geometry.GetPointN(n); return new SqlGeography(new ShapeData(p.X, p.Y, HasZ ? (double?)p.Z : null, HasM ? (double?)p.M : null), srid); } /// <summary> /// Returns the start point of a SqlGeography instance. /// </summary> /// <returns>A SqlGeography value that represents the start point of the calling SqlGeography.</returns> /// <remarks>STStartPoint is the equivalent of STPointN(1).</remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlGeography STStartPoint() => this.STPointN(1); // public SqlGeography STUnion(SqlGeography sqlGeography) // { // throw new NotSupportedException(); // } /// <summary> /// Returns the end point of a SqlGeography instance. 
/// </summary> /// <returns>A SqlGeography value containing the end point.</returns> /// <remarks> /// <para>STEndPoint is the equivalent of SqlGeography.STPointN(x.STNumPoints()).</para> /// <para>This method returns null if called on an empty geography instance.</para> /// </remarks> [SqlMethod(IsDeterministic = true, IsPrecise = true)] public SqlGeography STEndPoint() => STPointN(Math.Max(1, _geometry.NumPoints)); /// <summary> /// Returns the specified ring of the SqlGeography instance: 1 ≤ n ≤ NumRings(). /// </summary> /// <param name="n">An int expression between 1 and the number of rings in a polygon instance.</param> /// <returns>A SqlGeography object that represents the ring specified by n.</returns> /// <remarks> /// If the value of the ring index n is less than 1, this method throws an ArgumentOutOfRangeException. The ring index value must be greater than or equal to 1 and should be less than or equal to the number returned by NumRings. /// </remarks> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] public SqlGeography RingN(int n) { if (n < 1) throw new ArgumentOutOfRangeException(nameof(n)); if (IsNull || (_geometry.Type != OGCGeometryType.Polygon && _geometry.Type != OGCGeometryType.CurvePolygon) || n > this._geometry.NumRings) { return SqlGeography.Null; } ShapeData ring = _geometry.GetRing(n - 1); ring.SetIsValid(false); return new SqlGeography(ring, this.srid); } /// <summary> /// Determines whether the <see cref="SqlGeography"/> instance is empty. /// </summary> /// <returns>A SqlBoolean value that indicates whether the calling instance is empty. Returns true if it is empty. Otherwise, returns false.</returns> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] public SqlBoolean STIsEmpty() { if (this.IsNull) return SqlBoolean.Null; return _geometry.IsEmpty; } /// <summary> /// Returns the Open Geospatial Consortium (OGC) Well-Known Text (WKT) representation of a <see cref="SqlGeography"/> instance. 
/// </summary> /// <returns>A SqlChars object containing the WKT representation of the SqlGeography.</returns> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = false)] public SqlChars STAsText() => new SqlChars(ToString()); /// <summary> /// Tests if the <see cref="SqlGeography"/> instance is the same as the specified type. /// </summary> /// <param name="geometryType">Specifies the type of geometry that the calling <see cref="SqlGeography"/> will be compared to.</param> /// <returns>A SqlBoolean value indicating if the calling <see cref="SqlGeography"/> is of the specified geometry type. /// Returns true if the type of a <see cref="SqlGeography"/> instance is the same as the specified type, or if the specified /// type is an ancestor of the instance type. Otherwise, returns false.</returns> /// <remarks> /// The input for the method must be one of the following: Geometry, Point, Curve, LineString, Surface, Polygon, GeometryCollection, /// MultiSurface, MultiPolygon, MultiCurve, MultiLineString, FullGlobe, and MultiPoint. This method throws an ArgumentException if /// any other strings are used for the input. 
/// </remarks> [SqlMethod] public SqlBoolean InstanceOf(string geometryType) { if (geometryType == null) throw new ArgumentNullException(nameof(geometryType)); if (IsNull) { return SqlBoolean.Null; } if (_geometry.IsValid) throw new ArgumentException("Geometry is not valid"); string[] array = _parentGeometryTypeNames[(uint)_geometry.Type]; for (int i = 0; i < array.Length; i++) { if (string.Compare(geometryType, array[i], StringComparison.OrdinalIgnoreCase) == 0) return true; } for (int j = 0; j < _validGeometryTypeNames.Length; j++) { if (string.Compare(geometryType, _validGeometryTypeNames[j], StringComparison.OrdinalIgnoreCase) == 0) return false; } throw new ArgumentException("Invalid geometryType name", nameof(geometryType)); } private static readonly string[] _validGeometryTypeNames = new string[] { "Geometry", "Point", "LineString", "Polygon", "Curve", "Surface", "MultiPoint", "MultiLineString", "MultiPolygon", "MultiCurve", "MultiSurface", "GeometryCollection", "FullGlobe", "CircularString", "CompoundCurve", "CurvePolygon" }; private static readonly string[][] _parentGeometryTypeNames = new string[][] { new string[] { }, new [] { "Geometry", "Point" }, new [] { "Geometry", "Curve", "LineString" }, new [] { "Geometry", "Surface", "Polygon" }, new [] { "Geometry", "GeometryCollection", "MultiPoint" }, new [] { "Geometry", "GeometryCollection", "MultiCurve", "MultiLineString" }, new [] { "Geometry", "GeometryCollection", "MultiSurface", "MultiPolygon" }, new [] { "Geometry", "GeometryCollection" }, new [] { "Geometry", "Curve", "CircularString" }, new [] { "Geometry", "Curve", "CompounCurve" }, new [] { "Geometry", "Surface", "CurvePolygon" }, new [] { "Geometry", "FullGlobe" } }; /// <summary> /// Returns a SqlGeography instance from an Open Geospatial Consortium (OGC) Well-Known Text (WKT) representation augmented with any Z (elevation) and M (measure) values carried by the instance. 
/// </summary> /// <param name="geometryTaggedText">The WKT representation of the SqlGeography instance you wish to return. </param> /// <param name="srid">An int expression that represents the spatial reference ID (SRID) of the SqlGeography instance you wish to return.</param> /// <returns>A SqlGeography instance constructed from the WKY representation.</returns> /// <remarks> /// <para>The OGC type of the SqlGeography instance returned by STGeomFromText is set to the corresponding WKT input.</para> /// <para>This method will throw a <see cref="FormatException"/> if the input is not well-formatted.</para> /// </remarks> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = false)] public static SqlGeography STGeomFromText(SqlChars geometryTaggedText, int srid) { if (geometryTaggedText.IsNull) return SqlGeography.Null; var data = Wkt.WktReader.Parse(System.Text.Encoding.UTF8.GetBytes(geometryTaggedText.Buffer), Wkt.CoordinateOrder.LatLong); return new SqlGeography(data, srid); } /// <summary> /// Returns a SqlGeography instance from an Open Geospatial Consortium (OGC) Well-Known Text (WKT) representation. /// </summary> /// <param name="s">The WKT representation of the SqlGeography instance you wish to return. </param> /// <returns>A SqlGeography value constructed from the specified WKT representation.</returns> /// <remarks> /// The Parse method is equivalent to <see cref="STGeomFromText"/> except that it assumes a spatial reference ID (SRID) of 4326 as a parameter. The input may carry optional Z (elevation) and M (measure) values. /// </remarks> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = false)] public static SqlGeography Parse(SqlString s) { if (s.IsNull) return SqlGeography.Null; var data = Wkt.WktReader.Parse(System.Text.Encoding.UTF8.GetBytes(s.Value), Wkt.CoordinateOrder.LatLong); return new SqlGeography(data, 4326); } /// <summary> /// Reads a binary representation of a geography type into a SqlGeometry object. 
/// </summary> /// <param name="r">BinaryReader object that reads a binary representation of a geography type.</param> /// <remarks> /// <para>This member is sealed.</para> /// <para>This method will throw a FormatException if SRID value read by r is invalid.</para> /// </remarks> public void Read(BinaryReader r) { srid = r.ReadInt32(); this._geometry = new ShapeData(); this._geometry.Read(r, 1); } /// <summary> /// Writes a SqlGeography object to a binary stream. /// </summary> /// <param name="w">BinaryWriter object that writes a SqlGeography object to a binary stream.</param> [SqlMethodAttribute(IsDeterministic = true, IsPrecise = true)] public void Write(BinaryWriter w) { w.Write((!IsNull && !STSrid.IsNull ? STSrid.Value : 0)); //SRID _geometry.Write(w); } /// <summary> /// Returns a constructed SqlGeometry from an internal SQL Server format for spatial data. Can be used for sending spatial data over the network or reading them from files. /// </summary> /// <param name="bytes">The data representing the spatial data being sent across the network.</param> /// <returns>The data being sent over the network.</returns> public static SqlGeography Deserialize(SqlBytes bytes) { using (var r = new BinaryReader(bytes.Stream)) { var srid = r.ReadInt32(); var geometry = new ShapeData(); geometry.Read(r, 1); return new SqlGeography(geometry, srid); } } /// <summary> /// Used for sending spatial data across the network. /// </summary> /// <returns>A SqlBytes stream representing the spatial data being sent across the network.</returns> /// <remarks> /// Used in conjunction with <see cref="Deserialize"/>() for sending spatial data across the network. /// </remarks> public SqlBytes Serialize() { using (var ms = new MemoryStream()) { Write(new BinaryWriter(ms)); return new SqlBytes(ms.ToArray()); } } public override string ToString() => Wkt.WktWriter.Write(_geometry, Wkt.CoordinateOrder.LatLong); } }
52.319549
342
0.605698
[ "Apache-2.0" ]
MuhKuh7/Microsoft.SqlServer.Types
src/Microsoft.SqlServer.Types/SqlGeography.cs
27,840
C#
using System; using System.Collections.Generic; using System.Text.Json.Serialization; namespace MicroFeel.Yonyou.Api { public class Materialout { ///<Summary> ///单据编号 ///</Summary> [JsonPropertyName("code")] public string Code { get; set; } ///<Summary> ///制单日期 ///</Summary> [JsonPropertyName("date")] public DateTime Date { get; set; } ///<Summary> ///制单人名称 ///</Summary> [JsonPropertyName("maker")] public string Maker { get; set; } ///<Summary> ///仓库编码 ///</Summary> [JsonPropertyName("warehousecode")] public string Warehousecode { get; set; } ///<Summary> ///备注 ///</Summary> [JsonPropertyName("memory")] public string Memory { get; set; } ///<Summary> ///收发类型编码 ///</Summary> [JsonPropertyName("receivecode")] public string Receivecode { get; set; } ///<Summary> ///部门编码 ///</Summary> [JsonPropertyName("departmentcode")] public string Departmentcode { get; set; } ///<Summary> ///单据头自定义项1 ///</Summary> [JsonPropertyName("define1")] public string Define1 { get; set; } ///<Summary> ///单据头自定义项2 ///</Summary> [JsonPropertyName("define2")] public string Define2 { get; set; } ///<Summary> ///单据头自定义项3 ///</Summary> [JsonPropertyName("define3")] public string Define3 { get; set; } ///<Summary> ///单据头自定义项4 ///</Summary> [JsonPropertyName("define4")] public DateTime Define4 { get; set; } ///<Summary> ///单据头自定义项5 ///</Summary> [JsonPropertyName("define5")] public float Define5 { get; set; } ///<Summary> ///单据头自定义项6 ///</Summary> [JsonPropertyName("define6")] public DateTime Define6 { get; set; } ///<Summary> ///单据头自定义项7 ///</Summary> [JsonPropertyName("define7")] public float Define7 { get; set; } ///<Summary> ///单据头自定义项8 ///</Summary> [JsonPropertyName("define8")] public string Define8 { get; set; } ///<Summary> ///单据头自定义项9 ///</Summary> [JsonPropertyName("define9")] public string Define9 { get; set; } ///<Summary> ///单据头自定义项10 ///</Summary> [JsonPropertyName("define10")] public string Define10 { get; set; } ///<Summary> ///单据头自定义项11 ///</Summary> [JsonPropertyName("define11")] 
public string Define11 { get; set; } ///<Summary> ///单据头自定义项12 ///</Summary> [JsonPropertyName("define12")] public string Define12 { get; set; } ///<Summary> ///单据头自定义项13 ///</Summary> [JsonPropertyName("define13")] public string Define13 { get; set; } ///<Summary> ///单据头自定义项14 ///</Summary> [JsonPropertyName("define14")] public string Define14 { get; set; } ///<Summary> ///单据头自定义项15 ///</Summary> [JsonPropertyName("define15")] public float Define15 { get; set; } ///<Summary> ///单据头自定义项16 ///</Summary> [JsonPropertyName("define16")] public float Define16 { get; set; } ///<Summary> ///存货编码 ///</Summary> [JsonPropertyName("entry")] public IList<MaterialoutEntry> Materialoutentry { get; set; } } public class MaterialoutEntry { ///<Summary> ///存货编码 ///</Summary> [JsonPropertyName("inventorycode")] public string Inventorycode { get; set; } ///<Summary> ///数量 ///</Summary> [JsonPropertyName("quantity")] public float Quantity { get; set; } ///<Summary> ///辅记量单位编码 ///</Summary> [JsonPropertyName("assitantunit")] public string Assitantunit { get; set; } ///<Summary> ///换算率 ///</Summary> [JsonPropertyName("irate")] public float Irate { get; set; } ///<Summary> ///件数 ///</Summary> [JsonPropertyName("number")] public float Number { get; set; } ///<Summary> ///单价 ///</Summary> [JsonPropertyName("price")] public float Price { get; set; } ///<Summary> ///金额 ///</Summary> [JsonPropertyName("cost")] public float Cost { get; set; } ///<Summary> ///批号 ///</Summary> [JsonPropertyName("serial")] public string Serial { get; set; } ///<Summary> ///生产日期 ///</Summary> [JsonPropertyName("makedate")] public DateTime Makedate { get; set; } ///<Summary> ///失效日期 ///</Summary> [JsonPropertyName("validdate")] public DateTime Validdate { get; set; } ///<Summary> ///自由项1 ///</Summary> [JsonPropertyName("free1")] public string Free1 { get; set; } ///<Summary> ///自由项2 ///</Summary> [JsonPropertyName("free2")] public string Free2 { get; set; } ///<Summary> ///自由项3 ///</Summary> 
[JsonPropertyName("free3")] public string Free3 { get; set; } ///<Summary> ///自由项4 ///</Summary> [JsonPropertyName("free4")] public string Free4 { get; set; } ///<Summary> ///自由项5 ///</Summary> [JsonPropertyName("free5")] public string Free5 { get; set; } ///<Summary> ///自由项6 ///</Summary> [JsonPropertyName("free6")] public string Free6 { get; set; } ///<Summary> ///自由项7 ///</Summary> [JsonPropertyName("free7")] public string Free7 { get; set; } ///<Summary> ///自由项8 ///</Summary> [JsonPropertyName("free8")] public string Free8 { get; set; } ///<Summary> ///自由项9 ///</Summary> [JsonPropertyName("free9")] public string Free9 { get; set; } ///<Summary> ///自由项10 ///</Summary> [JsonPropertyName("free10")] public string Free10 { get; set; } ///<Summary> ///单据体自定义项1 ///</Summary> [JsonPropertyName("define22")] public string Define22 { get; set; } ///<Summary> ///单据体自定义项2 ///</Summary> [JsonPropertyName("define23")] public string Define23 { get; set; } ///<Summary> ///单据体自定义项3 ///</Summary> [JsonPropertyName("define24")] public string Define24 { get; set; } ///<Summary> ///单据体自定义项4 ///</Summary> [JsonPropertyName("define25")] public string Define25 { get; set; } ///<Summary> ///单据体自定义项5 ///</Summary> [JsonPropertyName("define26")] public float Define26 { get; set; } ///<Summary> ///单据体自定义项6 ///</Summary> [JsonPropertyName("define27")] public float Define27 { get; set; } ///<Summary> ///单据体自定义项7 ///</Summary> [JsonPropertyName("define28")] public string Define28 { get; set; } ///<Summary> ///单据体自定义项8 ///</Summary> [JsonPropertyName("define29")] public string Define29 { get; set; } ///<Summary> ///单据体自定义项9 ///</Summary> [JsonPropertyName("define30")] public string Define30 { get; set; } ///<Summary> ///单据体自定义项10 ///</Summary> [JsonPropertyName("define31")] public string Define31 { get; set; } ///<Summary> ///单据体自定义项11 ///</Summary> [JsonPropertyName("define32")] public string Define32 { get; set; } ///<Summary> ///单据体自定义项12 ///</Summary> [JsonPropertyName("define33")] public 
string Define33 { get; set; } ///<Summary> ///单据体自定义项13 ///</Summary> [JsonPropertyName("define34")] public float Define34 { get; set; } ///<Summary> ///单据体自定义项14 ///</Summary> [JsonPropertyName("define35")] public float Define35 { get; set; } ///<Summary> ///单据体自定义项15 ///</Summary> [JsonPropertyName("define36")] public DateTime Define36 { get; set; } ///<Summary> ///单据体自定义项16 ///</Summary> [JsonPropertyName("define37")] public DateTime Define37 { get; set; } ///<Summary> ///行号 ///</Summary> [JsonPropertyName("rowno")] public float Rowno { get; set; } } }
25.453521
69
0.482736
[ "Apache-2.0" ]
microfeel/Yonyou
MicroFeel.Yonyou.OpenApi/Model/Data/Stock/Materialout.cs
9,678
C#
// Copyright (c) Dapplo and contributors. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Drawing; using System.Linq; using System.Threading.Tasks; using Dapplo.HttpExtensions; using Dapplo.HttpExtensions.Extensions; using Dapplo.HttpExtensions.WinForms.ContentConverter; using Dapplo.HttpExtensions.Wpf.ContentConverter; using Dapplo.Jira.Entities; using Dapplo.Jira.Enums; using Dapplo.Jira.SvgWinForms.Converters; using Dapplo.Log; using Xunit; using Xunit.Abstractions; namespace Dapplo.Jira.Tests { public class ProjectTests : TestBase { public ProjectTests(ITestOutputHelper testOutputHelper) : base(testOutputHelper) { // Add SvgBitmapHttpContentConverter if it was not yet added if (HttpExtensionsGlobals.HttpContentConverters.All(x => x.GetType() != typeof(SvgBitmapHttpContentConverter))) { HttpExtensionsGlobals.HttpContentConverters.Add(SvgBitmapHttpContentConverter.Instance.Value); } // Add BitmapHttpContentConverter if it was not yet added if (HttpExtensionsGlobals.HttpContentConverters.All(x => x.GetType() != typeof(BitmapHttpContentConverter))) { HttpExtensionsGlobals.HttpContentConverters.Add(BitmapHttpContentConverter.Instance.Value); } // Add BitmapSourceHttpContentConverter if it was not yet added if (HttpExtensionsGlobals.HttpContentConverters.All(x => x.GetType() != typeof(BitmapSourceHttpContentConverter))) { HttpExtensionsGlobals.HttpContentConverters.Add(BitmapSourceHttpContentConverter.Instance.Value); } } [Fact] public async Task TestGetProjectAsync() { var project = await Client.Project.GetAsync("DIT"); Assert.NotNull(project); Assert.True(project.Roles.Count > 0); foreach (var componentDigest in project.Components) { var component = await Client.Project.GetComponentAsync(componentDigest.Id); Assert.NotNull(component?.Name); Log.Info().WriteLine("Component {0}", component.Name); } } [Fact] public async Task TestGetIssueCreatorsAsync() { var creators = await 
Client.Project.GetIssueCreatorsAsync("DIT"); Assert.NotNull(creators); var firstCreator = creators.First(); await Client.Server.GetAvatarAsync<Bitmap>(firstCreator.Avatars); } [Fact] public async Task TestComponentAsync() { // Create var component = new Component { Name = "Component from Test", Project = "DIT", Description = "This was created from a test" }; component = await Client.Project.CreateComponentAsync(component); // Update const string descriptionUpdate = "Changed the description"; component.Description = descriptionUpdate; await Client.Project.UpdateComponentAsync(component); // Delete component = await Client.Project.GetComponentAsync(component.Id); Assert.Equal(descriptionUpdate, component.Description); await Client.Project.DeleteComponentAsync(component.Id); } [Fact] public async Task TestGetProjectsAsync() { var projects = await Client.Project.GetAllAsync(); Assert.NotNull(projects); Assert.True(projects.Count > 0); Client.Behaviour.SetConfig(new SvgConfiguration {Width = 24, Height = 24}); foreach (var project in projects) { var avatar = await Client.Server.GetAvatarAsync<Bitmap>(project.Avatar, AvatarSizes.Medium); Assert.True(avatar.Width == 24); var projectDetails = await Client.Project.GetAsync(project.Key); Assert.NotNull(projectDetails); } } } }
36.220183
126
0.670719
[ "MIT" ]
oldmansauls/Dapplo.Jira
src/Dapplo.Jira.Tests/ProjectTests.cs
3,950
C#
using AspnetRun.Services.Orders.Core.Entities; using AspnetRun.Services.Orders.Core.Repositories.Base; using System.Collections.Generic; using System.Threading.Tasks; namespace AspnetRun.Services.Orders.Core.Repositories { public interface IOrderRepository : IRepository<Order> { Task<IEnumerable<Order>> GetOrderByUserNameAsync(string userName); } }
28.692308
74
0.788204
[ "MIT" ]
nilavanrajamani/ecommerce-microservice
AspnetRun.Services.Orders/Core/Repositories/IOrderRepository.cs
375
C#
// Polar2D using ClubPenguin; using UnityEngine; public struct Polar2D { public static readonly Polar2D Zero = new Polar2D(0f, 0f); public float Angle { get; private set; } public float Distance { get; private set; } public Polar2D(float angle, float distance) { this = default(Polar2D); Angle = angle; Distance = distance; } public Polar2D(Vector2 v) { this = default(Polar2D); Angle = Mathf.Atan2(v.y, v.x) * 57.29578f; Distance = Mathf.Sqrt(v.x * v.x + v.y * v.y); } public static implicit operator Vector2(Polar2D p) { return new Vector2(Mathf.Cos(p.Angle * 0.0174532924f) * p.Distance, Mathf.Sin(p.Angle * 0.0174532924f) * p.Distance); } public static Polar2D MoveTowards(Polar2D current, Polar2D target, float angleMaxDelta, float distanceMaxDelta) { float angle = Mathf.MoveTowardsAngle(current.Angle, target.Angle, angleMaxDelta); float distance = Mathf.MoveTowards(current.Distance, target.Distance, distanceMaxDelta); return new Polar2D(angle, distance); } public static bool operator ==(Polar2D p1, Polar2D p2) { return p1.Angle == p2.Angle && p1.Distance == p2.Distance; } public static bool operator !=(Polar2D p1, Polar2D p2) { return p1.Angle != p2.Angle || p1.Distance != p2.Distance; } public static Polar2D operator +(Polar2D p1, Polar2D p2) { return new Polar2D(p1.Angle + p2.Angle, p1.Distance + p2.Distance); } public static Polar2D operator -(Polar2D p1, Polar2D p2) { return new Polar2D(p1.Angle - p2.Angle, p1.Distance - p2.Distance); } public override string ToString() { return $"({Angle} deg,{Distance})"; } public override int GetHashCode() { return (Angle.GetHashCode() * 522133279) ^ Distance.GetHashCode(); } public override bool Equals(object obj) { if (obj == null || GetType() != obj.GetType()) { return false; } Polar2D polar2D = (Polar2D)obj; return Angle == polar2D.Angle && Distance == polar2D.Distance; } }
22.448276
119
0.692268
[ "MIT" ]
smdx24/CPI-Source-Code
ClubPenguin/Polar2D.cs
1,953
C#
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ namespace Microsoft.Azure.PowerShell.Cmdlets.ContainerInstance.Runtime.Json { public sealed class JsonObjectConverter : JsonConverter<JsonObject> { internal override JsonNode ToJson(JsonObject value) => value; internal override JsonObject FromJson(JsonNode node) => (JsonObject)node; } }
52.384615
97
0.528634
[ "MIT" ]
Agazoth/azure-powershell
src/ContainerInstance/generated/runtime/Conversions/Instances/JsonObjectConverter.cs
671
C#
using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Net; namespace Octokit.Internal { /// <summary> /// Represents a generic HTTP response /// </summary> internal class Response : IResponse { [Obsolete("Use the constructor with maximum parameters to avoid shortcuts")] public Response() : this(new Dictionary<string, string>()) { } [Obsolete("Use the constructor with maximum parameters to avoid shortcuts")] public Response(IDictionary<string, string> headers) { Ensure.ArgumentNotNull(headers, nameof(headers)); Headers = new ReadOnlyDictionary<string, string>(headers); ApiInfo = ApiInfoParser.ParseResponseHeaders(headers); } public Response(HttpStatusCode statusCode, object body, IDictionary<string, string> headers, string contentType) { Ensure.ArgumentNotNull(headers, nameof(headers)); StatusCode = statusCode; Body = body; Headers = new ReadOnlyDictionary<string, string>(headers); ApiInfo = ApiInfoParser.ParseResponseHeaders(headers); ContentType = contentType; } /// <summary> /// Raw response body. Typically a string, but when requesting images, it will be a byte array. /// </summary> public object Body { get; private set; } /// <summary> /// Information about the API. /// </summary> public IReadOnlyDictionary<string, string> Headers { get; private set; } /// <summary> /// Information about the API response parsed from the response headers. /// </summary> public ApiInfo ApiInfo { get; internal set; } // This setter is internal for use in tests. /// <summary> /// The response status code. /// </summary> public HttpStatusCode StatusCode { get; private set; } /// <summary> /// The content type of the response. /// </summary> public string ContentType { get; private set; } } }
35.583333
120
0.614988
[ "MIT" ]
Aarojas3012a/octokit.net
Octokit/Http/Response.cs
2,137
C#
// // Copyright 2020 Google LLC // // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // using Google.Apis.Auth.OAuth2; using Google.Solutions.Common.Locator; using Google.Solutions.Common.Test.Integration; using Google.Solutions.IapDesktop.Application.Services.Adapters; using NUnit.Framework; using System.Threading; using System.Threading.Tasks; namespace Google.Solutions.IapDesktop.Application.Test.Services.Adapters { [TestFixture] [Category("IntegrationTest")] public class TestInstanceExtensions : ApplicationFixtureBase { [Test] public async Task WhenInstancePopulated_ThenGetInstanceLocatorSucceeds( [LinuxInstance] ResourceTask<InstanceLocator> testInstance, [Credential(Role = PredefinedRole.ComputeViewer)] ResourceTask<ICredential> credential) { var adapter = new ComputeEngineAdapter(await credential); var instance = await adapter .GetInstanceAsync( await testInstance, CancellationToken.None) .ConfigureAwait(false); var zoneLocator = instance.GetZoneLocator(); var instanceLocator = instance.GetInstanceLocator(); Assert.AreEqual(TestProject.Zone, zoneLocator.Name); Assert.AreEqual(TestProject.Zone, instanceLocator.Zone); Assert.AreEqual(await testInstance, instanceLocator); } [Test] public async Task 
WhenInstanceHasInternalIp_ThenPrivateAddressReturnsRfc1918Ip( [LinuxInstance] ResourceTask<InstanceLocator> testInstance, [Credential(Role = PredefinedRole.ComputeViewer)] ResourceTask<ICredential> credential) { var adapter = new ComputeEngineAdapter(await credential); var instance = await adapter.GetInstanceAsync( await testInstance, CancellationToken.None) .ConfigureAwait(false); Assert.IsNotNull(instance.InternalAddress()); CollectionAssert.Contains( new byte[] { 172, 192, 10 }, instance.InternalAddress().GetAddressBytes()[0], "Is RFC1918 address"); } [Test] public async Task WhenInstanceLacksPublicIp_ThenPublicAddressReturnsNull( [LinuxInstance(PublicIp = false)] ResourceTask<InstanceLocator> testInstance, [Credential(Role = PredefinedRole.ComputeViewer)] ResourceTask<ICredential> credential) { var adapter = new ComputeEngineAdapter(await credential); var instance = await adapter.GetInstanceAsync( await testInstance, CancellationToken.None) .ConfigureAwait(false); Assert.IsNull(instance.PublicAddress()); } [Test] public async Task WhenInstanceHasPublicIp_ThenPublicAddressReturnsNonRfc1918Ip( [LinuxInstance] ResourceTask<InstanceLocator> testInstance, [Credential(Role = PredefinedRole.ComputeViewer)] ResourceTask<ICredential> credential) { var adapter = new ComputeEngineAdapter(await credential); var instance = await adapter.GetInstanceAsync( await testInstance, CancellationToken.None) .ConfigureAwait(false); Assert.IsNotNull(instance.PublicAddress()); CollectionAssert.DoesNotContain( new byte[] { 172, 192, 10 }, instance.PublicAddress().GetAddressBytes()[0], "Is not a RFC1918 address"); } } }
40.805556
99
0.662355
[ "Apache-2.0" ]
Bhisma19/iap-desktop
sources/Google.Solutions.IapDesktop.Application.Test/Services/Adapters/TestInstanceExtensions.cs
4,409
C#
using System; using System.Linq; using FluentAssertions; using VaraniumSharp.DryIoc.Tests.Fixtures; using Xunit; namespace VaraniumSharp.DryIoc.Tests { public class ContainerSetupTest { #region Public Methods [Fact] public void ClassWithMultipleConstructorsIsRegisteredCorrectly() { // arrange var sut = new ContainerSetup(); var act = new Action(() => sut.RetrieveClassesRequiringRegistration(true)); // act // assert act.Should().NotThrow<Exception>(); } [Fact] public void ConcretionClassesAreResolvedCorrectly() { // arrange var sut = new ContainerSetup(); // act sut.RetrieveConcretionClassesRequiringRegistration(true); // assert var resolvedClass = sut.ResolveMany<BaseClassDummy>().ToList(); resolvedClass.Count.Should().Be(1); resolvedClass.First().GetType().Should().Be(typeof(InheritorClassDummy)); } [Fact] public void AutoResolveClassesAreCorrectlyResolved() { // arrange var sut = new ContainerSetup(); sut.RetrieveClassesRequiringRegistration(true); // act sut.AutoResolveRequiredClasses(); // assert var _ = sut.Resolve<AutoResolve>(); AutoResolve.TimesResolved.Should().BeGreaterOrEqualTo(2); } [Fact] public void ConcretionClassesCorrectlyApplyReuse() { // arrange var sut = new ContainerSetup(); // act sut.RetrieveConcretionClassesRequiringRegistration(true); // assert var resolvedClasses = sut.ResolveMany<ITestInterfaceDummy>(); var secondResolve = sut.ResolveMany<ITestInterfaceDummy>(); resolvedClasses.Should().BeEquivalentTo(secondResolve); } [Fact] public void ConcretionClassesFromInterfaceAreCorrectlyResolved() { // arrange var sut = new ContainerSetup(); // act sut.RetrieveConcretionClassesRequiringRegistration(true); // assert var resolvedClasses = sut.ResolveMany<ITestInterfaceDummy>().ToList(); resolvedClasses.Count.Should().Be(2); resolvedClasses.Should().Contain(x => x.GetType() == typeof(ImplementationClassDummy)); resolvedClasses.Should().Contain(x => x.GetType() == typeof(ImplementationClassTooDummy)); } [Fact] public void 
ConcretionClassWithMultipleConstructorsIsRegisteredCorrectly() { // arrange var sut = new ContainerSetup(); var act = new Action(() => sut.RetrieveConcretionClassesRequiringRegistration(true)); // act // assert act.Should().NotThrow<Exception>(); } [Fact] public void MultiTypeRegistrationSingletonsWorkCorrectly() { // arrange var sut = new ContainerSetup(); // act sut.RetrieveConcretionClassesRequiringRegistration(true); // assert var resolvedClasses = sut.ResolveMany<ITestInterfaceDummy>().ToList(); var interfaceResolvedClass = resolvedClasses.FirstOrDefault(t => t.GetType() == typeof(ImplementationClassDummy)); var directlyResolvedClass = sut.Resolve<ImplementationClassDummy>(); interfaceResolvedClass.Should().Be(directlyResolvedClass); } [Fact] public void RegisteringAttributedDisposableTransientDoesNotThrowAnException() { // arrange var sut = new ContainerSetup(); var act = new Action(() => sut.RetrieveClassesRequiringRegistration(true)); // act // assert act.Should().NotThrow<Exception>(); } [Fact] public void RegistrationOfDisposableConcretionClassDoesNotThrowAnException() { // arrange var sut = new ContainerSetup(); var act = new Action(() => sut.RetrieveConcretionClassesRequiringRegistration(true)); // act // assert act.Should().NotThrow<Exception>(); } [Fact] public void SetupContainer() { // arrange var sut = new ContainerSetup(); // act sut.RetrieveClassesRequiringRegistration(true); // assert var resolvedClass = sut.Resolve<AutoRegistrationDummy>(); resolvedClass.GetType().Should().Be<AutoRegistrationDummy>(); } [Fact] public void SingletonRegistrationsAreResolvedCorrectly() { // arrange var sut = new ContainerSetup(); // act sut.RetrieveClassesRequiringRegistration(true); // assert var resolvedClass = sut.Resolve<SingletonDummy>(); var secondResolve = sut.Resolve<SingletonDummy>(); resolvedClass.Should().Be(secondResolve); } [Fact] public void TransientRegistrationsAreResolvedCorrectly() { // arrange var sut = new ContainerSetup(); // act 
sut.RetrieveClassesRequiringRegistration(true); // assert var resolvedClass = sut.Resolve<AutoRegistrationDummy>(); var secondResolve = sut.Resolve<AutoRegistrationDummy>(); resolvedClass.Should().NotBe(secondResolve); } #endregion } }
30.774194
102
0.577743
[ "MIT" ]
NinetailLabs/VaraniumSharp.DryIoc
VaraniumSharp.DryIoc.Tests/ContainerSetupTest.cs
5,726
C#
// GENERATED AUTOMATICALLY FROM 'Assets/Input/InputActions.inputactions' using System; using System.Collections; using System.Collections.Generic; using UnityEngine.InputSystem; using UnityEngine.InputSystem.Utilities; public class @InputActions : IInputActionCollection, IDisposable { public InputActionAsset asset { get; } public @InputActions() { asset = InputActionAsset.FromJson(@"{ ""name"": ""InputActions"", ""maps"": [ { ""name"": ""CharacterControl"", ""id"": ""3713082f-bb36-41b6-8a52-552522de85bd"", ""actions"": [ { ""name"": ""Left"", ""type"": ""Button"", ""id"": ""9eb62d50-1ddc-47b2-8af0-e9bc0f4fb0cf"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" }, { ""name"": ""Right"", ""type"": ""Button"", ""id"": ""c48bbed3-6950-4332-ae0d-e9ee4d0058b0"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" }, { ""name"": ""Jump"", ""type"": ""Button"", ""id"": ""22ff7d96-0edf-4568-b25f-d44e48cb5c6a"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" }, { ""name"": ""Attack"", ""type"": ""Button"", ""id"": ""d3037a47-d5bc-47eb-8e17-a8a5212aaec7"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" }, { ""name"": ""LeftRightAxis"", ""type"": ""Value"", ""id"": ""556d4730-75b1-47ec-8457-647da7614bcb"", ""expectedControlType"": ""Axis"", ""processors"": """", ""interactions"": """" }, { ""name"": ""Crouch"", ""type"": ""Button"", ""id"": ""9d15fe7d-9964-4876-bb11-8630d83d6b67"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" }, { ""name"": ""CrouchAxis"", ""type"": ""Value"", ""id"": ""52f9a910-9a8d-4642-80aa-d64e1918fa3f"", ""expectedControlType"": ""Axis"", ""processors"": """", ""interactions"": """" }, { ""name"": ""Confirm"", ""type"": ""Button"", ""id"": ""85ffcda8-4190-4309-b597-492d8b1fb77c"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" }, { ""name"": 
""TouchAxis"", ""type"": ""Value"", ""id"": ""7dbe8a00-0ecb-46a0-89b8-8be0b374eb8b"", ""expectedControlType"": ""Vector2"", ""processors"": """", ""interactions"": """" }, { ""name"": ""Tap"", ""type"": ""Button"", ""id"": ""0167bb20-ffd0-4ea8-afa7-a9ce903f072b"", ""expectedControlType"": ""Button"", ""processors"": """", ""interactions"": """" } ], ""bindings"": [ { ""name"": """", ""id"": ""2d40ba2a-4a9f-44b0-acf5-5f95bb5ed7e1"", ""path"": ""<Keyboard>/a"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Left"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""e0e5675f-ac65-4702-ae42-727446cc389c"", ""path"": ""<Gamepad>/dpad/left"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Left"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""3a27bef1-8584-4bdd-a4c4-a36b8a7a9204"", ""path"": ""<Keyboard>/d"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Right"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""ba80465a-514d-4806-af20-5ccb7830882f"", ""path"": ""<Gamepad>/dpad/right"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Right"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""89229752-d95e-4296-a6d0-1a5da74fc599"", ""path"": ""<Keyboard>/space"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Jump"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""1d530ba4-e0ef-42b6-8621-a63163cfe32e"", ""path"": ""<Gamepad>/buttonSouth"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Jump"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""bab4cb38-1ce8-4d11-bb0d-5e88cc2569f1"", ""path"": ""<Keyboard>/ctrl"", ""interactions"": """", ""processors"": """", ""groups"": """", 
""action"": ""Attack"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""22bf601c-0f89-4ccf-b3da-a2fe3176fa91"", ""path"": ""<Gamepad>/buttonEast"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Attack"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""051f2a69-5a2e-43ed-9376-7eff2213bd90"", ""path"": ""<Gamepad>/leftStick/x"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""LeftRightAxis"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""c584780c-14a0-4ad8-9656-0c1502b81d4e"", ""path"": ""<Keyboard>/s"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Crouch"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""67696cf3-61a9-44d6-bfa2-1fe65b79f1d4"", ""path"": ""<Gamepad>/dpad/down"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Crouch"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""96ff6078-0673-4a3a-b3df-2f3264fb9901"", ""path"": ""<Gamepad>/leftStick/y"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""CrouchAxis"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""5fa3d4c6-0e4c-4d06-922c-55ebc35eeab2"", ""path"": ""<Keyboard>/enter"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Confirm"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""5847d20a-48dc-4880-934c-e296c02f51c4"", ""path"": ""<Gamepad>/start"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Confirm"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""66d30ff4-8bff-4d40-81c5-a4bed0291d45"", ""path"": ""<Gamepad>/select"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": 
""Confirm"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""edd898cb-3dba-4aeb-b5ac-57c50d860ae2"", ""path"": ""<Pointer>/position"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""TouchAxis"", ""isComposite"": false, ""isPartOfComposite"": false }, { ""name"": """", ""id"": ""f07fc7ff-c57b-4ee1-ab1f-16f95af25695"", ""path"": ""<Touchscreen>/primaryTouch/press"", ""interactions"": """", ""processors"": """", ""groups"": """", ""action"": ""Tap"", ""isComposite"": false, ""isPartOfComposite"": false } ] } ], ""controlSchemes"": [] }"); // CharacterControl m_CharacterControl = asset.FindActionMap("CharacterControl", throwIfNotFound: true); m_CharacterControl_Left = m_CharacterControl.FindAction("Left", throwIfNotFound: true); m_CharacterControl_Right = m_CharacterControl.FindAction("Right", throwIfNotFound: true); m_CharacterControl_Jump = m_CharacterControl.FindAction("Jump", throwIfNotFound: true); m_CharacterControl_Attack = m_CharacterControl.FindAction("Attack", throwIfNotFound: true); m_CharacterControl_LeftRightAxis = m_CharacterControl.FindAction("LeftRightAxis", throwIfNotFound: true); m_CharacterControl_Crouch = m_CharacterControl.FindAction("Crouch", throwIfNotFound: true); m_CharacterControl_CrouchAxis = m_CharacterControl.FindAction("CrouchAxis", throwIfNotFound: true); m_CharacterControl_Confirm = m_CharacterControl.FindAction("Confirm", throwIfNotFound: true); m_CharacterControl_TouchAxis = m_CharacterControl.FindAction("TouchAxis", throwIfNotFound: true); m_CharacterControl_Tap = m_CharacterControl.FindAction("Tap", throwIfNotFound: true); } public void Dispose() { UnityEngine.Object.Destroy(asset); } public InputBinding? bindingMask { get => asset.bindingMask; set => asset.bindingMask = value; } public ReadOnlyArray<InputDevice>? 
devices { get => asset.devices; set => asset.devices = value; } public ReadOnlyArray<InputControlScheme> controlSchemes => asset.controlSchemes; public bool Contains(InputAction action) { return asset.Contains(action); } public IEnumerator<InputAction> GetEnumerator() { return asset.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } public void Enable() { asset.Enable(); } public void Disable() { asset.Disable(); } // CharacterControl private readonly InputActionMap m_CharacterControl; private ICharacterControlActions m_CharacterControlActionsCallbackInterface; private readonly InputAction m_CharacterControl_Left; private readonly InputAction m_CharacterControl_Right; private readonly InputAction m_CharacterControl_Jump; private readonly InputAction m_CharacterControl_Attack; private readonly InputAction m_CharacterControl_LeftRightAxis; private readonly InputAction m_CharacterControl_Crouch; private readonly InputAction m_CharacterControl_CrouchAxis; private readonly InputAction m_CharacterControl_Confirm; private readonly InputAction m_CharacterControl_TouchAxis; private readonly InputAction m_CharacterControl_Tap; public struct CharacterControlActions { private @InputActions m_Wrapper; public CharacterControlActions(@InputActions wrapper) { m_Wrapper = wrapper; } public InputAction @Left => m_Wrapper.m_CharacterControl_Left; public InputAction @Right => m_Wrapper.m_CharacterControl_Right; public InputAction @Jump => m_Wrapper.m_CharacterControl_Jump; public InputAction @Attack => m_Wrapper.m_CharacterControl_Attack; public InputAction @LeftRightAxis => m_Wrapper.m_CharacterControl_LeftRightAxis; public InputAction @Crouch => m_Wrapper.m_CharacterControl_Crouch; public InputAction @CrouchAxis => m_Wrapper.m_CharacterControl_CrouchAxis; public InputAction @Confirm => m_Wrapper.m_CharacterControl_Confirm; public InputAction @TouchAxis => m_Wrapper.m_CharacterControl_TouchAxis; public InputAction @Tap => 
m_Wrapper.m_CharacterControl_Tap; public InputActionMap Get() { return m_Wrapper.m_CharacterControl; } public void Enable() { Get().Enable(); } public void Disable() { Get().Disable(); } public bool enabled => Get().enabled; public static implicit operator InputActionMap(CharacterControlActions set) { return set.Get(); } public void SetCallbacks(ICharacterControlActions instance) { if (m_Wrapper.m_CharacterControlActionsCallbackInterface != null) { @Left.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnLeft; @Left.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnLeft; @Left.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnLeft; @Right.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnRight; @Right.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnRight; @Right.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnRight; @Jump.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnJump; @Jump.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnJump; @Jump.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnJump; @Attack.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnAttack; @Attack.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnAttack; @Attack.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnAttack; @LeftRightAxis.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnLeftRightAxis; @LeftRightAxis.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnLeftRightAxis; @LeftRightAxis.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnLeftRightAxis; @Crouch.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnCrouch; @Crouch.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnCrouch; @Crouch.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnCrouch; @CrouchAxis.started -= 
m_Wrapper.m_CharacterControlActionsCallbackInterface.OnCrouchAxis; @CrouchAxis.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnCrouchAxis; @CrouchAxis.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnCrouchAxis; @Confirm.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnConfirm; @Confirm.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnConfirm; @Confirm.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnConfirm; @TouchAxis.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnTouchAxis; @TouchAxis.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnTouchAxis; @TouchAxis.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnTouchAxis; @Tap.started -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnTap; @Tap.performed -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnTap; @Tap.canceled -= m_Wrapper.m_CharacterControlActionsCallbackInterface.OnTap; } m_Wrapper.m_CharacterControlActionsCallbackInterface = instance; if (instance != null) { @Left.started += instance.OnLeft; @Left.performed += instance.OnLeft; @Left.canceled += instance.OnLeft; @Right.started += instance.OnRight; @Right.performed += instance.OnRight; @Right.canceled += instance.OnRight; @Jump.started += instance.OnJump; @Jump.performed += instance.OnJump; @Jump.canceled += instance.OnJump; @Attack.started += instance.OnAttack; @Attack.performed += instance.OnAttack; @Attack.canceled += instance.OnAttack; @LeftRightAxis.started += instance.OnLeftRightAxis; @LeftRightAxis.performed += instance.OnLeftRightAxis; @LeftRightAxis.canceled += instance.OnLeftRightAxis; @Crouch.started += instance.OnCrouch; @Crouch.performed += instance.OnCrouch; @Crouch.canceled += instance.OnCrouch; @CrouchAxis.started += instance.OnCrouchAxis; @CrouchAxis.performed += instance.OnCrouchAxis; @CrouchAxis.canceled += instance.OnCrouchAxis; @Confirm.started += instance.OnConfirm; 
@Confirm.performed += instance.OnConfirm; @Confirm.canceled += instance.OnConfirm; @TouchAxis.started += instance.OnTouchAxis; @TouchAxis.performed += instance.OnTouchAxis; @TouchAxis.canceled += instance.OnTouchAxis; @Tap.started += instance.OnTap; @Tap.performed += instance.OnTap; @Tap.canceled += instance.OnTap; } } } public CharacterControlActions @CharacterControl => new CharacterControlActions(this); public interface ICharacterControlActions { void OnLeft(InputAction.CallbackContext context); void OnRight(InputAction.CallbackContext context); void OnJump(InputAction.CallbackContext context); void OnAttack(InputAction.CallbackContext context); void OnLeftRightAxis(InputAction.CallbackContext context); void OnCrouch(InputAction.CallbackContext context); void OnCrouchAxis(InputAction.CallbackContext context); void OnConfirm(InputAction.CallbackContext context); void OnTouchAxis(InputAction.CallbackContext context); void OnTap(InputAction.CallbackContext context); } }
45.949045
113
0.490204
[ "MIT" ]
noirhero/ActionFramework
Assets/Input/InputActions.cs
21,642
C#
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Text;
using NUnit.Framework;
using OpenSim.Framework;
using OpenSim.Framework.Servers;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Tests.Common.Mock;

namespace OpenSim.Tests.Common
{
    /// <summary>
    /// Shared assertion routines used by test fixtures that exercise the
    /// path-parsing behaviour of <see cref="BaseRequestHandler"/> subclasses.
    /// </summary>
    public class BaseRequestHandlerHelpers
    {
        // Expected result for every "no parameters" case in the SplitParams tests.
        private static readonly string[] m_noParams = new string[] { };

        public static void BaseTestGetParams(BaseRequestHandler handler, string assetsPath)
        {
            // Degenerate or malformed paths must all resolve to an empty parameter string.
            Assert.AreEqual(String.Empty, handler.GetParam(null), "Failed on null path.");
            Assert.AreEqual(String.Empty, handler.GetParam(""), "Failed on empty path.");
            Assert.AreEqual(String.Empty, handler.GetParam("s"), "Failed on short url.");
            Assert.AreEqual(String.Empty, handler.GetParam("corruptUrl"), "Failed on corruptUrl.");

            // Everything after the registered prefix is returned verbatim,
            // including leading and trailing slashes.
            Assert.AreEqual(String.Empty, handler.GetParam(assetsPath));
            Assert.AreEqual("/", handler.GetParam(assetsPath + "/"));
            Assert.AreEqual("/a", handler.GetParam(assetsPath + "/a"));
            Assert.AreEqual("/b/", handler.GetParam(assetsPath + "/b/"));
            Assert.AreEqual("/c/d", handler.GetParam(assetsPath + "/c/d"));
            Assert.AreEqual("/e/f/", handler.GetParam(assetsPath + "/e/f/"));
        }

        public static void BaseTestSplitParams(BaseRequestHandler handler, string assetsPath)
        {
            // Malformed or empty paths split into an empty segment array.
            Assert.AreEqual(m_noParams, handler.SplitParams(null), "Failed on null.");
            Assert.AreEqual(m_noParams, handler.SplitParams(""), "Failed on empty path.");
            Assert.AreEqual(m_noParams, handler.SplitParams("corruptUrl"), "Failed on corrupt url.");
            Assert.AreEqual(m_noParams, handler.SplitParams(assetsPath), "Failed on empty params.");
            Assert.AreEqual(m_noParams, handler.SplitParams(assetsPath + "/"), "Failed on single slash.");

            // Segments after the prefix are split on '/'; trailing slashes add no segment.
            Assert.AreEqual(new string[] { "a" }, handler.SplitParams(assetsPath + "/a"), "Failed on first segment.");
            Assert.AreEqual(new string[] { "b" }, handler.SplitParams(assetsPath + "/b/"), "Failed on second slash.");
            Assert.AreEqual(new string[] { "c", "d" }, handler.SplitParams(assetsPath + "/c/d"), "Failed on second segment.");
            Assert.AreEqual(new string[] { "e", "f" }, handler.SplitParams(assetsPath + "/e/f/"), "Failed on trailing slash.");
        }

        // Kept as a public mutable field for backward compatibility with existing callers.
        public static byte[] EmptyByteArray = new byte[] {};
    }
}
53.714286
127
0.694632
[ "BSD-3-Clause" ]
AlericInglewood/opensimulator
OpenSim/Tests/Common/Helpers/BaseRequestHandlerHelpers.cs
4,136
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace NBitcoinBTG.Policy
{
    /// <summary>
    /// Transaction policy mirroring Bitcoin Core standardness rules:
    /// per-input script validity and scriptSig shape, fee sanity,
    /// scriptPubKey templates, dust outputs and the OP_RETURN count limit.
    /// </summary>
    public class StandardTransactionPolicy : ITransactionPolicy
    {
        /// <summary>
        /// Creates a policy with the default standardness settings.
        /// </summary>
        public StandardTransactionPolicy()
        {
            ScriptVerify = NBitcoinBTG.ScriptVerify.Standard;
            MaxTransactionSize = 100000;
            MaxTxFee = new FeeRate(Money.Coins(0.1m));
            MinRelayTxFee = new FeeRate(Money.Satoshis(5000));
            CheckFee = true;
            CheckScriptPubKey = true;
        }

        /// <summary>
        /// Maximum serialized transaction size in bytes; null disables the check.
        /// </summary>
        public int? MaxTransactionSize
        {
            get;
            set;
        }

        /// <summary>
        /// Safety check, if the FeeRate exceed this value, a policy error is raised
        /// </summary>
        public FeeRate MaxTxFee
        {
            get;
            set;
        }

        /// <summary>
        /// Minimum relay fee rate; also drives the dust check. Null disables both.
        /// </summary>
        public FeeRate MinRelayTxFee
        {
            get;
            set;
        }

        /// <summary>
        /// Script verification flags applied to each known input; null skips script checks.
        /// </summary>
        public ScriptVerify? ScriptVerify
        {
            get;
            set;
        }

        /// <summary>
        /// Check if the transaction is safe from malleability (default: false)
        /// </summary>
        public bool CheckMalleabilitySafe { get; set; } = false;

        /// <summary>
        /// Whether fee sanity checks (too high / too low) are performed.
        /// </summary>
        public bool CheckFee
        {
            get;
            set;
        }

        /// <summary>
        /// Check the standardness of scriptPubKey
        /// </summary>
        public bool CheckScriptPubKey
        {
            get;
            set;
        }

#if !NOCONSENSUSLIB
        /// <summary>
        /// Use the native consensus library instead of the managed interpreter for script checks.
        /// </summary>
        public bool UseConsensusLib
        {
            get;
            set;
        }
#endif
        /// <summary>Maximum accepted scriptSig size in bytes.</summary>
        public const int MaxScriptSigLength = 1650;

        #region ITransactionPolicy Members

        /// <summary>
        /// Checks <paramref name="transaction"/> against this policy.
        /// </summary>
        /// <param name="transaction">The transaction to validate.</param>
        /// <param name="spentCoins">Coins spent by the transaction; inputs whose coin is
        /// not supplied are skipped by the script and malleability checks.</param>
        /// <returns>Every violation found; empty when the transaction is standard.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="transaction"/> is null.</exception>
        public TransactionPolicyError[] Check(Transaction transaction, ICoin[] spentCoins)
        {
            if(transaction == null)
                throw new ArgumentNullException(nameof(transaction));
            spentCoins = spentCoins ?? new ICoin[0];

            List<TransactionPolicyError> errors = new List<TransactionPolicyError>();

            foreach(var input in transaction.Inputs.AsIndexedInputs())
            {
                var coin = spentCoins.FirstOrDefault(s => s.Outpoint == input.PrevOut);
                if(coin != null)
                {
                    if(ScriptVerify != null)
                    {
                        ScriptError error;
                        if(!VerifyScript(input, coin.TxOut.ScriptPubKey, coin.TxOut.Value, ScriptVerify.Value, out error))
                        {
                            errors.Add(new ScriptPolicyError(input, error, ScriptVerify.Value, coin.TxOut.ScriptPubKey));
                        }
                    }
                }

                // scriptSig shape checks do not need the spent coin.
                var txin = input.TxIn;
                if(txin.ScriptSig.Length > MaxScriptSigLength)
                {
                    errors.Add(new InputPolicyError("Max scriptSig length exceeded actual is " + txin.ScriptSig.Length + ", max is " + MaxScriptSigLength, input));
                }
                if(!txin.ScriptSig.IsPushOnly)
                {
                    errors.Add(new InputPolicyError("All operation should be push", input));
                }
                if(!txin.ScriptSig.HasCanonicalPushes)
                {
                    errors.Add(new InputPolicyError("All operation should be canonical push", input));
                }
            }

            if(CheckMalleabilitySafe)
            {
                foreach(var input in transaction.Inputs.AsIndexedInputs())
                {
                    var coin = spentCoins.FirstOrDefault(s => s.Outpoint == input.PrevOut);
                    // Only witness-hashed inputs are treated as malleability-safe here.
                    if(coin != null && coin.GetHashVersion() != HashVersion.Witness)
                        errors.Add(new InputPolicyError("Malleable input detected", input));
                }
            }

            if(CheckScriptPubKey)
            {
                foreach(var txout in transaction.Outputs.AsCoins())
                {
                    var template = StandardScripts.GetTemplateFromScriptPubKey(txout.ScriptPubKey);
                    if(template == null)
                        errors.Add(new OutputPolicyError("Non-Standard scriptPubKey", (int)txout.Outpoint.N));
                }
            }

            int txSize = transaction.GetSerializedSize();
            if(MaxTransactionSize != null)
            {
                if(txSize >= MaxTransactionSize.Value)
                    errors.Add(new TransactionSizePolicyError(txSize, MaxTransactionSize.Value));
            }

            // GetFee can only return a value when every spent coin was supplied.
            var fees = transaction.GetFee(spentCoins);
            if(fees != null && CheckFee)
            {
                if(MaxTxFee != null)
                {
                    var max = MaxTxFee.GetFee(txSize);
                    if(fees > max)
                        errors.Add(new FeeTooHighPolicyError(fees, max));
                }
                // BUGFIX: the original nested the same `MinRelayTxFee != null`
                // check twice; a single check is sufficient and equivalent.
                if(MinRelayTxFee != null)
                {
                    var min = MinRelayTxFee.GetFee(txSize);
                    if(fees < min)
                        errors.Add(new FeeTooLowPolicyError(fees, min));
                }
            }

            if(MinRelayTxFee != null)
            {
                foreach(var output in transaction.Outputs)
                {
                    var bytes = output.ScriptPubKey.ToBytes(true);
                    // OP_RETURN outputs are exempted from the dust rule.
                    if(output.IsDust(MinRelayTxFee) && !IsOpReturn(bytes))
                        errors.Add(new DustPolicyError(output.Value, output.GetDustThreshold(MinRelayTxFee)));
                }
            }

            var opReturnCount = transaction.Outputs.Select(o => o.ScriptPubKey.ToBytes(true)).Count(b => IsOpReturn(b));
            if(opReturnCount > 1)
                errors.Add(new TransactionPolicyError("More than one op return detected"));

            return errors.ToArray();
        }

        /// <summary>
        /// True when the serialized script starts with OP_RETURN.
        /// </summary>
        private static bool IsOpReturn(byte[] bytes)
        {
            return bytes.Length > 0 && bytes[0] == (byte)OpcodeType.OP_RETURN;
        }

        /// <summary>
        /// Verifies one input's script with the managed interpreter or, when
        /// <c>UseConsensusLib</c> is set, with the native consensus library.
        /// </summary>
        private bool VerifyScript(IndexedTxIn input, Script scriptPubKey, Money value, ScriptVerify scriptVerify, out ScriptError error)
        {
#if !NOCONSENSUSLIB
            if(!UseConsensusLib)
#endif
                return input.VerifyScript(scriptPubKey, value, scriptVerify, out error);
#if !NOCONSENSUSLIB
            else
            {
                var ok = Script.VerifyScriptConsensus(scriptPubKey, input.Transaction, input.Index, scriptVerify);
                if(!ok)
                {
                    // Re-run the managed interpreter to recover a precise error code;
                    // if it disagrees with the consensus library (i.e. it passes),
                    // report the failure as unknown.
                    if(input.VerifyScript(scriptPubKey, scriptVerify, out error))
                        error = ScriptError.UnknownError;
                    return false;
                }
                else
                {
                    error = ScriptError.OK;
                }
                return true;
            }
#endif
        }

        #endregion

        /// <summary>
        /// Creates an independent copy of this policy with identical settings.
        /// </summary>
        public StandardTransactionPolicy Clone()
        {
            return new StandardTransactionPolicy()
            {
                MaxTransactionSize = MaxTransactionSize,
                MaxTxFee = MaxTxFee,
                MinRelayTxFee = MinRelayTxFee,
                ScriptVerify = ScriptVerify,
#if !NOCONSENSUSLIB
                UseConsensusLib = UseConsensusLib,
#endif
                CheckMalleabilitySafe = CheckMalleabilitySafe,
                CheckScriptPubKey = CheckScriptPubKey,
                CheckFee = CheckFee
            };
        }
    }
}
25.689655
149
0.648993
[ "MIT" ]
LykkeCity/NBitcoinBTG
NBitcoin/Policy/StandardTransactionPolicy.cs
5,962
C#
using System.Collections.Generic;
using AutoStep.Elements.Metadata;

namespace AutoStep.Elements.Test
{
    /// <summary>
    /// A built scenario outline: a scenario together with its example tables.
    /// </summary>
    public class ScenarioOutlineElement : ScenarioElement, IScenarioOutlineInfo
    {
        private readonly List<ExampleElement> exampleBlocks = new List<ExampleElement>();

        // Every header name seen across all example tables, cached so that
        // variable lookups do not have to rescan the tables.
        private readonly HashSet<string> knownVariables = new HashSet<string>();

        /// <summary>
        /// Gets the contained example blocks.
        /// </summary>
        public IReadOnlyList<ExampleElement> Examples => exampleBlocks;

        /// <inheritdoc/>
        IReadOnlyList<IExampleInfo> IScenarioOutlineInfo.Examples => exampleBlocks;

        /// <summary>
        /// Adds an example to the scenario outline.
        /// </summary>
        /// <param name="example">The example to add.</param>
        public void AddExample(ExampleElement example)
        {
            if (example is null)
            {
                throw new System.ArgumentNullException(nameof(example));
            }

            exampleBlocks.Add(example);

            if (example.Table is null)
            {
                return;
            }

            // Record each non-null header name from the example's table.
            foreach (var header in example.Table.Header.Headers)
            {
                var name = header.HeaderName;

                if (name is object)
                {
                    knownVariables.Add(name);
                }
            }
        }

        /// <summary>
        /// Checks whether any of the example tables in the scenario outline contains
        /// the specified insertion variable name.
        /// </summary>
        /// <param name="variableName">The name of the variable.</param>
        /// <returns>True if the variable is available, false otherwise.</returns>
        public bool ContainsVariable(string variableName) => knownVariables.Contains(variableName);
    }
}
33.566667
85
0.575968
[ "MIT" ]
SivaGudivada/AutoStep
src/AutoStep/Elements/Test/ScenarioOutlineElement.cs
2,016
C#
using System;
using System.Collections.Concurrent;

namespace _12.ConcurrentQueue
{
    /// <summary>
    /// Demonstrates the thread-safe FIFO operations of <see cref="ConcurrentQueue{T}"/>.
    /// </summary>
    class Startup
    {
        static void Main(string[] args)
        {
            var queue = new ConcurrentQueue<int>();

            // Seed the queue with two elements.
            queue.Enqueue(1);
            queue.Enqueue(2);

            // TryDequeue atomically removes the head; it fails only when the queue is empty.
            if (queue.TryDequeue(out var removed))
            {
                Console.WriteLine($"Successfully removed {removed} from the queue");
            }

            // TryPeek inspects the current head without removing it.
            if (queue.TryPeek(out var head))
            {
                Console.WriteLine($"The front element of the queue is {head}");
            }
        }
    }
}
25.521739
83
0.572402
[ "MIT" ]
YakoYakov/ParallelProgrammingWithCSharp
ParallelProgrammingExamples/12.ConcurrentQueue/Startup.cs
589
C#
namespace Contoso.AADB2C.API.Models
{
    /// <summary>
    /// Output claims returned by this API. Property names are deliberately
    /// lower-case — presumably they must match the claim names expected by the
    /// consuming AAD B2C custom policy (TODO confirm against the policy JSON),
    /// so they must not be renamed.
    /// </summary>
    public class OutputClaimsModel
    {
        /// <summary>Loyalty number value emitted as an output claim.</summary>
        public string loyaltyNumber { get; set; }

        /// <summary>Action value emitted as an output claim.</summary>
        public string action { get; set; }
    }
}
22.375
49
0.636872
[ "MIT" ]
scottluskcis/adb2c-custom-claims
src/Contoso.AADB2C.API/Models/OutputClaimsModel.cs
179
C#
namespace HttpServer.Http.Constants
{
    /// <summary>
    /// Cookie attribute names used when composing Set-Cookie header values.
    /// </summary>
    public static class CookieData
    {
        // Changed from mutable public static fields to const (CA2211):
        // these are protocol-defined attribute names and must never be
        // reassigned at runtime. Source-compatible for all readers.
        public const string MaxAgeHeader = "Max-Age";

        public const string PathHeader = "Path";

        public const string HttpOnlyHeader = "HttpOnly";
    }
}
22.909091
57
0.662698
[ "MIT" ]
gtsonkov/SimpleHttpServer
HttpServer/HttpServer.Http/Constants/CookieData.cs
254
C#
using System; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; using Microsoft.EntityFrameworkCore.Metadata; using Microsoft.EntityFrameworkCore.Migrations; using fruitShop.EntityFrameworkCore; namespace fruitShop.Migrations { [DbContext(typeof(fruitShopDbContext))] [Migration("20170608053244_Upgraded_To_Abp_2_1_0")] partial class Upgraded_To_Abp_2_1_0 { protected override void BuildTargetModel(ModelBuilder modelBuilder) { modelBuilder .HasAnnotation("ProductVersion", "1.1.2") .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn); modelBuilder.Entity("Abp.Application.Editions.Edition", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(64); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(32); b.HasKey("Id"); b.ToTable("AbpEditions"); }); modelBuilder.Entity("Abp.Application.Features.FeatureSetting", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Discriminator") .IsRequired(); b.Property<string>("Name") .IsRequired() .HasMaxLength(128); b.Property<string>("Value") .IsRequired() .HasMaxLength(2000); b.HasKey("Id"); b.ToTable("AbpFeatures"); b.HasDiscriminator<string>("Discriminator").HasValue("FeatureSetting"); }); modelBuilder.Entity("Abp.Auditing.AuditLog", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("BrowserInfo") .HasMaxLength(256); b.Property<string>("ClientIpAddress") .HasMaxLength(64); b.Property<string>("ClientName") .HasMaxLength(128); b.Property<string>("CustomData") .HasMaxLength(2000); 
b.Property<string>("Exception") .HasMaxLength(2000); b.Property<int>("ExecutionDuration"); b.Property<DateTime>("ExecutionTime"); b.Property<int?>("ImpersonatorTenantId"); b.Property<long?>("ImpersonatorUserId"); b.Property<string>("MethodName") .HasMaxLength(256); b.Property<string>("Parameters") .HasMaxLength(1024); b.Property<string>("ServiceName") .HasMaxLength(256); b.Property<int?>("TenantId"); b.Property<long?>("UserId"); b.HasKey("Id"); b.HasIndex("TenantId", "ExecutionDuration"); b.HasIndex("TenantId", "ExecutionTime"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpAuditLogs"); }); modelBuilder.Entity("Abp.Authorization.PermissionSetting", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Discriminator") .IsRequired(); b.Property<bool>("IsGranted"); b.Property<string>("Name") .IsRequired() .HasMaxLength(128); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpPermissions"); b.HasDiscriminator<string>("Discriminator").HasValue("PermissionSetting"); }); modelBuilder.Entity("Abp.Authorization.Roles.RoleClaim", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ClaimType"); b.Property<string>("ClaimValue"); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<int>("RoleId"); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("RoleId"); b.HasIndex("TenantId", "ClaimType"); b.ToTable("AbpRoleClaims"); }); modelBuilder.Entity("Abp.Authorization.Users.UserAccount", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("EmailAddress"); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastLoginTime"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); 
b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.Property<long?>("UserLinkId"); b.Property<string>("UserName"); b.HasKey("Id"); b.HasIndex("EmailAddress"); b.HasIndex("UserName"); b.HasIndex("TenantId", "EmailAddress"); b.HasIndex("TenantId", "UserId"); b.HasIndex("TenantId", "UserName"); b.ToTable("AbpUserAccounts"); }); modelBuilder.Entity("Abp.Authorization.Users.UserClaim", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ClaimType"); b.Property<string>("ClaimValue"); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "ClaimType"); b.ToTable("AbpUserClaims"); }); modelBuilder.Entity("Abp.Authorization.Users.UserLogin", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("LoginProvider") .IsRequired() .HasMaxLength(128); b.Property<string>("ProviderKey") .IsRequired() .HasMaxLength(256); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "UserId"); b.HasIndex("TenantId", "LoginProvider", "ProviderKey"); b.ToTable("AbpUserLogins"); }); modelBuilder.Entity("Abp.Authorization.Users.UserLoginAttempt", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("BrowserInfo") .HasMaxLength(256); b.Property<string>("ClientIpAddress") .HasMaxLength(64); b.Property<string>("ClientName") .HasMaxLength(128); b.Property<DateTime>("CreationTime"); b.Property<byte>("Result"); b.Property<string>("TenancyName") .HasMaxLength(64); b.Property<int?>("TenantId"); b.Property<long?>("UserId"); b.Property<string>("UserNameOrEmailAddress") .HasMaxLength(255); b.HasKey("Id"); b.HasIndex("UserId", "TenantId"); b.HasIndex("TenancyName", "UserNameOrEmailAddress", "Result"); b.ToTable("AbpUserLoginAttempts"); }); modelBuilder.Entity("Abp.Authorization.Users.UserOrganizationUnit", b => { b.Property<long>("Id") 
.ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long>("OrganizationUnitId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("TenantId", "OrganizationUnitId"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpUserOrganizationUnits"); }); modelBuilder.Entity("Abp.Authorization.Users.UserRole", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<int>("RoleId"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "RoleId"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpUserRoles"); }); modelBuilder.Entity("Abp.Authorization.Users.UserToken", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("LoginProvider"); b.Property<string>("Name"); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.Property<string>("Value"); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "UserId"); b.ToTable("AbpUserTokens"); }); modelBuilder.Entity("Abp.BackgroundJobs.BackgroundJobInfo", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<bool>("IsAbandoned"); b.Property<string>("JobArgs") .IsRequired() .HasMaxLength(1048576); b.Property<string>("JobType") .IsRequired() .HasMaxLength(512); b.Property<DateTime?>("LastTryTime"); b.Property<DateTime>("NextTryTime"); b.Property<byte>("Priority"); b.Property<short>("TryCount"); b.HasKey("Id"); b.HasIndex("IsAbandoned", "NextTryTime"); b.ToTable("AbpBackgroundJobs"); }); modelBuilder.Entity("Abp.Configuration.Setting", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() 
.HasMaxLength(256); b.Property<int?>("TenantId"); b.Property<long?>("UserId"); b.Property<string>("Value") .HasMaxLength(2000); b.HasKey("Id"); b.HasIndex("UserId"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpSettings"); }); modelBuilder.Entity("Abp.Localization.ApplicationLanguage", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(64); b.Property<string>("Icon") .HasMaxLength(128); b.Property<bool>("IsDeleted"); b.Property<bool>("IsDisabled"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(10); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpLanguages"); }); modelBuilder.Entity("Abp.Localization.ApplicationLanguageText", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Key") .IsRequired() .HasMaxLength(256); b.Property<string>("LanguageName") .IsRequired() .HasMaxLength(10); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Source") .IsRequired() .HasMaxLength(128); b.Property<int?>("TenantId"); b.Property<string>("Value") .IsRequired() .HasMaxLength(67108864); b.HasKey("Id"); b.HasIndex("TenantId", "Source", "LanguageName", "Key"); b.ToTable("AbpLanguageTexts"); }); modelBuilder.Entity("Abp.Notifications.NotificationInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Data") .HasMaxLength(1048576); b.Property<string>("DataTypeName") .HasMaxLength(512); b.Property<string>("EntityId") .HasMaxLength(96); b.Property<string>("EntityTypeAssemblyQualifiedName") 
.HasMaxLength(512); b.Property<string>("EntityTypeName") .HasMaxLength(250); b.Property<string>("ExcludedUserIds") .HasMaxLength(131072); b.Property<string>("NotificationName") .IsRequired() .HasMaxLength(96); b.Property<byte>("Severity"); b.Property<string>("TenantIds") .HasMaxLength(131072); b.Property<string>("UserIds") .HasMaxLength(131072); b.HasKey("Id"); b.ToTable("AbpNotifications"); }); modelBuilder.Entity("Abp.Notifications.NotificationSubscriptionInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("EntityId") .HasMaxLength(96); b.Property<string>("EntityTypeAssemblyQualifiedName") .HasMaxLength(512); b.Property<string>("EntityTypeName") .HasMaxLength(250); b.Property<string>("NotificationName") .HasMaxLength(96); b.Property<int?>("TenantId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("NotificationName", "EntityTypeName", "EntityId", "UserId"); b.HasIndex("TenantId", "NotificationName", "EntityTypeName", "EntityId", "UserId"); b.ToTable("AbpNotificationSubscriptions"); }); modelBuilder.Entity("Abp.Notifications.TenantNotificationInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<string>("Data") .HasMaxLength(1048576); b.Property<string>("DataTypeName") .HasMaxLength(512); b.Property<string>("EntityId") .HasMaxLength(96); b.Property<string>("EntityTypeAssemblyQualifiedName") .HasMaxLength(512); b.Property<string>("EntityTypeName") .HasMaxLength(250); b.Property<string>("NotificationName") .IsRequired() .HasMaxLength(96); b.Property<byte>("Severity"); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("TenantId"); b.ToTable("AbpTenantNotifications"); }); modelBuilder.Entity("Abp.Notifications.UserNotificationInfo", b => { b.Property<Guid>("Id") .ValueGeneratedOnAdd(); b.Property<DateTime>("CreationTime"); b.Property<int>("State"); 
b.Property<int?>("TenantId"); b.Property<Guid>("TenantNotificationId"); b.Property<long>("UserId"); b.HasKey("Id"); b.HasIndex("UserId", "State", "CreationTime"); b.ToTable("AbpUserNotifications"); }); modelBuilder.Entity("Abp.Organizations.OrganizationUnit", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<string>("Code") .IsRequired() .HasMaxLength(95); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(128); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<long?>("ParentId"); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("ParentId"); b.HasIndex("TenantId", "Code"); b.ToTable("AbpOrganizationUnits"); }); modelBuilder.Entity("fruitShop.Authorization.Roles.Role", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ConcurrencyStamp") .IsConcurrencyToken(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("DisplayName") .IsRequired() .HasMaxLength(64); b.Property<bool>("IsDefault"); b.Property<bool>("IsDeleted"); b.Property<bool>("IsStatic"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(32); b.Property<string>("NormalizedName") .IsRequired() .HasMaxLength(32); b.Property<int?>("TenantId"); b.HasKey("Id"); b.HasIndex("CreatorUserId"); b.HasIndex("DeleterUserId"); b.HasIndex("LastModifierUserId"); b.HasIndex("TenantId", "NormalizedName"); b.ToTable("AbpRoles"); }); modelBuilder.Entity("fruitShop.Authorization.Users.User", b => { b.Property<long>("Id") .ValueGeneratedOnAdd(); b.Property<int>("AccessFailedCount"); b.Property<string>("AuthenticationSource") .HasMaxLength(64); 
b.Property<string>("ConcurrencyStamp") .IsConcurrencyToken(); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<string>("EmailAddress") .IsRequired() .HasMaxLength(256); b.Property<string>("EmailConfirmationCode") .HasMaxLength(328); b.Property<bool>("IsActive"); b.Property<bool>("IsDeleted"); b.Property<bool>("IsEmailConfirmed"); b.Property<bool>("IsLockoutEnabled"); b.Property<bool>("IsPhoneNumberConfirmed"); b.Property<bool>("IsTwoFactorEnabled"); b.Property<DateTime?>("LastLoginTime"); b.Property<DateTime?>("LastModificationTime"); b.Property<long?>("LastModifierUserId"); b.Property<DateTime?>("LockoutEndDateUtc"); b.Property<string>("Name") .IsRequired() .HasMaxLength(32); b.Property<string>("NormalizedEmailAddress") .IsRequired() .HasMaxLength(256); b.Property<string>("NormalizedUserName") .IsRequired() .HasMaxLength(32); b.Property<string>("Password") .IsRequired() .HasMaxLength(128); b.Property<string>("PasswordResetCode") .HasMaxLength(328); b.Property<string>("PhoneNumber"); b.Property<string>("SecurityStamp"); b.Property<string>("Surname") .IsRequired() .HasMaxLength(32); b.Property<int?>("TenantId"); b.Property<string>("UserName") .IsRequired() .HasMaxLength(32); b.HasKey("Id"); b.HasIndex("CreatorUserId"); b.HasIndex("DeleterUserId"); b.HasIndex("LastModifierUserId"); b.HasIndex("TenantId", "NormalizedEmailAddress"); b.HasIndex("TenantId", "NormalizedUserName"); b.ToTable("AbpUsers"); }); modelBuilder.Entity("fruitShop.MultiTenancy.Tenant", b => { b.Property<int>("Id") .ValueGeneratedOnAdd(); b.Property<string>("ConnectionString") .HasMaxLength(1024); b.Property<DateTime>("CreationTime"); b.Property<long?>("CreatorUserId"); b.Property<long?>("DeleterUserId"); b.Property<DateTime?>("DeletionTime"); b.Property<int?>("EditionId"); b.Property<bool>("IsActive"); b.Property<bool>("IsDeleted"); b.Property<DateTime?>("LastModificationTime"); 
b.Property<long?>("LastModifierUserId"); b.Property<string>("Name") .IsRequired() .HasMaxLength(128); b.Property<string>("TenancyName") .IsRequired() .HasMaxLength(64); b.HasKey("Id"); b.HasIndex("CreatorUserId"); b.HasIndex("DeleterUserId"); b.HasIndex("EditionId"); b.HasIndex("LastModifierUserId"); b.HasIndex("TenancyName"); b.ToTable("AbpTenants"); }); modelBuilder.Entity("Abp.Application.Features.EditionFeatureSetting", b => { b.HasBaseType("Abp.Application.Features.FeatureSetting"); b.Property<int>("EditionId"); b.HasIndex("EditionId", "Name"); b.ToTable("AbpFeatures"); b.HasDiscriminator().HasValue("EditionFeatureSetting"); }); modelBuilder.Entity("Abp.MultiTenancy.TenantFeatureSetting", b => { b.HasBaseType("Abp.Application.Features.FeatureSetting"); b.Property<int>("TenantId"); b.HasIndex("TenantId", "Name"); b.ToTable("AbpFeatures"); b.HasDiscriminator().HasValue("TenantFeatureSetting"); }); modelBuilder.Entity("Abp.Authorization.Roles.RolePermissionSetting", b => { b.HasBaseType("Abp.Authorization.PermissionSetting"); b.Property<int>("RoleId"); b.HasIndex("RoleId"); b.ToTable("AbpPermissions"); b.HasDiscriminator().HasValue("RolePermissionSetting"); }); modelBuilder.Entity("Abp.Authorization.Users.UserPermissionSetting", b => { b.HasBaseType("Abp.Authorization.PermissionSetting"); b.Property<long>("UserId"); b.HasIndex("UserId"); b.ToTable("AbpPermissions"); b.HasDiscriminator().HasValue("UserPermissionSetting"); }); modelBuilder.Entity("Abp.Authorization.Roles.RoleClaim", b => { b.HasOne("fruitShop.Authorization.Roles.Role") .WithMany("Claims") .HasForeignKey("RoleId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserClaim", b => { b.HasOne("fruitShop.Authorization.Users.User") .WithMany("Claims") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserLogin", b => { b.HasOne("fruitShop.Authorization.Users.User") .WithMany("Logins") .HasForeignKey("UserId") 
.OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserRole", b => { b.HasOne("fruitShop.Authorization.Users.User") .WithMany("Roles") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserToken", b => { b.HasOne("fruitShop.Authorization.Users.User") .WithMany("Tokens") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Configuration.Setting", b => { b.HasOne("fruitShop.Authorization.Users.User") .WithMany("Settings") .HasForeignKey("UserId"); }); modelBuilder.Entity("Abp.Organizations.OrganizationUnit", b => { b.HasOne("Abp.Organizations.OrganizationUnit", "Parent") .WithMany("Children") .HasForeignKey("ParentId"); }); modelBuilder.Entity("fruitShop.Authorization.Roles.Role", b => { b.HasOne("fruitShop.Authorization.Users.User", "CreatorUser") .WithMany() .HasForeignKey("CreatorUserId"); b.HasOne("fruitShop.Authorization.Users.User", "DeleterUser") .WithMany() .HasForeignKey("DeleterUserId"); b.HasOne("fruitShop.Authorization.Users.User", "LastModifierUser") .WithMany() .HasForeignKey("LastModifierUserId"); }); modelBuilder.Entity("fruitShop.Authorization.Users.User", b => { b.HasOne("fruitShop.Authorization.Users.User", "CreatorUser") .WithMany() .HasForeignKey("CreatorUserId"); b.HasOne("fruitShop.Authorization.Users.User", "DeleterUser") .WithMany() .HasForeignKey("DeleterUserId"); b.HasOne("fruitShop.Authorization.Users.User", "LastModifierUser") .WithMany() .HasForeignKey("LastModifierUserId"); }); modelBuilder.Entity("fruitShop.MultiTenancy.Tenant", b => { b.HasOne("fruitShop.Authorization.Users.User", "CreatorUser") .WithMany() .HasForeignKey("CreatorUserId"); b.HasOne("fruitShop.Authorization.Users.User", "DeleterUser") .WithMany() .HasForeignKey("DeleterUserId"); b.HasOne("Abp.Application.Editions.Edition", "Edition") .WithMany() .HasForeignKey("EditionId"); b.HasOne("fruitShop.Authorization.Users.User", "LastModifierUser") 
.WithMany() .HasForeignKey("LastModifierUserId"); }); modelBuilder.Entity("Abp.Application.Features.EditionFeatureSetting", b => { b.HasOne("Abp.Application.Editions.Edition", "Edition") .WithMany() .HasForeignKey("EditionId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Roles.RolePermissionSetting", b => { b.HasOne("fruitShop.Authorization.Roles.Role") .WithMany("Permissions") .HasForeignKey("RoleId") .OnDelete(DeleteBehavior.Cascade); }); modelBuilder.Entity("Abp.Authorization.Users.UserPermissionSetting", b => { b.HasOne("fruitShop.Authorization.Users.User") .WithMany("Permissions") .HasForeignKey("UserId") .OnDelete(DeleteBehavior.Cascade); }); } } }
31.718438
117
0.439981
[ "MIT" ]
ThomasMar1/fruitShop
aspnet-core/src/fruitShop.EntityFrameworkCore/Migrations/20170608053244_Upgraded_To_Abp_2_1_0.Designer.cs
34,924
C#
using Incoding.Core.Block.Logging; using Incoding.Core.Block.Logging.Core; namespace Incoding.UnitTest.Block { #region << Using >> using System; using Incoding.UnitTests.MSpec; using Machine.Specifications; #endregion [Subject(typeof(LoggingFactory))] public class When_logging_factory_log : Context_logging_factory { Establish establish = () => loggingFactory.Initialize(logging => logging.WithPolicy(r => r.For(LogType.Debug).Use(defaultMockLogger.Object))); Because of = () => loggingFactory.Log(LogType.Debug, Pleasure.Generator.TheSameString(), Pleasure.Generator.Invent<ArgumentException>(), Pleasure.Generator.The20120406Noon()); It should_be_log = () => { Action<LogMessage> verify = message => { message.Message.ShouldEqual(Pleasure.Generator.TheSameString()); message.Exception.ShouldBeAssignableTo<ArgumentException>(); message.State.ShouldEqual(Pleasure.Generator.The20120406Noon()); }; defaultMockLogger.Verify(r => r.Log(Pleasure.MockIt.Is(verify))); }; } }
47.375
183
0.505937
[ "Apache-2.0" ]
Incoding-Software/Incoding-Framework-Core
src/Incoding.UnitTestsCore/Block/LoggingFactoryGroup/Factory/When_logging_factory_log.cs
1,516
C#
//////////////////////////////////////////////////////////////////////////////// //NUnit tests for "EF Core Provider for LCPI OLE DB" // IBProvider and Contributors. 29.05.2018. using NUnit.Framework; using xEFCore=Lcpi.EntityFrameworkCore.DataProvider.LcpiOleDb; namespace EFCore_LcpiOleDb_Tests.General.Unit.ErrorServices.Check{ //////////////////////////////////////////////////////////////////////////////// //class TestsFor__RelationalTypeMappingInfo__IsUnicode class TestsFor__RelationalTypeMappingInfo__IsUnicode { [Test] public static void Test_01() { // public RelationalTypeMappingInfo( // [NotNull] Type type, // [CanBeNull] string storeTypeName, // [CanBeNull] string storeTypeNameBase = null, // bool keyOrIndex, // bool? unicode, // int? size, // bool? rowVersion, // bool? fixedLength, // int? precision, // int? scale) var mappingInfo =new Microsoft.EntityFrameworkCore.Storage.RelationalTypeMappingInfo (typeof(string), "CHAR", "CHAR", false, /*unicode*/true, null, null, /*fixedLength*/null, null, null); Assert.IsTrue (mappingInfo.IsUnicode.HasValue); xEFCore.Check.RelationalTypeMappingInfo__IsUnicode (xEFCore.ErrSourceID.FB_Common__TypeMapping__CHAR, mappingInfo, true); }//Test_01 //----------------------------------------------------------------------- [Test] public static void Test_02() { // public RelationalTypeMappingInfo( // [NotNull] string storeTypeName, // [NotNull] string storeTypeNameBase, // bool? unicode, // int? size, // int? precision, // int? 
scale) var mappingInfo =new Microsoft.EntityFrameworkCore.Storage.RelationalTypeMappingInfo ("CHAR", "CHAR", null, null, null, null); Assert.IsFalse (mappingInfo.IsUnicode.HasValue); xEFCore.Check.RelationalTypeMappingInfo__IsUnicode (xEFCore.ErrSourceID.FB_Common__TypeMapping__NUMERIC, mappingInfo, true); }//Test_02 //----------------------------------------------------------------------- [Test] public static void Test_03__err() { // public RelationalTypeMappingInfo( // [NotNull] Type type, // [CanBeNull] string storeTypeName, // [CanBeNull] string storeTypeNameBase = null, // bool keyOrIndex, // bool? unicode, // int? size, // bool? rowVersion, // bool? fixedLength, // int? precision, // int? scale) var mappingInfo =new Microsoft.EntityFrameworkCore.Storage.RelationalTypeMappingInfo (typeof(string), "CHAR", "CHAR", false, /*unicode*/true, null, null, /*fixedLength*/null, null, null); Assert.IsTrue (mappingInfo.IsUnicode.HasValue); try { xEFCore.Check.RelationalTypeMappingInfo__IsUnicode (xEFCore.ErrSourceID.FB_Common__TypeMapping__CHAR, mappingInfo, false); } catch(xEFCore.LcpiOleDb__DataToolException e) { CheckErrors.PrintException_OK (e); Assert.AreEqual (1, TestUtils.GetRecordCount(e)); CheckErrors.CheckErrorRecord__type_mapping_err__unexpected_IsUnicode_2 (TestUtils.GetRecord(e,0), CheckErrors.c_src__EFCoreDataProvider__FB_Common__TypeMapping__CHAR, true, false); return; }//catch TestServices.ThrowWeWaitError(); }//Test_03__err };//class TestsFor__RelationalTypeMappingInfo__IsUnicode //////////////////////////////////////////////////////////////////////////////// }//namespace EFCore_LcpiOleDb_Tests.General.Unit.ErrorServices.Check
26.013889
81
0.585958
[ "MIT" ]
ibprovider/Lcpi.EFCore.LcpiOleDb
Tests/General/Source/Unit/ErrorServices/Check/TestsFor__RelationalTypeMappingInfo__IsUnicode.cs
3,748
C#
using LunarLabs.WebServer.Core; using LunarLabs.WebServer.HTTP; using LunarLabs.WebServer.Plugins; using Phantasma.Core; namespace Phantasma.API { public class RPCServer : Runnable { public int Port { get; } public string EndPoint { get; } public readonly NexusAPI API; private readonly HTTPServer _server; public RPCServer(NexusAPI api, string endPoint, int port, LoggerCallback logger = null) { if (string.IsNullOrEmpty(endPoint)) { endPoint = "/"; } Port = port; EndPoint = endPoint; API = api; var settings = new ServerSettings() { Environment = ServerEnvironment.Prod, Port = port, MaxPostSizeInBytes = 1024 * 128, Compression = false}; _server = new HTTPServer(settings, logger); var rpc = new RPCPlugin(_server, endPoint); foreach (var entry in api.Methods) { var methodName = char.ToLower(entry.Name[0]) + entry.Name.Substring(1); var apiMethod = entry; rpc.RegisterHandler(methodName, (paramNode) => { var args = new object[apiMethod.Parameters.Count]; for (int i=0; i<args.Length; i++) { if (i < paramNode.ChildCount) { args[i] = paramNode.GetNodeByIndex(i).Value; } else if (apiMethod.Parameters[i].HasDefaultValue) { args[i] = apiMethod.Parameters[i].DefaultValue; } else { throw new RPCException("missing argument: " + apiMethod.Parameters[i].Name); } } IAPIResult result; try { result = api.Execute(apiMethod.Name, args); } catch (APIException e) { throw new RPCException(e.Message); } CheckForError(result); return APIUtils.FromAPIResult(result); }); } } protected override void OnStop() { _server.Stop(); } protected override bool Run() { _server.Run(); return true; } private static void CheckForError(IAPIResult response) { if (response is ErrorResult) { var temp = (ErrorResult)response; throw new RPCException(temp.error); } } } }
30.239583
155
0.451257
[ "MIT" ]
GOATi/PhantasmaChain
Phantasma.API/RPCServer.cs
2,905
C#
using System; using System.Collections.Generic; using System.IO; using System.Linq; namespace Day08 { class Program { static void Main(string[] args) { var input = File.ReadAllLines("input.txt") .Select(line => (op: line[..3], arg: int.Parse(line[4..]))) .ToArray(); Console.WriteLine(Part1(input)); Console.WriteLine(Part2(input)); } static int Part1((string op, int arg)[] input) { var (acc, completed) = Run(input); return acc; } static int Part2((string op, int arg)[] input) { var alternatives = Enumerable.Range(0, input.Length) .Where(pc => input[pc].op != "acc") .Select(pc => new[] {input.Take(pc), new[] {ToggleInstruction(input[pc])}, input.Skip(pc + 1)} .SelectMany(instruction => instruction).ToArray()); var result = alternatives .Select(program => Run(program)) .First(run => run.completed); return result.accumulator; } static (string op, int arg) ToggleInstruction((string op, int arg) instruction) { var newOp = instruction.op switch { "nop" => "jmp", "jmp" => "nop", _ => instruction.op }; return (newOp, instruction.arg); } static (int accumulator, bool completed) Run((string op, int arg)[] program) { var accumulator = 0; var pc = 0; var executed = new HashSet<int>(); do { if (executed.Add(pc)) { var (op, arg) = program[pc]; switch (op) { case "acc": pc++; accumulator += arg; break; case "jmp": pc += arg; break; case "nop": pc++; break; default: throw new ArgumentException("invalid op"); } } else { return (accumulator, false); } } while (pc < program.Length); return (accumulator, true); } } }
27.680851
100
0.399308
[ "MIT" ]
modernist/AdventOfCode
2020/Day08/Program.cs
2,604
C#
namespace Terka.FontBuilder.Optimizer { using System; using System.Collections.Generic; using System.Linq; using Terka.FontBuilder.Compiler; using Terka.FontBuilder.Compiler.Output; using Terka.FontBuilder.Extensions; class TransitionCountStateEqualityComparer : IEqualityComparer<State> { public bool Equals(State x, State y) { return x.Transitions.Count == y.Transitions.Count; } public int GetHashCode(State obj) { return obj.Transitions.Count; } } /// <summary> /// Removes redudant states from state machines. /// </summary> public class StateMachineOptimizer { /// <summary> /// Optimizes the specified input machine. /// </summary> /// <param name="inputMachine">The input machine. Must be in format as generated by the <see cref="StateMachineBuilder"/> (no loops other than regular back-transitions within a single path).</param> /// <returns>State machine with all equivalent states merged into one state.</returns> public StateMachine Optimize(StateMachine inputMachine) { //var sortedMachine = inputMachine;//new StateMachineTransitionSorter().SortTransitions(inputMachine); var sortedMachine = new StateMachineTransitionSorter().SortTransitions(inputMachine); return this.OptimizeSortedMachine(sortedMachine); } /// <summary> /// Optimizes the specified input machine. /// </summary> /// <param name="inputMachine">The input machine. Must have transitions in a common order. 
Must be in format as generated by the <see cref="StateMachineBuilder"/> (no loops other than regular back-transitions within a single path).</param> /// <returns>State machine with all equivalent states merged into one state.</returns> public StateMachine OptimizeSortedMachine(StateMachine inputMachine) { var parentStateMap = this.BuildParentStateMap(inputMachine); var finalStates = this.GetFinalStates(inputMachine); var transitionComparer = new TransitionNonrecursiveEqualityComparer(); var queue = new FastEquivalenceQueue<State>(new TransitionCountStateEqualityComparer()); var oldToNewMap = new Dictionary<State, State>(); var waitingForBacktransitionConfirmationMap = new Dictionary<State, WaitingForConfirmationInfo>(); var backtTransitionsByState = this.GetBackTransitionsByState(inputMachine); foreach (var finalState in finalStates) { queue.Enqueue(finalState); } //int i = 0; while (!queue.IsEmpty) { //i++; var currentSet = queue.DequeueEquivalenceSet().ToList(); /*Console.WriteLine("======================="); Console.WriteLine(i + " T " + queue.Count + ") " + string.Join(", ", currentSet.Select(p => p)));*/ // First state is the "template" for the equivalency class (all the other states in the group have to be equivalent // to this state to be in its group) var firstState = currentSet.First(); foreach (var state in currentSet) { //Console.WriteLine("Current: " + state); // If the previous template state was kicked out due to not having all its children processed, // try to use the next state as a template. if (firstState == null) { firstState = state; } var currentStateBackTransition = backtTransitionsByState.GetValueOrDefault(state); var firstStateBackTransition = backtTransitionsByState.GetValueOrDefault(firstState); // Were all children of this state already processed? (skip back-transitions) if (!state.Transitions.Where(p => currentStateBackTransition != p).All(p => oldToNewMap.ContainsKey(p.TargetState))) { // Another state will have to become the template. 
if (state == firstState) { firstState = null; } //Console.WriteLine("Requeueing because children not processed"); queue.Enqueue(state); continue; } // By now, at least one state is still in the equivalency class (and is the template). // The template is obviously auto-included in the group. if (firstState != state) { if (firstState.Transitions.Count != state.Transitions.Count) { //Console.WriteLine("Requeueing because not same number of transitions as template"); queue.Enqueue(state); continue; } // Do a rough check of the back-transition (we can discard obviously non-equivalent back-transitions) if ( !transitionComparer.Equals(currentStateBackTransition, firstStateBackTransition) || ((currentStateBackTransition != null) != (firstStateBackTransition != null))) { //Console.WriteLine("Requeueing because preliminary backtransition check failed"); queue.Enqueue(state); continue; } // Check if the children states of the current state (and transitions leading to them) match the template. var transitionPairs = state.Transitions.Where(p => currentStateBackTransition != p).Zip( firstState.Transitions.Where(p => backtTransitionsByState.GetValueOrDefault(firstState) != p), (currentStateTransition, firstStateTransition) => new { currentStateTransition, firstStateTransition } ); if (transitionPairs.Any(pair => { //Console.WriteLine("Compare transition: first " + pair.firstStateTransition.SortingKey + " current" + pair.firstStateTransition.SortingKey); // The transitions must match either way (whether there is backtransition going around current state or not). 
if (!transitionComparer.Equals(pair.currentStateTransition, pair.firstStateTransition)) { return true; } var waitingForConfirmationFirstStateInfo = waitingForBacktransitionConfirmationMap.GetValueOrDefault(pair.firstStateTransition.TargetState); var waitingForConfirmationCurrentStateInfo = waitingForBacktransitionConfirmationMap.GetValueOrDefault(pair.currentStateTransition.TargetState); /*Console.WriteLine( "Waiting template: first " + (waitingForConfirmationFirstStateInfo != null ? waitingForConfirmationFirstStateInfo.TemplateState.HeadShift.ToString() : "x") + " current " + (waitingForConfirmationCurrentStateInfo != null ? waitingForConfirmationCurrentStateInfo.TemplateState.HeadShift.ToString() : "x"));*/ if (waitingForConfirmationFirstStateInfo == null && waitingForConfirmationCurrentStateInfo == null) { return oldToNewMap[pair.firstStateTransition.TargetState] != oldToNewMap[pair.currentStateTransition.TargetState]; } else if (waitingForConfirmationFirstStateInfo == null || waitingForConfirmationCurrentStateInfo == null) { return true; } else { // Equivalent template state in the waiting-for-conf info means the children were processed and that they are equivalent. return waitingForConfirmationFirstStateInfo.TemplateState != waitingForConfirmationCurrentStateInfo.TemplateState; } } //oldToNewMap[pair.firstStateTransition.TargetState] != oldToNewMap[pair.currentStateTransition.TargetState] /*&& //waitingForBacktransitionConfirmationMap.GetValueOrDefault().TemplateState )) { //Console.WriteLine("Requeueing because children (or transitions to children) don't match"); queue.Enqueue(state); continue; } } // Were children of this state waiting for backtransition confirmation? 
// TODO: Check if children are waiting for the same state (opposite would mean a bug in Compiler) var childrenWaitingForConfirmationInfo = state.Transitions .Where(p => currentStateBackTransition != p && waitingForBacktransitionConfirmationMap.ContainsKey(p.TargetState)) .Select(p => p.TargetState) .Select(childState => waitingForBacktransitionConfirmationMap[childState]) .ToList(); // If there is a backtransition loop going from this state to this state, consider the state to be a child of itself for purpose // of back-transition handling (to avoid having to handle this as a special case later). var hasSelfBacktransition = currentStateBackTransition != null && currentStateBackTransition.TargetState == state; if (hasSelfBacktransition) { childrenWaitingForConfirmationInfo.Add(new WaitingForConfirmationInfo { TemplateState = firstState, ConfirmingState = state, StatesWithBacktransitions = new List<State> { state } }); } /*Console.WriteLine("Following waiting for confirmation infos were passed by children:"); foreach (var waitingForConfirmationInfo in childrenWaitingForConfirmationInfo) { Console.WriteLine("\t" + waitingForConfirmationInfo); }*/ var isWaitingBacktransitionTarget = childrenWaitingForConfirmationInfo.Count > 0 && childrenWaitingForConfirmationInfo.First().ConfirmingState == state; /*Console.WriteLine("Reuse template?"); Console.WriteLine("\tIs NOT the template:" + (state != firstState)); Console.WriteLine("\tHas no backtransition:" + (currentStateBackTransition == null) + " OR it is self-backtransition :" + hasSelfBacktransition); Console.WriteLine("\tChildren have no open backtransitions:" + (childrenWaitingForConfirmationInfo.Count == 0) + " OR It is the target :" + isWaitingBacktransitionTarget);*/ // Merge this state's subtree into the template state's subtree, if it matches following conditions State newState; if ( state != firstState && // Always create a new subree for the template state. 
(currentStateBackTransition == null || hasSelfBacktransition) && // If the state has backtransition, it must its target. (childrenWaitingForConfirmationInfo.Count == 0 || isWaitingBacktransitionTarget) // There are no backtransitions going around it ) { newState = oldToNewMap[firstState]; //Console.WriteLine("Reusing template"); } else { newState = new State(); newState.Transitions = state.Transitions.Where(p => p != currentStateBackTransition).Select(t => { var newTransition = t.Clone(); newTransition.TargetState = oldToNewMap[t.TargetState]; return newTransition; }).ToList(); //Console.WriteLine("Creating new state"); } oldToNewMap[state] = newState; if (currentStateBackTransition != null && currentStateBackTransition.TargetState != state) { waitingForBacktransitionConfirmationMap.Add( state, new WaitingForConfirmationInfo { ConfirmingState = currentStateBackTransition.TargetState, TemplateState = firstState, StatesWithBacktransitions = new List<State> { state } }); //Console.WriteLine("wrote waiting for template " + firstState + " because has backtrans"); } if (childrenWaitingForConfirmationInfo.Count > 0) { if (!isWaitingBacktransitionTarget && currentStateBackTransition == null /*!stateComparer.Equals(childrenWaitingForConfirmationInfo.First().ConfirmingState, state)*/) // TODO: Co tady? { // This is not the state these states are waiting for -> forward the waiting for confirmation info further waitingForBacktransitionConfirmationMap.Add( state, new WaitingForConfirmationInfo { ConfirmingState = childrenWaitingForConfirmationInfo.First().ConfirmingState, TemplateState = firstState, StatesWithBacktransitions = childrenWaitingForConfirmationInfo.SelectMany(p => p.StatesWithBacktransitions).ToList() }); //Console.WriteLine("wrote waiting for template " + firstState + " not which waiting for"); } else { // This is the state the unconfirmed states are waiting for! // Add back-transitions from these states to the current state. 
foreach (var stateWaitingForBacktransition in childrenWaitingForConfirmationInfo.SelectMany(p => p.StatesWithBacktransitions)) { var backTransitionOrigin = oldToNewMap[stateWaitingForBacktransition]; backTransitionOrigin.Transitions.Add(new AlwaysTransition { TargetState = newState, HeadShift = backTransitionOrigin == newState ? 1 : 0 // Do not loop in place }); } //Console.WriteLine("skipped forwarding waiting for template " + firstState + " , is backtrans target (backtrans created)"); } } foreach (var parentState in parentStateMap[state]) { // Parent map also includes backtransition -> exclude them now. var parentBackTransition = backtTransitionsByState.GetValueOrDefault(parentState); if (parentBackTransition != null && parentBackTransition.TargetState == state) { continue; } //Console.WriteLine("Enqueued parent state " + parentState); // Tady bude treba resit, aby se do fronty vrcholy nepridavaly vicekrat queue.Enqueue(parentState); } } } return new StateMachine(oldToNewMap[inputMachine.EntryState]); } /// <summary> /// Prepares a dictionary matching each transition in the machine with the state from which that transition originates. 
/// </summary> /// <param name="inputMachine">The input machine.</param> /// <returns>A map maching each transition to its origin state.</returns> internal ILookup<State, State> BuildParentStateMap(StateMachine inputMachine) { return ( from state in inputMachine.States from transition in state.Transitions select new { transition.TargetState, state}).ToLookup( p => p.TargetState, p => p.state); } internal IEnumerable<State> GetFinalStates(StateMachine inputMachine) { var finalStates = new List<State>(); var queue = new Queue<DfsQueueItem>(); queue.Enqueue(new DfsQueueItem { State = inputMachine.EntryState, PathStates = new List<State>() }); while (queue.Any()) { var currentItem = queue.Dequeue(); int numForwardTransitions = 0; var extendedPath = currentItem.PathStates.Append(currentItem.State).ToList(); foreach (var transition in currentItem.State.Transitions) { if (!currentItem.PathStates.Contains(transition.TargetState)) { numForwardTransitions++; queue.Enqueue( new DfsQueueItem { State = transition.TargetState, PathStates = extendedPath }); } } if (numForwardTransitions == 0) { finalStates.Add(currentItem.State); } } return finalStates; } /// <summary> /// Gets a dictionary which contains all back-transitions indexed by the state they originate from. /// </summary> /// <param name="inputMachine">The input machine.</param> /// <returns>Dictionary indexed by back-transition origin, value is the back-transition itself.</returns> internal IDictionary<State, ITransition> GetBackTransitionsByState(StateMachine inputMachine) { var queue = new Queue<DfsQueueItem>(); queue.Enqueue(new DfsQueueItem { State = inputMachine.EntryState, PathStates = new List<State>() }); // First item in the tuple is origin state, second is the backtransition itself. 
var backtransitions = new List<Tuple<State, ITransition>>(); while (queue.Any()) { var currentItem = queue.Dequeue(); var extendedPath = currentItem.PathStates.Append(currentItem.State).ToList(); var count = currentItem.State.Transitions.Count(p => currentItem.PathStates.Contains(p.TargetState)); if (count > 1) { throw new InvalidOperationException(); } foreach (var transition in currentItem.State.Transitions) { if (!extendedPath.Contains(transition.TargetState)) { queue.Enqueue(new DfsQueueItem { State = transition.TargetState, PathStates = extendedPath }); } else { backtransitions.Add(Tuple.Create(currentItem.State, transition)); } } } // This is a possible failure point in case the input machine is not in correct format (incorrect loops in the machine may cause InvalidOperationException here). return backtransitions.ToDictionary(p => p.Item1, p => p.Item2); } private class DfsQueueItem { public State State { get; set; } public ICollection<State> PathStates { get; set; } } private class WaitingForConfirmationInfo { public List<State> StatesWithBacktransitions { get; set; } public State ConfirmingState { get; set; } public State TemplateState { get; set; } public override string ToString() { return "[Confirm state: " + this.ConfirmingState + " Template state: " + this.TemplateState + " Origin states: " + String.Join(", ", this.StatesWithBacktransitions) + "]"; } } } }
50.805046
247
0.528238
[ "Apache-2.0" ]
miloush/Terka.TinyFonts
TerkaFont Builder/Optimizer/StateMachineOptimizer.cs
22,153
C#
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the comprehend-2017-11-27.normal.json service model. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using System.IO; using Amazon.Runtime; using Amazon.Runtime.Internal; namespace Amazon.Comprehend.Model { /// <summary> /// Container for the parameters to the DetectEntities operation. /// Inspects text for named entities, and returns information about them. For more information, /// about named entities, see <a>how-entities</a>. /// </summary> public partial class DetectEntitiesRequest : AmazonComprehendRequest { private LanguageCode _languageCode; private string _text; /// <summary> /// Gets and sets the property LanguageCode. /// <para> /// The language of the input documents. You can specify any of the primary languages /// supported by Amazon Comprehend: German ("de"), English ("en"), Spanish ("es"), French /// ("fr"), Italian ("it"), or Portuguese ("pt"). All documents must be in the same language. /// </para> /// </summary> [AWSProperty(Required=true)] public LanguageCode LanguageCode { get { return this._languageCode; } set { this._languageCode = value; } } // Check to see if LanguageCode property is set internal bool IsSetLanguageCode() { return this._languageCode != null; } /// <summary> /// Gets and sets the property Text. /// <para> /// A UTF-8 text string. 
Each string must contain fewer that 5,000 bytes of UTF-8 encoded /// characters. /// </para> /// </summary> [AWSProperty(Required=true, Min=1)] public string Text { get { return this._text; } set { this._text = value; } } // Check to see if Text property is set internal bool IsSetText() { return this._text != null; } } }
32.829268
108
0.625186
[ "Apache-2.0" ]
TallyUpTeam/aws-sdk-net
sdk/src/Services/Comprehend/Generated/Model/DetectEntitiesRequest.cs
2,692
C#
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics.CodeAnalysis; [assembly: SuppressMessage("Microsoft.MSInternal", "CA905:SystemAndMicrosoftNamespacesRequireApproval", Scope = "namespace", Target = "System.Windows.Forms.VisualStyles")] namespace System.Windows.Forms.VisualStyles { [Flags] [SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue", Justification = "Maps to native enum.")] public enum Edges { Left = 0x0001, Top = 0x0002, Right = 0x0004, Bottom = 0x0008, Diagonal = 0x0010, // #define BF_LEFT 0x0001 // #define BF_TOP 0x0002 // #define BF_RIGHT 0x0004 // #define BF_BOTTOM 0x0008 // // #define BF_TOPLEFT (BF_TOP | BF_LEFT) // #define BF_TOPRIGHT (BF_TOP | BF_RIGHT) // #define BF_BOTTOMLEFT (BF_BOTTOM | BF_LEFT) // #define BF_BOTTOMRIGHT (BF_BOTTOM | BF_RIGHT) // #define BF_RECT (BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM) // // #define BF_DIAGONAL 0x0010 // // For diagonal lines, the BF_RECT flags specify the end point of the // // vector bounded by the rectangle parameter. // #define BF_DIAGONAL_ENDTOPRIGHT (BF_DIAGONAL | BF_TOP | BF_RIGHT) // #define BF_DIAGONAL_ENDTOPLEFT (BF_DIAGONAL | BF_TOP | BF_LEFT) // #define BF_DIAGONAL_ENDBOTTOMLEFT (BF_DIAGONAL | BF_BOTTOM | BF_LEFT) // #define BF_DIAGONAL_ENDBOTTOMRIGHT (BF_DIAGONAL | BF_BOTTOM | BF_RIGHT) } }
42.047619
171
0.636467
[ "MIT" ]
Bhaskers-Blu-Org2/winforms
src/System.Windows.Forms/src/System/Windows/Forms/VisualStyles/Edges.cs
1,766
C#
#region License // // Copyright 2002-2019 Drew Noakes // Ported from Java to C# by Yakov Danilov for Imazen LLC in 2014 // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // More information about this project is available at: // // https://github.com/drewnoakes/metadata-extractor-dotnet // https://drewnoakes.com/code/exif/ // #endregion using System.Diagnostics.CodeAnalysis; using JetBrains.Annotations; namespace MetadataExtractor.Formats.Exif.Makernotes { /// <summary> /// Provides human-readable string representations of tag values stored in a <see cref="SamsungType2MakernoteDirectory"/>. 
/// <para /> /// Tag reference from: http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/Samsung.html /// </summary> /// <author>Kevin Mott https://github.com/kwhopper</author> /// <author>Drew Noakes https://drewnoakes.com</author> [SuppressMessage("ReSharper", "MemberCanBePrivate.Global")] public class SamsungType2MakernoteDescriptor : TagDescriptor<SamsungType2MakernoteDirectory> { public SamsungType2MakernoteDescriptor([NotNull] SamsungType2MakernoteDirectory directory) : base(directory) { } public override string GetDescription(int tagType) { switch (tagType) { case SamsungType2MakernoteDirectory.TagMakerNoteVersion: return GetMakernoteVersionDescription(); case SamsungType2MakernoteDirectory.TagDeviceType: return GetDeviceTypeDescription(); case SamsungType2MakernoteDirectory.TagSamsungModelId: return GetSamsungModelIdDescription(); case SamsungType2MakernoteDirectory.TagCameraTemperature: return GetCameraTemperatureDescription(); case SamsungType2MakernoteDirectory.TagFaceDetect: return GetFaceDetectDescription(); case SamsungType2MakernoteDirectory.TagFaceRecognition: return GetFaceRecognitionDescription(); default: return base.GetDescription(tagType); } } [CanBeNull] public string GetMakernoteVersionDescription() { return GetVersionBytesDescription(SamsungType2MakernoteDirectory.TagMakerNoteVersion, 2); } [CanBeNull] public string GetDeviceTypeDescription() { if (!Directory.TryGetUInt32(SamsungType2MakernoteDirectory.TagDeviceType, out uint value)) return null; switch (value) { case 0x1000: return "Compact Digital Camera"; case 0x2000: return "High-end NX Camera"; case 0x3000: return "HXM Video Camera"; case 0x12000: return "Cell Phone"; case 0x300000: return "SMX Video Camera"; default: return "Unknown (" + value + ")"; } } [CanBeNull] public string GetSamsungModelIdDescription() { if (!Directory.TryGetUInt32(SamsungType2MakernoteDirectory.TagSamsungModelId, out uint value)) return null; switch (value) { case 0x100101c: return "NX10"; /*case 0x1001226: 
return "HMX-S10BP";*/ case 0x1001226: return "HMX-S15BP"; case 0x1001233: return "HMX-Q10"; /*case 0x1001234: return "HMX-H300";*/ case 0x1001234: return "HMX-H304"; case 0x100130c: return "NX100"; case 0x1001327: return "NX11"; case 0x170104e: return "ES70, ES71 / VLUU ES70, ES71 / SL600"; case 0x1701052: return "ES73 / VLUU ES73 / SL605"; case 0x1701300: return "ES28 / VLUU ES28"; case 0x1701303: return "ES74,ES75,ES78 / VLUU ES75,ES78"; case 0x2001046: return "PL150 / VLUU PL150 / TL210 / PL151"; case 0x2001311: return "PL120,PL121 / VLUU PL120,PL121"; case 0x2001315: return "PL170,PL171 / VLUUPL170,PL171"; case 0x200131e: return "PL210, PL211 / VLUU PL210, PL211"; case 0x2701317: return "PL20,PL21 / VLUU PL20,PL21"; case 0x2a0001b: return "WP10 / VLUU WP10 / AQ100"; case 0x3000000: return "Various Models (0x3000000)"; case 0x3a00018: return "Various Models (0x3a00018)"; case 0x400101f: return "ST1000 / ST1100 / VLUU ST1000 / CL65"; case 0x4001022: return "ST550 / VLUU ST550 / TL225"; case 0x4001025: return "Various Models (0x4001025)"; case 0x400103e: return "VLUU ST5500, ST5500, CL80"; case 0x4001041: return "VLUU ST5000, ST5000, TL240"; case 0x4001043: return "ST70 / VLUU ST70 / ST71"; case 0x400130a: return "Various Models (0x400130a)"; case 0x400130e: return "ST90,ST91 / VLUU ST90,ST91"; case 0x4001313: return "VLUU ST95, ST95"; case 0x4a00015: return "VLUU ST60"; case 0x4a0135b: return "ST30, ST65 / VLUU ST65 / ST67"; case 0x5000000: return "Various Models (0x5000000)"; case 0x5001038: return "Various Models (0x5001038)"; case 0x500103a: return "WB650 / VLUU WB650 / WB660"; case 0x500103c: return "WB600 / VLUU WB600 / WB610"; case 0x500133e: return "WB150 / WB150F / WB152 / WB152F / WB151"; case 0x5a0000f: return "WB5000 / HZ25W"; case 0x6001036: return "EX1"; case 0x700131c: return "VLUU SH100, SH100"; case 0x27127002: return "SMX - C20N"; default: return "Unknown (" + value + ")"; } } [CanBeNull] private string GetCameraTemperatureDescription() { return 
GetFormattedInt(SamsungType2MakernoteDirectory.TagCameraTemperature, "{0} C"); } [CanBeNull] public string GetFaceDetectDescription() { return GetIndexedDescription(SamsungType2MakernoteDirectory.TagFaceDetect, "Off", "On"); } [CanBeNull] public string GetFaceRecognitionDescription() { return GetIndexedDescription(SamsungType2MakernoteDirectory.TagFaceRecognition, "Off", "On"); } } }
37.117925
126
0.541492
[ "Apache-2.0" ]
XelaNimed/metadata-extractor-dotnet
MetadataExtractor/Formats/Exif/makernotes/SamsungType2MakernoteDescriptor.cs
7,869
C#
using System; using Monitor = System.Threading.Monitor; namespace ThreadingUtilities { /// <summary> /// Reader writer. /// /// This class Solves a fequent problem with databases in mind. /// This allows you to read multiple times with a lightswitch and allows you to write using the semaphore token. /// This means you can have multiple reads going on at a time and only one write at a time. /// This allows us to keep the integrety of the data. /// /// This also has some fixes to this problem, there will be a number of reads that will be able to go through and when there is a write, it allows it through /// this is by using turnstyles to allow this through. /// </summary> public class ReaderWriter { private readonly Semaphore ReaderWriterLockSemaphore; private readonly Lightswitch ReaderWriterLockLightswitch; private readonly Semaphore ReaderBlock; private readonly Semaphore WriteBlock; private ulong NumberOfReaders; private readonly object ReaderWriterLock; /// <summary> /// Initializes a new instance of the <see cref="Mark.Utilities.MAC.ReaderWriter"/> class. /// /// This Initializes the Reader Writer. All this will do is Initialize all of the global variables for the class to work. /// </summary> public ReaderWriter () { ReaderWriterLockSemaphore = new Semaphore (1); ReaderWriterLockLightswitch = new Lightswitch (ReaderWriterLockSemaphore); ReaderBlock = new Semaphore (1); WriteBlock = new Semaphore (1); NumberOfReaders = 0; ReaderWriterLock = new object (); } /// <summary> /// Read Acquire Method. /// /// This allows you to acquire the token for the intent of READING only. /// This works by using a lightswitch and allows to pass multiple threads for the price of only one token. 
/// /// This will hold onto the token until a point where the Writer has the ReaderBlock token, otherwise will keep on reading /// </summary> public void ReadAcquire() { ReaderBlock.AcquireToken (); ReaderBlock.ReleaseToken (); lock(ReaderWriterLock) { NumberOfReaders++; } ReaderWriterLockLightswitch.Acquire (); lock(ReaderWriterLock) { NumberOfReaders--; Monitor.PulseAll (ReaderWriterLock); } } /// <summary> /// Read Release Method. /// /// This allows you to release the thread from the pool of threads. /// This will use the method in Lightswitch.Release to handle this request. /// </summary> public void ReadRelease() { ReaderWriterLockLightswitch.Release (); } /// <summary> /// Writer Acquire Method. /// /// Writer acquire will be used for WRITING only this is designed only to have this go through at a time. /// This works by using the full semaphore as the way of letting in the token. /// /// Once the thread has started in the methd it will make sure that no Readers can interuppt and allow it to write safely. /// </summary> public void WriterAcquire() { WriteBlock.AcquireToken (); ReaderBlock.AcquireToken (); ReaderWriterLockSemaphore.AcquireToken (); ReaderBlock.ReleaseToken (); } /// <summary> /// Write release for Writing to a source. /// /// This allows you to release the thread from the one Thread. /// This will use the method in the Semaphore.Release to handle this request. /// </summary> public void WriteRelease() { ReaderWriterLockSemaphore.ReleaseToken (); lock (ReaderWriterLock) { if(NumberOfReaders > 0) { Monitor.Wait (ReaderWriterLock); } } WriteBlock.ReleaseToken (); } } }
31.875
158
0.702241
[ "MIT" ]
Mark-Fulton/Threading-Utilities
ReaderWriter.cs
3,572
C#
using Microsoft.AspNetCore.Mvc; namespace Saas.Admin.Web.Controllers { public class UsersController : Controller { public IActionResult Index() { return View(); } } }
16.692308
45
0.59447
[ "MIT" ]
Azure/azure-saas
src/Saas.Admin/Saas.Admin.Web/Controllers/UsersController.cs
219
C#
namespace Tavenem.Wiki.Web; /// <summary> /// Options for the wiki web client. /// </summary> public interface IWikiWebOptions { /// <summary> /// <para> /// The title of the main about page. /// </para> /// <para> /// Default is "About" /// </para> /// <para> /// May be set to <see langword="null"/> or an empty <see cref="string"/>, which disables /// the about page. /// </para> /// </summary> string? AboutPageTitle { get; } /// <summary> /// <para> /// An optional collection of namespaces which may not be assigned to pages by non-admin /// users. /// </para> /// <para> /// The namespace assigned to <see cref="SystemNamespace"/> is included automatically. /// </para> /// </summary> IEnumerable<string> AdminNamespaces { get; } /// <summary> /// <para> /// The title of the main contact page. /// </para> /// <para> /// Default is "Contact" /// </para> /// <para> /// May be set to <see langword="null"/> or an empty <see cref="string"/>, which disables /// the contact page. /// </para> /// </summary> string? ContactPageTitle { get; } /// <summary> /// <para> /// The title of the main contents page. /// </para> /// <para> /// Default is "Contents" /// </para> /// <para> /// May be set to <see langword="null"/> or an empty <see cref="string"/>, which disables /// the contents page. /// </para> /// </summary> string? ContentsPageTitle { get; } /// <summary> /// <para> /// The title of the main copyright page. /// </para> /// <para> /// Default is "Copyright" /// </para> /// <para> /// May be set to <see langword="null"/> or an empty <see cref="string"/>, which disables /// the copyright page and the copyright notice on pages. /// </para> /// <para> /// Consider carefully before omitting this special page, unless you supply an alternate /// copyright notice on your wiki. /// </para> /// </summary> string? CopyrightPageTitle { get; } /// <summary> /// <para> /// The name of the user group namespace. /// </para> /// <para> /// If omitted "Group" is used. 
/// </para> /// </summary> string GroupNamespace { get; } /// <summary> /// <para> /// The title of the main help page. /// </para> /// <para> /// Default is "Help" /// </para> /// <para> /// May be set to <see langword="null"/> or an empty <see cref="string"/>, which disables /// the help page. /// </para> /// </summary> string? HelpPageTitle { get; } /// <summary> /// <para> /// The maximum size (in bytes) of uploaded files. /// </para> /// <para> /// Setting this to a value less than or equal to zero effectively prevents file uploads. /// </para> /// </summary> int MaxFileSize { get; } /// <summary> /// Gets a string representing the <see cref="MaxFileSize"/> in a reasonable unit (GB for /// large sizes, down to bytes for small ones). /// </summary> public string MaxFileSizeString { get { if (MaxFileSize >= 1000000000) { return $"{MaxFileSize / 1000000000.0:N3} GB"; } else if (MaxFileSize >= 1000000) { return $"{MaxFileSize / 1000000.0:N3} MB"; } else if (MaxFileSize >= 1000) { return $"{MaxFileSize / 1000.0:G} KB"; } else { return $"{MaxFileSize} bytes"; } } } /// <summary> /// <para> /// The title of the main policy page. /// </para> /// <para> /// Default is "Policies" /// </para> /// <para> /// May be set to <see langword="null"/> or an empty <see cref="string"/>, which disables /// the policy page. /// </para> /// </summary> string? PolicyPageTitle { get; } /// <summary> /// <para> /// The name of the system namespace. /// </para> /// <para> /// If omitted "System" is used. /// </para> /// </summary> string SystemNamespace { get; } /// <summary> /// <para> /// The name of the user namespace. /// </para> /// <para> /// If omitted "User" is used. /// </para> /// </summary> string UserNamespace { get; } }
26.028902
93
0.508106
[ "MIT" ]
Tavenem/Wiki.Web
src/IWikiWebOptions.cs
4,505
C#
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:2.0.50727.3053 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace MXTankWeb { public partial class _Default { /// <summary> /// form1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.HtmlControls.HtmlForm form1; /// <summary> /// TankManagement1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::MXTankWeb.TankManagement TankManagement1; } }
31.714286
84
0.494595
[ "Apache-2.0" ]
MoysheBenRabi/setp
csharp/Examples/XnaTank/MXTankWeb/Default.aspx.designer.cs
1,112
C#
// Maybe: Unit Tests // Copyright (c) bfren - licensed under https://mit.bfren.dev/2019 using MaybeF; using NSubstitute.ExceptionExtensions; using static MaybeF.F.M; namespace Abstracts; public abstract class IfNull_Tests { public abstract void Test00_Exception_In_IfNull_Func_Returns_None_With_UnhandledExceptionMsg(); protected static void Test00(Func<Maybe<object?>, Func<Maybe<object?>>, Maybe<object?>> act) { // Arrange var some = F.Some<object>(null, true); var none = F.None<object?, NullValueMsg>(); var throws = Substitute.For<Func<Maybe<object?>>>(); throws.Invoke().Throws<Exception>(); // Act var r0 = act(some, throws); var r1 = act(none, throws); // Assert var n0 = r0.AssertNone(); Assert.IsType<UnhandledExceptionMsg>(n0); var n1 = r1.AssertNone(); Assert.IsType<UnhandledExceptionMsg>(n1); } public abstract void Test01_Some_With_Null_Value_Runs_IfNull_Func(); protected static void Test01(Func<Maybe<object?>, Func<Maybe<object?>>, Maybe<object?>> act) { // Arrange var maybe = F.Some<object>(null, true); var ifNull = Substitute.For<Func<Maybe<object?>>>(); // Act act(maybe, ifNull); // Assert ifNull.Received().Invoke(); } public abstract void Test02_None_With_NullValueMsg_Runs_IfNull_Func(); protected static void Test02(Func<Maybe<object>, Func<Maybe<object>>, Maybe<object>> act) { // Arrange var maybe = F.None<object, NullValueMsg>(); var ifNull = Substitute.For<Func<Maybe<object>>>(); // Act act(maybe, ifNull); // Assert ifNull.Received().Invoke(); } public abstract void Test03_Some_With_Null_Value_Runs_IfNull_Func_Returns_None_With_Msg(); protected static void Test03(Func<Maybe<object?>, Func<IMsg>, Maybe<object?>> act) { // Arrange var maybe = F.Some<object>(null, true); var ifNull = Substitute.For<Func<IMsg>>(); var message = new TestMsg(); ifNull.Invoke().Returns(message); // Act var result = act(maybe, ifNull); // Assert ifNull.Received().Invoke(); var none = result.AssertNone(); Assert.Same(message, none); } public abstract void 
Test04_None_With_NullValueMsg_Runs_IfNull_Func_Returns_None_With_Msg(); protected static void Test04(Func<Maybe<object>, Func<IMsg>, Maybe<object>> act) { // Arrange var maybe = F.None<object, NullValueMsg>(); var ifNull = Substitute.For<Func<IMsg>>(); var message = new TestMsg(); ifNull.Invoke().Returns(message); // Act var result = act(maybe, ifNull); // Assert ifNull.Received().Invoke(); var none = result.AssertNone(); Assert.Same(message, none); } public abstract void Test05_Null_Maybe_Runs_IfNull_Func(Maybe<int> input); protected static void Test05(Func<Func<Maybe<int>>, Maybe<int>> act) { // Arrange var ifNull = Substitute.For<Func<Maybe<int>>>(); // Act act(ifNull); // Assert ifNull.Received().Invoke(); } public abstract void Test06_Some_With_Null__Runs_IfNull(); protected static void Test06(Func<Maybe<int?>, Func<uint>, Func<int?, uint>, Maybe<uint>> act) { // Arrange var some = F.Some<int?>(() => null, true, F.DefaultHandler); var ifNull = Substitute.For<Func<uint>>(); var ifSome = Substitute.For<Func<int?, uint>>(); // Act act(some, ifNull, ifSome); // Assert ifNull.Received().Invoke(); ifSome.DidNotReceiveWithAnyArgs().Invoke(default); } public abstract void Test07_Some_With_Value__Runs_IfSome(); protected static void Test07(Func<Maybe<string?>, Func<uint>, Func<string?, uint>, Maybe<uint>> act) { // Arrange var value = Rnd.Str; var some = F.Some<string?>(value); var ifNull = Substitute.For<Func<uint>>(); var ifSome = Substitute.For<Func<string?, uint>>(); // Act act(some, ifNull, ifSome); // Assert ifNull.DidNotReceiveWithAnyArgs().Invoke(); ifSome.Received().Invoke(value); } public abstract void Test08_None_With_NullValueMsg__Runs_IfNull(); protected static void Test08(Func<Maybe<string?>, Func<uint>, Func<string?, uint>, Maybe<uint>> act) { // Arrange var none = F.None<string?, NullValueMsg>(); var ifNull = Substitute.For<Func<uint>>(); var ifSome = Substitute.For<Func<string?, uint>>(); // Act act(none, ifNull, ifSome); // Assert 
ifNull.Received().Invoke(); ifSome.DidNotReceiveWithAnyArgs().Invoke(default); } public abstract void Test09_None_With_Msg__Returns_None(); protected static void Test09(Func<Maybe<string?>, Func<uint>, Func<string?, uint>, Maybe<uint>> act) { // Arrange var msg = new TestMsg(); var maybe = F.None<string?>(msg); var ifNull = Substitute.For<Func<uint>>(); var ifSome = Substitute.For<Func<string?, uint>>(); // Act var result = act(maybe, ifNull, ifSome); // Assert var none = result.AssertNone(); Assert.Same(msg, none); ifNull.DidNotReceiveWithAnyArgs().Invoke(); ifSome.DidNotReceiveWithAnyArgs().Invoke(default); } public abstract void Test10_Exception_In_IfNull__Uses_Handler(); protected static void Test10(Func<Maybe<Guid?>, Func<string>, Func<Guid?, string>, F.Handler, Maybe<string>> act) { // Arrange var some = F.Some<Guid?>(() => null, true, F.DefaultHandler); var none = F.None<Guid?, NullValueMsg>(); var ifNull = Substitute.For<Func<string>>(); var message = Rnd.Str; var ex = new Exception(message); ifNull.Invoke() .Throws(ex); var ifSome = Substitute.For<Func<Guid?, string>>(); var handler = Substitute.For<F.Handler>(); // Act var r0 = act(some, ifNull, ifSome, handler); var r1 = act(none, ifNull, ifSome, handler); // Assert r0.AssertNone(); r1.AssertNone(); handler.Received(2).Invoke(ex); } public abstract void Test11_Exception_In_IfSome__Uses_Handler(); protected static void Test11(Func<Maybe<Guid?>, Func<string>, Func<Guid?, string>, F.Handler, Maybe<string>> act) { // Arrange var value = Rnd.Guid; var some = F.Some<Guid?>(value); var ifNull = Substitute.For<Func<string>>(); var ifSome = Substitute.For<Func<Guid?, string>>(); var message = Rnd.Str; var ex = new Exception(message); ifSome.Invoke(value) .Throws(ex); var handler = Substitute.For<F.Handler>(); // Act var result = act(some, ifNull, ifSome, handler); // Assert result.AssertNone(); handler.Received().Invoke(ex); } public abstract void Test12_Exception_In_IfNull__Uses_DefaultHandler(); protected 
static void Test12(Func<Maybe<Guid?>, Func<Maybe<string>>, Func<Guid?, Maybe<string>>, Maybe<string>> act) { // Arrange var some = F.Some<Guid?>(() => null, true, F.DefaultHandler); var none = F.None<Guid?, NullValueMsg>(); var ifNull = Substitute.For<Func<Maybe<string>>>(); ifNull.Invoke() .Throws(new Exception()); var ifSome = Substitute.For<Func<Guid?, Maybe<string>>>(); // Act var r0 = act(some, ifNull, ifSome); var r1 = act(none, ifNull, ifSome); // Assert r0.AssertNone().AssertType<UnhandledExceptionMsg>(); r1.AssertNone().AssertType<UnhandledExceptionMsg>(); } public abstract void Test13_Exception_In_IfSome__Uses_DefaultHandler(); protected static void Test13(Func<Maybe<Guid?>, Func<Maybe<string>>, Func<Guid?, Maybe<string>>, Maybe<string>> act) { // Arrange var value = Rnd.Guid; var some = F.Some<Guid?>(value); var none = F.None<Guid?, NullValueMsg>(); var ifNull = Substitute.For<Func<Maybe<string>>>(); var ifSome = Substitute.For<Func<Guid?, Maybe<string>>>(); ifSome.Invoke(value) .Throws(new Exception()); // Act var result = act(some, ifNull, ifSome); // Assert result.AssertNone().AssertType<UnhandledExceptionMsg>(); } public sealed record class TestMsg : IMsg; }
27.21223
117
0.69187
[ "MIT" ]
bfren/maybe
tests/Tests.MaybeF/- Test Abstracts -/IfNull/IfNull_Tests.cs
7,565
C#
using System; using System.Collections.Concurrent; using WireMock.Handlers; using WireMock.Logging; using WireMock.Matchers; using WireMock.Util; #if !USE_ASPNETCORE using Owin; #else using IAppBuilder = Microsoft.AspNetCore.Builder.IApplicationBuilder; using Microsoft.Extensions.DependencyInjection; #endif namespace WireMock.Owin { internal class WireMockMiddlewareOptions : IWireMockMiddlewareOptions { public IWireMockLogger Logger { get; set; } public TimeSpan? RequestProcessingDelay { get; set; } public IStringMatcher AuthorizationMatcher { get; set; } public bool? AllowPartialMapping { get; set; } public ConcurrentDictionary<Guid, IMapping> Mappings { get; } = new ConcurrentDictionary<Guid, IMapping>(); public ConcurrentDictionary<string, ScenarioState> Scenarios { get; } = new ConcurrentDictionary<string, ScenarioState>(); public ConcurrentObservableCollection<LogEntry> LogEntries { get; } = new ConcurrentObservableCollection<LogEntry>(); public int? RequestLogExpirationDuration { get; set; } public int? MaxRequestLogCount { get; set; } public Action<IAppBuilder> PreWireMockMiddlewareInit { get; set; } public Action<IAppBuilder> PostWireMockMiddlewareInit { get; set; } #if USE_ASPNETCORE public Action<IServiceCollection> AdditionalServiceRegistration { get; set; } #endif /// <inheritdoc cref="IWireMockMiddlewareOptions.FileSystemHandler"/> public IFileSystemHandler FileSystemHandler { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.AllowBodyForAllHttpMethods"/> public bool? AllowBodyForAllHttpMethods { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.AllowOnlyDefinedHttpStatusCodeInResponse"/> public bool? AllowOnlyDefinedHttpStatusCodeInResponse { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.DisableJsonBodyParsing"/> public bool? DisableJsonBodyParsing { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.DisableRequestBodyDecompressing"/> public bool? 
DisableRequestBodyDecompressing { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.HandleRequestsSynchronously"/> public bool? HandleRequestsSynchronously { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.X509StoreName"/> public string X509StoreName { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.X509StoreLocation"/> public string X509StoreLocation { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.X509ThumbprintOrSubjectName"/> public string X509ThumbprintOrSubjectName { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.X509CertificateFilePath"/> public string X509CertificateFilePath { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.X509CertificatePassword"/> public string X509CertificatePassword { get; set; } /// <inheritdoc cref="IWireMockMiddlewareOptions.CustomCertificateDefined"/> public bool CustomCertificateDefined => !string.IsNullOrEmpty(X509StoreName) && !string.IsNullOrEmpty(X509StoreLocation) || !string.IsNullOrEmpty(X509CertificateFilePath) && !string.IsNullOrEmpty(X509CertificatePassword); } }
41.329268
130
0.730304
[ "Apache-2.0" ]
qq362220083/WireMock.Net
src/WireMock.Net/Owin/WireMockMiddlewareOptions.cs
3,391
C#
using System; using System.Collections.Generic; using System.Linq; using NUnit.Framework; using Umbraco.Core.Models; namespace Umbraco.Tests.Models { [TestFixture] public class PreValueCollectionTests { [Test] public void Can_Deep_Clone() { var d = new PreValueCollection(new Dictionary<string, PreValue> { {"blah1", new PreValue(1, "test1", 1)}, {"blah2", new PreValue(2, "test1", 3)}, {"blah3", new PreValue(3, "test1", 2)} }); var a = new PreValueCollection(new[] { new PreValue(1, "test1", 1), new PreValue(2, "test1", 3), new PreValue(3, "test1", 2) }); var clone1 = (PreValueCollection)d.DeepClone(); var clone2 = (PreValueCollection)a.DeepClone(); Action<PreValueCollection, PreValueCollection> assert = (orig, clone) => { Assert.AreNotSame(orig, clone); var oDic = orig.FormatAsDictionary(); var cDic = clone.FormatAsDictionary(); Assert.AreEqual(oDic.Keys.Count(), cDic.Keys.Count()); foreach (var k in oDic.Keys) { Assert.AreNotSame(oDic[k], cDic[k]); Assert.AreEqual(oDic[k].Id, cDic[k].Id); Assert.AreEqual(oDic[k].SortOrder, cDic[k].SortOrder); Assert.AreEqual(oDic[k].Value, cDic[k].Value); } }; assert(d, clone1); assert(a, clone2); } } }
32.352941
84
0.503636
[ "MIT" ]
Abhith/Umbraco-CMS
src/Umbraco.Tests/Models/PreValueCollectionTests.cs
1,650
C#
using CodeBrosForum.Data; using CodeBrosForum.Data.Models; using CodeBrosForum.Service; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; namespace CodeBrosForum { public class Startup { public Startup(IConfiguration configuration) { Configuration = configuration; } public IConfiguration Configuration { get; } // This method gets called by the runtime. Use this method to add services to the container. public void ConfigureServices(IServiceCollection services) { services.AddDbContext<ApplicationDbContext>(options => options.UseSqlServer( Configuration.GetConnectionString("DefaultConnection"))); services.AddDefaultIdentity<ApplicationUser>(options => options.SignIn.RequireConfirmedAccount = true) .AddRoles<IdentityRole>() .AddEntityFrameworkStores<ApplicationDbContext>(); services.AddControllersWithViews(); services.AddRazorPages(); services.AddScoped<IForum, ForumService>(); services.AddScoped<IPost, PostService>(); services.AddScoped<IApplicationUser, AppUserService>(); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. public void Configure(IApplicationBuilder app, IWebHostEnvironment env) { if (env.IsDevelopment()) { app.UseDeveloperExceptionPage(); } else { app.UseExceptionHandler("/Home/Error"); // The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts. app.UseHsts(); } app.UseHttpsRedirection(); app.UseStaticFiles(); app.UseRouting(); app.UseAuthentication(); app.UseAuthorization(); app.UseEndpoints(endpoints => { endpoints.MapControllerRoute( name: "default", pattern: "{controller=Home}/{action=Index}/{id?}"); endpoints.MapRazorPages(); }); } } }
33.824324
143
0.620455
[ "MIT" ]
FatlumBehrami/CodeBros-Forum
Startup.cs
2,503
C#
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading; using System.Threading.Tasks; using Humanizer; using JetBrains.Annotations; using Microsoft.EntityFrameworkCore; using osu.Framework.Bindables; using osu.Framework.Extensions; using osu.Framework.Extensions.IEnumerableExtensions; using osu.Framework.Logging; using osu.Framework.Platform; using osu.Framework.Threading; using osu.Game.IO; using osu.Game.IO.Archives; using osu.Game.IPC; using osu.Game.Overlays.Notifications; using SharpCompress.Archives.Zip; namespace osu.Game.Database { /// <summary> /// Encapsulates a model store class to give it import functionality. /// Adds cross-functionality with <see cref="FileStore"/> to give access to the central file store for the provided model. /// </summary> /// <typeparam name="TModel">The model type.</typeparam> /// <typeparam name="TFileModel">The associated file join type.</typeparam> public abstract class ArchiveModelManager<TModel, TFileModel> : IModelManager<TModel>, IModelFileManager<TModel, TFileModel> where TModel : class, IHasFiles<TFileModel>, IHasPrimaryKey, ISoftDelete where TFileModel : class, INamedFileInfo, IHasPrimaryKey, new() { private const int import_queue_request_concurrency = 1; /// <summary> /// The size of a batch import operation before considering it a lower priority operation. /// </summary> private const int low_priority_import_batch_size = 1; /// <summary> /// A singleton scheduler shared by all <see cref="ArchiveModelManager{TModel,TFileModel}"/>. /// </summary> /// <remarks> /// This scheduler generally performs IO and CPU intensive work so concurrency is limited harshly. /// It is mainly being used as a queue mechanism for large imports. 
/// </remarks> private static readonly ThreadedTaskScheduler import_scheduler = new ThreadedTaskScheduler(import_queue_request_concurrency, nameof(ArchiveModelManager<TModel, TFileModel>)); /// <summary> /// A second scheduler for lower priority imports. /// For simplicity, these will just run in parallel with normal priority imports, but a future refactor would see this implemented via a custom scheduler/queue. /// See https://gist.github.com/peppy/f0e118a14751fc832ca30dd48ba3876b for an incomplete version of this. /// </summary> private static readonly ThreadedTaskScheduler import_scheduler_low_priority = new ThreadedTaskScheduler(import_queue_request_concurrency, nameof(ArchiveModelManager<TModel, TFileModel>)); public Action<Notification> PostNotification { protected get; set; } /// <summary> /// Fired when a new or updated <typeparamref name="TModel"/> becomes available in the database. /// This is not guaranteed to run on the update thread. /// </summary> public IBindable<WeakReference<TModel>> ItemUpdated => itemUpdated; private readonly Bindable<WeakReference<TModel>> itemUpdated = new Bindable<WeakReference<TModel>>(); /// <summary> /// Fired when a <typeparamref name="TModel"/> is removed from the database. /// This is not guaranteed to run on the update thread. 
/// </summary> public IBindable<WeakReference<TModel>> ItemRemoved => itemRemoved; private readonly Bindable<WeakReference<TModel>> itemRemoved = new Bindable<WeakReference<TModel>>(); public virtual IEnumerable<string> HandledExtensions => new[] { @".zip" }; protected readonly FileStore Files; protected readonly IDatabaseContextFactory ContextFactory; protected readonly MutableDatabaseBackedStore<TModel> ModelStore; // ReSharper disable once NotAccessedField.Local (we should keep a reference to this so it is not finalised) private ArchiveImportIPCChannel ipc; private readonly Storage exportStorage; protected ArchiveModelManager(Storage storage, IDatabaseContextFactory contextFactory, MutableDatabaseBackedStoreWithFileIncludes<TModel, TFileModel> modelStore, IIpcHost importHost = null) { ContextFactory = contextFactory; ModelStore = modelStore; ModelStore.ItemUpdated += item => handleEvent(() => itemUpdated.Value = new WeakReference<TModel>(item)); ModelStore.ItemRemoved += item => handleEvent(() => itemRemoved.Value = new WeakReference<TModel>(item)); exportStorage = storage.GetStorageForDirectory(@"exports"); Files = new FileStore(contextFactory, storage); if (importHost != null) ipc = new ArchiveImportIPCChannel(importHost, this); ModelStore.Cleanup(); } /// <summary> /// Import one or more <typeparamref name="TModel"/> items from filesystem <paramref name="paths"/>. /// </summary> /// <remarks> /// This will be treated as a low priority import if more than one path is specified; use <see cref="Import(ImportTask[])"/> to always import at standard priority. /// This will post notifications tracking progress. 
/// </remarks>
/// <param name="paths">One or more archive locations on disk.</param>
public Task Import(params string[] paths)
{
    var notification = new ImportProgressNotification();

    PostNotification?.Invoke(notification);

    return Import(notification, paths.Select(p => new ImportTask(p)).ToArray());
}

/// <summary>
/// Import one or more <typeparamref name="TModel"/> items from the provided <see cref="ImportTask"/>s,
/// posting a progress-tracking notification.
/// </summary>
/// <param name="tasks">The import tasks to run.</param>
public Task Import(params ImportTask[] tasks)
{
    var notification = new ImportProgressNotification();

    PostNotification?.Invoke(notification);

    return Import(notification, tasks);
}

/// <summary>
/// Import multiple <see cref="ImportTask"/>s in parallel, updating the provided notification with progress.
/// Individual task failures are logged and skipped; cancellation aborts remaining work.
/// </summary>
/// <param name="notification">The notification to update with progress and completion state.</param>
/// <param name="tasks">The import tasks to run.</param>
/// <returns>All models which were successfully imported.</returns>
public async Task<IEnumerable<ILive<TModel>>> Import(ProgressNotification notification, params ImportTask[] tasks)
{
    if (tasks.Length == 0)
    {
        notification.CompletionText = $"No {HumanisedModelName}s were found to import!";
        notification.State = ProgressNotificationState.Completed;
        return Enumerable.Empty<ILive<TModel>>();
    }

    notification.Progress = 0;
    notification.Text = $"{HumanisedModelName.Humanize(LetterCasing.Title)} import is initialising...";

    int current = 0;

    var imported = new List<ILive<TModel>>();

    // Larger batches are demoted to the low priority scheduler so they don't starve small imports.
    bool isLowPriorityImport = tasks.Length > low_priority_import_batch_size;

    try
    {
        await Task.WhenAll(tasks.Select(async task =>
        {
            notification.CancellationToken.ThrowIfCancellationRequested();

            try
            {
                var model = await Import(task, isLowPriorityImport, notification.CancellationToken).ConfigureAwait(false);

                // The lock guards both the shared list and the shared progress counter.
                lock (imported)
                {
                    if (model != null)
                        imported.Add(model);
                    current++;

                    notification.Text = $"Imported {current} of {tasks.Length} {HumanisedModelName}s";
                    notification.Progress = (float)current / tasks.Length;
                }
            }
            catch (TaskCanceledException)
            {
                // Cancellation must propagate to the outer handler rather than being logged as a failure.
                throw;
            }
            catch (Exception e)
            {
                Logger.Error(e, $@"Could not import ({task})", LoggingTarget.Database);
            }
        })).ConfigureAwait(false);
    }
    catch (OperationCanceledException)
    {
        // If some imports already succeeded, fall through and report partial success below.
        if (imported.Count == 0)
        {
            notification.State = ProgressNotificationState.Cancelled;
            return imported;
        }
    }

    if (imported.Count == 0)
    {
        notification.Text = $"{HumanisedModelName.Humanize(LetterCasing.Title)} import failed!";
        notification.State = ProgressNotificationState.Cancelled;
    }
    else
    {
        notification.CompletionText = imported.Count == 1
            ? $"Imported {imported.First().Value}!"
            : $"Imported {imported.Count} {HumanisedModelName}s!";

        if (imported.Count > 0 && PostImport != null)
        {
            notification.CompletionText += " Click to view.";
            notification.CompletionClickAction = () =>
            {
                PostImport?.Invoke(imported);
                return true;
            };
        }

        notification.State = ProgressNotificationState.Completed;
    }

    return imported;
}

/// <summary>
/// Import one <typeparamref name="TModel"/> from the filesystem and delete the file on success.
/// Note that this bypasses the UI flow and should only be used for special cases or testing.
/// </summary>
/// <param name="task">The <see cref="ImportTask"/> containing data about the <typeparamref name="TModel"/> to import.</param>
/// <param name="lowPriority">Whether this is a low priority import.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
/// <returns>The imported model, if successful.</returns>
public async Task<ILive<TModel>> Import(ImportTask task, bool lowPriority = false, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();

    ILive<TModel> import;
    using (ArchiveReader reader = task.GetReader())
        import = await Import(reader, lowPriority, cancellationToken).ConfigureAwait(false);

    // We may or may not want to delete the file depending on where it is stored.
    //  e.g. reconstructing/repairing database with items from default storage.
    // Also, not always a single file, i.e. for LegacyFilesystemReader
    // TODO: Add a check to prevent files from storage to be deleted.
    try
    {
        if (import != null && File.Exists(task.Path) && ShouldDeleteArchive(task.Path))
            File.Delete(task.Path);
    }
    catch (Exception e)
    {
        LogForModel(import?.Value, $@"Could not delete original file after import ({task})", e);
    }

    return import;
}

/// <summary>
/// Callback invoked with all imported models after a successful multi-item import;
/// when set, the completion notification gains a "Click to view." action that triggers it.
/// Only the setter is public.
/// </summary>
public Action<IEnumerable<ILive<TModel>>> PostImport { protected get; set; }

/// <summary>
/// Silently import an item from an <see cref="ArchiveReader"/>.
/// </summary>
/// <param name="archive">The archive to be imported.</param>
/// <param name="lowPriority">Whether this is a low priority import.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
public Task<ILive<TModel>> Import(ArchiveReader archive, bool lowPriority = false, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();

    TModel model = null;

    try
    {
        model = CreateModel(archive);

        // A null model is the subclass's way of silently aborting the import; wrap it in an empty live.
        if (model == null)
            return Task.FromResult<ILive<TModel>>(new EntityFrameworkLive<TModel>(null));
    }
    catch (TaskCanceledException)
    {
        throw;
    }
    catch (Exception e)
    {
        LogForModel(model, @$"Model creation of {archive.Name} failed.", e);
        return null;
    }

    return Import(model, archive, lowPriority, cancellationToken);
}

/// <summary>
/// Any file extensions which should be included in hash creation.
/// Generally should include all file types which determine the file's uniqueness.
/// Large files should be avoided if possible.
/// </summary>
/// <remarks>
/// This is only used by the default hash implementation. If <see cref="ComputeHash"/> is overridden, it will not be used.
/// </remarks>
protected abstract string[] HashableFileTypes { get; }

/// <summary>
/// Log a message (or error) tagged with the first five characters of the model's hash for traceability.
/// </summary>
/// <param name="model">The model the message relates to. May be null (logged as "?????").</param>
/// <param name="message">The message to log.</param>
/// <param name="e">An optional exception; when present the message is logged as an error.</param>
internal static void LogForModel(TModel model, string message, Exception e = null)
{
    string prefix = $"[{(model?.Hash ?? "?????").Substring(0, 5)}]";

    if (e != null)
        Logger.Error(e, $"{prefix} {message}", LoggingTarget.Database);
    else
        Logger.Log($"{prefix} {message}", LoggingTarget.Database);
}

/// <summary>
/// Whether the implementation overrides <see cref="ComputeHash"/> with a custom implementation.
/// Custom hash implementations must bypass the early exit in the import flow (see <see cref="computeHashFast"/> usage).
/// </summary>
protected virtual bool HasCustomHashFunction => false;

/// <summary>
/// Create a SHA-2 hash from the provided archive based on file content of all files matching <see cref="HashableFileTypes"/>.
/// </summary>
/// <remarks>
/// In the case of no matching files (or empty content), a unique fallback hash is generated instead
/// (see <see cref="generateFallbackHash"/>), so such entries can be identified in the data store.
/// </remarks>
/// <param name="item">The model whose attached files should be hashed.</param>
/// <returns>The computed hash string.</returns>
protected virtual string ComputeHash(TModel item)
{
    // Only hash the file types that determine uniqueness, in a stable (filename-sorted) order.
    var hashableFiles = item.Files
                            .Where(f => HashableFileTypes.Any(ext => f.Filename.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
                            .OrderBy(f => f.Filename)
                            .ToArray();

    if (hashableFiles.Length > 0)
    {
        // for now, concatenate all hashable files in the set to create a unique hash.
        MemoryStream hashable = new MemoryStream();

        foreach (TFileModel file in hashableFiles)
        {
            using (Stream s = Files.Store.GetStream(file.FileInfo.StoragePath))
                s.CopyTo(hashable);
        }

        if (hashable.Length > 0)
            return hashable.ComputeSHA2Hash();
    }

    return generateFallbackHash();
}

/// <summary>
/// Silently import an item from a <typeparamref name="TModel"/>.
/// </summary>
/// <param name="item">The model to be imported.</param>
/// <param name="archive">An optional archive to use for model population.</param>
/// <param name="lowPriority">Whether this is a low priority import.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
public virtual async Task<ILive<TModel>> Import(TModel item, ArchiveReader archive = null, bool lowPriority = false, CancellationToken cancellationToken = default) => await Task.Factory.StartNew(async () =>
{
    cancellationToken.ThrowIfCancellationRequested();

    bool checkedExisting = false;
    TModel existing = null;

    if (archive != null && !HasCustomHashFunction)
    {
        // this is a fast bail condition to improve large import performance.
        item.Hash = computeHashFast(archive);

        checkedExisting = true;
        existing = CheckForExisting(item);

        if (existing != null)
        {
            // bare minimum comparisons
            //
            // note that this should really be checking filesizes on disk (of existing files) for some degree of sanity.
            // or alternatively doing a faster hash check. either of these require database changes and reprocessing of existing files.
            if (CanSkipImport(existing, item) &&
                getFilenames(existing.Files).SequenceEqual(getShortenedFilenames(archive).Select(p => p.shortened).OrderBy(f => f)))
            {
                LogForModel(item, @$"Found existing (optimised) {HumanisedModelName} for {item} (ID {existing.ID}) – skipping import.");
                Undelete(existing);
                return existing.ToEntityFrameworkLive();
            }

            LogForModel(item, @"Found existing (optimised) but failed pre-check.");
        }
    }

    // Local helper to clean up a partially-completed import: deletes the model if it reached the
    // database, otherwise dereferences any files already added so they can be garbage collected.
    void rollback()
    {
        if (!Delete(item))
        {
            // We may have not yet added the model to the underlying table, but should still clean up files.
            LogForModel(item, @"Dereferencing files for incomplete import.");
            Files.Dereference(item.Files.Select(f => f.FileInfo).ToArray());
        }
    }

    // Hold back ItemUpdated/ItemRemoved events until the import either commits or rolls back.
    delayEvents();

    try
    {
        LogForModel(item, @"Beginning import...");

        item.Files = archive != null ? createFileInfos(archive, Files) : new List<TFileModel>();
        item.Hash = ComputeHash(item);

        await Populate(item, archive, cancellationToken).ConfigureAwait(false);

        using (var write = ContextFactory.GetForWrite()) // used to share a context for full import. keep in mind this will block all writes.
        {
            try
            {
                if (!write.IsTransactionLeader)
                    throw new InvalidOperationException(@$"Ensure there is no parent transaction so errors can correctly be handled by {this}");

                // If the fast-hash path above didn't run, do the duplicate check now with the full hash.
                if (!checkedExisting)
                    existing = CheckForExisting(item);

                if (existing != null)
                {
                    if (CanReuseExisting(existing, item))
                    {
                        Undelete(existing);
                        LogForModel(item, @$"Found existing {HumanisedModelName} for {item} (ID {existing.ID}) – skipping import.");

                        // existing item will be used; rollback new import and exit early.
                        rollback();
                        flushEvents(true);
                        return existing.ToEntityFrameworkLive();
                    }

                    LogForModel(item, @"Found existing but failed re-use check.");

                    Delete(existing);
                    ModelStore.PurgeDeletable(s => s.ID == existing.ID);
                }

                PreImport(item);

                // import to store
                ModelStore.Add(item);
            }
            catch (Exception e)
            {
                write.Errors.Add(e);
                throw;
            }
        }

        LogForModel(item, @"Import successfully completed!");
    }
    catch (Exception e)
    {
        if (!(e is TaskCanceledException))
            LogForModel(item, @"Database import or population failed and has been rolled back.", e);

        rollback();
        flushEvents(false);
        throw;
    }

    flushEvents(true);
    return item.ToEntityFrameworkLive();
}, cancellationToken, TaskCreationOptions.HideScheduler, lowPriority ? import_scheduler_low_priority : import_scheduler).Unwrap().ConfigureAwait(false);

/// <summary>
/// Exports an item to a legacy (.zip based) package.
/// </summary>
/// <param name="item">The item to export.</param>
/// <exception cref="ArgumentException">If the item cannot be found in the store.</exception>
public void Export(TModel item)
{
    var retrievedItem = ModelStore.ConsumableItems.FirstOrDefault(s => s.ID == item.ID);

    if (retrievedItem == null)
        throw new ArgumentException(@"Specified model could not be found", nameof(item));

    // Sanitise the model's display name into a filesystem-safe filename, with the primary handled extension.
    string filename = $"{getValidFilename(item.ToString())}{HandledExtensions.First()}";

    using (var stream = exportStorage.GetStream(filename, FileAccess.Write, FileMode.Create))
        ExportModelTo(retrievedItem, stream);

    exportStorage.PresentFileExternally(filename);
}

/// <summary>
/// Exports an item to the given output stream.
/// </summary>
/// <param name="model">The item to export.</param>
/// <param name="outputStream">The output stream to export to.</param>
public virtual void ExportModelTo(TModel model, Stream outputStream)
{
    using (var archive = ZipArchive.Create())
    {
        foreach (var file in model.Files)
            archive.AddEntry(file.Filename, Files.Storage.GetStream(file.FileInfo.StoragePath));

        archive.SaveTo(outputStream);
    }
}

/// <summary>
/// Replace an existing file with a new version.
/// </summary>
/// <param name="model">The item to operate on.</param>
/// <param name="file">The existing file to be replaced.</param>
/// <param name="contents">The new file contents.</param>
/// <param name="filename">An optional filename for the new file. Will use the previous filename if not specified.</param>
public void ReplaceFile(TModel model, TFileModel file, Stream contents, string filename = null)
{
    // Perform the delete+add as a single write so both changes commit together.
    using (ContextFactory.GetForWrite())
    {
        DeleteFile(model, file);
        AddFile(model, contents, filename ?? file.Filename);
    }
}

/// <summary>
/// Delete an existing file.
/// </summary>
/// <param name="model">The item to operate on.</param>
/// <param name="file">The existing file to be deleted.</param>
public void DeleteFile(TModel model, TFileModel file)
{
    using (var usage = ContextFactory.GetForWrite())
    {
        // Dereference the existing file info, since the file model will be removed.
        if (file.FileInfo != null)
        {
            Files.Dereference(file.FileInfo);

            if (file.ID > 0)
            {
                // This shouldn't be required, but here for safety in case the provided TModel is not being change tracked
                // Definitely can be removed once we rework the database backend.
                usage.Context.Set<TFileModel>().Remove(file);
            }
        }

        model.Files.Remove(file);
    }
}

/// <summary>
/// Add a new file.
/// </summary>
/// <param name="model">The item to operate on.</param>
/// <param name="contents">The new file contents.</param>
/// <param name="filename">The filename for the new file.</param>
public void AddFile(TModel model, Stream contents, string filename)
{
    using (ContextFactory.GetForWrite())
    {
        model.Files.Add(new TFileModel
        {
            Filename = filename,
            FileInfo = Files.Add(contents)
        });
    }

    // Only recompute hash / persist for models already in the database (ID > 0).
    if (model.ID > 0)
        Update(model);
}

/// <summary>
/// Perform an update of the specified item.
/// TODO: Support file additions/removals.
/// </summary>
/// <param name="item">The item to update.</param>
public void Update(TModel item)
{
    using (ContextFactory.GetForWrite())
    {
        // Hash must be recomputed since file content may have changed since the last store.
        item.Hash = ComputeHash(item);
        ModelStore.Update(item);
    }
}

/// <summary>
/// Delete an item from the manager.
/// Is a no-op for already deleted items.
/// </summary>
/// <param name="item">The item to delete.</param>
/// <returns>false if no operation was performed</returns>
public bool Delete(TModel item)
{
    using (ContextFactory.GetForWrite())
    {
        // re-fetch the model on the import context.
        var foundModel = queryModel().Include(s => s.Files).ThenInclude(f => f.FileInfo).FirstOrDefault(s => s.ID == item.ID);

        if (foundModel == null || foundModel.DeletePending) return false;

        if (ModelStore.Delete(foundModel))
            Files.Dereference(foundModel.Files.Select(f => f.FileInfo).ToArray());
        return true;
    }
}

/// <summary>
/// Delete multiple items.
/// This will post notifications tracking progress.
/// </summary>
/// <param name="items">The items to delete.</param>
/// <param name="silent">Whether to suppress the progress notification.</param>
public void Delete(List<TModel> items, bool silent = false)
{
    if (items.Count == 0) return;

    var notification = new ProgressNotification
    {
        Progress = 0,
        Text = $"Preparing to delete all {HumanisedModelName}s...",
        CompletionText = $"Deleted all {HumanisedModelName}s!",
        State = ProgressNotificationState.Active,
    };

    if (!silent) PostNotification?.Invoke(notification);

    int i = 0;

    foreach (var b in items)
    {
        if (notification.State == ProgressNotificationState.Cancelled)
            // user requested abort
            return;

        notification.Text = $"Deleting {HumanisedModelName}s ({++i} of {items.Count})";

        Delete(b);

        notification.Progress = (float)i / items.Count;
    }

    notification.State = ProgressNotificationState.Completed;
}

/// <summary>
/// Restore multiple items that were previously deleted.
/// This will post notifications tracking progress.
/// </summary>
/// <param name="items">The items to restore.</param>
/// <param name="silent">Whether to suppress the progress notification.</param>
public void Undelete(List<TModel> items, bool silent = false)
{
    if (!items.Any()) return;

    var notification = new ProgressNotification
    {
        CompletionText = "Restored all deleted items!",
        Progress = 0,
        State = ProgressNotificationState.Active,
    };

    if (!silent) PostNotification?.Invoke(notification);

    int i = 0;

    foreach (var item in items)
    {
        if (notification.State == ProgressNotificationState.Cancelled)
            // user requested abort
            return;

        notification.Text = $"Restoring ({++i} of {items.Count})";

        Undelete(item);

        notification.Progress = (float)i / items.Count;
    }

    notification.State = ProgressNotificationState.Completed;
}

/// <summary>
/// Restore an item that was previously deleted. Is a no-op if the item is not in a deleted state, or has its protected flag set.
/// </summary>
/// <param name="item">The item to restore</param>
public void Undelete(TModel item)
{
    using (var usage = ContextFactory.GetForWrite())
    {
        // Temporarily disable change auto-detection for performance while re-referencing files.
        usage.Context.ChangeTracker.AutoDetectChangesEnabled = false;

        if (!ModelStore.Undelete(item)) return;

        Files.Reference(item.Files.Select(f => f.FileInfo).ToArray());

        usage.Context.ChangeTracker.AutoDetectChangesEnabled = true;
    }
}

/// <summary>
/// Compute a hash directly from archive content (without copying files into the store first),
/// matching the file selection and ordering of the default <see cref="ComputeHash"/>.
/// Used as a fast duplicate pre-check during import.
/// </summary>
private string computeHashFast(ArchiveReader reader)
{
    MemoryStream hashable = new MemoryStream();

    foreach (string file in reader.Filenames.Where(f => HashableFileTypes.Any(ext => f.EndsWith(ext, StringComparison.OrdinalIgnoreCase))).OrderBy(f => f))
    {
        using (Stream s = reader.GetStream(file))
            s.CopyTo(hashable);
    }

    if (hashable.Length > 0)
        return hashable.ComputeSHA2Hash();

    return generateFallbackHash();
}

/// <summary>
/// Create all required <see cref="IO.FileInfo"/>s for the provided archive, adding them to the global file store.
/// </summary>
/// <param name="reader">The archive providing file content.</param>
/// <param name="files">The file store to add content to (reference counted).</param>
/// <returns>One <typeparamref name="TFileModel"/> per archive entry, with prefix-stripped standardised filenames.</returns>
private List<TFileModel> createFileInfos(ArchiveReader reader, FileStore files)
{
    var fileInfos = new List<TFileModel>();

    // import files to manager
    foreach (var filenames in getShortenedFilenames(reader))
    {
        using (Stream s = reader.GetStream(filenames.original))
        {
            fileInfos.Add(new TFileModel
            {
                Filename = filenames.shortened,
                FileInfo = files.Add(s)
            });
        }
    }

    return fileInfos;
}

/// <summary>
/// Pair each archive filename with a shortened form: the common directory prefix (when it ends in a
/// path separator) is stripped and the path is standardised.
/// </summary>
private IEnumerable<(string original, string shortened)> getShortenedFilenames(ArchiveReader reader)
{
    string prefix = reader.Filenames.GetCommonPrefix();
    // Only strip the prefix when it is a whole directory component, never a partial filename.
    if (!(prefix.EndsWith('/') || prefix.EndsWith('\\')))
        prefix = string.Empty;

    foreach (string file in reader.Filenames)
        yield return (file, file.Substring(prefix.Length).ToStandardisedPath());
}

#region osu-stable import

/// <summary>
/// The relative path from osu-stable's data directory to import items from.
/// </summary>
protected virtual string ImportFromStablePath => null;

/// <summary>
/// Select paths to import from stable where all paths should be absolute. Default implementation iterates all directories in <see cref="ImportFromStablePath"/>.
/// </summary>
protected virtual IEnumerable<string> GetStableImportPaths(Storage storage) => storage.GetDirectories(ImportFromStablePath)
                                                                                      .Select(path => storage.GetFullPath(path));

/// <summary>
/// Whether this specified path should be removed after successful import.
/// </summary>
/// <param name="path">The path for consideration. May be a file or a directory.</param>
/// <returns>Whether to perform deletion.</returns>
protected virtual bool ShouldDeleteArchive(string path) => false;

/// <summary>
/// Import all applicable items from an osu!stable installation's storage.
/// Logs (and completes immediately) when the expected source folder is missing.
/// </summary>
/// <param name="stableStorage">The stable installation's storage.</param>
public Task ImportFromStableAsync(StableStorage stableStorage)
{
    var storage = PrepareStableStorage(stableStorage);

    // Handle situations like when the user does not have a Skins folder.
    if (!storage.ExistsDirectory(ImportFromStablePath))
    {
        string fullPath = storage.GetFullPath(ImportFromStablePath);

        Logger.Log(@$"Folder ""{fullPath}"" not available in the target osu!stable installation to import {HumanisedModelName}s.", LoggingTarget.Information, LogLevel.Error);
        return Task.CompletedTask;
    }

    return Task.Run(async () => await Import(GetStableImportPaths(storage).ToArray()).ConfigureAwait(false));
}

/// <summary>
/// Run any required traversal operations on the stable storage location before performing operations.
/// </summary>
/// <param name="stableStorage">The stable storage.</param>
/// <returns>The usable storage. Return the unchanged <paramref name="stableStorage"/> if no traversal is required.</returns>
protected virtual Storage PrepareStableStorage(StableStorage stableStorage) => stableStorage;

#endregion

/// <summary>
/// Create a barebones model from the provided archive.
/// Actual expensive population should be done in <see cref="Populate"/>; this should just prepare for duplicate checking.
/// </summary>
/// <param name="archive">The archive to create the model for.</param>
/// <returns>A model populated with minimal information. Returning a null will abort importing silently.</returns>
protected abstract TModel CreateModel(ArchiveReader archive);

/// <summary>
/// Populate the provided model completely from the given archive.
/// After this method, the model should be in a state ready to commit to a store.
/// </summary>
/// <param name="model">The model to populate.</param>
/// <param name="archive">The archive to use as a reference for population. May be null.</param>
/// <param name="cancellationToken">An optional cancellation token.</param>
protected abstract Task Populate(TModel model, [CanBeNull] ArchiveReader archive, CancellationToken cancellationToken = default);

/// <summary>
/// Perform any final actions before the import to database executes.
/// </summary>
/// <param name="model">The model prepared for import.</param>
protected virtual void PreImport(TModel model)
{
    // No-op by default; subclasses may hook in final pre-database work here.
}

/// <summary>
/// Look up an already-stored model matching a newly imported one by hash.
/// </summary>
/// <param name="model">The new model proposed for import.</param>
/// <returns>An existing model which matches the criteria to skip importing, else null.</returns>
protected TModel CheckForExisting(TModel model)
{
    // A model without a hash can never match anything.
    if (model.Hash == null)
        return null;

    return ModelStore.ConsumableItems.FirstOrDefault(b => b.Hash == model.Hash);
}

/// <summary>
/// Whether the given model is already present (and usable) in the local store.
/// </summary>
public bool IsAvailableLocally(TModel model)
{
    // Only consider items which are not pending deletion.
    var usableItems = ModelStore.ConsumableItems.Where(m => !m.DeletePending);

    return CheckLocalAvailability(model, usableItems);
}

/// <summary>
/// Performs implementation specific comparisons to determine whether a given model is present in the local store.
/// </summary>
/// <param name="model">The <typeparamref name="TModel"/> whose existence needs to be checked.</param>
/// <param name="items">The usable items present in the store.</param>
/// <returns>Whether the <typeparamref name="TModel"/> exists.</returns>
protected virtual bool CheckLocalAvailability(TModel model, IQueryable<TModel> items)
    => model.ID > 0 && items.Any(candidate => candidate.ID == model.ID && candidate.Files.Any());

/// <summary>
/// Whether import can be skipped after finding an existing import early in the process.
/// Only valid when <see cref="ComputeHash"/> is not overridden.
/// </summary>
/// <param name="existing">The existing model.</param>
/// <param name="import">The newly imported model.</param>
/// <returns>Whether to skip this import completely.</returns>
protected virtual bool CanSkipImport(TModel existing, TModel import)
{
    return true;
}

/// <summary>
/// After an existing <typeparamref name="TModel"/> is found during an import process, the default behaviour is to use/restore the existing
/// item and skip the import. This method allows changing that behaviour.
/// </summary>
/// <param name="existing">The existing model.</param>
/// <param name="import">The newly imported model.</param>
/// <returns>Whether the existing model should be restored and used. Returning false will delete the existing and force a re-import.</returns>
protected virtual bool CanReuseExisting(TModel existing, TModel import) =>
    // for the best or worst, we copy and import files of a new import before checking whether
    // it is a duplicate. so to check if anything has changed, we can just compare all FileInfo IDs.
    getIDs(existing.Files).SequenceEqual(getIDs(import.Files)) &&
    getFilenames(existing.Files).SequenceEqual(getFilenames(import.Files));

// Yields the FileInfo IDs of the given files in filename-sorted order (stable comparison basis).
private IEnumerable<long> getIDs(List<TFileModel> files)
{
    foreach (var f in files.OrderBy(f => f.Filename))
        yield return f.FileInfo.ID;
}

// Yields the filenames of the given files in filename-sorted order (stable comparison basis).
private IEnumerable<string> getFilenames(List<TFileModel> files)
{
    foreach (var f in files.OrderBy(f => f.Filename))
        yield return f.Filename;
}

// Raw access to the model table on the current database context.
private DbSet<TModel> queryModel() => ContextFactory.Get().Set<TModel>();

/// <summary>
/// A user-friendly name for the model type, used in notifications and log messages.
/// Defaults to the type name, lowercased, with any "Info" suffix removed.
/// </summary>
public virtual string HumanisedModelName => $"{typeof(TModel).Name.Replace(@"Info", "").ToLower()}";

#region Event handling / delaying

// Events captured while delayingEvents is set; flushed (or discarded) by flushEvents.
private readonly List<Action> queuedEvents = new List<Action>();

/// <summary>
/// Allows delaying of outwards events until an operation is confirmed (at a database level).
/// </summary>
private bool delayingEvents;

/// <summary>
/// Begin delaying outwards events.
/// </summary>
private void delayEvents() => delayingEvents = true;

/// <summary>
/// Flush delayed events and disable delaying.
/// </summary>
/// <param name="perform">Whether the flushed events should be performed.</param>
private void flushEvents(bool perform)
{
    // Snapshot and clear under the lock; invoke outside it to avoid holding the lock during callbacks.
    Action[] events;

    lock (queuedEvents)
    {
        events = queuedEvents.ToArray();
        queuedEvents.Clear();
    }

    if (perform)
    {
        foreach (var a in events)
            a.Invoke();
    }

    delayingEvents = false;
}

/// <summary>
/// Either queue the given event (while delaying is active) or invoke it immediately.
/// </summary>
private void handleEvent(Action a)
{
    if (delayingEvents)
    {
        lock (queuedEvents)
            queuedEvents.Add(a);
    }
    else
        a.Invoke();
}

#endregion

private static string generateFallbackHash()
{
    // if a hash could not be generated from file content, presume a unique / new import.
    // therefore, let's use a guaranteed unique hash.
    // this doesn't follow the SHA2 hashing schema intentionally, so such entries on the data store can be identified.
    return Guid.NewGuid().ToString();
}

// Replaces any filesystem-invalid characters in the given name with underscores.
private string getValidFilename(string filename)
{
    foreach (char c in Path.GetInvalidFileNameChars())
        filename = filename.Replace(c, '_');
    return filename;
}
}
}
43.222944
215
0.559743
[ "MIT" ]
DouglasMarq/osu
osu.Game/Database/ArchiveModelManager.cs
39,019
C#
#region License
/*
Copyright (c) 2015 Betson Roy

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#endregion

using System.Text.RegularExpressions;

namespace QueryMaster.GameServer.DataObjects
{
    /// <summary>
    /// Represents a log filter: a regex pattern plus the action to take on matching entries.
    /// Concrete filter types derive from this class.
    /// </summary>
    public abstract class LogFilter
    {
        /// <summary>
        /// Gets or sets the <see cref="Regex"/> instance backing this filter.
        /// </summary>
        protected internal Regex RegexInstance { get; set; }

        /// <summary>
        /// Gets or sets the regex pattern string (presumably the source of <see cref="RegexInstance"/>;
        /// derived types are expected to set both).
        /// </summary>
        protected internal string FilterString { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the filter is enabled.
        /// </summary>
        public bool Enabled { get; set; }

        /// <summary>
        /// Gets or sets <see cref="LogFilterAction"/>
        /// </summary>
        public LogFilterAction Action { get; set; }

        // Filters default to enabled, with the Allow action.
        internal LogFilter()
        {
            Enabled = true;
            Action = LogFilterAction.Allow;
        }
    }
}
32
74
0.68254
[ "MIT" ]
HMBSbige/QueryMaster
QueryMaster/GameServer/DataObjects/LogFilter.cs
2,018
C#
using System;
using System.Collections.Generic;
using System.Text;
using Game.Logic.AI;

namespace GameServerScript.AI.Game
{
    /// <summary>
    /// PVE game controller script for the "Time Vortex" epic game mode.
    /// </summary>
    public class TimeVortexEpicGame : APVEGameControl
    {
        /// <summary>
        /// Sets up the mission list when the game is created.
        /// </summary>
        public override void OnCreated()
        {
            // Mission id 40040 (original author's note: "vòng xóa LG" — presumably the LG
            // clearing round; confirm against the mission data tables).
            Game.SetupMissions("40040");
            Game.TotalMissionCount = 2;
        }

        public override void OnPrepated()
        {
            // Intentionally empty — no per-session preparation is needed for this mode.
        }

        /// <summary>
        /// Maps a raw score to a grade between 0 (lowest) and 3 (highest).
        /// Thresholds: >800 → 3, >725 → 2, >650 → 1, otherwise 0.
        /// </summary>
        public override int CalculateScoreGrade(int score)
        {
            if (score > 800)
                return 3;

            if (score > 725)
                return 2;

            if (score > 650)
                return 1;

            return 0;
        }

        public override void OnGameOverAllSession()
        {
            // Intentionally empty — nothing to do when all sessions have finished.
        }
    }
}
20.413043
58
0.447284
[ "MIT" ]
HuyTruong19x/DDTank4.1
Source Server/Game.Server.Scripts/AI/Game/TimeVortexEpicGame.cs
943
C#
//------------------------------------------------------------------------------ // <auto-generated /> // This file was automatically generated by the UpdateVendors tool. //------------------------------------------------------------------------------ // dnlib: See LICENSE.txt for more info using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; using Datadog.Trace.Vendors.dnlib.DotNet.MD; using Datadog.Trace.Vendors.dnlib.DotNet.Pdb; using Datadog.Trace.Vendors.dnlib.Threading; namespace Datadog.Trace.Vendors.dnlib.DotNet { /// <summary> /// A high-level representation of a row in the Property table /// </summary> internal abstract class PropertyDef : IHasConstant, IHasCustomAttribute, IHasSemantic, IHasCustomDebugInformation, IFullName, IMemberDef { /// <summary> /// The row id in its table /// </summary> protected uint rid; #if THREAD_SAFE readonly Lock theLock = Lock.Create(); #endif /// <inheritdoc/> public MDToken MDToken => new MDToken(Table.Property, rid); /// <inheritdoc/> public uint Rid { get => rid; set => rid = value; } /// <inheritdoc/> public int HasConstantTag => 2; /// <inheritdoc/> public int HasCustomAttributeTag => 9; /// <inheritdoc/> public int HasSemanticTag => 1; /// <summary> /// From column Property.PropFlags /// </summary> public PropertyAttributes Attributes { get => (PropertyAttributes)attributes; set => attributes = (int)value; } /// <summary>Attributes</summary> protected int attributes; /// <summary> /// From column Property.Name /// </summary> public UTF8String Name { get => name; set => name = value; } /// <summary>Name</summary> protected UTF8String name; /// <summary> /// From column Property.Type /// </summary> public CallingConventionSig Type { get => type; set => type = value; } /// <summary/> protected CallingConventionSig type; /// <inheritdoc/> public Constant Constant { get { if (!constant_isInitialized) InitializeConstant(); return constant; } set { #if THREAD_SAFE 
theLock.EnterWriteLock(); try { #endif constant = value; constant_isInitialized = true; #if THREAD_SAFE } finally { theLock.ExitWriteLock(); } #endif } } /// <summary/> protected Constant constant; /// <summary/> protected bool constant_isInitialized; void InitializeConstant() { #if THREAD_SAFE theLock.EnterWriteLock(); try { #endif if (constant_isInitialized) return; constant = GetConstant_NoLock(); constant_isInitialized = true; #if THREAD_SAFE } finally { theLock.ExitWriteLock(); } #endif } /// <summary>Called to initialize <see cref="constant"/></summary> protected virtual Constant GetConstant_NoLock() => null; /// <summary>Reset <see cref="Constant"/></summary> protected void ResetConstant() => constant_isInitialized = false; /// <summary> /// Gets all custom attributes /// </summary> public CustomAttributeCollection CustomAttributes { get { if (customAttributes is null) InitializeCustomAttributes(); return customAttributes; } } /// <summary/> protected CustomAttributeCollection customAttributes; /// <summary>Initializes <see cref="customAttributes"/></summary> protected virtual void InitializeCustomAttributes() => Interlocked.CompareExchange(ref customAttributes, new CustomAttributeCollection(), null); /// <inheritdoc/> public int HasCustomDebugInformationTag => 9; /// <inheritdoc/> public bool HasCustomDebugInfos => CustomDebugInfos.Count > 0; /// <summary> /// Gets all custom debug infos /// </summary> public IList<PdbCustomDebugInfo> CustomDebugInfos { get { if (customDebugInfos is null) InitializeCustomDebugInfos(); return customDebugInfos; } } /// <summary/> protected IList<PdbCustomDebugInfo> customDebugInfos; /// <summary>Initializes <see cref="customDebugInfos"/></summary> protected virtual void InitializeCustomDebugInfos() => Interlocked.CompareExchange(ref customDebugInfos, new List<PdbCustomDebugInfo>(), null); /// <summary> /// Gets/sets the first getter method. Writing <c>null</c> will clear all get methods. 
/// </summary> public MethodDef GetMethod { get { if (otherMethods is null) InitializePropertyMethods(); return getMethods.Count == 0 ? null : getMethods[0]; } set { if (otherMethods is null) InitializePropertyMethods(); if (value is null) getMethods.Clear(); else if (getMethods.Count == 0) getMethods.Add(value); else getMethods[0] = value; } } /// <summary> /// Gets/sets the first setter method. Writing <c>null</c> will clear all set methods. /// </summary> public MethodDef SetMethod { get { if (otherMethods is null) InitializePropertyMethods(); return setMethods.Count == 0 ? null : setMethods[0]; } set { if (otherMethods is null) InitializePropertyMethods(); if (value is null) setMethods.Clear(); else if (setMethods.Count == 0) setMethods.Add(value); else setMethods[0] = value; } } /// <summary> /// Gets all getter methods /// </summary> public IList<MethodDef> GetMethods { get { if (otherMethods is null) InitializePropertyMethods(); return getMethods; } } /// <summary> /// Gets all setter methods /// </summary> public IList<MethodDef> SetMethods { get { if (otherMethods is null) InitializePropertyMethods(); return setMethods; } } /// <summary> /// Gets the other methods /// </summary> public IList<MethodDef> OtherMethods { get { if (otherMethods is null) InitializePropertyMethods(); return otherMethods; } } void InitializePropertyMethods() { #if THREAD_SAFE theLock.EnterWriteLock(); try { #endif if (otherMethods is null) InitializePropertyMethods_NoLock(); #if THREAD_SAFE } finally { theLock.ExitWriteLock(); } #endif } /// <summary> /// Initializes <see cref="otherMethods"/>, <see cref="getMethods"/>, /// and <see cref="setMethods"/>. 
/// </summary> protected virtual void InitializePropertyMethods_NoLock() { getMethods = new List<MethodDef>(); setMethods = new List<MethodDef>(); otherMethods = new List<MethodDef>(); } /// <summary/> protected IList<MethodDef> getMethods; /// <summary/> protected IList<MethodDef> setMethods; /// <summary/> protected IList<MethodDef> otherMethods; /// <summary>Reset <see cref="GetMethods"/>, <see cref="SetMethods"/>, <see cref="OtherMethods"/></summary> protected void ResetMethods() => otherMethods = null; /// <summary> /// <c>true</c> if there are no methods attached to this property /// </summary> public bool IsEmpty => // The first property access initializes the other fields we access here GetMethods.Count == 0 && setMethods.Count == 0 && otherMethods.Count == 0; /// <inheritdoc/> public bool HasCustomAttributes => CustomAttributes.Count > 0; /// <summary> /// <c>true</c> if <see cref="OtherMethods"/> is not empty /// </summary> public bool HasOtherMethods => OtherMethods.Count > 0; /// <summary> /// <c>true</c> if <see cref="Constant"/> is not <c>null</c> /// </summary> public bool HasConstant => Constant is not null; /// <summary> /// Gets the constant element type or <see cref="dnlib.DotNet.ElementType.End"/> if there's no constant /// </summary> public ElementType ElementType { get { var c = Constant; return c is null ? 
ElementType.End : c.Type; } } /// <summary> /// Gets/sets the property sig /// </summary> public PropertySig PropertySig { get => type as PropertySig; set => type = value; } /// <summary> /// Gets/sets the declaring type (owner type) /// </summary> public TypeDef DeclaringType { get => declaringType2; set { var currentDeclaringType = DeclaringType2; if (currentDeclaringType == value) return; if (currentDeclaringType is not null) currentDeclaringType.Properties.Remove(this); // Will set DeclaringType2 = null if (value is not null) value.Properties.Add(this); // Will set DeclaringType2 = value } } /// <inheritdoc/> ITypeDefOrRef IMemberRef.DeclaringType => declaringType2; /// <summary> /// Called by <see cref="DeclaringType"/> and should normally not be called by any user /// code. Use <see cref="DeclaringType"/> instead. Only call this if you must set the /// declaring type without inserting it in the declaring type's method list. /// </summary> public TypeDef DeclaringType2 { get => declaringType2; set => declaringType2 = value; } /// <summary/> protected TypeDef declaringType2; /// <inheritdoc/> public ModuleDef Module => declaringType2?.Module; /// <summary> /// Gets the full name of the property /// </summary> public string FullName => FullNameFactory.PropertyFullName(declaringType2?.FullName, name, type, null, null); bool IIsTypeOrMethod.IsType => false; bool IIsTypeOrMethod.IsMethod => false; bool IMemberRef.IsField => false; bool IMemberRef.IsTypeSpec => false; bool IMemberRef.IsTypeRef => false; bool IMemberRef.IsTypeDef => false; bool IMemberRef.IsMethodSpec => false; bool IMemberRef.IsMethodDef => false; bool IMemberRef.IsMemberRef => false; bool IMemberRef.IsFieldDef => false; bool IMemberRef.IsPropertyDef => true; bool IMemberRef.IsEventDef => false; bool IMemberRef.IsGenericParam => false; /// <summary> /// Set or clear flags in <see cref="attributes"/> /// </summary> /// <param name="set"><c>true</c> if flags should be set, <c>false</c> if flags should 
/// be cleared</param> /// <param name="flags">Flags to set or clear</param> void ModifyAttributes(bool set, PropertyAttributes flags) { if (set) attributes |= (int)flags; else attributes &= ~(int)flags; } /// <summary> /// Gets/sets the <see cref="PropertyAttributes.SpecialName"/> bit /// </summary> public bool IsSpecialName { get => ((PropertyAttributes)attributes & PropertyAttributes.SpecialName) != 0; set => ModifyAttributes(value, PropertyAttributes.SpecialName); } /// <summary> /// Gets/sets the <see cref="PropertyAttributes.RTSpecialName"/> bit /// </summary> public bool IsRuntimeSpecialName { get => ((PropertyAttributes)attributes & PropertyAttributes.RTSpecialName) != 0; set => ModifyAttributes(value, PropertyAttributes.RTSpecialName); } /// <summary> /// Gets/sets the <see cref="PropertyAttributes.HasDefault"/> bit /// </summary> public bool HasDefault { get => ((PropertyAttributes)attributes & PropertyAttributes.HasDefault) != 0; set => ModifyAttributes(value, PropertyAttributes.HasDefault); } /// <inheritdoc/> public override string ToString() => FullName; } /// <summary> /// A Property row created by the user and not present in the original .NET file /// </summary> internal class PropertyDefUser : PropertyDef { /// <summary> /// Default constructor /// </summary> public PropertyDefUser() { } /// <summary> /// Constructor /// </summary> /// <param name="name">Name</param> public PropertyDefUser(UTF8String name) : this(name, null) { } /// <summary> /// Constructor /// </summary> /// <param name="name">Name</param> /// <param name="sig">Property signature</param> public PropertyDefUser(UTF8String name, PropertySig sig) : this(name, sig, 0) { } /// <summary> /// Constructor /// </summary> /// <param name="name">Name</param> /// <param name="sig">Property signature</param> /// <param name="flags">Flags</param> public PropertyDefUser(UTF8String name, PropertySig sig, PropertyAttributes flags) { this.name = name; type = sig; attributes = (int)flags; } } /// 
<summary> /// Created from a row in the Property table /// </summary> sealed class PropertyDefMD : PropertyDef, IMDTokenProviderMD { /// <summary>The module where this instance is located</summary> readonly ModuleDefMD readerModule; readonly uint origRid; /// <inheritdoc/> public uint OrigRid => origRid; /// <inheritdoc/> protected override Constant GetConstant_NoLock() => readerModule.ResolveConstant(readerModule.Metadata.GetConstantRid(Table.Property, origRid)); /// <inheritdoc/> protected override void InitializeCustomAttributes() { var list = readerModule.Metadata.GetCustomAttributeRidList(Table.Property, origRid); var tmp = new CustomAttributeCollection(list.Count, list, (list2, index) => readerModule.ReadCustomAttribute(list[index])); Interlocked.CompareExchange(ref customAttributes, tmp, null); } /// <inheritdoc/> protected override void InitializeCustomDebugInfos() { var list = new List<PdbCustomDebugInfo>(); readerModule.InitializeCustomDebugInfos(new MDToken(MDToken.Table, origRid), new GenericParamContext(declaringType2), list); Interlocked.CompareExchange(ref customDebugInfos, list, null); } /// <summary> /// Constructor /// </summary> /// <param name="readerModule">The module which contains this <c>Property</c> row</param> /// <param name="rid">Row ID</param> /// <exception cref="ArgumentNullException">If <paramref name="readerModule"/> is <c>null</c></exception> /// <exception cref="ArgumentException">If <paramref name="rid"/> is invalid</exception> public PropertyDefMD(ModuleDefMD readerModule, uint rid) { #if DEBUG if (readerModule is null) throw new ArgumentNullException("readerModule"); if (readerModule.TablesStream.PropertyTable.IsInvalidRID(rid)) throw new BadImageFormatException($"Property rid {rid} does not exist"); #endif origRid = rid; this.rid = rid; this.readerModule = readerModule; bool b = readerModule.TablesStream.TryReadPropertyRow(origRid, out var row); Debug.Assert(b); attributes = row.PropFlags; name = 
readerModule.StringsStream.ReadNoNull(row.Name); declaringType2 = readerModule.GetOwnerType(this); type = readerModule.ReadSignature(row.Type, new GenericParamContext(declaringType2)); } internal PropertyDefMD InitializeAll() { MemberMDInitializer.Initialize(Attributes); MemberMDInitializer.Initialize(Name); MemberMDInitializer.Initialize(Type); MemberMDInitializer.Initialize(Constant); MemberMDInitializer.Initialize(CustomAttributes); MemberMDInitializer.Initialize(GetMethod); MemberMDInitializer.Initialize(SetMethod); MemberMDInitializer.Initialize(OtherMethods); MemberMDInitializer.Initialize(DeclaringType); return this; } /// <inheritdoc/> protected override void InitializePropertyMethods_NoLock() { if (otherMethods is not null) return; IList<MethodDef> newOtherMethods; IList<MethodDef> newGetMethods, newSetMethods; var dt = declaringType2 as TypeDefMD; if (dt is null) { newGetMethods = new List<MethodDef>(); newSetMethods = new List<MethodDef>(); newOtherMethods = new List<MethodDef>(); } else dt.InitializeProperty(this, out newGetMethods, out newSetMethods, out newOtherMethods); getMethods = newGetMethods; setMethods = newSetMethods; // Must be initialized last otherMethods = newOtherMethods; } } }
29.184762
146
0.680068
[ "Apache-2.0" ]
theletterf/signalfx-dotnet-tracing
tracer/src/Datadog.Trace/Vendors/dnlib/DotNet/PropertyDef.cs
15,322
C#
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information. namespace System.Data.Entity.Core.SchemaObjectModel { using System.Collections.Generic; using System.Data.Entity.Core.Metadata.Edm; using System.Data.Entity.Utilities; using System.Diagnostics; using System.Globalization; using System.Text; using System.Xml; internal class RowTypePropertyElement : ModelFunctionTypeElement { private ModelFunctionTypeElement _typeSubElement; private bool _isRefType; private CollectionKind _collectionKind = CollectionKind.None; internal RowTypePropertyElement(SchemaElement parentElement) : base(parentElement) { _typeUsageBuilder = new TypeUsageBuilder(this); } internal override void ResolveTopLevelNames() { if (_unresolvedType != null) { base.ResolveTopLevelNames(); } if (_typeSubElement != null) { _typeSubElement.ResolveTopLevelNames(); } } protected override bool HandleAttribute(XmlReader reader) { if (base.HandleAttribute(reader)) { return true; } else if (CanHandleAttribute(reader, XmlConstants.TypeElement)) { HandleTypeAttribute(reader); return true; } return false; } protected void HandleTypeAttribute(XmlReader reader) { DebugCheck.NotNull(reader); string type; if (!Utils.GetString(Schema, reader, out type)) { return; } TypeModifier typeModifier; Function.RemoveTypeModifier(ref type, out typeModifier, out _isRefType); switch (typeModifier) { case TypeModifier.Array: _collectionKind = CollectionKind.Bag; break; default: Debug.Assert( typeModifier == TypeModifier.None, string.Format( CultureInfo.CurrentCulture, "Type is not valid for property {0}: {1}. 
The modifier for the type cannot be used in this context.", FQName, reader.Value)); break; } if (!Utils.ValidateDottedName(Schema, reader, type)) { return; } _unresolvedType = type; } protected override bool HandleElement(XmlReader reader) { if (CanHandleElement(reader, XmlConstants.CollectionType)) { HandleCollectionTypeElement(reader); return true; } else if (CanHandleElement(reader, XmlConstants.ReferenceType)) { HandleReferenceTypeElement(reader); return true; } else if (CanHandleElement(reader, XmlConstants.TypeRef)) { HandleTypeRefElement(reader); return true; } else if (CanHandleElement(reader, XmlConstants.RowType)) { HandleRowTypeElement(reader); return true; } return false; } protected void HandleCollectionTypeElement(XmlReader reader) { DebugCheck.NotNull(reader); var subElement = new CollectionTypeElement(this); subElement.Parse(reader); _typeSubElement = subElement; } protected void HandleReferenceTypeElement(XmlReader reader) { DebugCheck.NotNull(reader); var subElement = new ReferenceTypeElement(this); subElement.Parse(reader); _typeSubElement = subElement; } protected void HandleTypeRefElement(XmlReader reader) { DebugCheck.NotNull(reader); var subElement = new TypeRefElement(this); subElement.Parse(reader); _typeSubElement = subElement; } protected void HandleRowTypeElement(XmlReader reader) { DebugCheck.NotNull(reader); var subElement = new RowTypeElement(this); subElement.Parse(reader); _typeSubElement = subElement; } internal override void WriteIdentity(StringBuilder builder) { builder.Append("Property("); if (!string.IsNullOrWhiteSpace(UnresolvedType)) { if (_collectionKind != CollectionKind.None) { builder.Append("Collection(" + UnresolvedType + ")"); } else if (_isRefType) { builder.Append("Ref(" + UnresolvedType + ")"); } else { builder.Append(UnresolvedType); } } else { _typeSubElement.WriteIdentity(builder); } builder.Append(")"); } internal override TypeUsage GetTypeUsage() { if (_typeUsage != null) { return _typeUsage; } 
Debug.Assert(_typeSubElement != null, "For attributes typeusage should have been resolved"); if (_typeSubElement != null) { _typeUsage = _typeSubElement.GetTypeUsage(); } return _typeUsage; } internal override bool ResolveNameAndSetTypeUsage( Converter.ConversionCache convertedItemCache, Dictionary<SchemaElement, GlobalItem> newGlobalItems) { if (_typeUsage == null) { if (_typeSubElement != null) //Has sub-elements { return _typeSubElement.ResolveNameAndSetTypeUsage(convertedItemCache, newGlobalItems); } else //Does not have sub-elements; try to resolve { if (_type is ScalarType) //Create and store type usage for scalar type { _typeUsageBuilder.ValidateAndSetTypeUsage(_type as ScalarType, false); _typeUsage = _typeUsageBuilder.TypeUsage; } else //Try to resolve edm type. If not now, it will resolve in the second pass { var edmType = (EdmType)Converter.LoadSchemaElement(_type, _type.Schema.ProviderManifest, convertedItemCache, newGlobalItems); if (edmType != null) { if (_isRefType) { var entityType = edmType as EntityType; DebugCheck.NotNull(entityType); _typeUsage = TypeUsage.Create(new RefType(entityType)); } else { _typeUsageBuilder.ValidateAndSetTypeUsage(edmType, false); //use typeusagebuilder so dont lose facet information _typeUsage = _typeUsageBuilder.TypeUsage; } } } if (_collectionKind != CollectionKind.None) { _typeUsage = TypeUsage.Create(new CollectionType(_typeUsage)); } return _typeUsage != null; } } return true; } // <summary> // True is property is scalar, otherwise false. // During validation (after all types have been resolved). 
// </summary> internal bool ValidateIsScalar() { if (_type != null) { if (_type is ScalarType == false || _isRefType || _collectionKind != CollectionKind.None) { return false; } } else if (_typeSubElement != null) { if (_typeSubElement.Type is ScalarType == false) { return false; } } return true; } internal override void Validate() { base.Validate(); ValidationHelper.ValidateFacets(this, _type, _typeUsageBuilder); ValidationHelper.ValidateTypeDeclaration(this, _type, _typeSubElement); if (_isRefType) { ValidationHelper.ValidateRefType(this, _type); } if (_typeSubElement != null) { _typeSubElement.Validate(); } } } }
32.56338
139
0.497405
[ "Apache-2.0" ]
Cireson/EntityFramework6
src/EntityFramework/Core/SchemaObjectModel/RowTypePropertyElement.cs
9,248
C#
using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Text; using UnityEditor; using UnityEngine; /** [EasyMotionRecorder] Copyright (c) 2018 Duo.inc This software is released under the MIT License. http://opensource.org/licenses/mit-license.php */ namespace Entum { /// <summary> /// Blendshapeの動きを記録するクラス /// リップシンクは後入れでTimeline上にAudioClipをつけて、みたいな可能性が高いので /// Exclusive(除外)するBlendshape名を登録できるようにしています。 /// </summary> [RequireComponent(typeof(MotionDataRecorder))] public class FaceAnimationRecorder : MonoBehaviour { [Header("表情記録を同時に行う場合はtrueにします")] [SerializeField] private bool _recordFaceBlendshapes = false; [Header("リップシンクを記録したくない場合はここにモーフ名を入れていく 例:face_mouse_eなど")] [SerializeField] private List<string> _exclusiveBlendshapeNames; private MotionDataRecorder _animRecorder; private SkinnedMeshRenderer[] _smeshs; private CharacterFacialData _facialData = null; private bool _recording = false; private int _frameCount = 0; CharacterFacialData.SerializeHumanoidFace _past = new CharacterFacialData.SerializeHumanoidFace(); private float _recordedTime = 0f; // Use this for initialization private void OnEnable() { _animRecorder = GetComponent<MotionDataRecorder>(); _animRecorder.OnRecordStart += RecordStart; _animRecorder.OnRecordEnd += RecordEnd; if (_animRecorder.CharacterAnimator != null) { _smeshs = GetSkinnedMeshRenderers(_animRecorder.CharacterAnimator); } } SkinnedMeshRenderer[] GetSkinnedMeshRenderers(Animator root) { var helper = root; var renderers = helper.GetComponentsInChildren<SkinnedMeshRenderer>(); List<SkinnedMeshRenderer> smeshList = new List<SkinnedMeshRenderer>(); for (int i = 0; i < renderers.Length; i++) { var rend = renderers[i]; var cnt = rend.sharedMesh.blendShapeCount; if (cnt > 0) { smeshList.Add(rend); } } return smeshList.ToArray(); } private void OnDisable() { if (_recording) { RecordEnd(); _recording = false; } if (_animRecorder == null) return; _animRecorder.OnRecordStart -= RecordStart; 
_animRecorder.OnRecordEnd -= RecordEnd; } /// <summary> /// 記録開始 /// </summary> private void RecordStart() { if (_recordFaceBlendshapes == false) { return; } if (_recording) { return; } if (_smeshs.Length == 0) { Debug.LogError("顔のメッシュ指定がされていないので顔のアニメーションは記録しません"); return; } Debug.Log("FaceAnimationRecorder record start"); _recording = true; _recordedTime = 0f; _frameCount = 0; _facialData = ScriptableObject.CreateInstance<CharacterFacialData>(); } /// <summary> /// 記録終了 /// </summary> private void RecordEnd() { if (_recordFaceBlendshapes == false) { return; } if (_smeshs.Length == 0) { Debug.LogError("顔のメッシュ指定がされていないので顔のアニメーションは記録しませんでした"); if (_recording == true) { Debug.LogAssertion("Unexpected execution!!!!"); } } else { //WriteAnimationFileToScriptableObject(); ExportFacialAnimationClip(_animRecorder.CharacterAnimator, _facialData); } Debug.Log("FaceAnimationRecorder record end"); _recording = false; } private void WriteAnimationFileToScriptableObject() { MotionDataRecorder.SafeCreateDirectory("Assets/Resources"); string path = AssetDatabase.GenerateUniqueAssetPath( "Assets/Resources/RecordMotion_ face" + _animRecorder.CharacterAnimator.name + DateTime.Now.ToString("yyyy_MM_dd_HH_mm_ss") + ".asset"); if (_facialData == null) { Debug.LogError("記録されたFaceデータがnull"); } else { AssetDatabase.CreateAsset(_facialData, path); AssetDatabase.Refresh(); } _recordedTime = 0f; _frameCount = 0; } //フレーム内の差分が無いかをチェックするやつ。 private bool IsSame(CharacterFacialData.SerializeHumanoidFace a, CharacterFacialData.SerializeHumanoidFace b) { if (a == null || b == null || a.Smeshes.Count == 0 || b.Smeshes.Count == 0) { return false; } if (a.BlendShapeNum() != b.BlendShapeNum()) { return false; } return !a.Smeshes.Where((t1, i) => t1.blendShapes.Where((t, j) => Mathf.Abs(t - b.Smeshes[i].blendShapes[j]) > 1).Any()).Any(); } private void LateUpdate() { if (Input.GetKeyDown(KeyCode.Y)) { ExportFacialAnimationClipTest(); } if (!_recording) { return; } _recordedTime += Time.deltaTime; var p 
= new CharacterFacialData.SerializeHumanoidFace(); for (int i = 0; i < _smeshs.Length; i++) { var mesh = new CharacterFacialData.SerializeHumanoidFace.MeshAndBlendshape(); mesh.path = _smeshs[i].name; mesh.blendShapes = new float[_smeshs[i].sharedMesh.blendShapeCount]; for (int j = 0; j < _smeshs[i].sharedMesh.blendShapeCount; j++) { var tname = _smeshs[i].sharedMesh.GetBlendShapeName(j); var useThis = true; foreach (var item in _exclusiveBlendshapeNames) { if (item.IndexOf(tname, StringComparison.Ordinal) >= 0) { useThis = false; } } if (useThis) { mesh.blendShapes[j] = _smeshs[i].GetBlendShapeWeight(j); } } p.Smeshes.Add(mesh); } if (!IsSame(p, _past)) { p.FrameCount = _frameCount; p.Time = _recordedTime; _facialData.Facials.Add(p); _past = new CharacterFacialData.SerializeHumanoidFace(p); } _frameCount++; } /// <summary> /// Animatorと記録したデータで書き込む /// </summary> /// <param name="root"></param> /// <param name="facial"></param> void ExportFacialAnimationClip(Animator root, CharacterFacialData facial) { var animclip = new AnimationClip(); var mesh = _smeshs; for (int i = 0; i < mesh.Length; i++) { var pathsb = new StringBuilder().Append(mesh[i].transform.name); var trans = mesh[i].transform; while (trans.parent != null && trans.parent != root.transform) { trans = trans.parent; pathsb.Insert(0, "/").Insert(0, trans.name); } var path = pathsb.ToString(); for (var j = 0; j < mesh[i].sharedMesh.blendShapeCount; j++) { var curveBinding = new EditorCurveBinding(); curveBinding.type = typeof(SkinnedMeshRenderer); curveBinding.path = path; curveBinding.propertyName = "blendShape." 
+ mesh[i].sharedMesh.GetBlendShapeName(j); AnimationCurve curve = new AnimationCurve(); float pastBlendshapeWeight = -1; for (int k = 0; k < _facialData.Facials.Count; k++) { if (!(Mathf.Abs(pastBlendshapeWeight - _facialData.Facials[k].Smeshes[i].blendShapes[j]) > 0.1f)) continue; curve.AddKey(facial.Facials[k].Time, _facialData.Facials[k].Smeshes[i].blendShapes[j]); pastBlendshapeWeight = _facialData.Facials[k].Smeshes[i].blendShapes[j]; } AnimationUtility.SetEditorCurve(animclip, curveBinding, curve); } } MotionDataRecorder.SafeCreateDirectory("Assets/Resources"); var outputPath = "Assets/Resources/FaceRecordMotion_" + _animRecorder.CharacterAnimator.name + "_" + DateTime.Now.ToString("yyyy_MM_dd_HH_mm_ss") + "_Clip.anim"; Debug.Log("outputPath:" + outputPath); AssetDatabase.CreateAsset(animclip, AssetDatabase.GenerateUniqueAssetPath(outputPath)); AssetDatabase.SaveAssets(); AssetDatabase.Refresh(); } /// <summary> /// Animatorと記録したデータで書き込むテスト /// </summary> /// <param name="root"></param> /// <param name="facial"></param> void ExportFacialAnimationClipTest() { var animclip = new AnimationClip(); var mesh = _smeshs; for (int i = 0; i < mesh.Length; i++) { var pathsb = new StringBuilder().Append(mesh[i].transform.name); var trans = mesh[i].transform; while (trans.parent != null && trans.parent != _animRecorder.CharacterAnimator.transform) { trans = trans.parent; pathsb.Insert(0, "/").Insert(0, trans.name); } var path = pathsb.ToString(); for (var j = 0; j < mesh[i].sharedMesh.blendShapeCount; j++) { var curveBinding = new EditorCurveBinding(); curveBinding.type = typeof(SkinnedMeshRenderer); curveBinding.path = path; curveBinding.propertyName = "blendShape." 
+ mesh[i].sharedMesh.GetBlendShapeName(j); AnimationCurve curve = new AnimationCurve(); //全てのBlendshapeに対して0→100→0の遷移でキーを打つ curve.AddKey(0, 0); curve.AddKey(1, 100); curve.AddKey(2, 0); Debug.Log("path: " + curveBinding.path + "\r\nname: " + curveBinding.propertyName + " val:"); AnimationUtility.SetEditorCurve(animclip, curveBinding, curve); } } AssetDatabase.CreateAsset(animclip, AssetDatabase.GenerateUniqueAssetPath("Assets/" + _animRecorder.CharacterAnimator.name + "_facial_ClipTest.anim")); AssetDatabase.SaveAssets(); AssetDatabase.Refresh(); } } }
32.25419
117
0.514333
[ "MIT" ]
arumani/EasyMotionRecorder
EasyMotionRecorder/Assets/EasyMotionRecorder/Scripts/FaceAnimationRecorder.cs
12,081
C#
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace ItelexLogger { public partial class LogView : Form { public delegate void ClosedEventHandler(); public event ClosedEventHandler Closed; private Point _position; private Point? _tempPosition; public LogView(Point position) { _tempPosition = position; InitializeComponent(); LogListView.View = View.Details; LogListView.HideSelection = true; LogListView.FullRowSelect = true; LogListView.Sorting = SortOrder.None; LogListView.Columns[0].Width = LogListView.Width - 4; LogListView.HeaderStyle = ColumnHeaderStyle.None; //LogLister.Instance.Add("1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890"); LogListView.Items.Clear(); foreach (string line in LogLister.Instance.LogList) { LogListView.Items.Add(new ListViewItem(line)); } } public void Log(string line) { Helper.ControlInvokeRequired(LogListView, () => { LogListView.Items.Add(new ListViewItem(line)); LogListView.EnsureVisible(LogListView.Items.Count - 1); LogListView.Refresh(); }); } private void LogView_Load(object sender, EventArgs e) { if (_tempPosition != null) { SetPosition(_tempPosition.Value.X, _tempPosition.Value.Y); _tempPosition = null; } else { SetPosition(_position.X, _position.Y); } SetLogViewWidth(); } private void LogView_FormClosed(object sender, FormClosedEventArgs e) { Closed?.Invoke(); } private void CloseBtn_Click(object sender, EventArgs e) { Close(); } public void SetPosition(int x, int y) { _position = new Point(x, y); SetBounds(x, y, Bounds.Width, Bounds.Height); } public void ChangePosition(int x, int y) { if (Bounds.X != -32000) { int dx = x - _position.X; int dy = y - _position.Y; SetPosition(Bounds.X + dx, Bounds.Y + dy); } } private void LogView_LocationChanged(object sender, EventArgs e) { _position = new Point(Bounds.X, 
Bounds.Y); } private void LogView_ResizeEnd(object sender, EventArgs e) { SetLogViewWidth(); } private void SetLogViewWidth() { LogListView.Columns[0].Width = LogListView.ClientSize.Width; } } }
23.183486
133
0.677879
[ "MIT" ]
detlefgerhardt/ItelexLogger
Visual Studio/ItelexLogger/LogView.cs
2,421
C#
// ========================================================================== // Squidex Headless CMS // ========================================================================== // Copyright (c) Squidex UG (haftungsbeschraenkt) // All rights reserved. Licensed under the MIT license. // ========================================================================== using GraphQL.DataLoader; using Squidex.Domain.Apps.Entities.Assets; using Squidex.Domain.Apps.Entities.Contents.Queries; using Squidex.Infrastructure; using Squidex.Infrastructure.Json.Objects; using Squidex.Shared.Users; namespace Squidex.Domain.Apps.Entities.Contents.GraphQL { public sealed class GraphQLExecutionContext : QueryExecutionContext { private static readonly List<IEnrichedAssetEntity> EmptyAssets = new List<IEnrichedAssetEntity>(); private static readonly List<IEnrichedContentEntity> EmptyContents = new List<IEnrichedContentEntity>(); private readonly IDataLoaderContextAccessor dataLoaders; public override Context Context { get; } public GraphQLExecutionContext( IDataLoaderContextAccessor dataLoaders, IAssetQueryService assetQuery, IAssetCache assetCache, IContentQueryService contentQuery, IContentCache contentCache, IServiceProvider serviceProvider, Context context) : base(assetQuery, assetCache, contentQuery, contentCache, serviceProvider) { this.dataLoaders = dataLoaders; Context = context.Clone(b => b .WithoutCleanup() .WithoutContentEnrichment()); } public async Task<IUser?> FindUserAsync(RefToken refToken, CancellationToken ct) { if (refToken.IsClient) { return new ClientUser(refToken); } else { var dataLoader = GetUserLoader(); return await dataLoader.LoadAsync(refToken.Identifier).GetResultAsync(ct); } } public async Task<IEnrichedAssetEntity?> FindAssetAsync(DomainId id, CancellationToken ct) { var dataLoader = GetAssetsLoader(); return await dataLoader.LoadAsync(id).GetResultAsync(ct); } public async Task<IContentEntity?> FindContentAsync(DomainId schemaId, DomainId id, 
CancellationToken ct) { var dataLoader = GetContentsLoader(); var content = await dataLoader.LoadAsync(id).GetResultAsync(ct); if (content?.SchemaId.Id != schemaId) { content = null; } return content; } public async Task<IReadOnlyList<IEnrichedAssetEntity>> GetReferencedAssetsAsync(IJsonValue value, TimeSpan cacheDuration, CancellationToken ct) { var ids = ParseIds(value); if (ids == null) { return EmptyAssets; } async Task<IReadOnlyList<IEnrichedAssetEntity>> LoadAsync(IEnumerable<DomainId> ids) { var result = await GetAssetsLoader().LoadAsync(ids).GetResultAsync(ct); return result?.NotNull().ToList() ?? EmptyAssets; } if (cacheDuration > TimeSpan.Zero) { var assets = await AssetCache.CacheOrQueryAsync(ids, async pendingIds => { return await LoadAsync(pendingIds); }, cacheDuration); return assets; } return await LoadAsync(ids); } public async Task<IReadOnlyList<IEnrichedContentEntity>> GetReferencedContentsAsync(IJsonValue value, TimeSpan cacheDuration, CancellationToken ct) { var ids = ParseIds(value); if (ids == null) { return EmptyContents; } async Task<IReadOnlyList<IEnrichedContentEntity>> LoadAsync(IEnumerable<DomainId> ids) { var result = await GetContentsLoader().LoadAsync(ids).GetResultAsync(ct); return result?.NotNull().ToList() ?? 
EmptyContents; } if (cacheDuration > TimeSpan.Zero) { var contents = await ContentCache.CacheOrQueryAsync(ids, async pendingIds => { return await LoadAsync(pendingIds); }, cacheDuration); return contents.ToList(); } return await LoadAsync(ids); } private IDataLoader<DomainId, IEnrichedAssetEntity> GetAssetsLoader() { return dataLoaders.Context.GetOrAddBatchLoader<DomainId, IEnrichedAssetEntity>(nameof(GetAssetsLoader), async (batch, ct) => { var result = await GetReferencedAssetsAsync(new List<DomainId>(batch), ct); return result.ToDictionary(x => x.Id); }); } private IDataLoader<DomainId, IEnrichedContentEntity> GetContentsLoader() { return dataLoaders.Context.GetOrAddBatchLoader<DomainId, IEnrichedContentEntity>(nameof(GetContentsLoader), async (batch, ct) => { var result = await GetReferencedContentsAsync(new List<DomainId>(batch), ct); return result.ToDictionary(x => x.Id); }); } private IDataLoader<string, IUser> GetUserLoader() { return dataLoaders.Context.GetOrAddBatchLoader<string, IUser>(nameof(GetUserLoader), async (batch, ct) => { var result = await Resolve<IUserResolver>().QueryManyAsync(batch.ToArray(), ct); return result; }); } private static List<DomainId>? ParseIds(IJsonValue value) { try { List<DomainId>? result = null; if (value is JsonArray array) { foreach (var id in array) { if (id is JsonString jsonString) { result ??= new List<DomainId>(); result.Add(DomainId.Create(jsonString.Value)); } } } return result; } catch { return null; } } } }
33.42
133
0.53471
[ "MIT" ]
BrainPlus/squidex
backend/src/Squidex.Domain.Apps.Entities/Contents/GraphQL/GraphQLExecutionContext.cs
6,686
C#
// Copyright © Tanner Gooding and Contributors. Licensed under the MIT License (MIT). See License.md in the repository root for more information. // Ported from um/UIAnimation.h in the Windows SDK for Windows 10.0.20348.0 // Original source is Copyright © Microsoft. All rights reserved. using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; namespace TerraFX.Interop { [Guid("829B6CF1-4F3A-4412-AE09-B243EB4C6B58")] [NativeTypeName("struct IUIAnimationVariableIntegerChangeHandler2 : IUnknown")] [NativeInheritance("IUnknown")] public unsafe partial struct IUIAnimationVariableIntegerChangeHandler2 { public void** lpVtbl; [MethodImpl(MethodImplOptions.AggressiveInlining)] [VtblIndex(0)] [return: NativeTypeName("HRESULT")] public int QueryInterface([NativeTypeName("const IID &")] Guid* riid, void** ppvObject) { return ((delegate* unmanaged<IUIAnimationVariableIntegerChangeHandler2*, Guid*, void**, int>)(lpVtbl[0]))((IUIAnimationVariableIntegerChangeHandler2*)Unsafe.AsPointer(ref this), riid, ppvObject); } [MethodImpl(MethodImplOptions.AggressiveInlining)] [VtblIndex(1)] [return: NativeTypeName("ULONG")] public uint AddRef() { return ((delegate* unmanaged<IUIAnimationVariableIntegerChangeHandler2*, uint>)(lpVtbl[1]))((IUIAnimationVariableIntegerChangeHandler2*)Unsafe.AsPointer(ref this)); } [MethodImpl(MethodImplOptions.AggressiveInlining)] [VtblIndex(2)] [return: NativeTypeName("ULONG")] public uint Release() { return ((delegate* unmanaged<IUIAnimationVariableIntegerChangeHandler2*, uint>)(lpVtbl[2]))((IUIAnimationVariableIntegerChangeHandler2*)Unsafe.AsPointer(ref this)); } [MethodImpl(MethodImplOptions.AggressiveInlining)] [VtblIndex(3)] [return: NativeTypeName("HRESULT")] public int OnIntegerValueChanged(IUIAnimationStoryboard2* storyboard, IUIAnimationVariable2* variable, [NativeTypeName("INT32 *")] int* newValue, [NativeTypeName("INT32 *")] int* previousValue, [NativeTypeName("UINT")] uint cDimension) { return ((delegate* 
unmanaged<IUIAnimationVariableIntegerChangeHandler2*, IUIAnimationStoryboard2*, IUIAnimationVariable2*, int*, int*, uint, int>)(lpVtbl[3]))((IUIAnimationVariableIntegerChangeHandler2*)Unsafe.AsPointer(ref this), storyboard, variable, newValue, previousValue, cDimension); } } }
49
302
0.715463
[ "MIT" ]
DaZombieKiller/terrafx.interop.windows
sources/Interop/Windows/um/UIAnimation/IUIAnimationVariableIntegerChangeHandler2.cs
2,550
C#
using UnityEngine;
using UnityEngine.XR.Interaction.Toolkit;

/// <summary>
/// Tints a grabbable cube by interaction state: Unity magenta while hovered,
/// Unity cyan while held, and white otherwise.
/// </summary>
[RequireComponent(typeof(XRGrabInteractable))]
public class ComplexCube : MonoBehaviour
{
    static Color s_UnityMagenta = new Color(0.929f, 0.094f, 0.278f);
    static Color s_UnityCyan = new Color(0.019f, 0.733f, 0.827f);

    XRGrabInteractable m_GrabInteractable;
    MeshRenderer m_MeshRenderer;

    // True while the cube is selected (grabbed). Hover-exit events must not
    // reset the color while the cube is still held.
    bool m_Held;

    protected void OnEnable()
    {
        m_GrabInteractable = GetComponent<XRGrabInteractable>();
        m_MeshRenderer = GetComponent<MeshRenderer>();

        m_GrabInteractable.firstHoverEntered.AddListener(OnFirstHoverEntered);
        m_GrabInteractable.lastHoverExited.AddListener(OnLastHoverExited);
        m_GrabInteractable.selectEntered.AddListener(OnSelectEntered);
        m_GrabInteractable.selectExited.AddListener(OnSelectExited);
    }

    protected void OnDisable()
    {
        // Unsubscribe to avoid dangling handlers while the component is inactive.
        m_GrabInteractable.firstHoverEntered.RemoveListener(OnFirstHoverEntered);
        m_GrabInteractable.lastHoverExited.RemoveListener(OnLastHoverExited);
        m_GrabInteractable.selectEntered.RemoveListener(OnSelectEntered);
        m_GrabInteractable.selectExited.RemoveListener(OnSelectExited);
    }

    protected virtual void OnSelectEntered(SelectEnterEventArgs args)
    {
        m_MeshRenderer.material.color = s_UnityCyan;
        m_Held = true;
    }

    protected virtual void OnSelectExited(SelectExitEventArgs args)
    {
        m_MeshRenderer.material.color = Color.white;
        m_Held = false;
    }

    protected virtual void OnLastHoverExited(HoverExitEventArgs args)
    {
        if (m_Held)
            return;

        m_MeshRenderer.material.color = Color.white;
    }

    protected virtual void OnFirstHoverEntered(HoverEnterEventArgs args)
    {
        if (m_Held)
            return;

        m_MeshRenderer.material.color = s_UnityMagenta;
    }
}
30.079365
81
0.705013
[ "MIT" ]
BIGdinosaurDDD/VR_exist
Assets/Scripts/ComplexCube.cs
1,897
C#
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("CheckCheque.iOS")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("CheckCheque.iOS")] [assembly: AssemblyCopyright("Copyright © 2017")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("50c7b8c9-e664-45af-b88e-0c9b8b9c1be1")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
37.918919
84
0.744833
[ "MIT" ]
aaRopi/CheckCheque
Application/CheckCheque/CheckCheque.iOS/Properties/AssemblyInfo.cs
1,406
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;

namespace Catalogo.Controllers
{
    /// <summary>
    /// Serves the static site pages: home, about and contact.
    /// </summary>
    public class HomeController : Controller
    {
        // Renders the landing page view.
        public ActionResult Index()
        {
            return View();
        }

        // Renders the About page; the message is read by the view via ViewBag.
        public ActionResult About()
        {
            ViewBag.Message = "Your application description page.";

            return View();
        }

        // Renders the Contact page; the message is read by the view via ViewBag.
        public ActionResult Contact()
        {
            ViewBag.Message = "Your contact page.";

            return View();
        }
    }
}
19.166667
67
0.561739
[ "Apache-2.0" ]
esteban18plus/Catalogo
Catalogo/Controllers/HomeController.cs
577
C#
using Discord;
using Discord.WebSocket;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Nayu.Core.Features.GlobalAccounts;
using Nayu.Modules.Inbox;

namespace Nayu.Core.LevelingSystem
{
    /// <summary>
    /// Awards titles and loot boxes to users based on duel-win and level milestones.
    /// </summary>
    public class LevelingRewards
    {
        /// <summary>
        /// Updates the user's title when their duel win count hits a fixed milestone.
        /// The account is saved unconditionally (preserves the original behavior).
        /// </summary>
        public static Task CheckDuelRewards(SocketUser user)
        {
            var config = GlobalUserAccounts.GetUserAccount(user);
            uint wins = config.Wins;
            switch (wins)
            {
                case 10:
                    config.Title = "Classic Adventurer";
                    break;
                case 20:
                    config.Title = "Courageous Explorer";
                    break;
                case 30:
                    config.Title = "Daring Knight";
                    break;
                case 40:
                    config.Title = "Monster Hunter";
                    break;
                case 50:
                    config.Title = "Noble Swordsman";
                    break;
                case 60:
                    config.Title = "Valiant Paladin";
                    break;
                case 70:
                    config.Title = "Dragon Slayer";
                    break;
                case 80:
                    config.Title = "Respectable Hero";
                    break;
                case 90:
                    config.Title = "Holy Protectorate";
                    break;
                case 100:
                    config.Title = "Saint";
                    break;
                case 120:
                    config.Title = "Defender of Gods";
                    break;
                case 150:
                    config.Title = "God Eater";
                    break;
            }

            GlobalUserAccounts.SaveAccounts(config.Id);
            return Task.CompletedTask;
        }

        /// <summary>
        /// Awards a loot box when the user's duel win count hits a milestone.
        /// Milestones are checked rarest-first so a win count matching several
        /// intervals only awards the rarest box.
        /// </summary>
        public static async Task CheckDuelLootboxes(SocketUser user)
        {
            var config = GlobalUserAccounts.GetUserAccount(user);
            var wins = config.Wins;

            string msg = null;
            if (wins % 50 == 0)
            {
                config.LootBoxLegendary += 1;
                msg = $"**{user.Username}**, you have received a **LEGENDARY** lootbox for reaching {config.Wins} wins!";
            }
            else if (wins % 35 == 0)
            {
                config.LootBoxEpic += 1;
                msg = $"**{user.Username}**, you have received a **EPIC** lootbox for reaching {config.Wins} wins!";
            }
            else if (wins % 20 == 0)
            {
                config.LootBoxRare += 1;
                msg = $"**{user.Username}**, you have received a **RARE** lootbox for reaching {config.Wins} wins!";
            }
            else if (wins % 10 == 0)
            {
                config.LootBoxUncommon += 1;
                msg = $"**{user.Username}**, you have received a **UNCOMMON** lootbox for reaching {config.Wins} wins!";
            }
            else if (wins % 3 == 0)
            {
                config.LootBoxCommon += 1;
                msg = $"**{user.Username}**, you have received a **COMMON** lootbox for reaching {config.Wins} wins!";
            }

            // BUG FIX: the original sent a DM with an empty body (and saved an
            // unchanged account) when no milestone matched. Bail out instead.
            // NOTE(review): also dropped an unused GetOrCreateDMChannelAsync call
            // here — confirm CreateMessage.CreateAndSendMessageAsync opens the
            // DM channel itself.
            if (msg == null)
            {
                return;
            }

            GlobalUserAccounts.SaveAccounts(user.Id);
            await CreateMessage.CreateAndSendMessageAsync("Lootbox Reward", msg, DateTime.UtcNow, user);
        }

        /// <summary>
        /// Awards a loot box for level milestones. Unlike the duel variant, a
        /// COMMON box is the fallback, so a reward is always sent.
        /// </summary>
        public static async Task CheckLootBoxRewards(SocketUser user)
        {
            var config = GlobalUserAccounts.GetUserAccount(user);
            int level = (int) config.LevelNumber;

            // Rarest-first, mirroring CheckDuelLootboxes.
            string msg;
            if (level % 20 == 0)
            {
                config.LootBoxLegendary += 1;
                msg = lootBoxMessage(user, "LEGENDARY");
            }
            else if (level % 15 == 0)
            {
                config.LootBoxEpic += 1;
                msg = lootBoxMessage(user, "EPIC");
            }
            else if (level % 10 == 0)
            {
                config.LootBoxRare += 1;
                msg = lootBoxMessage(user, "RARE");
            }
            else if (level % 5 == 0)
            {
                config.LootBoxUncommon += 1;
                msg = lootBoxMessage(user, "UNCOMMON");
            }
            else
            {
                config.LootBoxCommon += 1;
                msg = lootBoxMessage(user, "COMMON");
            }

            GlobalUserAccounts.SaveAccounts(user.Id);
            await CreateMessage.CreateAndSendMessageAsync("Lootbox Reward", msg, DateTime.UtcNow, user);
        }

        /// <summary>
        /// Builds the level-milestone loot box announcement for the given rarity.
        /// </summary>
        public static string lootBoxMessage(SocketUser user, string rarity)
        {
            var config = GlobalUserAccounts.GetUserAccount(user);
            return $"**{user.Username}**, you have received a **{rarity}** loot box for reaching level {config.LevelNumber}!\nUse `n!lbi` to view all of your loot boxes!";
        }
    }
}
34.705128
168
0.468969
[ "MIT" ]
Phytal/Nayu
Nayu/Core/LevelingSystem/LevelingRewards.cs
5,416
C#
using lunge.Library.Discord.RPC.Message; /// <summary> /// Delegates used for events sent by the pipe. /// </summary> namespace lunge.Library.Discord.RPC { /// <summary> /// Called when the Discord Client is ready to send and receive messages. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnReadyEvent(object sender, ReadyMessage args); /// <summary> /// Called when connection to the Discord Client is lost. The connection will remain close and unready to accept messages until the Ready event is called again. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnCloseEvent(object sender, CloseMessage args); /// <summary> /// Called when a error has occured during the transmission of a message. For example, if a bad Rich Presence payload is sent, this event will be called explaining what went wrong. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnErrorEvent(object sender, ErrorMessage args); /// <summary> /// Called when the Discord Client has updated the presence. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnPresenceUpdateEvent(object sender, PresenceMessage args); /// <summary> /// Called when the Discord Client has subscribed to an event. 
/// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnSubscribeEvent(object sender, SubscribeMessage args); /// <summary> /// Called when the Discord Client has unsubscribed from an event. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnUnsubscribeEvent(object sender, UnsubscribeMessage args); /// <summary> /// Called when the Discord Client wishes for this process to join a game. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnJoinEvent(object sender, JoinMessage args); /// <summary> /// Called when the Discord Client wishes for this process to spectate a game. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnSpectateEvent(object sender, SpectateMessage args); /// <summary> /// Called when another discord user requests permission to join this game. /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnJoinRequestedEvent(object sender, JoinRequestMessage args); /// <summary> /// The connection to the discord client was succesfull. This is called before <see cref="Ready"/>. 
/// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnConnectionEstablishedEvent(object sender, ConnectionEstablishedMessage args); /// <summary> /// Failed to establish any connection with discord. Discord is potentially not running? /// </summary> /// <param name="sender">The Discord client handler that sent this event</param> /// <param name="args">The arguments supplied with the event</param> public delegate void OnConnectionFailedEvent(object sende, ConnectionFailedMessage args); }
47.604651
181
0.736932
[ "MIT" ]
lunacys/lunge
src/lunge.Library/Discord/RPC/Events.cs
4,096
C#
using Knight.Core.WindJson;
using Microsoft.IO;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Threading.Tasks;

namespace Knight.Hotfix.Core
{
    /// <summary>
    /// Packs and unpacks hotfix network messages in either a binary format
    /// (HotfixSerializerBinary) or a JSON format (WindJson).
    /// </summary>
    public class HotfixNetworkMessagePacker
    {
        // Pools MemoryStream buffers to reduce GC pressure on the receive path.
        private static readonly RecyclableMemoryStreamManager mRecyclableMSMgr = new RecyclableMemoryStreamManager();

        /// <summary>
        /// Deserializes a binary message of the given type from the whole byte array.
        /// </summary>
        public object DeserializeFrom(Type rType, byte[] rBytes)
        {
            // CONSISTENCY FIX: delegate to the ranged overload so both binary
            // paths share the pooled stream and the ISupportInitialize.EndInit
            // step (the original skipped EndInit on this path).
            return DeserializeFrom(rType, rBytes, 0, rBytes.Length);
        }

        /// <summary>
        /// Deserializes a binary message of the given type from a slice of the byte array.
        /// </summary>
        public object DeserializeFrom(Type rType, byte[] rBytes, int nIndex, int nCount)
        {
            var rObj = HotfixReflectAssists.Construct(rType) as HotfixSerializerBinary;
            using (MemoryStream ms = mRecyclableMSMgr.GetStream("protobuf", rBytes, nIndex, nCount))
            {
                using (var br = new BinaryReader(ms))
                {
                    rObj.Deserialize(br);
                }
            }

            // Let the object complete any multi-step initialization.
            ISupportInitialize iSupportInitialize = rObj as ISupportInitialize;
            if (iSupportInitialize != null)
            {
                iSupportInitialize.EndInit();
            }
            return rObj;
        }

        /// <summary>
        /// Generic convenience wrapper over the whole-array binary overload.
        /// </summary>
        public T DeserializeFrom<T>(byte[] rBytes)
        {
            return DeserializeFrom<T>(rBytes, 0, rBytes.Length);
        }

        /// <summary>
        /// Generic convenience wrapper over the ranged binary overload.
        /// </summary>
        public T DeserializeFrom<T>(byte[] rBytes, int nIndex, int nCount)
        {
            return (T)DeserializeFrom(typeof(T), rBytes, nIndex, nCount);
        }

        /// <summary>
        /// Deserializes an object of type T from its JSON text form.
        /// </summary>
        public T DeserializeFrom<T>(string rStr)
        {
            JsonNode rJsonNode = HotfixJsonParser.Parse(rStr);
            return rJsonNode.ToObject<T>();
        }

        /// <summary>
        /// Deserializes an object of the given type from its JSON text form.
        /// </summary>
        public object DeserializeFrom(Type rType, string rStr)
        {
            JsonNode rJsonNode = HotfixJsonParser.Parse(rStr);
            return rJsonNode.ToObject(rType);
        }

        /// <summary>
        /// Serializes a binary message to a byte array sized exactly to the
        /// written data. Non-HotfixSerializerBinary objects yield an empty array.
        /// </summary>
        public byte[] SerializeToByteArray(object rObj)
        {
            var rSerializerObj = rObj as HotfixSerializerBinary;
            using (var ms = new MemoryStream())
            {
                using (var bw = new BinaryWriter(ms))
                {
                    rSerializerObj?.Serialize(bw);
                    // BUG FIX: GetBuffer() returns the stream's internal buffer,
                    // which is normally larger than the written data and padded
                    // with trailing zero bytes; ToArray() returns exactly the
                    // bytes written.
                    return ms.ToArray();
                }
            }
        }

        /// <summary>
        /// Serializes an object to its JSON text form.
        /// </summary>
        public string SerializeToText(object rObj)
        {
            JsonNode rJsonNode = HotfixJsonParser.ToJsonNode(rObj);
            return rJsonNode.ToString();
        }
    }
}
31.022472
117
0.558855
[ "MIT" ]
JansonC/knight
knight-client/Assets/Game.Hotfix/Core/Network/HotfixNetworkMessagePacker.cs
2,763
C#
using System.Collections.Generic;
using Coevery.ContentManagement;
using Coevery.Localization;
using Coevery.Messaging.Services;
using Coevery.UI.Admin.Notification;
using Coevery.UI.Notify;
using Coevery.Users.Models;

namespace Coevery.Users.Services {
    /// <summary>
    /// Emits an admin warning when registration features that send mail are
    /// enabled but no "Email" message channel is available.
    /// </summary>
    public class MissingSettingsBanner : INotificationProvider {
        private readonly ICoeveryServices _coeveryServices;
        private readonly IMessageChannelManager _messageChannelManager;

        public MissingSettingsBanner(ICoeveryServices CoeveryServices, IMessageChannelManager messageManager) {
            _coeveryServices = CoeveryServices;
            _messageChannelManager = messageManager;

            T = NullLocalizer.Instance;
        }

        public Localizer T { get; set; }

        public IEnumerable<NotifyEntry> GetNotifications() {
            var registrationSettings = _coeveryServices.WorkContext.CurrentSite.As<RegistrationSettingsPart>();
            if (registrationSettings == null) {
                yield break;
            }

            // Only these registration features require outgoing email.
            bool needsEmail = registrationSettings.UsersMustValidateEmail
                              || registrationSettings.NotifyModeration
                              || registrationSettings.EnableLostPassword;
            if (!needsEmail) {
                yield break;
            }

            // Probe for a usable "Email" channel with a representative payload.
            var emailChannel = _messageChannelManager.GetMessageChannel("Email", new Dictionary<string, object> {
                {"Body", ""},
                {"Subject", "Subject"},
                {"Recipients", "john.doe@outlook.com"}
            });
            if (emailChannel != null) {
                yield break;
            }

            yield return new NotifyEntry {
                Message = T("Some Coevery.User settings require an Email channel to be enabled."),
                Type = NotifyType.Warning
            };
        }
    }
}
40.425
158
0.65368
[ "BSD-3-Clause" ]
Coevery/Coevery-Framework
src/Coevery.Web/Modules/Coevery.Users/Services/MissingSettingsBanner.cs
1,619
C#
using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; using System.Threading.Tasks; using Abp.Application.Services; using Abp.Application.Services.Dto; using Abp.Authorization; using Abp.Domain.Entities; using Abp.Domain.Repositories; using Abp.Extensions; using Abp.IdentityFramework; using Abp.Linq.Extensions; using Abp.Localization; using Abp.Runtime.Session; using Abp.UI; using MyFirstBP.Authorization; using MyFirstBP.Authorization.Accounts; using MyFirstBP.Authorization.Roles; using MyFirstBP.Authorization.Users; using MyFirstBP.Roles.Dto; using MyFirstBP.Users.Dto; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; namespace MyFirstBP.Users { [AbpAuthorize(PermissionNames.Pages_Users)] public class UserAppService : AsyncCrudAppService<User, UserDto, long, PagedUserResultRequestDto, CreateUserDto, UserDto>, IUserAppService { private readonly UserManager _userManager; private readonly RoleManager _roleManager; private readonly IRepository<Role> _roleRepository; private readonly IPasswordHasher<User> _passwordHasher; private readonly IAbpSession _abpSession; private readonly LogInManager _logInManager; public UserAppService( IRepository<User, long> repository, UserManager userManager, RoleManager roleManager, IRepository<Role> roleRepository, IPasswordHasher<User> passwordHasher, IAbpSession abpSession, LogInManager logInManager) : base(repository) { _userManager = userManager; _roleManager = roleManager; _roleRepository = roleRepository; _passwordHasher = passwordHasher; _abpSession = abpSession; _logInManager = logInManager; } public override async Task<UserDto> CreateAsync(CreateUserDto input) { CheckCreatePermission(); var user = ObjectMapper.Map<User>(input); user.TenantId = AbpSession.TenantId; user.IsEmailConfirmed = true; await _userManager.InitializeOptionsAsync(AbpSession.TenantId); CheckErrors(await _userManager.CreateAsync(user, input.Password)); if (input.RoleNames != null) { 
CheckErrors(await _userManager.SetRolesAsync(user, input.RoleNames)); } CurrentUnitOfWork.SaveChanges(); return MapToEntityDto(user); } public override async Task<UserDto> UpdateAsync(UserDto input) { CheckUpdatePermission(); var user = await _userManager.GetUserByIdAsync(input.Id); MapToEntity(input, user); CheckErrors(await _userManager.UpdateAsync(user)); if (input.RoleNames != null) { CheckErrors(await _userManager.SetRolesAsync(user, input.RoleNames)); } return await GetAsync(input); } public override async Task DeleteAsync(EntityDto<long> input) { var user = await _userManager.GetUserByIdAsync(input.Id); await _userManager.DeleteAsync(user); } public async Task<ListResultDto<RoleDto>> GetRoles() { var roles = await _roleRepository.GetAllListAsync(); return new ListResultDto<RoleDto>(ObjectMapper.Map<List<RoleDto>>(roles)); } public async Task ChangeLanguage(ChangeUserLanguageDto input) { await SettingManager.ChangeSettingForUserAsync( AbpSession.ToUserIdentifier(), LocalizationSettingNames.DefaultLanguage, input.LanguageName ); } protected override User MapToEntity(CreateUserDto createInput) { var user = ObjectMapper.Map<User>(createInput); user.SetNormalizedNames(); return user; } protected override void MapToEntity(UserDto input, User user) { ObjectMapper.Map(input, user); user.SetNormalizedNames(); } protected override UserDto MapToEntityDto(User user) { var roleIds = user.Roles.Select(x => x.RoleId).ToArray(); var roles = _roleManager.Roles.Where(r => roleIds.Contains(r.Id)).Select(r => r.NormalizedName); var userDto = base.MapToEntityDto(user); userDto.RoleNames = roles.ToArray(); return userDto; } protected override IQueryable<User> CreateFilteredQuery(PagedUserResultRequestDto input) { return Repository.GetAllIncluding(x => x.Roles) .WhereIf(!input.Keyword.IsNullOrWhiteSpace(), x => x.UserName.Contains(input.Keyword) || x.Name.Contains(input.Keyword) || x.EmailAddress.Contains(input.Keyword)) .WhereIf(input.IsActive.HasValue, x => x.IsActive == 
input.IsActive); } protected override async Task<User> GetEntityByIdAsync(long id) { var user = await Repository.GetAllIncluding(x => x.Roles).FirstOrDefaultAsync(x => x.Id == id); if (user == null) { throw new EntityNotFoundException(typeof(User), id); } return user; } protected override IQueryable<User> ApplySorting(IQueryable<User> query, PagedUserResultRequestDto input) { return query.OrderBy(r => r.UserName); } protected virtual void CheckErrors(IdentityResult identityResult) { identityResult.CheckErrors(LocalizationManager); } public async Task<bool> ChangePassword(ChangePasswordDto input) { if (_abpSession.UserId == null) { throw new UserFriendlyException("Please log in before attemping to change password."); } long userId = _abpSession.UserId.Value; var user = await _userManager.GetUserByIdAsync(userId); var loginAsync = await _logInManager.LoginAsync(user.UserName, input.CurrentPassword, shouldLockout: false); if (loginAsync.Result != AbpLoginResultType.Success) { throw new UserFriendlyException("Your 'Existing Password' did not match the one on record. 
Please try again or contact an administrator for assistance in resetting your password."); } if (!new Regex(AccountAppService.PasswordRegex).IsMatch(input.NewPassword)) { throw new UserFriendlyException("Passwords must be at least 8 characters, contain a lowercase, uppercase, and number."); } user.Password = _passwordHasher.HashPassword(user, input.NewPassword); CurrentUnitOfWork.SaveChanges(); return true; } public async Task<bool> ResetPassword(ResetPasswordDto input) { if (_abpSession.UserId == null) { throw new UserFriendlyException("Please log in before attemping to reset password."); } long currentUserId = _abpSession.UserId.Value; var currentUser = await _userManager.GetUserByIdAsync(currentUserId); var loginAsync = await _logInManager.LoginAsync(currentUser.UserName, input.AdminPassword, shouldLockout: false); if (loginAsync.Result != AbpLoginResultType.Success) { throw new UserFriendlyException("Your 'Admin Password' did not match the one on record. Please try again."); } if (currentUser.IsDeleted || !currentUser.IsActive) { return false; } var roles = await _userManager.GetRolesAsync(currentUser); if (!roles.Contains(StaticRoleNames.Tenants.Admin)) { throw new UserFriendlyException("Only administrators may reset passwords."); } var user = await _userManager.GetUserByIdAsync(input.UserId); if (user != null) { user.Password = _passwordHasher.HashPassword(user, input.NewPassword); CurrentUnitOfWork.SaveChanges(); } return true; } } }
36.714912
198
0.630868
[ "MIT" ]
Hastrodis/MyFirstBP
aspnet-core/src/MyFirstBP.Application/Users/UserAppService.cs
8,373
C#
using UnityEngine;
using UnityEngine.EventSystems;

namespace m039.Common
{
    /// <summary>
    /// Clears UI selection from this object on the frame after the
    /// EventSystem selects it, effectively making it unselectable.
    /// </summary>
    public class Unselectable : MonoBehaviour, ISelectHandler
    {
        // Set by OnSelect; consumed (and cleared) on the next Update tick.
        bool _selected;

        public void OnSelect(BaseEventData eventData)
        {
            _selected = true;
        }

        void Update()
        {
            if (!_selected)
                return;

            var eventSystem = EventSystem.current;
            if (eventSystem.currentSelectedGameObject == gameObject)
            {
                eventSystem.SetSelectedGameObject(null);
            }

            _selected = false;
        }
    }
}
19.333333
76
0.548276
[ "MIT" ]
m039/CommonUnityLibrary
Runtime/Scripts/UI/Unselectable.cs
580
C#
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("WindowsGameLibrary1")] [assembly: AssemblyProduct("WindowsGameLibrary1")] [assembly: AssemblyDescription("")] [assembly: AssemblyCompany("")] [assembly: AssemblyCopyright("Copyright © 2009")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("7c0cad28-0d1c-43d0-a440-0aa0c09de903")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // [assembly: AssemblyVersion("1.0.0.0")]
36.53125
84
0.754491
[ "MIT" ]
PumpkinPaul/Gearset
GearsetContent/Properties/AssemblyInfo.cs
1,172
C#
using System;
using System.Collections.Generic;
using UnityEngine;

namespace PNLib.Utility
{
	/// <summary>
	/// Shared state for MonoBehaviour-based singletons: a per-type instance
	/// map and a flag marking application shutdown.
	/// </summary>
	public class SingletonCollection : MonoBehaviour
	{
		// Maps a singleton's concrete Type to its live instance.
		protected static readonly Dictionary<Type, object> Instances = new Dictionary<Type, object>();

		// True once the application has begun quitting.
		protected static bool IsApplicationQuitting;

		protected virtual void OnApplicationQuit()
		{
			SetApplicationQuit();
		}

		private static void SetApplicationQuit()
		{
			IsApplicationQuitting = true;
		}
	}
}
20.909091
96
0.76087
[ "MIT" ]
protonames/PNLib
Runtime/Utility/SingletonCollection.cs
462
C#
using System;
using System.Collections.Generic;
using NUnit.Framework;
using Rebus.Logging;
using Rebus.Testing;
using Rhino.Mocks;
using log4net.Config;

namespace Rebus.Tests
{
    /// <summary>
    /// Base class for Rebus test fixtures: configures logging once, resets
    /// shared static state around each test, and tracks disposables so they
    /// are cleaned up during teardown.
    /// </summary>
    public abstract class FixtureBase
    {
        static FixtureBase()
        {
            // Configure log4net once for the whole test run.
            XmlConfigurator.Configure();
        }

        [SetUp]
        public void SetUp()
        {
            Console.WriteLine("---BEGIN SETUP---------------------------------------------");
            // Reset global/static test state so fixtures do not leak into each other.
            TimeMachine.Reset();
            FakeMessageContext.Reset();
            RebusLoggerFactory.Reset();
            DoSetUp();
            Console.WriteLine("---DONE SETTING UP-----------------------------------------");
        }

        // Override point for per-fixture setup; runs after the shared resets.
        protected virtual void DoSetUp()
        {
        }

        [TearDown]
        public void TearDown()
        {
            Console.WriteLine("---BEGIN TEARDOWN------------------------------------------");
            DoTearDown();
            CleanUpTrackedDisposables();
            Console.WriteLine("---DONE TEARING DOWN---------------------------------------");
        }

        /// <summary>
        /// Registers a disposable for automatic disposal during teardown and
        /// returns it for fluent use.
        /// </summary>
        protected T TrackDisposable<T>(T instanceToTrack) where T : IDisposable
        {
            DisposableTracker.TrackDisposable(instanceToTrack);
            return instanceToTrack;
        }

        // Disposes everything registered via TrackDisposable.
        protected void CleanUpTrackedDisposables()
        {
            DisposableTracker.DisposeTheDisposables();
        }

        // Override point for per-fixture teardown; runs before disposables are cleaned up.
        protected virtual void DoTearDown()
        {
        }

        // Convenience wrapper around Rhino Mocks for creating a mock of T.
        protected T Mock<T>() where T : class
        {
            return MockRepository.GenerateMock<T>();
        }
    }
}
26.951613
94
0.478755
[ "Apache-2.0" ]
dev4ce/Rebus
src/Rebus.Tests/FixtureBase.cs
1,671
C#
using System.Web;
using System.Web.Optimization;

namespace WebApplicationDbcontext
{
    /// <summary>
    /// Registers the script and style bundles used by the application.
    /// For more information on bundling, visit https://go.microsoft.com/fwlink/?LinkId=301862
    /// </summary>
    public class BundleConfig
    {
        public static void RegisterBundles(BundleCollection bundles)
        {
            var jqueryBundle = new ScriptBundle("~/bundles/jquery")
                .Include("~/Scripts/jquery-{version}.js");
            bundles.Add(jqueryBundle);

            var validationBundle = new ScriptBundle("~/bundles/jqueryval")
                .Include("~/Scripts/jquery.validate*");
            bundles.Add(validationBundle);

            // Use the development version of Modernizr to develop with and learn from. Then, when you're
            // ready for production, use the build tool at https://modernizr.com to pick only the tests you need.
            var modernizrBundle = new ScriptBundle("~/bundles/modernizr")
                .Include("~/Scripts/modernizr-*");
            bundles.Add(modernizrBundle);

            var bootstrapBundle = new ScriptBundle("~/bundles/bootstrap")
                .Include("~/Scripts/bootstrap.js");
            bundles.Add(bootstrapBundle);

            var styleBundle = new StyleBundle("~/Content/css")
                .Include("~/Content/bootstrap.css", "~/Content/site.css");
            bundles.Add(styleBundle);
        }
    }
}
38.967742
113
0.586921
[ "MPL-2.0" ]
kvolodymyr/Clout-It-C-19-04
Course/Module10/ExampleApps/WebApplicationDbcontext/WebApplicationDbcontext/App_Start/BundleConfig.cs
1,210
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using AvaPlugin;
using System.Threading;
using System.Data;

namespace Driver
{
    /// <summary>
    /// Command-style entry point that drives a lazily created HANDLER.PluginTool.
    /// </summary>
    public class DriverInterface
    {
        HANDLER.PluginTool _doc;

        // Lazily constructs the plugin tool on first access.
        HANDLER.PluginTool doc
        {
            get
            {
                if (_doc == null)
                    _doc = new HANDLER.PluginTool();
                return _doc;
            }
        }

        /// <summary>
        /// Dispatches a command. pData[0] is the command name and pData[1] an
        /// optional argument. Query commands return a value; all other
        /// commands (and unknown ones) return null.
        /// </summary>
        public object call(object[] pData)
        {
            object commandArg = pData.Length > 0 ? pData[0] : null;
            object payload = pData.Length > 1 ? pData[1] : null;

            string command = commandArg as string;
            switch (command)
            {
                case "_activity":
                    doc._activity = payload as WaitCallback;
                    break;
                case "_dataSet":
                    doc._dataSet = payload as DataSet;
                    break;
                case "_beginDoc":
                    doc.beginDoc();
                    break;
                case "_saveDoc":
                    doc.changeDoc();
                    break;
                case "_exc":
                    return doc._exception;
                case "_desc":
                    return doc._desc;
                case "_return":
                    // Stores the payload on the tool and echoes it back.
                    return doc._return = payload;
                case "_print":
                    return null;
                case "_exception":
                    return doc._exception;
            }

            return null;
        }
    }
}
28.433333
107
0.444314
[ "MIT" ]
rualb/ava-agent-xamarin
AvaPlugin/DriverInterface.cs
1,706
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Plays a sound effect whenever this object collides with something.
/// </summary>
public class PlayerCollision : MonoBehaviour
{
    // Cached singleton reference, resolved once at startup.
    private AudioManager audioManager;

    private void Start()
    {
        audioManager = AudioManager.Instance;
    }

    private void OnCollisionEnter(Collision collision)
    {
        // NOTE(review): assumes a sound named "Box" is registered with the
        // AudioManager — confirm against its configuration.
        audioManager.PlayWithRandomizedPitch("Box");
    }
}
20.722222
54
0.718499
[ "Unlicense" ]
Zai-shen/Fless
Assets/Scripts/PlayerCollision.cs
375
C#
/* * Copyright 2016 Google Inc. All Rights Reserved. * Use of this source code is governed by a BSD-style * license that can be found in the LICENSE file or at * https://developers.google.com/open-source/licenses/bsd */ using System; using System.Collections; using System.Collections.Generic; namespace Google.Api.Gax { /// <summary> /// A sequence of resources obtained via API responses, each of which contributes a page of resources. /// Application code can treat this as a simple sequence (with API calls automatically being made /// lazily as more results are required), or call <see cref="AsRawResponses"/> to retrieve /// a page at a time, potentially with additional information. /// </summary> /// <typeparam name="TResponse">The API response type. Each response contains a page of resources.</typeparam> /// <typeparam name="TResource">The resource type contained within the response.</typeparam> public abstract class PagedEnumerable<TResponse, TResource> : IEnumerable<TResource> { /// <summary> /// Returns the sequence of raw API responses, each of which contributes a page of /// resources to this sequence. /// </summary> /// <returns>A sequence of raw API responses, each containing a page of resources.</returns> public virtual IEnumerable<TResponse> AsRawResponses() { throw new NotImplementedException(); } /// <summary> /// Eagerly reads a single page of results with a fixed maximum size. The returned page is guaranteed /// to have that many results, unless there is no more data available. /// </summary> /// <remarks> /// "Natural" pages returned by the API may contain a smaller number of resources than requested. /// For example, a request for a page with 100 resources may return a page with 80 resources but /// a next page token for more to be retrieved. This is suitable for batch-processing, but not /// for user-visible paging such as in a web application, where fixed-size pages are expected. 
/// This method may make more than one API call in order to fill the page, but after the page has been /// returned, all the data will have been loaded. (In particular, iterating over the items in the page /// multiple times will not make any further requests.) /// </remarks> /// <param name="pageSize">The page size. Must be greater than 0.</param> /// <returns>An asynchronous operation, the result of which is a page of resources.</returns> public virtual Page<TResource> ReadPage(int pageSize) { throw new NotImplementedException(); } /// <inheritdoc /> public virtual IEnumerator<TResource> GetEnumerator() { throw new NotImplementedException(); } /// <inheritdoc /> IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); } }
47.09375
114
0.666888
[ "BSD-3-Clause" ]
Global19/gax-dotnet
Google.Api.Gax/PagedEnumerable.cs
3,016
C#
/***************************************************************************** Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ******************************************************************************/ using System; using System.Collections.Generic; using System.Linq; namespace Tensorflow { public class saver { public static (Saver, object) _import_meta_graph_with_return_elements(string meta_graph_or_file, bool clear_devices = false, string import_scope = "", string[] return_elements = null) { var meta_graph_def = meta_graph.read_meta_graph_file(meta_graph_or_file); var meta = meta_graph.import_scoped_meta_graph_with_return_elements( meta_graph_def, clear_devices: clear_devices, import_scope: import_scope, return_elements: return_elements); var (imported_vars, imported_return_elements) = meta; var saver = _create_saver_from_imported_meta_graph( meta_graph_def, import_scope, imported_vars); return (saver, imported_return_elements); } /// <summary> /// Return a saver for restoring variable values to an imported MetaGraph. /// </summary> /// <param name="meta_graph_def"></param> /// <param name="import_scope"></param> /// <param name="imported_vars"></param> /// <returns></returns> public static Saver _create_saver_from_imported_meta_graph(MetaGraphDef meta_graph_def, string import_scope, Dictionary<string, VariableV1> imported_vars) { if(meta_graph_def.SaverDef != null) { // Infer the scope that is prepended by `import_scoped_meta_graph`. 
string scope = import_scope; var var_names = imported_vars.Keys.ToArray(); if(var_names.Length > 0) { var sample_key = var_names[0]; var sample_var = imported_vars[sample_key]; scope = string.Join("", sample_var.name.Skip(sample_key.Length)); } return new Saver(saver_def: meta_graph_def.SaverDef, name: scope); } else { if(variables._all_saveable_objects(scope: import_scope).Length > 0) { // Return the default saver instance for all graph variables. return new Saver(); } else { // If no graph variables exist, then a Saver cannot be constructed. Console.WriteLine("Saver not created because there are no variables in the" + " graph to restore"); return null; } } } } }
40.136364
104
0.562571
[ "Apache-2.0" ]
BradLewis/TensorFlow.NET
src/TensorFlowNET.Core/Train/Saving/saver.py.cs
3,534
C#
using CharacterMap.Core; using CharacterMap.Helpers; using CharacterMap.Models; using CharacterMap.Services; using CharacterMap.Views; using Microsoft.Toolkit.Mvvm.DependencyInjection; using Microsoft.Toolkit.Mvvm.Messaging; using Microsoft.Toolkit.Mvvm.Input; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Input; using Windows.ApplicationModel; using System.Collections.ObjectModel; namespace CharacterMap.ViewModels { public class QuickCompareViewModel : ViewModelBase { public static WindowInformation QuickCompareWindow { get; set; } public string Text { get => Get<string>(); set => Set(value); } public string FilterTitle { get => Get<string>(); set => Set(value); } public InstalledFont SelectedFont { get => Get<InstalledFont>(); set => Set(value); } public List<InstalledFont> FontList { get => Get<List<InstalledFont>>(); set => Set(value); } private BasicFontFilter _fontListFilter = BasicFontFilter.All; public BasicFontFilter FontListFilter { get => _fontListFilter; set { if (Set(ref _fontListFilter, value)) RefreshFontList(); } } public ObservableCollection<CharacterRenderingOptions> QuickFonts { get; } private UserFontCollection _selectedCollection; public UserFontCollection SelectedCollection { get => _selectedCollection; set { if (value != null && value.IsSystemSymbolCollection) { FontListFilter = BasicFontFilter.SymbolFonts; return; } if (Set(ref _selectedCollection, value) && value != null) RefreshFontList(value); } } public object ItemsSource => IsQuickCompare ? 
QuickFonts : FontList; public IReadOnlyList<string> TextOptions { get; } = GlyphService.DefaultTextOptions; public UserCollectionsService FontCollections { get; } public ICommand FilterCommand { get; } public bool IsQuickCompare { get; } public QuickCompareViewModel(bool isQuickCompare) { IsQuickCompare = isQuickCompare; if (DesignMode.DesignModeEnabled) return; RefreshFontList(); FontCollections = Ioc.Default.GetService<UserCollectionsService>(); FilterCommand = new RelayCommand<object>(e => OnFilterClick(e)); if (IsQuickCompare) { QuickFonts = new ObservableCollection<CharacterRenderingOptions>(); Register<CharacterRenderingOptions>(m => { // Only add the font variant if it's not already in the list. // Once we start accepting custom typography this comparison // will have to change. if (!QuickFonts.Any(q => m.IsCompareMatch(q))) QuickFonts.Add(m); }, nameof(QuickCompareViewModel)); } } public void Deactivated() { if (IsQuickCompare) QuickCompareWindow = null; Messenger.UnregisterAll(this); } protected override void OnPropertyChangeNotified(string propertyName) { if (propertyName == nameof(FontList) || propertyName == nameof(QuickFonts)) OnPropertyChanged(nameof(ItemsSource)); } private void OnFilterClick(object e) { if (e is BasicFontFilter filter) { if (filter == FontListFilter) RefreshFontList(); else FontListFilter = filter; } } private void RefreshFontList(UserFontCollection collection = null) { try { var fontList = FontFinder.Fonts.AsEnumerable(); if (collection != null) { FilterTitle = collection.Name; fontList = fontList.Where(f => collection.Fonts.Contains(f.Name)); } else { SelectedCollection = null; FilterTitle = FontListFilter.FilterTitle; if (FontListFilter == BasicFontFilter.ImportedFonts) fontList = FontFinder.ImportedFonts; else fontList = FontListFilter.Query(fontList, FontCollections); } FontList = fontList.ToList(); } catch (Exception e) { } } public void OpenCurrentFont() { if (SelectedFont is not null) _ = 
FontMapView.CreateNewViewForFontAsync(SelectedFont); } } }
32.649351
101
0.56424
[ "MIT" ]
Avid29/Character-Map-UWP
CharacterMap/CharacterMap/ViewModels/QuickCompareViewModel.cs
5,030
C#
using System; /* Task Description: Write a computer program that finds the number of the cat (from 10 cats all together) that is going to win the competition "Miss Cat", i.e. has gathered the most jury votes (from N members of the jury). Input: The input data is being read from the console. The number N is on the first input line. An integer between 1 and 10 is written on each of the next N lines (this is the number of the cats). The input data will always be valid and in the format described. There is no need to check it explicitly. If two cats have equal votes, the winner of the contest is the one whose number is smaller. Output: The output data must be printed on the console. On the only output line you must print the number of the cat, which has won the competition. Constraints: The number N is a positive integer between 1 and 100 000, inclusive. The numbers of the cats for which the jury votes are positive integer numbers between 1 and 10, inclusive. */ class MissCat2011 { static void Main() { int n = int.Parse(Console.ReadLine()); int[] votes = new int[n]; for (int i = 0; i < n; i++) { votes[i] = int.Parse(Console.ReadLine()); } Array.Sort(votes); int counter = 1; int result = 0; int winnerCat = 0; for (int i = 0; i < n - 1; i++) { if (votes[i] == votes[i + 1]) { counter++; } else { if (counter > result) { result = counter; winnerCat = votes[i]; } counter = 1; } } if (counter > result) { result = counter; winnerCat = votes[votes.Length - 1]; } Console.WriteLine(winnerCat); } }
32.566667
122
0.553736
[ "MIT" ]
VesKark/Telerik-Academy-BG
Programming with C#/C# - Part 1 - Contests/Sample-Exam-2011-2012/MissCat2011/MissCat2011.cs
1,956
C#
using NotaFiscalNet.Core.Interfaces; using NotaFiscalNet.Core.Utils; using NotaFiscalNet.Core.Validacao; using System.Xml; namespace NotaFiscalNet.Core { /// <summary> /// Representa a Contribuição de Intervenção no Domínio Econômico do Combustível /// </summary> public sealed class CideCombustivel : ISerializavel, IModificavel { public void Serializar(XmlWriter writer, INFe nfe) { writer.WriteStartElement("CIDE"); writer.WriteStartElement("qBCProd", SerializationUtil.ToTDec_1204(BaseCalculo)); writer.WriteStartElement("vAliqProd", SerializationUtil.ToTDec_1104(Aliquota)); writer.WriteStartElement("vCIDE", SerializationUtil.ToTDec_1302(Valor)); writer.WriteEndElement(); } private decimal _baseCalculo; private decimal _aliquota; private decimal _valor; /// <summary> /// [qBCProd] Retorna ou define a Base de Cálculo do CIDE (Quantidade Comercializada) /// </summary> [NFeField(ID = "L106", FieldName = "qBCProd", DataType = "TDec_1204", Pattern = @"0|0\.[0-9]{4}|[1-9]{1}[0-9]{0,11}(\.[0-9]{4})?")] [CampoValidavel(1, ChaveErroValidacao.CampoNaoPreenchido)] public decimal BaseCalculo { get => _baseCalculo; set { ValidationUtil.ValidateTDec_1204(value, "BaseCalculo"); _baseCalculo = value; } } /// <summary> /// [vAliqProd] Retorna ou define a Alíquota do CIDE em moeda corrente. 
/// </summary> [NFeField(ID = "L107", FieldName = "vAliqProd", DataType = "TDec_1104", Pattern = @"0|0\.[0-9]{4}|[1-9]{1}[0-9]{0,10}(\.[0-9]{4})?")] [CampoValidavel(2, ChaveErroValidacao.CampoNaoPreenchido)] public decimal Aliquota { get => _aliquota; set { ValidationUtil.ValidateTDec_1104(value, "Aliquota"); _aliquota = value; } } /// <summary> /// [vCIDE] Retorna ou define o Valor da CIDE /// </summary> [NFeField(ID = "L108", FieldName = "vCIDE", DataType = "TDec_1302", Pattern = @"0|0\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\.[0-9]{2})?")] [CampoValidavel(3, ChaveErroValidacao.CampoNaoPreenchido)] public decimal Valor { get => _valor; set { ValidationUtil.ValidateTDec_1302(value, "Valor"); _valor = value; } } /// <summary> /// Retorna se a Classe foi modificada /// </summary> public bool Modificado => BaseCalculo != 0m || Aliquota != 0m || Valor != 0m; } }
34.373494
93
0.545741
[ "MIT" ]
NotaFiscalNet/NotaFiscalNet
src/NotaFiscalNet.Core/CideCombustivel.cs
2,864
C#
using System; using System.Collections.Generic; using System.Text; using Utils.NET.IO; namespace TitanCore.Net.Packets.Server { public class TnError : TnPacket { public override TnPacketType Type => TnPacketType.Error; public string message; public TnError() { } public TnError(string message) { this.message = message; } protected override void Read(BitReader r) { message = r.ReadUTF(200); } protected override void Write(BitWriter w) { w.Write(message); } } }
18.028571
64
0.562599
[ "MIT" ]
steele123/Trials-Of-Titan
Client/Project-Titan-Client/Assets/Plugins/TitanCore/Net/Packets/Server/TnError.cs
633
C#
#region "copyright" /* Copyright © 2016 - 2021 Stefan Berg <isbeorn86+NINA@googlemail.com> and the N.I.N.A. contributors This file is part of N.I.N.A. - Nighttime Imaging 'N' Astronomy. This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #endregion "copyright" using FluentAssertions; using NINA.Sequencer.Conditions; using NINA.Core.Enum; using NUnit.Framework; using System.Collections.Generic; using Moq; using NINA.Sequencer.Interfaces; using NINA.Sequencer.Container; namespace NINATest.Sequencer.Conditions { [TestFixture] public class MoonIlluminationConditionTest { [Test] public void MoonIlluminationCondition_Clone_GoodClone() { var sut = new MoonIlluminationCondition(); sut.Icon = new System.Windows.Media.GeometryGroup(); var item2 = (MoonIlluminationCondition)sut.Clone(); item2.Should().NotBeSameAs(sut); item2.Icon.Should().BeSameAs(sut.Icon); item2.UserMoonIllumination.Should().Be(sut.UserMoonIllumination); item2.Comparator.Should().Be(sut.Comparator); } [Test] public void MoonIlluminationCondition_NoProviderInConstructor_NoCrash() { var sut = new MoonIlluminationCondition(); sut.UserMoonIllumination.Should().Be(0); sut.Comparator.Should().Be(ComparisonOperatorEnum.GREATER_THAN); } [Test] public void ComparisonOperators_FilteredAccordingly() { var sut = new MoonIlluminationCondition(); var expectedOperators = new List<ComparisonOperatorEnum>() { ComparisonOperatorEnum.LESS_THAN, ComparisonOperatorEnum.LESS_THAN_OR_EQUAL, ComparisonOperatorEnum.GREATER_THAN, ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL }; sut.ComparisonOperators.Should().BeEquivalentTo(expectedOperators); } [Test] [TestCase(10, 20, ComparisonOperatorEnum.LESS_THAN, false)] [TestCase(20, 10, ComparisonOperatorEnum.LESS_THAN, true)] [TestCase(10, 10, ComparisonOperatorEnum.LESS_THAN, true)] [TestCase(10, 10.01, ComparisonOperatorEnum.LESS_THAN, false)] 
[TestCase(10, 9.99, ComparisonOperatorEnum.LESS_THAN, true)] [TestCase(10, 20, ComparisonOperatorEnum.GREATER_THAN, true)] [TestCase(20, 10, ComparisonOperatorEnum.GREATER_THAN, false)] [TestCase(10, 10, ComparisonOperatorEnum.GREATER_THAN, true)] [TestCase(10, 10.01, ComparisonOperatorEnum.GREATER_THAN, true)] [TestCase(10, 9.99, ComparisonOperatorEnum.GREATER_THAN, false)] [TestCase(10, 20, ComparisonOperatorEnum.LESS_THAN_OR_EQUAL, false)] [TestCase(20, 10, ComparisonOperatorEnum.LESS_THAN_OR_EQUAL, true)] [TestCase(10, 10, ComparisonOperatorEnum.LESS_THAN_OR_EQUAL, false)] [TestCase(10, 10.01, ComparisonOperatorEnum.LESS_THAN_OR_EQUAL, false)] [TestCase(10, 9.99, ComparisonOperatorEnum.LESS_THAN_OR_EQUAL, true)] [TestCase(10, 20, ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL, true)] [TestCase(20, 10, ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL, false)] [TestCase(10, 10, ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL, false)] [TestCase(10, 10.01, ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL, true)] [TestCase(10, 9.99, ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL, false)] public void Check_LESS_THAN(double currentAlt, double userAlt, ComparisonOperatorEnum Comparator, bool expected) { var sut = new MoonIlluminationCondition(); sut.Comparator = Comparator; sut.UserMoonIllumination = userAlt; sut.CurrentMoonIllumination = currentAlt; sut.Check(default, default).Should().Be(expected); } [Test] public void ToString_Test() { var sut = new MoonIlluminationCondition(); sut.Comparator = ComparisonOperatorEnum.GREATER_THAN_OR_EQUAL; sut.UserMoonIllumination = 10; sut.CurrentMoonIllumination = 20; sut.ToString().Should().Be("Condition: MoonIlluminationCondition, CurrentMoonIllumination: 20%, Comparator: GREATER_THAN_OR_EQUAL, UserMoonIllumination: 10%"); } [Test] public void AfterParentChanged_NoParent_WatchdogNotStarted() { var watchdogMock = new Mock<IConditionWatchdog>(); var sut = new MoonIlluminationCondition(); sut.ConditionWatchdog = watchdogMock.Object; 
sut.AfterParentChanged(); watchdogMock.Verify(x => x.Start(), Times.Never); watchdogMock.Verify(x => x.Cancel(), Times.Once); } [Test] public void AfterParentChanged_NotInRootContainer_WatchdogNotStarted() { var watchdogMock = new Mock<IConditionWatchdog>(); var parentMock = new Mock<ISequenceContainer>(); var sut = new MoonIlluminationCondition(); sut.ConditionWatchdog = watchdogMock.Object; sut.Parent = parentMock.Object; sut.AfterParentChanged(); watchdogMock.Verify(x => x.Start(), Times.Never); watchdogMock.Verify(x => x.Cancel(), Times.Once); } [Test] public void AfterParentChanged_InRootContainer_WatchdogStarted() { var watchdogMock = new Mock<IConditionWatchdog>(); var parentMock = new Mock<ISequenceRootContainer>(); var sut = new MoonIlluminationCondition(); sut.ConditionWatchdog = watchdogMock.Object; sut.Parent = parentMock.Object; sut.AfterParentChanged(); watchdogMock.Verify(x => x.Start(), Times.Once); watchdogMock.Verify(x => x.Cancel(), Times.Never); } [Test] public void OnDeserialized_NoParent_WatchdogNotStarted() { var watchdogMock = new Mock<IConditionWatchdog>(); var sut = new MoonIlluminationCondition(); sut.ConditionWatchdog = watchdogMock.Object; sut.OnDeserialized(default); watchdogMock.Verify(x => x.Start(), Times.Never); watchdogMock.Verify(x => x.Cancel(), Times.Once); } [Test] public void OnDeserialized_NotInRootContainer_WatchdogNotStarted() { var watchdogMock = new Mock<IConditionWatchdog>(); var parentMock = new Mock<ISequenceContainer>(); var sut = new MoonIlluminationCondition(); sut.ConditionWatchdog = watchdogMock.Object; sut.Parent = parentMock.Object; sut.OnDeserialized(default); watchdogMock.Verify(x => x.Start(), Times.Never); watchdogMock.Verify(x => x.Cancel(), Times.Once); } [Test] public void OnDeserialized_InRootContainer_WatchdogStarted() { var watchdogMock = new Mock<IConditionWatchdog>(); var parentMock = new Mock<ISequenceRootContainer>(); var sut = new MoonIlluminationCondition(); sut.ConditionWatchdog = 
watchdogMock.Object; sut.Parent = parentMock.Object; sut.OnDeserialized(default); watchdogMock.Verify(x => x.Start(), Times.Once); watchdogMock.Verify(x => x.Cancel(), Times.Never); } } }
39.867725
171
0.653616
[ "MPL-2.0", "MPL-2.0-no-copyleft-exception" ]
daleghent/NINA
NINATest/Sequencer/Conditions/MoonIlluminationConditionTest.cs
7,538
C#
namespace AH.ModuleController.UI.INVMS.Forms { partial class frmTrackingGroupwiseStock { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.cboStoreType = new AtiqsControlLibrary.SmartComboBox(); this.smartLabel1 = new AtiqsControlLibrary.SmartLabel(); this.smartLabel2 = new AtiqsControlLibrary.SmartLabel(); this.smartLabel3 = new AtiqsControlLibrary.SmartLabel(); this.smartLabel4 = new AtiqsControlLibrary.SmartLabel(); this.cboMajorGroup = new AtiqsControlLibrary.SmartComboBox(); this.smartLabel5 = new AtiqsControlLibrary.SmartLabel(); this.smartLabel6 = new AtiqsControlLibrary.SmartLabel(); this.cboMinorGroup = new AtiqsControlLibrary.SmartComboBox(); this.btnShow = new System.Windows.Forms.Button(); this.dgvItemList = new System.Windows.Forms.DataGridView(); this.groupBox2 = new System.Windows.Forms.GroupBox(); this.pnlMain.SuspendLayout(); this.pnlTop.SuspendLayout(); ((System.ComponentModel.ISupportInitialize)(this.dgvItemList)).BeginInit(); this.groupBox2.SuspendLayout(); this.SuspendLayout(); // // frmLabel // this.frmLabel.Location = new System.Drawing.Point(329, 9); this.frmLabel.Size = new System.Drawing.Size(191, 33); this.frmLabel.Text = "Item Stock List"; // // pnlMain // this.pnlMain.Controls.Add(this.groupBox2); this.pnlMain.Controls.Add(this.btnShow); this.pnlMain.Controls.Add(this.smartLabel5); this.pnlMain.Controls.Add(this.smartLabel6); 
this.pnlMain.Controls.Add(this.cboMinorGroup); this.pnlMain.Controls.Add(this.smartLabel3); this.pnlMain.Controls.Add(this.smartLabel4); this.pnlMain.Controls.Add(this.cboMajorGroup); this.pnlMain.Controls.Add(this.smartLabel2); this.pnlMain.Controls.Add(this.smartLabel1); this.pnlMain.Controls.Add(this.cboStoreType); // // btnEdit // this.btnEdit.Visible = false; // // btnSave // this.btnSave.Visible = false; // // btnDelete // this.btnDelete.Visible = false; // // btnNew // this.btnNew.Visible = false; // // btnPrint // this.btnPrint.Visible = false; // // cboStoreType // this.cboStoreType.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cboStoreType.Font = new System.Drawing.Font("Microsoft Sans Serif", 11F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.cboStoreType.ForeColor = System.Drawing.Color.Blue; this.cboStoreType.FormattingEnabled = true; this.cboStoreType.Location = new System.Drawing.Point(112, 161); this.cboStoreType.Name = "cboStoreType"; this.cboStoreType.Size = new System.Drawing.Size(244, 26); this.cboStoreType.TabIndex = 0; this.cboStoreType.SelectedIndexChanged += new System.EventHandler(this.cboStoreType_SelectedIndexChanged); // // smartLabel1 // this.smartLabel1.AutoSize = true; this.smartLabel1.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold); this.smartLabel1.Location = new System.Drawing.Point(12, 168); this.smartLabel1.Name = "smartLabel1"; this.smartLabel1.Size = new System.Drawing.Size(69, 13); this.smartLabel1.TabIndex = 1; this.smartLabel1.Text = "Store Type"; // // smartLabel2 // this.smartLabel2.AutoSize = true; this.smartLabel2.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold); this.smartLabel2.Location = new System.Drawing.Point(94, 168); this.smartLabel2.Name = "smartLabel2"; this.smartLabel2.Size = new System.Drawing.Size(11, 13); this.smartLabel2.TabIndex = 2; 
this.smartLabel2.Text = ":"; // // smartLabel3 // this.smartLabel3.AutoSize = true; this.smartLabel3.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold); this.smartLabel3.Location = new System.Drawing.Point(94, 200); this.smartLabel3.Name = "smartLabel3"; this.smartLabel3.Size = new System.Drawing.Size(11, 13); this.smartLabel3.TabIndex = 5; this.smartLabel3.Text = ":"; // // smartLabel4 // this.smartLabel4.AutoSize = true; this.smartLabel4.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold); this.smartLabel4.Location = new System.Drawing.Point(12, 200); this.smartLabel4.Name = "smartLabel4"; this.smartLabel4.Size = new System.Drawing.Size(76, 13); this.smartLabel4.TabIndex = 4; this.smartLabel4.Text = "Major Group"; // // cboMajorGroup // this.cboMajorGroup.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cboMajorGroup.Font = new System.Drawing.Font("Microsoft Sans Serif", 11F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.cboMajorGroup.ForeColor = System.Drawing.Color.Blue; this.cboMajorGroup.FormattingEnabled = true; this.cboMajorGroup.Location = new System.Drawing.Point(112, 193); this.cboMajorGroup.Name = "cboMajorGroup"; this.cboMajorGroup.Size = new System.Drawing.Size(244, 26); this.cboMajorGroup.TabIndex = 3; this.cboMajorGroup.SelectedIndexChanged += new System.EventHandler(this.cboMajorGroup_SelectedIndexChanged); // // smartLabel5 // this.smartLabel5.AutoSize = true; this.smartLabel5.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold); this.smartLabel5.Location = new System.Drawing.Point(94, 232); this.smartLabel5.Name = "smartLabel5"; this.smartLabel5.Size = new System.Drawing.Size(11, 13); this.smartLabel5.TabIndex = 8; this.smartLabel5.Text = ":"; // // smartLabel6 // this.smartLabel6.AutoSize = true; this.smartLabel6.Font = new System.Drawing.Font("Microsoft Sans 
Serif", 8.25F, System.Drawing.FontStyle.Bold); this.smartLabel6.Location = new System.Drawing.Point(12, 232); this.smartLabel6.Name = "smartLabel6"; this.smartLabel6.Size = new System.Drawing.Size(76, 13); this.smartLabel6.TabIndex = 7; this.smartLabel6.Text = "Minor Group"; // // cboMinorGroup // this.cboMinorGroup.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; this.cboMinorGroup.Font = new System.Drawing.Font("Microsoft Sans Serif", 11F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.cboMinorGroup.ForeColor = System.Drawing.Color.Blue; this.cboMinorGroup.FormattingEnabled = true; this.cboMinorGroup.Location = new System.Drawing.Point(112, 225); this.cboMinorGroup.Name = "cboMinorGroup"; this.cboMinorGroup.Size = new System.Drawing.Size(244, 26); this.cboMinorGroup.TabIndex = 6; // // btnShow // this.btnShow.BackColor = System.Drawing.Color.DarkGreen; this.btnShow.Cursor = System.Windows.Forms.Cursors.Hand; this.btnShow.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.btnShow.Font = new System.Drawing.Font("Georgia", 9F, ((System.Drawing.FontStyle)((System.Drawing.FontStyle.Bold | System.Drawing.FontStyle.Italic))), System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.btnShow.ForeColor = System.Drawing.Color.White; this.btnShow.Location = new System.Drawing.Point(359, 225); this.btnShow.Name = "btnShow"; this.btnShow.Size = new System.Drawing.Size(53, 25); this.btnShow.TabIndex = 171; this.btnShow.Text = "Show"; this.btnShow.UseVisualStyleBackColor = false; this.btnShow.Click += new System.EventHandler(this.btnShow_Click); // // dgvItemList // this.dgvItemList.BackgroundColor = System.Drawing.Color.Silver; this.dgvItemList.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize; this.dgvItemList.GridColor = System.Drawing.Color.DarkGray; this.dgvItemList.Location = new System.Drawing.Point(10, 21); this.dgvItemList.Name = "dgvItemList"; 
this.dgvItemList.Size = new System.Drawing.Size(826, 335); this.dgvItemList.TabIndex = 0; // // groupBox2 // this.groupBox2.Controls.Add(this.dgvItemList); this.groupBox2.Font = new System.Drawing.Font("Times New Roman", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0))); this.groupBox2.Location = new System.Drawing.Point(7, 260); this.groupBox2.Name = "groupBox2"; this.groupBox2.Size = new System.Drawing.Size(845, 366); this.groupBox2.TabIndex = 172; this.groupBox2.TabStop = false; this.groupBox2.Text = "Item Lists"; // // frmTrackingGroupwiseStock // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.ClientSize = new System.Drawing.Size(864, 623); this.isEnterTabAllow = true; this.Name = "frmTrackingGroupwiseStock"; this.Load += new System.EventHandler(this.frmTrackingGroupwiseStock_Load); this.pnlMain.ResumeLayout(false); this.pnlMain.PerformLayout(); this.pnlTop.ResumeLayout(false); this.pnlTop.PerformLayout(); ((System.ComponentModel.ISupportInitialize)(this.dgvItemList)).EndInit(); this.groupBox2.ResumeLayout(false); this.ResumeLayout(false); } #endregion private AtiqsControlLibrary.SmartComboBox cboStoreType; private AtiqsControlLibrary.SmartLabel smartLabel5; private AtiqsControlLibrary.SmartLabel smartLabel6; private AtiqsControlLibrary.SmartComboBox cboMinorGroup; private AtiqsControlLibrary.SmartLabel smartLabel3; private AtiqsControlLibrary.SmartLabel smartLabel4; private AtiqsControlLibrary.SmartComboBox cboMajorGroup; private AtiqsControlLibrary.SmartLabel smartLabel2; private AtiqsControlLibrary.SmartLabel smartLabel1; private System.Windows.Forms.Button btnShow; private System.Windows.Forms.GroupBox groupBox2; private System.Windows.Forms.DataGridView dgvItemList; } }
49.482213
216
0.607557
[ "Apache-2.0" ]
atiq-shumon/DotNetProjects
Hospital_ERP_VS13-WCF_WF/AH.ModuleController/UI/INVMS/Forms/frmTrackingGroupwiseStock.Designer.cs
12,521
C#
// TODO: To speedup summing up of long to decimal accumulation, Josh suggested using a long accumulator and catching the overflow exception and then adding to decimal - i.e. most of the time accumulate to long and once in // a while accumulate to decimal instead of always accumulating to decimal (offer this version as an alternate) // TODO: Implement a for loop instead of divide-and-conquer, since they really accomplish the same thing, and the for loop will be more efficient and easier to make cache line boundary divisible. // Combining will be slightly harder, but we could create an array of sums, where each task has its own array element to fill in, then we combine all of the sums at the end serially. Still has the issue of managed memory // where the array may move and not be cache aligned any more, requiring fixing the array in memory to not move (an unsafe version). // TODO: Study parallel solution presented here (parallel for), which may be better in some cases: https://stackoverflow.com/questions/2419343/how-to-sum-up-an-array-of-integers-in-c-sharp/54794753#54794753 // This method should control the degree of parallelism better than divide-and-conquer (DAC), since DAC we found to be unable to control degree of parallelism // TODO: Implement nullable versions of Sum, only faster than the standard C# ones. Should be able to still use SSE and multi-core, but need to check out each element for null before adding it. These // will be much slower than non-nullable. Hmmm.. Need to test what Linq.Sum() returns for nullable, as I bet the sum becomes null, since null is really an unknown and if one value of an array is unknown, then the Sum() becomes unknown. // If that's the case for Linq.Sum() for nullable types, then SIMD acceleration is still possible, but will be much slower, since we'll need to test each Vector.Count array items for null, and only if they all are not-null then add them to the sum. 
// From research on the web, Linq.Sum() skips null values by default - i.e. treats them as zeroes. This is inconsistent with adding a null value to a number. // TODO: See if SSEandScalar version is faster when the array is entirely inside the cache, to make sure that it's not just being memory bandwidth limited and hiding ILP speedup. Port it to C++ and see // if it speeds up. Run many times over the same array using .Sum() and provide the average and minimum timing. Also, could test using a single core, where memory bandwidth is not the limiter. // TODO: Return a tupple (sum and c) from each parallel Neumaier result and figure out how to combine these results for a more accurate and possibly perfect overall result that will match serial array processing result. // TODO: Implement .Sum() for summing a field of Objects/UserDefinedTypes, if it's possible to add value by possibly aggregating elements into a local array of Vector size and doing an SSE sum. Is it possible to abstract it well and to // perform well to support extracting all numeric data types, so that performance and abstraction are competitive and as simple or simpler than Linq? // TODO: Write a blog on floating-point .Sum() and all of it's capabilities, options and trade-offs in performance and accuracy (Submitted a paper proposal to the MSDN journal. Waiting for response first.) // TODO: Re-use the new generic divide-and-conquer function, and it could even be a lambda function for some implementations (like non-Kahan-Neumaier addition). For float and double summation, we just need to pass in function of double or float. // This would reduce the code base within this file by a very large amount, as most if not all divide-and-conquer repeated implementations would go away. Then we can claim that we use our own divide-and-conquer abstraction inside HPCsharp itself. // TODO: Note that by default parallel .Sum() implementations are pairwise summation, since it does divide-and-conquer. 
// This needs to be noted in the Readme or somehow be communicated to the user, and blogged about and in the parallel section of // wikipedia pairwise summation. // TODO: One of the issues with C# nullable type is that it consists of a byte for the boolean and of another type. This makes // it convenient for single elements, but difficult to process in a data parallel fashion with higher performance. // A better array structure would be an array for booleans and an array to another type. This helps data parallel instruction // since these work only on data of the same size. Maybe that's what we could suggest and implement it to provide // a higher performance alternative. // TODO: Apply the same technique of double the memory load as used for ZeroDetection by loading two per loop. // Wonder if having two or more independent loop counters would also help, along with two or more memory loads. // Need to see if SSE on single core is memory limited. Need to add to table single-core performance. // TODO: Need to implement integer and unsigned integer .SumPar() - i.e. without SSE, but multi-core. // TODO: Implement pair-wise floating-point summation that is multi-core and SSE, with separate SSE implementation which recursively combines an SSE-word all the way down possibly, as this eliminates the problem of base-case function being non-pair, // as this most likely could be just about as fast, or we could develop one that is just as fast and keeps the pairing using SSE all the way to the bottom of recursion. // TODO: One idea that Josh and I came up with to detect overflow for SSE instructions if they saturate for addition is // to subtract and see if the result is the same as the original. If C# chooses the wrap around SSE instructions then // the same technique may still work, or we may need to come up with a different technique to detect wrap around, // possibly when the addition to a positive value makes the result go negative.
// TODO: Benchmark all Int64.MaxValue to show the worst case of BigInteger and Decimal summation of long[] // TODO: Benchmark smaller arrays that fit into cache to show even a higher level of acceleration for a common user case // where the previous step in functional flow will most likely put the result inside the cache. // TODO: Implement long[] to decimal and BigInteger in SSE with overflow detection in s/w. For ulong[] detections can // possibly be done by checking if the sum after the addition is smaller than the sum before the addition. // The same can be done with scalar code, to avoid the overhead of checking for overflow, raising exception, catching it // and then dealing with it - instead just checking for it all the time, which shouold be just as expensive/cheap // as doing checked. If this works, this would make a great blog post or part of an article. // We will have to figure out a similar method for long[] since it's signed. // TODO: Don't forget for ulong Sum of ulong[] needs to throw an overflow exception, along with long Sum of long[]. Now, that we've developed // a way to detect it that is pretty cheap. // TODO: Create a checked ulong[] .Sum and long[] .Sum that simply use SSE instructions, but now check for numerical overflow and throw an exception if it happens, and don't do anything about it, but at // at least report it. This will match functionality of Linq implementation, but now with SSE, and at least ulong[] checked SSE Sum will be way faster. // TODO: We could also do SSE equivalent versions of int[] .Sum() in SSE, and uint[] in SSE that are fast and check // for numeric overflow. We could even do it for other smaller data types, if it's worthwhile to do. This may provide // higher performance, especially for uint[] checked, which would be equivalent in functionality to LInq and be higher // performance than expanding to long implementation in SSE - i.e. still safe, but eliminating overflow, just checking for it. 
// TODO: Create a checkedAdd() for Vector<long> addition and Vector<ulong> addition cases, since we now know exactly what to do to work around // the lack of this capability by the checked block key word, and throw an overflow exception in the detected cases, with minimal overhead. // TODO: Make sure to look at this https://stackoverflow.com/questions/49308115/c-sharp-vectordouble-copyto-barely-faster-than-non-simd-version?rq=1 // TODO: Contribute to this https://stackoverflow.com/questions/2056445/no-overflow-exception-for-int-in-c and point them to my question of // no overflow even when checked for SSE instructions. // TODO: Suggest to Intel/AMD how to improve SSE instructions to make them better for addition, such as like multiplication, put the carry bits in another result, or produce a bigger result. The overflow bit seems like a really // outdated and inefficient way of computing. We can do better. // TODO: Figure out the bug with "BigInteger += longValue" that Microsoft seems to have, possibly posting to StackOverflow, as it seems to produce wrong result, but "BigInteger = BigInteger + longValue" seems to work fine. // TODO: Implement checked SSE subtraction, which is a bit less useful, but completes checked Addition/Subtraction coverage // TODO: Figure out how .AsParallel() works by studying .NET Core open source code, and make HPCsharp compattible with it. // Add a new .AsDataParallel() that implements it in SIMD/SSE as an option. // TODO: Check out this blog and links that it points to https://devblogs.microsoft.com/dotnet/hardware-intrinsics-in-net-core/ // TODO: Apply the new overflow detecting SSE implementation which has no if statements to byte, ushort, uint .SumSse() // as this may be faster, because + would be done at native data type size (byte, ushort, uint) instead of // spending instruction to expand bytes to ulong, which takes many steps. 
// The way to do this for byte is to use a byte (same data type) // for carry-out bits and make an inner loop be fixed at 256 times, and figure out ahead of time how many of these 256 times loops we // can do. Then outside the loop accumulate these carry-out's into several 32-bit split SSE registers. This way the very inner loop is // minimal and the outer loops happens less often, amortized over the 256 times loop. This method should also work well for ushort. Uint won't need it. // TODO: Benchmark the above improved SSE .Sum() implementation inside CPU cache only, by doing many loops across // an array that fits into the cache completely, to see how much faster it really runs when not being // limited by speed of system memory. This may change how we do Functional programming efficiently in the future! // TODO: Apply the new SSE worst-case optimization for ulong[].Sum() to BigInteger as well, as this will speedup even more than Decimal in the worst-case. // TODO: Document in the Readme that .Sum() is a next higher level of abstraction for Sum(), since it takes care of more details internally, such as // removes the need to be concerned about arithmetic overflow, as that is handled inside the HPCsharp .Sum() functions. // TODO: Implement .AsParallel() and .AsSafe() methods, which transform input array into whatever output .AsParallel() transforms normally to. // Also develop a special data type for .AsSafe() to transform the output to, and handle this data type in HPCsharp functions. This will make it // simpler for the developers to use instead of dealing with naming of functions. // TODO: Benchmarks of unrolled SSE .Sum() show > 2X speedup for arrays inside the cache. Create routines using this methodology and show even faster // overall performance for arrays inside the cache using multi-core or single core. Single-core unrolled is running at almost 7 GigaAdds/sec for long[].
// Multi-core performance seems to get limited to 4 GigaAdds/sec, possibly due to being limited by memory contention. Unrolled SSE multi-core seems // to perform at the same level as single SSE instruction per loop (not-unrolled). Thus, it's beneficial to use unrolled SSE, since it gains speed // when the array is inside the cache, especially for single core. // TODO: 8-way unrolling of SIMD for long .SUM slowed things down when compared to 4-way unrolling, which provides 2X speedup over no unrolling. // for small arrays that fit into L2 cache. Need to try 2-way and 3-way unroll to see if these provide even higher performance. // TODO: Write a blog comparing SumToLongParFor(intToLong) with HPCsharp using only two cores versus this one with 2 thru 6 cores, since HCPsharp uses SIMD/SSE. // Great comparison versus Lambda's too, since Lambda's have function call overhead per array element. This would be a great blog on its own - Prefer ParallelFor to Lambda's for Performance. // TODO: Perform a random search of the best split of arrays for multi-core performance. Is it on cache-line boundaries, page boundaries, or relatively prime to // each other in some way. Another approach is to study why some array sizes and thus their splits perform better than others and see if there is a patterns // Maybe the first place to check is if the top performers are consistenly top performing. // TODO: Figure out why BigInteger sum from long and ulong FasterPar divide-and-conquer generic is throwing an type conversion exception when using the Generic Divide and Conquer. 
using System.Collections.Generic;
using System.Text;
using System.Numerics;
using System.Threading.Tasks;
using System;
using System.Collections.Concurrent;
using System.Threading;

namespace HPCsharp.ParallelAlgorithms
{
    // Single-core SIMD/SSE (System.Numerics.Vector<T>) implementations of array summation.
    static public partial class Sum
    {
        /// <summary>
        /// Summation of sbyte[] array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this sbyte[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of sbyte[] array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this sbyte[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // SIMD summation of arrayToSum[l..r] inclusive. Widens sbyte to short inside a 256-iteration
        // inner loop: 256 additions of values in [-128, 127] cannot overflow a short accumulator
        // (256 * 127 <= short.MaxValue), so the widening to int/long is amortized over 256 vectors.
        private static long SumSseInner(this sbyte[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<long>();
            int sseIndexEnd = l + ((r - l + 1) / (256 * Vector<sbyte>.Count)) * (256 * Vector<sbyte>.Count);
            int incr = Vector<sbyte>.Count;
            int i;
            for (i = l; i < sseIndexEnd;)
            {
                var shortSumLow0 = new Vector<short>();
                var shortSumHigh0 = new Vector<short>();
                for (int j = 0; j < 256; j++, i += incr)
                {
                    var inVector0 = new Vector<sbyte>(arrayToSum, i);
                    Vector.Widen(inVector0, out var shortLow0, out var shortHigh0);
                    shortSumLow0 += shortLow0;
                    shortSumHigh0 += shortHigh0;
                }
                // Reduce the short accumulators: widen to int, combine, then widen to long.
                Vector.Widen(shortSumLow0, out var int0, out var int1);
                int0 += int1;
                Vector.Widen(shortSumHigh0,
out var int2, out var int3);
                int0 += int2;
                int0 += int3;
                Vector.Widen(int0, out var long0, out var long1);
                sumVector += long0;
                sumVector += long1;
            }
            // Scalar cleanup of the leftover elements that did not fill a full 256-vector chunk,
            // then horizontal reduction of the SIMD accumulator into the scalar sum.
            long overallSum = 0;
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<long>.Count; i++)
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of byte[] array, which uses a ulong accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>ulong sum</returns>
        public static ulong SumToUlongSse(this byte[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of byte[] array, which uses a ulong accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>ulong sum</returns>
        public static ulong SumToUlongSse(this byte[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // SIMD summation of arrayToSum[l..r] inclusive. Widens byte to ushort inside a 256-iteration
        // inner loop: 256 additions of values in [0, 255] cannot overflow a ushort accumulator
        // (256 * 255 <= ushort.MaxValue), so the widening to uint/ulong is amortized over 256 vectors.
        private static ulong SumSseInner(this byte[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<ulong>();
            int sseIndexEnd = l + ((r - l + 1) / (256 * Vector<byte>.Count)) * (256 * Vector<byte>.Count);
            int incr = Vector<byte>.Count;
            int i;
            for (i = l; i < sseIndexEnd; )
            {
                var ushortSumLow0 = new Vector<ushort>();
                var ushortSumHigh0 = new Vector<ushort>();
                for (int j = 0; j < 256; j++, i += incr)
                {
                    var inVector0 = new Vector<byte>(arrayToSum, i);
                    Vector.Widen(inVector0, out var ushortLow0, out var ushortHigh0);
                    ushortSumLow0 += ushortLow0;
                    ushortSumHigh0 += ushortHigh0;
                }
                // Reduce the ushort accumulators: widen to uint, combine, then widen to ulong.
                Vector.Widen(ushortSumLow0, out
var uint0, out var uint1);
                uint0 += uint1;
                Vector.Widen(ushortSumHigh0, out var uint2, out var uint3);
                uint0 += uint2;
                uint0 += uint3;
                Vector.Widen(uint0, out var ulong0, out var ulong1);
                sumVector += ulong0;
                sumVector += ulong1;
            }
            // Scalar cleanup of leftover elements, then horizontal reduction of the SIMD accumulator.
            ulong overallSum = 0;
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<ulong>.Count; i++)
                overallSum += sumVector[i];
            return overallSum;
        }

        // Two-way unrolled variant of the byte SumSseInner above: processes two byte vectors per
        // inner-loop iteration with four independent ushort accumulators to expose more
        // instruction-level parallelism.
        private static ulong SumSseInnerUnrolled(this byte[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<ulong>();
            int sseIndexEnd = l + ((r - l + 1) / (256 * Vector<byte>.Count)) * (256 * Vector<byte>.Count);
            int incr = Vector<byte>.Count;
            int incrTimes2 = 2 * Vector<byte>.Count;
            int i;
            for (i = l; i < sseIndexEnd;)
            {
                var ushortSum00 = new Vector<ushort>();
                var ushortSum01 = new Vector<ushort>();
                var ushortSum10 = new Vector<ushort>();
                var ushortSum11 = new Vector<ushort>();
                for (int j = 0; j < 256; j += 2, i += incrTimes2)
                {
                    var inVector0 = new Vector<byte>(arrayToSum, i);
                    var inVector1 = new Vector<byte>(arrayToSum, i + incr);
                    Vector.Widen(inVector0, out var ushort00, out var ushort01);
                    Vector.Widen(inVector1, out var ushort10, out var ushort11);
                    ushortSum00 += ushort00;
                    ushortSum01 += ushort01;
                    ushortSum10 += ushort10;
                    ushortSum11 += ushort11;
                }
                // Reduce all four ushort accumulators down into the single ulong SIMD accumulator.
                Vector.Widen(ushortSum00, out var uint0, out var uint1);
                uint0 += uint1;
                Vector.Widen(ushortSum01, out var uint2, out var uint3);
                uint0 += uint2;
                uint0 += uint3;
                Vector.Widen(ushortSum10, out uint2, out uint3);
                uint0 += uint2;
                uint0 += uint3;
                Vector.Widen(ushortSum11, out uint2, out uint3);
                uint0 += uint2;
                uint0 += uint3;
                Vector.Widen(uint0, out var ulong0, out var ulong1);
                sumVector += ulong0;
                sumVector += ulong1;
            }
            ulong overallSum = 0;
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<ulong>.Count; i++)
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of short[] array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this short[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of short[] array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this short[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // SIMD summation of arrayToSum[l..r] inclusive. Widens short to int inside a 256-iteration
        // inner loop (256 * short.MaxValue fits easily in an int), widening to long once per chunk.
        private static long SumSseInner(this short[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<long>();
            int sseIndexEnd = l + ((r - l + 1) / (256 * Vector<short>.Count)) * (256 * Vector<short>.Count);
            int incr = Vector<short>.Count;
            int i;
            for (i = l; i < sseIndexEnd;)
            {
                var intSumLow0 = new Vector<int>();
                var intSumHigh0 = new Vector<int>();
                for (int j = 0; j < 256; j++, i += incr)
                {
                    var inVector0 = new Vector<short>(arrayToSum, i);
                    Vector.Widen(inVector0, out var intLow0, out var intHigh0);
                    intSumLow0 += intLow0;
                    intSumHigh0 += intHigh0;
                }
                Vector.Widen(intSumLow0, out var long0, out var long1);
                sumVector += long0;
                sumVector += long1;
                Vector.Widen(intSumHigh0, out long0, out long1);
                sumVector += long0;
                sumVector += long1;
            }
            long overallSum = 0;
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<long>.Count; i++)
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of ushort[] array, which uses a ulong accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>ulong sum</returns>
        public static ulong SumToUlongSse(this ushort[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of ushort[] array, which uses a ulong accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>ulong sum</returns>
        public static ulong SumToUlongSse(this ushort[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // SIMD summation of arrayToSum[l..r] inclusive. Widens ushort to uint inside a 256-iteration
        // inner loop (256 * ushort.MaxValue fits easily in a uint), widening to ulong once per chunk.
        private static ulong SumSseInner(this ushort[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<ulong>();
            int sseIndexEnd = l + ((r - l + 1) / (256 * Vector<ushort>.Count)) * (256 * Vector<ushort>.Count);
            int incr = Vector<ushort>.Count;
            int i;
            for (i = l; i < sseIndexEnd;)
            {
                var uintSumLow = new Vector<uint>();
                var uintSumHigh = new Vector<uint>();
                for (int j = 0; j < 256; j++, i += incr)
                {
                    var inVector = new Vector<ushort>(arrayToSum, i);
                    Vector.Widen(inVector, out var uintLow, out var uintHigh);
                    uintSumLow += uintLow;
                    uintSumHigh += uintHigh;
                }
                Vector.Widen(uintSumLow, out var ulong0, out var ulong1);
                sumVector += ulong0;
                sumVector += ulong1;
                Vector.Widen(uintSumHigh, out ulong0, out ulong1);
                sumVector += ulong0;
                sumVector += ulong1;
            }
            ulong overallSum = 0;
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<ulong>.Count; i++)
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of int[] array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this int[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of int[] array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this int[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // SIMD summation of arrayToSum[l..r] inclusive. Each int vector is widened to two long
        // vectors which are accumulated separately and combined at the end.
        private static long SumSseInner(this int[] arrayToSum, int l, int r)
        {
            var sumVectorLower = new Vector<long>();
            var sumVectorUpper = new Vector<long>();
            int sseIndexEnd = l + ((r - l + 1) / Vector<int>.Count) * Vector<int>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<int>.Count)
            {
                var inVector = new Vector<int>(arrayToSum, i);
                Vector.Widen(inVector, out var longLower, out var longUpper);
                sumVectorLower += longLower;
                sumVectorUpper += longUpper;
            }
            long overallSum = 0;
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            sumVectorLower += sumVectorUpper;
            for (i = 0; i < Vector<long>.Count; i++)
                overallSum += sumVectorLower[i];
            return overallSum;
        }

        // TODO: Implement aligned SIMD sum, since memory alignment is critical for SIMD instructions. So, do scalar first until we are SIMD aligned and then do SIMD, followed by more scalar to finish all
        //       left over elements that are not SIMD size divisible. First simple step is to check alignment of SIMD portion of the sum. See cache-aligned entry below, which may solve this problem.
// Conclusion: In modern Intel CPUs this issue seems to have been resolved. It used to be an issue in earlier generations when SIMD/SSE was young.
        // Entry point for the offset-alignment experiment below.
        // Fix: this previously called SumSseInner(), so SumSseOffsetInner() was never exercised;
        // it now routes to SumSseOffsetInner() as intended. Both compute the same full sum of the
        // array, so the returned value is unchanged.
        private static long SumSseOffset(this int[] arrayToSum)
        {
            return arrayToSum.SumSseOffsetInner(0, arrayToSum.Length - 1);
        }

        // Experimental variant of the int SumSseInner: sums the first 'offset' (3) elements with
        // scalar code before starting the SIMD loop, to measure the effect of a deliberately
        // misaligned SIMD starting index.
        private static long SumSseOffsetInner(this int[] arrayToSum, int l, int r)
        {
            var sumVectorLower = new Vector<long>();
            var sumVectorUpper = new Vector<long>();
            int offset = 3;
            long overallSum = 0;
            int i;
            for (i = l; i < (l + offset); i++)
                overallSum += arrayToSum[i];
            l += offset;
            int sseIndexEnd = l + ((r - l + 1) / Vector<int>.Count) * Vector<int>.Count;
            for (i = l; i < sseIndexEnd; i += Vector<int>.Count)
            {
                var inVector = new Vector<int>(arrayToSum, i);
                Vector.Widen(inVector, out var longLower, out var longUpper);
                sumVectorLower += longLower;
                sumVectorUpper += longUpper;
            }
            for (; i <= r; i++)
                overallSum += arrayToSum[i];
            sumVectorLower += sumVectorUpper;
            for (i = 0; i < Vector<long>.Count; i++)
                overallSum += sumVectorLower[i];
            return overallSum;
        }

        // SIMD summation of nullable int?[l..r]: null elements are skipped (treated as zero),
        // matching the Linq.Sum() behavior noted in the TODO comments at the top of this file.
        // Non-null values are gathered into a local int[] staging buffer, loaded as a vector,
        // and widened into two long accumulators.
        private static long SumSseInner(this int?[] arrayToSum, int l, int r)
        {
            var sumVectorLower = new Vector<long>();
            var sumVectorUpper = new Vector<long>();
            var longLower = new Vector<long>();
            var longUpper = new Vector<long>();
            var intLocalVector = new int[Vector<int>.Count];
            var intLocalZero = new Vector<int>();
            intLocalZero = default(Vector<int>);
            int sseIndexEnd = l + ((r - l + 1) / Vector<int>.Count) * Vector<int>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<int>.Count)
            {
#if true
                // Zero the staging buffer, then copy only non-null elements into it.
                intLocalZero.CopyTo(intLocalVector, 0);
                for (int j = 0, k = i; j < intLocalVector.Length; j++, k++)
                    if (arrayToSum[k] != null)
                        intLocalVector[j] = (int)arrayToSum[k];
                var inVector = new Vector<int>(intLocalVector, 0);
#else
                for (int j = 0, k = i; j < intLocalVector.Length; j++, k++)
                    intLocalVector[j] = arrayToSum[k] ??
0;
                var inVector = new Vector<int>(intLocalVector, 0);
#endif
                Vector.Widen(inVector, out longLower, out longUpper);
                sumVectorLower += longLower;
                sumVectorUpper += longUpper;
            }
            // Scalar cleanup of the leftover elements, still skipping null values.
            long overallSum = 0;
            for (; i <= r; i++)
                if (arrayToSum[i] != null)
                    overallSum += (int)arrayToSum[i];
            sumVectorLower += sumVectorUpper;
            for (i = 0; i < Vector<long>.Count; i++)
                overallSum += sumVectorLower[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of int?[] nullable array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Null values are skipped. Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this int?[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of int?[] nullable array, which uses a long accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Null values are skipped. Will not throw overflow exception.
/// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>long sum</returns>
        public static long SumToLongSse(this int?[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // Experimental: interleaves SIMD additions with scalar additions in a single stream.
        public static long SumSseAndScalarSingleStream(this int[] arrayToSum)
        {
            //return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
            return arrayToSum.SumSseAndScalarSingleStreamInner(0, arrayToSum.Length - 1);
        }

        public static long SumSseAndScalarSingleStream(this int[] arrayToSum, int start, int length)
        {
            return arrayToSum.SumSseAndScalarSingleStreamInner(start, start + length - 1);
        }

        // 256-bit vector means eight 32-bit, which is the minimal size scalar needs to do
        // and then some number of vectors as well.
        // NOTE(review): each loop iteration consumes 4 vectors plus 8 scalar elements, which
        // matches 5 * Vector<int>.Count only when Vector<int>.Count == 8 (256-bit SIMD) — confirm.
        // The conservative sseIndexEnd bound plus the scalar cleanup loop keep the result correct
        // either way; elements not covered by the interleaved loop are summed by the cleanup loop.
        private static long SumSseAndScalarSingleStreamInner(this int[] arrayToSum, int l, int r)
        {
            //const int numScalarOps = 2;
            var sumVectorLower0 = new Vector<long>();
            var sumVectorUpper0 = new Vector<long>();
            var sumVectorLower1 = new Vector<long>();
            var sumVectorUpper1 = new Vector<long>();
            var sumVectorLower2 = new Vector<long>();
            var sumVectorUpper2 = new Vector<long>();
            var sumVectorLower3 = new Vector<long>();
            var sumVectorUpper3 = new Vector<long>();
            var longLower0 = new Vector<long>();
            var longUpper0 = new Vector<long>();
            var longLower1 = new Vector<long>();
            var longUpper1 = new Vector<long>();
            var longLower2 = new Vector<long>();
            var longUpper2 = new Vector<long>();
            var longLower3 = new Vector<long>();
            var longUpper3 = new Vector<long>();
            int sseIndexEnd = l + ((r - l + 1) / (5 * Vector<int>.Count)) * Vector<int>.Count;
            //int numFullVectors = lengthForVector / Vector<int>.Count;
            long partialScalarSum0 = 0;
            long partialScalarSum1 = 0;
            long partialScalarSum2 = 0;
            long partialScalarSum3 = 0;
            int i = l;
            //int scalarIndex = l + numFullVectors * Vector<int>.Count;
            //int sseIndexEnd = scalarIndex;
            //System.Console.WriteLine("{0}, {1}, {2}, {3}, {4}, {5}", arrayToSum.Length, Vector<int>.Count, lengthForVector, numFullVectors, scalarIndex, sseIndexEnd);
            for (; i < sseIndexEnd; )
            {
                var inVector0 = new Vector<int>(arrayToSum, i);
                i += Vector<int>.Count;
                Vector.Widen(inVector0, out longLower0, out longUpper0);
                var inVector1 = new Vector<int>(arrayToSum, i);
                i += Vector<int>.Count;
                Vector.Widen(inVector1, out longLower1, out longUpper1);
                var inVector2 = new Vector<int>(arrayToSum, i);
                i += Vector<int>.Count;
                Vector.Widen(inVector2, out longLower2, out longUpper2);
                var inVector3 = new Vector<int>(arrayToSum, i);
                i += Vector<int>.Count;
                Vector.Widen(inVector3, out longLower3, out longUpper3);
                // Vector number of bits / 32-bits = number of scalar additions need to perform - i.e. one Vector's worth of scalar work
                partialScalarSum0 += arrayToSum[i++];
                sumVectorLower0 += longLower0;
                partialScalarSum1 += arrayToSum[i++];
                sumVectorUpper0 += longUpper0;
                partialScalarSum2 += arrayToSum[i++];
                sumVectorLower1 += longLower1;
                partialScalarSum3 += arrayToSum[i++];
                sumVectorUpper1 += longUpper1;
                partialScalarSum0 += arrayToSum[i++];
                sumVectorLower2 += longLower2;
                partialScalarSum1 += arrayToSum[i++];
                sumVectorUpper2 += longUpper2;
                partialScalarSum2 += arrayToSum[i++];
                sumVectorLower3 += longLower3;
                partialScalarSum3 += arrayToSum[i++];
                sumVectorUpper3 += longUpper3;
            }
            //System.Console.WriteLine("{0}", scalarIndex);
            // Scalar cleanup, then fold the four scalar partial sums and eight SIMD accumulators.
            for (; i <= r; i++)
                partialScalarSum0 += arrayToSum[i];
            partialScalarSum0 += partialScalarSum1;
            partialScalarSum0 += partialScalarSum2;
            partialScalarSum0 += partialScalarSum3;
            sumVectorLower0 += sumVectorUpper0;
            sumVectorLower1 += sumVectorUpper1;
            sumVectorLower2 += sumVectorUpper2;
            sumVectorLower3 += sumVectorUpper3;
            sumVectorLower0 += sumVectorLower1;
            sumVectorLower0 += sumVectorLower2;
            sumVectorLower0 += sumVectorLower3;
            for (i = 0; i < Vector<long>.Count; i++)
                partialScalarSum0 += sumVectorLower0[i];
            return
partialScalarSum0;
        }

        // Experimental: interleaves one scalar addition per SIMD iteration.
        public static long SumSseAndScalar(this int[] arrayToSum)
        {
            //return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
            return arrayToSum.SumSseAndScalarInner(0, arrayToSum.Length - 1);
        }

        public static long SumSseAndScalar(this int[] arrayToSum, int start, int length)
        {
            return arrayToSum.SumSseAndScalarInner(start, start + length - 1);
        }

        // Sadly, even in-cache small arrays are not speeding up with this interleaving idea :-(
        // Yeah, sadly it's about 15% slower
        private static long SumSseAndScalarInner(this int[] arrayToSum, int l, int r)
        {
            const int numScalarOps = 1;
            var sumVectorLower = new Vector<long>();
            var sumVectorUpper = new Vector<long>();
            var longLower = new Vector<long>();
            var longUpper = new Vector<long>();
            // Split the range: the front portion is consumed by SIMD, the tail (starting at
            // scalarIndex) is consumed one element per SIMD iteration by the scalar stream.
            int lengthForVector = (r - l + 1) / (Vector<int>.Count + numScalarOps) * Vector<int>.Count;
            int numFullVectors = lengthForVector / Vector<int>.Count;
            long partialScalarSum0 = 0;
            //long partialScalarSum1 = 0;
            int i = l;
            int scalarIndex = l + numFullVectors * Vector<int>.Count;
            int sseIndexEnd = scalarIndex;
            //System.Console.WriteLine("{0}, {1}, {2}, {3}, {4}, {5}", arrayToSum.Length, Vector<int>.Count, lengthForVector, numFullVectors, scalarIndex, sseIndexEnd);
            for (; i < sseIndexEnd; i += Vector<int>.Count)
            {
                var inVector = new Vector<int>(arrayToSum, i);
                Vector.Widen(inVector, out longLower, out longUpper);
                partialScalarSum0 += arrayToSum[scalarIndex++];    // interleave SSE and Scaler operations
                sumVectorLower += longLower;
                //partialScalarSum1 += arrayToSum[scalarIndex++];
                sumVectorUpper += longUpper;
            }
            //System.Console.WriteLine("{0}", scalarIndex);
            for (i = scalarIndex; i <= r; i++)
                partialScalarSum0 += arrayToSum[i];
            //partialScalarSum0 += partialScalarSum1;
            sumVectorLower += sumVectorUpper;
            for (i = 0; i < Vector<long>.Count; i++)
                partialScalarSum0 += sumVectorLower[i];
            return partialScalarSum0;
        }

        // Experimental ulong variant of the single-stream SIMD + scalar interleaving idea.
        public static ulong SumSseAndScalarSingleStream(this ulong[] arrayToSum)
        {
            //return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
return arrayToSum.SumSseAndScalarSingleStreamInner(0, arrayToSum.Length - 1);
        }

        public static ulong SumSseAndScalarSingleStream(this ulong[] arrayToSum, int start, int length)
        {
            return arrayToSum.SumSseAndScalarSingleStreamInner(start, start + length - 1);
        }

        // 256-bit vector means eight 32-bit, which is the minimal size scalar needs to do
        // and then some number of vectors as well.
        // Sadly, this idea also doesn't seem to work and slows down performance.
        // The commented-out code below is the abandoned 4-way unrolled, scalar-interleaved attempt;
        // what remains active is a plain one-vector-per-iteration SIMD sum with scalar cleanup.
        private static ulong SumSseAndScalarSingleStreamInner(this ulong[] arrayToSum, int l, int r)
        {
            //const int numScalarOps = 2;
            var sumVector0 = new Vector<ulong>();
            //var sumVector1 = new Vector<ulong>();
            //var sumVector2 = new Vector<ulong>();
            //var sumVector3 = new Vector<ulong>();
            int sseIndexEnd = l + ((r - l + 1) / (1 * Vector<ulong>.Count)) * Vector<ulong>.Count;
            //int numFullVectors = lengthForVector / Vector<int>.Count;
            ulong partialScalarSum0 = 0;
            //ulong partialScalarSum1 = 0;
            //ulong partialScalarSum2 = 0;
            //ulong partialScalarSum3 = 0;
            int i = l;
            //int j = l + (4 * Vector<ulong>.Count);
            //int j = l + 1 * Vector<ulong>.Count;
            //int k = l + 2 * Vector<ulong>.Count;
            //int m = l + 3 * Vector<ulong>.Count;
            int increment = 1 * Vector<ulong>.Count;
            //int scalarIndex = l + numFullVectors * Vector<int>.Count;
            //int sseIndexEnd = scalarIndex;
            //System.Console.WriteLine("{0}, {1}, {2}, {3}, {4}, {5}", arrayToSum.Length, Vector<int>.Count, lengthForVector, numFullVectors, scalarIndex, sseIndexEnd);
            for (; i < sseIndexEnd; i += increment)
            {
                sumVector0 += new Vector<ulong>(arrayToSum, i);
                //sumVector1 += new Vector<ulong>(arrayToSum, j);
                //sumVector2 += new Vector<ulong>(arrayToSum, k);
                //sumVector3 += new Vector<ulong>(arrayToSum, m);
                //var inVector0 = new Vector<ulong>(arrayToSum, i);
                //i += Vector<ulong>.Count;
                //var inVector1 = new Vector<ulong>(arrayToSum, j);
                //i += Vector<ulong>.Count;
                //var inVector2 = new Vector<ulong>(arrayToSum, k);
                //i += Vector<ulong>.Count;
                //var inVector3 = new Vector<ulong>(arrayToSum, m);
                //i += Vector<ulong>.Count;
                // Vector number of bits / 32-bits = number of scalar additions need to perform - i.e. one Vector's worth of scalar work
                //partialScalarSum0 += arrayToSum[j++];
                //sumVector0 += inVector0;
                //partialScalarSum1 += arrayToSum[j++];
                //sumVector1 += inVector1;
                //partialScalarSum2 += arrayToSum[j++];
                //sumVector2 += inVector2;
                //partialScalarSum3 += arrayToSum[j++];
                //sumVector3 += inVector3;
                //i += increment;
                //j += increment;
                //k += increment;
                //m += increment;
                //j += 4 * Vector<ulong>.Count;
            }
            //System.Console.WriteLine("{0}", scalarIndex);
            for (; i <= r; i++)
                partialScalarSum0 += arrayToSum[i];
            //partialScalarSum0 += partialScalarSum1;
            //partialScalarSum0 += partialScalarSum2;
            //partialScalarSum0 += partialScalarSum3;
            //sumVector0 += sumVector1;
            //sumVector0 += sumVector2;
            //sumVector0 += sumVector3;
            for (i = 0; i < Vector<ulong>.Count; i++)
                partialScalarSum0 += sumVector0[i];
            return partialScalarSum0;
        }

        /// <summary>
        /// Summation of uint[] array, which uses a ulong accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>ulong sum</returns>
        public static ulong SumToUlongSse(this uint[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of uint[] array, which uses a ulong accumulator for perfect accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Will not throw overflow exception.
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>ulong sum</returns> public static ulong SumToUlongSse(this uint[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseInner(startIndex, startIndex + length - 1); } private static ulong SumSseInner(this uint[] arrayToSum, int l, int r) { var sumVectorLower = new Vector<ulong>(); var sumVectorUpper = new Vector<ulong>(); int sseIndexEnd = l + ((r - l + 1) / Vector<uint>.Count) * Vector<uint>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<int>.Count) { var inVector = new Vector<uint>(arrayToSum, i); Vector.Widen(inVector, out var ulongLower, out var ulongUpper); sumVectorLower += ulongLower; sumVectorUpper += ulongUpper; } ulong overallSum = 0; for (; i <= r; i++) overallSum += arrayToSum[i]; sumVectorLower += sumVectorUpper; for (i = 0; i < Vector<ulong>.Count; i++) overallSum += sumVectorLower[i]; return overallSum; } /// <summary> /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// Caution: Will not throw an overflow exception for the majority of the array, but instead will wrap around to negatives, when the sum goes beyond Int64.MaxValue /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>long sum</returns> /// <exception>TSource:System.OverflowException: when the sum value is greater than Int64.MaxValue</exception> public static long SumSse(this long[] arrayToSum) { return arrayToSum.SumSseInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. 
        /// Caution: Will not throw an overflow exception for the majority of the array, but instead will wrap around to negatives, when the sum goes beyond Int64.MaxValue
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>long sum</returns>
        /// <exception>TSource:System.OverflowException: when the sum value is greater than Int64.MaxValue</exception>
        public static long SumSse(this long[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive: unchecked SIMD accumulation (wraps silently), followed by a
        // checked scalar tail and a checked horizontal reduction of the vector accumulator.
        private static long SumSseInner(this long[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<long>();
            // Last index (exclusive) from which a full Vector<long> can still be loaded
            int sseIndexEnd = l + ((r - l + 1) / Vector<long>.Count) * Vector<long>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<long>.Count)
            {
                var inVector = new Vector<long>(arrayToSum, i);
                sumVector += inVector;
            }
            long overallSum = 0;
            for (; i <= r; i++)     // scalar tail: elements that don't fill a vector
            {
                overallSum = checked(overallSum + arrayToSum[i]);
            }
            for (i = 0; i < Vector<long>.Count; i++)    // horizontal reduction
            {
                overallSum = checked(overallSum + sumVector[i]);
            }
            return overallSum;
        }

        // Experimental variant: 4-way unrolled loads feeding a single vector accumulator.
        private static long SumSse2(this long[] arrayToSum)
        {
            return arrayToSum.SumSseInner2(0, arrayToSum.Length - 1);
        }

        private static long SumSse2(this long[] arrayToSum, int start, int length)
        {
            return arrayToSum.SumSseInner2(start, start + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive, loading four vectors per iteration but accumulating into one
        // sum vector (still a single dependency chain). Overflow wraps silently (unchecked).
        private static long SumSseInner2(this long[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<long>();
            int concurrentAmount = 4;
            // End of the region coverable by whole groups of (concurrentAmount) vectors
            int sseIndexEnd = l + ((r - l + 1) / (Vector<long>.Count * concurrentAmount)) * (Vector<long>.Count * concurrentAmount);
            int offset1 = Vector<long>.Count;
            int offset2 = Vector<long>.Count * 2;
            int offset3 = Vector<long>.Count * 3;
            int i;
            int increment = Vector<long>.Count * concurrentAmount;
            for (i = l; i < sseIndexEnd; i += increment)
            {
                sumVector += new Vector<long>(arrayToSum, i);
                sumVector += new Vector<long>(arrayToSum, i + offset1);
                sumVector += new Vector<long>(arrayToSum, i + offset2);
                sumVector += new Vector<long>(arrayToSum, i + offset3);
            }
            long overallSum = 0;
            for (; i <= r; i++)     // scalar tail
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<long>.Count; i++)    // horizontal reduction
                overallSum += sumVector[i];
            return overallSum;
        }

        private static long SumSse3(this long[] arrayToSum)
        {
            return arrayToSum.SumSseInner3(0, arrayToSum.Length - 1);
        }

        private static long SumSse3(this long[] arrayToSum, int start, int length)
        {
            return arrayToSum.SumSseInner3(start, start + length - 1);
        }

        // More than 2X faster on my 6-core laptop when the array is inside the cache. Single-core runs at nearly 7 GigaAdds/sec, but multi-core isn't performing well
        // 8-way unrolling slowed performance way down. Wonder if 2 or 3-way unroll will provide a higher speedup?!
        // Sums arrayToSum[l..r] inclusive with four INDEPENDENT vector accumulators (4-way unroll),
        // breaking the add dependency chain; the four accumulators are combined at the end.
        private static long SumSseInner3(this long[] arrayToSum, int l, int r)
        {
            var sumVector0 = new Vector<long>();
            var sumVector1 = new Vector<long>();
            var sumVector2 = new Vector<long>();
            var sumVector3 = new Vector<long>();
            int concurrentAmount = 4;
            int sseIndexEnd = l + ((r - l + 1) / (Vector<long>.Count * concurrentAmount)) * (Vector<long>.Count * concurrentAmount);
            int offset1 = Vector<long>.Count;
            int offset2 = Vector<long>.Count * 2;
            int offset3 = Vector<long>.Count * 3;
            int i, j, k, m;
            int increment = Vector<long>.Count * concurrentAmount;
            for (i = l, j = l + offset1, k = l + offset2, m = l + offset3; i < sseIndexEnd; i += increment, j += increment, k += increment, m += increment)
            {
                sumVector0 += new Vector<long>(arrayToSum, i);
                sumVector1 += new Vector<long>(arrayToSum, j);
                sumVector2 += new Vector<long>(arrayToSum, k);
                sumVector3 += new Vector<long>(arrayToSum, m);
            }
            long overallSum = 0;
            for (; i <= r; i++)     // scalar tail
                overallSum += arrayToSum[i];
            sumVector0 += sumVector1;
            sumVector0 += sumVector2;
            sumVector0 += sumVector3;
            for (i = 0; i < Vector<long>.Count; i++)    // horizontal reduction
                overallSum += sumVector0[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Throws a System.OverflowException when the sum goes beyond Int64.MaxValue, even for the portion of the array that is processed using SSE instructions.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>long sum</returns>
        /// <exception>TSource:System.OverflowException: when the sum value is greater than Int64.MaxValue</exception>
        public static long SumCheckedSse(this long[] arrayToSum)
        {
            return arrayToSum.SumCheckedSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Throws a System.OverflowException when the sum goes beyond Int64.MaxValue, even for the portion of the array that is processed using SSE instructions.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>long sum</returns>
        /// <exception>TSource:System.OverflowException: when the sum value is greater than Int64.MaxValue</exception>
        public static long SumCheckedSse(this long[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumCheckedSseInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive, detecting signed overflow in the SIMD portion without checked
        // arithmetic: adding two same-sign operands overflowed iff the result's ordering relative to the
        // old sum flips (>=0 operands but sum decreased, or <0 operands but sum increased).
        private static long SumCheckedSseInner(this long[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<long>(0);
            var newSumVector = new Vector<long>();
            var zeroVector = new Vector<long>(0);
            var allOnesVector = new Vector<long>(-1L);
            int i;
            int sseIndexEnd = l + ((r - l + 1) / Vector<long>.Count) * Vector<long>.Count;
            for (i = l; i < sseIndexEnd; i += Vector<long>.Count)
            {
                var inVector = new Vector<long>(arrayToSum, i);
                var inVectorGteZeroMask = Vector.GreaterThanOrEqual(inVector, zeroVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                var sumVectorGteZeroMask = Vector.GreaterThanOrEqual(sumVector, zeroVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                var inVectorLtZeroMask = Vector.OnesComplement(inVectorGteZeroMask);
                var sumVectorLtZeroMask = Vector.OnesComplement(sumVectorGteZeroMask);
                // Optimize performance of paths which don't overflow or underflow, assuming that's the common case
                // if (inVector >= 0 && sumVector < 0)
                var inGteZeroAndSumLtZeroMask = Vector.BitwiseAnd(inVectorGteZeroMask, sumVectorLtZeroMask);
                // if (inVector < 0 && sumVector >= 0)
                var inLtZeroAndSumGteZeroMask = Vector.BitwiseAnd(inVectorLtZeroMask, sumVectorGteZeroMask);
                var orMask = Vector.BitwiseOr(inGteZeroAndSumLtZeroMask, inLtZeroAndSumGteZeroMask);
                if (Vector.EqualsAll(orMask, allOnesVector))
                {
                    // Every lane adds operands of opposite sign, which can never overflow.
                    sumVector += inVector;
                    continue;
                }
                newSumVector = sumVector + inVector;
                // if (inVector >= 0 && sumVector >= 0)
                var bothGteZeroMask = Vector.BitwiseAnd(inVectorGteZeroMask, sumVectorGteZeroMask);
                // if (inVector < 0 && sumVector < 0)
                var bothLtZeroMask = Vector.BitwiseAnd(inVectorLtZeroMask, sumVectorLtZeroMask);
                var newSumLtSumMask = Vector.LessThan(newSumVector, sumVector);
                var newSumGtSumMask = Vector.GreaterThan(newSumVector, sumVector);
                // Same-sign operands whose sum moved the "wrong way" have overflowed/underflowed.
                var comb10Mask = Vector.BitwiseAnd(bothGteZeroMask, newSumLtSumMask);
                var comb01Mask = Vector.BitwiseAnd(bothLtZeroMask, newSumGtSumMask);
                if (Vector.EqualsAny(comb10Mask, allOnesVector) || Vector.EqualsAny(comb01Mask, allOnesVector))
                    throw new OverflowException();
                sumVector = newSumVector;
            }
            long overallSum = 0;
            for (; i <= r; i++)     // scalar tail, checked
                overallSum = checked(overallSum + arrayToSum[i]);
            // NOTE(review): loop bound is Vector<ulong>.Count — same element count as Vector<long>.Count
            // on all platforms, but Vector<long>.Count would state the intent more clearly.
            for (i = 0; i < Vector<ulong>.Count; i++)
                overallSum = checked(overallSum + sumVector[i]);
            return overallSum;
        }

        /// <summary>
        /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a long accumulator for faster performance while detecting overflow/underflow without exceptions and returning a decimal for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>decimal sum</returns>
        public static decimal SumToDecimalSseFaster(this long[] arrayToSum)
        {
            return arrayToSum.SumToDecimalSseFasterInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a long accumulator for faster performance while detecting overflow/underflow without exceptions and returning a decimal for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>decimal sum</returns>
        public static decimal SumToDecimalSseFaster(this long[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumToDecimalSseFasterInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive. SIMD lanes accumulate in long; when a lane would overflow
        // (both operands >= 0 but the sum wrapped negative) or underflow (both < 0 but the sum wrapped
        // positive), that lane's exact operands are drained into the decimal accumulator and the lane
        // restarts from 0. Opposite-sign additions can never overflow and take the fast path.
        private static decimal SumToDecimalSseFasterInner(this long[] arrayToSum, int l, int r)
        {
            decimal overallSum = 0;
            var sumVector = new Vector<long>(0);
            var newSumVector = new Vector<long>();
            var zeroVector = new Vector<long>(0);
            var tmpVector = new Vector<long>();
            var allOnesVector = Vector.OnesComplement(new Vector<long>(0));
            int sseIndexEnd = l + ((r - l + 1) / Vector<long>.Count) * Vector<long>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<long>.Count)
            {
                var inVector = new Vector<long>(arrayToSum, i);
                var sumVectorPositiveMask = Vector.GreaterThanOrEqual(sumVector, zeroVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                var inVectorPositiveMask = Vector.GreaterThanOrEqual(inVector, zeroVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                var sumVectorNegativeMask = Vector.OnesComplement(sumVectorPositiveMask);
                var inVectorNegativeMask = Vector.OnesComplement(inVectorPositiveMask);
                // Optimize performance of paths which don't overflow or underflow, assuming that's the common case
                // if (sumVector >= 0 && inVector < 0)
                var sumPositiveAndInNegativeMask = Vector.BitwiseAnd(sumVectorPositiveMask, inVectorNegativeMask);
                // if (sumVector < 0 && inVector >= 0)
                var sumNegativeAndInPositiveMask = Vector.BitwiseAnd(sumVectorNegativeMask, inVectorPositiveMask);
                var oppositeSignsMask = Vector.BitwiseOr(sumNegativeAndInPositiveMask, sumPositiveAndInNegativeMask);
                // Opposite-sign lanes are added immediately; same-sign lanes are resolved below.
                sumVector = Vector.ConditionalSelect(oppositeSignsMask, sumVector + inVector, sumVector);
                newSumVector = sumVector + inVector;
                // if (inVector >= 0 && sumVector >= 0)
                var bothPositiveMask = Vector.BitwiseAnd(inVectorPositiveMask, sumVectorPositiveMask);
                // if (inVector < 0 && sumVector < 0)
                var bothNegativeMask = Vector.BitwiseAnd(inVectorNegativeMask, sumVectorNegativeMask);
                var newSumLtSumMask = Vector.LessThan(newSumVector, sumVector);
                var newSumGtSumMask = Vector.GreaterThan(newSumVector, sumVector);
                var comb10Mask = Vector.BitwiseAnd(bothPositiveMask, newSumLtSumMask);
                var comb01Mask = Vector.BitwiseAnd(bothNegativeMask, newSumGtSumMask);
                if (Vector.EqualsAny(comb10Mask, allOnesVector)) // overflow occured in one of the vector elements
                {
                    // NOTE(review): loop bound is Vector<ulong>.Count — same element count as
                    // Vector<long>.Count, but the latter would state the intent more clearly.
                    for (int j = 0; j < Vector<ulong>.Count; j++)
                    {
                        if (comb10Mask[j] == -1L) // this particular sum overflowed
                        {
                            // Drain the exact value (old lane sum + input) into the decimal accumulator
                            overallSum += sumVector[j];
                            overallSum += inVector[j];
                        }
                    }
                    tmpVector = Vector.ConditionalSelect(newSumLtSumMask, zeroVector, newSumVector); // overflowed lanes restart at 0
                }
                else tmpVector = newSumVector;
                sumVector = Vector.ConditionalSelect(bothPositiveMask, tmpVector, sumVector);
                if (Vector.EqualsAny(comb01Mask, allOnesVector)) // underflow occured in one of the vector elements
                {
                    for (int j = 0; j < Vector<ulong>.Count; j++)
                    {
                        if (comb01Mask[j] == -1L) // this particular sum underflowed
                        {
                            overallSum += sumVector[j];
                            overallSum += inVector[j];
                        }
                    }
                    tmpVector = Vector.ConditionalSelect(newSumGtSumMask, zeroVector, newSumVector); // underflowed lanes restart at 0
                }
                else tmpVector = newSumVector;
                sumVector = Vector.ConditionalSelect(bothNegativeMask, tmpVector, sumVector);
            }
            for (; i <= r; i++)     // scalar tail: decimal accumulation is exact for any long value
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<long>.Count; i++)    // horizontal reduction of the lane accumulators
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a long accumulator for faster performance while detecting overflow/underflow without exceptions and returning a BigInteger for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>BigInteger sum</returns>
        public static BigInteger SumToBigIntegerSseFaster(this long[] arrayToSum)
        {
            return arrayToSum.SumToBigIntegerSseFasterInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of long[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a long accumulator for faster performance while detecting overflow/underflow without exceptions and returning a BigInteger for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>BigInteger sum</returns>
        public static BigInteger SumToBigIntegerSseFaster(this long[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumToBigIntegerSseFasterInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive. Same lane-drain scheme as SumToDecimalSseFasterInner:
        // SIMD lanes accumulate in long; an overflowing/underflowing lane is drained exactly into the
        // BigInteger accumulator and restarted at 0. Opposite-sign additions can never overflow.
        private static BigInteger SumToBigIntegerSseFasterInner(this long[] arrayToSum, int l, int r)
        {
            BigInteger overallSum = 0;
            var sumVector = new Vector<long>(0);
            var newSumVector = new Vector<long>();
            var zeroVector = new Vector<long>(0);
            var tmpVector = new Vector<long>();
            var allOnesVector = Vector.OnesComplement(new Vector<long>(0));
            int sseIndexEnd = l + ((r - l + 1) / Vector<long>.Count) * Vector<long>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<long>.Count)
            {
                var inVector = new Vector<long>(arrayToSum, i);
                var sumVectorPositiveMask = Vector.GreaterThanOrEqual(sumVector, zeroVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                var inVectorPositiveMask = Vector.GreaterThanOrEqual(inVector, zeroVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                var sumVectorNegativeMask = Vector.OnesComplement(sumVectorPositiveMask);
                var inVectorNegativeMask = Vector.OnesComplement(inVectorPositiveMask);
                // Optimize performance of paths which don't overflow or underflow, assuming that's the common case
                // if (sumVector >= 0 && inVector < 0)
                var sumPositiveAndInNegativeMask = Vector.BitwiseAnd(sumVectorPositiveMask, inVectorNegativeMask);
                // if (sumVector < 0 && inVector >= 0)
                var sumNegativeAndInPositiveMask = Vector.BitwiseAnd(sumVectorNegativeMask, inVectorPositiveMask);
                var oppositeSignsMask = Vector.BitwiseOr(sumNegativeAndInPositiveMask, sumPositiveAndInNegativeMask);
                // Opposite-sign lanes are added immediately; same-sign lanes are resolved below.
                sumVector = Vector.ConditionalSelect(oppositeSignsMask, sumVector + inVector, sumVector);
                newSumVector = sumVector + inVector;
                // if (inVector >= 0 && sumVector >= 0)
                var bothPositiveMask = Vector.BitwiseAnd(inVectorPositiveMask, sumVectorPositiveMask);
                // if (inVector < 0 && sumVector < 0)
                var bothNegativeMask = Vector.BitwiseAnd(inVectorNegativeMask, sumVectorNegativeMask);
                var newSumLtSumMask = Vector.LessThan(newSumVector, sumVector);
                var newSumGtSumMask = Vector.GreaterThan(newSumVector, sumVector);
                var comb10Mask = Vector.BitwiseAnd(bothPositiveMask, newSumLtSumMask);
                var comb01Mask = Vector.BitwiseAnd(bothNegativeMask, newSumGtSumMask);
                if (Vector.EqualsAny(comb10Mask, allOnesVector)) // overflow occured in one of the vector elements
                {
                    for (int j = 0; j < Vector<long>.Count; j++)
                    {
                        if (comb10Mask[j] == -1L) // this particular sum overflowed
                        {
                            // Drain the exact value (old lane sum + input) into the BigInteger accumulator
                            overallSum = overallSum + sumVector[j];
                            overallSum = overallSum + inVector[j];
                        }
                    }
                    tmpVector = Vector.ConditionalSelect(newSumLtSumMask, zeroVector, newSumVector); // overflowed lanes restart at 0
                }
                else tmpVector = newSumVector;
                sumVector = Vector.ConditionalSelect(bothPositiveMask, tmpVector, sumVector);
                if (Vector.EqualsAny(comb01Mask, allOnesVector)) // underflow occured in one of the vector elements
                {
                    for (int j = 0; j < Vector<long>.Count; j++)
                    {
                        if (comb01Mask[j] == -1L) // this particular sum underflowed
                        {
                            overallSum = overallSum + sumVector[j];
                            overallSum = overallSum + inVector[j];
                        }
                    }
                    tmpVector = Vector.ConditionalSelect(newSumGtSumMask, zeroVector, newSumVector); // underflowed lanes restart at 0
                }
                else tmpVector = newSumVector;
                sumVector = Vector.ConditionalSelect(bothNegativeMask, tmpVector, sumVector);
            }
            for (; i <= r; i++)     // scalar tail
                overallSum = overallSum + arrayToSum[i];
            for (i = 0; i < Vector<long>.Count; i++)    // horizontal reduction of the lane accumulators
                overallSum = overallSum + sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
/// Caution: Will not throw an overflow exception for the majority of the array, but instead will wrap around to smaller values, when the sum goes beyond UInt64.MaxValue /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>ulong sum</returns> /// <exception>TSource:System.OverflowException: when the sum value is greater than UInt64.MaxValue</exception> public static ulong SumSse(this ulong[] arrayToSum) { return arrayToSum.SumSseInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// Caution: Will not throw an overflow exception for the majority of the array, but instead will wrap around to smaller values, when the sum goes beyond UInt64.MaxValue /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>ulong sum</returns> /// <exception>TSource:System.OverflowException: when the sum value is greater than UInt64.MaxValue</exception> public static ulong SumSse(this ulong[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseInner(startIndex, startIndex + length - 1); } private static ulong SumSseInner(this ulong[] arrayToSum, int l, int r) { var sumVector = new Vector<ulong>(); int sseIndexEnd = l + ((r - l + 1) / Vector<ulong>.Count) * Vector<ulong>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<long>.Count) { var inVector = new Vector<ulong>(arrayToSum, i); sumVector += inVector; } ulong overallSum = 0; for (; i <= r; i++) { checked { overallSum += arrayToSum[i]; } } for (i = 0; i < Vector<long>.Count; i++) { checked { overallSum += sumVector[i]; } } return overallSum; } /// <summary> /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. 
/// Throws a System.OverflowException when the sum goes beyond UInt64.MaxValue, even for the portion of the array that is processed using SSE instructions. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>ulong sum</returns> /// <exception>TSource:System.OverflowException: when the sum value is greater than UInt64.MaxValue</exception> public static ulong SumCheckedSse(this ulong[] arrayToSum) { return arrayToSum.SumCheckedSseInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// Throws a System.OverflowException when the sum goes beyond UInt64.MaxValue, even for the portion of the array that is processed using SSE instructions. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>ulong sum</returns> /// <exception>TSource:System.OverflowException: when the sum value is greater than UInt64.MaxValue</exception> public static ulong SumCheckedSse(this ulong[] arrayToSum, int startIndex, int length) { return arrayToSum.SumCheckedSseInner(startIndex, startIndex + length - 1); } private static ulong SumCheckedSseInner(this ulong[] arrayToSum, int l, int r) { var sumVector = new Vector<ulong>(); var newSumVector = new Vector<ulong>(); var allOnesVector = new Vector<ulong>(0xFFFFFFFFFFFFFFFFL); int sseIndexEnd = l + ((r - l + 1) / Vector<ulong>.Count) * Vector<ulong>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<long>.Count) { var inVector = new Vector<ulong>(arrayToSum, i); newSumVector = sumVector + inVector; Vector<ulong> gteMask = Vector.GreaterThanOrEqual(newSumVector, sumVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long> if (Vector.EqualsAll(gteMask, allOnesVector)) sumVector = newSumVector; else 
throw new System.OverflowException(); } ulong overallSum = 0; for (; i <= r; i++) { overallSum = checked(overallSum + arrayToSum[i]); } for (i = 0; i < Vector<ulong>.Count; i++) { overallSum = checked(overallSum + sumVector[i]); } return overallSum; } /// <summary> /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>decimal sum</returns> public static decimal SumToDecimalSseFaster(this ulong[] arrayToSum) { return arrayToSum.SumToDecimalSseFasterInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>decimal sum</returns> public static decimal SumToDecimalSseFaster(this ulong[] arrayToSum, int startIndex, int length) { return arrayToSum.SumToDecimalSseFasterInner(startIndex, startIndex + length - 1); } private static decimal SumToDecimalSseFasterInner(this ulong[] arrayToSum, int l, int r) { decimal overallSum = 0; var sumVector = new Vector<ulong>(); var newSumVector = new Vector<ulong>(); var zeroVector = new Vector<ulong>(0); int sseIndexEnd = l + ((r - l + 1) / Vector<ulong>.Count) * Vector<ulong>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<ulong>.Count) { var inVector = new Vector<ulong>(arrayToSum, i); newSumVector = sumVector + inVector; Vector<ulong> gteMask = Vector.GreaterThanOrEqual(newSumVector, sumVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long> if (Vector.EqualsAny(gteMask, zeroVector)) { for(int j = 0; j < Vector<ulong>.Count; j++) { if (gteMask[j] == 0) // this particular sum overflowed, since sum decreased { overallSum += sumVector[j]; overallSum += inVector[ j]; } } } sumVector = Vector.ConditionalSelect(gteMask, newSumVector, zeroVector); } for (; i <= r; i++) overallSum += arrayToSum[i]; for (i = 0; i < Vector<ulong>.Count; i++) overallSum += sumVector[i]; return overallSum; } /// <summary> /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy. 
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>decimal sum</returns>
        public static decimal SumToDecimalSseEvenFaster(this ulong[] arrayToSum)
        {
            return arrayToSum.SumToDecimalSseEvenFasterInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>decimal sum</returns>
        public static decimal SumToDecimalSseEvenFaster(this ulong[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumToDecimalSseEvenFasterInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive using a virtual 128-bit accumulator per lane: sumVector holds
        // the low 64 bits and upperSumVector counts the carries out of each lane.
        private static decimal SumToDecimalSseEvenFasterInner(this ulong[] arrayToSum, int l, int r)
        {
            decimal overallSum = 0;
            var sumVector = new Vector<ulong>();
            var upperSumVector = new Vector<ulong>();
            var newSumVector = new Vector<ulong>();
            int sseIndexEnd = l + ((r - l + 1) / Vector<ulong>.Count) * Vector<ulong>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<ulong>.Count)
            {
                var inVector = new Vector<ulong>(arrayToSum, i);
                newSumVector = sumVector + inVector;
                Vector<ulong> ltMask = Vector.LessThan(newSumVector, sumVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                // Subtracting all-ones is adding 1 (mod 2^64): each wrapped lane increments its carry count.
                upperSumVector -= ltMask;
                sumVector = newSumVector;
            }
            for (; i <= r; i++)     // scalar tail: decimal accumulation is exact for any ulong value
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<ulong>.Count; i++)
            {
                overallSum += sumVector[i];
                decimal multiplier = 0x8000_0000_0000_0000;
                // carry count * 2^64, written as 2^63 * 2 because 2^64 does not fit in any integral literal
                overallSum += multiplier * (Decimal)2 * (Decimal)upperSumVector[i];
            }
            return overallSum;
        }

        private static BigInteger SumToBigIntegerSseEvenFasterInner(this ulong[] arrayToSum, int l, int r)
        {
            // NOTE(review): routes through the decimal implementation; decimal has a 96-bit mantissa,
            // so for extremely large arrays the intermediate could round — confirm expected array sizes.
            return (BigInteger)SumToDecimalSseEvenFasterInner(arrayToSum, l, r);
        }

        /// <summary>
        /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>BigInteger sum</returns>
        public static BigInteger SumToBigIntegerSseEvenFaster(this ulong[] arrayToSum)
        {
            return (BigInteger)arrayToSum.SumToDecimalSseEvenFasterInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>BigInteger sum</returns>
        public static BigInteger SumToBigIntegerSseEvenFaster(this ulong[] arrayToSum, int startIndex, int length)
        {
            return (BigInteger)arrayToSum.SumToDecimalSseEvenFasterInner(startIndex, startIndex + length - 1);
        }

        /// <summary>
        /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>BigInteger sum</returns>
        public static BigInteger SumToBigIntegerSseFaster(this ulong[] arrayToSum)
        {
            return arrayToSum.SumToBigIntegerSseFasterInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of ulong[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>BigInteger sum</returns>
        public static BigInteger SumToBigIntegerSseFaster(this ulong[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumToBigIntegerSseFasterInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive. SIMD lanes accumulate in ulong; a wrapped lane is drained
        // exactly into the BigInteger accumulator and restarted at zero.
        private static BigInteger SumToBigIntegerSseFasterInner(this ulong[] arrayToSum, int l, int r)
        {
            BigInteger overallSum = 0;
            var sumVector = new Vector<ulong>();
            var newSumVector = new Vector<ulong>();
            var zeroVector = new Vector<ulong>(0);
            int sseIndexEnd = l + ((r - l + 1) / Vector<ulong>.Count) * Vector<ulong>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<ulong>.Count)
            {
                var inVector = new Vector<ulong>(arrayToSum, i);
                newSumVector = sumVector + inVector;
                Vector<ulong> gteMask = Vector.GreaterThanOrEqual(newSumVector, sumVector); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long>
                if (Vector.EqualsAny(gteMask, zeroVector))
                {
                    for (int j = 0; j < Vector<ulong>.Count; j++)
                    {
                        if (gteMask[j] == 0) // this particular sum overflowed, since sum decreased
                        {
                            overallSum += sumVector[j];
                            overallSum += inVector[j];
                        }
                    }
                }
                sumVector = Vector.ConditionalSelect(gteMask, newSumVector, zeroVector); // wrapped lanes restart at 0
            }
            for (; i <= r; i++)     // scalar tail
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<ulong>.Count; i++)   // horizontal reduction
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of float[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <returns>float sum</returns>
        public static float SumSse(this float[] arrayToSum)
        {
            return arrayToSum.SumSseInner(0, arrayToSum.Length - 1);
        }

        /// <summary>
        /// Summation of float[] array, using data parallel SIMD/SSE instructions for higher performance on a single core.
        /// </summary>
        /// <param name="arrayToSum">An array to sum up</param>
        /// <param name="startIndex">index of the starting element for the summation</param>
        /// <param name="length">number of array elements to sum up</param>
        /// <returns>float sum</returns>
        public static float SumSse(this float[] arrayToSum, int startIndex, int length)
        {
            return arrayToSum.SumSseInner(startIndex, startIndex + length - 1);
        }

        // Sums arrayToSum[l..r] inclusive in float precision (no widening).
        private static float SumSseInner(this float[] arrayToSum, int l, int r)
        {
            var sumVector = new Vector<float>();
            int sseIndexEnd = l + ((r - l + 1) / Vector<float>.Count) * Vector<float>.Count;
            int i;
            for (i = l; i < sseIndexEnd; i += Vector<float>.Count)
            {
                var inVector = new Vector<float>(arrayToSum, i);
                sumVector += inVector;
            }
            float overallSum = 0;
            for (; i <= r; i++)     // scalar tail
                overallSum += arrayToSum[i];
            for (i = 0; i < Vector<float>.Count; i++)   // horizontal reduction
                overallSum += sumVector[i];
            return overallSum;
        }

        /// <summary>
        /// Summation of float[] array, using a double accumulator for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core.
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSse(this float[] arrayToSum) { return arrayToSum.SumSseDoubleInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of float[] array, using a double accumulator for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSse(this float[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseDoubleInner(startIndex, startIndex + length - 1); } private static double SumSseDoubleInner(this float[] arrayToSum, int l, int r) { var sumVectorLower = new Vector<double>(); var sumVectorUpper = new Vector<double>(); var longLower = new Vector<double>(); var longUpper = new Vector<double>(); int sseIndexEnd = l + ((r - l + 1) / Vector<float>.Count) * Vector<float>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<float>.Count) { var inVector = new Vector<float>(arrayToSum, i); Vector.Widen(inVector, out longLower, out longUpper); sumVectorLower += longLower; sumVectorUpper += longUpper; } double overallSum = 0; for (; i <= r; i++) overallSum += arrayToSum[i]; sumVectorLower += sumVectorUpper; for (i = 0; i < Vector<double>.Count; i++) overallSum += sumVectorLower[i]; return overallSum; } /// <summary> /// Summation of double[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumSse(this double[] arrayToSum) { return arrayToSum.SumSseInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of double[] array, using data parallel SIMD/SSE instructions for higher performance on a single core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumSse(this double[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseInner(startIndex, startIndex + length - 1); } private static double SumSseInner(this double[] arrayToSum, int l, int r) { var sumVector = new Vector<double>(); int sseIndexEnd = l + ((r - l + 1) / Vector<double>.Count) * Vector<double>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<double>.Count) { var inVector = new Vector<double>(arrayToSum, i); sumVector += inVector; } double overallSum = 0; for (; i <= r; i++) overallSum += arrayToSum[i]; for (i = 0; i < Vector<double>.Count; i++) overallSum += sumVector[i]; return overallSum; } /// <summary> /// Summation of float[] array, using Neumaier variation of Kahan summation for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>float sum</returns> public static float SumSseMostAccurate(this float[] arrayToSum) { return arrayToSum.SumSseNeumaierInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of float[] array, using Neumaier variation of Kahan summation for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>float sum</returns> public static float SumSseMostAccurate(this float[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseNeumaierInner(startIndex, startIndex + length - 1); } private static float SumSseNeumaierInner(this float[] arrayToSum, int l, int r) { var sumVector = new Vector<float>(); var cVector = new Vector<float>(); int sseIndexEnd = l + ((r - l + 1) / Vector<float>.Count) * Vector<float>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<float>.Count) { var inVector = new Vector<float>(arrayToSum, i); var tVector = sumVector + inVector; Vector<int> gteMask = Vector.GreaterThanOrEqual(Vector.Abs(sumVector), Vector.Abs(inVector)); // if true then 0xFFFFFFFFL else 0L at each element of the Vector<int> cVector += Vector.ConditionalSelect(gteMask, sumVector, inVector) - tVector + Vector.ConditionalSelect(gteMask, inVector, sumVector); // ConditionalSelect selects left for 0xFFFFFFFFL and right for 0x0L sumVector = tVector; } int iLast = i; // At this point we have sumVector and cVector, which have Vector<float>.Count number of sum's and c's // Reduce these Vector's to a single sum and a single c float sum = 0.0f; float c = 0.0f; for (i = 0; i < Vector<float>.Count; i++) { float t = sum + sumVector[i]; if (Math.Abs(sum) >= Math.Abs(sumVector[i])) c += (sum - t) + sumVector[i]; // If sum is bigger, low-order digits of input[i] are lost. else c += (sumVector[i] - t) + sum; // Else low-order digits of sum are lost sum = t; c += cVector[i]; } for (i = iLast; i <= r; i++) { float t = sum + arrayToSum[i]; if (Math.Abs(sum) >= Math.Abs(arrayToSum[i])) c += (sum - t) + arrayToSum[i]; // If sum is bigger, low-order digits of input[i] are lost. 
else c += (arrayToSum[i] - t) + sum; // Else low-order digits of sum are lost sum = t; } return sum + c; } /// <summary> /// Summation of float[] array, using Neumaier variation of Kahan summation along with double precision for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSseMostAccurate(this float[] arrayToSum) { return arrayToSum.SumSseNeumaierDoubleInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of float[] array, using Neumaier variation of Kahan summation along with double precision for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSseMostAccurate(this float[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseNeumaierDoubleInner(startIndex, startIndex + length - 1); } private static double SumSseNeumaierDoubleInner(this float[] arrayToSum, int l, int r) { var sumVector = new Vector<double>(); var cVector = new Vector<double>(); var longLower = new Vector<double>(); var longUpper = new Vector<double>(); int sseIndexEnd = l + ((r - l + 1) / Vector<float>.Count) * Vector<float>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<float>.Count) { var inVector = new Vector<float>(arrayToSum, i); Vector.Widen(inVector, out longLower, out longUpper); var tVector = sumVector + longLower; Vector<long> gteMask = Vector.GreaterThanOrEqual(Vector.Abs(sumVector), Vector.Abs(longLower)); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long> cVector += Vector.ConditionalSelect(gteMask, 
sumVector, longLower) - tVector + Vector.ConditionalSelect(gteMask, longLower, sumVector); sumVector = tVector; tVector = sumVector + longUpper; gteMask = Vector.GreaterThanOrEqual(Vector.Abs(sumVector), Vector.Abs(longUpper)); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long> cVector += Vector.ConditionalSelect(gteMask, sumVector, longUpper) - tVector + Vector.ConditionalSelect(gteMask, longUpper, sumVector); sumVector = tVector; } int iLast = i; // At this point we have sumVector and cVector, which have Vector<double>.Count number of sum's and c's // Reduce these Vector's to a single sum and a single c double sum = 0.0; double c = 0.0; for (i = 0; i < Vector<double>.Count; i++) { double t = sum + sumVector[i]; if (Math.Abs(sum) >= Math.Abs(sumVector[i])) c += (sum - t) + sumVector[i]; // If sum is bigger, low-order digits of input[i] are lost. else c += (sumVector[i] - t) + sum; // Else low-order digits of sum are lost sum = t; c += cVector[i]; } for (i = iLast; i <= r; i++) { double t = sum + arrayToSum[i]; if (Math.Abs(sum) >= Math.Abs(arrayToSum[i])) c += (sum - t) + arrayToSum[i]; // If sum is bigger, low-order digits of input[i] are lost. else c += (arrayToSum[i] - t) + sum; // Else low-order digits of sum are lost sum = t; } return sum + c; } /// <summary> /// Summation of double[] array, using Neumaier variation of Kahan summation for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumSseMostAccurate(this double[] arrayToSum) { return arrayToSum.SumSseNeumaierInner(0, arrayToSum.Length - 1); } /// <summary> /// Summation of double[] array, using Neumaier variation of Kahan summation for higher accuracy, using data parallel SIMD/SSE instructions for higher performance on a single core. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumSseMostAccurate(this double[] arrayToSum, int startIndex, int length) { return arrayToSum.SumSseNeumaierInner(startIndex, startIndex + length - 1); } private static double SumSseNeumaierInner(this double[] arrayToSum, int l, int r) { var sumVector = new Vector<double>(); var cVector = new Vector<double>(); int sseIndexEnd = l + ((r - l + 1) / Vector<double>.Count) * Vector<double>.Count; int i; for (i = l; i < sseIndexEnd; i += Vector<double>.Count) { var inVector = new Vector<double>(arrayToSum, i); var tVector = sumVector + inVector; Vector<long> gteMask = Vector.GreaterThanOrEqual(Vector.Abs(sumVector), Vector.Abs(inVector)); // if true then 0xFFFFFFFFFFFFFFFFL else 0L at each element of the Vector<long> cVector += Vector.ConditionalSelect(gteMask, sumVector, inVector) - tVector + Vector.ConditionalSelect(gteMask, inVector, sumVector); sumVector = tVector; } int iLast = i; // At this point we have sumVector and cVector, which have Vector<double>.Count number of sum's and c's // Reduce these Vector's to a single sum and a single c double sum = 0.0; double c = 0.0; for (i = 0; i < Vector<double>.Count; i++) { double t = sum + sumVector[i]; if (Math.Abs(sum) >= Math.Abs(sumVector[i])) c += (sum - t) + sumVector[i]; // If sum is bigger, low-order digits of input[i] are lost. else c += (sumVector[i] - t) + sum; // Else low-order digits of sum are lost sum = t; c += cVector[i]; } for (i = iLast; i <= r; i++) { double t = sum + arrayToSum[i]; if (Math.Abs(sum) >= Math.Abs(arrayToSum[i])) c += (sum - t) + arrayToSum[i]; // If sum is bigger, low-order digits of input[i] are lost. 
else c += (arrayToSum[i] - t) + sum; // Else low-order digits of sum are lost sum = t; } return sum + c; } private static ulong NumberOfBytesToNextCacheLine(float[] arrayToAlign) { ulong numBytesUnaligned = 0; unsafe { fixed (float* ptrToArray = &arrayToAlign[0]) { byte* ptrByteToArray = (byte*)ptrToArray; numBytesUnaligned = ((ulong)ptrToArray) & 63; } } return numBytesUnaligned; } /// <summary> /// Summation of sbyte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>long sum</returns> public static long SumToLongSseParDac(this sbyte[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToLongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of sbyte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>long sum</returns> public static long SumToLongSseParDac(this sbyte[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToLongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of byte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. 
/// Uses a ulong accumulator for perfect accuracy. Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>ulong sum</returns> public static ulong SumToUlongSsePar(this byte[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToUlongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } public static ulong SumToUlongSseParInvoke(this byte[] arrayToSum, int degreeOfParallelism = 0) { int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; // TODO: Need to deal with small arrays and small quantas, possibly 0 int quanta = arrayToSum.Length / maxDegreeOfPar; var concurrentSums = new ConcurrentBag<ulong>(); //var startIndexList = new List<int>(); //var lengthList = new List<int>(); //ulong sumGolden = 0; //ulong sumLocal = 0; var listOfActions = new List<Action>(); int startIndex = 0; int i = 0; for (i = 0; i < (maxDegreeOfPar - 1); i++, startIndex += quanta) { //sumGolden = 0; //for (int j = 0; j < quanta; j++) // sumGolden += arrayToSum[startIndex + j]; //sumLocal = SumToUlongSse(arrayToSum, startIndex, quanta); //Console.WriteLine("quanta = {0} startIndex = {1} sumGolden = {2} sumLocal = {3}", quanta, startIndex, sumGolden, sumLocal); //Int32 startIndexLoc1 = new Int32(); int startIndexLoc1 = startIndex; //startIndexList.Add(startIndexLoc1); //Int32 quantaLoc1 = new Int32(); int quantaLoc1 = quanta; //lengthList.Add(quantaLoc1); listOfActions.Add(() => { ulong sumOfAction = SumToUlongSse(arrayToSum, startIndexLoc1, quantaLoc1); //Console.WriteLine("Action: sum = {0} startIndex = {1} length = {2}", sumOfAction, startIndex, quanta); concurrentSums.Add(sumOfAction); }); } //sumGolden = 0; //for (int j = 0; j < quanta; j++) // sumGolden += arrayToSum[startIndex 
+ j]; //sumLocal = Algorithms.Sum.SumToUlong(arrayToSum, startIndex, arrayToSum.Length - startIndex); //Console.WriteLine("quanta = {0} startIndex = {1} length = {2} sumGolden = {3} sumLocal = {4}", quanta, startIndex, arrayToSum.Length - startIndex, sumGolden, sumLocal); //Console.WriteLine("quanta = {0} startIndex = {1} length = {2}", quanta, startIndex, arrayToSum.Length - startIndex); //Int32 startIndexLoc = new Int32(); int startIndexLoc = startIndex; //startIndexList.Add(startIndexLoc); //Int32 quantaLoc = new Int32(); int quantaLoc = arrayToSum.Length - startIndex; //lengthList.Add(quantaLoc); listOfActions.Add(() => { ulong sumOfLastAction = SumToUlongSse(arrayToSum, startIndexLoc, quantaLoc); //Console.WriteLine("Last Action: sum = {0} startIndex = {1} length = {2}", sumOfLastAction, startIndex, arrayToSum.Length - startIndex); concurrentSums.Add(sumOfLastAction); }); // the rest Parallel.Invoke(options, listOfActions.ToArray()); ulong sum = 0; var sumsArray = concurrentSums.ToArray(); for (int k = 0; k < sumsArray.Length; k++) { //Console.WriteLine("concurrentBag[{0}] = {1}", k, sumsArray[k]); sum += sumsArray[k]; } return sum; } /// <summary> /// Summation of byte[] array, using multiple cores. /// Uses a ulong accumulator for perfect accuracy. Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static ulong SumToUlongPar(this byte[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToUlongPar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of byte[] array, using multiple cores. /// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static ulong SumToUlongPar(this byte[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<ulong>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { //Console.WriteLine("Partition: start = {0} end = {1}", range.Item1, range.Item2); ulong localSum = 0; for (int i = range.Item1; i < range.Item2; i++) localSum += arrayToSum[i]; concurrentSums.Add(localSum); }); ulong sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of ushort[] array, using multiple cores. /// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static ulong SumToUlongPar(this ushort[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToUlongPar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of byte[] array, using multiple cores. /// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static ulong SumToUlongPar(this ushort[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<ulong>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { ulong localSum = 0; for (int i = range.Item1; i < range.Item2; i++) localSum += arrayToSum[i]; concurrentSums.Add(localSum); }); ulong sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of uint[] array, using multiple cores. /// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static ulong SumToUlongPar(this uint[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToUlongPar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of uint[] array, using multiple cores. /// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static ulong SumToUlongPar(this uint[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<ulong>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { ulong localSum = 0; for (int i = range.Item1; i < range.Item2; i++) localSum += arrayToSum[i]; concurrentSums.Add(localSum); }); ulong sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of sbyte[] array, using multiple corese. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongPar(this sbyte[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToLongPar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of sbyte[] array, using multiple cores. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongPar(this sbyte[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<long>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { long localSum = 0; for (int i = range.Item1; i < range.Item2; i++) localSum += arrayToSum[i]; concurrentSums.Add(localSum); }); long sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of short[] array, using multiple cores. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongPar(this short[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToLongPar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of short[] array, using multiple cores. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongPar(this short[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<long>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { long localSum = 0; for (int i = range.Item1; i < range.Item2; i++) localSum += arrayToSum[i]; concurrentSums.Add(localSum); }); long sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of int[] array, using multiple cores. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongPar(this int[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToLongPar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of short[] array, using multiple cores. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongPar(this int[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<long>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { long localSum = 0; for (int i = range.Item1; i < range.Item2; i++) localSum += arrayToSum[i]; concurrentSums.Add(localSum); }); long sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } // 7.8 GigaAdds/sec on 6-core dual-memory-channel CPU, using this scalar algorithm = 31 GigaBytes/sec of memory bandwidth for large arrays // from https://stackoverflow.com/questions/2419343/how-to-sum-up-an-array-of-integers-in-c-sharp?noredirect=1&lq=1 private static long SumToLongParForInterlocked(this int[] arrayToSum, int degreeOfParallelism = 0) { long sum = 0; int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(0, arrayToSum.Length), options, range => { long localSum = 0; for (int i = range.Item1; i < range.Item2; i++) { localSum += arrayToSum[i]; } Interlocked.Add(ref sum, localSum); }); return sum; } /// <summary> /// Summation of sbyte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. 
/// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongSsePar(this sbyte[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToLongSsePar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of sbyte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongSsePar(this sbyte[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<long>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { long localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1); concurrentSums.Add(localSum); }); long sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of short[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongSsePar(this short[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToLongSsePar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of short[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongSsePar(this short[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<long>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { long localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1); concurrentSums.Add(localSum); }); long sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of int[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongSsePar(this int[] arrayToSum, int degreeOfParallelism = 0) { return arrayToSum.SumToLongSsePar(0, arrayToSum.Length, degreeOfParallelism); } /// <summary> /// Summation of int[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <param name="degreeOfParallelism">number of computational cores to use</param> /// <returns>ulong sum</returns> public static long SumToLongSsePar(this int[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0) { var concurrentSums = new ConcurrentBag<long>(); int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism; var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar }; Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range => { long localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1); concurrentSums.Add(localSum); }); long sum = 0; var sumsArray = concurrentSums.ToArray(); for (int i = 0; i < sumsArray.Length; i++) sum += sumsArray[i]; return sum; } /// <summary> /// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a long accumulator for perfect accuracy. /// Warning: will not throw an arithmetic overflow exception, wrapping around to the opposite sign quietly. 
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>long sum</returns>
public static long SumSsePar(this long[] arrayToSum, int degreeOfParallelism = 0)
{
    return arrayToSum.SumSsePar(0, arrayToSum.Length, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a long accumulator for perfect accuracy.
/// Warning: will not throw an arithmetic overflow exception, wrapping around to the opposite sign quietly.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>long sum</returns>
public static long SumSsePar(this long[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0)
{
    var concurrentSums = new ConcurrentBag<long>();
    int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism;
    var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar };
    // Partitioner ranges are [Item1, Item2); SumSseInner takes an inclusive right index, hence Item2 - 1.
    Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range =>
    {
        long localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1);
        concurrentSums.Add(localSum);
    });
    // Combine the per-partition partial sums serially.
    long sum = 0;
    var sumsArray = concurrentSums.ToArray();
    for (int i = 0; i < sumsArray.Length; i++)
        sum += sumsArray[i];
    return sum;
}
/// <summary>
/// Summation of byte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumToUlongSsePar(this byte[] arrayToSum, int degreeOfParallelism = 0)
{
    return arrayToSum.SumToUlongSsePar(0, arrayToSum.Length, degreeOfParallelism);
}
/// <summary>
/// Summation of byte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumToUlongSsePar(this byte[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0)
{
    var concurrentSums = new ConcurrentBag<ulong>();
    int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism;
    var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar };
    // Partitioner ranges are [Item1, Item2); SumSseInner takes an inclusive right index, hence Item2 - 1.
    Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range =>
    {
        ulong localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1);
        concurrentSums.Add(localSum);
    });
    // Combine the per-partition partial sums serially.
    ulong sum = 0;
    var sumsArray = concurrentSums.ToArray();
    for (int i = 0; i < sumsArray.Length; i++)
        sum += sumsArray[i];
    return sum;
}
/// <summary>
/// Summation of ushort[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumToUlongSsePar(this ushort[] arrayToSum, int degreeOfParallelism = 0)
{
    return arrayToSum.SumToUlongSsePar(0, arrayToSum.Length, degreeOfParallelism);
}
/// <summary>
/// Summation of ushort[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumToUlongSsePar(this ushort[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0)
{
    var concurrentSums = new ConcurrentBag<ulong>();
    int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism;
    var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar };
    // Partitioner ranges are [Item1, Item2); SumSseInner takes an inclusive right index, hence Item2 - 1.
    Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range =>
    {
        ulong localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1);
        concurrentSums.Add(localSum);
    });
    // Combine the per-partition partial sums serially.
    ulong sum = 0;
    var sumsArray = concurrentSums.ToArray();
    for (int i = 0; i < sumsArray.Length; i++)
        sum += sumsArray[i];
    return sum;
}
/// <summary>
/// Summation of uint[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumToUlongSsePar(this uint[] arrayToSum, int degreeOfParallelism = 0)
{
    return arrayToSum.SumToUlongSsePar(0, arrayToSum.Length, degreeOfParallelism);
}
/// <summary>
/// Summation of uint[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumToUlongSsePar(this uint[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0)
{
    var concurrentSums = new ConcurrentBag<ulong>();
    int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism;
    var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar };
    // Partitioner ranges are [Item1, Item2); SumSseInner takes an inclusive right index, hence Item2 - 1.
    Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range =>
    {
        ulong localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1);
        concurrentSums.Add(localSum);
    });
    // Combine the per-partition partial sums serially.
    ulong sum = 0;
    var sumsArray = concurrentSums.ToArray();
    for (int i = 0; i < sumsArray.Length; i++)
        sum += sumsArray[i];
    return sum;
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy.
/// Warning: will not throw an arithmetic overflow exception, wrapping around to smaller values quietly.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumSsePar(this ulong[] arrayToSum, int degreeOfParallelism = 0)
{
    return arrayToSum.SumSsePar(0, arrayToSum.Length, degreeOfParallelism);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy.
/// Warning: will not throw an arithmetic overflow exception, wrapping around to smaller values quietly.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <param name="degreeOfParallelism">number of computational cores to use; 0 or negative selects Environment.ProcessorCount</param>
/// <returns>ulong sum</returns>
public static ulong SumSsePar(this ulong[] arrayToSum, int startIndex, int length, int degreeOfParallelism = 0)
{
    var concurrentSums = new ConcurrentBag<ulong>();
    int maxDegreeOfPar = degreeOfParallelism <= 0 ? Environment.ProcessorCount : degreeOfParallelism;
    var options = new ParallelOptions() { MaxDegreeOfParallelism = maxDegreeOfPar };
    // Partitioner ranges are [Item1, Item2); SumSseInner takes an inclusive right index, hence Item2 - 1.
    Parallel.ForEach(Partitioner.Create(startIndex, startIndex + length), options, range =>
    {
        ulong localSum = SumSseInner(arrayToSum, range.Item1, range.Item2 - 1);
        concurrentSums.Add(localSum);
    });
    // Combine the per-partition partial sums serially.
    ulong sum = 0;
    var sumsArray = concurrentSums.ToArray();
    for (int i = 0; i < sumsArray.Length; i++)
        sum += sumsArray[i];
    return sum;
}
/// <summary>
/// Recursive parallel divide-and-conquer summation over the inclusive index range [l, r].
/// Ranges at or below thresholdParallel elements fall back to the serial scalar implementation.
/// </summary>
private static BigInteger SumToBigIntegerFasterParInner(this ulong[] arrayToSum, int l, int r, int thresholdParallel = 16 * 1024)
{
    BigInteger sumLeft = 0;
    if (l > r)      // empty range
        return sumLeft;
    if ((r - l + 1) <= thresholdParallel)
        return Algorithms.Sum.SumToBigIntegerFaster(arrayToSum, l, r - l + 1);
    int m = l + (r - l) / 2;    // overflow-safe midpoint (was (r + l) / 2, which can overflow for very large l and r)
    BigInteger sumRight = 0;
    Parallel.Invoke(
        () => { sumLeft  = SumToBigIntegerFasterParInner(arrayToSum, l,     m, thresholdParallel); },
        () => { sumRight = SumToBigIntegerFasterParInner(arrayToSum, m + 1, r, thresholdParallel); }
    );
    // Combine left and right results
    return sumLeft + sumRight;
}
/// <summary>
/// Recursive parallel divide-and-conquer summation over the inclusive index range [l, r].
/// Ranges at or below thresholdParallel elements fall back to the serial scalar implementation.
/// </summary>
private static BigInteger SumToBigIntegerFasterParInner(this long[] arrayToSum, int l, int r, int thresholdParallel = 16 * 1024)
{
    BigInteger sumLeft = 0;
    if (l > r)      // empty range
        return sumLeft;
    if ((r - l + 1) <= thresholdParallel)
        return Algorithms.Sum.SumToBigIntegerFaster(arrayToSum, l, r - l + 1);
    int m = l + (r - l) / 2;    // overflow-safe midpoint (was (r + l) / 2)
    BigInteger sumRight = 0;
    Parallel.Invoke(
        () => { sumLeft  = SumToBigIntegerFasterParInner(arrayToSum, l,     m, thresholdParallel); },
        () => { sumRight = SumToBigIntegerFasterParInner(arrayToSum, m + 1, r, thresholdParallel); }
    );
    // Combine left and right results
    return sumLeft + sumRight;
}
/// <summary>
/// Recursive parallel divide-and-conquer summation over the inclusive index range [l, r].
/// Ranges at or below thresholdParallel elements fall back to the serial SSE implementation.
/// </summary>
private static BigInteger SumToBigIntegerSseFasterParInner(this long[] arrayToSum, int l, int r, int thresholdParallel = 16 * 1024)
{
    BigInteger sumLeft = 0;
    if (l > r)      // empty range
        return sumLeft;
    if ((r - l + 1) <= thresholdParallel)
        return ParallelAlgorithms.Sum.SumToBigIntegerSseFasterInner(arrayToSum, l, r);
    int m = l + (r - l) / 2;    // overflow-safe midpoint (was (r + l) / 2)
    BigInteger sumRight = 0;
    Parallel.Invoke(
        () => { sumLeft  = SumToBigIntegerSseFasterParInner(arrayToSum, l,     m, thresholdParallel); },
        () => { sumRight = SumToBigIntegerSseFasterParInner(arrayToSum, m + 1, r, thresholdParallel); }
    );
    // Combine left and right results
    return sumLeft + sumRight;
}
/// <summary>
/// Recursive parallel divide-and-conquer summation over the inclusive index range [l, r].
/// Ranges at or below thresholdParallel elements fall back to the serial SSE implementation.
/// </summary>
private static BigInteger SumToBigIntegerSseFasterParInner(this ulong[] arrayToSum, int l, int r, int thresholdParallel = 16 * 1024)
{
    BigInteger sumLeft = 0;
    if (l > r)      // empty range
        return sumLeft;
    if ((r - l + 1) <= thresholdParallel)
        return ParallelAlgorithms.Sum.SumToBigIntegerSseFasterInner(arrayToSum, l, r);
    int m = l + (r - l) / 2;    // overflow-safe midpoint (was (r + l) / 2)
    BigInteger sumRight = 0;
    Parallel.Invoke(
        () => { sumLeft  = SumToBigIntegerSseFasterParInner(arrayToSum, l,     m, thresholdParallel); },
        () => { sumRight = SumToBigIntegerSseFasterParInner(arrayToSum, m + 1, r, thresholdParallel); }
    );
    // Combine left and right results
    return sumLeft + sumRight;
}
/// <summary>
/// Summation of byte[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>ulong sum</returns>
private static ulong SumToUlongSseParDac(this byte[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToUlongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of short[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>long sum</returns>
private static long SumToLongSseParDac(this short[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToLongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of short[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>long sum</returns>
private static long SumToLongSseParDac(this short[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToLongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ushort[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>ulong sum</returns>
private static ulong SumToUlongSseParDac(this ushort[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToUlongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ushort[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>ulong sum</returns>
private static ulong SumToUlongSseParDac(this ushort[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToUlongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of int[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>long sum</returns>
private static long SumToLongSseParDac(this int[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToLongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of int[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a long accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>long sum</returns>
// NOTE(review): sibling divide-and-conquer overloads use the "...ParDac" suffix; this one is named
// SumToLongSsePar and overloads the public (startIndex, length, degreeOfParallelism) variant —
// confirm the name (and possible overload-resolution surprises for 4-int-argument calls) is intended.
private static long SumToLongSsePar(this int[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToLongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of uint[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>ulong sum</returns>
private static ulong SumToUlongSseParDac(this uint[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToUlongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of uint[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core.
/// Uses a ulong accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>ulong sum</returns>
// NOTE(review): sibling divide-and-conquer overloads use the "...ParDac" suffix; this one is named
// SumToUlongSsePar and overloads the public (startIndex, length, degreeOfParallelism) variant —
// confirm the name is intended.
private static ulong SumToUlongSsePar(this uint[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToUlongSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Warning: this function will quietly overflow, not throwing an arithmetic overflow exception, wrapping around to the opposite sign value.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>long sum</returns>
private static long SumSsePar(this long[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Warning: this function will quietly overflow, not throwing an arithmetic overflow exception, wrapping around to the opposite sign value.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>long sum</returns>
private static long SumSsePar(this long[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, for higher performance within each core.
/// Uses a decimal accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalPar(this long[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToDecimal, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, for higher performance within each core.
/// Uses a decimal accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalPar(this long[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToDecimal, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalFasterPar(this long[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToDecimalFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalFasterPar(this long[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToDecimalFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalSseFasterPar(this long[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, ParallelAlgorithms.Sum.SumToDecimalSseFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalSseFasterPar(this long[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, ParallelAlgorithms.Sum.SumToDecimalSseFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of long[] array, using multiple cores, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>BigInteger sum</returns>
// NOTE(review): degreeOfParallelism is accepted but not forwarded to the recursive inner implementation — confirm intended.
public static BigInteger SumToBigIntegerFasterPar(this long[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    // Inner recursion works on the inclusive index range [0, Length - 1].
    return SumToBigIntegerFasterParInner(arrayToSum, 0, arrayToSum.Length - 1, thresholdParallel);
}
/// <summary>
/// Summation of long[] array, using multiple cores, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>BigInteger sum</returns>
public static BigInteger SumToBigIntegerFasterPar(this long[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    // Inner recursion works on the inclusive index range [startIndex, startIndex + length - 1].
    return SumToBigIntegerFasterParInner(arrayToSum, startIndex, startIndex + length - 1, thresholdParallel);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>BigInteger sum</returns>
public static BigInteger SumToBigIntegerSseFasterPar(this long[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    // Inner recursion works on the inclusive index range [0, Length - 1].
    return SumToBigIntegerSseFasterParInner(arrayToSum, 0, arrayToSum.Length - 1, thresholdParallel);
}
/// <summary>
/// Summation of long[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Uses a long accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>BigInteger sum</returns>
public static BigInteger SumToBigIntegerSseFasterPar(this long[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    // Inner recursion works on the inclusive index range [startIndex, startIndex + length - 1].
    return SumToBigIntegerSseFasterParInner(arrayToSum, startIndex, startIndex + length - 1, thresholdParallel);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, for higher performance within each core.
/// Uses a decimal accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToDecimal, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, for higher performance within each core.
/// Uses a decimal accumulator for perfect accuracy. Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToDecimal, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, for higher performance within each core.
/// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalFasterPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToDecimalFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, for higher performance within each core.
/// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <param name="startIndex">index of the starting element for the summation</param>
/// <param name="length">number of array elements to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalFasterPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToDecimalFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary>
/// <param name="arrayToSum">An array to sum up</param>
/// <returns>decimal sum</returns>
public static decimal SumToDecimalSseFasterPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0)
{
    return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, ParallelAlgorithms.Sum.SumToDecimalSseFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism);
}
/// <summary>
/// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core.
/// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy.
/// Will not throw an overflow exception.
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>decimal sum</returns> public static decimal SumToDecimalSseFasterPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, ParallelAlgorithms.Sum.SumToDecimalSseFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core. /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy. /// Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>decimal sum</returns> public static decimal SumToDecimalSseEvenFasterPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, ParallelAlgorithms.Sum.SumToDecimalSseEvenFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core. /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a decimal for perfect accuracy. /// Will not trow an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>decimal sum</returns> public static decimal SumToDecimalSseEvenFasterPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, ParallelAlgorithms.Sum.SumToDecimalSseEvenFasterInner, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, for higher performance within each core. /// Uses a BigInteger accumulator for perfect accuracy. Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToBigInteger, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, for higher performance within each core. /// Uses a BigInteger accumulator for perfect accuracy. Will not trow an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToBigInteger, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, for higher performance within each core. /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy. /// Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerFasterPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return SumToBigIntegerFasterParInner(arrayToSum, 0, arrayToSum.Length - 1, thresholdParallel); //return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToBigIntegerFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, for higher performance within each core. /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy. /// Will not trow an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerFasterPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return SumToBigIntegerFasterParInner(arrayToSum, startIndex, startIndex + length - 1, thresholdParallel); //return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToBigIntegerFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core. /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy. /// Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerSseFasterPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return SumToBigIntegerSseFasterParInner(arrayToSum, 0, arrayToSum.Length - 1, thresholdParallel); //return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, ParallelAlgorithms.Sum.SumToBigIntegerSseFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core. /// Uses a ulong accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy. /// Will not trow an overflow exception. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerSseFasterPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return SumToBigIntegerSseFasterParInner(arrayToSum, startIndex, startIndex + length - 1, thresholdParallel); //return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, ParallelAlgorithms.Sum.SumToBigIntegerSseFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core. /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy. /// Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerSseEvenFasterPar(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return (BigInteger)SumToDecimalSseEvenFasterPar(arrayToSum, thresholdParallel, degreeOfParallelism); //return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, ParallelAlgorithms.Sum.SumToBigIntegerSseEvenFaster, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions on each core, for higher performance within each core. /// Uses a 128-bit accumulator for faster performance while detecting overflow without exceptions and returning a BigInteger for perfect accuracy. 
/// Will not trow an overflow exception. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumToBigIntegerSseEvenFasterPar(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return (BigInteger)SumToDecimalSseEvenFasterPar(arrayToSum, startIndex, length, thresholdParallel, degreeOfParallelism); //return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, ParallelAlgorithms.Sum.SumToBigIntegerSseEvenFasterInner, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>ulong sum</returns> private static ulong SumSseParDac(this ulong[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of ulong[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>ulong sum</returns> private static ulong SumSseParDac(this ulong[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>float sum</returns> public static float SumPar(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>float sum</returns> public static float SumPar(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. 
/// Uses a double accumulator for higher accuracy. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>float sum</returns> public static double SumToDoublePar(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToDouble, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a double accumulator for higher accuracy. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>float sum</returns> public static double SumToDoublePar(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToDouble, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>float sum</returns> public static float SumSsePar(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>float sum</returns> public static float SumSsePar(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a double accumulator for higher accuracy. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSsePar(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of float[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// Uses a double accumulator for higher accuracy. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSsePar(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of double[] array, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumPar(this double[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of double[] array, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumPar(this double[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of double[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumSsePar(this double[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of double[] array, using multiple cores, and using data parallel SIMD/SSE instructions for higher performance within each core. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumSsePar(this double[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, SumSse, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>float sum</returns> public static float SumParMostAccurate(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>float sum</returns> public static float SumParMostAccurate(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using a double accumulator for higher accuracy, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleParMostAccurate(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumToDoubleMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using a double accumulator for higher accuracy, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleParMostAccurate(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, Algorithms.Sum.SumToDoubleMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using data parallel SIMD/SSE instructions for higher performance within each core, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>float sum</returns> public static float SumSseParMostAccurate(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, SumSseMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using data parallel SIMD/SSE instructions for higher performance within each core, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>float sum</returns> public static float SumSseParMostAccurate(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, SumSseMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using a double precision accumulator for higher accuracy, using data parallel SIMD/SSE instructions for higher performance within each core, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSseParMostAccurate(this float[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, 0, arrayToSum.Length, SumToDoubleSseMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of float[] array, using a more accurate Kahan summation algorithm, using a double precision accumulator for higher accuracy, using data parallel SIMD/SSE instructions for higher performance within each core, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumToDoubleSseParMostAccurate(this float[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerTwoTypesPar(arrayToSum, startIndex, length, SumToDoubleSseMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of double[] array, using a more accurate Kahan summation algorithm, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumParMostAccurate(this double[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of double[] array, using a more accurate Kahan summation algorithm, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumParMostAccurate(this double[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, Algorithms.Sum.SumMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of double[] array, using a more accurate Kahan summation algorithm, using data parallel SIMD/SSE instructions for higher performance within each core, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>double sum</returns> public static double SumSseParMostAccurate(this double[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, SumSseMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Implementation of the Neumaier variation of Kahan floating-point summation: more accurate than for loop summation. /// Summation of double[] array, using a more accurate Kahan summation algorithm, using data parallel SIMD/SSE instructions for higher performance within each core, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>double sum</returns> public static double SumSseParMostAccurate(this double[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, SumSseMostAccurate, Algorithms.Sum.SumMostAccurate, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of decimal[] array, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>decimal sum</returns> public static decimal SumPar(this decimal[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of decimal[] array, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>decimal sum</returns> public static decimal SumPar(this decimal[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of BigInteger[] array, using multiple cores. 
/// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <returns>BigInteger sum</returns> public static BigInteger SumPar(this BigInteger[] arrayToSum, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, 0, arrayToSum.Length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } /// <summary> /// Summation of BigInteger[] array, using multiple cores. /// </summary> /// <param name="arrayToSum">An array to sum up</param> /// <param name="startIndex">index of the starting element for the summation</param> /// <param name="length">number of array elements to sum up</param> /// <returns>decimal sum</returns> public static BigInteger SumPar(this BigInteger[] arrayToSum, int startIndex, int length, int thresholdParallel = 16 * 1024, int degreeOfParallelism = 0) { return AlgorithmPatterns.DivideAndConquerPar(arrayToSum, startIndex, length, Algorithms.Sum.SumHpc, (x, y) => x + y, thresholdParallel, degreeOfParallelism); } } }
58.18602
254
0.622895
[ "Apache-2.0" ]
DragonSpit/CsharpFreeParallelAlgorithms
HPCsharp/SumParallel.cs
201,442
C#
using System; namespace POETradeHelper.ItemSearch.ViewModels { public class BindableSocketsFilterViewModel : BindableMinMaxFilterViewModel { public BindableSocketsFilterViewModel(System.Linq.Expressions.Expression<Func<PathOfExileTradeApi.Models.SearchQueryRequest, PathOfExileTradeApi.Models.Filters.IFilter>> bindingExpression) : base(bindingExpression) { } public int? Red { get; set; } public int? Green { get; set; } public int? Blue { get; set; } public int? White { get; set; } } }
33.588235
196
0.686515
[ "Apache-2.0", "MIT" ]
alueck/POE-TradeHelper
Source/POETradeHelper.ItemSearch/ViewModels/AdvancedQuery/BindableSocketsFilterViewModel.cs
573
C#
using Microsoft.EntityFrameworkCore.Migrations; namespace CustomerManagement.Domain.Migrations { public partial class Init : Migration { protected override void Up(MigrationBuilder migrationBuilder) { migrationBuilder.EnsureSchema( name: "CustomerManagement"); migrationBuilder.CreateTable( name: "Customers", schema: "CustomerManagement", columns: table => new { Id = table.Column<long>(nullable: false) .Annotation("SqlServer:Identity", "1, 1"), FirstName = table.Column<string>(maxLength: 50, nullable: false), LastName = table.Column<string>(maxLength: 200, nullable: false), Email = table.Column<string>(nullable: true), IsActive = table.Column<bool>(nullable: false) }, constraints: table => { table.PrimaryKey("PK_Customers", x => x.Id); }); } protected override void Down(MigrationBuilder migrationBuilder) { migrationBuilder.DropTable( name: "Customers", schema: "CustomerManagement"); } } }
34.763158
85
0.526874
[ "MIT" ]
RezaJenabi/Ecommerce
Src/Services/CustomerManagement/CustomerManagement.Domain/Migrations/20200326005410_Init.cs
1,323
C#
using System.Collections; using System.Collections.Generic; using UnityEngine; public class LegController : MonoBehaviour { [SerializeField] Player player; [SerializeField] float playerRad = 1.2f; [SerializeField] List<Leg> legs; void Update() { transform.position = player.transform.position; } void OnTriggerEnter2D(Collider2D collision) { if (collision.tag == "Food" || collision.tag == "LegPlace") { Leg leg = null; bool findEmpty = false; byte minAngle = 0; float angle = float.MaxValue; for (byte i = 0; i < legs.Count; ++i) { if ((legs[i].target == null) && legs[i].GetAngleTo(collision.transform.position) < angle) { minAngle = i; angle = legs[minAngle].GetAngleTo(collision.transform.position); findEmpty = true; } } if (!findEmpty) { minAngle = 0; angle = legs[minAngle].GetAngleTo(collision.transform.position); for(byte i = 1; i < legs.Count; ++i) { if(legs[i].GetAngleTo(collision.transform.position) < angle) { minAngle = i; angle = legs[minAngle].GetAngleTo(collision.transform.position); } } } leg = legs[minAngle]; leg.SetNewTarget(collision.gameObject); } } void OnTriggerExit2D(Collider2D collision) { if (collision.tag == "Food" || collision.tag == "LegPlace") { foreach (var leg in legs) { if (leg.target == collision.gameObject) { leg.SetNewTarget(null); break; } } } } }
24.389831
95
0.650452
[ "MIT" ]
Team-on/13-legs
13-legs/Assets/Scripts/Game/Player/LegController.cs
1,441
C#
// Generated from https://github.com/nuke-build/nuke/blob/master/build/specifications/CoverallsNet.json using JetBrains.Annotations; using Newtonsoft.Json; using Nuke.Common; using Nuke.Common.Execution; using Nuke.Common.Tooling; using Nuke.Common.Tools; using Nuke.Common.Utilities.Collections; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; using System.IO; using System.Linq; using System.Text; namespace Nuke.Common.Tools.CoverallsNet { /// <summary> /// <p>Coveralls uploader for .Net Code coverage of your C# source code. Should work with any code files that get reported with the supported coverage tools, but the primary focus is CSharp.</p> /// <p>For more details, visit the <a href="https://coverallsnet.readthedocs.io">official website</a>.</p> /// </summary> [PublicAPI] [ExcludeFromCodeCoverage] public static partial class CoverallsNetTasks { /// <summary> /// Path to the CoverallsNet executable. /// </summary> public static string CoverallsNetPath => ToolPathResolver.TryGetEnvironmentExecutable("COVERALLSNET_EXE") ?? ToolPathResolver.GetPackageExecutable("coveralls.net", "csmacnz.Coveralls.dll"); public static Action<OutputType, string> CoverallsNetLogger { get; set; } = ProcessTasks.DefaultLogger; /// <summary> /// <p>Coveralls uploader for .Net Code coverage of your C# source code. Should work with any code files that get reported with the supported coverage tools, but the primary focus is CSharp.</p> /// <p>For more details, visit the <a href="https://coverallsnet.readthedocs.io">official website</a>.</p> /// </summary> public static IReadOnlyCollection<Output> CoverallsNet(string arguments, string workingDirectory = null, IReadOnlyDictionary<string, string> environmentVariables = null, int? timeout = null, bool? logOutput = null, bool? logInvocation = null, bool? 
logTimestamp = null, string logFile = null, Func<string, string> outputFilter = null) { using var process = ProcessTasks.StartProcess(CoverallsNetPath, arguments, workingDirectory, environmentVariables, timeout, logOutput, logInvocation, logTimestamp, logFile, CoverallsNetLogger, outputFilter); process.AssertZeroExitCode(); return process.Output; } /// <summary> /// <p>Coveralls uploader for .Net Code coverage of your C# source code. Should work with any code files that get reported with the supported coverage tools, but the primary focus is CSharp.</p> /// <p>For more details, visit the <a href="https://coverallsnet.readthedocs.io">official website</a>.</p> /// </summary> /// <remarks> /// <p>This is a <a href="http://www.nuke.build/docs/authoring-builds/cli-tools.html#fluent-apis">CLI wrapper with fluent API</a> that allows to modify the following arguments:</p> /// <ul> /// <li><c>--basePath</c> via <see cref="CoverallsNetSettings.BasePath"/></li> /// <li><c>--commitAuthor</c> via <see cref="CoverallsNetSettings.CommitAuthor"/></li> /// <li><c>--commitBranch</c> via <see cref="CoverallsNetSettings.CommitBranch"/></li> /// <li><c>--commitEmail</c> via <see cref="CoverallsNetSettings.CommitEmail"/></li> /// <li><c>--commitId</c> via <see cref="CoverallsNetSettings.CommitId"/></li> /// <li><c>--commitMessage</c> via <see cref="CoverallsNetSettings.CommitMessage"/></li> /// <li><c>--dryrun</c> via <see cref="CoverallsNetSettings.DryRun"/></li> /// <li><c>--dynamiccodecoverage</c> via <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></li> /// <li><c>--exportcodecoverage</c> via <see cref="CoverallsNetSettings.ExportCodeCoverage"/></li> /// <li><c>--input</c> via <see cref="CoverallsNetSettings.Input"/></li> /// <li><c>--jobId</c> via <see cref="CoverallsNetSettings.JobId"/></li> /// <li><c>--monocov</c> via <see cref="CoverallsNetSettings.Monocov"/></li> /// <li><c>--opencover</c> via <see cref="CoverallsNetSettings.OpenCover"/></li> /// <li><c>--output</c> via 
<see cref="CoverallsNetSettings.Output"/></li> /// <li><c>--pullRequest</c> via <see cref="CoverallsNetSettings.PullRequest"/></li> /// <li><c>--repoToken</c> via <see cref="CoverallsNetSettings.RepoToken"/></li> /// <li><c>--repoTokenVariable</c> via <see cref="CoverallsNetSettings.RepoTokenVariable"/></li> /// <li><c>--serviceName</c> via <see cref="CoverallsNetSettings.ServiceName"/></li> /// <li><c>--useRelativePaths</c> via <see cref="CoverallsNetSettings.UserRelativePaths"/></li> /// </ul> /// </remarks> public static IReadOnlyCollection<Output> CoverallsNet(CoverallsNetSettings toolSettings = null) { toolSettings = toolSettings ?? new CoverallsNetSettings(); using var process = ProcessTasks.StartProcess(toolSettings); process.AssertZeroExitCode(); return process.Output; } /// <summary> /// <p>Coveralls uploader for .Net Code coverage of your C# source code. Should work with any code files that get reported with the supported coverage tools, but the primary focus is CSharp.</p> /// <p>For more details, visit the <a href="https://coverallsnet.readthedocs.io">official website</a>.</p> /// </summary> /// <remarks> /// <p>This is a <a href="http://www.nuke.build/docs/authoring-builds/cli-tools.html#fluent-apis">CLI wrapper with fluent API</a> that allows to modify the following arguments:</p> /// <ul> /// <li><c>--basePath</c> via <see cref="CoverallsNetSettings.BasePath"/></li> /// <li><c>--commitAuthor</c> via <see cref="CoverallsNetSettings.CommitAuthor"/></li> /// <li><c>--commitBranch</c> via <see cref="CoverallsNetSettings.CommitBranch"/></li> /// <li><c>--commitEmail</c> via <see cref="CoverallsNetSettings.CommitEmail"/></li> /// <li><c>--commitId</c> via <see cref="CoverallsNetSettings.CommitId"/></li> /// <li><c>--commitMessage</c> via <see cref="CoverallsNetSettings.CommitMessage"/></li> /// <li><c>--dryrun</c> via <see cref="CoverallsNetSettings.DryRun"/></li> /// <li><c>--dynamiccodecoverage</c> via <see 
cref="CoverallsNetSettings.DynamicCodeCoverage"/></li> /// <li><c>--exportcodecoverage</c> via <see cref="CoverallsNetSettings.ExportCodeCoverage"/></li> /// <li><c>--input</c> via <see cref="CoverallsNetSettings.Input"/></li> /// <li><c>--jobId</c> via <see cref="CoverallsNetSettings.JobId"/></li> /// <li><c>--monocov</c> via <see cref="CoverallsNetSettings.Monocov"/></li> /// <li><c>--opencover</c> via <see cref="CoverallsNetSettings.OpenCover"/></li> /// <li><c>--output</c> via <see cref="CoverallsNetSettings.Output"/></li> /// <li><c>--pullRequest</c> via <see cref="CoverallsNetSettings.PullRequest"/></li> /// <li><c>--repoToken</c> via <see cref="CoverallsNetSettings.RepoToken"/></li> /// <li><c>--repoTokenVariable</c> via <see cref="CoverallsNetSettings.RepoTokenVariable"/></li> /// <li><c>--serviceName</c> via <see cref="CoverallsNetSettings.ServiceName"/></li> /// <li><c>--useRelativePaths</c> via <see cref="CoverallsNetSettings.UserRelativePaths"/></li> /// </ul> /// </remarks> public static IReadOnlyCollection<Output> CoverallsNet(Configure<CoverallsNetSettings> configurator) { return CoverallsNet(configurator(new CoverallsNetSettings())); } /// <summary> /// <p>Coveralls uploader for .Net Code coverage of your C# source code. 
Should work with any code files that get reported with the supported coverage tools, but the primary focus is CSharp.</p> /// <p>For more details, visit the <a href="https://coverallsnet.readthedocs.io">official website</a>.</p> /// </summary> /// <remarks> /// <p>This is a <a href="http://www.nuke.build/docs/authoring-builds/cli-tools.html#fluent-apis">CLI wrapper with fluent API</a> that allows to modify the following arguments:</p> /// <ul> /// <li><c>--basePath</c> via <see cref="CoverallsNetSettings.BasePath"/></li> /// <li><c>--commitAuthor</c> via <see cref="CoverallsNetSettings.CommitAuthor"/></li> /// <li><c>--commitBranch</c> via <see cref="CoverallsNetSettings.CommitBranch"/></li> /// <li><c>--commitEmail</c> via <see cref="CoverallsNetSettings.CommitEmail"/></li> /// <li><c>--commitId</c> via <see cref="CoverallsNetSettings.CommitId"/></li> /// <li><c>--commitMessage</c> via <see cref="CoverallsNetSettings.CommitMessage"/></li> /// <li><c>--dryrun</c> via <see cref="CoverallsNetSettings.DryRun"/></li> /// <li><c>--dynamiccodecoverage</c> via <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></li> /// <li><c>--exportcodecoverage</c> via <see cref="CoverallsNetSettings.ExportCodeCoverage"/></li> /// <li><c>--input</c> via <see cref="CoverallsNetSettings.Input"/></li> /// <li><c>--jobId</c> via <see cref="CoverallsNetSettings.JobId"/></li> /// <li><c>--monocov</c> via <see cref="CoverallsNetSettings.Monocov"/></li> /// <li><c>--opencover</c> via <see cref="CoverallsNetSettings.OpenCover"/></li> /// <li><c>--output</c> via <see cref="CoverallsNetSettings.Output"/></li> /// <li><c>--pullRequest</c> via <see cref="CoverallsNetSettings.PullRequest"/></li> /// <li><c>--repoToken</c> via <see cref="CoverallsNetSettings.RepoToken"/></li> /// <li><c>--repoTokenVariable</c> via <see cref="CoverallsNetSettings.RepoTokenVariable"/></li> /// <li><c>--serviceName</c> via <see cref="CoverallsNetSettings.ServiceName"/></li> /// <li><c>--useRelativePaths</c> via <see 
cref="CoverallsNetSettings.UserRelativePaths"/></li> /// </ul> /// </remarks> public static IEnumerable<(CoverallsNetSettings Settings, IReadOnlyCollection<Output> Output)> CoverallsNet(CombinatorialConfigure<CoverallsNetSettings> configurator, int degreeOfParallelism = 1, bool completeOnFailure = false) { return configurator.Invoke(CoverallsNet, CoverallsNetLogger, degreeOfParallelism, completeOnFailure); } } #region CoverallsNetSettings /// <summary> /// Used within <see cref="CoverallsNetTasks"/>. /// </summary> [PublicAPI] [ExcludeFromCodeCoverage] [Serializable] public partial class CoverallsNetSettings : ToolSettings { /// <summary> /// Path to the CoverallsNet executable. /// </summary> public override string ToolPath => base.ToolPath ?? CoverallsNetTasks.CoverallsNetPath; public override Action<OutputType, string> CustomLogger => CoverallsNetTasks.CoverallsNetLogger; /// <summary> /// The coverage source file location. /// </summary> public virtual string Input { get; internal set; } /// <summary> /// The coverage results json will be written to this file it provided. /// </summary> public virtual string Output { get; internal set; } /// <summary> /// This flag will stop coverage results being posted to <a href="https://coveralls.io">coveralls.io</a>. /// </summary> public virtual bool? DryRun { get; internal set; } /// <summary> /// This flag, when provided, will attempt to strip the current working directory from the beginning of the source file path. /// </summary> public virtual bool? UserRelativePaths { get; internal set; } /// <summary> /// When useRelativePaths and a basePath is provided, this path is used instead of the current working directory. /// </summary> public virtual string BasePath { get; internal set; } /// <summary> /// Reads input as OpenCover data. /// </summary> public virtual bool? OpenCover { get; internal set; } /// <summary> /// Reads input as the CodeCoverage.exe xml format. /// </summary> public virtual bool? 
DynamicCodeCoverage { get; internal set; } /// <summary> /// Reads input as the Visual Studio Coverage Export xml format. /// </summary> public virtual bool? ExportCodeCoverage { get; internal set; } /// <summary> /// Reads input as monocov results folder. /// </summary> public virtual bool? Monocov { get; internal set; } /// <summary> /// The coveralls.io repository token. /// </summary> public virtual string RepoToken { get; internal set; } /// <summary> /// The Environment Variable name where the coveralls.io repository token is available. Default is <c>COVERALLS_REPO_TOKEN</c>. /// </summary> public virtual string RepoTokenVariable { get; internal set; } /// <summary> /// The git commit hash for the coverage report. /// </summary> public virtual string CommitId { get; internal set; } /// <summary> /// The git branch for the coverage report. /// </summary> public virtual string CommitBranch { get; internal set; } /// <summary> /// The git commit author for the coverage report. /// </summary> public virtual string CommitAuthor { get; internal set; } /// <summary> /// The git commit author email for the coverage report. /// </summary> public virtual string CommitEmail { get; internal set; } /// <summary> /// The git commit message for the coverage report. /// </summary> public virtual string CommitMessage { get; internal set; } /// <summary> /// The job Id to provide to coveralls.io. Default is <c>0</c>. /// </summary> public virtual int? JobId { get; internal set; } /// <summary> /// The service-name for the coverage report. Default is <c>coveralls.net</c>. /// </summary> public virtual string ServiceName { get; internal set; } /// <summary> /// The github pull request id. Used for updating status on github PRs. /// </summary> public virtual int? 
PullRequest { get; internal set; } protected override Arguments ConfigureArguments(Arguments arguments) { arguments .Add("--input {value}", Input) .Add("--output {value}", Output) .Add("--dryrun", DryRun) .Add("--useRelativePaths", UserRelativePaths) .Add("--basePath {value}", BasePath) .Add("--opencover", OpenCover) .Add("--dynamiccodecoverage", DynamicCodeCoverage) .Add("--exportcodecoverage", ExportCodeCoverage) .Add("--monocov", Monocov) .Add("--repoToken {value}", RepoToken, secret: true) .Add("--repoTokenVariable {value}", RepoTokenVariable) .Add("--commitId {value}", CommitId) .Add("--commitBranch {value}", CommitBranch) .Add("--commitAuthor {value}", CommitAuthor) .Add("--commitEmail {value}", CommitEmail) .Add("--commitMessage {value}", CommitMessage) .Add("--jobId {value}", JobId) .Add("--serviceName {value}", ServiceName) .Add("--pullRequest {value}", PullRequest); return base.ConfigureArguments(arguments); } } #endregion #region CoverallsNetSettingsExtensions /// <summary> /// Used within <see cref="CoverallsNetTasks"/>. 
/// </summary> [PublicAPI] [ExcludeFromCodeCoverage] public static partial class CoverallsNetSettingsExtensions { #region Input /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.Input"/></em></p> /// <p>The coverage source file location.</p> /// </summary> [Pure] public static T SetInput<T>(this T toolSettings, string input) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Input = input; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.Input"/></em></p> /// <p>The coverage source file location.</p> /// </summary> [Pure] public static T ResetInput<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Input = null; return toolSettings; } #endregion #region Output /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.Output"/></em></p> /// <p>The coverage results json will be written to this file it provided.</p> /// </summary> [Pure] public static T SetOutput<T>(this T toolSettings, string output) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Output = output; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.Output"/></em></p> /// <p>The coverage results json will be written to this file it provided.</p> /// </summary> [Pure] public static T ResetOutput<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Output = null; return toolSettings; } #endregion #region DryRun /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.DryRun"/></em></p> /// <p>This flag will stop coverage results being posted to <a href="https://coveralls.io">coveralls.io</a>.</p> /// </summary> [Pure] public static T SetDryRun<T>(this T toolSettings, bool? 
dryRun) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DryRun = dryRun; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.DryRun"/></em></p> /// <p>This flag will stop coverage results being posted to <a href="https://coveralls.io">coveralls.io</a>.</p> /// </summary> [Pure] public static T ResetDryRun<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DryRun = null; return toolSettings; } /// <summary> /// <p><em>Enables <see cref="CoverallsNetSettings.DryRun"/></em></p> /// <p>This flag will stop coverage results being posted to <a href="https://coveralls.io">coveralls.io</a>.</p> /// </summary> [Pure] public static T EnableDryRun<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DryRun = true; return toolSettings; } /// <summary> /// <p><em>Disables <see cref="CoverallsNetSettings.DryRun"/></em></p> /// <p>This flag will stop coverage results being posted to <a href="https://coveralls.io">coveralls.io</a>.</p> /// </summary> [Pure] public static T DisableDryRun<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DryRun = false; return toolSettings; } /// <summary> /// <p><em>Toggles <see cref="CoverallsNetSettings.DryRun"/></em></p> /// <p>This flag will stop coverage results being posted to <a href="https://coveralls.io">coveralls.io</a>.</p> /// </summary> [Pure] public static T ToggleDryRun<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DryRun = !toolSettings.DryRun; return toolSettings; } #endregion #region UserRelativePaths /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.UserRelativePaths"/></em></p> /// <p>This flag, when provided, will attempt to strip the current working directory from the beginning of the source 
file path.</p> /// </summary> [Pure] public static T SetUserRelativePaths<T>(this T toolSettings, bool? userRelativePaths) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.UserRelativePaths = userRelativePaths; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.UserRelativePaths"/></em></p> /// <p>This flag, when provided, will attempt to strip the current working directory from the beginning of the source file path.</p> /// </summary> [Pure] public static T ResetUserRelativePaths<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.UserRelativePaths = null; return toolSettings; } /// <summary> /// <p><em>Enables <see cref="CoverallsNetSettings.UserRelativePaths"/></em></p> /// <p>This flag, when provided, will attempt to strip the current working directory from the beginning of the source file path.</p> /// </summary> [Pure] public static T EnableUserRelativePaths<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.UserRelativePaths = true; return toolSettings; } /// <summary> /// <p><em>Disables <see cref="CoverallsNetSettings.UserRelativePaths"/></em></p> /// <p>This flag, when provided, will attempt to strip the current working directory from the beginning of the source file path.</p> /// </summary> [Pure] public static T DisableUserRelativePaths<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.UserRelativePaths = false; return toolSettings; } /// <summary> /// <p><em>Toggles <see cref="CoverallsNetSettings.UserRelativePaths"/></em></p> /// <p>This flag, when provided, will attempt to strip the current working directory from the beginning of the source file path.</p> /// </summary> [Pure] public static T ToggleUserRelativePaths<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = 
toolSettings.NewInstance(); toolSettings.UserRelativePaths = !toolSettings.UserRelativePaths; return toolSettings; } #endregion #region BasePath /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.BasePath"/></em></p> /// <p>When useRelativePaths and a basePath is provided, this path is used instead of the current working directory.</p> /// </summary> [Pure] public static T SetBasePath<T>(this T toolSettings, string basePath) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.BasePath = basePath; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.BasePath"/></em></p> /// <p>When useRelativePaths and a basePath is provided, this path is used instead of the current working directory.</p> /// </summary> [Pure] public static T ResetBasePath<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.BasePath = null; return toolSettings; } #endregion #region OpenCover /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.OpenCover"/></em></p> /// <p>Reads input as OpenCover data.</p> /// </summary> [Pure] public static T SetOpenCover<T>(this T toolSettings, bool? 
openCover) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.OpenCover = openCover; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.OpenCover"/></em></p> /// <p>Reads input as OpenCover data.</p> /// </summary> [Pure] public static T ResetOpenCover<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.OpenCover = null; return toolSettings; } /// <summary> /// <p><em>Enables <see cref="CoverallsNetSettings.OpenCover"/></em></p> /// <p>Reads input as OpenCover data.</p> /// </summary> [Pure] public static T EnableOpenCover<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.OpenCover = true; return toolSettings; } /// <summary> /// <p><em>Disables <see cref="CoverallsNetSettings.OpenCover"/></em></p> /// <p>Reads input as OpenCover data.</p> /// </summary> [Pure] public static T DisableOpenCover<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.OpenCover = false; return toolSettings; } /// <summary> /// <p><em>Toggles <see cref="CoverallsNetSettings.OpenCover"/></em></p> /// <p>Reads input as OpenCover data.</p> /// </summary> [Pure] public static T ToggleOpenCover<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.OpenCover = !toolSettings.OpenCover; return toolSettings; } #endregion #region DynamicCodeCoverage /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></em></p> /// <p>Reads input as the CodeCoverage.exe xml format.</p> /// </summary> [Pure] public static T SetDynamicCodeCoverage<T>(this T toolSettings, bool? 
dynamicCodeCoverage) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DynamicCodeCoverage = dynamicCodeCoverage; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></em></p> /// <p>Reads input as the CodeCoverage.exe xml format.</p> /// </summary> [Pure] public static T ResetDynamicCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DynamicCodeCoverage = null; return toolSettings; } /// <summary> /// <p><em>Enables <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></em></p> /// <p>Reads input as the CodeCoverage.exe xml format.</p> /// </summary> [Pure] public static T EnableDynamicCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DynamicCodeCoverage = true; return toolSettings; } /// <summary> /// <p><em>Disables <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></em></p> /// <p>Reads input as the CodeCoverage.exe xml format.</p> /// </summary> [Pure] public static T DisableDynamicCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DynamicCodeCoverage = false; return toolSettings; } /// <summary> /// <p><em>Toggles <see cref="CoverallsNetSettings.DynamicCodeCoverage"/></em></p> /// <p>Reads input as the CodeCoverage.exe xml format.</p> /// </summary> [Pure] public static T ToggleDynamicCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.DynamicCodeCoverage = !toolSettings.DynamicCodeCoverage; return toolSettings; } #endregion #region ExportCodeCoverage /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.ExportCodeCoverage"/></em></p> /// <p>Reads input as the Visual Studio Coverage Export xml format.</p> /// </summary> [Pure] public static T 
SetExportCodeCoverage<T>(this T toolSettings, bool? exportCodeCoverage) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ExportCodeCoverage = exportCodeCoverage; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.ExportCodeCoverage"/></em></p> /// <p>Reads input as the Visual Studio Coverage Export xml format.</p> /// </summary> [Pure] public static T ResetExportCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ExportCodeCoverage = null; return toolSettings; } /// <summary> /// <p><em>Enables <see cref="CoverallsNetSettings.ExportCodeCoverage"/></em></p> /// <p>Reads input as the Visual Studio Coverage Export xml format.</p> /// </summary> [Pure] public static T EnableExportCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ExportCodeCoverage = true; return toolSettings; } /// <summary> /// <p><em>Disables <see cref="CoverallsNetSettings.ExportCodeCoverage"/></em></p> /// <p>Reads input as the Visual Studio Coverage Export xml format.</p> /// </summary> [Pure] public static T DisableExportCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ExportCodeCoverage = false; return toolSettings; } /// <summary> /// <p><em>Toggles <see cref="CoverallsNetSettings.ExportCodeCoverage"/></em></p> /// <p>Reads input as the Visual Studio Coverage Export xml format.</p> /// </summary> [Pure] public static T ToggleExportCodeCoverage<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ExportCodeCoverage = !toolSettings.ExportCodeCoverage; return toolSettings; } #endregion #region Monocov /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.Monocov"/></em></p> /// <p>Reads input as monocov results folder.</p> /// 
</summary> [Pure] public static T SetMonocov<T>(this T toolSettings, bool? monocov) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Monocov = monocov; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.Monocov"/></em></p> /// <p>Reads input as monocov results folder.</p> /// </summary> [Pure] public static T ResetMonocov<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Monocov = null; return toolSettings; } /// <summary> /// <p><em>Enables <see cref="CoverallsNetSettings.Monocov"/></em></p> /// <p>Reads input as monocov results folder.</p> /// </summary> [Pure] public static T EnableMonocov<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Monocov = true; return toolSettings; } /// <summary> /// <p><em>Disables <see cref="CoverallsNetSettings.Monocov"/></em></p> /// <p>Reads input as monocov results folder.</p> /// </summary> [Pure] public static T DisableMonocov<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Monocov = false; return toolSettings; } /// <summary> /// <p><em>Toggles <see cref="CoverallsNetSettings.Monocov"/></em></p> /// <p>Reads input as monocov results folder.</p> /// </summary> [Pure] public static T ToggleMonocov<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.Monocov = !toolSettings.Monocov; return toolSettings; } #endregion #region RepoToken /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.RepoToken"/></em></p> /// <p>The coveralls.io repository token.</p> /// </summary> [Pure] public static T SetRepoToken<T>(this T toolSettings, string repoToken) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.RepoToken = repoToken; return toolSettings; } /// <summary> /// 
<p><em>Resets <see cref="CoverallsNetSettings.RepoToken"/></em></p> /// <p>The coveralls.io repository token.</p> /// </summary> [Pure] public static T ResetRepoToken<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.RepoToken = null; return toolSettings; } #endregion #region RepoTokenVariable /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.RepoTokenVariable"/></em></p> /// <p>The Environment Variable name where the coveralls.io repository token is available. Default is <c>COVERALLS_REPO_TOKEN</c>.</p> /// </summary> [Pure] public static T SetRepoTokenVariable<T>(this T toolSettings, string repoTokenVariable) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.RepoTokenVariable = repoTokenVariable; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.RepoTokenVariable"/></em></p> /// <p>The Environment Variable name where the coveralls.io repository token is available. 
Default is <c>COVERALLS_REPO_TOKEN</c>.</p> /// </summary> [Pure] public static T ResetRepoTokenVariable<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.RepoTokenVariable = null; return toolSettings; } #endregion #region CommitId /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.CommitId"/></em></p> /// <p>The git commit hash for the coverage report.</p> /// </summary> [Pure] public static T SetCommitId<T>(this T toolSettings, string commitId) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitId = commitId; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.CommitId"/></em></p> /// <p>The git commit hash for the coverage report.</p> /// </summary> [Pure] public static T ResetCommitId<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitId = null; return toolSettings; } #endregion #region CommitBranch /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.CommitBranch"/></em></p> /// <p>The git branch for the coverage report.</p> /// </summary> [Pure] public static T SetCommitBranch<T>(this T toolSettings, string commitBranch) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitBranch = commitBranch; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.CommitBranch"/></em></p> /// <p>The git branch for the coverage report.</p> /// </summary> [Pure] public static T ResetCommitBranch<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitBranch = null; return toolSettings; } #endregion #region CommitAuthor /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.CommitAuthor"/></em></p> /// <p>The git commit author for the coverage report.</p> /// </summary> [Pure] public static T 
SetCommitAuthor<T>(this T toolSettings, string commitAuthor) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitAuthor = commitAuthor; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.CommitAuthor"/></em></p> /// <p>The git commit author for the coverage report.</p> /// </summary> [Pure] public static T ResetCommitAuthor<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitAuthor = null; return toolSettings; } #endregion #region CommitEmail /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.CommitEmail"/></em></p> /// <p>The git commit author email for the coverage report.</p> /// </summary> [Pure] public static T SetCommitEmail<T>(this T toolSettings, string commitEmail) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitEmail = commitEmail; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.CommitEmail"/></em></p> /// <p>The git commit author email for the coverage report.</p> /// </summary> [Pure] public static T ResetCommitEmail<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitEmail = null; return toolSettings; } #endregion #region CommitMessage /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.CommitMessage"/></em></p> /// <p>The git commit message for the coverage report.</p> /// </summary> [Pure] public static T SetCommitMessage<T>(this T toolSettings, string commitMessage) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitMessage = commitMessage; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.CommitMessage"/></em></p> /// <p>The git commit message for the coverage report.</p> /// </summary> [Pure] public static T ResetCommitMessage<T>(this T toolSettings) 
where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.CommitMessage = null; return toolSettings; } #endregion #region JobId /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.JobId"/></em></p> /// <p>The job Id to provide to coveralls.io. Default is <c>0</c>.</p> /// </summary> [Pure] public static T SetJobId<T>(this T toolSettings, int? jobId) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.JobId = jobId; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.JobId"/></em></p> /// <p>The job Id to provide to coveralls.io. Default is <c>0</c>.</p> /// </summary> [Pure] public static T ResetJobId<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.JobId = null; return toolSettings; } #endregion #region ServiceName /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.ServiceName"/></em></p> /// <p>The service-name for the coverage report. Default is <c>coveralls.net</c>.</p> /// </summary> [Pure] public static T SetServiceName<T>(this T toolSettings, string serviceName) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ServiceName = serviceName; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.ServiceName"/></em></p> /// <p>The service-name for the coverage report. Default is <c>coveralls.net</c>.</p> /// </summary> [Pure] public static T ResetServiceName<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.ServiceName = null; return toolSettings; } #endregion #region PullRequest /// <summary> /// <p><em>Sets <see cref="CoverallsNetSettings.PullRequest"/></em></p> /// <p>The github pull request id. Used for updating status on github PRs.</p> /// </summary> [Pure] public static T SetPullRequest<T>(this T toolSettings, int? 
pullRequest) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.PullRequest = pullRequest; return toolSettings; } /// <summary> /// <p><em>Resets <see cref="CoverallsNetSettings.PullRequest"/></em></p> /// <p>The github pull request id. Used for updating status on github PRs.</p> /// </summary> [Pure] public static T ResetPullRequest<T>(this T toolSettings) where T : CoverallsNetSettings { toolSettings = toolSettings.NewInstance(); toolSettings.PullRequest = null; return toolSettings; } #endregion } #endregion }
48.31931
342
0.594057
[ "MIT" ]
KonH/nuke
source/Nuke.Common/Tools/CoverallsNet/CoverallsNet.Generated.cs
44,792
C#
// -----------------------------------------------------------------------------
// 让 .NET 开发更简单,更通用,更流行。
// Copyright © 2020-2021 Furion, 百小僧, Baiqian Co.,Ltd.
//
// 框架名称:Furion
// 框架作者:百小僧
// 框架版本:2.7.9
// 源码地址:Gitee: https://gitee.com/dotnetchina/Furion
//           Github:https://github.com/monksoul/Furion
// 开源协议:Apache-2.0(https://gitee.com/dotnetchina/Furion/blob/master/LICENSE)
// -----------------------------------------------------------------------------

using Furion.DependencyInjection;
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;

namespace Furion.DatabaseAccessor
{
    /// <summary>
    /// Internal helpers for the database accessor: converting models into
    /// <see cref="DbParameter"/> collections, generating function-call SQL and
    /// wrapping stored-procedure output results.
    /// </summary>
    [SkipScan]
    internal static class DbHelpers
    {
        /// <summary>
        /// Converts a parameter model (POCO or dictionary) into a <see cref="DbParameter"/> array.
        /// </summary>
        /// <param name="model">The parameter model; may be null.</param>
        /// <param name="dbCommand">The database command used to create provider-specific parameters.</param>
        /// <returns>The created parameters; empty when the model is null or not a class.</returns>
        internal static DbParameter[] ConvertToDbParameters(object model, DbCommand dbCommand)
        {
            var modelType = model?.GetType();

            // Dictionary models have a dedicated conversion path.
            if (modelType == typeof(Dictionary<string, object>)) return ConvertToDbParameters((Dictionary<string, object>)model, dbCommand);

            var dbParameters = new List<DbParameter>();
            if (model == null || !modelType.IsClass) return dbParameters.ToArray();

            // Read all public instance properties of the model.
            var properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            if (properties.Length == 0) return dbParameters.ToArray();

            foreach (var property in properties)
            {
                // Null property values are sent as DBNull, never as CLR null.
                var propertyValue = property.GetValue(model) ?? DBNull.Value;

                // Create a provider-specific command parameter.
                var dbParameter = dbCommand.CreateParameter();

                // [DbParameter]-annotated properties carry extra configuration (direction, type, size).
                if (property.IsDefined(typeof(DbParameterAttribute), true))
                {
                    var dbParameterAttribute = property.GetCustomAttribute<DbParameterAttribute>(true);
                    dbParameters.Add(ConfigureDbParameter(property.Name, propertyValue, dbParameterAttribute, dbParameter));
                    continue;
                }

                dbParameter.ParameterName = property.Name;
                dbParameter.Value = propertyValue;
                dbParameters.Add(dbParameter);
            }

            return dbParameters.ToArray();
        }

        /// <summary>
        /// Converts a dictionary into a <see cref="DbParameter"/> array.
        /// </summary>
        /// <param name="keyValues">Parameter name/value pairs; may be null or empty.</param>
        /// <param name="dbCommand">The database command used to create provider-specific parameters.</param>
        /// <returns>The created parameters; empty when the dictionary is null or empty.</returns>
        internal static DbParameter[] ConvertToDbParameters(Dictionary<string, object> keyValues, DbCommand dbCommand)
        {
            var dbParameters = new List<DbParameter>();
            if (keyValues == null || keyValues.Count == 0) return dbParameters.ToArray();

            foreach (var key in keyValues.Keys)
            {
                // Null values are sent as DBNull, never as CLR null.
                var value = keyValues[key] ?? DBNull.Value;

                var dbParameter = dbCommand.CreateParameter();
                dbParameter.ParameterName = key;
                dbParameter.Value = value;
                dbParameters.Add(dbParameter);
            }

            return dbParameters.ToArray();
        }

        /// <summary>
        /// Applies a <see cref="DbParameterAttribute"/> to a command parameter.
        /// </summary>
        /// <param name="name">The parameter name.</param>
        /// <param name="value">The parameter value.</param>
        /// <param name="dbParameterAttribute">The attribute carrying direction/type/size configuration.</param>
        /// <param name="dbParameter">The command parameter to configure.</param>
        /// <returns>The configured <see cref="DbParameter"/>.</returns>
        internal static DbParameter ConfigureDbParameter(string name, object value, DbParameterAttribute dbParameterAttribute, DbParameter dbParameter)
        {
            dbParameter.ParameterName = name;
            dbParameter.Value = value;
            dbParameter.Direction = dbParameterAttribute.Direction;

            // Only apply DbType when the attribute value really is a DbType enum member.
            if (dbParameterAttribute.DbType != null)
            {
                var type = dbParameterAttribute.DbType.GetType();
                if (type.IsEnum && typeof(DbType).IsAssignableFrom(type))
                {
                    dbParameter.DbType = (DbType)dbParameterAttribute.DbType;
                }
            }

            // Set the size explicitly to avoid NVarchar/Varchar sizing issues.
            if (dbParameterAttribute.Size > 0)
            {
                dbParameter.Size = dbParameterAttribute.Size;
            }

            return dbParameter;
        }

        /// <summary>
        /// Generates the SQL text that invokes a database function with explicit parameters.
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="dbFunctionType">The function type (scalar or table-valued).</param>
        /// <param name="funcName">The function name.</param>
        /// <param name="parameters">The function parameters.</param>
        /// <returns>The generated SQL statement.</returns>
        internal static string GenerateFunctionSql(string providerName, DbFunctionType dbFunctionType, string funcName, params DbParameter[] parameters)
        {
            // Ensure this provider supports the requested function type.
            DbProvider.CheckFunctionSupported(providerName, dbFunctionType);

            parameters ??= Array.Empty<DbParameter>();

            var stringBuilder = new StringBuilder();
            // Table-valued functions must be selected FROM; scalar functions are selected directly.
            stringBuilder.Append($"SELECT{(dbFunctionType == DbFunctionType.Table ? " * FROM" : "")} {funcName}(");

            for (var i = 0; i < parameters.Length; i++)
            {
                var sqlParameter = parameters[i];

                // Use the provider-specific parameter placeholder prefix.
                stringBuilder.Append(FixSqlParameterPlaceholder(providerName, sqlParameter.ParameterName));

                // No trailing comma after the last parameter.
                if (i != parameters.Length - 1) stringBuilder.Append(", ");
            }

            stringBuilder.Append("); ");
            return stringBuilder.ToString();
        }

        /// <summary>
        /// Generates the SQL text that invokes a database function, deriving the
        /// parameter list from a model's public instance properties.
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="dbFunctionType">The function type (scalar or table-valued).</param>
        /// <param name="funcName">The function name.</param>
        /// <param name="model">The parameter model; may be null.</param>
        /// <returns>The generated SQL statement.</returns>
        internal static string GenerateFunctionSql(string providerName, DbFunctionType dbFunctionType, string funcName, object model)
        {
            // Ensure this provider supports the requested function type.
            DbProvider.CheckFunctionSupported(providerName, dbFunctionType);

            var modelType = model?.GetType();

            // Dictionary models have a dedicated generation path.
            if (modelType == typeof(Dictionary<string, object>)) return GenerateFunctionSql(providerName, dbFunctionType, funcName, (Dictionary<string, object>)model);

            var properties = model == null
                ? Array.Empty<PropertyInfo>()
                : modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);

            var stringBuilder = new StringBuilder();
            stringBuilder.Append($"SELECT{(dbFunctionType == DbFunctionType.Table ? " * FROM" : "")} {funcName}(");

            for (var i = 0; i < properties.Length; i++)
            {
                var property = properties[i];

                stringBuilder.Append(FixSqlParameterPlaceholder(providerName, property.Name));

                // No trailing comma after the last parameter.
                if (i != properties.Length - 1) stringBuilder.Append(", ");
            }

            stringBuilder.Append("); ");
            return stringBuilder.ToString();
        }

        /// <summary>
        /// Generates the SQL text that invokes a database function, deriving the
        /// parameter list from dictionary keys.
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="dbFunctionType">The function type (scalar or table-valued).</param>
        /// <param name="funcName">The function name.</param>
        /// <param name="keyValues">Parameter name/value pairs; may be null or empty.</param>
        /// <returns>The generated SQL statement.</returns>
        internal static string GenerateFunctionSql(string providerName, DbFunctionType dbFunctionType, string funcName, Dictionary<string, object> keyValues)
        {
            // Ensure this provider supports the requested function type.
            DbProvider.CheckFunctionSupported(providerName, dbFunctionType);

            var stringBuilder = new StringBuilder();
            stringBuilder.Append($"SELECT{(dbFunctionType == DbFunctionType.Table ? " * FROM" : "")} {funcName}(");

            if (keyValues != null && keyValues.Count > 0)
            {
                var i = 0;
                foreach (var key in keyValues.Keys)
                {
                    stringBuilder.Append(FixSqlParameterPlaceholder(providerName, key));

                    // No trailing comma after the last parameter.
                    if (i != keyValues.Count - 1) stringBuilder.Append(", ");
                    i++;
                }
            }

            stringBuilder.Append("); ");
            return stringBuilder.ToString();
        }

        /// <summary>
        /// Wraps a stored-procedure result set together with its OUTPUT and RETURN values.
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="parameters">The command parameters executed with the procedure.</param>
        /// <param name="dataSet">The result data set.</param>
        /// <returns>The combined <see cref="ProcedureOutputResult"/>.</returns>
        internal static ProcedureOutputResult WrapperProcedureOutput(string providerName, DbParameter[] parameters, DataSet dataSet)
        {
            // Extract OUTPUT parameter values and the RETURN value.
            ReadOutputValue(providerName, parameters, out var outputValues, out var returnValue);

            return new ProcedureOutputResult
            {
                Result = dataSet,
                OutputValues = outputValues,
                ReturnValue = returnValue
            };
        }

        /// <summary>
        /// Wraps a stored-procedure result set together with its OUTPUT and RETURN values,
        /// converting the data set into a typed result.
        /// </summary>
        /// <typeparam name="TResult">The typed representation of the data set.</typeparam>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="parameters">The command parameters executed with the procedure.</param>
        /// <param name="dataSet">The result data set.</param>
        /// <returns>The combined <see cref="ProcedureOutputResult{TResult}"/>.</returns>
        internal static ProcedureOutputResult<TResult> WrapperProcedureOutput<TResult>(string providerName, DbParameter[] parameters, DataSet dataSet)
        {
            // Extract OUTPUT parameter values and the RETURN value.
            ReadOutputValue(providerName, parameters, out var outputValues, out var returnValue);

            return new ProcedureOutputResult<TResult>
            {
                Result = (TResult)dataSet.ToValueTuple(typeof(TResult)),
                OutputValues = outputValues,
                ReturnValue = returnValue
            };
        }

        /// <summary>
        /// Wraps a stored-procedure result for a runtime-supplied result type by
        /// invoking the generic overload through reflection.
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="parameters">The command parameters executed with the procedure.</param>
        /// <param name="dataSet">The result data set.</param>
        /// <param name="type">The desired result type.</param>
        /// <returns>A <see cref="ProcedureOutputResult{TResult}"/> boxed as <see cref="object"/>.</returns>
        internal static object WrapperProcedureOutput(string providerName, DbParameter[] parameters, DataSet dataSet, Type type)
        {
            var wrapperProcedureOutputMethod = typeof(DbHelpers)
                .GetMethods(BindingFlags.Static | BindingFlags.NonPublic)
                .First(u => u.Name == nameof(WrapperProcedureOutput) && u.IsGenericMethod)
                .MakeGenericMethod(type);

            return wrapperProcedureOutputMethod.Invoke(null, new object[] { providerName, parameters, dataSet });
        }

        /// <summary>
        /// Resolves <c>#(path)</c> placeholders in a SQL template by substituting
        /// SQL text loaded from configuration.
        /// </summary>
        /// <param name="sqlTemplate">The SQL text or template.</param>
        /// <returns>The SQL text with all placeholders replaced.</returns>
        /// <exception cref="InvalidOperationException">Thrown when a referenced configuration path cannot be found.</exception>
        internal static string ResolveSqlConfiguration(string sqlTemplate)
        {
            var matches = SqlTemplateRegex.Matches(sqlTemplate);
            if (!matches.Any()) return sqlTemplate;

            foreach (Match match in matches)
            {
                // The configuration path inside #( ... ).
                var path = match.Groups["path"].Value;

                // Prefer a plain string value; fall back to a SqlTemplate section.
                var realSql = App.Configuration[path];
                if (string.IsNullOrWhiteSpace(realSql))
                {
                    var sqlConfiguration = App.GetConfig<SqlTemplate>(path)
                        ?? throw new InvalidOperationException($"Not found {path} configuration information.");
                    realSql = sqlConfiguration.Sql;
                }

                sqlTemplate = sqlTemplate.Replace($"#({path})", realSql);
            }

            return sqlTemplate;
        }

        /// <summary>
        /// Creates the exception thrown when a query yields no data.
        /// </summary>
        /// <returns>An <see cref="InvalidOperationException"/> matching LINQ's empty-sequence message.</returns>
        internal static InvalidOperationException DataNotFoundException()
        {
            return new InvalidOperationException("Sequence contains no elements.");
        }

        /// <summary>
        /// Normalizes a parameter name for the given provider, adding or stripping
        /// the provider-specific placeholder prefix (<c>@</c> or, for Oracle, <c>:</c>).
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="parameterName">The parameter name, with or without a prefix.</param>
        /// <param name="isFixed">True to return the name with the prefix; false to return the bare name.</param>
        /// <returns>The normalized parameter name.</returns>
        internal static string FixSqlParameterPlaceholder(string providerName, string parameterName, bool isFixed = true)
        {
            // Oracle uses ':' as the placeholder prefix; every other supported provider uses '@'.
            var placeholder = !DbProvider.IsDatabaseFor(providerName, DbProvider.Oracle) ? "@" : ":";

            // Strip any existing prefix so the name is never double-prefixed.
            if (parameterName.StartsWith("@") || parameterName.StartsWith(":"))
            {
                parameterName = parameterName[1..];
            }

            return isFixed ? placeholder + parameterName : parameterName;
        }

        /// <summary>
        /// Reads OUTPUT parameter values and the RETURN value from executed command parameters.
        /// </summary>
        /// <param name="providerName">The ADO.NET provider name.</param>
        /// <param name="parameters">The executed command parameters.</param>
        /// <param name="outputValues">All OUTPUT-direction parameter values.</param>
        /// <param name="returnValue">The RETURN-direction value, or null when absent.</param>
        private static void ReadOutputValue(string providerName, DbParameter[] parameters, out IEnumerable<ProcedureOutputValue> outputValues, out object returnValue)
        {
            // Collect every OUTPUT parameter, exposing names without the provider prefix.
            outputValues = parameters
                .Where(u => u.Direction == ParameterDirection.Output)
                .Select(u => new ProcedureOutputValue
                {
                    Name = FixSqlParameterPlaceholder(providerName, u.ParameterName, false),
                    Value = u.Value
                });

            // A procedure has at most one RETURN value.
            returnValue = parameters.FirstOrDefault(u => u.Direction == ParameterDirection.ReturnValue)?.Value;
        }

        /// <summary>
        /// Regular expression matching <c>#(path)</c> SQL-template placeholders.
        /// </summary>
        private static readonly Regex SqlTemplateRegex;

        /// <summary>
        /// Static constructor; compiles the template regex once.
        /// </summary>
        static DbHelpers()
        {
            SqlTemplateRegex = new Regex(@"\#\((?<path>.*?)\)");
        }
    }
}
37.178392
167
0.550517
[ "Apache-2.0" ]
SuperSnowYao/Furion
framework/Furion/DatabaseAccessor/Helpers/DbHelpers.cs
15,822
C#
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using RadarSoft.RadarCube.Controls;
using RadarSoft.RadarCube.Layout;
using RadarSoft.RadarCube.Tools;

namespace RadarSoft.RadarCube.Engine.Md
{
    /// <summary>
    /// Multidimensional meta line; caches <see cref="MdLine"/> instances per
    /// (hierarchy, measure, show-mode) key.
    /// </summary>
    [Serializable]
    [DebuggerDisplay("OlapCubeMetaLine ID = {ID}")]
    internal class MdMetaLine : MetaLine
    {
        internal MdMetaLine(OlapControl AGrid, IList<int> LevelIndexes)
            : base(AGrid, LevelIndexes)
        {
        }

        public MdMetaLine()
        {
            DebugLogging.WriteLine("OlapCubeMetaLine.ctor(ID=null)");
        }

        // Debug-only trace of the constructor arguments; compiled out of release builds.
        [Conditional("DEBUG")]
        private void DebugLogging_WriteLine(IList<int> LevelIndexes)
        {
            if (DebugLogging.Verify("OlapCubeMetaLine.ctor()"))
                return;

            var levelNames = Levels.Select(x => x.DisplayName).ToArray();
            var levels = "(" + string.Join(", ", levelNames) + ")";
            DebugLogging.WriteLine("OlapCubeMetaLine.ctor(ID={0} AGrid, LevelIndexes={1}={2})",
                ID, Extentions.ConvertToString(LevelIndexes), levels);
        }

        /// <summary>
        /// Returns the line for the given hierarchy/measure/mode, creating and
        /// caching a new <see cref="MdLine"/> on a cache miss.
        /// </summary>
        internal override Line GetLine(int HierID, Measure AMeasure, MeasureShowMode Mode)
        {
            // Fast path: the base class may already hold the answer.
            var line = base.GetLine(HierID, AMeasure, Mode);
            if (line != null)
                return line;

            var lineKey = GetKey(HierID, AMeasure, Mode);

            // Second chance: look the key up in the local line dictionary.
            if (fLines.TryGetValue(lineKey, out line))
            {
                cache_line = line;
                return line;
            }

            // Miss: build a fresh line, register it and remember it as the cached one.
            line = new MdLine(this, lineKey, AMeasure.UniqueName, Mode, HierID);
            fLines.Add(lineKey, line);
            cache_line = line;
            return line;
        }
    }
}
29.696429
100
0.575466
[ "MIT" ]
RadarSoft/radarcube-olap-analysis
src/Engine/Md/MdMetaLine.cs
1,663
C#
using Nager.Date.Contract;
using Nager.Date.Model;
using System;
using System.Collections.Generic;
using System.Linq;

namespace Nager.Date.PublicHolidays
{
    /// <summary>
    /// Switzerland
    /// </summary>
    public class SwitzerlandProvider : IPublicHolidayProvider, ICountyProvider
    {
        private readonly ICatholicProvider _catholicProvider;

        /// <summary>
        /// SwitzerlandProvider
        /// </summary>
        /// <param name="catholicProvider">Provider for Easter-relative (catholic) holiday dates</param>
        public SwitzerlandProvider(ICatholicProvider catholicProvider)
        {
            this._catholicProvider = catholicProvider;
        }

        ///<inheritdoc/>
        public IDictionary<string, string> GetCounties()
        {
            // Swiss cantons keyed by their ISO 3166-2:CH code; display names are German.
            return new Dictionary<string, string>
            {
                { "CH-AG", "Aargau" },
                { "CH-AI", "Appenzell Innerrhoden" },
                { "CH-AR", "Appenzell Ausserrhoden" },
                { "CH-BL", "Basel-Landschaft" },
                { "CH-BS", "Basel-Stadt" },
                { "CH-BE", "Bern" },
                { "CH-FR", "Freiburg" },
                { "CH-GE", "Genf" },
                { "CH-GL", "Glarus" },
                { "CH-GR", "Graubünden" },
                { "CH-JU", "Jura" },
                { "CH-LU", "Luzern" },
                { "CH-NE", "Neuenburg" },
                { "CH-NW", "Nidwalden" },
                { "CH-OW", "Obwalden" },
                { "CH-SG", "St. Gallen" },
                { "CH-SH", "Schaffhausen" },
                { "CH-SZ", "Schwyz" },
                { "CH-SO", "Solothurn" },
                { "CH-TG", "Thurgau" },
                { "CH-TI", "Tessin" },
                { "CH-UR", "Uri" },
                { "CH-VS", "Wallis" },
                { "CH-VD", "Waadt" },
                { "CH-ZG", "Zug" },
                { "CH-ZH", "Zürich" }
            };
        }

        ///<inheritdoc/>
        public IEnumerable<PublicHoliday> Get(int year)
        {
            var countryCode = CountryCode.CH;

            // Movable observances anchored to September weekdays.
            var firstSundayOfSeptember = DateSystem.FindDay(year, Month.September, DayOfWeek.Sunday, Occurrence.First);
            var thirdMondayOfSeptember = DateSystem.FindDay(year, Month.September, DayOfWeek.Monday, Occurrence.Third);

            // Holidays without a county list apply nationwide; otherwise only in the listed cantons.
            var items = new List<PublicHoliday>();
            items.Add(new PublicHoliday(year, 1, 1, "Neujahr", "New Year's Day", countryCode, 1967));
            items.Add(new PublicHoliday(year, 1, 2, "Berchtoldstag", "St. Berchtold's Day", countryCode, null, new string[] { "CH-ZH", "CH-BE", "CH-LU", "CH-OW", "CH-GL", "CH-ZG", "CH-FR", "CH-SO", "CH-SH", "CH-TG", "CH-VD", "CH-NE", "CH-GE", "CH-JU" }));
            items.Add(new PublicHoliday(year, 1, 6, "Heilige Drei Könige", "Epiphany", countryCode, null, new string[] { "CH-UR", "CH-SZ", "CH-GR", "CH-TI" }));
            items.Add(new PublicHoliday(year, 3, 19, "Josefstag", "Saint Joseph's Day", countryCode, null, new string[] { "CH-LU", "CH-UR", "CH-SZ", "CH-NW", "CH-ZG", "CH-GR", "CH-TI", "CH-VS" }));
            // Easter-relative holidays are delegated to the catholic provider.
            items.Add(this._catholicProvider.GoodFriday("Karfreitag", year, countryCode).SetCounties("CH-ZH", "CH-BE", "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-GL", "CH-ZG", "CH-FR", "CH-SO", "CH-BS", "CH-BL", "CH-SH", "CH-AR", "CH-AI", "CH-SG", "CH-GR", "CH-AG", "CH-TG", "CH-VD", "CH-NE", "CH-GE", "CH-JU"));
            items.Add(this._catholicProvider.EasterMonday("Ostermontag", year, countryCode).SetLaunchYear(1642).SetCounties("CH-ZH", "CH-BE", "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-GL", "CH-ZG", "CH-FR", "CH-SO", "CH-BS", "CH-BL", "CH-SH", "CH-AR", "CH-AI", "CH-SG", "CH-GR", "CH-AG", "CH-TG", "CH-TI", "CH-VD", "CH-NE", "CH-GE", "CH-JU"));
            items.Add(new PublicHoliday(year, 5, 1, "Tag der Arbeit", "Labour Day", countryCode, null, new string[] { "CH-ZH", "CH-FR", "CH-SO", "CH-BS", "CH-BL", "CH-SH", "CH-AG", "CH-TG", "CH-TI", "CH-NE", "CH-JU" }));
            items.Add(this._catholicProvider.AscensionDay("Auffahrt", year, countryCode));
            items.Add(this._catholicProvider.WhitMonday("Pfingstmontag", year, countryCode).SetCounties("CH-ZH", "CH-BE", "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-GL", "CH-ZG", "CH-FR", "CH-SO", "CH-BS", "CH-BL", "CH-SH", "CH-AR", "CH-AI", "CH-SG", "CH-GR", "CH-AG", "CH-TG", "CH-TI", "CH-VD", "CH-NE", "CH-GE", "CH-JU"));
            items.Add(this._catholicProvider.CorpusChristi("Fronleichnam", year, countryCode).SetCounties("CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-ZG", "CH-FR", "CH-SO", "CH-BL", "CH-AI", "CH-GR", "CH-AG", "CH-TI", "CH-VS", "CH-NE", "CH-JU"));
            items.Add(new PublicHoliday(year, 8, 1, "Bundesfeier", "Swiss National Day", countryCode));
            items.Add(new PublicHoliday(year, 8, 15, "Maria Himmelfahrt", "Assumption of the Virgin Mary", countryCode, null, new string[] { "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-ZG", "CH-FR", "CH-SO", "CH-BL", "CH-AI", "CH-GR", "CH-AG", "CH-TI", "CH-VS", "CH-JU" }));
            items.Add(new PublicHoliday(year, 11, 1, "Allerheiligen", "All Saints' Day", countryCode, null, new string[] { "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-GL", "CH-ZG", "CH-FR", "CH-SO", "CH-AI", "CH-SG", "CH-GR", "CH-AG", "CH-TI", "CH-VS", "CH-JU" }));
            items.Add(new PublicHoliday(year, 12, 8, "Mariä Empfängnis", "Immaculate Conception", countryCode, null, new string[] { "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-ZG", "CH-FR", "CH-SO", "CH-AI", "CH-GR", "CH-AG", "CH-TI", "CH-VS" }));
            items.Add(new PublicHoliday(year, 12, 25, "Weihnachten", "Christmas Day", countryCode));
            items.Add(new PublicHoliday(year, 12, 26, "Stephanstag", "St. Stephen's Day", countryCode, null, new string[] { "CH-AG", "CH-AI", "CH-AR", "CH-BL", "CH-BS", "CH-BE", "CH-FR", "CH-GL", "CH-GR", "CH-LU", "CH-NW", "CH-OW", "CH-SG", "CH-SH", "CH-SZ", "CH-SO", "CH-TG", "CH-TI", "CH-UR", "CH-ZG", "CH-ZH" }));
            // Jeûne genevois: the Thursday after the first Sunday of September (Geneva only).
            items.Add(new PublicHoliday(firstSundayOfSeptember.AddDays(4), "Jeûne genevois", "Geneva Prayday", countryCode, null, new string[] { "CH-GE" }));
            items.Add(new PublicHoliday(thirdMondayOfSeptember, "Eidgenössischer Dank-, Buss- und Bettag", "Federal Day of Thanksgiving", countryCode, null, new string[] { "CH-ZH", "CH-BE", "CH-LU", "CH-UR", "CH-SZ", "CH-OW", "CH-NW", "CH-GL", "CH-ZG", "CH-FR", "CH-SO", "CH-BS", "CH-BL", "CH-SH", "CH-AR", "CH-AI", "CH-SG", "CH-GR", "CH-AG", "CH-TG", "CH-TI", "CH-VD", "CH-VS", "CH-NE", "CH-JU" }));

            return items.OrderBy(o => o.Date);
        }

        ///<inheritdoc/>
        public IEnumerable<string> GetSources()
        {
            return new string[]
            {
                "https://de.wikipedia.org/wiki/Feiertage_in_der_Schweiz",
                "https://en.wikipedia.org/wiki/Federal_Day_of_Thanksgiving,_Repentance_and_Prayer",
                "https://en.wikipedia.org/wiki/Je%C3%BBne_genevois"
            };
        }
    }
}
67.524272
400
0.532135
[ "MIT" ]
Marcelo366/Nager.Date
Src/Nager.Date/PublicHolidays/SwitzerlandProvider.cs
6,962
C#
//
// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Amazon Software License (the "License").
// You may not use this file except in compliance with the License.
// A copy of the License is located at
//
// http://aws.amazon.com/asl/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.
//
using System.Runtime.Serialization;

namespace Amazon.Kinesis.ClientLibrary
{
    /// <summary>
    /// Action message signalling that a shard has ended; dispatches to the
    /// record processor's ShardEnded callback.
    /// </summary>
    [DataContract]
    internal class ShardEndedAction : Action
    {
        // Wire-protocol action identifier for this message type.
        public const string ACTION = "shardEnded";

        public ShardEndedAction()
        {
            Type = ACTION;
        }

        /// <summary>
        /// Invokes the processor's shard-ended handler with a checkpointer-backed input.
        /// </summary>
        public override void Dispatch(IShardRecordProcessor processor, Checkpointer checkpointer)
        {
            var input = new DefaultShardEndedInput(checkpointer);
            processor.ShardEnded(input);
        }
    }
}
30.342857
97
0.690207
[ "MIT" ]
ciaranodonnell/KinesisDotNet
Code/Amazon/ClientLibrary/messages/ShardEndedAction.cs
1,062
C#
namespace HallOfFame.Web.Areas.Administration.Controllers.Base
{
    using System.Web.Mvc;
    using HallOfFame.Common.Constants;
    using HallOfFame.Data.Contracts;
    using HallOfFame.Web.Controllers;

    /// <summary>
    /// Base controller for the administration area; restricted to administrators.
    /// </summary>
    [Authorize(Roles = GlobalConstants.AdministratorRoleName)]
    public class AdministrationController : BaseController
    {
        public AdministrationController(IHallOfFameData data)
            : base(data)
        {
        }

        // Releases the data layer together with the controller.
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                Data.Dispose();
            }

            base.Dispose(disposing);
        }
    }
}
24.333333
63
0.613394
[ "MIT" ]
StefanSinapov/Hall-of-Fame
src/HallOfFame.Web/Areas/Administration/Controllers/Base/AdministrationController.cs
659
C#
#if UNITY_2018_2_OR_NEWER
using UnityEditor;
using UnityEngine;

namespace Alf.UnityLocker.Editor.Drawers
{
    /// <summary>
    /// Draws lock controls (Lock / Revert / Finish / History) and lock status
    /// into the inspector header of every editor, once header GUI is finished.
    /// </summary>
    [InitializeOnLoad]
    public static class HeaderLockDrawer
    {
        private static readonly Rect sm_headerRect = new Rect(7, 7, 21, 21);
        private static readonly GUIStyle sm_lockLabelStyle = new GUIStyle
        {
            alignment = TextAnchor.MiddleRight
        };

        static HeaderLockDrawer()
        {
            UnityEditor.Editor.finishedDefaultHeaderGUI += OnFinishedHeaderGUI;
        }

        private static void OnFinishedHeaderGUI(UnityEditor.Editor editor)
        {
            // Material editors get no lock UI.
            if (editor.GetType() == typeof(MaterialEditor))
            {
                return;
            }
            if (!Container.GetLockSettings().IsEnabled)
            {
                return;
            }
            // Lock state is unknown until the first fetch has completed.
            if (!Locker.HasFetched)
            {
                return;
            }
            if (!Locker.AreAssetTypesValid(editor.targets))
            {
                return;
            }
            var isLockedByMe = Locker.AreAllAssetsLockedByMe(editor.targets);
            var isLockedBySomeoneElse = Locker.IsAnyAssetLockedBySomeoneElse(editor.targets);
            var isLockedNowButUnlockedAtLaterCommit = Locker.IsAnyAssetLockedNowButUnlockedAtLaterCommit(editor.targets);
            using (new GUILayout.HorizontalScope())
            {
                EditorGUILayout.LabelField("Lock", sm_lockLabelStyle, GUILayout.Width(44));
                // Lock is only possible while no lock (in any state) exists on the selection.
                using (new EditorGUI.DisabledGroupScope(isLockedByMe || isLockedBySomeoneElse || isLockedNowButUnlockedAtLaterCommit))
                {
                    if (GUILayout.Button(new GUIContent(Constants.LockName), EditorStyles.miniButton))
                    {
                        Locker.TryLockAssets(editor.targets, null, (errorMessage) =>
                        {
                            EditorUtility.DisplayDialog("Asset locking failed", "Asset locking failed\n" + errorMessage, "OK");
                        });
                    }
                }
                // Revert/Finish only apply to my own locks.
                using (new EditorGUI.DisabledGroupScope(!isLockedByMe))
                {
                    if (GUILayout.Button(new GUIContent(Constants.RevertName), EditorStyles.miniButton))
                    {
                        Locker.TryRevertAssetLocks(editor.targets, null, (errorMessage) =>
                        {
                            EditorUtility.DisplayDialog("Asset reverting failed", "Asset reverting failed\n" + errorMessage, "OK");
                        });
                    }
                    if (GUILayout.Button(new GUIContent(Constants.FinishName), EditorStyles.miniButton))
                    {
                        Locker.TryFinishLockingAssets(editor.targets, null, (errorMessage) =>
                        {
                            EditorUtility.DisplayDialog("Asset finishing failed", "Asset finishing failed\n" + errorMessage, "OK");
                        });
                    }
                }
                if (GUILayout.Button(new GUIContent(Constants.HistoryName), EditorStyles.miniButton))
                {
                    HistoryWindow.Show(editor.target);
                }
            }
            if (isLockedByMe || isLockedBySomeoneElse || isLockedNowButUnlockedAtLaterCommit)
            {
                var hasMultipleLockers = false;
                var locker = Locker.GetAssetLocker(editor.targets[0]);
                for (var i = 1; i < editor.targets.Length; i++)
                {
                    // BUGFIX: compare each target's locker (was editor.targets[1],
                    // which only ever inspected the second selected asset).
                    if (locker != Locker.GetAssetLocker(editor.targets[i]))
                    {
                        hasMultipleLockers = true;
                        break;
                    }
                }
                if (!string.IsNullOrEmpty(locker))
                {
                    LockDrawer.TryDrawLock(sm_headerRect, editor.target, LockDrawer.DrawType.LargeIcon);
                    EditorGUILayout.LabelField("Asset" + (editor.targets.Length > 1 ? "s" : "") + " locked by " + (hasMultipleLockers ? "multiple users" : locker), EditorStyles.boldLabel);
                    if (isLockedNowButUnlockedAtLaterCommit)
                    {
                        var hasMultipleUnlockShas = false;
                        var sha = Locker.GetAssetUnlockCommitShaShort(editor.targets[0]);
                        for (var i = 1; i < editor.targets.Length; i++)
                        {
                            // BUGFIX: compare each target's unlock sha (was editor.targets[1]).
                            if (sha != Locker.GetAssetUnlockCommitShaShort(editor.targets[i]))
                            {
                                hasMultipleUnlockShas = true;
                                break;
                            }
                        }
                        if (!string.IsNullOrEmpty(sha))
                        {
                            if (hasMultipleUnlockShas)
                            {
                                EditorGUILayout.LabelField("(Unlocked at multiple commits)");
                            }
                            else
                            {
                                EditorGUILayout.LabelField("(Unlocked at commit " + sha + ")");
                            }
                        }
                    }
                }
            }
        }
    }
}
#endif
30.436508
173
0.674837
[ "Apache-2.0" ]
AffeJonsson/UnityLocker
UnityLocker/Assets/Plugins/UnityLocker/Scripts/Editor/Drawers/HeaderLockDrawer.cs
3,837
C#
using System;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using Microsoft.Dnx.Runtime;
using Microsoft.Dnx.Runtime.Common.CommandLine;
using Newtonsoft.Json;
using System.Threading.Tasks;
using System.Diagnostics;

namespace Srclib.Nuget
{
    /// <summary>
    /// Implements the srclib "depresolve" command: reads a SourceUnit from stdin,
    /// resolves its DNX project dependencies and writes the resolutions as JSON.
    /// </summary>
    class DepresolveConsoleCommand
    {
        static Lazy<string> _dnuPath;

        /// <summary>
        /// Initialization method for depresolve process.
        /// </summary>
        /// <param name="cmdApp">application to run (srclib-csharp)</param>
        /// <param name="appEnv">common application information</param>
        /// <param name="runtimeEnv">environment representation</param>
        public static void Register(CommandLineApplication cmdApp, Microsoft.Extensions.PlatformAbstractions.IApplicationEnvironment appEnv, Microsoft.Extensions.PlatformAbstractions.IRuntimeEnvironment runtimeEnv)
        {
            _dnuPath = new Lazy<string>(FindDnuNix);

            cmdApp.Command("depresolve", c =>
            {
                c.Description = "Perform a combination of parsing, static analysis, semantic analysis, and type inference";
                c.HelpOption("-?|-h|--help");

                c.OnExecute(async () =>
                {
                    // The source unit arrives as JSON on stdin.
                    var jsonIn = await Console.In.ReadToEndAsync();
                    var sourceUnit = JsonConvert.DeserializeObject<SourceUnit>(jsonIn);
                    var dir = Path.Combine(Directory.GetCurrentDirectory(), sourceUnit.Dir);
                    var deps = DepResolve(dir);
                    var result = new List<Resolution>();
                    foreach (var dep in deps)
                    {
                        result.Add(Resolution.FromLibrary(dep));
                    }
                    Console.WriteLine(JsonConvert.SerializeObject(result, Formatting.Indented));
                    return 0;
                });
            });
        }

        /// <summary>
        /// Resolves the dependencies of the project located in <paramref name="dir"/>.
        /// Returns an empty sequence when the directory is not a DNX project.
        /// </summary>
        static IEnumerable<LibraryDescription> DepResolve(string dir)
        {
            Project proj;
            if (!Project.TryGetProject(dir, out proj))
            {
                // Not a DNX project.
                return new List<LibraryDescription>();
            }
            return GetAllDeps(proj);
        }

        /// <summary>
        /// Resolves the dependencies of an already-loaded project.
        /// </summary>
        public static IEnumerable<LibraryDescription> DepResolve(Project proj)
        {
            return GetAllDeps(proj);
        }

        /// <summary>
        /// Collects the distinct runtime libraries across all target frameworks,
        /// ordered by name. The self-reference (first entry) is skipped.
        /// </summary>
        static IEnumerable<LibraryDescription> GetAllDeps(Project proj) =>
            proj.GetTargetFrameworks().Select(f => f.FrameworkName)
                .SelectMany(f =>
                {
                    var context = new ApplicationHostContext
                    {
                        Project = proj,
                        TargetFramework = f
                    };

                    IList<LibraryDescription> libs = null;
                    while (libs == null)
                    {
                        // NOTE(review): retries until GetRuntimeLibraries succeeds, swallowing
                        // every exception — presumably to ride out transient restore state.
                        // A persistent failure loops forever; confirm intent before changing.
                        try
                        {
                            libs = ApplicationHostContext.GetRuntimeLibraries(context);
                        }
                        catch (Exception)
                        {
                        }
                    }

                    // The first library description is always a self-reference, so skip it.
                    return libs.Skip(1);
                })
                .Distinct(LibraryUtils.Comparer)
                .OrderBy(l => l.Identity?.Name);

        /// <summary>
        /// Runs "dnu restore" in <paramref name="dir"/> and waits for completion.
        /// </summary>
        public static async Task RunResolve(string dir)
        {
            var p = new Process();
            p.StartInfo.WorkingDirectory = dir;
            p.StartInfo.FileName = _dnuPath.Value;
            p.StartInfo.Arguments = "restore";
            p.StartInfo.UseShellExecute = false;
            p.StartInfo.CreateNoWindow = true;
            p.StartInfo.RedirectStandardInput = true;
            p.StartInfo.RedirectStandardOutput = true;
            p.StartInfo.RedirectStandardError = true;
            p.Start();

            // It's important to drain stdout and stderr, else the child can deadlock
            // on a full pipe buffer; the captured text itself is not needed.
            await Task.WhenAll(p.StandardOutput.ReadToEndAsync(), p.StandardError.ReadToEndAsync());
            p.WaitForExit();
        }

        /// <summary>
        /// Locates the "dnu" binary on a Unix-like system via the shell.
        /// </summary>
        static string FindDnuNix()
        {
            return RunForResult("/bin/bash", "-c \"which dnu\"");
        }

        /// <summary>
        /// Runs a shell command and returns its trimmed standard output.
        /// </summary>
        public static string RunForResult(string shell, string command)
        {
            var p = new Process();
            p.StartInfo.WorkingDirectory = Directory.GetCurrentDirectory();
            p.StartInfo.FileName = shell;
            p.StartInfo.Arguments = command;
            p.StartInfo.UseShellExecute = false;
            p.StartInfo.CreateNoWindow = true;
            p.StartInfo.RedirectStandardInput = true;
            p.StartInfo.RedirectStandardOutput = true;
            p.StartInfo.RedirectStandardError = false;
            p.Start();
            var result = p.StandardOutput.ReadToEnd().Trim();
            p.WaitForExit();
            return result;
        }

        /// <summary>
        /// Recursively collects all *.cs files below <paramref name="root"/>.
        /// Directories that cannot be listed are skipped.
        /// </summary>
        public static FileInfo[] FindSources(DirectoryInfo root)
        {
            FileInfo[] files = null;
            try
            {
                files = root.GetFiles("*.cs");
            }
            catch (Exception)
            {
            }
            // BUGFIX: GetFiles can throw (e.g. access denied), leaving files null and
            // crashing the Concat below — fall back to an empty array.
            if (files == null)
            {
                files = new FileInfo[0];
            }

            foreach (DirectoryInfo dirInfo in root.GetDirectories())
            {
                // Recursive call for each subdirectory.
                FileInfo[] res = FindSources(dirInfo);
                files = files.Concat(res).ToArray();
            }
            return files;
        }

        /// <summary>
        /// Recursively searches below <paramref name="root"/> for "<paramref name="name"/>.dll".
        /// Returns the full path of the first match, or null when not found.
        /// </summary>
        public static string FindDll(DirectoryInfo root, string name)
        {
            FileInfo[] files = null;
            try
            {
                files = root.GetFiles("*.dll");
            }
            catch (Exception)
            {
            }
            if (files != null)
            {
                foreach (FileInfo fi in files)
                {
                    if (fi.Name.Equals(name + ".dll"))
                    {
                        return fi.FullName;
                    }
                }
            }

            foreach (DirectoryInfo dirInfo in root.GetDirectories())
            {
                // Recursive call for each subdirectory.
                string res = FindDll(dirInfo, name);
                if (res != null)
                {
                    return res;
                }
            }
            return null;
        }

        /// <summary>
        /// Recursively searches below <paramref name="root"/> for the first *.nuspec file.
        /// Returns its full path, or null when not found.
        /// </summary>
        public static string FindNuspec(DirectoryInfo root)
        {
            FileInfo[] files = null;
            try
            {
                files = root.GetFiles("*.nuspec");
            }
            catch (Exception)
            {
            }
            // BUGFIX: GetFiles returns an EMPTY array (not null) when nothing matches,
            // so indexing files[0] unguarded threw IndexOutOfRangeException.
            if (files != null && files.Length > 0)
            {
                return files[0].FullName;
            }

            foreach (DirectoryInfo dirInfo in root.GetDirectories())
            {
                // Recursive call for each subdirectory.
                string res = FindNuspec(dirInfo);
                if (res != null)
                {
                    return res;
                }
            }
            return null;
        }
    }
}
32.164502
214
0.501211
[ "MIT" ]
sourcegraph/srclib-csharp
Srclib.Nuget/ConsoleCommands/DepresolveConsoleCommand.cs
7,430
C#
using System;
using System.IO;
using Wire.Extensions;

namespace Wire.ValueSerializers
{
    /// <summary>
    /// Serializer that writes a value by first translating it into its surrogate
    /// representation; reading back is not supported on this side.
    /// </summary>
    public class ToSurrogateSerializer : ValueSerializer
    {
        // Maps the original value to its surrogate before serialization.
        private readonly Func<object, object> _translator;

        public ToSurrogateSerializer(Func<object, object> translator)
        {
            _translator = translator;
        }

        public override void WriteManifest(Stream stream, SerializerSession session)
        {
            //intentionally left blank
        }

        public override void WriteValue(Stream stream, object value, SerializerSession session)
        {
            // Serialize the surrogate, including its manifest, in place of the value.
            stream.WriteObjectWithManifest(_translator(value), session);
        }

        public override object ReadValue(Stream stream, DeserializerSession session)
        {
            // Deserialization is handled by the from-surrogate side.
            throw new NotSupportedException();
        }

        public override Type GetElementType()
        {
            throw new NotImplementedException();
        }
    }
}
27.054054
95
0.64036
[ "MIT" ]
Longzhiwu2015/SimpleBinary
Other/Wire/ValueSerializers/ToSurrogateSerializer.cs
1,003
C#
 namespace GMap.NET.MapProviders { using System; using GMap.NET.Projections; public abstract class CzechMapProviderBase : GMapProvider { public CzechMapProviderBase() { RefererUrl = "http://www.mapy.cz/"; Area = new RectLatLng(51.2024819920053, 11.8401353319027, 7.22833716731277, 2.78312271922872); } #region GMapProvider Members public override Guid Id { get { throw new NotImplementedException(); } } public override string Name { get { throw new NotImplementedException(); } } public override PureProjection Projection { get { return MapyCZProjection.Instance; } } GMapProvider[] overlays; public override GMapProvider[] Overlays { get { if(overlays == null) { overlays = new GMapProvider[] { this }; } return overlays; } } public override PureImage GetTileImage(GPoint pos, int zoom) { throw new NotImplementedException(); } #endregion } /// <summary> /// CzechMap provider, http://www.mapy.cz/ /// </summary> public class CzechMapProvider : CzechMapProviderBase { public static readonly CzechMapProvider Instance; CzechMapProvider() { } static CzechMapProvider() { Instance = new CzechMapProvider(); } #region GMapProvider Members readonly Guid id = new Guid("6A1AF99A-84C6-4EF6-91A5-77B9D03257C2"); public override Guid Id { get { return id; } } readonly string name = "CzechMap"; public override string Name { get { return name; } } public override PureImage GetTileImage(GPoint pos, int zoom) { string url = MakeTileImageUrl(pos, zoom, LanguageStr); return GetTileImageUsingHttp(url); } #endregion string MakeTileImageUrl(GPoint pos, int zoom, string language) { // ['base','ophoto','turist','army2'] // http://m1.mapserver.mapy.cz/base-n/3_8000000_8000000 long xx = pos.X << (28 - zoom); long yy = ((((long)Math.Pow(2.0, (double)zoom)) - 1) - pos.Y) << (28 - zoom); return string.Format(UrlFormat, GetServerNum(pos, 3) + 1, zoom, xx, yy); } static readonly string UrlFormat = "http://m{0}.mapserver.mapy.cz/base-n/{1}_{2:x7}_{3:x7}"; } }
23.5
104
0.520014
[ "Apache-2.0" ]
forgy/ntripshare-base
GMap.NET.Core/GMap.NET.MapProviders/Czech/CzechMapProvider.cs
2,775
C#
#if UNITY_ANDROID
using UnityEngine;

namespace FPCSharpUnity.unity.Android.Bindings.java.nio.charset {
  /// <summary>Thin managed binding over the Java <c>java.nio.charset.Charset</c> class.</summary>
  public class Charset : Binding {
    static readonly AndroidJavaClass klass;

    /// <summary>Shared UTF-8 charset instance, resolved once at type initialization.</summary>
    public static readonly Charset UTF_8;

    static Charset() {
      klass = new AndroidJavaClass("java.nio.charset.Charset");
      // Resolved by name because StandardCharsets are not available in old androids.
      UTF_8 = forName("UTF-8");
    }

    public Charset(AndroidJavaObject java) : base(java) {}

    /// <summary>Looks up a charset by canonical name through the Java-side <c>forName</c> API.</summary>
    public static Charset forName(string charsetName) {
      var javaCharset = klass.csjo("forName", charsetName);
      return new Charset(javaCharset);
    }
  }
}
#endif
27.227273
65
0.707846
[ "MIT" ]
FPCSharpUnity/FPCSharpUnity
parts/0000-library/Assets/Vendor/FPCSharpUnity/unity/Android/Bindings/java/nio/charset/Charset.cs
601
C#
using DamaCoreCMS.Framework.Core.Models.ViewModels;
using System.Collections.Generic;

namespace Core.Blog.Models
{
    /// <summary>
    /// View model carrying the data needed to render a blog tag cloud.
    /// </summary>
    public class TagCloudViewModel
    {
        // Presumably restricts the cloud to tags that have at least one post —
        // TODO confirm against the view/controller that consumes this flag.
        public bool ShowTagHasPost { get; set; }

        // Whether a per-tag post count is displayed next to each tag.
        public bool ShowPostCount { get; set; }

        // Tag entries to render; may be null until populated by the caller.
        public List<TagCloudItemViewModel> ItemList { get; set; }
    }
}
25.923077
65
0.679525
[ "BSD-3-Clause" ]
damaraaxaindonesia/DamaCMS
DamaCoreCMS.Web/Core/Core.Blog/Models/TagCloudViewModel.cs
339
C#
//*********************************************************//
//    Copyright (c) Microsoft.  All rights reserved.
//
//    Apache 2.0 License
//
//    You may obtain a copy of the License at
//    http://www.apache.org/licenses/LICENSE-2.0
//
//    Unless required by applicable law or agreed to in writing, software
//    distributed under the License is distributed on an "AS IS" BASIS,
//    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
//    implied. See the License for the specific language governing
//    permissions and limitations under the License.
//
//*********************************************************//

using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.NodejsTools.Npm.SPI;

namespace Microsoft.NodejsTools.Npm {
    /// <summary>
    /// Mutable builder that accumulates package metadata and produces an
    /// immutable node module description via <see cref="Build"/>.
    /// </summary>
    internal class NodeModuleBuilder {
        private List<IPackage> _dependencies = new List<IPackage>();
        private readonly StringBuilder _descriptionBuff = new StringBuilder();
        private readonly StringBuilder _authorBuff = new StringBuilder();
        private readonly StringBuilder _publishDateTime = new StringBuilder();
        private List<string> _keywords = new List<string>();
        private List<string> _homepages = new List<string>();
        private List<SemverVersion> _availableVersions = new List<SemverVersion>();

        public NodeModuleBuilder() {
            Reset();
        }

        /// <summary>
        /// Returns the builder to its pristine state so it can be reused for
        /// another package.
        /// </summary>
        public void Reset() {
            Name = null;

            // We should double check, but it looks like the package no longer
            // exists when "latest" is not set; if so there should be an option
            // to filter those packages out. https://nodejstools.codeplex.com/workitem/1452
            LatestVersion = SemverVersion.UnknownVersion;
            _availableVersions = new List<SemverVersion>();

            Flags = PackageFlags.None;
            RequestedVersionRange = null;

            // Fresh list instances are mandatory here: the previous instances may
            // still be referenced (by reference) from packages already built with
            // this builder, so clearing them in place would corrupt those packages.
            _dependencies = new List<IPackage>();
            _keywords = new List<string>();
            _homepages = new List<string>();

            _descriptionBuff.Length = 0;
            _authorBuff.Length = 0;
            _publishDateTime.Length = 0;
        }

        /// <summary>Appends an author fragment, space-separating successive fragments.</summary>
        public void AddAuthor(string text) {
            if (_authorBuff.Length > 0) {
                _authorBuff.Append(' ');
            }
            _authorBuff.Append(text);
        }

        /// <summary>Author parsed from the accumulated JSON source, or null when none was added.</summary>
        public IPerson Author {
            get {
                var source = _authorBuff.ToString().Trim();
                if (string.IsNullOrEmpty(source)) {
                    return null;
                }
                return Person.CreateFromJsonSource(source);
            }
        }

        public string Name { get; set; }

        public SemverVersion LatestVersion { get; set; }

        /// <summary>Known published versions; setting null resets to an empty list.</summary>
        public IEnumerable<SemverVersion> AvailableVersions {
            get { return _availableVersions; }
            set {
                if (value != null) {
                    _availableVersions = value.ToList();
                } else {
                    _availableVersions = new List<SemverVersion>();
                }
            }
        }

        public IEnumerable<string> Homepages {
            get { return _homepages; }
        }

        public void AddHomepage(string homepage) {
            _homepages.Add(homepage);
        }

        /// <summary>Appends raw description text (no separator is inserted).</summary>
        public void AppendToDescription(string text) {
            _descriptionBuff.Append(text);
        }

        /// <summary>Trimmed description, or null when nothing was appended.</summary>
        public string Description {
            get {
                var trimmed = _descriptionBuff.ToString().Trim();
                if (string.IsNullOrEmpty(trimmed)) {
                    return null;
                }
                return trimmed;
            }
        }

        /// <summary>Appends a publish-date fragment, space-separating successive fragments.</summary>
        public void AppendToDate(string text) {
            if (_publishDateTime.Length > 0) {
                _publishDateTime.Append(' ');
            }
            _publishDateTime.Append(text);
        }

        /// <summary>Trimmed publish date string, or null when nothing was appended.</summary>
        public string PublishDateTimeString {
            get {
                var trimmed = _publishDateTime.ToString().Trim();
                if (string.IsNullOrEmpty(trimmed)) {
                    return null;
                }
                return trimmed;
            }
        }

        public IEnumerable<IPackage> Dependencies {
            get { return _dependencies; }
        }

        public void AddDependency(IPackage module) {
            _dependencies.Add(module);
        }

        public void AddDependencies(IEnumerable<IPackage> packages) {
            _dependencies.AddRange(packages);
        }

        public PackageFlags Flags { get; set; }

        public string RequestedVersionRange { get; set; }

        public void AddKeyword(string keyword) {
            _keywords.Add(keyword);
        }

        public IEnumerable<string> Keywords {
            get { return _keywords; }
        }

        /// <summary>
        /// Materializes the accumulated state into an immutable package proxy,
        /// including a module collection built from the added dependencies.
        /// </summary>
        public IPackage Build() {
            var result = new PackageProxy {
                Author = Author,
                Name = Name,
                Version = LatestVersion,
                AvailableVersions = AvailableVersions,
                Description = Description,
                Homepages = Homepages,
                PublishDateTimeString = PublishDateTimeString,
                RequestedVersionRange = RequestedVersionRange,
                Flags = Flags,
                Keywords = _keywords
            };

            var moduleList = new NodeModulesProxy();
            foreach (var dependency in Dependencies) {
                moduleList.AddModule(dependency);
            }
            result.Modules = moduleList;
            return result;
        }
    }
}
34.443787
113
0.545954
[ "Apache-2.0" ]
Weflac/nodejstools
Nodejs/Product/Npm/NodeModuleBuilder.cs
5,655
C#
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.ComponentModel.Composition;
using Microsoft.VisualStudio.Shell;

namespace Microsoft.VisualStudio.ProjectSystem.VS
{
    /// <summary>
    ///     Provides an implementation of <see cref="IVsService{TInterfaceType, TServiceType}"/> that calls into Visual Studio's <see cref="SVsServiceProvider"/>.
    /// </summary>
    [Export(typeof(IVsService<,>))]
    internal class VsService<TService, TInterface> : VsService<TInterface>, IVsService<TService, TInterface>
    {
        /// <summary>
        ///     MEF-composed constructor: forwards the global VS service provider and the
        ///     threading service to the base, binding the service lookup to
        ///     <typeparamref name="TService"/> (the interface cast is handled by the base).
        /// </summary>
        [ImportingConstructor]
        public VsService([Import(typeof(SVsServiceProvider))]IServiceProvider serviceProvider, IProjectThreadingService threadingService)
            : base(serviceProvider, threadingService, typeof(TService))
        {
        }
    }
}
42.136364
162
0.728155
[ "Apache-2.0" ]
333fred/roslyn-project-system
src/Microsoft.VisualStudio.ProjectSystem.Managed.VS/ProjectSystem/VS/VsService`2.cs
929
C#
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.42000
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace SOFT151_Coursework.Properties {
    using System;

    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    // NOTE(review): do not hand-edit; regenerate from the .resx instead.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {

        // Cached ResourceManager instance; created lazily by the property below.
        private static global::System.Resources.ResourceManager resourceMan;

        // Optional culture override used for all lookups through this class.
        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }

        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SOFT151_Coursework.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
}
44.71875
185
0.601677
[ "MIT" ]
JamesHuntCode/Car-Hire-App
SOFT151-Coursework/Properties/Resources.Designer.cs
2,864
C#
using CopperSharp.Item;

namespace CopperSharp.Blocks.State;

/// <summary>
/// Represents a dispenser/dropper inventory
/// </summary>
public sealed class DispenserInventory : Inventory
{
    /// <inheritdoc />
    // Dispenser/dropper containers hold nine slots.
    protected override int MaxSize => 9;
}
21.5
50
0.709302
[ "MIT" ]
Maxuss/CopperSharp
CopperSharp/Blocks/State/DispenserInventory.cs
258
C#
//
// System.ComponentModel.DefaultPropertyAttribute
//
// Authors:
//  Gonzalo Paniagua Javier (gonzalo@ximian.com)
//  Andreas Nahr (ClassDevelopment@A-SoftTech.com)
//
// (C) 2002 Ximian, Inc (http://www.ximian.com)
// (C) 2003 Andreas Nahr
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using System;

namespace System.ComponentModel
{
	/// <summary>
	/// Specifies the default property for a component's class.
	/// </summary>
	[AttributeUsage(AttributeTargets.Class)]
	public sealed class DefaultPropertyAttribute : Attribute
	{
		private string property_name;

		// Sentinel instance with a null property name.
		public static readonly DefaultPropertyAttribute Default = new DefaultPropertyAttribute (null);

		/// <summary>
		/// Initializes the attribute with the name of the default property.
		/// </summary>
		public DefaultPropertyAttribute (string name)
		{
			property_name = name;
		}

		/// <summary>
		/// The name of the default property (may be null for <see cref="Default"/>).
		/// </summary>
		public string Name {
			get { return property_name; }
		}

		/// <summary>
		/// Two instances are equal when they name the same property.
		/// </summary>
		public override bool Equals (object o)
		{
			if (!(o is DefaultPropertyAttribute))
				return false;

			return (((DefaultPropertyAttribute) o).Name == property_name);
		}

		public override int GetHashCode ()
		{
			// Hash explicitly by Name so the hash code stays consistent with
			// Equals (which compares only Name) without relying on Attribute's
			// reflection-based base implementation; fall back to the base hash
			// only for a null name (e.g. the Default instance).
			return property_name != null ? property_name.GetHashCode () : base.GetHashCode ();
		}
	}
}
29.231884
96
0.7412
[ "MIT" ]
zlxy/Genesis-3D
Engine/extlibs/IosLibs/mono-2.6.7/mcs/class/System/System.ComponentModel/DefaultPropertyAttribute.cs
2,017
C#
// Copyright © Tanner Gooding and Contributors. Licensed under the MIT License (MIT). See License.md in the repository root for more information.

// Ported from um/winnt.h in the Windows SDK for Windows 10.0.22000.0
// Original source is Copyright © Microsoft. All rights reserved.

using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;

namespace TerraFX.Interop.Windows;

// NOTE(review): machine-generated interop type (ported from winnt.h, per the header);
// the nested explicit-layout union + bitfield struct emulate the C bitfield layout —
// do not hand-edit the masks/shifts below.
/// <include file='PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY.xml' path='doc/member[@name="PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY"]/*' />
[SupportedOSPlatform("windows8.1")]
public partial struct PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY
{
    /// <include file='PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY.xml' path='doc/member[@name="PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY.Anonymous"]/*' />
    [NativeTypeName("_PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY::(anonymous union at C:/Program Files (x86)/Windows Kits/10/Include/10.0.22000.0/um/winnt.h:12850:5)")]
    public _Anonymous_e__Union Anonymous;

    /// <include file='_Anonymous_e__Union.xml' path='doc/member[@name="_Anonymous_e__Union.Flags"]/*' />
    public ref uint Flags
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            // Mutable ref to the raw DWORD that backs all of the bitfields below.
            return ref MemoryMarshal.GetReference(MemoryMarshal.CreateSpan(ref Anonymous.Flags, 1));
        }
    }

    // The following properties forward to the packed bitfield accessors in the nested struct.

    /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableControlFlowGuard"]/*' />
    public uint EnableControlFlowGuard
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            return Anonymous.Anonymous.EnableControlFlowGuard;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        set
        {
            Anonymous.Anonymous.EnableControlFlowGuard = value;
        }
    }

    /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableExportSuppression"]/*' />
    public uint EnableExportSuppression
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            return Anonymous.Anonymous.EnableExportSuppression;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        set
        {
            Anonymous.Anonymous.EnableExportSuppression = value;
        }
    }

    /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.StrictMode"]/*' />
    public uint StrictMode
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            return Anonymous.Anonymous.StrictMode;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        set
        {
            Anonymous.Anonymous.StrictMode = value;
        }
    }

    /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableXfg"]/*' />
    public uint EnableXfg
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            return Anonymous.Anonymous.EnableXfg;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        set
        {
            Anonymous.Anonymous.EnableXfg = value;
        }
    }

    /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableXfgAuditMode"]/*' />
    public uint EnableXfgAuditMode
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            return Anonymous.Anonymous.EnableXfgAuditMode;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        set
        {
            Anonymous.Anonymous.EnableXfgAuditMode = value;
        }
    }

    /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.ReservedFlags"]/*' />
    public uint ReservedFlags
    {
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get
        {
            return Anonymous.Anonymous.ReservedFlags;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        set
        {
            Anonymous.Anonymous.ReservedFlags = value;
        }
    }

    /// <include file='_Anonymous_e__Union.xml' path='doc/member[@name="_Anonymous_e__Union"]/*' />
    [StructLayout(LayoutKind.Explicit)]
    public partial struct _Anonymous_e__Union
    {
        // Both members share offset 0: Flags is the raw DWORD view of the bitfield struct.
        /// <include file='_Anonymous_e__Union.xml' path='doc/member[@name="_Anonymous_e__Union.Flags"]/*' />
        [FieldOffset(0)]
        [NativeTypeName("DWORD")]
        public uint Flags;

        /// <include file='_Anonymous_e__Union.xml' path='doc/member[@name="_Anonymous_e__Union.Anonymous"]/*' />
        [FieldOffset(0)]
        [NativeTypeName("_PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY::(anonymous struct at C:/Program Files (x86)/Windows Kits/10/Include/10.0.22000.0/um/winnt.h:12852:9)")]
        public _Anonymous_e__Struct Anonymous;

        /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct"]/*' />
        public partial struct _Anonymous_e__Struct
        {
            // Raw 32-bit storage for the C bitfields below (bits 0..4 are flags, 5..31 reserved).
            public uint _bitfield;

            // bit 0
            /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableControlFlowGuard"]/*' />
            [NativeTypeName("DWORD : 1")]
            public uint EnableControlFlowGuard
            {
                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                get
                {
                    return _bitfield & 0x1u;
                }

                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                set
                {
                    _bitfield = (_bitfield & ~0x1u) | (value & 0x1u);
                }
            }

            // bit 1
            /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableExportSuppression"]/*' />
            [NativeTypeName("DWORD : 1")]
            public uint EnableExportSuppression
            {
                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                get
                {
                    return (_bitfield >> 1) & 0x1u;
                }

                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                set
                {
                    _bitfield = (_bitfield & ~(0x1u << 1)) | ((value & 0x1u) << 1);
                }
            }

            // bit 2
            /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.StrictMode"]/*' />
            [NativeTypeName("DWORD : 1")]
            public uint StrictMode
            {
                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                get
                {
                    return (_bitfield >> 2) & 0x1u;
                }

                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                set
                {
                    _bitfield = (_bitfield & ~(0x1u << 2)) | ((value & 0x1u) << 2);
                }
            }

            // bit 3
            /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableXfg"]/*' />
            [NativeTypeName("DWORD : 1")]
            public uint EnableXfg
            {
                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                get
                {
                    return (_bitfield >> 3) & 0x1u;
                }

                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                set
                {
                    _bitfield = (_bitfield & ~(0x1u << 3)) | ((value & 0x1u) << 3);
                }
            }

            // bit 4
            /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.EnableXfgAuditMode"]/*' />
            [NativeTypeName("DWORD : 1")]
            public uint EnableXfgAuditMode
            {
                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                get
                {
                    return (_bitfield >> 4) & 0x1u;
                }

                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                set
                {
                    _bitfield = (_bitfield & ~(0x1u << 4)) | ((value & 0x1u) << 4);
                }
            }

            // bits 5..31 (27 bits)
            /// <include file='_Anonymous_e__Struct.xml' path='doc/member[@name="_Anonymous_e__Struct.ReservedFlags"]/*' />
            [NativeTypeName("DWORD : 27")]
            public uint ReservedFlags
            {
                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                get
                {
                    return (_bitfield >> 5) & 0x7FFFFFFu;
                }

                [MethodImpl(MethodImplOptions.AggressiveInlining)]
                set
                {
                    _bitfield = (_bitfield & ~(0x7FFFFFFu << 5)) | ((value & 0x7FFFFFFu) << 5);
                }
            }
        }
    }
}
35.610442
175
0.579677
[ "MIT" ]
reflectronic/terrafx.interop.windows
sources/Interop/Windows/Windows/um/winnt/PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY.cs
8,869
C#
using System;
using System.Threading.Tasks;
using MicroServices.Services.Activities.Domain.Models;

namespace MicroServices.Services.Activities.Domain.Repositories
{
    /// <summary>
    /// Persistence abstraction for <see cref="Activity"/> entities.
    /// </summary>
    public interface IActivityRepository
    {
        /// <summary>
        /// Fetches the activity with the given id.
        /// NOTE(review): the not-found contract (null vs throw) is not visible here —
        /// confirm against implementations.
        /// </summary>
        Task<Activity> GetAsync(Guid id);

        /// <summary>
        /// Adds an activity to the underlying store.
        /// </summary>
        Task AddAsync(Activity activity);
    }
}
25.416667
63
0.754098
[ "MIT" ]
berkayalcin/Micro-Services
src/MicroServices.Services.Activities/Domain/Repositories/IActivityRepository.cs
307
C#
namespace PCG.MapIngredientConfigs
{
	using UnityEngine;

	/// <summary>
	/// ScriptableObject configuration for the civilization map ingredient,
	/// creatable from the editor via Osnowa/PCG/Maps/CivilizationIngredientConfig.
	/// </summary>
	[CreateAssetMenu(fileName = "CivilizationIngredientConfig", menuName = "Osnowa/PCG/Maps/CivilizationIngredientConfig", order = 0)]
	public class CivilizationIngredientConfig : MoistureIngredientConfig
	{
		// Presumably these curves remap terrain factors (height, fresh water, soil)
		// to a civilization-placement weight — TODO confirm in the generator that
		// consumes this config; the mapping is not visible from this file.
		public AnimationCurve HeightInfluence;
		public AnimationCurve SweetWaterInfluence;
		public AnimationCurve SoilInfluence;
	}
}
32.75
131
0.824427
[ "MIT" ]
azsdaja/Osnowa
Assets/Scripts/PCG/MapIngredientConfigs/CivilizationIngredientConfig.cs
395
C#
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Xml; using Microsoft.Build.BackEnd; using Microsoft.Build.Construction; using Microsoft.Build.Definition; using Microsoft.Build.Evaluation; using Microsoft.Build.Execution; using Microsoft.Build.Framework; using Microsoft.Build.Shared; using Microsoft.Build.UnitTests.BackEnd; using Shouldly; using Xunit; using Xunit.Abstractions; using static Microsoft.Build.Engine.UnitTests.TestComparers.ProjectInstanceModelTestComparers; #nullable disable namespace Microsoft.Build.UnitTests.OM.Instance { /// <summary> /// Tests for ProjectInstance internal members /// </summary> public class ProjectInstance_Internal_Tests { private readonly ITestOutputHelper _output; public ProjectInstance_Internal_Tests(ITestOutputHelper output) { _output = output; } /// <summary> /// Read task registrations /// </summary> [Fact] public void GetTaskRegistrations() { try { string projectFileContent = @" <Project> <UsingTask TaskName='t0' AssemblyFile='af0'/> <UsingTask TaskName='t1' AssemblyFile='af1a'/> <ItemGroup> <i Include='i0'/> </ItemGroup> <Import Project='{0}'/> </Project>"; string importContent = @" <Project> <UsingTask TaskName='t1' AssemblyName='an1' Condition=""'$(p)'=='v'""/> <UsingTask TaskName='t2' AssemblyName='an2' Condition=""'@(i)'=='i0'""/> <UsingTask TaskName='t3' AssemblyFile='af' Condition='false'/> <PropertyGroup> <p>v</p> </PropertyGroup> </Project>"; string importPath = ObjectModelHelpers.CreateFileInTempProjectDirectory("import.targets", importContent); projectFileContent = String.Format(projectFileContent, importPath); ProjectInstance project = new Project(ProjectRootElement.Create(XmlReader.Create(new StringReader(projectFileContent)))).CreateProjectInstance(); Assert.Equal(3, project.TaskRegistry.TaskRegistrations.Count); 
Assert.Equal(Path.Combine(Directory.GetCurrentDirectory(), "af0"), project.TaskRegistry.TaskRegistrations[new TaskRegistry.RegisteredTaskIdentity("t0", null)][0].TaskFactoryAssemblyLoadInfo.AssemblyFile); Assert.Equal(Path.Combine(Directory.GetCurrentDirectory(), "af1a"), project.TaskRegistry.TaskRegistrations[new TaskRegistry.RegisteredTaskIdentity("t1", null)][0].TaskFactoryAssemblyLoadInfo.AssemblyFile); Assert.Equal("an1", project.TaskRegistry.TaskRegistrations[new TaskRegistry.RegisteredTaskIdentity("t1", null)][1].TaskFactoryAssemblyLoadInfo.AssemblyName); Assert.Equal("an2", project.TaskRegistry.TaskRegistrations[new TaskRegistry.RegisteredTaskIdentity("t2", null)][0].TaskFactoryAssemblyLoadInfo.AssemblyName); } finally { ObjectModelHelpers.DeleteTempProjectDirectory(); } } /// <summary> /// InitialTargets and DefaultTargets with imported projects. /// DefaultTargets are not read from imported projects. /// InitialTargets are gathered from imports depth-first. /// </summary> [Fact] public void InitialTargetsDefaultTargets() { try { string projectFileContent = @" <Project DefaultTargets='d0a;d0b' InitialTargets='i0a;i0b'> <Import Project='{0}'/> <Import Project='{1}'/> </Project>"; string import1Content = @" <Project DefaultTargets='d1a;d1b' InitialTargets='i1a;i1b'> <Import Project='{0}'/> </Project>"; string import2Content = @"<Project DefaultTargets='d2a;2db' InitialTargets='i2a;i2b'/>"; string import3Content = @"<Project DefaultTargets='d3a;d3b' InitialTargets='i3a;i3b'/>"; string import2Path = ObjectModelHelpers.CreateFileInTempProjectDirectory("import2.targets", import2Content); string import3Path = ObjectModelHelpers.CreateFileInTempProjectDirectory("import3.targets", import3Content); import1Content = String.Format(import1Content, import3Path); string import1Path = ObjectModelHelpers.CreateFileInTempProjectDirectory("import1.targets", import1Content); projectFileContent = String.Format(projectFileContent, import1Path, import2Path); ProjectInstance 
project = new Project(ProjectRootElement.Create(XmlReader.Create(new StringReader(projectFileContent)))).CreateProjectInstance(); Helpers.AssertListsValueEqual(new string[] { "d0a", "d0b" }, project.DefaultTargets); Helpers.AssertListsValueEqual(new string[] { "i0a", "i0b", "i1a", "i1b", "i3a", "i3b", "i2a", "i2b" }, project.InitialTargets); } finally { ObjectModelHelpers.DeleteTempProjectDirectory(); } } /// <summary> /// InitialTargets and DefaultTargets with imported projects. /// DefaultTargets are not read from imported projects. /// InitialTargets are gathered from imports depth-first. /// </summary> [Fact] public void InitialTargetsDefaultTargetsEscaped() { try { string projectFileContent = @" <Project DefaultTargets='d0a%3bd0b' InitialTargets='i0a%3bi0b'> </Project>"; ProjectInstance project = new Project(ProjectRootElement.Create(XmlReader.Create(new StringReader(projectFileContent)))).CreateProjectInstance(); Helpers.AssertListsValueEqual(new string[] { "d0a;d0b" }, project.DefaultTargets); Helpers.AssertListsValueEqual(new string[] { "i0a;i0b" }, project.InitialTargets); } finally { ObjectModelHelpers.DeleteTempProjectDirectory(); } } /// <summary> /// Read property group under target /// </summary> [Fact] public void GetPropertyGroupUnderTarget() { string content = @" <Project> <Target Name='t'> <PropertyGroup Condition='c1'> <p1 Condition='c2'>v1</p1> <p2/> </PropertyGroup> </Target> </Project> "; ProjectInstance p = GetProjectInstance(content); ProjectPropertyGroupTaskInstance propertyGroup = (ProjectPropertyGroupTaskInstance)(p.Targets["t"].Children[0]); Assert.Equal("c1", propertyGroup.Condition); List<ProjectPropertyGroupTaskPropertyInstance> properties = Helpers.MakeList(propertyGroup.Properties); Assert.Equal(2, properties.Count); Assert.Equal("c2", properties[0].Condition); Assert.Equal("v1", properties[0].Value); Assert.Equal(String.Empty, properties[1].Condition); Assert.Equal(String.Empty, properties[1].Value); } /// <summary> /// Read item 
group under target /// </summary> [Fact] public void GetItemGroupUnderTarget() { string content = @" <Project> <Target Name='t'> <ItemGroup Condition='c1'> <i Include='i1' Exclude='e1' Condition='c2'> <m Condition='c3'>m1</m> <n>n1</n> </i> <j Remove='r1'/> <k> <o>o1</o> </k> </ItemGroup> </Target> </Project> "; ProjectInstance p = GetProjectInstance(content); ProjectItemGroupTaskInstance itemGroup = (ProjectItemGroupTaskInstance)(p.Targets["t"].Children[0]); Assert.Equal("c1", itemGroup.Condition); List<ProjectItemGroupTaskItemInstance> items = Helpers.MakeList(itemGroup.Items); Assert.Equal(3, items.Count); Assert.Equal("i1", items[0].Include); Assert.Equal("e1", items[0].Exclude); Assert.Equal(String.Empty, items[0].Remove); Assert.Equal("c2", items[0].Condition); Assert.Equal(String.Empty, items[1].Include); Assert.Equal(String.Empty, items[1].Exclude); Assert.Equal("r1", items[1].Remove); Assert.Equal(String.Empty, items[1].Condition); Assert.Equal(String.Empty, items[2].Include); Assert.Equal(String.Empty, items[2].Exclude); Assert.Equal(String.Empty, items[2].Remove); Assert.Equal(String.Empty, items[2].Condition); List<ProjectItemGroupTaskMetadataInstance> metadata1 = Helpers.MakeList(items[0].Metadata); List<ProjectItemGroupTaskMetadataInstance> metadata2 = Helpers.MakeList(items[1].Metadata); List<ProjectItemGroupTaskMetadataInstance> metadata3 = Helpers.MakeList(items[2].Metadata); Assert.Equal(2, metadata1.Count); Assert.Empty(metadata2); Assert.Single(metadata3); Assert.Equal("c3", metadata1[0].Condition); Assert.Equal("m1", metadata1[0].Value); Assert.Equal(String.Empty, metadata1[1].Condition); Assert.Equal("n1", metadata1[1].Value); Assert.Equal(String.Empty, metadata3[0].Condition); Assert.Equal("o1", metadata3[0].Value); } /// <summary> /// Task registry accessor /// </summary> [Fact] public void GetTaskRegistry() { ProjectInstance p = GetSampleProjectInstance(); Assert.True(p.TaskRegistry != null); } /// <summary> /// Global properties accessor 
/// </summary> [Fact] public void GetGlobalProperties() { ProjectInstance p = GetSampleProjectInstance(); Assert.Equal("v1", p.GlobalPropertiesDictionary["g1"].EvaluatedValue); Assert.Equal("v2", p.GlobalPropertiesDictionary["g2"].EvaluatedValue); } /// <summary> /// ToolsVersion accessor /// </summary> [Fact] public void GetToolsVersion() { ProjectInstance p = GetSampleProjectInstance(); Assert.Equal(ObjectModelHelpers.MSBuildDefaultToolsVersion, p.Toolset.ToolsVersion); } [Fact] public void UsingExplicitToolsVersionShouldBeFalseWhenNoToolsetIsReferencedInProject() { var projectInstance = new ProjectInstance( new ProjectRootElement( XmlReader.Create(new StringReader("<Project></Project>")), ProjectCollection.GlobalProjectCollection.ProjectRootElementCache, false, false) ); projectInstance.UsingDifferentToolsVersionFromProjectFile.ShouldBeFalse(); } /// <summary> /// Toolset data is cloned properly /// </summary> [Fact] public void CloneToolsetData() { var projectCollection = new ProjectCollection(); CreateMockToolsetIfNotExists("TESTTV", projectCollection); ProjectInstance first = GetSampleProjectInstance(null, null, projectCollection, toolsVersion: "TESTTV"); ProjectInstance second = first.DeepCopy(); Assert.Equal(first.ToolsVersion, second.ToolsVersion); Assert.Equal(first.ExplicitToolsVersion, second.ExplicitToolsVersion); Assert.Equal(first.ExplicitToolsVersionSpecified, second.ExplicitToolsVersionSpecified); } /// <summary> /// Test ProjectInstance's surfacing of the sub-toolset version /// </summary> [Fact] public void GetSubToolsetVersion() { string originalVisualStudioVersion = Environment.GetEnvironmentVariable("VisualStudioVersion"); try { Environment.SetEnvironmentVariable("VisualStudioVersion", null); ProjectInstance p = GetSampleProjectInstance(null, null, new ProjectCollection()); Assert.Equal(ObjectModelHelpers.MSBuildDefaultToolsVersion, p.Toolset.ToolsVersion); Assert.Equal(p.Toolset.DefaultSubToolsetVersion, p.SubToolsetVersion); if 
(p.Toolset.DefaultSubToolsetVersion == null) { Assert.Equal(MSBuildConstants.CurrentVisualStudioVersion, p.GetPropertyValue("VisualStudioVersion")); } else { Assert.Equal(p.Toolset.DefaultSubToolsetVersion, p.GetPropertyValue("VisualStudioVersion")); } } finally { Environment.SetEnvironmentVariable("VisualStudioVersion", originalVisualStudioVersion); } } /// <summary> /// Test ProjectInstance's surfacing of the sub-toolset version when it is overridden by a value in the /// environment /// </summary> [Fact] [Trait("Category", "mono-osx-failing")] public void GetSubToolsetVersion_FromEnvironment() { string originalVisualStudioVersion = Environment.GetEnvironmentVariable("VisualStudioVersion"); try { Environment.SetEnvironmentVariable("VisualStudioVersion", "ABCD"); ProjectInstance p = GetSampleProjectInstance(null, null, new ProjectCollection()); Assert.Equal(ObjectModelHelpers.MSBuildDefaultToolsVersion, p.Toolset.ToolsVersion); Assert.Equal("ABCD", p.SubToolsetVersion); Assert.Equal("ABCD", p.GetPropertyValue("VisualStudioVersion")); } finally { Environment.SetEnvironmentVariable("VisualStudioVersion", originalVisualStudioVersion); } } /// <summary> /// Test ProjectInstance's surfacing of the sub-toolset version when it is overridden by a global property /// </summary> [Fact] public void GetSubToolsetVersion_FromProjectGlobalProperties() { string originalVisualStudioVersion = Environment.GetEnvironmentVariable("VisualStudioVersion"); try { Environment.SetEnvironmentVariable("VisualStudioVersion", null); IDictionary<string, string> globalProperties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); globalProperties.Add("VisualStudioVersion", "ABCDE"); ProjectInstance p = GetSampleProjectInstance(null, globalProperties, new ProjectCollection()); Assert.Equal(ObjectModelHelpers.MSBuildDefaultToolsVersion, p.Toolset.ToolsVersion); Assert.Equal("ABCDE", p.SubToolsetVersion); Assert.Equal("ABCDE", p.GetPropertyValue("VisualStudioVersion")); } finally { 
Environment.SetEnvironmentVariable("VisualStudioVersion", originalVisualStudioVersion); } } /// <summary> /// Verify that if a sub-toolset version is passed to the constructor, it all other heuristic methods for /// getting the sub-toolset version. /// </summary> [Fact] public void GetSubToolsetVersion_FromConstructor() { string originalVisualStudioVersion = Environment.GetEnvironmentVariable("VisualStudioVersion"); try { Environment.SetEnvironmentVariable("VisualStudioVersion", "ABC"); string projectContent = @"<Project> <Target Name='t'> <Message Text='Hello'/> </Target> </Project>"; ProjectRootElement xml = ProjectRootElement.Create(XmlReader.Create(new StringReader(projectContent))); IDictionary<string, string> globalProperties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); globalProperties.Add("VisualStudioVersion", "ABCD"); IDictionary<string, string> projectCollectionGlobalProperties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); projectCollectionGlobalProperties.Add("VisualStudioVersion", "ABCDE"); ProjectInstance p = new ProjectInstance(xml, globalProperties, ObjectModelHelpers.MSBuildDefaultToolsVersion, "ABCDEF", new ProjectCollection(projectCollectionGlobalProperties)); Assert.Equal(ObjectModelHelpers.MSBuildDefaultToolsVersion, p.Toolset.ToolsVersion); Assert.Equal("ABCDEF", p.SubToolsetVersion); Assert.Equal("ABCDEF", p.GetPropertyValue("VisualStudioVersion")); } finally { Environment.SetEnvironmentVariable("VisualStudioVersion", originalVisualStudioVersion); } } /// <summary> /// DefaultTargets accessor /// </summary> [Fact] public void GetDefaultTargets() { ProjectInstance p = GetSampleProjectInstance(); Helpers.AssertListsValueEqual(new string[] { "dt" }, p.DefaultTargets); } /// <summary> /// InitialTargets accessor /// </summary> [Fact] public void GetInitialTargets() { ProjectInstance p = GetSampleProjectInstance(); Helpers.AssertListsValueEqual(new string[] { "it" }, p.InitialTargets); } /// <summary> 
/// Cloning project clones targets
/// </summary>
[Fact]
public void CloneTargets()
{
    var hostServices = new HostServices();

    ProjectInstance first = GetSampleProjectInstance(hostServices);
    ProjectInstance second = first.DeepCopy();

    // Targets, tasks are immutable so we can expect the same objects
    Assert.True(Object.ReferenceEquals(first.Targets, second.Targets));
    Assert.True(Object.ReferenceEquals(first.Targets["t"], second.Targets["t"]));

    var firstTasks = first.Targets["t"];
    var secondTasks = second.Targets["t"];

    // Child task instances are shared between the original and the copy as well.
    Assert.True(Object.ReferenceEquals(firstTasks.Children[0], secondTasks.Children[0]));
}

/// <summary>
/// Cloning project copies task registry
/// </summary>
[Fact]
public void CloneTaskRegistry()
{
    ProjectInstance first = GetSampleProjectInstance();
    ProjectInstance second = first.DeepCopy();

    // Task registry object should be immutable, so the copy shares the same instance.
    Assert.Same(first.TaskRegistry, second.TaskRegistry);
}

/// <summary>
/// Cloning project copies global properties
/// </summary>
[Fact]
public void CloneGlobalProperties()
{
    ProjectInstance first = GetSampleProjectInstance();
    ProjectInstance second = first.DeepCopy();

    // g1/g2 are the default global properties set up by GetProjectInstance.
    Assert.Equal("v1", second.GlobalPropertiesDictionary["g1"].EvaluatedValue);
    Assert.Equal("v2", second.GlobalPropertiesDictionary["g2"].EvaluatedValue);
}

/// <summary>
/// Cloning project copies default targets
/// </summary>
[Fact]
public void CloneDefaultTargets()
{
    ProjectInstance first = GetSampleProjectInstance();
    ProjectInstance second = first.DeepCopy();

    // "dt" is the DefaultTargets value declared in the sample project content.
    Helpers.AssertListsValueEqual(new string[] { "dt" }, second.DefaultTargets);
}

/// <summary>
/// Cloning project copies initial targets
/// </summary>
[Fact]
public void CloneInitialTargets()
{
    ProjectInstance first = GetSampleProjectInstance();
    ProjectInstance second = first.DeepCopy();

    // "it" is the InitialTargets value declared in the sample project content.
    Helpers.AssertListsValueEqual(new string[] { "it" }, second.InitialTargets);
}

/// <summary>
/// Cloning project copies toolsversion
/// </summary>
[Fact]
public void CloneToolsVersion()
{
    ProjectInstance first = GetSampleProjectInstance();
    ProjectInstance second = first.DeepCopy();

    Assert.Equal(first.Toolset, second.Toolset);
}

/// <summary>
/// Cloning a project copies its TranslateEntireState setting.
/// </summary>
[Fact]
public void CloneStateTranslation()
{
    ProjectInstance first = GetSampleProjectInstance();
    first.TranslateEntireState = true;

    ProjectInstance second = first.DeepCopy();

    Assert.True(second.TranslateEntireState);
}

/// <summary>
/// Tests building a simple project and verifying the log looks as expected.
/// </summary>
[Fact]
public void Build()
{
    // Setting the current directory to the MSBuild running location. It *should* be this
    // already, but if it's not some other test changed it and didn't change it back. If
    // the directory does not include the reference dlls the compilation will fail.
    Directory.SetCurrentDirectory(BuildEnvironmentHelper.Instance.CurrentMSBuildToolsDirectory);

    string projectFileContent = @"
<Project>
    <UsingTask TaskName='Microsoft.Build.Tasks.Message' AssemblyFile='Microsoft.Build.Tasks.Core.dll'/>
    <ItemGroup>
        <i Include='i0'/>
    </ItemGroup>
    <Target Name='Build'>
        <Message Text='Building...'/>
        <Message Text='Completed!'/>
    </Target>
</Project>";

    ProjectInstance projectInstance = GetProjectInstance(projectFileContent);
    List<ILogger> loggers = new List<ILogger>();
    MockLogger mockLogger = new MockLogger(_output);
    loggers.Add(mockLogger);

    bool success = projectInstance.Build("Build", loggers);

    Assert.True(success);
    mockLogger.AssertLogContains(new string[] { "Building...", "Completed!" });
}

// NOTE: backticks in the project contents below are rewritten to double quotes by
// ObjectModelHelpers.CleanupFileContents before parsing; {0} is filled in with the
// current tools version via string.Format.
[Theory]
[InlineData(
@"
<Project>
</Project>
")]
// Project with one of each direct child(indirect children trees are tested separately)
[InlineData(
@"
<Project InitialTargets=`t1` DefaultTargets=`t2` ToolsVersion=`{0}`>
    <UsingTask TaskName=`t1` AssemblyFile=`f1`/>
    <ItemDefinitionGroup>
        <i>
            <n>n1</n>
        </i>
    </ItemDefinitionGroup>
    <PropertyGroup>
        <p1>v1</p1>
    </PropertyGroup>
    <ItemGroup>
        <i Include='i0'/>
    </ItemGroup>
    <Target Name='t1'>
        <t1/>
    </Target>
    <Target Name='t2' BeforeTargets=`t1`>
        <t2/>
    </Target>
    <Target Name='t3' AfterTargets=`t2`>
        <t3/>
    </Target>
</Project>
")]
// Project with at least two instances of each direct child. Tests that collections serialize well.
[InlineData(
@"
<Project InitialTargets=`t1` DefaultTargets=`t2` ToolsVersion=`{0}`>
    <UsingTask TaskName=`t1` AssemblyFile=`f1`/>
    <UsingTask TaskName=`t2` AssemblyFile=`f2`/>
    <ItemDefinitionGroup>
        <i>
            <n>n1</n>
        </i>
    </ItemDefinitionGroup>
    <ItemDefinitionGroup>
        <i2>
            <n2>n2</n2>
        </i2>
    </ItemDefinitionGroup>
    <PropertyGroup>
        <p1>v1</p1>
    </PropertyGroup>
    <PropertyGroup>
        <p2>v2</p2>
    </PropertyGroup>
    <ItemGroup>
        <i Include='i1'/>
    </ItemGroup>
    <ItemGroup>
        <i2 Include='i2'>
            <m1 Condition=`1==1`>m1</m1>
            <m2>m2</m2>
        </i2>
    </ItemGroup>
    <Target Name='t1'>
        <t1/>
    </Target>
    <Target Name='t2' BeforeTargets=`t1`>
        <t2/>
    </Target>
    <Target Name='t3' AfterTargets=`t1`>
        <t3/>
    </Target>
    <Target Name='t4' BeforeTargets=`t1`>
        <t4/>
    </Target>
    <Target Name='t5' AfterTargets=`t1`>
        <t5/>
    </Target>
</Project>
")]
public void ProjectInstanceCanSerializeEntireStateViaTranslator(string projectContents)
{
    projectContents = string.Format(projectContents, MSBuildConstants.CurrentToolsVersion);

    var original = new ProjectInstance(ProjectRootElement.Create(XmlReader.Create(new StringReader(ObjectModelHelpers.CleanupFileContents(projectContents)))));
    original.TranslateEntireState = true;

    // Round-trip the instance through the binary translator and verify equality.
    ((ITranslatable)original).Translate(TranslationHelpers.GetWriteTranslator());
    var copy = ProjectInstance.FactoryForDeserialization(TranslationHelpers.GetReadTranslator());

    Assert.Equal(original, copy, new ProjectInstanceComparer());
}

// Factory signature shared by the evaluation-id test cases below: produce a
// ProjectInstance from a project file path, its ProjectRootElement, and a collection.
public delegate ProjectInstance ProjectInstanceFactory(string file, ProjectRootElement xml, ProjectCollection collection);

// One factory per construction path; each must yield an instance with a valid evaluation id.
public static IEnumerable<object[]> ProjectInstanceHasEvaluationIdTestData()
{
    // from file (new)
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) => new ProjectInstance(f, null, null, c)
    };

    // from file (factory method)
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) => ProjectInstance.FromFile(f, new ProjectOptions { ProjectCollection = c })
    };

    // from Project
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) => new Project(f, null, null, c).CreateProjectInstance()
    };

    // from DeepCopy
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) => new ProjectInstance(f, null, null, c).DeepCopy()
    };

    // from ProjectRootElement (new)
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) => new ProjectInstance(xml, null, null, c)
    };

    // from ProjectRootElement (factory method)
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) => ProjectInstance.FromProjectRootElement(xml, new ProjectOptions { ProjectCollection = c })
    };

    // from translated project instance
    yield return new ProjectInstanceFactory[]
    {
        (f, xml, c) =>
        {
            var pi = new ProjectInstance(f, null, null, c);
            pi.AddItem("foo", "bar");
            pi.TranslateEntireState = true;

            ((ITranslatable) pi).Translate(TranslationHelpers.GetWriteTranslator());
            var copy = ProjectInstance.FactoryForDeserialization(TranslationHelpers.GetReadTranslator());

            return copy;
        }
    };
}

[Theory]
[MemberData(nameof(ProjectInstanceHasEvaluationIdTestData))]
public void ProjectInstanceHasEvaluationId(ProjectInstanceFactory projectInstanceFactory)
{
    using (var env = TestEnvironment.Create())
    {
        // An empty project file on disk is enough; only the evaluation id is inspected.
        var file = env.CreateFile().Path;
        var projectCollection = env.CreateProjectCollection().Collection;
        var xml = ProjectRootElement.Create(projectCollection);
        xml.Save(file);

        var projectInstance = projectInstanceFactory.Invoke(file, xml, projectCollection);

        Assert.NotEqual(BuildEventContext.InvalidEvaluationId, projectInstance.EvaluationId);
    }
}

[Fact]
public void AddTargetAddsNewTarget()
{
    string projectFileContent = @"
<Project>
    <Target Name='a' />
</Project>";
    ProjectRootElement rootElement = ProjectRootElement.Create(XmlReader.Create(new StringReader(projectFileContent)));
    ProjectInstance projectInstance = new ProjectInstance(rootElement);

    ProjectTargetInstance targetInstance = projectInstance.AddTarget("b", "1==1", "inputs", "outputs", "returns", "keepDuplicateOutputs", "dependsOnTargets", "beforeTargets", "afterTargets", true);

    Assert.Equal(2, projectInstance.Targets.Count);
    Assert.Equal(targetInstance, projectInstance.Targets["b"]);

    // Each constructor argument must surface on the corresponding property.
    Assert.Equal("b", targetInstance.Name);
    Assert.Equal("1==1", targetInstance.Condition);
    Assert.Equal("inputs", targetInstance.Inputs);
    Assert.Equal("outputs", targetInstance.Outputs);
    Assert.Equal("returns", targetInstance.Returns);
    Assert.Equal("keepDuplicateOutputs", targetInstance.KeepDuplicateOutputs);
    Assert.Equal("dependsOnTargets", targetInstance.DependsOnTargets);
    Assert.Equal("beforeTargets", targetInstance.BeforeTargets);
    Assert.Equal("afterTargets", targetInstance.AfterTargets);

    // A synthesized target has no backing XML, so all attribute locations are empty.
    Assert.Equal(projectInstance.ProjectFileLocation, targetInstance.Location);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.ConditionLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.InputsLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.OutputsLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.ReturnsLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.KeepDuplicateOutputsLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.DependsOnTargetsLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.BeforeTargetsLocation);
    Assert.Equal(ElementLocation.EmptyLocation, targetInstance.AfterTargetsLocation);
    Assert.True(targetInstance.ParentProjectSupportsReturnsAttribute);
}

[Fact]
public void AddTargetThrowsWithExistingTarget()
{
    string projectFileContent = @"
<Project>
    <Target Name='a' />
</Project>";
    ProjectRootElement rootElement = ProjectRootElement.Create(XmlReader.Create(new StringReader(projectFileContent)));
    ProjectInstance projectInstance = new ProjectInstance(rootElement);

    // Adding a target whose name collides with an existing one must throw.
    Assert.Throws<InternalErrorException>(() => projectInstance.AddTarget("a", "1==1", "inputs", "outputs", "returns", "keepDuplicateOutputs", "dependsOnTargets", "beforeTargets", "afterTargets", true));
}

[Theory]
[InlineData(false, ProjectLoadSettings.Default)]
[InlineData(false, ProjectLoadSettings.RecordDuplicateButNotCircularImports)]
[InlineData(true, ProjectLoadSettings.Default)]
[InlineData(true, ProjectLoadSettings.RecordDuplicateButNotCircularImports)]
public void GetImportPathsAndImportPathsIncludingDuplicates(bool useDirectConstruction, ProjectLoadSettings projectLoadSettings)
{
    try
    {
        // The root imports import1 twice and import2 once; import1 in turn imports
        // import2 and import3 — so import1 and import2 are duplicated overall.
        string projectFileContent = @"
<Project>
    <Import Project='{0}'/>
    <Import Project='{1}'/>
    <Import Project='{0}'/>
</Project>";

        string import1Content = @"
<Project>
    <Import Project='{0}'/>
    <Import Project='{1}'/>
</Project>";

        string import2Content = @"<Project />";
        string import3Content = @"<Project />";

        string import2Path = ObjectModelHelpers.CreateFileInTempProjectDirectory("import2.targets", import2Content);
        string import3Path = ObjectModelHelpers.CreateFileInTempProjectDirectory("import3.targets", import3Content);

        import1Content = string.Format(import1Content, import2Path, import3Path);
        string import1Path = ObjectModelHelpers.CreateFileInTempProjectDirectory("import1.targets", import1Content);

        projectFileContent = string.Format(projectFileContent, import1Path, import2Path);

        ProjectCollection projectCollection = new ProjectCollection();
        BuildParameters buildParameters = new BuildParameters(projectCollection)
        {
            ProjectLoadSettings = projectLoadSettings
        };
        BuildEventContext buildEventContext = new BuildEventContext(0, BuildEventContext.InvalidTargetId, BuildEventContext.InvalidProjectContextId, BuildEventContext.InvalidTaskId);

        ProjectRootElement rootElement = ProjectRootElement.Create(XmlReader.Create(new StringReader(projectFileContent)));

        // Exercise both construction paths: the internal direct constructor and
        // evaluation via Project.CreateProjectInstance.
        ProjectInstance projectInstance = useDirectConstruction
            ? new ProjectInstance(rootElement, globalProperties: null, toolsVersion: null, buildParameters, projectCollection.LoggingService, buildEventContext, sdkResolverService: null, 0)
            : new Project(rootElement, globalProperties: null, toolsVersion: null, projectCollection, projectLoadSettings).CreateProjectInstance();

        string[] expectedImportPaths = new string[] { import1Path, import2Path, import3Path };

        // Duplicates are recorded only when the corresponding load setting is on.
        string[] expectedImportPathsIncludingDuplicates = projectLoadSettings.HasFlag(ProjectLoadSettings.RecordDuplicateButNotCircularImports)
            ? new string[] { import1Path, import2Path, import3Path, import2Path, import1Path }
            : expectedImportPaths;

        Helpers.AssertListsValueEqual(expectedImportPaths, projectInstance.ImportPaths.ToList());
        Helpers.AssertListsValueEqual(expectedImportPathsIncludingDuplicates, projectInstance.ImportPathsIncludingDuplicates.ToList());
    }
    finally
    {
        ObjectModelHelpers.DeleteTempProjectDirectory();
    }
}

/// <summary>
/// Create a ProjectInstance from provided project content
/// </summary>
private static ProjectInstance GetProjectInstance(string content)
{
    return GetProjectInstance(content, null);
}

/// <summary>
/// Create a ProjectInstance from provided project content and host services object
/// </summary>
private static ProjectInstance GetProjectInstance(string content, HostServices hostServices)
{
    return GetProjectInstance(content, hostServices, null, null);
}

/// <summary>
/// Create a ProjectInstance from provided project content and host services object.
/// When no global properties are supplied, defaults g1=v1 and g2=v2 are used.
/// NOTE(review): hostServices is accepted but never consumed in this overload —
/// confirm whether it should be passed through to the Project constructor.
/// </summary>
private static ProjectInstance GetProjectInstance(string content, HostServices hostServices, IDictionary<string, string> globalProperties, ProjectCollection projectCollection, string toolsVersion = null)
{
    XmlReader reader = XmlReader.Create(new StringReader(content));

    if (globalProperties == null)
    {
        // choose some interesting defaults if we weren't explicitly asked to use a set.
        globalProperties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        globalProperties.Add("g1", "v1");
        globalProperties.Add("g2", "v2");
    }

    Project project = new Project(reader, globalProperties, toolsVersion ?? ObjectModelHelpers.MSBuildDefaultToolsVersion, projectCollection ?? ProjectCollection.GlobalProjectCollection);

    ProjectInstance instance = project.CreateProjectInstance();

    return instance;
}

/// <summary>
/// Create a ProjectInstance with some items and properties and targets
/// </summary>
private static ProjectInstance GetSampleProjectInstance()
{
    return GetSampleProjectInstance(null);
}

/// <summary>
/// Create a ProjectInstance with some items and properties and targets
/// </summary>
private static ProjectInstance GetSampleProjectInstance(HostServices hostServices)
{
    return GetSampleProjectInstance(hostServices, null, null);
}

/// <summary>
/// Create a ProjectInstance with some items and properties and targets.
/// Declares InitialTargets='it' and DefaultTargets='dt', which several tests assert on.
/// </summary>
private static ProjectInstance GetSampleProjectInstance(HostServices hostServices, IDictionary<string, string> globalProperties, ProjectCollection projectCollection, string toolsVersion = null)
{
    // Only emit a ToolsVersion attribute when one was explicitly requested.
    string toolsVersionSubstring = toolsVersion != null ? "ToolsVersion=\"" + toolsVersion + "\" " : String.Empty;

    string content = @"
<Project InitialTargets='it' DefaultTargets='dt' " + toolsVersionSubstring + @">
    <PropertyGroup>
        <p1>v1</p1>
        <p2>v2</p2>
        <p2>$(p2)X$(p)</p2>
    </PropertyGroup>
    <ItemGroup>
        <i Include='i0'/>
        <i Include='i1'>
            <m>m1</m>
        </i>
        <i Include='$(p1)'/>
    </ItemGroup>
    <Target Name='t'>
        <t1 a='a1' b='b1' ContinueOnError='coe' Condition='c'/>
        <t2/>
    </Target>
    <Target Name='tt'/>
</Project>
";
    ProjectInstance p = GetProjectInstance(content, hostServices, globalProperties, projectCollection, toolsVersion);

    return p;
}

/// <summary>
/// Creates a toolset with the given tools version if one does not already exist.
/// The new toolset clones the tools path and properties of the default toolset.
/// </summary>
private static void CreateMockToolsetIfNotExists(string toolsVersion, ProjectCollection projectCollection)
{
    ProjectCollection pc = projectCollection;

    if (!pc.Toolsets.Any(t => String.Equals(t.ToolsVersion, toolsVersion, StringComparison.OrdinalIgnoreCase)))
    {
        Toolset template = pc.Toolsets.First(t => String.Equals(t.ToolsVersion, pc.DefaultToolsVersion, StringComparison.OrdinalIgnoreCase));
        var toolset = new Toolset(
            toolsVersion,
            template.ToolsPath,
            template.Properties.ToDictionary(p => p.Key, p => p.Value.EvaluatedValue),
            pc,
            null);
        pc.AddToolset(toolset);
    }
}
}
}
42.430911
221
0.571945
[ "MIT" ]
AlexanderSemenyak/msbuild
src/Build.UnitTests/Instance/ProjectInstance_Internal_Tests.cs
41,455
C#