content stringlengths 5 1.04M | avg_line_length float64 1.75 12.9k | max_line_length int64 2 244k | alphanum_fraction float64 0 0.98 | licenses list | repository_name stringlengths 7 92 | path stringlengths 3 249 | size int64 5 1.04M | lang stringclasses 2 values |
|---|---|---|---|---|---|---|---|---|
using Esri.ArcGISRuntime.Portal;
using Prism.Services.Dialogs;
using System.Collections.ObjectModel;
using WorkingWithMaps.Example.Core;
using WorkingWithMaps.Example.Models;
namespace WorkingWithMaps.Example.ViewModels.Dialogs
{
/// <summary>
/// View model backing the web map "Details" dialog. Exposes the selected
/// portal item and its comments, loaded when the dialog is opened.
/// </summary>
public class WebMapDetailsDialogViewModel : DialogViewModel
{
    // Backing fields for the bindable properties.
    private PortalItem _item;
    private ObservableCollection<CommentModel> _comments = new ObservableCollection<CommentModel>();

    public WebMapDetailsDialogViewModel(IApplicationService applicationService) : base(applicationService)
    {
        Title = "Details";
    }

    /// <summary>
    /// The portal item whose details are shown.
    /// </summary>
    public PortalItem Item
    {
        get => _item;
        set => SetProperty(ref _item, value);
    }

    /// <summary>
    /// Comments attached to the portal item.
    /// </summary>
    public ObservableCollection<CommentModel> Comments
    {
        get => _comments;
        set => SetProperty(ref _comments, value);
    }

    /// <summary>
    /// Reads the "model" dialog parameter and populates <see cref="Item"/>
    /// and <see cref="Comments"/> from it.
    /// </summary>
    public override async void OnDialogOpened(IDialogParameters parameters)
    {
        var model = parameters.GetValue<WebMapModel>("model");
        Item = model.Item;

        foreach (var comment in await Item.GetCommentsAsync())
        {
            Comments.Add(new CommentModel(comment));
        }
    }
}
}
| 30.627907 | 110 | 0.635535 | [
"Apache-2.0"
] | anttikajanus/working-with-maps-arcgis-runtime-dotnet | src/WorkingWithMaps/ViewModels/Dialogs/WebMapDetailsDialogViewModel.cs | 1,319 | C# |
using System;
using Xunit;
namespace Lokad.Numerics.Tests
{
/// <summary>
/// Verifies the integer base-2 logarithm implementations against
/// <see cref="Math.Log(double, double)"/> over a geometric sweep of inputs.
/// </summary>
public class Log2Tests
{
    [Fact]
    public void Log2_uint()
    {
        // Sweep 1 .. uint.MaxValue geometrically (factor 1.3) to cover all magnitudes.
        for (var value = 1.0; value < uint.MaxValue; value *= 1.3)
        {
            var input = (uint)value;
            var expected = (uint)Math.Log(input, 2.0);
            Assert.Equal(expected, FxMath.Log2(input));
        }
    }

    [Fact]
    public void Log2_ulong()
    {
        for (var value = 1.0; value < uint.MaxValue; value *= 1.3)
        {
            var input = (ulong)value;
            var expected = (ulong)Math.Log(input, 2.0);
            Assert.Equal(expected, FxMath.Log2(input));
        }
    }

    [Fact]
    public void Log2_WithLookup()
    {
        for (var value = 1.0; value < uint.MaxValue; value *= 1.3)
        {
            var input = (ulong)value;
            var expected = (uint)Math.Log(input, 2.0);
            Assert.Equal(expected, AltMath.Log2(input));
        }
    }
}
}
| 22.717949 | 70 | 0.404063 | [
"MIT"
] | Lokad/FasterMath | test/Lokad.FasterMath.Tests/Log2Tests.cs | 888 | C# |
using System;
using System.ServiceModel.Channels;
using Nelibur.ServiceModel.Services.Maps;
namespace Nelibur.ServiceModel.Services
{
/// <summary>
/// Static SOAP entry point: converts incoming <see cref="Message"/> instances
/// into request metadata and dispatches them through the base service.
/// </summary>
public sealed class NeliburSoapService : NeliburService
{
    // Static-only API; no instances are ever created.
    private NeliburSoapService()
    {
    }

    /// <summary>
    /// Applies the given configuration action and returns the configuration.
    /// </summary>
    public static IConfiguration Configure(Action<IConfiguration> action)
    {
        action.Invoke(_configuration);
        return _configuration;
    }

    /// <summary>
    /// Processes a request/response SOAP message.
    /// </summary>
    public static Message Process(Message message)
    {
        return Process(_requests.FromSoapMessage(message));
    }

    /// <summary>
    /// Processes a one-way (fire-and-forget) SOAP message.
    /// </summary>
    public static void ProcessOneWay(Message message)
    {
        ProcessOneWay(_requests.FromSoapMessage(message));
    }
}
}
| 25.53125 | 77 | 0.638923 | [
"MIT"
] | Nelibur/Nelibur | Source/Nelibur/ServiceModel/Services/NeliburSoapService.cs | 819 | C# |
using System;
using System.Text;
using System.Web.UI.HtmlControls;
using System.Web.UI.WebControls;
using CMS.Base.Web.UI;
using CMS.DeviceProfiles;
using CMS.Helpers;
using CMS.PortalEngine.Web.UI;
using CMS.UIControls;
/// <summary>
/// Master page for modal dialog pages. Exposes the dialog layout panels
/// (header, body, footer) to content pages and registers the client-side
/// scripts that keep the scrollable working area sized to the viewport.
/// </summary>
public partial class CMSMasterPages_UI_Dialogs_ModalDialogPage : CMSMasterPage, ICMSModalMasterPage
{
    #region "Properties"

    /// <summary>
    /// PageTitle control.
    /// </summary>
    public override PageTitle Title
    {
        get
        {
            return titleElem;
        }
    }

    /// <summary>
    /// HeaderActions control. Falls back to the local actions element when the
    /// base page does not provide one.
    /// </summary>
    public override HeaderActions HeaderActions
    {
        get
        {
            if (base.HeaderActions != null)
            {
                return base.HeaderActions;
            }
            return actionsElem.HeaderActions;
        }
    }

    /// <summary>
    /// Container with header actions menu.
    /// </summary>
    public override ObjectEditMenu ObjectEditMenu
    {
        get
        {
            if (actionsElem != null)
            {
                return actionsElem.ObjectEditMenu;
            }
            return base.ObjectEditMenu;
        }
    }

    /// <summary>
    /// Header container.
    /// </summary>
    public override Panel HeaderContainer
    {
        get
        {
            return pnlContainerHeader;
        }
    }

    /// <summary>
    /// Body panel.
    /// </summary>
    public override Panel PanelBody
    {
        get
        {
            return pnlBody;
        }
    }

    /// <summary>
    /// Gets the content panel (the scrollable area between header and footer).
    /// </summary>
    public override Panel PanelContent
    {
        get
        {
            return divContent;
        }
    }

    /// <summary>
    /// Gets the header panel.
    /// </summary>
    public override Panel PanelHeader
    {
        get
        {
            return pnlHeader;
        }
    }

    /// <summary>
    /// Footer container.
    /// </summary>
    public override Panel FooterContainer
    {
        get
        {
            return pnlFooterContent;
        }
    }

    /// <summary>
    /// Gets the labels container.
    /// </summary>
    public override PlaceHolder PlaceholderLabels
    {
        get
        {
            return plcLabels;
        }
    }

    /// <summary>
    /// Body (html body element) object.
    /// </summary>
    public override HtmlGenericControl Body
    {
        get
        {
            return bodyElem;
        }
    }

    /// <summary>
    /// Prepared for specifying the additional HEAD elements.
    /// </summary>
    public override Literal HeadElements
    {
        get
        {
            return ltlHeadElements;
        }
        set
        {
            ltlHeadElements = value;
        }
    }

    /// <summary>
    /// Panel containing title actions displayed above scrolling content.
    /// </summary>
    public override Panel PanelTitleActions
    {
        get
        {
            return pnlTitleActions;
        }
    }

    /// <summary>
    /// Gets placeholder located after form element.
    /// </summary>
    public override PlaceHolder AfterFormPlaceholder
    {
        get
        {
            return plcAfterForm;
        }
    }

    /// <summary>
    /// HeaderActionsPermissions place holder.
    /// </summary>
    public override UIPlaceHolder HeaderActionsPlaceHolder
    {
        get
        {
            return plcActionsPermissions;
        }
    }

    /// <summary>
    /// Fired when 'Save &amp; close' button is clicked and the content should be saved. Pages that use this master page should add handler to this event
    /// alike binding to save button <see cref="Button.OnClick"/> event.
    /// </summary>
    public event EventHandler Save;

    #endregion


    #region "Public methods"

    /// <summary>
    /// Sets JavaScript to the "Save &amp; close" button.
    /// </summary>
    /// <param name="javaScript">JavaScript to add to the Save &amp; Close button</param>
    public void SetSaveJavaScript(string javaScript)
    {
        btnSaveAndClose.OnClientClick = javaScript;
    }

    /// <summary>
    /// Sets JavaScript to be processed when user clicks the "Close" button or the area around the modal window.
    /// </summary>
    /// <param name="javaScript">JavaScript to be processed when user clicks the Close button or the area around the modal window</param>
    public void SetCloseJavaScript(string javaScript)
    {
        titleElem.SetCloseJavaScript(javaScript);
    }

    /// <summary>
    /// Shows Save &amp; Close button at the bottom of the page.
    /// </summary>
    public void ShowSaveAndCloseButton()
    {
        btnSaveAndClose.Visible = true;
    }

    /// <summary>
    /// Sets Save &amp; Close button resource string.
    /// </summary>
    /// <param name="resourceString">Resource string</param>
    public void SetSaveResourceString(string resourceString)
    {
        btnSaveAndClose.ResourceString = resourceString;
    }

    #endregion


    #region "Page events"

    protected override void OnInit(EventArgs e)
    {
        base.OnInit(e);

        PageStatusContainer = plcStatus;
    }

    protected void Page_Load(object sender, EventArgs e)
    {
        // Display panel with additional controls place holder if required
        if (DisplayControlsPanel)
        {
            pnlAdditionalControls.Visible = true;
        }

        // Display panel with site selector
        if (DisplaySiteSelectorPanel)
        {
            pnlSiteSelector.Visible = true;
        }

        // mBodyClass comes from the base master page infrastructure (not visible in this file)
        bodyElem.Attributes["class"] = mBodyClass;

        var device = DeviceContext.CurrentDevice;
        if (!device.IsMobile())
        {
            // Footer - apply fixed position
            pnlFooterContent.Style.Add("position", "fixed");
            pnlFooterContent.Style.Add("width", "100%");
            pnlFooterContent.Style.Add("bottom", "0px");
        }

        // Build the client-side ResizeWorkingArea() script that keeps the
        // scrollable content area ('divContent') sized to the viewport minus
        // the header and footer heights.
        StringBuilder resizeScript = new StringBuilder();
        resizeScript.Append(@"
var headerElem = null;
var footerElem = null;
var contentElem = null;
var jIframe = null;
var jIframeContents = null;
var oldClientWidth = 0;
var oldClientHeight = 0;
var dialogCMSHeaderPad = null;
var dialogCKFooter = null;
function ResizeWorkingAreaIE()
{
ResizeWorkingArea();
window.onresize = function() { ResizeWorkingArea(); };
}
function ResizeWorkingArea()
{
if (headerElem == null)
{
headerElem = document.getElementById('divHeader');
}
if (footerElem == null)
{
footerElem = document.getElementById('divFooter');
}
if (contentElem == null)
{
contentElem = document.getElementById('divContent');
}
if (dialogCMSHeaderPad == null)
{
dialogCMSHeaderPad = document.getElementById('CMSHeaderPad');
}
if (dialogCKFooter == null)
{
dialogCKFooter = document.getElementById('CKFooter');
}
if ((headerElem != null) && (contentElem != null))
{
var headerHeight = headerElem.offsetHeight + ((dialogCMSHeaderPad != null) ? dialogCMSHeaderPad.offsetHeight : 0);
var footerHeight = ((footerElem != null) ? footerElem.offsetHeight : 0) + ((dialogCKFooter != null) ? dialogCKFooter.offsetHeight : 0);
var height = ($cmsj(window).height() - headerHeight - footerHeight);
if (height > 0)
{
var h = (height > 0 ? height : '0') + 'px';
if (contentElem.style.height != h)
{
contentElem.style.height = h;
}
}");

        if (device.IsMobile())
        {
            // Mobile browsers need the editable-text iframe sized explicitly.
            resizeScript.Append(@"
if ((jIframe == null) || (!jIframe.length)) {
jIframe = $cmsj('.EditableTextEdit iframe:first');
}
if ((jIframeContents == null) || (!jIframeContents.length)) {
jIframeContents = jIframe.contents();
}
// Set height of the iframe manually for mobile devices
jIframe.css('height', jIframeContents.height());
// WebKit browsers fix - width of the parent element of the iframe needs to be defined
jIframe.parent().width(jIframeContents.width());");
        }

        if (BrowserHelper.IsIE())
        {
            // IE-only branch: server control client IDs are spliced into the script.
            // NOTE(review): this multi-argument Append call appears to rely on a
            // CMS StringBuilder extension method (not a BCL overload) — confirm.
            resizeScript.Append(@"
var pnlBody = null;
var formElem = null;
var bodyElement = null;
if (pnlBody == null)
{
pnlBody = document.getElementById('", pnlBody.ClientID, @"');
}
if (formElem == null)
{
formElem = document.getElementById('", form1.ClientID, @"');
}
if (bodyElement == null)
{
bodyElement = document.getElementById('", bodyElem.ClientID, @"');
}
if ((bodyElement != null) && (formElem != null) && (pnlBody != null))
{
var newClientWidth = document.documentElement.clientWidth;
var newClientHeight = document.documentElement.clientHeight;
if (newClientWidth != oldClientWidth)
{
bodyElement.style.width = newClientWidth;
formElem.style.width = newClientWidth;
pnlBody.style.width = newClientWidth;
headerElem.style.width = newClientWidth;
contentElem.style.width = newClientWidth;
oldClientWidth = newClientWidth;
}
if (newClientHeight != oldClientHeight)
{
bodyElement.style.height = newClientHeight;
formElem.style.height = newClientHeight;
pnlBody.style.height = newClientHeight;
oldClientHeight = newClientHeight;
}
}");
        }

        resizeScript.Append(@"
}
if (window.afterResize) {
window.afterResize();
}
}");

        if (BrowserHelper.IsIE())
        {
            // IE does not reliably fire resize events here, so poll instead.
            resizeScript.Append(@"
var timer = setInterval('ResizeWorkingAreaIE();', 50);");
        }
        else
        {
            resizeScript.Append(@"
window.onresize = function() { ResizeWorkingArea(); };
window.onload = function() { ResizeWorkingArea(); };");
        }

        ScriptHelper.RegisterClientScriptBlock(this, typeof(string), "resizeScript", ScriptHelper.GetScript(resizeScript.ToString()));

        // Register a script that will re-calculate content height when the CKToolbar is displayed
        const string ckEditorScript = @"
if (window.CKEDITOR) {
CKEDITOR.on('instanceCreated', function(e) {
e.editor.on('instanceReady', function(e) { setTimeout(function() { ResizeWorkingArea(); }, 200); });
});
}";
        ScriptHelper.RegisterStartupScript(this, typeof(string), "ckEditorScript", ckEditorScript, true);

        // Register header shadow script
        ScriptHelper.RegisterModule(Page, "CMS/HeaderShadow");
    }

    protected override void Render(System.Web.UI.HtmlTextWriter writer)
    {
        // Hide actions panel if no actions are present and DisplayActionsPanel is false
        if (!DisplayActionsPanel)
        {
            if (!HeaderActions.IsVisible() && (plcActions.Controls.Count == 0))
            {
                pnlActions.Visible = false;
            }
        }

        base.Render(writer);
    }

    /// <summary>
    /// Fires <see cref="ICMSModalMasterPage.Save"/>.
    /// </summary>
    protected void btnSaveAndClose_OnClick(object sender, EventArgs e)
    {
        if (Save != null)
        {
            Save(sender, e);
        }
    }

    #endregion
}
| 24.82684 | 149 | 0.570096 | [
"MIT"
] | BryanSoltis/KenticoMVCWidgetShowcase | CMS/CMSMasterPages/UI/Dialogs/ModalDialogPage.master.cs | 11,472 | C# |
/*
* Swagger Petstore
*
* This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
*
* OpenAPI spec version: 1.0.0
* Contact: apiteam@swagger.io
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = IO.Swagger.Client.SwaggerDateConverter;
namespace IO.Swagger.Model
{
/// <summary>
/// Body6
/// </summary>
/// <summary>
/// Body6 — request body carrying an updated name and status for an animal.
/// </summary>
[DataContract]
public partial class Body6 : IEquatable<Body6>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="Body6" /> class.
    /// </summary>
    /// <param name="name">Updated name of the animal.</param>
    /// <param name="status">Updated status of the animal.</param>
    public Body6(string name = default(string), string status = default(string))
    {
        Name = name;
        Status = status;
    }

    /// <summary>
    /// Updated name of the animal
    /// </summary>
    /// <value>Updated name of the animal</value>
    [DataMember(Name="name", EmitDefaultValue=false)]
    public string Name { get; set; }

    /// <summary>
    /// Updated status of the animal
    /// </summary>
    /// <value>Updated status of the animal</value>
    [DataMember(Name="status", EmitDefaultValue=false)]
    public string Status { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var builder = new StringBuilder();
        builder.Append("class Body6 {\n");
        builder.Append(" Name: ").Append(Name).Append("\n");
        builder.Append(" Status: ").Append(Status).Append("\n");
        builder.Append("}\n");
        return builder.ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public virtual string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="input">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object input)
    {
        return Equals(input as Body6);
    }

    /// <summary>
    /// Returns true if Body6 instances are equal
    /// </summary>
    /// <param name="input">Instance of Body6 to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(Body6 input)
    {
        if (input == null)
        {
            return false;
        }

        // string == already performs null-safe ordinal value comparison.
        return Name == input.Name && Status == input.Status;
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            var hash = 41;
            if (Name != null)
            {
                hash = hash * 59 + Name.GetHashCode();
            }
            if (Status != null)
            {
                hash = hash * 59 + Status.GetHashCode();
            }
            return hash;
        }
    }
}
}
| 32.045455 | 159 | 0.54279 | [
"Apache-2.0"
] | Cadcorp/swagger-codegen | samples/client/petstore/csharp/SwaggerClientNet35/src/IO.Swagger/Model/Body6.cs | 4,230 | C# |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("csExperts")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("www.pipseq.org")]
[assembly: AssemblyProduct("csExperts")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("2b27e618-cda5-40d4-ab6d-ea2f09745303")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 37.972973 | 84 | 0.745196 | [
"MIT"
] | pipseq/csFxModel | csExperts/Properties/AssemblyInfo.cs | 1,408 | C# |
// <auto-generated />
namespace Vidly.Migrations
{
using System.CodeDom.Compiler;
using System.Data.Entity.Migrations;
using System.Data.Entity.Migrations.Infrastructure;
using System.Resources;
/// <summary>
/// Migration metadata for the "changeannotations" EF migration; the target
/// model is stored in the embedded resource file.
/// </summary>
[GeneratedCode("EntityFramework.Migrations", "6.1.3-40302")]
public sealed partial class changeannotations : IMigrationMetadata
{
    private readonly ResourceManager Resources = new ResourceManager(typeof(changeannotations));

    // Unique migration identifier (timestamp + name).
    string IMigrationMetadata.Id => "201710161554180_changeannotations";

    // No source model is recorded for this migration.
    string IMigrationMetadata.Source => null;

    // Compressed target model read from the embedded resources.
    string IMigrationMetadata.Target => Resources.GetString("Target");
}
}
| 27.6 | 100 | 0.625604 | [
"MPL-2.0",
"MPL-2.0-no-copyleft-exception"
] | XomRng/Vidly | Migrations/201710161554180_changeannotations.Designer.cs | 828 | C# |
using System.ComponentModel.DataAnnotations;
namespace VacunaAPI.Models
{
/// <summary>
/// Request payload for e-mail verification.
/// </summary>
public class VerifyEmailRequest
{
    /// <summary>
    /// Verification token; required. Presumably the token previously sent to the
    /// user's e-mail address — confirm against the verification endpoint.
    /// </summary>
    [Required]
    public string Token { get; set; }
}
}
| 17.909091 | 46 | 0.629442 | [
"MIT"
] | andres88888/Vacuna | src/Models/VerifyEmailRequest.cs | 199 | C# |
using System;
namespace Lawn
{
/// <summary>
/// Identifiers for the buttons on the Almanac screens.
/// </summary>
public enum ButtonIds
{
    ALMANAC_BUTTON_CLOSE,   // closes the almanac
    ALMANAC_BUTTON_PLANT,   // opens the plants section
    ALMANAC_BUTTON_ZOMBIE,  // opens the zombies section
    ALMANAC_BUTTON_INDEX    // returns to the almanac index
}
}
| 9.666667 | 24 | 0.712644 | [
"MIT"
] | OptiJuegos/Plants-VS-Zombies-NET | DotNETPvZ_Shared/Lawn/UI/Almanac/ButtonIds.cs | 176 | C# |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("UnitTestProject1")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("UnitTestProject1")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("e3e5ebed-90e7-4654-9d8c-eb472950107f")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 37.972973 | 84 | 0.746619 | [
"MIT"
] | christianarg/HybridStorage.Net | HybridStorageTests/Properties/AssemblyInfo.cs | 1,408 | C# |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// <auto-generated/>
// Template Source: Templates\CSharp\Requests\EntityRequestBuilder.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// Request builder for <c>workbookOperation</c> entities; constructs
/// <see cref="IWorkbookOperationRequest"/> instances for the entity URL.
/// </summary>
public partial class WorkbookOperationRequestBuilder : EntityRequestBuilder, IWorkbookOperationRequestBuilder
{
    /// <summary>
    /// Constructs a new WorkbookOperationRequestBuilder.
    /// </summary>
    /// <param name="requestUrl">The URL for the built request.</param>
    /// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
    public WorkbookOperationRequestBuilder(
        string requestUrl,
        IBaseClient client)
        : base(requestUrl, client)
    {
    }

    /// <summary>
    /// Builds the request without any query or header options.
    /// </summary>
    /// <returns>The built request.</returns>
    public new IWorkbookOperationRequest Request()
    {
        return Request(options: null);
    }

    /// <summary>
    /// Builds the request.
    /// </summary>
    /// <param name="options">The query and header options for the request.</param>
    /// <returns>The built request.</returns>
    public new IWorkbookOperationRequest Request(IEnumerable<Option> options)
    {
        return new WorkbookOperationRequest(this.RequestUrl, this.Client, options);
    }
}
}
| 34.909091 | 153 | 0.577083 | [
"MIT"
] | andrueastman/msgraph-sdk-dotnet | src/Microsoft.Graph/Generated/requests/WorkbookOperationRequestBuilder.cs | 1,920 | C# |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
namespace POC_Consumer_App.Pages
{
/// <summary>
/// Page model for the error page; surfaces the current request identifier
/// for diagnostics. Responses are never cached.
/// </summary>
[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
public class ErrorModel : PageModel
{
    /// <summary>Identifier of the failed request, if available.</summary>
    public string RequestId { get; set; }

    /// <summary>True when a request identifier is available to display.</summary>
    public bool ShowRequestId
    {
        get { return !string.IsNullOrEmpty(RequestId); }
    }

    public void OnGet()
    {
        // Prefer the distributed-tracing activity id; fall back to the
        // connection-level trace identifier.
        var activityId = Activity.Current?.Id;
        RequestId = activityId ?? HttpContext.TraceIdentifier;
    }
}
}
| 25.875 | 88 | 0.703704 | [
"Apache-2.0"
] | fbrass/ory-hydra-asp.net-core-consumer-app-poc | Pages/Error.cshtml.cs | 621 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the groundstation-2019-05-23.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Serialization;
using Amazon.GroundStation.Model;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Transform;
using Amazon.Runtime.Internal.Util;
using ThirdParty.Json.LitJson;
namespace Amazon.GroundStation.Model.Internal.MarshallTransformations
{
/// <summary>
/// S3RecordingConfig Marshaller
/// </summary>
public class S3RecordingConfigMarshaller : IRequestMarshaller<S3RecordingConfig, JsonMarshallerContext>
{
    /// <summary>
    /// Marshalls the S3RecordingConfig request object into the JSON request body,
    /// writing only the properties that have been set.
    /// </summary>
    /// <param name="requestObject">The S3RecordingConfig to marshall.</param>
    /// <param name="context">JSON marshaller context whose writer receives the output.</param>
    public void Marshall(S3RecordingConfig requestObject, JsonMarshallerContext context)
    {
        if(requestObject.IsSetBucketArn())
        {
            context.Writer.WritePropertyName("bucketArn");
            context.Writer.Write(requestObject.BucketArn);
        }

        if(requestObject.IsSetPrefix())
        {
            context.Writer.WritePropertyName("prefix");
            context.Writer.Write(requestObject.Prefix);
        }

        if(requestObject.IsSetRoleArn())
        {
            context.Writer.WritePropertyName("roleArn");
            context.Writer.Write(requestObject.RoleArn);
        }
    }

    /// <summary>
    /// Singleton Marshaller.
    /// </summary>
    public readonly static S3RecordingConfigMarshaller Instance = new S3RecordingConfigMarshaller();
}
} | 33.054054 | 111 | 0.66435 | [
"Apache-2.0"
] | Hazy87/aws-sdk-net | sdk/src/Services/GroundStation/Generated/Model/Internal/MarshallTransformations/S3RecordingConfigMarshaller.cs | 2,446 | C# |
using Files.Shared;
using Files.Shared.Extensions;
using Microsoft.Win32;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.Versioning;
using System.Security;
using System.Text;
using System.Threading.Tasks;
using Windows.Storage;
namespace Files.FullTrust.Helpers
{
[SupportedOSPlatform("Windows10.0.10240")]
public static class ShellNewMenuHelper
{
    /// <summary>
    /// Builds the list of "New" context menu entries by scanning the file extension
    /// keys under HKEY_CLASSES_ROOT for a ShellNew subkey. Library, .url and .lnk
    /// extensions are excluded.
    /// </summary>
    public static async Task<List<ShellNewEntry>> GetNewContextMenuEntries()
    {
        var newMenuItems = new List<ShellNewEntry>();
        foreach (var keyName in Registry.ClassesRoot.GetSubKeyNames().Where(x => x.StartsWith('.') && !new string[] { ShellLibraryItem.EXTENSION, ".url", ".lnk" }.Contains(x, StringComparer.OrdinalIgnoreCase)))
        {
            using var key = Registry.ClassesRoot.OpenSubKeySafe(keyName);
            if (key != null)
            {
                var ret = await GetShellNewRegistryEntries(key, key);
                if (ret != null)
                {
                    newMenuItems.Add(ret);
                }
            }
        }
        return newMenuItems;
    }

    /// <summary>
    /// Returns the "New" context menu entry for a single extension, or null when the
    /// extension is empty or has no ShellNew registration.
    /// </summary>
    public static async Task<ShellNewEntry> GetNewContextMenuEntryForType(string extension)
    {
        if (string.IsNullOrEmpty(extension))
        {
            return null;
        }

        using var key = Registry.ClassesRoot.OpenSubKeySafe(extension);
        return key != null ? await GetShellNewRegistryEntries(key, key) : null;
    }

    /// <summary>
    /// Depth-first search of <paramref name="current"/> for a "ShellNew" subkey;
    /// <paramref name="root"/> is the extension key the search started from.
    /// Returns the parsed entry of the first match, or null.
    /// </summary>
    private static async Task<ShellNewEntry> GetShellNewRegistryEntries(RegistryKey current, RegistryKey root)
    {
        foreach (var keyName in current.GetSubKeyNames())
        {
            using var key = current.OpenSubKeySafe(keyName);
            if (key == null)
            {
                continue;
            }

            if (keyName == "ShellNew")
            {
                return await ParseShellNewRegistryEntry(key, root);
            }
            else
            {
                var ret = await GetShellNewRegistryEntries(key, root);
                if (ret != null)
                {
                    return ret;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Converts a ShellNew registry key into a <see cref="ShellNewEntry"/>, resolving
    /// the display name and thumbnail through a temporary sample file. Returns null
    /// when the key contains none of the recognized value names.
    /// </summary>
    private static async Task<ShellNewEntry> ParseShellNewRegistryEntry(RegistryKey key, RegistryKey root)
    {
        var valueNames = key.GetValueNames();
        if (!valueNames.Contains("NullFile", StringComparer.OrdinalIgnoreCase) &&
            !valueNames.Contains("ItemName", StringComparer.OrdinalIgnoreCase) &&
            !valueNames.Contains("FileName", StringComparer.OrdinalIgnoreCase) &&
            !valueNames.Contains("Command", StringComparer.OrdinalIgnoreCase))
        {
            return null;
        }

        var extension = root.Name.Substring(root.Name.LastIndexOf('\\') + 1);
        var fileName = (string)key.GetValue("FileName");

        // "Data" may be stored as raw bytes or as a string; normalize to bytes.
        byte[] data = null;
        var dataObj = key.GetValue("Data");
        if (dataObj != null)
        {
            switch (key.GetValueKind("Data"))
            {
                case RegistryValueKind.Binary:
                    data = (byte[])dataObj;
                    break;

                case RegistryValueKind.String:
                    data = Encoding.UTF8.GetBytes((string)dataObj);
                    break;
            }
        }

        // Create an empty sample file so the shell can resolve the localized
        // display type and a thumbnail for this extension.
        var folder = await SafetyExtensions.IgnoreExceptions(() => ApplicationData.Current.LocalFolder.CreateFolderAsync("extensions", CreationCollisionOption.OpenIfExists).AsTask());
        var sampleFile = folder != null ? await SafetyExtensions.IgnoreExceptions(() => folder.CreateFileAsync("file" + extension, CreationCollisionOption.OpenIfExists).AsTask()) : null;

        var displayType = sampleFile != null ? sampleFile.DisplayType : string.Format("{0} {1}", "file", extension);
        var thumbnail = sampleFile != null ? await SafetyExtensions.IgnoreExceptions(() => sampleFile.GetThumbnailAsync(Windows.Storage.FileProperties.ThumbnailMode.ListView, 24, Windows.Storage.FileProperties.ThumbnailOptions.UseCurrentScale).AsTask()) : null;

        string iconString = null;
        if (thumbnail != null)
        {
            // Copy the entire thumbnail stream before encoding. The previous
            // single ReadAsync call was not guaranteed to fill the buffer, which
            // could truncate the icon data, and it leaked the stream.
            using var readStream = thumbnail.AsStreamForRead();
            using var memoryStream = new MemoryStream();
            await readStream.CopyToAsync(memoryStream);
            iconString = Convert.ToBase64String(memoryStream.ToArray());
        }

        var entry = new ShellNewEntry()
        {
            Extension = extension,
            Template = fileName,
            Name = displayType,
            Command = (string)key.GetValue("Command"),
            IconBase64 = iconString,
            Data = data
        };

        return entry;
    }

    /// <summary>
    /// Opens a registry subkey, returning null instead of throwing when access is denied.
    /// </summary>
    private static RegistryKey OpenSubKeySafe(this RegistryKey root, string keyName)
    {
        try
        {
            return root.OpenSubKey(keyName);
        }
        catch (SecurityException)
        {
            return null;
        }
    }
}
} | 38.964029 | 265 | 0.550591 | [
"MIT"
] | AaronKatzin/Files | src/Files.FullTrust/Helpers/ShellNewMenuHelper.cs | 5,418 | C# |
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Quidjibo.Models;
namespace Quidjibo.Providers
{
/// <summary>
/// Provides the queue operations for work items: sending, receiving, lock renewal,
/// completion and faulting.
/// </summary>
public interface IWorkProvider : IDisposable
{
    /// <summary>
    /// Sends a work item asynchronously.
    /// </summary>
    /// <param name="item">The work item.</param>
    /// <param name="delay">The delay in seconds.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>A task that completes when the item has been sent.</returns>
    Task SendAsync(WorkItem item, int delay, CancellationToken cancellationToken);

    /// <summary>
    /// Receives work items asynchronously.
    /// </summary>
    /// <param name="worker">The name of the worker receiving the items.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>The received work items.</returns>
    Task<List<WorkItem>> ReceiveAsync(string worker, CancellationToken cancellationToken);

    /// <summary>
    /// Renews the lock on a work item asynchronously.
    /// </summary>
    /// <param name="item">The work item.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>The new lock expiration time.</returns>
    Task<DateTime> RenewAsync(WorkItem item, CancellationToken cancellationToken);

    /// <summary>
    /// Completes a work item asynchronously.
    /// </summary>
    /// <param name="item">The work item.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>A task that completes when the item has been marked complete.</returns>
    Task CompleteAsync(WorkItem item, CancellationToken cancellationToken);

    /// <summary>
    /// Faults a work item asynchronously.
    /// </summary>
    /// <param name="item">The work item.</param>
    /// <param name="cancellationToken">The cancellation token.</param>
    /// <returns>A task that completes when the item has been marked faulted.</returns>
    Task FaultAsync(WorkItem item, CancellationToken cancellationToken);
}
} | 37.480769 | 94 | 0.605952 | [
"Apache-2.0"
] | smiggleworth/Quidjibo | src/Quidjibo/Providers/IWorkProvider.cs | 1,951 | C# |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.IO.Pipelines;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.AspNetCore.Server.Kestrel.Core.Features;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http2;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http2.HPack;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Infrastructure;
using Microsoft.AspNetCore.Server.Kestrel.Transport.Abstractions.Internal;
using Microsoft.AspNetCore.Testing;
using Microsoft.Net.Http.Headers;
using Moq;
using Xunit;
using Xunit.Abstractions;
namespace Microsoft.AspNetCore.Server.Kestrel.Core.Tests
{
public class Http2TestBase : TestApplicationErrorLoggerLoggedTest, IDisposable, IHttpHeadersHandler
{
protected static readonly int MaxRequestHeaderFieldSize = 8192;
protected static readonly string _4kHeaderValue = new string('a', 4096);
protected static readonly IEnumerable<KeyValuePair<string, string>> _browserRequestHeaders = new[]
{
new KeyValuePair<string, string>(HeaderNames.Method, "GET"),
new KeyValuePair<string, string>(HeaderNames.Path, "/"),
new KeyValuePair<string, string>(HeaderNames.Scheme, "http"),
new KeyValuePair<string, string>(HeaderNames.Authority, "localhost:80"),
new KeyValuePair<string, string>("user-agent", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:54.0) Gecko/20100101 Firefox/54.0"),
new KeyValuePair<string, string>("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
new KeyValuePair<string, string>("accept-language", "en-US,en;q=0.5"),
new KeyValuePair<string, string>("accept-encoding", "gzip, deflate, br"),
new KeyValuePair<string, string>("upgrade-insecure-requests", "1"),
};
private readonly MemoryPool<byte> _memoryPool = KestrelMemoryPool.Create();
internal readonly DuplexPipe.DuplexPipePair _pair;
protected readonly Http2PeerSettings _clientSettings = new Http2PeerSettings();
protected readonly HPackEncoder _hpackEncoder = new HPackEncoder();
protected readonly HPackDecoder _hpackDecoder;
private readonly byte[] _headerEncodingBuffer = new byte[Http2PeerSettings.MinAllowedMaxFrameSize];
protected readonly Mock<ITimeoutHandler> _mockTimeoutHandler = new Mock<ITimeoutHandler>();
protected readonly TimeoutControl _timeoutControl;
protected readonly ConcurrentDictionary<int, TaskCompletionSource<object>> _runningStreams = new ConcurrentDictionary<int, TaskCompletionSource<object>>();
protected readonly Dictionary<string, string> _receivedHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
protected readonly Dictionary<string, string> _decodedHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
protected readonly HashSet<int> _abortedStreamIds = new HashSet<int>();
protected readonly object _abortedStreamIdsLock = new object();
protected readonly TaskCompletionSource<object> _closingStateReached = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
protected readonly TaskCompletionSource<object> _closedStateReached = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
protected readonly RequestDelegate _noopApplication;
protected readonly RequestDelegate _readHeadersApplication;
protected readonly RequestDelegate _readTrailersApplication;
protected readonly RequestDelegate _bufferingApplication;
protected readonly RequestDelegate _echoApplication;
protected readonly RequestDelegate _echoWaitForAbortApplication;
protected readonly RequestDelegate _largeHeadersApplication;
protected readonly RequestDelegate _waitForAbortApplication;
protected readonly RequestDelegate _waitForAbortFlushingApplication;
protected readonly RequestDelegate _waitForAbortWithDataApplication;
protected readonly RequestDelegate _echoMethod;
protected readonly RequestDelegate _echoHost;
protected readonly RequestDelegate _echoPath;
protected HttpConnectionContext _connectionContext;
protected Http2Connection _connection;
protected Task _connectionTask;
public Http2TestBase()
{
// Always dispatch test code back to the ThreadPool. This prevents deadlocks caused by continuing
// Http2Connection.ProcessRequestsAsync() loop with writer locks acquired. Run product code inline to make
// it easier to verify request frames are processed correctly immediately after sending the them.
var inputPipeOptions = new PipeOptions(
pool: _memoryPool,
readerScheduler: PipeScheduler.Inline,
writerScheduler: PipeScheduler.ThreadPool,
useSynchronizationContext: false
);
var outputPipeOptions = new PipeOptions(
pool: _memoryPool,
readerScheduler: PipeScheduler.ThreadPool,
writerScheduler: PipeScheduler.Inline,
useSynchronizationContext: false
);
_pair = DuplexPipe.CreateConnectionPair(inputPipeOptions, outputPipeOptions);
_hpackDecoder = new HPackDecoder((int)_clientSettings.HeaderTableSize, MaxRequestHeaderFieldSize);
_timeoutControl = new TimeoutControl(_mockTimeoutHandler.Object);
_noopApplication = context => Task.CompletedTask;
_readHeadersApplication = context =>
{
foreach (var header in context.Request.Headers)
{
_receivedHeaders[header.Key] = header.Value.ToString();
}
return Task.CompletedTask;
};
_readTrailersApplication = async context =>
{
using (var ms = new MemoryStream())
{
// Consuming the entire request body guarantees trailers will be available
await context.Request.Body.CopyToAsync(ms);
}
foreach (var header in context.Request.Headers)
{
_receivedHeaders[header.Key] = header.Value.ToString();
}
};
_bufferingApplication = async context =>
{
var data = new List<byte>();
var buffer = new byte[1024];
var received = 0;
while ((received = await context.Request.Body.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
data.AddRange(new ArraySegment<byte>(buffer, 0, received));
}
await context.Response.Body.WriteAsync(data.ToArray(), 0, data.Count);
};
_echoApplication = async context =>
{
var buffer = new byte[Http2PeerSettings.MinAllowedMaxFrameSize];
var received = 0;
while ((received = await context.Request.Body.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
await context.Response.Body.WriteAsync(buffer, 0, received);
}
};
_echoWaitForAbortApplication = async context =>
{
var buffer = new byte[Http2PeerSettings.MinAllowedMaxFrameSize];
var received = 0;
while ((received = await context.Request.Body.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
await context.Response.Body.WriteAsync(buffer, 0, received);
}
var sem = new SemaphoreSlim(0);
context.RequestAborted.Register(() =>
{
sem.Release();
});
await sem.WaitAsync().DefaultTimeout();
};
_largeHeadersApplication = context =>
{
foreach (var name in new[] { "a", "b", "c", "d", "e", "f", "g", "h" })
{
context.Response.Headers[name] = _4kHeaderValue;
}
return Task.CompletedTask;
};
_waitForAbortApplication = async context =>
{
var streamIdFeature = context.Features.Get<IHttp2StreamIdFeature>();
var sem = new SemaphoreSlim(0);
context.RequestAborted.Register(() =>
{
lock (_abortedStreamIdsLock)
{
_abortedStreamIds.Add(streamIdFeature.StreamId);
}
sem.Release();
});
await sem.WaitAsync().DefaultTimeout();
_runningStreams[streamIdFeature.StreamId].TrySetResult(null);
};
_waitForAbortFlushingApplication = async context =>
{
var streamIdFeature = context.Features.Get<IHttp2StreamIdFeature>();
var sem = new SemaphoreSlim(0);
context.RequestAborted.Register(() =>
{
lock (_abortedStreamIdsLock)
{
_abortedStreamIds.Add(streamIdFeature.StreamId);
}
sem.Release();
});
await sem.WaitAsync().DefaultTimeout();
await context.Response.Body.FlushAsync();
_runningStreams[streamIdFeature.StreamId].TrySetResult(null);
};
_waitForAbortWithDataApplication = async context =>
{
var streamIdFeature = context.Features.Get<IHttp2StreamIdFeature>();
var sem = new SemaphoreSlim(0);
context.RequestAborted.Register(() =>
{
lock (_abortedStreamIdsLock)
{
_abortedStreamIds.Add(streamIdFeature.StreamId);
}
sem.Release();
});
await sem.WaitAsync().DefaultTimeout();
await context.Response.Body.WriteAsync(new byte[10], 0, 10);
_runningStreams[streamIdFeature.StreamId].TrySetResult(null);
};
_echoMethod = context =>
{
context.Response.Headers["Method"] = context.Request.Method;
return Task.CompletedTask;
};
_echoHost = context =>
{
context.Response.Headers[HeaderNames.Host] = context.Request.Headers[HeaderNames.Host];
return Task.CompletedTask;
};
_echoPath = context =>
{
context.Response.Headers["path"] = context.Request.Path.ToString();
context.Response.Headers["rawtarget"] = context.Features.Get<IHttpRequestFeature>().RawTarget;
return Task.CompletedTask;
};
}
public override void Initialize(MethodInfo methodInfo, object[] testMethodArguments, ITestOutputHelper testOutputHelper)
{
base.Initialize(methodInfo, testMethodArguments, testOutputHelper);
var mockKestrelTrace = new Mock<IKestrelTrace>();
mockKestrelTrace
.Setup(m => m.Http2ConnectionClosing(It.IsAny<string>()))
.Callback(() => _closingStateReached.SetResult(null));
mockKestrelTrace
.Setup(m => m.Http2ConnectionClosed(It.IsAny<string>(), It.IsAny<int>()))
.Callback(() => _closedStateReached.SetResult(null));
_connectionContext = new HttpConnectionContext
{
ConnectionContext = Mock.Of<ConnectionContext>(),
ConnectionFeatures = new FeatureCollection(),
ServiceContext = new TestServiceContext(LoggerFactory, mockKestrelTrace.Object),
MemoryPool = _memoryPool,
Transport = _pair.Transport,
TimeoutControl = _timeoutControl
};
_connection = new Http2Connection(_connectionContext);
}
public override void Dispose()
{
_pair.Application.Input.Complete();
_pair.Application.Output.Complete();
_pair.Transport.Input.Complete();
_pair.Transport.Output.Complete();
_memoryPool.Dispose();
base.Dispose();
}
void IHttpHeadersHandler.OnHeader(Span<byte> name, Span<byte> value)
{
_decodedHeaders[name.GetAsciiStringNonNullCharacters()] = value.GetAsciiOrUTF8StringNonNullCharacters();
}
protected async Task InitializeConnectionAsync(RequestDelegate application, int expectedSettingsCount = 3)
{
_connectionTask = _connection.ProcessRequestsAsync(new DummyApplication(application));
await SendPreambleAsync().ConfigureAwait(false);
await SendSettingsAsync();
await ExpectAsync(Http2FrameType.SETTINGS,
withLength: expectedSettingsCount * Http2FrameReader.SettingSize,
withFlags: 0,
withStreamId: 0);
await ExpectAsync(Http2FrameType.WINDOW_UPDATE,
withLength: 4,
withFlags: 0,
withStreamId: 0);
await ExpectAsync(Http2FrameType.SETTINGS,
withLength: 0,
withFlags: (byte)Http2SettingsFrameFlags.ACK,
withStreamId: 0);
}
protected Task StartStreamAsync(int streamId, IEnumerable<KeyValuePair<string, string>> headers, bool endStream)
{
var writableBuffer = _pair.Application.Output;
var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
_runningStreams[streamId] = tcs;
var frame = new Http2Frame();
frame.PrepareHeaders(Http2HeadersFrameFlags.NONE, streamId);
var buffer = _headerEncodingBuffer.AsSpan();
var done = _hpackEncoder.BeginEncode(headers, buffer, out var length);
frame.PayloadLength = length;
if (done)
{
frame.HeadersFlags = Http2HeadersFrameFlags.END_HEADERS;
}
if (endStream)
{
frame.HeadersFlags |= Http2HeadersFrameFlags.END_STREAM;
}
Http2FrameWriter.WriteHeader(frame, writableBuffer);
writableBuffer.Write(buffer.Slice(0, length));
while (!done)
{
frame.PrepareContinuation(Http2ContinuationFrameFlags.NONE, streamId);
done = _hpackEncoder.Encode(buffer, out length);
frame.PayloadLength = length;
if (done)
{
frame.ContinuationFlags = Http2ContinuationFrameFlags.END_HEADERS;
}
Http2FrameWriter.WriteHeader(frame, writableBuffer);
writableBuffer.Write(buffer.Slice(0, length));
}
return FlushAsync(writableBuffer);
}
/* https://tools.ietf.org/html/rfc7540#section-6.2
+---------------+
|Pad Length? (8)|
+-+-------------+-----------------------------------------------+
| Header Block Fragment (*) ...
+---------------------------------------------------------------+
| Padding (*) ...
+---------------------------------------------------------------+
*/
protected Task SendHeadersWithPaddingAsync(int streamId, IEnumerable<KeyValuePair<string, string>> headers, byte padLength, bool endStream)
{
var writableBuffer = _pair.Application.Output;
var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
_runningStreams[streamId] = tcs;
var frame = new Http2Frame();
frame.PrepareHeaders(Http2HeadersFrameFlags.END_HEADERS | Http2HeadersFrameFlags.PADDED, streamId);
frame.HeadersPadLength = padLength;
var extendedHeaderLength = 1; // Padding length field
var buffer = _headerEncodingBuffer.AsSpan();
var extendedHeader = buffer.Slice(0, extendedHeaderLength);
extendedHeader[0] = padLength;
var payload = buffer.Slice(extendedHeaderLength, buffer.Length - padLength - extendedHeaderLength);
_hpackEncoder.BeginEncode(headers, payload, out var length);
var padding = buffer.Slice(extendedHeaderLength + length, padLength);
padding.Fill(0);
frame.PayloadLength = extendedHeaderLength + length + padLength;
if (endStream)
{
frame.HeadersFlags |= Http2HeadersFrameFlags.END_STREAM;
}
Http2FrameWriter.WriteHeader(frame, writableBuffer);
writableBuffer.Write(buffer.Slice(0, frame.PayloadLength));
return FlushAsync(writableBuffer);
}
/* https://tools.ietf.org/html/rfc7540#section-6.2
+-+-------------+-----------------------------------------------+
|E| Stream Dependency? (31) |
+-+-------------+-----------------------------------------------+
| Weight? (8) |
+-+-------------+-----------------------------------------------+
| Header Block Fragment (*) ...
+---------------------------------------------------------------+
*/
protected Task SendHeadersWithPriorityAsync(int streamId, IEnumerable<KeyValuePair<string, string>> headers, byte priority, int streamDependency, bool endStream)
{
var writableBuffer = _pair.Application.Output;
var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
_runningStreams[streamId] = tcs;
var frame = new Http2Frame();
frame.PrepareHeaders(Http2HeadersFrameFlags.END_HEADERS | Http2HeadersFrameFlags.PRIORITY, streamId);
frame.HeadersPriorityWeight = priority;
frame.HeadersStreamDependency = streamDependency;
var extendedHeaderLength = 5; // stream dependency + weight
var buffer = _headerEncodingBuffer.AsSpan();
var extendedHeader = buffer.Slice(0, extendedHeaderLength);
Bitshifter.WriteUInt31BigEndian(extendedHeader, (uint)streamDependency);
extendedHeader[4] = priority;
var payload = buffer.Slice(extendedHeaderLength);
_hpackEncoder.BeginEncode(headers, payload, out var length);
frame.PayloadLength = extendedHeaderLength + length;
if (endStream)
{
frame.HeadersFlags |= Http2HeadersFrameFlags.END_STREAM;
}
Http2FrameWriter.WriteHeader(frame, writableBuffer);
writableBuffer.Write(buffer.Slice(0, frame.PayloadLength));
return FlushAsync(writableBuffer);
}
/* https://tools.ietf.org/html/rfc7540#section-6.2
+---------------+
|Pad Length? (8)|
+-+-------------+-----------------------------------------------+
|E| Stream Dependency? (31) |
+-+-------------+-----------------------------------------------+
| Weight? (8) |
+-+-------------+-----------------------------------------------+
| Header Block Fragment (*) ...
+---------------------------------------------------------------+
| Padding (*) ...
+---------------------------------------------------------------+
*/
protected Task SendHeadersWithPaddingAndPriorityAsync(int streamId, IEnumerable<KeyValuePair<string, string>> headers, byte padLength, byte priority, int streamDependency, bool endStream)
{
var writableBuffer = _pair.Application.Output;
var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
_runningStreams[streamId] = tcs;
var frame = new Http2Frame();
frame.PrepareHeaders(Http2HeadersFrameFlags.END_HEADERS | Http2HeadersFrameFlags.PADDED | Http2HeadersFrameFlags.PRIORITY, streamId);
frame.HeadersPadLength = padLength;
frame.HeadersPriorityWeight = priority;
frame.HeadersStreamDependency = streamDependency;
var extendedHeaderLength = 6; // pad length + stream dependency + weight
var buffer = _headerEncodingBuffer.AsSpan();
var extendedHeader = buffer.Slice(0, extendedHeaderLength);
extendedHeader[0] = padLength;
Bitshifter.WriteUInt31BigEndian(extendedHeader.Slice(1), (uint)streamDependency);
extendedHeader[5] = priority;
var payload = buffer.Slice(extendedHeaderLength, buffer.Length - padLength - extendedHeaderLength);
_hpackEncoder.BeginEncode(headers, payload, out var length);
var padding = buffer.Slice(extendedHeaderLength + length, padLength);
padding.Fill(0);
frame.PayloadLength = extendedHeaderLength + length + padLength;
if (endStream)
{
frame.HeadersFlags |= Http2HeadersFrameFlags.END_STREAM;
}
Http2FrameWriter.WriteHeader(frame, writableBuffer);
writableBuffer.Write(buffer.Slice(0, frame.PayloadLength));
return FlushAsync(writableBuffer);
}
protected Task WaitForAllStreamsAsync()
{
return Task.WhenAll(_runningStreams.Values.Select(tcs => tcs.Task)).DefaultTimeout();
}
protected Task SendAsync(ReadOnlySpan<byte> span)
{
var writableBuffer = _pair.Application.Output;
writableBuffer.Write(span);
return FlushAsync(writableBuffer);
}
protected static async Task FlushAsync(PipeWriter writableBuffer)
{
await writableBuffer.FlushAsync();
}
protected Task SendPreambleAsync() => SendAsync(new ArraySegment<byte>(Http2Connection.ClientPreface));
protected async Task SendSettingsAsync()
{
var writableBuffer = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareSettings(Http2SettingsFrameFlags.NONE);
var settings = _clientSettings.GetNonProtocolDefaults();
var payload = new byte[settings.Count * Http2FrameReader.SettingSize];
frame.PayloadLength = payload.Length;
Http2FrameWriter.WriteSettings(settings, payload);
Http2FrameWriter.WriteHeader(frame, writableBuffer);
await SendAsync(payload);
}
protected async Task SendSettingsAckWithInvalidLengthAsync(int length)
{
var writableBuffer = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareSettings(Http2SettingsFrameFlags.ACK);
frame.PayloadLength = length;
Http2FrameWriter.WriteHeader(frame, writableBuffer);
await SendAsync(new byte[length]);
}
protected async Task SendSettingsWithInvalidStreamIdAsync(int streamId)
{
var writableBuffer = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareSettings(Http2SettingsFrameFlags.NONE);
frame.StreamId = streamId;
var settings = _clientSettings.GetNonProtocolDefaults();
var payload = new byte[settings.Count * Http2FrameReader.SettingSize];
frame.PayloadLength = payload.Length;
Http2FrameWriter.WriteSettings(settings, payload);
Http2FrameWriter.WriteHeader(frame, writableBuffer);
await SendAsync(payload);
}
protected async Task SendSettingsWithInvalidLengthAsync(int length)
{
var writableBuffer = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareSettings(Http2SettingsFrameFlags.NONE);
frame.PayloadLength = length;
var payload = new byte[length];
Http2FrameWriter.WriteHeader(frame, writableBuffer);
await SendAsync(payload);
}
protected async Task SendSettingsWithInvalidParameterValueAsync(Http2SettingsParameter parameter, uint value)
{
var writableBuffer = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareSettings(Http2SettingsFrameFlags.NONE);
frame.PayloadLength = 6;
var payload = new byte[Http2FrameReader.SettingSize];
payload[0] = (byte)((ushort)parameter >> 8);
payload[1] = (byte)(ushort)parameter;
payload[2] = (byte)(value >> 24);
payload[3] = (byte)(value >> 16);
payload[4] = (byte)(value >> 8);
payload[5] = (byte)value;
Http2FrameWriter.WriteHeader(frame, writableBuffer);
await SendAsync(payload);
}
protected Task SendPushPromiseFrameAsync()
{
var writableBuffer = _pair.Application.Output;
var frame = new Http2Frame();
frame.PayloadLength = 0;
frame.Type = Http2FrameType.PUSH_PROMISE;
frame.StreamId = 1;
Http2FrameWriter.WriteHeader(frame, writableBuffer);
return FlushAsync(writableBuffer);
}
protected async Task<bool> SendHeadersAsync(int streamId, Http2HeadersFrameFlags flags, IEnumerable<KeyValuePair<string, string>> headers)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareHeaders(flags, streamId);
var buffer = _headerEncodingBuffer.AsMemory();
var done = _hpackEncoder.BeginEncode(headers, buffer.Span, out var length);
frame.PayloadLength = length;
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(buffer.Span.Slice(0, length));
return done;
}
protected async Task SendHeadersAsync(int streamId, Http2HeadersFrameFlags flags, byte[] headerBlock)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareHeaders(flags, streamId);
frame.PayloadLength = headerBlock.Length;
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(headerBlock);
}
protected async Task SendInvalidHeadersFrameAsync(int streamId, int payloadLength, byte padLength)
{
Assert.True(padLength >= payloadLength, $"{nameof(padLength)} must be greater than or equal to {nameof(payloadLength)} to create an invalid frame.");
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareHeaders(Http2HeadersFrameFlags.PADDED, streamId);
frame.PayloadLength = payloadLength;
var payload = new byte[payloadLength];
if (payloadLength > 0)
{
payload[0] = padLength;
}
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(payload);
}
protected async Task SendIncompleteHeadersFrameAsync(int streamId)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareHeaders(Http2HeadersFrameFlags.END_HEADERS, streamId);
frame.PayloadLength = 3;
var payload = new byte[3];
// Set up an incomplete Literal Header Field w/ Incremental Indexing frame,
// with an incomplete new name
payload[0] = 0;
payload[1] = 2;
payload[2] = (byte)'a';
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(payload);
}
protected async Task<bool> SendContinuationAsync(int streamId, Http2ContinuationFrameFlags flags)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareContinuation(flags, streamId);
var buffer = _headerEncodingBuffer.AsMemory();
var done = _hpackEncoder.Encode(buffer.Span, out var length);
frame.PayloadLength = length;
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(buffer.Span.Slice(0, length));
return done;
}
protected async Task SendContinuationAsync(int streamId, Http2ContinuationFrameFlags flags, byte[] payload)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareContinuation(flags, streamId);
frame.PayloadLength = payload.Length;
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(payload);
}
protected Task SendEmptyContinuationFrameAsync(int streamId, Http2ContinuationFrameFlags flags)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareContinuation(flags, streamId);
frame.PayloadLength = 0;
Http2FrameWriter.WriteHeader(frame, outputWriter);
return FlushAsync(outputWriter);
}
protected async Task SendIncompleteContinuationFrameAsync(int streamId)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareContinuation(Http2ContinuationFrameFlags.END_HEADERS, streamId);
frame.PayloadLength = 3;
var payload = new byte[3];
// Set up an incomplete Literal Header Field w/ Incremental Indexing frame,
// with an incomplete new name
payload[0] = 0;
payload[1] = 2;
payload[2] = (byte)'a';
Http2FrameWriter.WriteHeader(frame, outputWriter);
await SendAsync(payload);
}
protected Task SendDataAsync(int streamId, Memory<byte> data, bool endStream)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareData(streamId);
frame.PayloadLength = data.Length;
frame.DataFlags = endStream ? Http2DataFrameFlags.END_STREAM : Http2DataFrameFlags.NONE;
Http2FrameWriter.WriteHeader(frame, outputWriter);
return SendAsync(data.Span);
}
protected async Task SendDataWithPaddingAsync(int streamId, Memory<byte> data, byte padLength, bool endStream)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareData(streamId, padLength);
frame.PayloadLength = data.Length + 1 + padLength;
if (endStream)
{
frame.DataFlags |= Http2DataFrameFlags.END_STREAM;
}
Http2FrameWriter.WriteHeader(frame, outputWriter);
outputWriter.GetSpan(1)[0] = padLength;
outputWriter.Advance(1);
await SendAsync(data.Span);
await SendAsync(new byte[padLength]);
}
protected Task SendInvalidDataFrameAsync(int streamId, int frameLength, byte padLength)
{
Assert.True(padLength >= frameLength, $"{nameof(padLength)} must be greater than or equal to {nameof(frameLength)} to create an invalid frame.");
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareData(streamId);
frame.DataFlags = Http2DataFrameFlags.PADDED;
frame.PayloadLength = frameLength;
var payload = new byte[frameLength];
if (frameLength > 0)
{
payload[0] = padLength;
}
Http2FrameWriter.WriteHeader(frame, outputWriter);
return SendAsync(payload);
}
protected Task SendPingAsync(Http2PingFrameFlags flags)
{
var outputWriter = _pair.Application.Output;
var pingFrame = new Http2Frame();
pingFrame.PreparePing(flags);
Http2FrameWriter.WriteHeader(pingFrame, outputWriter);
return SendAsync(new byte[8]); // Empty payload
}
protected Task SendPingWithInvalidLengthAsync(int length)
{
var outputWriter = _pair.Application.Output;
var pingFrame = new Http2Frame();
pingFrame.PreparePing(Http2PingFrameFlags.NONE);
pingFrame.PayloadLength = length;
Http2FrameWriter.WriteHeader(pingFrame, outputWriter);
return SendAsync(new byte[length]);
}
protected Task SendPingWithInvalidStreamIdAsync(int streamId)
{
Assert.NotEqual(0, streamId);
var outputWriter = _pair.Application.Output;
var pingFrame = new Http2Frame();
pingFrame.PreparePing(Http2PingFrameFlags.NONE);
pingFrame.StreamId = streamId;
Http2FrameWriter.WriteHeader(pingFrame, outputWriter);
return SendAsync(new byte[pingFrame.PayloadLength]);
}
/* https://tools.ietf.org/html/rfc7540#section-6.3
+-+-------------------------------------------------------------+
|E| Stream Dependency (31) |
+-+-------------+-----------------------------------------------+
| Weight (8) |
+-+-------------+
*/
protected Task SendPriorityAsync(int streamId, int streamDependency = 0)
{
var outputWriter = _pair.Application.Output;
var priorityFrame = new Http2Frame();
priorityFrame.PreparePriority(streamId, streamDependency: streamDependency, exclusive: false, weight: 0);
var payload = new byte[priorityFrame.PayloadLength].AsSpan();
Bitshifter.WriteUInt31BigEndian(payload, (uint)streamDependency);
payload[4] = 0; // Weight
Http2FrameWriter.WriteHeader(priorityFrame, outputWriter);
return SendAsync(payload);
}
protected Task SendInvalidPriorityFrameAsync(int streamId, int length)
{
var outputWriter = _pair.Application.Output;
var priorityFrame = new Http2Frame();
priorityFrame.PreparePriority(streamId, streamDependency: 0, exclusive: false, weight: 0);
priorityFrame.PayloadLength = length;
Http2FrameWriter.WriteHeader(priorityFrame, outputWriter);
return SendAsync(new byte[length]);
}
/* https://tools.ietf.org/html/rfc7540#section-6.4
+---------------------------------------------------------------+
| Error Code (32) |
+---------------------------------------------------------------+
*/
protected Task SendRstStreamAsync(int streamId)
{
var outputWriter = _pair.Application.Output;
var rstStreamFrame = new Http2Frame();
rstStreamFrame.PrepareRstStream(streamId, Http2ErrorCode.CANCEL);
var payload = new byte[rstStreamFrame.PayloadLength];
BinaryPrimitives.WriteUInt32BigEndian(payload, (uint)Http2ErrorCode.CANCEL);
Http2FrameWriter.WriteHeader(rstStreamFrame, outputWriter);
return SendAsync(payload);
}
protected Task SendInvalidRstStreamFrameAsync(int streamId, int length)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareRstStream(streamId, Http2ErrorCode.CANCEL);
frame.PayloadLength = length;
Http2FrameWriter.WriteHeader(frame, outputWriter);
return SendAsync(new byte[length]);
}
protected Task SendGoAwayAsync()
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareGoAway(0, Http2ErrorCode.NO_ERROR);
Http2FrameWriter.WriteHeader(frame, outputWriter);
return SendAsync(new byte[frame.PayloadLength]);
}
protected Task SendInvalidGoAwayFrameAsync()
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareGoAway(0, Http2ErrorCode.NO_ERROR);
frame.StreamId = 1;
Http2FrameWriter.WriteHeader(frame, outputWriter);
return SendAsync(new byte[frame.PayloadLength]);
}
protected Task SendWindowUpdateAsync(int streamId, int sizeIncrement)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareWindowUpdate(streamId, sizeIncrement);
Http2FrameWriter.WriteHeader(frame, outputWriter);
var buffer = outputWriter.GetSpan(4);
Bitshifter.WriteUInt31BigEndian(buffer, (uint)sizeIncrement);
outputWriter.Advance(4);
return FlushAsync(outputWriter);
}
protected Task SendInvalidWindowUpdateAsync(int streamId, int sizeIncrement, int length)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.PrepareWindowUpdate(streamId, sizeIncrement);
frame.PayloadLength = length;
Http2FrameWriter.WriteHeader(frame, outputWriter);
return SendAsync(new byte[length]);
}
protected Task SendUnknownFrameTypeAsync(int streamId, int frameType)
{
var outputWriter = _pair.Application.Output;
var frame = new Http2Frame();
frame.StreamId = streamId;
frame.Type = (Http2FrameType)frameType;
frame.PayloadLength = 0;
Http2FrameWriter.WriteHeader(frame, outputWriter);
return FlushAsync(outputWriter);
}
protected async Task<Http2FrameWithPayload> ReceiveFrameAsync(uint maxFrameSize = Http2PeerSettings.DefaultMaxFrameSize)
{
var frame = new Http2FrameWithPayload();
while (true)
{
var result = await _pair.Application.Input.ReadAsync().AsTask().DefaultTimeout();
var buffer = result.Buffer;
var consumed = buffer.Start;
var examined = buffer.Start;
try
{
Assert.True(buffer.Length > 0);
if (Http2FrameReader.ReadFrame(buffer, frame, maxFrameSize, out var framePayload))
{
consumed = examined = framePayload.End;
frame.Payload = framePayload.ToArray();
return frame;
}
else
{
examined = buffer.End;
}
if (result.IsCompleted)
{
throw new IOException("The reader completed without returning a frame.");
}
}
finally
{
_pair.Application.Input.AdvanceTo(consumed, examined);
}
}
}
protected async Task<Http2FrameWithPayload> ExpectAsync(Http2FrameType type, int withLength, byte withFlags, int withStreamId)
{
var frame = await ReceiveFrameAsync((uint)withLength);
Assert.Equal(type, frame.Type);
Assert.Equal(withLength, frame.PayloadLength);
Assert.Equal(withFlags, frame.Flags);
Assert.Equal(withStreamId, frame.StreamId);
return frame;
}
protected Task StopConnectionAsync(int expectedLastStreamId, bool ignoreNonGoAwayFrames)
{
_pair.Application.Output.Complete();
return WaitForConnectionStopAsync(expectedLastStreamId, ignoreNonGoAwayFrames);
}
protected Task WaitForConnectionStopAsync(int expectedLastStreamId, bool ignoreNonGoAwayFrames)
{
return WaitForConnectionErrorAsync<Exception>(ignoreNonGoAwayFrames, expectedLastStreamId, Http2ErrorCode.NO_ERROR, expectedErrorMessage: null);
}
protected void VerifyGoAway(Http2Frame frame, int expectedLastStreamId, Http2ErrorCode expectedErrorCode)
{
Assert.Equal(Http2FrameType.GOAWAY, frame.Type);
Assert.Equal(8, frame.PayloadLength);
Assert.Equal(0, frame.Flags);
Assert.Equal(0, frame.StreamId);
Assert.Equal(expectedLastStreamId, frame.GoAwayLastStreamId);
Assert.Equal(expectedErrorCode, frame.GoAwayErrorCode);
}
protected async Task WaitForConnectionErrorAsync<TException>(bool ignoreNonGoAwayFrames, int expectedLastStreamId, Http2ErrorCode expectedErrorCode, string expectedErrorMessage)
where TException : Exception
{
var frame = await ReceiveFrameAsync();
if (ignoreNonGoAwayFrames)
{
while (frame.Type != Http2FrameType.GOAWAY)
{
frame = await ReceiveFrameAsync();
}
}
VerifyGoAway(frame, expectedLastStreamId, expectedErrorCode);
if (expectedErrorMessage != null)
{
var message = Assert.Single(TestApplicationErrorLogger.Messages, m => m.Exception is TException);
Assert.Contains(expectedErrorMessage, message.Exception.Message);
}
await _connectionTask;
_pair.Application.Output.Complete();
}
protected async Task WaitForStreamErrorAsync(int expectedStreamId, Http2ErrorCode expectedErrorCode, string expectedErrorMessage)
{
var frame = await ReceiveFrameAsync();
Assert.Equal(Http2FrameType.RST_STREAM, frame.Type);
Assert.Equal(4, frame.PayloadLength);
Assert.Equal(0, frame.Flags);
Assert.Equal(expectedStreamId, frame.StreamId);
Assert.Equal(expectedErrorCode, frame.RstStreamErrorCode);
if (expectedErrorMessage != null)
{
Assert.Contains(TestApplicationErrorLogger.Messages, m => m.Exception?.Message.Contains(expectedErrorMessage) ?? false);
}
}
        /// <summary>
        /// Asserts that every expected header was received and decoded, comparing values
        /// case-insensitively. Extra received headers are not an error.
        /// </summary>
        protected void VerifyDecodedRequestHeaders(IEnumerable<KeyValuePair<string, string>> expectedHeaders)
        {
            foreach (var header in expectedHeaders)
            {
                // The header key is passed as the user message so failures name the missing header.
                Assert.True(_receivedHeaders.TryGetValue(header.Key, out var value), header.Key);
                Assert.Equal(header.Value, value, ignoreCase: true);
            }
        }
        /// <summary>
        /// An <see cref="Http2Frame"/> that additionally captures the raw frame payload,
        /// so tests can inspect the bytes that accompanied the frame header.
        /// </summary>
        public class Http2FrameWithPayload : Http2Frame
        {
            public Http2FrameWithPayload() : base()
            {
            }
            // This does not contain extended headers
            public Memory<byte> Payload { get; set; }
            /// <summary>The payload exposed as a single-segment <see cref="ReadOnlySequence{T}"/>.</summary>
            public ReadOnlySequence<byte> PayloadSequence => new ReadOnlySequence<byte>(Payload);
        }
}
}
| 41.842991 | 195 | 0.593474 | [
"Apache-2.0"
] | SonicGD/KestrelHttpServer | test/Kestrel.InMemory.FunctionalTests/Http2/Http2TestBase.cs | 44,774 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
namespace Microsoft.EntityFrameworkCore.Cosmos.Query.Internal
{
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
public class QuerySqlGeneratorFactory : IQuerySqlGeneratorFactory
{
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
public virtual QuerySqlGenerator Create()
=> new();
}
}
| 55.333333 | 113 | 0.698795 | [
"MIT"
] | FelicePollano/efcore | src/EFCore.Cosmos/Query/Internal/QuerySqlGeneratorFactory.cs | 1,328 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows.Forms;
namespace WDNUtils.Win32
{
/// <summary>
/// Controller for a ComboBox with a list of object instances
/// </summary>
/// <typeparam name="T">Element type</typeparam>
    public class ComboBoxClassController<T> where T : class
    {
        // Implementation note: every combo-box item is stored as a Tuple<T, string>.
        // Item1 holds the data element (null for "command text" entries) and Item2 holds
        // the text displayed in the combo box. Command-text entries always precede data items.
        #region Properties
        /// <summary>
        /// Combo box that is controlled by this class
        /// </summary>
        protected ComboBox ComboBox { get; private set; }
        /// <summary>
        /// List of command text elements
        /// </summary>
        private IEnumerable<Tuple<T, string>> CommandTextList { get; set; }
        #endregion
        #region Constructor
        /// <summary>
        /// Creates a new instance of ComboBoxClassController
        /// </summary>
        /// <param name="comboBox">Combo box that will be controlled by this class</param>
        /// <param name="commandText">List of command text elements</param>
        protected ComboBoxClassController(ComboBox comboBox, params string[] commandText)
        {
            ComboBox = comboBox;
            ComboBox.DropDownStyle = ComboBoxStyle.DropDownList;
            // Show up to 10 items before the drop-down list scrolls.
            ComboBox.DropDownHeight = ComboBox.ItemHeight * 10;
            ComboBox.IntegralHeight = false;
            ComboBox.FormattingEnabled = true;
            // Display/value members point at Item2, the text part of the stored tuples.
            ComboBox.DisplayMember = nameof(Tuple<T, string>.Item2);
            ComboBox.ValueMember = nameof(Tuple<T, string>.Item2);
            ComboBox.DataSource = new List<Tuple<T, string>>();
            CommandTextList = (commandText?.Select(item => new Tuple<T, string>(null, item))?.ToList())
                ?? Enumerable.Empty<Tuple<T, string>>();
        }
        #endregion
        #region Get selection
        /// <summary>
        /// Indicates if there is a selected item in the combo box
        /// </summary>
        /// <returns>True if there is a selected item in the combo box</returns>
        public bool HasSelection()
        {
            return (ComboBox.SelectedIndex >= 0);
        }
        /// <summary>
        /// Get current selected item (returns null if the selected item is a command text element, or if there is no selected item)
        /// </summary>
        /// <returns>Current selected item</returns>
        public T GetSelectedItem()
        {
            return (ComboBox.SelectedItem as Tuple<T, string>)?.Item1;
        }
        /// <summary>
        /// Get current command text (returns null if the selected item is a data element, or if there is no selected item)
        /// </summary>
        /// <returns>Current selected command text</returns>
        public string GetSelectedCommandText()
        {
            var item = ComboBox.SelectedItem as Tuple<T, string>;
            // Only command-text entries (Item1 == null) report a command text.
            return (item?.Item1 is null) ? item?.Item2 : null;
        }
        #endregion
        #region Set selection
        /// <summary>
        /// Clear selection
        /// </summary>
        public void ClearSelection()
        {
            ComboBox.SelectedIndex = -1;
        }
        /// <summary>
        /// Set selected data item
        /// </summary>
        /// <param name="predicate">Predicate to select the desired data item</param>
        /// <param name="clearSelectionIfNotFound">Indicates if the selection should be cleared if the desired data item is not found</param>
        /// <returns>True if the desired data item is selected, false if the data item was not found</returns>
        protected bool SetSelectedItem(Func<T, bool> predicate, bool clearSelectionIfNotFound = true)
        {
            // Linear scan: select the first data item (Item1 != null) matching the predicate.
            for (int index = 0; index < ComboBox.Items.Count; index++)
            {
                var item = ComboBox.Items[index] as Tuple<T, string>;
                if ((!(item?.Item1 is null)) && (predicate(item.Item1)))
                {
                    ComboBox.SelectedIndex = index;
                    return true;
                }
            }
            if (clearSelectionIfNotFound)
            {
                ComboBox.SelectedIndex = -1;
            }
            return false;
        }
        /// <summary>
        /// Set selected command text
        /// </summary>
        /// <param name="predicate">Predicate to select the desired command text</param>
        /// <param name="clearSelectionIfNotFound">Indicates if the selection should be cleared if the desired command text is not found</param>
        /// <returns>True if the desired command text is selected, false if the command text was not found</returns>
        protected bool SetSelectedCommandText(Func<string, bool> predicate, bool clearSelectionIfNotFound = true)
        {
            for (int index = 0; index < ComboBox.Items.Count; index++)
            {
                var item = ComboBox.Items[index] as Tuple<T, string>;
                // NOTE: if an item is not a Tuple<T, string>, the predicate is invoked with null.
                if ((item?.Item1 is null) && (predicate(item?.Item2)))
                {
                    ComboBox.SelectedIndex = index;
                    return true;
                }
            }
            if (clearSelectionIfNotFound)
            {
                ComboBox.SelectedIndex = -1;
            }
            return false;
        }
        /// <summary>
        /// Set selected command text
        /// </summary>
        /// <param name="commandText">Command text to be select</param>
        /// <param name="clearSelectionIfNotFound">Indicates if the selection should be cleared if the command text is not found</param>
        /// <returns>True if the command text is selected, false if the command text was not found</returns>
        protected bool SetSelectedCommandText(string commandText, bool clearSelectionIfNotFound = true)
        {
            return SetSelectedCommandText(
                predicate: item => string.Equals(item, commandText, StringComparison.Ordinal),
                clearSelectionIfNotFound: clearSelectionIfNotFound);
        }
        /// <summary>
        /// Set the first data item element as current selected item, if there is no current selected item
        /// </summary>
        /// <returns>True if a data item was selected, false if there are no data items to be selected, or null if the selection was not changed</returns>
        public bool? SetSelectedItemFirstByDefault()
        {
            if (ComboBox.SelectedIndex < 0)
            {
                return SetSelectedItem(predicate: item => true, clearSelectionIfNotFound: false);
            }
            else
            {
                return null;
            }
        }
        /// <summary>
        /// Set the first command text as current selected item, if there is no current selected item
        /// </summary>
        /// <returns>True if a command text was selected, false if there are no command texts to be selected, or null if the selection was not changed</returns>
        public bool? SetSelectedCommandTextFirstByDefault()
        {
            if (ComboBox.SelectedIndex < 0)
            {
                return SetSelectedCommandText(predicate: item => true, clearSelectionIfNotFound: false);
            }
            else
            {
                return null;
            }
        }
        /// <summary>
        /// Set a command text as current selected item, if there is no current selected item
        /// </summary>
        /// <param name="commandText">Command text to be select</param>
        /// <returns>True if a command text was selected, false if the command text was not found, or null if the selection was not changed</returns>
        public bool? SetSelectedCommandTextByDefault(string commandText)
        {
            if (ComboBox.SelectedIndex < 0)
            {
                return SetSelectedCommandText(commandText: commandText, clearSelectionIfNotFound: false);
            }
            else
            {
                return null;
            }
        }
        #endregion
        #region Set data source
        /// <summary>
        /// Remove all data items from the combo box list (command text items are kept)
        /// </summary>
        public void Clear()
        {
            SetDataList(
                dataList: Enumerable.Empty<T>(),
                getText: item => item.ToString(),
                equalityComparer: null);
        }
        /// <summary>
        /// Updates the date items in the combo box list (command text items are kept)
        /// </summary>
        /// <param name="dataList">New list of data items</param>
        /// <param name="getText">Function to generate the text description for each data item, to be displayed in the combo box</param>
        /// <param name="equalityComparer">Comparer used to keep the current selected item (may be null)</param>
        /// <exception cref="ArgumentOutOfRangeException">There are null elements in <paramref name="dataList"/></exception>
        /// <returns>True if the selection was kept (or if there was no selection), false if the previously selected element was not found</returns>
        public bool SetDataList(IEnumerable<T> dataList, Func<T, string> getText, Func<T, T, bool> equalityComparer)
        {
            // Capture the selection before replacing the data source, so it can be restored after.
            var hasSelection = HasSelection();
            var selectedItem = (!hasSelection) ? null : GetSelectedItem();
            var selectedCommandText = (!hasSelection) ? null : GetSelectedCommandText();
            // Rebuild the data source: command-text entries first, then the new data items.
            ComboBox.DataSource = CommandTextList.Concat(
                dataList.Select(item => (item is null)
                    ? throw new ArgumentOutOfRangeException(nameof(dataList))
                    : new Tuple<T, string>(item, getText(item))))
                .ToList();
            if (!hasSelection)
            {
                return true;
            }
            else if (!(selectedCommandText is null))
            {
                return SetSelectedCommandText(selectedCommandText);
            }
            else if ((!(selectedItem is null)) && (!(equalityComparer is null)))
            {
                return SetSelectedItem(item => equalityComparer(item, selectedItem));
            }
            else
            {
                return false;
            }
        }
        #endregion
        #region Get the parent form
        /// <summary>
        /// Retrieves the form that the control is on
        /// </summary>
        /// <returns>The <see cref="Form"/> that the control is on</returns>
        public Form FindForm()
        {
            return ComboBox?.FindForm();
        }
        #endregion
    }
}
| 37.425532 | 160 | 0.570684 | [
"MIT"
] | JTOne123/wdnutils | src/WDNUtils.Win32/WinForms/ComboBoxClassController.cs | 10,556 | C# |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
namespace Newtonsoft.Json.Tests.TestObjects
{
    /// <summary>
    /// Test object whose <see cref="Child"/> property references its own type, allowing
    /// construction of (potentially circular) object chains for serializer tests.
    /// </summary>
    public class CircularReferenceClass
    {
        // Serialization fails if Name is absent from the JSON.
        [JsonProperty(Required = Required.Always)]
        public string Name { get; set; }
        // A null Child is omitted from the serialized output.
        [JsonProperty(NullValueHandling = NullValueHandling.Ignore)]
        public CircularReferenceClass Child { get; set; }
    }
} | 41.4 | 68 | 0.756384 | [
"MIT"
] | Chimpaneez/LiveSplit | LiveSplit/Libs/JSON.Net/Source/Src/Newtonsoft.Json.Tests/TestObjects/CircularReferenceClass.cs | 1,451 | C# |
using System;
using MAVN.Numerics;
namespace MAVN.Job.TokensStatistics.Domain.Models
{
    /// <summary>
    /// A single day's token statistics entry: the day it applies to and the token amount.
    /// </summary>
    public class TokensStatistic
    {
        // The calendar day this statistic covers. Presumably date-only; confirm with callers.
        public DateTime Day { get; set; }
        // Token amount for the day, using the high-precision Money18 type.
        public Money18 Amount { get; set; }
    }
}
| 17.076923 | 49 | 0.657658 | [
"MIT"
] | IliyanIlievPH/MAVN.Job.TokensStatistics | src/MAVN.Job.TokensStatistics.Domain/Models/TokenStatistic.cs | 222 | C# |
//Copyright (c) Service Stack LLC. All Rights Reserved.
//License: https://raw.github.com/ServiceStack/ServiceStack/master/license.txt
#if !(PCL || LITE)
using System;
using System.Collections.Generic;
using System.Dynamic;
using ServiceStack.Text;
using ServiceStack.Text.Common;
using ServiceStack.Text.Json;
using System.Linq;
using System.Text;
namespace ServiceStack
{
    /// <summary>
    /// Deserializes a serialized map into an <see cref="ExpandoObject"/>, using the
    /// <typeparamref name="TSerializer"/>'s tokenizer to walk the input string.
    /// Nested objects become nested dynamic maps; arrays become List&lt;object&gt;.
    /// </summary>
    public static class DeserializeDynamic<TSerializer>
        where TSerializer : ITypeSerializer
    {
        private static readonly ITypeSerializer Serializer = JsWriter.GetTypeSerializer<TSerializer>();
        // Cached delegate so callers can hold a stable reference to the parse function.
        private static readonly ParseStringDelegate CachedParseFn;
        static DeserializeDynamic()
        {
            CachedParseFn = ParseDynamic;
        }
        public static ParseStringDelegate Parse
        {
            get { return CachedParseFn; }
        }
        /// <summary>
        /// Parses a serialized map into an ExpandoObject. Recurses for nested objects.
        /// </summary>
        public static IDynamicMetaObjectProvider ParseDynamic(string value)
        {
            var index = VerifyAndGetStartIndex(value, typeof(ExpandoObject));
            var result = new ExpandoObject();
            if (JsonTypeSerializer.IsEmptyMap(value)) return result;
            var container = (IDictionary<String, Object>)result;
            var tryToParsePrimitiveTypes = JsConfig.TryToParsePrimitiveTypeValues;
            var valueLength = value.Length;
            // The serializer's Eat* methods advance 'index' by ref through the input string.
            while (index < valueLength)
            {
                var keyValue = Serializer.EatMapKey(value, ref index);
                Serializer.EatMapKeySeperator(value, ref index);
                var elementValue = Serializer.EatValue(value, ref index);
                var mapKey = Serializer.UnescapeString(keyValue);
                if (JsonUtils.IsJsObject(elementValue))
                {
                    // Nested object: recurse into another dynamic map.
                    container[mapKey] = ParseDynamic(elementValue);
                }
                else if (JsonUtils.IsJsArray(elementValue))
                {
                    container[mapKey] = DeserializeList<List<object>, TSerializer>.Parse(elementValue);
                }
                else if (tryToParsePrimitiveTypes)
                {
                    // Fall back to the raw (unescaped) string when primitive parsing fails.
                    container[mapKey] = DeserializeType<TSerializer>.ParsePrimitive(elementValue) ?? Serializer.UnescapeString(elementValue);
                }
                else
                {
                    container[mapKey] = Serializer.UnescapeString(elementValue);
                }
                Serializer.EatItemSeperatorOrMapEndChar(value, ref index);
            }
            return result;
        }
        /// <summary>
        /// Consumes the opening map character, logging (not throwing) when it is missing.
        /// Returns the index positioned after the map start (or 0 if absent).
        /// </summary>
        private static int VerifyAndGetStartIndex(string value, Type createMapType)
        {
            var index = 0;
            if (!Serializer.EatMapStartChar(value, ref index))
            {
                //Don't throw ex because some KeyValueDataContractDeserializer don't have '{}'
                Tracer.Instance.WriteDebug("WARN: Map definitions should start with a '{0}', expecting serialized type '{1}', got string starting with: {2}",
                    JsWriter.MapStartChar, createMapType != null ? createMapType.Name : "Dictionary<,>", value.Substring(0, value.Length < 50 ? value.Length : 50));
            }
            return index;
        }
    }
//TODO: Workout how to fix broken CoreCLR SL5 build that uses dynamic
#if !(SL5 && CORECLR)
    /// <summary>
    /// A <see cref="DynamicObject"/> backed by a string-keyed dictionary whose keys are
    /// normalized to underscored lower-case (e.g. "FirstName" -> "first_name").
    /// Member reads lazily materialize nested objects/arrays from their JSON text.
    /// </summary>
    public class DynamicJson : DynamicObject
    {
        private readonly IDictionary<string, object> _hash = new Dictionary<string, object>();
        public static string Serialize(dynamic instance)
        {
            var json = JsonSerializer.SerializeToString(instance);
            return json;
        }
        public static dynamic Deserialize(string json)
        {
            // Support arbitrary nesting by using JsonObject
            var deserialized = JsonSerializer.DeserializeFromString<JsonObject>(json);
            var hash = deserialized.ToDictionary<KeyValuePair<string, string>, string, object>(entry => entry.Key, entry => entry.Value);
            return new DynamicJson(hash);
        }
        public DynamicJson(IEnumerable<KeyValuePair<string, object>> hash)
        {
            _hash.Clear();
            foreach (var entry in hash)
            {
                // Keys are normalized on the way in so later dynamic member lookups match.
                _hash.Add(Underscored(entry.Key), entry.Value);
            }
        }
        public override bool TrySetMember(SetMemberBinder binder, object value)
        {
            var name = Underscored(binder.Name);
            _hash[name] = value;
            // Reference comparison against the value just stored; reads back the same reference.
            return _hash[name] == value;
        }
        public override bool TryGetMember(GetMemberBinder binder, out object result)
        {
            var name = Underscored(binder.Name);
            return YieldMember(name, out result);
        }
        public override string ToString()
        {
            return JsonSerializer.SerializeToString(_hash);
        }
        /// <summary>
        /// Resolves a (normalized) member name: nested JSON objects are deserialized into
        /// DynamicJson, JSON arrays into DynamicJson[], everything else is returned as the
        /// stored value's string form.
        /// </summary>
        private bool YieldMember(string name, out object result)
        {
            if (_hash.ContainsKey(name))
            {
                var json = _hash[name].ToString();
                if (json.TrimStart(' ').StartsWith("{", StringComparison.Ordinal))
                {
                    result = Deserialize(json);
                    return true;
                }
                else if (json.TrimStart(' ').StartsWith("[", StringComparison.Ordinal))
                {
                    result = JsonArrayObjects.Parse(json).Select(a =>
                    {
                        var hash = a.ToDictionary<KeyValuePair<string, string>, string, object>(entry => entry.Key, entry => entry.Value);
                        return new DynamicJson(hash);
                    }).ToArray();
                    return true;
                }
                result = json;
                // Reference comparison: true when the stored value was already this string.
                return _hash[name] == result;
            }
            result = null;
            return false;
        }
        internal static string Underscored(string pascalCase)
        {
            return Underscored(pascalCase.ToCharArray());
        }
        /// <summary>
        /// Converts PascalCase to underscored lower-case: an underscore is inserted before
        /// every upper-case character except the first, then the whole string is lower-cased.
        /// </summary>
        internal static string Underscored(IEnumerable<char> pascalCase)
        {
            var sb = StringBuilderCache.Allocate();
            var i = 0;
            foreach (var c in pascalCase)
            {
                if (char.IsUpper(c) && i > 0)
                {
                    sb.Append("_");
                }
                sb.Append(c);
                i++;
            }
            return StringBuilderCache.ReturnAndFree(sb).ToLowerInvariant();
        }
    }
}
#endif
| 35.291005 | 165 | 0.549025 | [
"Apache-2.0"
] | MinistryOfMagic/ServiceStack | src/ServiceStack.Pcl.WinStore81/Pcl.Dynamic.cs | 6,482 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace WinNetMeter.Core.Views
{
    /// <summary>
    /// Updater dialog form. Closes itself as soon as it loses focus
    /// (designer-generated layout lives in the partial class counterpart).
    /// </summary>
    public partial class FormUpdater : Form
    {
        public FormUpdater()
        {
            InitializeComponent();
        }
        // Dismiss the form whenever it is deactivated (user clicks elsewhere).
        private void FormUpdater_Deactivate(object sender, EventArgs e)
        {
            Close();
        }
    }
}
| 19.5 | 71 | 0.658777 | [
"MIT"
] | WinTenDev/WinNetMeter | WinNetMeter.Core/Views/FormUpdater.cs | 509 | C# |
/*<FILE_LICENSE>
* Azos (A to Z Application Operating System) Framework
* The A to Z Foundation (a.k.a. Azist) licenses this file to you under the MIT license.
* See the LICENSE file in the project root for more information.
</FILE_LICENSE>*/
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("agdida")]
[assembly: AssemblyDescription("Azos Sky, Gdid Generation Authority")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Azist Group")]
[assembly: AssemblyProduct("agdida")]
[assembly: AssemblyCopyright("Copyright © Azist Group 2019")]
[assembly: AssemblyTrademark("Azos Sky")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("B0E25428-FAAA-485E-BF5F-92EB16E25261")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 40.238095 | 88 | 0.750296 | [
"MIT"
] | saleyn/azos | src/runtimes/netf/agdida/Properties/AssemblyInfo.cs | 1,691 | C# |
using Amazon.JSII.Runtime.Deputy;
#pragma warning disable CS0672,CS0809,CS1591
namespace AlibabaCloud.SDK.ROS.CDK.Cxapi
{
#pragma warning disable CS8618
    // NOTE(review): this type carries jsii binding attributes and appears to be
    // auto-generated from the @alicloud/ros-cdk-cxapi package — presumably edits
    // should be made in the generator input rather than here; confirm before changing.
    [JsiiByValue(fqn: "@alicloud/ros-cdk-cxapi.SynthesisMessage")]
    public class SynthesisMessage : AlibabaCloud.SDK.ROS.CDK.Cxapi.ISynthesisMessage
    {
        // The metadata entry this synthesis message wraps.
        [JsiiProperty(name: "entry", typeJson: "{\"fqn\":\"@alicloud/ros-cdk-assembly-schema.MetadataEntry\"}")]
        public AlibabaCloud.SDK.ROS.CDK.Assembly.Schema.IMetadataEntry Entry
        {
            get;
            set;
        }
        // Identifier of the construct/node the message refers to.
        [JsiiProperty(name: "id", typeJson: "{\"primitive\":\"string\"}")]
        public string Id
        {
            get;
            set;
        }
        // Severity of the message (see SynthesisMessageLevel).
        [JsiiProperty(name: "level", typeJson: "{\"fqn\":\"@alicloud/ros-cdk-cxapi.SynthesisMessageLevel\"}")]
        public AlibabaCloud.SDK.ROS.CDK.Cxapi.SynthesisMessageLevel Level
        {
            get;
            set;
        }
    }
}
| 28.382353 | 112 | 0.603109 | [
"Apache-2.0"
] | piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit | multiple-languages/dotnet/AlibabaCloud.SDK.ROS.CDK.Cxapi/AlibabaCloud/SDK/ROS/CDK/Cxapi/SynthesisMessage.cs | 965 | C# |
using System;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
namespace CherokeeLanguageLearningTool
{
[Serializable]
class UserRecords
{
public int PreviousPhoneticScore {get; set;} //Stores the score for the previous Phonetic to English assessment.
public int TopPhoneticScore {get; set;} //Stores the top score for the Phonetic to English assessment.
public int AttemptedPhoneticAssessments { get; set; } //Counter for number of complete Phonetic to English Assessments.
public int PreviousEnglishScore { get; set; } //Stores the score for the previous English to Phonetic assessment.
public int TopEnglishScore { get; set; } //Stores the top score for the English to Phonetic assessment.
public int AttemptedEnglishAssessments { get; set; } //Counter for number of complete English to Phonetic Assessments.
public int PreviousSyllabaryScore{ get; set; } //Stores the score for the previous Syllabary assessment.
public int TopSyllabaryScore { get; set; } //Stores the top score for the Syllabary assessment.
public int AttemptedSyllabaryAssessments { get; set; } //Counter for number of complete Syllabary Assessments.
public int LearnerLevel {get; set;} //Stores the learner level value.
public string Firstname {get; set;} //Stores the firstname string.
public string Lastname { get; set; } //Stores the lastname string.
public bool Exists { get; set; }
public UserRecords(string fname, string lname) // Constructor that accepts a username and initializes all other values to default.
{
Firstname = fname;
Lastname = lname;
LearnerLevel = 1;
PreviousPhoneticScore = 0;
TopPhoneticScore = 0;
AttemptedPhoneticAssessments = 0;
PreviousEnglishScore = 0;
TopEnglishScore = 0;
AttemptedEnglishAssessments = 0;
PreviousSyllabaryScore = 0;
TopSyllabaryScore = 0;
AttemptedSyllabaryAssessments = 0;
}
/// <summary>
/// Saves the user record in a serialized file to reduce ease of data manipulation.
/// </summary>
public void SaveUserRecord(UserRecords _record)
{
IFormatter formatter = new BinaryFormatter();
//Create a method to store the record to a file.
string username = Firstname + Lastname;
if (username != "")
{
string path;
if (Program.recordsFoldersFound)
{
path = Program.portableVersion ? Program.recordsFolderLocationPortable + username + "Record.txt" : Program.recordsFolderLocation + username + "Record.txt";
}
else
{
path = Properties.Settings.Default.customRecordsPath;
}
Stream stream = new FileStream(path, FileMode.Create, FileAccess.Write);
formatter.Serialize(stream, _record);
stream.Close();
}
}
/// <summary>
/// Loads and deserializes the user record.
/// </summary>
public void LoadUserRecord(UserRecords _record)
{
IFormatter formatter = new BinaryFormatter();
string path;
if (Program.recordsFoldersFound)
{
path = Program.portableVersion ? Program.recordsFolderLocationPortable + Firstname + Lastname + "Record.txt" : Program.recordsFolderLocation + Firstname + Lastname + "Record.txt";
}
else
{
path = Properties.Settings.Default.customRecordsPath + Firstname + Lastname + "Record.txt";
}
if (File.Exists(path))
{
Stream stream = new FileStream(path, FileMode.Open, FileAccess.Read);
_record = (UserRecords)formatter.Deserialize(stream);
Firstname = _record.Firstname;
Lastname = _record.Lastname;
PreviousPhoneticScore = _record.PreviousPhoneticScore;
TopPhoneticScore = _record.TopPhoneticScore;
AttemptedPhoneticAssessments = _record.AttemptedPhoneticAssessments;
PreviousEnglishScore = _record.PreviousEnglishScore;
TopEnglishScore = _record.PreviousEnglishScore;
AttemptedEnglishAssessments = _record.AttemptedEnglishAssessments;
PreviousSyllabaryScore = _record.PreviousSyllabaryScore;
TopSyllabaryScore = _record.TopSyllabaryScore;
AttemptedSyllabaryAssessments = _record.AttemptedSyllabaryAssessments;
LearnerLevel = _record.LearnerLevel;
Exists = true;
stream.Close();
}
else
{
Exists = false;
}
}
}
} | 41.809917 | 195 | 0.611386 | [
"MIT"
] | fined-nsu/CherokeeLanguageStudyTool | CherokeeStudyTool/UserRecords.cs | 5,061 | C# |
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]
[assembly: InternalsVisibleTo("Lokman")]
[assembly: InternalsVisibleTo("Lokman.Client")]
[assembly: InternalsVisibleTo("Lokman.Tests")]
| 33.571429 | 58 | 0.812766 | [
"MIT"
] | vchirikov/lokman | src/InternalsVisibleTo.cs | 235 | C# |
using System.Threading.Tasks;
using EasyAbp.AbpHelper.Core.Commands.Generate.Crud;
using EasyAbp.AbpHelper.Gui.CodeGeneration.Crud.Dtos;
using EasyAbp.AbpHelper.Gui.Shared.Dtos;
namespace EasyAbp.AbpHelper.Gui.CodeGeneration.Crud
{
    /// <summary>
    /// Application service that runs the AbpHelper CRUD code-generation command.
    /// </summary>
    public class CodeGenerationCrudAppService : CodeGenerationAppService, ICodeGenerationCrudAppService
    {
        private readonly CrudCommand _crudCommand;
        public CodeGenerationCrudAppService(CrudCommand crudCommand)
        {
            _crudCommand = crudCommand;
        }
        /// <summary>
        /// Maps the input DTO to a <see cref="CrudCommandOption"/>, runs the CRUD command,
        /// and reports success. Command failures presumably surface as exceptions — confirm.
        /// </summary>
        public virtual async Task<ServiceExecutionResult> GenerateAsync(AbpHelperGenerateCrudInput input)
        {
            await _crudCommand.RunCommand(ObjectMapper.Map<AbpHelperGenerateCrudInput, CrudCommandOption>(input));
            return new ServiceExecutionResult(true);
        }
    }
} | 34.833333 | 114 | 0.739234 | [
"Apache-2.0"
] | aelhadi/AbpHelper.GUI | dotnet/src/EasyAbp.AbpHelper.Gui.Application/CodeGeneration/Crud/CodeGenerationCrudAppService.cs | 838 | C# |
//using System;
//using System.Collections.Generic;
//namespace NextGenSoftware.OASIS.API.ONODE.WebAPI
//{
// public class Account
// {
// //public int Id { get; set; }
// //public string Title { get; set; }
// //public string FirstName { get; set; }
// //public string LastName { get; set; }
// //public string Email { get; set; }
// //public string PasswordHash { get; set; }
// public bool AcceptTerms { get; set; }
// public Role Role { get; set; }
// public string VerificationToken { get; set; }
// public DateTime? Verified { get; set; }
// public bool IsVerified => Verified.HasValue || PasswordReset.HasValue;
// public string ResetToken { get; set; }
// public DateTime? ResetTokenExpires { get; set; }
// public DateTime? PasswordReset { get; set; }
// public DateTime Created { get; set; }
// public DateTime? Updated { get; set; }
// public List<RefreshToken> RefreshTokens { get; set; }
// public bool OwnsToken(string token)
// {
// return this.RefreshTokens?.Find(x => x.Token == token) != null;
// }
// }
//} | 37.125 | 80 | 0.572391 | [
"CC0-1.0"
] | HirenBodhi/Our-World-OASIS-API-HoloNET-HoloUnity-And-.NET-HDK | NextGenSoftware.OASIS.API.ONODE.WebAPI/Entities/Account.cs | 1,188 | C# |
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using System.Windows.Input;
using Windows.UI.Core;
using Windows.UI.Popups;
using Windows.UI.Xaml.Media.Imaging;
using UwpHelpers.Controls.Common;
using UwpHelpers.Controls.Extensions;
using UwpHelpers.Examples.Annotations;
namespace UwpHelpers.Examples.ViewModels
{
public class HttpClientExtensionsViewModel : INotifyPropertyChanged
{
private BitmapImage downloadedImage;
private string downloadedString;
private double downloadProgress;
private bool isBusy;
private string isBusyMessage;
private ICommand downloadImageCommand;
public HttpClientExtensionsViewModel()
{
}
#region properties
public BitmapImage DownloadedImage
{
get { return downloadedImage; }
set { downloadedImage = value; OnPropertyChanged(); }
}
public string DownloadedString
{
get { return downloadedString; }
set { downloadedString = value; OnPropertyChanged();}
}
public double DownloadProgress
{
get { return downloadProgress; }
set { downloadProgress = value; OnPropertyChanged(); }
}
public bool IsBusy
{
get { return isBusy; }
set { isBusy = value; OnPropertyChanged();}
}
public string IsBusyMessage
{
get { return isBusyMessage; }
set { isBusyMessage = value; OnPropertyChanged();}
}
public ICommand DownloadImageCommand => downloadImageCommand ?? (downloadImageCommand=new DelegateCommand(async ()=> await GetImageAsync()));
#endregion
#region methods and event handlers
private async Task GetImageAsync()
{
IsBusy = true;
DownloadProgress = 0;
//hook into the ProgressChanged event, this is where the progress is reported (note there is a DownloadProgresseventArgs in Windows.Imaging but we're using our own
var reporter = new Progress<DownloadProgressArgs>();
reporter.ProgressChanged += Reporter_ProgressChanged;
try
{
IsBusy = true;
var handler = new HttpClientHandler();
if (handler.SupportsAutomaticDecompression)
handler.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;
var bigImageUrl = $"http://www.tomswallpapers.com/images/201505/tomswallpapers.com_28074.jpg?dontCacheMeBro={DateTime.Now.Ticks}";
//be a good citizen and dispose the stream
using (var imageStream = await new HttpClient(handler).DownloadStreamWithProgressAsync(bigImageUrl, reporter))
{
//I'm using BitmapImage, but do what you want with the returnes Stream (to disk, to LumiaSDK effect, to Win2D effect, etc)
DownloadedImage = new BitmapImage();
await DownloadedImage.SetSourceAsync(imageStream.AsRandomAccessStream());
}
}
catch (Exception ex)
{
Debug.WriteLine($"HttpClientExtensionsViewModel.GetImageAsync Exception\r\n{ex}");
await new MessageDialog("Whoops, something went wrong downloading the image. See Debug Output for details").ShowAsync();
}
finally
{
reporter.ProgressChanged -= Reporter_ProgressChanged;
IsBusy = false;
IsBusyMessage = "";
}
}
//This is the event handler to update your UI of progress (there is no need to use UI Dispatcher)
private void Reporter_ProgressChanged(object sender, DownloadProgressArgs e)
{
DownloadProgress = e.PercentComplete;
IsBusyMessage = $"downloading {e.PercentComplete.ToString("N2")}%";
}
#endregion
#region INPC
public event PropertyChangedEventHandler PropertyChanged;
[NotifyPropertyChangedInvocator]
protected virtual void OnPropertyChanged([CallerMemberName] string propertyName = null)
{
PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
}
#endregion
}
}
| 34.238806 | 175 | 0.62075 | [
"MIT"
] | LanceMcCarthy/UwpProjects | UwpHelpers/UwpHelpers.Examples/ViewModels/HttpClientExtensionsViewModel.cs | 4,590 | C# |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Data.Entity.Core.Objects.ELinq
{
    /// <summary>
    /// Discriminates the kinds of initializer metadata handled by the LINQ-to-Entities
    /// translation layer. Member semantics presumably mirror the InitializerMetadata
    /// subclasses elsewhere in this namespace — confirm against those types.
    /// </summary>
    internal enum InitializerMetadataKind
    {
        Grouping,
        ProjectionNew,
        ProjectionInitializer,
        EntityCollection,
    }
}
| 26 | 132 | 0.707101 | [
"Apache-2.0"
] | CZEMacLeod/EntityFramework6 | src/EntityFramework/Core/Objects/ELinq/InitializerFacet.cs | 338 | C# |
/*
* Exchange Web Services Managed API
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
* to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
 /// <summary>
 /// Represents a unified group (Office 365 group) as returned by the EWS
 /// unified-groups operations, together with the per-user state (favorite flag,
 /// last-visited time) for that group.
 /// </summary>
 class UnifiedGroup : ComplexProperty
 {
     /// <summary>
     /// Initializes a new instance of the <see cref="UnifiedGroup"/> class.
     /// </summary>
     UnifiedGroup() :
         base()
     {
     }
     /// <summary>
     /// Gets or sets whether this group is a favorite group
     /// </summary>
     bool IsFavorite { get; set; }
     /// <summary>
     /// Gets or sets the ExternalDirectoryObjectId for this group
     /// </summary>
     String ExternalDirectoryObjectId { get; set; }
     /// <summary>
     /// Gets or sets the LastVisitedTimeUtc for this group and user
     /// </summary>
     String LastVisitedTimeUtc { get; set; }
     /// <summary>
     /// Gets or sets the SmtpAddress associated with this group
     /// </summary>
     String SmtpAddress { get; set; }
     /// <summary>
     /// Gets or sets the LegacyDN associated with this group
     /// </summary>
     String LegacyDN { get; set; }
     /// <summary>
     /// Gets or sets the MailboxGuid associated with this group
     /// </summary>
     String MailboxGuid { get; set; }
     /// <summary>
     /// Gets or sets the DisplayName associated with this group
     /// </summary>
     String DisplayName { get; set; }
     /// <summary>
     /// Gets or sets the AccessType associated with this group
     /// </summary>
     UnifiedGroupAccessType AccessType { get; set; }
     /// <summary>
     /// Reads this UnifiedGroup's properties from XML, consuming the whole
     /// UnifiedGroup element (start tag through end tag). Unrecognized child
     /// elements are skipped.
     /// </summary>
     /// <param name="reader">The reader.</param>
     /// <param name="xmlElementName">The xml element to read.</param>
     @override
     Future<void> LoadFromXml(EwsServiceXmlReader reader, String xmlElementName) async
     {
         reader.EnsureCurrentNodeIsStartElement(XmlNamespace.Types, XmlElementNames.UnifiedGroup);
         // Walk child elements until the UnifiedGroup end tag is reached.
         do
         {
             await reader.Read();
             switch (reader.LocalName)
             {
                 case XmlElementNames.SmtpAddress:
                     this.SmtpAddress = reader.ReadElementValue<String>();
                     break;
                 case XmlElementNames.LegacyDN:
                     this.LegacyDN = reader.ReadElementValue<String>();
                     break;
                 case XmlElementNames.MailboxGuid:
                     this.MailboxGuid = reader.ReadElementValue<String>();
                     break;
                 case XmlElementNames.DisplayName:
                     this.DisplayName = reader.ReadElementValue<String>();
                     break;
                 case XmlElementNames.IsFavorite:
                     this.IsFavorite = reader.ReadElementValue<bool>();
                     break;
                 case XmlElementNames.LastVisitedTimeUtc:
                     this.LastVisitedTimeUtc = reader.ReadElementValue<String>();
                     break;
                 case XmlElementNames.AccessType:
                     this.AccessType = (UnifiedGroupAccessType)Enum.Parse(typeof(UnifiedGroupAccessType), reader.ReadElementValue<String>(), false);
                     break;
                 case XmlElementNames.ExternalDirectoryObjectId:
                     this.ExternalDirectoryObjectId = reader.ReadElementValue<String>();
                     break;
                 default:
                     break;
             }
         }
         while (!reader.IsEndElement(XmlNamespace.Types, XmlElementNames.UnifiedGroup));
         // Skip end element
         reader.EnsureCurrentNodeIsEndElement(XmlNamespace.NotSpecified, XmlElementNames.UnifiedGroup);
         await reader.Read();
     }
 }
| 38.052632 | 151 | 0.591188 | [
"MIT"
] | dmytro-glynskyi/ews | lib/Groups/ComplexProperties/UnifiedGroup.cs | 5,061 | C# |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.ContractsLight;
using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using BuildXL.Native.Streams;
using BuildXL.Native.Tracing;
using BuildXL.Utilities;
using BuildXL.Utilities.Instrumentation.Common;
using BuildXL.Utilities.Tasks;
using BuildXL.Utilities.Tracing;
using Microsoft.Win32.SafeHandles;
using static BuildXL.Native.IO.FileUtilities;
using static BuildXL.Utilities.FormattableStringEx;
using Overlapped = BuildXL.Native.Streams.Overlapped;
#pragma warning disable 1591 // disabling warning about missing API documentation; TODO: Remove this line and write documentation!
#pragma warning disable CA1823 // Unused field
#pragma warning disable SA1203 // Constant fields must appear before non-constant fields
#pragma warning disable SA1139 // Use literal suffix notation instead of casting
#pragma warning disable IDE1006 // Naming rule violation
namespace BuildXL.Native.IO.Windows
{
/// <summary>
/// FileSystem related native implementations for Windows based systems
/// </summary>
public sealed class FileSystemWin : IFileSystem
{
#region Constants
/// <summary>
/// Long path prefix.
/// </summary>
public const string LongPathPrefix = @"\\?\";
/// <summary>
/// Long UNC path prefix.
/// </summary>
public const string LongUNCPathPrefix = @"\\?\UNC\";
/// <summary>
/// NT path prefix.
/// </summary>
public const string NtPathPrefix = @"\??\";
/// <summary>
/// Local device prefix.
/// </summary>
public const string LocalDevicePrefix = @"\\.\";
private const int DefaultBufferSize = 4096;
#endregion
private readonly LoggingContext m_loggingContext;
#region PInvoke and structs
/// <summary>
/// A value representing INVALID_HANDLE_VALUE.
/// </summary>
private static readonly IntPtr INVALID_HANDLE_VALUE = new IntPtr(-1);
/// <summary>
/// OSVERSIONINFOEX
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/ms724833(v=vs.85).aspx
/// </summary>
/// <remarks>
/// This definition is taken with minor modifications from the BCL.
/// </remarks>
        [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
        private sealed class OsVersionInfoEx
        {
            // Marshaled size of this class; assigned into OSVersionInfoSize by the constructor.
            public static readonly int Size = Marshal.SizeOf<OsVersionInfoEx>();
            public OsVersionInfoEx()
            {
                // This must be set to Size before use, since it is validated by consumers such as VerifyVersionInfo.
                OSVersionInfoSize = Size;
            }
            // Field order and types mirror the native OSVERSIONINFOEX layout (LayoutKind.Sequential);
            // do not reorder or retype these fields.
            public int OSVersionInfoSize;
            public int MajorVersion;
            public int MinorVersion;
            public int BuildNumber;
            public int PlatformId;
            // Fixed-size inline string buffer (szCSDVersion in the native structure).
            [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
            public string CSDVersion;
            public ushort ServicePackMajor;
            public ushort ServicePackMinor;
            public short SuiteMask;
            public byte ProductType;
            public byte Reserved;
        }
/// <summary>
/// Request structure indicating this program's supported version range of Usn records.
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/hh802705(v=vs.85).aspx
/// </summary>
[StructLayout(LayoutKind.Sequential)]
private struct ReadFileUsnData
{
/// <summary>
/// Size of this structure (there are no variable length fields).
/// </summary>
public static readonly int Size = Marshal.SizeOf<ReadFileUsnData>();
/// <summary>
/// Indicates that FSCTL_READ_FILE_USN_DATA should return either V2 or V3 records (those with NTFS or ReFS-sized file IDs respectively).
/// </summary>
/// <remarks>
/// This request should work on Windows 8 / Server 2012 and above.
/// </remarks>
public static readonly ReadFileUsnData NtfsAndReFSCompatible = new ReadFileUsnData()
{
MinMajorVersion = 2,
MaxMajorVersion = 3,
};
/// <summary>
/// Indicates that FSCTL_READ_FILE_USN_DATA should return only V2 records (those with NTFS file IDs, even if using ReFS).
/// </summary>
/// <remarks>
/// This request should work on Windows 8 / Server 2012 and above.
/// </remarks>
public static readonly ReadFileUsnData NtfsCompatible = new ReadFileUsnData()
{
MinMajorVersion = 2,
MaxMajorVersion = 2,
};
public ushort MinMajorVersion;
public ushort MaxMajorVersion;
}
        /// <summary>
        /// Managed projection of the native FILE_RENAME_INFO structure, as consumed by
        /// SetFileInformationByHandle with <see cref="FileInfoByHandleClass.FileRenameInfo"/>.
        /// Field order matters for marshaling (LayoutKind.Sequential).
        /// </summary>
        [StructLayout(LayoutKind.Sequential)]
        private struct FileRenameInfo
        {
            // Nonzero to replace an existing file at the target name (native BOOLEAN).
            public byte ReplaceIfExists;
            // Optional handle to a root directory; the new name is relative to it when non-null.
            public IntPtr RootDirectory;
            /// <summary>
            /// Length of the string starting at <see cref="FileName"/> in *bytes* (not characters).
            /// </summary>
            public int FileNameLengthInBytes;
            /// <summary>
            /// First character of filename; this is a variable length array as determined by FileNameLength.
            /// </summary>
            public readonly char FileName;
        }
/// <summary>
/// Union tag for <see cref="FileIdDescriptor"/>.
/// </summary>
/// <remarks>
/// http://msdn.microsoft.com/en-us/library/windows/desktop/aa364227(v=vs.85).aspx
/// </remarks>
private enum FileIdDescriptorType
{
FileId = 0,
// ObjectId = 1, - Not supported
ExtendedFileId = 2,
}
/// <summary>
/// Structure to specify a file ID to <see cref="OpenFileById"/>.
/// </summary>
/// <remarks>
/// On the native side, the ID field is a union of a 64-bit file ID, a 128-bit file ID,
/// and an object ID (GUID). Since we only pass this in to <see cref="OpenFileById"/>
/// we simply specify the ID part to C# as a 128-bit file ID and ensure that the high bytes are
/// empty when we are specifying a 64-bit ID.
/// Note that since downlevel the union members are a GUID and a 64-bit file ID (extended file ID unsupported),
/// the structure size is fortunately same in all cases (because the object ID GUID is 16 bytes / 128-bits).
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa364227(v=vs.85).aspx
/// </remarks>
        [StructLayout(LayoutKind.Sequential)]
        private readonly struct FileIdDescriptor
        {
            // Marshaled size of this struct; copied into the Size field for the native call.
            private static readonly int s_size = Marshal.SizeOf<FileIdDescriptor>();
            // Size of this structure in bytes (dwSize in the native FILE_ID_DESCRIPTOR).
            public readonly int Size;
            // Union discriminator: whether ExtendedFileId carries a 64-bit or 128-bit file ID.
            public readonly FileIdDescriptorType Type;
            // The file ID payload; when only 64-bit IDs are supported, the high bytes are zero.
            public readonly FileId ExtendedFileId;
            /// <summary>
            /// Wraps the given file ID, choosing the extended (128-bit) descriptor type when the
            /// OS supports it and falling back to the 64-bit descriptor type otherwise.
            /// </summary>
            public FileIdDescriptor(FileId fileId)
            {
                if (IsExtendedFileIdSupported())
                {
                    Type = FileIdDescriptorType.ExtendedFileId;
                }
                else
                {
                    Contract.Assume(fileId.High == 0, "File ID should not have high bytes when extended IDs are not supported on the underlying OS");
                    Type = FileIdDescriptorType.FileId;
                }
                Size = s_size;
                ExtendedFileId = fileId;
            }
        }
/// <summary>
/// Header data in common between USN_RECORD_V2 and USN_RECORD_V3. These fields are needed to determine how to interpret a returned record.
/// </summary>
        [StructLayout(LayoutKind.Sequential)]
        private readonly struct NativeUsnRecordHeader
        {
            /// <summary>
            /// Size of the record header in bytes.
            /// </summary>
            public static readonly int Size = Marshal.SizeOf<NativeUsnRecordHeader>();
            // Total length in bytes of the full USN record (header plus version-specific payload).
            public readonly int RecordLength;
            // Record format version (2 for USN_RECORD_V2, 3 for USN_RECORD_V3); determines
            // how the remainder of the record must be interpreted.
            public readonly ushort MajorVersion;
            public readonly ushort MinorVersion;
        }
/// <summary>
/// USN_RECORD_V3
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/hh802708(v=vs.85).aspx
/// </summary>
/// <remarks>
/// The Size is explicitly set to the actual used size + the needing padding to 8-byte alignment
/// (for Usn, Timestamp, etc.). Two of those padding bytes are actually the first character of the filename.
/// </remarks>
[StructLayout(LayoutKind.Sequential, Size = 0x50)]
private readonly struct NativeUsnRecordV3
{
/// <summary>
/// Size of a record with two filename characters (starting at WCHAR FileName[1]; not modeled in the C# struct),
/// or one filename character and two bytes of then-needed padding (zero-length filenames are disallowed).
/// This is the minimum size that should ever be returned.
/// </summary>
public static readonly int MinimumSize = Marshal.SizeOf<NativeUsnRecordV3>();
/// <summary>
/// Maximum size of a single V3 record, assuming the NTFS / ReFS 255 character file name length limit.
/// </summary>
/// <remarks>
/// ( (MaximumComponentLength - 1) * sizeof(WCHAR) + sizeof(USN_RECORD_V3)
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/hh802708(v=vs.85).aspx
/// Due to padding this is perhaps an overestimate.
/// </remarks>
public static readonly int MaximumSize = MinimumSize + (254 * 2);
public readonly NativeUsnRecordHeader Header;
public readonly FileId FileReferenceNumber;
public readonly FileId ParentFileReferenceNumber;
public readonly Usn Usn;
public readonly long TimeStamp;
public readonly uint Reason;
public readonly uint SourceInfo;
public readonly uint SecurityId;
public readonly uint FileAttributes;
public readonly ushort FileNameLength;
public readonly ushort FileNameOffset;
// WCHAR FileName[1];
}
/// <summary>
/// TODO: this is not documented by WDG yet.
/// TODO: OpenSource
/// </summary>
        /// <summary>
        /// Payload for SetFileInformationByHandle with
        /// <see cref="FileInfoByHandleClass.FileDispositionInfoEx"/>; carries the
        /// <see cref="FileDispositionFlags"/> controlling delete semantics.
        /// </summary>
        [StructLayout(LayoutKind.Sequential)]
        private struct FileDispositionInfoEx
        {
            public FileDispositionFlags Flags;
        }
/// <summary>
/// TODO: this is not properly documented by WDG yet.
/// TODO: OpenSource
/// </summary>
[Flags]
private enum FileDispositionFlags : uint
{
#pragma warning disable CA1008 // Enums should have zero value
DoNotDelete = 0x00000000,
#pragma warning restore CA1008 // Enums should have zero value
Delete = 0x00000001,
/// <summary>
/// NTFS default behavior on link removal is when the last handle is closed on that link, the link is physically gone.
/// The link is marked for deletion when the FILE_FLAG_DELETE_ON_CLOSE is specified on open or FileDispositionInfo is called.
/// Although, the link is marked as deleted until the last handle on that link is closed,
/// it can not be re-purposed as it physically exists.
/// This is also true for superseded rename case where the target cannot be deleted if other handles are opened on that link.
/// This makes Windows distinct in nature than how Linux works handling the links where the link name is freed
/// and can be re-purposed as soon as you deleted/rename the link by closing the handle that requested the delete/rename
/// regardless of other handles are opened on that link.
/// FileDispositionInfoEx and FileRenameInfoEx implement the POSIX style delete/rename behavior.
/// For POSIX style superseded rename, the target needs to be opened with FILE_SHARE_DELETE access by other openers.
/// </summary>
PosixSemantics = 0x00000002,
ForceImageSectionCheck = 0x00000004,
OnClose = 0x00000008,
}
/// <summary>
/// USN_RECORD_V2
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365722(v=vs.85).aspx
/// </summary>
/// <remarks>
/// The Size is explicitly set to the actual used size + the needing padding to 8-byte alignment
/// (for Usn, Timestamp, etc.). Two of those padding bytes are actually the first character of the filename.
/// </remarks>
[StructLayout(LayoutKind.Sequential, Size = 0x40)]
private struct NativeUsnRecordV2
{
/// <summary>
/// Size of a record with two filename characters (starting at WCHAR FileName[1]; not modeled in the C# struct),
/// or one filename character and two bytes of then-needed padding (zero-length filenames are disallowed).
/// This is the minimum size that should ever be returned.
/// </summary>
public static readonly int MinimumSize = Marshal.SizeOf<NativeUsnRecordV2>();
/// <summary>
/// Maximum size of a single V2 record, assuming the NTFS / ReFS 255 character file name length limit.
/// </summary>
/// <remarks>
/// ( (MaximumComponentLength - 1) * sizeof(WCHAR) + sizeof(USN_RECORD_V2)
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365722(v=vs.85).aspx
/// Due to padding this is perhaps an overestimate.
/// </remarks>
public static readonly int MaximumSize = MinimumSize + (254 * 2);
public readonly NativeUsnRecordHeader Header;
public readonly ulong FileReferenceNumber;
public readonly ulong ParentFileReferenceNumber;
public readonly Usn Usn;
public readonly long TimeStamp;
public readonly uint Reason;
public readonly uint SourceInfo;
public readonly uint SecurityId;
public readonly uint FileAttributes;
public readonly ushort FileNameLength;
public readonly ushort FileNameOffset;
// WCHAR FileName[1];
}
/// <summary>
/// FILE_INFO_BY_HANDLE_CLASS for GetFileInformationByHandleEx.
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa364953(v=vs.85).aspx
/// </summary>
private enum FileInfoByHandleClass : uint
{
FileBasicInfo = 0x0,
FileStandardInfo = 0x1,
FileNameInfo = 0x2,
FileRenameInfo = 0x3,
FileDispositionInfo = 0x4,
FileAllocationInfo = 0x5,
FileEndOfFileInfo = 0x6,
FileStreamInfo = 0x7,
FileCompressionInfo = 0x8,
FileAttributeTagInfo = 0x9,
FileIdBothDirectoryInfo = 0xa,
FileIdBothDirectoryRestartInfo = 0xb,
FileRemoteProtocolInfo = 0xd,
FileFullDirectoryInfo = 0xe,
FileFullDirectoryRestartInfo = 0xf,
FileStorageInfo = 0x10,
FileAlignmentInfo = 0x11,
FileIdInfo = 0x12,
FileIdExtdDirectoryInfo = 0x13,
FileIdExtdDirectoryRestartInfo = 0x14,
FileDispositionInfoEx = 0x15,
FileRenameInfoEx = 0x16,
}
/// <summary>
/// Whether the hresult status is one that should be treated as a nonexistent file
/// </summary>
/// <remarks>
/// CODESYNC: static bool IsPathNonexistent(DWORD error) function on the Detours side in FileAccessHelper.cpp.
/// CODESYNC: <see cref="OpenFileStatusExtensions.IsNonexistent(OpenFileStatus)"/>
///
/// NotReadyDevice is treated as non-existent probe.
/// BitLocker locked volume is treated as non-existent probe.
/// </remarks>
public static bool IsHresultNonexistent(int hr)
{
return hr == NativeIOConstants.ErrorFileNotFound
|| hr == NativeIOConstants.ErrorPathNotFound
|| hr == NativeIOConstants.ErrorNotReady
|| hr == NativeIOConstants.FveLockedVolume
|| hr == NativeIOConstants.ErrorCantAccessFile
|| hr == NativeIOConstants.ErrorBadPathname
|| hr == NativeIOConstants.ErrorInvalidName
|| hr == NativeIOConstants.ErrorInvalidParameter;
}
/// <summary>
/// <c>FILE_BASIC_INFO</c>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1815:OverrideEqualsAndOperatorEqualsOnValueTypes")]
public struct FileBasicInfo
{
/// <summary>
/// UTC FILETIME of the file's creation.
/// </summary>
public ulong CreationTime;
/// <summary>
/// UTC FILETIME of the last access to the file.
/// </summary>
public ulong LastAccessTime;
/// <summary>
/// UTC FILETIME of the last write to the file.
/// </summary>
public ulong LastWriteTime;
/// <summary>
/// UTC FILETIME of the last change to the file (e.g. attribute change or a write)
/// </summary>
public ulong ChangeTime;
/// <summary>
/// File attributes
/// </summary>
public FileAttributes Attributes;
}
/// <summary>
/// <c>FILE_STANDARD_INFO</c>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1815:OverrideEqualsAndOperatorEqualsOnValueTypes")]
public struct FileStandardInfo
{
/// <summary>
/// The amount of space that is allocated for the file.
/// </summary>
public ulong AllocationSize;
/// <summary>
/// The end of the file.
/// </summary>
public ulong EndOfFile;
/// <summary>
/// The number of links to the file.
/// </summary>
public uint NumberOfLinks;
/// <summary>
/// TRUE if the file in the delete queue; otherwise, false.
/// </summary>
public bool DeletePending;
/// <summary>
/// TRUE if the file is a directory; otherwise, false.
/// </summary>
public bool Directory;
}
        /// <summary>
        /// Win32 CreateFileW: opens or creates a file, directory, or device, returning a handle to it.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        private static extern SafeFileHandle CreateFileW(
            string lpFileName,
            FileDesiredAccess dwDesiredAccess,
            FileShare dwShareMode,
            IntPtr lpSecurityAttributes,
            FileMode dwCreationDisposition,
            FileFlagsAndAttributes dwFlagsAndAttributes,
            IntPtr hTemplateFile);
        /// <summary>
        /// Win32 ReOpenFile: opens an additional handle to the file referenced by an existing handle,
        /// possibly with different access rights, sharing mode, and flags.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        private static extern SafeFileHandle ReOpenFile(
            SafeFileHandle hOriginalFile,
            FileDesiredAccess dwDesiredAccess,
            FileShare dwShareMode,
            FileFlagsAndAttributes dwFlagsAndAttributes);
        /// <summary>
        /// Win32 CreateDirectoryW: creates a new directory at the given path.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, BestFitMapping = false)]
        internal static extern bool CreateDirectoryW(string path, IntPtr lpSecurityAttributes);
[Flags]
private enum FlushFileBuffersFlags : uint
{
/// <summary>
/// Corresponds to <c>FLUSH_FLAGS_FILE_DATA_ONLY</c>.
/// If set, this operation will write the data for the given file from the
/// Windows in-memory cache. This will NOT commit any associated metadata
/// changes. This will NOT send a SYNC to the storage device to flush its
/// cache. Not supported on volume handles. Only supported by the NTFS
/// filesystem.
/// </summary>
FileDataOnly = 0x00000001,
/// <summary>
/// Corresponds to <c>FLUSH_FLAGS_NO_SYNC</c>.
/// If set, this operation will commit both the data and metadata changes for
/// the given file from the Windows in-memory cache. This will NOT send a SYNC
/// to the storage device to flush its cache. Not supported on volume handles.
/// Only supported by the NTFS filesystem.
/// </summary>
NoSync = 0x00000002,
}
/// <summary>
/// Lower-level file-flush facility, like <c>FlushFileBuffers</c>. Allows cache-only flushes without sending an expensive 'sync' command to the underlying disk.
/// See https://msdn.microsoft.com/en-us/library/windows/hardware/hh967720(v=vs.85).aspx
/// </summary>
[DllImport("ntdll.dll", SetLastError = false, CharSet = CharSet.Unicode, ExactSpelling = true)]
private static extern unsafe NtStatus NtFlushBuffersFileEx(
SafeFileHandle handle,
FlushFileBuffersFlags mode,
void* parameters,
int parametersSize,
IoStatusBlock* ioStatusBlock);
        /// <summary>
        /// Win32 OpenFileById: opens a file by its file ID (see <see cref="FileIdDescriptor"/>)
        /// rather than by path. Requires any open handle on the same volume to scope the lookup.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        private static extern SafeFileHandle OpenFileById(
            SafeFileHandle hFile, // Any handle on the relevant volume
            [In] FileIdDescriptor lpFileId,
            FileDesiredAccess dwDesiredAccess,
            FileShare dwShareMode,
            IntPtr lpSecurityAttributes,
            FileFlagsAndAttributes dwFlagsAndAttributes);
/// <summary>
/// Creates an I/O completion port or associates an existing port with a file handle.
/// </summary>
/// <remarks>
/// http://msdn.microsoft.com/en-us/library/windows/desktop/aa363862(v=vs.85).aspx
/// We marshal the result as an IntPtr since, given an <paramref name="existingCompletionPort"/>,
/// we get back the same handle value. Wrapping the same handle value again would result in double-frees on finalize.
/// </remarks>
[DllImport("kernel32.dll", SetLastError = true)]
private static extern IntPtr CreateIoCompletionPort(
SafeFileHandle handle,
SafeIOCompletionPortHandle existingCompletionPort,
IntPtr completionKey,
int numberOfConcurrentThreads);
        /// <summary>
        /// Win32 GetOverlappedResult: retrieves the result (bytes transferred) of an overlapped
        /// operation on the given file handle, optionally waiting for it to complete.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern unsafe bool GetOverlappedResult(
            SafeFileHandle hFile,
            Overlapped* lpOverlapped,
            int* lpNumberOfBytesTransferred,
            [MarshalAs(UnmanagedType.Bool)] bool bWait);
        /// <summary>
        /// Win32 GetQueuedCompletionStatus: dequeues the next completion packet from an
        /// I/O completion port, waiting up to the given number of milliseconds.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        [SuppressMessage("Microsoft.Interoperability", "CA1415:DeclarePInvokesCorrectly", Justification = "Overlapped intentionally redefined.")]
        private static extern unsafe bool GetQueuedCompletionStatus(
            SafeIOCompletionPortHandle hCompletionPort,
            int* lpNumberOfBytes,
            IntPtr* lpCompletionKey,
            Overlapped** lpOverlapped,
            int dwMilliseconds);
        /// <summary>
        /// Win32 PostQueuedCompletionStatus: posts a caller-fabricated completion packet to an
        /// I/O completion port (as if an I/O operation had completed).
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        [SuppressMessage("Microsoft.Interoperability", "CA1415:DeclarePInvokesCorrectly", Justification = "Overlapped intentionally redefined.")]
        private static extern unsafe bool PostQueuedCompletionStatus(
            SafeIOCompletionPortHandle hCompletionPort,
            int dwNumberOfBytesTransferred,
            IntPtr dwCompletionKey,
            Overlapped* lpOverlapped);
        /// <summary>
        /// Flags for <see cref="SetFileCompletionNotificationModes"/>; correspond to the native
        /// FILE_SKIP_COMPLETION_PORT_ON_SUCCESS and FILE_SKIP_SET_EVENT_ON_HANDLE constants.
        /// </summary>
        [Flags]
        private enum FileCompletionMode
        {
            // Do not queue a completion packet when the I/O completes synchronously with success.
            FileSkipCompletionPortOnSuccess = 0x1,
            // Do not signal the file handle's event on I/O completion.
            FileSkipSetEventOnHandle = 0x2,
        }
/// <summary>
/// Sets the mode for dispatching IO completions on the given file handle.
/// </summary>
/// <remarks>
/// Skipping completion port queueing on success (i.e., synchronous completion) avoids wasted thread handoffs but requires an aware caller
/// (that does not assume <c>ERROR_IO_PENDING</c>).
/// Skipping the signaling of the file object itself via <see cref="FileCompletionMode.FileSkipSetEventOnHandle"/> can avoid some
/// wasted work and locking in the event there's not a specific event provided in the corresponding <c>OVERLAPPED</c> structure.
/// See http://blogs.technet.com/b/winserverperformance/archive/2008/06/26/designing-applications-for-high-performance-part-iii.aspx
/// </remarks>
[DllImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
private static extern bool SetFileCompletionNotificationModes(SafeFileHandle handle, FileCompletionMode mode);
        /// <summary>
        /// Win32 ReadFile (overlapped variant): reads bytes from the file handle into the given buffer.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        [SuppressMessage("Microsoft.Interoperability", "CA1415:DeclarePInvokesCorrectly", Justification = "Overlapped intentionally redefined.")]
        private static extern unsafe bool ReadFile(
            SafeFileHandle hFile,
            byte* lpBuffer,
            int nNumberOfBytesToRead,
            int* lpNumberOfBytesRead,
            Overlapped* lpOverlapped);
        /// <summary>
        /// Win32 WriteFile (overlapped variant, raw buffer pointer): writes bytes from the given buffer
        /// to the file handle.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        [SuppressMessage("Microsoft.Interoperability", "CA1415:DeclarePInvokesCorrectly", Justification = "Overlapped intentionally redefined.")]
        private static extern unsafe bool WriteFile(
            SafeFileHandle hFile,
            byte* lpBuffer,
            int nNumberOfBytesToWrite,
            int* lpNumberOfBytesWritten,
            Overlapped* lpOverlapped);
        /// <summary>
        /// Win32 WriteFile (managed byte[] variant using <see cref="NativeOverlapped"/>): writes bytes
        /// from the given array to the file handle.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
        [return: MarshalAs(UnmanagedType.Bool)]
        [SuppressMessage("Microsoft.Interoperability", "CA1415:DeclarePInvokesCorrectly", Justification = "Overlapped intentionally redefined.")]
        public static extern unsafe bool WriteFile(
            SafeFileHandle handle,
            byte[] buffer,
            int numBytesToWrite,
            out int numBytesWritten,
            NativeOverlapped* lpOverlapped);
        /// <summary>
        /// Win32 VerifyVersionInfo: compares the given <see cref="OsVersionInfoEx"/> requirements
        /// against the running OS version using the supplied type/condition masks.
        /// </summary>
        [SuppressMessage("Microsoft.Globalization", "CA2101:SpecifyMarshalingForPInvokeStringArguments", MessageId = "OsVersionInfoEx.CSDVersion",
            Justification = "This appears impossible to satisfy.")]
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, BestFitMapping = false, ThrowOnUnmappableChar = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool VerifyVersionInfo(
            [In] OsVersionInfoEx versionInfo,
            uint typeMask,
            ulong conditionMask);
        /// <summary>
        /// Win32 VerSetConditionMask: builds up the condition mask consumed by
        /// <see cref="VerifyVersionInfo"/>, one version field at a time.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        private static extern ulong VerSetConditionMask(
            ulong existingMask,
            uint typeMask,
            byte conditionMask);
        /// <summary>
        /// Win32 DeviceIoControl (raw buffer variant): sends a control code directly to a device
        /// driver with opaque input/output buffers.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool DeviceIoControl(
            SafeFileHandle deviceHandle,
            uint ioControlCode,
            IntPtr inputBuffer,
            int inputBufferSize,
            IntPtr outputBuffer,
            int outputBufferSize,
            out int bytesReturned,
            IntPtr overlapped);
        /// <summary>
        /// Win32 DeviceIoControl overload marshaling the output directly into a
        /// QueryUsnJournalData instance (used for FSCTL_QUERY_USN_JOURNAL).
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool DeviceIoControl(
            SafeFileHandle deviceHandle,
            uint ioControlCode,
            IntPtr inputBuffer,
            int inputBufferSize,
            [Out] QueryUsnJournalData outputBuffer,
            int outputBufferSize,
            out int bytesReturned,
            IntPtr overlapped);
        /// <summary>
        /// Win32 DeviceIoControl overload for storage property queries, returning a
        /// seek-penalty descriptor (e.g. to detect SSDs).
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool DeviceIoControl(
            SafeFileHandle hDevice,
            uint ioControlCode,
            ref STORAGE_PROPERTY_QUERY inputBuffer,
            int inputBufferSize,
            out DEVICE_SEEK_PENALTY_DESCRIPTOR outputBuffer,
            int outputBufferSize,
            out uint bytesReturned,
            IntPtr overlapped);
        /// <summary>
        /// Win32 GetFileInformationByHandleEx: retrieves file information of the class identified by
        /// <paramref name="fileInformationClass"/> (see <see cref="FileInfoByHandleClass"/>) into the
        /// caller-supplied buffer.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool GetFileInformationByHandleEx(
            SafeFileHandle deviceHandle,
            uint fileInformationClass,
            IntPtr outputFileInformationBuffer,
            int outputBufferSize);
        /// <summary>
        /// Win32 SetFileInformationByHandle: sets file information of the class identified by
        /// <paramref name="fileInformationClass"/> (see <see cref="FileInfoByHandleClass"/>) from the
        /// caller-supplied buffer.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool SetFileInformationByHandle(
            SafeFileHandle hFile,
            uint fileInformationClass,
            IntPtr lpFileInformation,
            int bufferSize);
        /// <summary>
        /// Win32 GetFileSizeEx: retrieves the size in bytes of the file referenced by the handle.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool GetFileSizeEx(
            SafeFileHandle handle,
            out long size);
        /// <summary>
        /// Win32 GetVolumeInformationByHandleW: queries volume metadata (name, serial, filesystem
        /// name/flags) for the volume containing the given file handle. Any output pointer/buffer
        /// may be null/zero-length to skip that piece of information.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool GetVolumeInformationByHandleW(
            SafeFileHandle fileHandle,
            [Out] StringBuilder volumeNameBuffer, // Buffer for volume name (if not null)
            int volumeNameBufferSize,
            IntPtr volumeSerial, // Optional pointer to a DWORD to be populated with the volume serial number
            IntPtr maximumComponentLength, // Optional pointer to a DWORD to be populated with the max component length.
            IntPtr fileSystemFlags, // Optional pointer to a DWORD to be populated with flags of supported features on the volume (e.g. hardlinks)
            [Out] StringBuilder fileSystemNameBuffer, // Buffer for volume FS, e.g. "NTFS" (if not null)
            int fileSystemNameBufferSize);
        /// <summary>
        /// Win32 FindFirstVolumeW: begins an enumeration of volumes, writing the first volume's
        /// GUID path into the buffer and returning a find handle for <see cref="FindNextVolumeW"/>.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        private static extern SafeFindVolumeHandle FindFirstVolumeW(
            [Out] StringBuilder volumeNameBuffer,
            int volumeNameBufferLength);
        /// <summary>
        /// Win32 FindNextVolumeW: advances a volume enumeration started by <see cref="FindFirstVolumeW"/>.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool FindNextVolumeW(
            SafeFindVolumeHandle findVolumeHandle,
            [Out] StringBuilder volumeNameBuffer,
            int volumeNameBufferLength);
/// <summary>
/// Disposes a <see cref="SafeFindVolumeHandle"/>
/// </summary>
/// <remarks>
/// Since this is used by <see cref="SafeFindVolumeHandle"/> itself, we expose
/// the inner <see cref="IntPtr"/> (rather than trying to marshal the handle wrapper
/// from within its own release method).
/// </remarks>
[DllImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
internal static extern bool FindVolumeClose(IntPtr findVolumeHandle);
/// <summary>
/// Disposes a typical handle.
/// </summary>
/// <remarks>
/// Since this is used by safe handle wrappers (e.g. <see cref="SafeIOCompletionPortHandle"/>), we expose
/// the inner <see cref="IntPtr"/> (rather than trying to marshal the handle wrapper
/// from within its own release method).
/// </remarks>
[DllImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
public static extern bool CloseHandle(IntPtr handle);
        /// <summary>
        /// Win32 CreateHardLinkW: creates a hard link at <paramref name="lpFileName"/> pointing at
        /// the existing file <paramref name="lpExistingFileName"/> (same volume only).
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        internal static extern bool CreateHardLinkW(string lpFileName, string lpExistingFileName, IntPtr reservedSecurityAttributes);
/// <summary>
/// Symbolic link target.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue")]
[SuppressMessage("Microsoft.Naming", "CA1714:FlagsEnumsShouldHavePluralNames")]
[Flags]
public enum SymbolicLinkTarget : uint
{
/// <summary>
/// The link target is a file.
/// </summary>
File = 0x0,
/// <summary>
/// The link target is a directory.
/// </summary>
Directory = 0x1,
/// <summary>
/// Specify this flag to allow creation of symbolic links when the process is not elevated.
/// </summary>
AllowUnprivilegedCreate = 0x2
}
/// <summary>
/// WinAPI for creating symlinks.
/// Although the documentation says "If the function succeeds, the return value is nonzero",
/// it's not entirely true --- if the call succeeds, the return value is non-negative!
/// </summary>
/// <remarks>
/// For the reason stated above, we cannot MarshalAs boolean because all negative values would be converted to 'true'.
/// SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE mentioned in the doc does not really do what the doc says it should do:
/// it allows symlinks to be created from non-elevated process ONLY if a process is run under Windows 10 (14972) AND
/// a user enabled Developer Mode. If any of these conditions is not met - the flag is simply ignored.
/// https://docs.microsoft.com/en-us/windows/desktop/api/winbase/nf-winbase-createsymboliclinkw
/// </remarks>
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
internal static extern int CreateSymbolicLinkW(string lpSymlinkFileName, string lpTargetFileName, SymbolicLinkTarget dwFlags);
/// <summary>
/// When this flag is set on the process or thread error mode, 'the system does not display the critical-error-handler message box'.
/// In this context, we don't want a weird message box prompting to insert a CD / floppy when querying volume information.
/// </summary>
/// <remarks>
/// Seriously?!
/// Corresponds to SEM_FAILCRITICALERRORS
/// </remarks>
private const int SemFailCriticalErrors = 1;
        /// <summary>
        /// Win32 GetThreadErrorMode: returns the calling thread's current error mode
        /// (e.g. whether <see cref="SemFailCriticalErrors"/> is set).
        /// </summary>
        /// <os>Windows 7+</os>
        [DllImport("kernel32.dll", SetLastError = false)]
        private static extern int GetThreadErrorMode();
        /// <summary>
        /// Win32 SetThreadErrorMode: replaces the calling thread's error mode, returning the
        /// previous mode via <paramref name="oldErrorMode"/>.
        /// </summary>
        /// <os>Windows 7+</os>
        [DllImport("kernel32.dll", SetLastError = true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool SetThreadErrorMode(int newErrorMode, out int oldErrorMode);
        /// <summary>
        /// Win32 GetFinalPathNameByHandleW: resolves the final (canonical) path for an open file
        /// handle into the supplied buffer; the return value is the required/written length.
        /// </summary>
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
        private static extern int GetFinalPathNameByHandleW(SafeFileHandle hFile, [Out] StringBuilder filePathBuffer, int filePathBufferSize, int flags);
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.Bool)]
internal static extern bool RemoveDirectoryW(
string lpPathName);
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
[SuppressMessage("Microsoft.Interoperability", "CA1401:PInvokesShouldNotBeVisible", Justification = "Needed for custom enumeration.")]
public static extern SafeFindFileHandle FindFirstFileW(
string lpFileName,
out WIN32_FIND_DATA lpFindFileData);
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.Bool)]
[SuppressMessage("Microsoft.Interoperability", "CA1401:PInvokesShouldNotBeVisible", Justification = "Needed for custom enumeration.")]
public static extern bool FindNextFileW(SafeHandle hFindFile, out WIN32_FIND_DATA lpFindFileData);
[DllImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
internal static extern bool FindClose(IntPtr findFileHandle);
[DllImport("shlwapi.dll", CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.Bool)]
[SuppressMessage("Microsoft.Interoperability", "CA1401:PInvokesShouldNotBeVisible", Justification = "Needed for creating symlinks.")]
public static extern bool PathMatchSpecW([In] string pszFileParam, [In] string pszSpec);
/// <summary>
/// Values for the DwReserved0 member of the WIN32_FIND_DATA struct.
/// </summary>
/// <remarks>
/// These mirror the reparse-tag values from [MS-FSCC]; DwReserved0 is only meaningful when the
/// entry carries the reparse-point attribute (per the Win32 WIN32_FIND_DATA documentation).
/// </remarks>
public enum DwReserved0Flag : uint
{
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_RESERVED_ZERO = 0x00000000, // Reserved reparse tag value.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_RESERVED_ONE = 0x00000001, // Reserved reparse tag value.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003, // Used for mount point support, specified in section 2.1.2.5.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_HSM = 0xC0000004, // Obsolete.Used by legacy Hierarchical Storage Manager Product.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_HSM2 = 0x80000006, // Obsolete.Used by legacy Hierarchical Storage Manager Product.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_DRIVER_EXTENDER = 0x80000005, // Home server drive extender.<3>
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_SIS = 0x80000007, // Used by single-instance storage (SIS) filter driver.Server-side interpretation only, not meaningful over the wire.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_DFS = 0x8000000A, // Used by the DFS filter.The DFS is described in the Distributed File System (DFS): Referral Protocol Specification[MS - DFSC]. Server-side interpretation only, not meaningful over the wire.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_DFSR = 0x80000012, // Used by the DFS filter.The DFS is described in [MS-DFSC]. Server-side interpretation only, not meaningful over the wire.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_FILTER_MANAGER = 0x8000000B, // Used by filter manager test harness.<4>
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_SYMLINK = 0xA000000C, // Used for symbolic link support. See section 2.1.2.4.
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_WCIFS = 0x80000018, // The tag for a WCI reparse point
[SuppressMessage("Microsoft.Naming", "CA1700:DoNotNameEnumValuesReserved")]
[SuppressMessage("Microsoft.Naming", "CA1707:RemoveUnderscoresFromMemberName")]
IO_REPARSE_TAG_WCIFS_TOMBSTONE = 0xA000001F, // The tag for a WCI tombstone file
}
/// <summary>Native SetFileAttributes; used instead of the managed API because it supports long paths.</summary>
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.Bool)]
[SuppressMessage("Microsoft.Usage", "CA2205:UseManagedEquivalentsOfWin32Api",
Justification = "We explicitly need to call the native SetFileAttributes as the managed one does not support long paths.")]
internal static extern bool SetFileAttributesW(
string lpFileName,
FileAttributes dwFileAttributes);
/// <summary>Native GetFileAttributes; used instead of the managed API because it supports long paths.</summary>
/// <remarks>Per the Win32 docs, failure is signalled by INVALID_FILE_ATTRIBUTES (0xFFFFFFFF); callers should check GetLastError in that case.</remarks>
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
[return: MarshalAs(UnmanagedType.U4)]
[SuppressMessage("Microsoft.Usage", "CA2205:UseManagedEquivalentsOfWin32Api",
Justification = "We explicitly need to call the native GetFileAttributes as the managed one does not support long paths.")]
public static extern uint GetFileAttributesW(
string lpFileName);
/// <summary>
/// Storage property query
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ff800840(v=vs.85).aspx
/// </summary>
[StructLayout(LayoutKind.Sequential)]
private struct STORAGE_PROPERTY_QUERY
{
public uint PropertyId; // e.g. StorageDeviceSeekPenaltyProperty below
public uint QueryType; // e.g. PropertyStandardQuery below
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 1)]
public byte[] AdditionalParameters;
}
// STORAGE_PROPERTY_ID / STORAGE_QUERY_TYPE values used with IOCTL_STORAGE_QUERY_PROPERTY.
private const uint StorageDeviceSeekPenaltyProperty = 7;
private const uint PropertyStandardQuery = 0;
/// <summary>
/// Specifies whether a device has a seek penalty.
/// https://msdn.microsoft.com/en-us/library/ff552549.aspx
/// </summary>
[StructLayout(LayoutKind.Sequential)]
private struct DEVICE_SEEK_PENALTY_DESCRIPTOR
{
public readonly uint Version;
public readonly uint Size;
[MarshalAs(UnmanagedType.U1)]
public readonly bool IncursSeekPenalty;
}
// Consts from sdk\inc\winioctl.h
private const uint METHOD_BUFFERED = 0;
private const uint FILE_ANY_ACCESS = 0;
private const uint FILE_DEVICE_MASS_STORAGE = 0x0000002d;
private const uint IOCTL_STORAGE_BASE = FILE_DEVICE_MASS_STORAGE;
// Composed the same way the CTL_CODE macro does in winioctl.h.
private static readonly uint IOCTL_STORAGE_QUERY_PROPERTY = CTL_CODE(IOCTL_STORAGE_BASE, 0x500, METHOD_BUFFERED, FILE_ANY_ACCESS);
/// <summary>
/// Computes a Win32 device I/O control code from its four components;
/// equivalent to the CTL_CODE macro in winioctl.h.
/// </summary>
private static uint CTL_CODE(uint deviceType, uint function, uint method, uint access)
{
    // Layout (per winioctl.h): deviceType in bits 16-31, access in bits 14-15,
    // function in bits 2-13, transfer method in bits 0-1.
    uint code = deviceType << 16;
    code |= access << 14;
    code |= function << 2;
    code |= method;
    return code;
}
/// <summary>
/// Reparse data buffer - from ntifs.h.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
private struct REPARSE_DATA_BUFFER
{
public DwReserved0Flag ReparseTag; // which kind of reparse point this is (symlink, mount point, ...)
public ushort ReparseDataLength; // byte count of the data following the common header (per ntifs.h)
public readonly ushort Reserved;
public ushort SubstituteNameOffset;
public ushort SubstituteNameLength;
public ushort PrintNameOffset;
public ushort PrintNameLength;
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 0x3FF0)]
public byte[] PathBuffer;
}
// FSCTL code used with DeviceIoControl to set a reparse point on an open handle (winioctl.h).
private const int FSCTL_SET_REPARSE_POINT = 0x000900A4;
/// <summary>Resolves a (possibly relative) path to a full path; returns the required/used buffer length in characters.</summary>
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode, ExactSpelling = true)]
private static extern uint GetFullPathNameW(string lpFileName, uint nBufferLength, [Out] StringBuilder lpBuffer, IntPtr lpFilePart);
/// <summary>Flags for <see cref="MoveFileEx"/>; values mirror the MOVEFILE_* constants in winbase.h.</summary>
[Flags]
private enum MoveFileFlags
{
MOVEFILE_REPLACE_EXISTING = 0x00000001,
MOVEFILE_COPY_ALLOWED = 0x00000002,
MOVEFILE_DELAY_UNTIL_REBOOT = 0x00000004,
MOVEFILE_WRITE_THROUGH = 0x00000008,
MOVEFILE_CREATE_HARDLINK = 0x00000010,
MOVEFILE_FAIL_IF_NOT_TRACKABLE = 0x00000020
}
/// <summary>Moves (renames) a file with the given <see cref="MoveFileFlags"/>.</summary>
[return: MarshalAs(UnmanagedType.Bool)]
[DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
private static extern bool MoveFileEx(string lpExistingFileName, string lpNewFileName, MoveFileFlags dwFlags);
#endregion
/// <summary>
/// Convenience overload of <see cref="StaticIsOSVersionGreaterOrEqual(int, int)"/> taking a <see cref="Version"/>.
/// </summary>
public static bool StaticIsOSVersionGreaterOrEqual(Version version)
    => StaticIsOSVersionGreaterOrEqual(version.Major, version.Minor);
/// <summary>
/// Calls VerifyVersionInfo to determine if the running OS's version meets or exceeded the given major.minor version.
/// </summary>
/// <remarks>
/// Unlike <see cref="Environment.OSVersion"/>, this works for Windows 8.1 and above.
/// See the deprecation warnings at http://msdn.microsoft.com/en-us/library/windows/desktop/ms724451(v=vs.85).aspx
/// </remarks>
public static bool StaticIsOSVersionGreaterOrEqual(int major, int minor)
{
const uint ErrorOldWinVersion = 0x47e; // ERROR_OLD_WIN_VERSION
const uint MajorVersion = 0x2; // VER_MAJOR_VERSION
const uint MinorVersion = 0x1; // VER_MINOR_VERSION
const byte CompareGreaterOrEqual = 0x3; // VER_GREATER_EQUAL
// Build a condition mask requiring BOTH the major and the minor version to compare >=.
ulong conditionMask = VerSetConditionMask(0, MajorVersion, CompareGreaterOrEqual);
conditionMask = VerSetConditionMask(conditionMask, MinorVersion, CompareGreaterOrEqual);
OsVersionInfoEx comparand = new OsVersionInfoEx { OSVersionInfoSize = OsVersionInfoEx.Size, MajorVersion = major, MinorVersion = minor };
// MajorVersion | MinorVersion (0x2 | 0x1 == 0x3) is the type mask telling VerifyVersionInfo which members to test.
bool satisfied = VerifyVersionInfo(comparand, MajorVersion | MinorVersion, conditionMask);
int hr = Marshal.GetLastWin32Error();
// ERROR_OLD_WIN_VERSION is the expected 'comparison failed' outcome; anything else is a genuine API failure.
if (!satisfied && hr != ErrorOldWinVersion)
{
throw ThrowForNativeFailure(hr, "VerifyVersionInfo");
}
return satisfied;
}
/// <summary>Windows 8 / Server 2012 (6.2) - minimum OS version for the longer path limit used by <see cref="MaxDirectoryPathLength"/>.</summary>
public static readonly Version MinWindowsVersionThatSupportsLongPaths = new Version(major: 6, minor: 2);
/// <summary>Windows 8 / Server 2012 (6.2) - minimum OS version that supports nested job objects.</summary>
public static readonly Version MinWindowsVersionThatSupportsNestedJobs = new Version(major: 6, minor: 2);
/// <summary>Windows XP (5.1) - minimum OS version that supports WOW64 processes.</summary>
public static readonly Version MinWindowsVersionThatSupportsWow64Processes = new Version(major: 5, minor: 1);
/// <summary>Directory path length limit used on OS versions older than <see cref="MinWindowsVersionThatSupportsLongPaths"/>.</summary>
public static readonly int MaxDirectoryPathOld = 130;
/// <summary>Directory path length limit used on OS versions at or above <see cref="MinWindowsVersionThatSupportsLongPaths"/>.</summary>
public static readonly int MaxDirectoryPathNew = 260;
/// <inheritdoc />
public int MaxDirectoryPathLength()
{
    // The longer limit applies once the OS supports long paths; older versions get the conservative limit.
    bool supportsLongPaths = StaticIsOSVersionGreaterOrEqual(MinWindowsVersionThatSupportsLongPaths);
    return supportsLongPaths ? MaxDirectoryPathNew : MaxDirectoryPathOld;
}
// Lazily-evaluated probe result; assigned in the constructor. The probe creates temp files
// (see CheckSupportUnprivilegedCreateSymbolicLinkFlag), so it must not run eagerly.
private readonly Lazy<bool> m_supportUnprivilegedCreateSymbolicLinkFlag = default;
/// <summary>
/// Checks if the OS supports SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE flag for creating symlink.
/// </summary>
/// <remarks>
/// Not all Win 10 versions support this flag. Checking OS version for this feature is currently not advised
/// because the OS may have had new features added in a redistributable DLL.
/// See: https://docs.microsoft.com/en-us/windows/desktop/SysInfo/operating-system-version
/// </remarks>
private bool CheckSupportUnprivilegedCreateSymbolicLinkFlag()
{
// Probe: attempt to create a symlink while passing the flag, and observe whether the OS rejects the flag itself.
var tempTarget = FileUtilities.GetTempFileName();
var tempLink = FileUtilities.GetTempFileName();
// The link path must not already exist for the creation attempt.
DeleteFile(tempLink, true);
CreateSymbolicLinkW(tempLink, tempTarget, SymbolicLinkTarget.File | SymbolicLinkTarget.AllowUnprivilegedCreate);
// Capture the error BEFORE the cleanup calls below, which would overwrite the thread's last-error value.
int lastError = Marshal.GetLastWin32Error();
DeleteFile(tempTarget, true);
DeleteFile(tempLink, true);
// ERROR_INVALID_PARAMETER means the OS did not recognize AllowUnprivilegedCreate; any other outcome
// (success, or a different error such as lacking privileges) implies the flag itself is understood.
return lastError != NativeIOConstants.ErrorInvalidParameter;
}
/// <summary>
/// Creates an instance of <see cref="FileSystemWin"/>.
/// </summary>
/// <param name="loggingContext">Context used for logging storage operations performed by this instance.</param>
public FileSystemWin(LoggingContext loggingContext)
{
    // Defer the symlink-flag probe: it creates temp files, so it should only run on first use.
    m_supportUnprivilegedCreateSymbolicLinkFlag = new Lazy<bool>(CheckSupportUnprivilegedCreateSymbolicLinkFlag);
    m_loggingContext = loggingContext;
}
/// <summary>
/// Disposable struct to push / pop a thread-local error mode (e.g. <see cref="SemFailCriticalErrors"/>) within a 'using' block.
/// This context must be created and disposed on the same thread.
/// </summary>
private readonly struct ErrorModeContext : IDisposable
{
// True only for contexts created via the factory methods; a default-constructed instance is invalid.
private readonly bool m_isValid;
// Error mode that was active before this context was pushed (restored on Dispose).
private readonly int m_oldErrorMode;
// Error mode this context installed (sanity-checked on Dispose).
private readonly int m_thisErrorMode;
// Managed thread that created the context; error mode is thread-local, so Dispose must run on the same thread.
private readonly int m_threadId;
/// <summary>
/// Creates an error mode context that represent pushing <paramref name="thisErrorMode"/> on top of the current <paramref name="oldErrorMode"/>
/// </summary>
private ErrorModeContext(int oldErrorMode, int thisErrorMode)
{
m_isValid = true;
m_oldErrorMode = oldErrorMode;
m_thisErrorMode = thisErrorMode;
m_threadId = Thread.CurrentThread.ManagedThreadId;
}
/// <summary>
/// Pushes an error mode context which is the current mode with the given extra flags set.
/// (i.e., we push <c><see cref="GetThreadErrorMode"/> | <paramref name="additionalFlags"/></c>)
/// </summary>
public static ErrorModeContext PushWithAddedFlags(int additionalFlags)
{
int currentErrorMode = GetThreadErrorMode();
int thisErrorMode = currentErrorMode | additionalFlags;
int oldErrorModeViaSet;
if (!SetThreadErrorMode(thisErrorMode, out oldErrorModeViaSet))
{
int hr = Marshal.GetLastWin32Error();
throw ThrowForNativeFailure(hr, "SetThreadErrorMode");
}
Contract.Assume(currentErrorMode == oldErrorModeViaSet, "Thread error mode should only be change from calls on this thread");
return new ErrorModeContext(oldErrorMode: currentErrorMode, thisErrorMode: thisErrorMode);
}
/// <summary>
/// Sets <c>SEM_FAILCRITICALERRORS</c> in the thread's error mode (if it is not set already).
/// The returned <see cref="ErrorModeContext"/> must be disposed to restore the prior error mode (and the disposal must occur on the same thread).
/// </summary>
/// <remarks>
/// The intended effect is to avoid a blocking message box if a file path on a CD / floppy drive letter is poked without media inserted.
/// This is neccessary before using volume management functions such as <see cref="ListVolumeGuidPathsAndSerials"/>
/// See http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621(v=vs.85).aspx
/// </remarks>
public static ErrorModeContext DisableMessageBoxForRemovableMedia()
{
return PushWithAddedFlags(SemFailCriticalErrors);
}
/// <summary>
/// Pops this error mode context off of the thread's error mode stack.
/// </summary>
public void Dispose()
{
Contract.Assume(m_isValid);
Contract.Assume(m_threadId == Thread.CurrentThread.ManagedThreadId, "An ErrorModeContext must be disposed on the same thread on which it was created");
int errorModeBeforeRestore;
if (!SetThreadErrorMode(m_oldErrorMode, out errorModeBeforeRestore))
{
int hr = Marshal.GetLastWin32Error();
throw ThrowForNativeFailure(hr, "SetThreadErrorMode");
}
Contract.Assume(errorModeBeforeRestore == m_thisErrorMode, "The thread error mode changed within the ErrorModeContext, but was not restored before popping this context.");
}
}
/// <inheritdoc />
public unsafe NtStatus FlushPageCacheToFilesystem(SafeFileHandle handle)
{
    // NtFlushBuffersFileEx with FileDataOnly requests a flush of the file's cached data;
    // the status block is required by the API but its contents are not inspected here.
    IoStatusBlock ioStatusBlock = default(IoStatusBlock);
    return NtFlushBuffersFileEx(handle, FlushFileBuffersFlags.FileDataOnly, null, 0, &ioStatusBlock);
}
/// <inheritdoc />
public unsafe MiniUsnRecord? ReadFileUsnByHandle(SafeFileHandle fileHandle, bool forceJournalVersion2 = false)
{
Contract.Requires(fileHandle != null);
int bytesReturned;
// We support V2 and V3 records. V3 records (with ReFS length FileIds) are larger, so we allocate a buffer on that assumption.
int recordBufferLength = NativeUsnRecordV3.MaximumSize;
byte* recordBuffer = stackalloc byte[recordBufferLength];
ReadFileUsnData readOptions = forceJournalVersion2 ? ReadFileUsnData.NtfsCompatible : ReadFileUsnData.NtfsAndReFSCompatible;
if (!DeviceIoControl(
fileHandle,
ioControlCode: NativeIOConstants.FsctlReadFileUsnData,
inputBuffer: (IntPtr)(&readOptions),
inputBufferSize: ReadFileUsnData.Size,
outputBuffer: (IntPtr)recordBuffer,
outputBufferSize: recordBufferLength,
bytesReturned: out bytesReturned,
overlapped: IntPtr.Zero))
{
int error = Marshal.GetLastWin32Error();
// These error codes are treated as 'no USN record available' rather than a hard failure
// (e.g. the journal is disabled or being deleted, or the operation is unsupported here).
if (error == NativeIOConstants.ErrorJournalDeleteInProgress ||
error == NativeIOConstants.ErrorJournalNotActive ||
error == NativeIOConstants.ErrorInvalidFunction ||
error == NativeIOConstants.ErrorOnlyIfConnected ||
error == NativeIOConstants.ErrorAccessDenied ||
error == NativeIOConstants.ErrorNotSupported)
{
return null;
}
throw ThrowForNativeFailure(error, "DeviceIoControl(FSCTL_READ_FILE_USN_DATA)");
}
// Exactly one record is expected in the buffer; validate the header before trusting the rest.
NativeUsnRecordHeader* recordHeader = (NativeUsnRecordHeader*)recordBuffer;
Contract.Assume(
bytesReturned >= NativeUsnRecordHeader.Size,
"Not enough data returned for a valid USN record header");
Contract.Assume(
bytesReturned == recordHeader->RecordLength,
"RecordLength field disagrees from number of bytes actually returned; but we were expecting exactly one record.");
MiniUsnRecord resultRecord;
if (recordHeader->MajorVersion == 3)
{
Contract.Assume(!forceJournalVersion2);
Contract.Assume(
bytesReturned >= NativeUsnRecordV3.MinimumSize && bytesReturned <= NativeUsnRecordV3.MaximumSize,
"FSCTL_READ_FILE_USN_DATA returned an amount of data that does not correspond to a valid USN_RECORD_V3.");
NativeUsnRecordV3* record = (NativeUsnRecordV3*)recordBuffer;
Contract.Assume(
record->Reason == 0 && record->TimeStamp == 0 && record->SourceInfo == 0,
"FSCTL_READ_FILE_USN_DATA scrubs these fields. Marshalling issue?");
resultRecord = new MiniUsnRecord(record->FileReferenceNumber, record->Usn);
}
else if (recordHeader->MajorVersion == 2)
{
Contract.Assume(
bytesReturned >= NativeUsnRecordV2.MinimumSize && bytesReturned <= NativeUsnRecordV2.MaximumSize,
"FSCTL_READ_FILE_USN_DATA returned an amount of data that does not correspond to a valid USN_RECORD_V2.");
NativeUsnRecordV2* record = (NativeUsnRecordV2*)recordBuffer;
Contract.Assume(
record->Reason == 0 && record->TimeStamp == 0 && record->SourceInfo == 0,
"FSCTL_READ_FILE_USN_DATA scrubs these fields. Marshalling issue?");
// V2 records carry a 64-bit file id; widen it into the 128-bit FileId shape with a zero high part.
resultRecord = new MiniUsnRecord(new FileId(0, record->FileReferenceNumber), record->Usn);
}
else
{
Contract.Assume(false, "An unrecognized record version was returned, even though version 2 or 3 was requested.");
throw new InvalidOperationException("Unreachable");
}
Logger.Log.StorageReadUsn(m_loggingContext, resultRecord.FileId.High, resultRecord.FileId.Low, resultRecord.Usn.Value);
return resultRecord;
}
/// <inheritdoc />
public unsafe ReadUsnJournalResult TryReadUsnJournal(
SafeFileHandle volumeHandle,
byte[] buffer,
ulong journalId,
Usn startUsn = default(Usn),
bool forceJournalVersion2 = false,
bool isJournalUnprivileged = false)
{
Contract.Requires(volumeHandle != null);
Contract.Requires(buffer != null && buffer.Length > 0);
Contract.Ensures(Contract.Result<ReadUsnJournalResult>() != null);
var readOptions = new ReadUsnJournalData
{
MinMajorVersion = 2,
MaxMajorVersion = forceJournalVersion2 ? (ushort) 2 : (ushort) 3,
StartUsn = startUsn,
Timeout = 0,
BytesToWaitFor = 0,
ReasonMask = uint.MaxValue, // TODO: Filter this!
ReturnOnlyOnClose = 0,
UsnJournalID = journalId,
};
int bytesReturned;
bool ioctlSuccess;
int error;
// Pin the caller-provided buffer for the duration of the ioctl; capture the error code
// inside the fixed block, before any other call can overwrite it.
fixed (byte* pRecordBuffer = buffer)
{
ioctlSuccess = DeviceIoControl(
volumeHandle,
ioControlCode: isJournalUnprivileged ? NativeIOConstants.FsctlReadUnprivilegedUsnJournal : NativeIOConstants.FsctlReadUsnJournal,
inputBuffer: (IntPtr) (&readOptions),
inputBufferSize: ReadUsnJournalData.Size,
outputBuffer: (IntPtr) pRecordBuffer,
outputBufferSize: buffer.Length,
bytesReturned: out bytesReturned,
overlapped: IntPtr.Zero);
error = Marshal.GetLastWin32Error();
}
if (!ioctlSuccess)
{
// Map the well-known failure modes to a status the caller can act on; anything else is unexpected.
ReadUsnJournalStatus errorStatus;
switch ((uint) error)
{
case NativeIOConstants.ErrorJournalNotActive:
errorStatus = ReadUsnJournalStatus.JournalNotActive;
break;
case NativeIOConstants.ErrorJournalDeleteInProgress:
errorStatus = ReadUsnJournalStatus.JournalDeleteInProgress;
break;
case NativeIOConstants.ErrorJournalEntryDeleted:
errorStatus = ReadUsnJournalStatus.JournalEntryDeleted;
break;
case NativeIOConstants.ErrorInvalidParameter:
errorStatus = ReadUsnJournalStatus.InvalidParameter;
break;
case NativeIOConstants.ErrorInvalidFunction:
errorStatus = ReadUsnJournalStatus.VolumeDoesNotSupportChangeJournals;
break;
default:
throw ThrowForNativeFailure(error, "DeviceIoControl(FSCTL_READ_USN_JOURNAL)");
}
return new ReadUsnJournalResult(errorStatus, nextUsn: new Usn(0), records: null);
}
Contract.Assume(
bytesReturned >= sizeof(ulong),
"The output buffer should always contain the updated USN cursor (even if no records were returned)");
var recordsToReturn = new List<UsnRecord>();
ulong nextUsn;
// Buffer layout: a ulong 'next USN' cursor, followed by zero or more variable-length USN records.
fixed (byte* recordBufferBase = buffer)
{
nextUsn = *(ulong*) recordBufferBase;
byte* currentRecordBase = recordBufferBase + sizeof(ulong);
Contract.Assume(currentRecordBase != null);
// One past the end of the record part of the buffer
byte* recordsEnd = recordBufferBase + bytesReturned;
// Walk the records; each one declares its own length in its header.
while (currentRecordBase < recordsEnd)
{
Contract.Assume(
currentRecordBase + NativeUsnRecordHeader.Size <= recordsEnd,
"Not enough data returned for a valid USN record header");
NativeUsnRecordHeader* currentRecordHeader = (NativeUsnRecordHeader*) currentRecordBase;
Contract.Assume(
currentRecordBase + currentRecordHeader->RecordLength <= recordsEnd,
"RecordLength field advances beyond the buffer");
if (currentRecordHeader->MajorVersion == 3)
{
Contract.Assume(!forceJournalVersion2);
if (!(currentRecordHeader->RecordLength >= NativeUsnRecordV3.MinimumSize &&
currentRecordHeader->RecordLength <= NativeUsnRecordV3.MaximumSize))
{
Contract.Assert(false, "Size in record header does not correspond to a valid USN_RECORD_V3. Header record length: " + currentRecordHeader->RecordLength);
}
NativeUsnRecordV3* record = (NativeUsnRecordV3*) currentRecordBase;
recordsToReturn.Add(
new UsnRecord(
record->FileReferenceNumber,
record->ParentFileReferenceNumber,
record->Usn,
(UsnChangeReasons) record->Reason));
}
else if (currentRecordHeader->MajorVersion == 2)
{
if (!(currentRecordHeader->RecordLength >= NativeUsnRecordV2.MinimumSize &&
currentRecordHeader->RecordLength <= NativeUsnRecordV2.MaximumSize))
{
Contract.Assert(false, "Size in record header does not correspond to a valid USN_RECORD_V2. Header record length: " + currentRecordHeader->RecordLength);
}
NativeUsnRecordV2* record = (NativeUsnRecordV2*) currentRecordBase;
// V2 records carry 64-bit file ids; widen them with a zero high part.
recordsToReturn.Add(
new UsnRecord(
new FileId(0, record->FileReferenceNumber),
new FileId(0, record->ParentFileReferenceNumber),
record->Usn,
(UsnChangeReasons) record->Reason));
}
else
{
Contract.Assume(
false,
"An unrecognized record version was returned, even though version 2 or 3 was requested.");
throw new InvalidOperationException("Unreachable");
}
currentRecordBase += currentRecordHeader->RecordLength;
}
}
return new ReadUsnJournalResult(ReadUsnJournalStatus.Success, new Usn(nextUsn), recordsToReturn);
}
/// <inheritdoc />
public QueryUsnJournalResult TryQueryUsnJournal(SafeFileHandle volumeHandle)
{
Contract.Requires(volumeHandle != null);
Contract.Ensures(Contract.Result<QueryUsnJournalResult>() != null);
var data = new QueryUsnJournalData();
bool ioctlSuccess = DeviceIoControl(
volumeHandle,
ioControlCode: NativeIOConstants.FsctlQueryUsnJournal,
inputBuffer: IntPtr.Zero,
inputBufferSize: 0,
outputBuffer: data,
outputBufferSize: QueryUsnJournalData.Size,
bytesReturned: out int bytesReturned,
overlapped: IntPtr.Zero);
int error = Marshal.GetLastWin32Error();
if (!ioctlSuccess)
{
// Map the well-known failure modes to a status the caller can act on; anything else is unexpected.
QueryUsnJournalStatus errorStatus;
switch ((uint)error)
{
case NativeIOConstants.ErrorJournalNotActive:
errorStatus = QueryUsnJournalStatus.JournalNotActive;
break;
case NativeIOConstants.ErrorJournalDeleteInProgress:
errorStatus = QueryUsnJournalStatus.JournalDeleteInProgress;
break;
case NativeIOConstants.ErrorInvalidFunction:
errorStatus = QueryUsnJournalStatus.VolumeDoesNotSupportChangeJournals;
break;
case NativeIOConstants.ErrorInvalidParameter:
errorStatus = QueryUsnJournalStatus.InvalidParameter;
break;
case NativeIOConstants.ErrorAccessDenied:
errorStatus = QueryUsnJournalStatus.AccessDenied;
break;
default:
throw ThrowForNativeFailure(error, "DeviceIoControl(FSCTL_QUERY_USN_JOURNAL)");
}
return new QueryUsnJournalResult(errorStatus, data: null);
}
Contract.Assume(bytesReturned == QueryUsnJournalData.Size, "Output buffer size mismatched (not all fields populated?)");
return new QueryUsnJournalResult(QueryUsnJournalStatus.Success, data);
}
/// <inheritdoc />
public unsafe Usn? TryWriteUsnCloseRecordByHandle(SafeFileHandle fileHandle)
{
Contract.Requires(fileHandle != null);
ulong writtenUsn;
if (!DeviceIoControl(
fileHandle,
ioControlCode: NativeIOConstants.FsctlWriteUsnCloseRecord,
inputBuffer: IntPtr.Zero,
inputBufferSize: 0,
outputBuffer: (IntPtr)(&writtenUsn),
outputBufferSize: sizeof(ulong),
bytesReturned: out int bytesReturned,
overlapped: IntPtr.Zero))
{
int error = Marshal.GetLastWin32Error();
// These error codes mean a close record simply can't be written right now
// (journal disabled/being deleted, or read-only media); report 'no USN' instead of failing.
if (error == NativeIOConstants.ErrorJournalDeleteInProgress ||
error == NativeIOConstants.ErrorJournalNotActive ||
error == NativeIOConstants.ErrorWriteProtect)
{
return null;
}
throw ThrowForNativeFailure(error, "DeviceIoControl(FSCTL_WRITE_USN_CLOSE_RECORD)");
}
Contract.Assume(bytesReturned == sizeof(ulong));
Logger.Log.StorageCheckpointUsn(m_loggingContext, writtenUsn);
return new Usn(writtenUsn);
}
/// <summary>
/// Indicates if the running OS is at least Windows 8.0 / Server 2012
/// (which is the first version to support nested jobs, hence <see cref="FileSystemWin.MinWindowsVersionThatSupportsNestedJobs"/>)
/// </summary>
private static readonly bool s_runningWindows8OrAbove = StaticIsOSVersionGreaterOrEqual(FileSystemWin.MinWindowsVersionThatSupportsNestedJobs);
/// <summary>
/// Indicates if the extended (128-bit) file ID type is supported on this running OS.
/// http://msdn.microsoft.com/en-us/library/windows/desktop/aa364227(v=vs.85).aspx
/// </summary>
/// <remarks>Equivalent to the Windows 8+ check, since extended file IDs arrived with that release.</remarks>
[Pure]
private static bool IsExtendedFileIdSupported()
{
return s_runningWindows8OrAbove;
}
/// <inheritdoc />
public unsafe FileIdAndVolumeId? TryGetFileIdAndVolumeIdByHandle(SafeFileHandle fileHandle)
{
    Contract.Requires(fileHandle != null);
    // Query the FileIdInfo class; on failure report 'unavailable' rather than throwing.
    FileIdAndVolumeId info = default(FileIdAndVolumeId);
    bool succeeded = GetFileInformationByHandleEx(fileHandle, (uint)FileInfoByHandleClass.FileIdInfo, (IntPtr)(&info), FileIdAndVolumeId.Size);
    return succeeded ? info : (FileIdAndVolumeId?)null;
}
/// <inheritdoc />
/// <remarks>Throws (via ThrowForNativeFailure) if the attribute query fails.</remarks>
public unsafe FileAttributes GetFileAttributesByHandle(SafeFileHandle fileHandle)
{
    Contract.Requires(fileHandle != null);
    var info = default(FileBasicInfo);
    if (!GetFileInformationByHandleEx(fileHandle, (uint)FileInfoByHandleClass.FileBasicInfo, (IntPtr)(&info), sizeof(FileBasicInfo)))
    {
        int hr = Marshal.GetLastWin32Error();
        // 'throw' added for consistency with the other call sites in this file (e.g. the version and
        // volume-information queries): ThrowForNativeFailure raises internally, but throwing its
        // result makes the non-returning failure path explicit to both readers and the compiler.
        throw ThrowForNativeFailure(hr, "GetFileInformationByHandleEx");
    }
    return info.Attributes;
}
/// <inheritdoc />
/// <remarks>Reads FileStandardInfo and returns its DeletePending flag; throws (via ThrowForNativeFailure) if the query fails.</remarks>
public unsafe bool IsPendingDelete(SafeFileHandle fileHandle)
{
    Contract.Requires(fileHandle != null);
    var info = default(FileStandardInfo);
    if (!GetFileInformationByHandleEx(fileHandle, (uint)FileInfoByHandleClass.FileStandardInfo, (IntPtr)(&info), sizeof(FileStandardInfo)))
    {
        int hr = Marshal.GetLastWin32Error();
        // 'throw' added for consistency with the other call sites in this file; makes the
        // non-returning failure path explicit.
        throw ThrowForNativeFailure(hr, "GetFileInformationByHandleEx");
    }
    return info.DeletePending;
}
/// <summary>
/// Queries the current length (end-of-file position) of an open file.
/// </summary>
/// <param name="fileHandle">Open handle to the file to measure.</param>
/// <returns>The file length in bytes.</returns>
public static long GetFileLengthByHandle(SafeFileHandle fileHandle)
{
    Contract.Requires(fileHandle != null);
    if (!GetFileSizeEx(fileHandle, out long size))
    {
        int hr = Marshal.GetLastWin32Error();
        // 'throw' added for consistency with the other call sites in this file; makes the
        // non-returning failure path explicit.
        throw ThrowForNativeFailure(hr, "GetFileSizeEx");
    }
    return size;
}
/// <inheritdoc />
[SuppressMessage("Microsoft.Naming", "CA1720:IdentifiersShouldNotContainTypeNames", MessageId = "short")]
public unsafe uint GetShortVolumeSerialNumberByHandle(SafeFileHandle fileHandle)
{
    // Only the serial number output is requested; every other out-parameter is suppressed.
    uint serialNumber = 0;
    bool succeeded = GetVolumeInformationByHandleW(
        fileHandle,
        volumeNameBuffer: null,
        volumeNameBufferSize: 0,
        volumeSerial: (IntPtr)(&serialNumber),
        maximumComponentLength: IntPtr.Zero,
        fileSystemFlags: IntPtr.Zero,
        fileSystemNameBuffer: null,
        fileSystemNameBufferSize: 0);
    if (!succeeded)
    {
        throw ThrowForNativeFailure(Marshal.GetLastWin32Error(), "GetVolumeInformationByHandleW");
    }
    return serialNumber;
}
/// <inheritdoc />
public ulong GetVolumeSerialNumberByHandle(SafeFileHandle fileHandle)
{
    // Prefer the full serial number from the file-id query; fall back to the short
    // serial when that information is unavailable on this handle.
    FileIdAndVolumeId? fileIdAndVolumeId = TryGetFileIdAndVolumeIdByHandle(fileHandle);
    return fileIdAndVolumeId?.VolumeSerialNumber ?? GetShortVolumeSerialNumberByHandle(fileHandle);
}
/// <inheritdoc />
public unsafe bool TrySetDeletionDisposition(SafeFileHandle handle)
{
    // FileDispositionInfo takes a single byte-sized flag; non-zero marks the file for deletion.
    byte markForDeletion = 1;
    return SetFileInformationByHandle(handle, (uint)FileInfoByHandleClass.FileDispositionInfo, (IntPtr)(&markForDeletion), sizeof(byte));
}
/// <inheritdoc />
public unsafe bool TryRename(SafeFileHandle handle, string destination, bool replaceExisting)
{
destination = ToLongPathIfExceedMaxPath(destination);
// FileRenameInfo as we've defined it contains one character which is enough for a terminating null byte. Then, we need room for the real characters.
int fileNameLengthInBytesExcludingNull = destination.Length * sizeof(char);
int structSizeIncludingDestination = sizeof(FileRenameInfo) + fileNameLengthInBytesExcludingNull;
var buffer = new byte[structSizeIncludingDestination];
fixed (byte* b = buffer)
{
// Lay the variable-length FILE_RENAME_INFO structure out over the managed buffer.
var renameInfo = (FileRenameInfo*)b;
renameInfo->ReplaceIfExists = replaceExisting ? (byte)1 : (byte)0;
renameInfo->RootDirectory = IntPtr.Zero;
// FileNameLengthInBytes includes the terminating null character.
renameInfo->FileNameLengthInBytes = fileNameLengthInBytesExcludingNull + sizeof(char);
// Copy the destination into the inline character array, then null-terminate.
char* filenameBuffer = &renameInfo->FileName;
for (int i = 0; i < destination.Length; i++)
{
filenameBuffer[i] = destination[i];
}
filenameBuffer[destination.Length] = (char)0;
// Sanity check: the last two bytes of the buffer must be the UTF-16 null terminator.
Contract.Assume(buffer.Length > 2 && b[buffer.Length - 1] == 0 && b[buffer.Length - 2] == 0);
return SetFileInformationByHandle(handle, (uint)FileInfoByHandleClass.FileRenameInfo, (IntPtr)renameInfo, structSizeIncludingDestination);
}
}
/// <inheritdoc />
internal unsafe void SetFileTimestampsByHandle(SafeFileHandle handle, DateTime creationTime, DateTime accessTime, DateTime lastWriteTime, DateTime lastChangeTime)
{
    // Attributes stays 0 (as in the previous implementation); only the four timestamps are populated,
    // each converted to FILETIME ticks and reinterpreted as the unsigned value the native struct expects.
    var timestamps = default(FileBasicInfo);
    timestamps.Attributes = (FileAttributes)0;
    timestamps.CreationTime = unchecked((ulong)creationTime.ToFileTimeUtc());
    timestamps.LastAccessTime = unchecked((ulong)accessTime.ToFileTimeUtc());
    timestamps.LastWriteTime = unchecked((ulong)lastWriteTime.ToFileTimeUtc());
    timestamps.ChangeTime = unchecked((ulong)lastChangeTime.ToFileTimeUtc());
    bool succeeded = SetFileInformationByHandle(handle, (uint)FileInfoByHandleClass.FileBasicInfo, (IntPtr)(&timestamps), sizeof(FileBasicInfo));
    if (!succeeded)
    {
        ThrowForNativeFailure(Marshal.GetLastWin32Error(), nameof(SetFileInformationByHandle));
    }
}
/// <inheritdoc />
internal unsafe void GetFileTimestampsByHandle(SafeFileHandle handle, out DateTime creationTime, out DateTime accessTime, out DateTime lastWriteTime, out DateTime lastChangeTime)
{
    FileBasicInfo basicInfo = default(FileBasicInfo);
    bool succeeded = GetFileInformationByHandleEx(handle, (uint)FileInfoByHandleClass.FileBasicInfo, (IntPtr)(&basicInfo), sizeof(FileBasicInfo));
    if (!succeeded)
    {
        ThrowForNativeFailure(Marshal.GetLastWin32Error(), nameof(GetFileInformationByHandleEx));
    }
    // The native struct stores FILETIME values as unsigned; reinterpret to the signed form DateTime expects.
    creationTime = DateTime.FromFileTimeUtc(unchecked((long)basicInfo.CreationTime));
    accessTime = DateTime.FromFileTimeUtc(unchecked((long)basicInfo.LastAccessTime));
    lastWriteTime = DateTime.FromFileTimeUtc(unchecked((long)basicInfo.LastWriteTime));
    lastChangeTime = DateTime.FromFileTimeUtc(unchecked((long)basicInfo.ChangeTime));
}
/// <inheritdoc />
public unsafe bool TryPosixDelete(string pathToDelete, out OpenFileResult openFileResult)
{
// Open with delete access and reparse-point semantics so the link/file itself (not its target) is affected.
SafeFileHandle handle = CreateFileW(
ToLongPathIfExceedMaxPath(pathToDelete),
FileDesiredAccess.Delete,
FileShare.Delete | FileShare.Read | FileShare.Write,
IntPtr.Zero,
FileMode.Open,
FileFlagsAndAttributes.FileFlagBackupSemantics | FileFlagsAndAttributes.FileFlagOpenReparsePoint,
IntPtr.Zero);
using (handle)
{
// Capture the error immediately after CreateFileW, before any other call can overwrite it.
int hr = Marshal.GetLastWin32Error();
if (handle.IsInvalid)
{
Logger.Log.StorageTryOpenOrCreateFileFailure(m_loggingContext, pathToDelete, (int)FileMode.Open, hr);
openFileResult = OpenFileResult.Create(pathToDelete, hr, FileMode.Open, handleIsValid: false);
return false;
}
// handle will not be actually valid after this function terminates,
// but it was at this time, and this is what we are reporting.
openFileResult = OpenFileResult.Create(pathToDelete, hr, FileMode.Open, handleIsValid: true);
// Request POSIX-style deletion: unlink the name immediately rather than waiting for the last handle to close.
FileDispositionInfoEx fdi;
fdi.Flags = FileDispositionFlags.Delete | FileDispositionFlags.PosixSemantics;
// this is an optimistic call that might fail, so we are not calling Marshal.GetLastWin32Error() after it, just
// relying on return value.
bool deleted = SetFileInformationByHandle(
handle,
(uint)FileInfoByHandleClass.FileDispositionInfoEx,
(IntPtr)(&fdi),
sizeof(FileDispositionInfoEx));
return deleted;
}
}
/// <inheritdoc />
/// <remarks>
/// Queries only the file-system name for the volume backing <paramref name="fileHandle"/>;
/// all other volume-information outputs are suppressed. Any file system other than NTFS or
/// ReFS is reported as <see cref="FileSystemType.Unknown"/>.
/// </remarks>
public FileSystemType GetVolumeFileSystemByHandle(SafeFileHandle fileHandle)
{
    // 32 characters is ample for any file-system name ("NTFS", "ReFS", "FAT32", ...).
    var nameBuffer = new StringBuilder(32);
    if (!GetVolumeInformationByHandleW(
            fileHandle,
            volumeNameBuffer: null,
            volumeNameBufferSize: 0,
            volumeSerial: IntPtr.Zero,
            maximumComponentLength: IntPtr.Zero,
            fileSystemFlags: IntPtr.Zero,
            fileSystemNameBuffer: nameBuffer,
            fileSystemNameBufferSize: nameBuffer.Capacity))
    {
        // Capture the error right after the failed native call.
        throw ThrowForNativeFailure(Marshal.GetLastWin32Error(), "GetVolumeInformationByHandleW");
    }

    string fileSystemName = nameBuffer.ToString();
    if (fileSystemName == "NTFS")
    {
        return FileSystemType.NTFS;
    }

    if (fileSystemName == "ReFS")
    {
        return FileSystemType.ReFS;
    }

    return FileSystemType.Unknown;
}
/// <inheritdoc />
public OpenFileResult TryOpenDirectory(
    string directoryPath,
    FileDesiredAccess desiredAccess,
    FileShare shareMode,
    FileFlagsAndAttributes flagsAndAttributes,
    out SafeFileHandle handle)
{
    Contract.Requires(!string.IsNullOrEmpty(directoryPath));

    // Delegate to the private overload, pinning FileMode.Open: this entry point only opens
    // existing directories and never creates them.
    return TryOpenDirectory(directoryPath, desiredAccess, shareMode, FileMode.Open, flagsAndAttributes, out handle);
}
/// <inheritdoc />
/// <remarks>
/// This code is adapted from <see cref="Directory.CreateDirectory(string)"/>.
/// This code assumes that the directory path has been canonicalized by calling <see cref="GetFullPath(string, out int)"/>.
/// Creation is tolerant of races: another thread/process creating the same directories concurrently
/// is not treated as a failure (ERROR_ALREADY_EXISTS is normally benign).
/// </remarks>
public void CreateDirectory(string directoryPath)
{
    Contract.Requires(!string.IsNullOrEmpty(directoryPath));

    int length = directoryPath.Length;

    if (length >= 2 && IsDirectorySeparator(directoryPath[length - 1]))
    {
        // Skip ending directory separator without trimming the path.
        --length;
    }

    int rootLength = GetRootLength(directoryPath);

    if (Directory.Exists(ToLongPathIfExceedMaxPath(directoryPath)))
    {
        // Short cut if directory exists
        return;
    }

    // Now collect directory path and its parents. We must ensure
    // that the parents exist before creating the requested directory path.
    var stackDirs = new Stack<string>();
    bool parentPathExists = false;

    if (length > rootLength)
    {
        // We are traversing the path bottom up to collect non-existent parents, and push them to stack.
        // Thus, the top-most non-existent parent will be created first later.
        int i = length - 1;
        while (i >= rootLength && !parentPathExists)
        {
            string dir = directoryPath.Substring(0, i + 1);

            if (!Directory.Exists(ToLongPathIfExceedMaxPath(dir)))
            {
                stackDirs.Push(dir);
            }
            else
            {
                // Some parent path exists, stop traversal.
                parentPathExists = true;
            }

            // Skip directory separators.
            while (i > rootLength && !IsDirectorySeparator(directoryPath[i]))
            {
                --i;
            }

            --i;
        }
    }

    // Now start creating directories from the top-most non-existent parent.
    bool result = true;

    // Remember only the FIRST real error encountered; later failures on child paths are
    // typically consequences of the first one.
    int firstFoundError = NativeIOConstants.ErrorSuccess;

    while (stackDirs.Count > 0)
    {
        string dir = stackDirs.Pop();
        result = CreateDirectoryW(ToLongPathIfExceedMaxPath(dir), IntPtr.Zero);
        if (!result && (firstFoundError == NativeIOConstants.ErrorSuccess))
        {
            int currentError = Marshal.GetLastWin32Error();

            if (currentError != NativeIOConstants.ErrorAlreadyExists)
            {
                // Another thread may have been created directory or its parents.
                firstFoundError = currentError;
            }
            else
            {
                // ERROR_ALREADY_EXISTS is only a real problem when the existing entry is a
                // file (not a directory), or when the path is a directory but access is denied.
                if (FileExistsNoFollow(dir) || (DirectoryExistsNoFollow(dir) && currentError == NativeIOConstants.ErrorAccessDenied))
                {
                    // The directory or its parents may have existed as files or creation results in denied access.
                    firstFoundError = currentError;
                }
            }
        }
    }

    // Only throw an exception if creating the exact directory failed.
    if (!result && firstFoundError != NativeIOConstants.ErrorSuccess)
    {
        throw new BuildXLException(I($"Failed to create directory '{directoryPath}'"), CreateWin32Exception(firstFoundError, "CreateDirectoryW"));
    }
}
/// <summary>
/// Core directory-open helper: opens <paramref name="directoryPath"/> with
/// FILE_FLAG_BACKUP_SEMANTICS (required by CreateFileW to open a directory handle)
/// and SYNCHRONIZE access added to whatever the caller requested.
/// Never throws; success/failure is reported via the returned <see cref="OpenFileResult"/>.
/// </summary>
private OpenFileResult TryOpenDirectory(
    string directoryPath,
    FileDesiredAccess desiredAccess,
    FileShare shareMode,
    FileMode fileMode,
    FileFlagsAndAttributes flagsAndAttributes,
    out SafeFileHandle handle)
{
    Contract.Requires(!string.IsNullOrEmpty(directoryPath));

    handle = CreateFileW(
        ToLongPathIfExceedMaxPath(directoryPath),
        desiredAccess | FileDesiredAccess.Synchronize,
        shareMode,
        lpSecurityAttributes: IntPtr.Zero,
        dwCreationDisposition: fileMode,
        dwFlagsAndAttributes: flagsAndAttributes | FileFlagsAndAttributes.FileFlagBackupSemantics,
        hTemplateFile: IntPtr.Zero);
    // Capture the error immediately; it is meaningful only when the handle is invalid.
    int hr = Marshal.GetLastWin32Error();

    if (handle.IsInvalid)
    {
        Logger.Log.StorageTryOpenDirectoryFailure(m_loggingContext, directoryPath, hr);
        handle = null;
        Contract.Assume(hr != 0);
        var result = OpenFileResult.Create(directoryPath, hr, fileMode, handleIsValid: false);
        Contract.Assume(!result.Succeeded);
        return result;
    }
    else
    {
        var result = OpenFileResult.Create(directoryPath, hr, fileMode, handleIsValid: true);
        Contract.Assume(result.Succeeded);
        return result;
    }
}
/// <inheritdoc />
public OpenFileResult TryOpenDirectory(string directoryPath, FileShare shareMode, out SafeFileHandle handle)
{
    Contract.Requires(!string.IsNullOrEmpty(directoryPath));

    // Convenience overload: no explicit access rights or flags (SYNCHRONIZE and
    // FILE_FLAG_BACKUP_SEMANTICS are added by the delegate overload).
    return TryOpenDirectory(directoryPath, FileDesiredAccess.None, shareMode, FileFlagsAndAttributes.None, out handle);
}
/// <inheritdoc />
/// <remarks>
/// Thin non-throwing wrapper over CreateFileW; the path is long-path-prefixed if needed.
/// On failure <paramref name="handle"/> is set to null and the error is captured in the result.
/// </remarks>
public OpenFileResult TryCreateOrOpenFile(
    string path,
    FileDesiredAccess desiredAccess,
    FileShare shareMode,
    FileMode creationDisposition,
    FileFlagsAndAttributes flagsAndAttributes,
    out SafeFileHandle handle)
{
    handle = CreateFileW(
        ToLongPathIfExceedMaxPath(path),
        desiredAccess,
        shareMode,
        lpSecurityAttributes: IntPtr.Zero,
        dwCreationDisposition: creationDisposition,
        dwFlagsAndAttributes: flagsAndAttributes,
        hTemplateFile: IntPtr.Zero);
    // Capture the error immediately after the native call.
    int hr = Marshal.GetLastWin32Error();

    if (handle.IsInvalid)
    {
        Logger.Log.StorageTryOpenOrCreateFileFailure(m_loggingContext, path, (int)creationDisposition, hr);
        handle = null;
        Contract.Assume(hr != 0);
        var result = OpenFileResult.Create(path, hr, creationDisposition, handleIsValid: false);
        Contract.Assume(!result.Succeeded);
        return result;
    }
    else
    {
        var result = OpenFileResult.Create(path, hr, creationDisposition, handleIsValid: true);
        Contract.Assume(result.Succeeded);
        return result;
    }
}
/// <inheritdoc />
/// <remarks>
/// Re-opens an existing handle with new access/share/flags via the native ReOpenFile API.
/// Sharing violations and access-denied are reported as statuses; any other failure throws.
/// </remarks>
public ReOpenFileStatus TryReOpenFile(
    SafeFileHandle existing,
    FileDesiredAccess desiredAccess,
    FileShare shareMode,
    FileFlagsAndAttributes flagsAndAttributes,
    out SafeFileHandle reopenedHandle)
{
    Contract.Requires(existing != null);

    SafeFileHandle newHandle = ReOpenFile(existing, desiredAccess, shareMode, flagsAndAttributes);
    // Capture the error immediately after the native call, before any other interop.
    int hr = Marshal.GetLastWin32Error();

    if (!newHandle.IsInvalid)
    {
        reopenedHandle = newHandle;
        return ReOpenFileStatus.Success;
    }

    reopenedHandle = null;
    Contract.Assume(hr != NativeIOConstants.ErrorSuccess, "Invalid handle should imply an error.");

    if (hr == NativeIOConstants.ErrorSharingViolation)
    {
        return ReOpenFileStatus.SharingViolation;
    }

    if (hr == NativeIOConstants.ErrorAccessDenied)
    {
        return ReOpenFileStatus.AccessDenied;
    }

    throw ThrowForNativeFailure(hr, "ReOpenFile");
}
/// <inheritdoc />
/// <remarks>
/// Wraps FileStream construction with recoverable-IO-exception handling and, when
/// <paramref name="force"/> is set, a one-shot retry that clears the ReadOnly attribute
/// after an <see cref="UnauthorizedAccessException"/>.
/// </remarks>
public FileStream CreateFileStream(
    string path,
    FileMode fileMode,
    FileAccess fileAccess,
    FileShare fileShare,
    FileOptions options,
    bool force)
{
    // The bufferSize of 4096 bytes is the default as used by the other FileStream constructors
    // http://index/mscorlib/system/io/filestream.cs.html
    return ExceptionUtilities.HandleRecoverableIOException(
        () =>
        {
            string streamPath = ToLongPathIfExceedMaxPath(path);
            try
            {
                return new FileStream(streamPath, fileMode, fileAccess, fileShare, bufferSize: DefaultBufferSize, options: options);
            }
            catch (UnauthorizedAccessException)
            {
                // This is a workaround to allow write access to a file that is marked as readonly. It is
                // exercised when hashing the output files of pips that create readonly files. The hashing currently
                // opens files as write
                if (force)
                {
                    // Clear the ReadOnly bit and retry once; if attributes can't be read or set,
                    // rethrow the original UnauthorizedAccessException.
                    if (!TryGetFileAttributes(streamPath, out FileAttributes fileAttributes, out int hrGet)
                        || !TrySetFileAttributes(streamPath, fileAttributes & ~FileAttributes.ReadOnly, out int hrSet))
                    {
                        throw;
                    }

                    return new FileStream(streamPath, fileMode, fileAccess, fileShare, bufferSize: DefaultBufferSize, options: options);
                }

                throw;
            }
        },
        ex =>
        {
            throw new BuildXLException(I($"Failed to open path '{path}' with mode='{fileMode}', access='{fileAccess}', share='{fileShare}'"), ex);
        });
}
/// <summary>
/// Creates a new IO completion port.
/// </summary>
/// <exception cref="NativeWin32Exception">Thrown when the native CreateIoCompletionPort call fails.</exception>
[SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope",
    Justification = "Handles are either null/invalid or intentionally returned to caller")]
public static SafeIOCompletionPortHandle CreateIOCompletionPort()
{
    // Passing INVALID_HANDLE_VALUE (-1) as the file handle together with an invalid existing
    // port asks the kernel to create a brand-new, unassociated completion port.
    IntPtr rawHandle = CreateIoCompletionPort(
        handle: new SafeFileHandle(new IntPtr(-1), ownsHandle: false),
        existingCompletionPort: SafeIOCompletionPortHandle.CreateInvalid(),
        completionKey: IntPtr.Zero,
        numberOfConcurrentThreads: 0);

    // Capture the error before wrapping the raw handle (SafeHandle construction must not clobber it).
    int error = Marshal.GetLastWin32Error();
    var handle = new SafeIOCompletionPortHandle(rawHandle);

    if (handle.IsInvalid)
    {
        throw ThrowForNativeFailure(error, "CreateIoCompletionPort");
    }

    return handle;
}
/// <summary>
/// Binds a file handle to the given IO completion port. The file must have been opened with <see cref="FileFlagsAndAttributes.FileFlagOverlapped"/>.
/// Future completed IO operations for this handle will be queued to the specified port.
/// </summary>
/// <remarks>
/// Along with binding to the port, this function also sets the handle's completion mode to <c>FILE_SKIP_COMPLETION_PORT_ON_SUCCESS</c>.
/// This means that the caller should respect <c>ERROR_SUCCESS</c> (don't assume <c>ERROR_IO_PENDING</c>).
/// </remarks>
[SuppressMessage("Microsoft.Reliability", "CA2001:AvoidCallingProblematicMethods")]
public static void BindFileHandleToIOCompletionPort(SafeFileHandle handle, SafeIOCompletionPortHandle port, IntPtr completionKey)
{
    Contract.Requires(handle != null && !handle.IsInvalid);
    Contract.Requires(port != null && !port.IsInvalid);

    // Associating a file handle with an existing port: the kernel returns the port's own handle on success.
    IntPtr returnedHandle = CreateIoCompletionPort(
        handle: handle,
        existingCompletionPort: port,
        completionKey: completionKey,
        numberOfConcurrentThreads: 0);

    if (returnedHandle == IntPtr.Zero || returnedHandle == INVALID_HANDLE_VALUE)
    {
        throw ThrowForNativeFailure(Marshal.GetLastWin32Error(), "CreateIoCompletionPort");
    }

    // Note that we do not wrap returnedHandle as a safe handle. This is because we would otherwise have two safe handles
    // wrapping the same underlying handle value, and could then double-free it.
    Contract.Assume(returnedHandle == port.DangerousGetHandle());

    // TODO:454491: We could also set FileSkipSetEventOnHandle here, such that the file's internal event is not cleared / signaled by the IO manager.
    // However, this is a compatibility problem for existing usages of e.g. DeviceIoControl that do not specify an OVERLAPPED (which
    // may wait on the file to be signaled). Ideally, we never depend on signaling a file handle used for async I/O, since we may
    // to issue concurrent operations on the handle (and without the IO manager serializing requests as with sync handles, depending
    // on signaling and waiting the file handle is simply unsafe).

    // We need unchecked here. The issue is that the SetFileCompletionNotificationModes native function returns BOOL, which is actually an int8.
    // When marshaling to Bool, if the highest bit is set we can get overflow error.
    bool success = unchecked(SetFileCompletionNotificationModes(
        handle,
        FileCompletionMode.FileSkipCompletionPortOnSuccess));

    if (!success)
    {
        throw ThrowForNativeFailure(Marshal.GetLastWin32Error(), "SetFileCompletionNotificationModes");
    }
}
/// <summary>
/// Issues an async read via <c>ReadFile</c>. The eventual completion will possibly be sent to an I/O completion port, associated with <see cref="Windows.FileSystemWin.BindFileHandleToIOCompletionPort"/>.
/// Note that <paramref name="pinnedBuffer"/> must be pinned on a callstack that lives until I/O completion or with a pinning <see cref="System.Runtime.InteropServices.GCHandle"/>,
/// similarly with the provided <paramref name="pinnedOverlapped" />; both are accessed by the kernel as the request is processed in the background.
/// </summary>
public static unsafe FileAsyncIOResult ReadFileOverlapped(SafeFileHandle handle, byte* pinnedBuffer, int bytesToRead, long fileOffset, Overlapped* pinnedOverlapped)
{
    Contract.Requires(handle != null && !handle.IsInvalid);

    // The read position for overlapped I/O is carried in the OVERLAPPED structure, not the handle.
    pinnedOverlapped->Offset = fileOffset;

    // lpNumberOfBytesRead must be null for overlapped reads; the byte count is retrieved
    // later from the completed OVERLAPPED.
    bool success = ReadFile(handle, pinnedBuffer, bytesToRead, lpNumberOfBytesRead: (int*)IntPtr.Zero, lpOverlapped: pinnedOverlapped);
    return CreateFileAsyncIOResult(handle, pinnedOverlapped, success);
}
/// <summary>
/// Issues an async write via <c>WriteFile</c>. The eventual completion will possibly be sent to an I/O completion port, associated with <see cref="BindFileHandleToIOCompletionPort"/>.
/// Note that <paramref name="pinnedBuffer"/> must be pinned on a callstack that lives until I/O completion or with a pinning <see cref="GCHandle"/>,
/// similarly with the provided <paramref name="pinnedOverlapped" />; both are accessed by the kernel as the request is processed in the background.
/// </summary>
public static unsafe FileAsyncIOResult WriteFileOverlapped(SafeFileHandle handle, byte* pinnedBuffer, int bytesToWrite, long fileOffset, Overlapped* pinnedOverlapped)
{
    Contract.Requires(handle != null && !handle.IsInvalid);

    // The write position for overlapped I/O is carried in the OVERLAPPED structure, not the handle.
    pinnedOverlapped->Offset = fileOffset;

    // lpNumberOfBytesWritten must be null for overlapped writes; the byte count is retrieved
    // later from the completed OVERLAPPED.
    bool success = WriteFile(handle, pinnedBuffer, bytesToWrite, lpNumberOfBytesWritten: (int*)IntPtr.Zero, lpOverlapped: pinnedOverlapped);
    return CreateFileAsyncIOResult(handle, pinnedOverlapped, success);
}
/// <summary>
/// Common conversion from an overlapped <c>ReadFile</c> or <c>WriteFile</c> result to a <see cref="FileAsyncIOResult"/>.
/// This must be called immediately after the IO operation such that <see cref="Marshal.GetLastWin32Error"/> is still valid.
/// </summary>
[SuppressMessage("Microsoft.Interoperability", "CA1404:CallGetLastErrorImmediatelyAfterPInvoke", Justification = "Intentionally wrapping GetLastWin32Error")]
private static unsafe FileAsyncIOResult CreateFileAsyncIOResult(SafeFileHandle handle, Overlapped* pinnedOverlapped, bool success)
{
    if (success)
    {
        // Success: IO completed synchronously and we will assume no completion packet is coming (due to FileCompletionMode.FileSkipCompletionPortOnSuccess).
        GetCompletedOverlappedResult(handle, pinnedOverlapped, out int error, out int bytesTransferred);
        Contract.Assume(error == NativeIOConstants.ErrorSuccess, "IO operation indicated success, but the completed OVERLAPPED did not contain ERROR_SUCCESS");
        return new FileAsyncIOResult(FileAsyncIOStatus.Succeeded, bytesTransferred: bytesTransferred, error: NativeIOConstants.ErrorSuccess);
    }
    else
    {
        // Pending (a completion packet is expected) or synchronous failure.
        int error = Marshal.GetLastWin32Error();
        Contract.Assume(error != NativeIOConstants.ErrorSuccess);

        // ERROR_IO_PENDING means the kernel accepted the request and a completion packet
        // will follow; any other error means the operation failed synchronously.
        bool completedSynchronously = error != NativeIOConstants.ErrorIOPending;
        return new FileAsyncIOResult(
            completedSynchronously ? FileAsyncIOStatus.Failed : FileAsyncIOStatus.Pending,
            bytesTransferred: 0,
            error: error);
    }
}
/// <summary>
/// Unpacks a completed <c>OVERLAPPED</c> structure into the number of bytes transferred and error code for the completed operation.
/// Fails if the given overlapped structure indicates that the IO operation has not yet completed.
/// </summary>
/// <remarks>
/// Note that when GetOverlappedResult fails for a reason other than ERROR_IO_INCOMPLETE,
/// the failure code is returned via <paramref name="error"/> (with zero bytes transferred)
/// rather than thrown — that case represents the completed operation's own failure.
/// </remarks>
public static unsafe void GetCompletedOverlappedResult(SafeFileHandle handle, Overlapped* overlapped, out int error, out int bytesTransferred)
{
    int bytesTransferredTemp = 0;
    // bWait: false — we never block here; the OVERLAPPED is expected to already be complete.
    if (!GetOverlappedResult(handle, overlapped, &bytesTransferredTemp, bWait: false))
    {
        bytesTransferred = 0;
        error = Marshal.GetLastWin32Error();
        if (error == NativeIOConstants.ErrorIOIncomplete)
        {
            // Caller contract violation: the operation is still pending.
            throw ThrowForNativeFailure(error, "GetOverlappedResult");
        }
    }
    else
    {
        bytesTransferred = bytesTransferredTemp;
        error = NativeIOConstants.ErrorSuccess;
    }
}
/// <summary>
/// Status of dequeueing an I/O completion packet from a port. Independent from success / failure in the packet itself.
/// </summary>
public enum IOCompletionPortDequeueStatus
{
    /// <summary>
    /// A packet was dequeued.
    /// </summary>
    Succeeded,

    /// <summary>
    /// The completion port has been closed, so further dequeues cannot proceed.
    /// </summary>
    CompletionPortClosed,
}
/// <summary>
/// Result of dequeueing an I/O completion packet from a port.
/// </summary>
/// <remarks>
/// The packet payload (completed IO, completion key, and OVERLAPPED pointer) is accessible
/// only when <see cref="Status"/> is <see cref="IOCompletionPortDequeueStatus.Succeeded"/>;
/// the property getters enforce this via contracts.
/// </remarks>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1815:OverrideEqualsAndOperatorEqualsOnValueTypes")]
public readonly unsafe struct IOCompletionPortDequeueResult
{
    /// <summary>
    /// Dequeue status (for the dequeue operation itself).
    /// </summary>
    public readonly IOCompletionPortDequeueStatus Status;

    private readonly FileAsyncIOResult m_completedIO;
    private readonly IntPtr m_completionKey;
    private readonly Overlapped* m_dequeuedOverlapped;

    // Constructor for a successful dequeue: captures the completed IO and its identifying data.
    internal IOCompletionPortDequeueResult(FileAsyncIOResult completedIO, Overlapped* dequeuedOverlapped, IntPtr completionKey)
    {
        Contract.Requires(completedIO.Status == FileAsyncIOStatus.Succeeded || completedIO.Status == FileAsyncIOStatus.Failed);
        Status = IOCompletionPortDequeueStatus.Succeeded;
        m_completedIO = completedIO;
        m_completionKey = completionKey;
        m_dequeuedOverlapped = dequeuedOverlapped;
    }

    // Constructor for a failed dequeue (e.g. port closed): no packet payload is available.
    internal IOCompletionPortDequeueResult(IOCompletionPortDequeueStatus status)
    {
        Contract.Requires(status != IOCompletionPortDequeueStatus.Succeeded);
        Status = status;
        m_completedIO = default(FileAsyncIOResult);
        m_completionKey = default(IntPtr);
        m_dequeuedOverlapped = null;
    }

    /// <summary>
    /// Result of the asynchronous I/O that completed. Available only if the status is <see cref="IOCompletionPortDequeueStatus.Succeeded"/>,
    /// meaning that a packet was actually dequeued.
    /// </summary>
    public FileAsyncIOResult CompletedIO
    {
        get
        {
            Contract.Requires(Status == IOCompletionPortDequeueStatus.Succeeded);
            Contract.Ensures(Contract.Result<FileAsyncIOResult>().Status != FileAsyncIOStatus.Pending);
            return m_completedIO;
        }
    }

    /// <summary>
    /// Completion key (handle unique identifier) of the completed I/O. Available only if the status is <see cref="IOCompletionPortDequeueStatus.Succeeded"/>,
    /// meaning that a packet was actually dequeued.
    /// </summary>
    public IntPtr CompletionKey
    {
        get
        {
            Contract.Requires(Status == IOCompletionPortDequeueStatus.Succeeded);
            return m_completionKey;
        }
    }

    /// <summary>
    /// Pointer to the overlapped originally used to isse the completed I/O. Available only if the status is <see cref="IOCompletionPortDequeueStatus.Succeeded"/>,
    /// meaning that a packet was actually dequeued.
    /// </summary>
    public Overlapped* DequeuedOverlapped
    {
        get
        {
            Contract.Requires(Status == IOCompletionPortDequeueStatus.Succeeded);
            return m_dequeuedOverlapped;
        }
    }
}
/// <summary>
/// Attempts to dequeue a completion packet from a completion port. The result indicates whether or not a packet
/// was dequeued, and if so the packet's contents.
/// </summary>
/// <remarks>
/// Blocks indefinitely (INFINITE timeout) until a packet arrives or the port is closed.
/// </remarks>
[SuppressMessage("Microsoft.Interoperability", "CA1404:CallGetLastErrorImmediatelyAfterPInvoke", Justification = "Incorrect analysis")]
public static unsafe IOCompletionPortDequeueResult GetQueuedCompletionStatus(SafeIOCompletionPortHandle completionPort)
{
    // Possible indications:
    // dequeuedOverlapped == null && !result: dequeue failed. Maybe ERROR_ABANDONED_WAIT_0 (port closed)?
    // dequeuedOverlapped != null && !result: Dequeue succeeded. IO failed.
    // dequeuedOverlapped != null && result: Dequeue succeeded. IO succeeded.
    // dequeuedOverlapped == null && result: PostQueuedCompletionStatus with null OVERLAPPED
    // See https://msdn.microsoft.com/en-us/library/windows/desktop/aa364986%28v=vs.85%29.aspx
    Overlapped* dequeuedOverlapped = null;
    int bytesTransferred = 0;
    IntPtr completionKey = default(IntPtr);
    bool result = GetQueuedCompletionStatus(completionPort, &bytesTransferred, &completionKey, &dequeuedOverlapped, NativeIOConstants.Infinite);

    if (result || dequeuedOverlapped != null)
    {
        // Latter three cases; dequeue succeeded.
        int error = NativeIOConstants.ErrorSuccess;
        if (!result)
        {
            // A packet was dequeued but the underlying IO failed; capture its error code.
            error = Marshal.GetLastWin32Error();
            Contract.Assume(error != NativeIOConstants.ErrorSuccess);
        }

        return new IOCompletionPortDequeueResult(
            new FileAsyncIOResult(
                result ? FileAsyncIOStatus.Succeeded : FileAsyncIOStatus.Failed,
                // GetQueueCompletionStatus can return false but still store non-0 value into 'bytesTransferred' argument.
                bytesTransferred: bytesTransferred,
                error: error),
            dequeuedOverlapped,
            completionKey);
    }
    else
    {
        // Dequeue failed: dequeuedOverlapped == null && !result
        int error = Marshal.GetLastWin32Error();

        if (error == NativeIOConstants.ErrorAbandonedWait0)
        {
            // The port was closed while we were waiting; this is an expected shutdown path.
            return new IOCompletionPortDequeueResult(IOCompletionPortDequeueStatus.CompletionPortClosed);
        }
        else
        {
            throw ThrowForNativeFailure(error, "GetQueuedCompletionStatus");
        }
    }
}
/// <summary>
/// Queues a caller-defined completion packet to a completion port.
/// </summary>
/// <remarks>
/// The posted packet carries zero bytes transferred and a null OVERLAPPED, so a dequeuer can
/// distinguish it from a real IO completion (see the comment in <see cref="GetQueuedCompletionStatus(SafeIOCompletionPortHandle)"/>).
/// </remarks>
public static unsafe void PostQueuedCompletionStatus(SafeIOCompletionPortHandle completionPort, IntPtr completionKey)
{
    if (!PostQueuedCompletionStatus(completionPort, dwNumberOfBytesTransferred: 0, dwCompletionKey: completionKey, lpOverlapped: null))
    {
        throw ThrowForNativeFailure(Marshal.GetLastWin32Error(), "PostQueuedCompletionStatus");
    }
}
/// <inheritdoc />
/// <remarks>
/// Never throws; the native failure is folded into a <see cref="CreateHardLinkStatus"/>.
/// Both paths are long-path-prefixed when they exceed MAX_PATH.
/// </remarks>
public CreateHardLinkStatus TryCreateHardLink(string link, string linkTarget)
{
    if (CreateHardLinkW(ToLongPathIfExceedMaxPath(link), ToLongPathIfExceedMaxPath(linkTarget), IntPtr.Zero))
    {
        return CreateHardLinkStatus.Success;
    }

    // Capture the error immediately after the failed native call, then map it
    // onto the well-known failure categories.
    int error = Marshal.GetLastWin32Error();

    if (error == NativeIOConstants.ErrorNotSameDevice)
    {
        return CreateHardLinkStatus.FailedSinceDestinationIsOnDifferentVolume;
    }

    if (error == NativeIOConstants.ErrorTooManyLinks)
    {
        return CreateHardLinkStatus.FailedDueToPerFileLinkLimit;
    }

    if (error == NativeIOConstants.ErrorNotSupported)
    {
        return CreateHardLinkStatus.FailedSinceNotSupportedByFilesystem;
    }

    if (error == NativeIOConstants.ErrorAccessDenied)
    {
        return CreateHardLinkStatus.FailedAccessDenied;
    }

    return CreateHardLinkStatus.Failed;
}
/// <inheritdoc />
/// <remarks>
/// Alternative hard-link creation path that uses NtSetInformationFile(FileLinkInformation)
/// on a handle to the link target, which (unlike CreateHardLinkW) supports replacing an
/// existing file at the link location.
/// </remarks>
public CreateHardLinkStatus TryCreateHardLinkViaSetInformationFile(string link, string linkTarget, bool replaceExisting = true)
{
    // Please note, that this method does not support long paths: FileLinkInformation struct hard codes the file name lengths to 264.
    using (FileStream handle = CreateFileStream(linkTarget, FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete, FileOptions.None, false))
    {
        // The link name must be in NT path form for the kernel API.
        FileLinkInformation fileLinkInformation = new FileLinkInformation(NtPathPrefix + link, replaceExisting);
        var status = NtSetInformationFile(handle.SafeFileHandle, out _, fileLinkInformation, (uint)Marshal.SizeOf(fileLinkInformation), FileInformationClass.FileLinkInformation);
        // Capture the Win32 error before any further interop; it is only consulted when status is unsuccessful.
        var result = Marshal.GetLastWin32Error();
        if (status.IsSuccessful)
        {
            return CreateHardLinkStatus.Success;
        }
        else
        {
            switch (result)
            {
                case NativeIOConstants.ErrorTooManyLinks:
                    return CreateHardLinkStatus.FailedDueToPerFileLinkLimit;
                case NativeIOConstants.ErrorNotSameDevice:
                    return CreateHardLinkStatus.FailedSinceDestinationIsOnDifferentVolume;
                case NativeIOConstants.ErrorAccessDenied:
                    return CreateHardLinkStatus.FailedAccessDenied;
                case NativeIOConstants.ErrorNotSupported:
                    return CreateHardLinkStatus.FailedSinceNotSupportedByFilesystem;
                default:
                    return CreateHardLinkStatus.Failed;
            }
        }
    }
}
/// <inheritdoc />
/// <remarks>
/// When the OS supports it, SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE is added so creation
/// can succeed without administrator rights (requires Developer Mode on Windows 10+).
/// </remarks>
public Possible<Unit> TryCreateSymbolicLink(string symLinkFileName, string targetFileName, bool isTargetFile)
{
    SymbolicLinkTarget creationFlag = isTargetFile ? SymbolicLinkTarget.File : SymbolicLinkTarget.Directory;

    if (m_supportUnprivilegedCreateSymbolicLinkFlag.Value)
    {
        creationFlag |= SymbolicLinkTarget.AllowUnprivilegedCreate;
    }

    int res = CreateSymbolicLinkW(symLinkFileName, targetFileName, creationFlag);

    // The return value of CreateSymbolicLinkW is underspecified in its documentation.
    // In non-admin mode where Developer mode is not enabled, the return value can be greater than zero, but the last error
    // is ERROR_PRIVILEGE_NOT_HELD, and consequently the symlink is not created. We strengthen the return value by
    // also checking that the last error is ERROR_SUCCESS.
    int lastError = Marshal.GetLastWin32Error();
    if (res > 0 && lastError == NativeIOConstants.ErrorSuccess)
    {
        return Unit.Void;
    }

    return new NativeFailure(lastError, I($"{nameof(CreateSymbolicLinkW)} returns '{res}'"));
}
/// <inheritdoc />
/// <remarks>
/// Creates (or retargets) an NTFS junction (mount-point reparse point) at
/// <paramref name="junctionPoint"/> pointing at <paramref name="targetDir"/> by issuing
/// FSCTL_SET_REPARSE_POINT with a manually laid-out REPARSE_DATA_BUFFER. The byte offsets
/// below mirror the fixed mount-point buffer layout and must stay in sync with the
/// REPARSE_DATA_BUFFER struct definition.
/// </remarks>
public void CreateJunction(string junctionPoint, string targetDir)
{
    if (!Directory.Exists(ToLongPathIfExceedMaxPath(targetDir)))
    {
        throw new IOException(I($"Target path '{targetDir}' does not exist or is not a directory."));
    }

    SafeFileHandle handle;
    var openReparsePoint = TryOpenReparsePoint(junctionPoint, FileDesiredAccess.GenericWrite, out handle);

    if (!openReparsePoint.Succeeded)
    {
        openReparsePoint.ThrowForError();
    }

    using (handle)
    {
        // The substitute name must be an absolute NT-style path (\??\-prefixed).
        string fullTargetDirPath = GetFullPath(targetDir, out int hr);

        if (fullTargetDirPath == null)
        {
            throw CreateWin32Exception(hr, "GetFullPathName");
        }

        byte[] targetDirBytes = Encoding.Unicode.GetBytes(NtPathPrefix + fullTargetDirPath);

        // ReparseDataLength = substitute name + 8 bytes of offset/length fields + 4 bytes of
        // terminators (= len + 12); the print name is left empty.
        REPARSE_DATA_BUFFER reparseDataBuffer = new REPARSE_DATA_BUFFER
        {
            ReparseTag = DwReserved0Flag.IO_REPARSE_TAG_MOUNT_POINT,
            ReparseDataLength = (ushort)(targetDirBytes.Length + 12),
            SubstituteNameOffset = 0,
            SubstituteNameLength = (ushort)targetDirBytes.Length,
            PrintNameOffset = (ushort)(targetDirBytes.Length + 2),
            PrintNameLength = 0,
            PathBuffer = new byte[0x3ff0],
        };

        Array.Copy(targetDirBytes, reparseDataBuffer.PathBuffer, targetDirBytes.Length);

        int inBufferSize = Marshal.SizeOf(reparseDataBuffer);
        IntPtr inBuffer = Marshal.AllocHGlobal(inBufferSize);

        try
        {
            Marshal.StructureToPtr(reparseDataBuffer, inBuffer, false);

            // Pass only the populated prefix of the buffer: 8-byte reparse header + ReparseDataLength
            // (= targetDirBytes.Length + 20), not the full marshaled struct size.
            bool success = DeviceIoControl(
                handle,
                FSCTL_SET_REPARSE_POINT,
                inBuffer,
                targetDirBytes.Length + 20,
                IntPtr.Zero,
                0,
                out _,
                IntPtr.Zero);

            if (!success)
            {
                throw CreateWin32Exception(Marshal.GetLastWin32Error(), "DeviceIoControl");
            }
        }
        finally
        {
            Marshal.FreeHGlobal(inBuffer);
        }
    }
}
/// <summary>
/// Opens a handle to a reparse point itself (FILE_FLAG_OPEN_REPARSE_POINT prevents the open
/// from following the link; FILE_FLAG_BACKUP_SEMANTICS permits opening directories).
/// Never throws; the outcome is reported via the returned <see cref="OpenFileResult"/>.
/// </summary>
/// <remarks>
/// NOTE(review): unlike most other open paths in this class, the path is NOT run through
/// ToLongPathIfExceedMaxPath here — presumably callers pass short-enough paths; confirm.
/// </remarks>
private static OpenFileResult TryOpenReparsePoint(string reparsePoint, FileDesiredAccess accessMode, out SafeFileHandle reparsePointHandle)
{
    reparsePointHandle = CreateFileW(
        reparsePoint,
        accessMode,
        FileShare.Read | FileShare.Write | FileShare.Delete,
        IntPtr.Zero,
        FileMode.Open,
        FileFlagsAndAttributes.FileFlagBackupSemantics | FileFlagsAndAttributes.FileFlagOpenReparsePoint,
        IntPtr.Zero);

    // Capture the error immediately after the native call.
    int hr = Marshal.GetLastWin32Error();

    if (reparsePointHandle.IsInvalid)
    {
        reparsePointHandle = null;
        Contract.Assume(hr != 0);
        var result = OpenFileResult.Create(reparsePoint, hr, FileMode.Open, handleIsValid: false);
        Contract.Assume(!result.Succeeded);
        return result;
    }
    else
    {
        var result = OpenFileResult.Create(reparsePoint, hr, FileMode.Open, handleIsValid: true);
        Contract.Assume(result.Succeeded);
        return result;
    }
}
/// <inheritdoc />
/// <remarks>
/// Enumerates all volumes via FindFirstVolumeW / FindNextVolumeW, opening each volume's GUID
/// root to read its serial number. Volumes whose root cannot be opened (e.g. empty removable
/// drives) are skipped rather than failing the enumeration.
/// </remarks>
public List<Tuple<VolumeGuidPath, ulong>> ListVolumeGuidPathsAndSerials()
{
    Contract.Ensures(Contract.Result<List<Tuple<VolumeGuidPath, ulong>>>().Count > 0);
    Contract.Ensures(Contract.ForAll(Contract.Result<List<Tuple<VolumeGuidPath, ulong>>>(), t => t.Item1.IsValid));

    var volumeList = new List<Tuple<VolumeGuidPath, ulong>>();

    // We don't want funky message boxes for poking removable media, e.g. a CD drive without a disk.
    // By observation, these drives *may* be returned when enumerating volumes. Run 'wmic volume get DeviceId,Name'
    // when an empty floppy / cd drive is visible in explorer.
    using (ErrorModeContext.DisableMessageBoxForRemovableMedia())
    {
        var volumeNameBuffer = new StringBuilder(capacity: NativeIOConstants.MaxPath + 1);
        using (SafeFindVolumeHandle findVolumeHandle = FindFirstVolumeW(volumeNameBuffer, volumeNameBuffer.Capacity))
        {
            {
                int hr = Marshal.GetLastWin32Error();

                // The docs say we'll see an invalid handle if it 'fails to find any volumes'. It's very hard to run this program without a volume, though.
                // http://msdn.microsoft.com/en-us/library/windows/desktop/aa364425(v=vs.85).aspx
                if (findVolumeHandle.IsInvalid)
                {
                    // Fix: report the API that actually failed here (FindFirstVolumeW, not FindNextVolumeW).
                    throw ThrowForNativeFailure(hr, "FindFirstVolumeW");
                }
            }

            do
            {
                string volumeGuidPathString = volumeNameBuffer.ToString();
                volumeNameBuffer.Clear();

                // FindFirstVolumeW / FindNextVolumeW return volume GUID paths ending in a backslash.
                Contract.Assume(!string.IsNullOrEmpty(volumeGuidPathString) && volumeGuidPathString[volumeGuidPathString.Length - 1] == '\\');
                bool volumeGuidPathParsed = VolumeGuidPath.TryCreate(volumeGuidPathString, out VolumeGuidPath volumeGuidPath);
                Contract.Assume(volumeGuidPathParsed, "FindFirstVolume / FindNextVolume promise to return volume GUID paths");

                if (TryOpenDirectory(volumeGuidPathString, FileShare.Delete | FileShare.Read | FileShare.Write, out SafeFileHandle volumeRoot).Succeeded)
                {
                    ulong serial;
                    using (volumeRoot)
                    {
                        serial = GetVolumeSerialNumberByHandle(volumeRoot);
                    }

                    Logger.Log.StorageFoundVolume(m_loggingContext, volumeGuidPathString, serial);
                    volumeList.Add(Tuple.Create(volumeGuidPath, serial));
                }
            }
            while (FindNextVolumeW(findVolumeHandle, volumeNameBuffer, volumeNameBuffer.Capacity));

            // FindNextVolumeW returned false; hopefully for the right reason.
            {
                int hr = Marshal.GetLastWin32Error();
                if (hr != NativeIOConstants.ErrorNoMoreFiles)
                {
                    throw ThrowForNativeFailure(hr, "FindNextVolumeW");
                }
            }
        }
    }

    return volumeList;
}
/// <inheritdoc />
/// <remarks>
/// Opens a file by its file ID (OpenFileById); <paramref name="existingHandleOnVolume"/>
/// identifies the volume on which the ID is resolved. Never throws; failures are reported
/// via the returned <see cref="OpenFileResult"/>.
/// </remarks>
public OpenFileResult TryOpenFileById(
    SafeFileHandle existingHandleOnVolume,
    FileId fileId,
    FileDesiredAccess desiredAccess,
    FileShare shareMode,
    FileFlagsAndAttributes flagsAndAttributes,
    out SafeFileHandle handle)
{
    Contract.Requires(existingHandleOnVolume != null && !existingHandleOnVolume.IsInvalid);

    var fileIdDescriptor = new FileIdDescriptor(fileId);
    handle = OpenFileById(
        existingHandleOnVolume,
        fileIdDescriptor,
        desiredAccess,
        shareMode,
        lpSecurityAttributes: IntPtr.Zero,
        dwFlagsAndAttributes: flagsAndAttributes);
    // Capture the error immediately after the native call.
    int hr = Marshal.GetLastWin32Error();

    if (handle.IsInvalid)
    {
        Logger.Log.StorageTryOpenFileByIdFailure(m_loggingContext, fileId.High, fileId.Low, GetVolumeSerialNumberByHandle(existingHandleOnVolume), hr);
        handle = null;
        Contract.Assume(hr != 0);
        var result = OpenFileResult.CreateForOpeningById(hr, FileMode.Open, handleIsValid: false);
        Contract.Assume(!result.Succeeded);
        return result;
    }
    else
    {
        var result = OpenFileResult.CreateForOpeningById(hr, FileMode.Open, handleIsValid: true);
        Contract.Assume(result.Succeeded);
        return result;
    }
}
// SymLink target support

// Constants
// Starting buffer size for reading reparse-point data; doubled on ERROR_MORE_DATA / ERROR_INSUFFICIENT_BUFFER.
private const int INITIAL_REPARSE_DATA_BUFFER_SIZE = 1024;
// FSCTL code to read a file's reparse-point data via DeviceIoControl.
private const int FSCTL_GET_REPARSE_POINT = 0x000900a8;
// Win32 ERROR_INSUFFICIENT_BUFFER (122).
private const int ERROR_INSUFFICIENT_BUFFER = 0x7A;
// Win32 ERROR_MORE_DATA (234).
private const int ERROR_MORE_DATA = 0xEA;
// Win32 ERROR_SUCCESS (0).
private const int ERROR_SUCCESS = 0x0;
// Flag in REPARSE_DATA_BUFFER for symlinks whose target is relative to the link's directory.
private const int SYMLINK_FLAG_RELATIVE = 0x1;
/// <inheritdoc />
/// <remarks>
/// Walks the chain of reparse points starting from <paramref name="sourcePath"/>, appending
/// each resolved path to <paramref name="chainOfReparsePoints"/> (the source itself is added
/// first). The walk stops silently at the first non-reparse-point, unreadable target, or
/// unopenable path. Handle ownership: the caller's <paramref name="handle"/> is never
/// disposed here; every handle this method opens for intermediate links is disposed before
/// returning or before the next iteration replaces it.
/// </remarks>
public void GetChainOfReparsePoints(SafeFileHandle handle, string sourcePath, IList<string> chainOfReparsePoints)
{
    Contract.Requires(!handle.IsInvalid);
    Contract.Requires(!string.IsNullOrWhiteSpace(sourcePath));
    Contract.Requires(chainOfReparsePoints != null);

    SafeFileHandle originalHandle = handle;
    chainOfReparsePoints.Add(sourcePath);

    do
    {
        if (!TryGetFileAttributes(sourcePath, out FileAttributes attributes, out _))
        {
            // Attributes unreadable: stop, cleaning up any handle we opened ourselves.
            if (handle != originalHandle)
            {
                handle.Dispose();
            }

            return;
        }

        if ((attributes & FileAttributes.ReparsePoint) == 0)
        {
            // Reached a real file/directory: the chain ends here.
            if (handle != originalHandle)
            {
                handle.Dispose();
            }

            return;
        }

        var possibleNextTarget = TryGetReparsePointTarget(handle, sourcePath);

        if (!possibleNextTarget.Succeeded)
        {
            if (handle != originalHandle)
            {
                handle.Dispose();
            }

            return;
        }

        // Done with the current handle (if we own it) before moving to the next link.
        if (handle != originalHandle)
        {
            handle.Dispose();
        }

        // Relative targets are resolved against the current source path.
        var maybeResolvedTarget = ResolveSymlinkTarget(sourcePath, possibleNextTarget.Result);

        if (!maybeResolvedTarget.Succeeded)
        {
            return;
        }

        sourcePath = maybeResolvedTarget.Result;
        chainOfReparsePoints.Add(sourcePath);

        // Open the next link without following it (FileFlagOpenReparsePoint).
        var openResult = TryOpenDirectory(
            sourcePath,
            FileDesiredAccess.GenericRead,
            FileShare.ReadWrite | FileShare.Delete,
            FileFlagsAndAttributes.FileFlagOverlapped | FileFlagsAndAttributes.FileFlagOpenReparsePoint,
            out handle);

        if (!openResult.Succeeded)
        {
            return;
        }

    } while (!handle.IsInvalid);
}
        /// <inheritdoc />
        public Possible<string> TryGetReparsePointTarget(SafeFileHandle handle, string sourcePath)
        {
            try
            {
                // If no usable handle was supplied, open one ourselves (without dereferencing the
                // reparse point) just for the duration of the query.
                if (handle == null || handle.IsInvalid)
                {
                    var openResult = TryCreateOrOpenFile(
                        sourcePath,
                        FileDesiredAccess.GenericRead,
                        FileShare.Read | FileShare.Delete,
                        FileMode.Open,
                        FileFlagsAndAttributes.FileFlagOpenReparsePoint | FileFlagsAndAttributes.FileFlagBackupSemantics,
                        out SafeFileHandle symlinkHandle);

                    if (!openResult.Succeeded)
                    {
                        return openResult.CreateFailureForError();
                    }

                    using (symlinkHandle)
                    {
                        return GetReparsePointTarget(symlinkHandle);
                    }
                }
                else
                {
                    // Caller-provided handle: caller retains ownership; do not dispose it here.
                    return GetReparsePointTarget(handle);
                }
            }
            catch (NativeWin32Exception e)
            {
                return new RecoverableExceptionFailure(new BuildXLException("Failed to get reparse point target", e));
            }
            catch (NotSupportedException e)
            {
                // Thrown by GetReparsePointTarget for reparse tags other than symlink / mount point.
                return new RecoverableExceptionFailure(new BuildXLException("Failed to get reparse point target", e));
            }
        }
        /// <summary>
        /// Reads the target path from the reparse data of <paramref name="handle"/> via
        /// FSCTL_GET_REPARSE_POINT. Supports only symlink and mount-point reparse tags; any
        /// other tag causes a <see cref="NotSupportedException"/>.
        /// </summary>
        private unsafe string GetReparsePointTarget(SafeFileHandle handle)
        {
            string targetPath = string.Empty;

            int bufferSize = INITIAL_REPARSE_DATA_BUFFER_SIZE;
            int errorCode = ERROR_INSUFFICIENT_BUFFER;

            byte[] buffer = null;
            // Grow the buffer (doubling each round) until DeviceIoControl stops reporting it is too small.
            while (errorCode == ERROR_MORE_DATA || errorCode == ERROR_INSUFFICIENT_BUFFER)
            {
                buffer = new byte[bufferSize];
                bool success = false;

                fixed (byte* pBuffer = buffer)
                {
                    int bufferReturnedSize;
                    success = DeviceIoControl(
                        handle,
                        FSCTL_GET_REPARSE_POINT,
                        IntPtr.Zero,
                        0,
                        (IntPtr)pBuffer,
                        bufferSize,
                        out bufferReturnedSize,
                        IntPtr.Zero);
                }

                bufferSize *= 2;
                errorCode = success ? 0 : Marshal.GetLastWin32Error();
            }

            if (errorCode != 0)
            {
                throw ThrowForNativeFailure(errorCode, "DeviceIoControl(FSCTL_GET_REPARSE_POINT)");
            }

            // Now get the offsets in the REPARSE_DATA_BUFFER buffer string based on
            // the offsets for the different type of reparse points.
            // The constants below are byte offsets into REPARSE_DATA_BUFFER (see ntifs.h); the
            // name offset/length fields stored there count bytes, hence the "/ 2" to get chars.
            const uint PrintNameOffsetIndex = 12;
            const uint PrintNameLengthIndex = 14;
            const uint SubsNameOffsetIndex = 8;
            const uint SubsNameLengthIndex = 10;

            fixed (byte* pBuffer = buffer)
            {
                // The first DWORD of REPARSE_DATA_BUFFER is the reparse tag.
                uint reparsePointTag = *(uint*)(pBuffer);

                if (reparsePointTag != (uint)DwReserved0Flag.IO_REPARSE_TAG_SYMLINK
                    && reparsePointTag != (uint)DwReserved0Flag.IO_REPARSE_TAG_MOUNT_POINT)
                {
                    throw new NotSupportedException(I($"Reparse point tag {reparsePointTag:X} not supported"));
                }

                // The path buffer starts after the fixed header; symlink buffers carry an extra
                // Flags DWORD, so their payload begins 4 bytes later than a mount point's.
                uint pathBufferOffsetIndex = (uint)((reparsePointTag == (uint) DwReserved0Flag.IO_REPARSE_TAG_SYMLINK) ? 20 : 16);

                char* nameStartPtr = (char*)(pBuffer + pathBufferOffsetIndex);
                int nameOffset = *(short*)(pBuffer + PrintNameOffsetIndex) / 2;
                int nameLength = *(short*)(pBuffer + PrintNameLengthIndex) / 2;
                targetPath = new string(nameStartPtr, nameOffset, nameLength);

                // Fall back to the substitute name when the print name is empty.
                if (string.IsNullOrWhiteSpace(targetPath))
                {
                    nameOffset = *(short*)(pBuffer + SubsNameOffsetIndex) / 2;
                    nameLength = *(short*)(pBuffer + SubsNameLengthIndex) / 2;
                    targetPath = new string(nameStartPtr, nameOffset, nameLength);
                }
            }

            return targetPath;
        }
/// <inheritdoc />
public bool TryGetFinalPathNameByPath(string path, out string finalPath, out int nativeErrorCode, bool volumeGuidPath = false)
{
Contract.RequiresNotNullOrEmpty(path);
SafeFileHandle handle = CreateFileW(
ToLongPathIfExceedMaxPath(path),
FileDesiredAccess.None,
FileShare.None,
lpSecurityAttributes: IntPtr.Zero,
dwCreationDisposition: FileMode.Open,
dwFlagsAndAttributes: FileFlagsAndAttributes.FileFlagBackupSemantics,
hTemplateFile: IntPtr.Zero);
nativeErrorCode = Marshal.GetLastWin32Error();
if (handle.IsInvalid)
{
finalPath = string.Empty;
return false;
}
using (handle)
{
try
{
finalPath = GetFinalPathNameByHandle(handle, volumeGuidPath);
return true;
}
catch(NativeWin32Exception ex)
{
finalPath = string.Empty;
nativeErrorCode = ex.NativeErrorCode;
return false;
}
}
}
        /// <inheritdoc />
        public string GetFinalPathNameByHandle(SafeFileHandle handle, bool volumeGuidPath = false)
        {
            const int VolumeNameGuid = 0x1;

            var pathBuffer = new StringBuilder(NativeIOConstants.MaxPath);

            // GetFinalPathNameByHandleW returns the required buffer size when the buffer is too
            // small; loop until the buffer is large enough.
            int neededSize = NativeIOConstants.MaxPath;
            do
            {
                // Capacity must include the null terminator character
                pathBuffer.EnsureCapacity(neededSize + 1);
                neededSize = GetFinalPathNameByHandleW(handle, pathBuffer, pathBuffer.Capacity, flags: volumeGuidPath ? VolumeNameGuid : 0);
                if (neededSize == 0)
                {
                    int hr = Marshal.GetLastWin32Error();

                    // ERROR_PATH_NOT_FOUND
                    if (hr == 0x3)
                    {
                        // This can happen if the volume
                        // has no drive-letter mapping; retry asking for a volume GUID path instead.
                        Contract.Assume(!volumeGuidPath);
                        return GetFinalPathNameByHandle(handle, volumeGuidPath: true);
                    }
                    else
                    {
                        throw ThrowForNativeFailure(hr, "GetFinalPathNameByHandleW");
                    }
                }

                Contract.Assume(neededSize < NativeIOConstants.MaxLongPath);
            }
            while (neededSize >= pathBuffer.Capacity);

            bool expectedPrefixIsPresent = true;

            // The returned path can either have a \\?\ or a \??\ prefix
            // Observe LongPathPrefix and NtPathPrefix have the same length
            if (pathBuffer.Length >= LongPathPrefix.Length)
            {
                // Compare character-by-character, accepting either prefix at each position.
                for (int i = 0; i < LongPathPrefix.Length; i++)
                {
                    int currentChar = pathBuffer[i];
                    if (!(currentChar == LongPathPrefix[i] || currentChar == NtPathPrefix[i]))
                    {
                        expectedPrefixIsPresent = false;
                        break;
                    }
                }
            }
            else
            {
                expectedPrefixIsPresent = false;
            }

            // Some paths do not come back with any prefixes. This is the case for example of unix-like paths
            // that some tools, even on Windows, decide to probe
            if (volumeGuidPath || !expectedPrefixIsPresent)
            {
                // Volume GUID paths keep their prefix; unprefixed paths are returned as-is.
                return pathBuffer.ToString();
            }
            else
            {
                // Strip the \\?\ or \??\ prefix for regular drive-letter paths.
                return pathBuffer.ToString(startIndex: LongPathPrefix.Length, length: pathBuffer.Length - LongPathPrefix.Length);
            }
        }
/// <inheritdoc />
public bool TryRemoveDirectory(
string path,
out int hr)
{
if (!RemoveDirectoryW(ToLongPathIfExceedMaxPath(path)))
{
hr = Marshal.GetLastWin32Error();
return false;
}
hr = 0;
return true;
}
/// <inheritdoc />
public void RemoveDirectory(string path)
{
if (!TryRemoveDirectory(path, out int hr))
{
ThrowForNativeFailure(hr, "RemoveDirectoryW");
}
}
/// <summary>
/// Thin wrapper for native SetFileAttributesW that checks the win32 error upon failure
/// </summary>
public bool TrySetFileAttributes(string path, FileAttributes attributes, out int hr)
{
if (!SetFileAttributesW(ToLongPathIfExceedMaxPath(path), attributes))
{
hr = Marshal.GetLastWin32Error();
return false;
}
hr = 0;
return true;
}
/// <inheritdoc />
public void SetFileAttributes(string path, FileAttributes attributes)
{
if (!TrySetFileAttributes(path, attributes, out int hr))
{
ThrowForNativeFailure(hr, "SetFileAttributesW");
}
}
        /// <summary>
        /// Gets file attributes, first via the cheap GetFileAttributesW and, if that fails
        /// (e.g. due to restrictive ACLs), falling back to the more expensive FindFirstFileW.
        /// </summary>
        private bool TryGetFileAttributes(string path, out FileAttributes attributes, out int hr)
        {
            return TryGetFileAttributesViaGetFileAttributes(path, out attributes, out hr)
                || TryGetFileAttributesViaFindFirstFile(path, out attributes, out hr);
        }
private bool TryGetFileAttributesViaGetFileAttributes(string path, out FileAttributes attributes, out int hr)
{
var fileAttributes = GetFileAttributesW(ToLongPathIfExceedMaxPath(path));
if (fileAttributes == NativeIOConstants.InvalidFileAttributes)
{
hr = Marshal.GetLastWin32Error();
attributes = FileAttributes.Normal;
return false;
}
hr = 0;
attributes = (FileAttributes)fileAttributes;
return true;
}
private bool TryGetFileAttributesViaFindFirstFile(string path, out FileAttributes attributes, out int hr)
{
WIN32_FIND_DATA findResult;
using (SafeFindFileHandle findHandle = FindFirstFileW(ToLongPathIfExceedMaxPath(path), out findResult))
{
if (findHandle.IsInvalid)
{
hr = Marshal.GetLastWin32Error();
attributes = FileAttributes.Normal;
return false;
}
hr = 0;
attributes = findResult.DwFileAttributes;
return true;
}
}
/// <inheritdoc />
public Possible<PathExistence, NativeFailure> TryProbePathExistence(string path, bool followSymlink)
{
if (!TryGetFileAttributesViaGetFileAttributes(path, out FileAttributes fileAttributes, out int hr))
{
if (hr == NativeIOConstants.ErrorInvalidParameter)
{
// This is a temporary log, added to figure out what causes ERROR_INVALID_PARAMETER
// Should be removed after we find out which parameters are wrong and fixing them
// $TODO: When this is removed, please also remove the method VerboseEvent_RemoveMe
string errorMessage = new Win32Exception(Marshal.GetLastWin32Error()).Message;
Events.Log.VerboseEvent_RemoveMe("ERROR_INVALID_PARAMETER (0x57) error orrured in TryProbePathExistence for path: " + path + " Error Message: " + errorMessage);
}
if (IsHresultNonexistent(hr))
{
return PathExistence.Nonexistent;
}
else
{
// Fall back using more expensive FindFirstFile.
// Getting file attributes for probing file existence with GetFileAttributesW sometimes results in "access denied".
// This causes problem especially during file materialization. Because such a probe is interpreted as probing non-existent path,
// the materialization target is not deleted. However, cache, using .NET File.Exist, is able to determine that the file exists.
// Thus, cache refuses to materialize the file
if (!TryGetFileAttributesViaFindFirstFile(path, out fileAttributes, out hr))
{
if (IsHresultNonexistent(hr))
{
return PathExistence.Nonexistent;
}
else
{
return new NativeFailure(hr);
}
}
}
}
var attrs = checked((FileAttributes)fileAttributes);
bool hasDirectoryFlag = ((attrs & FileAttributes.Directory) != 0);
if (followSymlink)
{
// when following symlinks --> implement the same behavior as .NET File.Exists() and Directory.Exists()
return hasDirectoryFlag
? PathExistence.ExistsAsDirectory
: PathExistence.ExistsAsFile;
}
else
{
// when not following symlinks --> treat symlinks as files regardless of what they point to
bool hasSymlinkFlag = ((attrs & FileAttributes.ReparsePoint) != 0);
return
hasSymlinkFlag ? PathExistence.ExistsAsFile :
hasDirectoryFlag ? PathExistence.ExistsAsDirectory :
PathExistence.ExistsAsFile;
}
}
/// <summary>
/// Gets file name.
/// </summary>
public string GetFileName(string path)
{
WIN32_FIND_DATA findResult;
using (SafeFindFileHandle findHandle = FindFirstFileW(ToLongPathIfExceedMaxPath(path), out findResult))
{
if (!findHandle.IsInvalid)
{
return findResult.CFileName;
}
ThrowForNativeFailure(Marshal.GetLastWin32Error(), nameof(FindFirstFileW));
}
return null;
}
        /// <summary>
        /// Returns whether <paramref name="path"/> matches the wildcard <paramref name="pattern"/>
        /// using the shell's PathMatchSpecW semantics.
        /// </summary>
        public bool PathMatchPattern(string path, string pattern)
        {
            return PathMatchSpecW(path, pattern);
        }
        /// <inheritdoc />
        public unsafe uint GetHardLinkCountByHandle(SafeFileHandle handle)
        {
            // FILE_STANDARD_INFO includes NumberOfLinks, the file's hard-link count.
            var info = default(FileStandardInfo);

            if (!GetFileInformationByHandleEx(handle, (uint)FileInfoByHandleClass.FileStandardInfo, (IntPtr)(&info), sizeof(FileStandardInfo)))
            {
                ThrowForNativeFailure(Marshal.GetLastWin32Error(), nameof(GetFileInformationByHandleEx));
            }

            return info.NumberOfLinks;
        }
/// <inheritdoc />
public FileAttributes GetFileAttributes(string path)
{
if (!TryGetFileAttributes(path, out FileAttributes attributes, out int hr))
{
ThrowForNativeFailure(hr, "FindFirstFileW", nameof(GetFileAttributes));
}
return attributes;
}
        /// <inheritdoc />
        public EnumerateDirectoryResult EnumerateDirectoryEntries(
            string directoryPath,
            bool recursive,
            Action<string /*filePath*/, string /*fileName*/, FileAttributes /*attributes*/> handleEntry,
            bool isEnumerationForDirectoryDeletion = false)
        {
            // Convenience overload: enumerate all entries (pattern "*").
            return EnumerateDirectoryEntries(directoryPath, recursive, "*", handleEntry, isEnumerationForDirectoryDeletion);
        }
        /// <inheritdoc />
        public EnumerateDirectoryResult EnumerateDirectoryEntries(
            string directoryPath,
            bool recursive,
            string pattern,
            Action<string /*filePath*/, string /*fileName*/, FileAttributes /*attributes*/> handleEntry,
            bool isEnumerationForDirectoryDeletion = false,
            bool followSymlinksToDirectories = false)
        {
            // Adapts a 3-argument callback to the 4-argument core overload by discarding the
            // isActionableReparsePoint flag (the lambda parameter name shadows the
            // IsReparsePointActionable method; it is unused here).
            return EnumerateDirectoryEntries(
                directoryPath,
                recursive,
                pattern,
                (path, name, attributes, IsReparsePointActionable) => handleEntry(path, name, attributes),
                isEnumerationForDirectoryDeletion,
                followSymlinksToDirectories);
        }
        /// <summary>
        /// Core directory enumeration: walks <paramref name="directoryPath"/> via
        /// FindFirstFileW/FindNextFileW, invoking <paramref name="handleEntry"/> for every entry
        /// matching <paramref name="pattern"/>, and recursing into non-reparse-point
        /// subdirectories when <paramref name="recursive"/> is set.
        /// </summary>
        /// <remarks>
        /// NOTE(review): <paramref name="followSymlinksToDirectories"/> is not used in this body,
        /// and the recursive call forwards neither it nor
        /// <paramref name="isEnumerationForDirectoryDeletion"/> — confirm this is intended.
        /// </remarks>
        private EnumerateDirectoryResult EnumerateDirectoryEntries(
            string directoryPath,
            bool recursive,
            string pattern,
            Action<string /*filePath*/, string /*fileName*/, FileAttributes /*attributes*/, bool /* isActionableReparsePoint*/> handleEntry,
            bool isEnumerationForDirectoryDeletion = false,
            bool followSymlinksToDirectories = false)
        {
            // directoryPath may be passed by users, so don't modify it (e.g., TrimEnd '\') because it's going to be part of the returned result.
            var searchDirectoryPath = Path.Combine(ToLongPathIfExceedMaxPath(directoryPath), "*");

            using (SafeFindFileHandle findHandle = FindFirstFileW(searchDirectoryPath, out WIN32_FIND_DATA findResult))
            {
                if (findHandle.IsInvalid)
                {
                    int hr = Marshal.GetLastWin32Error();
                    Contract.Assume(hr != NativeIOConstants.ErrorSuccess);
                    return EnumerateDirectoryResult.CreateFromHResult(directoryPath, hr);
                }

                while (true)
                {
                    // There will be entries for the current and parent directories. Ignore those.
                    if (((findResult.DwFileAttributes & FileAttributes.Directory) == 0) ||
                        (findResult.CFileName != "." && findResult.CFileName != ".."))
                    {
                        // When enumerating directories, it is important to not descend into actionable reparse points because
                        // they could create loops in the directory structure and crash the build engine. Windows has several
                        // reparse point types (tags), we make sure to not follow mount points and symbolic link ones - this is
                        // required to properly support directory symlinks.
                        var isActionableReparsePoint = IsReparsePointActionable(GetReparsePointTypeFromWin32FindData(findResult));

                        if (PathMatchSpecW(findResult.CFileName, pattern))
                        {
                            handleEntry(directoryPath, findResult.CFileName, findResult.DwFileAttributes, isActionableReparsePoint);
                        }

                        // Only descend if the entry is a directory and not an actionable reparse point
                        if (recursive && (findResult.DwFileAttributes & FileAttributes.Directory) != 0 && !isActionableReparsePoint)
                        {
                            var recursiveResult = EnumerateDirectoryEntries(
                                Path.Combine(directoryPath, findResult.CFileName),
                                recursive: true,
                                pattern,
                                handleEntry: handleEntry);

                            // Abort the whole enumeration on the first failing subdirectory.
                            if (!recursiveResult.Succeeded)
                            {
                                return recursiveResult;
                            }
                        }
                    }

                    if (!FindNextFileW(findHandle, out findResult))
                    {
                        int hr = Marshal.GetLastWin32Error();
                        if (hr == NativeIOConstants.ErrorNoMoreFiles)
                        {
                            // Graceful completion of enumeration.
                            return new EnumerateDirectoryResult(directoryPath, EnumerateDirectoryStatus.Success, hr);
                        }
                        else
                        {
                            Contract.Assume(hr != NativeIOConstants.ErrorSuccess);

                            // Maybe we can fail ACLs in the middle of enumerating. Do we need FILE_READ_ATTRIBUTES on each file? That would be surprising
                            // since the security descriptors aren't in the directory file. All other canonical statuses have to do with beginning enumeration
                            // rather than continuing (can we open the search directory?)
                            // So, let's assume that this failure is esoteric and use the 'unknown error' catchall.
                            return new EnumerateDirectoryResult(directoryPath, EnumerateDirectoryStatus.UnknownError, hr);
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Enumerates entries (with their sizes) under <paramref name="directoryPath"/>, calling
        /// <paramref name="handleFileEntry"/> for each entry matching <paramref name="pattern"/>.
        /// </summary>
        /// <remarks>
        /// Recursion is routed through <see cref="EnumerateFiles"/>, which filters directories out
        /// of the callback; this is consistent with this method's only visible caller being
        /// <see cref="EnumerateFiles"/> itself.
        /// </remarks>
        private EnumerateDirectoryResult EnumerateEntries(
            string directoryPath,
            bool recursive,
            string pattern,
            Action<string /*filePath*/, string /*fileName*/, FileAttributes /*attributes*/, long /*fileSize*/> handleFileEntry)
        {
            // directoryPath may be passed by users, so don't modify it (e.g., TrimEnd '\') because it's going to be part of the returned result.
            var searchDirectoryPath = Path.Combine(ToLongPathIfExceedMaxPath(directoryPath), "*");

            using (SafeFindFileHandle findHandle = FindFirstFileW(searchDirectoryPath, out WIN32_FIND_DATA findResult))
            {
                if (findHandle.IsInvalid)
                {
                    int hr = Marshal.GetLastWin32Error();
                    Contract.Assume(hr != NativeIOConstants.ErrorSuccess);
                    return EnumerateDirectoryResult.CreateFromHResult(directoryPath, hr);
                }

                while (true)
                {
                    // There will be entries for the current and parent directories. Ignore those.
                    if (((findResult.DwFileAttributes & FileAttributes.Directory) == 0) ||
                        (findResult.CFileName != "." && findResult.CFileName != ".."))
                    {
                        if (PathMatchSpecW(findResult.CFileName, pattern))
                        {
                            handleFileEntry(directoryPath, findResult.CFileName, findResult.DwFileAttributes, findResult.GetFileSize());
                        }

                        // Only descend if the entry is a real directory, not a symlink nor file
                        var reparsePointTag = GetReparsePointTypeFromWin32FindData(findResult);
                        if (recursive && (findResult.DwFileAttributes & FileAttributes.Directory) != 0 && !IsReparsePointActionable(reparsePointTag))
                        {
                            var recursiveResult = EnumerateFiles(
                                Path.Combine(directoryPath, findResult.CFileName),
                                recursive: true,
                                pattern,
                                handleFileEntry: handleFileEntry);

                            // Abort the whole enumeration on the first failing subdirectory.
                            if (!recursiveResult.Succeeded)
                            {
                                return recursiveResult;
                            }
                        }
                    }

                    if (!FindNextFileW(findHandle, out findResult))
                    {
                        int hr = Marshal.GetLastWin32Error();
                        if (hr == NativeIOConstants.ErrorNoMoreFiles)
                        {
                            // Graceful completion of enumeration.
                            return new EnumerateDirectoryResult(directoryPath, EnumerateDirectoryStatus.Success, hr);
                        }
                        else
                        {
                            Contract.Assume(hr != NativeIOConstants.ErrorSuccess);

                            // Maybe we can fail ACLs in the middle of enumerating. Do we need FILE_READ_ATTRIBUTES on each file? That would be surprising
                            // since the security descriptors aren't in the directory file. All other canonical statuses have to do with beginning enumeration
                            // rather than continuing (can we open the search directory?)
                            // So, let's assume that this failure is esoteric and use the 'unknown error' catchall.
                            return new EnumerateDirectoryResult(directoryPath, EnumerateDirectoryStatus.UnknownError, hr);
                        }
                    }
                }
            }
        }
        /// <inheritdoc />
        public EnumerateDirectoryResult EnumerateFiles(
            string directoryPath,
            bool recursive,
            string pattern,
            Action<string /*filePath*/, string /*fileName*/, FileAttributes /*attributes*/, long /*fileSize*/> handleFileEntry)
        {
            // Wraps EnumerateEntries with a filter that drops directory entries, so only files
            // reach the caller's callback.
            return EnumerateEntries(directoryPath, recursive, pattern,
                (filePath, fileName, attributes, fileSize) =>
                {
                    if ((attributes & FileAttributes.Directory) == 0)
                    {
                        handleFileEntry(filePath, fileName, attributes, fileSize);
                    }
                });
        }
        /// <inheritdoc />
        public EnumerateDirectoryResult EnumerateDirectoryEntries(
            string directoryPath,
            bool enumerateDirectory,
            string pattern,
            uint directoriesToSkipRecursively,
            bool recursive,
            IDirectoryEntriesAccumulator accumulators,
            bool isEnumerationForDirectoryDeletion = false)
        {
            // directoryPath may be passed by users, so don't modify it (e.g., TrimEnd '\') because it's going to be part of the returned result.
            var searchDirectoryPath = Path.Combine(ToLongPathIfExceedMaxPath(directoryPath), "*");

            using (SafeFindFileHandle findHandle = FindFirstFileW(searchDirectoryPath, out WIN32_FIND_DATA findResult))
            {
                if (findHandle.IsInvalid)
                {
                    int hr = Marshal.GetLastWin32Error();

                    // NOTE(review): the other enumeration overloads assume hr != ErrorSuccess here;
                    // this one assumes hr != ErrorFileNotFound — confirm the asymmetry is intended.
                    Contract.Assume(hr != NativeIOConstants.ErrorFileNotFound);
                    var result = EnumerateDirectoryResult.CreateFromHResult(directoryPath, hr);
                    accumulators.Current.Succeeded = false;
                    return result;
                }

                var accumulator = accumulators.Current;

                while (true)
                {
                    bool isDirectory = (findResult.DwFileAttributes & FileAttributes.Directory) != 0;

                    // There will be entries for the current and parent directories. Ignore those.
                    if (!isDirectory || (findResult.CFileName != "." && findResult.CFileName != ".."))
                    {
                        if (PathMatchSpecW(findResult.CFileName, pattern))
                        {
                            // Record the entry only when its kind matches the requested kind
                            // (files vs. directories) and we are past the skip depth.
                            if (!(enumerateDirectory ^ isDirectory) && directoriesToSkipRecursively == 0)
                            {
                                accumulator.AddFile(findResult.CFileName);
                            }
                        }

                        // Every entry is tracked regardless of the pattern/kind filters.
                        accumulator.AddTrackFile(findResult.CFileName, findResult.DwFileAttributes);

                        // Descend while recursing, or while still consuming the skip depth.
                        if ((recursive || directoriesToSkipRecursively > 0) && isDirectory)
                        {
                            accumulators.AddNew(accumulator, findResult.CFileName);

                            var recurs = EnumerateDirectoryEntries(
                                Path.Combine(directoryPath, findResult.CFileName),
                                enumerateDirectory,
                                pattern,
                                directoriesToSkipRecursively == 0 ? 0 : directoriesToSkipRecursively - 1,
                                recursive,
                                accumulators);

                            if (!recurs.Succeeded)
                            {
                                return recurs;
                            }
                        }
                    }

                    if (!FindNextFileW(findHandle, out findResult))
                    {
                        int hr = Marshal.GetLastWin32Error();
                        if (hr == NativeIOConstants.ErrorNoMoreFiles)
                        {
                            // Graceful completion of enumeration.
                            return new EnumerateDirectoryResult(
                                directoryPath,
                                EnumerateDirectoryStatus.Success,
                                hr);
                        }

                        Contract.Assume(hr != NativeIOConstants.ErrorSuccess);
                        return new EnumerateDirectoryResult(
                            directoryPath,
                            EnumerateDirectoryStatus.UnknownError,
                            hr);
                    }
                }
            }
        }
/// <inheritdoc />
public FileFlagsAndAttributes GetFileFlagsAndAttributesForPossibleReparsePoint(string expandedPath)
{
Possible<ReparsePointType> reparsePointType = TryGetReparsePointType(expandedPath);
var isActionableReparsePoint = false;
if (reparsePointType.Succeeded)
{
isActionableReparsePoint = IsReparsePointActionable(reparsePointType.Result);
}
var openFlags = FileFlagsAndAttributes.FileFlagOverlapped;
if (isActionableReparsePoint)
{
openFlags = openFlags | FileFlagsAndAttributes.FileFlagOpenReparsePoint;
}
return openFlags;
}
        /// <inheritdoc />
        public EnumerateDirectoryResult EnumerateDirectoryEntries(string directoryPath, Action<string, FileAttributes> handleEntry, bool isEnumerationForDirectoryDeletion = false)
        {
            // Non-recursive convenience overload; the callback receives only name + attributes.
            return EnumerateDirectoryEntries(directoryPath, recursive: false, handleEntry: (currentDirectory, fileName, fileAttributes) => handleEntry(fileName, fileAttributes), isEnumerationForDirectoryDeletion);
        }
        /// <summary>
        /// Internal overload with the 4-argument callback (including the actionable-reparse-point
        /// flag); enumerates all entries (pattern "*").
        /// </summary>
        internal EnumerateDirectoryResult EnumerateDirectoryEntries(
            string directoryPath,
            bool recursive,
            Action<string /*filePath*/, string /*fileName*/, FileAttributes /*attributes*/, bool /* isActionableReparsePoint*/> handleEntry,
            bool isEnumerationForDirectoryDeletion = false)
        {
            return EnumerateDirectoryEntries(directoryPath, recursive, "*", handleEntry, isEnumerationForDirectoryDeletion);
        }
        /// <summary>
        /// Non-recursive internal overload whose callback also receives whether the entry is an
        /// actionable reparse point.
        /// </summary>
        internal EnumerateDirectoryResult EnumerateDirectoryEntries(string directoryPath, Action<string, FileAttributes, bool> handleEntry, bool isEnumerationForDirectoryDeletion = false)
        {
            return EnumerateDirectoryEntries(directoryPath, recursive: false, handleEntry: (currentDirectory, fileName, fileAttributes, isActionableReparspoint) => handleEntry(fileName, fileAttributes, isActionableReparspoint), isEnumerationForDirectoryDeletion);
        }
        /// <summary>
        /// Throws an exception for the unexpected failure of a native API.
        /// </summary>
        /// <remarks>
        /// We don't want native failure checks erased at any contract-rewriting setting.
        /// The return type is <see cref="Exception"/> to facilitate a pattern of <c>throw ThrowForNativeFailure(...)</c> which informs csc's flow control analysis.
        /// This method never returns normally; it always throws.
        /// </remarks>
        internal static Exception ThrowForNativeFailure(int error, string nativeApiName, [CallerMemberName] string managedApiName = "<unknown>")
        {
            Contract.Requires(!string.IsNullOrEmpty(nativeApiName) && !string.IsNullOrEmpty(managedApiName));

            throw CreateWin32Exception(error, nativeApiName, managedApiName);
        }
        /// <summary>
        /// Creates a Win32 exception for an HResult
        /// </summary>
        /// <remarks>
        /// The message embeds both the native API that failed and the managed caller's name.
        /// </remarks>
        internal static NativeWin32Exception CreateWin32Exception(int error, string nativeApiName, [CallerMemberName] string managedApiName = "<unknown>")
        {
            Contract.Requires(!string.IsNullOrEmpty(nativeApiName) && !string.IsNullOrEmpty(managedApiName));

            return new NativeWin32Exception(error, I($"{nativeApiName} for {managedApiName} failed"));
        }
        /// <summary>
        /// Throws an exception for the unexpected failure of a native API.
        /// </summary>
        /// <remarks>
        /// We don't want native failure checks erased at any contract-rewriting setting.
        /// The return type is <see cref="Exception"/> to facilitate a pattern of <c>throw ThrowForNativeFailure(...)</c> which informs csc's flow control analysis.
        /// This method never returns normally; it always throws.
        /// </remarks>
        internal static Exception ThrowForNativeFailure(NtStatus status, string nativeApiName, [CallerMemberName] string managedApiName = "<unknown>")
        {
            Contract.Requires(!string.IsNullOrEmpty(nativeApiName) && !string.IsNullOrEmpty(managedApiName));

            throw CreateNtException(status, nativeApiName, managedApiName);
        }
        /// <summary>
        /// Creates an NT exception for an NTSTATUS
        /// </summary>
        /// <remarks>
        /// The message embeds both the native API that failed and the managed caller's name.
        /// </remarks>
        internal static NativeNtException CreateNtException(NtStatus status, string nativeApiName, [CallerMemberName] string managedApiName = "<unknown>")
        {
            Contract.Requires(!string.IsNullOrEmpty(nativeApiName) && !string.IsNullOrEmpty(managedApiName));

            return new NativeNtException(status, I($"{nativeApiName} for {managedApiName} failed"));
        }
        /// <inheritdoc />
        public bool TryReadSeekPenaltyProperty(SafeFileHandle driveHandle, out bool hasSeekPenalty, out int error)
        {
            Contract.Requires(driveHandle != null);
            Contract.Requires(!driveHandle.IsInvalid);

            // Conservative default: assume a seek penalty (i.e., spinning media) if the query fails.
            hasSeekPenalty = true;

            // Build the IOCTL_STORAGE_QUERY_PROPERTY input asking for the seek-penalty descriptor.
            STORAGE_PROPERTY_QUERY storagePropertyQuery = default(STORAGE_PROPERTY_QUERY);
            storagePropertyQuery.PropertyId = StorageDeviceSeekPenaltyProperty;
            storagePropertyQuery.QueryType = PropertyStandardQuery;

            DEVICE_SEEK_PENALTY_DESCRIPTOR seekPropertyDescriptor;

            bool ioctlSuccess = DeviceIoControl(
                driveHandle,
                IOCTL_STORAGE_QUERY_PROPERTY,
                ref storagePropertyQuery,
                Marshal.SizeOf<STORAGE_PROPERTY_QUERY>(),
                out seekPropertyDescriptor,
                Marshal.SizeOf<DEVICE_SEEK_PENALTY_DESCRIPTOR>(),
                out uint bytesReturned,
                IntPtr.Zero);
            error = Marshal.GetLastWin32Error();

            if (ioctlSuccess)
            {
                Contract.Assume(bytesReturned >= Marshal.SizeOf<DEVICE_SEEK_PENALTY_DESCRIPTOR>(), "Query returned fewer bytes than length of output data");
                hasSeekPenalty = seekPropertyDescriptor.IncursSeekPenalty;
                return true;
            }
            else
            {
                return false;
            }
        }
/// <inheritdoc />
public bool IsReparsePointActionable(ReparsePointType reparsePointType)
{
return reparsePointType == ReparsePointType.FileSymlink
|| reparsePointType == ReparsePointType.DirectorySymlink
|| reparsePointType == ReparsePointType.MountPoint;
}
/// <inheritdoc />
public bool IsReparsePointSymbolicLink(ReparsePointType reparsePointType)
{
return reparsePointType == ReparsePointType.FileSymlink || reparsePointType == ReparsePointType.DirectorySymlink;
}
        /// <inheritdoc />
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "GetFileAttributesW")]
        public Possible<ReparsePointType> TryGetReparsePointType(string path)
        {
            if (!TryGetFileAttributes(path, out FileAttributes attributes, out int hr))
            {
                return new Possible<ReparsePointType>(new NativeFailure(hr));
            }

            // Fast path: not a reparse point at all.
            if ((attributes & FileAttributes.ReparsePoint) == 0)
            {
                return ReparsePointType.None;
            }

            // The reparse tag is only exposed through WIN32_FIND_DATA.DwReserved0, so query it
            // via FindFirstFileW.
            using (SafeFindFileHandle findHandle = FindFirstFileW(ToLongPathIfExceedMaxPath(path), out WIN32_FIND_DATA findResult))
            {
                if (!findHandle.IsInvalid)
                {
                    return GetReparsePointTypeFromWin32FindData(findResult);
                }
            }

            // FindFirstFileW failed even though the attributes said "reparse point"; report None.
            return ReparsePointType.None;
        }
private ReparsePointType GetReparsePointTypeFromWin32FindData(WIN32_FIND_DATA findResult)
{
if (findResult.DwReserved0 == (uint)DwReserved0Flag.IO_REPARSE_TAG_SYMLINK ||
findResult.DwReserved0 == (uint)DwReserved0Flag.IO_REPARSE_TAG_MOUNT_POINT)
{
return findResult.DwReserved0 == (uint)DwReserved0Flag.IO_REPARSE_TAG_SYMLINK
? ((findResult.DwFileAttributes & FileAttributes.Directory) != 0) ? ReparsePointType.DirectorySymlink : ReparsePointType.FileSymlink
: ReparsePointType.MountPoint;
}
return ReparsePointType.NonActionable;
}
        /// <inheritdoc/>
        public bool IsWciReparseArtifact(string path)
        {
            // Matches either a WCI reparse point or a WCI tombstone.
            return IsWCIReparsePointWithTag(path, DwReserved0Flag.IO_REPARSE_TAG_WCIFS, DwReserved0Flag.IO_REPARSE_TAG_WCIFS_TOMBSTONE);
        }
        /// <inheritdoc/>
        public bool IsWciReparsePoint(string path)
        {
            // Matches only the WCI filter-driver reparse tag.
            return IsWCIReparsePointWithTag(path, DwReserved0Flag.IO_REPARSE_TAG_WCIFS);
        }
        /// <inheritdoc/>
        public bool IsWciTombstoneFile(string path)
        {
            // Matches only the WCI tombstone reparse tag.
            return IsWCIReparsePointWithTag(path, DwReserved0Flag.IO_REPARSE_TAG_WCIFS_TOMBSTONE);
        }
/// <summary>
/// Whether the given path contains any of the given tags
/// </summary>
private bool IsWCIReparsePointWithTag(string path, DwReserved0Flag tag1, DwReserved0Flag tag2 = DwReserved0Flag.IO_REPARSE_TAG_RESERVED_ZERO)
{
Contract.Requires(!string.IsNullOrEmpty(path));
// GetFileAttributes doesn't seem to see WCI reparse points as such. So we go to FindFirstFile
// directly
using (SafeFindFileHandle findHandle = FindFirstFileW(ToLongPathIfExceedMaxPath(path), out WIN32_FIND_DATA findResult))
{
if (!findHandle.IsInvalid)
{
return
(findResult.DwFileAttributes & FileAttributes.ReparsePoint) != 0 &&
(findResult.DwReserved0 == (uint)tag1 || (tag2 == DwReserved0Flag.IO_REPARSE_TAG_RESERVED_ZERO || findResult.DwReserved0 == (uint)tag2));
}
return false;
}
}
        /// <summary>
        /// NtSetInformationFile (ntdll): sets metadata on an open handle for a given
        /// <see cref="FileInformationClass"/>; used with structures such as
        /// <see cref="FileLinkInformation"/> for operations Win32 cannot express directly.
        /// </summary>
        [DllImport("ntdll.dll", ExactSpelling = true)]
        internal static extern NtStatus NtSetInformationFile(
            SafeFileHandle fileHandle,
            out IoStatusBlock ioStatusBlock,
#pragma warning disable 0618
            [MarshalAs(UnmanagedType.AsAny)] object fileInformation,
#pragma warning restore 0618
            uint length,
            FileInformationClass fileInformationClass);
/// <inheritdoc/>
public bool IsVolumeMapped(string volume)
{
Contract.Requires(!string.IsNullOrEmpty(volume));
// QueryDosDevice needs a volume name without trailing slashes
volume = volume.TrimEnd('\\');
var sb = new StringBuilder(259);
if (QueryDosDevice(volume, sb, sb.Capacity) != 0)
{
// If the volume was mapped, then it starts with '\??\'
return sb.ToString().StartsWith(NtPathPrefix);
}
// QueryDosDevice failed, so we assume this is not a mapped volume
// TODO: consider logging this case
return false;
}
        /// <summary>
        /// QueryDosDevice: retrieves the NT device target that a DOS device name (e.g. a drive
        /// letter or mapped volume) resolves to; returns 0 on failure.
        /// </summary>
        [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)]
        private static extern int QueryDosDevice(string devname, StringBuilder buffer, int bufSize);
        /// <summary>
        /// Managed projection of the native FILE_LINK_INFORMATION structure passed to
        /// <see cref="NtSetInformationFile"/>. Field order and sizes must match the native layout;
        /// do not reorder or retype the fields.
        /// </summary>
        [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
        internal readonly struct FileLinkInformation
        {
            // Non-zero to allow replacing an existing file at the destination.
            private readonly byte m_replaceIfExists;

            // Optional directory handle the destination is relative to; IntPtr.Zero = absolute path.
            private readonly IntPtr m_rootDirectoryHandle;

            // Length of the destination name in BYTES (2 per UTF-16 character), not characters.
            private readonly uint m_fileNameLength;

            /// <summary>
            /// Allocates a constant-sized buffer for the FileName. MAX_PATH for the path, 4 for the DosToNtPathPrefix.
            /// </summary>
            [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 260 + 4)]
            private readonly string m_filenameName;

            // ReSharper restore PrivateFieldCanBeConvertedToLocalVariable
            public FileLinkInformation(string destinationPath, bool replaceIfExists)
            {
                m_filenameName = destinationPath;
                m_fileNameLength = (uint)(2 * m_filenameName.Length);
                m_rootDirectoryHandle = IntPtr.Zero;
                m_replaceIfExists = (byte)(replaceIfExists ? 1 : 0);
            }
        }
        /// <summary>
        /// Enumeration of the various file information classes.
        /// See wdm.h.
        /// </summary>
        /// <remarks>
        /// Values mirror the native FILE_INFORMATION_CLASS and are passed straight through to
        /// NtSetInformationFile; do not reorder or renumber.
        /// </remarks>
        public enum FileInformationClass
        {
            None = 0,
            FileDirectoryInformation = 1,
            FileFullDirectoryInformation, // 2
            FileBothDirectoryInformation, // 3
            FileBasicInformation, // 4
            FileStandardInformation, // 5
            FileInternalInformation, // 6
            FileEaInformation, // 7
            FileAccessInformation, // 8
            FileNameInformation, // 9
            FileRenameInformation, // 10
            FileLinkInformation, // 11
            FileNamesInformation, // 12
            FileDispositionInformation, // 13
            FilePositionInformation, // 14
            FileFullEaInformation, // 15
            FileModeInformation, // 16
            FileAlignmentInformation, // 17
            FileAllInformation, // 18
            FileAllocationInformation, // 19
            FileEndOfFileInformation, // 20
            FileAlternateNameInformation, // 21
            FileStreamInformation, // 22
            FilePipeInformation, // 23
            FilePipeLocalInformation, // 24
            FilePipeRemoteInformation, // 25
            FileMailslotQueryInformation, // 26
            FileMailslotSetInformation, // 27
            FileCompressionInformation, // 28
            FileObjectIdInformation, // 29
            FileCompletionInformation, // 30
            FileMoveClusterInformation, // 31
            FileQuotaInformation, // 32
            FileReparsePointInformation, // 33
            FileNetworkOpenInformation, // 34
            FileAttributeTagInformation, // 35
            FileTrackingInformation, // 36
            FileIdBothDirectoryInformation, // 37
            FileIdFullDirectoryInformation, // 38
            FileValidDataLengthInformation, // 39
            FileShortNameInformation, // 40
            FileIoCompletionNotificationInformation, // 41
            FileIoStatusBlockRangeInformation, // 42
            FileIoPriorityHintInformation, // 43
            FileSfioReserveInformation, // 44
            FileSfioVolumeInformation, // 45
            FileHardLinkInformation, // 46
            FileProcessIdsUsingFileInformation, // 47
            FileNormalizedNameInformation, // 48
            FileNetworkPhysicalNameInformation, // 49
            FileIdGlobalTxDirectoryInformation, // 50
            FileIsRemoteDeviceInformation, // 51
            FileAttributeCacheInformation, // 52
            FileNumaNodeInformation, // 53
            FileStandardLinkInformation, // 54
            FileRemoteProtocolInformation, // 55
            FileMaximumInformation,
        }
/// <inheritdoc/>
public string GetFullPath(string path)
{
Contract.Requires(!string.IsNullOrEmpty(path));
string fullPath = GetFullPath(path, out int hResult);
if (fullPath == null)
{
new BuildXLException(I($"Failed to get a full path from '{path}'"), CreateWin32Exception(hResult, "GetFullPathNameW"));
}
return fullPath;
}
/// <summary>
/// Gets the full path of the specified path.
/// </summary>
/// <remarks>
/// This method functions like <see cref="Path.GetFullPath(string)"/>, i.e., it merges the name of the current drive and directory with
/// a specified file name to determine the full path of a specified file.
/// </remarks>
/// <param name="path">Path to resolve; converted to a long path if it exceeds MAX_PATH.</param>
/// <param name="hr">Receives the Win32 error code on failure; 0 on success.</param>
/// <returns>The fully resolved path, or null on failure (with <paramref name="hr"/> set).</returns>
public string GetFullPath(string path, out int hr)
{
    hr = 0;
    string toFullPath = ToLongPathIfExceedMaxPath(path);

    int bufferSize = NativeIOConstants.MaxPath;
    StringBuilder sbFull = new StringBuilder(bufferSize);

    uint u = GetFullPathNameW(toFullPath, (uint)bufferSize, sbFull, IntPtr.Zero);

    if (u == 0)
    {
        hr = Marshal.GetLastWin32Error();
        return null;
    }

    if (u > bufferSize)
    {
        // The buffer was too small; GetFullPathNameW returned the required size
        // (including the terminating null). Retry with an enlarged buffer.
        bufferSize = (int)u + 10;
        sbFull.Clear();
        sbFull.EnsureCapacity(bufferSize);
        u = GetFullPathNameW(toFullPath, (uint)bufferSize, sbFull, IntPtr.Zero);

        // BUG FIX: the retry's result was previously unchecked, so a failure on
        // the second call returned an empty/partial string with hr still 0.
        if (u == 0)
        {
            hr = Marshal.GetLastWin32Error();
            return null;
        }
    }

    return sbFull.ToString();
}
/// <summary>
/// Moves a file to a new location via MoveFileEx, optionally replacing an existing destination.
/// </summary>
public void MoveFile(string existingFileName, string newFileName, bool replaceExisting)
{
    string source = ToLongPathIfExceedMaxPath(existingFileName);
    string destination = ToLongPathIfExceedMaxPath(newFileName);

    MoveFileFlags moveFlags = replaceExisting
        ? MoveFileFlags.MOVEFILE_REPLACE_EXISTING
        : MoveFileFlags.MOVEFILE_COPY_ALLOWED;

    if (MoveFileEx(source, destination, moveFlags))
    {
        return;
    }

    int hr = Marshal.GetLastWin32Error();
    ThrowForNativeFailure(hr, nameof(MoveFileEx), nameof(MoveFile));
}
/// <summary>
/// Win32 absolute path type.
/// </summary>
/// <remarks>
/// Classified by <see cref="GetPathType(string)"/> and consumed by
/// <see cref="ToLongPathIfExceedMaxPath(string)"/> to decide which long-path prefix (if any) to apply.
/// </remarks>
private enum Win32AbsolutePathType
{
    /// <summary>
    /// Invalid type.
    /// </summary>
    Invalid,

    /// <summary>
    /// E.g., X:\ABC\DEF.
    /// </summary>
    LocalDrive,

    /// <summary>
    /// E.g., \\server\share\ABC\DEF.
    /// </summary>
    UNC,

    /// <summary>
    /// E.g., \\.\COM20, \\.\pipe\mypipe.
    /// </summary>
    LocalDevice,

    /// <summary>
    /// E.g., \\?\X:\ABC\DEF.
    /// </summary>
    LongPathPrefixed,

    /// <summary>
    /// E.g., \??\X:\ABC\DEF.
    /// </summary>
    NtPrefixed,
}
// Classifies an absolute Win32 path into one of the Win32AbsolutePathType categories
// by inspecting only its leading characters (no filesystem access).
private static Win32AbsolutePathType GetPathType(string path)
{
    // Drive-letter form: letter, colon, then a separator (e.g. "X:\ABC").
    if (path.Length >= 3
        && char.IsLetter(path[0])
        && path[1] == Path.VolumeSeparatorChar
        && IsDirectorySeparatorCore(path[2]))
    {
        return Win32AbsolutePathType.LocalDrive;
    }

    if (path.Length >= 2 && (path[0] == '\\' || path[0] == '/'))
    {
        char path0 = path[0];

        // Four-character prefixes: "\\?\", "\\.\", or "\??\" — the 4th char must
        // repeat whichever separator the path started with.
        if (path.Length >= 4 && path[3] == path0)
        {
            if (path[1] == path0)
            {
                if (path[2] == '?')
                {
                    return Win32AbsolutePathType.LongPathPrefixed;
                }
                else if (path[2] == '.')
                {
                    return Win32AbsolutePathType.LocalDevice;
                }
            }
            else if (path[1] == '?' && path[2] == '?')
            {
                return Win32AbsolutePathType.NtPrefixed;
            }
        }

        // Doubled separator without a recognized prefix: plain UNC ("\\server\share").
        if (path[1] == path0)
        {
            return Win32AbsolutePathType.UNC;
        }
    }

    return Win32AbsolutePathType.Invalid;
}
/// <summary>
/// Returns a path with a long path prefix if the given path exceeds a short max path length.
/// </summary>
public static string ToLongPathIfExceedMaxPath(string path)
{
    Contract.Requires(path != null);

    // Short paths never need a prefix.
    if (path.Length < NativeIOConstants.MaxDirectoryPath)
    {
        return path;
    }

    Win32AbsolutePathType pathType = GetPathType(path);

    if (pathType == Win32AbsolutePathType.LocalDrive)
    {
        // X:\ABC -> \\?\X:\ABC
        return LongPathPrefix + path;
    }

    if (pathType == Win32AbsolutePathType.UNC)
    {
        // \\server\share -> \\?\UNC\server\share (drop the leading "\\").
        return LongUNCPathPrefix + path.Substring(2);
    }

    // Invalid, device, or already-prefixed paths are returned untouched.
    return path;
}
/// <inheritdoc />
public FileIdAndVolumeId? TryGetFileIdentityByHandle(SafeFileHandle fileHandle)
{
    FileIdAndVolumeId? ids = TryGetFileIdAndVolumeIdByHandle(fileHandle);
    if (ids.HasValue)
    {
        return ids.Value;
    }

    // Fall back to composing an identity from the short volume serial number and
    // the file id carried by the USN record.
    ulong shortSerial = GetShortVolumeSerialNumberByHandle(fileHandle);
    var record = ReadFileUsnByHandle(fileHandle);

    if (!record.HasValue)
    {
        return default(FileIdAndVolumeId?);
    }

    return new FileIdAndVolumeId(shortSerial, record.Value.FileId);
}
/// <inheritdoc />
/// <remarks>
/// Returns null when no non-zero USN can be obtained for the file, even after
/// attempting to write a CLOSE record.
/// </remarks>
public (FileIdAndVolumeId, Usn)? TryGetVersionedFileIdentityByHandle(SafeFileHandle fileHandle)
{
    MiniUsnRecord? usnRecord = ReadFileUsnByHandle(fileHandle);

    // A zero USN means the journal has no usable version for this file yet;
    // try to mint one by writing a CLOSE record.
    if (usnRecord.HasValue && usnRecord.Value.Usn.IsZero)
    {
        Usn? maybeNewUsn = TryWriteUsnCloseRecordByHandle(fileHandle);
        if (maybeNewUsn.HasValue)
        {
            usnRecord = new MiniUsnRecord(usnRecord.Value.FileId, maybeNewUsn.Value);
        }
    }

    // If usnRecord is null or 0, then fail!
    if (!usnRecord.HasValue || usnRecord.Value.Usn.IsZero)
    {
        return null;
    }

    FileIdAndVolumeId? maybeIds = TryGetFileIdAndVolumeIdByHandle(fileHandle);

    // A short volume serial isn't the first choice (fewer random bits), but we fall back to it if the long serial is unavailable.
    var volumeSerial = maybeIds.HasValue ? maybeIds.Value.VolumeSerialNumber : GetShortVolumeSerialNumberByHandle(fileHandle);

    return (new FileIdAndVolumeId(volumeSerial, usnRecord.Value.FileId), usnRecord.Value.Usn);
}
/// <inheritdoc />
/// <remarks>
/// Unlike <see cref="TryGetVersionedFileIdentityByHandle"/>, this always writes a new
/// CLOSE record to establish a fresh version; returns null if that fails or if no
/// file id can be determined.
/// </remarks>
public (FileIdAndVolumeId, Usn)? TryEstablishVersionedFileIdentityByHandle(SafeFileHandle fileHandle, bool flushPageCache)
{
    // Before writing a CLOSE record, we might want to ensure that all dirtied cache pages have been handed back to the filesystem.
    // Otherwise, at some point in the future, the dirty pages will get lazy-written back to the filesystem, thus generating
    // a DATA OVERWRITE change reason after our CLOSE. This can happen if a file was memory-mapped for writing.
    // Note that this does NOT ensure data is crash-safe, i.e., it may still be in some cache such as one on the disk device itself;
    // we just need NTFS / ReFS up to date on what writes have supposedly happened.
    if (flushPageCache)
    {
        // This flush operation is best effort.
        FlushPageCacheToFilesystem(fileHandle);
    }

    Usn? maybeNewUsn = TryWriteUsnCloseRecordByHandle(fileHandle);
    if (!maybeNewUsn.HasValue)
    {
        return null;
    }

    Usn newUsn = maybeNewUsn.Value;

    FileIdAndVolumeId? maybeIds = TryGetFileIdAndVolumeIdByHandle(fileHandle);
    ulong volumeSerial;
    FileId fileId;

    if (maybeIds.HasValue)
    {
        volumeSerial = maybeIds.Value.VolumeSerialNumber;
        fileId = maybeIds.Value.FileId;
    }
    else
    {
        // A short volume serial isn't the first choice (fewer random bits), but we fall back to it if the long serial is unavailable.
        volumeSerial = GetShortVolumeSerialNumberByHandle(fileHandle);

        // The file id must then come from the USN record; without it we cannot build an identity.
        var usnRecord = ReadFileUsnByHandle(fileHandle);
        if (usnRecord.HasValue)
        {
            fileId = usnRecord.Value.FileId;
        }
        else
        {
            return null;
        }
    }

    return (new FileIdAndVolumeId(volumeSerial, fileId), newUsn);
}
/// <inheritdoc />
public bool IsPreciseFileVersionSupportedByEnlistmentVolume
{
    get
    {
        return true;
    }

    set
    {
        // Intentionally a no-op on this implementation.
    }
}

/// <inheritdoc />
public bool CheckIfVolumeSupportsPreciseFileVersionByHandle(SafeFileHandle fileHandle)
{
    return true;
}

/// <inheritdoc />
public bool IsCopyOnWriteSupportedByEnlistmentVolume
{
    get
    {
        return false;
    }

    set
    {
        // Intentionally a no-op on this implementation.
    }
}

/// <inheritdoc />
public bool CheckIfVolumeSupportsCopyOnWriteByHandle(SafeFileHandle fileHandle)
{
    return false;
}
/// <inheritdoc />
public bool IsPathRooted(string path) => GetRootLength(path) != 0;
/// <inheritdoc />
/// <remarks>
/// Computes the length of the root portion of <paramref name="path"/> (0 if the path
/// is not rooted), handling drive-rooted, UNC, and \\?\-prefixed forms.
/// </remarks>
public int GetRootLength(string path)
{
    int i = 0;
    int volumeSeparatorLength = 2;  // Length to the colon "C:"
    int uncRootLength = 2;          // Length to the start of the server name "\\"

    bool extendedSyntax = path.StartsWith(LongPathPrefix, StringComparison.Ordinal);
    bool extendedUncSyntax = path.StartsWith(LongUNCPathPrefix, StringComparison.Ordinal);

    if (extendedSyntax)
    {
        // Shift the position we look for the root from to account for the extended prefix
        if (extendedUncSyntax)
        {
            // "\\" -> "\\?\UNC\"
            uncRootLength = LongUNCPathPrefix.Length;
        }
        else
        {
            // "C:" -> "\\?\C:"
            volumeSeparatorLength += LongPathPrefix.Length;
        }
    }

    if ((!extendedSyntax || extendedUncSyntax) && path.Length > 0 && IsDirectorySeparator(path[0]))
    {
        // UNC or simple rooted path (e.g. "\foo", NOT "\\?\C:\foo")
        i = 1; // Drive rooted (\foo) is one character

        if (extendedUncSyntax || (path.Length > 1 && IsDirectorySeparator(path[1])))
        {
            // UNC (\\?\UNC\ or \\), scan past the next two directory separators at most
            // (e.g. to \\?\UNC\Server\Share or \\Server\Share\)
            i = uncRootLength;
            int n = 2;

            while (i < path.Length && (!IsDirectorySeparator(path[i]) || --n > 0))
            {
                ++i;
            }
        }
    }
    else if (path.Length >= volumeSeparatorLength && path[volumeSeparatorLength - 1] == Path.VolumeSeparatorChar)
    {
        // Path is at least longer than where we expect a colon, and has a colon (\\?\A:, A:)
        // If the colon is followed by a directory separator, move past it
        i = volumeSeparatorLength;

        if (path.Length >= volumeSeparatorLength + 1 && IsDirectorySeparator(path[volumeSeparatorLength]))
        {
            ++i;
        }
    }

    return i;
}
/// <inheritdoc />
public bool IsDirectorySeparator(char c)
{
    return IsDirectorySeparatorCore(c);
}

private static bool IsDirectorySeparatorCore(char c)
{
    // Accept both '\' and '/' as separators, matching Win32 semantics.
    return c == Path.DirectorySeparatorChar || c == Path.AltDirectorySeparatorChar;
}
/// <inheritdoc />
/// <remarks>
/// Opens the path with backup semantics (no access rights requested, so directories
/// are allowed) and asks the OS for the handle's final, fully resolved path.
/// Returns a NativeFailure if the handle cannot be opened or resolution fails.
/// </remarks>
private Possible<string> TryGetFinalPathByHandle(string path)
{
    SafeFileHandle handle = CreateFileW(
        ToLongPathIfExceedMaxPath(path),
        FileDesiredAccess.None,
        FileShare.None,
        lpSecurityAttributes: IntPtr.Zero,
        dwCreationDisposition: FileMode.Open,
        dwFlagsAndAttributes: FileFlagsAndAttributes.FileFlagBackupSemantics,
        hTemplateFile: IntPtr.Zero);

    // Capture the error code immediately, before any other call can overwrite it.
    int hr = Marshal.GetLastWin32Error();

    if (handle.IsInvalid)
    {
        return new NativeFailure(hr);
    }

    using (handle)
    {
        try
        {
            return GetFinalPathNameByHandle(handle);
        }
        catch (NativeWin32Exception e)
        {
            return NativeFailure.CreateFromException(e);
        }
    }
}
/// <summary>
/// Resolves the reparse points with relative target.
/// </summary>
/// <remarks>
/// This method resolves reparse points that occur in the path prefix. This method should only be called when path itself
/// is an actionable reparse point whose target is a relative path.
/// This method traverses each prefix starting from the shortest one. Every time it encounters a directory symlink, it uses GetFinalPathNameByHandle to get the final path.
/// However, if the prefix itself is a junction, then it leaves the current resolved path intact. We cannot call GetFinalPathNameByHandle on the whole path because
/// that function resolves junctions to their target paths.
/// The following example show the needs for this method as a prerequisite in getting
/// the immediate target of a reparse point. Suppose that we have the following file system layout:
///
/// repo
/// |
/// +---intermediate
/// | \---current
/// | symlink1.link ==> ..\..\target\file1.txt
/// | symlink2.link ==> ..\target\file2.txt
/// |
/// +---source ==> intermediate\current (case 1: directory symlink, case 2: junction)
/// |
/// \---target
/// file1.txt
/// file2.txt
///
/// **CASE 1**: source ==> intermediate\current is a directory symlink.
///
/// If a tool accesses repo\source\symlink1.link (say 'type repo\source\symlink1.link'), then the tool should get the content of repo\target\file1.txt.
/// If the tool accesses repo\source\symlink2.link, then the tool should get path-not-found error because the resolved path will be repo\intermediate\target\file2.txt.
/// Now, if we try to resolve repo\source\symlink1.link by simply combining it with ..\..\target\file1.txt, then we end up with target\file1.txt (not repo\target\file1.txt),
/// which is a non-existent path. To resolve repo\source\symlink1, we need to resolve the reparse points of its prefix, i.e., repo\source. For directory symlinks,
/// we need to resolve the prefix to its target. I.e., repo\source is resolved to repo\intermediate\current, and so, given repo\source\symlink1.link, this method returns
/// repo\intermediate\current\symlink1.link. Combining repo\intermediate\current\symlink1.link with ..\..\target\file1.txt will give the correct path, i.e., repo\target\file1.txt.
///
/// Similarly, given repo\source\symlink2.link, the method returns repo\intermediate\current\symlink2.link, and combining it with ..\target\file2.txt, will give us
/// repo\intermediate\target\file2.txt, which is a non-existent path. This corresponds to the behavior of symlink accesses above.
///
/// **CASE 2**: source ==> intermediate\current is a junction.
///
/// If a tool accesses repo\source\symlink1.link (say 'type repo\source\symlink1.link'), then the tool should get path-not-found error because the resolve path will be target\file1.txt (not repo\target\file1).
/// If the tool accesses repo\source\symlink2.link, then the tool should the content of repo\target\file2.txt.
/// Unlike directory symlinks, when we try to resolve repo\source\symlink2.link, the prefix repo\source is left intact because it is a junction. Thus, combining repo\source\symlink2.link
/// with ..\target\file2.txt results in a correct path, i.e., repo\target\file2.txt. The same reasoning can be given for repo\source\symlink1.link, and its resolution results in
/// a non-existent path target\file1.txt.
/// </remarks>
public Possible<string> TryResolveReparsePointRelativeTarget(string path, string relativeTarget)
{
    // Work stacks: atoms still to visit (shallowest first) and atoms already emitted.
    var needToBeProcessed = new Stack<string>();
    var processed = new Stack<string>();

    using (var sbWrapper = Pools.GetStringBuilder())
    {
        StringBuilder result = sbWrapper.Instance;

        FileUtilities.SplitPathsReverse(path, needToBeProcessed);

        while (needToBeProcessed.Count != 0)
        {
            string atom = needToBeProcessed.Pop();
            processed.Push(atom);

            // Insert a separator between atoms unless one is already present.
            if (result.Length > 0)
            {
                if (!IsDirectorySeparator(result[result.Length - 1]) && !IsDirectorySeparator(atom[0]))
                {
                    result.Append(Path.DirectorySeparatorChar);
                }
            }

            result.Append(atom);

            if (needToBeProcessed.Count == 0)
            {
                // The last atom is the one that we are going to replace.
                break;
            }

            string resultSoFar = result.ToString();

            var maybeReparsePointType = TryGetReparsePointType(resultSoFar);

            if (!maybeReparsePointType.Succeeded)
            {
                return maybeReparsePointType.Failure;
            }

            // Only directory symlinks in the prefix are resolved; junctions are
            // deliberately left intact (see the method remarks for why).
            if (IsReparsePointSymbolicLink(maybeReparsePointType.Result))
            {
                var maybeTarget = TryGetReparsePointTarget(null, resultSoFar);

                if (!maybeTarget.Succeeded)
                {
                    return maybeTarget.Failure;
                }

                if (IsPathRooted(maybeTarget.Result))
                {
                    // Target is an absolute path -> restart symlink resolution.
                    result.Clear();
                    processed.Clear();
                    FileUtilities.SplitPathsReverse(maybeTarget.Result, needToBeProcessed);
                }
                else
                {
                    // Target is a relative path.
                    var maybeResolveRelative = FileUtilities.TryResolveRelativeTarget(resultSoFar, maybeTarget.Result, processed, needToBeProcessed);

                    if (!maybeResolveRelative.Succeeded)
                    {
                        return maybeResolveRelative.Failure;
                    }

                    result.Clear();
                    result.Append(maybeResolveRelative.Result);
                }
            }
        }

        // Finally, apply the reparse point's own relative target to the resolved prefix.
        var maybeResolveFinalRelative = FileUtilities.TryResolveRelativeTarget(result.ToString(), relativeTarget, null, null);

        if (!maybeResolveFinalRelative.Succeeded)
        {
            return maybeResolveFinalRelative.Failure;
        }

        return maybeResolveFinalRelative;
    }
}
/// <summary>
/// Writes the given bytes to a handle synchronously; returns false on failure and
/// reports the Win32 error code either way.
/// </summary>
public bool TryWriteFileSync(SafeFileHandle handle, byte[] content, out int nativeErrorCode)
{
    bool succeeded;

    unsafe
    {
        // A null OVERLAPPED pointer makes WriteFile perform a synchronous write.
        succeeded = WriteFile(handle, content, content.Length, out _, lpOverlapped: null);
    }

    nativeErrorCode = Marshal.GetLastWin32Error();
    return succeeded;
}
/// <inheritdoc />
public bool IsDirectorySymlinkOrJunction(string path)
{
    const FileAttributes mask = FileAttributes.ReparsePoint | FileAttributes.Directory;

    if (!TryGetFileAttributes(path, out FileAttributes attributes, out _))
    {
        return false;
    }

    // Both the directory and reparse-point bits must be set.
    return (attributes & mask) == mask;
}
}
}
#pragma warning restore CA1823 // Unused field
| 45.974341 | 264 | 0.569506 | [
"MIT"
] | Bhaskers-Blu-Org2/BuildXL | Public/Src/Utilities/Native/IO/Windows/FileSystem.Win.cs | 195,299 | C# |
namespace BolaoNet.Domain.Interfaces.Services.Facade.Campeonatos
{
    /// <summary>
    /// Facade service contract for the 2018 World Cup championship.
    /// Currently adds no members beyond <see cref="ICopaFacadeService"/>; the
    /// commented-out signatures below are candidate operations kept for reference.
    /// </summary>
    public interface ICopaMundo2018FacadeService : ICopaFacadeService
    {
        ////Entities.Campeonatos.Campeonato Campeonato { get; }
        //Entities.Campeonatos.Campeonato CreateCampeonato(string nomeCampeonato, bool isClube);
        ////IList<Entities.Campeonatos.Jogo> GetJogosGrupo();
        ////IList<Entities.Campeonatos.Jogo> GetOitavasFinal();
        ////IList<Entities.Campeonatos.Jogo> GetQuartasFinal();
        ////IList<Entities.Campeonatos.Jogo> GetSemiFinal();
        ////IList<Entities.Campeonatos.Jogo> GetFinal();
        ////IList<Entities.Campeonatos.CampeonatoPosicao> GetCampeonatoPosicoes();
        //bool InsertResults(string nomeCampeonato, Entities.Users.User validatedBy);
    }
}
| 32.28 | 96 | 0.701363 | [
"MIT"
] | Thoris/BolaoNet | BolaoNet.Domain.Interfaces.Services/Facade/Campeonatos/ICopaMundo2018FacadeService.cs | 809 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace LXGaming.Ticket.Server.Models {
/// <summary>
/// Persistent entity representing a ticket-system user, including ban state,
/// audit timestamps, and associated identifiers and historical names.
/// </summary>
public class User {

    /// <summary>Primary key of the user.</summary>
    [Key]
    public ulong Id { get; init; }

    /// <summary>Whether the user is currently banned.</summary>
    [Required]
    public bool Banned { get; set; }

    /// <summary>When the user record was created.</summary>
    [Required]
    public DateTime CreatedAt { get; init; }

    /// <summary>When the user record was last modified.</summary>
    [Required]
    public DateTime UpdatedAt { get; set; }

    /// <summary>External identifiers associated with this user (navigation property).</summary>
    public virtual List<UserIdentifier> Identifiers { get; init; }

    /// <summary>Known names for this user (navigation property).</summary>
    public virtual List<UserName> Names { get; init; }
}
} | 22.12 | 70 | 0.622061 | [
"Apache-2.0"
] | LXGaming/Ticket.NET | LXGaming.Ticket.Server/Models/User.cs | 555 | C# |
using System;
using System.Threading.Tasks;
using System.Collections.Generic;
using PaymentsSystemExample.Domain.Adapters.JsonObjects;
namespace PaymentsSystemExample.Api.Services
{
/// <summary>
/// Persistence operations for payments: single lookup, per-organisation listing,
/// deletion, and bulk create/update.
/// </summary>
public interface IPaymentPersistenceService
{
    /// <summary>Fetches a single payment by its id; implementation-defined result when not found.</summary>
    Task<Payment> Get(Guid paymentid);

    /// <summary>Lists all payments belonging to the given organisation.</summary>
    Task<IEnumerable<Payment>> List(Guid organisationId);

    /// <summary>Deletes the payment with the given id; returns whether the operation succeeded.</summary>
    Task<bool> Delete(Guid paymentid);

    /// <summary>Persists the given payments as new records; returns whether the operation succeeded.</summary>
    Task<bool> Create(IEnumerable<Payment> payments);

    /// <summary>Updates the given existing payments; returns whether the operation succeeded.</summary>
    Task<bool> Update(IEnumerable<Payment> payments);
}
} | 31.75 | 61 | 0.740157 | [
"MIT"
] | michal-franc/payment-system-example | src/PaymentsSystemExample.Api/Services/IPaymentPersistenceService.cs | 508 | C# |
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using FluentAssertions;
using FluentAssertions.Execution;
using Microsoft.CodeAnalysis.Text;
using System;
using System.Collections.Immutable;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using System.Threading;
using Microsoft.CodeAnalysis.Simplification;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.CSharp;
using System.Reflection;
using XunitAssert = Xunit.Assert;
namespace FluentAssertions.Analyzers.Tests
{
/// <summary>
/// Superclass of all Unit Tests for DiagnosticAnalyzers
/// </summary>
public static class DiagnosticVerifier
{
// Builds the metadata-reference set and file-naming defaults used for every in-memory test project.
static DiagnosticVerifier()
{
    // NOTE: a local function identical to the static GetSystemAssemblyPathByName used to be
    // declared here, shadowing it. The duplicate has been removed; the calls below now bind
    // to the (byte-for-byte identical) static method instead.
    References = new[]
    {
        typeof(object), // System.Private.CoreLib
        typeof(Console), // System
        typeof(Enumerable), // System.Linq
        typeof(CSharpCompilation), // Microsoft.CodeAnalysis.CSharp
        typeof(Compilation), // Microsoft.CodeAnalysis
        typeof(AssertionScope), // FluentAssertions.Core
        typeof(AssertionExtensions), // FluentAssertions
        typeof(Microsoft.VisualStudio.TestTools.UnitTesting.Assert), // MsTest
        typeof(XunitAssert), // Xunit
    }.Select(type => type.GetTypeInfo().Assembly.Location)
    .Append(GetSystemAssemblyPathByName("System.Globalization.dll"))
    .Append(GetSystemAssemblyPathByName("System.Text.RegularExpressions.dll"))
    .Append(GetSystemAssemblyPathByName("System.Runtime.Extensions.dll"))
    .Append(GetSystemAssemblyPathByName("System.Data.Common.dll"))
    .Append(GetSystemAssemblyPathByName("System.Threading.Tasks.dll"))
    .Append(GetSystemAssemblyPathByName("System.Runtime.dll"))
    .Append(GetSystemAssemblyPathByName("System.Reflection.dll"))
    .Append(GetSystemAssemblyPathByName("System.Xml.dll"))
    .Append(GetSystemAssemblyPathByName("System.Xml.XDocument.dll"))
    .Append(GetSystemAssemblyPathByName("System.Private.Xml.Linq.dll"))
    .Append(GetSystemAssemblyPathByName("System.Linq.Expressions.dll"))
    .Append(GetSystemAssemblyPathByName("System.Collections.dll"))
    .Append(GetSystemAssemblyPathByName("netstandard.dll"))
    .Append(GetSystemAssemblyPathByName("System.Xml.ReaderWriter.dll"))
    .Append(GetSystemAssemblyPathByName("System.Private.Xml.dll"))
    .Select(location => (MetadataReference)MetadataReference.CreateFromFile(location))
    .ToImmutableArray();

    DefaultFilePathPrefix = "Test";
    CSharpDefaultFileExt = "cs";
    VisualBasicDefaultExt = "vb";
    TestProjectName = "TestProject";
}
// Resolves a framework assembly file that lives in the same directory as System.Private.CoreLib.
// (based on http://code.fitness/post/2017/02/using-csharpscript-with-netstandard.html)
public static string GetSystemAssemblyPathByName(string assemblyName)
{
    string coreLibDirectory = System.IO.Path.GetDirectoryName(typeof(object).Assembly.Location);
    return System.IO.Path.Combine(coreLibDirectory, assemblyName);
}
// Metadata references added to every in-memory test project (BCL, Roslyn, FluentAssertions, MsTest, Xunit).
private static readonly ImmutableArray<MetadataReference> References;

// Defaults for naming the synthetic project and its documents; assigned in the static constructor.
private static readonly string DefaultFilePathPrefix;
private static readonly string CSharpDefaultFileExt;
private static readonly string VisualBasicDefaultExt;
private static readonly string TestProjectName;
#region CodeFixVerifier
/// <summary>
/// Runs <typeparamref name="TDiagnosticAnalyzer"/> over the C# <paramref name="oldSource"/>,
/// applies <typeparamref name="TCodeFixProvider"/>, and asserts the result equals <paramref name="newSource"/>.
/// </summary>
public static void VerifyCSharpFix<TCodeFixProvider, TDiagnosticAnalyzer>(string oldSource, string newSource, int? codeFixIndex = null, bool allowNewCompilerDiagnostics = false)
    where TCodeFixProvider : CodeFixProvider, new()
    where TDiagnosticAnalyzer : DiagnosticAnalyzer, new()
{
    VerifyFix(LanguageNames.CSharp, new TDiagnosticAnalyzer(), new TCodeFixProvider(), oldSource, newSource, codeFixIndex, allowNewCompilerDiagnostics);
}

/// <summary>
/// Visual Basic counterpart of <see cref="VerifyCSharpFix{TCodeFixProvider, TDiagnosticAnalyzer}"/>.
/// </summary>
public static void VerifyBasicFix<TCodeFixProvider, TDiagnosticAnalyzer>(string oldSource, string newSource, int? codeFixIndex = null, bool allowNewCompilerDiagnostics = false)
    where TCodeFixProvider : CodeFixProvider, new()
    where TDiagnosticAnalyzer : DiagnosticAnalyzer, new()
{
    VerifyFix(LanguageNames.VisualBasic, new TDiagnosticAnalyzer(), new TCodeFixProvider(), oldSource, newSource, codeFixIndex, allowNewCompilerDiagnostics);
}
/// <summary>
/// General verifier for codefixes.
/// Creates a Document from the source string, then gets diagnostics on it and applies the relevant codefixes.
/// Then gets the string after the codefix is applied and compares it with the expected result.
/// Note: If any codefix causes new diagnostics to show up, the test fails unless allowNewCompilerDiagnostics is set to true.
/// </summary>
/// <param name="language">The language the source code is in</param>
/// <param name="analyzer">The analyzer to be applied to the source code</param>
/// <param name="codeFixProvider">The codefix to be applied to the code wherever the relevant Diagnostic is found</param>
/// <param name="oldSource">A class in the form of a string before the CodeFix was applied to it</param>
/// <param name="newSource">A class in the form of a string after the CodeFix was applied to it</param>
/// <param name="codeFixIndex">Index determining which codefix to apply if there are multiple</param>
/// <param name="allowNewCompilerDiagnostics">A bool controlling whether or not the test will fail if the CodeFix introduces other warnings after being applied</param>
private static void VerifyFix(string language, DiagnosticAnalyzer analyzer, CodeFixProvider codeFixProvider, string oldSource, string newSource, int? codeFixIndex, bool allowNewCompilerDiagnostics)
{
    var document = CreateDocument(oldSource, language);
    var analyzerDiagnostics = GetSortedDiagnosticsFromDocuments(new[] { analyzer }, new[] { document });
    var compilerDiagnostics = GetCompilerDiagnostics(document);
    var attempts = analyzerDiagnostics.Length;

    for (int i = 0; i < attempts; ++i)
    {
        var actions = new List<CodeAction>();
        var context = new CodeFixContext(document, analyzerDiagnostics[0], (a, d) => actions.Add(a), CancellationToken.None);
        codeFixProvider.RegisterCodeFixesAsync(context).Wait();

        if (actions.Count == 0)
        {
            break;
        }

        if (codeFixIndex != null)
        {
            document = ApplyFix(document, actions[(int)codeFixIndex]);
            break;
        }

        document = ApplyFix(document, actions[0]);
        analyzerDiagnostics = GetSortedDiagnosticsFromDocuments(new[] { analyzer }, new[] { document });

        var newCompilerDiagnostics = GetNewDiagnostics(compilerDiagnostics, GetCompilerDiagnostics(document));

        //check if applying the code fix introduced any new compiler diagnostics
        if (!allowNewCompilerDiagnostics && newCompilerDiagnostics.Any())
        {
            // Format and get the compiler diagnostics again so that the locations make sense in the output
            document = document.WithSyntaxRoot(Formatter.Format(document.GetSyntaxRootAsync().Result, Formatter.Annotation, document.Project.Solution.Workspace));
            newCompilerDiagnostics = GetNewDiagnostics(compilerDiagnostics, GetCompilerDiagnostics(document));

            throw new AssertionFailedException(string.Format("Fix introduced new compiler diagnostics:\r\n{0}\r\n\r\nNew document:\r\n{1}\r\n",
                string.Join("\r\n", newCompilerDiagnostics.Select(d => d.ToString())),
                document.GetSyntaxRootAsync().Result.ToFullString()));
        }

        // BUG FIX: this previously broke when diagnostics REMAINED (Length > 0), stopping
        // after a single fix, and kept looping when none remained — which then indexed
        // analyzerDiagnostics[0] on an empty array in the next iteration. Stop only when
        // no analyzer diagnostics are left (matches the Roslyn SDK verifier template).
        if (analyzerDiagnostics.Length == 0)
        {
            break;
        }
    }

    //after applying all of the code fixes, compare the resulting string to the inputted one
    var actual = GetStringFromDocument(document);
    actual.Should().Be(newSource);
}
/// <summary>
/// Applies a single <see cref="CodeAction"/> (typically a codefix) to a document and
/// returns the corresponding document from the changed solution.
/// </summary>
/// <param name="document">The document to apply the fix on.</param>
/// <param name="codeAction">The code action to apply.</param>
/// <returns>The updated document.</returns>
private static Document ApplyFix(Document document, CodeAction codeAction)
{
    var changedSolution = codeAction
        .GetOperationsAsync(CancellationToken.None)
        .Result
        .OfType<ApplyChangesOperation>()
        .Single()
        .ChangedSolution;

    return changedSolution.GetDocument(document.Id);
}
/// <summary>
/// Compare two collections of Diagnostics,and return a list of any new diagnostics that appear only in the second collection.
/// Note: Considers Diagnostics to be the same if they have the same Ids. In the case of multiple diagnostics with the same Id in a row,
/// this method may not necessarily return the new one.
/// </summary>
/// <param name="diagnostics">The Diagnostics that existed in the code before the CodeFix was applied</param>
/// <param name="newDiagnostics">The Diagnostics that exist in the code after the CodeFix was applied</param>
/// <returns>A list of Diagnostics that only surfaced in the code after the CodeFix was applied</returns>
private static IEnumerable<Diagnostic> GetNewDiagnostics(IEnumerable<Diagnostic> diagnostics, IEnumerable<Diagnostic> newDiagnostics)
{
    // Two-pointer walk over both sequences, ordered by source position; entries whose
    // Ids line up are considered the same diagnostic and skipped in both sequences.
    var oldArray = diagnostics.OrderBy(d => d.Location.SourceSpan.Start).ToArray();
    var newArray = newDiagnostics.OrderBy(d => d.Location.SourceSpan.Start).ToArray();

    int oldIndex = 0;
    int newIndex = 0;

    while (newIndex < newArray.Length)
    {
        if (oldIndex < oldArray.Length && oldArray[oldIndex].Id == newArray[newIndex].Id)
        {
            ++oldIndex;
            ++newIndex;
        }
        else
        {
            // No matching old diagnostic at this position: it is new.
            yield return newArray[newIndex++];
        }
    }
}
/// <summary>
/// Get the existing compiler diagnostics on the inputted document.
/// </summary>
/// <param name="document">The Document to run the compiler diagnostic analyzers on</param>
/// <returns>The compiler diagnostics that were found in the code</returns>
private static IEnumerable<Diagnostic> GetCompilerDiagnostics(Document document)
{
    var compilation = document.GetSemanticModelAsync().Result.Compilation;

    // Suppress diagnostics that are noise for in-memory test compilations.
    return compilation.WithOptions(compilation.Options
        .WithSpecificDiagnosticOptions(new Dictionary<string, ReportDiagnostic>
        {
            ["CS1701"] = ReportDiagnostic.Suppress, // Binding redirects
            ["CS1702"] = ReportDiagnostic.Suppress,
            ["CS1705"] = ReportDiagnostic.Suppress,
            ["CS8019"] = ReportDiagnostic.Suppress // TODO: Unnecessary using directive
        })
    ).GetDiagnostics();
}
/// <summary>
/// Converts a document to its source text after running the simplifier and formatter,
/// so the output is stable and comparable against expected fixed sources.
/// </summary>
/// <param name="document">The document to render.</param>
/// <returns>The formatted source text of the document.</returns>
private static string GetStringFromDocument(Document document)
{
    Document simplified = Simplifier.ReduceAsync(document, Simplifier.Annotation).Result;
    SyntaxNode formattedRoot = Formatter.Format(
        simplified.GetSyntaxRootAsync().Result,
        Formatter.Annotation,
        simplified.Project.Solution.Workspace);

    return formattedRoot.GetText().ToString();
}
#endregion
#region Get Diagnostics
/// <summary>
/// Given classes in the form of strings, their language, and an IDiagnosticAnlayzer to apply to it, return the diagnostics found in the string after converting it to a document.
/// </summary>
/// <param name="sources">Classes in the form of strings</param>
/// <param name="language">The language the source classes are in</param>
/// <param name="analyzers">The analyzers to be run on the sources</param>
/// <returns>An IEnumerable of Diagnostics that surfaced in the source code, sorted by Location</returns>
private static Diagnostic[] GetSortedDiagnostics(string[] sources, string language, params DiagnosticAnalyzer[] analyzers)
{
    return GetSortedDiagnosticsFromDocuments(analyzers, GetDocuments(sources, language));
}
/// <summary>
/// Given an analyzer and a document to apply it to, run the analyzer and gather an array of diagnostics found in it.
/// The returned diagnostics are then ordered by location in the source document.
/// </summary>
/// <param name="analyzers">The analyzer to run on the documents</param>
/// <param name="documents">The Documents that the analyzer will be run on</param>
/// <returns>An IEnumerable of Diagnostics that surfaced in the source code, sorted by Location</returns>
private static Diagnostic[] GetSortedDiagnosticsFromDocuments(DiagnosticAnalyzer[] analyzers, Document[] documents)
{
    // Deduplicate projects: multiple documents may belong to the same project.
    var projects = new HashSet<Project>();
    foreach (var document in documents)
    {
        projects.Add(document.Project);
    }

    var diagnostics = new List<Diagnostic>();
    foreach (var project in projects)
    {
        var compilation = project.GetCompilationAsync().Result;
        var compilationWithAnalyzers = compilation
            .WithOptions(compilation.Options
                .WithSpecificDiagnosticOptions(new Dictionary<string, ReportDiagnostic>
                {
                    ["CS1701"] = ReportDiagnostic.Suppress, // Binding redirects
                    ["CS1702"] = ReportDiagnostic.Suppress,
                    ["CS1705"] = ReportDiagnostic.Suppress,
                    ["CS8019"] = ReportDiagnostic.Suppress // TODO: Unnecessary using directive
                }))
            .WithAnalyzers(ImmutableArray.Create(analyzers));

        var relevantDiagnostics = compilationWithAnalyzers.GetAnalyzerDiagnosticsAsync().Result;

        // Any non-analyzer diagnostic means the test source itself does not compile cleanly;
        // fail early with the offending code in the message.
        var allDiagnostics = compilationWithAnalyzers.GetAllDiagnosticsAsync().Result;
        var other = allDiagnostics.Except(relevantDiagnostics).ToArray();

        var code = documents[0].GetSyntaxRootAsync().Result.ToFullString();
        other.Should().BeEmpty("there should be no error diagnostics that are not related to the test.{0}code: {1}", Environment.NewLine, code);

        // Keep only diagnostics that have no source location or that belong to one of the
        // documents under test.
        foreach (var diag in relevantDiagnostics)
        {
            if (diag.Location == Location.None || diag.Location.IsInMetadata)
            {
                diagnostics.Add(diag);
            }
            else
            {
                for (int i = 0; i < documents.Length; i++)
                {
                    var document = documents[i];
                    var tree = document.GetSyntaxTreeAsync().Result;

                    if (tree == diag.Location.SourceTree)
                    {
                        diagnostics.Add(diag);
                    }
                }
            }
        }
    }

    var results = SortDiagnostics(diagnostics);
    diagnostics.Clear();
    return results;
}
/// <summary>
/// Orders diagnostics by their start position in the source document.
/// </summary>
/// <param name="diagnostics">The diagnostics to be sorted.</param>
/// <returns>An array of diagnostics ordered by location.</returns>
private static Diagnostic[] SortDiagnostics(IEnumerable<Diagnostic> diagnostics)
{
    return (from diagnostic in diagnostics
            orderby diagnostic.Location.SourceSpan.Start
            select diagnostic).ToArray();
}
#endregion
#region Set up compilation and documents
/// <summary>
/// Turns the given sources into documents of an in-memory project.
/// </summary>
/// <param name="sources">Classes in the form of strings.</param>
/// <param name="language">The language the source code is in.</param>
/// <returns>One document per input source, in order.</returns>
private static Document[] GetDocuments(string[] sources, string language)
{
    bool supportedLanguage = language == LanguageNames.CSharp || language == LanguageNames.VisualBasic;
    if (!supportedLanguage)
    {
        throw new ArgumentException("Unsupported Language");
    }

    Document[] documents = CreateProject(sources, language).Documents.ToArray();

    if (documents.Length != sources.Length)
    {
        throw new SystemException("Amount of sources did not match amount of Documents created");
    }

    return documents;
}
/// <summary>
/// Create a Document from a string through creating a project that contains it.
/// </summary>
/// <param name="source">Classes in the form of a string</param>
/// <param name="language">The language the source code is in</param>
/// <returns>A Document created from the source string</returns>
private static Document CreateDocument(string source, string language = LanguageNames.CSharp)
{
return CreateProject(new[] { source }, language).Documents.First();
}
/// <summary>
/// Create a project using the inputted strings as sources.
/// </summary>
/// <param name="sources">Classes in the form of strings</param>
/// <param name="language">The language the source code is in</param>
/// <returns>A Project created out of the Documents created from the source strings</returns>
private static Project CreateProject(string[] sources, string language = LanguageNames.CSharp)
{
string fileNamePrefix = DefaultFilePathPrefix;
string fileExt = language == LanguageNames.CSharp ? CSharpDefaultFileExt : VisualBasicDefaultExt;
var projectId = ProjectId.CreateNewId(debugName: TestProjectName);
var solution = new AdhocWorkspace()
.CurrentSolution
.AddProject(projectId, TestProjectName, TestProjectName, language)
.AddMetadataReferences(projectId, References);
int count = 0;
foreach (var source in sources)
{
var newFileName = fileNamePrefix + count + "." + fileExt;
var documentId = DocumentId.CreateNewId(projectId, debugName: newFileName);
solution = solution.AddDocument(documentId, newFileName, SourceText.From(source));
count++;
}
return solution.GetProject(projectId);
}
#endregion
#region Verifier wrappers
/// <summary>
/// Called to test a C# DiagnosticAnalyzer when applied on the single inputted string as a source
/// Note: input a DiagnosticResult for each Diagnostic expected
/// </summary>
/// <param name="source">A class in the form of a string to run the analyzer on</param>
/// <param name="expected"> DiagnosticResults that should appear after the analyzer is run on the source</param>
public static void VerifyCSharpDiagnostic<TDiagnosticAnalyzer>(string source, params DiagnosticResult[] expected) where TDiagnosticAnalyzer : DiagnosticAnalyzer, new()
{
VerifyDiagnostics(new[] { source }, LanguageNames.CSharp, new TDiagnosticAnalyzer(), expected);
}
/// <summary>
/// Called to test a VB DiagnosticAnalyzer when applied on the single inputted string as a source
/// Note: input a DiagnosticResult for each Diagnostic expected
/// </summary>
/// <param name="source">A class in the form of a string to run the analyzer on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the source</param>
public static void VerifyBasicDiagnostic<TDiagnosticAnalyzer>(string source, params DiagnosticResult[] expected) where TDiagnosticAnalyzer : DiagnosticAnalyzer, new()
{
VerifyDiagnostics(new[] { source }, LanguageNames.VisualBasic, new TDiagnosticAnalyzer(), expected);
}
/// <summary>
/// Called to test a C# DiagnosticAnalyzer when applied on the inputted strings as a source
/// Note: input a DiagnosticResult for each Diagnostic expected
/// </summary>
/// <param name="sources">An array of strings to create source documents from to run the analyzers on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the sources</param>
public static void VerifyCSharpDiagnostic<TDiagnosticAnalyzer>(string[] sources, params DiagnosticResult[] expected) where TDiagnosticAnalyzer : DiagnosticAnalyzer, new()
{
VerifyDiagnostics(sources, LanguageNames.CSharp, new TDiagnosticAnalyzer(), expected);
}
/// <summary>
/// Called to test a VB DiagnosticAnalyzer when applied on the inputted strings as a source
/// Note: input a DiagnosticResult for each Diagnostic expected
/// </summary>
/// <param name="sources">An array of strings to create source documents from to run the analyzers on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the sources</param>
public static void VerifyBasicDiagnostic<TDiagnosticAnalyzer>(string[] sources, params DiagnosticResult[] expected) where TDiagnosticAnalyzer : DiagnosticAnalyzer, new()
{
VerifyDiagnostics(sources, LanguageNames.VisualBasic, new TDiagnosticAnalyzer(), expected);
}
public static void VerifyCSharpDiagnosticUsingAllAnalyzers(string source, params DiagnosticResult[] expected)
{
var analyzers = CreateAllAnalyzers();
var diagnostics = GetSortedDiagnostics(new[] { source }, LanguageNames.CSharp, analyzers);
VerifyDiagnosticResults(diagnostics, analyzers, expected);
}
/// <summary>
/// General method that gets a collection of actual diagnostics found in the source after the analyzer is run,
/// then verifies each of them.
/// </summary>
/// <param name="sources">An array of strings to create source documents from to run the analyzers on</param>
/// <param name="language">The language of the classes represented by the source strings</param>
/// <param name="analyzer">The analyzer to be run on the source code</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the sources</param>
private static void VerifyDiagnostics(string[] sources, string language, DiagnosticAnalyzer analyzer, params DiagnosticResult[] expected)
{
var diagnostics = GetSortedDiagnostics(sources, language, analyzer);
VerifyDiagnosticResults(diagnostics, analyzer, expected);
}
#endregion
#region Actual comparisons and verifications
/// <summary>
/// Checks each of the actual Diagnostics found and compares them with the corresponding DiagnosticResult in the array of expected results.
/// Diagnostics are considered equal only if the DiagnosticResultLocation, Id, Severity, and Message of the DiagnosticResult match the actual diagnostic.
/// </summary>
/// <param name="actualResults">The Diagnostics found by the compiler after running the analyzer on the source code</param>
/// <param name="analyzer">The analyzer that was being run on the sources</param>
/// <param name="expectedResults">Diagnostic Results that should have appeared in the code</param>
private static void VerifyDiagnosticResults(IEnumerable<Diagnostic> actualResults, DiagnosticAnalyzer analyzer, params DiagnosticResult[] expectedResults)
{
VerifyDiagnosticResults(actualResults, new[] { analyzer }, expectedResults);
}
private static void VerifyDiagnosticResults(IEnumerable<Diagnostic> actualResults, DiagnosticAnalyzer[] analyzers, params DiagnosticResult[] expectedResults)
{
int expectedCount = expectedResults.Length;
int actualCount = actualResults.Count();
if (expectedCount != actualCount)
{
string diagnosticsOutput = actualResults.Any() ? FormatDiagnostics(analyzers, actualResults.ToArray()) : " NONE.";
throw new AssertionFailedException(
string.Format("Mismatch between number of diagnostics returned, expected \"{0}\" actual \"{1}\"\r\n\r\nDiagnostics:\r\n{2}\r\n", expectedCount, actualCount, diagnosticsOutput));
}
for (int i = 0; i < expectedResults.Length; i++)
{
var actual = actualResults.ElementAt(i);
var expected = expectedResults[i];
if (expected.Line == -1 && expected.Column == -1)
{
if (actual.Location != Location.None)
{
throw new AssertionFailedException(
string.Format("Expected:\nA project diagnostic with No location\nActual:\n{0}",
FormatDiagnostics(analyzers, actual)));
}
}
else
{
VerifyDiagnosticLocation(analyzers, actual, actual.Location, expected.Locations.First());
var additionalLocations = actual.AdditionalLocations.ToArray();
if (additionalLocations.Length != expected.Locations.Length - 1)
{
throw new AssertionFailedException(
string.Format("Expected {0} additional locations but got {1} for Diagnostic:\r\n {2}\r\n",
expected.Locations.Length - 1, additionalLocations.Length,
FormatDiagnostics(analyzers, actual)));
}
for (int j = 0; j < additionalLocations.Length; ++j)
{
VerifyDiagnosticLocation(analyzers, actual, additionalLocations[j], expected.Locations[j + 1]);
}
}
if (actual.Id != expected.Id)
{
throw new AssertionFailedException(
string.Format("Expected diagnostic id to be \"{0}\" was \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
expected.Id, actual.Id, FormatDiagnostics(analyzers, actual)));
}
if (actual.Severity != expected.Severity)
{
throw new AssertionFailedException(
string.Format("Expected diagnostic severity to be \"{0}\" was \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
expected.Severity, actual.Severity, FormatDiagnostics(analyzers, actual)));
}
if (actual.GetMessage() != expected.Message)
{
throw new AssertionFailedException(
string.Format("Expected diagnostic message to be \"{0}\" was \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
expected.Message, actual.GetMessage(), FormatDiagnostics(analyzers, actual)));
}
}
}
        /// <summary>
        /// Helper method to VerifyDiagnosticResult that checks the location of a diagnostic and compares it with the location in the expected DiagnosticResult.
        /// </summary>
        /// <param name="analyzers">The analyzer that was being run on the sources</param>
        /// <param name="diagnostic">The diagnostic that was found in the code</param>
        /// <param name="actual">The Location of the Diagnostic found in the code</param>
        /// <param name="expected">The DiagnosticResultLocation that should have been found</param>
        private static void VerifyDiagnosticLocation(DiagnosticAnalyzer[] analyzers, Diagnostic diagnostic, Location actual, DiagnosticResultLocation expected)
        {
            var actualSpan = actual.GetLineSpan();

            // Paths either match exactly, or differ only because generated test
            // documents are named "Test0.<ext>" while expectations may use "Test.".
            (actualSpan.Path == expected.Path || (actualSpan.Path != null && actualSpan.Path.Contains("Test0.") && expected.Path.Contains("Test.")))
                .Should().BeTrue(string.Format("Expected diagnostic to be in file \"{0}\" was actually in file \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                    expected.Path, actualSpan.Path, FormatDiagnostics(analyzers, diagnostic)));

            var actualLinePosition = actualSpan.StartLinePosition;

            // Only check line position if there is an actual line in the real diagnostic
            // (positions are 0-based here; a 0 means "no real line", and it also skips
            // verification for diagnostics reported on the very first line).
            if (actualLinePosition.Line > 0)
            {
                if (actualLinePosition.Line + 1 != expected.Line)
                {
                    throw new AssertionFailedException(string.Format("Expected diagnostic to be on line \"{0}\" was actually on line \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                        expected.Line, actualLinePosition.Line + 1, FormatDiagnostics(analyzers, diagnostic)));
                }
            }

            // Only check column position if there is an actual column position in the real diagnostic
            // (same 0-based convention as the line check above).
            if (actualLinePosition.Character > 0)
            {
                if (actualLinePosition.Character + 1 != expected.Column)
                {
                    throw new AssertionFailedException(
                        string.Format("Expected diagnostic to start at column \"{0}\" was actually at column \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                            expected.Column, actualLinePosition.Character + 1, FormatDiagnostics(analyzers, diagnostic)));
                }
            }
        }
#endregion
#region Formatting Diagnostics
/// <summary>
/// Helper method to format a Diagnostic into an easily readable string
/// </summary>
/// <param name="analyzers">The analyzer that this verifier tests</param>
/// <param name="diagnostics">The Diagnostics to be formatted</param>
/// <returns>The Diagnostics formatted as a string</returns>
private static string FormatDiagnostics(DiagnosticAnalyzer[] analyzers, params Diagnostic[] diagnostics)
{
var builder = new StringBuilder();
for (int i = 0; i < diagnostics.Length; ++i)
{
builder.AppendLine("// " + diagnostics[i]);
foreach (var analyzer in analyzers)
{
var analyzerType = analyzer.GetType();
foreach (var rule in analyzer.SupportedDiagnostics)
{
if (rule != null && rule.Id == diagnostics[i].Id)
{
var location = diagnostics[i].Location;
if (location == Location.None)
{
builder.AppendFormat("GetGlobalResult({0}.{1})", analyzerType.Name, rule.Id);
}
else
{
location.IsInSource.Should().BeTrue($"Test base does not currently handle diagnostics in metadata locations. Diagnostic in metadata: {diagnostics[i]}\r\n");
string resultMethodName = diagnostics[i].Location.SourceTree.FilePath.EndsWith(".cs") ? "GetCSharpResultAt" : "GetBasicResultAt";
var linePosition = diagnostics[i].Location.GetLineSpan().StartLinePosition;
builder.AppendFormat("{0}({1}, {2}, {3}.{4})",
resultMethodName,
linePosition.Line + 1,
linePosition.Character + 1,
analyzerType.Name,
rule.Id);
}
if (i != diagnostics.Length - 1)
{
builder.Append(',');
}
builder.AppendLine();
break;
}
}
}
}
return builder.ToString();
}
#endregion
private static DiagnosticAnalyzer[] CreateAllAnalyzers()
{
var assembly = typeof(Constants).Assembly;
var analyzersTypes = assembly.GetTypes()
.Where(type => !type.IsAbstract && typeof(DiagnosticAnalyzer).IsAssignableFrom(type));
var analyzers = analyzersTypes.Select(type => (DiagnosticAnalyzer)Activator.CreateInstance(type));
return analyzers.ToArray();
}
}
}
| 53.171472 | 205 | 0.610046 | [
"MIT"
] | chvollm/fluentassertions.analyzers | src/FluentAssertions.Analyzers.Tests/DiagnosticVerifier.cs | 35,042 | C# |
namespace GameWasm.Webassembly.Instruction
{
    // Represents the WebAssembly "f32.demote_f64" numeric instruction (conversion of
    // a 64-bit float operand to a 32-bit float). This class only registers the opcode;
    // any shared decode/dispatch behavior presumably lives in the Instruction base
    // class — confirm there.
    internal class F32demoteF64 : Instruction
    {
        // NOTE(review): the meaning of the second base-constructor argument (true)
        // is not visible here; verify against the Instruction base class.
        public F32demoteF64(Parser parser) : base(parser, true)
        {
        }
    }
} | 21 | 63 | 0.62963 | [
"MIT"
] | rockyjvec/GameWasm | GameWasm/WebAssembly/Instruction/F32demoteF64.cs | 191 | C# |
//
// SocketListener.cs
//
// Author:
// Martin Baulig <mabaul@microsoft.com>
//
// Copyright (c) 2018 Xamarin Inc. (http://www.xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
namespace Xamarin.AsyncTests.Remoting
{
public class SocketListener : IDisposable
{
Socket socket;
Socket accepted;
NetworkStream stream;
EndPoint endpoint;
TaskCompletionSource<Socket> tcs;
public EndPoint LocalEndPoint => endpoint;
public Socket AcceptedSocket => accepted;
public NetworkStream Stream => stream;
public SocketListener ()
{
socket = new Socket (AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
}
static int nextPort = 11000;
public void Start (EndPoint requestedEndPoint)
{
if (requestedEndPoint != null)
endpoint = requestedEndPoint;
else {
var port = Interlocked.Increment (ref nextPort);
endpoint = new IPEndPoint (IPAddress.Loopback, port);
}
socket.Bind (endpoint);
socket.Listen (1);
}
public Task<Socket> AcceptSocketAsync (CancellationToken cancellationToken)
{
var oldTcs = Interlocked.CompareExchange (ref tcs, new TaskCompletionSource<Socket> (), null);
if (oldTcs != null)
return oldTcs.Task;
if (cancellationToken.IsCancellationRequested) {
tcs.SetCanceled ();
return tcs.Task;
}
var args = new SocketAsyncEventArgs ();
args.Completed += (sender, e) => {
if (cancellationToken.IsCancellationRequested) {
tcs.TrySetCanceled ();
} else if (args.SocketError != SocketError.Success) {
var error = new IOException ($"AcceptAsync() failed: {args.SocketError})");
tcs.TrySetException (error);
} else {
accepted = args.AcceptSocket;
stream = new NetworkStream (accepted);
tcs.TrySetResult (args.AcceptSocket);
}
args.Dispose ();
};
try {
if (!socket.AcceptAsync (args))
throw new InvalidOperationException ();
} catch (Exception ex) {
tcs.TrySetException (ex);
}
return tcs.Task;
}
int disposed;
protected virtual void Dispose (bool disposing)
{
if (Interlocked.CompareExchange (ref disposed, 1, 0) != 0)
return;
if (accepted != null) {
try {
accepted.Dispose ();
} catch {
;
}
accepted = null;
}
if (stream != null) {
try {
stream.Dispose ();
} catch {
;
}
stream = null;
}
if (socket != null) {
try {
socket.Dispose ();
} catch {
;
}
socket = null;
}
}
public void Dispose ()
{
Dispose (true);
}
}
}
| 25.410959 | 97 | 0.68248 | [
"MIT"
] | xamarin/web-tests | Xamarin.AsyncTests.Framework/Xamarin.AsyncTests.Remoting/SocketListener.cs | 3,712 | C# |
using System;
using UnityEditor;
using UnityEngine;
namespace Juce.Feedbacks
{
internal class ColliderSetEnabledDocumentation : IFeedbackDocumentation
{
public Type FeedbackType => typeof(ColliderSetEnabledFeedback);
public void DrawDocumentation()
{
GUILayout.Label("Enables or disables the target Collider", EditorStyles.wordWrappedLabel);
EditorGUILayout.Space(2);
using (new EditorGUILayout.VerticalScope(EditorStyles.helpBox))
{
GUILayout.Label("- Target: Collider that is going to be enabled/disabled", EditorStyles.wordWrappedLabel);
}
using (new EditorGUILayout.VerticalScope(EditorStyles.helpBox))
{
GUILayout.Label("- Set Enabled: enables or disables the Collider", EditorStyles.wordWrappedLabel);
}
using (new EditorGUILayout.VerticalScope(EditorStyles.helpBox))
{
GenericsDocumentation.DelayDocumentation();
}
}
}
} | 32.060606 | 122 | 0.641777 | [
"MIT"
] | Guillemsc/Juce-Feedbacks | Editor/Documentation/Collider/ColliderSetEnabledDocumentation.cs | 1,060 | C# |
#if USE_UNI_LUA
using LuaAPI = UniLua.Lua;
using RealStatePtr = UniLua.ILuaState;
using LuaCSFunction = UniLua.CSharpFunctionDelegate;
#else
using LuaAPI = XLua.LuaDLL.Lua;
using RealStatePtr = System.IntPtr;
using LuaCSFunction = XLua.LuaDLL.lua_CSFunction;
#endif
using XLua;
using System.Collections.Generic;
namespace XLua.CSObjectWrap
{
using Utils = XLua.Utils;
    // NOTE(review): this is xLua code-generator output (a wrapper that registers the
    // FairyGUIGComboBoxWrap binding class itself with the Lua VM). Do not hand-edit
    // logic here; regenerate via the xLua generator instead.
    public class XLuaCSObjectWrapFairyGUIGComboBoxWrapWrap
    {
        // Registers the wrapped type's metatable and class table with the Lua state:
        // no instance members, one constructor, and a static "__Register" function.
        public static void __Register(RealStatePtr L)
        {
			ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
			System.Type type = typeof(XLua.CSObjectWrap.FairyGUIGComboBoxWrap);
			Utils.BeginObjectRegister(type, L, translator, 0, 0, 0, 0);

			Utils.EndObjectRegister(type, L, translator, null, null,
			    null, null, null);

		    Utils.BeginClassRegister(type, L, __CreateInstance, 2, 0, 0);
            Utils.RegisterFunc(L, Utils.CLS_IDX, "__Register", _m___Register_xlua_st_);

			Utils.EndClassRegister(type, L, translator);
        }

        // Lua-callable constructor bridge: expects exactly the class table on the
        // stack (gettop == 1) and pushes a new wrapper instance.
        [MonoPInvokeCallbackAttribute(typeof(LuaCSFunction))]
        static int __CreateInstance(RealStatePtr L)
        {
            try {
                ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
				if(LuaAPI.lua_gettop(L) == 1)
				{

					XLua.CSObjectWrap.FairyGUIGComboBoxWrap gen_ret = new XLua.CSObjectWrap.FairyGUIGComboBoxWrap();
					translator.Push(L, gen_ret);

					return 1;
				}

			}
			catch(System.Exception gen_e) {
				return LuaAPI.luaL_error(L, "c# exception:" + gen_e);
			}
            // Wrong argument count: raise a Lua error rather than throwing across the
            // native boundary.
            return LuaAPI.luaL_error(L, "invalid arguments to XLua.CSObjectWrap.FairyGUIGComboBoxWrap constructor!");
        }

        // Lua-callable static bridge for FairyGUIGComboBoxWrap.__Register(IntPtr).
        [MonoPInvokeCallbackAttribute(typeof(LuaCSFunction))]
        static int _m___Register_xlua_st_(RealStatePtr L)
        {
			try {

                {
                    System.IntPtr _L = LuaAPI.lua_touserdata(L, 1);

                    XLua.CSObjectWrap.FairyGUIGComboBoxWrap.__Register( _L );

                    return 0;
                }

            } catch(System.Exception gen_e) {
                return LuaAPI.luaL_error(L, "c# exception:" + gen_e);
            }
        }
    }
}
| 23.463636 | 117 | 0.554049 | [
"MIT"
] | zxsean/DCET | Unity/Assets/Model/XLua/Gen/XLuaCSObjectWrapFairyGUIGComboBoxWrapWrap.cs | 2,583 | C# |
namespace ProgramRecipe
{
    // Designer half of the FrmRegisterPatient form. Everything inside
    // InitializeComponent() is generated by the WinForms designer — edit the form
    // through the designer, not by hand.
    partial class FrmRegisterPatient
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            this.lblName = new System.Windows.Forms.Label();
            this.lblSurname = new System.Windows.Forms.Label();
            this.lblID = new System.Windows.Forms.Label();
            this.btnSave = new System.Windows.Forms.Button();
            this.lblAge = new System.Windows.Forms.Label();
            this.txtName = new ValidBox.ValidTextBox();
            this.txtSurname = new ValidBox.ValidTextBox();
            this.txtCard = new ValidBox.ValidTextBox();
            this.txtAge = new ValidBox.ValidTextBox();
            this.ctuGridPatient = new ProgramRecipe.CtuGridPatient();
            this.SuspendLayout();
            //
            // lblName
            //
            this.lblName.AutoSize = true;
            this.lblName.Location = new System.Drawing.Point(12, 9);
            this.lblName.Name = "lblName";
            this.lblName.Size = new System.Drawing.Size(47, 13);
            this.lblName.TabIndex = 1;
            this.lblName.Text = "Nombre:";
            //
            // lblSurname
            //
            this.lblSurname.AutoSize = true;
            this.lblSurname.Location = new System.Drawing.Point(12, 69);
            this.lblSurname.Name = "lblSurname";
            this.lblSurname.Size = new System.Drawing.Size(47, 13);
            this.lblSurname.TabIndex = 3;
            this.lblSurname.Text = "Apellido:";
            //
            // lblID
            //
            this.lblID.AutoSize = true;
            this.lblID.Location = new System.Drawing.Point(12, 127);
            this.lblID.Name = "lblID";
            this.lblID.Size = new System.Drawing.Size(43, 13);
            this.lblID.TabIndex = 5;
            this.lblID.Text = "Cedula:";
            //
            // btnSave
            //
            this.btnSave.Location = new System.Drawing.Point(286, 202);
            this.btnSave.Name = "btnSave";
            this.btnSave.Size = new System.Drawing.Size(79, 26);
            this.btnSave.TabIndex = 7;
            this.btnSave.Text = "Guardar";
            this.btnSave.UseVisualStyleBackColor = true;
            this.btnSave.Click += new System.EventHandler(this.btnSave_Click);
            //
            // lblAge
            //
            this.lblAge.AutoSize = true;
            this.lblAge.Location = new System.Drawing.Point(12, 178);
            this.lblAge.Name = "lblAge";
            this.lblAge.Size = new System.Drawing.Size(35, 13);
            this.lblAge.TabIndex = 11;
            this.lblAge.Text = "Edad:";
            //
            // txtName
            //
            this.txtName.Location = new System.Drawing.Point(15, 26);
            this.txtName.MaxLength = 24;
            this.txtName.Name = "txtName";
            this.txtName.Size = new System.Drawing.Size(100, 20);
            this.txtName.TabIndex = 13;
            this.txtName.Type = ValidBox.TypeValid.Letter;
            //
            // txtSurname
            //
            this.txtSurname.Location = new System.Drawing.Point(15, 86);
            this.txtSurname.MaxLength = 24;
            this.txtSurname.Name = "txtSurname";
            this.txtSurname.Size = new System.Drawing.Size(100, 20);
            this.txtSurname.TabIndex = 14;
            this.txtSurname.Type = ValidBox.TypeValid.Letter;
            //
            // txtCard
            //
            this.txtCard.Location = new System.Drawing.Point(15, 141);
            this.txtCard.MaxLength = 10;
            this.txtCard.Name = "txtCard";
            this.txtCard.Size = new System.Drawing.Size(100, 20);
            this.txtCard.TabIndex = 15;
            this.txtCard.Type = ValidBox.TypeValid.Numeric;
            this.txtCard.Validating += new System.ComponentModel.CancelEventHandler(this.txtCard_Validating);
            //
            // txtAge
            //
            this.txtAge.Location = new System.Drawing.Point(15, 195);
            this.txtAge.MaxLength = 3;
            this.txtAge.Name = "txtAge";
            this.txtAge.Size = new System.Drawing.Size(100, 20);
            this.txtAge.TabIndex = 16;
            this.txtAge.Type = ValidBox.TypeValid.Numeric;
            this.txtAge.Validating += new System.ComponentModel.CancelEventHandler(this.txtAge_Validating);
            //
            // ctuGridPatient
            //
            this.ctuGridPatient.Location = new System.Drawing.Point(130, -9);
            this.ctuGridPatient.Name = "ctuGridPatient";
            this.ctuGridPatient.Size = new System.Drawing.Size(396, 200);
            this.ctuGridPatient.TabIndex = 17;
            //
            // FrmRegisterPatient
            //
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.AutoSizeMode = System.Windows.Forms.AutoSizeMode.GrowAndShrink;
            this.ClientSize = new System.Drawing.Size(578, 239);
            this.Controls.Add(this.ctuGridPatient);
            this.Controls.Add(this.txtName);
            this.Controls.Add(this.txtSurname);
            this.Controls.Add(this.txtCard);
            this.Controls.Add(this.txtAge);
            this.Controls.Add(this.lblAge);
            this.Controls.Add(this.btnSave);
            this.Controls.Add(this.lblID);
            this.Controls.Add(this.lblSurname);
            this.Controls.Add(this.lblName);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.MaximizeBox = false;
            this.Name = "FrmRegisterPatient";
            this.Text = "Registro del Paciente";
            this.Activated += new System.EventHandler(this.FrmRegisterPatient_Activated);
            this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.FrmRegisterPatient_FormClosing);
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Designer-managed control fields; wired up in InitializeComponent().
        private System.Windows.Forms.Label lblName;
        private System.Windows.Forms.Label lblSurname;
        private System.Windows.Forms.Label lblID;
        private System.Windows.Forms.Button btnSave;
        private System.Windows.Forms.Label lblAge;
        private ValidBox.ValidTextBox txtName;
        private ValidBox.ValidTextBox txtSurname;
        private ValidBox.ValidTextBox txtCard;
        private ValidBox.ValidTextBox txtAge;
        private CtuGridPatient ctuGridPatient;
    }
}
| 42.204545 | 118 | 0.570948 | [
"MIT"
] | MrDave1999/ProgramRecipe | visual/FrmRegisterPatient.Designer.cs | 7,428 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Telerik.JustDecompiler.Ast;
using Telerik.JustDecompiler.Ast.Expressions;
using Mono.Cecil;
using Mono.Cecil.Extensions;
namespace Telerik.JustDecompiler.Decompiler
{
    /// <summary>
    /// This fixer is responsible for changing expressions, based on their usage. For instance, literal expression '1' should be changed to 'true', if it's used as boolean.
    /// This also handles casts to ushort, that should be casts to char (in IL both types are unsigned 2-byte integer).
    /// </summary>
    class UsageBasedExpressionFixer : BaseCodeTransformer
    {
        private readonly MethodSpecificContext methodContext;
        private readonly TypeSystem currentTypeSystem;
        // Cached flags for the decompiled method's return type, used when fixing
        // literals inside return expressions.
        private readonly bool isBoolReturnType;
        private readonly bool isCharReturnType;

        public UsageBasedExpressionFixer(MethodSpecificContext methodContext)
        {
            this.methodContext = methodContext;
            TypeReference returnType = methodContext.Method.ReturnType;
            this.currentTypeSystem = methodContext.Method.Module.TypeSystem;
            this.isBoolReturnType = returnType.FullName == currentTypeSystem.Boolean.FullName;
            this.isCharReturnType = returnType.FullName == currentTypeSystem.Char.FullName;
        }

        // Entry point: visits every expression of every block in the method,
        // rewriting literals/casts in place based on how they are used.
        public void FixLiterals()
        {
            foreach (IList<Expression> expressions in methodContext.Expressions.BlockExpressions.Values)
            {
                foreach (Expression expression in expressions)
                {
                    Visit(expression);
                }
            }
        }

        public override ICodeNode VisitMethodInvocationExpression(MethodInvocationExpression node)
        {
            base.VisitMethodInvocationExpression(node);
            FixArguments(node.MethodExpression.Method, node.Arguments);
            return node;
        }

        public override ICodeNode VisitObjectCreationExpression(ObjectCreationExpression node)
        {
            base.VisitObjectCreationExpression(node);
            FixArguments(node.Constructor, node.Arguments);
            return node;
        }

        public override ICodeNode VisitThisCtorExpression(ThisCtorExpression node)
        {
            base.VisitThisCtorExpression(node);
            FixArguments(node.MethodExpression.Method, node.Arguments);
            return node;
        }

        public override ICodeNode VisitBaseCtorExpression(BaseCtorExpression node)
        {
            base.VisitBaseCtorExpression(node);
            FixArguments(node.MethodExpression.Method, node.Arguments);
            return node;
        }

        // Rewrites each argument whose declared parameter type is bool or char.
        // NOTE(review): arguments are indexed into methodRef.Parameters positionally;
        // this assumes both collections have the same length — confirm for vararg/calli
        // call sites.
        private void FixArguments(MethodReference methodRef, ExpressionCollection arguments)
        {
            if (methodRef == null)
            {
                return;
            }
            for (int i = 0; i < arguments.Count; i++)
            {
                TypeReference parameterType = methodRef.Parameters[i].ResolveParameterType(methodRef);
                LiteralExpression literalArgument = arguments[i] as LiteralExpression;
                if (literalArgument != null)
                {
                    HandleLiteralArgument(parameterType, literalArgument);
                }
                ExplicitCastExpression castArgument = arguments[i] as ExplicitCastExpression;
                if (castArgument != null)
                {
                    HandleCastArgument(parameterType, castArgument);
                }
            }
        }

        // A cast to ushort passed where a char is expected really is a char cast
        // (both are unsigned 2-byte integers in IL), so retarget the cast.
        private void HandleCastArgument(TypeReference parameterType, ExplicitCastExpression castArgument)
        {
            if (parameterType.FullName == currentTypeSystem.Char.FullName && castArgument.ExpressionType.FullName == currentTypeSystem.UInt16.FullName)
            {
                castArgument.TargetType = currentTypeSystem.Char;
            }
        }

        // Converts an integer literal to its bool/char representation when the
        // receiving parameter declares one of those types.
        private void HandleLiteralArgument(TypeReference parameterType, LiteralExpression literalArgument)
        {
            if (parameterType.FullName == currentTypeSystem.Boolean.FullName)
            {
                FixBooleanLiteral(literalArgument);
            }
            else if (parameterType.FullName == currentTypeSystem.Char.FullName)
            {
                FixCharLiteral(literalArgument);
            }
        }

        public override ICodeNode VisitReturnExpression(ReturnExpression node)
        {
            base.VisitReturnExpression(node);
            // Returned literals take the method's declared return type into account.
            LiteralExpression literalValue = node.Value as LiteralExpression;
            if (literalValue != null)
            {
                if (isBoolReturnType)
                {
                    FixBooleanLiteral(literalValue);
                }
                else if (isCharReturnType)
                {
                    FixCharLiteral(literalValue);
                }
            }
            // A returned (ushort) cast in a char-returning method is really a char cast.
            ExplicitCastExpression castValue = node.Value as ExplicitCastExpression;
            if (castValue != null)
            {
                if (castValue.ExpressionType.FullName != methodContext.Method.ReturnType.FullName)
                {
                    if (isCharReturnType && castValue.ExpressionType.FullName == currentTypeSystem.UInt16.FullName)
                    {
                        castValue.TargetType = currentTypeSystem.Char;
                    }
                }
            }
            return node;
        }

        public override ICodeNode VisitBoxExpression(BoxExpression node)
        {
            base.VisitBoxExpression(node);
            // A literal boxed as bool becomes a plain true/false literal; the box
            // node itself is dropped and its instructions re-attached to the literal.
            if (node.BoxedExpression.CodeNodeType == CodeNodeType.LiteralExpression && node.BoxedAs.FullName == currentTypeSystem.Boolean.FullName)
            {
                FixBooleanLiteral(node.BoxedExpression as LiteralExpression);
                return node.BoxedExpression.CloneAndAttachInstructions(node.MappedInstructions);
            }
            if (node.BoxedExpression.CodeNodeType == CodeNodeType.ExplicitCastExpression && ((ExplicitCastExpression)node.BoxedExpression).Expression.CodeNodeType == CodeNodeType.ExplicitCastExpression)
            {
                // double cast in a boxed expression;
                ExplicitCastExpression outerCast = node.BoxedExpression as ExplicitCastExpression;
                ExplicitCastExpression innerCast = outerCast.Expression as ExplicitCastExpression;
                if (outerCast.TargetType.FullName == currentTypeSystem.Char.FullName &&
                    innerCast.TargetType.FullName == currentTypeSystem.UInt16.FullName)
                {
                    // Remove the outer cast, as it is produced by the box expression and doesn't have any instructions mapped.
                    // The inner cast contains the instruction, converting the stack value to 2-byte integer (which at this point is known to be char, not ushort).
                    innerCast.TargetType = currentTypeSystem.Char;
                    node.BoxedExpression = innerCast;
                }
            }
            return node;
        }

        public override ICodeNode VisitExplicitCastExpression(ExplicitCastExpression node)
        {
            // A literal cast to bool/char collapses into a converted literal; the
            // cast's instructions are re-attached so the mapping stays intact.
            if (node.Expression.CodeNodeType == CodeNodeType.LiteralExpression && node.TargetType.FullName == currentTypeSystem.Boolean.FullName)
            {
                FixBooleanLiteral(node.Expression as LiteralExpression);
                return node.Expression.CloneAndAttachInstructions(node.MappedInstructions);
            }
            if (node.Expression.CodeNodeType == CodeNodeType.LiteralExpression && node.TargetType.FullName == currentTypeSystem.Char.FullName)
            {
                FixCharLiteral(node.Expression as LiteralExpression);
                return node.Expression.CloneAndAttachInstructions(node.MappedInstructions);
            }
            return base.VisitExplicitCastExpression(node);
        }

        // Replaces the literal's boxed value with its bool equivalent (0 -> false).
        private void FixBooleanLiteral(LiteralExpression literal)
        {
            literal.Value = Convert.ToBoolean(literal.Value);
        }

        // Replaces the literal's boxed value with its char equivalent.
        private void FixCharLiteral(LiteralExpression literal)
        {
            literal.Value = Convert.ToChar(literal.Value);
        }
    }
}
| 37.248804 | 194 | 0.659602 | [
"ECL-2.0",
"Apache-2.0"
] | Bebere/JustDecompileEngine | Cecil.Decompiler/Decompiler/UsageBasedExpressionFixer.cs | 7,787 | C# |
using System.Collections.Generic;
using System.Collections.ObjectModel;
using LiveChartsCore;
using LiveChartsCore.SkiaSharpView;
namespace ViewModelsSamples.VisualTest.DataTemplate;
/// <summary>
/// Sample view model exposing three independent single-series models, used to
/// exercise chart rendering through a DataTemplate.
/// </summary>
public class ViewModel
{
    public IEnumerable<IEnumerable<ISeries>> Models { get; set; } = CreateModels();

    // Builds three separate series collections, each holding one line series over
    // the same sample values.
    private static List<IEnumerable<ISeries>> CreateModels()
    {
        var models = new List<IEnumerable<ISeries>>();

        for (var i = 0; i < 3; i++)
        {
            models.Add(new ObservableCollection<ISeries>
            {
                new LineSeries<double>
                {
                    Values = new ObservableCollection<double> { 2, 5, 4, -2, 4, -3, 5 }
                }
            });
        }

        return models;
    }
}
| 31.611111 | 91 | 0.462214 | [
"MIT"
] | Live-Charts/LiveCharts2 | samples/ViewModelsSamples/VisualTest/DataTemplate/ViewModel.cs | 1,140 | C# |
/*
* Copyright (c) 2018 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
using System.Collections.Generic;
using Algolia.Search.Models.Enums;
using Algolia.Search.Transport;
namespace Algolia.Search.Clients
{
/// <summary>
/// Insights client configuration
/// </summary>
public sealed class InsightsConfig : AlgoliaConfig
{
/// <summary>
/// The configuration of the insights client
/// A client should have it's own configuration ie on configuration per client instance
/// </summary>
/// <param name="applicationId">Your application ID</param>
/// <param name="apiKey">Your API Key</param>
/// <param name="region">Insigts api's region</param>
public InsightsConfig(string applicationId, string apiKey, string region = "us") : base(applicationId, apiKey)
{
DefaultHosts = new List<StatefulHost>
{
new StatefulHost {Url = $"insights.{region}.algolia.io", Accept = CallType.Read | CallType.Write}
};
Compression = CompressionType.NONE;
}
}
}
| 40.735849 | 118 | 0.701251 | [
"MIT"
] | DrLeh/algoliasearch-client-csharp | src/Algolia.Search/Clients/InsightsConfig.cs | 2,159 | C# |
namespace Bot.ChuckNorris.DataAccess
{
    /// <summary>
    /// Data-access model for a single Chuck Norris fact record.
    /// </summary>
    public class ChuckNorrisModel
    {
        // Primary identifier of the fact.
        public int Id { get; set; }
        // Text of the fact; presumably the raw stored form, with CleanText
        // holding a sanitized version — confirm against the data source.
        public string FactDescription { get; set; }
        // Cleaned-up text of the fact.
        public string CleanText { get; set; }
        // Date associated with the fact, stored as a string (not parsed here).
        public string FactDate { get; set; }
        // NOTE(review): Vote and Points semantics are not visible from this
        // class — presumably user voting data; confirm against callers.
        public int Vote { get; set; }
        public int Points { get; set; }
    }
} | 20.529412 | 51 | 0.581662 | [
"MIT"
] | christopheMeudec/Bot.ChuckNorris | Bot.ChuckNorris.DataAccess/ChuckNorris/ChuckNorrisModel.cs | 351 | C# |
#if !NETFX_CORE && !(ANDROID || IOS) && !NETSTANDARD2_0
//-----------------------------------------------------------------------
// <copyright file="ContextManager.cs" company="Marimer LLC">
// Copyright (c) Marimer LLC. All rights reserved.
// Website: http://www.lhotka.net/cslanet/
// </copyright>
// <summary>Provides an automated way to reuse </summary>
//-----------------------------------------------------------------------
using System;
using Csla.Configuration;
using System.Data.Linq;
using Csla.Properties;
namespace Csla.Data
{
/// <summary>
/// Provides an automated way to reuse
/// LINQ data context objects within the context
/// of a single data portal operation.
/// </summary>
/// <typeparam name="C">
/// Type of database
/// LINQ data context objects object to use.
/// </typeparam>
/// <remarks>
/// This type stores the LINQ data context object
/// in <see cref="Csla.ApplicationContext.LocalContext" />
/// and uses reference counting through
/// <see cref="IDisposable" /> to keep the data context object
/// open for reuse by child objects, and to automatically
/// dispose the object when the last consumer
/// has called Dispose."
/// </remarks>
public class ContextManager<C> : IDisposable where C : DataContext
{
private static object _lock = new object();
private C _context;
private string _connectionString;
private string _label;
/// <summary>
/// Gets the ContextManager object for the
/// specified database.
/// </summary>
/// <param name="database">
/// Database name as shown in the config file.
/// </param>
public static ContextManager<C> GetManager(string database)
{
return GetManager(database, true);
}
/// <summary>
/// Gets the ContextManager object for the
/// specified database.
/// </summary>
/// <param name="database">
/// Database name as shown in the config file.
/// </param>
/// <param name="label">Label for this context.</param>
public static ContextManager<C> GetManager(string database, string label)
{
return GetManager(database, true, label);
}
/// <summary>
/// Gets the ContextManager object for the
/// specified database.
/// </summary>
/// <param name="database">
/// The database name or connection string.
/// </param>
/// <param name="isDatabaseName">
/// True to indicate that the connection string
/// should be retrieved from the config file. If
/// False, the database parameter is directly
/// used as a connection string.
/// </param>
/// <returns>ContextManager object for the name.</returns>
public static ContextManager<C> GetManager(string database, bool isDatabaseName)
{
return GetManager(database, isDatabaseName, "default");
}
/// <summary>
/// Gets the ContextManager object for the
/// specified database.
/// </summary>
/// <param name="database">
/// The database name or connection string.
/// </param>
/// <param name="isDatabaseName">
/// True to indicate that the connection string
/// should be retrieved from the config file. If
/// False, the database parameter is directly
/// used as a connection string.
/// </param>
/// <param name="label">Label for this context.</param>
/// <returns>ContextManager object for the name.</returns>
public static ContextManager<C> GetManager(string database, bool isDatabaseName, string label)
{
if (isDatabaseName)
{
var connection = ConfigurationManager.ConnectionStrings[database];
if (connection == null)
throw new System.Configuration.ConfigurationErrorsException(String.Format(Resources.DatabaseNameNotFound, database));
var conn = ConfigurationManager.ConnectionStrings[database].ConnectionString;
if (string.IsNullOrEmpty(conn))
throw new System.Configuration.ConfigurationErrorsException(String.Format(Resources.DatabaseNameNotFound, database));
database = conn;
}
lock (_lock)
{
var contextLabel = GetContextName(database, label);
ContextManager<C> mgr = null;
if (ApplicationContext.LocalContext.Contains(contextLabel))
{
mgr = (ContextManager<C>)(ApplicationContext.LocalContext[contextLabel]);
}
else
{
mgr = new ContextManager<C>(database, label);
ApplicationContext.LocalContext[contextLabel] = mgr;
}
mgr.AddRef();
return mgr;
}
}
private ContextManager(string connectionString, string label)
{
_label = label;
_connectionString = connectionString;
_context = (C)(Activator.CreateInstance(typeof(C), connectionString));
}
private static string GetContextName(string connectionString, string label)
{
return "__ctx:" + label + "-" + connectionString;
}
/// <summary>
/// Gets the LINQ data context object.
/// </summary>
public C DataContext
{
get
{
return _context;
}
}
#region Reference counting
private int _refCount;
/// <summary>
/// Gets the current reference count for this
/// object.
/// </summary>
public int RefCount
{
get { return _refCount; }
}
private void AddRef()
{
_refCount += 1;
}
private void DeRef()
{
lock (_lock)
{
_refCount -= 1;
if (_refCount == 0)
{
_context.Dispose();
ApplicationContext.LocalContext.Remove(GetContextName(_connectionString, _label));
}
}
}
#endregion
#region IDisposable
/// <summary>
/// Dispose object, dereferencing or
/// disposing the context it is
/// managing.
/// </summary>
public void Dispose()
{
DeRef();
}
#endregion
}
}
#endif | 28.507177 | 127 | 0.614132 | [
"MIT"
] | ronnymgm/csla-light | Source/Csla.Shared/Data/ContextManager.cs | 5,960 | C# |
using Microsoft.AspNetCore.Hosting;
using Microsoft.EntityFrameworkCore;
using System.IO;
using System.Threading.Tasks;
using tcs_service.Models;
using tcs_service.Repos.Interfaces;
namespace tcs_service.EF
{
public class DbInitializer
{
public static void InitializeData(TCSContext context, IUserRepo userRepo, IHostingEnvironment env)
{
context.Database.Migrate();
if (!env.IsProduction())
{
ClearData(context);
SeedData(context);
}
Task.Run(() => SeedAdmin(context, userRepo)).Wait();
}
private static void ClearData(TCSContext context)
{
// All Identifiers in Postgres must be wrapped
// in double quotes to preserve capitalization
context.Database.ExecuteSqlCommand("DELETE FROM \"ClassTours\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"SessionReasons\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"SessionClasses\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"Sessions\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"Classes\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"Departments\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"Reasons\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"People\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"Semesters\"");
context.Database.ExecuteSqlCommand("DELETE FROM \"Users\"");
}
private async static Task SeedAdmin(TCSContext context, IUserRepo userRepo)
{
if (await context.Users.AnyAsync()) return;
var user = new User()
{
FirstName = "Change",
LastName = "Username",
Username = "tcs",
};
await userRepo.Create(user, "Develop@90");
}
private static void SeedData(TCSContext context)
{
string classTours = File.ReadAllText(@"./SampleData/dbo.ClassTours.data.sql");
string classes = File.ReadAllText(@"./SampleData/dbo.Classes.data.sql");
string departments = File.ReadAllText(@"./SampleData/dbo.Departments.data.sql");
string people = File.ReadAllText(@"./SampleData/dbo.People.data.sql");
string reasons = File.ReadAllText(@"./SampleData/dbo.Reasons.data.sql");
string semesters = File.ReadAllText(@"./SampleData/dbo.Semesters.data.sql");
string sessions = File.ReadAllText(@"./SampleData/dbo.Sessions.data.sql");
string sessionReasons = File.ReadAllText(@"./SampleData/dbo.SessionReasons.data.sql");
string sessionClasses = File.ReadAllText(@"./SampleData/dbo.SessionClasses.data.sql");
context.Database.ExecuteSqlCommand(classTours);
context.Database.ExecuteSqlCommand(departments);
context.Database.ExecuteSqlCommand(classes);
context.Database.ExecuteSqlCommand(people);
context.Database.ExecuteSqlCommand(reasons);
context.Database.ExecuteSqlCommand(semesters);
context.Database.ExecuteSqlCommand(sessions);
context.Database.ExecuteSqlCommand(sessionReasons);
context.Database.ExecuteSqlCommand(sessionClasses);
}
}
}
| 44.855263 | 106 | 0.63479 | [
"MIT"
] | a2937/wvup-tcs | backend/tcs-service/EF/DbInitializer.cs | 3,411 | C# |
using UnityEngine;
using UnityEngine.UI;
namespace ModIO.UI
{
/// <summary>A wrapper for the Unity UI Toggle component to allow it to be used as a
/// StateToggleDisplay.</summary>
[RequireComponent(typeof(Toggle))]
public class StateToggle : StateToggleDisplay
{
/// <summary>Pass-through to the Toggle sibling component.</summary>
public override bool isOn
{
get {
return this.gameObject.GetComponent<Toggle>().isOn;
}
set {
this.gameObject.GetComponent<Toggle>().SetIsOnWithoutNotify(value);
}
}
}
}
| 27.826087 | 88 | 0.601563 | [
"MIT"
] | DBolical/modioUNITY | Runtime/UI/Utility/StateToggle.cs | 640 | C# |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
    /// <summary>
    /// The interface IWorkbookTableItemAtRequest.
    /// </summary>
    public partial interface IWorkbookTableItemAtRequest : IBaseRequest
    {
        /// <summary>
        /// Issues the GET request.
        /// </summary>
        System.Threading.Tasks.Task<WorkbookTable> GetAsync();
        /// <summary>
        /// Issues the GET request.
        /// </summary>
        /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
        /// <returns>The WorkbookTable.</returns>
        System.Threading.Tasks.Task<WorkbookTable> GetAsync(CancellationToken cancellationToken);
        /// <summary>
        /// Adds the specified expand value to the request.
        /// </summary>
        /// <param name="value">The expand value.</param>
        /// <returns>The request object to send.</returns>
        IWorkbookTableItemAtRequest Expand(string value);
        /// <summary>
        /// Adds the specified select value to the request.
        /// </summary>
        /// <param name="value">The select value.</param>
        /// <returns>The request object to send.</returns>
        IWorkbookTableItemAtRequest Select(string value);
    }
}
| 36.387755 | 153 | 0.5788 | [
"MIT"
] | MIchaelMainer/GraphAPI | src/Microsoft.Graph/Requests/Generated/IWorkbookTableItemAtRequest.cs | 1,783 | C# |
// <auto-generated />
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage;
using Microsoft.EntityFrameworkCore.Storage.Internal;
using P03_FootballBetting.Data;
using System;
namespace P03_FootballBetting.Migrations
{
    // Auto-generated EF Core model snapshot for FootballBettingContext, used by
    // migrations to diff the current model against the last migration.
    // Do not edit by hand; regenerate via migrations instead.
    [DbContext(typeof(FootballBettingContext))]
    partial class FootballBettingContextModelSnapshot : ModelSnapshot
    {
        protected override void BuildModel(ModelBuilder modelBuilder)
        {
            #pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "2.0.0-rtm-26452")
                .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
            // Entity type configurations (keys, required/unicode columns, indexes, table names).
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Bet", b =>
                {
                    b.Property<int>("BetId")
                        .ValueGeneratedOnAdd();
                    b.Property<decimal>("Amount");
                    b.Property<DateTime>("DateTime");
                    b.Property<int>("GameId");
                    b.Property<string>("Prediction")
                        .IsRequired()
                        .IsUnicode(true);
                    b.Property<int>("UserId");
                    b.HasKey("BetId");
                    b.HasIndex("GameId");
                    b.HasIndex("UserId");
                    b.ToTable("Bets");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Color", b =>
                {
                    b.Property<int>("ColorId")
                        .ValueGeneratedOnAdd();
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(50)
                        .IsUnicode(true);
                    b.HasKey("ColorId");
                    b.ToTable("Colors");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Country", b =>
                {
                    b.Property<int>("CountryId")
                        .ValueGeneratedOnAdd();
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(50)
                        .IsUnicode(true);
                    b.HasKey("CountryId");
                    b.ToTable("Countries");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Game", b =>
                {
                    b.Property<int>("GameId")
                        .ValueGeneratedOnAdd();
                    b.Property<double>("AwayTeamBetRate");
                    b.Property<int>("AwayTeamGoals");
                    b.Property<int>("AwayTeamId");
                    b.Property<DateTime>("DateTime");
                    b.Property<double>("DrawBetRate");
                    b.Property<double>("HomeTeamBetRate");
                    b.Property<int>("HomeTeamGoals");
                    b.Property<int>("HomeTeamId");
                    b.Property<int>("Result");
                    b.HasKey("GameId");
                    b.HasIndex("AwayTeamId");
                    b.HasIndex("HomeTeamId");
                    b.ToTable("Games");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Player", b =>
                {
                    b.Property<int>("PlayerId")
                        .ValueGeneratedOnAdd();
                    b.Property<bool>("IsInjured");
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(100)
                        .IsUnicode(true);
                    b.Property<int>("PositionId");
                    b.Property<int>("SquadNumber");
                    b.Property<int>("TeamId");
                    b.HasKey("PlayerId");
                    b.HasIndex("PositionId");
                    b.HasIndex("SquadNumber")
                        .IsUnique();
                    b.HasIndex("TeamId");
                    b.ToTable("Players");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.PlayerStatistic", b =>
                {
                    b.Property<int>("GameId");
                    b.Property<int>("PlayerId");
                    b.Property<string>("Assists");
                    b.Property<DateTime>("MinutesPlayed");
                    b.HasKey("GameId", "PlayerId");
                    b.HasIndex("PlayerId");
                    b.ToTable("PlayerStatistics");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Position", b =>
                {
                    b.Property<int>("PositionId")
                        .ValueGeneratedOnAdd();
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(50)
                        .IsUnicode(true);
                    b.HasKey("PositionId");
                    b.ToTable("Positions");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Team", b =>
                {
                    b.Property<int>("TeamId")
                        .ValueGeneratedOnAdd();
                    b.Property<decimal>("Budget");
                    b.Property<string>("Initials")
                        .IsRequired()
                        .HasMaxLength(3)
                        .IsUnicode(true);
                    b.Property<string>("LogoUrl")
                        .IsRequired();
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(80)
                        .IsUnicode(true);
                    b.Property<int>("PrimaryKitColorId");
                    b.Property<int>("SecondaryKitColorId");
                    b.Property<int>("TownId");
                    b.HasKey("TeamId");
                    b.HasIndex("PrimaryKitColorId");
                    b.HasIndex("SecondaryKitColorId");
                    b.HasIndex("TownId");
                    b.ToTable("Teams");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Town", b =>
                {
                    b.Property<int>("TownId")
                        .ValueGeneratedOnAdd();
                    b.Property<int>("CountryId");
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(100)
                        .IsUnicode(true);
                    b.HasKey("TownId");
                    b.HasIndex("CountryId");
                    b.ToTable("Towns");
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.User", b =>
                {
                    b.Property<int>("UserId")
                        .ValueGeneratedOnAdd();
                    b.Property<decimal>("Balance");
                    b.Property<string>("Email")
                        .IsRequired()
                        .IsUnicode(true);
                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(50)
                        .IsUnicode(true);
                    b.Property<string>("Password")
                        .IsRequired()
                        .IsUnicode(true);
                    b.Property<string>("Username")
                        .IsRequired()
                        .HasMaxLength(100)
                        .IsUnicode(true);
                    b.HasKey("UserId");
                    b.ToTable("Users");
                });
            // Relationship configurations — every foreign key below cascades on delete.
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Bet", b =>
                {
                    b.HasOne("P03_FootballBetting.Data.Models.Game", "Game")
                        .WithMany("Bets")
                        .HasForeignKey("GameId")
                        .OnDelete(DeleteBehavior.Cascade);
                    b.HasOne("P03_FootballBetting.Data.Models.User", "User")
                        .WithMany("Bets")
                        .HasForeignKey("UserId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Game", b =>
                {
                    b.HasOne("P03_FootballBetting.Data.Models.Team", "AwayTeam")
                        .WithMany("AwayGames")
                        .HasForeignKey("AwayTeamId")
                        .OnDelete(DeleteBehavior.Cascade);
                    b.HasOne("P03_FootballBetting.Data.Models.Team", "HomeTeam")
                        .WithMany("HomeGames")
                        .HasForeignKey("HomeTeamId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Player", b =>
                {
                    b.HasOne("P03_FootballBetting.Data.Models.Position", "Position")
                        .WithMany("Players")
                        .HasForeignKey("PositionId")
                        .OnDelete(DeleteBehavior.Cascade);
                    b.HasOne("P03_FootballBetting.Data.Models.Team", "Team")
                        .WithMany("Players")
                        .HasForeignKey("TeamId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.PlayerStatistic", b =>
                {
                    b.HasOne("P03_FootballBetting.Data.Models.Game", "Game")
                        .WithMany("PlayerStatistics")
                        .HasForeignKey("GameId")
                        .OnDelete(DeleteBehavior.Cascade);
                    b.HasOne("P03_FootballBetting.Data.Models.Player", "Player")
                        .WithMany("PlayerStatistics")
                        .HasForeignKey("PlayerId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Team", b =>
                {
                    b.HasOne("P03_FootballBetting.Data.Models.Color", "PrimaryKitColor")
                        .WithMany("PrimaryKitTeams")
                        .HasForeignKey("PrimaryKitColorId")
                        .OnDelete(DeleteBehavior.Cascade);
                    b.HasOne("P03_FootballBetting.Data.Models.Color", "SecondaryKitColor")
                        .WithMany("SecondaryKitTeams")
                        .HasForeignKey("SecondaryKitColorId")
                        .OnDelete(DeleteBehavior.Cascade);
                    b.HasOne("P03_FootballBetting.Data.Models.Town", "Town")
                        .WithMany("Teams")
                        .HasForeignKey("TownId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
            modelBuilder.Entity("P03_FootballBetting.Data.Models.Town", b =>
                {
                    b.HasOne("P03_FootballBetting.Data.Models.Country", "Country")
                        .WithMany("Towns")
                        .HasForeignKey("CountryId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
            #pragma warning restore 612, 618
        }
    }
| 33.661765 | 117 | 0.449104 | [
"MIT"
] | krasiymihajlov/Soft-Uni-practices | C# DB Advanced/02. Entity Relations/P03_FootballBetting/Migrations/FootballBettingContextModelSnapshot.cs | 11,447 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Builder.Dialogs;
using Microsoft.Bot.Builder.Teams;
using Microsoft.Bot.Connector;
using Microsoft.Bot.Schema;
using Microsoft.Extensions.DependencyInjection;
namespace ToDoSkill.Bots
{
public class DefaultActivityHandler<T> : TeamsActivityHandler
where T : Dialog
{
private readonly Dialog _dialog;
private readonly BotState _conversationState;
private readonly BotState _userState;
private readonly IStatePropertyAccessor<DialogState> _dialogStateAccessor;
public DefaultActivityHandler(IServiceProvider serviceProvider, T dialog)
{
_dialog = dialog;
_dialog.TelemetryClient = serviceProvider.GetService<IBotTelemetryClient>();
_conversationState = serviceProvider.GetService<ConversationState>();
_userState = serviceProvider.GetService<UserState>();
_dialogStateAccessor = _conversationState.CreateProperty<DialogState>(nameof(DialogState));
}
public override async Task OnTurnAsync(ITurnContext turnContext, CancellationToken cancellationToken = default)
{
await base.OnTurnAsync(turnContext, cancellationToken);
// Save any state changes that might have occured during the turn.
await _conversationState.SaveChangesAsync(turnContext, false, cancellationToken);
await _userState.SaveChangesAsync(turnContext, false, cancellationToken);
}
protected override Task OnTeamsSigninVerifyStateAsync(ITurnContext<IInvokeActivity> turnContext, CancellationToken cancellationToken)
{
return _dialog.RunAsync(turnContext, _dialogStateAccessor, cancellationToken);
}
protected override Task OnMembersAddedAsync(IList<ChannelAccount> membersAdded, ITurnContext<IConversationUpdateActivity> turnContext, CancellationToken cancellationToken)
{
return _dialog.RunAsync(turnContext, _dialogStateAccessor, cancellationToken);
}
protected override Task OnMessageActivityAsync(ITurnContext<IMessageActivity> turnContext, CancellationToken cancellationToken)
{
// directline speech occasionally sends empty message activities that should be ignored
var activity = turnContext.Activity;
if (activity.ChannelId == Channels.DirectlineSpeech && activity.Type == ActivityTypes.Message && string.IsNullOrEmpty(activity.Text))
{
return Task.CompletedTask;
}
return _dialog.RunAsync(turnContext, _dialogStateAccessor, cancellationToken);
}
protected override Task OnEventActivityAsync(ITurnContext<IEventActivity> turnContext, CancellationToken cancellationToken)
{
return _dialog.RunAsync(turnContext, _dialogStateAccessor, cancellationToken);
}
protected override Task OnEndOfConversationActivityAsync(ITurnContext<IEndOfConversationActivity> turnContext, CancellationToken cancellationToken)
{
return _dialog.RunAsync(turnContext, _dialogStateAccessor, cancellationToken);
}
}
}
| 44.486842 | 179 | 0.731736 | [
"MIT"
] | Power-Maverick/botframework-skills | skills/csharp/todoskill/Bots/DefaultActivityHandler.cs | 3,383 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Umbraco.Core.Persistence
{
//TODO: Would be good to use this exception type anytime we cannot find an entity
/// <summary>
/// An exception used to indicate that an umbraco entity could not be found
/// </summary>
public class EntityNotFoundException : Exception
{
public object Id { get; private set; }
private readonly string _msg;
public EntityNotFoundException(object id, string msg)
{
Id = id;
_msg = msg;
}
public EntityNotFoundException(string msg)
{
_msg = msg;
}
public override string Message
{
get { return _msg; }
}
public override string ToString()
{
var result = base.ToString();
if (Id != null)
{
return "Umbraco entity (id: " + Id + ") not found. " + result;
}
return result;
}
}
}
| 22.408163 | 85 | 0.551913 | [
"MIT"
] | bharanijayasuri/umbraco8 | src/Umbraco.Core/Persistence/EntityNotFoundException.cs | 1,100 | C# |
using Microsoft.Extensions.DependencyInjection;
using System.Linq;
using System.Reflection;
using ToDo.Infra.Dapper.Core;
namespace ToDo.Infra.Application.Modules
{
public static class DapperModule
{
public static void Configure(IServiceCollection services)
{
var types = Assembly.GetAssembly(typeof(QueryBase)).GetTypes();
var queries = types.Where(t => t.Name.EndsWith("Query")).ToDictionary(i => i.GetInterfaces()[0], i => i).ToList();
queries.ForEach(item =>
{
var (service, implementation) = item;
services.AddTransient(service, implementation);
});
}
}
} | 32.857143 | 126 | 0.624638 | [
"MIT"
] | Cristianotx/ewave-livraria-senior | ToDo-API/src/ToDo.Infra.Application/Modules/DapperModule.cs | 692 | C# |
using Microsoft.AspNetCore.Http;
namespace NHSD.GPITF.BuyingCatalog.Logic
{
  /// <summary>
  /// Validator for link-manager operations; its constructor registers the
  /// admin-only requirement via MustBeAdmin().
  /// </summary>
  public sealed class LinkManagerValidator : ValidatorBase<object>, ILinkManagerValidator
  {
    public LinkManagerValidator(IHttpContextAccessor context) :
      base(context)
    {
      // MustBeAdmin() is presumably declared on ValidatorBase and adds a rule
      // restricting access to administrators — confirm against the base class.
      MustBeAdmin();
    }
  }
}
| 21.071429 | 89 | 0.732203 | [
"MIT"
] | TrevorDArcyEvans/NHSBuyingCatalogue | beta-prototype/api/NHSD.GPITF.BuyingCatalog/NHSD.GPITF.BuyingCatalog.Logic/LinkManagerValidator.cs | 297 | C# |
using GalaxyGen.Engine;
using GalaxyGen.Framework;
using GalaxyGenCore;
using GalaxyGenCore.Resources;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GalaxyGen.Model
{
public class Store
{
public Store()
{
StoreId = IdUtils.GetId();
StoredResources = new Dictionary<ResourceTypeEnum, UInt64>();
}
public Int64 StoreId { get; set; }
public String Name { get; set; }
public Dictionary<ResourceTypeEnum,UInt64> StoredResources { get; set; }
public Agent Owner { get; set; }
public IStoreLocation Location { get; set; }
}
}
| 22.971429 | 80 | 0.682836 | [
"MIT"
] | AshRolls/GalaxyGen | GalaxyGen/Model/Store.cs | 806 | C# |
/*
*
* (c) Copyright Ascensio System Limited 2010-2020
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
namespace ASC.Web.Files.Services.WCFService.FileOperations
{
    /// <summary>
    /// The kind of long-running file operation being performed.
    /// Member order must be preserved: the underlying numeric values are
    /// implicit and may be persisted or sent over the wire.
    /// </summary>
    public enum FileOperationType
    {
        /// <summary>Move files or folders.</summary>
        Move,
        /// <summary>Copy files or folders.</summary>
        Copy,
        /// <summary>Delete files or folders.</summary>
        Delete,
        /// <summary>Download files.</summary>
        Download,
        /// <summary>Mark items as read.</summary>
        MarkAsRead,
        /// <summary>Import files.</summary>
        Import,
        /// <summary>Convert files to another format.</summary>
        Convert
    }
} | 27.866667 | 75 | 0.692584 | [
"Apache-2.0"
] | Ektai-Solution-Pty-Ltd/CommunityServer | web/studio/ASC.Web.Studio/Products/Files/Services/WCFService/FileOperations/FileOperationType.cs | 836 | C# |
// Copyright 2018 by JCoder58. See License.txt for license
// Auto-generated --- Do not modify.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UE4.Core;
using UE4.CoreUObject;
using UE4.CoreUObject.Native;
using UE4.InputCore;
using UE4.Native;
namespace UE4.MovieScene{
    ///<summary>Struct that stores the key for an evaluated entity, and the index at which it was (or is to be) evaluated</summary>
    // NOTE(review): generated interop struct with an explicit 16-byte layout
    // mirroring the native UE4 type; field offsets must not be changed
    // independently of the engine-side definition.
    [StructLayout( LayoutKind.Explicit, Size=16 )]
    public unsafe struct MovieSceneOrderedEvaluationKey {
        ///<summary>Key</summary>
        [FieldOffset(0)] MovieSceneEvaluationKey Key;
        // Declared as a single byte although the generator's TODO says the
        // native field is uint32; presumably only the low byte is exposed — confirm.
        [FieldOffset(12)] byte EvaluationIndex; //TODO: numeric uint32 EvaluationIndex
    }
}
| 32.208333 | 135 | 0.712807 | [
"MIT"
] | UE4DotNet/Plugin | DotNet/DotNet/UE4/Generated/MovieScene/MovieSceneOrderedEvaluationKey.cs | 773 | C# |
using System;
using System.Threading.Tasks;
using CSharpFunctionalExtensions;
using Logic.Utils.Shared;
using Newtonsoft.Json;
namespace Logic.Utils.Decorators.Command
{
    /// <summary>
    /// Marker attribute for command classes; referenced by
    /// AuditLoggingDecorator, whose CommandType property returns this type.
    /// </summary>
    [AttributeUsage(AttributeTargets.Class, Inherited = false, AllowMultiple = false)]
    public sealed class AuditLogAttribute : Attribute
    {
        public AuditLogAttribute() { }
    }
public sealed class AuditLoggingDecorator<TCommand> : ICommandHandler<TCommand>
where TCommand : ICommand
{
private readonly ICommandHandler<TCommand> _handler;
public AuditLoggingDecorator(ICommandHandler<TCommand> handler)
{
_handler = handler;
}
public Type CommandType => typeof(AuditLogAttribute);
public Task<Result> Handle(TCommand command)
{
string commandJson = JsonConvert.SerializeObject(command);
// Use proper logging here
Console.WriteLine($"Command of type {command.GetType().Name}: {commandJson}");
return _handler.Handle(command);
}
}
}
| 27.410256 | 90 | 0.673527 | [
"MIT"
] | gs1993/CQRS_Dotnet5 | src/domain/Logic/Utils/Decorators/Command/AuditLoggingDecorator.cs | 1,071 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace ServiTec.Win.Properties
{
    // Designer-generated settings wrapper; regenerated by Visual Studio, so any
    // hand edits to this class will be lost.
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
    {
        // Thread-safe singleton created via ApplicationSettingsBase.Synchronized.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
        // Shared settings instance for the application.
        public static Settings Default
        {
            get
            {
                return defaultInstance;
            }
        }
    }
}
| 35.533333 | 151 | 0.581614 | [
"MIT"
] | Dpineda89/ServiTec | ServiTec/ServiTec.Win/Properties/Settings.Designer.cs | 1,068 | C# |
using ColossalFramework.UI;
namespace ACME
{
/// <summary>
/// Utilities for Options Panel UI.
/// </summary>
internal static class PanelUtils
{
/// <summary>
/// Adds a tab to a UI tabstrip.
/// </summary>
/// <param name="tabStrip">UIT tabstrip to add to</param>
/// <param name="tabName">Name of this tab</param>
/// <param name="tabIndex">Index number of this tab</param>
/// <param name="autoLayout">Autolayout</param>
/// <returns>UIHelper instance for the new tab panel</returns>
internal static UIPanel AddTab(UITabstrip tabStrip, string tabName, int tabIndex, bool autoLayout)
{
// Create tab.
UIButton tabButton = tabStrip.AddTab(tabName);
// Sprites.
tabButton.normalBgSprite = "SubBarButtonBase";
tabButton.disabledBgSprite = "SubBarButtonBaseDisabled";
tabButton.focusedBgSprite = "SubBarButtonBaseFocused";
tabButton.hoveredBgSprite = "SubBarButtonBaseHovered";
tabButton.pressedBgSprite = "SubBarButtonBasePressed";
// Tooltip.
tabButton.tooltip = tabName;
tabStrip.selectedIndex = tabIndex;
// Force width.
tabButton.width = 200;
// Get tab root panel.
UIPanel rootPanel = tabStrip.tabContainer.components[tabIndex] as UIPanel;
// Autolayout.
rootPanel.autoLayout = autoLayout;
if (autoLayout)
{
rootPanel.autoLayoutDirection = LayoutDirection.Vertical;
rootPanel.autoLayoutPadding.top = 5;
rootPanel.autoLayoutPadding.left = 10;
}
return rootPanel;
}
}
} | 32.618182 | 106 | 0.583612 | [
"MIT"
] | algernon-A/ACME | Code/Settings/PanelUtils.cs | 1,796 | C# |
using System.Collections.Generic;
using System.Linq;
using ICSharpCode.NRefactory.CSharp.Refactoring;
using OmniSharp.Common;
using OmniSharp.Configuration;
using OmniSharp.Parser;
using OmniSharp.Refactoring;
namespace OmniSharp.CodeActions
{
    /// <summary>
    /// Handles requests to list and run NRefactory contextual code actions
    /// (refactorings) at a location in a parsed buffer.
    /// </summary>
    public class GetCodeActionsHandler
    {
        readonly BufferParser _bufferParser;
        readonly OmniSharpConfiguration _config;

        public GetCodeActionsHandler(BufferParser bufferParser, OmniSharpConfiguration config)
        {
            _bufferParser = bufferParser;
            _config = config;
        }

        /// <summary>
        /// Returns the descriptions of all code actions available at the
        /// request location. The order matches the indices accepted by
        /// <see cref="RunCodeAction"/>.
        /// </summary>
        public GetCodeActionsResponse GetCodeActions(Request req)
        {
            var actions = GetContextualCodeActions(req);
            return new GetCodeActionsResponse { CodeActions = actions.Select(a => a.Description) };
        }

        /// <summary>
        /// Runs the code action selected by index and returns the rewritten
        /// document text. Returns an empty response for out-of-range indices.
        /// </summary>
        public RunCodeActionsResponse RunCodeAction(CodeActionRequest req)
        {
            var actions = GetContextualCodeActions(req).ToList();

            // BUGFIX: the original check used '>' which let req.CodeAction ==
            // actions.Count through and then threw ArgumentOutOfRangeException
            // on the indexer below. Also reject negative indices.
            if (req.CodeAction < 0 || req.CodeAction >= actions.Count)
                return new RunCodeActionsResponse();

            var context = OmniSharpRefactoringContext.GetContext(_bufferParser, req);
            using (var script = new OmniSharpScript(context, _config))
            {
                CodeAction action = actions[req.CodeAction];
                action.Run(script);
            }

            return new RunCodeActionsResponse { Text = context.Document.Text };
        }

        // Collects the actions offered by every registered provider for the
        // refactoring context at the request location.
        private IEnumerable<CodeAction> GetContextualCodeActions(Request req)
        {
            var refactoringContext = OmniSharpRefactoringContext.GetContext(_bufferParser, req);

            var actions = new List<CodeAction>();
            foreach (var provider in new CodeActionProviders().GetProviders())
            {
                actions.AddRange(provider.GetActions(refactoringContext));
            }
            return actions;
        }
    }
}
| 32.836066 | 100 | 0.64653 | [
"MIT"
] | BugDiver/omnisharp-server | OmniSharp/CodeActions/GetCodeActionsHandler.cs | 2,005 | C# |
// <auto-generated>
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Gov.Jag.Spice.Interfaces
{
    using Microsoft.Rest;
    using Models;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Modifiedonbehalfbyspicerequiredchecks operations.
    /// </summary>
    /// <remarks>
    /// AutoRest-generated OData navigation-property client; do not hand-edit
    /// logic here — regenerate from the service metadata instead.
    /// </remarks>
    public partial class Modifiedonbehalfbyspicerequiredchecks : IServiceOperations<DynamicsClient>, IModifiedonbehalfbyspicerequiredchecks
    {
        /// <summary>
        /// Initializes a new instance of the Modifiedonbehalfbyspicerequiredchecks class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public Modifiedonbehalfbyspicerequiredchecks(DynamicsClient client)
        {
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            Client = client;
        }

        /// <summary>
        /// Gets a reference to the DynamicsClient
        /// </summary>
        public DynamicsClient Client { get; private set; }

        /// <summary>
        /// Get modifiedonbehalfby_spice_requiredchecks from spice_requiredcheckses
        /// </summary>
        /// <param name='activityid'>
        /// key: activityid of spice_requiredchecks
        /// </param>
        /// <param name='select'>
        /// Select properties to be returned
        /// </param>
        /// <param name='expand'>
        /// Expand related entities
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="HttpOperationException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<HttpOperationResponse<MicrosoftDynamicsCRMsystemuser>> GetWithHttpMessagesAsync(string activityid, IList<string> select = default(IList<string>), IList<string> expand = default(IList<string>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (activityid == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "activityid");
            }
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("activityid", activityid);
                tracingParameters.Add("select", select);
                tracingParameters.Add("expand", expand);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
            }
            // Construct URL (OData navigation property on the spice_requiredchecks entity,
            // with optional $select/$expand query options appended below).
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "spice_requiredcheckses({activityid})/modifiedonbehalfby_spice_requiredchecks").ToString();
            _url = _url.Replace("{activityid}", System.Uri.EscapeDataString(activityid));
            List<string> _queryParameters = new List<string>();
            if (select != null)
            {
                _queryParameters.Add(string.Format("$select={0}", System.Uri.EscapeDataString(string.Join(",", select))));
            }
            if (expand != null)
            {
                _queryParameters.Add(string.Format("$expand={0}", System.Uri.EscapeDataString(string.Join(",", expand))));
            }
            if (_queryParameters.Count > 0)
            {
                _url += "?" + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers (caller-supplied headers replace any existing value of the same name)
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Any non-200 status is surfaced as HttpOperationException carrying the
            // raw request/response for diagnostics.
            if ((int)_statusCode != 200)
            {
                var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                if (_httpResponse.Content != null) {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                }
                else {
                    _responseContent = string.Empty;
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new HttpOperationResponse<MicrosoftDynamicsCRMsystemuser>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<MicrosoftDynamicsCRMsystemuser>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
    }
}
| 42.126761 | 339 | 0.573164 | [
"Apache-2.0"
] | BrendanBeachBC/jag-spd-spice | interfaces/Dynamics-Autorest/Modifiedonbehalfbyspicerequiredchecks.cs | 8,973 | C# |
/*
Copyright 2017 YANG Huan (sy.yanghuan@gmail.com).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace CSharpLua {
  /// <summary>
  /// Command-line entry point for the CSharp.lua compiler: parses arguments,
  /// configures a Compiler instance, and reports errors via exit codes.
  /// </summary>
  class Program {
    // NOTE: this literal is also parsed at runtime by IsArgumentKey (lines
    // starting with '-' become the known-argument set), so its exact layout
    // is load-bearing — do not reformat.
    private const string kHelpCmdString = @"Usage: CSharp.lua [-s srcfolder] [-d dstfolder]
Arguments 
-s : can be a directory where all cs files will be compiled, or a list of files, using ';' or ',' to separate
-d : destination directory, will put the out lua files
Options
-h : show the help message and exit
-l : libraries referenced, use ';' to separate
     if the library is a module, which is compiled by CSharp.lua with -module argument, the last character needs to be '!' in order to mark
-m : meta files, like System.xml, use ';' to separate
-csc : csc.exe command arguments, use ' ' or '\t' to separate
-c : support classic lua version(5.1), default support 5.3
-a : attributes need to export, use ';' to separate, if ""-a"" only, all attributes will be exported
-e : enums need to export, use ';' to separate, if ""-e"" only, all enums will be exported
-ei : enums is represented by a variable reference rather than a constant value, need to be used with -e
-p : do not use debug.setmetatable, in some Addon/Plugin environment debug object cannot be used
-metadata : export all metadata, use @CSharpLua.Metadata annotations for precise control
-module : the currently compiled assembly needs to be referenced, it's useful for multiple module compiled
-inline-property: inline some single-line properties
-include : the root directory of the CoreSystem library, adds all the dependencies to a single file named out.lua
-noconcurrent : close concurrent compile
";

    /// <summary>
    /// Parses command-line arguments, runs the compiler, and sets a negative
    /// exit code on any failure (bad arguments, compile errors, or crashes).
    /// </summary>
    public static void Main(string[] args) {
      if (args.Length > 0) {
        try {
          var cmds = Utility.GetCommandLines(args);
          if (cmds.ContainsKey("-h")) {
            ShowHelpInfo();
            return;
          }

          // Time the whole compilation for the success message below.
          var sw = new Stopwatch();
          sw.Start();

          string input = cmds.GetArgument("-s");
          string output = cmds.GetArgument("-d");
          string lib = cmds.GetArgument("-l", true);
          string meta = cmds.GetArgument("-m", true);
          bool isClassic = cmds.ContainsKey("-c");
          // "-a"/"-e" with no value mean "export everything": represented by
          // an empty string rather than null.
          string atts = cmds.GetArgument("-a", true);
          if (atts == null && cmds.ContainsKey("-a")) {
            atts = string.Empty;
          }
          string enums = cmds.GetArgument("-e", true);
          if (enums == null && cmds.ContainsKey("-e")) {
            enums = string.Empty;
          }
          string csc = GetCSCArgument(args);
          bool isPreventDebugObject = cmds.ContainsKey("-p");
          bool isExportMetadata = cmds.ContainsKey("-metadata");
          bool isModule = cmds.ContainsKey("-module");
          bool isInlineSimpleProperty = cmds.ContainsKey("-inline-property");
          bool isNotConstantForEnum = cmds.ContainsKey("-ei");
          bool isNoConcurrent = cmds.ContainsKey("-noconcurrent");
          string include = cmds.GetArgument("-include", true);
          Compiler c = new Compiler(input, output, lib, meta, csc, isClassic, atts, enums) {
            IsExportMetadata = isExportMetadata,
            IsModule = isModule,
            IsInlineSimpleProperty = isInlineSimpleProperty,
            IsPreventDebugObject = isPreventDebugObject,
            IsNotConstantForEnum = isNotConstantForEnum,
            Include = include,
            IsNoConcurrent = isNoConcurrent,
          };
          c.Compile();
          Console.WriteLine($"Compiled Success, cost {sw.Elapsed.TotalSeconds}s");
        } catch (CmdArgumentException e) {
          // Bad command line: show the message plus usage help.
          Console.Error.WriteLine(e.Message);
          ShowHelpInfo();
          Environment.ExitCode = -1;
        } catch (CompilationErrorException e) {
          Console.Error.WriteLine(e.Message);
          Environment.ExitCode = -1;
        } catch (Exception e) {
          Console.Error.WriteLine(e.ToString());
          Environment.ExitCode = -1;
        }
      } else {
        ShowHelpInfo();
        Environment.ExitCode = -1;
      }
    }

    // Prints usage to stderr.
    private static void ShowHelpInfo() {
      Console.Error.WriteLine(kHelpCmdString);
    }

    // Lazily-built set of known option keys, derived from kHelpCmdString.
    private static HashSet<string> arguments_;

    // True if 'key' is one of the documented option switches.
    private static bool IsArgumentKey(string key) {
      if (arguments_ == null) {
        arguments_ = new HashSet<string>();
        string[] lines = kHelpCmdString.Split(new[] { "\r\n", "\n", "\r" }, StringSplitOptions.RemoveEmptyEntries);
        foreach (string line in lines) {
          if (line.StartsWith('-')) {
            // Take characters up to the first whitespace, e.g. "-inline-property".
            char[] chars = line.TakeWhile(i => !char.IsWhiteSpace(i)).ToArray();
            arguments_.Add(new string(chars));
          }
        }
      }
      return arguments_.Contains(key);
    }

    // Returns everything after "-csc" up to (but excluding) the next known
    // option switch, joined as a single csc.exe argument string; null when
    // "-csc" is absent.
    private static string GetCSCArgument(string[] args) {
      int index = args.IndexOf("-csc");
      if (index != -1) {
        var remains = args.Skip(index + 1);
        int end = remains.IndexOf(IsArgumentKey);
        if (end != -1) {
          remains = remains.Take(end);
        }
        return string.Join(" ", remains);
      }
      return null;
    }
  }
}
| 40.734266 | 154 | 0.618369 | [
"Apache-2.0"
] | Cheatoid/CSharp.lua | CSharp.lua.Launcher/Program.cs | 5,825 | C# |
using System;
using System.Collections.Generic;
using System.Reactive.Linq;
using System.Reactive.Threading.Tasks;
using MS.Core;
namespace System.Globalization
{
    /// <summary>
    /// Reactive wrappers around <see cref="System.Globalization.TextInfo"/>:
    /// each method lifts the corresponding TextInfo member into an
    /// IObservable pipeline (Select for unary members, Zip for binary ones).
    /// </summary>
    public static class __TextInfo
    {
        public static IObservable<System.Object> Clone(this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.Clone());
        }

        public static IObservable<System.Globalization.TextInfo> ReadOnly(
            IObservable<System.Globalization.TextInfo> textInfo)
        {
            return textInfo.Select(info => System.Globalization.TextInfo.ReadOnly(info));
        }

        public static IObservable<System.Char> ToLower(this IObservable<System.Globalization.TextInfo> TextInfoValue,
            IObservable<System.Char> c)
        {
            return TextInfoValue.Zip(c, (info, ch) => info.ToLower(ch));
        }

        public static IObservable<System.String> ToLower(this IObservable<System.Globalization.TextInfo> TextInfoValue,
            IObservable<System.String> str)
        {
            return TextInfoValue.Zip(str, (info, text) => info.ToLower(text));
        }

        public static IObservable<System.Char> ToUpper(this IObservable<System.Globalization.TextInfo> TextInfoValue,
            IObservable<System.Char> c)
        {
            return TextInfoValue.Zip(c, (info, ch) => info.ToUpper(ch));
        }

        public static IObservable<System.String> ToUpper(this IObservable<System.Globalization.TextInfo> TextInfoValue,
            IObservable<System.String> str)
        {
            return TextInfoValue.Zip(str, (info, text) => info.ToUpper(text));
        }

        public static IObservable<System.Boolean> Equals(this IObservable<System.Globalization.TextInfo> TextInfoValue,
            IObservable<System.Object> obj)
        {
            return TextInfoValue.Zip(obj, (info, other) => info.Equals(other));
        }

        public static IObservable<System.Int32> GetHashCode(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.GetHashCode());
        }

        public static IObservable<System.String> ToString(this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.ToString());
        }

        public static IObservable<System.String> ToTitleCase(
            this IObservable<System.Globalization.TextInfo> TextInfoValue, IObservable<System.String> str)
        {
            return TextInfoValue.Zip(str, (info, text) => info.ToTitleCase(text));
        }

        public static IObservable<System.Int32> get_ANSICodePage(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.ANSICodePage);
        }

        public static IObservable<System.Int32> get_OEMCodePage(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.OEMCodePage);
        }

        public static IObservable<System.Int32> get_MacCodePage(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.MacCodePage);
        }

        public static IObservable<System.Int32> get_EBCDICCodePage(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.EBCDICCodePage);
        }

        public static IObservable<System.Int32> get_LCID(this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.LCID);
        }

        public static IObservable<System.String> get_CultureName(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.CultureName);
        }

        public static IObservable<System.Boolean> get_IsReadOnly(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.IsReadOnly);
        }

        public static IObservable<System.String> get_ListSeparator(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.ListSeparator);
        }

        public static IObservable<System.Boolean> get_IsRightToLeft(
            this IObservable<System.Globalization.TextInfo> TextInfoValue)
        {
            return TextInfoValue.Select(info => info.IsRightToLeft);
        }

        public static IObservable<System.Reactive.Unit> set_ListSeparator(
            this IObservable<System.Globalization.TextInfo> TextInfoValue, IObservable<System.String> value)
        {
            // Side-effecting setter: ZipExecute pairs each TextInfo with a
            // separator value and performs the assignment.
            return ObservableExt.ZipExecute(TextInfoValue, value,
                (info, separator) => info.ListSeparator = separator);
        }
    }
}
"MIT"
] | RixianOpenTech/RxWrappers | Source/Wrappers/mscorlib/System.Globalization.TextInfo.cs | 6,091 | C# |
namespace PVOutput.Net.Modules
{
    /// <summary>
    /// Base that all the PVOutput services derive from
    /// </summary>
    public abstract class BaseService
    {
        // Owning client; derived services use it to issue API requests.
        internal PVOutputClient Client { get; }

        // Captures the client that created this service instance.
        internal BaseService(PVOutputClient client)
        {
            Client = client;
        }
    }
}
| 20.5625 | 55 | 0.586626 | [
"MIT"
] | pyrocumulus/pvoutput.net | src/PVOutput.Net/Modules/BaseService.cs | 331 | C# |
using System;
using Microsoft.SPOT;
namespace Skewworks.NETMF.Controls
{
   /// <summary>
   /// Pop-up (context) menu control for NETMF: renders a vertical list of
   /// MenuItems, supports nested sub-menus that expand to the side, and
   /// dismisses itself (Blur) when an item is chosen.
   /// </summary>
   [Serializable]
   public class ContextMenu : Control
   {

      #region Variables

      private Font _font;
      private MenuItem[] _items;
      // Small arrow glyph drawn on items that have sub-menus.
      private readonly Bitmap _right = Resources.GetBitmap(Resources.BitmapResources.right);

      #endregion

      #region Constructors

      public ContextMenu(string name, Font font)
      {
         Name = name;
         _font = font;
      }

      public ContextMenu(string name, Font font, MenuItem[] menuItems)
      {
         Name = name;
         _font = font;
         _items = menuItems;
      }

      #endregion

      #region Properties

      /// <summary>
      /// Font used to render item text; changing it repaints the container.
      /// </summary>
      public Font Font
      {
         get { return _font; }
         set
         {
            if (_font == value)
               return;
            _font = value;
            Core.ActiveContainer.Invalidate();
         }
      }

      public MenuItem[] MenuItems
      {
         get { return _items; }
      }

      #endregion

      #region Public Methods

      /// <summary>
      /// Appends a single item to the menu and repaints if attached.
      /// </summary>
      public void AddMenuItem(MenuItem value)
      {
         // Update Array Size
         if (_items == null)
         {
            _items = new[] { value };
         }
         else
         {
            var tmp = new MenuItem[_items.Length + 1];
            Array.Copy(_items, tmp, _items.Length);
            tmp[tmp.Length - 1] = value;
            _items = tmp;
         }

         if (Parent != null)
         {
            Parent.Render(ScreenBounds, true);
         }
      }

      /// <summary>
      /// Appends multiple items to the menu and repaints if attached.
      /// </summary>
      public void AddMenuItems(MenuItem[] values)
      {
         if (_items == null)
         {
            _items = values;
         }
         else
         {
            var tmp = new MenuItem[_items.Length + values.Length];
            Array.Copy(_items, tmp, _items.Length);
            // BUGFIX: the new values must be appended AFTER the existing items
            // (the original copied them to index 0, clobbering existing items,
            // and never assigned the new array back to _items at all).
            Array.Copy(values, 0, tmp, _items.Length, values.Length);
            _items = tmp;
         }

         if (Parent != null)
         {
            Parent.Render(ScreenBounds, true);
         }
      }

      /// <summary>
      /// Removes all items and repaints if attached.
      /// </summary>
      public void ClearMenuItems()
      {
         if (_items == null)
         {
            return;
         }

         _items = null;

         if (Parent != null)
         {
            Parent.Render(ScreenBounds, true);
         }
      }

      /// <summary>
      /// Inserts an item at the given index and repaints if attached.
      /// </summary>
      public void InsertMenuItemAt(MenuItem value, int index)
      {
         if (_items == null && index == 0)
         {
            AddMenuItem(value);
            return;
         }

         if (_items == null || index < 0 || index > _items.Length)
            throw new ArgumentOutOfRangeException();

         var tmp = new MenuItem[_items.Length + 1];
         int i;

         // Copy upper
         for (i = 0; i < index; i++)
         {
            tmp[i] = _items[i];
         }

         // Insert
         tmp[index] = value;

         // Copy lower
         for (i = index; i < _items.Length; i++)
         {
            tmp[i + 1] = _items[i];
         }

         // Update
         _items = tmp;

         if (Parent != null)
         {
            Parent.Render(ScreenBounds, true);
         }
      }

      /// <summary>
      /// Removes item by value
      /// </summary>
      /// <param name="value"></param>
      public void RemoveMenuItem(MenuItem value)
      {
         if (_items == null)
            return;

         for (int i = 0; i < _items.Length; i++)
         {
            if (_items[i] == value)
            {
               RemoveMenuItemAt(i);
               return;
            }
         }
      }

      /// <summary>
      /// Removes item at specific point in array
      /// </summary>
      /// <param name="index"></param>
      public void RemoveMenuItemAt(int index)
      {
         if (_items.Length == 1)
         {
            ClearMenuItems();
            return;
         }

         var tmp = new MenuItem[_items.Length - 1];
         int c = 0;
         for (int i = 0; i < _items.Length; i++)
         {
            if (i != index)
               tmp[c++] = _items[i];
         }

         _items = tmp;

         if (Parent != null)
         {
            Parent.Render(ScreenBounds, true);
         }
      }

      /// <summary>
      /// Sizes the menu from its items, clamps the position on-screen, then
      /// attaches it to the parent container and makes it the active child.
      /// </summary>
      public void Show(IContainer parent, int x, int y, bool bottomAlign = false)
      {
         int w = 0;
         int i;
         bool addSubs = false;

         // Check for empty or bad parent
         if (_items == null || parent.TopLevelContainer != Core.ActiveContainer)
            return;

         // Calculate Size (widest item text; extra room if any item has subs)
         int h = GetMenuItem(_items);
         for (i = 0; i < _items.Length; i++)
         {
            int j = FontManager.ComputeExtentEx(_font, _items[i].Text).Width;
            if (_items[i].Length > 0)
            {
               addSubs = true;
            }
            if (j > w)
            {
               w = j;
            }
         }
         w += (addSubs) ? 40 : 8;

         // Update Position (flip above/left when the menu would run off-screen)
         if (bottomAlign)
         {
            y -= h;
         }

         if (y + h > Core.Screen.Height)
         {
            y = y - h;
         }
         if (y < 0)
         {
            y = 0;
         }

         if (x + w > Core.Screen.Width)
         {
            x = x - w;
         }
         if (x < 0)
         {
            x = 0;
         }

         X = x;
         Y = y;
         Width = w;
         Height = h;
         if (!Visible)
         {
            Visible = true;
         }

         parent.AddChild(this);
         parent.ActiveChild = this;
      }

      #endregion

      #region Focus

      /// <summary>
      /// Dismisses the menu: detaches from the parent, collapses every
      /// expanded sub-menu, and repaints the active container.
      /// </summary>
      public override void Blur()
      {
         if (Parent == null)
            return;

         IContainer ic = Parent;

         //ic.Suspended = true;
         ic.RemoveChild(this);

         // Collapse all menus
         if (_items != null)
         {
            for (int i = 0; i < _items.Length; i++)
            {
               if (_items[i].Expanded)
               {
                  _items[i].Expanded = false;
                  _items[i].SendTouchUp(null, new point(-1, -1));
                  BlurChildren(_items[i]);
               }
            }
         }

         OnLostFocus(this);

         //ic.QuiteUnsuspend();
         Core.ActiveContainer.Invalidate();
      }

      // Recursively collapses expanded descendants of a menu item.
      private void BlurChildren(MenuItem mnu)
      {
         for (int i = 0; i < mnu.Items.Length; i++)
         {
            if (mnu.Items[i].Expanded)
            {
               mnu.Items[i].Expanded = false;
               BlurChildren(mnu.Items[i]);
            }
         }
      }

      /// <summary>
      /// Hit testing includes the main menu bounds plus the bounds of every
      /// expanded sub-menu fly-out.
      /// </summary>
      public override bool HitTest(point point)
      {
         // Check Main Bounds
         if (ScreenBounds.Contains(point))
            return true;

         // Check Expanded Menus
         if (_items != null)
         {
            for (int i = 0; i < _items.Length; i++)
            {
               if (_items[i].Expanded && _items[i].ExpandedBounds.Contains(point))
               {
                  return true;
               }
               if (_items[i].Expanded && _items.Length > 0 && HitTestSub(_items[i], point))
               {
                  return true;
               }
            }
         }

         return false;
      }

      // Recursive hit test against expanded descendants.
      private bool HitTestSub(MenuItem mnu, point e)
      {
         for (int i = 0; i < mnu.Items.Length; i++)
         {
            if (mnu.Items[i].Expanded && mnu.Items[i].ExpandedBounds.Contains(e))
            {
               return true;
            }
            if (mnu.Items[i].Expanded && mnu.Items.Length > 0 && HitTestSub(mnu.Items[i], e))
            {
               return true;
            }
         }

         return false;
      }

      #endregion

      #region Touch

      // Routes touch-down to the item under the point, expanded sub-menus first.
      protected override void TouchDownMessage(object sender, point point, ref bool handled)
      {
         if (_items == null)
            return;

         for (int i = 0; i < _items.Length; i++)
         {
            if (_items[i].Expanded && _items.Length > 0)
            {
               // Check subnodes first
               if (SendTouchDownSub(_items[i], point))
                  return;
            }
            else if (_items[i].ScreenBounds.Contains(point))
            {
               Parent.Suspended = true;
               _items[i].SendTouchDown(this, point);
               // Collapse any sibling that was left expanded.
               CollapseOnLne(_items, i);
               Parent.Suspended = false;
               return;
            }
         }
      }

      // Recursively delivers touch-down inside expanded sub-menus; returns
      // true when a descendant consumed the touch.
      private bool SendTouchDownSub(MenuItem mnu, point e)
      {
         for (int i = 0; i < mnu.Items.Length; i++)
         {
            if (mnu.Items[i].Expanded && mnu.Items[i].Length > 0)
            {
               if (SendTouchDownSub(mnu.Items[i], e))
               {
                  return true;
               }
               mnu.Items[i].Expanded = false;
            }
            else if (mnu.Items[i].ScreenBounds.Contains(e))
            {
               mnu.Items[i].SendTouchDown(this, e);
               CollapseSubsFrom(mnu.Items, i + 1);
               Parent.Invalidate();
               return true;
            }
         }

         return false;
      }

      // Routes touch-up: expands/collapses items with sub-menus, or fires the
      // item's action and dismisses the menu.
      protected override void TouchUpMessage(object sender, point point, ref bool handled)
      {
         if (_items != null)
         {
            for (int i = 0; i < _items.Length; i++)
            {
               if (_items[i].Expanded && _items.Length > 0)
               {
                  if (SendTouchUpSub(_items[i], point))
                  {
                     Blur();
                     _items[i].Expanded = false;
                     break;
                  }
                  //_items[i].SetExpanded(false);
               }
               else if (_items[i].ScreenBounds.Contains(point))
               {
                  if (_items[i].Touching)
                  {
                     if (_items[i].Items != null)
                     {
                        // Item owns a sub-menu: toggle it instead of firing.
                        _items[i].SetExpanded(!_items[i].Expanded);
                        Parent.Invalidate();
                     }
                     else
                     {
                        Blur();
                        _items[i].SendTouchUp(this, point);
                     }
                     break;
                  }
                  Invalidate(ScreenBounds);
                  _items[i].SendTouchUp(this, point);
                  break;
               }
               else if (_items[i].Touching)
               {
                  _items[i].SendTouchUp(this, point);
                  Invalidate();
               }
            }
         }

         base.TouchUpMessage(sender, point, ref handled);
      }

      // Recursive touch-up delivery for expanded sub-menus; returns true when
      // a descendant handled the touch.
      private bool SendTouchUpSub(MenuItem mnu, point e)
      {
         for (int i = 0; i < mnu.Items.Length; i++)
         {
            if (mnu.Items[i].Expanded && mnu.Items.Length > 0)
            {
               if (SendTouchUpSub(mnu.Items[i], e))
                  return true;
            }
            else if (mnu.Items[i].ScreenBounds.Contains(e))
            {
               mnu.Items[i].SendTouchUp(this, e);
               if (mnu.Items[i].Touching)
               {
                  if (mnu.Items[i].Items != null)
                  {
                     mnu.Items[i].Expanded = !mnu.Items[i].Expanded;
                     Parent.Invalidate();
                  }
                  else
                     Blur();
               }
               else
                  Parent.Invalidate();
               return true;
            }
         }

         return false;
      }

      #endregion

      #region GUI

      // ReSharper disable RedundantAssignment
      protected override void OnRender(int x, int y, int width, int height)
      // ReSharper restore RedundantAssignment
      {
         Core.Screen.SetClippingRectangle(0, 0, Core.ScreenWidth, Core.ScreenHeight);
         Core.Screen.DrawRectangle(Core.SystemColors.BorderColor, 1, Left, Top, Width, Height, 0, 0, Core.SystemColors.ControlTop, Left, Top, Core.SystemColors.ControlBottom, Left + Width, Top + Height, 256);

         // Draw Items (also updates each item's layout rect as a side effect)
         y = Top + 4;
         for (int i = 0; i < _items.Length; i++)
         {
            if (_items[i].Visible)
            {
               if (_items[i].Touching || _items[i].Expanded)
               {
                  Core.Screen.DrawRectangle(0, 0, Left + 1, y - 3, Width - 2, _font.Height + 8, 0, 0, Core.SystemColors.SelectionColor, 0, 0, Core.SystemColors.SelectionColor, 0, 0, 256);
                  Core.Screen.DrawTextInRect(_items[i].Text, Left + 4, y, Width - 8, _font.Height, Bitmap.DT_AlignmentLeft, Core.SystemColors.SelectedFontColor, _font);
               }
               else
                  Core.Screen.DrawTextInRect(_items[i].Text, Left + 4, y, Width - 8, _font.Height, Bitmap.DT_AlignmentLeft, (_items[i].Enabled) ? Core.SystemColors.FontColor : Colors.DarkGray, _font);

               _items[i].X = Left;
               _items[i].Y = y;
               _items[i].Width = Width - 2;
               _items[i].Height = _font.Height + 8;
               _items[i].Parent = Parent;

               if (_items[i].Length > 0)
                  Core.Screen.DrawImage(Left + Width - 10, y + _font.Height / 2 - 2, _right, 0, 0, 4, 5);

               if (_items[i].Expanded)
                  DrawExpandedMenu(_items[i], Left + Width - 1, y - 4, Width);

               y += _font.Height + 5;
               if (i != _items.Length - 1)
                  Core.Screen.DrawLine(Core.SystemColors.BorderColor, 1, Left + 1, y, Left + Width - 2, y);
               y += 4;
            }
         }
      }

      // Renders an expanded sub-menu fly-out next to its owner, flipping side
      // when it would run off-screen; recurses for nested expansions.
      private void DrawExpandedMenu(MenuItem mnu, int x, int y, int ownerWidth)
      {
         int w = 0;
         int i;
         bool addSubs = false;

         //_iSel = -1;

         // Calculate Size
         int h = GetMenuItem(mnu.Items);
         for (i = 0; i < mnu.Items.Length; i++)
         {
            int j = FontManager.ComputeExtentEx(_font, mnu.Items[i].Text).Width;
            if (mnu.Items[i].Length > 0)
            {
               addSubs = true;
            }
            if (j > w)
            {
               w = j;
            }
         }
         w += (addSubs) ? 40 : 8;

         // Update Position
         if (y + h > Core.Screen.Height)
            y = y - h;
         if (y < 0)
            y = 0;

         if (x + w > Core.Screen.Width)
            x = x - w - ownerWidth + 3;
         if (x < 0)
            x = 0;

         mnu.ExpandedBounds = new rect(x, y, w, h);
         Core.Screen.DrawRectangle(Colors.DarkGray, 1, x, y, w - 1, h, 0, 0, Core.SystemColors.ControlTop, x, y, Core.SystemColors.ControlBottom, x + w, y + h, 256);

         // Draw Items
         y += 4;
         for (i = 0; i < mnu.Items.Length; i++)
         {
            if (mnu.Items[i].Visible)
            {
               if (mnu.Items[i].Touching || mnu.Items[i].Expanded)
               {
                  Core.Screen.DrawRectangle(0, 0, x + 1, y - 3, w - 2, _font.Height + 8, 0, 0, Core.SystemColors.SelectionColor, 0, 0, Core.SystemColors.SelectionColor, 0, 0, 256);
                  Core.Screen.DrawTextInRect(mnu.Items[i].Text, x + 4, y, w - 8, _font.Height, Bitmap.DT_AlignmentLeft, Core.SystemColors.SelectedFontColor, _font);
               }
               else
                  Core.Screen.DrawTextInRect(mnu.Items[i].Text, x + 4, y, w - 8, _font.Height, Bitmap.DT_AlignmentLeft, (mnu.Items[i].Enabled) ? Core.SystemColors.FontColor : Colors.DarkGray, _font);

               mnu.Items[i].X = x;// 1;
               mnu.Items[i].Y = y; // y; // y - y;
               mnu.Items[i].Width = w - 2;
               mnu.Items[i].Height = _font.Height + 8;
               mnu.Items[i].Parent = Parent;

               if (mnu.Items[i].Length > 0)
                  Core.Screen.DrawImage(x + w - 10, y + _font.Height / 2 - 2, _right, 0, 0, 4, 5);

               if (mnu.Items[i].Expanded)
               {
                  DrawExpandedMenu(mnu.Items[i], x + w - 1, y - 4, w);
               }

               y += _font.Height + 5;
               if (i != mnu.Items.Length - 1)
               {
                  Core.Screen.DrawLine(Core.SystemColors.BorderColor, 1, x + 1, y, x + w - 2, y);
               }
               y += 4;
            }
         }
      }

      #endregion

      #region Private Methods

      // Collapses every expanded top-level item except the one at 'exclude'.
      private void CollapseOnLne(MenuItem[] items, int exclude)
      {
         for (int i = 0; i < items.Length; i++)
         {
            if (items[i].Expanded && i != exclude)
            {
               items[i].Expanded = false;
               if (_items[i].Touching)
                  _items[i].SendTouchUp(null, new point(_items[i].Left - 1, 0));
            }
         }
      }

      // Collapses all items from 'index' onward.
      private void CollapseSubsFrom(MenuItem[] items, int index)
      {
         for (int i = index; i < items.Length; i++)
            items[i].Expanded = false;
      }

      // Pixel height of a menu holding the visible items in 'items'.
      private int GetMenuItem(MenuItem[] items)
      {
         int c = 0;
         for (int i = 0; i < items.Length; i++)
         {
            if (items[i].Visible)
               c++;
         }
         return 1 + ((_font.Height + 9) * c);
      }

      #endregion

   }
}
| 26.80094 | 208 | 0.435464 | [
"Apache-2.0"
] | osre77/Tinkr | Tinkr2/4.2/AphelionCore/AphelionExtendedControls/Controls/Menus/ContextMenu.cs | 17,099 | C# |
using System;
using LinCms.Core.Entities.Blog;
namespace LinCms.Application.Contracts.Blog.Notifications.Dtos
{
    /// <summary>
    /// Data used to create (or cancel) a blog notification record.
    /// </summary>
    public class CreateNotificationDto
    {
        /// <summary>Kind of notification (values defined by <see cref="NotificationType"/>).</summary>
        public NotificationType NotificationType { get; set; }

        /// <summary>Related article, when the notification concerns an article; otherwise null.</summary>
        public Guid? ArticleId { get; set; }

        /// <summary>Related comment, when the notification concerns a comment; otherwise null.</summary>
        public Guid? CommentId { get; set; }

        /// <summary>Id of the user who should receive the notification.</summary>
        public long NotificationRespUserId { get; set; }

        // Id of the other user involved — presumably the one whose action
        // triggered the notification; confirm against the service that maps
        // this DTO.
        public long UserInfoId { get; set; }

        /// <summary>True when this represents cancelling (undoing) a prior notification.</summary>
        public bool IsCancel { get; set; }

        /// <summary>Timestamp of the originating event.</summary>
        public DateTime CreateTime { get; set; }
    }
}
| 29.055556 | 62 | 0.646272 | [
"MIT"
] | Newxml/lin-cms-dotnetcore | src/LinCms.Application.Contracts/Blog/Notifications/Dtos/CreateNotificationDto.cs | 525 | C# |
#region License
//
// The Open Toolkit Library License
//
// Copyright (c) 2006 - 2010 the Open Toolkit library.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
#endregion
// Created by Erik Ylvisaker on 3/17/08.
using System;
using System.Collections.Generic;
using System.Text;
namespace OpenTK.Platform.MacOS
{
using Carbon;
using Input;
class MacOSKeyMap : Dictionary<MacOSKeyCode, Key>
{
public MacOSKeyMap()
{
// comments indicate members of the Key enum that are missing
Add(MacOSKeyCode.A, Key.A);
// AltLeft
// AltRight
Add(MacOSKeyCode.B, Key.B);
Add(MacOSKeyCode.Backslash, Key.BackSlash);
Add(MacOSKeyCode.Backspace, Key.BackSpace);
Add(MacOSKeyCode.BracketLeft, Key.BracketLeft);
Add(MacOSKeyCode.BracketRight, Key.BracketRight);
Add(MacOSKeyCode.C, Key.C);
// Capslock
// Clear
Add(MacOSKeyCode.Comma, Key.Comma);
// ControlLeft
// ControlRight
Add(MacOSKeyCode.D, Key.D);
Add(MacOSKeyCode.Del, Key.Delete);
Add(MacOSKeyCode.Down, Key.Down);
Add(MacOSKeyCode.E, Key.E);
Add(MacOSKeyCode.End, Key.End);
Add(MacOSKeyCode.Enter, Key.Enter);
Add(MacOSKeyCode.Return, Key.Enter);
Add(MacOSKeyCode.Esc, Key.Escape);
Add(MacOSKeyCode.F, Key.F);
Add(MacOSKeyCode.F1, Key.F1);
Add(MacOSKeyCode.F2, Key.F2);
Add(MacOSKeyCode.F3, Key.F3);
Add(MacOSKeyCode.F4, Key.F4);
Add(MacOSKeyCode.F5, Key.F5);
Add(MacOSKeyCode.F6, Key.F6);
Add(MacOSKeyCode.F7, Key.F7);
Add(MacOSKeyCode.F8, Key.F8);
Add(MacOSKeyCode.F9, Key.F9);
Add(MacOSKeyCode.F10, Key.F10);
Add(MacOSKeyCode.F11, Key.F11);
Add(MacOSKeyCode.F12, Key.F12);
Add(MacOSKeyCode.F13, Key.F13);
Add(MacOSKeyCode.F14, Key.F14);
Add(MacOSKeyCode.F15, Key.F15);
// F16-F35
Add(MacOSKeyCode.G, Key.G);
Add(MacOSKeyCode.H, Key.H);
Add(MacOSKeyCode.Home, Key.Home);
Add(MacOSKeyCode.I, Key.I);
Add(MacOSKeyCode.Insert, Key.Insert);
Add(MacOSKeyCode.J, Key.J);
Add(MacOSKeyCode.K, Key.K);
Add(MacOSKeyCode.KeyPad_0, Key.Keypad0);
Add(MacOSKeyCode.KeyPad_1, Key.Keypad1);
Add(MacOSKeyCode.KeyPad_2, Key.Keypad2);
Add(MacOSKeyCode.KeyPad_3, Key.Keypad3);
Add(MacOSKeyCode.KeyPad_4, Key.Keypad4);
Add(MacOSKeyCode.KeyPad_5, Key.Keypad5);
Add(MacOSKeyCode.KeyPad_6, Key.Keypad6);
Add(MacOSKeyCode.KeyPad_7, Key.Keypad7);
Add(MacOSKeyCode.KeyPad_8, Key.Keypad8);
Add(MacOSKeyCode.KeyPad_9, Key.Keypad9);
Add(MacOSKeyCode.KeyPad_Add, Key.KeypadAdd);
Add(MacOSKeyCode.KeyPad_Decimal, Key.KeypadDecimal);
Add(MacOSKeyCode.KeyPad_Divide, Key.KeypadDivide);
Add(MacOSKeyCode.KeyPad_Enter, Key.KeypadEnter);
Add(MacOSKeyCode.KeyPad_Multiply, Key.KeypadMultiply);
Add(MacOSKeyCode.KeyPad_Subtract, Key.KeypadSubtract);
//Add(MacOSKeyCode.KeyPad_Equal);
Add(MacOSKeyCode.L, Key.L);
Add(MacOSKeyCode.Left, Key.Left);
Add(MacOSKeyCode.M, Key.M);
//Key.MaxKeys
Add(MacOSKeyCode.Menu, Key.Menu);
Add(MacOSKeyCode.Minus, Key.Minus);
Add(MacOSKeyCode.N, Key.N);
Add(MacOSKeyCode.Key_0, Key.Number0);
Add(MacOSKeyCode.Key_1, Key.Number1);
Add(MacOSKeyCode.Key_2, Key.Number2);
Add(MacOSKeyCode.Key_3, Key.Number3);
Add(MacOSKeyCode.Key_4, Key.Number4);
Add(MacOSKeyCode.Key_5, Key.Number4);
Add(MacOSKeyCode.Key_6, Key.Number5);
Add(MacOSKeyCode.Key_7, Key.Number6);
Add(MacOSKeyCode.Key_8, Key.Number7);
Add(MacOSKeyCode.Key_9, Key.Number9);
// Numlock
Add(MacOSKeyCode.O, Key.O);
Add(MacOSKeyCode.P, Key.P);
Add(MacOSKeyCode.Pagedown, Key.PageDown);
Add(MacOSKeyCode.Pageup, Key.PageUp);
// Pause
Add(MacOSKeyCode.Period, Key.Period);
Add(MacOSKeyCode.Equals, Key.Plus);
// PrintScreen
Add(MacOSKeyCode.Q, Key.Q);
Add(MacOSKeyCode.Quote, Key.Quote);
Add(MacOSKeyCode.R, Key.R);
Add(MacOSKeyCode.Right, Key.Right);
Add(MacOSKeyCode.S, Key.S);
// ScrollLock
Add(MacOSKeyCode.Semicolon, Key.Semicolon);
//Key.ShiftLeft
//Key.ShiftRight
Add(MacOSKeyCode.Slash, Key.Slash);
// Key.Sleep
Add(MacOSKeyCode.Space, Key.Space);
Add(MacOSKeyCode.T, Key.T);
Add(MacOSKeyCode.Tab, Key.Tab);
Add(MacOSKeyCode.Tilde, Key.Tilde);
Add(MacOSKeyCode.U, Key.U);
Add(MacOSKeyCode.Up, Key.Up);
Add(MacOSKeyCode.V, Key.V);
Add(MacOSKeyCode.W, Key.W);
// WinKeyLeft
// WinKeyRight
Add(MacOSKeyCode.X, Key.X);
Add(MacOSKeyCode.Y, Key.Y);
Add(MacOSKeyCode.Z, Key.Z);
}
}
}
| 40.306748 | 83 | 0.595282 | [
"MIT"
] | Anunayj/scallion | opentk/Source/OpenTK/Platform/MacOS/MacOSKeyMap.cs | 6,570 | C# |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Amazon.Lambda.Core;
using Amazon.Lambda.S3Events;
using Amazon.S3;
using Amazon.S3.Model;
using Amazon.S3.Transfer;
using org.ohdsi.cdm.framework.common2.Base;
using org.ohdsi.cdm.framework.common2.Core.Transformation.Cerner;
using org.ohdsi.cdm.framework.common2.Core.Transformation.CPRD;
using org.ohdsi.cdm.framework.common2.Core.Transformation.HCUP;
using org.ohdsi.cdm.framework.common2.Core.Transformation.JMDC;
using org.ohdsi.cdm.framework.common2.Core.Transformation.nhanes;
using org.ohdsi.cdm.framework.common2.Core.Transformation.OptumExtended;
using org.ohdsi.cdm.framework.common2.Core.Transformation.OptumOncology;
using org.ohdsi.cdm.framework.common2.Core.Transformation.Premier;
using org.ohdsi.cdm.framework.common2.Core.Transformation.SEER;
using org.ohdsi.cdm.framework.common2.Core.Transformation.Truven;
using org.ohdsi.cdm.framework.common2.Enums;
using org.ohdsi.cdm.presentation.lambdabuilder.Base;
// Assembly attribute to enable the Lambda function's JSON input to be converted into a .NET class.
[assembly: LambdaSerializer(typeof(Amazon.Lambda.Serialization.Json.JsonSerializer))]
namespace org.ohdsi.cdm.presentation.lambdabuilder
{
public class Function
{
        // S3 client used to read and delete attempt files; injectable for tests.
        IAmazonS3 S3Client { get; set; }
        // Cached vocabulary; reused across warm Lambda invocations for the same vendor.
        private Vocabulary _vocabulary;
        //private bool _initialized = false;
        // True once the triggering attempt file has been deleted from S3 (see RemoveAttemptFile).
        private bool _attemptFileRemoved;
        // Last person id persisted before a watchdog timeout, if any (set in the handlers).
        private long? _lastSavedPersonIdOutput;
        // Per-input-file row offsets used to resume a chunk after a timeout/retry.
        private Dictionary<string, long> _restorePoint = new Dictionary<string, long>();
        /// <summary>
        /// Default constructor. This constructor is used by Lambda to construct the instance. When invoked in a Lambda environment
        /// the AWS credentials will come from the IAM role associated with the function and the AWS region will be set to the
        /// region the Lambda function is executed in.
        /// </summary>
        public Function()
        {
            S3Client = new AmazonS3Client();
            Console.WriteLine("ctor 1"); // trace which constructor ran (cold-start diagnostics)
        }
        /// <summary>
        /// Constructs an instance with a preconfigured S3 client. This can be used for testing outside the Lambda environment.
        /// </summary>
        /// <param name="s3Client">S3 client used for attempt-file operations.</param>
        public Function(IAmazonS3 s3Client)
        {
            this.S3Client = s3Client;
            Console.WriteLine("ctor 2"); // trace which constructor ran
        }
public void Initialize()
{
Settings.Current.Started = DateTime.Now;
Settings.Current.Error = false;
Settings.Current.WatchdogTimeout = false;
_attemptFileRemoved = false;
_lastSavedPersonIdOutput = null;
_restorePoint.Clear();
if (_vocabulary != null && _vocabulary.Vendor == Settings.Current.Building.Vendor) return;
try
{
var timer = new Stopwatch();
timer.Start();
_vocabulary = new Vocabulary(Settings.Current.Building.Vendor);
_vocabulary.Fill(false);
timer.Stop();
Console.WriteLine("Vocabulary initialized for " + Settings.Current.Building.Vendor + " | " +
timer.ElapsedMilliseconds + "ms");
}
catch (Exception e)
{
_vocabulary = null;
throw;
}
}
public async Task<string> FunctionHandler2(Vendors vendor, int buildingId, int chunkId, string prefix,
int attempt)
{
Dictionary<string, long> restorePoint = null;
while (true)
{
try
{
if (attempt > 55)
{
Console.WriteLine("Too many attempts");
return null;
}
Settings.Initialize(buildingId, vendor);
Settings.Current.S3AwsAccessKeyId = "";
Settings.Current.S3AwsSecretAccessKey = "";
Settings.Current.Bucket = "";
Settings.Current.StorageType = S3StorageType.Parquet;
Settings.Current.CDMFolder = "cdm";
Settings.Current.TimeoutValue = 18000;
Settings.Current.WatchdogValue = 100000 * 1000;
Settings.Current.MinPersonToBuild = 100;
Settings.Current.MinPersonToSave = 100;
Console.WriteLine(
$"vendor={vendor};buildingId={buildingId};chunkId={chunkId};prefix={prefix};attempt={attempt}");
Initialize();
if (restorePoint != null)
{
_restorePoint = restorePoint;
}
_vocabulary.Attach();
var chunkBuilder = new LambdaChunkBuilder(CreatePersonBuilder);
_lastSavedPersonIdOutput = chunkBuilder.Process(chunkId, prefix, _restorePoint, attempt);
Console.WriteLine("DONE");
return "done";
}
catch (Exception e)
{
attempt++;
Console.WriteLine();
Console.WriteLine("Attempt: " + attempt);
Console.WriteLine();
restorePoint = new Dictionary<string, long>(_restorePoint);
}
}
}
/// <summary>
/// This method is called for every Lambda invocation. This method takes in an S3 event object and can be used
/// to respond to S3 notifications.
/// </summary>
/// <param name="evnt"></param>
/// <param name="context"></param>
/// <returns></returns>
public async Task<string> FunctionHandler(S3Event evnt, ILambdaContext context)
{
//return null;
var s3Event = evnt.Records?[0].S3;
if (s3Event == null)
{
return null;
}
int chunkId = 0;
int buildingId = 0;
string prefix = String.Empty;
int attempt = 0;
Vendors vendor;
try
{
// 0 1 2 3 4 5
//vendor.buildingId.chunkId.prefix.attempt.txt
vendor = Enum.Parse<Vendors>(s3Event.Object.Key.Split('.')[0]);
buildingId = int.Parse(s3Event.Object.Key.Split('.')[1]);
chunkId = int.Parse(s3Event.Object.Key.Split('.')[2]);
prefix = s3Event.Object.Key.Split('.')[3].Trim();
if (s3Event.Object.Key.Split('.').Length == 6)
{
attempt = int.Parse(s3Event.Object.Key.Split('.')[4]);
}
Settings.Initialize(buildingId, vendor);
Settings.Current.TimeoutValue = 180;
Settings.Current.WatchdogValue = 10 * 1000;
Settings.Current.MinPersonToBuild = 100;
Settings.Current.MinPersonToSave = 100;
Settings.Current.S3AwsAccessKeyId = Environment.GetEnvironmentVariable("S3AwsAccessKeyId");
Settings.Current.S3AwsSecretAccessKey = Environment.GetEnvironmentVariable("S3AwsSecretAccessKey");
Settings.Current.Bucket = Environment.GetEnvironmentVariable("Bucket");
Settings.Current.CDMFolder = Environment.GetEnvironmentVariable("CDMFolder");
Settings.Current.StorageType = Enum.TryParse(Environment.GetEnvironmentVariable("StorageType"), true,
out S3StorageType storageType) ? storageType : S3StorageType.CSV;
//TODO different behavior(num of subChunks 256, 31...)
if (attempt > 55)
{
//Console.WriteLine($"*** too many attempt || chunkId={chunkId};prefix={prefix};attempt={attempt} - STARTED from PersonId={lastSavedPersonIdInput}");
return null;
}
Console.WriteLine($"vendor={vendor};buildingId={buildingId};chunkId={chunkId};prefix={prefix};attempt={attempt}");
Console.WriteLine($"Bucket={Settings.Current.Bucket};CDMFolder={Settings.Current.CDMFolder};StorageType={Settings.Current.StorageType};");
Console.WriteLine($"TimeoutValue={Settings.Current.TimeoutValue}s;WatchdogValue={Settings.Current.WatchdogValue}ms;MinPersonToBuild={Settings.Current.MinPersonToBuild}; MinPersonToSave={Settings.Current.MinPersonToSave}");
Initialize();
GetRestorePoint(s3Event);
_vocabulary.Attach();
var chunkBuilder = new LambdaChunkBuilder(CreatePersonBuilder);
var attempt1 = attempt;
_lastSavedPersonIdOutput = chunkBuilder.Process(chunkId, prefix, _restorePoint, attempt1);
RemoveAttemptFile(s3Event);
if (_lastSavedPersonIdOutput.HasValue || (Settings.Current.WatchdogTimeout && chunkBuilder.TotalPersonConverted == 0))
{
attempt++;
CreateAttemptFile(s3Event, chunkId, prefix, attempt);
Console.WriteLine($"chunkId={chunkId};prefix={prefix} - FINISHED by timeout on PersonId={_lastSavedPersonIdOutput.Value}");
return "done";
}
Console.WriteLine($"chunkId={chunkId};prefix={prefix} - FINISHED, {s3Event.Object.Key} - removed");
return "done";
}
catch (Exception e)
{
Console.WriteLine($"WARN_EXC - FunctionHandler");
Console.WriteLine($"getting object {s3Event.Object.Key} from bucket {s3Event.Bucket.Name}. Make sure they exist and your bucket is in the same region as this function.");
Console.WriteLine(e.Message);
Console.WriteLine(e.StackTrace);
if (RemoveAttemptFile(s3Event))
{
attempt++;
if (!CreateAttemptFile(s3Event, chunkId, prefix, attempt))
{
Console.WriteLine($"Can't convert chunkId={chunkId} prefix={prefix} | CreateAttemptFile");
}
}
else
{
Console.WriteLine($"Can't convert chunkId={chunkId} prefix={prefix} | RemoveAttemptFile");
}
return "interrupted";
}
}
private bool GetRestorePoint(Amazon.S3.Util.S3EventNotification.S3Entity s3Event)
{
var attempt = 0;
while (true)
{
try
{
attempt++;
var msg = new StringBuilder();
var timer = new Stopwatch();
timer.Start();
using (var transferUtility = new TransferUtility(this.S3Client))
using (var responseStream = transferUtility.OpenStream(s3Event.Bucket.Name, s3Event.Object.Key))
using (var reader = new StreamReader(responseStream))
{
string line;
while ((line = reader.ReadLine()) != null)
{
var fileName = line.Split(':')[0];
var rowIndex = long.Parse(line.Split(':')[1]);
_restorePoint.Add(fileName, rowIndex);
msg.Append($"{fileName}:{rowIndex};");
}
}
timer.Stop();
Console.WriteLine("Restore point:" + msg + " | " + timer.ElapsedMilliseconds + "ms");
return true;
}
catch (Exception e)
{
if (attempt > 5)
{
Console.WriteLine($"WARN_EXC - GetRestorePoint [{s3Event.Object.Key}]");
Console.WriteLine(e.Message);
Console.WriteLine(e.StackTrace);
throw;
}
}
}
}
        // Deletes the triggering attempt file from S3 so the same key is not
        // processed twice. Retries up to 5 times; returns false on failure.
        // Idempotent within an invocation via _attemptFileRemoved.
        private bool RemoveAttemptFile(Amazon.S3.Util.S3EventNotification.S3Entity s3Event)
        {
            if (_attemptFileRemoved)
                return true;

            var attempt = 0;
            var key = s3Event.Object.Key;
            while (true)
            {
                try
                {
                    attempt++;
                    var task = this.S3Client.DeleteObjectAsync(new DeleteObjectRequest
                    {
                        BucketName = s3Event.Bucket.Name,
                        Key = key
                    });
                    // NOTE(review): sync-over-async (Task.Wait) — works in this handler,
                    // but would deadlock under a UI/ASP.NET synchronization context.
                    task.Wait();

                    Console.WriteLine($"Attempt file was removed - {key} | attempt={attempt}");
                    _attemptFileRemoved = true;
                    return true;
                }
                catch (Exception e)
                {
                    // Swallow transient failures and retry; give up (with logging) after 5 tries.
                    if (attempt > 5)
                    {
                        Console.WriteLine($"WARN_EXC - Can't remove [{key}]");
                        Console.WriteLine(e.Message);
                        Console.WriteLine(e.StackTrace);
                        return false;
                    }
                }
            }
        }
        // Uploads the next attempt file (one "fileName:rowIndex" restore-point line
        // per source file) to S3, which re-triggers this Lambda for the same chunk.
        // Requires the previous attempt file to have been removed first.
        private bool CreateAttemptFile(Amazon.S3.Util.S3EventNotification.S3Entity s3Event, int chunkId, string prefix, int processAttempt)
        {
            if (!_attemptFileRemoved)
                return false;

            var attempt = 0;
            // Key format must match the parser in FunctionHandler:
            // vendor.buildingId.chunkId.prefix.attempt.txt
            var key = $"{Settings.Current.Building.Vendor}.{Settings.Current.Building.Id}.{chunkId}.{prefix}.{processAttempt}.txt";

            //if (_lastSavedPersonIdOutput.HasValue)
            //    key = $"{chunkId}.{prefix}.{_lastSavedPersonIdOutput.Value}.{processAttempt}.txt";

            while (true)
            {
                try
                {
                    attempt++;
                    using (var memoryStream = new MemoryStream())
                    using (var writer = new StreamWriter(memoryStream))
                    using (var tu = new TransferUtility(this.S3Client))
                    {
                        // Serialize the restore point; parsed back by GetRestorePoint.
                        foreach (var rp in _restorePoint)
                        {
                            writer.WriteLine($"{rp.Key}:{rp.Value}");
                        }
                        writer.Flush();
                        tu.Upload(memoryStream, s3Event.Bucket.Name, key);
                    }

                    Console.WriteLine($"Attempt file was created - {key} | attempt={attempt}");
                    return true;
                }
                catch (Exception e)
                {
                    // Retry transient upload failures; give up (with logging) after 5 tries.
                    if (attempt > 5)
                    {
                        Console.WriteLine($"WARN_EXC - Can't create new attempt [{key}]");
                        Console.WriteLine(e.Message);
                        Console.WriteLine(e.StackTrace);
                        return false;
                    }
                }
            }
        }
private static IPersonBuilder CreatePersonBuilder()
{
switch (Settings.Current.Building.Vendor)
{
case Vendors.Truven_CCAE:
case Vendors.Truven_MDCR:
case Vendors.Truven_MDCD:
return new TruvenPersonBuilder();
case Vendors.OptumExtendedSES:
case Vendors.OptumExtendedDOD:
return new OptumExtendedPersonBuilder();
case Vendors.PremierV5:
return new PremierPersonBuilder();
case Vendors.Cerner:
return new CernerPersonBuilder();
case Vendors.HCUPv5:
return new HcupPersonBuilder();
case Vendors.JMDCv5:
return new JmdcPersonBuilder();
case Vendors.SEER:
return new SeerPersonBuilder();
case Vendors.OptumOncology:
return new OptumOncologyPersonBuilder();
case Vendors.CprdV5:
return new CprdPersonBuilder();
case Vendors.NHANES:
return new NhanesPersonBuilder();
// case Vendors.ErasV5:
// return new ErasV5PersonBuilder();
// case Vendors.OptumIntegrated:
// return new OptumIntegratedPersonBuilder();
}
return new PersonBuilder();
}
}
}
| 38.778539 | 238 | 0.52093 | [
"Apache-2.0"
] | AnthonyMolinaro/ETL-LambdaBuilder | sources/Presentation/org.ohdsi.cdm.presentation.lambdabuilder/Function.cs | 16,985 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.ApplicationModel;
using Windows.ApplicationModel.Activation;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Media.Animation;
using Windows.UI.Xaml.Navigation;
// The Blank Application template is documented at http://go.microsoft.com/fwlink/?LinkId=391641
namespace SwingingEntrance.WinPhone81
{
/// <summary>
/// Provides application-specific behavior to supplement the default Application class.
/// </summary>
public sealed partial class App : Application
{
private TransitionCollection transitions;
/// <summary>
/// Initializes the singleton application object. This is the first line of authored code
/// executed, and as such is the logical equivalent of main() or WinMain().
/// </summary>
public App()
{
this.InitializeComponent();
this.Suspending += this.OnSuspending;
}
/// <summary>
/// Invoked when the application is launched normally by the end user. Other entry points
/// will be used when the application is launched to open a specific file, to display
/// search results, and so forth.
/// </summary>
/// <param name="e">Details about the launch request and process.</param>
protected override void OnLaunched(LaunchActivatedEventArgs e)
{
#if DEBUG
if (System.Diagnostics.Debugger.IsAttached)
{
this.DebugSettings.EnableFrameRateCounter = true;
}
#endif
Frame rootFrame = Window.Current.Content as Frame;
// Do not repeat app initialization when the Window already has content,
// just ensure that the window is active
if (rootFrame == null)
{
// Create a Frame to act as the navigation context and navigate to the first page
rootFrame = new Frame();
// TODO: change this value to a cache size that is appropriate for your application
rootFrame.CacheSize = 1;
Xamarin.Forms.Forms.Init(e);
if (e.PreviousExecutionState == ApplicationExecutionState.Terminated)
{
// TODO: Load state from previously suspended application
}
// Place the frame in the current Window
Window.Current.Content = rootFrame;
}
if (rootFrame.Content == null)
{
// Removes the turnstile navigation for startup.
if (rootFrame.ContentTransitions != null)
{
this.transitions = new TransitionCollection();
foreach (var c in rootFrame.ContentTransitions)
{
this.transitions.Add(c);
}
}
rootFrame.ContentTransitions = null;
rootFrame.Navigated += this.RootFrame_FirstNavigated;
// When the navigation stack isn't restored navigate to the first page,
// configuring the new page by passing required information as a navigation
// parameter
if (!rootFrame.Navigate(typeof(MainPage), e.Arguments))
{
throw new Exception("Failed to create initial page");
}
}
// Ensure the current window is active
Window.Current.Activate();
}
/// <summary>
/// Restores the content transitions after the app has launched.
/// </summary>
/// <param name="sender">The object where the handler is attached.</param>
/// <param name="e">Details about the navigation event.</param>
private void RootFrame_FirstNavigated(object sender, NavigationEventArgs e)
{
var rootFrame = sender as Frame;
rootFrame.ContentTransitions = this.transitions ?? new TransitionCollection() { new NavigationThemeTransition() };
rootFrame.Navigated -= this.RootFrame_FirstNavigated;
}
/// <summary>
/// Invoked when application execution is being suspended. Application state is saved
/// without knowing whether the application will be terminated or resumed with the contents
/// of memory still intact.
/// </summary>
/// <param name="sender">The source of the suspend request.</param>
/// <param name="e">Details about the suspend request.</param>
private void OnSuspending(object sender, SuspendingEventArgs e)
{
var deferral = e.SuspendingOperation.GetDeferral();
// TODO: Save application state and stop any background activity
deferral.Complete();
}
}
} | 38.894737 | 126 | 0.610284 | [
"Apache-2.0"
] | NoleHealth/xamarin-forms-book-preview-2 | Chapter22/SwingingEntrance/SwingingEntrance/SwingingEntrance.WinPhone81/App.xaml.cs | 5,175 | C# |
using System;
using NQuery.Symbols;
using Xunit;
namespace NQuery.Tests
{
public partial class ExpressionTests
{
[Fact]
public void Expression_Queries_SingleRowSubselect()
{
var dataContext = NorthwindDataContext.Instance;
var text = "(SELECT LastName FROM Employees WHERE FirstName = 'Margaret')";
var expression = Expression<string>.Create(dataContext, text);
var result = expression.Evaluate();
Assert.Equal("Peacock", result);
}
[Fact]
public void Expression_Queries_Exists()
{
var dataContext = NorthwindDataContext.Instance;
var text = "EXISTS (SELECT * FROM Employees WHERE FirstName = 'Margaret')";
var expression = Expression<bool>.Create(dataContext, text);
var result = expression.Evaluate();
Assert.True(result);
}
[Fact]
public void Expression_Queries_Exists_NoFilter()
{
var dataContext = NorthwindDataContext.Instance;
var text = "EXISTS (SELECT * FROM Employees)";
var expression = Expression<bool>.Create(dataContext, text);
var result = expression.Evaluate();
Assert.True(result);
}
[Fact]
public void Expression_Queries_All()
{
var dataContext = NorthwindDataContext.Instance;
var text = "10 >= ALL (SELECT EmployeeId FROM Employees)";
var expression = Expression<bool>.Create(dataContext, text);
var result = expression.Evaluate();
Assert.True(result);
}
[Fact]
public void Expression_Queries_Any()
{
var name = new VariableSymbol("name", typeof(string), "Margaret");
var dataContext = NorthwindDataContext.Instance.AddVariables(name);
var text = "'London' = ANY (SELECT City FROM Employees)";
var expression = Expression<bool>.Create(dataContext, text);
var result = expression.Evaluate();
Assert.True(result);
}
}
} | 31.835821 | 87 | 0.592124 | [
"MIT"
] | dallmair/nquery-vnext | src/NQuery.Tests/ExpressionTests.Queries.cs | 2,133 | C# |
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Mundipagg.Models.Response
{
    /// <summary>
    /// Response payload for a Mundipagg transfer. Property names serialize in
    /// snake_case via the configured naming strategy; declaration order is kept
    /// because Json.NET emits properties in that order by default.
    /// </summary>
    [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
    public class GetTransferResponse
    {
        // Transfer identifier assigned by the gateway.
        public string Id { get; set; }
        // Transfer amount; presumably in the smallest currency unit (cents) — confirm with API docs.
        public int Amount { get; set; }
        // Transfer status string as returned by the API.
        public string Status { get; set; }
        public DateTime CreatedAt { get; set; }
        public DateTime UpdatedAt { get; set; }
        // Destination bank account for the transfer.
        public GetBankAccountResponse BankAccount { get; set; }
        // Free-form key/value metadata attached to the transfer.
        public Dictionary<string, string> Metadata { get; set; }
    }
} | 25.222222 | 70 | 0.69163 | [
"MIT"
] | ddomingos-mundi/mundipagg-dotnet | Mundipagg/Models/Response/GetTransferResponse.cs | 681 | C# |
using System.Threading.Tasks;
namespace cloudscribe.SimpleContent.Models
{
    /// <summary>
    /// Resolves public URLs for pages and rewrites media URLs embedded in
    /// page content between relative and absolute forms.
    /// </summary>
    public interface IPageUrlResolver
    {
        /// <summary>Returns the resolved URL for the given page.</summary>
        Task<string> ResolvePageUrl(IPage page);
        /// <summary>Rewrites media URLs in the page's content to relative form.</summary>
        Task ConvertMediaToRelativeUrls(IPage page);
        /// <summary>
        /// Rewrites media URLs in the page's content to absolute form, presumably
        /// using host/CDN information from <paramref name="projectSettings"/> — confirm in implementations.
        /// </summary>
        Task ConvertMediaToAbsoluteUrls(IPage page, IProjectSettings projectSettings);
    }
}
| 26.5 | 86 | 0.745283 | [
"Apache-2.0"
] | cloudscribe/cloudscribe.SimpleContent | src/cloudscribe.SimpleContent.Models/Page/IPageUrlResolver.cs | 320 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the databrew-2017-07-25.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using System.Net;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.GlueDataBrew.Model
{
    /// <summary>
    /// Container for the parameters to the CreateDataset operation.
    /// Creates a new AWS Glue DataBrew dataset for this AWS account.
    /// </summary>
    /// <remarks>
    /// NOTE(review): generated from the databrew-2017-07-25 service model
    /// ("Do not modify this file"); only documentation was touched here.
    /// </remarks>
    public partial class CreateDatasetRequest : AmazonGlueDataBrewRequest
    {
        private FormatOptions _formatOptions;
        private Input _input;
        private string _name;
        private Dictionary<string, string> _tags = new Dictionary<string, string>();

        /// <summary>
        /// Gets and sets the property FormatOptions.
        /// (No description in the service model — see the DataBrew API reference.)
        /// </summary>
        public FormatOptions FormatOptions
        {
            get { return this._formatOptions; }
            set { this._formatOptions = value; }
        }

        // Check to see if FormatOptions property is set
        internal bool IsSetFormatOptions()
        {
            return this._formatOptions != null;
        }

        /// <summary>
        /// Gets and sets the property Input.
        /// Required. (No description in the service model — see the DataBrew API reference.)
        /// </summary>
        [AWSProperty(Required=true)]
        public Input Input
        {
            get { return this._input; }
            set { this._input = value; }
        }

        // Check to see if Input property is set
        internal bool IsSetInput()
        {
            return this._input != null;
        }

        /// <summary>
        /// Gets and sets the property Name. 
        /// <para>
        /// The name of the dataset to be created.
        /// </para>
        /// </summary>
        [AWSProperty(Required=true, Min=1, Max=255)]
        public string Name
        {
            get { return this._name; }
            set { this._name = value; }
        }

        // Check to see if Name property is set
        internal bool IsSetName()
        {
            return this._name != null;
        }

        /// <summary>
        /// Gets and sets the property Tags. 
        /// <para>
        /// Metadata tags to apply to this dataset.
        /// </para>
        /// </summary>
        [AWSProperty(Min=1, Max=200)]
        public Dictionary<string, string> Tags
        {
            get { return this._tags; }
            set { this._tags = value; }
        }

        // Check to see if Tags property is set
        internal bool IsSetTags()
        {
            return this._tags != null && this._tags.Count > 0; 
        }

    }
} | 29.946429 | 107 | 0.56619 | [
"Apache-2.0"
] | philasmar/aws-sdk-net | sdk/src/Services/GlueDataBrew/Generated/Model/CreateDatasetRequest.cs | 3,354 | C# |
using IIASA.FloodCitiSense.Mobile.Core.Core;
using IIASA.FloodCitiSense.Mobile.Core.Core.Dependency;
using IIASA.FloodCitiSense.Mobile.Core.Services.Permission;
using System;
using Xamarin.Forms;
using Xamarin.Forms.Xaml;
namespace IIASA.FloodCitiSense.Mobile.Core.Extensions.MarkupExtensions
{
[ContentProperty("Text")]
public class HasPermissionExtension : IMarkupExtension
{
public string Text { get; set; }
public object ProvideValue(IServiceProvider serviceProvider)
{
if (ApplicationBootstrapper.AbpBootstrapper == null || Text == null)
{
return false;
}
var permissionService = Resolver.Resolve<IPermissionService>();
return permissionService.HasPermission(Text);
}
}
} | 30.923077 | 80 | 0.686567 | [
"MIT"
] | FloodCitiSense/FloodCitiSense | aspnet-core/src/IIASA.FloodCitiSense.Mobile.Core/Extensions/MarkupExtensions/HasPermissionExtension.cs | 806 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.Pipeline;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Cryptography;
using Azure.Storage.Sas;
using Azure.Storage.Shared;
using Metadata = System.Collections.Generic.IDictionary<string, string>;
using Tags = System.Collections.Generic.IDictionary<string, string>;
#pragma warning disable SA1402 // File may only contain a single type
namespace Azure.Storage.Blobs.Specialized
{
/// <summary>
/// The <see cref="BlobBaseClient"/> allows you to manipulate Azure Storage
/// blobs.
/// </summary>
public class BlobBaseClient
{
        /// <summary>
        /// The blob's primary <see cref="Uri"/> endpoint.
        /// </summary>
        private protected readonly Uri _uri;

        /// <summary>
        /// Gets the blob's primary <see cref="Uri"/> endpoint.
        /// </summary>
        public virtual Uri Uri => _uri;

        /// <summary>
        /// <see cref="BlobClientConfiguration"/>.
        /// </summary>
        internal readonly BlobClientConfiguration _clientConfiguration;

        /// <summary>
        /// <see cref="BlobClientConfiguration"/>.
        /// </summary>
        internal virtual BlobClientConfiguration ClientConfiguration => _clientConfiguration;

        /// <summary>
        /// The <see cref="ClientSideEncryptionOptions"/> to be used when sending/receiving requests.
        /// </summary>
        private readonly ClientSideEncryptionOptions _clientSideEncryption;

        /// <summary>
        /// The <see cref="ClientSideEncryptionOptions"/> to be used when sending/receiving requests.
        /// </summary>
        internal virtual ClientSideEncryptionOptions ClientSideEncryption => _clientSideEncryption;

        // True when client-side encryption options were supplied for this client.
        internal bool UsingClientSideEncryption => ClientSideEncryption != default;

        /// <summary>
        /// Optional. The snapshot of the blob.
        /// </summary>
        // NOTE(review): not read in this portion of the file — presumably set by
        // snapshot-aware factory methods elsewhere; confirm before removing.
        private string _snapshot;

        /// <summary>
        /// Optional. The version of the blob.
        /// </summary>
        // NOTE(review): as with _snapshot, assigned elsewhere — verify usage.
        private string _blobVersionId;

        /// <summary>
        /// The Storage account name corresponding to the blob client.
        /// </summary>
        private string _accountName;

        /// <summary>
        /// Gets the Storage account name corresponding to the blob client.
        /// </summary>
        public virtual string AccountName
        {
            get
            {
                // Lazily parsed from the URI on first access.
                SetNameFieldsIfNull();
                return _accountName;
            }
        }

        /// <summary>
        /// The container name corresponding to the blob client.
        /// </summary>
        private string _containerName;

        /// <summary>
        /// Gets the container name corresponding to the blob client.
        /// </summary>
        public virtual string BlobContainerName
        {
            get
            {
                SetNameFieldsIfNull();
                return _containerName;
            }
        }

        /// <summary>
        /// The name of the blob.
        /// </summary>
        private string _name;

        /// <summary>
        /// Gets the name of the blob.
        /// </summary>
        public virtual string Name
        {
            get
            {
                SetNameFieldsIfNull();
                return _name;
            }
        }

        /// <summary>
        /// Determines whether the client is able to generate a SAS.
        /// If the client is authenticated with a <see cref="StorageSharedKeyCredential"/>.
        /// </summary>
        public virtual bool CanGenerateSasUri => ClientConfiguration.SharedKeyCredential != null;

        /// <summary>
        /// BlobRestClient.
        /// </summary>
        private readonly BlobRestClient _blobRestClient;

        /// <summary>
        /// BlobRestClient.
        /// </summary>
        internal virtual BlobRestClient BlobRestClient => _blobRestClient;
#region ctors
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class. Parameterless and protected: leaves all fields unassigned,
        /// which allows the type to be mocked in tests.
        /// </summary>
        protected BlobBaseClient()
        {
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class. Delegates to the four-argument overload with default options.
        /// </summary>
        /// <param name="connectionString">
        /// A connection string includes the authentication information
        /// required for your application to access data in an Azure Storage
        /// account at runtime.
        ///
        /// For more information,
        /// <see href="https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string">
        /// Configure Azure Storage connection strings</see>
        /// </param>
        /// <param name="blobContainerName">
        /// The name of the container containing this blob.
        /// </param>
        /// <param name="blobName">
        /// The name of this blob.
        /// </param>
        public BlobBaseClient(string connectionString, string blobContainerName, string blobName)
            : this(connectionString, blobContainerName, blobName, null)
        {
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class.
        /// </summary>
        /// <param name="connectionString">
        /// A connection string includes the authentication information
        /// required for your application to access data in an Azure Storage
        /// account at runtime.
        ///
        /// For more information,
        /// <see href="https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string">
        /// Configure Azure Storage connection strings</see>
        /// </param>
        /// <param name="blobContainerName">
        /// The name of the container containing this blob.
        /// </param>
        /// <param name="blobName">
        /// The name of this blob.
        /// </param>
        /// <param name="options">
        /// Optional client options that define the transport pipeline
        /// policies for authentication, retries, etc., that are applied to
        /// every request.
        /// </param>
        public BlobBaseClient(string connectionString, string blobContainerName, string blobName, BlobClientOptions options)
        {
            options ??= new BlobClientOptions();
            var conn = StorageConnectionString.Parse(connectionString);
            // Compose the blob URI from the account endpoint plus container/blob names.
            var builder =
                new BlobUriBuilder(conn.BlobEndpoint)
                {
                    BlobContainerName = blobContainerName,
                    BlobName = blobName
                };
            _uri = builder.ToUri();
            _clientConfiguration = new BlobClientConfiguration(
                pipeline: options.Build(conn.Credentials),
                // Null when the connection string does not carry a shared key (e.g. SAS).
                sharedKeyCredential: conn.Credentials as StorageSharedKeyCredential,
                clientDiagnostics: new StorageClientDiagnostics(options),
                version: options.Version,
                customerProvidedKey: options.CustomerProvidedKey,
                encryptionScope: options.EncryptionScope);
            // Clone so later mutation of the caller's options does not affect this client.
            _clientSideEncryption = options._clientSideEncryptionOptions?.Clone();
            _blobRestClient = BuildBlobRestClient(_uri);
            // Fail fast on invalid customer-provided-key / encryption-scope combinations.
            BlobErrors.VerifyHttpsCustomerProvidedKey(_uri, _clientConfiguration.CustomerProvidedKey);
            BlobErrors.VerifyCpkAndEncryptionScopeNotBothSet(_clientConfiguration.CustomerProvidedKey, _clientConfiguration.EncryptionScope);
        }
/// <summary>
/// Initializes a new instance of the <see cref="BlobBaseClient"/>
/// class.
/// </summary>
/// <param name="blobUri">
/// A <see cref="Uri"/> referencing the blob that includes the
/// name of the account, the name of the container, and the name of
/// the blob.
/// This is likely to be similar to "https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}".
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline
/// policies for authentication, retries, etc., that are applied to
/// every request.
/// </param>
public BlobBaseClient(Uri blobUri, BlobClientOptions options = default)
: this(blobUri, (HttpPipelinePolicy)null, options, null)
{
_blobRestClient = BuildBlobRestClient(blobUri);
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobBaseClient"/>
/// class.
/// </summary>
/// <param name="blobUri">
/// A <see cref="Uri"/> referencing the blob that includes the
/// name of the account, the name of the container, and the name of
/// the blob.
/// This is likely to be similar to "https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}".
/// </param>
/// <param name="credential">
/// The shared key credential used to sign requests.
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline
/// policies for authentication, retries, etc., that are applied to
/// every request.
/// </param>
public BlobBaseClient(Uri blobUri, StorageSharedKeyCredential credential, BlobClientOptions options = default)
: this(blobUri, credential.AsPolicy(), options, credential)
{
_blobRestClient = BuildBlobRestClient(blobUri);
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobBaseClient"/>
/// class.
/// </summary>
/// <param name="blobUri">
/// A <see cref="Uri"/> referencing the blob that includes the
/// name of the account, the name of the container, and the name of
/// the blob.
/// This is likely to be similar to "https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}".
/// Must not contain shared access signature, which should be passed in the second parameter.
/// </param>
/// <param name="credential">
/// The shared access signature credential used to sign requests.
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline
/// policies for authentication, retries, etc., that are applied to
/// every request.
/// </param>
/// <remarks>
/// This constructor should only be used when shared access signature needs to be updated during lifespan of this client.
/// </remarks>
public BlobBaseClient(Uri blobUri, AzureSasCredential credential, BlobClientOptions options = default)
: this(blobUri, credential.AsPolicy<BlobUriBuilder>(blobUri), options, null)
{
_blobRestClient = BuildBlobRestClient(blobUri);
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobBaseClient"/>
/// class.
/// </summary>
/// <param name="blobUri">
/// A <see cref="Uri"/> referencing the blob that includes the
/// name of the account, the name of the container, and the name of
/// the blob.
/// This is likely to be similar to "https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}".
/// </param>
/// <param name="credential">
/// The token credential used to sign requests.
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline
/// policies for authentication, retries, etc., that are applied to
/// every request.
/// </param>
public BlobBaseClient(Uri blobUri, TokenCredential credential, BlobClientOptions options = default)
: this(blobUri, credential.AsPolicy(), options, null)
{
_blobRestClient = BuildBlobRestClient(blobUri);
Errors.VerifyHttpsTokenAuth(blobUri);
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobBaseClient"/>
/// class.
/// </summary>
/// <param name="blobUri">
/// A <see cref="Uri"/> referencing the blob that includes the
/// name of the account, the name of the container, and the name of
/// the blob.
/// This is likely to be similar to "https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}".
/// </param>
/// <param name="authentication">
/// An optional authentication policy used to sign requests.
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline
/// policies for authentication, retries, etc., that are applied to
/// every request.
/// </param>
/// <param name="storageSharedKeyCredential">
/// The shared key credential used to sign requests.
/// </param>
internal BlobBaseClient(
Uri blobUri,
HttpPipelinePolicy authentication,
BlobClientOptions options,
StorageSharedKeyCredential storageSharedKeyCredential)
{
Argument.AssertNotNull(blobUri, nameof(blobUri));
options ??= new BlobClientOptions();
_uri = blobUri;
if (!string.IsNullOrEmpty(blobUri.Query))
{
UriQueryParamsCollection queryParamsCollection = new UriQueryParamsCollection(blobUri.Query);
if (queryParamsCollection.ContainsKey(Constants.SnapshotParameterName))
{
_snapshot = System.Web.HttpUtility.ParseQueryString(blobUri.Query).Get(Constants.SnapshotParameterName);
}
if (queryParamsCollection.ContainsKey(Constants.VersionIdParameterName))
{
_blobVersionId = System.Web.HttpUtility.ParseQueryString(blobUri.Query).Get(Constants.VersionIdParameterName);
}
}
_clientConfiguration = new BlobClientConfiguration(
pipeline: options.Build(authentication),
sharedKeyCredential: storageSharedKeyCredential,
clientDiagnostics: new StorageClientDiagnostics(options),
version: options.Version,
customerProvidedKey: options.CustomerProvidedKey,
encryptionScope: options.EncryptionScope);
_clientSideEncryption = options._clientSideEncryptionOptions?.Clone();
_blobRestClient = BuildBlobRestClient(blobUri);
BlobErrors.VerifyHttpsCustomerProvidedKey(_uri, _clientConfiguration.CustomerProvidedKey);
BlobErrors.VerifyCpkAndEncryptionScopeNotBothSet(_clientConfiguration.CustomerProvidedKey, _clientConfiguration.EncryptionScope);
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobBaseClient"/>
/// class.
/// </summary>
/// <param name="blobUri">
/// A <see cref="Uri"/> referencing the blob that includes the
/// name of the account, the name of the container, and the name of
/// the blob.
/// This is likely to be similar to "https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}".
/// </param>
/// <param name="clientConfiguration">
/// <see cref="BlobClientConfiguration"/>.
/// </param>
/// <param name="clientSideEncryption">
/// Client-side encryption options.
/// </param>
internal BlobBaseClient(
Uri blobUri,
BlobClientConfiguration clientConfiguration,
ClientSideEncryptionOptions clientSideEncryption)
{
_uri = blobUri;
if (!string.IsNullOrEmpty(blobUri.Query))
{
UriQueryParamsCollection queryParamsCollection = new UriQueryParamsCollection(blobUri.Query);
if (queryParamsCollection.ContainsKey(Constants.SnapshotParameterName))
{
_snapshot = System.Web.HttpUtility.ParseQueryString(blobUri.Query).Get(Constants.SnapshotParameterName);
}
if (queryParamsCollection.ContainsKey(Constants.VersionIdParameterName))
{
_blobVersionId = System.Web.HttpUtility.ParseQueryString(blobUri.Query).Get(Constants.VersionIdParameterName);
}
}
_clientConfiguration = clientConfiguration;
_clientSideEncryption = clientSideEncryption?.Clone();
_blobRestClient = BuildBlobRestClient(blobUri);
BlobErrors.VerifyHttpsCustomerProvidedKey(_uri, _clientConfiguration.CustomerProvidedKey);
BlobErrors.VerifyCpkAndEncryptionScopeNotBothSet(_clientConfiguration.CustomerProvidedKey, _clientConfiguration.EncryptionScope);
}
private BlobRestClient BuildBlobRestClient(Uri blobUri)
{
return new BlobRestClient(
clientDiagnostics: _clientConfiguration.ClientDiagnostics,
pipeline: _clientConfiguration.Pipeline,
url: blobUri.AbsoluteUri,
version: _clientConfiguration.Version.ToVersionString());
}
#endregion ctors
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class with an identical <see cref="Uri"/> source but the specified
        /// <paramref name="snapshot"/> timestamp.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/creating-a-snapshot-of-a-blob">
        /// Create a snapshot of a blob</see>.
        /// </summary>
        /// <param name="snapshot">The snapshot identifier.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        /// <remarks>
        /// Pass null or empty string to remove the snapshot returning a URL
        /// to the base blob.
        /// Delegates to the protected <see cref="WithSnapshotCore"/> so
        /// derived types can customize the behavior.
        /// </remarks>
        public virtual BlobBaseClient WithSnapshot(string snapshot) => WithSnapshotCore(snapshot);
        /// <summary>
        /// Creates a new instance of the <see cref="BlobBaseClient"/> class
        /// with an identical <see cref="Uri"/> source but the specified
        /// <paramref name="snapshot"/> timestamp.
        /// </summary>
        /// <param name="snapshot">The snapshot identifier.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        protected virtual BlobBaseClient WithSnapshotCore(string snapshot)
        {
            // NOTE(review): this overwrites the *current* client's _snapshot
            // field in addition to returning a new client — confirm that the
            // side effect on this instance is intentional (WithVersionCore
            // has the same pattern).
            _snapshot = snapshot;
            var blobUriBuilder = new BlobUriBuilder(Uri)
            {
                Snapshot = snapshot
            };
            // The new client shares this client's configuration and
            // client-side encryption settings.
            return new BlobBaseClient(
                blobUriBuilder.ToUri(),
                ClientConfiguration,
                ClientSideEncryption);
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class with an identical <see cref="Uri"/> source but the specified
        /// <paramref name="versionId"/> timestamp.
        ///
        /// </summary>
        /// <param name="versionId">The version identifier.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        /// <remarks>
        /// Pass null or empty string to remove the version returning a URL
        /// to the base blob.
        /// Delegates to <see cref="WithVersionCore"/> so derived types can
        /// customize the behavior.
        /// </remarks>
        public virtual BlobBaseClient WithVersion(string versionId) => WithVersionCore(versionId);
        /// <summary>
        /// Creates a new instance of the <see cref="BlobBaseClient"/> class
        /// with an identical <see cref="Uri"/> source but the specified
        /// <paramref name="versionId"/> timestamp.
        /// </summary>
        /// <param name="versionId">The version identifier.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        private protected virtual BlobBaseClient WithVersionCore(string versionId)
        {
            // NOTE(review): this overwrites the *current* client's
            // _blobVersionId field in addition to returning a new client —
            // confirm that the side effect on this instance is intentional
            // (WithSnapshotCore has the same pattern).
            _blobVersionId = versionId;
            BlobUriBuilder blobUriBuilder = new BlobUriBuilder(Uri)
            {
                VersionId = versionId
            };
            // The new client shares this client's configuration and
            // client-side encryption settings.
            return new BlobBaseClient(
                blobUriBuilder.ToUri(),
                ClientConfiguration,
                ClientSideEncryption);
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class with an identical <see cref="Uri"/> source but the specified
        /// <paramref name="customerProvidedKey"/>.
        ///
        /// </summary>
        /// <param name="customerProvidedKey">The customer provided key.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        /// <remarks>
        /// Pass null to remove the customer provide key in the returned <see cref="BlobBaseClient"/>.
        /// Delegates to <see cref="WithCustomerProvidedKeyCore"/> so derived
        /// types can customize the behavior.
        /// </remarks>
        public virtual BlobBaseClient WithCustomerProvidedKey(CustomerProvidedKey? customerProvidedKey) => WithCustomerProvidedKeyCore(customerProvidedKey);
private protected virtual BlobBaseClient WithCustomerProvidedKeyCore(CustomerProvidedKey? customerProvidedKey)
{
BlobClientConfiguration newClientConfiguration = BlobClientConfiguration.DeepCopy(ClientConfiguration);
newClientConfiguration.CustomerProvidedKey = customerProvidedKey;
return new BlobBaseClient(
blobUri: Uri,
clientConfiguration: newClientConfiguration,
clientSideEncryption: ClientSideEncryption?.Clone());
}
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobBaseClient"/>
        /// class with an identical <see cref="Uri"/> source but the specified
        /// <paramref name="encryptionScope"/>.
        ///
        /// </summary>
        /// <param name="encryptionScope">The encryption scope.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        /// <remarks>
        /// Pass null to remove the encryption scope in the returned <see cref="BlobBaseClient"/>.
        /// Delegates to <see cref="WithEncryptionScopeCore"/> so derived
        /// types can customize the behavior.
        /// </remarks>
        public virtual BlobBaseClient WithEncryptionScope(string encryptionScope) => WithEncryptionScopeCore(encryptionScope);
private protected virtual BlobBaseClient WithEncryptionScopeCore(string encryptionScope)
{
BlobClientConfiguration newClientConfiguration = BlobClientConfiguration.DeepCopy(ClientConfiguration);
newClientConfiguration.EncryptionScope = encryptionScope;
return new BlobBaseClient(
blobUri: Uri,
clientConfiguration: newClientConfiguration,
clientSideEncryption: ClientSideEncryption?.Clone());
}
        /// <summary>
        /// Initializes a new instance of the <see cref="BlobLeaseClient"/> class.
        /// </summary>
        /// <param name="leaseId">
        /// An optional lease ID. If no lease ID is provided, a random lease
        /// ID will be created.
        /// </param>
        /// <returns>
        /// A new <see cref="BlobLeaseClient"/> bound to this blob client.
        /// </returns>
        protected internal virtual BlobLeaseClient GetBlobLeaseClientCore(string leaseId) =>
            new BlobLeaseClient(this, leaseId);
/// <summary>
/// Sets the various name fields if they are currently null.
/// </summary>
private void SetNameFieldsIfNull()
{
if (_name == null || _containerName == null || _accountName == null)
{
var builder = new BlobUriBuilder(Uri);
_name = builder.BlobName;
_containerName = builder.BlobContainerName;
_accountName = builder.AccountName;
}
}
///// <summary>
///// Creates a clone of this instance that references a version ID rather than the base blob.
///// </summary>
///// /// <remarks>
///// Pass null or empty string to remove the version ID returning a URL to the base blob.
///// </remarks>
///// <param name="versionId">The version ID to use on this blob. An empty string or null indicates to use the base blob.</param>
///// <returns>The new <see cref="BlobBaseClient"/> instance referencing the versionId.</returns>
//public virtual BlobBaseClient WithVersionId(string versionId) => this.WithVersionIdImpl(versionId);
//protected virtual BlobBaseClient WithVersionIdImpl(string versionId)
//{
// var builder = new BlobUriBuilder(this.Uri)
// {
// VersionId = versionId
// };
// return new BlobUri(builder.ToUri(), this.Pipeline);
//}
#region Download
        /// <summary>
        /// The <see cref="Download()"/> operation downloads a blob from
        /// the service, including its metadata and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <returns>
        /// A <see cref="Response{BlobDownloadInfo}"/> describing the
        /// downloaded blob. <see cref="BlobDownloadInfo.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// This overload is hidden from editor completion in favor of the
        /// parameterized overloads.
        /// </remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public virtual Response<BlobDownloadInfo> Download() =>
            Download(CancellationToken.None);
        /// <summary>
        /// The <see cref="DownloadAsync()"/> operation downloads a blob from
        /// the service, including its metadata and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <returns>
        /// A <see cref="Response{BlobDownloadInfo}"/> describing the
        /// downloaded blob. <see cref="BlobDownloadInfo.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// This overload is hidden from editor completion in favor of the
        /// parameterized overloads.
        /// </remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public virtual async Task<Response<BlobDownloadInfo>> DownloadAsync() =>
            await DownloadAsync(CancellationToken.None).ConfigureAwait(false);
        /// <summary>
        /// The <see cref="Download(CancellationToken)"/> operation downloads
        /// a blob from the service, including its metadata and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobDownloadInfo}"/> describing the
        /// downloaded blob. <see cref="BlobDownloadInfo.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public virtual Response<BlobDownloadInfo> Download(
            CancellationToken cancellationToken = default) =>
            Download(
                conditions: default, // Naming a different parameter disambiguates the call so it doesn't recurse on this overload
                cancellationToken: cancellationToken);
        /// <summary>
        /// The <see cref="DownloadAsync(CancellationToken)"/> operation
        /// downloads a blob from the service, including its metadata and
        /// properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobDownloadInfo}"/> describing the
        /// downloaded blob. <see cref="BlobDownloadInfo.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public virtual async Task<Response<BlobDownloadInfo>> DownloadAsync(
            CancellationToken cancellationToken) =>
            await DownloadAsync(
                conditions: default, // Naming a different parameter disambiguates the call so it doesn't recurse on this overload
                cancellationToken: cancellationToken)
                .ConfigureAwait(false);
        /// <summary>
        /// The <see cref="Download(HttpRange, BlobRequestConditions, bool, CancellationToken)"/>
        /// operation downloads a blob from the service, including its metadata
        /// and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <param name="range">
        /// If provided, only download the bytes of the blob in the specified
        /// range.  If not provided, download the entire blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// downloading this blob.
        /// </param>
        /// <param name="rangeGetContentHash">
        /// When set to true and specified together with the <paramref name="range"/>,
        /// the service returns the MD5 hash for the range, as long as the
        /// range is less than or equal to 4 MB in size.  If this value is
        /// specified without <paramref name="range"/> or set to true when the
        /// range exceeds 4 MB in size, a <see cref="RequestFailedException"/>
        /// is thrown.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobDownloadInfo}"/> describing the
        /// downloaded blob.  <see cref="BlobDownloadInfo.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public virtual Response<BlobDownloadInfo> Download(
            HttpRange range = default,
            BlobRequestConditions conditions = default,
            bool rangeGetContentHash = default,
            CancellationToken cancellationToken = default) =>
            // Synchronously drive the shared async implementation.
            DownloadInternal(
                range,
                conditions,
                rangeGetContentHash,
                false, // async
                cancellationToken)
            .EnsureCompleted();
        /// <summary>
        /// The <see cref="DownloadAsync(HttpRange, BlobRequestConditions, bool, CancellationToken)"/>
        /// operation downloads a blob from the service, including its metadata
        /// and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <param name="range">
        /// If provided, only download the bytes of the blob in the specified
        /// range.  If not provided, download the entire blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// downloading this blob.
        /// </param>
        /// <param name="rangeGetContentHash">
        /// When set to true and specified together with the <paramref name="range"/>,
        /// the service returns the MD5 hash for the range, as long as the
        /// range is less than or equal to 4 MB in size.  If this value is
        /// specified without <paramref name="range"/> or set to true when the
        /// range exceeds 4 MB in size, a <see cref="RequestFailedException"/>
        /// is thrown.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobDownloadInfo}"/> describing the
        /// downloaded blob.  <see cref="BlobDownloadInfo.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public virtual async Task<Response<BlobDownloadInfo>> DownloadAsync(
            HttpRange range = default,
            BlobRequestConditions conditions = default,
            bool rangeGetContentHash = default,
            CancellationToken cancellationToken = default) =>
            // Asynchronously drive the shared implementation.
            await DownloadInternal(
                range,
                conditions,
                rangeGetContentHash,
                true, // async
                cancellationToken)
            .ConfigureAwait(false);
/// <summary>
/// The <see cref="DownloadInternal"/> operation downloads a blob
/// from the service, including its metadata and properties.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <param name="range">
/// If provided, only download the bytes of the blob in the specified
/// range. If not provided, download the entire blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// downloading this blob.
/// </param>
/// <param name="rangeGetContentHash">
/// When set to true and specified together with the <paramref name="range"/>,
/// the service returns the MD5 hash for the range, as long as the
/// range is less than or equal to 4 MB in size. If this value is
/// specified without <paramref name="range"/> or set to true when the
/// range exceeds 4 MB in size, a <see cref="RequestFailedException"/>
/// is thrown.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadInfo}"/> describing the
/// downloaded blob. <see cref="BlobDownloadInfo.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
private async Task<Response<BlobDownloadInfo>> DownloadInternal(
HttpRange range,
BlobRequestConditions conditions,
bool rangeGetContentHash,
bool async,
CancellationToken cancellationToken)
{
Response<BlobDownloadStreamingResult> response = await DownloadStreamingInternal(
range,
conditions,
rangeGetContentHash,
$"{nameof(BlobBaseClient)}.{nameof(Download)}",
async,
cancellationToken).ConfigureAwait(false);
BlobDownloadStreamingResult blobDownloadStreamingResult = response.Value;
BlobDownloadDetails blobDownloadDetails = blobDownloadStreamingResult.Details;
return Response.FromValue(
new BlobDownloadInfo()
{
Content = blobDownloadStreamingResult.Content,
Details = blobDownloadDetails,
BlobType = blobDownloadDetails.BlobType,
ContentHash = blobDownloadDetails.ContentHash,
ContentLength = blobDownloadDetails.ContentLength,
ContentType = blobDownloadDetails.ContentType,
}, response.GetRawResponse());
}
#endregion
#region DownloadStreaming
        /// <summary>
        /// The <see cref="DownloadStreaming(HttpRange, BlobRequestConditions, bool, CancellationToken)"/>
        /// operation downloads a blob from the service, including its metadata
        /// and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <param name="range">
        /// If provided, only download the bytes of the blob in the specified
        /// range.  If not provided, download the entire blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// downloading this blob.
        /// </param>
        /// <param name="rangeGetContentHash">
        /// When set to true and specified together with the <paramref name="range"/>,
        /// the service returns the MD5 hash for the range, as long as the
        /// range is less than or equal to 4 MB in size.  If this value is
        /// specified without <paramref name="range"/> or set to true when the
        /// range exceeds 4 MB in size, a <see cref="RequestFailedException"/>
        /// is thrown.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobDownloadStreamingResult}"/> describing the
        /// downloaded blob.  <see cref="BlobDownloadStreamingResult.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// The returned content stream is wrapped internally so that reads
        /// can be retried if the connection fails mid-download.
        /// </remarks>
        public virtual Response<BlobDownloadStreamingResult> DownloadStreaming(
            HttpRange range = default,
            BlobRequestConditions conditions = default,
            bool rangeGetContentHash = default,
            CancellationToken cancellationToken = default) =>
            DownloadStreamingInternal(
                range,
                conditions,
                rangeGetContentHash,
                $"{nameof(BlobBaseClient)}.{nameof(DownloadStreaming)}",
                false, // async
                cancellationToken)
            .EnsureCompleted();
        /// <summary>
        /// The <see cref="DownloadStreamingAsync(HttpRange, BlobRequestConditions, bool, CancellationToken)"/>
        /// operation downloads a blob from the service, including its metadata
        /// and properties.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
        /// Get Blob</see>.
        /// </summary>
        /// <param name="range">
        /// If provided, only download the bytes of the blob in the specified
        /// range.  If not provided, download the entire blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// downloading this blob.
        /// </param>
        /// <param name="rangeGetContentHash">
        /// When set to true and specified together with the <paramref name="range"/>,
        /// the service returns the MD5 hash for the range, as long as the
        /// range is less than or equal to 4 MB in size.  If this value is
        /// specified without <paramref name="range"/> or set to true when the
        /// range exceeds 4 MB in size, a <see cref="RequestFailedException"/>
        /// is thrown.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobDownloadStreamingResult}"/> describing the
        /// downloaded blob.  <see cref="BlobDownloadStreamingResult.Content"/> contains
        /// the blob's data.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// The returned content stream is wrapped internally so that reads
        /// can be retried if the connection fails mid-download.
        /// </remarks>
        public virtual async Task<Response<BlobDownloadStreamingResult>> DownloadStreamingAsync(
            HttpRange range = default,
            BlobRequestConditions conditions = default,
            bool rangeGetContentHash = default,
            CancellationToken cancellationToken = default) =>
            await DownloadStreamingInternal(
                range,
                conditions,
                rangeGetContentHash,
                $"{nameof(BlobBaseClient)}.{nameof(DownloadStreaming)}", // shared diagnostics scope name for sync and async
                true, // async
                cancellationToken)
            .ConfigureAwait(false);
        /// <summary>
        /// Shared sync/async implementation for the streaming download
        /// operations: issues the initial Get Blob request, wraps the
        /// response content in a retriable stream, and (when client-side
        /// encryption is configured) in a decrypting stream.
        /// </summary>
        /// <param name="range">Byte range to download; default downloads the whole blob.</param>
        /// <param name="conditions">Optional access conditions for the download.</param>
        /// <param name="rangeGetContentHash">Whether to request an MD5 hash for the range.</param>
        /// <param name="operationName">Diagnostics scope name reported for this operation.</param>
        /// <param name="async">Whether to invoke the operation asynchronously.</param>
        /// <param name="cancellationToken">Token to cancel the operation.</param>
        /// <returns>The streaming download response.</returns>
        private async Task<Response<BlobDownloadStreamingResult>> DownloadStreamingInternal(
            HttpRange range,
            BlobRequestConditions conditions,
            bool rangeGetContentHash,
            string operationName,
            bool async,
            CancellationToken cancellationToken)
        {
            // Remember the caller's original range; client-side encryption may
            // adjust the range actually requested from the service below.
            HttpRange requestedRange = range;
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(nameof(BlobBaseClient), message: $"{nameof(Uri)}: {Uri}");
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope(operationName);
                try
                {
                    scope.Start();
                    if (UsingClientSideEncryption)
                    {
                        // Widen the range for encrypted blobs (presumably to
                        // cover whole cipher blocks so the payload can be
                        // decrypted — confirm against BlobClientSideDecryptor).
                        range = BlobClientSideDecryptor.GetEncryptedBlobRange(range);
                    }
                    // Start downloading the blob
                    Response<BlobDownloadStreamingResult> response = await StartDownloadAsync(
                        range,
                        conditions,
                        rangeGetContentHash,
                        async: async,
                        cancellationToken: cancellationToken)
                        .ConfigureAwait(false);
                    // Return an exploding Response on 304
                    if (response.IsUnavailable())
                    {
                        return response.GetRawResponse().AsNoBodyResponse<BlobDownloadStreamingResult>();
                    }
                    // Pin retries to the ETag observed on the first response so a
                    // concurrent overwrite fails the download instead of silently
                    // mixing data from different blob versions.
                    ETag etag = response.Value.Details.ETag;
                    BlobRequestConditions conditionsWithEtag = conditions?.WithIfMatch(etag) ?? new BlobRequestConditions { IfMatch = etag };
                    // Wrap the response Content in a RetriableStream so we
                    // can return it before it's finished downloading, but still
                    // allow retrying if it fails.
                    Stream stream = RetriableStream.Create(
                        response.Value.Content,
                        startOffset =>
                            StartDownloadAsync(
                                range,
                                conditionsWithEtag,
                                rangeGetContentHash,
                                startOffset,
                                async,
                                cancellationToken)
                            .EnsureCompleted()
                            .Value.Content,
                        async startOffset =>
                            (await StartDownloadAsync(
                                range,
                                conditionsWithEtag,
                                rangeGetContentHash,
                                startOffset,
                                async,
                                cancellationToken)
                            .ConfigureAwait(false))
                            .Value.Content,
                        ClientConfiguration.Pipeline.ResponseClassifier,
                        Constants.MaxReliabilityRetries);
                    // if using clientside encryption, wrap the auto-retry stream in a decryptor
                    // we already return a nonseekable stream; returning a crypto stream is fine
                    if (UsingClientSideEncryption)
                    {
                        stream = await new BlobClientSideDecryptor(
                            new ClientSideDecryptor(ClientSideEncryption)).DecryptInternal(
                                stream,
                                response.Value.Details.Metadata,
                                requestedRange,
                                response.Value.Details.ContentRange,
                                async,
                                cancellationToken).ConfigureAwait(false);
                    }
                    // Hand the (possibly wrapped) stream back on the original response.
                    response.Value.Content = stream;
                    return response;
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
/// <summary>
/// The <see cref="StartDownloadAsync"/> operation starts downloading
/// a blob from the service from a given <paramref name="startOffset"/>.
/// </summary>
/// <param name="range">
/// If provided, only download the bytes of the blob in the specified
/// range. If not provided, download the entire blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// downloading this blob.
/// </param>
/// <param name="rangeGetContentHash">
/// When set to true and specified together with the <paramref name="range"/>,
/// the service returns the MD5 hash for the range, as long as the
/// range is less than or equal to 4 MB in size. If this value is
/// specified without <paramref name="range"/> or set to true when the
/// range exceeds 4 MB in size, a <see cref="RequestFailedException"/>
/// is thrown.
/// </param>
/// <param name="startOffset">
/// Starting offset to request - in the event of a retry.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadInfo}"/> describing the
/// downloaded blob. <see cref="BlobDownloadInfo.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
private async Task<Response<BlobDownloadStreamingResult>> StartDownloadAsync(
    HttpRange range = default,
    BlobRequestConditions conditions = default,
    bool rangeGetContentHash = default,
    long startOffset = 0,
    bool async = true,
    CancellationToken cancellationToken = default)
{
    // On a mid-stream retry, shift the requested range forward by the bytes
    // already delivered and shrink any explicit length accordingly, so the
    // service resumes where the previous attempt stopped.
    HttpRange? pageRange = null;
    if (range != default || startOffset != 0)
    {
        pageRange = new HttpRange(
            range.Offset + startOffset,
            range.Length.HasValue ?
                range.Length.Value - startOffset :
                (long?)null);
    }
    ClientConfiguration.Pipeline.LogTrace($"Download {Uri} with range: {pageRange}");
    ResponseWithHeaders<Stream, BlobDownloadHeaders> response;
    // Same Download request either way; only the generated sync vs. async
    // REST client entry point differs.  The argument lists must stay in sync.
    if (async)
    {
        response = await BlobRestClient.DownloadAsync(
            range: pageRange?.ToString(),
            leaseId: conditions?.LeaseId,
            // rangeGetContentMD5 may only be sent as true; omit otherwise.
            rangeGetContentMD5: rangeGetContentHash ? (bool?)true : null,
            encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
            encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
            encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
            ifModifiedSince: conditions?.IfModifiedSince,
            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
            ifMatch: conditions?.IfMatch?.ToString(),
            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
            ifTags: conditions?.TagConditions,
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }
    else
    {
        response = BlobRestClient.Download(
            range: pageRange?.ToString(),
            leaseId: conditions?.LeaseId,
            rangeGetContentMD5: rangeGetContentHash ? (bool?)true : null,
            encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
            encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
            encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
            ifModifiedSince: conditions?.IfModifiedSince,
            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
            ifMatch: conditions?.IfMatch?.ToString(),
            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
            ifTags: conditions?.TagConditions,
            cancellationToken: cancellationToken);
    }
    // Watch out for exploding Responses
    long length = response.IsUnavailable() ? 0 : response.Headers.ContentLength ?? 0;
    ClientConfiguration.Pipeline.LogTrace($"Response: {response.GetRawResponse().Status}, ContentLength: {length}");
    return Response.FromValue(
        response.ToBlobDownloadStreamingResult(),
        response.GetRawResponse());
}
#endregion
#region DownloadContent
/// <summary>
/// Downloads a blob from the service, including its metadata and
/// properties, and buffers the content in memory.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> describing the
/// downloaded blob. <see cref="BlobDownloadResult.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobDownloadResult> DownloadContent()
{
    // Delegate to the cancellable overload with no cancellation.
    return DownloadContent(CancellationToken.None);
}
/// <summary>
/// Downloads a blob from the service, including its metadata and
/// properties, and buffers the content in memory.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> describing the
/// downloaded blob. <see cref="BlobDownloadResult.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobDownloadResult>> DownloadContentAsync()
{
    // Delegate to the cancellable overload with no cancellation.
    return await DownloadContentAsync(CancellationToken.None).ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob from the service, including its metadata and
/// properties, and buffers the content in memory.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> describing the
/// downloaded blob. <see cref="BlobDownloadResult.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobDownloadResult> DownloadContent(
    CancellationToken cancellationToken = default)
{
    // Naming the conditions argument routes this call to the conditions
    // overload instead of recursing back into this one.
    return DownloadContent(
        conditions: default,
        cancellationToken: cancellationToken);
}
/// <summary>
/// Downloads a blob from the service, including its metadata and
/// properties, and buffers the content in memory.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> describing the
/// downloaded blob. <see cref="BlobDownloadResult.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobDownloadResult>> DownloadContentAsync(
    CancellationToken cancellationToken)
{
    // Naming the conditions argument routes this call to the conditions
    // overload instead of recursing back into this one.
    return await DownloadContentAsync(
        conditions: default,
        cancellationToken: cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob from the service, including its metadata and
/// properties, and buffers the content in memory.  The download only
/// proceeds if the supplied access conditions are met.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// downloading this blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> describing the
/// downloaded blob. <see cref="BlobDownloadResult.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobDownloadResult> DownloadContent(
    BlobRequestConditions conditions = default,
    CancellationToken cancellationToken = default)
{
    // Run the shared implementation synchronously.
    return DownloadContentInternal(
        conditions,
        async: false,
        cancellationToken)
        .EnsureCompleted();
}
/// <summary>
/// Downloads a blob from the service, including its metadata and
/// properties, and buffers the content in memory.  The download only
/// proceeds if the supplied access conditions are met.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob">
/// Get Blob</see>.
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// downloading this blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> describing the
/// downloaded blob. <see cref="BlobDownloadResult.Content"/> contains
/// the blob's data.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobDownloadResult>> DownloadContentAsync(
    BlobRequestConditions conditions = default,
    CancellationToken cancellationToken = default)
{
    // Run the shared implementation asynchronously.
    return await DownloadContentInternal(
        conditions,
        async: true,
        cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Shared sync/async implementation for DownloadContent: streams the
/// whole blob and buffers the stream into a <see cref="BinaryData"/>.
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// downloading this blob.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobDownloadResult}"/> with the buffered
/// content and the download details.
/// </returns>
private async Task<Response<BlobDownloadResult>> DownloadContentInternal(
    BlobRequestConditions conditions,
    bool async,
    CancellationToken cancellationToken)
{
    Response<BlobDownloadStreamingResult> streamingResponse = await DownloadStreamingInternal(
        range: default,
        conditions: conditions,
        rangeGetContentHash: default,
        operationName: $"{nameof(BlobBaseClient)}.{nameof(DownloadContent)}",
        async: async,
        cancellationToken: cancellationToken).ConfigureAwait(false);

    // Dispose the streaming result once its content has been buffered.
    using BlobDownloadStreamingResult streamingResult = streamingResponse.Value;

    BinaryData content = async
        ? await BinaryData.FromStreamAsync(streamingResult.Content, cancellationToken).ConfigureAwait(false)
        : BinaryData.FromStream(streamingResult.Content);

    BlobDownloadResult result = new BlobDownloadResult()
    {
        Content = content,
        Details = streamingResult.Details,
    };
    return Response.FromValue(result, streamingResponse.GetRawResponse());
}
#endregion
#region Parallel Download
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// <paramref name="destination"/>.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response DownloadTo(Stream destination)
{
    // Delegate to the cancellable overload with no cancellation.
    return DownloadTo(destination, CancellationToken.None);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// the file at <paramref name="path"/>.
/// </summary>
/// <param name="path">
/// A file path to write the downloaded content to.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response DownloadTo(string path)
{
    // Delegate to the cancellable overload with no cancellation.
    return DownloadTo(path, CancellationToken.None);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// <paramref name="destination"/>.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DownloadToAsync(Stream destination)
{
    // Delegate to the cancellable overload with no cancellation.
    return await DownloadToAsync(destination, CancellationToken.None).ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// the file at <paramref name="path"/>.
/// </summary>
/// <param name="path">
/// A file path to write the downloaded content to.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DownloadToAsync(string path)
{
    // Delegate to the cancellable overload with no cancellation.
    return await DownloadToAsync(path, CancellationToken.None).ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// <paramref name="destination"/>.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response DownloadTo(
    Stream destination,
    CancellationToken cancellationToken)
{
    // Naming the conditions argument routes this call to the conditions
    // overload instead of recursing back into this one.
    return DownloadTo(
        destination,
        conditions: default,
        cancellationToken: cancellationToken);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// the file at <paramref name="path"/>.
/// </summary>
/// <param name="path">
/// A file path to write the downloaded content to.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response DownloadTo(
    string path,
    CancellationToken cancellationToken)
{
    // Naming the conditions argument routes this call to the conditions
    // overload instead of recursing back into this one.
    return DownloadTo(
        path,
        conditions: default,
        cancellationToken: cancellationToken);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// <paramref name="destination"/>.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DownloadToAsync(
    Stream destination,
    CancellationToken cancellationToken)
{
    // Naming the conditions argument routes this call to the conditions
    // overload instead of recursing back into this one.
    return await DownloadToAsync(
        destination,
        conditions: default,
        cancellationToken: cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// the file at <paramref name="path"/>.
/// </summary>
/// <param name="path">
/// A file path to write the downloaded content to.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DownloadToAsync(
    string path,
    CancellationToken cancellationToken)
{
    // Naming the conditions argument routes this call to the conditions
    // overload instead of recursing back into this one.
    return await DownloadToAsync(
        path,
        conditions: default,
        cancellationToken: cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// <paramref name="destination"/>, honoring the supplied access
/// conditions and transfer options.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the creation of this new block blob.
/// </param>
/// <param name="transferOptions">
/// Optional <see cref="StorageTransferOptions"/> to configure
/// parallel transfer behavior.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response DownloadTo(
    Stream destination,
    BlobRequestConditions conditions = default,
    //IProgress<long> progressHandler = default, // TODO: #8506 - surface progress updates
    StorageTransferOptions transferOptions = default,
    CancellationToken cancellationToken = default)
{
    return StagedDownloadAsync(
        destination,
        conditions,
        transferOptions: transferOptions,
        async: false,
        cancellationToken: cancellationToken)
        .EnsureCompleted();
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// the file at <paramref name="path"/>, honoring the supplied access
/// conditions and transfer options.
/// </summary>
/// <param name="path">
/// A file path to write the downloaded content to.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the creation of this new block blob.
/// </param>
/// <param name="transferOptions">
/// Optional <see cref="StorageTransferOptions"/> to configure
/// parallel transfer behavior.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response DownloadTo(
    string path,
    BlobRequestConditions conditions = default,
    //IProgress<long> progressHandler = default, // TODO: #8506 - surface progress updates
    StorageTransferOptions transferOptions = default,
    CancellationToken cancellationToken = default)
{
    // Create (or overwrite) the destination file for the lifetime of the download.
    using (Stream destination = File.Create(path))
    {
        return StagedDownloadAsync(
            destination,
            conditions,
            transferOptions: transferOptions,
            async: false,
            cancellationToken: cancellationToken)
            .EnsureCompleted();
    }
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// <paramref name="destination"/>, honoring the supplied access
/// conditions and transfer options.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the creation of this new block blob.
/// </param>
/// <param name="transferOptions">
/// Optional <see cref="StorageTransferOptions"/> to configure
/// parallel transfer behavior.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DownloadToAsync(
    Stream destination,
    BlobRequestConditions conditions = default,
    //IProgress<long> progressHandler = default, // TODO: #8506 - surface progress updates
    StorageTransferOptions transferOptions = default,
    CancellationToken cancellationToken = default)
{
    return await StagedDownloadAsync(
        destination,
        conditions,
        transferOptions: transferOptions,
        async: true,
        cancellationToken: cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Downloads a blob using parallel requests and writes the content to
/// the file at <paramref name="path"/>, honoring the supplied access
/// conditions and transfer options.
/// </summary>
/// <param name="path">
/// A file path to write the downloaded content to.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the creation of this new block blob.
/// </param>
/// <param name="transferOptions">
/// Optional <see cref="StorageTransferOptions"/> to configure
/// parallel transfer behavior.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DownloadToAsync(
    string path,
    BlobRequestConditions conditions = default,
    //IProgress<long> progressHandler = default, // TODO: #8506 - surface progress updates
    StorageTransferOptions transferOptions = default,
    CancellationToken cancellationToken = default)
{
    // Create (or overwrite) the destination file for the lifetime of the download.
    using (Stream destination = File.Create(path))
    {
        return await StagedDownloadAsync(
            destination,
            conditions,
            transferOptions: transferOptions,
            async: true,
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }
}
/// <summary>
/// Downloads a blob of arbitrary size, fetching it as individually
/// staged partitions when it is larger than the
/// <paramref name="transferOptions"/> MaximumTransferLength.
/// </summary>
/// <param name="destination">
/// A <see cref="Stream"/> to write the downloaded content to.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the creation of this new block blob.
/// </param>
/// <param name="transferOptions">
/// Optional <see cref="StorageTransferOptions"/> to configure
/// parallel transfer behavior.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> describing the operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
internal async Task<Response> StagedDownloadAsync(
    Stream destination,
    BlobRequestConditions conditions = default,
    //IProgress<long> progressHandler, // TODO: #8506 - surface progress updates
    StorageTransferOptions transferOptions = default,
    bool async = true,
    CancellationToken cancellationToken = default)
{
    // The partitioned downloader decides whether to issue a single
    // request or split the blob into parallel range downloads.
    var downloader = new PartitionedDownloader(this, transferOptions);
    return async
        ? await downloader.DownloadToAsync(destination, conditions, cancellationToken).ConfigureAwait(false)
        : downloader.DownloadTo(destination, conditions, cancellationToken);
}
#endregion Parallel Download
#region OpenRead
/// <summary>
/// Opens a stream for reading from the blob.  The stream only downloads
/// the blob as the stream is read from.
/// </summary>
/// <param name="options">
/// Optional parameters.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
#pragma warning disable AZC0015 // Unexpected client method return type.
public virtual Stream OpenRead(
#pragma warning restore AZC0015 // Unexpected client method return type.
    BlobOpenReadOptions options,
    CancellationToken cancellationToken = default)
{
    // Fall back to defaults when no options were supplied.
    return OpenReadInternal(
        options?.Position ?? 0,
        options?.BufferSize,
        options?.Conditions,
        allowModifications: options?.AllowModifications ?? false,
        async: false,
        cancellationToken).EnsureCompleted();
}
/// <summary>
/// Opens a stream for reading from the blob.  The stream only downloads
/// the blob as the stream is read from.
/// </summary>
/// <param name="options">
/// Optional parameters.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
#pragma warning disable AZC0015 // Unexpected client method return type.
public virtual async Task<Stream> OpenReadAsync(
#pragma warning restore AZC0015 // Unexpected client method return type.
    BlobOpenReadOptions options,
    CancellationToken cancellationToken = default)
{
    // Fall back to defaults when no options were supplied.
    return await OpenReadInternal(
        options?.Position ?? 0,
        options?.BufferSize,
        options?.Conditions,
        allowModifications: options?.AllowModifications ?? false,
        async: true,
        cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Opens a stream for reading from the blob.  The stream only downloads
/// the blob as the stream is read from.
/// </summary>
/// <param name="position">
/// The position within the blob to begin the stream.
/// Defaults to the beginning of the blob.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when the stream downloads parts
/// of the blob. Defaults to 1 MB.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the download of the blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
#pragma warning disable AZC0015 // Unexpected client method return type.
public virtual Stream OpenRead(
#pragma warning restore AZC0015 // Unexpected client method return type.
    long position = 0,
    int? bufferSize = default,
    BlobRequestConditions conditions = default,
    CancellationToken cancellationToken = default)
{
    return OpenReadInternal(
        position,
        bufferSize,
        conditions,
        allowModifications: false,
        async: false,
        cancellationToken).EnsureCompleted();
}
/// <summary>
/// Opens a stream for reading from the blob.  The stream only downloads
/// the blob as the stream is read from.
/// </summary>
/// <param name="allowBlobModifications">
/// If true, you can continue streaming a blob even if it has been modified.
/// </param>
/// <param name="position">
/// The position within the blob to begin the stream.
/// Defaults to the beginning of the blob.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when the stream downloads parts
/// of the blob. Defaults to 1 MB.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
#pragma warning disable AZC0015 // Unexpected client method return type.
public virtual Stream OpenRead(
#pragma warning restore AZC0015 // Unexpected client method return type.
    bool allowBlobModifications,
    long position = 0,
    int? bufferSize = default,
    CancellationToken cancellationToken = default)
{
    // Translate the legacy bool into the conditions-based overload:
    // empty conditions when modifications are allowed, null otherwise.
    BlobRequestConditions conditions = allowBlobModifications ? new BlobRequestConditions() : null;
    return OpenRead(position, bufferSize, conditions, cancellationToken);
}
/// <summary>
/// Opens a stream for reading from the blob.  The stream only downloads
/// the blob as the stream is read from.
/// </summary>
/// <param name="position">
/// The position within the blob to begin the stream.
/// Defaults to the beginning of the blob.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when the stream downloads parts
/// of the blob. Defaults to 1 MB.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the download of the blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
#pragma warning disable AZC0015 // Unexpected client method return type.
public virtual async Task<Stream> OpenReadAsync(
#pragma warning restore AZC0015 // Unexpected client method return type.
    long position = 0,
    int? bufferSize = default,
    BlobRequestConditions conditions = default,
    CancellationToken cancellationToken = default)
{
    return await OpenReadInternal(
        position,
        bufferSize,
        conditions,
        allowModifications: false,
        async: true,
        cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Opens a stream for reading from the blob.  The stream only downloads
/// the blob as the stream is read from.
/// </summary>
/// <param name="allowBlobModifications">
/// If true, you can continue streaming a blob even if it has been modified.
/// </param>
/// <param name="position">
/// The position within the blob to begin the stream.
/// Defaults to the beginning of the blob.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when the stream downloads parts
/// of the blob. Defaults to 1 MB.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
#pragma warning disable AZC0015 // Unexpected client method return type.
public virtual async Task<Stream> OpenReadAsync(
#pragma warning restore AZC0015 // Unexpected client method return type.
    bool allowBlobModifications,
    long position = 0,
    int? bufferSize = default,
    CancellationToken cancellationToken = default)
{
    // Translate the legacy bool into the conditions-based overload:
    // empty conditions when modifications are allowed, null otherwise.
    BlobRequestConditions conditions = allowBlobModifications ? new BlobRequestConditions() : null;
    return await OpenReadAsync(position, bufferSize, conditions, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Opens a stream for reading from the blob. The stream will only download
/// the blob as the stream is read from.
/// </summary>
/// <param name="position">
/// The position within the blob to begin the stream.
/// Defaults to the beginning of the blob.
/// </param>
/// <param name="bufferSize">
/// The buffer size to use when the stream downloads parts
/// of the blob. Defaults to 1 MB.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the download of the blob.
/// </param>
/// <param name="allowModifications">
/// Whether to allow modifications during the read.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns a stream that will download the blob as the stream
/// is read from.
/// </returns>
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
internal async Task<Stream> OpenReadInternal(
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
long position,
int? bufferSize,
BlobRequestConditions conditions,
bool allowModifications,
#pragma warning disable CA1801
bool async,
CancellationToken cancellationToken)
#pragma warning restore CA1801
{
using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
{
ClientConfiguration.Pipeline.LogMethodEnter(
nameof(BlobBaseClient),
message:
$"{nameof(position)}: {position}\n" +
$"{nameof(bufferSize)}: {bufferSize}\n" +
$"{nameof(conditions)}: {conditions}");
string operationName = $"{nameof(BlobBaseClient)}.{nameof(OpenRead)}";
DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope(operationName);
try
{
scope.Start();
// This also makes sure that we fail fast if file doesn't exist.
var blobProperties = await GetPropertiesInternal(conditions: conditions, async, cancellationToken).ConfigureAwait(false);
var etag = blobProperties.Value.ETag;
var readConditions = conditions;
if (!allowModifications)
{
readConditions = readConditions?.WithIfMatch(etag) ?? new BlobRequestConditions { IfMatch = etag };
}
return new LazyLoadingReadOnlyStream<BlobProperties>(
async (HttpRange range,
bool rangeGetContentHash,
bool async,
CancellationToken cancellationToken) =>
{
Response<BlobDownloadStreamingResult> response = await DownloadStreamingInternal(
range,
readConditions,
rangeGetContentHash,
operationName,
async,
cancellationToken).ConfigureAwait(false);
return Response.FromValue(
(IDownloadedContent)response.Value,
response.GetRawResponse());
},
async (bool async, CancellationToken cancellationToken)
=> await GetPropertiesInternal(conditions: default, async, cancellationToken).ConfigureAwait(false),
allowModifications,
blobProperties.Value.ContentLength,
position,
bufferSize);
}
catch (Exception ex)
{
scope.Failed(ex);
ClientConfiguration.Pipeline.LogException(ex);
throw;
}
finally
{
scope.Dispose();
ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobContainerClient));
}
}
}
#endregion OpenRead
#region StartCopyFromUri
/// <summary>
/// The <see cref="StartCopyFromUri(Uri, BlobCopyFromUriOptions, CancellationToken)"/>
/// operation begins an asynchronous copy of the data from the <paramref name="source"/> to this blob.
/// You can check the <see cref="BlobProperties.CopyStatus"/>
/// returned from the <see cref="GetProperties"/> to determine if the
/// copy has completed.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob">
/// Copy Blob</see>.
/// </summary>
/// <param name="source">
/// Specifies the <see cref="Uri"/> of the source blob. The value may
/// be a <see cref="Uri" /> of up to 2 KB in length that specifies a
/// blob. A source blob in the same storage account can be
/// authenticated via Shared Key. However, if the source is a blob in
/// another account, the source blob must either be public or must be
/// authenticated via a shared access signature. If the source blob
/// is public, no authentication is required to perform the copy
/// operation.
///
/// The source object may be a file in the Azure File service. If the
/// source object is a file that is to be copied to a blob, then the
/// source file must be authenticated using a shared access signature,
/// whether it resides in the same account or in a different account.
/// </param>
/// <param name="options">
/// Optional parameters.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="CopyFromUriOperation"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual CopyFromUriOperation StartCopyFromUri(
Uri source,
BlobCopyFromUriOptions options,
CancellationToken cancellationToken = default)
{
Response<BlobCopyInfo> response = StartCopyFromUriInternal(
source,
options?.Metadata,
options?.Tags,
options?.AccessTier,
options?.SourceConditions,
options?.DestinationConditions,
options?.RehydratePriority,
options?.ShouldSealDestination,
async: false,
cancellationToken)
.EnsureCompleted();
return new CopyFromUriOperation(
this,
response.Value.CopyId,
response.GetRawResponse(),
cancellationToken);
}
/// <summary>
/// The <see cref="StartCopyFromUri(Uri, Metadata, AccessTier?, BlobRequestConditions, BlobRequestConditions, RehydratePriority?, CancellationToken)"/>
/// operation begins an asynchronous copy of the data from the <paramref name="source"/> to this blob.
/// You can check the <see cref="BlobProperties.CopyStatus"/>
/// returned from the <see cref="GetProperties"/> to determine if the
/// copy has completed.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob">
/// Copy Blob</see>.
/// </summary>
/// <param name="source">
/// Specifies the <see cref="Uri"/> of the source blob. The value may
/// be a <see cref="Uri" /> of up to 2 KB in length that specifies a
/// blob. A source blob in the same storage account can be
/// authenticated via Shared Key. However, if the source is a blob in
/// another account, the source blob must either be public or must be
/// authenticated via a shared access signature. If the source blob
/// is public, no authentication is required to perform the copy
/// operation.
///
/// The source object may be a file in the Azure File service. If the
/// source object is a file that is to be copied to a blob, then the
/// source file must be authenticated using a shared access signature,
/// whether it resides in the same account or in a different account.
/// </param>
/// <param name="metadata">
/// Optional custom metadata to set for this blob.
/// </param>
/// <param name="accessTier">
/// Optional <see cref="AccessTier"/>
/// Indicates the tier to be set on the blob.
/// </param>
/// <param name="sourceConditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on the copying of data from this source blob.
/// </param>
/// <param name="destinationConditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the copying of data to this blob.
/// </param>
/// <param name="rehydratePriority">
/// Optional <see cref="RehydratePriority"/>
/// Indicates the priority with which to rehydrate an archived blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="CopyFromUriOperation"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
[EditorBrowsable(EditorBrowsableState.Never)]
public virtual CopyFromUriOperation StartCopyFromUri(
Uri source,
Metadata metadata = default,
AccessTier? accessTier = default,
BlobRequestConditions sourceConditions = default,
BlobRequestConditions destinationConditions = default,
RehydratePriority? rehydratePriority = default,
CancellationToken cancellationToken = default)
{
Response<BlobCopyInfo> response = StartCopyFromUriInternal(
source,
metadata,
default,
accessTier,
sourceConditions,
destinationConditions,
rehydratePriority,
sealBlob: default,
async: false,
cancellationToken)
.EnsureCompleted();
return new CopyFromUriOperation(
this,
response.Value.CopyId,
response.GetRawResponse(),
cancellationToken);
}
/// <summary>
/// The <see cref="StartCopyFromUri(Uri, Metadata, AccessTier?, BlobRequestConditions, BlobRequestConditions, RehydratePriority?, CancellationToken)"/>
/// operation begins an asynchronous copy of the data from the <paramref name="source"/>
/// to this blob. You can check the <see cref="BlobProperties.CopyStatus"/>
/// returned from the <see cref="GetPropertiesAsync"/> to determine if
/// the copy has completed.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob">
/// Copy Blob</see>.
/// </summary>
/// <param name="source">
/// Specifies the <see cref="Uri"/> of the source blob. The value may
/// be a <see cref="Uri" /> of up to 2 KB in length that specifies a
/// blob. A source blob in the same storage account can be
/// authenticated via Shared Key. However, if the source is a blob in
/// another account, the source blob must either be public or must be
/// authenticated via a shared access signature. If the source blob
/// is public, no authentication is required to perform the copy
/// operation.
///
/// The source object may be a file in the Azure File service. If the
/// source object is a file that is to be copied to a blob, then the
/// source file must be authenticated using a shared access signature,
/// whether it resides in the same account or in a different account.
/// </param>
/// <param name="options">
/// Optional parameters.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="CopyFromUriOperation"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<CopyFromUriOperation> StartCopyFromUriAsync(
Uri source,
BlobCopyFromUriOptions options,
CancellationToken cancellationToken = default)
{
Response<BlobCopyInfo> response = await StartCopyFromUriInternal(
source,
options?.Metadata,
options?.Tags,
options?.AccessTier,
options?.SourceConditions,
options?.DestinationConditions,
options?.RehydratePriority,
options?.ShouldSealDestination,
async: true,
cancellationToken)
.ConfigureAwait(false);
return new CopyFromUriOperation(
this,
response.Value.CopyId,
response.GetRawResponse(),
cancellationToken);
}
/// <summary>
/// The <see cref="StartCopyFromUri(Uri, Metadata, AccessTier?, BlobRequestConditions, BlobRequestConditions, RehydratePriority?, CancellationToken)"/>
/// operation begins an asynchronous copy of the data from the <paramref name="source"/>
/// to this blob.You can check the <see cref="BlobProperties.CopyStatus"/>
/// returned from the <see cref="GetPropertiesAsync"/> to determine if
/// the copy has completed.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob">
/// Copy Blob</see>.
/// </summary>
/// <param name="source">
/// Specifies the <see cref="Uri"/> of the source blob. The value may
/// be a <see cref="Uri" /> of up to 2 KB in length that specifies a
/// blob. A source blob in the same storage account can be
/// authenticated via Shared Key. However, if the source is a blob in
/// another account, the source blob must either be public or must be
/// authenticated via a shared access signature. If the source blob
/// is public, no authentication is required to perform the copy
/// operation.
///
/// The source object may be a file in the Azure File service. If the
/// source object is a file that is to be copied to a blob, then the
/// source file must be authenticated using a shared access signature,
/// whether it resides in the same account or in a different account.
/// </param>
/// <param name="metadata">
/// Optional custom metadata to set for this blob.
/// </param>
/// <param name="accessTier">
/// Optional <see cref="AccessTier"/>
/// Indicates the tier to be set on the blob.
/// </param>
/// <param name="sourceConditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on the copying of data from this source blob.
/// </param>
/// <param name="destinationConditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the copying of data to this blob.
/// </param>
/// <param name="rehydratePriority">
/// Optional <see cref="RehydratePriority"/>
/// Indicates the priority with which to rehydrate an archived blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="CopyFromUriOperation"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
[EditorBrowsable(EditorBrowsableState.Never)]
public virtual async Task<CopyFromUriOperation> StartCopyFromUriAsync(
Uri source,
Metadata metadata = default,
AccessTier? accessTier = default,
BlobRequestConditions sourceConditions = default,
BlobRequestConditions destinationConditions = default,
RehydratePriority? rehydratePriority = default,
CancellationToken cancellationToken = default)
{
Response<BlobCopyInfo> response = await StartCopyFromUriInternal(
source,
metadata,
default,
accessTier,
sourceConditions,
destinationConditions,
rehydratePriority,
sealBlob: default,
async: true,
cancellationToken)
.ConfigureAwait(false);
return new CopyFromUriOperation(
this,
response.Value.CopyId,
response.GetRawResponse(),
cancellationToken);
}
        /// <summary>
        /// The <see cref="StartCopyFromUriInternal"/> operation begins an
        /// asynchronous copy of the data from the <paramref name="source"/>
        /// to this blob. You can check <see cref="BlobProperties.CopyStatus"/>
        /// returned from the<see cref="GetPropertiesAsync"/> to determine if
        /// the copy has completed.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob">
        /// Copy Blob</see>.
        /// </summary>
        /// <param name="source">
        /// Specifies the <see cref="Uri"/> of the source blob. The value may
        /// be a <see cref="Uri" /> of up to 2 KB in length that specifies a
        /// blob. A source blob in the same storage account can be
        /// authenticated via Shared Key. However, if the source is a blob in
        /// another account, the source blob must either be public or must be
        /// authenticated via a shared access signature. If the source blob
        /// is public, no authentication is required to perform the copy
        /// operation.
        ///
        /// The source object may be a file in the Azure File service. If the
        /// source object is a file that is to be copied to a blob, then the
        /// source file must be authenticated using a shared access signature,
        /// whether it resides in the same account or in a different account.
        /// </param>
        /// <param name="metadata">
        /// Optional custom metadata to set for this blob.
        /// </param>
        /// <param name="tags">
        /// Optional tags to set for this blob.
        /// </param>
        /// <param name="accessTier">
        /// Optional <see cref="AccessTier"/>
        /// Indicates the tier to be set on the blob.
        /// </param>
        /// <param name="sourceConditions">
        /// Optional <see cref="BlobRequestConditions"/> to add
        /// conditions on the copying of data from this source blob.
        /// </param>
        /// <param name="destinationConditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// the copying of data to this blob.
        /// </param>
        /// <param name="rehydratePriority">
        /// Optional <see cref="RehydratePriority"/>
        /// Indicates the priority with which to rehydrate an archived blob.
        /// </param>
        /// <param name="sealBlob">
        /// If the destination blob should be sealed.
        /// Only applicable for Append Blobs.
        /// </param>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobCopyInfo}"/> describing the
        /// state of the copy operation.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        private async Task<Response<BlobCopyInfo>> StartCopyFromUriInternal(
            Uri source,
            Metadata metadata,
            Tags tags,
            AccessTier? accessTier,
            BlobRequestConditions sourceConditions,
            BlobRequestConditions destinationConditions,
            RehydratePriority? rehydratePriority,
            bool? sealBlob,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(source)}: {source}\n" +
                    $"{nameof(sourceConditions)}: {sourceConditions}\n" +
                    $"{nameof(destinationConditions)}: {destinationConditions}");
                // Diagnostics are reported under the public entry-point name,
                // not this private helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(StartCopyFromUri)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobStartCopyFromURLHeaders> response;
                    // The async and sync branches issue the same request with
                    // identical arguments; only the pipeline invocation differs.
                    // Keep the argument lists in lock-step when editing.
                    if (async)
                    {
                        response = await BlobRestClient.StartCopyFromURLAsync(
                            copySource: source.AbsoluteUri,
                            metadata: metadata,
                            tier: accessTier,
                            rehydratePriority: rehydratePriority,
                            sourceIfModifiedSince: sourceConditions?.IfModifiedSince,
                            sourceIfUnmodifiedSince: sourceConditions?.IfUnmodifiedSince,
                            sourceIfMatch: sourceConditions?.IfMatch?.ToString(),
                            sourceIfNoneMatch: sourceConditions?.IfNoneMatch?.ToString(),
                            sourceIfTags: sourceConditions?.TagConditions,
                            ifModifiedSince: destinationConditions?.IfModifiedSince,
                            ifUnmodifiedSince: destinationConditions?.IfUnmodifiedSince,
                            ifMatch: destinationConditions?.IfMatch?.ToString(),
                            ifNoneMatch: destinationConditions?.IfNoneMatch?.ToString(),
                            leaseId: destinationConditions?.LeaseId,
                            ifTags: destinationConditions?.TagConditions,
                            blobTagsString: tags?.ToTagsString(),
                            sealBlob: sealBlob,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.StartCopyFromURL(
                            copySource: source.AbsoluteUri,
                            metadata: metadata,
                            tier: accessTier,
                            rehydratePriority: rehydratePriority,
                            sourceIfModifiedSince: sourceConditions?.IfModifiedSince,
                            sourceIfUnmodifiedSince: sourceConditions?.IfUnmodifiedSince,
                            sourceIfMatch: sourceConditions?.IfMatch?.ToString(),
                            sourceIfNoneMatch: sourceConditions?.IfNoneMatch?.ToString(),
                            sourceIfTags: sourceConditions?.TagConditions,
                            ifModifiedSince: destinationConditions?.IfModifiedSince,
                            ifUnmodifiedSince: destinationConditions?.IfUnmodifiedSince,
                            ifMatch: destinationConditions?.IfMatch?.ToString(),
                            ifNoneMatch: destinationConditions?.IfNoneMatch?.ToString(),
                            leaseId: destinationConditions?.LeaseId,
                            ifTags: destinationConditions?.TagConditions,
                            blobTagsString: tags?.ToTagsString(),
                            sealBlob: sealBlob,
                            cancellationToken: cancellationToken);
                    }
                    return Response.FromValue(
                        response.ToBlobCopyInfo(),
                        response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion StartCopyFromUri
#region AbortCopyFromUri
/// <summary>
/// The <see cref="AbortCopyFromUri"/> operation aborts a pending
/// <see cref="CopyFromUriOperation"/>, and leaves a this
/// blob with zero length and full metadata.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob">
/// Abort Copy Blob</see>.
/// </summary>
/// <param name="copyId">
/// ID of the copy operation to abort.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on aborting the copy operation.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully aborting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response AbortCopyFromUri(
string copyId,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
AbortCopyFromUriInternal(
copyId,
conditions,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="AbortCopyFromUriAsync"/> operation aborts a pending
/// <see cref="CopyFromUriOperation"/>, and leaves a this
/// blob with zero length and full metadata.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob">
/// Abort Copy Blob</see>.
/// </summary>
/// <param name="copyId">
/// ID of the copy operation to abort.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on aborting the copy operation.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully aborting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> AbortCopyFromUriAsync(
string copyId,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await AbortCopyFromUriInternal(
copyId,
conditions,
true, // async
cancellationToken)
.ConfigureAwait(false);
        /// <summary>
        /// The <see cref="AbortCopyFromUriAsync"/> operation aborts a pending
        /// <see cref="CopyFromUriOperation"/>, and leaves a this
        /// blob with zero length and full metadata.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob">
        /// Abort Copy Blob</see>.
        /// </summary>
        /// <param name="copyId">
        /// ID of the copy operation to abort.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add
        /// conditions on aborting the copy operation.
        /// </param>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response"/> on successfully aborting.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        private async Task<Response> AbortCopyFromUriInternal(
            string copyId,
            BlobRequestConditions conditions,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(copyId)}: {copyId}\n" +
                    $"{nameof(conditions)}: {conditions}");
                // Diagnostics are reported under the public entry-point name,
                // not this private helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(AbortCopyFromUri)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobAbortCopyFromURLHeaders> response;
                    // Same request on either branch; only the lease condition
                    // is forwarded to the service.
                    if (async)
                    {
                        response = await BlobRestClient.AbortCopyFromURLAsync(
                            copyId: copyId,
                            leaseId: conditions?.LeaseId,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.AbortCopyFromURL(
                            copyId: copyId,
                            leaseId: conditions?.LeaseId,
                            cancellationToken: cancellationToken);
                    }
                    return response.GetRawResponse();
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion AbortCopyFromUri
#region CopyFromUri
/// <summary>
/// The Copy Blob From URL operation copies a blob to a destination within the storage account synchronously
/// for source blob sizes up to 256 MB. This API is available starting in version 2018-03-28.
/// The source for a Copy Blob From URL operation can be any committed block blob in any Azure storage account
/// which is either public or authorized with a shared access signature.
///
/// The size of the source blob can be a maximum length of up to 256 MB.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url">
/// Copy Blob From URL</see>.
/// </summary>
/// <param name="source">
/// Required. Specifies the URL of the source blob. The value may be a URL of up to 2 KB in length
/// that specifies a blob. The value should be URL-encoded as it would appear in a request URI. The
/// source blob must either be public or must be authorized via a shared access signature. If the
/// source blob is public, no authorization is required to perform the operation. If the size of the
/// source blob is greater than 256 MB, the request will fail with 409 (Conflict). The blob type of
/// the source blob has to be block blob.
/// </param>
/// <param name="options">
/// Optional parameters.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobCopyInfo}"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobCopyInfo> SyncCopyFromUri(
Uri source,
BlobCopyFromUriOptions options = default,
CancellationToken cancellationToken = default)
=> SyncCopyFromUriInternal(
source: source,
metadata: options?.Metadata,
tags: options?.Tags,
accessTier: options?.AccessTier,
sourceConditions: options?.SourceConditions,
destinationConditions: options?.DestinationConditions,
async: false,
cancellationToken: cancellationToken)
.EnsureCompleted();
/// <summary>
/// The Copy Blob From URL operation copies a blob to a destination within the storage account synchronously
/// for source blob sizes up to 256 MB. This API is available starting in version 2018-03-28.
/// The source for a Copy Blob From URL operation can be any committed block blob in any Azure storage account
/// which is either public or authorized with a shared access signature.
///
/// The size of the source blob can be a maximum length of up to 256 MB.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url">
/// Copy Blob From URL</see>.
/// </summary>
/// <param name="source">
/// Required. Specifies the URL of the source blob. The value may be a URL of up to 2 KB in length
/// that specifies a blob. The value should be URL-encoded as it would appear in a request URI. The
/// source blob must either be public or must be authorized via a shared access signature. If the
/// source blob is public, no authorization is required to perform the operation. If the size of the
/// source blob is greater than 256 MB, the request will fail with 409 (Conflict). The blob type of
/// the source blob has to be block blob.
/// </param>
/// <param name="options">
/// Optional parameters.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobCopyInfo}"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobCopyInfo>> SyncCopyFromUriAsync(
Uri source,
BlobCopyFromUriOptions options = default,
CancellationToken cancellationToken = default)
=> await SyncCopyFromUriInternal(
source: source,
metadata: options?.Metadata,
tags: options?.Tags,
accessTier: options?.AccessTier,
sourceConditions: options?.SourceConditions,
destinationConditions: options?.DestinationConditions,
async: true,
cancellationToken: cancellationToken)
.ConfigureAwait(false);
/// <summary>
/// The Copy Blob From URL operation copies a blob to a destination within the storage account synchronously
/// for source blob sizes up to 256 MB. This API is available starting in version 2018-03-28.
/// The source for a Copy Blob From URL operation can be any committed block blob in any Azure storage account
/// which is either public or authorized with a shared access signature.
///
/// The size of the source blob can be a maximum length of up to 256 MB.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url">
/// Copy Blob From URL</see>.
/// </summary>
/// <param name="source">
/// Required. Specifies the URL of the source blob. The value may be a URL of up to 2 KB in length
/// that specifies a blob. The value should be URL-encoded as it would appear in a request URI. The
/// source blob must either be public or must be authorized via a shared access signature. If the
/// source blob is public, no authorization is required to perform the operation. If the size of the
/// source blob is greater than 256 MB, the request will fail with 409 (Conflict). The blob type of
/// the source blob has to be block blob.
/// </param>
/// <param name="metadata">
/// Optional custom metadata to set for this blob.
/// </param>
/// <param name="tags">
/// Optional tags to set for this blob.
/// </param>
/// <param name="accessTier">
/// Optional <see cref="AccessTier"/>
/// Indicates the tier to be set on the blob.
/// </param>
/// <param name="sourceConditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on the copying of data from this source blob.
/// </param>
/// <param name="destinationConditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// the copying of data to this blob.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobCopyInfo}"/> describing the
/// state of the copy operation.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
private async Task<Response<BlobCopyInfo>> SyncCopyFromUriInternal(
Uri source,
Metadata metadata,
Tags tags,
AccessTier? accessTier,
BlobRequestConditions sourceConditions,
BlobRequestConditions destinationConditions,
bool async,
CancellationToken cancellationToken)
{
using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
{
DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(SyncCopyFromUri)}");
try
{
ClientConfiguration.Pipeline.LogMethodEnter(
nameof(BlobBaseClient),
message:
$"{nameof(Uri)}: {Uri}\n" +
$"{nameof(source)}: {source}\n" +
$"{nameof(sourceConditions)}: {sourceConditions}\n" +
$"{nameof(destinationConditions)}: {destinationConditions}");
scope.Start();
ResponseWithHeaders<BlobCopyFromURLHeaders> response;
if (async)
{
response = await BlobRestClient.CopyFromURLAsync(
copySource: source.AbsoluteUri,
metadata: metadata,
tier: accessTier,
sourceIfModifiedSince: sourceConditions?.IfModifiedSince,
sourceIfUnmodifiedSince: sourceConditions?.IfUnmodifiedSince,
sourceIfMatch: sourceConditions?.IfMatch.ToString(),
sourceIfNoneMatch: sourceConditions?.IfNoneMatch.ToString(),
ifModifiedSince: destinationConditions?.IfModifiedSince,
ifUnmodifiedSince: destinationConditions?.IfUnmodifiedSince,
ifMatch: destinationConditions?.IfMatch?.ToString(),
ifNoneMatch: destinationConditions?.IfNoneMatch?.ToString(),
ifTags: destinationConditions?.TagConditions,
leaseId: destinationConditions?.LeaseId,
blobTagsString: tags?.ToTagsString(),
cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
else
{
response = BlobRestClient.CopyFromURL(
copySource: source.AbsoluteUri,
metadata: metadata,
tier: accessTier,
sourceIfModifiedSince: sourceConditions?.IfModifiedSince,
sourceIfUnmodifiedSince: sourceConditions?.IfUnmodifiedSince,
sourceIfMatch: sourceConditions?.IfMatch.ToString(),
sourceIfNoneMatch: sourceConditions?.IfNoneMatch.ToString(),
ifModifiedSince: destinationConditions?.IfModifiedSince,
ifUnmodifiedSince: destinationConditions?.IfUnmodifiedSince,
ifMatch: destinationConditions?.IfMatch?.ToString(),
ifNoneMatch: destinationConditions?.IfNoneMatch?.ToString(),
ifTags: destinationConditions?.TagConditions,
leaseId: destinationConditions?.LeaseId,
blobTagsString: tags?.ToTagsString(),
cancellationToken: cancellationToken);
}
return Response.FromValue(
response.ToBlobCopyInfo(),
response.GetRawResponse());
}
catch (Exception ex)
{
ClientConfiguration.Pipeline.LogException(ex);
scope.Failed(ex);
throw;
}
finally
{
ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
scope.Dispose();
}
}
}
#endregion CopyFromUri
#region Delete
/// <summary>
/// The <see cref="Delete"/> operation marks the specified blob
/// or snapshot for deletion. The blob is later deleted during
/// garbage collection.
///
/// Note that in order to delete a blob, you must delete all of its
/// snapshots. You can delete both at the same time using
/// <see cref="DeleteSnapshotsOption.IncludeSnapshots"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/delete-blob">
/// Delete Blob</see>.
/// </summary>
/// <param name="snapshotsOption">
/// Specifies options for deleting blob snapshots.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// deleting this blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response Delete(
DeleteSnapshotsOption snapshotsOption = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
DeleteInternal(
snapshotsOption,
conditions,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="DeleteAsync"/> operation marks the specified blob
/// or snapshot for deletion. The blob is later deleted during
/// garbage collection.
///
/// Note that in order to delete a blob, you must delete all of its
/// snapshots. You can delete both at the same time using
/// <see cref="DeleteSnapshotsOption.IncludeSnapshots"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/delete-blob">
/// Delete Blob</see>.
/// </summary>
/// <param name="snapshotsOption">
/// Specifies options for deleting blob snapshots.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// deleting this blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> DeleteAsync(
DeleteSnapshotsOption snapshotsOption = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await DeleteInternal(
snapshotsOption,
conditions,
true, // async
cancellationToken)
.ConfigureAwait(false);
/// <summary>
/// The <see cref="DeleteIfExists"/> operation marks the specified blob
/// or snapshot for deletion, if the blob exists. The blob is later deleted
/// during garbage collection.
///
/// Note that in order to delete a blob, you must delete all of its
/// snapshots. You can delete both at the same time using
/// <see cref="DeleteSnapshotsOption.IncludeSnapshots"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/delete-blob">
/// Delete Blob</see>.
/// </summary>
/// <param name="snapshotsOption">
/// Specifies options for deleting blob snapshots.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// deleting this blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
        /// A <see cref="Response"/> that is true if the blob existed and was
        /// deleted, and false otherwise.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<bool> DeleteIfExists(
DeleteSnapshotsOption snapshotsOption = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
DeleteIfExistsInternal(
snapshotsOption,
conditions ?? default,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="DeleteIfExistsAsync"/> operation marks the specified blob
/// or snapshot for deletion, if the blob exists. The blob is later deleted
/// during garbage collection.
///
/// Note that in order to delete a blob, you must delete all of its
/// snapshots. You can delete both at the same time using
/// <see cref="DeleteSnapshotsOption.IncludeSnapshots"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/delete-blob">
/// Delete Blob</see>.
/// </summary>
/// <param name="snapshotsOption">
/// Specifies options for deleting blob snapshots.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// deleting this blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
        /// A <see cref="Response"/> that is true if the blob existed and was
        /// deleted, and false otherwise.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<bool>> DeleteIfExistsAsync(
DeleteSnapshotsOption snapshotsOption = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await DeleteIfExistsInternal(
snapshotsOption,
conditions ?? default,
true, // async
cancellationToken)
.ConfigureAwait(false);
/// <summary>
/// The <see cref="DeleteIfExistsInternal"/> operation marks the specified blob
/// or snapshot for deletion, if the blob exists. The blob is later deleted
/// during garbage collection.
///
/// Note that in order to delete a blob, you must delete all of its
/// snapshots. You can delete both at the same time using
/// <see cref="DeleteSnapshotsOption.IncludeSnapshots"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/delete-blob">
/// Delete Blob</see>.
/// </summary>
/// <param name="snapshotsOption">
/// Specifies options for deleting blob snapshots.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// deleting this blob.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
internal async Task<Response<bool>> DeleteIfExistsInternal(
DeleteSnapshotsOption snapshotsOption,
BlobRequestConditions conditions,
bool async,
CancellationToken cancellationToken)
{
using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
{
ClientConfiguration.Pipeline.LogMethodEnter(
nameof(BlobBaseClient),
message:
$"{nameof(Uri)}: {Uri}\n" +
$"{nameof(snapshotsOption)}: {snapshotsOption}\n" +
$"{nameof(conditions)}: {conditions}");
string operationName = $"{nameof(BlobBaseClient)}.{nameof(DeleteIfExists)}";
DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope(operationName);
try
{
Response response = await DeleteInternal(
snapshotsOption,
conditions,
async,
cancellationToken,
operationName)
.ConfigureAwait(false);
return Response.FromValue(true, response);
}
catch (RequestFailedException storageRequestFailedException)
when (storageRequestFailedException.ErrorCode == BlobErrorCode.BlobNotFound
|| storageRequestFailedException.ErrorCode == BlobErrorCode.ContainerNotFound)
{
return Response.FromValue(false, default);
}
catch (Exception ex)
{
ClientConfiguration.Pipeline.LogException(ex);
throw;
}
finally
{
ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
}
}
}
/// <summary>
/// The <see cref="DeleteInternal"/> operation marks the specified blob
/// or snapshot for deletion. The blob is later deleted during
/// garbage collection.
///
/// Note that in order to delete a blob, you must delete all of its
/// snapshots. You can delete both at the same time using
/// <see cref="DeleteSnapshotsOption.IncludeSnapshots"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/delete-blob">
/// Delete Blob</see>.
/// </summary>
/// <param name="snapshotsOption">
/// Specifies options for deleting blob snapshots.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// deleting this blob.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <param name="operationName">
        /// Optional. The name of the calling operation to report in diagnostics.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
        private async Task<Response> DeleteInternal(
            DeleteSnapshotsOption snapshotsOption,
            BlobRequestConditions conditions,
            bool async,
            CancellationToken cancellationToken,
            string operationName = null)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(snapshotsOption)}: {snapshotsOption}\n" +
                    $"{nameof(conditions)}: {conditions}");
                // Callers such as DeleteIfExistsInternal pass their own
                // operation name so diagnostics reflect the public API that
                // was actually invoked; otherwise report under Delete.
                operationName ??= $"{nameof(BlobBaseClient)}.{nameof(Delete)}";
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope(operationName);
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobDeleteHeaders> response;
                    if (async)
                    {
                        // DeleteSnapshotsOption.None is mapped to null so the
                        // corresponding request header is omitted entirely.
                        response = await BlobRestClient.DeleteAsync(
                            leaseId: conditions?.LeaseId,
                            deleteSnapshots: snapshotsOption == DeleteSnapshotsOption.None ? null : (DeleteSnapshotsOption?)snapshotsOption,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        // Synchronous path mirrors the async branch exactly.
                        response = BlobRestClient.Delete(
                            leaseId: conditions?.LeaseId,
                            deleteSnapshots: snapshotsOption == DeleteSnapshotsOption.None ? null : (DeleteSnapshotsOption?)snapshotsOption,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            cancellationToken: cancellationToken);
                    }
                    // Delete has no strongly-typed result; return the raw
                    // HTTP response only.
                    return response.GetRawResponse();
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion Delete
#region Exists
/// <summary>
/// The <see cref="Exists"/> operation can be called on a
/// <see cref="BlobBaseClient"/> to see if the associated blob
/// exists in the container on the storage account in the
/// storage service.
/// </summary>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns true if the blob exists.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<bool> Exists(
CancellationToken cancellationToken = default) =>
ExistsInternal(
async: false,
cancellationToken).EnsureCompleted();
/// <summary>
/// The <see cref="ExistsAsync"/> operation can be called on a
/// <see cref="BlobBaseClient"/> to see if the associated blob
/// exists in the container on the storage account in the storage service.
/// </summary>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns true if the blob exists.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<bool>> ExistsAsync(
CancellationToken cancellationToken = default) =>
await ExistsInternal(
async: true,
cancellationToken).ConfigureAwait(false);
/// <summary>
/// The <see cref="ExistsInternal"/> operation can be called on a
/// <see cref="BlobBaseClient"/> to see if the associated blob
/// exists on the storage account in the storage service.
/// </summary>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// Returns true if the blob exists.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
        private async Task<Response<bool>> ExistsInternal(
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}");
                // Report diagnostics under the public Exists name rather than
                // the GetProperties call used to probe for the blob.
                string operationName = $"{nameof(BlobBaseClient)}.{nameof(Exists)}";
                try
                {
                    Response<BlobProperties> response = await GetPropertiesInternal(
                        conditions: default,
                        async: async,
                        cancellationToken: cancellationToken,
                        operationName)
                        .ConfigureAwait(false);
                    // GetProperties succeeded, so the blob exists.
                    return Response.FromValue(true, response.GetRawResponse());
                }
                catch (RequestFailedException storageRequestFailedException)
                when (storageRequestFailedException.ErrorCode == BlobErrorCode.BlobNotFound
                    || storageRequestFailedException.ErrorCode == BlobErrorCode.ContainerNotFound)
                {
                    // Blob or container not found: the blob does not exist.
                    return Response.FromValue(false, default)
                ;
                }
                catch (RequestFailedException storageRequestFailedException)
                when (storageRequestFailedException.ErrorCode == BlobErrorCode.BlobUsesCustomerSpecifiedEncryption)
                {
                    // GetProperties failed only because the blob is encrypted
                    // with a customer-provided key this client does not have;
                    // the failure still proves the blob exists.
                    return Response.FromValue(true, default);
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                }
            }
        }
#endregion Exists
#region Undelete
/// <summary>
/// The <see cref="Undelete"/> operation restores the contents
/// and metadata of a soft deleted blob and any associated soft
/// deleted snapshots.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/undelete-blob">
/// Undelete Blob</see>.
/// </summary>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response Undelete(
CancellationToken cancellationToken = default) =>
UndeleteInternal(
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="UndeleteAsync"/> operation restores the contents
/// and metadata of a soft deleted blob and any associated soft
/// deleted snapshots.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/undelete-blob">
/// Undelete Blob</see>.
/// </summary>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> UndeleteAsync(
CancellationToken cancellationToken = default) =>
await UndeleteInternal(
true, // async
cancellationToken)
.ConfigureAwait(false);
/// <summary>
/// The <see cref="UndeleteInternal"/> operation restores the contents
/// and metadata of a soft deleted blob and any associated soft
/// deleted snapshots.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/undelete-blob">
/// Undelete Blob</see>.
/// </summary>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully deleting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
private async Task<Response> UndeleteInternal(
bool async,
CancellationToken cancellationToken)
{
using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
{
ClientConfiguration.Pipeline.LogMethodEnter(nameof(BlobBaseClient), message: $"{nameof(Uri)}: {Uri}");
DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(Undelete)}");
try
{
scope.Start();
ResponseWithHeaders<BlobUndeleteHeaders> response;
if (async)
{
response = await BlobRestClient.UndeleteAsync(
cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
else
{
response = BlobRestClient.Undelete(
cancellationToken: cancellationToken);
}
return response.GetRawResponse();
}
catch (Exception ex)
{
ClientConfiguration.Pipeline.LogException(ex);
scope.Failed(ex);
throw;
}
finally
{
ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
scope.Dispose();
}
}
}
#endregion Undelete
#region GetProperties
/// <summary>
/// The <see cref="GetProperties"/> operation returns all
/// user-defined metadata, standard HTTP properties, and system
/// properties for the blob. It does not return the content of the
/// blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob-properties">
/// Get Blob Properties</see>.
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on getting the blob's properties.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobProperties}"/> describing the
/// blob's properties.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobProperties> GetProperties(
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
GetPropertiesInternal(
conditions,
async: false,
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="GetPropertiesAsync"/> operation returns all
/// user-defined metadata, standard HTTP properties, and system
/// properties for the blob. It does not return the content of the
/// blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob-properties">
/// Get Blob Properties</see>.
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on getting the blob's properties.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobProperties}"/> describing the
/// blob's properties.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobProperties>> GetPropertiesAsync(
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await GetPropertiesInternal(
conditions,
async: true,
cancellationToken)
.ConfigureAwait(false);
/// <summary>
/// The <see cref="GetPropertiesInternal"/> operation returns all
/// user-defined metadata, standard HTTP properties, and system
/// properties for the blob. It does not return the content of the
/// blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/get-blob-properties">
/// Get Blob Properties</see>.
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add
/// conditions on getting the blob's properties.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <param name="operationName">
/// The name of the calling operation.
/// </param>
/// <returns>
/// A <see cref="Response{BlobProperties}"/> describing the
/// blob's properties.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
internal async Task<Response<BlobProperties>> GetPropertiesInternal(
BlobRequestConditions conditions,
bool async,
CancellationToken cancellationToken,
string operationName = default)
{
operationName ??= $"{nameof(BlobBaseClient)}.{nameof(GetProperties)}";
using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
{
ClientConfiguration.Pipeline.LogMethodEnter(
nameof(BlobBaseClient),
message:
$"{nameof(Uri)}: {Uri}\n" +
$"{nameof(conditions)}: {conditions}");
operationName ??= $"{nameof(BlobBaseClient)}.{nameof(GetProperties)}";
DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope(operationName);
try
{
scope.Start();
ResponseWithHeaders<BlobGetPropertiesHeaders> response;
if (async)
{
response = await BlobRestClient.GetPropertiesAsync(
leaseId: conditions?.LeaseId,
encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
ifModifiedSince: conditions?.IfModifiedSince,
ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
ifMatch: conditions?.IfMatch?.ToString(),
ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
ifTags: conditions?.TagConditions,
cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
else
{
response = BlobRestClient.GetProperties(
leaseId: conditions?.LeaseId,
encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
ifModifiedSince: conditions?.IfModifiedSince,
ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
ifMatch: conditions?.IfMatch?.ToString(),
ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
ifTags: conditions?.TagConditions,
cancellationToken: cancellationToken);
}
return Response.FromValue(
response.ToBlobProperties(),
response.GetRawResponse());
}
catch (Exception ex)
{
ClientConfiguration.Pipeline.LogException(ex);
scope.Failed(ex);
throw;
}
finally
{
ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
scope.Dispose();
}
}
}
#endregion GetProperties
#region SetHttpHeaders
/// <summary>
/// The <see cref="SetHttpHeaders"/> operation sets system
/// properties on the blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/set-blob-properties">
/// Set Blob Properties</see>.
/// </summary>
/// <param name="httpHeaders">
/// Optional. The standard HTTP header system properties to set.
/// If not specified, existing values will be cleared.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's HTTP headers.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobInfo}"/> describing the updated
/// blob.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobInfo> SetHttpHeaders(
BlobHttpHeaders httpHeaders = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
SetHttpHeadersInternal(
httpHeaders,
conditions,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="SetHttpHeadersAsync"/> operation sets system
/// properties on the blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/set-blob-properties">
/// Set Blob Properties</see>.
/// </summary>
/// <param name="httpHeaders">
/// Optional. The standard HTTP header system properties to set. If not specified, existing values will be cleared.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's HTTP headers.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobInfo}"/> describing the updated
/// blob.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobInfo>> SetHttpHeadersAsync(
BlobHttpHeaders httpHeaders = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await SetHttpHeadersInternal(
httpHeaders,
conditions,
true, // async
cancellationToken)
.ConfigureAwait(false);
/// <summary>
/// The <see cref="SetHttpHeadersInternal"/> operation sets system
/// properties on the blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/set-blob-properties">
/// Set Blob Properties</see>.
/// </summary>
/// <param name="httpHeaders">
/// Optional. The standard HTTP header system properties to set. If not specified, existing values will be cleared.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's HTTP headers.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobInfo}"/> describing the updated
/// blob.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
        private async Task<Response<BlobInfo>> SetHttpHeadersInternal(
            BlobHttpHeaders httpHeaders,
            BlobRequestConditions conditions,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(httpHeaders)}: {httpHeaders}\n" +
                    $"{nameof(conditions)}: {conditions}");
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(SetHttpHeaders)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobSetHttpHeadersHeaders> response;
                    if (async)
                    {
                        // A null httpHeaders sends all header values as null;
                        // per the XML docs on the public overloads, the
                        // service then clears the existing values.
                        response = await BlobRestClient.SetHttpHeadersAsync(
                            blobCacheControl: httpHeaders?.CacheControl,
                            blobContentType: httpHeaders?.ContentType,
                            blobContentMD5: httpHeaders?.ContentHash,
                            blobContentEncoding: httpHeaders?.ContentEncoding,
                            blobContentLanguage: httpHeaders?.ContentLanguage,
                            leaseId: conditions?.LeaseId,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            blobContentDisposition: httpHeaders?.ContentDisposition,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        // Synchronous path mirrors the async branch exactly.
                        response = BlobRestClient.SetHttpHeaders(
                            blobCacheControl: httpHeaders?.CacheControl,
                            blobContentType: httpHeaders?.ContentType,
                            blobContentMD5: httpHeaders?.ContentHash,
                            blobContentEncoding: httpHeaders?.ContentEncoding,
                            blobContentLanguage: httpHeaders?.ContentLanguage,
                            leaseId: conditions?.LeaseId,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            blobContentDisposition: httpHeaders?.ContentDisposition,
                            cancellationToken: cancellationToken);
                    }
                    return Response.FromValue(
                        response.ToBlobInfo(),
                        response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion SetHttpHeaders
#region SetMetadata
/// <summary>
/// The <see cref="SetMetadata"/> operation sets user-defined
/// metadata for the specified blob as one or more name-value pairs.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/set-blob-metadata">
/// Set Blob Metadata</see>.
/// </summary>
/// <param name="metadata">
/// Custom metadata to set for this blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's metadata.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobInfo}"/> describing the updated
/// blob.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobInfo> SetMetadata(
Metadata metadata,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
SetMetadataInternal(
metadata,
conditions,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="SetMetadataAsync"/> operation sets user-defined
/// metadata for the specified blob as one or more name-value pairs.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/set-blob-metadata">
/// Set Blob Metadata</see>.
/// </summary>
/// <param name="metadata">
/// Custom metadata to set for this blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's metadata.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobInfo}"/> describing the updated
/// blob.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobInfo>> SetMetadataAsync(
Metadata metadata,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await SetMetadataInternal(
metadata,
conditions,
true, // async
cancellationToken)
.ConfigureAwait(false);
        /// <summary>
        /// The <see cref="SetMetadataInternal"/> operation sets user-defined
        /// metadata for the specified blob as one or more name-value pairs.
        /// Shared implementation backing both <see cref="SetMetadata"/> and
        /// <see cref="SetMetadataAsync"/>.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/set-blob-metadata">
        /// Set Blob Metadata</see>.
        /// </summary>
        /// <param name="metadata">
        /// Custom metadata to set for this blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// setting the blob's metadata.
        /// </param>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobInfo}"/> describing the updated
        /// blob.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        private async Task<Response<BlobInfo>> SetMetadataInternal(
            Metadata metadata,
            BlobRequestConditions conditions,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(conditions)}: {conditions}");
                // The diagnostic scope is named for the public API, not this internal helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(SetMetadata)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobSetMetadataHeaders> response;
                    // The two branches must stay in lockstep; only the transport call differs.
                    if (async)
                    {
                        response = await BlobRestClient.SetMetadataAsync(
                            metadata: metadata,
                            leaseId: conditions?.LeaseId,
                            encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
                            encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
                            encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
                            encryptionScope: ClientConfiguration.EncryptionScope,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.SetMetadata(
                            metadata: metadata,
                            leaseId: conditions?.LeaseId,
                            encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
                            encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
                            encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
                            encryptionScope: ClientConfiguration.EncryptionScope,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            cancellationToken: cancellationToken);
                    }
                    return Response.FromValue(
                        response.ToBlobInfo(),
                        response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion SetMetadata
#region CreateSnapshot
/// <summary>
/// The <see cref="CreateSnapshot"/> operation creates a
/// read-only snapshot of a blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/snapshot-blob">
/// Snapshot Blob</see>.
/// </summary>
/// <param name="metadata">
/// Optional custom metadata to set for this blob snapshot.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting creating this snapshot.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobSnapshotInfo}"/> describing the
/// new blob snapshot.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<BlobSnapshotInfo> CreateSnapshot(
Metadata metadata = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
CreateSnapshotInternal(
metadata,
conditions,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="CreateSnapshotAsync"/> operation creates a
/// read-only snapshot of a blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/snapshot-blob">
/// Snapshot Blob</see>.
/// </summary>
/// <param name="metadata">
/// Optional custom metadata to set for this blob snapshot.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting creating this snapshot.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{BlobSnapshotInfo}"/> describing the
/// new blob snapshot.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<BlobSnapshotInfo>> CreateSnapshotAsync(
Metadata metadata = default,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await CreateSnapshotInternal(
metadata,
conditions,
true, // async
cancellationToken)
.ConfigureAwait(false);
        /// <summary>
        /// The <see cref="CreateSnapshotInternal"/> operation creates a
        /// read-only snapshot of a blob.  Shared implementation backing both
        /// <see cref="CreateSnapshot"/> and <see cref="CreateSnapshotAsync"/>.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/rest/api/storageservices/snapshot-blob">
        /// Snapshot Blob</see>.
        /// </summary>
        /// <param name="metadata">
        /// Optional custom metadata to set for this blob snapshot.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// creating this snapshot.
        /// </param>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{BlobSnapshotInfo}"/> describing the
        /// new blob snapshot.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        private async Task<Response<BlobSnapshotInfo>> CreateSnapshotInternal(
            Metadata metadata,
            BlobRequestConditions conditions,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(conditions)}: {conditions}");
                // The diagnostic scope is named for the public API, not this internal helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(CreateSnapshot)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobCreateSnapshotHeaders> response;
                    // The two branches must stay in lockstep; only the transport call differs.
                    if (async)
                    {
                        response = await BlobRestClient.CreateSnapshotAsync(
                            metadata: metadata,
                            encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
                            encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
                            encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
                            encryptionScope: ClientConfiguration.EncryptionScope,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            leaseId: conditions?.LeaseId,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.CreateSnapshot(
                            metadata: metadata,
                            encryptionKey: ClientConfiguration.CustomerProvidedKey?.EncryptionKey,
                            encryptionKeySha256: ClientConfiguration.CustomerProvidedKey?.EncryptionKeyHash,
                            encryptionAlgorithm: ClientConfiguration.CustomerProvidedKey?.EncryptionAlgorithm == null ? null : EncryptionAlgorithmTypeInternal.AES256,
                            encryptionScope: ClientConfiguration.EncryptionScope,
                            ifModifiedSince: conditions?.IfModifiedSince,
                            ifUnmodifiedSince: conditions?.IfUnmodifiedSince,
                            ifMatch: conditions?.IfMatch?.ToString(),
                            ifNoneMatch: conditions?.IfNoneMatch?.ToString(),
                            ifTags: conditions?.TagConditions,
                            leaseId: conditions?.LeaseId,
                            cancellationToken: cancellationToken);
                    }
                    return Response.FromValue(
                        response.ToBlobSnapshotInfo(),
                        response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion CreateSnapshot
#region SetAccessTier
/// <summary>
/// The <see cref="SetAccessTier"/> operation sets the tier on a blob.
/// The operation is allowed on a page blob in a premium storage
/// account and on a block blob in a blob storage or general purpose
/// v2 account.
///
/// A premium page blob's tier determines the allowed size, IOPS, and
/// bandwidth of the blob. A block blob's tier determines
/// Hot/Cool/Archive storage type. This operation does not update the
/// blob's ETag. For detailed information about block blob level
/// tiering <see href="https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers">
/// Blob Storage Tiers</see>.
///
/// For more information about setting the tier, see
/// <see href="https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers">
/// Blob Storage Tiers</see>.
/// </summary>
/// <param name="accessTier">
/// Indicates the tier to be set on the blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the access tier.
/// </param>
/// <param name="rehydratePriority">
/// Optional <see cref="RehydratePriority"/>
/// Indicates the priority with which to rehydrate an archived blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully setting the tier.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response SetAccessTier(
AccessTier accessTier,
BlobRequestConditions conditions = default,
RehydratePriority? rehydratePriority = default,
CancellationToken cancellationToken = default) =>
SetAccessTierInternal(
accessTier,
conditions,
rehydratePriority,
false, // async
cancellationToken)
.EnsureCompleted();
/// <summary>
/// The <see cref="SetAccessTierAsync"/> operation sets the tier on a blob.
/// The operation is allowed on a page blob in a premium storage
/// account and on a block blob in a blob storage or general purpose
/// v2 account.
///
/// A premium page blob's tier determines the allowed size, IOPS, and
/// bandwidth of the blob. A block blob's tier determines
/// Hot/Cool/Archive storage type. This operation does not update the
/// blob's ETag. For detailed information about block blob level
/// tiering <see href="https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers">
/// Blob Storage Tiers</see>.
///
/// For more information about setting the tier, see
/// <see href="https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers">
/// Blob Storage Tiers</see>.
/// </summary>
/// <param name="accessTier">
/// Indicates the tier to be set on the blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the access tier.
/// </param>
/// <param name="rehydratePriority">
/// Optional <see cref="RehydratePriority"/>
/// Indicates the priority with which to rehydrate an archived blob.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully setting the tier.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> SetAccessTierAsync(
AccessTier accessTier,
BlobRequestConditions conditions = default,
RehydratePriority? rehydratePriority = default,
CancellationToken cancellationToken = default) =>
await SetAccessTierInternal(
accessTier,
conditions,
rehydratePriority,
true, // async
cancellationToken)
.ConfigureAwait(false);
        /// <summary>
        /// The <see cref="SetAccessTierInternal"/> operation sets the tier on a blob.
        /// The operation is allowed on a page blob in a premium storage
        /// account and on a block blob in a blob storage or general purpose
        /// v2 account.  Shared implementation backing both
        /// <see cref="SetAccessTier"/> and <see cref="SetAccessTierAsync"/>.
        ///
        /// A premium page blob's tier determines the allowed size, IOPS, and
        /// bandwidth of the blob. A block blob's tier determines
        /// Hot/Cool/Archive storage type. This operation does not update the
        /// blob's ETag. For detailed information about block blob level
        /// tiering <see href="https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers">
        /// Blob Storage Tiers</see>.
        ///
        /// For more information about setting the tier, see
        /// <see href="https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers">
        /// Blob Storage Tiers</see>.
        /// </summary>
        /// <param name="accessTier">
        /// Indicates the tier to be set on the blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// setting the access tier.
        /// </param>
        /// <param name="rehydratePriority">
        /// Optional <see cref="RehydratePriority"/>
        /// Indicates the priority with which to rehydrate an archived blob.
        /// </param>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response"/> on successfully setting the tier.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        private async Task<Response> SetAccessTierInternal(
            AccessTier accessTier,
            BlobRequestConditions conditions,
            RehydratePriority? rehydratePriority,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(accessTier)}: {accessTier}\n" +
                    $"{nameof(conditions)}: {conditions}");
                // The diagnostic scope is named for the public API, not this internal helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(SetAccessTier)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobSetTierHeaders> response;
                    // The two branches must stay in lockstep; only the transport call differs.
                    if (async)
                    {
                        response = await BlobRestClient.SetTierAsync(
                            tier: accessTier,
                            rehydratePriority: rehydratePriority,
                            leaseId: conditions?.LeaseId,
                            ifTags: conditions?.TagConditions,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.SetTier(
                            tier: accessTier,
                            rehydratePriority: rehydratePriority,
                            leaseId: conditions?.LeaseId,
                            ifTags: conditions?.TagConditions,
                            cancellationToken: cancellationToken);
                    }
                    // Set Blob Tier has no typed result; only the raw response is surfaced.
                    return response.GetRawResponse();
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion SetAccessTier
#region GetTags
/// <summary>
/// Gets the tags associated with the underlying blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-tags">
/// Get Blob Tags</see>
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// getting the blob's tags. Note that TagConditions is currently the
/// only condition supported by GetTags.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{Tags}"/> on successfully getting tags.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response<GetBlobTagResult> GetTags(
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
GetTagsInternal(
conditions: conditions,
async: false,
cancellationToken: cancellationToken)
.EnsureCompleted();
/// <summary>
/// Gets the tags associated with the underlying blob.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-tags">
/// Get Blob Tags</see>
/// </summary>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// getting the blob's tags. Note that TagConditions is currently the
/// only condition supported by GetTags.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response{Tags}"/> on successfully getting tags.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response<GetBlobTagResult>> GetTagsAsync(
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await GetTagsInternal(
conditions: conditions,
async: true,
cancellationToken: cancellationToken)
.ConfigureAwait(false);
        /// <summary>
        /// Gets the tags associated with the underlying blob.  Shared
        /// implementation backing both <see cref="GetTags"/> and
        /// <see cref="GetTagsAsync"/>.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-tags">
        /// Get Blob Tags</see>
        /// </summary>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// getting the blob's tags. Note that TagConditions is currently the
        /// only condition supported by GetTags.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response{GetBlobTagResult}"/> on successfully getting tags.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        // NOTE(review): unlike the sibling *Internal methods, 'async' precedes
        // 'conditions' here; the visible callers pass named arguments, so this is
        // only a consistency nit.
        private async Task<Response<GetBlobTagResult>> GetTagsInternal(
            bool async,
            BlobRequestConditions conditions,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}");
                // The diagnostic scope is named for the public API, not this internal helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(GetTags)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobTags, BlobGetTagsHeaders> response;
                    // The two branches must stay in lockstep; only the transport call differs.
                    if (async)
                    {
                        response = await BlobRestClient.GetTagsAsync(
                            ifTags: conditions?.TagConditions,
                            leaseId: conditions?.LeaseId,
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.GetTags(
                            ifTags: conditions?.TagConditions,
                            leaseId: conditions?.LeaseId,
                            cancellationToken: cancellationToken);
                    }
                    // Convert the wire-format tag list into the public dictionary shape.
                    GetBlobTagResult result = new GetBlobTagResult
                    {
                        Tags = response.Value.ToTagDictionary()
                    };
                    return Response.FromValue(
                        result,
                        response.GetRawResponse());
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion
#region SetTags
/// <summary>
/// Sets tags on the underlying blob.
/// A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
/// Valid tag key and value characters include lower and upper case letters, digits (0-9),
/// space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tags">
/// Set Blob Tags</see>.
/// </summary>
/// <param name="tags">
/// The tags to set on the blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's tags. Note that TagConditions is currently the
/// only condition supported by SetTags.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully setting the blob tags..
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual Response SetTags(
Tags tags,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
SetTagsInternal(
tags: tags,
conditions: conditions,
async: false,
cancellationToken: cancellationToken)
.EnsureCompleted();
/// <summary>
/// Sets tags on the underlying blob.
/// A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
/// Valid tag key and value characters include lower and upper case letters, digits (0-9),
/// space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tags">
/// Set Blob Tags</see>.
/// </summary>
/// <param name="tags">
/// The tags to set on the blob.
/// </param>
/// <param name="conditions">
/// Optional <see cref="BlobRequestConditions"/> to add conditions on
/// setting the blob's tags. Note that TagConditions is currently the
/// only condition supported by SetTags.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate
/// notifications that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully setting the blob tags..
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure occurs.
/// </remarks>
public virtual async Task<Response> SetTagsAsync(
Tags tags,
BlobRequestConditions conditions = default,
CancellationToken cancellationToken = default) =>
await SetTagsInternal(
tags: tags,
conditions: conditions,
async: true,
cancellationToken: cancellationToken)
.ConfigureAwait(false);
        /// <summary>
        /// Sets tags on the underlying blob.  Shared implementation backing
        /// both <see cref="SetTags"/> and <see cref="SetTagsAsync"/>.
        /// A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
        /// Valid tag key and value characters include lower and upper case letters, digits (0-9),
        /// space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_').
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tags">
        /// Set Blob Tags</see>.
        /// </summary>
        /// <param name="tags">
        /// The tags to set on the blob.
        /// </param>
        /// <param name="conditions">
        /// Optional <see cref="BlobRequestConditions"/> to add conditions on
        /// setting the blob's tags. Note that TagConditions is currently the
        /// only condition supported by SetTags.
        /// </param>
        /// <param name="async">
        /// Whether to invoke the operation asynchronously.
        /// </param>
        /// <param name="cancellationToken">
        /// Optional <see cref="CancellationToken"/> to propagate
        /// notifications that the operation should be cancelled.
        /// </param>
        /// <returns>
        /// A <see cref="Response"/> on successfully setting the blob tags.
        /// </returns>
        /// <remarks>
        /// A <see cref="RequestFailedException"/> will be thrown if
        /// a failure occurs.
        /// </remarks>
        // TODO: consider whether transactional content CRC / content MD5 should
        // be supported on this request.
        private async Task<Response> SetTagsInternal(
            Tags tags,
            BlobRequestConditions conditions,
            bool async,
            CancellationToken cancellationToken)
        {
            using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(BlobBaseClient)))
            {
                ClientConfiguration.Pipeline.LogMethodEnter(
                    nameof(BlobBaseClient),
                    message:
                    $"{nameof(Uri)}: {Uri}\n" +
                    $"{nameof(tags)}: {tags}");
                // The diagnostic scope is named for the public API, not this internal helper.
                DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(BlobBaseClient)}.{nameof(SetTags)}");
                try
                {
                    scope.Start();
                    ResponseWithHeaders<BlobSetTagsHeaders> response;
                    // The two branches must stay in lockstep; only the transport call differs.
                    if (async)
                    {
                        response = await BlobRestClient.SetTagsAsync(
                            ifTags: conditions?.TagConditions,
                            leaseId: conditions?.LeaseId,
                            tags: tags.ToBlobTags(),
                            cancellationToken: cancellationToken)
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        response = BlobRestClient.SetTags(
                            ifTags: conditions?.TagConditions,
                            leaseId: conditions?.LeaseId,
                            tags: tags.ToBlobTags(),
                            cancellationToken: cancellationToken);
                    }
                    // Set Blob Tags has no typed result; only the raw response is surfaced.
                    return response.GetRawResponse();
                }
                catch (Exception ex)
                {
                    ClientConfiguration.Pipeline.LogException(ex);
                    scope.Failed(ex);
                    throw;
                }
                finally
                {
                    ClientConfiguration.Pipeline.LogMethodExit(nameof(BlobBaseClient));
                    scope.Dispose();
                }
            }
        }
#endregion
#region GenerateSas
/// <summary>
/// The <see cref="GenerateSasUri(BlobSasPermissions, DateTimeOffset)"/>
/// returns a <see cref="Uri"/> that generates a Blob Service
/// Shared Access Signature (SAS) Uri based on the Client properties and
/// parameters passed. The SAS is signed by the shared key credential
/// of the client.
///
/// To check if the client is able to sign a Service Sas see
/// <see cref="CanGenerateSasUri"/>.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas">
/// Constructing a service SAS</see>.
/// </summary>
/// <param name="permissions">
/// Required. Specifies the list of permissions to be associated with the SAS.
/// See <see cref="BlobSasPermissions"/>.
/// </param>
/// <param name="expiresOn">
/// Required. Specifies the time at which the SAS becomes invalid. This field
/// must be omitted if it has been specified in an associated stored access policy.
/// </param>
/// <returns>
/// A <see cref="Uri"/> containing the SAS Uri.
/// </returns>
/// <remarks>
/// A <see cref="Exception"/> will be thrown if a failure occurs.
/// </remarks>
public virtual Uri GenerateSasUri(BlobSasPermissions permissions, DateTimeOffset expiresOn) =>
GenerateSasUri(new BlobSasBuilder(permissions, expiresOn)
{
BlobContainerName = BlobContainerName,
BlobName = Name,
Snapshot = _snapshot,
BlobVersionId = _blobVersionId
});
        /// <summary>
        /// The <see cref="GenerateSasUri(BlobSasBuilder)"/> returns a <see cref="Uri"/>
        /// that generates a Blob Service Shared Access Signature (SAS) Uri
        /// based on the Client properties and builder. The SAS is signed
        /// by the shared key credential of the client.
        ///
        /// To check if the client is able to sign a Service Sas see
        /// <see cref="CanGenerateSasUri"/>.
        ///
        /// For more information, see
        /// <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas">
        /// Constructing a Service SAS</see>.
        /// </summary>
        /// <param name="builder">
        /// Used to generate a Shared Access Signature (SAS).
        /// </param>
        /// <returns>
        /// A <see cref="Uri"/> containing the SAS Uri.
        /// </returns>
        /// <remarks>
        /// A <see cref="Exception"/> will be thrown if
        /// a failure occurs, including when the builder's names do not match
        /// the blob this client addresses.
        /// </remarks>
        public virtual Uri GenerateSasUri(BlobSasBuilder builder)
        {
            if (builder == null)
            {
                throw Errors.ArgumentNull(nameof(builder));
            }
            // Deep copy of builder so we don't modify the user's original BlobSasBuilder.
            builder = BlobSasBuilder.DeepCopy(builder);
            // Assign builder's ContainerName, BlobName, Snapshot, and BlobVersionId, if they are null.
            builder.BlobContainerName ??= BlobContainerName;
            builder.BlobName ??= Name;
            builder.Snapshot ??= _snapshot;
            builder.BlobVersionId ??= _blobVersionId;
            // Any names the caller DID set must match this client; otherwise the
            // signed SAS would target a different resource than this Uri.
            if (!builder.BlobContainerName.Equals(BlobContainerName, StringComparison.InvariantCulture))
            {
                throw Errors.SasNamesNotMatching(
                    nameof(builder.BlobContainerName),
                    nameof(BlobSasBuilder),
                    nameof(BlobContainerName));
            }
            if (!builder.BlobName.Equals(Name, StringComparison.InvariantCulture))
            {
                throw Errors.SasNamesNotMatching(
                    nameof(builder.BlobName),
                    nameof(BlobSasBuilder),
                    nameof(Name));
            }
            // string.Compare (rather than Equals) tolerates both sides being null.
            if (string.Compare(_snapshot, builder.Snapshot, StringComparison.InvariantCulture) != 0)
            {
                throw Errors.SasNamesNotMatching(
                    nameof(builder.Snapshot),
                    nameof(BlobSasBuilder));
            }
            if (string.Compare(_blobVersionId, builder.BlobVersionId, StringComparison.InvariantCulture) != 0)
            {
                throw Errors.SasNamesNotMatching(
                    nameof(builder.BlobVersionId),
                    nameof(BlobSasBuilder));
            }
            BlobUriBuilder sasUri = new BlobUriBuilder(Uri)
            {
                Sas = builder.ToSasQueryParameters(ClientConfiguration.SharedKeyCredential)
            };
            return sasUri.ToUri();
        }
#endregion
#region GetParentBlobContainerClientCore
private BlobContainerClient _parentBlobContainerClient;
/// <summary>
/// Create a new <see cref="BlobContainerClient"/> that pointing to this <see cref="BlobBaseClient"/>'s parent container.
/// The new <see cref="BlockBlobClient"/>
/// uses the same request policy pipeline as the
/// <see cref="BlobBaseClient"/>.
/// </summary>
/// <returns>A new <see cref="BlobContainerClient"/> instance.</returns>
protected internal virtual BlobContainerClient GetParentBlobContainerClientCore()
{
if (_parentBlobContainerClient == null)
{
BlobUriBuilder blobUriBuilder = new BlobUriBuilder(Uri)
{
// erase parameters unrelated to container
BlobName = null,
VersionId = null,
Snapshot = null,
};
_parentBlobContainerClient = new BlobContainerClient(
blobUriBuilder.ToUri(),
ClientConfiguration,
ClientSideEncryption);
}
return _parentBlobContainerClient;
}
#endregion
}
    /// <summary>
    /// Add easy-to-discover methods to <see cref="BlobContainerClient"/> for
    /// creating <see cref="BlobBaseClient"/> instances.
    /// </summary>
    public static partial class SpecializedBlobExtensions
    {
        /// <summary>
        /// Creates a new <see cref="BlobContainerClient"/> pointing to this <see cref="BlobBaseClient"/>'s parent container.
        /// The new <see cref="BlobContainerClient"/>
        /// uses the same request policy pipeline as the
        /// <see cref="BlobBaseClient"/>.
        /// </summary>
        /// <param name="client">The <see cref="BlobBaseClient"/>.</param>
        /// <returns>A new <see cref="BlobContainerClient"/> instance.</returns>
        public static BlobContainerClient GetParentBlobContainerClient(this BlobBaseClient client)
        {
            return client.GetParentBlobContainerClientCore();
        }
        /// <summary>
        /// Create a new <see cref="BlobBaseClient"/> object by concatenating
        /// <paramref name="blobName"/> to the end of the
        /// <paramref name="client"/>'s <see cref="BlobContainerClient.Uri"/>.
        /// The new <see cref="BlobBaseClient"/> uses the same request policy
        /// pipeline as the <see cref="BlobContainerClient"/>.
        /// </summary>
        /// <param name="client">The <see cref="BlobContainerClient"/>.</param>
        /// <param name="blobName">The name of the blob.</param>
        /// <returns>A new <see cref="BlobBaseClient"/> instance.</returns>
        public static BlobBaseClient GetBlobBaseClient(
            this BlobContainerClient client,
            string blobName) =>
            client.GetBlobBaseClientCore(blobName);
        /// <summary>
        /// Creates a new instance of the <see cref="BlobClient"/> class, maintaining all the same
        /// internals but specifying new <see cref="ClientSideEncryptionOptions"/>.
        /// </summary>
        /// <param name="client">Client to base off of.</param>
        /// <param name="clientSideEncryptionOptions">New encryption options. Setting this to <code>default</code> will clear client-side encryption.</param>
        /// <returns>New instance with provided options and same internals otherwise.</returns>
        public static BlobClient WithClientSideEncryptionOptions(this BlobClient client, ClientSideEncryptionOptions clientSideEncryptionOptions)
            => client.WithClientSideEncryptionOptionsCore(clientSideEncryptionOptions);
    }
}
| 44.75005 | 166 | 0.559327 | [
"MIT"
] | AME-Redmond/azure-sdk-for-net | sdk/storage/Azure.Storage.Blobs/src/BlobBaseClient.cs | 224,513 | C# |
using Boilerplate.Features.Core.Commands;
namespace Boilerplate.Features.MassTransit.Services
{
public class MassTransitCommandDispatcher
: ICommandDispatcher
{
private readonly ICommandDispatcher _decorated;
private readonly IDistributedCommandDispatcher _dispatcher;
public MassTransitCommandDispatcher(ICommandDispatcher decorated, IDistributedCommandDispatcher dispatcher)
{
_decorated = decorated;
_dispatcher = dispatcher;
}
public async Task<bool> DispatchAsync(ICommand command)
{
if (_dispatcher.IsDistributed(command))
{
return await _dispatcher.DispatchAsync(command);
}
return await _decorated.DispatchAsync(command);
}
}
}
| 29.035714 | 115 | 0.665437 | [
"MIT"
] | loremipsumdonec/photo-gallery-service | src/Boilerplate/Features/MassTransit/Services/MassTransitCommandDispatcher.cs | 815 | C# |
using FreeSql.DatabaseModel;
using FreeSql.Internal;
using FreeSql.Internal.Model;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data;
using System.Data.Odbc;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
namespace FreeSql.Odbc.Dameng
{
class OdbcDamengDbFirst : IDbFirst
{
        // Owning IFreeSql instance used for all ad-hoc dictionary queries.
        IFreeSql _orm;
        protected CommonUtils _commonUtils;
        protected CommonExpression _commonExpression;
        // Wires the DbFirst provider to its orm and the shared SQL/expression helpers.
        public OdbcDamengDbFirst(IFreeSql orm, CommonUtils commonUtils, CommonExpression commonExpression)
        {
            _orm = orm;
            _commonUtils = commonUtils;
            _commonExpression = commonExpression;
        }
        // IDbFirst contract: expose the provider-specific OdbcType as a plain int.
        public int GetDbType(DbColumnInfo column) => (int)GetSqlDbType(column);
OdbcType GetSqlDbType(DbColumnInfo column)
{
var dbfull = column.DbTypeTextFull.ToLower();
switch (dbfull)
{
case "number(1)": return OdbcType.Bit;
case "number(4)": return OdbcType.SmallInt;
case "number(6)": return OdbcType.SmallInt;
case "number(11)": return OdbcType.Int;
case "number(21)": return OdbcType.BigInt;
case "number(3)": return OdbcType.TinyInt;
case "number(5)": return OdbcType.SmallInt;
case "number(10)": return OdbcType.BigInt;
case "number(20)": return OdbcType.Decimal;
case "float(126)": return OdbcType.Double;
case "float(63)": return OdbcType.Real;
case "number(10,2)": return OdbcType.Decimal;
case "interval day(2) to second(6)": return OdbcType.Time;
case "timestamp(6)": return OdbcType.DateTime;
case "timestamp(6) with local time zone": return OdbcType.DateTime;
case "blob": return OdbcType.VarBinary;
case "nvarchar2(255)": return OdbcType.NVarChar;
case "char(36)": return OdbcType.Char;
}
switch (column.DbTypeText.ToLower())
{
case "bit":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(1)"]);
return OdbcType.Bit;
case "smallint":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(4)"]);
return OdbcType.SmallInt;
case "byte":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(3)"]);
return OdbcType.TinyInt;
case "tinyint":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(3)"]);
return OdbcType.TinyInt;
case "integer":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(11)"]);
return OdbcType.Int;
case "bigint":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(21)"]);
return OdbcType.Int;
case "dec":
case "decimal":
case "numeric":
case "number":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["number(10,2)"]);
return OdbcType.Decimal;
case "time":
case "interval day to second":
case "interval year to month":
case "interval year":
case "interval month":
case "interval day":
case "interval day to hour":
case "interval day to minute":
case "interval hour":
case "interval hour to minute":
case "interval hour to second":
case "interval minute":
case "interval minute to second":
case "interval second":
case "time with time zone":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["interval day(2) to second(6)"]);
return OdbcType.Time;
case "date":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["date(7)"]);
return OdbcType.DateTime;
case "timestamp":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["timestamp(6)"]);
return OdbcType.DateTime;
case "timestamp with local time zone":
case "timestamp with time zone":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["timestamp(6) with local time zone"]);
return OdbcType.DateTime;
case "binary":
case "varbinary":
case "blob":
case "image":
case "longvarbinary":
case "bfile":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["blob"]);
return OdbcType.VarBinary;
case "nvarchar2":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.NVarChar;
case "varchar":
case "varchar2":
case "text":
case "longvarchar":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.NVarChar;
case "character":
case "char":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.Char;
case "nchar":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.NChar;
case "clob":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.NVarChar;
case "nclob":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.NVarChar;
case "raw":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["blob"]);
return OdbcType.VarBinary;
case "long raw":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["blob"]);
return OdbcType.VarBinary;
case "real":
case "binary_float":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["float(63)"]);
return OdbcType.Real;
case "double":
case "float":
case "double precision":
case "binary_double":
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["float(126)"]);
return OdbcType.Double;
case "rowid":
default:
_dicDbToCs.TryAdd(dbfull, _dicDbToCs["nvarchar2(255)"]);
return OdbcType.NVarChar;
}
throw new NotImplementedException($"未实现 {column.DbTypeTextFull} 类型映射");
}
        // Process-wide map from a Dameng full type name (e.g. "number(11)") to the C#
        // conversion metadata used by code generation. Case-insensitive keys on purpose.
        static ConcurrentDictionary<string, DbToCs> _dicDbToCs = new ConcurrentDictionary<string, DbToCs>(StringComparer.CurrentCultureIgnoreCase);
        // Seeds the default database-type -> C# type mappings that GetSqlDbType relies on.
        static OdbcDamengDbFirst()
        {
            var defaultDbToCs = new Dictionary<string, DbToCs>() {
                { "number(1)", new DbToCs("(bool?)", "{0} == \"1\"", "{0} == true ? \"1\" : \"0\"", "bool?", typeof(bool), typeof(bool?), "{0}.Value", "GetBoolean") },
                { "number(4)", new DbToCs("(sbyte?)", "sbyte.Parse({0})", "{0}.ToString()", "sbyte?", typeof(sbyte), typeof(sbyte?), "{0}.Value", "GetInt16") },
                { "number(6)", new DbToCs("(short?)", "short.Parse({0})", "{0}.ToString()", "short?", typeof(short), typeof(short?), "{0}.Value", "GetInt16") },
                { "number(11)", new DbToCs("(int?)", "int.Parse({0})", "{0}.ToString()", "int?", typeof(int), typeof(int?), "{0}.Value", "GetInt32") },
                { "number(21)", new DbToCs("(long?)", "long.Parse({0})", "{0}.ToString()", "long?", typeof(long), typeof(long?), "{0}.Value", "GetInt64") },
                { "number(3)", new DbToCs("(byte?)", "byte.Parse({0})", "{0}.ToString()", "byte?", typeof(byte), typeof(byte?), "{0}.Value", "GetByte") },
                { "number(5)", new DbToCs("(ushort?)", "ushort.Parse({0})", "{0}.ToString()", "ushort?", typeof(ushort), typeof(ushort?), "{0}.Value", "GetInt32") },
                { "number(10)", new DbToCs("(uint?)", "uint.Parse({0})", "{0}.ToString()", "uint?", typeof(uint), typeof(uint?), "{0}.Value", "GetInt64") },
                { "number(20)", new DbToCs("(ulong?)", "ulong.Parse({0})", "{0}.ToString()", "ulong?", typeof(ulong), typeof(ulong?), "{0}.Value", "GetDecimal") },
                { "float(126)", new DbToCs("(double?)", "double.Parse({0})", "{0}.ToString()", "double?", typeof(double), typeof(double?), "{0}.Value", "GetDouble") },
                { "float(63)", new DbToCs("(float?)", "float.Parse({0})", "{0}.ToString()", "float?", typeof(float), typeof(float?), "{0}.Value", "GetFloat") },
                { "number(10,2)", new DbToCs("(decimal?)", "decimal.Parse({0})", "{0}.ToString()", "decimal?", typeof(decimal), typeof(decimal?), "{0}.Value", "GetDecimal") },
                // NOTE(review): this csParse nests double.Parse inside TimeSpan.Parse, which looks
                // suspicious (TimeSpan.Parse takes a string) — verify against the code generator templates.
                { "interval day(2) to second(6)", new DbToCs("(TimeSpan?)", "TimeSpan.Parse(double.Parse({0}))", "{0}.Ticks.ToString()", "TimeSpan?", typeof(TimeSpan), typeof(TimeSpan?), "{0}.Value", "GetValue") },
                { "date(7)", new DbToCs("(DateTime?)", "new DateTime(long.Parse({0}))", "{0}.Ticks.ToString()", "DateTime?", typeof(DateTime), typeof(DateTime?), "{0}.Value", "GetValue") },
                { "timestamp(6)", new DbToCs("(DateTime?)", "new DateTime(long.Parse({0}))", "{0}.Ticks.ToString()", "DateTime?", typeof(DateTime), typeof(DateTime?), "{0}.Value", "GetValue") },
                { "timestamp(6) with local time zone", new DbToCs("(DateTime?)", "new DateTime(long.Parse({0}))", "{0}.Ticks.ToString()", "DateTime?", typeof(DateTime), typeof(DateTime?), "{0}.Value", "GetValue") },
                { "blob", new DbToCs("(byte[])", "Convert.FromBase64String({0})", "Convert.ToBase64String({0})", "byte[]", typeof(byte[]), typeof(byte[]), "{0}", "GetValue") },
                { "nvarchar2(255)", new DbToCs("", "{0}.Replace(StringifySplit, \"|\")", "{0}.Replace(\"|\", StringifySplit)", "string", typeof(string), typeof(string), "{0}", "GetString") },
                // NOTE(review): GetSqlDbType normalizes to the key "char(36)", but this entry is
                // registered as "char(36 char)" — confirm the Guid mapping is ever actually hit.
                { "char(36 char)", new DbToCs("(Guid?)", "Guid.Parse({0})", "{0}.ToString()", "Guid?", typeof(Guid), typeof(Guid?), "{0}.Value", "GetGuid") },
            };
            foreach (var kv in defaultDbToCs)
                _dicDbToCs.TryAdd(kv.Key, kv.Value);
        }
        // Thin lookups into _dicDbToCs keyed by the column's full type name; each returns
        // null when the type has no registered mapping. The Convert/Type variants strip the
        // trailing '?' for NOT NULL columns.
        public string GetCsConvert(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? (column.IsNullable ? trydc.csConvert : trydc.csConvert.Replace("?", "")) : null;
        public string GetCsParse(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? trydc.csParse : null;
        public string GetCsStringify(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? trydc.csStringify : null;
        public string GetCsType(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? (column.IsNullable ? trydc.csType : trydc.csType.Replace("?", "")) : null;
        public Type GetCsTypeInfo(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? trydc.csTypeInfo : null;
        public string GetCsTypeValue(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? trydc.csTypeValue : null;
        public string GetDataReaderMethod(DbColumnInfo column) => _dicDbToCs.TryGetValue(column.DbTypeTextFull, out var trydc) ? trydc.dataReaderMethod : null;
public List<string> GetDatabases()
{
var sql = @" select username from all_users";
var ds = _orm.Ado.ExecuteArray(CommandType.Text, sql);
return ds.Select(a => a.FirstOrDefault()?.ToString()).ToList();
}
        /// <summary>
        /// Returns true when the table named <paramref name="name"/> ("owner.table" or bare
        /// "table", resolved against the connection's user) exists in all_tab_comments.
        /// </summary>
        public bool ExistsTable(string name, bool ignoreCase)
        {
            if (string.IsNullOrEmpty(name)) return false;
            var tbname = _commonUtils.SplitTableName(name);
            if (tbname?.Length == 1)
            {
                // No owner supplied: default to the pool's user, falling back to parsing
                // the connection string when the pool does not expose it.
                var userId = (_orm.Ado.MasterPool as OdbcDamengConnectionPool)?.UserId;
                if (string.IsNullOrEmpty(userId))
                    using (var conn = _orm.Ado.MasterPool.Get())
                    {
                        userId = OdbcDamengConnectionPool.GetUserId(conn.Value.ConnectionString);
                    }
                tbname = new[] { userId, tbname[0] };
            }
            if (ignoreCase) tbname = tbname.Select(a => a.ToLower()).ToArray();
            // FormatSql parameterizes/escapes the owner and table names.
            var sql = $" select 1 from all_tab_comments where {(ignoreCase ? "lower(owner)" : "owner")}={_commonUtils.FormatSql("{0}", tbname[0])} and {(ignoreCase ? "lower(table_name)" : "table_name")}={_commonUtils.FormatSql("{0}", tbname[1])}";
            return string.Concat(_orm.Ado.ExecuteScalar(CommandType.Text, sql)) == "1";
        }
        // Convenience wrappers over GetTables for a single named table / a set of schemas.
        public DbTableInfo GetTableByName(string name, bool ignoreCase = true) => GetTables(null, name, ignoreCase)?.FirstOrDefault();
        public List<DbTableInfo> GetTablesByDatabase(params string[] database) => GetTables(database, null, false);
        /// <summary>
        /// Loads table/view metadata (columns, primary keys, indexes, unique constraints and —
        /// on a full scan only — foreign keys) from the Dameng data dictionary.
        /// </summary>
        /// <param name="database">Schemas (user names) to scan; null/empty falls back to the current user.</param>
        /// <param name="tablename">Optional "owner.table" (or bare "table") filter for a single table.</param>
        /// <param name="ignoreCase">Compare owner/table names case-insensitively.</param>
        public List<DbTableInfo> GetTables(string[] database, string tablename, bool ignoreCase)
        {
            var loc1 = new List<DbTableInfo>();                                     // result list
            var loc2 = new Dictionary<string, DbTableInfo>();                       // table_id -> table
            var loc3 = new Dictionary<string, Dictionary<string, DbColumnInfo>>();  // table_id -> (column name -> column)
            string[] tbname = null;
            if (string.IsNullOrEmpty(tablename) == false)
            {
                // Single-table mode: resolve a missing owner to the current user's schema.
                tbname = _commonUtils.SplitTableName(tablename);
                if (tbname?.Length == 1)
                {
                    var userUsers = _orm.Ado.ExecuteScalar(" select username from user_users")?.ToString();
                    if (string.IsNullOrEmpty(userUsers)) return loc1;
                    tbname = new[] { userUsers, tbname[0] };
                }
                if (ignoreCase) tbname = tbname.Select(a => a.ToLower()).ToArray();
                database = new[] { tbname[0] };
            }
            else if (database == null || database.Any() == false)
            {
                // No schema given: scan only the current user's schema.
                var userUsers = _orm.Ado.ExecuteScalar(" select username from user_users")?.ToString();
                if (string.IsNullOrEmpty(userUsers)) return loc1;
                database = new[] { userUsers };
            }
            var databaseIn = string.Join(",", database.Select(a => _commonUtils.FormatSql("{0}", a)));
            // 1) Enumerate tables (and their comments) in the requested schemas.
            var sql = $@"
select
a.owner || '.' || a.table_name,
a.owner,
a.table_name,
b.comments,
'TABLE'
from all_tables a
left join all_tab_comments b on b.owner = a.owner and b.table_name = a.table_name and b.table_type = 'TABLE'
where {(ignoreCase ? "lower(a.owner)" : "a.owner")} in ({databaseIn}){(tbname == null ? "" : $" and {(ignoreCase ? "lower(a.table_name)" : "a.table_name")}={_commonUtils.FormatSql("{0}", tbname[1])}")}";
            var ds = _orm.Ado.ExecuteArray(CommandType.Text, sql);
            if (ds == null) return loc1;
            var loc6 = new List<string[]>();
            var loc66 = new List<string[]>();
            var loc6_1000 = new List<string>();
            var loc66_1000 = new List<string>();
            foreach (var row in ds)
            {
                var table_id = string.Concat(row[0]);
                var schema = string.Concat(row[1]);
                var table = string.Concat(row[2]);
                var comment = string.Concat(row[3]);
                var type = string.Concat(row[4]) == "VIEW" ? DbTableType.VIEW : DbTableType.TABLE;
                if (database.Length == 1)
                {
                    // Single schema: drop the "owner." prefix from ids and blank the schema.
                    table_id = table_id.Substring(table_id.IndexOf('.') + 1);
                    schema = "";
                }
                loc2.Add(table_id, new DbTableInfo { Id = table_id, Schema = schema, Name = table, Comment = comment, Type = type });
                loc3.Add(table_id, new Dictionary<string, DbColumnInfo>());
                // Batch names in chunks of <= 999 — presumably to stay under the dictionary's
                // 1000-element IN-list limit (Oracle-compatible behavior); confirm for Dameng.
                switch (type)
                {
                    case DbTableType.TABLE:
                    case DbTableType.VIEW:
                        loc6_1000.Add(table.Replace("'", "''"));
                        if (loc6_1000.Count >= 999)
                        {
                            loc6.Add(loc6_1000.ToArray());
                            loc6_1000.Clear();
                        }
                        break;
                    case DbTableType.StoreProcedure:
                        loc66_1000.Add(table.Replace("'", "''"));
                        if (loc66_1000.Count >= 999)
                        {
                            loc66.Add(loc66_1000.ToArray());
                            loc66_1000.Clear();
                        }
                        break;
                }
            }
            if (loc6_1000.Count > 0) loc6.Add(loc6_1000.ToArray());
            if (loc66_1000.Count > 0) loc66.Add(loc66_1000.ToArray());
            if (loc6.Count == 0) return loc1;
            // Build "(a.table_name in (...) OR a.table_name in (...))" from the batches.
            var loc8 = new StringBuilder().Append("(");
            for (var loc8idx = 0; loc8idx < loc6.Count; loc8idx++)
            {
                if (loc8idx > 0) loc8.Append(" OR ");
                loc8.Append("a.table_name in (");
                for (var loc8idx2 = 0; loc8idx2 < loc6[loc8idx].Length; loc8idx2++)
                {
                    if (loc8idx2 > 0) loc8.Append(",");
                    loc8.Append($"'{loc6[loc8idx][loc8idx2]}'");
                }
                loc8.Append(")");
            }
            loc8.Append(")");
            // 2) Load every column of the selected tables, including nullability, a
            //    sequence-based identity heuristic ({table}_seq_{column}), comment and default.
            sql = $@"
select
a.owner || '.' || a.table_name,
a.column_name,
a.data_type,
a.data_length,
a.data_precision,
a.data_scale,
a.char_used,
case when a.nullable = 'N' then 0 else 1 end,
nvl((select 1 from user_sequences where upper(sequence_name)=upper(a.table_name||'_seq_'||a.column_name) and rownum < 2), 0),
b.comments,
a.data_default
from all_tab_cols a
left join all_col_comments b on b.owner = a.owner and b.table_name = a.table_name and b.column_name = a.column_name
where {(ignoreCase ? "lower(a.owner)" : "a.owner")} in ({databaseIn}) and {loc8}
";
            ds = _orm.Ado.ExecuteArray(CommandType.Text, sql);
            if (ds == null) return loc1;
            var ds2 = new List<object[]>();
            foreach (var row in ds)
            {
                // Reshape: [table_id, column, bare type, (unused), full type, nullable,
                //           identity, comment, default].
                var ds2item = new object[9];
                ds2item[0] = row[0];
                ds2item[1] = row[1];
                ds2item[2] = Regex.Replace(string.Concat(row[2]), @"\(\d+\)", "");
                ds2item[4] = OdbcDamengCodeFirst.GetDamengSqlTypeFullName(new object[] { row[1], row[2], row[3], row[4], row[5], row[6] });
                ds2item[5] = string.Concat(row[7]);
                ds2item[6] = string.Concat(row[8]);
                ds2item[7] = string.Concat(row[9]);
                ds2item[8] = string.Concat(row[10]);
                ds2.Add(ds2item);
            }
            var position = 0;
            foreach (var row in ds2)
            {
                string table_id = string.Concat(row[0]);
                string column = string.Concat(row[1]);
                string type = string.Concat(row[2]);
                //long max_length = long.Parse(string.Concat(row[3]));
                string sqlType = string.Concat(row[4]);
                // Extract the declared length from the full type name, e.g. "varchar2(255)" -> 255.
                var m_len = Regex.Match(sqlType, @"\w+\((\d+)");
                int max_length = m_len.Success ? int.Parse(m_len.Groups[1].Value) : -1;
                bool is_nullable = string.Concat(row[5]) == "1";
                bool is_identity = string.Concat(row[6]) == "1";
                string comment = string.Concat(row[7]);
                string defaultValue = string.Concat(row[8]);
                if (max_length == 0) max_length = -1;
                if (database.Length == 1)
                {
                    table_id = table_id.Substring(table_id.IndexOf('.') + 1);
                }
                loc3[table_id].Add(column, new DbColumnInfo
                {
                    Name = column,
                    MaxLength = max_length,
                    IsIdentity = is_identity,
                    IsNullable = is_nullable,
                    IsPrimary = false,
                    DbTypeText = type,
                    DbTypeTextFull = sqlType,
                    Table = loc2[table_id],
                    Coment = comment,
                    DefaultValue = defaultValue,
                    Position = ++position
                });
                loc3[table_id][column].DbType = this.GetDbType(loc3[table_id][column]);
                loc3[table_id][column].CsType = this.GetCsTypeInfo(loc3[table_id][column]);
            }
            // 3) Load index membership; flags: unique, backs a primary key, (clustered placeholder),
            //    descending order and position.
            sql = $@"
select
a.table_owner || '.' || a.table_name,
c.column_name,
c.index_name,
case when a.uniqueness = 'UNIQUE' then 1 else 0 end,
case when exists(select 1 from all_constraints where index_name = a.index_name and constraint_type = 'P') then 1 else 0 end,
0,
case when c.descend = 'DESC' then 1 else 0 end,
c.column_position
from all_indexes a,
all_ind_columns c
where a.index_name = c.index_name
and a.table_owner = c.table_owner
and a.table_name = c.table_name
and {(ignoreCase ? "lower(a.table_owner)" : "a.table_owner")} in ({databaseIn}) and {loc8}
";
            ds = _orm.Ado.ExecuteArray(CommandType.Text, sql);
            if (ds == null) return loc1;
            var indexColumns = new Dictionary<string, Dictionary<string, DbIndexInfo>>();
            var uniqueColumns = new Dictionary<string, Dictionary<string, DbIndexInfo>>();
            foreach (var row in ds)
            {
                string table_id = string.Concat(row[0]);
                string column = string.Concat(row[1]).Trim('"');
                string index_id = string.Concat(row[2]);
                bool is_unique = string.Concat(row[3]) == "1";
                bool is_primary_key = string.Concat(row[4]) == "1";
                bool is_clustered = string.Concat(row[5]) == "1";
                bool is_desc = string.Concat(row[6]) == "1";
                if (database.Length == 1)
                    table_id = table_id.Substring(table_id.IndexOf('.') + 1);
                if (loc3.ContainsKey(table_id) == false || loc3[table_id].ContainsKey(column) == false) continue;
                var loc9 = loc3[table_id][column];
                // A PK-backing index marks the column primary but is not reported as an index.
                if (loc9.IsPrimary == false && is_primary_key) loc9.IsPrimary = is_primary_key;
                if (is_primary_key) continue;
                Dictionary<string, DbIndexInfo> loc10 = null;
                DbIndexInfo loc11 = null;
                if (!indexColumns.TryGetValue(table_id, out loc10))
                    indexColumns.Add(table_id, loc10 = new Dictionary<string, DbIndexInfo>());
                if (!loc10.TryGetValue(index_id, out loc11))
                    loc10.Add(index_id, loc11 = new DbIndexInfo());
                loc11.Columns.Add(new DbIndexColumnInfo { Column = loc9, IsDesc = is_desc });
                if (is_unique && !is_primary_key)
                {
                    if (!uniqueColumns.TryGetValue(table_id, out loc10))
                        uniqueColumns.Add(table_id, loc10 = new Dictionary<string, DbIndexInfo>());
                    if (!loc10.TryGetValue(index_id, out loc11))
                        loc10.Add(index_id, loc11 = new DbIndexInfo());
                    loc11.Columns.Add(new DbIndexColumnInfo { Column = loc9, IsDesc = is_desc });
                }
            }
            foreach (string table_id in indexColumns.Keys)
            {
                foreach (var column in indexColumns[table_id])
                    loc2[table_id].IndexesDict.Add(column.Key, column.Value);
            }
            foreach (string table_id in uniqueColumns.Keys)
            {
                foreach (var column in uniqueColumns[table_id])
                {
                    column.Value.Columns.Sort((c1, c2) => c1.Column.Name.CompareTo(c2.Column.Name));
                    loc2[table_id].UniquesDict.Add(column.Key, column.Value);
                }
            }
            // 4) Foreign keys — resolved only on full scans (tbname == null), since a single-table
            //    scan may not have both endpoints loaded.
            if (tbname == null)
            {
                sql = $@"
select
a.owner || '.' || a.table_name,
c.column_name,
c.constraint_name,
b.owner || '.' || b.table_name,
1,
d.column_name
-- a.owner 外键拥有者,
-- a.table_name 外键表,
-- c.column_name 外键列,
-- b.owner 主键拥有者,
-- b.table_name 主键表,
-- d.column_name 主键列,
-- c.constraint_name 外键名,
-- d.constraint_name 主键名
from
all_constraints a,
all_constraints b,
all_cons_columns c, --外键表
all_cons_columns d --主键表
where
a.r_constraint_name = b.constraint_name
and a.constraint_type = 'R'
and b.constraint_type = 'P'
and a.r_owner = b.owner
and a.constraint_name = c.constraint_name
and b.constraint_name = d.constraint_name
and a.owner = c.owner
and a.table_name = c.table_name
and b.owner = d.owner
and b.table_name = d.table_name
and {(ignoreCase ? "lower(a.owner)" : "a.owner")} in ({databaseIn}) and {loc8}
";
                ds = _orm.Ado.ExecuteArray(CommandType.Text, sql);
                if (ds == null) return loc1;
                var fkColumns = new Dictionary<string, Dictionary<string, DbForeignInfo>>();
                foreach (var row in ds)
                {
                    string table_id = string.Concat(row[0]);
                    string column = string.Concat(row[1]);
                    string fk_id = string.Concat(row[2]);
                    string ref_table_id = string.Concat(row[3]);
                    bool is_foreign_key = string.Concat(row[4]) == "1";
                    string referenced_column = string.Concat(row[5]);
                    if (database.Length == 1)
                    {
                        table_id = table_id.Substring(table_id.IndexOf('.') + 1);
                        ref_table_id = ref_table_id.Substring(ref_table_id.IndexOf('.') + 1);
                    }
                    if (loc3.ContainsKey(table_id) == false || loc3[table_id].ContainsKey(column) == false) continue;
                    var loc9 = loc3[table_id][column];
                    if (loc2.ContainsKey(ref_table_id) == false) continue;
                    var loc10 = loc2[ref_table_id];
                    var loc11 = loc3[ref_table_id][referenced_column];
                    Dictionary<string, DbForeignInfo> loc12 = null;
                    DbForeignInfo loc13 = null;
                    if (!fkColumns.TryGetValue(table_id, out loc12))
                        fkColumns.Add(table_id, loc12 = new Dictionary<string, DbForeignInfo>());
                    if (!loc12.TryGetValue(fk_id, out loc13))
                        loc12.Add(fk_id, loc13 = new DbForeignInfo { Table = loc2[table_id], ReferencedTable = loc10 });
                    loc13.Columns.Add(loc9);
                    loc13.ReferencedColumns.Add(loc11);
                }
                foreach (var table_id in fkColumns.Keys)
                    foreach (var fk in fkColumns[table_id])
                        loc2[table_id].ForeignsDict.Add(fk.Key, fk.Value);
            }
            // 5) Assemble columns/identities/primaries, then order columns (PKs first, then
            //    FK members, then by name) and tables (by schema, then name).
            foreach (var table_id in loc3.Keys)
            {
                foreach (var loc5 in loc3[table_id].Values)
                {
                    loc2[table_id].Columns.Add(loc5);
                    if (loc5.IsIdentity) loc2[table_id].Identitys.Add(loc5);
                    if (loc5.IsPrimary) loc2[table_id].Primarys.Add(loc5);
                }
            }
            foreach (var loc4 in loc2.Values)
            {
                //if (loc4.Primarys.Count == 0 && loc4.UniquesDict.Count > 0)
                //{
                //    foreach (var loc5 in loc4.UniquesDict.First().Value.Columns)
                //    {
                //        loc5.Column.IsPrimary = true;
                //        loc4.Primarys.Add(loc5.Column);
                //    }
                //}
                loc4.Primarys.Sort((c1, c2) => c1.Name.CompareTo(c2.Name));
                loc4.Columns.Sort((c1, c2) =>
                {
                    int compare = c2.IsPrimary.CompareTo(c1.IsPrimary);
                    if (compare == 0)
                    {
                        bool b1 = loc4.ForeignsDict.Values.Where(fk => fk.Columns.Where(c3 => c3.Name == c1.Name).Any()).Any();
                        bool b2 = loc4.ForeignsDict.Values.Where(fk => fk.Columns.Where(c3 => c3.Name == c2.Name).Any()).Any();
                        compare = b2.CompareTo(b1);
                    }
                    if (compare == 0) compare = c1.Name.CompareTo(c2.Name);
                    return compare;
                });
                loc1.Add(loc4);
            }
            loc1.Sort((t1, t2) =>
            {
                var ret = t1.Schema.CompareTo(t2.Schema);
                if (ret == 0) ret = t1.Name.CompareTo(t2.Name);
                return ret;
            });
            loc2.Clear();
            loc3.Clear();
            return loc1;
        }
public List<DbEnumInfo> GetEnumsByDatabase(params string[] database)
{
return new List<DbEnumInfo>();
}
}
} | 48.944816 | 247 | 0.526871 | [
"MIT"
] | p0nley/FreeSql | Providers/FreeSql.Provider.Odbc/Dameng/OdbcDamengDbFirst.cs | 29,353 | C# |
using System.Reflection;
using System.IO;
using System.Text;
namespace Enki.Extensions
{
public static class Ext
{
//public static Weather Instance(this Weather i){
// return (Weather)typeof(Weather).GetField("inst", BindingFlags.GetField | BindingFlags.Static | BindingFlags.Instance).GetValue(null);
//}
//public static World Instance(this World i){
// return (World)typeof(World).GetField("inst", BindingFlags.GetField | BindingFlags.Static | BindingFlags.Instance).GetValue(null);
//}
public static T GetProperty<T>(this object o, string key) {
if (o == null) return default(T);
return (T)o.GetType().GetField(key).GetValue(o);
}
public static Stream StringStream(string input) {
return new MemoryStream(Encoding.UTF8.GetBytes(input));
}
public static UnityEngine.Vector3 ToV3(this Assimp.Vector3D v3) {
return new UnityEngine.Vector3(v3.X, v3.Y, v3.Z);
}
public static UnityEngine.Vector2 ToV2(this Assimp.Vector3D v3) {
return new UnityEngine.Vector2(v3.X, v3.Y);
}
}
}
| 30.235294 | 138 | 0.722763 | [
"MIT"
] | CEbbinghaus/Enki | Enki/Extentions.cs | 1,030 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System;
using System.ComponentModel;
namespace Azure.ResourceManager.Resources.Models
{
    /// <summary> The policy assignment enforcement mode. Possible values are Default and DoNotEnforce. </summary>
    public readonly partial struct EnforcementMode : IEquatable<EnforcementMode>
    {
        private readonly string _value;
        /// <summary> Initializes a new instance of <see cref="EnforcementMode"/>. </summary>
        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
        public EnforcementMode(string value)
        {
            _value = value ?? throw new ArgumentNullException(nameof(value));
        }
        private const string DefaultValue = "Default";
        private const string DoNotEnforceValue = "DoNotEnforce";
        /// <summary> The policy effect is enforced during resource creation or update. </summary>
        public static EnforcementMode Default { get; } = new EnforcementMode(DefaultValue);
        /// <summary> The policy effect is not enforced during resource creation or update. </summary>
        public static EnforcementMode DoNotEnforce { get; } = new EnforcementMode(DoNotEnforceValue);
        /// <summary> Determines if two <see cref="EnforcementMode"/> values are the same. </summary>
        public static bool operator ==(EnforcementMode left, EnforcementMode right) => left.Equals(right);
        /// <summary> Determines if two <see cref="EnforcementMode"/> values are not the same. </summary>
        public static bool operator !=(EnforcementMode left, EnforcementMode right) => !left.Equals(right);
        /// <summary> Converts a string to a <see cref="EnforcementMode"/>. </summary>
        public static implicit operator EnforcementMode(string value) => new EnforcementMode(value);
        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override bool Equals(object obj) => obj is EnforcementMode other && Equals(other);
        /// <inheritdoc />
        public bool Equals(EnforcementMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);
        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override int GetHashCode() => _value?.GetHashCode() ?? 0;
        /// <inheritdoc />
        public override string ToString() => _value;
    }
}
| 48.75 | 134 | 0.689152 | [
"MIT"
] | 0rland0Wats0n/azure-sdk-for-net | sdk/resources/Azure.ResourceManager.Resources/src/Generated/Models/EnforcementMode.cs | 2,535 | C# |
using EGamePlay;
using EGamePlay.Combat;
using ET;
using System.Threading;
using Sirenix.OdinInspector;
using UnityEngine;
using System;
/// <summary>
/// Top-level work flow for a combat session: create -> run -> finish, then restart.
/// </summary>
public class CombatFlow : WorkFlow
{
    // Target time for jumping combat playback — units (frames vs ms) not visible here; confirm at call sites.
    public int JumpToTime { get; set; }
    public override void Awake()
    {
        FlowSource = AddChild<WorkFlowSource>();
        // Chain the three combat phases; ToRestart() presumably loops the flow back to the
        // beginning — confirm against the WorkFlow API.
        FlowSource.ToEnter<CombatCreateFlow>().ToEnter<CombatRunFlow>().ToEnter<CombatFinishFlow>().ToRestart();
    }
    public override void Startup()
    {
        // Kick off the flow from its source node.
        FlowSource.Startup();
    }
}
| 20.2 | 112 | 0.691089 | [
"MIT"
] | ErQing/EGamePlay | Assets/EGPsExamples/TurnBaseExample/Scripts/CombatFlow/0-CombatFlow.cs | 507 | C# |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using Remotus.Web.Areas.HelpPage.ModelDescriptions;
using Remotus.Web.Areas.HelpPage.Models;
namespace Remotus.Web.Areas.HelpPage
{
    public static class HelpPageConfigurationExtensions
    {
        // Key prefix under which generated HelpPageApiModel instances are cached in
        // HttpConfiguration.Properties (one entry per friendly API description id).
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" matches any parameter combination for this action.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily creates and caches a single generator per configuration.
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            // Lazily creates and caches one generator, pre-seeded from all known APIs.
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>, or null when no API matches <paramref name="apiDescriptionId"/>
        /// (model is never assigned in that case).
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                // Cache miss: find the matching API description and build the model once.
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }

            return (HelpPageApiModel)model;
        }

        /// <summary>
        /// Builds the complete help model for a single API: URI parameters, request and
        /// resource (response) model descriptions, and generated samples.
        /// </summary>
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };

            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);

            return apiModel;
        }

        /// <summary>
        /// Populates <see cref="HelpPageApiModel.UriParameters"/> from the action's
        /// FromUri parameters, flattening complex types into their properties unless
        /// the type is bindable from a string via a TypeConverter.
        /// </summary>
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }

                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null
                        && !IsBindableWithTypeConverter(parameterType))
                    {
                        // Flatten: each public property becomes its own URI parameter.
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter =
                            AddParameterDescription(apiModel, apiParameter, typeDescription);

                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }

                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);

                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }

        /// <summary>
        /// True when <paramref name="parameterType"/> can be converted from a string
        /// (i.e. model binding can treat it like a simple type).
        /// </summary>
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }

            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }

        /// <summary>
        /// Creates a <see cref="ParameterDescription"/> for the given API parameter,
        /// adds it to the model's URI parameters and returns it for further annotation.
        /// </summary>
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
            ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };

            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }

        /// <summary>
        /// Sets the request model description from the action's FromBody parameter, or —
        /// when the action takes a raw HttpRequestMessage — from the actual type
        /// registered via <see cref="SetActualRequestType(HttpConfiguration, Type, string, string)"/>.
        /// </summary>
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);

                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }

        /// <summary>
        /// Sets the resource (response) description from the action's declared or
        /// documented response type; void actions get no resource description.
        /// </summary>
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            // ResponseType (from [ResponseType]/docs) wins over the declared return type.
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }

        /// <summary>
        /// Fills in sample requests/responses; any exception thrown during generation
        /// is captured into <see cref="HelpPageApiModel.ErrorMessages"/> instead of
        /// failing the whole help page.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
                    "An exception has occurred while generating the sample. Exception message: {0}",
                    HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }

        /// <summary>
        /// Finds the parameter that carries the request body (FromBody or a raw
        /// HttpRequestMessage) and resolves its effective type. Returns false when
        /// the action has no such parameter or the type cannot be resolved.
        /// </summary>
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }

            resourceType = parameterDescription.ParameterDescriptor.ParameterType;

            if (resourceType == typeof(HttpRequestMessage))
            {
                // The real payload type must have been registered with SetActualRequestType.
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }

            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }

            return true;
        }

        /// <summary>
        /// Creates the shared <see cref="ModelDescriptionGenerator"/> and warms it up
        /// with the body parameter type of every known API.
        /// </summary>
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }

        /// <summary>
        /// If the generated sample is an <see cref="InvalidSample"/>, surfaces its
        /// error message on the API model.
        /// </summary>
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
| 51.450855 | 196 | 0.622825 | [
"MIT"
] | LazyTarget/ProcHelper | src/Remotus.Web/Areas/HelpPage/HelpPageConfigurationExtensions.cs | 24,079 | C# |
using System;
using System.Threading.Tasks;
using GraphQL.Types;
namespace GraphQL.SystemTextJson
{
public static class SchemaExtensions
{
/// <summary>
/// Configures an <see cref="ExecutionOptions"/> using the given <paramref name="configure"/> action
/// then executes those options using the <paramref name="schema"/> and a <see cref="DocumentWriter"/>
/// with indentation turned on.
/// </summary>
/// <param name="schema">A schema to use.</param>
/// <param name="configure">An action that configures something to execute.</param>
/// <returns>The JSON result as a string.</returns>
/// <remarks>
/// Useful for quickly executing something and "getting started".
/// Part of the public API and should not be removed even if it has no references.
/// </remarks>
public static Task<string> ExecuteAsync(this ISchema schema, Action<ExecutionOptions> configure)
=> schema.ExecuteAsync(new DocumentWriter(indent: true), configure);
}
}
| 42.72 | 110 | 0.652622 | [
"MIT"
] | GrangerAts/graphql-dotnet | src/GraphQL.SystemTextJson/SchemaExtensions.cs | 1,068 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using enums;
public class SpawnPowerUps : MonoBehaviour
{
    // Serialized references wired up in the Unity inspector (names must not change).
    [SerializeField] private GameManager gm;
    [SerializeField] private BackgroundScrollScript bss;

    // Relative weights for each power-up type; a roll in [0, RATIO_TOTAL) picks one.
    public const int RATIO_CHANCE_SHIELD = 75;
    public const int RATIO_CHANCE_CAMO = 25;
    public const int RATIO_CHANCE_LEAD = 10;
    public const int RATIO_TOTAL = RATIO_CHANCE_SHIELD
                                    + RATIO_CHANCE_CAMO
                                    + RATIO_CHANCE_LEAD;

    // Seconds accumulated since the last spawn, compared against nextSpawnDelay.
    private float timeSinceLastSpawn;
    private float nextSpawnDelay = 0.75f;
    private float minDelay = 5.0f;
    private float maxDelay = 8.0f;

    // Start is called before the first frame update
    void Start()
    {
        // Large sentinel so the very first spawn happens after the initial
        // 0.75 s delay instead of a full random interval.
        timeSinceLastSpawn = 100f;
    }

    // Update is called once per frame
    void Update()
    {
        timeSinceLastSpawn += Time.deltaTime;
        if (timeSinceLastSpawn <= nextSpawnDelay)
        {
            return;
        }

        timeSinceLastSpawn = 0;
        nextSpawnDelay = Random.Range(minDelay, maxDelay);
        SpawnPowerUp(gm.powerUpPrefab, RollPowerUpType());
    }

    // Weighted pick; thresholds produce the same distribution as the original
    // subtract-and-test cascade over a single Random.Range(0, RATIO_TOTAL) roll.
    private PowerUpType RollPowerUpType()
    {
        int roll = Random.Range(0, RATIO_TOTAL);
        if (roll < RATIO_CHANCE_SHIELD)
        {
            return PowerUpType.Shield;
        }
        if (roll < RATIO_CHANCE_SHIELD + RATIO_CHANCE_CAMO)
        {
            return PowerUpType.Camo;
        }
        return PowerUpType.Lead;
    }

    // Instantiates the power-up off-screen to the right at a random height and
    // registers it with the background scroller.
    private void SpawnPowerUp(GameObject prefab, PowerUpType type)
    {
        // Draw order matches the original (coin flip first, then offset) so the
        // RNG stream stays identical.
        bool below = Random.Range(0, 2) > 0; // int overload: returns 0 or 1
        float y = Random.Range(0f, 2.5f);
        if (below)
        {
            y = -y;
        }

        GameObject g = GameObject.Instantiate(prefab, new Vector3(10, y, 0), Quaternion.identity);
        g.GetComponent<PowerUp>().SetType(type);
        bss.stationaryObjects.Add(g);
    }
}
| 28.222222 | 97 | 0.583661 | [
"MIT"
] | DevonGrant/Monkey-Bloon-Defense | Monkey-Bloon-Defense/Assets/Scripts/Spawners/SpawnPowerUps.cs | 2,034 | C# |
using Timetabler.Data;
namespace Timetabler.Models
{
    /// <summary>
    /// Data model for the <see cref="SignalboxHoursSetEditForm" /> form.
    /// </summary>
    public class SignalboxHoursSetEditFormModel
    {
        /// <summary>
        /// The <see cref="SignalboxHoursSet" /> to be edited by the form.
        /// </summary>
        public SignalboxHoursSet Data { get; set; }

        /// <summary>
        /// The clock type (12-hour or 24-hour) to use for time input.
        /// </summary>
        public ClockType InputMode { get; set; }
    }
}
| 24.285714 | 73 | 0.566667 | [
"MIT"
] | caitlinsalt/simple-timetable-editor | Timetabler/Models/SignalboxHoursSetEditFormModel.cs | 512 | C# |
// This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++, C#, and Java: http://www.viva64.com
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PartyWorkerDelegate.cs" company="SyndicatedLife">
// Copyright(c) 2018 Ryan Wilson &lt;syndicated.life@gmail.com&gt; (http://syndicated.life/)
// Licensed under the MIT license. See LICENSE.md in the solution root for full license information.
// </copyright>
// <summary>
// PartyWorkerDelegate.cs Implementation
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Sharlayan.Delegates {
using System.Collections.Concurrent;
using Sharlayan.Core;
internal static class PartyWorkerDelegate {
public static ConcurrentDictionary<uint, PartyMember> PartyMembers = new ConcurrentDictionary<uint, PartyMember>();
public static void EnsurePartyMember(uint key, PartyMember entity) {
PartyMembers.AddOrUpdate(key, entity, (k, v) => entity);
}
public static PartyMember GetPartyMember(uint key) {
PartyMember entity;
PartyMembers.TryGetValue(key, out entity);
return entity;
}
public static bool RemovePartyMember(uint key) {
PartyMember entity;
return PartyMembers.TryRemove(key, out entity);
}
}
} | 39.923077 | 123 | 0.578035 | [
"MIT"
] | sappho192/TataruHelper | Sharlayan/Delegates/PartyWorkerDelegate.cs | 1,559 | C# |
using OpenBullet.Views.UserControls;
using PluginFramework;
using RuriLib;
using RuriLib.ViewModels;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
namespace OpenBullet.Plugins
{
    public static class Loader
    {
        /// <summary>
        /// Loads the plugins in a folder one by one.
        /// </summary>
        /// <param name="folder">The folder where plugins are located</param>
        /// <returns>A tuple with the collection of plugin controls and block plugin controls.</returns>
        public static (IEnumerable<PluginControl>, IEnumerable<IBlockPlugin>) LoadPlugins(string folder)
        {
            var plugins = new List<PluginControl>();
            var blockPlugins = new List<IBlockPlugin>();

            foreach (var dll in Directory.GetFiles(folder, "*.dll"))
            {
                var asm = Assembly.LoadFrom(dll);

                // Hook the dependency folder (a folder with the name of the DLL) if it exists
                // so the resolver below can find this plugin's private dependencies.
                var depFolder = Path.Combine(OB.pluginsFolder, Path.GetFileNameWithoutExtension(dll));
                if (Directory.Exists(depFolder))
                {
                    Hook(new string[] { depFolder });
                }

                // Load the dependencies
                LoadDependencies(asm.GetReferencedAssemblies());

                // Search all the types in the assembly
                // NOTE(review): Assembly.GetTypes() throws ReflectionTypeLoadException when any
                // type in the assembly cannot be resolved, aborting the whole plugin scan —
                // confirm whether a per-assembly try/catch is wanted here.
                foreach (var type in asm.GetTypes())
                {
                    // If it implements the IPlugin interface
                    if (type.GetInterface(nameof(IPlugin)) == typeof(IPlugin))
                    {
                        // The last argument tells the control whether the plugin type
                        // is also a view model (derives from ViewModelBase).
                        plugins.Add(new PluginControl(type, OB.App,
                            type.GetTypeInfo().IsSubclassOf(typeof(ViewModelBase))));
                    }

                    // If it implements the IBlockPlugin interface and derives from BlockBase
                    else if (type.GetInterface(nameof(IBlockPlugin)) == typeof(IBlockPlugin)
                        && type.GetTypeInfo().IsSubclassOf(typeof(BlockBase)))
                    {
                        blockPlugins.Add(Activator.CreateInstance(type) as IBlockPlugin);
                    }
                }
            }

            return (plugins, blockPlugins);
        }

        /// <summary>
        /// Loads dependencies recursively in a greedy fashion.
        /// </summary>
        /// <param name="assemblies">The assemblies for which to load dependencies.</param>
        public static void LoadDependencies(IEnumerable<AssemblyName> assemblies)
        {
            foreach (var asm in assemblies)
            {
                // Make sure we didn't load it yet
                if (!AppDomain.CurrentDomain.GetAssemblies().Any(a => a.GetName().FullName == asm.FullName))
                {
                    try
                    {
                        AppDomain.CurrentDomain.Load(asm);

                        // Load more dependencies recursively
                        LoadDependencies(Assembly.Load(asm).GetReferencedAssemblies());
                    }
                    catch { /* Best effort: skip dependencies that cannot be loaded. */ }
                }
            }
        }

        /// <summary>
        /// Hooks folders to the AssemblyResolve of the current AppDomain.
        /// Note: each call appends another resolver handler for its folder set.
        /// </summary>
        /// <param name="folders">The folders where to search for assemblies</param>
        // Found on https://stackoverflow.com/questions/33975073/proper-way-to-resolving-assemblies-from-subfolders
        public static void Hook(params string[] folders)
        {
            AppDomain.CurrentDomain.AssemblyResolve += (sender, args) =>
            {
                // Check if the requested assembly is part of the loaded assemblies
                var loadedAssembly = AppDomain.CurrentDomain.GetAssemblies().FirstOrDefault(a => a.FullName == args.Name);
                if (loadedAssembly != null)
                    return loadedAssembly;

                // This resolver is called when an loaded control tries to load a generated XmlSerializer - We need to discard it.
                // http://connect.microsoft.com/VisualStudio/feedback/details/88566/bindingfailure-an-assembly-failed-to-load-while-using-xmlserialization
                var n = new AssemblyName(args.Name);

                if (n.Name.EndsWith(".xmlserializers", StringComparison.OrdinalIgnoreCase))
                    return null;

                // http://stackoverflow.com/questions/4368201/appdomain-currentdomain-assemblyresolve-asking-for-a-appname-resources-assembl
                if (n.Name.EndsWith(".resources", StringComparison.OrdinalIgnoreCase))
                    return null;

                string assy = null;

                // Find the corresponding assembly file (first match by simple name wins)
                foreach (var dir in folders)
                {
                    assy = new[] { "*.dll", "*.exe" }.SelectMany(g => Directory.EnumerateFiles(dir, g)).FirstOrDefault(f =>
                    {
                        try { return n.Name.Equals(AssemblyName.GetAssemblyName(f).Name, StringComparison.OrdinalIgnoreCase); }
                        catch (BadImageFormatException) { return false; /* Bypass assembly is not a .net exe */ }
                        catch (Exception ex) { throw new ApplicationException("Error loading assembly " + f, ex); }
                    });

                    if (assy != null)
                        return Assembly.LoadFrom(assy);
                }

                throw new ApplicationException("Assembly " + args.Name + " not found");
            };
        }
    }
}
| 42.916031 | 154 | 0.555852 | [
"MIT"
] | Area51Crew/OB-M2-Browning | OpenBullet/Plugins/Loader.cs | 5,624 | C# |
namespace TranscribeMe.API.Data
{
    /// <summary>
    /// Describes a downloadable file entry exposed by the TranscribeMe API.
    /// </summary>
    public class DownloadFileModel
    {
        // Identifier of the file. Presumably the server-side file/job id — confirm with the API.
        public string Id { get; set; }

        // Human-readable processing status string; the set of possible values is not
        // visible here — TODO confirm against the API documentation.
        public string Status { get; set; }

        // File name without extension (Extension is carried separately below).
        public string FileName { get; set; }

        // File extension, e.g. the output format of the transcription — TODO confirm format (with/without dot).
        public string Extension { get; set; }

        // Whether processing of the file has finished.
        public bool IsCompleted { get; set; }

        // Admin flag. NOTE(review): semantics not visible here (admin-owned file vs.
        // admin-only access) — confirm with callers.
        public bool IsAdmin { get; set; }
    }
} | 20.529412 | 45 | 0.581662 | [
"MIT"
] | TranscribeMe/.NET-SDK | TranscribeMe.API.Data/DownloadFileModel.cs | 351 | C# |
using GraphSharp.Controls;
using GraphSharp.Sample;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
// The User Control item template is documented at https://go.microsoft.com/fwlink/?LinkId=234236
namespace GraphSharpSampleCore
{
    public sealed partial class PocVertexView : UserControl
    {
        // Border brush for the vertex, bindable from XAML via the BBrush property.
        public static readonly DependencyProperty BBrushProperty =
            DependencyProperty.Register("BBrush", typeof(Brush), typeof(PocVertexView), new PropertyMetadata(null));// bcz: new UIPropertyMetadata( null ) );

        // Border thickness for the vertex, bindable from XAML via the BThickness property.
        public static readonly DependencyProperty BThicknessProperty =
            DependencyProperty.Register("BThickness", typeof(Thickness), typeof(PocVertexView), new PropertyMetadata(null));// bcz: new UIPropertyMetadata( null ) );

        // The hosting VertexControl: the visual parent of our ContentPresenter.
        // NOTE(review): assumes the control is always templated as
        // VertexControl -> ContentPresenter -> PocVertexView; returns null otherwise.
        VertexControl VC => VisualTreeHelper.GetParent(CP) as VertexControl;

        // The ContentPresenter this view is placed into by the vertex template.
        ContentPresenter CP => VisualTreeHelper.GetParent(this) as ContentPresenter;

        public PocVertexView()
        {
            this.InitializeComponent();
        }

        public Brush BBrush
        {
            get { return (Brush)GetValue(BBrushProperty); }
            set { SetValue(BBrushProperty, value); }
        }

        public Thickness BThickness
        {
            get { return (Thickness)GetValue(BThicknessProperty); }
            set { SetValue(BThicknessProperty, value); }
        }

        // Offset between the pointer-down position and the vertex's Canvas position,
        // captured on press so the vertex drags relative to where it was grabbed.
        Point delta = new Point();

        // Begins a drag: records the grab offset and captures the pointer so
        // move/release events keep arriving even if the pointer leaves the control.
        void MyVertex_PointerPressed(object sender, PointerRoutedEventArgs e)
        {
            // Unsubscribe before subscribing to avoid double-registration on repeated presses.
            PointerMoved -= MyVertex_PointerMoved;
            PointerMoved += MyVertex_PointerMoved;
            PointerReleased -= MyVertex_PointerReleased;
            PointerReleased += MyVertex_PointerReleased;
            var pos = e.GetCurrentPoint(VisualTreeHelper.GetParent(VC) as UIElement).Position;
            delta = new Point(pos.X - Canvas.GetLeft(VC), pos.Y - Canvas.GetTop(VC));
            e.Handled = true;
            this.CapturePointer(e.Pointer);
        }

        // Ends the drag: detaches the transient handlers and releases pointer capture.
        private void MyVertex_PointerReleased(object sender, PointerRoutedEventArgs e)
        {
            PointerReleased -= MyVertex_PointerReleased;
            PointerMoved -= MyVertex_PointerMoved;
            e.Handled = true;
            this.ReleasePointerCapture(e.Pointer);
        }

        // Moves the hosting VertexControl on its parent Canvas, preserving the grab offset.
        private void MyVertex_PointerMoved(object sender, PointerRoutedEventArgs e)
        {
            var pos = e.GetCurrentPoint(VisualTreeHelper.GetParent(VC) as UIElement).Position;
            Canvas.SetLeft(VC, pos.X-delta.X);
            Canvas.SetTop(VC, pos.Y-delta.Y);
            e.Handled = true;
        }

        // Populates the hyper-parameter flyout for this vertex.
        // NOTE(review): assumes DataContext is always a PocVertex (NRE otherwise) — confirm.
        private void Parameters_Click(object sender, RoutedEventArgs e)
        {
            var hyperParams = (DataContext as PocVertex).HyperParams;
            vFlyyout.ItemsSource = hyperParams;
            // 50 px per row; keeps the flyout sized to its item count.
            vFlyyout.Height = hyperParams.Count * 50;
        }
    }
}
| 37.918605 | 165 | 0.667587 | [
"Apache-2.0"
] | ezg/PanoramicDataWin8 | PanoramicDataWin8/view/graphSharp/PocVertexView.xaml.cs | 3,263 | C# |
/* _BEGIN_TEMPLATE_
{
"id": "DALA_Squeamlish",
"name": [
"斯奎丽什",
"Squeamlish"
],
"text": [
null,
null
],
"cardClass": "DRUID",
"type": "HERO",
"cost": null,
"rarity": null,
"set": "DALARAN",
"collectible": null,
"dbfId": 54551
}
_END_TEMPLATE_ */
namespace HREngine.Bots
{
    class Sim_DALA_Squeamlish : SimTemplate //* Squeamlish (斯奎丽什)
    {
        // Intentionally empty: the template header above shows this card is a HERO
        // entity with no card text, so there is no effect to simulate.
    }
} | 14.241379 | 63 | 0.53753 | [
"MIT"
] | chi-rei-den/Silverfish | cards/DALARAN/DALA/Sim_DALA_Squeamlish.cs | 429 | C# |
using System;
using System.Drawing;
using System.Windows.Forms;
using System.Threading.Tasks;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
public class Startup
{
    // Callbacks into the Node.js host (edge.js interop): both take an arbitrary
    // payload and return a Task with an arbitrary result.
    private Func<object, Task<object>> eventHandler;
    private Func<object, Task<object>> checkMessage;

    // The WinForms tray application instance created in Invoke.
    private SysTrayApp app;

    /// <summary>
    /// edge.js entry point. Reads the tray configuration from <paramref name="input"/>
    /// (members read here: name, items, eventHandler, checkMessage, optional icon),
    /// starts a 20 ms polling timer for host messages, and runs the WinForms message
    /// loop until the tray application exits.
    /// </summary>
    public async Task<object> Invoke(dynamic input)
    {
        var name = (string)input.name;
        var items = ((object[])input.items).ToList().Cast<string>();
        this.eventHandler = (Func<object, Task<object>>)input.eventHandler;
        this.checkMessage = (Func<object, Task<object>>)input.checkMessage;
        var i = new Input
        {
            Name = name,
            Items = items,
            TriggerEvent = this.SendEvent
        };
        try
        {
            // Scale the supplied icon bytes to 40x40 and convert them to an Icon.
            var bytes = (byte[])input.icon;
            var source = Image.FromStream(new System.IO.MemoryStream(bytes));
            var target = new Bitmap(40, 40, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
            var g = Graphics.FromImage(target);
            g.DrawImage(source, 0, 0, 40, 40);
            i.Icon = Icon.FromHandle(target.GetHicon());
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException)
        {
            // no icon specified (input.icon member absent) — a default icon is used later
        }
        // for some reason, have to call with await first, then it works for app lifetime
        //await eventHandler(new { e = "start", data = false });
        Func<object, Task<object>> receive = (x) =>
        {
            ReceiveEvent("ev", x);
            return Task.FromResult<object>(true);
        };
        // Announce startup and hand the host a callback for pushing events to us.
        await eventHandler(new { e = "start", data = receive});
        app = new SysTrayApp(i);
        // Poll the host's message queue every 20 ms for the app's lifetime.
        var t = new System.Timers.Timer(20);
        t.AutoReset = true;
        t.Elapsed += onTick;
        t.Start();
        // Blocks until the tray application exits.
        Application.Run(app);
        SendEvent("stop", null);
        return 0;
    }

    // Timer callback: fire-and-forget poll of the host message queue.
    void onTick(object sender, EventArgs t)
    {
        Check();
    }

    // async void: exceptions here are unobservable. The RuntimeBinderException
    // catch is the expected "no message queued" signal (member access on an
    // empty/absent dynamic payload).
    private async void Check()
    {
        var m = await checkMessage(null);
        var message = (dynamic)m;
        try
        {
            var e = (string)message.e;
            var data = (object)message.data;
            ReceiveEvent(e, data);
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException)
        {
            // no message
        }
    }

    // Raises an event on the Node.js side. Null payloads are replaced with false;
    // NOTE(review): presumably because the host contract rejects null — confirm
    // against the Node listener.
    public void SendEvent (string e, object data) {
        var message = new
        {
            e = e,
            data = data ?? false
        };
        eventHandler(message).ContinueWith(x =>
        {
            // required to invoke node fn
        });
    }

    /// <summary>
    /// Dispatches a host message to the matching tray action; unknown names are ignored.
    /// </summary>
    public void ReceiveEvent(string name, dynamic data)
    {
        switch (name)
        {
            case "add:menuItem":
                addMenuItem((string) data);
                break;
            case "del:menuItem":
                delMenuItem((string)data);
                break;
            case "del:menuItem:at":
                delMenuItemAt((int)data);
                break;
            case "exit":
                Application.Exit();
                break;
            case "drop:menu":
                app.trayMenu.MenuItems.Clear();
                break;
            case "updateIcon":
                updateIcon((byte[])data);
                break;
        }
    }

    // Replaces the tray icon with a 40x40 rendering of the supplied image bytes.
    private void updateIcon(byte[] data)
    {
        var source = Image.FromStream(new System.IO.MemoryStream(data));
        var target = new Bitmap(40, 40, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        var g = Graphics.FromImage(target);
        g.DrawImage(source, 0, 0, 40, 40);
        var icon = Icon.FromHandle(target.GetHicon());
        app.trayIcon.Icon = icon;
    }

    // Appends a new context-menu entry with the given caption.
    private void addMenuItem(string item)
    {
        app.trayMenu.MenuItems.Add(app.CreateMenuItem(item));
    }

    // Removes the first context-menu entry whose caption matches, if any.
    private void delMenuItem(string item)
    {
        var menuItem = Tools.ToList<MenuItem>(app.trayMenu.MenuItems).Where(i => i.Text == item).FirstOrDefault();
        if (menuItem != null)
        {
            app.trayMenu.MenuItems.Remove(menuItem);
        }
    }

    // Removes the context-menu entry at the given index (throws if out of range).
    private void delMenuItemAt(int index)
    {
        app.trayMenu.MenuItems.RemoveAt(index);
    }
}
/// <summary>
/// Small collection helpers shared by the tray host.
/// </summary>
public static class Tools
{
    /// <summary>
    /// Copies a non-generic sequence into a <see cref="List{T}"/>, soft-casting each
    /// element with <c>as</c>; elements that are not a <typeparamref name="T"/> become
    /// null entries (they are not skipped).
    /// </summary>
    /// <param name="source">The sequence to copy; must not be null.</param>
    /// <returns>A new list with one entry per element of <paramref name="source"/>.</returns>
    public static List<T> ToList<T>(this IEnumerable source) where T : class
    {
        // Fix: the old implementation also called source.GetEnumerator() and never
        // used (or disposed) the result; that dead enumerator has been removed.
        var list = new List<T>();
        foreach (var i in source)
        {
            // 'as' (not a hard cast) preserves the original null-on-mismatch behavior.
            list.Add(i as T);
        }
        return list;
    }
}
/// <summary>
/// Configuration handed from the host to the tray application: the icon tooltip
/// text, the initial context-menu labels, an optional icon, and a callback the
/// tray uses to raise named events (with a payload) back to the host.
/// </summary>
public class Input
{
    /// <summary>Text shown for the tray icon.</summary>
    public string Name { get; set; }

    /// <summary>Labels for the initial context-menu items.</summary>
    public IEnumerable<string> Items { get; set; }

    /// <summary>Optional tray icon; callers fall back to a default when null.</summary>
    public Icon Icon { get; set; }

    /// <summary>Invoked with an event name and payload to notify the host.</summary>
    public Action<string, object> TriggerEvent { get; set; }

    public Input()
    {
    }
}
/// <summary>
/// Invisible WinForms form that owns the tray icon and its context menu.
/// Menu clicks are reported back through <see cref="Input.TriggerEvent"/> as
/// "click:menuItem" events carrying the item's index and text.
/// </summary>
public class SysTrayApp : Form
{
    // Public so Startup can swap the icon and edit the menu at runtime.
    public NotifyIcon trayIcon;
    public ContextMenu trayMenu;
    private Input input;
    /// <summary>
    /// Builds the context menu from <paramref name="input"/> and shows the tray icon.
    /// </summary>
    public SysTrayApp(Input input)
    {
        this.input = input;
        // Create a simple tray menu with only one item.
        trayMenu = new ContextMenu();
        trayMenu.MenuItems.AddRange(input.Items.Select(CreateMenuItem).ToArray());
        trayIcon = new NotifyIcon();
        trayIcon.Text = input.Name;
        trayIcon.Icon = input.Icon ?? new Icon(SystemIcons.Application, 40, 40);
        // Add menu to tray icon and show it.
        trayIcon.ContextMenu = trayMenu;
        trayIcon.Visible = true;
        trayIcon.Click += OnIconClick;
    }
    /// <summary>Creates a menu item wired to <see cref="OnMenuItemClick"/>.</summary>
    public MenuItem CreateMenuItem(string text)
    {
        return new MenuItem(text, OnMenuItemClick);
    }
    // Invokes NotifyIcon's non-public ShowContextMenu via reflection — presumably so
    // a plain (left) click also opens the menu, which NotifyIcon does not expose
    // publicly.  Fragile across framework versions; confirm before changing.
    private void OnIconClick(object sender, EventArgs e)
    {
        var show = typeof(NotifyIcon).GetMethod("ShowContextMenu", BindingFlags.Instance | BindingFlags.NonPublic);
        show.Invoke(trayIcon, null);
    }
    // Forwards a menu click to the host with the clicked item's index and text.
    private void OnMenuItemClick(object sender, EventArgs e)
    {
        var item = (MenuItem)sender;
        var data = new
        {
            index = item.Index,
            text = item.Text
        };
        this.input.TriggerEvent("click:menuItem", data);
    }
    // Keeps the form itself invisible: only the tray icon should show.
    protected override void OnLoad(EventArgs e)
    {
        Visible = false; // Hide form window.
        ShowInTaskbar = false; // Remove from taskbar.
        base.OnLoad(e);
    }
    // NOTE(review): not referenced anywhere in this class — confirm it is unused
    // (e.g. not wired up elsewhere) before removing.
    private void OnExit(object sender, EventArgs e)
    {
        Application.Exit();
    }
    protected override void Dispose(bool isDisposing)
    {
        if (isDisposing)
        {
            // Release the icon resource.
            trayIcon.Dispose();
        }
        base.Dispose(isDisposing);
    }
}
| 25.401515 | 115 | 0.557113 | [
"MIT"
] | jrdn91/tray-windows | lib/tray.cs | 6,706 | C# |
#pragma checksum "C:\Users\Willi\OneDrive\Desktop\Code Repository\GymWebsiteApp\GymWebsite\GymWebsite\Areas\Identity\Pages\Account\Manage\_ViewImports.cshtml" "{ff1816ec-aa5e-4d10-87f7-6f4963833460}" "5d3b8748540ae983ae698cc4d0f201bae9c754a2"
// <auto-generated/>
#pragma warning disable 1591
[assembly: global::Microsoft.AspNetCore.Razor.Hosting.RazorCompiledItemAttribute(typeof(AspNetCore.Areas_Identity_Pages_Account_Manage__ViewImports), @"mvc.1.0.view", @"/Areas/Identity/Pages/Account/Manage/_ViewImports.cshtml")]
namespace AspNetCore
{
#line hidden
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
#nullable restore
#line 1 "C:\Users\Willi\OneDrive\Desktop\Code Repository\GymWebsiteApp\GymWebsite\GymWebsite\Areas\Identity\Pages\_ViewImports.cshtml"
using Microsoft.AspNetCore.Identity;
#line default
#line hidden
#nullable disable
#nullable restore
#line 2 "C:\Users\Willi\OneDrive\Desktop\Code Repository\GymWebsiteApp\GymWebsite\GymWebsite\Areas\Identity\Pages\_ViewImports.cshtml"
using GymWebsite.Areas.Identity;
#line default
#line hidden
#nullable disable
#nullable restore
#line 3 "C:\Users\Willi\OneDrive\Desktop\Code Repository\GymWebsiteApp\GymWebsite\GymWebsite\Areas\Identity\Pages\_ViewImports.cshtml"
using GymWebsite.Areas.Identity.Pages;
#line default
#line hidden
#nullable disable
#nullable restore
#line 1 "C:\Users\Willi\OneDrive\Desktop\Code Repository\GymWebsiteApp\GymWebsite\GymWebsite\Areas\Identity\Pages\Account\_ViewImports.cshtml"
using GymWebsite.Areas.Identity.Pages.Account;
#line default
#line hidden
#nullable disable
#nullable restore
#line 1 "C:\Users\Willi\OneDrive\Desktop\Code Repository\GymWebsiteApp\GymWebsite\GymWebsite\Areas\Identity\Pages\Account\Manage\_ViewImports.cshtml"
using GymWebsite.Areas.Identity.Pages.Account.Manage;
#line default
#line hidden
#nullable disable
[global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"5d3b8748540ae983ae698cc4d0f201bae9c754a2", @"/Areas/Identity/Pages/Account/Manage/_ViewImports.cshtml")]
[global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"fed6b39a094e9d74464e4b890c777dbcf8553700", @"/Areas/Identity/Pages/_ViewImports.cshtml")]
[global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"1844c5a32a6a1160dd97352daa9d9440c0d1d2fc", @"/Areas/Identity/Pages/Account/_ViewImports.cshtml")]
    // NOTE: This class is part of a Razor build artifact (see the <auto-generated/>
    // marker at the top of the file); it is regenerated on every build, so any hand
    // edits here will be overwritten.
    public class Areas_Identity_Pages_Account_Manage__ViewImports : global::Microsoft.AspNetCore.Mvc.Razor.RazorPage<dynamic>
    {
        #pragma warning disable 1998
        public async override global::System.Threading.Tasks.Task ExecuteAsync()
        {
        }
        #pragma warning restore 1998
        [global::Microsoft.AspNetCore.Razor.Hosting.RazorInjectAttribute]
        public global::Microsoft.AspNetCore.Mvc.ViewFeatures.IModelExpressionProvider ModelExpressionProvider { get; private set; }
        [global::Microsoft.AspNetCore.Razor.Hosting.RazorInjectAttribute]
        public global::Microsoft.AspNetCore.Mvc.IUrlHelper Url { get; private set; }
        [global::Microsoft.AspNetCore.Razor.Hosting.RazorInjectAttribute]
        public global::Microsoft.AspNetCore.Mvc.IViewComponentHelper Component { get; private set; }
        [global::Microsoft.AspNetCore.Razor.Hosting.RazorInjectAttribute]
        public global::Microsoft.AspNetCore.Mvc.Rendering.IJsonHelper Json { get; private set; }
        [global::Microsoft.AspNetCore.Razor.Hosting.RazorInjectAttribute]
        public global::Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper<dynamic> Html { get; private set; }
    }
}
#pragma warning restore 1591
| 52.753425 | 242 | 0.795897 | [
"Unlicense"
] | shoreshanked/GymWebsiteApp | GymWebsite/GymWebsite/obj/Debug/net5.0/Razor/Areas/Identity/Pages/Account/Manage/_ViewImports.cshtml.g.cs | 3,851 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Lab2
{
// The 'ConcreteComponent' class
class Video : LibraryItem
{
private string _director;
private string _title;
private int _playTime;
// Constructor
public Video(string director, string title,
int numCopies, int playTime)
{
this._director = director;
this._title = title;
this.NumCopies = numCopies;
this._playTime = playTime;
}
public override void Display()
{
Console.WriteLine("\nVideo ----- ");
Console.WriteLine(" Director: {0}", _director);
Console.WriteLine(" Title: {0}", _title);
Console.WriteLine(" # Copies: {0}", NumCopies);
Console.WriteLine(" Playtime: {0}\n", _playTime);
}
}
}
| 24.358974 | 61 | 0.567368 | [
"MIT"
] | MrAlexLemon/Programming_Csharp | Lab2/Lab2/Video.cs | 952 | C# |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// 有关程序集的常规信息通过以下
// 特性集控制。更改这些特性值可修改
// 与程序集关联的信息。
[assembly: AssemblyTitle("ConsoleApplication34")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("ConsoleApplication34")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// 将 ComVisible 设置为 false 使此程序集中的类型
// 对 COM 组件不可见。 如果需要从 COM 访问此程序集中的类型,
// 则将该类型上的 ComVisible 特性设置为 true。
[assembly: ComVisible(false)]
// 如果此项目向 COM 公开,则下列 GUID 用于类型库的 ID
[assembly: Guid("1cdb34f2-dcb2-48f0-8893-2f2c9925613b")]
// 程序集的版本信息由下面四个值组成:
//
// 主版本
// 次版本
// 生成号
// 修订号
//
// 可以指定所有这些值,也可以使用“生成号”和“修订号”的默认值,
// 方法是按如下所示使用“*”:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 26.189189 | 56 | 0.717234 | [
"Apache-2.0"
] | zwfushichun/Lab | LambdaConvertSQL/ConsoleApplication34/Properties/AssemblyInfo.cs | 1,328 | C# |
namespace UptimeTracker
{
	/// <summary>
	/// Outcome states for a single ping attempt.
	/// </summary>
	public enum PingState
	{
		/// <summary>The ping has been started.</summary>
		Started,
		/// <summary>The ping succeeded.</summary>
		Success,
		/// <summary>The ping failed.</summary>
		Failure
	}
} | 9.888889 | 24 | 0.696629 | [
"MIT"
] | duanenewman/NetworkUptimeTracker | UptimeTracker.Core/PingState.cs | 91 | C# |
using UnityEngine;
[RequireComponent(typeof(PolygonCollider2D))]
/// <summary>
/// Swaps between two sound objects as colliders enter or leave this trigger:
/// entering activates <see cref="InteriorSounds"/> and deactivates
/// <see cref="ExteriorSounds"/>; leaving does the reverse.
/// </summary>
public class AreaSound : MonoBehaviour {

    public GameObject InteriorSounds;
    public GameObject ExteriorSounds;

    private void OnTriggerEnter2D(Collider2D collision)
    {
        SwitchToInterior(true);
    }

    private void OnTriggerExit2D(Collider2D collision)
    {
        SwitchToInterior(false);
    }

    // Activates exactly one of the two sound objects: the interior set when
    // inside is true, the exterior set otherwise (same activation order as before).
    private void SwitchToInterior(bool inside)
    {
        InteriorSounds.SetActive(inside);
        ExteriorSounds.SetActive(!inside);
    }
}
| 19.954545 | 52 | 0.794989 | [
"Apache-2.0"
] | pucprsoundgame/PUCPR-SoundGame | PUCPRSoundGame_sourceCode/Assets/Scripts/AreaSound.cs | 441 | C# |
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
namespace Bloom.Spreadsheet
{
/// <summary>
/// This class is a representation of text which may have parts
/// of it bolded, italicized, underlined, and/or superscripted
/// </summary>
public class MarkedUpText
{
private List<MarkedUpTextRun> _runList;
public IEnumerable Runs => _runList;
public bool HasFormatting => _runList.Any(r => r.HasFormatting);
public MarkedUpText()
{
_runList = new List<MarkedUpTextRun>();
}
public string PlainText()
{
var stringBuilder = new StringBuilder();
foreach (MarkedUpTextRun run in this._runList)
{
stringBuilder.Append(run.Text);
}
return stringBuilder.ToString();
}
public override string ToString()
{
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.Append("<p>");
foreach (var run in _runList)
{
if (run.Text.Equals("\r\n"))
{
stringBuilder.Append("</p>\r\n<p>");
continue;
}
List<string> endTags = new List<string>();
if (run.Bold)
{
AddTags(stringBuilder, "strong", endTags);
}
if (run.Italic)
{
AddTags(stringBuilder, "em", endTags);
}
if (run.Underlined)
{
AddTags(stringBuilder, "u", endTags);
}
if (run.Superscript)
{
AddTags(stringBuilder, "sup", endTags);
}
stringBuilder.Append(run.Text);
endTags.Reverse();
foreach (var endTag in endTags)
{
stringBuilder.Append(endTag);
}
}
stringBuilder.Append("</p>");
return stringBuilder.ToString();
}
private void AddTags(StringBuilder stringBuilder, string tagName, List<string> endTag)
{
stringBuilder.Append("<" + tagName + ">");
endTag.Add("</" + tagName + ">");
}
public int Count
{
get
{
return _runList.Count;
}
}
public MarkedUpTextRun GetRun(int index)
{
return _runList[index];
}
/// <summary>
/// Extract the text and any bold, italic, underline, and/or superscript formatting
/// Adds newlines after paragraphs except for the last one
/// </summary>
public static MarkedUpText ParseXml(string xmlString)
{
XmlDocument doc = new XmlDocument();
doc.PreserveWhitespace = true;
var wrappedXmlString = "<wrapper>" + xmlString + "</wrapper>";
doc.LoadXml(wrappedXmlString);
XmlNode root = doc.DocumentElement;
MarkedUpText result = new MarkedUpText();
MarkedUpText pending = new MarkedUpText();
//There are no paragraph elements, just keep all whitespace
if (((XmlElement) root).GetElementsByTagName("p").Count == 0)
{
return ParseXmlRecursive(root);
}
foreach (XmlNode x in root.ChildNodes.Cast<XmlNode>())
{
if (x.Name == "#whitespace")
{
continue;
}
if (string.IsNullOrWhiteSpace(x.InnerText) && x.Name != "p")
{
if (result.Count > 0)
pending._runList.Add(new MarkedUpTextRun(x.InnerText));
continue;
}
result.AddAllFrom(pending);
result.AddAllFrom(ParseXmlRecursive(x));
pending = new MarkedUpText();
if (x.Name == "p")
{
// We want a line break here, but only if something follows...we don't need a blank line at
// the end of the cell, which is what Excel will do with a trailing newline.
// Review or Environment.Newline? But I'd rather generate something consistent.
// Linux: what line break is best to use when constructing an Excel spreadsheet in Linux?
pending._runList.Add(new MarkedUpTextRun("\r\n"));
}
}
return result;
}
private void AddAllFrom(MarkedUpText m)
{
foreach (MarkedUpTextRun r in m.Runs)
{
this._runList.Add(r);
}
}
private static MarkedUpText ParseXmlRecursive(XmlNode node)
{
MarkedUpText markedUpText;
if ((node.Name == "br")
|| (node.Name == "span" && node.Attributes.GetNamedItem("class").Value.Equals("bloom-linebreak")))
{
MarkedUpTextRun run = new MarkedUpTextRun("\r\n");
markedUpText = new MarkedUpText();
markedUpText._runList.Add(run);
}
else if (!node.HasChildNodes)
{
MarkedUpTextRun run = new MarkedUpTextRun(node.InnerText);
markedUpText = new MarkedUpText();
markedUpText._runList.Add(run);
}
else
{
markedUpText = new MarkedUpText();
foreach (XmlNode child in node.ChildNodes)
{
MarkedUpText markedUpChild = ParseXmlRecursive(child);
ApplyFormatting(node.Name, markedUpChild);
markedUpText._runList.AddRange(markedUpChild._runList);
}
}
return markedUpText;
}
private static void ApplyFormatting(string formatName, MarkedUpText markedUpText)
{
foreach (MarkedUpTextRun run in markedUpText._runList)
{
run.setProperty(formatName);
}
}
}
/// <summary>
/// A run of text which has a certain formatting, within a larger blurb of text
/// e.g. a phrase which is italicized in the middle of a sentence
/// </summary>
public class MarkedUpTextRun
{
public MarkedUpTextRun(string textContent)
{
Text = textContent;
}
public string Text { get; set; }
public bool Bold { get; set; }
public bool Italic { get; set; }
public bool Underlined { get; set; }
public bool Superscript { get; set; }
public bool HasFormatting => Bold | Italic | Underlined | Superscript;
public void setProperty(string propertyName)
{
if (propertyName.Equals("strong") || propertyName.Equals("b"))
{
Bold = true;
}
else if (propertyName.Equals("em") || propertyName.Equals("i"))
{
Italic = true;
}
else if (propertyName.Equals("sup"))
{
Superscript = true;
}
else if (propertyName.Equals("u"))
{
Underlined = true;
}
}
}
}
| 24.280851 | 102 | 0.657904 | [
"MIT"
] | BloomBooks/BloomDesktop | src/BloomExe/Spreadsheet/MarkedUpText.cs | 5,706 | C# |
/*
* Original author: Nicholas Shulman <nicksh .at. u.washington.edu>,
* MacCoss Lab, Department of Genome Sciences, UW
*
* Copyright 2011 University of Washington - Seattle, WA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
namespace pwiz.Common.Collections
{
/// <summary>
/// Helper methods for dealing with Collections.
/// </summary>
    public static class CollectionUtil
    {
        /// <summary>
        /// Returns true if both dictionaries contain exactly the same key/value pairs.
        /// Both arguments must be non-null.
        /// </summary>
        public static bool EqualsDeep<TKey,TValue>(IDictionary <TKey,TValue> dict1, IDictionary<TKey,TValue> dict2)
        {
            if (dict1.Count != dict2.Count)
            {
                return false;
            }
            foreach (var entry in dict1)
            {
                TValue value2;
                if (!dict2.TryGetValue(entry.Key, out value2))
                {
                    return false;
                }
                if (!Equals(entry.Value, value2))
                {
                    return false;
                }
            }
            return true;
        }
        /// <summary>
        /// Hash code over all key/value pairs.  Pairs are combined with XOR, which is
        /// order-independent — appropriate since dictionary enumeration order is unspecified.
        /// </summary>
        public static int GetHashCodeDeep<TKey, TValue>(IDictionary<TKey, TValue> dict)
        {
            return dict.Aggregate(0,
                (seed, keyValuePair) => seed ^ keyValuePair.GetHashCode()
            );
        }
        /// <summary>
        /// Order-sensitive hash code over all list elements (multiply-by-397 accumulation).
        /// </summary>
        public static int GetHashCodeDeep<T>(IList<T> list)
        {
            return list.Aggregate(0, (seed, item) => seed*397 + SafeGetHashCode(item));
        }
        /// <summary>
        /// GetHashCode that tolerates null, mapping it to 0.
        /// </summary>
        public static int SafeGetHashCode<T>(T item)
        {
            return Equals(null, item) ? 0 : item.GetHashCode();
        }
        // Compares lists element-by-element (order-sensitive) and hashes them the same way.
        // NOTE: assumes non-null lists; Equals(null, x) would throw here.
        private class ListContentsEqualityComparer<T> : IEqualityComparer<IList<T>>
        {
            public bool Equals(IList<T> x, IList<T> y)
            {
                return x.SequenceEqual(y);
            }
            public int GetHashCode(IList<T> obj)
            {
                return GetHashCodeDeep(obj);
            }
        }
        /// <summary>
        /// An equality comparer that treats two lists as equal when their contents are
        /// equal, element by element, in order.
        /// </summary>
        public static IEqualityComparer<IList<T>> GetListContentsEqualityComparer<T>()
        {
            return new ListContentsEqualityComparer<T>();
        }
        /// <summary>
        /// Returns an immutable dictionary containing exactly one key/value pair.
        /// </summary>
        public static IDictionary<TKey,TValue> SingletonDictionary<TKey, TValue>(TKey key, TValue value)
        {
            // TODO: if performance becomes an issue, change this
            return new ImmutableDictionary<TKey, TValue>(new Dictionary<TKey, TValue> {{key, value}});
        }
        /// <summary>
        /// Returns an empty immutable dictionary.
        /// </summary>
        public static IDictionary<TKey,TValue> EmptyDictionary<TKey,TValue>()
        {
            return new ImmutableDictionary<TKey, TValue>(new Dictionary<TKey, TValue>());
        }
        /// <summary>
        /// Performs a binary search in a list of items. The list is assumed to be sorted with respect to
        /// <paramref name="compareFunc" /> such that those items for which compareFunc returns a negative
        /// number appear earlier in the list than those items for which compareFunc returns 0, which appear
        /// earlier than the items for which compareFunc returns a positive number.
        /// The return value is the index of the first or last (depending on <paramref name="firstIndex"/>) item
        /// for which compareFunc returns 0. If no item was found, then the return value is the one's complement
        /// of the index of the first item in the list for which compareFunc returns a positive number.
        /// </summary>
        public static int BinarySearch<TItem>(IList<TItem> items, Func<TItem, int> compareFunc, bool firstIndex)
        {
            var range = BinarySearch(items, compareFunc);
            if (range.Length == 0)
            {
                return ~range.Start;
            }
            return firstIndex ? range.Start : range.End - 1;
        }
        /// <summary>
        /// Binary search returning the whole range of indexes whose items compare equal
        /// (compareFunc returns 0).  An empty range positioned at the insertion point is
        /// returned when there is no match.
        /// </summary>
        public static Range BinarySearch<TItem>(IList<TItem> items, Func<TItem, int> compareFunc)
        {
            Range result = new Range(0, items.Count);
            // Two passes over a shrinking candidate range: the first pass pins the
            // range's start (first matching index), the second pins its end.
            foreach (bool firstIndex in new[] { true, false })
            {
                int lo = result.Start;
                int hi = result.End;
                while (lo < hi)
                {
                    int mid = (lo + hi) / 2;
                    int c = compareFunc(items[mid]);
                    if (c == 0)
                    {
                        if (firstIndex)
                        {
                            hi = mid;
                        }
                        else
                        {
                            lo = mid + 1;
                        }
                    }
                    else if (c < 0)
                    {
                        // Everything at or below mid is too small: the result range
                        // cannot start before lo.
                        lo = mid + 1;
                        result = new Range(lo, result.End);
                    }
                    else
                    {
                        // Everything at or above mid is too large: the result range
                        // cannot end after hi.
                        hi = mid;
                        result = new Range(result.Start, hi);
                    }
                }
            }
            return result;
        }
        /// <summary>
        /// Binary search for a key in a sorted list.  Returns the first matching index,
        /// or the one's complement of the insertion point when the key is absent.
        /// </summary>
        public static int BinarySearch<TItem>(IList<TItem> items, TItem key) where TItem : IComparable
        {
            Range range = BinarySearch(items, item => item.CompareTo(key));
            if (range.Length == 0)
            {
                return ~range.Start;
            }
            return range.Start;
        }
        /// <summary>
        /// Copies <paramref name="length"/> elements from one list to another,
        /// analogous to Array.Copy but for IList.
        /// </summary>
        public static void Copy<T>(IList<T> sourceList, int sourceIndex, IList<T> destinationList, int destinationIndex,
            int length)
        {
            for (int i = 0; i < length; i++)
            {
                destinationList[destinationIndex + i] = sourceList[sourceIndex + i];
            }
        }
    }
}
| 37.963855 | 121 | 0.51206 | [
"Apache-2.0"
] | shze/pwizard-deb | pwiz_tools/Shared/Common/Collections/CollectionUtil.cs | 6,304 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using OpenNLP.Tools.Tokenize;
namespace OpenNLP.Tools.Chunker
{
    /// <summary>
    /// A <see cref="Dechunker"/> that decides how each chunk should be re-attached to
    /// its neighbours by matching the chunk against a dictionary of regexes, each of
    /// which maps to a <see cref="DechunkOperation"/>.
    /// </summary>
    public class RegexDictionaryDechunker : Dechunker
    {
        // Maps a compiled regex to the operation applied to chunks it matches.
        private readonly Dictionary<Regex, DechunkOperation> _regexToDechunkOperation;

        // Constructors -----------------

        /// <summary>
        /// Creates a dechunker with default punctuation rules (closing punctuation
        /// merges left, opening punctuation merges right, quotes alternate, hyphens
        /// merge both sides when surrounded by words).
        /// </summary>
        public RegexDictionaryDechunker()
        {
            _regexToDechunkOperation = new Dictionary<Regex, DechunkOperation>()
            {
                // starts with punctuation, apostrophe, ), ] or € symbol --> merge to left
                {new Regex(@"^[\.,\!\?;\)\]'€]+", RegexOptions.Compiled), DechunkOperation.MERGE_TO_LEFT},
                // starts with (, [ r $ --> merge to right
                {new Regex(@"^[\(\[$]+", RegexOptions.Compiled), DechunkOperation.MERGE_TO_RIGHT},
                // behavior before/after " depends on the " occurences before/after
                {new Regex(@"^\""+$", RegexOptions.Compiled), DechunkOperation.RIGHT_LEFT_MATCHING},
                // '-' --> merge both only if two words on both sides
                {new Regex(@"^-$", RegexOptions.Compiled), DechunkOperation.MERGE_BOTH_IF_SURROUNDED_BY_WORDS},
            };
        }

        /// <summary>
        /// Creates a dechunker from caller-supplied regex-pattern-to-operation rules.
        /// </summary>
        public RegexDictionaryDechunker(Dictionary<string, DechunkOperation> dict)
        {
            // compile regex for performance
            this._regexToDechunkOperation = dict.ToDictionary(ent => new Regex(ent.Key, RegexOptions.Compiled), ent => ent.Value);
        }

        // Methods ----------------------------------------------------

        // Chunk ends with a word character / starts with a word character.
        private static readonly Regex EndByWordRegex = new Regex(@"\w+$", RegexOptions.Compiled);
        private static readonly Regex StartByWordRegex = new Regex(@"^\w+", RegexOptions.Compiled);

        /// <summary>
        /// Computes one <see cref="DechunkOperation"/> per input chunk by matching each
        /// chunk against the rule dictionary; chunks matching no rule get NO_OPERATION.
        /// </summary>
        public override DechunkOperation[] GetDechunkerOperations(string[] chunks)
        {
            var operations = new DechunkOperation[chunks.Length];
            // Tracks RIGHT_LEFT_MATCHING tokens (e.g. quotes) currently "open".
            var matchingTokens = new HashSet<string>();
            for (int i = 0; i < chunks.Length; i++)
            {
                var matchingRegexes = _regexToDechunkOperation
                    .Where(ent => ent.Key.IsMatch(chunks[i]))
                    .ToList();
                if (!matchingRegexes.Any())
                {
                    operations[i] = DechunkOperation.NO_OPERATION;
                    continue;
                }
                else
                {
                    if (matchingRegexes.Count > 1)
                    {
                        // TODO: log issue, should not happen
                    }
                    // NOTE(review): when several regexes match, First() wins, and
                    // Dictionary enumeration order is unspecified — consider giving
                    // the rules an explicit priority.
                    var operation = matchingRegexes.First().Value;
                    if (operation == DechunkOperation.MERGE_TO_LEFT
                        || operation == DechunkOperation.MERGE_TO_RIGHT
                        || operation == DechunkOperation.MERGE_BOTH)
                    {
                        operations[i] = operation;
                    }
                    else if (operation == DechunkOperation.MERGE_BOTH_IF_SURROUNDED_BY_WORDS)
                    {
                        // Merge both sides only when the chunk sits strictly between
                        // two chunks that end/start with a word character.
                        if (0 < i && i < chunks.Length - 1 && EndByWordRegex.IsMatch(chunks[i - 1]) && StartByWordRegex.IsMatch(chunks[i + 1]))
                        {
                            operations[i] = DechunkOperation.MERGE_BOTH;
                        }
                        else
                        {
                            operations[i] = DechunkOperation.NO_OPERATION;
                        }
                    }
                    else if (operation == DechunkOperation.RIGHT_LEFT_MATCHING)
                    {
                        if (matchingTokens.Contains(chunks[i]))
                        {
                            // The token already occurred once, move it to the left and clear the occurrence flag
                            operations[i] = DechunkOperation.MERGE_TO_LEFT;
                            matchingTokens.Remove(chunks[i]);
                        }
                        else
                        {
                            // First time this token is seen, move it to the right and remember it
                            operations[i] = DechunkOperation.MERGE_TO_RIGHT;
                            matchingTokens.Add(chunks[i]);
                        }
                    }
                    else
                    {
                        throw new InvalidEnumArgumentException("Unknown operation: " + operation);
                    }
                }
            }
            return operations;
        }
    }
}
| 40.5 | 143 | 0.497233 | [
"MIT"
] | AlexPoint/OpenNlp | OpenNLP/Tools/Chunker/RegexDictionaryDechunker.cs | 4,704 | C# |
using System;
using BizHawk.Common;
namespace BizHawk.Emulation.Cores.Nintendo.NES
{
	/// <summary>
	/// Board for iNES mapper 043 (matched via board type "MAPPER043").
	/// Provides a mostly-fixed PRG layout with one switchable 8 KB bank and a
	/// free-running IRQ counter that fires after 4096 CPU clocks.
	/// </summary>
	public sealed class Mapper043 : NES.NESBoardBase
	{
		// Currently selected switchable 8 KB PRG bank (written via $4022, see lut).
		int prg = 0;
		// CPU-clock counter for the IRQ timer; asserts IRQ when it reaches 4096.
		int irqcnt = 0;
		bool irqenable = false;
		// Selects which fixed 8 KB bank (8 or 9) the WRAM window reads from.
		bool swap;
		// Maps the low 3 bits of a $4022 write to a PRG bank number.
		private static int[] lut = { 4, 3, 5, 3, 6, 3, 7, 3 };
		public override bool Configure(NES.EDetectionOrigin origin)
		{
			switch (Cart.board_type)
			{
				case "MAPPER043":
					break;
				default:
					return false;
			}
			Cart.wram_size = 0;
			// not sure on initial mirroring
			SetMirrorType(EMirrorType.Vertical);
			return true;
		}
		// Register writes in the expansion area.  addr arrives as an offset from
		// $4000, so it is rebased before the partial-decode mask is applied.
		public override void WriteEXP(int addr, byte value)
		{
			addr += 0x4000;
			switch (addr & 0xF1FF)
			{
				case 0x4022:
					// Select the switchable PRG bank.
					prg = lut[value & 0x7];
					break;
				case 0x4120:
					swap = (value & 1) == 1;
					break;
				case 0x4122:
					// IRQ enable; writing also acknowledges and resets the counter.
					irqenable = (value & 1) == 1;
					IRQSignal = false;
					irqcnt = 0;
					break;
			}
		}
		// Same IRQ enable/acknowledge register, mirrored into the $8000+ range.
		public override void WritePRG(int addr, byte value)
		{
			addr += 0x8000;
			switch (addr & 0xF1FF)
			{
				case 0x8122:
					irqenable = (value & 1) == 1;
					IRQSignal = false;
					irqcnt = 0;
					break;
			}
		}
		// Expansion-area reads above offset 0x1000 (CPU $5000+) come from fixed bank 8.
		public override byte ReadEXP(int addr)
		{
			if (addr > 0x1000)
			{
				return ROM[(addr - 0x1000) + 8 * 0x2000];
			}
			else return base.ReadEXP(addr);
		}
		// WRAM-window reads come from ROM: bank 0 when swap is set, else bank 2's
		// half (offset 0x4000) — no actual WRAM on this board (wram_size = 0).
		public override byte ReadWRAM(int addr)
		{
			if (swap)
			{
				return ROM[addr];
			}
			else
			{
				return ROM[addr + 0x4000];
			}
		}
		// PRG reads: two fixed 8 KB banks (in swapped order), then the switchable
		// bank, then fixed bank 8 or 9 depending on swap.
		public override byte ReadPRG(int addr)
		{
			if (addr < 0x2000)
			{
				return ROM[addr + 0x2000];
			}
			else if (addr < 0x4000)
			{
				return ROM[addr - 0x2000];
			}
			else if (addr < 0x6000)
			{
				return ROM[(addr - 0x4000) + prg * 0x2000];
			}
			else
			{
				if (swap)
				{
					return ROM[(addr - 0x6000) + 8 * 0x2000];
				}
				else
				{
					return ROM[(addr - 0x6000) + 9 * 0x2000];
				}
			}
		}
		// Counts CPU clocks while enabled; at 4096 clocks asserts IRQ and
		// disables itself until re-enabled by a register write.
		public override void ClockCPU()
		{
			if (irqenable)
			{
				irqcnt++;
				if (irqcnt >= 4096)
				{
					irqenable = false;
					IRQSignal = true;
				}
			}
		}
		// Savestate support: serializes all mapper-visible state.
		public override void SyncState(Serializer ser)
		{
			base.SyncState(ser);
			ser.Sync("prg", ref prg);
			ser.Sync("irqenable", ref irqenable);
			ser.Sync("irqcnt", ref irqcnt);
			ser.Sync("swap", ref swap);
		}
	}
}
| 16.23741 | 61 | 0.560479 | [
"MIT"
] | Asnivor/BizHawk | BizHawk.Emulation.Cores/Consoles/Nintendo/NES/Boards/Mapper043.cs | 2,259 | C# |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.