content stringlengths 5 1.04M | avg_line_length float64 1.75 12.9k | max_line_length int64 2 244k | alphanum_fraction float64 0 0.98 | licenses list | repository_name stringlengths 7 92 | path stringlengths 3 249 | size int64 5 1.04M | lang stringclasses 2
values |
|---|---|---|---|---|---|---|---|---|
using Content.Shared.Traitor.Uplink;
using Robust.Shared.GameObjects;
using Robust.Shared.Serialization;
using System;
namespace Content.Shared.PDA
{
[Serializable, NetSerializable]
public sealed class PDAUpdateState : BoundUserInterfaceState
{
    // Whether the PDA's flashlight is currently turned on.
    public bool FlashlightEnabled;
    // Whether a pen is currently stored in the PDA.
    public bool HasPen;
    // Owner / inserted-ID information displayed by the PDA UI.
    public PDAIdInfoText PDAOwnerInfo;
    // Whether an uplink is available on this PDA (defaults to false).
    public bool HasUplink;

    // Snapshot of PDA state sent from server to client to refresh the bound UI.
    public PDAUpdateState(bool flashlightEnabled, bool hasPen, PDAIdInfoText pDAOwnerInfo, bool hasUplink = false)
    {
        FlashlightEnabled = flashlightEnabled;
        HasPen = hasPen;
        PDAOwnerInfo = pDAOwnerInfo;
        HasUplink = hasUplink;
    }
}
[Serializable, NetSerializable]
public struct PDAIdInfoText
{
    // Name of the PDA's actual owner; null when unknown.
    public string? ActualOwnerName;
    // Owner name read from the inserted ID card, if any.
    public string? IdOwner;
    // Job title read from the inserted ID card, if any.
    public string? JobTitle;
}
}
| 26.176471 | 118 | 0.673034 | [
"MIT"
] | A-Box-12/space-station-14 | Content.Shared/PDA/PDAUpdateState.cs | 890 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Xml;
using ClosedXML.Excel;
using DigitalPlatform.IO;
using DigitalPlatform.Xml;
namespace dp2Circulation.OperLog
{
public static class OperLogReport
{
    // One statistics row, grouped by a key field.
    public class KeyStatisLine
    {
        // Key grouping field. May be ItemRefID etc.
        public string Key { get; set; }
        public string ItemBarcode { get; set; } // first item barcode seen for this key
        public int Count { get; set; } // number of repeated writes
        public string Operator { get; set; } // first operator
        public DateTime OperTime { get; set; } // first operation time
    }

    // Writes one row of string values into the worksheet starting at column 2,
    // centering the first written column, enabling wrap/vertical-center styling,
    // and tracking per-column maximum character widths for later auto-sizing.
    // Advances both the item counter and the row index (ref parameters).
    static void WriteLine(
        IXLWorksheet sheet,
        string[] cols,
        List<IXLCell> cells,
        ref int nItemIndex,
        ref int nRowIndex,
        ref List<int> column_max_chars)
    {
        int nColIndex = 2;
        foreach (string s in cols)
        {
            // Track the maximum character width observed for this column.
            ReaderSearchForm.SetMaxChars(ref column_max_chars, nColIndex - 1, ReaderSearchForm.GetCharWidth(s));
            IXLCell cell = null;
            cell = sheet.Cell(nRowIndex, nColIndex).SetValue(s);
            if (nColIndex == 2)
            {
                // cell = sheet.Cell(nRowIndex, nColIndex).SetValue(nItemIndex + 1);
                cell.Style.Alignment.Horizontal = XLAlignmentHorizontalValues.Center;
            }
            cell.Style.Alignment.WrapText = true;
            cell.Style.Alignment.Vertical = XLAlignmentVerticalValues.Center;
            nColIndex++;
            cells?.Add(cell);
        }
        nItemIndex++;
        nRowIndex++;
    }

    // One per-day statistics row.
    public class DateStatisLine
    {
        public string Date { get; set; } // date
        public string Operator { get; set; } // operator
        public int ItemCount { get; set; } // number of items
        public int WriteCount { get; set; } // number of writes; may exceed the item count
    }

    // Same as KeyStatisLine but carries a date (time truncated) instead of a full time.
    public class KeyStatisLine1
    {
        // Key grouping field. May be ItemRefID etc.
        public string Key { get; set; }
        public string ItemBarcode { get; set; } // first item barcode
        public int Count { get; set; } // number of repeated writes
        public string Operator { get; set; } // first operator
        public DateTime OperDate { get; set; } // first operation date
    }

    // Truncates a DateTime to midnight of the same day.
    static DateTime GetDate(DateTime time)
    {
        return new DateTime(time.Year, time.Month, time.Day);
    }

    // Builds the per-day statistics sheet.
    // Groups the key-statistics rows by (operation date, operator) and, when a
    // worksheet is supplied, writes a title row followed by one row per group.
    public static void BuildRfidStatisSheet(
        IEnumerable<KeyStatisLine> lines,
        IXLWorksheet sheet,
        out List<DateStatisLine> results)
    {
        List<int> column_max_chars = new List<int>();
        results =
            lines
            .Select(l=> new KeyStatisLine1 { Key = l.Key,
                ItemBarcode = l.ItemBarcode,
                Count = l.Count,
                Operator = l.Operator,
                OperDate = GetDate(l.OperTime)})
            // .Where(o => bSeries || string.IsNullOrEmpty(o.State) == false)
            .GroupBy(p => new { p.OperDate, p.Operator })
            .Select(cl => new DateStatisLine
            {
                Date = (string)cl.First().OperDate.ToLongDateString(),
                Operator = cl.First().Operator,
                ItemCount = cl.Count(),
                WriteCount = cl.Sum(a => a.Count),
            }).ToList();
        if (sheet != null)
        {
            int line = 0;
            // Column title row.
            string[] titles = new string[] {
                "日期",
                "操作者",
                "册数",
                "写入次数",
            };
            int nRowIndex = 1;
            WriteLine(
                sheet,
                titles,
                null, // cells,
                ref line,
                ref nRowIndex,
                ref column_max_chars);
            foreach (var result in results)
            {
                string[] cols = new string[] {
                    result.Date,
                    result.Operator,
                    result.ItemCount.ToString(),
                    result.WriteCount.ToString()
                };
                WriteLine(
                    sheet,
                    cols,
                    null, // cells,
                    ref line,
                    ref nRowIndex,
                    ref column_max_chars);
            }
            PrintOrderForm.AdjectColumnWidth(sheet, column_max_chars);
        }
    }

    // Builds the base table: one row per item. Repeated writes of the same
    // RFID (grouped by ItemRefID) are merged into a single row with a count.
    public static void BuildRfidWriteReport(
        IEnumerable<RfidWriteInfo> lines,
        IXLWorksheet sheet,
        out List<KeyStatisLine> results)
    {
        List<int> column_max_chars = new List<int>();
        results =
            lines
            // .Where(o => bSeries || string.IsNullOrEmpty(o.State) == false)
            .GroupBy(p => (string)p.ItemRefID)
            .Select(cl => new KeyStatisLine
            {
                Key = (string)cl.First().ItemRefID,
                ItemBarcode = cl.First().ItemBarcode,
                Operator = cl.First().Operator,
                OperTime = cl.First().OperTime,
                Count = cl.Count(),
            }).ToList();
        if (sheet != null)
        {
            int line = 0;
            // Column title row.
            string[] titles = new string[] {
                "参考ID",
                "册条码号",
                "写入次数",
                "操作者",
                "操作时间",
            };
            int nRowIndex = 1;
            WriteLine(
                sheet,
                titles,
                null, // cells,
                ref line,
                ref nRowIndex,
                ref column_max_chars);
            foreach (var result in results)
            {
                string[] cols = new string[] {
                    result.Key,
                    result.ItemBarcode,
                    result.Count.ToString(),
                    result.Operator,
                    result.OperTime.ToString()
                };
                WriteLine(
                    sheet,
                    cols,
                    null, // cells,
                    ref line,
                    ref nRowIndex,
                    ref column_max_chars);
            }
            PrintOrderForm.AdjectColumnWidth(sheet, column_max_chars);
        }
    }
}
// Raw log information. Each instance corresponds to one operation-log record.
public class RfidWriteInfo
{
    public int Index { get; set; }
    public string Date { get; set; }
    public string ItemBarcode { get; set; }
    public string ItemRefID { get; set; }
    public string Operator { get; set; }
    public DateTime OperTime { get; set; }

    /// <summary>
    /// Builds a <see cref="RfidWriteInfo"/> from a single operation-log XML record.
    /// </summary>
    /// <param name="date">Log date string the record belongs to.</param>
    /// <param name="index">Index of the record within that date's log file.</param>
    /// <param name="dom">Parsed XML of the log record.</param>
    public static RfidWriteInfo Build(string date, int index, XmlDocument dom)
    {
        var root = dom.DocumentElement;
        // operTime is stored as an RFC 1123 timestamp; convert to local time.
        string rfc1123Time = DomUtil.GetElementText(root, "operTime");
        return new RfidWriteInfo
        {
            Date = date,
            Index = index,
            ItemBarcode = DomUtil.GetElementText(root, "itemBarcode"),
            ItemRefID = DomUtil.GetElementText(root, "itemRefID"),
            Operator = DomUtil.GetElementText(root, "operator"),
            OperTime = DateTimeUtil.FromRfc1123DateTimeString(rfc1123Time).ToLocalTime(),
        };
    }
}
}
| 29.892857 | 116 | 0.48865 | [
"Apache-2.0"
] | donsllon/dp2 | dp2Circulation/OperLog/OperLogReport.cs | 7,955 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Dawn;
using Microsoft.Azure.EventHubs;
namespace Take.Elephant.Azure
{
/// <summary>
/// TODO: Incomplete implementation, should remain internal.
/// </summary>
/// <typeparam name="T"></typeparam>
internal class AzureEventHubReceiverQueue<T> : IReceiverQueue<T>, IBlockingReceiverQueue<T>, IBatchReceiverQueue<T>, IOpenable, ICloseable
{
    private readonly string _consumerGroupName;
    private readonly EventPosition _eventPosition;
    private readonly string _partitionId;
    private readonly ReceiverOptions _receiverOptions;
    private readonly TimeSpan _minDequeueRetryDelay;
    private readonly TimeSpan _maxDequeueRetryDelay;
    private readonly ISerializer<T> _serializer;
    private readonly EventHubClient _eventHubClient;
    private PartitionReceiver _receiver;
    private readonly SemaphoreSlim _openSemaphore;

    public AzureEventHubReceiverQueue(
        string eventHubName,
        string eventHubConnectionString,
        ISerializer<T> serializer,
        string consumerGroupName,
        string partitionId,
        EventPosition eventPosition,
        ReceiverOptions receiverOptions = null,
        int minDequeueRetryDelay = 250,
        int maxDequeueRetryDelay = 30000)
    {
        Guard.Argument(eventHubName).NotNull().NotEmpty();
        Guard.Argument(eventHubConnectionString).NotNull().NotEmpty();
        _consumerGroupName = Guard.Argument(consumerGroupName).NotNull().Value;
        _eventPosition = Guard.Argument(eventPosition).NotNull().Value;
        _partitionId = Guard.Argument(partitionId).NotNull().Value;
        _receiverOptions = receiverOptions;
        Guard.Argument(minDequeueRetryDelay).Max(maxDequeueRetryDelay);
        _minDequeueRetryDelay = TimeSpan.FromMilliseconds(minDequeueRetryDelay);
        _maxDequeueRetryDelay = TimeSpan.FromMilliseconds(maxDequeueRetryDelay);
        _serializer = Guard.Argument(serializer).NotNull().Value;
        _eventHubClient = EventHubClient.CreateFromConnectionString(
            new EventHubsConnectionStringBuilder(eventHubConnectionString)
            {
                EntityPath = eventHubName
            }.ToString());
        _openSemaphore = new SemaphoreSlim(1);
    }

    /// <summary>
    /// Receives a single item from the partition, returning <c>default</c> when
    /// no event arrives within the minimum retry delay.
    /// </summary>
    public async Task<T> DequeueOrDefaultAsync(CancellationToken cancellationToken = default)
    {
        await OpenIfRequiredAsync(cancellationToken);
        var eventDatas = await _receiver.ReceiveAsync(1, _minDequeueRetryDelay);
        var eventData = eventDatas?.FirstOrDefault();
        return CreateItem(eventData);
    }

    /// <summary>
    /// Blocks until an item is received from the partition or the token is cancelled,
    /// polling with an exponentially growing wait time capped at the maximum retry delay.
    /// </summary>
    public async Task<T> DequeueAsync(CancellationToken cancellationToken)
    {
        // BUG FIX: previously this method never opened the receiver, discarded
        // every received batch and looped forever towards an unreachable
        // NotImplementedException. It now returns the first received item.
        await OpenIfRequiredAsync(cancellationToken);
        var interval = new ExponentialInterval(_minDequeueRetryDelay, _maxDequeueRetryDelay);
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var eventDatas = await _receiver.ReceiveAsync(1, interval.Interval);
            var eventData = eventDatas?.FirstOrDefault();
            if (eventData != null)
            {
                return CreateItem(eventData);
            }
        }
    }

    /// <summary>
    /// Receives up to <paramref name="maxBatchSize"/> items from the partition,
    /// waiting at most the minimum retry delay. Returns an empty sequence when
    /// no events are available.
    /// </summary>
    public async Task<IEnumerable<T>> DequeueBatchAsync(int maxBatchSize, CancellationToken cancellationToken)
    {
        // BUG FIX: previously returned null, which callers cannot safely enumerate.
        await OpenIfRequiredAsync(cancellationToken);
        var eventDatas = await _receiver.ReceiveAsync(maxBatchSize, _minDequeueRetryDelay);
        if (eventDatas == null)
        {
            return Enumerable.Empty<T>();
        }
        return eventDatas.Select(CreateItem).ToList();
    }

    /// <summary>
    /// Opens the receiver, throwing if it is already open.
    /// </summary>
    public async Task OpenAsync(CancellationToken cancellationToken)
    {
        if (!await OpenIfRequiredAsync(cancellationToken))
        {
            throw new InvalidOperationException("The receiver is already open");
        }
    }

    /// <summary>
    /// Closes the partition receiver (if open) and the underlying client.
    /// </summary>
    public async Task CloseAsync(CancellationToken cancellationToken)
    {
        await (_receiver?.CloseAsync() ?? Task.CompletedTask);
        await _eventHubClient.CloseAsync();
    }

    // Creates the partition receiver exactly once (double-checked under the
    // semaphore). Returns true when this call performed the creation.
    private async Task<bool> OpenIfRequiredAsync(CancellationToken cancellationToken)
    {
        if (_receiver != null) return false;
        await _openSemaphore.WaitAsync(cancellationToken);
        try
        {
            if (_receiver != null) return false;
            _receiver = _eventHubClient.CreateReceiver(_consumerGroupName, _partitionId, _eventPosition, _receiverOptions);
            return true;
        }
        finally
        {
            _openSemaphore.Release();
        }
    }

    // Deserializes an event's UTF-8 payload into an item; null events map to default.
    private T CreateItem(EventData eventData)
    {
        if (eventData == null) return default;
        // BUG FIX: Body is an ArraySegment; decode only the segment's
        // offset/count instead of the entire backing array.
        var body = eventData.Body;
        return _serializer.Deserialize(Encoding.UTF8.GetString(body.Array, body.Offset, body.Count));
    }

    // Produces wait intervals that double on each read, capped at a maximum.
    internal class ExponentialInterval
    {
        private readonly TimeSpan _initialInterval;
        private readonly TimeSpan _maxInterval;

        public ExponentialInterval(TimeSpan initialInterval, TimeSpan maxInterval)
        {
            _initialInterval = initialInterval;
            _maxInterval = maxInterval;
        }

        // Number of times Interval has been read.
        public int Count { get; private set; }

        // Current interval: initial * 2^Count, clamped to the maximum.
        public TimeSpan Interval
        {
            get
            {
                var timeout = TimeSpan.FromTicks(_initialInterval.Ticks * (int)Math.Pow(2, Count++));
                if (timeout < _maxInterval) return timeout;
                return _maxInterval;
            }
        }
    }
}
} | 38.385135 | 142 | 0.619081 | [
"Apache-2.0"
] | DanielCouto/elephant | src/Take.Elephant.Azure/AzureEventHubReceiverQueue.cs | 5,681 | C# |
using Terraria.ID;
using Terraria.ModLoader;
using static Terraria.ModLoader.ModContent;
namespace Comenzo.Items.Placeable.Ore.MagmaOre
{
public class MagmaOre : ModItem
{
    public override void SetStaticDefaults()
    {
        // Sorting priority when this ore is listed as a crafting material.
        ItemID.Sets.SortingPriorityMaterials[item.type] = 58;
    }

    public override void SetDefaults()
    {
        item.useStyle = ItemUseStyleID.SwingThrow; // swung like a standard placeable block
        item.useTurn = true;                       // player can turn while using
        item.useAnimation = 15;
        item.useTime = 10;
        item.autoReuse = true;                     // hold to keep placing
        item.maxStack = 999;
        item.consumable = true;                    // one item consumed per tile placed
        item.createTile = TileType<Tiles.Ore.MagmaOre.MagmaOre>(); // tile this item places
        item.width = 12;
        item.height = 12;
        item.value = 3000; // presumably the coin value in copper units — TODO confirm
    }
}
}
| 21.866667 | 61 | 0.707317 | [
"MIT"
] | nscornia/Comenzo | Items/Placeable/Ore/MagmaOre/MagmaOre.cs | 658 | C# |
// <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Management.Monitor
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// BaselinesOperations operations.
/// </summary>
// NOTE: This class is auto-generated by AutoRest (see the file header); code
// changes here will be lost on regeneration, so only comments are added.
internal partial class BaselinesOperations : IServiceOperations<MonitorManagementClient>, IBaselinesOperations
{
    /// <summary>
    /// Initializes a new instance of the BaselinesOperations class.
    /// </summary>
    /// <param name='client'>
    /// Reference to the service client.
    /// </param>
    /// <exception cref="System.ArgumentNullException">
    /// Thrown when a required parameter is null
    /// </exception>
    internal BaselinesOperations(MonitorManagementClient client)
    {
        if (client == null)
        {
            throw new System.ArgumentNullException("client");
        }
        Client = client;
    }

    /// <summary>
    /// Gets a reference to the MonitorManagementClient
    /// </summary>
    public MonitorManagementClient Client { get; private set; }

    /// <summary>
    /// **Lists the metric baseline values for a resource**.
    /// </summary>
    /// <param name='resourceUri'>
    /// The identifier of the resource.
    /// </param>
    /// <param name='metricnames'>
    /// The names of the metrics (comma separated) to retrieve.
    /// </param>
    /// <param name='metricnamespace'>
    /// Metric namespace to query metric definitions for.
    /// </param>
    /// <param name='timespan'>
    /// The timespan of the query. It is a string with the following format
    /// 'startDateTime_ISO/endDateTime_ISO'.
    /// </param>
    /// <param name='interval'>
    /// The interval (i.e. timegrain) of the query.
    /// </param>
    /// <param name='aggregation'>
    /// The list of aggregation types (comma separated) to retrieve.
    /// </param>
    /// <param name='sensitivities'>
    /// The list of sensitivities (comma separated) to retrieve.
    /// </param>
    /// <param name='filter'>
    /// The **$filter** is used to reduce the set of metric data
    /// returned.<br>Example:<br>Metric contains metadata A, B and
    /// C.<br>- Return all time series of C where A = a1 and B = b1 or
    /// b2<br>**$filter=A eq ‘a1’ and B eq ‘b1’ or B eq ‘b2’ and C eq
    /// ‘*’**<br>- Invalid variant:<br>**$filter=A eq ‘a1’ and B eq
    /// ‘b1’ and C eq ‘*’ or B = ‘b2’**<br>This is invalid because the
    /// logical or operator cannot separate two different metadata
    /// names.<br>- Return all time series where A = a1, B = b1 and C =
    /// c1:<br>**$filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘c1’**<br>-
    /// Return all time series where A = a1<br>**$filter=A eq ‘a1’ and B eq
    /// ‘*’ and C eq ‘*’**.
    /// </param>
    /// <param name='resultType'>
    /// Allows retrieving only metadata of the baseline. On data request all
    /// information is retrieved. Possible values include: 'Data', 'Metadata'
    /// </param>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    /// <exception cref="System.ArgumentNullException">
    /// Thrown when a required parameter is null
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public async Task<AzureOperationResponse<IEnumerable<SingleMetricBaseline>>> ListWithHttpMessagesAsync(string resourceUri, string metricnames = default(string), string metricnamespace = default(string), string timespan = default(string), System.TimeSpan? interval = default(System.TimeSpan?), string aggregation = default(string), string sensitivities = default(string), string filter = default(string), ResultType? resultType = default(ResultType?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        if (resourceUri == null)
        {
            throw new ValidationException(ValidationRules.CannotBeNull, "resourceUri");
        }
        string apiVersion = "2019-03-01";
        // Tracing
        bool _shouldTrace = ServiceClientTracing.IsEnabled;
        string _invocationId = null;
        if (_shouldTrace)
        {
            _invocationId = ServiceClientTracing.NextInvocationId.ToString();
            Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
            tracingParameters.Add("resourceUri", resourceUri);
            tracingParameters.Add("metricnames", metricnames);
            tracingParameters.Add("metricnamespace", metricnamespace);
            tracingParameters.Add("timespan", timespan);
            tracingParameters.Add("interval", interval);
            tracingParameters.Add("aggregation", aggregation);
            tracingParameters.Add("sensitivities", sensitivities);
            tracingParameters.Add("filter", filter);
            tracingParameters.Add("resultType", resultType);
            tracingParameters.Add("apiVersion", apiVersion);
            tracingParameters.Add("cancellationToken", cancellationToken);
            ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
        }
        // Construct URL
        var _baseUrl = Client.BaseUri.AbsoluteUri;
        var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{resourceUri}/providers/microsoft.insights/metricBaselines").ToString();
        // resourceUri is deliberately NOT URL-escaped here (it is itself a path).
        _url = _url.Replace("{resourceUri}", resourceUri);
        List<string> _queryParameters = new List<string>();
        if (metricnames != null)
        {
            _queryParameters.Add(string.Format("metricnames={0}", System.Uri.EscapeDataString(metricnames)));
        }
        if (metricnamespace != null)
        {
            _queryParameters.Add(string.Format("metricnamespace={0}", System.Uri.EscapeDataString(metricnamespace)));
        }
        if (timespan != null)
        {
            _queryParameters.Add(string.Format("timespan={0}", System.Uri.EscapeDataString(timespan)));
        }
        if (interval != null)
        {
            // TimeSpan is serialized to its JSON wire form, then the surrounding quotes trimmed.
            _queryParameters.Add(string.Format("interval={0}", System.Uri.EscapeDataString(Rest.Serialization.SafeJsonConvert.SerializeObject(interval, Client.SerializationSettings).Trim('"'))));
        }
        if (aggregation != null)
        {
            _queryParameters.Add(string.Format("aggregation={0}", System.Uri.EscapeDataString(aggregation)));
        }
        if (sensitivities != null)
        {
            _queryParameters.Add(string.Format("sensitivities={0}", System.Uri.EscapeDataString(sensitivities)));
        }
        if (filter != null)
        {
            _queryParameters.Add(string.Format("$filter={0}", System.Uri.EscapeDataString(filter)));
        }
        if (resultType != null)
        {
            _queryParameters.Add(string.Format("resultType={0}", System.Uri.EscapeDataString(Rest.Serialization.SafeJsonConvert.SerializeObject(resultType, Client.SerializationSettings).Trim('"'))));
        }
        if (apiVersion != null)
        {
            _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
        }
        if (_queryParameters.Count > 0)
        {
            _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
        }
        // Create HTTP transport objects
        var _httpRequest = new HttpRequestMessage();
        HttpResponseMessage _httpResponse = null;
        _httpRequest.Method = new HttpMethod("GET");
        _httpRequest.RequestUri = new System.Uri(_url);
        // Set Headers
        if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
        {
            _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
        }
        if (Client.AcceptLanguage != null)
        {
            if (_httpRequest.Headers.Contains("accept-language"))
            {
                _httpRequest.Headers.Remove("accept-language");
            }
            _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
        }
        if (customHeaders != null)
        {
            // Custom headers replace any defaults with the same name.
            foreach(var _header in customHeaders)
            {
                if (_httpRequest.Headers.Contains(_header.Key))
                {
                    _httpRequest.Headers.Remove(_header.Key);
                }
                _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
            }
        }
        // Serialize Request
        string _requestContent = null;
        // Set Credentials
        if (Client.Credentials != null)
        {
            cancellationToken.ThrowIfCancellationRequested();
            await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
        }
        // Send Request
        if (_shouldTrace)
        {
            ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
        }
        cancellationToken.ThrowIfCancellationRequested();
        _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
        if (_shouldTrace)
        {
            ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
        }
        HttpStatusCode _statusCode = _httpResponse.StatusCode;
        cancellationToken.ThrowIfCancellationRequested();
        string _responseContent = null;
        if ((int)_statusCode != 200)
        {
            // Non-200: wrap the (best-effort deserialized) error body in an exception.
            var ex = new ErrorResponseException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
            try
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                ErrorResponse _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<ErrorResponse>(_responseContent, Client.DeserializationSettings);
                if (_errorBody != null)
                {
                    ex.Body = _errorBody;
                }
            }
            catch (JsonException)
            {
                // Ignore the exception
            }
            ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
            ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
            if (_shouldTrace)
            {
                ServiceClientTracing.Error(_invocationId, ex);
            }
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw ex;
        }
        // Create Result
        var _result = new AzureOperationResponse<IEnumerable<SingleMetricBaseline>>();
        _result.Request = _httpRequest;
        _result.Response = _httpResponse;
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        // Deserialize Response
        if ((int)_statusCode == 200)
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            try
            {
                _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<SingleMetricBaseline>>(_responseContent, Client.DeserializationSettings);
            }
            catch (JsonException ex)
            {
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
            }
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Exit(_invocationId, _result);
        }
        return _result;
    }
}
}
| 46.009804 | 579 | 0.572839 | [
"MIT"
] | 0rland0Wats0n/azure-sdk-for-net | sdk/monitor/Microsoft.Azure.Management.Monitor/src/Generated/BaselinesOperations.cs | 14,135 | C# |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Cache
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Threading.Tasks;
using Apache.Ignite.Core.Cache.Configuration;
using Apache.Ignite.Core.Cache.Expiry;
using Apache.Ignite.Core.Cache.Query;
using Apache.Ignite.Core.Cache.Query.Continuous;
using Apache.Ignite.Core.Cache.Store;
using Apache.Ignite.Core.Cluster;
using Apache.Ignite.Core.Transactions;
/// <summary>
/// Main entry point for Ignite cache APIs. You can get a named cache by calling
/// <see cref="IIgnite.GetCache{TK,TV}"/> method.
/// <para />
/// Cache API supports distributed transactions. All <c>Get(...)</c>, <c>Put(...)</c>, <c>Replace(...)</c>,
/// and <c>Remove(...)</c> operations are transactional and will participate in an ongoing transaction,
/// if any. Other methods like <c>Peek(...)</c> or various <c>Contains(...)</c> methods may
/// be transaction-aware, i.e. check in-transaction entries first, but will not affect the current
/// state of transaction. See <see cref="ITransaction"/> documentation for more information
/// about transactions.
/// <para />
/// Neither <c>null</c> keys or values are allowed to be stored in cache. If a <c>null</c> value
/// happens to be in cache (e.g. after invalidation or remove), then cache will treat this case
/// as there is no value at all.
/// <para />
/// Note that cache is generic and you can only work with provided key and value types. If cache also
/// contains keys or values of other types, any attempt to retrieve them will result in
/// <see cref="InvalidCastException"/>. Use <see cref="ICache{Object, Object}"/> in order to work with entries
/// of arbitrary types.
/// <para/>
/// All members are thread-safe and may be used concurrently from multiple threads.
/// </summary>
/// <typeparam name="TK">Key type.</typeparam>
/// <typeparam name="TV">Value type.</typeparam>
[SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
public interface ICache<TK, TV> : IEnumerable<ICacheEntry<TK, TV>>
{
/// <summary>
/// Name of this cache (<c>null</c> for default cache).
/// </summary>
string Name { get; }
/// <summary>
/// Ignite hosting this cache.
/// </summary>
IIgnite Ignite { get; }
/// <summary>
/// Gets the cache configuration.
/// </summary>
CacheConfiguration GetConfiguration();
/// <summary>
/// Checks whether this cache contains no key-value mappings.
/// <para />
/// Semantically equals to <c>ICache.Size(CachePeekMode.PRIMARY) == 0</c>.
/// </summary>
bool IsEmpty();
/// <summary>
/// Gets a value indicating whether to keep values in binary form.
/// </summary>
bool IsKeepBinary { get; }
/// <summary>
/// Gets a value indicating whether to allow use atomic operations in transactions.
/// </summary>
[Obsolete("Not supported, will be removed in future releases.")]
bool IsAllowAtomicOpsInTx { get; }
/// <summary>
/// Get another cache instance with read-through and write-through behavior disabled.
/// </summary>
/// <returns>Cache with read-through and write-through behavior disabled.</returns>
ICache<TK, TV> WithSkipStore();
/// <summary>
/// Returns cache with the specified expired policy set. This policy will be used for each operation
/// invoked on the returned cache.
/// <para />
/// Expiry durations for each operation are calculated only once and then used as constants. Please
/// consider this when implementing custom expiry policy implementations.
/// </summary>
/// <param name="plc">Expiry policy to use.</param>
/// <returns>Cache instance with the specified expiry policy set.</returns>
ICache<TK, TV> WithExpiryPolicy(IExpiryPolicy plc);
/// <summary>
/// Gets cache with KeepBinary mode enabled, changing key and/or value types if necessary.
/// You can only change key/value types when transitioning from non-binary to binary cache;
/// Changing type of binary cache is not allowed and will throw an <see cref="InvalidOperationException"/>
/// </summary>
/// <typeparam name="TK1">Key type in binary mode.</typeparam>
/// <typeparam name="TV1">Value type in binary mode.</typeparam>
/// <returns>Cache instance with binary mode enabled.</returns>
ICache<TK1, TV1> WithKeepBinary<TK1, TV1>();
/// <summary>
/// Get another cache instance with operations allowed in transactions.
/// Only atomic caches need this. Transactional caches already available for transactions.
/// </summary>
/// <returns>Cache allowed to use in transactions.</returns>
[Obsolete("Not supported, will be removed in future releases.")]
ICache<TK, TV> WithAllowAtomicOpsInTx();
/// <summary>
/// Executes <see cref="LocalLoadCache"/> on all cache nodes.
/// </summary>
/// <param name="p">
/// Optional predicate. If provided, will be used to filter values to be put into cache.
/// </param>
/// <param name="args">
/// Optional user arguments to be passed into <see cref="ICacheStore{K, V}.LoadCache" />.
/// </param>
void LoadCache(ICacheEntryFilter<TK, TV> p, params object[] args);
/// <summary>
/// Executes <see cref="LocalLoadCache"/> on all cache nodes.
/// </summary>
/// <param name="p">
/// Optional predicate. If provided, will be used to filter values to be put into cache.
/// </param>
/// <param name="args">
/// Optional user arguments to be passed into <see cref="ICacheStore{K, V}.LoadCache" />.
/// </param>
Task LoadCacheAsync(ICacheEntryFilter<TK, TV> p, params object[] args);
/// <summary>
/// Delegates to <see cref="ICacheStore{K, V}.LoadCache" /> method to load state
/// from the underlying persistent storage. The loaded values will then be given
/// to the optionally passed in predicate, and, if the predicate returns true,
/// will be stored in cache. If predicate is null, then all loaded values will be stored in cache.
/// </summary>
/// <param name="p">
/// Optional predicate. If provided, will be used to filter values to be put into cache.
/// </param>
/// <param name="args">
/// Optional user arguments to be passed into <see cref="ICacheStore{K, V}.LoadCache" />.
/// </param>
void LocalLoadCache(ICacheEntryFilter<TK, TV> p, params object[] args);
/// <summary>
/// Delegates to <see cref="ICacheStore{K, V}.LoadCache" /> method to load state
/// from the underlying persistent storage. The loaded values will then be given
/// to the optionally passed in predicate, and, if the predicate returns true,
/// will be stored in cache. If predicate is null, then all loaded values will be stored in cache.
/// </summary>
/// <param name="p">
/// Optional predicate. If provided, will be used to filter values to be put into cache.
/// </param>
/// <param name="args">
/// Optional user arguments to be passed into <see cref="ICacheStore{K, V}.LoadCache" />.
/// </param>
Task LocalLoadCacheAsync(ICacheEntryFilter<TK, TV> p, params object[] args);
/// <summary>
/// Loads the specified entries into the cache using the configured
/// <see cref="ICacheStore"/>> for the given keys.
/// <para />
/// If an entry for a key already exists in the cache, a value will be loaded if and only if
/// <paramref name="replaceExistingValues" /> is true.
/// If no loader is configured for the cache, no objects will be loaded.
/// </summary>
/// <param name="keys">The keys to load.</param>
/// <param name="replaceExistingValues">if set to <c>true</c>, existing cache values will
/// be replaced by those loaded from a cache store.</param>
void LoadAll(IEnumerable<TK> keys, bool replaceExistingValues);
/// <summary>
/// Asynchronously loads the specified entries into the cache using the configured
/// <see cref="ICacheStore"/>> for the given keys.
/// <para />
/// If an entry for a key already exists in the cache, a value will be loaded if and only if
/// <paramref name="replaceExistingValues" /> is true.
/// If no loader is configured for the cache, no objects will be loaded.
/// </summary>
/// <param name="keys">The keys to load.</param>
/// <param name="replaceExistingValues">if set to <c>true</c>, existing cache values will
/// be replaced by those loaded from a cache store.</param>
Task LoadAllAsync(IEnumerable<TK> keys, bool replaceExistingValues);
/// <summary>
/// Check if cache contains mapping for this key.
/// </summary>
/// <param name="key">Key.</param>
/// <returns>True if cache contains mapping for this key.</returns>
bool ContainsKey(TK key);
/// <summary>
/// Check if cache contains mapping for this key.
/// </summary>
/// <param name="key">Key.</param>
/// <returns>True if cache contains mapping for this key.</returns>
Task<bool> ContainsKeyAsync(TK key);
/// <summary>
/// Check if cache contains mapping for these keys.
/// </summary>
/// <param name="keys">Keys.</param>
/// <returns>True if cache contains mapping for all these keys.</returns>
bool ContainsKeys(IEnumerable<TK> keys);
/// <summary>
/// Check if cache contains mapping for these keys.
/// </summary>
/// <param name="keys">Keys.</param>
/// <returns>True if cache contains mapping for all these keys.</returns>
Task<bool> ContainsKeysAsync(IEnumerable<TK> keys);
/// <summary>
/// Peeks at cached value using optional set of peek modes. This method will sequentially
/// iterate over given peek modes, and try to peek at value using each peek mode. Once a
/// non-null value is found, it will be immediately returned.
/// This method does not participate in any transactions, however, it may peek at transactional
/// value depending on the peek modes used.
/// If key is not present in cache, KeyNotFoundException will be thrown.
/// </summary>
/// <param name="key">Key.</param>
/// <param name="modes">Peek modes.</param>
/// <returns>Peeked value.</returns>
TV LocalPeek(TK key, params CachePeekMode[] modes);
/// <summary>
/// Peeks at cached value using optional set of peek modes. This method will sequentially
/// iterate over given peek modes, and try to peek at value using each peek mode. Once a
/// non-null value is found, it will be immediately returned.
/// This method does not participate in any transactions, however, it may peek at transactional
/// value depending on the peek modes used.
/// </summary>
/// <param name="key">Key.</param>
/// <param name="value">When this method returns, the value associated with the specified key,
/// if the key is found; otherwise, the default value for the type of the value parameter.
/// This parameter is passed uninitialized.</param>
/// <param name="modes">Peek modes.</param>
/// <returns>
/// true if the cache contains an element with the specified key; otherwise, false.
/// </returns>
bool TryLocalPeek(TK key, out TV value, params CachePeekMode[] modes);
/// <summary>
/// Gets or sets a cache value with the specified key.
/// Shortcut to <see cref="Get"/> and <see cref="Put"/>
/// </summary>
/// <param name="key">Key.</param>
/// <returns>Cache value with the specified key.</returns>
/// <exception cref="KeyNotFoundException">If the key is not present in the cache.</exception>
TV this[TK key] { get; set; }
/// <summary>
/// Retrieves value mapped to the specified key from cache.
///
/// If the value is not present in cache, then it will be looked up from swap storage. If
/// it's not present in swap, or if swap is disabled, and if read-through is allowed, value
/// will be loaded from persistent store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key.</param>
/// <returns>Value.</returns>
/// <exception cref="KeyNotFoundException">If the key is not present in the cache.</exception>
TV Get(TK key);
/// <summary>
/// Retrieves value mapped to the specified key from cache.
///
/// If the value is not present in cache, then it will be looked up from swap storage. If
/// it's not present in swap, or if swap is disabled, and if read-through is allowed, value
/// will be loaded from persistent store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key.</param>
/// <returns>Value.</returns>
/// <exception cref="KeyNotFoundException">If the key is not present in the cache.</exception>
Task<TV> GetAsync(TK key);
/// <summary>
/// Retrieves value mapped to the specified key from cache.
/// If the value is not present in cache, then it will be looked up from swap storage. If
/// it's not present in swap, or if swap is disabled, and if read-through is allowed, value
/// will be loaded from persistent store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key.</param>
/// <param name="value">When this method returns, the value associated with the specified key,
/// if the key is found; otherwise, the default value for the type of the value parameter.
/// This parameter is passed uninitialized.</param>
/// <returns>
/// true if the cache contains an element with the specified key; otherwise, false.
/// </returns>
bool TryGet(TK key, out TV value);
/// <summary>
/// Retrieves value mapped to the specified key from cache.
/// If the value is not present in cache, then it will be looked up from swap storage. If
/// it's not present in swap, or if swap is disabled, and if read-through is allowed, value
/// will be loaded from persistent store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key.</param>
/// <returns>
/// <see cref="CacheResult{T}"/> containing a bool success flag and a value.
/// </returns>
Task<CacheResult<TV>> TryGetAsync(TK key);
/// <summary>
/// Retrieves values mapped to the specified keys from cache.
/// If some value is not present in cache, then it will be looked up from swap storage. If
/// it's not present in swap, or if swap is disabled, and if read-through is allowed, value
/// will be loaded from persistent store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="keys">Keys.</param>
/// <returns>Map of key-value pairs.</returns>
ICollection<ICacheEntry<TK, TV>> GetAll(IEnumerable<TK> keys);
/// <summary>
/// Retrieves values mapped to the specified keys from cache.
/// If some value is not present in cache, then it will be looked up from swap storage. If
/// it's not present in swap, or if swap is disabled, and if read-through is allowed, value
/// will be loaded from persistent store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="keys">Keys.</param>
/// <returns>Map of key-value pairs.</returns>
Task<ICollection<ICacheEntry<TK, TV>>> GetAllAsync(IEnumerable<TK> keys);
/// <summary>
/// Associates the specified value with the specified key in the cache.
/// <para />
/// If the cache previously contained a mapping for the key,
/// the old value is replaced by the specified value.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
void Put(TK key, TV val);
/// <summary>
/// Associates the specified value with the specified key in the cache.
/// <para />
/// If the cache previously contained a mapping for the key,
/// the old value is replaced by the specified value.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
Task PutAsync(TK key, TV val);
/// <summary>
/// Associates the specified value with the specified key in this cache,
/// returning an existing value if one existed.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
/// <returns>
/// The value associated with the key at the start of the operation.
/// </returns>
CacheResult<TV> GetAndPut(TK key, TV val);
/// <summary>
/// Associates the specified value with the specified key in this cache,
/// returning an existing value if one existed.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
/// <returns>
/// The value associated with the key at the start of the operation.
/// </returns>
Task<CacheResult<TV>> GetAndPutAsync(TK key, TV val);
/// <summary>
/// Atomically replaces the value for a given key if and only if there is a value currently mapped by the key.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
/// <returns>
/// The previous value associated with the specified key.
/// </returns>
CacheResult<TV> GetAndReplace(TK key, TV val);
/// <summary>
/// Atomically replaces the value for a given key if and only if there is a value currently mapped by the key.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
/// <returns>
/// The previous value associated with the specified key.
/// </returns>
Task<CacheResult<TV>> GetAndReplaceAsync(TK key, TV val);
/// <summary>
/// Atomically removes the entry for a key only if currently mapped to some value.
/// </summary>
/// <param name="key">Key with which the specified value is associated.</param>
/// <returns>The value if one existed.</returns>
CacheResult<TV> GetAndRemove(TK key);
/// <summary>
/// Atomically removes the entry for a key only if currently mapped to some value.
/// </summary>
/// <param name="key">Key with which the specified value is associated.</param>
/// <returns>The value if one existed.</returns>
Task<CacheResult<TV>> GetAndRemoveAsync(TK key);
/// <summary>
/// Atomically associates the specified key with the given value if it is not already associated with a value.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
/// <returns>True if a value was set.</returns>
bool PutIfAbsent(TK key, TV val);
/// <summary>
/// Atomically associates the specified key with the given value if it is not already associated with a value.
/// </summary>
/// <param name="key">Key with which the specified value is to be associated.</param>
/// <param name="val">Value to be associated with the specified key.</param>
/// <returns>True if a value was set.</returns>
Task<bool> PutIfAbsentAsync(TK key, TV val);
/// <summary>
/// Stores given key-value pair in cache only if cache had no previous mapping for it.
/// If cache previously contained value for the given key, then this value is returned.
/// In case of PARTITIONED or REPLICATED caches, the value will be loaded from the primary node,
/// which in its turn may load the value from the swap storage, and consecutively, if it's not
/// in swap, from the underlying persistent storage.
/// If the returned value is not needed, <see cref="PutIfAbsent"/> should be used instead of this one to
/// avoid the overhead associated with returning of the previous value.
/// If write-through is enabled, the stored value will be persisted to store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key to store in cache.</param>
/// <param name="val">Value to be associated with the given key.</param>
/// <returns>
/// Previously contained value regardless of whether put happened or not.
/// </returns>
CacheResult<TV> GetAndPutIfAbsent(TK key, TV val);
/// <summary>
/// Stores given key-value pair in cache only if cache had no previous mapping for it.
/// If cache previously contained value for the given key, then this value is returned.
/// In case of PARTITIONED or REPLICATED caches, the value will be loaded from the primary node,
/// which in its turn may load the value from the swap storage, and consecutively, if it's not
/// in swap, from the underlying persistent storage.
/// If the returned value is not needed, <see cref="PutIfAbsent"/> should be used instead of this one to
/// avoid the overhead associated with returning of the previous value.
/// If write-through is enabled, the stored value will be persisted to store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key to store in cache.</param>
/// <param name="val">Value to be associated with the given key.</param>
/// <returns>
/// Previously contained value regardless of whether put happened or not.
/// </returns>
Task<CacheResult<TV>> GetAndPutIfAbsentAsync(TK key, TV val);
/// <summary>
/// Stores given key-value pair in cache only if there is a previous mapping for it.
/// If cache previously contained value for the given key, then this value is returned.
/// In case of PARTITIONED or REPLICATED caches, the value will be loaded from the primary node,
/// which in its turn may load the value from the swap storage, and consecutively, if it's not
/// in swap, from the underlying persistent storage.
/// If write-through is enabled, the stored value will be persisted to store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key to store in cache.</param>
/// <param name="val">Value to be associated with the given key.</param>
/// <returns>True if the value was replaced.</returns>
bool Replace(TK key, TV val);
/// <summary>
/// Stores given key-value pair in cache only if there is a previous mapping for it.
/// If cache previously contained value for the given key, then this value is returned.
/// In case of PARTITIONED or REPLICATED caches, the value will be loaded from the primary node,
/// which in its turn may load the value from the swap storage, and consecutively, if it's not
/// in swap, from the underlying persistent storage.
/// If write-through is enabled, the stored value will be persisted to store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key to store in cache.</param>
/// <param name="val">Value to be associated with the given key.</param>
/// <returns>True if the value was replaced.</returns>
Task<bool> ReplaceAsync(TK key, TV val);
/// <summary>
/// Stores given key-value pair in cache only if the previous value is equal to the
/// old value passed as argument.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key to store in cache.</param>
/// <param name="oldVal">Old value to match.</param>
/// <param name="newVal">Value to be associated with the given key.</param>
/// <returns>True if replace happened, false otherwise.</returns>
bool Replace(TK key, TV oldVal, TV newVal);
/// <summary>
/// Stores given key-value pair in cache only if the previous value is equal to the
/// old value passed as argument.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key to store in cache.</param>
/// <param name="oldVal">Old value to match.</param>
/// <param name="newVal">Value to be associated with the given key.</param>
/// <returns>True if replace happened, false otherwise.</returns>
Task<bool> ReplaceAsync(TK key, TV oldVal, TV newVal);
/// <summary>
/// Stores given key-value pairs in cache.
/// If write-through is enabled, the stored values will be persisted to store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
///
/// Keys are locked in the order in which they are enumerated. It is caller's responsibility to
/// make sure keys always follow same order, such as by using <see cref="SortedDictionary{K, V}"/>. Using unordered
/// dictionary, such as <see cref="Dictionary{K, V}"/>, while calling this method in parallel <b>will lead to deadlock</b>.
/// </summary>
/// <param name="vals">Key-value pairs to store in cache.</param>
void PutAll(IEnumerable<KeyValuePair<TK, TV>> vals);
/// <summary>
/// Stores given key-value pairs in cache.
/// If write-through is enabled, the stored values will be persisted to store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
///
/// Keys are locked in the order in which they are enumerated. It is caller's responsibility to
/// make sure keys always follow same order, such as by using <see cref="SortedDictionary{K, V}"/>. Using unordered
/// dictionary, such as <see cref="Dictionary{K, V}"/>, while calling this method in parallel <b>will lead to deadlock</b>.
/// </summary>
/// <param name="vals">Key-value pairs to store in cache.</param>
Task PutAllAsync(IEnumerable<KeyValuePair<TK, TV>> vals);
/// <summary>
/// Attempts to evict all entries associated with keys. Note, that entry will be evicted only
/// if it's not used (not participating in any locks or transactions).
/// </summary>
/// <param name="keys">Keys to evict from cache.</param>
void LocalEvict(IEnumerable<TK> keys);
/// <summary>
/// Clears the contents of the cache, without notifying listeners or CacheWriters.
/// </summary>
void Clear();
/// <summary>
/// Clears the contents of the cache, without notifying listeners or CacheWriters.
/// </summary>
Task ClearAsync();
/// <summary>
/// Clear entry from the cache and swap storage, without notifying listeners or CacheWriters.
/// Entry is cleared only if it is not currently locked, and is not participating in a transaction.
/// </summary>
/// <param name="key">Key to clear.</param>
void Clear(TK key);
/// <summary>
/// Clear entry from the cache and swap storage, without notifying listeners or CacheWriters.
/// Entry is cleared only if it is not currently locked, and is not participating in a transaction.
/// </summary>
/// <param name="key">Key to clear.</param>
Task ClearAsync(TK key);
/// <summary>
/// Clear entries from the cache and swap storage, without notifying listeners or CacheWriters.
/// Entry is cleared only if it is not currently locked, and is not participating in a transaction.
/// </summary>
/// <param name="keys">Keys to clear.</param>
void ClearAll(IEnumerable<TK> keys);
/// <summary>
/// Clear entries from the cache and swap storage, without notifying listeners or CacheWriters.
/// Entry is cleared only if it is not currently locked, and is not participating in a transaction.
/// </summary>
/// <param name="keys">Keys to clear.</param>
Task ClearAllAsync(IEnumerable<TK> keys);
/// <summary>
/// Clear entry from the cache and swap storage, without notifying listeners or CacheWriters.
/// Entry is cleared only if it is not currently locked, and is not participating in a transaction.
/// <para />
/// Note that this operation is local as it merely clears
/// an entry from local cache, it does not remove entries from remote caches.
/// </summary>
/// <param name="key">Key to clear.</param>
void LocalClear(TK key);
/// <summary>
/// Clear entries from the cache and swap storage, without notifying listeners or CacheWriters.
/// Entry is cleared only if it is not currently locked, and is not participating in a transaction.
/// <para />
/// Note that this operation is local as it merely clears
/// entries from local cache, it does not remove entries from remote caches.
/// </summary>
/// <param name="keys">Keys to clear.</param>
void LocalClearAll(IEnumerable<TK> keys);
/// <summary>
/// Removes given key mapping from cache. If cache previously contained value for the given key,
/// then this value is returned. In case of PARTITIONED or REPLICATED caches, the value will be
/// loaded from the primary node, which in its turn may load the value from the disk-based swap
/// storage, and consecutively, if it's not in swap, from the underlying persistent storage.
/// If the returned value is not needed, <see cref="Remove(TK)"/> should always be used instead of this
/// one to avoid the overhead associated with returning of the previous value.
/// If write-through is enabled, the value will be removed from store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key whose mapping is to be removed from cache.</param>
/// <returns>False if there was no matching key.</returns>
bool Remove(TK key);
/// <summary>
/// Removes given key mapping from cache. If cache previously contained value for the given key,
/// then this value is returned. In case of PARTITIONED or REPLICATED caches, the value will be
/// loaded from the primary node, which in its turn may load the value from the disk-based swap
/// storage, and consecutively, if it's not in swap, from the underlying persistent storage.
/// If the returned value is not needed, <see cref="Remove(TK)"/> should always be used instead of this
/// one to avoid the overhead associated with returning of the previous value.
/// If write-through is enabled, the value will be removed from store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key whose mapping is to be removed from cache.</param>
/// <returns>False if there was no matching key.</returns>
Task<bool> RemoveAsync(TK key);
/// <summary>
/// Removes given key mapping from cache if one exists and value is equal to the passed in value.
/// If write-through is enabled, the value will be removed from store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key whose mapping is to be removed from cache.</param>
/// <param name="val">Value to match against currently cached value.</param>
/// <returns>True if entry was removed, false otherwise.</returns>
bool Remove(TK key, TV val);
/// <summary>
/// Removes given key mapping from cache if one exists and value is equal to the passed in value.
/// If write-through is enabled, the value will be removed from store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
/// </summary>
/// <param name="key">Key whose mapping is to be removed from cache.</param>
/// <param name="val">Value to match against currently cached value.</param>
/// <returns>True if entry was removed, false otherwise.</returns>
Task<bool> RemoveAsync(TK key, TV val);
/// <summary>
/// Removes given key mappings from cache.
/// If write-through is enabled, the value will be removed from store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
///
/// Keys are locked in the order in which they are enumerated. It is caller's responsibility to
/// make sure keys always follow same order, such as by using <see cref="SortedSet{K}"/>. Using unordered
/// collection, such as <see cref="HashSet{K}"/>, while calling this method in parallel <b>will lead to deadlock</b>.
/// </summary>
/// <param name="keys">Keys whose mappings are to be removed from cache.</param>
void RemoveAll(IEnumerable<TK> keys);
/// <summary>
/// Removes given key mappings from cache.
/// If write-through is enabled, the value will be removed from store.
/// This method is transactional and will enlist the entry into ongoing transaction if there is one.
///
/// Keys are locked in the order in which they are enumerated. It is caller's responsibility to
/// make sure keys always follow same order, such as by using <see cref="SortedSet{K}"/>. Using unordered
/// collection, such as <see cref="HashSet{K}"/>, while calling this method in parallel <b>will lead to deadlock</b>.
/// </summary>
/// <param name="keys">Keys whose mappings are to be removed from cache.</param>
Task RemoveAllAsync(IEnumerable<TK> keys);
/// <summary>
/// Removes all mappings from cache.
/// If write-through is enabled, the value will be removed from store.
/// This method is not transactional.
/// </summary>
void RemoveAll();
/// <summary>
/// Removes all mappings from cache.
/// If write-through is enabled, the value will be removed from store.
/// This method is not transactional.
/// </summary>
Task RemoveAllAsync();
/// <summary>
/// Gets the number of all entries cached on this node.
/// </summary>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Cache size on this node.</returns>
int GetLocalSize(params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries cached across all nodes.
/// <para />
/// NOTE: this operation is distributed and will query all participating nodes for their cache sizes.
/// </summary>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Cache size across all nodes.</returns>
int GetSize(params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries cached across all nodes.
/// <para />
/// NOTE: this operation is distributed and will query all participating nodes for their cache sizes.
/// </summary>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Cache size across all nodes.</returns>
Task<int> GetSizeAsync(params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries cached across all nodes as long value.
/// <para />
/// NOTE: this operation is distributed and will query all participating nodes for their cache sizes.
/// </summary>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Cache size across all nodes.</returns>
long GetSizeLong(params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries in partition cached across all nodes as long value.
/// <para />
/// NOTE: this operation is distributed and will query all participating nodes for their cache sizes.
/// </summary>
/// <param name="partition">Cache partition.</param>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Partition cache size across all nodes.</returns>
long GetSizeLong(int partition, params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries cached across all nodes as long value.
/// <para />
/// NOTE: this operation is distributed and will query all participating nodes for their cache sizes.
/// </summary>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Cache size across all nodes.</returns>
Task<long> GetSizeLongAsync(params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries in a partition cached across all nodes as long value.
/// <para />
/// NOTE: this operation is distributed and will query all participating nodes for their cache sizes.
/// </summary>
/// <param name="partition">Cache partition.</param>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Partition cache size across all nodes.</returns>
Task<long> GetSizeLongAsync(int partition, params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries cached on this node as long value.
/// </summary>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Cache size on this node.</returns>
long GetLocalSizeLong(params CachePeekMode[] modes);
/// <summary>
/// Gets the number of all entries in a partition cached on this node as long value.
/// </summary>
/// <param name="partition">Cache partition.</param>
/// <param name="modes">Optional peek modes. If not provided, then total cache size is returned.</param>
/// <returns>Partition cache size on this node.</returns>
long GetLocalSizeLong(int partition, params CachePeekMode[] modes);
/// <summary>
/// Queries cache.
/// </summary>
/// <param name="qry">Query.</param>
/// <returns>Cursor.</returns>
IQueryCursor<ICacheEntry<TK, TV>> Query(QueryBase qry);
/// <summary>
/// Queries separate entry fields.
/// </summary>
/// <param name="qry">SQL fields query.</param>
/// <returns>Cursor.</returns>
IFieldsQueryCursor Query(SqlFieldsQuery qry);
/// <summary>
/// Queries separate entry fields.
/// </summary>
/// <param name="qry">SQL fields query.</param>
/// <returns>Cursor.</returns>
[Obsolete("Use Query(SqlFieldsQuery qry) instead.")]
IQueryCursor<IList> QueryFields(SqlFieldsQuery qry);
/// <summary>
/// Start continuous query execution.
/// </summary>
/// <param name="qry">Continuous query.</param>
/// <returns>Handle to stop query execution.</returns>
IContinuousQueryHandle QueryContinuous(ContinuousQuery<TK, TV> qry);
/// <summary>
/// Start continuous query execution.
/// </summary>
/// <param name="qry">Continuous query.</param>
/// <param name="initialQry">
/// The initial query. This query will be executed before continuous listener is registered which allows
/// to iterate through entries which have already existed at the time continuous query is executed.
/// </param>
/// <returns>
/// Handle to get initial query cursor or stop query execution.
/// </returns>
IContinuousQueryHandle<ICacheEntry<TK, TV>> QueryContinuous(ContinuousQuery<TK, TV> qry, QueryBase initialQry);
/// <summary>
/// Get local cache entries.
/// </summary>
/// <param name="peekModes">Peek modes.</param>
/// <returns>Enumerable instance.</returns>
IEnumerable<ICacheEntry<TK, TV>> GetLocalEntries(params CachePeekMode[] peekModes);
/// <summary>
/// Invokes an <see cref="ICacheEntryProcessor{K, V, A, R}"/> against the
/// <see cref="IMutableCacheEntry{K, V}"/> specified by the provided key.
/// If an entry does not exist for the specified key, an attempt is made to load it (if a loader is configured)
/// or a surrogate entry, consisting of the key with a null value is used instead.
/// </summary>
/// <typeparam name="TArg">The type of the argument.</typeparam>
/// <typeparam name="TRes">The type of the result.</typeparam>
/// <param name="key">The key.</param>
/// <param name="processor">The processor.</param>
/// <param name="arg">The argument.</param>
/// <returns>Result of the processing.</returns>
/// <exception cref="CacheEntryProcessorException">If an exception has occurred during processing.</exception>
TRes Invoke<TArg, TRes>(TK key, ICacheEntryProcessor<TK, TV, TArg, TRes> processor, TArg arg);
/// <summary>
/// Invokes an <see cref="ICacheEntryProcessor{K, V, A, R}"/> against the
/// <see cref="IMutableCacheEntry{K, V}"/> specified by the provided key.
/// If an entry does not exist for the specified key, an attempt is made to load it (if a loader is configured)
/// or a surrogate entry, consisting of the key with a null value is used instead.
/// </summary>
/// <typeparam name="TArg">The type of the argument.</typeparam>
/// <typeparam name="TRes">The type of the result.</typeparam>
/// <param name="key">The key.</param>
/// <param name="processor">The processor.</param>
/// <param name="arg">The argument.</param>
/// <returns>Result of the processing.</returns>
/// <exception cref="CacheEntryProcessorException">If an exception has occurred during processing.</exception>
Task<TRes> InvokeAsync<TArg, TRes>(TK key, ICacheEntryProcessor<TK, TV, TArg, TRes> processor, TArg arg);
/// <summary>
/// Invokes an <see cref="ICacheEntryProcessor{K, V, A, R}"/> against a set of keys.
/// If an entry does not exist for the specified key, an attempt is made to load it (if a loader is configured)
/// or a surrogate entry, consisting of the key with a null value is used instead.
///
/// The order that the entries for the keys are processed is undefined.
/// Implementations may choose to process the entries in any order, including concurrently.
/// Furthermore there is no guarantee implementations will use the same processor instance
/// to process each entry, as the case may be in a non-local cache topology.
///
/// Keys are locked in the order in which they are enumerated. It is caller's responsibility to
/// make sure keys always follow same order, such as by using <see cref="SortedSet{K}"/>. Using unordered
/// collection, such as <see cref="HashSet{K}"/>, while calling this method in parallel <b>will lead to deadlock</b>.
/// </summary>
/// <typeparam name="TArg">The type of the argument.</typeparam>
/// <typeparam name="TRes">The type of the result.</typeparam>
/// <param name="keys">The keys.</param>
/// <param name="processor">The processor.</param>
/// <param name="arg">The argument.</param>
/// <returns>
/// Map of <see cref="ICacheEntryProcessorResult{K, R}" /> of the processing per key, if any,
/// defined by the <see cref="ICacheEntryProcessor{K,V,A,R}"/> implementation.
/// No mappings will be returned for processors that return a null value for a key.
/// </returns>
/// <exception cref="CacheEntryProcessorException">If an exception has occured during processing.</exception>
ICollection<ICacheEntryProcessorResult<TK, TRes>> InvokeAll<TArg, TRes>(IEnumerable<TK> keys,
ICacheEntryProcessor<TK, TV, TArg, TRes> processor, TArg arg);
/// <summary>
/// Invokes an <see cref="ICacheEntryProcessor{K, V, A, R}"/> against a set of keys.
/// If an entry does not exist for the specified key, an attempt is made to load it (if a loader is configured)
/// or a surrogate entry, consisting of the key with a null value is used instead.
///
/// The order that the entries for the keys are processed is undefined.
/// Implementations may choose to process the entries in any order, including concurrently.
/// Furthermore there is no guarantee implementations will use the same processor instance
/// to process each entry, as the case may be in a non-local cache topology.
///
/// Keys are locked in the order in which they are enumerated. It is caller's responsibility to
/// make sure keys always follow same order, such as by using <see cref="SortedSet{K}"/>. Using unordered
/// collection, such as <see cref="HashSet{K}"/>, while calling this method in parallel <b>will lead to deadlock</b>.
/// </summary>
/// <typeparam name="TArg">The type of the argument.</typeparam>
/// <typeparam name="TRes">The type of the result.</typeparam>
/// <param name="keys">The keys.</param>
/// <param name="processor">The processor.</param>
/// <param name="arg">The argument.</param>
/// <returns>
/// Map of <see cref="ICacheEntryProcessorResult{K, R}" /> of the processing per key, if any,
/// defined by the <see cref="ICacheEntryProcessor{K,V,A,R}"/> implementation.
/// No mappings will be returned for processors that return a null value for a key.
/// </returns>
/// <exception cref="CacheEntryProcessorException">If an exception has occured during processing.</exception>
Task<ICollection<ICacheEntryProcessorResult<TK, TRes>>> InvokeAllAsync<TArg, TRes>(IEnumerable<TK> keys,
ICacheEntryProcessor<TK, TV, TArg, TRes> processor, TArg arg);
/// <summary>
/// Creates an <see cref="ICacheLock"/> instance associated with passed key.
/// This method does not acquire lock immediately, you have to call appropriate method on returned instance.
/// </summary>
/// <param name="key">Key for lock.</param>
/// <returns>New <see cref="ICacheLock"/> instance associated with passed key.</returns>
ICacheLock Lock(TK key);
/// <summary>
/// Creates an <see cref="ICacheLock"/> instance associated with passed keys.
/// This method does not acquire lock immediately, you have to call appropriate method on returned instance.
/// </summary>
/// <param name="keys">Keys for lock.</param>
/// <returns>New <see cref="ICacheLock"/> instance associated with passed keys.</returns>
ICacheLock LockAll(IEnumerable<TK> keys);
/// <summary>
/// Checks if specified key is locked.
/// <para />
/// This is a local operation and does not involve any network trips
/// or access to persistent storage in any way.
/// </summary>
/// <param name="key">Key to check.</param>
/// <param name="byCurrentThread">
/// If true, checks that current thread owns a lock on this key;
/// otherwise, checks that any thread on any node owns a lock on this key.
/// </param>
/// <returns>True if specified key is locked; otherwise, false.</returns>
bool IsLocalLocked(TK key, bool byCurrentThread);
/// <summary>
/// Gets global (whole cluster) snapshot metrics (statistics) for this cache.
/// </summary>
/// <returns>Cache metrics.</returns>
[SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate",
Justification = "Expensive operation.")]
ICacheMetrics GetMetrics();
/// <summary>
/// Gets global (whole cluster group) snapshot metrics (statistics) for this cache.
/// </summary>
/// <param name="clusterGroup">The cluster group to get metrics for.</param>
[SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate",
Justification = "Expensive operation.")]
ICacheMetrics GetMetrics(IClusterGroup clusterGroup);
/// <summary>
/// Gets local snapshot metrics (statistics) for this cache.
/// </summary>
/// <returns>Cache metrics.</returns>
[SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate",
Justification = "Expensive operation.")]
ICacheMetrics GetLocalMetrics();
/// <summary>
/// Rebalances cache partitions. This method is usually used when rebalanceDelay configuration parameter
/// has non-zero value. When many nodes are started or stopped almost concurrently,
/// it is more efficient to delay rebalancing until the node topology is stable to make sure that no redundant
/// re-partitioning happens.
/// <para />
/// In case of partitioned caches, for better efficiency user should usually make sure that new nodes get
/// placed on the same place of consistent hash ring as the left nodes, and that nodes are restarted before
/// rebalanceDelay expires.
/// </summary>
/// <returns>Task that will be completed when rebalancing is finished.</returns>
Task Rebalance();
/// <summary>
/// Get another cache instance with no-retries behavior enabled.
/// </summary>
/// <returns>Cache with no-retries behavior enabled.</returns>
ICache<TK, TV> WithNoRetries();
/// <summary>
/// Gets an instance of cache that will be allowed to execute cache operations (read, write)
/// regardless of partition loss policy.
/// </summary>
/// <returns>Cache without partition loss protection.</returns>
ICache<TK, TV> WithPartitionRecover();
/// <summary>
/// Gets lost partitions IDs.
/// <para />
/// See also <see cref="CacheConfiguration.PartitionLossPolicy"/>
/// and <see cref="IIgnite.ResetLostPartitions(IEnumerable{string})"/>.
/// </summary>
ICollection<int> GetLostPartitions();
/// <summary>
/// Gets query metrics.
/// </summary>
/// <returns>Query metrics.</returns>
IQueryMetrics GetQueryMetrics();
/// <summary>
/// Reset query metrics.
/// </summary>
void ResetQueryMetrics();
/// <summary>
/// Efficiently preloads cache partition into page memory.
/// <para/>
/// This is useful for fast iteration over cache partition data if persistence is enabled and the data is "cold".
/// <para/>
/// Preload will reduce available amount of page memory for subsequent operations and may lead to earlier page
/// replacement.
/// <para/>
/// This method is irrelevant for in-memory caches. Calling this method on an in-memory cache will result in
/// exception.
/// </summary>
/// <param name="partition">Partition number.</param>
void PreloadPartition(int partition);
/// <summary>
/// Efficiently preloads cache partition into page memory asynchronously.
/// <para/>
/// This is useful for fast iteration over cache partition data if persistence is enabled and the data is "cold".
/// <para/>
/// Preload will reduce available amount of page memory for subsequent operations and may lead to earlier page
/// replacement.
/// <para/>
/// This method is irrelevant for in-memory caches. Calling this method on an in-memory cache will result in
/// exception.
/// </summary>
/// <param name="partition">Partition number.</param>
/// <returns>Task.</returns>
Task PreloadPartitionAsync(int partition);
/// <summary>
/// Efficiently preloads cache partition into page memory if it exists on the local node.
/// <para/>
/// This is useful for fast iteration over cache partition data if persistence is enabled and the data is "cold".
/// <para/>
/// Preload will reduce available amount of page memory for subsequent operations and may lead to earlier page
/// replacement.
/// <para/>
/// This method is irrelevant for in-memory caches.
/// </summary>
/// <param name="partition">Partition number.</param>
/// <returns><code>True</code>if partition was preloaded, <code>False</code> if it doesn't belong to local node.</returns>
bool LocalPreloadPartition(int partition);
}
}
| 54.165563 | 131 | 0.629434 | [
"CC0-1.0"
] | bratwurzt/ignite | modules/platforms/dotnet/Apache.Ignite.Core/Cache/ICache.cs | 57,253 | C# |
using System;

namespace XtermSharp {
	/// <summary>
	/// Represents a circular list; a list with a maximum size that wraps around when push is called overriding values at the start of the list.
	/// </summary>
	public class CircularList<T> {
		// Number of logical elements currently stored. Class invariant: length <= array.Length.
		int length;

		// Backing storage. Logical index i lives at array [(startIndex + i) % array.Length].
		T [] array;

		// Physical index of logical element 0. May grow past array.Length (see TrimStart);
		// GetCyclicIndex always reduces it modulo the capacity.
		int startIndex;

		/// <summary>
		/// Initializes a new instance of the <see cref="T:XtermSharp.CircularList`1"/> class with the specified number of elements.
		/// </summary>
		/// <param name="maxLength">Max length.</param>
		public CircularList (int maxLength)
		{
			array = new T [maxLength];
			length = 0;
		}

		// Gets the cyclic index for the specified regular index. The cyclic index can then be used on the
		// backing array to get the element associated with the regular index.
		int GetCyclicIndex (int index)
		{
			return (startIndex + index) % array.Length;
		}

		/// <summary>
		/// Gets or sets the maximum length of the circular list.
		/// Setting a new capacity reallocates the backing array and compacts the
		/// surviving elements back to physical index 0.
		/// </summary>
		/// <value>The maximum number of elements the list can hold.</value>
		public int MaxLength {
			get => array.Length;
			set {
				if (value <= 0)
					throw new ArgumentException (nameof (value));

				if (value == array.Length)
					return;

				// Reconstruct array, starting at index 0. Only transfer values from the
				// indexes 0 to length.
				var newArray = new T [value];
				var top = Math.Min (value, array.Length);
				for (int i = 0; i < top; i++)
					newArray [i] = array [GetCyclicIndex (i)];
				startIndex = 0;
				array = newArray;
			}
		}

		/// <summary>
		/// The current number of elements in the circular buffer. Growing the length
		/// clears the newly exposed slots to the element type's default value.
		/// </summary>
		/// <value>The length.</value>
		public int Length {
			get => length;
			set {
				if (value > length) {
					// Clear the newly exposed slots. This must go through GetCyclicIndex so
					// the correct physical slots are cleared after the window has wrapped
					// (the previous code indexed the array directly, which cleared the
					// wrong slots once startIndex != 0).
					for (int i = length; i < value; i++)
						array [GetCyclicIndex (i)] = default (T);
				}
				length = value;
			}
		}

		/// <summary>
		/// Invokes the specificied callback for each items of the circular list, the first parameter is the value, the second is the ith-index.
		/// </summary>
		/// <param name="callback">Callback.</param>
		public void ForEach (Action<T, int> callback)
		{
			var top = length;
			for (int i = 0; i < top; i++)
				callback (this [i], i);
		}

		/// <summary>
		/// Gets or sets the <see cref="T:XtermSharp.CircularList`1"/> at the specified logical index.
		/// </summary>
		/// <param name="index">Index.</param>
		public T this [int index] {
			get => array [GetCyclicIndex (index)];
			set => array [GetCyclicIndex (index)] = value;
		}

		/// <summary>
		/// Event raised when an item is removed from the circular array, the parameter is the number of items removed.
		/// </summary>
		public Action<int> Trimmed;

		/// <summary>
		/// Pushes a new value onto the list. Once the maximum length is reached, each
		/// push overwrites the oldest element and raises <see cref="Trimmed"/> with 1.
		/// </summary>
		/// <param name="value">Value to push.</param>
		public void Push (T value)
		{
			array [GetCyclicIndex (length)] = value;
			if (length == array.Length) {
				// Buffer is full: the write above replaced the oldest element (physical
				// index startIndex), so slide the logical window forward instead of
				// growing the length. The previous code incremented length first and
				// compared afterwards, which trimmed one push too early and then let
				// length grow past array.Length, breaking Pop/Splice/IsFull.
				startIndex++;
				if (startIndex == array.Length)
					startIndex = 0;
				Trimmed?.Invoke (1);
			} else {
				length++;
			}
		}

		/// <summary>
		/// Advances the window by one and returns the slot that becomes the new last
		/// element, allowing callers to reuse it instead of allocating. Only valid when
		/// the buffer is full.
		/// </summary>
		public T Recycle ()
		{
			if (Length != MaxLength) {
				throw new Exception ("Can only recycle when the buffer is full");
			}

			startIndex = ++startIndex % MaxLength;
			return array [GetCyclicIndex (Length - 1)];
		}

		/// <summary>
		/// Removes and returns the last value on the list.
		/// </summary>
		/// <returns>The popped value.</returns>
		public T Pop ()
		{
			// Reads logical index (length - 1) and shrinks the list by one.
			return array [GetCyclicIndex (length-- - 1)];
		}

		/// <summary>
		/// Deletes and/or inserts items at a particular index (in that order).
		/// </summary>
		/// <param name="start">The index to delete and/or insert.</param>
		/// <param name="deleteCount">The number of elements to delete.</param>
		/// <param name="items">The items to insert.</param>
		public void Splice (int start, int deleteCount, params T [] items)
		{
			// delete items: shift everything after the deleted range down.
			if (deleteCount > 0) {
				for (int i = start; i < length - deleteCount; i++)
					array [GetCyclicIndex (i)] = array [GetCyclicIndex (i + deleteCount)];
				length -= deleteCount;
			}

			if (items.Length != 0) {
				// add items: open a gap by shifting up, then fill it.
				for (int i = length - 1; i >= start; i--)
					array [GetCyclicIndex (i + items.Length)] = array [GetCyclicIndex (i)];
				for (int i = 0; i < items.Length; i++)
					array [GetCyclicIndex (start + i)] = items [i];
			}

			// Adjust length as needed; if the insert overflowed the capacity, trim
			// the oldest elements and notify subscribers.
			if (length + items.Length > array.Length) {
				int countToTrim = length + items.Length - array.Length;
				startIndex += countToTrim;
				length = array.Length;
				Trimmed?.Invoke (countToTrim);
			} else {
				length += items.Length;
			}
		}

		/// <summary>
		/// Trims a number of items from the start of the list.
		/// </summary>
		/// <param name="count">The number of items to remove.</param>
		public void TrimStart (int count)
		{
			if (count > length)
				count = length;

			// startIndex is not reduced modulo the capacity here; that is safe because
			// GetCyclicIndex applies the modulo on every access.
			startIndex += count;
			length -= count;
			Trimmed?.Invoke (count);
		}

		/// <summary>
		/// Shifts <paramref name="count"/> elements starting at <paramref name="start"/>
		/// by <paramref name="offset"/> positions (positive = towards the end, growing
		/// and possibly trimming the list; negative = towards the front, overwriting).
		/// </summary>
		/// <param name="start">Start.</param>
		/// <param name="count">Count.</param>
		/// <param name="offset">Offset.</param>
		public void ShiftElements (int start, int count, int offset)
		{
			if (count < 0)
				return;
			if (start < 0 || start >= length)
				throw new ArgumentException ("Start argument is out of range");
			if (start + offset < 0)
				throw new ArgumentException ("Can not shift elements in list beyond index 0");

			if (offset > 0) {
				// Copy backwards so overlapping ranges are not clobbered.
				for (var i = count - 1; i >= 0; i--) {
					this [start + i + offset] = this [start + i];
				}

				var expandListBy = (start + count + offset) - length;
				if (expandListBy > 0) {
					length += expandListBy;
					while (length > array.Length) {
						length--;
						startIndex++;
						// Null-conditional for consistency with the other trim
						// notifications; the old code threw when no subscriber existed.
						Trimmed?.Invoke (1);
					}
				}
			} else {
				for (var i = 0; i < count; i++) {
					this [start + i + offset] = this [start + i];
				}
			}
		}

		/// <summary>
		/// True when the list holds exactly <see cref="MaxLength"/> elements.
		/// </summary>
		public bool IsFull => Length == MaxLength;
	}
}
| 28.333333 | 141 | 0.612418 | [
"MIT"
] | Therzok/XtermSharp | XtermSharp/Utils/CircularList.cs | 6,122 | C# |
using UnityEngine;
using System.Collections.Generic;
using WaterBuoyancy.Collections;

namespace WaterBuoyancy
{
    /// <summary>
    /// A rectangular water volume whose surface is a grid mesh of <see cref="Rows"/> x
    /// <see cref="Columns"/> quads, each <see cref="QuadSegmentSize"/> local units wide.
    /// Exposes the surface queries (height, normal, containment) that buoyant bodies need.
    /// NOTE(review): assumes the attached MeshFilter's vertices are laid out row-major with
    /// (Columns + 1) vertices per row, starting at the volume's local origin -- confirm
    /// against whatever generates/animates the water mesh.
    /// </summary>
    [RequireComponent(typeof(BoxCollider))]
    [RequireComponent(typeof(MeshFilter))]
    public class WaterVolume : MonoBehaviour
    {
        public const string TAG = "Water Volume";

        // Water density, exposed via Density for external buoyancy force calculations.
        [SerializeField]
        private float density = 1f;

        // Number of quad rows (along local Z) in the surface grid.
        [SerializeField]
        private int rows = 10;

        // Number of quad columns (along local X) in the surface grid.
        [SerializeField]
        private int columns = 10;

        // Edge length of one grid quad, in local units.
        [SerializeField]
        private float quadSegmentSize = 1f;

        //[SerializeField]
        //private Transform debugTrans; // Only for debugging

        private Mesh mesh;
        private Vector3[] meshLocalVertices;
        private Vector3[] meshWorldVertices; // meshLocalVertices transformed to world space

        /// <summary>
        /// Water density used for buoyancy force calculations.
        /// </summary>
        public float Density
        {
            get
            {
                return this.density;
            }
        }

        /// <summary>
        /// Number of quad rows in the surface grid.
        /// </summary>
        public int Rows
        {
            get
            {
                return this.rows;
            }
        }

        /// <summary>
        /// Number of quad columns in the surface grid.
        /// </summary>
        public int Columns
        {
            get
            {
                return this.columns;
            }
        }

        /// <summary>
        /// Edge length of one grid quad, in local units.
        /// </summary>
        public float QuadSegmentSize
        {
            get
            {
                return this.quadSegmentSize;
            }
        }

        /// <summary>
        /// The surface mesh, lazily fetched from the attached MeshFilter.
        /// </summary>
        public Mesh Mesh
        {
            get
            {
                if (this.mesh == null)
                {
                    this.mesh = this.GetComponent<MeshFilter>().mesh;
                }

                return this.mesh;
            }
        }

        protected virtual void Awake()
        {
            this.CacheMeshVertices();
        }

        protected virtual void Update()
        {
            // Vertices are re-read every frame; presumably the surface mesh is animated
            // elsewhere (e.g. a wave script or shader writing vertices) -- TODO confirm.
            this.CacheMeshVertices();
        }

        protected virtual void OnDrawGizmosSelected()
        {
            // Outline the trigger volume in the scene view while selected.
            Gizmos.color = Color.green;
            Gizmos.matrix = this.transform.localToWorldMatrix;
            Gizmos.DrawWireCube(this.GetComponent<BoxCollider>().center, this.GetComponent<BoxCollider>().size);
        }

        protected virtual void OnDrawGizmos()
        {
            if (!Application.isPlaying)
            {
                // Edit mode: draw a translucent box so the water is visible without a mesh preview.
                Gizmos.color = Color.cyan - new Color(0f, 0f, 0f, 0.75f);
                Gizmos.matrix = this.transform.localToWorldMatrix;
                Gizmos.DrawCube(this.GetComponent<BoxCollider>().center - Vector3.up * 0.01f, this.GetComponent<BoxCollider>().size);

                Gizmos.color = Color.cyan - new Color(0f, 0f, 0f, 0.5f);
                Gizmos.DrawWireCube(this.GetComponent<BoxCollider>().center, this.GetComponent<BoxCollider>().size);

                Gizmos.matrix = Matrix4x4.identity;
            }
            else
            {
                // Draw sufrace normal
                //var vertices = this.meshWorldVertices;
                //var triangles = this.Mesh.triangles;
                //for (int i = 0; i < triangles.Length; i += 3)
                //{
                //    Gizmos.color = Color.white;
                //    Gizmos.DrawLine(vertices[triangles[i + 0]], vertices[triangles[i + 1]]);
                //    Gizmos.DrawLine(vertices[triangles[i + 1]], vertices[triangles[i + 2]]);
                //    Gizmos.DrawLine(vertices[triangles[i + 2]], vertices[triangles[i + 0]]);

                //    Vector3 center = MathfUtils.GetAveratePoint(vertices[triangles[i + 0]], vertices[triangles[i + 1]], vertices[triangles[i + 2]]);
                //    Vector3 normal = this.GetSurfaceNormal(center);
                //    Gizmos.color = Color.green;
                //    Gizmos.DrawLine(center, center + normal);
                //}

                // Draw mesh vertices
                //if (this.meshWorldVertices != null)
                //{
                //    for (int i = 0; i < this.meshWorldVertices.Length; i++)
                //    {
                //        DebugUtils.DrawPoint(this.meshWorldVertices[i], Color.red);
                //    }
                //}

                // Test GetSurroundingTrianglePolygon(Vector3 worldPoint);
                //if (debugTrans != null)
                //{
                //    Gizmos.color = Color.blue;
                //    Gizmos.DrawSphere(debugTrans.position, 0.1f);
                //    var point = debugTrans.position;

                //    var triangle = this.GetSurroundingTrianglePolygon(point);
                //    if (triangle != null)
                //    {
                //        Gizmos.color = Color.red;
                //        Gizmos.DrawLine(triangle[0], triangle[1]);
                //        Gizmos.DrawLine(triangle[1], triangle[2]);
                //        Gizmos.DrawLine(triangle[2], triangle[0]);
                //    }
                //}
            }
        }

        /// <summary>
        /// Returns the three world-space mesh vertices of the surface triangle containing
        /// <paramref name="worldPoint"/> (horizontally), or null when the point falls
        /// outside the grid.
        /// </summary>
        public Vector3[] GetSurroundingTrianglePolygon(Vector3 worldPoint)
        {
            // Work in local space so the containing grid cell can be found by division.
            Vector3 localPoint = this.transform.InverseTransformPoint(worldPoint);
            int x = Mathf.CeilToInt(localPoint.x / this.QuadSegmentSize);
            int z = Mathf.CeilToInt(localPoint.z / this.QuadSegmentSize);
            // Points outside the (Rows x Columns) grid of quads have no triangle.
            if (x <= 0 || z <= 0 || x >= (this.Columns + 1) || z >= (this.Rows + 1))
            {
                return null;
            }

            // Each quad splits into two triangles. Of the quad's two opposite corners,
            // take the one nearer to the query point; the remaining two vertices are the
            // corners shared by both triangles.
            Vector3[] trianglePolygon = new Vector3[3];
            if ((worldPoint - this.meshWorldVertices[this.GetIndex(z, x)]).sqrMagnitude <
                ((worldPoint - this.meshWorldVertices[this.GetIndex(z - 1, x - 1)]).sqrMagnitude))
            {
                trianglePolygon[0] = this.meshWorldVertices[this.GetIndex(z, x)];
            }
            else
            {
                trianglePolygon[0] = this.meshWorldVertices[this.GetIndex(z - 1, x - 1)];
            }

            trianglePolygon[1] = this.meshWorldVertices[this.GetIndex(z - 1, x)];
            trianglePolygon[2] = this.meshWorldVertices[this.GetIndex(z, x - 1)];

            return trianglePolygon;
        }

        /// <summary>
        /// Returns the <paramref name="pointsCount"/> surface vertices nearest to
        /// <paramref name="worldPoint"/>, measured by horizontal (XZ) distance.
        /// NOTE(review): no guard against pointsCount exceeding the vertex count; the
        /// heap Remove would fail in that case -- callers must keep pointsCount small.
        /// </summary>
        public Vector3[] GetClosestPointsOnWaterSurface(Vector3 worldPoint, int pointsCount)
        {
            MinHeap<Vector3> allPoints = new MinHeap<Vector3>(new Vector3HorizontalDistanceComparer(worldPoint));
            for (int i = 0; i < this.meshWorldVertices.Length; i++)
            {
                allPoints.Add(this.meshWorldVertices[i]);
            }

            Vector3[] closestPoints = new Vector3[pointsCount];
            for (int i = 0; i < closestPoints.Length; i++)
            {
                closestPoints[i] = allPoints.Remove();
            }

            return closestPoints;
        }

        /// <summary>
        /// Upward-facing normal of the surface triangle under <paramref name="worldPoint"/>,
        /// or the volume's up vector when the point is outside the grid.
        /// </summary>
        public Vector3 GetSurfaceNormal(Vector3 worldPoint)
        {
            Vector3[] meshPolygon = this.GetSurroundingTrianglePolygon(worldPoint);
            if (meshPolygon != null)
            {
                // Cross product of two triangle edges gives the plane normal; flip it
                // if needed so it always points upwards.
                Vector3 planeV1 = meshPolygon[1] - meshPolygon[0];
                Vector3 planeV2 = meshPolygon[2] - meshPolygon[0];
                Vector3 planeNormal = Vector3.Cross(planeV1, planeV2).normalized;
                if (planeNormal.y < 0f)
                {
                    planeNormal *= -1f;
                }

                return planeNormal;
            }

            return this.transform.up;
        }

        /// <summary>
        /// World-space Y of the water surface at the XZ position of
        /// <paramref name="worldPoint"/>; falls back to the transform's Y when the point
        /// is outside the grid.
        /// </summary>
        public float GetWaterLevel(Vector3 worldPoint)
        {
            Vector3[] meshPolygon = this.GetSurroundingTrianglePolygon(worldPoint);
            if (meshPolygon != null)
            {
                Vector3 planeV1 = meshPolygon[1] - meshPolygon[0];
                Vector3 planeV2 = meshPolygon[2] - meshPolygon[0];
                Vector3 planeNormal = Vector3.Cross(planeV1, planeV2).normalized;
                if (planeNormal.y < 0f)
                {
                    planeNormal *= -1f;
                }

                // Plane equation, solved for y at the query point's (x, z).
                float yOnWaterSurface = (-(worldPoint.x * planeNormal.x) - (worldPoint.z * planeNormal.z) + Vector3.Dot(meshPolygon[0], planeNormal)) / planeNormal.y;
                //Vector3 pointOnWaterSurface = new Vector3(point.x, yOnWaterSurface, point.z);
                //DebugUtils.DrawPoint(pointOnWaterSurface, Color.magenta);
                return yOnWaterSurface;
            }

            return this.transform.position.y;
        }

        /// <summary>
        /// True when <paramref name="worldPoint"/> lies below the water surface.
        /// </summary>
        public bool IsPointUnderWater(Vector3 worldPoint)
        {
            return this.GetWaterLevel(worldPoint) - worldPoint.y > 0f;
        }

        // Row-major vertex lookup: each row holds (Columns + 1) vertices.
        private int GetIndex(int row, int column)
        {
            return row * (this.Columns + 1) + column;
        }

        // Re-reads the mesh vertices and caches both local and world-space copies.
        private void CacheMeshVertices()
        {
            this.meshLocalVertices = this.Mesh.vertices;
            this.meshWorldVertices = this.ConvertPointsToWorldSpace(meshLocalVertices);
        }

        // Transforms each local-space point into world space using this transform.
        private Vector3[] ConvertPointsToWorldSpace(Vector3[] localPoints)
        {
            Vector3[] worldPoints = new Vector3[localPoints.Length];
            for (int i = 0; i < localPoints.Length; i++)
            {
                worldPoints[i] = this.transform.TransformPoint(localPoints[i]);
            }

            return worldPoints;
        }

        /// <summary>
        /// Orders points by their squared horizontal (XZ-plane) distance to a fixed point;
        /// the Y component is deliberately ignored.
        /// </summary>
        private class Vector3HorizontalDistanceComparer : IComparer<Vector3>
        {
            private Vector3 distanceToVector;

            public Vector3HorizontalDistanceComparer(Vector3 distanceTo)
            {
                this.distanceToVector = distanceTo;
            }

            public int Compare(Vector3 v1, Vector3 v2)
            {
                // Zero out Y so only the horizontal distance matters.
                v1.y = 0;
                v2.y = 0;
                float v1Distance = (v1 - distanceToVector).sqrMagnitude;
                float v2Distance = (v2 - distanceToVector).sqrMagnitude;

                if (v1Distance < v2Distance)
                {
                    return -1;
                }
                else if (v1Distance > v2Distance)
                {
                    return 1;
                }
                else
                {
                    return 0;
                }
            }
        }
    }
}
| 32.920792 | 166 | 0.506967 | [
"MIT"
] | AkihiroNunome/WaterBuoyancy | Assets/Water Buoyancy/Scripts/WaterVolume.cs | 9,977 | C# |
/**
* Copyright 2018 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System.Collections.Generic;
using Newtonsoft.Json;
namespace IBM.WatsonDeveloperCloud.NaturalLanguageUnderstanding.v1.Model
{
    /// <summary>
    /// The relations between entities found in the content.
    /// Deserialized from the Natural Language Understanding "relations" analysis output;
    /// properties use NullValueHandling.Ignore, so unset values are omitted when the
    /// object is serialized back to JSON.
    /// </summary>
    public class RelationsResult : BaseModel
    {
        /// <summary>
        /// Confidence score for the relation. Higher values indicate greater confidence.
        /// </summary>
        /// <value>
        /// Confidence score for the relation. Higher values indicate greater confidence.
        /// </value>
        [JsonProperty("score", NullValueHandling = NullValueHandling.Ignore)]
        public float? Score { get; set; }
        /// <summary>
        /// The sentence that contains the relation.
        /// </summary>
        /// <value>
        /// The sentence that contains the relation.
        /// </value>
        [JsonProperty("sentence", NullValueHandling = NullValueHandling.Ignore)]
        public string Sentence { get; set; }
        /// <summary>
        /// The type of the relation.
        /// </summary>
        /// <value>
        /// The type of the relation.
        /// </value>
        [JsonProperty("type", NullValueHandling = NullValueHandling.Ignore)]
        public string Type { get; set; }
        /// <summary>
        /// The extracted relation objects from the text.
        /// </summary>
        /// <value>
        /// The extracted relation objects from the text.
        /// </value>
        [JsonProperty("arguments", NullValueHandling = NullValueHandling.Ignore)]
        public List<RelationArgument> Arguments { get; set; }
    }
}
| 35.333333 | 89 | 0.637017 | [
"Apache-2.0"
] | johnpisg/dotnet-standard-sdk | src/IBM.WatsonDeveloperCloud.NaturalLanguageUnderstanding.v1/Model/RelationsResult.cs | 2,226 | C# |
using Architecture.CrossCutting;
using Architecture.CrossCutting.Enums;
using DotNetCore.Domain;
using System;
namespace Architecture.Domain
{
    /// <summary>
    /// Audit-log entry recording that a user performed an action of a given
    /// <see cref="LogType"/>; all values are fixed at construction time.
    /// </summary>
    public class UserLogEntity : Entity<long>
    {
        /// <summary>
        /// Creates a log entry for the given user and log type, stamped with the current
        /// UTC time. The entity id is passed as default (presumably assigned later by the
        /// database on insert -- confirm against the persistence configuration).
        /// </summary>
        public UserLogEntity(long userId, LogType logType) : base(default)
        {
            UserId = userId;
            LogType = logType;
            DateTime = DateTime.UtcNow;
        }

        /// <summary>Id of the user this entry belongs to.</summary>
        public long UserId { get; }

        /// <summary>Kind of event being logged.</summary>
        public LogType LogType { get; }

        /// <summary>UTC timestamp captured when the entry was constructed.</summary>
        public DateTime DateTime { get; }

        // Navigation property; the private setter suggests it is populated by the ORM
        // (e.g. EF Core) rather than by application code -- TODO confirm.
        public UserEntity User { get; private set; }
    }
}
| 22.076923 | 74 | 0.623693 | [
"MIT"
] | mramra3004/Architecture | source/Domain/UserLogEntity.cs | 574 | C# |
using NUnit.Framework;
// NOTE(review): this fixture appears to be machine-generated (systematic name, 40
// identical empty tests) -- presumably used to stress NUnit adapter discovery/execution
// with a large number of trivially passing tests; confirm against the generator.
// The empty bodies are intentional: each test exists only to be discovered and to pass.
public class GeneratedTest217
{
    [Test] public void Test1() { }
    [Test] public void Test2() { }
    [Test] public void Test3() { }
    [Test] public void Test4() { }
    [Test] public void Test5() { }
    [Test] public void Test6() { }
    [Test] public void Test7() { }
    [Test] public void Test8() { }
    [Test] public void Test9() { }
    [Test] public void Test10() { }
    [Test] public void Test11() { }
    [Test] public void Test12() { }
    [Test] public void Test13() { }
    [Test] public void Test14() { }
    [Test] public void Test15() { }
    [Test] public void Test16() { }
    [Test] public void Test17() { }
    [Test] public void Test18() { }
    [Test] public void Test19() { }
    [Test] public void Test20() { }
    [Test] public void Test21() { }
    [Test] public void Test22() { }
    [Test] public void Test23() { }
    [Test] public void Test24() { }
    [Test] public void Test25() { }
    [Test] public void Test26() { }
    [Test] public void Test27() { }
    [Test] public void Test28() { }
    [Test] public void Test29() { }
    [Test] public void Test30() { }
    [Test] public void Test31() { }
    [Test] public void Test32() { }
    [Test] public void Test33() { }
    [Test] public void Test34() { }
    [Test] public void Test35() { }
    [Test] public void Test36() { }
    [Test] public void Test37() { }
    [Test] public void Test38() { }
    [Test] public void Test39() { }
    [Test] public void Test40() { }
}
| 33.066667 | 35 | 0.5625 | [
"MIT"
] | aolszowka/nunit3-vs-adapter.issues | Issue497/GeneratedTest217.cs | 1,488 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.RegularExpressions;
using Microsoft.CodeAnalysis.EmbeddedLanguages.Common;
using Microsoft.CodeAnalysis.EmbeddedLanguages.VirtualChars;
using Microsoft.CodeAnalysis.PooledObjects;
using Microsoft.CodeAnalysis.Text;
namespace Microsoft.CodeAnalysis.EmbeddedLanguages.RegularExpressions
{
using static EmbeddedSyntaxHelpers;
using static RegexHelpers;
using RegexNodeOrToken = EmbeddedSyntaxNodeOrToken<RegexKind, RegexNode>;
using RegexToken = EmbeddedSyntaxToken<RegexKind>;
using RegexTrivia = EmbeddedSyntaxTrivia<RegexKind>;
/// <summary>
/// Produces a <see cref="RegexTree"/> from a sequence of <see cref="VirtualChar"/> characters.
///
/// Importantly, this parser attempts to replicate diagnostics with almost the exact same text
/// as the native .NET regex parser. This is important so that users get an understandable
/// experience where it appears to them that this is all one cohesive system and that the IDE
/// will let them discover and fix the same issues they would encounter when previously trying
/// to just compile and execute these regexes.
/// </summary>
/// <remarks>
/// Invariants we try to maintain (and should consider a bug if we do not): l 1. If the .NET
/// regex parser does not report an error for a given pattern, we should not either. it would be
/// very bad if we told the user there was something wrong with there pattern when there really
/// wasn't.
///
/// 2. If the .NET regex parser does report an error for a given pattern, we should either not
/// report an error (not recommended) or report the same error at an appropriate location in the
/// pattern. Not reporting the error can be confusing as the user will think their pattern is
/// ok, when it really is not. However, it can be acceptable to do this as it's not telling
/// them that something is actually wrong, and it may be too difficult to find and report the
/// same error. Note: there is only one time we do this in this parser (see the deviation
/// documented in <see cref="ParsePossibleEcmascriptBackreferenceEscape"/>).
///
/// Note1: "report the same error" means that we will attempt to report the error using the same
/// text the .NET regex parser uses for its error messages. This is so that the user is not
/// confused when they use the IDE vs running the regex by getting different messages for the
/// same issue.
///
/// Note2: the above invariants make life difficult at times. This happens due to the fact that
/// the .NET parser is multi-pass. Meaning it does a first scan (which may report errors), then
/// does the full parse. This means that it might report an error in a later location during
/// the initial scan than it would during the parse. We replicate that behavior to follow the
/// second invariant.
///
/// Note3: It would be nice if we could check these invariants at runtime, so we could control
/// our behavior by the behavior of the real .NET regex engine. For example, if the .NET regex
/// engine did not report any issues, we could suppress any diagnostics we generated and we
/// could log an NFW to record which pattern we deviated on so we could fix the issue for a
/// future release. However, we cannot do this as the .NET regex engine has no guarantees about
/// its performance characteristics. For example, certain regex patterns might end up causing
/// that engine to consume unbounded amounts of CPU and memory. This is because the .NET regex
/// engine is not just a parser, but something that builds an actual recognizer using techniques
/// that are not necessarily bounded. As such, while we test ourselves around it during our
/// tests, we cannot do the same at runtime as part of the IDE.
///
/// This parser was based off the corefx RegexParser based at:
/// https://github.com/dotnet/corefx/blob/f759243d724f462da0bcef54e86588f8a55352c6/src/System.Text.RegularExpressions/src/System/Text/RegularExpressions/RegexParser.cs#L1
///
/// Note4: The .NET parser itself changes over time (for example to fix behavior that even it
/// thinks is buggy). When this happens, we have to make a choice as to which behavior to
/// follow. In general, the overall principle is that we should follow the more lenient
/// behavior. If we end up taking the more strict interpretation we risk giving people an error
/// during design time that they would not get at runtime. It's far worse to have that than to
/// not report an error, even though one might happen later.
/// </remarks>
internal partial struct RegexParser
{
private readonly ImmutableDictionary<string, TextSpan> _captureNamesToSpan;
private readonly ImmutableDictionary<int, TextSpan> _captureNumbersToSpan;
private RegexLexer _lexer;
private RegexOptions _options;
private RegexToken _currentToken;
private int _recursionDepth;
        /// <summary>
        /// Initializes the parser over <paramref name="text"/> and primes the lexer by
        /// producing the first token.
        /// </summary>
        /// <param name="text">The characters of the regex pattern being parsed.</param>
        /// <param name="options">Options affecting lexing/parsing (e.g. IgnorePatternWhitespace).</param>
        /// <param name="captureNamesToSpan">Named capture groups discovered by a prior parse pass
        /// (empty on the first pass; see <see cref="TryParse"/>).</param>
        /// <param name="captureNumbersToSpan">Numbered capture groups discovered by a prior parse pass.</param>
        private RegexParser(
            VirtualCharSequence text, RegexOptions options,
            ImmutableDictionary<string, TextSpan> captureNamesToSpan,
            ImmutableDictionary<int, TextSpan> captureNumbersToSpan) : this()
        {
            _lexer = new RegexLexer(text);
            _options = options;
            _captureNamesToSpan = captureNamesToSpan;
            _captureNumbersToSpan = captureNumbersToSpan;

            // Get the first token. It is allowed to have trivia on it.
            ConsumeCurrentToken(allowTrivia: true);
        }
        /// <summary>
        /// Returns the latest token the lexer has produced, and then asks the lexer to
        /// produce the next token after that.
        /// </summary>
        /// <param name="allowTrivia">Whether or not trivia is allowed on the next token
        /// produced. In the .NET parser trivia is only allowed on a few constructs,
        /// and our parser mimics that behavior. Note that even if trivia is allowed,
        /// the type of trivia that can be scanned depends on the current RegexOptions.
        /// For example, if <see cref="RegexOptions.IgnorePatternWhitespace"/> is currently
        /// enabled, then '#...' comments are allowed. Otherwise, only '(?#...)' comments
        /// are allowed.</param>
        /// <returns>The token that was current before the lexer advanced.</returns>
        private RegexToken ConsumeCurrentToken(bool allowTrivia)
        {
            var previous = _currentToken;
            _currentToken = _lexer.ScanNextToken(allowTrivia, _options);
            return previous;
        }
        /// <summary>
        /// Given an input text, and set of options, parses out a fully representative syntax tree
        /// and list of diagnostics. Parsing should always succeed, except in the case of the stack
        /// overflowing.
        /// </summary>
        /// <param name="text">The pattern to parse; a default sequence yields a null result.</param>
        /// <param name="options">Regex options influencing lexing and parsing.</param>
        /// <returns>The parsed tree, or null when <paramref name="text"/> is default or the
        /// pattern nests deeply enough to exhaust the execution stack.</returns>
        public static RegexTree TryParse(VirtualCharSequence text, RegexOptions options)
        {
            if (text.IsDefault)
            {
                return null;
            }

            try
            {
                // Parse the tree once, to figure out the capture groups. These are needed
                // to then parse the tree again, as the captures will affect how we interpret
                // certain things (i.e. escape references) and what errors will be reported.
                //
                // This is necessary as .NET regexes allow references to *future* captures.
                // As such, we don't know when we're seeing a reference if it's to something
                // that exists or not.
                var tree1 = new RegexParser(text, options,
                    ImmutableDictionary<string, TextSpan>.Empty,
                    ImmutableDictionary<int, TextSpan>.Empty).ParseTree();

                var (captureNames, captureNumbers) = CaptureInfoAnalyzer.Analyze(text, tree1.Root, options);

                var tree2 = new RegexParser(
                    text, options, captureNames, captureNumbers).ParseTree();
                return tree2;
            }
            catch (InsufficientExecutionStackException)
            {
                return null;
            }
        }
// Parses the entire pattern into a RegexTree, then walks the resulting node
// tree to collect all diagnostics attached to its tokens/trivia.
private RegexTree ParseTree()
{
    // Most callers to ParseAlternatingSequences are from group constructs. As those
    // constructs will have already consumed the open paren, they don't want this sub-call
    // to consume through close-paren tokens as they want that token for themselves.
    // However, we're the topmost call and have not consumed an open paren. And, we want
    // this call to consume all the way to the end, eating up excess close-paren tokens that
    // are encountered.
    var expression = this.ParseAlternatingSequences(consumeCloseParen: true);
    // The lexer must be fully consumed at this point.
    Debug.Assert(_lexer.Position == _lexer.Text.Length);
    Debug.Assert(_currentToken.Kind == RegexKind.EndOfFile);

    var root = new RegexCompilationUnit(expression, _currentToken);

    var seenDiagnostics = new HashSet<EmbeddedDiagnostic>();
    var diagnostics = ArrayBuilder<EmbeddedDiagnostic>.GetInstance();
    CollectDiagnostics(root, seenDiagnostics, diagnostics);

    return new RegexTree(
        _lexer.Text, root, diagnostics.ToImmutableAndFree(),
        _captureNamesToSpan, _captureNumbersToSpan);
}
/// <summary>
/// Recursively walks the subtree under <paramref name="node"/>, gathering the
/// diagnostics attached to each token and its leading trivia, de-duplicating
/// via <paramref name="seenDiagnostics"/>.
/// </summary>
private static void CollectDiagnostics(
    RegexNode node, HashSet<EmbeddedDiagnostic> seenDiagnostics, ArrayBuilder<EmbeddedDiagnostic> diagnostics)
{
    foreach (var child in node)
    {
        if (child.IsNode)
        {
            CollectDiagnostics(child.Node, seenDiagnostics, diagnostics);
        }
        else
        {
            var token = child.Token;
            foreach (var trivia in token.LeadingTrivia)
            {
                AddUniqueDiagnostics(seenDiagnostics, trivia.Diagnostics, diagnostics);
            }

            // We never place trailing trivia on regex tokens.
            Debug.Assert(token.TrailingTrivia.IsEmpty);
            AddUniqueDiagnostics(seenDiagnostics, token.Diagnostics, diagnostics);
        }
    }
}
/// <summary>
/// It's very common to have duplicated diagnostics. For example, consider "((". This will
/// have two 'missing )' diagnostics, both at the end. Reporting both isn't helpful, so we
/// filter duplicates out here.
/// </summary>
private static void AddUniqueDiagnostics(
    HashSet<EmbeddedDiagnostic> seenDiagnostics, ImmutableArray<EmbeddedDiagnostic> from, ArrayBuilder<EmbeddedDiagnostic> to)
{
    foreach (var diagnostic in from)
    {
        // HashSet.Add returns false for a diagnostic we have already recorded.
        if (!seenDiagnostics.Add(diagnostic))
            continue;

        to.Add(diagnostic);
    }
}
// Recursion guard wrapper around the real alternation parser. Groups and
// alternations recurse through this method, so deeply nested patterns are
// stopped here: the guard throws InsufficientExecutionStackException, which
// TryParse catches and converts into a null result.
private RegexExpressionNode ParseAlternatingSequences(bool consumeCloseParen)
{
    try
    {
        _recursionDepth++;
        StackGuard.EnsureSufficientExecutionStack(_recursionDepth);
        return ParseAlternatingSequencesWorker(consumeCloseParen);
    }
    finally
    {
        _recursionDepth--;
    }
}
/// <summary>
/// Parses out code of the form: ...|...|...
/// This is the type of code you have at the top level of a regex, or inside any grouping
/// construct. Note that sequences can be empty in .NET regex. i.e. the following is legal:
///
/// ...||...
///
/// An empty sequence just means "match at every position in the test string".
/// </summary>
private RegexExpressionNode ParseAlternatingSequencesWorker(bool consumeCloseParen)
{
    RegexExpressionNode current = ParseSequence(consumeCloseParen);

    // Builds left-nested alternations: a|b|c becomes ((a|b)|c).
    while (_currentToken.Kind == RegexKind.BarToken)
    {
        // Trivia allowed between the | and the next token.
        current = new RegexAlternationNode(
            current, ConsumeCurrentToken(allowTrivia: true), ParseSequence(consumeCloseParen));
    }

    return current;
}
// Parses one branch of an alternation: a run of primary expressions (with
// their quantifiers) ending at '|', end-of-pattern, or possibly ')'.
private RegexSequenceNode ParseSequence(bool consumeCloseParen)
{
    using var _ = ArrayBuilder<RegexExpressionNode>.GetInstance(out var builder);
    while (ShouldConsumeSequenceElement(consumeCloseParen))
    {
        // The previous element is passed along so a dangling quantifier (e.g.
        // '*' with nothing before it) can produce a sensible diagnostic.
        var last = builder.Count == 0 ? null : builder.Last();
        builder.Add(ParsePrimaryExpressionAndQuantifiers(last));
    }

    // We will commonly get tons of text nodes in a row. For example, the
    // regex `abc` will be three text nodes in a row. To help save on memory
    // try to merge that into one single text node.
    var sequence = ArrayBuilder<RegexExpressionNode>.GetInstance();
    MergeTextNodes(builder, sequence);

    return new RegexSequenceNode(sequence.ToImmutableAndFree());
}
/// <summary>
/// Collapses runs of adjacent, mergeable text nodes in <paramref name="list"/>
/// into single text nodes, appending results to <paramref name="final"/>.
/// </summary>
private static void MergeTextNodes(ArrayBuilder<RegexExpressionNode> list, ArrayBuilder<RegexExpressionNode> final)
{
    // Iterate all the nodes in the sequence we have, adding them directly to
    // `final` if they are not text nodes. If they are text nodes, we attempt
    // to keep merging them with any following text nodes as long as we can.
    for (var index = 0; index < list.Count;)
    {
        var current = list[index];
        if (current.Kind != RegexKind.Text)
        {
            // Not a text node. Just add as-is, and move to the next node.
            index++;
            final.Add(current);
            continue;
        }

        // Got a text node. Try to combine it with all following nodes.
        index = MergeAndAddAdjacentTextNodes(list, final, index);
    }

    return;

    // local functions

    static int MergeAndAddAdjacentTextNodes(
        ArrayBuilder<RegexExpressionNode> list,
        ArrayBuilder<RegexExpressionNode> final,
        int index)
    {
        var startIndex = index;
        var startTextNode = (RegexTextNode)list[startIndex];

        // Keep walking forward as long as we hit text nodes and we can
        // merge that text node with the previous text node.
        index++;
        var lastTextNode = startTextNode;
        for (; index < list.Count; index++)
        {
            var currentNode = list[index];
            if (!CanMerge(lastTextNode, currentNode))
            {
                // Hit something we couldn't merge with our last text node.
                // Break out and merge what we have so far. 'index' will
                // be pointing at the right node for our caller.
                break;
            }

            lastTextNode = (RegexTextNode)currentNode;
        }

        // If didn't have multiple text nodes in a row, just return the
        // starting node. Otherwise, create one text node that has a token
        // that spans from the start of the first node to the end of the last node.
        final.Add(startTextNode == lastTextNode
            ? startTextNode
            : new RegexTextNode(CreateToken(
                RegexKind.TextToken, startTextNode.TextToken.LeadingTrivia,
                VirtualCharSequence.FromBounds(
                    startTextNode.TextToken.VirtualChars,
                    lastTextNode.TextToken.VirtualChars))));

        return index;
    }

    // Local functions

    static bool CanMerge(RegexTextNode lastNode, RegexExpressionNode next)
    {
        if (next.Kind == RegexKind.Text)
        {
            var lastTextToken = lastNode.TextToken;
            var nextTextToken = ((RegexTextNode)next).TextToken;

            // Can't merge if the next text node has leading trivia. Also, conservatively
            // don't allow merging if there are diagnostics or values for these tokens.
            // We might be able to support that, but it's easier to not do anything that
            // might break an expectation someone might have downstream.
            if (lastTextToken.Diagnostics.Length == 0 &&
                nextTextToken.Diagnostics.Length == 0 &&
                lastTextToken.Value == null &&
                nextTextToken.Value == null &&
                nextTextToken.LeadingTrivia.Length == 0)
            {
                lastTextToken.VirtualChars.AssertAdjacentTo(nextTextToken.VirtualChars);
                return true;
            }
        }

        return false;
    }
}
// Determines whether the current token begins another element of the sequence
// being parsed, or terminates it.
private bool ShouldConsumeSequenceElement(bool consumeCloseParen)
    => _currentToken.Kind switch
    {
        // The sequence always ends at the end of the pattern.
        RegexKind.EndOfFile => false,
        // A '|' ends this branch and starts the next alternative.
        RegexKind.BarToken => false,
        // A ')' ends the sequence unless the caller asked us to swallow
        // excess close parens (only the top-level call does).
        RegexKind.CloseParenToken => consumeCloseParen,
        _ => true,
    };
// Parses a single primary expression and then attaches any quantifier
// ('*', '+', '?', '{n[,[m]]}') that immediately follows it.
private RegexExpressionNode ParsePrimaryExpressionAndQuantifiers(RegexExpressionNode lastExpression)
{
    var current = ParsePrimaryExpression(lastExpression);
    if (current.Kind == RegexKind.SimpleOptionsGrouping)
    {
        // Simple options (i.e. "(?i-x)" can't have quantifiers attached to them).
        return current;
    }

    return _currentToken.Kind switch
    {
        RegexKind.AsteriskToken => ParseZeroOrMoreQuantifier(current),
        RegexKind.PlusToken => ParseOneOrMoreQuantifier(current),
        RegexKind.QuestionToken => ParseZeroOrOneQuantifier(current),
        // '{' only becomes a quantifier when it exactly matches {n[,[m]]}.
        RegexKind.OpenBraceToken => TryParseNumericQuantifier(current, _currentToken),
        _ => current,
    };
}
// Wraps the given quantifier in a lazy-quantifier node if it is immediately
// followed by a '?' (e.g. "a*?"); otherwise returns it unchanged.
private RegexExpressionNode TryParseLazyQuantifier(RegexQuantifierNode quantifier)
{
    if (_currentToken.Kind == RegexKind.QuestionToken)
    {
        // Whitespace allowed after the question and the next sequence element.
        var questionToken = ConsumeCurrentToken(allowTrivia: true);
        return new RegexLazyQuantifierNode(quantifier, questionToken);
    }

    return quantifier;
}
// The three simple quantifiers share one shape: consume the quantifier token
// (trivia is allowed between it and a possible following '?' or the next
// sequence item), then check for the lazy-quantifier suffix.

private RegexExpressionNode ParseZeroOrMoreQuantifier(RegexPrimaryExpressionNode current)
    => TryParseLazyQuantifier(new RegexZeroOrMoreQuantifierNode(current, ConsumeCurrentToken(allowTrivia: true)));

private RegexExpressionNode ParseOneOrMoreQuantifier(RegexPrimaryExpressionNode current)
    => TryParseLazyQuantifier(new RegexOneOrMoreQuantifierNode(current, ConsumeCurrentToken(allowTrivia: true)));

private RegexExpressionNode ParseZeroOrOneQuantifier(RegexPrimaryExpressionNode current)
    => TryParseLazyQuantifier(new RegexZeroOrOneQuantifierNode(current, ConsumeCurrentToken(allowTrivia: true)));
// Attempts to parse a '{' as a numeric quantifier ({n}, {n,}, {n,m}). If the
// text after the brace doesn't match that exact shape, the lexer is rewound
// and the unquantified expression is returned unchanged; the '{' will then be
// reparsed as plain text.
private RegexExpressionNode TryParseNumericQuantifier(
    RegexPrimaryExpressionNode expression, RegexToken openBraceToken)
{
    var start = _lexer.Position;
    if (!TryParseNumericQuantifierParts(
        out var firstNumberToken,
        out var commaToken,
        out var secondNumberToken,
        out var closeBraceToken))
    {
        // Not a quantifier: restore both the current token and lexer position.
        _currentToken = openBraceToken;
        _lexer.Position = start;
        return expression;
    }

    var quantifier = CreateQuantifier(
        expression, openBraceToken, firstNumberToken, commaToken,
        secondNumberToken, closeBraceToken);

    // A trailing '?' makes the quantifier lazy.
    return TryParseLazyQuantifier(quantifier);
}
// Builds the appropriate quantifier node for the {n}, {n,} or {n,m} forms,
// depending on which optional tokens were present.
private static RegexQuantifierNode CreateQuantifier(
    RegexPrimaryExpressionNode expression,
    RegexToken openBraceToken, RegexToken firstNumberToken, RegexToken? commaToken,
    RegexToken? secondNumberToken, RegexToken closeBraceToken)
{
    // {n} — exact repetition count.
    if (commaToken == null)
        return new RegexExactNumericQuantifierNode(expression, openBraceToken, firstNumberToken, closeBraceToken);

    // {n,} — open-ended range.
    if (secondNumberToken == null)
        return new RegexOpenNumericRangeQuantifierNode(expression, openBraceToken, firstNumberToken, commaToken.Value, closeBraceToken);

    // {n,m} — closed range.
    return new RegexClosedNumericRangeQuantifierNode(expression, openBraceToken, firstNumberToken, commaToken.Value, secondNumberToken.Value, closeBraceToken);
}
// Attempts to parse the "n[,[m]]}" remainder of a numeric quantifier, with the
// lexer positioned just after the '{'. Returns false when the text does not
// match that exact shape (no whitespace is allowed anywhere inside).
private bool TryParseNumericQuantifierParts(
    out RegexToken firstNumberToken, out RegexToken? commaToken,
    out RegexToken? secondNumberToken, out RegexToken closeBraceToken)
{
    firstNumberToken = default;
    commaToken = null;
    secondNumberToken = null;
    closeBraceToken = default;

    var firstNumber = _lexer.TryScanNumber();
    if (firstNumber == null)
    {
        return false;
    }

    firstNumberToken = firstNumber.Value;

    // Nothing allowed between {x,n}
    ConsumeCurrentToken(allowTrivia: false);
    if (_currentToken.Kind == RegexKind.CommaToken)
    {
        commaToken = _currentToken;

        var start = _lexer.Position;
        secondNumberToken = _lexer.TryScanNumber();
        if (secondNumberToken == null)
        {
            // Nothing allowed between {x,n}
            ResetToPositionAndConsumeCurrentToken(start, allowTrivia: false);
        }
        else
        {
            var secondNumberTokenLocal = secondNumberToken.Value;

            // Nothing allowed between {x,n}
            ConsumeCurrentToken(allowTrivia: false);

            // {n,m} with m < n is illegal; attach the diagnostic to the second number.
            var val1 = (int)firstNumberToken.Value;
            var val2 = (int)secondNumberTokenLocal.Value;
            if (val2 < val1)
            {
                secondNumberTokenLocal = secondNumberTokenLocal.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                    FeaturesResources.Illegal_x_y_with_x_less_than_y,
                    secondNumberTokenLocal.GetSpan()));
                secondNumberToken = secondNumberTokenLocal;
            }
        }
    }

    if (_currentToken.Kind != RegexKind.CloseBraceToken)
    {
        return false;
    }

    // Whitespace allowed between the quantifier and the possible following ? or next sequence item.
    closeBraceToken = ConsumeCurrentToken(allowTrivia: true);
    return true;
}
// Rewinds the lexer to 'position' and re-scans the current token from there.
private void ResetToPositionAndConsumeCurrentToken(int position, bool allowTrivia)
{
    _lexer.Position = position;
    ConsumeCurrentToken(allowTrivia);
}
// Dispatches on the current token to parse a single primary (unquantified)
// expression. 'lastExpression' is only used to produce a diagnostic when a
// quantifier appears with nothing to quantify.
private RegexPrimaryExpressionNode ParsePrimaryExpression(RegexExpressionNode lastExpression)
    => _currentToken.Kind switch
    {
        RegexKind.DotToken => ParseWildcard(),
        RegexKind.CaretToken => ParseStartAnchor(),
        RegexKind.DollarToken => ParseEndAnchor(),
        RegexKind.BackslashToken => ParseEscape(_currentToken, allowTriviaAfterEnd: true),
        RegexKind.OpenBracketToken => ParseCharacterClass(),
        RegexKind.OpenParenToken => ParseGrouping(),
        // A ')' with no matching '(' — reported and degraded to text.
        RegexKind.CloseParenToken => ParseUnexpectedCloseParenToken(),
        // '{' may be a numeric quantifier with nothing to quantify.
        RegexKind.OpenBraceToken => ParsePossibleUnexpectedNumericQuantifier(lastExpression),
        // Quantifiers appearing where an expression was expected.
        RegexKind.AsteriskToken => ParseUnexpectedQuantifier(lastExpression),
        RegexKind.PlusToken => ParseUnexpectedQuantifier(lastExpression),
        RegexKind.QuestionToken => ParseUnexpectedQuantifier(lastExpression),
        _ => ParseText(),
    };
// Handles a '{' seen where a primary expression was expected.
private RegexPrimaryExpressionNode ParsePossibleUnexpectedNumericQuantifier(RegexExpressionNode lastExpression)
{
    // Native parser looks for something like {0,1} in a top level sequence and reports
    // an explicit error that that's not allowed. However, something like {0, 1} is fine
    // and is treated as six textual tokens.
    var openBraceToken = _currentToken.With(kind: RegexKind.TextToken);
    var start = _lexer.Position;

    if (TryParseNumericQuantifierParts(
        out _, out _, out _, out _))
    {
        // Report that a numeric quantifier isn't allowed here.
        CheckQuantifierExpression(lastExpression, ref openBraceToken);
    }

    // Started with { but wasn't a numeric quantifier. This is totally legal and is just
    // a textual sequence. Restart, scanning this token as a normal sequence element.
    ResetToPositionAndConsumeCurrentToken(start, allowTrivia: true);
    return new RegexTextNode(openBraceToken);
}
// Handles a ')' with no matching '(': reports 'too many close parens' and
// degrades the token to plain text.
private RegexPrimaryExpressionNode ParseUnexpectedCloseParenToken()
{
    var token = _currentToken.With(kind: RegexKind.TextToken).AddDiagnosticIfNone(
        new EmbeddedDiagnostic(FeaturesResources.Too_many_close_parens, _currentToken.GetSpan()));

    // Technically, since an error occurred, we can do whatever we want here. However,
    // the spirit of the native parser is that top level sequence elements are allowed
    // to have trivia. So that's the behavior we mimic.
    ConsumeCurrentToken(allowTrivia: true);
    return new RegexTextNode(token);
}
// Consumes one ordinary character as a text node.
private RegexPrimaryExpressionNode ParseText()
{
    // Trivia is allowed between this piece of text and the next sequence element.
    var textToken = ConsumeCurrentToken(allowTrivia: true);
    Debug.Assert(textToken.Value == null);

    return new RegexTextNode(textToken.With(kind: RegexKind.TextToken));
}
// Anchors ('^', '$') and the wildcard ('.') are single-token constructs.
// Trivia is allowed between each of them and the next sequence element.

private RegexPrimaryExpressionNode ParseEndAnchor()
    => new RegexAnchorNode(RegexKind.EndAnchor, ConsumeCurrentToken(allowTrivia: true));

private RegexPrimaryExpressionNode ParseStartAnchor()
    => new RegexAnchorNode(RegexKind.StartAnchor, ConsumeCurrentToken(allowTrivia: true));

private RegexPrimaryExpressionNode ParseWildcard()
    => new RegexWildcardNode(ConsumeCurrentToken(allowTrivia: true));
// Parses a '('-initiated grouping construct.
private RegexGroupingNode ParseGrouping()
{
    // Remember the position after the '(' so we can rewind if this turns out
    // to be a plain group.
    var start = _lexer.Position;

    // Check what immediately follows the (. If we have (? it is processed specially.
    // However, we do not treat (? the same as ( ?
    var openParenToken = ConsumeCurrentToken(allowTrivia: false);

    switch (_currentToken.Kind)
    {
        case RegexKind.QuestionToken:
            return ParseGroupQuestion(openParenToken, _currentToken);

        default:
            // Wasn't (? just parse this as a normal group.
            _lexer.Position = start;
            return ParseSimpleGroup(openParenToken);
    }
}
// Consumes the ')' that terminates a grouping, or synthesizes a missing one
// (with a 'not enough close parens' diagnostic) if it isn't there.
private RegexToken ParseGroupingCloseParen()
{
    if (_currentToken.Kind == RegexKind.CloseParenToken)
    {
        // Grouping completed normally. Allow trivia between it and the next sequence element.
        return ConsumeCurrentToken(allowTrivia: true);
    }

    return CreateMissingToken(RegexKind.CloseParenToken).AddDiagnosticIfNone(
        new EmbeddedDiagnostic(FeaturesResources.Not_enough_close_parens, GetTokenStartPositionSpan(_currentToken)));
}
// Parses a plain (non-"(?") group: "(...)".
private RegexSimpleGroupingNode ParseSimpleGroup(RegexToken openParenToken)
{
    // Parse the body first; the close paren is consumed afterwards (same order
    // as the left-to-right argument evaluation the original relied on).
    var expression = ParseGroupingEmbeddedExpression(_options);
    return new RegexSimpleGroupingNode(openParenToken, expression, ParseGroupingCloseParen());
}
// Parses the expression inside a grouping, with the given options in effect
// for the body only.
private RegexExpressionNode ParseGroupingEmbeddedExpression(RegexOptions embeddedOptions)
{
    // Save and restore options when we go into, and pop out of a group node.
    var currentOptions = _options;
    _options = embeddedOptions;

    // We're parsing the embedded sequence inside the current group. As this is a sequence
    // we want to allow trivia between the current token we're on, and the first token
    // of the embedded sequence.
    ConsumeCurrentToken(allowTrivia: true);

    // When parsing out the sequence don't grab the close paren, that will be for our caller
    // to get.
    var expression = this.ParseAlternatingSequences(consumeCloseParen: false);
    _options = currentOptions;
    return expression;
}
// Span helpers. The end-of-file token has no text of its own, so both helpers
// fall back to a zero-width span at an appropriate position.

private TextSpan GetTokenSpanIncludingEOF(RegexToken token)
{
    if (token.Kind == RegexKind.EndOfFile)
        return GetTokenStartPositionSpan(token);

    return token.GetSpan();
}

private TextSpan GetTokenStartPositionSpan(RegexToken token)
    => token.Kind == RegexKind.EndOfFile
        ? new TextSpan(_lexer.Text.Last().Span.End, 0)
        : new TextSpan(token.VirtualChars[0].Span.Start, 0);
// Parses the remainder of a grouping construct that started with "(?". The
// character immediately after the '?' (no intervening space) selects which of
// the many (?...) forms this is.
private RegexGroupingNode ParseGroupQuestion(RegexToken openParenToken, RegexToken questionToken)
{
    var optionsToken = _lexer.TryScanOptions();
    if (optionsToken != null)
    {
        return ParseOptionsGroupingNode(openParenToken, questionToken, optionsToken.Value);
    }

    var afterQuestionPos = _lexer.Position;

    // Lots of possible options when we see (?. Look at the immediately following character
    // (without any allowed spaces) to decide what to parse out next.
    ConsumeCurrentToken(allowTrivia: false);
    switch (_currentToken.Kind)
    {
        case RegexKind.LessThanToken:
            // (?<=...) or (?<!...) or (?<...>...) or (?<...-...>...)
            return ParseLookbehindOrNamedCaptureOrBalancingGrouping(openParenToken, questionToken);

        case RegexKind.SingleQuoteToken:
            // (?'...'...) or (?'...-...'...)
            return ParseNamedCaptureOrBalancingGrouping(
                openParenToken, questionToken, _currentToken);

        case RegexKind.OpenParenToken:
            // alternation construct (?(...) | )
            return ParseConditionalGrouping(openParenToken, questionToken);

        case RegexKind.ColonToken:
            return ParseNonCapturingGroupingNode(openParenToken, questionToken);

        case RegexKind.EqualsToken:
            return ParsePositiveLookaheadGrouping(openParenToken, questionToken);

        case RegexKind.ExclamationToken:
            return ParseNegativeLookaheadGrouping(openParenToken, questionToken);

        case RegexKind.GreaterThanToken:
            return ParseAtomicGrouping(openParenToken, questionToken);

        default:
            if (_currentToken.Kind != RegexKind.CloseParenToken)
            {
                // Native parser reports "Unrecognized grouping construct", *except* for (?)
                openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                    FeaturesResources.Unrecognized_grouping_construct,
                    openParenToken.GetSpan()));
            }

            break;
    }

    // (?)
    // Parse this as a normal group. The question will immediately error as it's a
    // quantifier not following anything.
    _lexer.Position = afterQuestionPos - 1;
    return ParseSimpleGroup(openParenToken);
}
// Parses a conditional alternation construct "(?(condition)yes|no)", where the
// condition is either a capture reference (by number or name) or an embedded
// expression.
private RegexConditionalGroupingNode ParseConditionalGrouping(RegexToken openParenToken, RegexToken questionToken)
{
    var innerOpenParenToken = _currentToken;
    // Remember this position so we can rewind when the capture-reference
    // interpretation doesn't pan out.
    var afterInnerOpenParen = _lexer.Position;

    var captureToken = _lexer.TryScanNumberOrCaptureName();
    if (captureToken == null)
    {
        return ParseConditionalExpressionGrouping(openParenToken, questionToken);
    }

    var capture = captureToken.Value;

    RegexToken innerCloseParenToken;
    if (capture.Kind == RegexKind.NumberToken)
    {
        // If it's a numeric group, it has to be immediately followed by a ) and the
        // numeric reference has to exist.
        //
        // That means that (?(4 ) is not treated as an embedded expression but as an
        // error. This is different from (?(a ) which will be treated as an embedded
        // expression, and different from (?(a) will be treated as an embedded
        // expression or capture group depending on if 'a' is a existing capture name.

        ConsumeCurrentToken(allowTrivia: false);
        if (_currentToken.Kind == RegexKind.CloseParenToken)
        {
            innerCloseParenToken = _currentToken;
            if (!HasCapture((int)capture.Value))
            {
                capture = capture.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                    FeaturesResources.Reference_to_undefined_group,
                    capture.GetSpan()));
            }
        }
        else
        {
            innerCloseParenToken = CreateMissingToken(RegexKind.CloseParenToken);
            capture = capture.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                FeaturesResources.Malformed,
                capture.GetSpan()));
            MoveBackBeforePreviousScan();
        }
    }
    else
    {
        // If it's a capture name, it's ok if that capture doesn't exist. In that case we
        // will just treat this as an conditional expression.
        if (!HasCapture((string)capture.Value))
        {
            _lexer.Position = afterInnerOpenParen;
            return ParseConditionalExpressionGrouping(openParenToken, questionToken);
        }

        // Capture name existed. For this to be a capture grouping it exactly has to
        // match (?(a) anything other than a close paren after the ) will make this
        // into a conditional expression.
        ConsumeCurrentToken(allowTrivia: false);
        if (_currentToken.Kind != RegexKind.CloseParenToken)
        {
            _lexer.Position = afterInnerOpenParen;
            return ParseConditionalExpressionGrouping(openParenToken, questionToken);
        }

        innerCloseParenToken = _currentToken;
    }

    // Was (?(name) or (?(num) and name/num was a legal capture name. Parse
    // this out as a conditional grouping. Because we're going to be parsing out
    // an embedded sequence, allow trivia before the first element.
    ConsumeCurrentToken(allowTrivia: true);
    var result = ParseConditionalGroupingResult();

    return new RegexConditionalCaptureGroupingNode(
        openParenToken, questionToken,
        innerOpenParenToken, capture, innerCloseParenToken,
        result, ParseGroupingCloseParen());
}
// Capture lookups against the maps computed by the first parsing pass.

private bool HasCapture(int value)
{
    return _captureNumbersToSpan.ContainsKey(value);
}

private bool HasCapture(string value)
{
    return _captureNamesToSpan.ContainsKey(value);
}
// Steps the lexer back one position to un-consume the character the last scan
// consumed. No-op at end-of-file, where nothing was consumed.
private void MoveBackBeforePreviousScan()
{
    if (_currentToken.Kind != RegexKind.EndOfFile)
    {
        // Move back to un-consume whatever we just consumed.
        _lexer.Position--;
    }
}
// Parses "(?(expr)..." where the condition is itself an expression rather than
// a capture reference, reproducing the native parser's special-cased errors
// for conditions that look like comments or named groups.
private RegexConditionalGroupingNode ParseConditionalExpressionGrouping(
    RegexToken openParenToken, RegexToken questionToken)
{
    // Reproduce very specific errors the .NET regex parser looks for. Technically,
    // we would error out in these cases no matter what. However, it means we can
    // stringently enforce that our parser produces the same errors as the native one.
    //
    // Move back before the (
    _lexer.Position--;
    if (_lexer.IsAt("(?#"))
    {
        // Scan the comment only to harvest its diagnostics, then rewind.
        var pos = _lexer.Position;
        var comment = _lexer.ScanComment(options: default);
        _lexer.Position = pos;

        if (comment.Value.Diagnostics.Length > 0)
        {
            openParenToken = openParenToken.AddDiagnosticIfNone(comment.Value.Diagnostics[0]);
        }
        else
        {
            openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                FeaturesResources.Alternation_conditions_cannot_be_comments,
                openParenToken.GetSpan()));
        }
    }
    else if (_lexer.IsAt("(?'"))
    {
        openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Alternation_conditions_do_not_capture_and_cannot_be_named,
            openParenToken.GetSpan()));
    }
    else if (_lexer.IsAt("(?<"))
    {
        // (?<! and (?<= are carved out here and not reported.
        if (!_lexer.IsAt("(?<!") &&
            !_lexer.IsAt("(?<="))
        {
            openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                FeaturesResources.Alternation_conditions_do_not_capture_and_cannot_be_named,
                openParenToken.GetSpan()));
        }
    }

    // Consume the ( once more.
    ConsumeCurrentToken(allowTrivia: false);
    Debug.Assert(_currentToken.Kind == RegexKind.OpenParenToken);

    // Parse out the grouping that starts with the second open paren in (?(
    // this will get us to (?(...)
    var grouping = ParseGrouping();

    // Now parse out the embedded expression that follows that. this will get us to
    // (?(...)...
    var result = ParseConditionalGroupingResult();

    // Finally, grab the close paren and produce (?(...)...)
    return new RegexConditionalExpressionGroupingNode(
        openParenToken, questionToken,
        grouping, result, ParseGroupingCloseParen());
}
// Parses the yes|no branches that follow the (?(condition) portion of a
// conditional grouping.
private RegexExpressionNode ParseConditionalGroupingResult()
{
    // Option changes made inside the branches must not leak past the construct.
    var currentOptions = _options;
    var result = this.ParseAlternatingSequences(consumeCloseParen: false);
    _options = currentOptions;

    result = CheckConditionalAlternation(result);
    return result;
}
// A conditional grouping may contain at most one '|' (a yes-branch and a
// no-branch). Since alternations nest to the left (a|b|c => (a|b)|c), a nested
// alternation on the left means too many branches; the diagnostic is attached
// to the offending bar token.
private static RegexExpressionNode CheckConditionalAlternation(RegexExpressionNode result)
{
    if (result is RegexAlternationNode topAlternation &&
        topAlternation.Left is RegexAlternationNode)
    {
        return new RegexAlternationNode(
            topAlternation.Left,
            topAlternation.BarToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                FeaturesResources.Too_many_bars_in_conditional_grouping,
                topAlternation.BarToken.GetSpan())),
            topAlternation.Right);
    }

    return result;
}
// Disambiguates the constructs that start with "(?<": the two lookbehind
// forms, or a named-capture/balancing group.
private RegexGroupingNode ParseLookbehindOrNamedCaptureOrBalancingGrouping(
    RegexToken openParenToken, RegexToken questionToken)
{
    var start = _lexer.Position;

    // We have (?< Look for (?<= or (?<!
    var lessThanToken = ConsumeCurrentToken(allowTrivia: false);
    switch (_currentToken.Kind)
    {
        case RegexKind.EqualsToken:
            // (?<=...) — body is parsed with RightToLeft added to the options.
            return new RegexPositiveLookbehindGroupingNode(
                openParenToken, questionToken, lessThanToken, _currentToken,
                ParseGroupingEmbeddedExpression(_options | RegexOptions.RightToLeft), ParseGroupingCloseParen());

        case RegexKind.ExclamationToken:
            // (?<!...) — same RightToLeft treatment.
            return new RegexNegativeLookbehindGroupingNode(
                openParenToken, questionToken, lessThanToken, _currentToken,
                ParseGroupingEmbeddedExpression(_options | RegexOptions.RightToLeft), ParseGroupingCloseParen());

        default:
            // Didn't have a lookbehind group. Parse out as (?<...> or (?<...-...>
            _lexer.Position = start;
            return ParseNamedCaptureOrBalancingGrouping(openParenToken, questionToken, lessThanToken);
    }
}
// Parses the named-capture and balancing-group forms:
//   (?<name>...)  (?<name1-name2>...)  (?'name'...)  (?'name1-name2'...)
// 'openToken' is the '<' or ''' that followed "(?".
private RegexGroupingNode ParseNamedCaptureOrBalancingGrouping(
    RegexToken openParenToken, RegexToken questionToken, RegexToken openToken)
{
    if (_lexer.Position == _lexer.Text.Length)
    {
        // Pattern ended right after (?< or (?' — nothing left to parse.
        openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Unrecognized_grouping_construct,
            GetSpan(openParenToken, openToken)));
    }

    // (?<...>...) or (?<...-...>...)
    // (?'...'...) or (?'...-...'...)
    var captureToken = _lexer.TryScanNumberOrCaptureName();
    if (captureToken == null)
    {
        // Can't have any trivia between the elements in this grouping header.
        ConsumeCurrentToken(allowTrivia: false);
        captureToken = CreateMissingToken(RegexKind.CaptureNameToken);

        if (_currentToken.Kind == RegexKind.MinusToken)
        {
            // (?<-...> — a balancing group with a missing first capture.
            return ParseBalancingGrouping(
                openParenToken, questionToken, openToken, captureToken.Value);
        }
        else
        {
            openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                FeaturesResources.Invalid_group_name_Group_names_must_begin_with_a_word_character,
                GetTokenSpanIncludingEOF(_currentToken)));

            // If we weren't at the end of the text, go back to before whatever character
            // we just consumed.
            MoveBackBeforePreviousScan();
        }
    }

    var capture = captureToken.Value;
    if (capture.Kind == RegexKind.NumberToken && (int)capture.Value == 0)
    {
        capture = capture.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Capture_number_cannot_be_zero,
            capture.GetSpan()));
    }

    // Can't have any trivia between the elements in this grouping header.
    ConsumeCurrentToken(allowTrivia: false);
    if (_currentToken.Kind == RegexKind.MinusToken)
    {
        // Have (?<...- parse out the balancing group form.
        return ParseBalancingGrouping(
            openParenToken, questionToken,
            openToken, capture);
    }

    var closeToken = ParseCaptureGroupingCloseToken(ref openParenToken, openToken);

    return new RegexCaptureGroupingNode(
        openParenToken, questionToken,
        openToken, capture, closeToken,
        ParseGroupingEmbeddedExpression(_options), ParseGroupingCloseParen());
}
// Parses the '>' or ''' that closes a capture/balancing-group header (e.g. the
// '>' in "(?<name>"). When it's absent, a missing token is returned and the
// diagnostic is attached to the open paren (passed by ref).
private RegexToken ParseCaptureGroupingCloseToken(ref RegexToken openParenToken, RegexToken openToken)
{
    // The close delimiter must pair with the open one: '<' with '>', ''' with '''.
    if ((openToken.Kind == RegexKind.LessThanToken && _currentToken.Kind == RegexKind.GreaterThanToken) ||
        (openToken.Kind == RegexKind.SingleQuoteToken && _currentToken.Kind == RegexKind.SingleQuoteToken))
    {
        return _currentToken;
    }

    if (_currentToken.Kind == RegexKind.EndOfFile)
    {
        openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Unrecognized_grouping_construct,
            GetSpan(openParenToken, openToken)));
    }
    else
    {
        openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Invalid_group_name_Group_names_must_begin_with_a_word_character,
            _currentToken.GetSpan()));

        // Rewind to where we were before seeing this bogus character.
        _lexer.Position--;
    }

    return CreateMissingToken(
        openToken.Kind == RegexKind.LessThanToken
            ? RegexKind.GreaterThanToken : RegexKind.SingleQuoteToken);
}
// Parses the remainder of a balancing group "(?<first-second>...)" with the
// current token positioned on the '-'.
private RegexBalancingGroupingNode ParseBalancingGrouping(
    RegexToken openParenToken, RegexToken questionToken,
    RegexToken openToken, RegexToken firstCapture)
{
    var minusToken = _currentToken;
    var secondCapture = _lexer.TryScanNumberOrCaptureName();
    if (secondCapture == null)
    {
        // Invalid group name: Group names must begin with a word character
        ConsumeCurrentToken(allowTrivia: false);

        openParenToken = openParenToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Invalid_group_name_Group_names_must_begin_with_a_word_character,
            GetTokenSpanIncludingEOF(_currentToken)));

        // If we weren't at the end of the text, go back to before whatever character
        // we just consumed.
        MoveBackBeforePreviousScan();
        secondCapture = CreateMissingToken(RegexKind.CaptureNameToken);
    }

    var second = secondCapture.Value;
    // The second name/number is validated against the known captures.
    CheckCapture(ref second);

    // Can't have any trivia between the elements in this grouping header.
    ConsumeCurrentToken(allowTrivia: false);
    var closeToken = ParseCaptureGroupingCloseToken(ref openParenToken, openToken);

    return new RegexBalancingGroupingNode(
        openParenToken, questionToken,
        openToken, firstCapture, minusToken, second, closeToken,
        ParseGroupingEmbeddedExpression(_options), ParseGroupingCloseParen());
}
// Attaches an 'undefined group' diagnostic to the capture token when it names
// a group (by number or by name) that the first parsing pass did not see.
private void CheckCapture(ref RegexToken captureToken)
{
    // Synthesized (missing) tokens have already been reported elsewhere.
    if (captureToken.IsMissing)
        return;

    if (captureToken.Kind == RegexKind.NumberToken)
    {
        var number = (int)captureToken.Value;
        if (HasCapture(number))
            return;

        captureToken = captureToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            string.Format(FeaturesResources.Reference_to_undefined_group_number_0, number),
            captureToken.GetSpan()));
    }
    else
    {
        var name = (string)captureToken.Value;
        if (HasCapture(name))
            return;

        captureToken = captureToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            string.Format(FeaturesResources.Reference_to_undefined_group_name_0, name),
            captureToken.GetSpan()));
    }
}
// The simple (?x...) grouping forms below share one shape: capture the token
// after "(?", parse the embedded body (lookahead bodies have RightToLeft
// stripped from their options), then consume the close paren. Each token is
// captured into a local BEFORE the body is parsed, preserving the original
// left-to-right argument evaluation order.

private RegexNonCapturingGroupingNode ParseNonCapturingGroupingNode(RegexToken openParenToken, RegexToken questionToken)
{
    var colonToken = _currentToken;
    var expression = ParseGroupingEmbeddedExpression(_options);
    return new RegexNonCapturingGroupingNode(openParenToken, questionToken, colonToken, expression, ParseGroupingCloseParen());
}

private RegexPositiveLookaheadGroupingNode ParsePositiveLookaheadGrouping(RegexToken openParenToken, RegexToken questionToken)
{
    var equalsToken = _currentToken;
    var expression = ParseGroupingEmbeddedExpression(_options & ~RegexOptions.RightToLeft);
    return new RegexPositiveLookaheadGroupingNode(openParenToken, questionToken, equalsToken, expression, ParseGroupingCloseParen());
}

private RegexNegativeLookaheadGroupingNode ParseNegativeLookaheadGrouping(RegexToken openParenToken, RegexToken questionToken)
{
    var exclamationToken = _currentToken;
    var expression = ParseGroupingEmbeddedExpression(_options & ~RegexOptions.RightToLeft);
    return new RegexNegativeLookaheadGroupingNode(openParenToken, questionToken, exclamationToken, expression, ParseGroupingCloseParen());
}

private RegexAtomicGroupingNode ParseAtomicGrouping(RegexToken openParenToken, RegexToken questionToken)
{
    var greaterThanToken = _currentToken;
    var expression = ParseGroupingEmbeddedExpression(_options);
    return new RegexAtomicGroupingNode(openParenToken, questionToken, greaterThanToken, expression, ParseGroupingCloseParen());
}
// Parses "(?opts)" or "(?opts:...)" with 'optionsToken' holding the scanned
// option characters.
private RegexGroupingNode ParseOptionsGroupingNode(
    RegexToken openParenToken, RegexToken questionToken, RegexToken optionsToken)
{
    // Only (?opts:...) or (?opts) are allowed. After the opts must be a : or )
    ConsumeCurrentToken(allowTrivia: false);
    switch (_currentToken.Kind)
    {
        case RegexKind.CloseParenToken:
            // Allow trivia after the options and the next element in the sequence.
            // The (?opts) form updates the parser's current options in place.
            _options = GetNewOptionsFromToken(_options, optionsToken);
            return new RegexSimpleOptionsGroupingNode(
                openParenToken, questionToken, optionsToken,
                ConsumeCurrentToken(allowTrivia: true));

        case RegexKind.ColonToken:
            return ParseNestedOptionsGroupingNode(openParenToken, questionToken, optionsToken);

        default:
            // Anything else after the options is an unrecognized construct.
            return new RegexSimpleOptionsGroupingNode(
                openParenToken, questionToken, optionsToken,
                CreateMissingToken(RegexKind.CloseParenToken).AddDiagnosticIfNone(
                    new EmbeddedDiagnostic(FeaturesResources.Unrecognized_grouping_construct, openParenToken.GetSpan())));
    }
}
private RegexNestedOptionsGroupingNode ParseNestedOptionsGroupingNode(
    RegexToken openParenToken, RegexToken questionToken, RegexToken optionsToken)
{
    // (?opts:...)  The embedded expression is parsed under the options obtained by
    // applying the +/- flags in 'optionsToken' to the current options.
    var colonToken = _currentToken;
    var embeddedOptions = GetNewOptionsFromToken(_options, optionsToken);
    var expression = ParseGroupingEmbeddedExpression(embeddedOptions);
    var closeParenToken = ParseGroupingCloseParen();
    return new RegexNestedOptionsGroupingNode(
        openParenToken, questionToken, optionsToken, colonToken, expression, closeParenToken);
}
private static bool IsTextChar(RegexToken currentToken, char ch)
{
    // True when the token is a text token consisting of exactly the single character 'ch'.
    if (currentToken.Kind != RegexKind.TextToken)
        return false;

    var chars = currentToken.VirtualChars;
    return chars.Length == 1 && chars[0].Value == ch;
}
private static RegexOptions GetNewOptionsFromToken(RegexOptions currentOptions, RegexOptions optionsTokenPlaceholder) => throw null; // placeholder-detector guard (unused)
private static RegexOptions OptionFromCode(VirtualChar ch)
{
    // Inline option letters are case-insensitive.
    var value = ch.Value;
    if (value == 'i' || value == 'I')
        return RegexOptions.IgnoreCase;
    if (value == 'm' || value == 'M')
        return RegexOptions.Multiline;
    if (value == 'n' || value == 'N')
        return RegexOptions.ExplicitCapture;
    if (value == 's' || value == 'S')
        return RegexOptions.Singleline;
    if (value == 'x' || value == 'X')
        return RegexOptions.IgnorePatternWhitespace;

    // Anything else is unexpected here (options tokens are expected to contain only
    // the letters handled above).
    throw new InvalidOperationException();
}
/// <summary>
/// Parses a character class <c>[...]</c> (or negated <c>[^...]</c>), having just seen the
/// open bracket.  Trivia is never allowed inside a character class; trivia is allowed
/// after the closing bracket.
/// </summary>
private RegexBaseCharacterClassNode ParseCharacterClass()
{
    var openBracketToken = _currentToken;
    Debug.Assert(openBracketToken.Kind == RegexKind.OpenBracketToken);
    var caretToken = CreateMissingToken(RegexKind.CaretToken);
    var closeBracketToken = CreateMissingToken(RegexKind.CloseBracketToken);

    // trivia is not allowed anywhere in a character class
    ConsumeCurrentToken(allowTrivia: false);
    if (_currentToken.Kind == RegexKind.CaretToken)
    {
        caretToken = _currentToken;
    }
    else
    {
        MoveBackBeforePreviousScan();
    }

    // trivia is not allowed anywhere in a character class
    ConsumeCurrentToken(allowTrivia: false);

    using var _ = ArrayBuilder<RegexExpressionNode>.GetInstance(out var builder);

    while (_currentToken.Kind != RegexKind.EndOfFile)
    {
        Debug.Assert(_currentToken.VirtualChars.Length == 1);

        // Note: ']' only closes the class once at least one component has been seen;
        // a ']' appearing first is treated as an ordinary component character.
        if (_currentToken.Kind == RegexKind.CloseBracketToken && builder.Count > 0)
        {
            // Allow trivia after the character class, and whatever is next in the sequence.
            closeBracketToken = ConsumeCurrentToken(allowTrivia: true);
            break;
        }

        ParseCharacterClassComponents(builder);
    }

    // We will commonly get tons of text nodes in a row. For example, the
    // regex `[abc]` will be three text nodes in a row. To help save on memory
    // try to merge that into one single text node.
    var contents = ArrayBuilder<RegexExpressionNode>.GetInstance();
    MergeTextNodes(builder, contents);

    if (closeBracketToken.IsMissing)
    {
        closeBracketToken = closeBracketToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Unterminated_character_class_set,
            GetTokenStartPositionSpan(_currentToken)));
    }

    var components = new RegexSequenceNode(contents.ToImmutableAndFree());
    return caretToken.IsMissing
        ? (RegexBaseCharacterClassNode)new RegexCharacterClassNode(openBracketToken, components, closeBracketToken)
        : new RegexNegatedCharacterClassNode(openBracketToken, caretToken, components, closeBracketToken);
}
/// <summary>
/// Parses one component of a character class (a single element, an <c>x-y</c> range, or a
/// trailing <c>-[...]</c> subtraction) and adds the resulting node(s) to
/// <paramref name="components"/>.
/// </summary>
private void ParseCharacterClassComponents(ArrayBuilder<RegexExpressionNode> components)
{
    var left = ParseSingleCharacterClassComponent(isFirst: components.Count == 0, afterRangeMinus: false);
    if (left.Kind == RegexKind.CharacterClassEscape ||
        left.Kind == RegexKind.CategoryEscape ||
        IsEscapedMinus(left))
    {
        // \s or \p{Lu} or \- on the left of a minus doesn't start a range. If there is a following
        // minus, it's just treated textually.
        components.Add(left);
        return;
    }

    if (_currentToken.Kind == RegexKind.MinusToken && !_lexer.IsAt("]"))
    {
        // trivia is not allowed anywhere in a character class
        var minusToken = ConsumeCurrentToken(allowTrivia: false);

        if (_currentToken.Kind == RegexKind.OpenBracketToken)
        {
            // '-[...]' is a class subtraction, not a range.
            components.Add(left);
            components.Add(ParseCharacterClassSubtractionNode(minusToken));
        }
        else
        {
            // Note that behavior of parsing here changed in .net. See issue:
            // https://github.com/dotnet/corefx/issues/31786
            //
            // We follow the latest behavior in .net which parses things correctly.
            var right = ParseSingleCharacterClassComponent(isFirst: false, afterRangeMinus: true);

            // Diagnose [z-a] style ranges whose endpoints are in reverse order.
            if (TryGetRangeComponentValue(left, out var leftCh) &&
                TryGetRangeComponentValue(right, out var rightCh) &&
                leftCh > rightCh)
            {
                minusToken = minusToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                    FeaturesResources.x_y_range_in_reverse_order,
                    minusToken.GetSpan()));
            }

            components.Add(new RegexCharacterClassRangeNode(left, minusToken, right));
        }
    }
    else
    {
        components.Add(left);
    }
}
private static bool IsEscapedMinus(RegexNode node)
    => node switch
    {
        // An escaped minus is a simple escape node whose type token is the single char '-'.
        RegexSimpleEscapeNode simple => IsTextChar(simple.TypeToken, '-'),
        _ => false,
    };
private bool TryGetRangeComponentValue(RegexExpressionNode component, out int ch)
{
    ch = default;

    // Don't bother examining the component if it has any errors already. This also
    // means the worker doesn't have to worry about invalid escape sequences and the like.
    return !HasProblem(component) && TryGetRangeComponentValueWorker(component, out ch);
}
/// <summary>
/// Computes the character value a range component represents (used to verify that an
/// <c>x-y</c> range in a character class is not in reverse order).  The component is
/// assumed to be diagnostic-free (see <see cref="TryGetRangeComponentValue"/>).
/// </summary>
private bool TryGetRangeComponentValueWorker(RegexNode component, out int ch)
{
    switch (component.Kind)
    {
        case RegexKind.SimpleEscape:
            var escapeNode = (RegexSimpleEscapeNode)component;
            ch = MapEscapeChar(escapeNode.TypeToken.VirtualChars[0]).Value;
            return true;

        case RegexKind.ControlEscape:
            var controlEscape = (RegexControlEscapeNode)component;
            var controlCh = controlEscape.ControlToken.VirtualChars[0].Value;

            // \ca interpreted as \cA
            if (controlCh >= 'a' && controlCh <= 'z')
            {
                controlCh -= (char)('a' - 'A');
            }

            // The control characters have values mapping from the A-Z range to numeric
            // values 1-26. So, to map that, we subtract 'A' from the value (which would
            // give us 0-25) and then add '1' back to it.
            ch = controlCh - 'A' + 1;
            return true;

        case RegexKind.OctalEscape:
            ch = GetCharValue(((RegexOctalEscapeNode)component).OctalText, withBase: 8);
            return true;

        case RegexKind.HexEscape:
            ch = GetCharValue(((RegexHexEscapeNode)component).HexText, withBase: 16);
            return true;

        case RegexKind.UnicodeEscape:
            ch = GetCharValue(((RegexUnicodeEscapeNode)component).HexText, withBase: 16);
            return true;

        case RegexKind.PosixProperty:
            // When the native parser sees [:...:] it treats this as if it just saw '[' and skipped the
            // rest.
            ch = '[';
            return true;

        case RegexKind.Text:
            ch = ((RegexTextNode)component).TextToken.VirtualChars[0].Value;
            return true;

        case RegexKind.Sequence:
            var sequence = (RegexSequenceNode)component;
#if DEBUG
            Debug.Assert(sequence.ChildCount > 0);
            for (int i = 0, n = sequence.ChildCount - 1; i < n; i++)
            {
                Debug.Assert(IsEscapedMinus(sequence.ChildAt(i).Node));
            }
#endif

            // Only the final child of the sequence can carry a value; if it is itself an
            // escaped minus, the sequence has no usable range value.
            var last = sequence.ChildAt(sequence.ChildCount - 1).Node;
            if (IsEscapedMinus(last))
            {
                break;
            }

            return TryGetRangeComponentValueWorker(last, out ch);
    }

    ch = default;
    return false;
}
private static int GetCharValue(RegexToken hexText, int withBase)
{
    // Accumulates the digits of the token in the given base; overflow deliberately
    // wraps (same as the original unchecked arithmetic).
    unchecked
    {
        var result = 0;
        foreach (var digit in hexText.VirtualChars)
        {
            result = result * withBase + HexValue(digit);
        }

        return result;
    }
}
private static int HexValue(VirtualChar ch)
{
    // Maps '0'-'9', 'a'-'f', 'A'-'F' to 0-15; anything else is a caller bug.
    Debug.Assert(RegexLexer.IsHexChar(ch));
    unchecked
    {
        var value = ch.Value;
        if (value >= '0' && value <= '9')
            return value - '0';

        if (value >= 'a' && value <= 'f')
            return value - 'a' + 10;

        if (value >= 'A' && value <= 'F')
            return value - 'A' + 10;
    }

    throw new InvalidOperationException();
}
private bool HasProblem(RegexNodeOrToken component)
{
    // Token case: a missing token, a token-level diagnostic, or a diagnostic on any
    // piece of leading trivia all count as problems.
    if (!component.IsNode)
    {
        var token = component.Token;
        if (token.IsMissing || token.Diagnostics.Length > 0)
        {
            return true;
        }

        foreach (var trivia in token.LeadingTrivia)
        {
            if (trivia.Diagnostics.Length > 0)
            {
                return true;
            }
        }

        return false;
    }

    // Node case: recurse into every child.
    foreach (var child in component.Node)
    {
        if (HasProblem(child))
        {
            return true;
        }
    }

    return false;
}
/// <summary>
/// Parses a single element of a character class: an escape, a trailing subtraction, a
/// posix-style property (skipped, matching the native parser), or a plain text character.
/// </summary>
/// <param name="isFirst">True when this would be the first component of the class.</param>
/// <param name="afterRangeMinus">True when parsing the right side of an x-y range.</param>
private RegexPrimaryExpressionNode ParseSingleCharacterClassComponent(bool isFirst, bool afterRangeMinus)
{
    if (_currentToken.Kind == RegexKind.BackslashToken && _lexer.Position < _lexer.Text.Length)
    {
        var backslashToken = _currentToken;

        // trivia is not allowed anywhere in a character class, and definitely not between
        // a \ and the following character.
        ConsumeCurrentToken(allowTrivia: false);
        Debug.Assert(_currentToken.VirtualChars.Length == 1);

        var nextChar = _currentToken.VirtualChars[0];
        switch (nextChar.Value)
        {
            case 'D':
            case 'd':
            case 'S':
            case 's':
            case 'W':
            case 'w':
            case 'p':
            case 'P':
                // Class/category escapes may not appear as a range endpoint.
                if (afterRangeMinus)
                {
                    backslashToken = backslashToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                        string.Format(FeaturesResources.Cannot_include_class_0_in_character_range, nextChar),
                        GetSpan(backslashToken, _currentToken)));
                }

                // move back before the character we just scanned.
                // trivia is not allowed anywhere in a character class.

                // The above list are character class and category escapes. ParseEscape can
                // handle both of those, so we just defer to it.
                _lexer.Position--;
                return ParseEscape(backslashToken, allowTriviaAfterEnd: false);

            case '-':
                // trivia is not allowed anywhere in a character class.

                // We just let the basic consumption code pull out a token for us, we then
                // convert that to text since we treat all characters after the - as text no
                // matter what.
                return new RegexSimpleEscapeNode(
                    backslashToken, ConsumeCurrentToken(allowTrivia: false).With(kind: RegexKind.TextToken));

            default:
                // trivia is not allowed anywhere in a character class.

                // Note: it is very intentional that we're calling ParseCharEscape and not
                // ParseEscape. Normal escapes are not interpreted the same way inside a
                // character class. For example \b is not an anchor in a character class.
                // And things like \k'...' are not k-captures, etc. etc.
                _lexer.Position--;
                return ParseCharEscape(backslashToken, allowTriviaAfterEnd: false);
        }
    }

    if (!afterRangeMinus &&
        !isFirst &&
        _currentToken.Kind == RegexKind.MinusToken &&
        _lexer.IsAt("["))
    {
        // have a trailing subtraction.
        // trivia is not allowed anywhere in a character class
        return ParseCharacterClassSubtractionNode(
            ConsumeCurrentToken(allowTrivia: false));
    }

    // From the .NET regex code:
    // This is code for Posix style properties - [:Ll:] or [:IsTibetan:].
    // It currently doesn't do anything other than skip the whole thing!
    if (!afterRangeMinus && _currentToken.Kind == RegexKind.OpenBracketToken && _lexer.IsAt(":"))
    {
        var beforeBracketPos = _lexer.Position - 1;

        // trivia is not allowed anywhere in a character class
        ConsumeCurrentToken(allowTrivia: false);

        var captureName = _lexer.TryScanCaptureName();
        if (captureName.HasValue && _lexer.IsAt(":]"))
        {
            // Consume the trailing ":]" and wrap the whole [:name:] span into one text token.
            _lexer.Position += 2;
            var textChars = _lexer.GetSubPattern(beforeBracketPos, _lexer.Position);
            var token = CreateToken(RegexKind.TextToken, ImmutableArray<RegexTrivia>.Empty, textChars);

            // trivia is not allowed anywhere in a character class
            ConsumeCurrentToken(allowTrivia: false);
            return new RegexPosixPropertyNode(token);
        }
        else
        {
            // Reset to back where we were.
            // trivia is not allowed anywhere in a character class
            _lexer.Position = beforeBracketPos;
            ConsumeCurrentToken(allowTrivia: false);
            Debug.Assert(_currentToken.Kind == RegexKind.OpenBracketToken);
        }
    }

    // trivia is not allowed anywhere in a character class
    return new RegexTextNode(
        ConsumeCurrentToken(allowTrivia: false).With(kind: RegexKind.TextToken));
}
private RegexPrimaryExpressionNode ParseCharacterClassSubtractionNode(RegexToken minusToken)
{
    // '-[...]' inside a character class: parse the subtracted class first.
    var subtractedClass = ParseCharacterClass();

    // Subtraction is only legal as the final element of the containing class.
    var isLastElement = _currentToken.Kind == RegexKind.CloseBracketToken ||
                        _currentToken.Kind == RegexKind.EndOfFile;
    if (!isLastElement)
    {
        minusToken = minusToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.A_subtraction_must_be_the_last_element_in_a_character_class,
            GetTokenStartPositionSpan(minusToken)));
    }

    return new RegexCharacterClassSubtractionNode(minusToken, subtractedClass);
}
/// <summary>
/// Parses out an escape sequence. Escape sequences are allowed in top level sequences
/// and in character classes. In a top level sequence trivia will be allowed afterwards,
/// but in a character class trivia is not allowed afterwards.
/// </summary>
/// <param name="backslashToken">The already-consumed backslash that started the escape.</param>
/// <param name="allowTriviaAfterEnd">Whether trivia may follow the escape (true only at top level).</param>
private RegexEscapeNode ParseEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');

    // No spaces between \ and next char.
    ConsumeCurrentToken(allowTrivia: false);

    if (_currentToken.Kind == RegexKind.EndOfFile)
    {
        // Lone '\' at the very end of the pattern.
        backslashToken = backslashToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Illegal_backslash_at_end_of_pattern,
            backslashToken.GetSpan()));
        return new RegexSimpleEscapeNode(backslashToken, CreateMissingToken(RegexKind.TextToken));
    }

    Debug.Assert(_currentToken.VirtualChars.Length == 1);
    switch (_currentToken.VirtualChars[0].Value)
    {
        // Anchor escapes: \b \B \A \G \Z \z
        case 'b':
        case 'B':
        case 'A':
        case 'G':
        case 'Z':
        case 'z':
            return new RegexAnchorEscapeNode(
                backslashToken, ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd));

        // Character class escapes: \w \W \s \S \d \D
        case 'w':
        case 'W':
        case 's':
        case 'S':
        case 'd':
        case 'D':
            return new RegexCharacterClassEscapeNode(
                backslashToken, ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd));

        // Unicode category escapes: \p{...} \P{...}
        case 'p':
        case 'P':
            return ParseCategoryEscape(backslashToken, allowTriviaAfterEnd);
    }

    // Move back to after the backslash
    _lexer.Position--;
    return ParseBasicBackslash(backslashToken, allowTriviaAfterEnd);
}
/// <summary>
/// Parses an escape that is not an anchor, character-class, or category escape:
/// <c>\k</c> named back references, <c>\&lt;name&gt;</c> / <c>\'name'</c> capture
/// references, numeric back references, and finally plain character escapes.
/// </summary>
private RegexEscapeNode ParseBasicBackslash(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');

    // No spaces between \ and next char.
    ConsumeCurrentToken(allowTrivia: false);

    if (_currentToken.Kind == RegexKind.EndOfFile)
    {
        backslashToken = backslashToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Illegal_backslash_at_end_of_pattern,
            backslashToken.GetSpan()));
        return new RegexSimpleEscapeNode(backslashToken, CreateMissingToken(RegexKind.TextToken));
    }

    Debug.Assert(_currentToken.VirtualChars.Length == 1);
    var ch = _currentToken.VirtualChars[0];
    if (ch == 'k')
    {
        // \k<name> or \k'name'
        return ParsePossibleKCaptureEscape(backslashToken, allowTriviaAfterEnd);
    }

    if (ch == '<' || ch == '\'')
    {
        // \<name> or \'name'
        _lexer.Position--;
        return ParsePossibleCaptureEscape(backslashToken, allowTriviaAfterEnd);
    }

    if (ch >= '1' && ch <= '9')
    {
        // Numeric back reference (\1 .. \9, or longer).
        _lexer.Position--;
        return ParsePossibleBackreferenceEscape(backslashToken, allowTriviaAfterEnd);
    }

    _lexer.Position--;
    return ParseCharEscape(backslashToken, allowTriviaAfterEnd);
}
private RegexEscapeNode ParsePossibleBackreferenceEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');

    // ECMAScript mode has its own back-reference numbering rules; dispatch accordingly.
    if (HasOption(_options, RegexOptions.ECMAScript))
    {
        return ParsePossibleEcmascriptBackreferenceEscape(backslashToken, allowTriviaAfterEnd);
    }

    return ParsePossibleRegularBackreferenceEscape(backslashToken, allowTriviaAfterEnd);
}
/// <summary>
/// Parses a numeric back reference using ECMAScript rules: the longest digit prefix that
/// names an existing capture is taken as the reference; if no prefix matches, the
/// position is rewound and the digits are parsed as a character escape instead.
/// </summary>
private RegexEscapeNode ParsePossibleEcmascriptBackreferenceEscape(
    RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    // Small deviation: Ecmascript allows references only to captures that precede
    // this position (unlike .NET which allows references in any direction). However,
    // because we don't track position, we just consume the entire back-reference.
    //
    // This is addressable if we add position tracking when we locate all the captures.

    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');
    var start = _lexer.Position;

    var bestPosition = -1;
    var capVal = 0;
    while (_lexer.Position < _lexer.Text.Length &&
           _lexer.Text[_lexer.Position] is var ch &&
           (ch >= '0' && ch <= '9'))
    {
        unchecked
        {
            capVal *= 10;
            capVal += (ch.Value - '0');
        }

        _lexer.Position++;

        // Remember the end of the longest digit prefix that names a real capture.
        if (HasCapture(capVal))
        {
            bestPosition = _lexer.Position;
        }
    }

    if (bestPosition != -1)
    {
        // NOTE(review): the token text covers only start..bestPosition, but 'capVal'
        // reflects the full digit run scanned above — confirm this is intended.
        var numberToken = CreateToken(
            RegexKind.NumberToken, ImmutableArray<RegexTrivia>.Empty,
            _lexer.GetSubPattern(start, bestPosition)).With(value: capVal);
        ResetToPositionAndConsumeCurrentToken(bestPosition, allowTrivia: allowTriviaAfterEnd);
        return new RegexBackreferenceEscapeNode(backslashToken, numberToken);
    }

    _lexer.Position = start;
    return ParseCharEscape(backslashToken, allowTriviaAfterEnd);
}
/// <summary>
/// Parses a numeric back reference using regular .NET rules: the scanned number is a
/// back reference when that capture exists, or when it is a single digit (which is always
/// treated as a back reference; <c>CheckCapture</c> attaches a diagnostic if undefined).
/// Otherwise the position is rewound and the digits are parsed as a character escape.
/// </summary>
private RegexEscapeNode ParsePossibleRegularBackreferenceEscape(
    RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');
    var start = _lexer.Position;

    var numberToken = _lexer.TryScanNumber().Value;
    var capVal = (int)numberToken.Value;
    if (HasCapture(capVal) ||
        capVal <= 9)
    {
        CheckCapture(ref numberToken);
        ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd);
        return new RegexBackreferenceEscapeNode(backslashToken, numberToken);
    }

    // Not a recognized back reference; rewind and parse as a character escape (e.g. octal).
    _lexer.Position = start;
    return ParseCharEscape(backslashToken, allowTriviaAfterEnd);
}
/// <summary>
/// Parses <c>\&lt;name&gt;</c> or <c>\'name'</c> capture references.  If the full
/// open/name/close shape isn't present, rewinds and parses a plain character escape.
/// </summary>
private RegexEscapeNode ParsePossibleCaptureEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');
    Debug.Assert(_lexer.Text[_lexer.Position] == '<' ||
                 _lexer.Text[_lexer.Position] == '\'');

    var afterBackslashPosition = _lexer.Position;
    ScanCaptureParts(allowTriviaAfterEnd, out var openToken, out var capture, out var closeToken);

    if (openToken.IsMissing || capture.IsMissing || closeToken.IsMissing)
    {
        // Not a well-formed capture reference; rewind and fall back.
        _lexer.Position = afterBackslashPosition;
        return ParseCharEscape(backslashToken, allowTriviaAfterEnd);
    }

    return new RegexCaptureEscapeNode(
        backslashToken, openToken, capture, closeToken);
}
/// <summary>
/// Parses <c>\k&lt;name&gt;</c> / <c>\k'name'</c> back references.  A missing open
/// delimiter is a malformed named back reference; a missing name or close delimiter falls
/// back to normal escape scanning (matching the native parser).
/// </summary>
private RegexEscapeNode ParsePossibleKCaptureEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    var typeToken = _currentToken;
    var afterBackslashPosition = _lexer.Position - @"k".Length;

    ScanCaptureParts(allowTriviaAfterEnd, out var openToken, out var capture, out var closeToken);
    if (openToken.IsMissing)
    {
        backslashToken = backslashToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Malformed_named_back_reference,
            GetSpan(backslashToken, typeToken)));
        return new RegexSimpleEscapeNode(backslashToken, typeToken.With(kind: RegexKind.TextToken));
    }

    if (capture.IsMissing || closeToken.IsMissing)
    {
        // Native parser falls back to normal escape scanning, if it doesn't see a capture,
        // or close brace. For normal .NET regexes, this will then fail later (as \k is not
        // a legal escape), but will succeed for ecmascript regexes.
        _lexer.Position = afterBackslashPosition;
        return ParseCharEscape(backslashToken, allowTriviaAfterEnd);
    }

    return new RegexKCaptureEscapeNode(
        backslashToken, typeToken, openToken, capture, closeToken);
}
/// <summary>
/// Scans the <c>&lt;name&gt;</c> / <c>'name'</c> portion of a capture reference into its
/// open token, capture (name or number) token, and close token.  Any part that is not
/// present comes back as a missing token.  No trivia is allowed within the parts.
/// </summary>
private void ScanCaptureParts(
    bool allowTriviaAfterEnd, out RegexToken openToken, out RegexToken capture, out RegexToken closeToken)
{
    openToken = CreateMissingToken(RegexKind.LessThanToken);
    capture = CreateMissingToken(RegexKind.CaptureNameToken);
    closeToken = CreateMissingToken(RegexKind.GreaterThanToken);

    // No trivia allowed in <cap> or 'cap'
    ConsumeCurrentToken(allowTrivia: false);

    if (_lexer.Position < _lexer.Text.Length &&
        (_currentToken.Kind == RegexKind.LessThanToken || _currentToken.Kind == RegexKind.SingleQuoteToken))
    {
        openToken = _currentToken;
    }
    else
    {
        return;
    }

    var captureToken = _lexer.TryScanNumberOrCaptureName();
    capture = captureToken == null
        ? CreateMissingToken(RegexKind.CaptureNameToken)
        : captureToken.Value;

    // No trivia allowed in <cap> or 'cap'
    ConsumeCurrentToken(allowTrivia: false);
    closeToken = CreateMissingToken(RegexKind.GreaterThanToken);

    // The close delimiter must match the open one: '<' pairs with '>' and a single
    // quote pairs with a single quote.
    if (!capture.IsMissing &&
        ((openToken.Kind == RegexKind.LessThanToken && _currentToken.Kind == RegexKind.GreaterThanToken) ||
         (openToken.Kind == RegexKind.SingleQuoteToken && _currentToken.Kind == RegexKind.SingleQuoteToken)))
    {
        CheckCapture(ref capture);
        closeToken = ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd);
    }
}
/// <summary>
/// Parses a single-character escape: octal digits, simple escapes (<c>\n</c>, <c>\t</c>,
/// ...), hex (<c>\x</c>), unicode (<c>\u</c>), control (<c>\c</c>), or — for anything
/// else — a simple text escape, diagnosed when the character is a word character outside
/// ECMAScript mode.
/// </summary>
private RegexEscapeNode ParseCharEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] == '\\');

    // no trivia between \ and the next char
    ConsumeCurrentToken(allowTrivia: false);
    Debug.Assert(_currentToken.VirtualChars.Length == 1);

    var ch = _currentToken.VirtualChars[0];
    if (ch >= '0' && ch <= '7')
    {
        // Octal escape: rewind and let the lexer scan the full digit run.
        _lexer.Position--;
        var octalDigits = _lexer.ScanOctalCharacters(_options);
        Debug.Assert(octalDigits.VirtualChars.Length > 0);

        ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd);
        return new RegexOctalEscapeNode(backslashToken, octalDigits);
    }

    switch (ch.Value)
    {
        case 'a':
        case 'b':
        case 'e':
        case 'f':
        case 'n':
        case 'r':
        case 't':
        case 'v':
            return new RegexSimpleEscapeNode(
                backslashToken, ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd));
        case 'x':
            return ParseHexEscape(backslashToken, allowTriviaAfterEnd);
        case 'u':
            return ParseUnicodeEscape(backslashToken, allowTriviaAfterEnd);
        case 'c':
            return ParseControlEscape(backslashToken, allowTriviaAfterEnd);
        default:
            var typeToken = ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd).With(kind: RegexKind.TextToken);

            // e.g. \q is an error outside ECMAScript mode.
            if (!HasOption(_options, RegexOptions.ECMAScript) && RegexCharClass.IsWordChar(ch))
            {
                typeToken = typeToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                    string.Format(FeaturesResources.Unrecognized_escape_sequence_0, ch),
                    typeToken.GetSpan()));
            }

            return new RegexSimpleEscapeNode(backslashToken, typeToken);
    }
}
private RegexEscapeNode ParseUnicodeEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    // \uFFFF — the 'u' is the current token; scan the four hex digits that follow it.
    var escapeTypeToken = _currentToken;
    var digits = _lexer.ScanHexCharacters(4);
    ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd);
    return new RegexUnicodeEscapeNode(backslashToken, escapeTypeToken, digits);
}
private RegexEscapeNode ParseHexEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    // \xFF — the 'x' is the current token; scan the two hex digits that follow it.
    var escapeTypeToken = _currentToken;
    var digits = _lexer.ScanHexCharacters(2);
    ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd);
    return new RegexHexEscapeNode(backslashToken, escapeTypeToken, digits);
}
/// <summary>
/// Parses <c>\cX</c>, where X (lowercase letters are uppercased first) must land in the
/// '@'-'_' range.  A missing or out-of-range control character produces a node with a
/// missing token and an attached diagnostic.
/// </summary>
private RegexControlEscapeNode ParseControlEscape(RegexToken backslashToken, bool allowTriviaAfterEnd)
{
    // Nothing allowed between \c and the next char
    var typeToken = ConsumeCurrentToken(allowTrivia: false);

    if (_currentToken.Kind == RegexKind.EndOfFile)
    {
        typeToken = typeToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Missing_control_character,
            typeToken.GetSpan()));
        return new RegexControlEscapeNode(backslashToken, typeToken, CreateMissingToken(RegexKind.TextToken));
    }

    Debug.Assert(_currentToken.VirtualChars.Length == 1);

    var ch = _currentToken.VirtualChars[0].Value;

    unchecked
    {
        // From: https://github.com/dotnet/corefx/blob/80e220fc7009de0f0611ee6b52d4d5ffd25eb6c7/src/System.Text.RegularExpressions/src/System/Text/RegularExpressions/RegexParser.cs#L1450

        // Note: Roslyn accepts a control escape that current .NET parser does not.
        // Specifically: \c[
        //
        // It is a bug that the .NET parser does not support this construct. The bug was
        // reported at: https://github.com/dotnet/corefx/issues/26501 and was fixed for
        // CoreFx with https://github.com/dotnet/corefx/commit/80e220fc7009de0f0611ee6b52d4d5ffd25eb6c7
        //
        // Because it was a bug, we follow the correct behavior. That means we will not
        // report a diagnostic for a Regex that someone might run on a previous version of
        // .NET that ends up throwing at runtime. That's acceptable. Our goal is to match
        // the latest .NET 'correct' behavior. Not intermediary points with bugs that have
        // since been fixed.

        // \ca interpreted as \cA
        if (ch >= 'a' && ch <= 'z')
        {
            ch -= (char)('a' - 'A');
        }

        if (ch >= '@' && ch <= '_')
        {
            var controlToken = ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd).With(kind: RegexKind.TextToken);
            return new RegexControlEscapeNode(backslashToken, typeToken, controlToken);
        }
        else
        {
            typeToken = typeToken.AddDiagnosticIfNone(new EmbeddedDiagnostic(
                FeaturesResources.Unrecognized_control_character,
                _currentToken.GetSpan()));

            // Don't consume the bogus control character.
            return new RegexControlEscapeNode(backslashToken, typeToken, CreateMissingToken(RegexKind.TextToken));
        }
    }
}
/// <summary>
/// Parses <c>\p{Category}</c> / <c>\P{Category}</c>.  If the full <c>{name}</c> shape
/// isn't present (or the name is unknown), the 'p'/'P' is downgraded to a plain text
/// escape carrying the appropriate diagnostic.
/// </summary>
private RegexEscapeNode ParseCategoryEscape(RegexToken backslash, bool allowTriviaAfterEnd)
{
    Debug.Assert(_lexer.Text[_lexer.Position - 1] is var ch && (ch == 'P' || ch == 'p'));
    var typeToken = _currentToken;

    var start = _lexer.Position;

    if (!TryGetCategoryEscapeParts(
            allowTriviaAfterEnd,
            out var openBraceToken,
            out var categoryToken,
            out var closeBraceToken,
            out var message))
    {
        // Failed: rewind, and emit a text escape with the diagnostic produced above.
        ResetToPositionAndConsumeCurrentToken(start, allowTrivia: allowTriviaAfterEnd);
        typeToken = typeToken.With(kind: RegexKind.TextToken).AddDiagnosticIfNone(new EmbeddedDiagnostic(
            message, GetSpan(backslash, typeToken)));

        return new RegexSimpleEscapeNode(backslash, typeToken);
    }

    return new RegexCategoryEscapeNode(backslash, typeToken, openBraceToken, categoryToken, closeBraceToken);
}
/// <summary>
/// Attempts to scan the <c>{name}</c> portion of a <c>\p</c>/<c>\P</c> category escape.
/// On failure the out tokens are default and <paramref name="message"/> holds the
/// diagnostic text the caller should attach.
/// </summary>
private bool TryGetCategoryEscapeParts(
    bool allowTriviaAfterEnd,
    out RegexToken openBraceToken,
    out RegexToken categoryToken,
    out RegexToken closeBraceToken,
    out string message)
{
    openBraceToken = default;
    categoryToken = default;
    closeBraceToken = default;
    message = null;

    // Need at least "{x}" left in the pattern.
    if (_lexer.Text.Length - _lexer.Position < "{x}".Length)
    {
        message = FeaturesResources.Incomplete_character_escape;
        return false;
    }

    // no whitespace in \p{x}
    ConsumeCurrentToken(allowTrivia: false);

    if (_currentToken.Kind != RegexKind.OpenBraceToken)
    {
        message = FeaturesResources.Malformed_character_escape;
        return false;
    }

    openBraceToken = _currentToken;
    var category = _lexer.TryScanEscapeCategory();

    // no whitespace in \p{x}
    ConsumeCurrentToken(allowTrivia: false);
    if (_currentToken.Kind != RegexKind.CloseBraceToken)
    {
        message = FeaturesResources.Incomplete_character_escape;
        return false;
    }

    if (category == null)
    {
        message = FeaturesResources.Unknown_property;
        return false;
    }

    categoryToken = category.Value;
    closeBraceToken = ConsumeCurrentToken(allowTrivia: allowTriviaAfterEnd);
    return true;
}
private RegexTextNode ParseUnexpectedQuantifier(RegexExpressionNode lastExpression)
{
    // This stray quantifier becomes a bogus text element in the higher level sequence;
    // trivia is allowed afterwards to abide by the spirit of the native parser.
    var quantifierToken = ConsumeCurrentToken(allowTrivia: true);
    CheckQuantifierExpression(lastExpression, ref quantifierToken);
    return new RegexTextNode(quantifierToken.With(kind: RegexKind.TextToken));
}
private static void CheckQuantifierExpression(RegexExpressionNode current, ref RegexToken token)
{
    // A quantifier must follow something quantifiable: nothing at all (or a bare options
    // group) gets one diagnostic; a quantifier following a quantifier gets another.
    if (current is null || current.Kind == RegexKind.SimpleOptionsGrouping)
    {
        token = token.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            FeaturesResources.Quantifier_x_y_following_nothing, token.GetSpan()));
        return;
    }

    if (current is RegexQuantifierNode || current is RegexLazyQuantifierNode)
    {
        token = token.AddDiagnosticIfNone(new EmbeddedDiagnostic(
            string.Format(FeaturesResources.Nested_quantifier_0, token.VirtualChars.First()), token.GetSpan()));
    }
}
}
}
| 44.240194 | 194 | 0.578178 | [
"MIT"
] | Kuinox/roslyn | src/Features/Core/Portable/EmbeddedLanguages/RegularExpressions/RegexParser.cs | 91,358 | C# |
using Trowel.BspEditor.Primitives.MapObjects;
using Trowel.Common.Threading;
using Trowel.DataStructures.Geometric;
using System.Collections.Generic;
using System.Linq;
namespace Trowel.BspEditor.Tools.Vertex.Selection
{
public class MutableSolid
{
    // Thread-safe list of editable faces, built once from the source solid.
    public IList<MutableFace> Faces { get; }

    // Axis-aligned bounds spanning every vertex position across all faces,
    // recomputed on each access.
    public Box BoundingBox
    {
        get
        {
            var positions = Faces.SelectMany(face => face.Vertices.Select(vertex => vertex.Position));
            return new Box(positions);
        }
    }

    public MutableSolid(Solid solid)
    {
        var mutableFaces = solid.Faces.Select(face => new MutableFace(face));
        Faces = new ThreadSafeList<MutableFace>(mutableFaces);
    }
}
} | 30.368421 | 101 | 0.694974 | [
"BSD-3-Clause"
] | mattiascibien/trowel | Trowel.BspEditor.Tools/Vertex/Selection/MutableSolid.cs | 579 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Management.Automation;
namespace Microsoft.PowerShell.ScheduledJob
{
/// <summary>
/// Base class for NewScheduledJobOption, SetScheduledJobOption cmdlets.
/// </summary>
public abstract class ScheduledJobOptionCmdletBase : ScheduleJobCmdletBase
{
    #region Parameters

    /// <summary>
    /// Options parameter set name.
    /// </summary>
    protected const string OptionsParameterSet = "Options";

    /// <summary>
    /// Scheduled job task is run with elevated privileges when this switch is selected.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter RunElevated { get; set; }

    /// <summary>
    /// Scheduled job task is hidden in Windows Task Scheduler when true.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter HideInTaskScheduler { get; set; }

    /// <summary>
    /// Scheduled job task will be restarted when machine becomes idle. This is applicable
    /// only if the job was configured to stop when no longer idle.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter RestartOnIdleResume { get; set; }

    /// <summary>
    /// Provides task scheduler options for multiple running instances of the job.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public TaskMultipleInstancePolicy MultipleInstancePolicy { get; set; } = TaskMultipleInstancePolicy.IgnoreNew;

    /// <summary>
    /// Prevents the job task from being started manually via Task Scheduler UI.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter DoNotAllowDemandStart { get; set; }

    /// <summary>
    /// Allows the job task to be run only when network connection available.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter RequireNetwork { get; set; }

    /// <summary>
    /// Stops running job started by Task Scheduler if computer is no longer idle.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter StopIfGoingOffIdle { get; set; }

    /// <summary>
    /// Will wake the computer to run the job if computer is in sleep mode when
    /// trigger activates.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter WakeToRun { get; set; }

    /// <summary>
    /// Continue running task job if computer going on battery.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter ContinueIfGoingOnBattery { get; set; }

    /// <summary>
    /// Will start job task even if computer is running on battery power.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter StartIfOnBattery { get; set; }

    /// <summary>
    /// Specifies how long Task Scheduler will wait for idle time after a trigger has
    /// activated before giving up trying to run job during computer idle.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public TimeSpan IdleTimeout { get; set; } = TimeSpan.FromHours(1);

    /// <summary>
    /// How long the computer needs to be idle before a triggered job task is started.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public TimeSpan IdleDuration { get; set; } = TimeSpan.FromMinutes(10);

    /// <summary>
    /// Will start job task if machine is idle.
    /// </summary>
    [Parameter(ParameterSetName = OptionsParameterSet)]
    public SwitchParameter StartIfIdle { get; set; }

    #endregion

    #region Cmdlet Overrides

    /// <summary>
    /// Begin processing.
    /// </summary>
    protected override void BeginProcessing()
    {
        // Negative time spans are invalid; validate only when the caller explicitly
        // bound the parameter.
        if (MyInvocation.BoundParameters.ContainsKey(nameof(IdleTimeout)) &&
            IdleTimeout < TimeSpan.Zero)
        {
            throw new PSArgumentException(ScheduledJobErrorStrings.InvalidIdleTimeout);
        }

        if (MyInvocation.BoundParameters.ContainsKey(nameof(IdleDuration)) &&
            IdleDuration < TimeSpan.Zero)
        {
            throw new PSArgumentException(ScheduledJobErrorStrings.InvalidIdleDuration);
        }
    }

    #endregion
}
}
| 37.56701 | 106 | 0.629391 | [
"MIT"
] | Francisco-Gamino/PowerShell | src/Microsoft.PowerShell.ScheduledJob/commands/ScheduledJobOptionCmdletBase.cs | 7,288 | C# |
using System.Net;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Promitor.Agents.Core;
using Promitor.Agents.Core.Contracts;
using Promitor.Tests.Integration.Clients;
using Promitor.Tests.Integration.Extensions;
using Xunit;
using Xunit.Abstractions;
namespace Promitor.Tests.Integration.Services.Scraper
{
public class SystemTests : ScraperIntegrationTest
{
public SystemTests(ITestOutputHelper testOutput)
: base(testOutput)
{
}
[Fact]
public async Task System_GetInfo_ReturnsOk()
{
// Arrange
var scraperClient = new ScraperClient(Configuration, Logger);
// Act
var response = await scraperClient.GetSystemInfoAsync();
// Assert
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var rawPayload=await response.Content.ReadAsStringAsync();
Assert.NotEmpty(rawPayload);
var systemInfo = JsonConvert.DeserializeObject<SystemInfo>(rawPayload);
Assert.NotNull(systemInfo);
Assert.Equal(ExpectedVersion, systemInfo.Version);
Assert.True(response.Headers.Contains(HttpHeaders.AgentVersion));
Assert.Equal(ExpectedVersion, response.Headers.GetFirstOrDefaultHeaderValue(HttpHeaders.AgentVersion));
}
}
}
| 32.97561 | 115 | 0.681213 | [
"MIT"
] | 3lvia/promitor | src/Promitor.Tests.Integration/Services/Scraper/SystemTests.cs | 1,354 | C# |
using ICSharpCode.SharpZipLib.Core;
using ICSharpCode.SharpZipLib.Zip;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.OpenSsl;
using Org.BouncyCastle.Security;
using Renci.SshNet;
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace AesRsaBenchmark
{
    /// <summary>
    /// Benchmark/demo: AES-encrypts a folder into a password-protected zip,
    /// protects the zip password with RSA, then round-trips (decrypt + unzip).
    /// </summary>
    internal class Program
    {
        /// <summary>
        /// Demo flow: generate a random password, RSA-encrypt it to disk,
        /// AES-zip a folder with that password, then recover the password and
        /// unzip again. The SFTP upload step is left commented out.
        /// </summary>
        public static async Task Main()
        {
            var basePath = Directory.GetCurrentDirectory();
            var password = GetUniqueKey(128);
            var timestamp = $"{DateTime.Now:yyyyMMddHHmmssfff}";
            var outputPath = $"{basePath}\\output\\{timestamp}";
            var rsaPublic = $"{basePath}\\files\\public.key";
            var rsaPrivate = $"{basePath}\\files\\private.key";
            var folderToEncrypt = $"{basePath}\\files\\zip";
            var decryptedFolder = $"{outputPath}\\zip";
            var encryptedOutputFile = $"{outputPath}\\{timestamp}.zip";
            var encryptedPasswordFile = $"{outputPath}\\{timestamp}.txt";
            var sftpHost = "sftp.foo.com";
            var sftpUsername = "guest";
            if (!Directory.Exists(outputPath)) Directory.CreateDirectory(outputPath);
            // Persist the zip password, RSA-encrypted and base64-encoded.
            await File.WriteAllTextAsync(encryptedPasswordFile, Convert.ToBase64String(RSAEncrypt(password, rsaPublic)));
            await ZipDirectory(folderToEncrypt, encryptedOutputFile, password);
            // Round-trip: recover the password from disk and extract the archive.
            UnzipDirectory(encryptedOutputFile, decryptedFolder, RSADecrypt(Convert.FromBase64String(File.ReadAllText(encryptedPasswordFile)), rsaPrivate));
            //await UploadFiles(timestamp, outputPath, rsaPrivate, sftpHost, sftpUsername);
        }
        /// <summary>
        /// Uploads every file under <paramref name="outputPath"/> into a freshly
        /// created remote directory named after <paramref name="timestamp"/>,
        /// authenticating with the RSA private key file.
        /// </summary>
        private static async Task UploadFiles(string timestamp, string outputPath, string rsaPrivate, string sftpHost, string sftpUsername)
        {
            using var client = new SftpClient(sftpHost, sftpUsername, new PrivateKeyFile(File.OpenRead(rsaPrivate)))
            {
                BufferSize = 4096,
                OperationTimeout = TimeSpan.FromHours(1),
            };
            client.Connect();
            client.CreateDirectory(timestamp);
            foreach (var file in Directory.GetFiles(outputPath))
            {
                using var zipUpload = File.OpenRead(file);
                // Wrap SSH.NET's Begin/End upload pair in an awaitable task.
                await Task.Factory.FromAsync((callback, stateObject) => client.BeginUploadFile(zipUpload, $"{timestamp}\\{Path.GetFileName(file)}", callback, stateObject), result => client.EndUploadFile(result), null);
            }
            client.Disconnect();
        }
        /// <summary>
        /// Zips all files directly under <paramref name="DirectoryPath"/> into a
        /// password-protected, AES-256 archive at <paramref name="OutputFilePath"/>.
        /// Subdirectories are not traversed.
        /// </summary>
        private static async Task ZipDirectory(string DirectoryPath, string OutputFilePath, string password)
        {
            using var OutputStream = new ZipOutputStream(File.Create(OutputFilePath))
            {
                Password = password,
            };
            // 9 = maximum deflate compression level.
            OutputStream.SetLevel(9);
            var buffer = new byte[4096];
            foreach (var file in Directory.GetFiles(DirectoryPath))
            {
                var entry = new ZipEntry(Path.GetFileName(file))
                {
                    DateTime = DateTime.Now,
                    AESKeySize = 256,
                };
                OutputStream.PutNextEntry(entry);
                using var fs = File.OpenRead(file);
                int sourceBytes;
                // Copy in 4 KB chunks until the source is exhausted.
                do
                {
                    sourceBytes = await fs.ReadAsync(buffer.AsMemory(0, buffer.Length));
                    await OutputStream.WriteAsync(buffer.AsMemory(0, sourceBytes));
                } while (sourceBytes > 0);
            }
            OutputStream.Finish();
            OutputStream.Close();
        }
        /// <summary>
        /// Extracts all file entries of the password-protected archive at
        /// <paramref name="FileZipPath"/> into <paramref name="OutputFilePath"/>.
        /// </summary>
        private static void UnzipDirectory(string FileZipPath, string OutputFilePath, string password)
        {
            ZipFile file = null;
            try
            {
                // NOTE(review): if the ZipFile constructor throws, 'fs' is never
                // closed (ownership is only transferred in the finally block below).
                var fs = File.OpenRead(FileZipPath);
                file = new ZipFile(fs)
                {
                    Password = password
                };
                foreach (ZipEntry zipEntry in file)
                {
                    // Skip directory entries; only files are written out.
                    if (!zipEntry.IsFile)
                    {
                        continue;
                    }
                    var fullZipToPath = Path.Combine(OutputFilePath, zipEntry.Name);
                    var directoryName = Path.GetDirectoryName(fullZipToPath);
                    if (directoryName.Length > 0)
                    {
                        Directory.CreateDirectory(directoryName);
                    }
                    using var streamWriter = File.Create(fullZipToPath);
                    StreamUtils.Copy(file.GetInputStream(zipEntry), streamWriter, new byte[4096]);
                }
            }
            finally
            {
                if (file != null)
                {
                    // Let ZipFile.Close dispose the underlying FileStream too.
                    file.IsStreamOwner = true;
                    file.Close();
                }
            }
        }
        // RSA with PKCS#1 v1.5 padding (the 'false' argument disables OAEP).
        private static byte[] RSAEncrypt(string DataToEncrypt, string filename) => ImportPublicKey(filename).Encrypt(Encoding.ASCII.GetBytes(DataToEncrypt), false);
        private static string RSADecrypt(byte[] DataToDecrypt, string filename) => Encoding.ASCII.GetString(ImportPrivateKey(filename).Decrypt(DataToDecrypt, false));
        /// <summary>
        /// Reads a PEM-encoded RSA private key (via BouncyCastle) into a
        /// .NET <see cref="RSACryptoServiceProvider"/>.
        /// </summary>
        private static RSACryptoServiceProvider ImportPrivateKey(string keyPath)
        {
            var csp = new RSACryptoServiceProvider();
            csp.ImportParameters(DotNetUtilities.ToRSAParameters((RsaPrivateCrtKeyParameters)((AsymmetricCipherKeyPair)new PemReader(new StringReader(File.ReadAllText(keyPath))).ReadObject()).Private));
            return csp;
        }
        /// <summary>
        /// Reads a PEM-encoded RSA public key (via BouncyCastle) into a
        /// .NET <see cref="RSACryptoServiceProvider"/>.
        /// </summary>
        private static RSACryptoServiceProvider ImportPublicKey(string keyPath)
        {
            var csp = new RSACryptoServiceProvider();
            csp.ImportParameters(DotNetUtilities.ToRSAParameters((RsaKeyParameters)(AsymmetricKeyParameter)new PemReader(new StringReader(File.ReadAllText(keyPath))).ReadObject()));
            return csp;
        }
        /// <summary>
        /// Generates a cryptographically random alphanumeric string of
        /// <paramref name="size"/> characters.
        /// NOTE(review): 'rnd % 62' has a slight modulo bias since 62 does not
        /// divide 2^32; RNGCryptoServiceProvider is also obsolete in newer .NET
        /// (RandomNumberGenerator.Fill is the replacement). Harmless for a demo.
        /// </summary>
        public static string GetUniqueKey(int size)
        {
            var chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890".ToCharArray();
            var data = new byte[4 * size];
            using (var crypto = new RNGCryptoServiceProvider())
            {
                crypto.GetBytes(data);
            }
            var result = new StringBuilder(size);
            for (var i = 0; i < size; i++)
            {
                // Consume 4 random bytes per output character.
                var rnd = BitConverter.ToUInt32(data, i * 4);
                var idx = rnd % chars.Length;
                result.Append(chars[idx]);
            }
            return result.ToString();
        }
    }
}
| 39.110465 | 218 | 0.580497 | [
"MIT"
] | Xeinaemm/AesRsa | source/AesRsa/Program.cs | 6,729 | C# |
/*
Copyright (c) 2019, Lars Brubaker, John Lewin
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
*/
using System;
using System.Collections.Generic;
using MatterHackers.MatterControl.SlicerConfiguration;
using MatterHackers.VectorMath;
namespace MatterControl.Printing.PrintLeveling
{
public class LevelWizardMesh : LevelingPlan
{
private int gridWidth;
private int gridHeight;
public LevelWizardMesh(PrintHostConfig printer, int width, int height)
: base(printer)
{
this.gridWidth = width;
this.gridHeight = height;
}
public override int ProbeCount => gridWidth * gridHeight;
public override IEnumerable<Vector2> GetPrintLevelPositionToSample()
{
Vector2 bedSize = printer.Settings.GetValue<Vector2>(SettingsKey.bed_size);
Vector2 printCenter = printer.Settings.GetValue<Vector2>(SettingsKey.print_center);
if (printer.Settings.GetValue<BedShape>(SettingsKey.bed_shape) == BedShape.Circular)
{
// reduce the bed size by the ratio of the radius (square root of 2) so that the sample positions will fit on a circular bed
bedSize *= 1.0 / Math.Sqrt(2);
}
double halfXSize = (bedSize.X / 2) * .8;
double xStep = (halfXSize * 2) / (gridWidth - 1);
double halfYSize = (bedSize.Y / 2) * .8;
double yStep = (halfYSize * 2) / (gridHeight - 1);
for (int y = 0; y < gridHeight; y++)
{
// make it such that every other line is printed from right to left
for (int x = 0; x < gridWidth; x++)
{
int dirX = x;
if ((y % 2) == 1)
{
dirX = (gridWidth - 1) - x;
}
var samplePosition = new Vector2();
samplePosition.X = printCenter.X - halfXSize + (dirX * xStep);
samplePosition.Y = printCenter.Y - halfYSize + (y * yStep);
yield return samplePosition;
}
}
}
}
} | 37 | 128 | 0.739981 | [
"BSD-2-Clause"
] | jingliang2005/MatterControl | MatterControl.Printing/PrinterCommunication/Leveling/LevelWizardMesh.cs | 3,221 | C# |
namespace TddShop.Cli.Order.Models
{
    /// <summary>
    /// A line item in an order: a named, categorized product with its unit
    /// price and the quantity ordered.
    /// </summary>
    public class ItemModel
    {
        /// <summary>Display name of the item.</summary>
        public string Name { get; set; }
        /// <summary>Category the item belongs to.</summary>
        public string Category { get; set; }
        /// <summary>Unit price of the item.</summary>
        public decimal Price { get; set; }
        /// <summary>Number of units ordered.</summary>
        public int Quantity { get; set; }
    }
}
| 22.727273 | 44 | 0.58 | [
"MIT"
] | MDhouse/TDD | TddShop/TddShop.Cli/Order/Models/ItemModel.cs | 252 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace kubiaaspnet
{
public class Program
{
public static void Main(string[] args)
{
CreateHostBuilder(args).Build().Run();
}
public static IHostBuilder CreateHostBuilder(string[] args) =>
Host.CreateDefaultBuilder(args)
.ConfigureWebHostDefaults(webBuilder =>
{
webBuilder.UseStartup<Startup>();
});
}
}
| 25.666667 | 70 | 0.645022 | [
"MIT"
] | AnturGyffrous/kubernetes-in-action | Second Edition/Chapter 2/kubiaaspnet/kubiaaspnet/Program.cs | 693 | C# |
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
namespace DiscordPipeImpersonator.Payload
{
public class EnumValueAttribute : Attribute
{
public string Value { get; set; }
public EnumValueAttribute(string value)
{
this.Value = value;
}
}
    /// <summary>
    /// Newtonsoft.Json converter that serializes enum members using the string
    /// attached via <see cref="EnumValueAttribute"/> (falling back to the member
    /// name), and deserializes by matching that attribute string.
    /// </summary>
    public class EnumSnakeCaseConverter : JsonConverter
    {
        /// <summary>Only enum types are handled by this converter.</summary>
        public override bool CanConvert(Type objectType)
        {
            return objectType.IsEnum;
        }
        /// <summary>
        /// Reads an enum value from its attribute string; returns
        /// <paramref name="existingValue"/> if no member matches, or null for a
        /// null token.
        /// </summary>
        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
        {
            if (reader.Value == null) return null;
            object val = null;
            if (TryParseEnum(objectType, (string)reader.Value, out val))
                return val;
            return existingValue;
        }
        /// <summary>
        /// Writes the enum's attribute string if present, otherwise its plain name.
        /// </summary>
        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
        {
            var enumtype = value.GetType();
            var name = Enum.GetName(enumtype, value);
            // Get each member and look for the correct one
            var members = enumtype.GetMembers(BindingFlags.Public | BindingFlags.Static);
            foreach (var m in members)
            {
                if (m.Name.Equals(name))
                {
                    var attributes = m.GetCustomAttributes(typeof(EnumValueAttribute), true);
                    if (attributes.Length > 0)
                    {
                        // Replace the member name with its wire-format string.
                        name = ((EnumValueAttribute)attributes[0]).Value;
                    }
                }
            }
            writer.WriteValue(name);
        }
        /// <summary>
        /// Attempts to resolve <paramref name="str"/> to a member of
        /// <paramref name="enumType"/> (Nullable-unwrapped) by comparing against
        /// each member's <see cref="EnumValueAttribute"/> string.
        /// </summary>
        public bool TryParseEnum(Type enumType, string str, out object obj)
        {
            // Make sure the string isn't null
            if (str == null)
            {
                obj = null;
                return false;
            }
            // Get the real type (unwrap Nullable<T>)
            Type type = enumType;
            if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>))
                type = type.GetGenericArguments().First();
            // Make sure it's actually an enum
            if (!type.IsEnum)
            {
                obj = null;
                return false;
            }
            // Get each member and look for the correct one
            var members = type.GetMembers(BindingFlags.Public | BindingFlags.Static);
            foreach (var m in members)
            {
                var attributes = m.GetCustomAttributes(typeof(EnumValueAttribute), true);
                foreach (var a in attributes)
                {
                    var enumval = (EnumValueAttribute)a;
                    if (str.Equals(enumval.Value))
                    {
                        obj = Enum.Parse(type, m.Name, ignoreCase: true);
                        return true;
                    }
                }
            }
            // We failed
            obj = null;
            return false;
        }
    }
internal class EventPayload : IPayload
{
/// <summary>
/// The data the server sent too us
/// </summary>
[JsonProperty("data", NullValueHandling = NullValueHandling.Ignore)]
public JObject Data { get; set; }
/// <summary>
/// The type of event the server sent
/// </summary>
[JsonProperty("evt"), JsonConverter(typeof(EnumSnakeCaseConverter))]
public ServerEvent? Event { get; set; }
public EventPayload() : base() { Data = null; }
public EventPayload(long nonce) : base(nonce) { Data = null; }
/// <summary>
/// Sets the obejct stored within the data.
/// </summary>
/// <param name="obj"></param>
public void SetObject(object obj)
{
Data = JObject.FromObject(obj);
}
/// <summary>
/// Gets the object stored within the Data
/// </summary>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
public T GetObject<T>()
{
if (Data == null) return default(T);
return Data.ToObject<T>();
}
public override string ToString()
{
return "Event " + base.ToString() + ", Event: " + (Event.HasValue ? Event.ToString() : "N/A");
}
}
    /// <summary>
    /// The possible commands that can be sent and received by the server.
    /// Members without an <see cref="EnumValueAttribute"/> serialize under their
    /// plain member name.
    /// </summary>
    enum Command
    {
        /// <summary>
        /// Event dispatch.
        /// </summary>
        [EnumValue("DISPATCH")]
        Dispatch,

        /// <summary>
        /// Called to set the Rich Presence activity.
        /// </summary>
        [EnumValue("SET_ACTIVITY")]
        SetActivity,

        /// <summary>
        /// Used to subscribe to an RPC event.
        /// </summary>
        [EnumValue("SUBSCRIBE")]
        Subscribe,

        /// <summary>
        /// Used to unsubscribe from an RPC event.
        /// </summary>
        [EnumValue("UNSUBSCRIBE")]
        Unsubscribe,

        /// <summary>
        /// Used to accept join requests.
        /// </summary>
        [EnumValue("SEND_ACTIVITY_JOIN_INVITE")]
        SendActivityJoinInvite,

        /// <summary>
        /// Used to reject join requests.
        /// </summary>
        [EnumValue("CLOSE_ACTIVITY_JOIN_REQUEST")]
        CloseActivityJoinRequest,

        /// <summary>
        /// Used to authorize a new client with your app.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        Authorize,

        /// <summary>
        /// Used to authenticate an existing client with your app.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        Authenticate,

        /// <summary>
        /// Used to retrieve guild information from the client.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GetGuild,

        /// <summary>
        /// Used to retrieve a list of guilds from the client.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GetGuilds,

        /// <summary>
        /// Used to retrieve channel information from the client.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GetChannel,

        /// <summary>
        /// Used to retrieve a list of channels for a guild from the client.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GetChannels,

        /// <summary>
        /// Used to change voice settings of users in voice channels.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        SetUserVoiceSettings,

        /// <summary>
        /// Used to join or leave a voice channel, group dm, or dm.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        SelectVoiceChannel,

        /// <summary>
        /// Used to get the current voice channel the client is in.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GetSelectedVoiceChannel,

        /// <summary>
        /// Used to join or leave a text channel, group dm, or dm.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        SelectTextChannel,

        /// <summary>
        /// Used to retrieve the client's voice settings.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GetVoiceSettings,

        /// <summary>
        /// Used to set the client's voice settings.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        SetVoiceSettings,

        /// <summary>
        /// Used to capture a keyboard shortcut entered by the user.
        /// </summary>
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        CaptureShortcut
    }
    /// <summary>
    /// Events the server can send to the client.
    /// </summary>
    enum ServerEvent
    {
        /// <summary>
        /// Sent when the server is ready to accept messages.
        /// </summary>
        [EnumValue("READY")]
        Ready,

        /// <summary>
        /// Sent when something bad has happened.
        /// </summary>
        [EnumValue("ERROR")]
        Error,

        /// <summary>
        /// Join event.
        /// </summary>
        [EnumValue("ACTIVITY_JOIN")]
        ActivityJoin,

        /// <summary>
        /// Spectate event.
        /// </summary>
        [EnumValue("ACTIVITY_SPECTATE")]
        ActivitySpectate,

        /// <summary>
        /// Join request event.
        /// </summary>
        [EnumValue("ACTIVITY_JOIN_REQUEST")]
        ActivityJoinRequest,

        #region RPC Protocols
        // RPC-only events; kept for protocol completeness but unsupported here.
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GuildStatus,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        GuildCreate,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        ChannelCreate,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        VoiceChannelSelect,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        VoiceStateCreated,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        VoiceStateUpdated,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        VoiceStateDelete,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        VoiceSettingsUpdate,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        VoiceConnectionStatus,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        SpeakingStart,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        SpeakingStop,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        MessageCreate,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        MessageUpdate,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        MessageDelete,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        NotificationCreate,
        [Obsolete("This value is appart of the RPC API and is not supported by this library.", true)]
        CaptureShortcutChange
        #endregion
    }
internal abstract class IPayload
{
/// <summary>
/// The type of payload
/// </summary>
[JsonProperty("cmd"), JsonConverter(typeof(EnumSnakeCaseConverter))]
public Command Command { get; set; }
/// <summary>
/// A incremental value to help identify payloads
/// </summary>
[JsonProperty("nonce")]
public string Nonce { get; set; }
public IPayload() { }
public IPayload(long nonce)
{
Nonce = nonce.ToString();
}
public override string ToString()
{
return "Payload || Command: " + Command.ToString() + ", Nonce: " + (Nonce != null ? Nonce.ToString() : "NULL");
}
}
}
| 33.776243 | 124 | 0.560972 | [
"Apache-2.0"
] | UWPCommunity/Quarrel | src/_Libs/NamedPipeServer/Payload/IPayload.cs | 12,229 | C# |
namespace WindowsServiceClient
{
    partial class Form1
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;
        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }
        #region Windows 窗体设计器生成的代码
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.button1 = new System.Windows.Forms.Button();
            this.button2 = new System.Windows.Forms.Button();
            this.button3 = new System.Windows.Forms.Button();
            this.button4 = new System.Windows.Forms.Button();
            this.listView1 = new System.Windows.Forms.ListView();
            this.button5 = new System.Windows.Forms.Button();
            this.button6 = new System.Windows.Forms.Button();
            this.SuspendLayout();
            //
            // button1 (install service)
            //
            this.button1.Location = new System.Drawing.Point(145, 12);
            this.button1.Name = "button1";
            this.button1.Size = new System.Drawing.Size(75, 23);
            this.button1.TabIndex = 0;
            this.button1.Text = "安装";
            this.button1.UseVisualStyleBackColor = true;
            this.button1.Click += new System.EventHandler(this.button1_Click);
            //
            // button2 (start service)
            //
            this.button2.Location = new System.Drawing.Point(255, 12);
            this.button2.Name = "button2";
            this.button2.Size = new System.Drawing.Size(75, 23);
            this.button2.TabIndex = 0;
            this.button2.Text = "启动";
            this.button2.UseVisualStyleBackColor = true;
            this.button2.Click += new System.EventHandler(this.button2_Click);
            //
            // button3 (stop service)
            //
            this.button3.Location = new System.Drawing.Point(369, 12);
            this.button3.Name = "button3";
            this.button3.Size = new System.Drawing.Size(75, 23);
            this.button3.TabIndex = 0;
            this.button3.Text = "停止";
            this.button3.UseVisualStyleBackColor = true;
            this.button3.Click += new System.EventHandler(this.button3_Click);
            //
            // button4 (uninstall service)
            //
            this.button4.Location = new System.Drawing.Point(472, 12);
            this.button4.Name = "button4";
            this.button4.Size = new System.Drawing.Size(75, 23);
            this.button4.TabIndex = 0;
            this.button4.Text = "卸载";
            this.button4.UseVisualStyleBackColor = true;
            this.button4.Click += new System.EventHandler(this.button4_Click);
            //
            // listView1
            //
            this.listView1.HideSelection = false;
            this.listView1.Location = new System.Drawing.Point(12, 80);
            this.listView1.Name = "listView1";
            this.listView1.Size = new System.Drawing.Size(1116, 412);
            this.listView1.TabIndex = 1;
            this.listView1.UseCompatibleStateImageBehavior = false;
            this.listView1.View = System.Windows.Forms.View.Details;
            //
            // button5 (list all services)
            //
            this.button5.Location = new System.Drawing.Point(134, 511);
            this.button5.Name = "button5";
            this.button5.Size = new System.Drawing.Size(106, 23);
            this.button5.TabIndex = 2;
            this.button5.Text = "获取所有服务";
            this.button5.UseVisualStyleBackColor = true;
            this.button5.Click += new System.EventHandler(this.button5_Click);
            //
            // button6 (get info)
            //
            this.button6.Location = new System.Drawing.Point(273, 511);
            this.button6.Name = "button6";
            this.button6.Size = new System.Drawing.Size(99, 23);
            this.button6.TabIndex = 3;
            this.button6.Text = "获取信息";
            this.button6.UseVisualStyleBackColor = true;
            this.button6.Click += new System.EventHandler(this.button6_Click);
            //
            // Form1
            //
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(1140, 687);
            this.Controls.Add(this.button6);
            this.Controls.Add(this.button5);
            this.Controls.Add(this.listView1);
            this.Controls.Add(this.button4);
            this.Controls.Add(this.button3);
            this.Controls.Add(this.button2);
            this.Controls.Add(this.button1);
            this.Name = "Form1";
            this.Text = "Form1";
            this.ResumeLayout(false);
        }
        #endregion
        private System.Windows.Forms.Button button1;
        private System.Windows.Forms.Button button2;
        private System.Windows.Forms.Button button3;
        private System.Windows.Forms.Button button4;
        private System.Windows.Forms.ListView listView1;
        private System.Windows.Forms.Button button5;
        private System.Windows.Forms.Button button6;
    }
| 39.335714 | 78 | 0.556746 | [
"Apache-2.0"
] | ggwhsd/CSharpStudy | OtherProjects/DemoService1/WindowsServiceClient/Form1.Designer.cs | 5,699 | C# |
/// <summary>
/// Simple mutable 2D integer vector with in-place addition and a horizontal
/// segment collision test.
/// </summary>
public class Vector2
{
    public int X;
    public int Y;

    /// <summary>Creates a vector at the given coordinates.</summary>
    public Vector2(int x, int y)
    {
        X = x;
        Y = y;
    }

    /// <summary>Adds the components of <paramref name="value"/> to this vector in place.</summary>
    public void Add(Vector2 value)
    {
        X += value.X;
        Y += value.Y;
    }

    /// <summary>
    /// Returns true when this point lies on the horizontal segment that starts
    /// at <paramref name="value"/> and extends <paramref name="length"/> cells
    /// to the right (end exclusive).
    /// </summary>
    public bool IsColliding(Vector2 value, int length)
    {
        return Y == value.Y
            && X >= value.X
            && X < value.X + length;
    }
}
"MIT"
] | Bullsized/Zariba-Game-Academy | 01 Introduction to Programming with Games/01-12 The Spaghetti Meteor Frenzy/MeteorFrenzy/MeteorFrenzy/Classes/Vector2.cs | 468 | C# |
/*
* Copyright (c) Contributors, http://vision-sim.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
* For an explanation of the license of each contributor and the content it
* covers please see the Licenses directory.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Vision-Sim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System.Collections.Generic;
using OpenMetaverse;
namespace Vision.Framework.Utilities
{
    /// <summary>
    /// Minimal account contract required by <see cref="GenericAccountCache{T}"/>:
    /// a principal id for the primary lookup and a name for the secondary one.
    /// </summary>
    public interface BaseCacheAccount
    {
        UUID PrincipalID { get; set; }
        string Name { get; set; }
    }
    /// <summary>
    /// Expiring two-index cache for account objects: primary lookup by UUID,
    /// secondary lookup by name. Null accounts are cached only after repeated
    /// misses to dampen lookups for nonexistent users.
    /// </summary>
    public class GenericAccountCache<T> where T : BaseCacheAccount
    {
        // NOTE(review): the field name says "seconds", the comment below says
        // "6 hour cache", but 6*60*1000 = 360000 matches neither (6 h is 21600 s
        // or 21,600,000 ms). The effective unit depends on ExpiringCache's
        // AddOrUpdate contract — confirm before changing.
        private double CACHE_EXPIRATION_SECONDS = 6*60*1000;
        // 6 hour cache on user accounts, since they should not change
        // Always true here; Cache() keeps a guard for the false case anyway.
        private bool m_allowNullCaching = true;
        private readonly ExpiringCache<string, UUID> m_NameCache;
        private readonly ExpiringCache<UUID, T> m_UUIDCache;
        // Counts consecutive null results per user; null is only cached after 5.
        private readonly Dictionary<UUID, int> m_nullCacheTimes = new Dictionary<UUID, int>();
        public GenericAccountCache()
        {
            m_UUIDCache = new ExpiringCache<UUID, T>();
            m_NameCache = new ExpiringCache<string, UUID>();
        }
        /// <summary>
        /// Creates a cache with a custom expiration (same unit caveat as above).
        /// </summary>
        public GenericAccountCache(double expirationTime)
        {
            CACHE_EXPIRATION_SECONDS = expirationTime;
            m_UUIDCache = new ExpiringCache<UUID, T>();
            m_NameCache = new ExpiringCache<string, UUID>();
        }
        /// <summary>
        /// Stores <paramref name="account"/> for <paramref name="userID"/>.
        /// A null account is only cached after five consecutive null attempts;
        /// a non-null account resets that counter.
        /// </summary>
        public void Cache(UUID userID, T account)
        {
            if (!m_allowNullCaching && account == null)
                return;
            if (account == null)
            {
                if (!m_nullCacheTimes.ContainsKey(userID))
                    m_nullCacheTimes[userID] = 0;
                else
                    m_nullCacheTimes[userID]++;
                if (m_nullCacheTimes[userID] < 5)
                    return;
            }
            else if (m_nullCacheTimes.ContainsKey(userID))
                m_nullCacheTimes.Remove(userID);
            // Cache even null accounts
            m_UUIDCache.AddOrUpdate(userID, account, CACHE_EXPIRATION_SECONDS);
            if (account != null && !string.IsNullOrEmpty(account.Name))
                m_NameCache.AddOrUpdate(account.Name, account.PrincipalID, CACHE_EXPIRATION_SECONDS);
            //MainConsole.Instance.DebugFormat("[USER CACHE]: cached user {0}", userID);
        }
        /// <summary>Evicts both the UUID and the name entry.</summary>
        public void Remove(UUID userID, string name)
        {
            m_UUIDCache.Remove(userID);
            m_NameCache.Remove(name);
        }
        /// <summary>Primary lookup by UUID; may yield a cached null account.</summary>
        public bool Get(UUID userID, out T account)
        {
            if (m_UUIDCache.TryGetValue(userID, out account))
                return true;
            return false;
        }
        /// <summary>Secondary lookup: name -> UUID -> account.</summary>
        public bool Get(string name, out T account)
        {
            account = default(T);
            UUID uuid = UUID.Zero;
            if (m_NameCache.TryGetValue(name, out uuid))
                if (m_UUIDCache.TryGetValue(uuid, out account))
                    return true;
            return false;
        }
    }
} | 40.725664 | 102 | 0.629509 | [
"MIT"
] | VisionSim/Vision-Sim | Vision/Framework/Utilities/GenericAccountCache.cs | 4,602 | C# |
using Hik.DataAccess.Metadata;
using System;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Hik.DataAccess.Data
{
    /// <summary>
    /// EF entity for a downloaded media file, linked to the job trigger that
    /// produced it and to its download/delete history rows.
    /// </summary>
    [Table(Tables.MediaFile)]
    public class MediaFile
    {
        /// <summary>Auto-generated primary key.</summary>
        [Key]
        [DatabaseGenerated(DatabaseGeneratedOption.Identity)]
        public int Id { get; set; }
        /// <summary>Foreign key to the owning <see cref="JobTrigger"/>.</summary>
        [ForeignKey("JobTrigger")]
        public int JobTriggerId { get; set; }
        public string Name { get; set; }
        public string Path { get; set; }
        /// <summary>Timestamp of the media, shown with the shared display format.</summary>
        [Display(Name = "Date"), DisplayFormat(DataFormatString = Consts.DisplayDateTimeFormat), DataType(DataType.DateTime)]
        public DateTime Date { get; set; }
        // Duration is nullable; presumably only video/recording files have one — confirm.
        public int? Duration { get; set; }
        /// <summary>File size in bytes.</summary>
        public long Size { get; set; }
        public JobTrigger JobTrigger { get; set; }
        public virtual DownloadDuration DownloadDuration { get; set; }
        public virtual DownloadHistory DownloadHistory { get; set; }
        public virtual DeleteHistory DeleteHistory { get; set; }
    }
}
| 27.342105 | 125 | 0.661213 | [
"MIT"
] | vov4uk/HikConsole | src/Hik.DataAccess/Data/MediaFile.cs | 1,041 | C# |
using System.Collections.Generic;
using System.Threading.Tasks;
namespace BotsDotNet.WebExTeams.SparkDotNet
{
public partial class Spark
{
    private string membershipsBase = "/v1/memberships";

    /// <summary>
    /// Lists room memberships. With no filters this returns memberships for
    /// rooms the authenticated user belongs to.
    /// - roomId restricts the listing to a single room.
    /// - personId or personEmail narrows the results to one person.
    /// </summary>
    /// <param name="roomId">Optional room id filter.</param>
    /// <param name="personId">Optional person id filter.</param>
    /// <param name="personEmail">Optional person email filter.</param>
    /// <param name="max">Maximum result count; 0 means no limit is sent.</param>
    /// <returns>A List of Membership objects.</returns>
    public async Task<List<Membership>> GetMembershipsAsync(string roomId = null, string personId = null, string personEmail = null, int max = 0)
    {
        var query = new Dictionary<string, string>();
        if (roomId != null) { query.Add("roomId", roomId); }
        if (personId != null) { query.Add("personId", personId); }
        if (personEmail != null) { query.Add("personEmail", personEmail); }
        if (max > 0) { query.Add("max", max.ToString()); }
        return await GetItemsAsync<Membership>(getURL(membershipsBase, query));
    }

    /// <summary>
    /// Fetches the details of a single membership by its id.
    /// </summary>
    /// <param name="membershipId">Id of the membership to fetch.</param>
    /// <returns>Membership object.</returns>
    public async Task<Membership> GetMembershipAsync(string membershipId)
    {
        var endpoint = getURL($"{membershipsBase}/{membershipId}", new Dictionary<string, string>());
        return await GetItemAsync<Membership>(endpoint);
    }

    /// <summary>
    /// Adds a person to a room, identified by person id or email address,
    /// optionally making them a moderator.
    /// </summary>
    /// <param name="roomId">Id of the target room.</param>
    /// <param name="personId">Optional person id.</param>
    /// <param name="personEmail">Optional person email.</param>
    /// <param name="isModerator">Whether the new member is a moderator.</param>
    /// <returns>Membership object.</returns>
    public async Task<Membership> CreateMembershipAsync(string roomId, string personId = null, string personEmail = null, bool isModerator = false)
    {
        var body = new Dictionary<string, object> { { "roomId", roomId } };
        if (personId != null) { body.Add("personId", personId); }
        if (personEmail != null) { body.Add("personEmail", personEmail); }
        body.Add("isModerator", isModerator);
        return await PostItemAsync<Membership>(membershipsBase, body);
    }

    /// <summary>
    /// Removes a membership by id.
    /// </summary>
    /// <param name="membershipId">Id of the membership to delete.</param>
    /// <returns>Boolean representing the success of the operation.</returns>
    public async Task<bool> DeleteMembershipAsync(string membershipId)
    {
        var endpoint = $"{membershipsBase}/{membershipId}";
        return await DeleteItemAsync(endpoint);
    }

    /// <summary>
    /// Updates the moderator flag of a membership by id.
    /// </summary>
    /// <param name="membershipId">Id of the membership to update.</param>
    /// <param name="isModerator">New moderator state.</param>
    /// <returns>Membership object.</returns>
    public async Task<Membership> UpdateMembershipAsync(string membershipId, bool isModerator)
    {
        var body = new Dictionary<string, object> { { "isModerator", isModerator } };
        return await UpdateItemAsync<Membership>($"{membershipsBase}/{membershipId}", body);
    }
}
} | 44.782609 | 151 | 0.604854 | [
"MIT"
] | JTOne123/botsdotnet | BotsDotNet.WebExTeams/SparkDotNet/APIPartials/SparkMemberships.cs | 4,120 | C# |
/* New BSD License
-------------------------------------------------------------------------------
Copyright (c) 2006-2012, EntitySpaces, LLC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the EntitySpaces, LLC nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL EntitySpaces, LLC BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-------------------------------------------------------------------------------
*/
using System;
using System.Data;
namespace EntitySpaces.MetadataEngine.MySql
{
public class MySqlParameters : Parameters
{
    public MySqlParameters()
    {
    }

    // Intended to load stored-procedure parameter metadata for MySQL.
    // The OLE-DB based implementation is commented out, so this override is
    // currently a no-op; the empty catch deliberately swallows any failure
    // (best-effort metadata loading — do not let it crash generation).
    override internal void LoadAll()
    {
        try
        {
            // DataTable metaData = this.LoadData(OleDbSchemaGuid.Procedure_Parameters,
            //    new object[]{this.Procedure.Database.Name, null, this.Procedure.Name});
            //
            // PopulateArray(metaData);
        }
        catch {}
    }
}
}
| 38.690909 | 79 | 0.710056 | [
"Unlicense"
] | EntitySpaces/EntitySpaces-CompleteSource | CodeGeneration/ClassLibraries/EntitySpaces.MetadataEngine/MySql/Parameters.cs | 2,128 | C# |
/* INFINITY CODE 2013-2019 */
/* http://www.infinity-code.com */
using UnityEngine;
namespace InfinityCode.RealWorldTerrain.Tools
{
public static class RealWorldTerrainUpdateNeighbors
{
    /// <summary>
    /// Wires up Terrain.SetNeighbors for every terrain in the container's
    /// row-major grid (edge cells get null neighbors), then flushes each
    /// terrain so the changes take effect.
    /// </summary>
    /// <param name="container">Container holding the terrain grid.</param>
    public static void Update(RealWorldTerrainContainer container)
    {
        RealWorldTerrainVector2i count = container.terrainCount;
        RealWorldTerrainItem[] items = container.terrains;

        for (int row = 0; row < count.y; row++)
        {
            for (int col = 0; col < count.x; col++)
            {
                int i = row * count.x + col;

                // Neighbors in the flat row-major array; null on grid edges.
                Terrain leftTerrain = col > 0 ? items[i - 1].terrain : null;
                Terrain rightTerrain = col < count.x - 1 ? items[i + 1].terrain : null;
                Terrain bottomTerrain = row > 0 ? items[i - count.x].terrain : null;
                Terrain topTerrain = row < count.y - 1 ? items[i + count.x].terrain : null;

                items[i].terrain.SetNeighbors(leftTerrain, topTerrain, rightTerrain, bottomTerrain);
            }
        }

        for (int i = 0; i < items.Length; i++) items[i].terrain.Flush();
    }
}
} | 41 | 107 | 0.562602 | [
"MIT"
] | zFz0000/UnitySimulasiGerakParabola | Assets/Infinity Code/Real World Terrain/Scripts/Editor/Tools/RealWorldTerrainUpdateNeighbors.cs | 1,230 | C# |
namespace WebApi.Modules.Common.FeatureFlags
{
    /// <summary>
    /// Feature flag names consumed by feature management; each member toggles
    /// one endpoint or cross-cutting capability.
    /// </summary>
    public enum CustomFeature
    {
        /// <summary>
        /// Create Product.
        /// </summary>
        CreateProduct,

        /// <summary>
        /// Get Product.
        /// </summary>
        GetProduct,

        /// <summary>
        /// Get Products.
        /// </summary>
        GetProducts,

        /// <summary>
        /// Filter errors out.
        /// </summary>
        ErrorFilter,

        /// <summary>
        /// Use Swagger.
        /// </summary>
        Swagger,

        /// <summary>
        /// Use SQL Server Persistence.
        /// </summary>
        SQLServer,

        /// <summary>
        /// Edit Product.
        /// </summary>
        EditProduct,

        /// <summary>
        /// Delete Product.
        /// </summary>
        DeleteProduct,

        /// <summary>
        /// Get All Products.
        /// </summary>
        GetAllProducts
    }
}
| 19.37037 | 44 | 0.413958 | [
"Apache-2.0"
] | satishchatap/zellar | WebApi/Modules/Common/FeatureFlags/CustomFeature.cs | 1,046 | C# |
namespace Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Models.Api20200601
{
using static Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Extensions;
/// <summary>Properties of a private endpoint connection.</summary>
// NOTE(review): generated-style JSON (de)serialization partial — the partial
// methods below are customization hooks; implement them in another partial
// file rather than editing this one.
public partial class PrivateEndpointConnectionProperties
{

    /// <summary>
    /// <c>AfterFromJson</c> will be called after the json deserialization has finished, allowing customization of the object
    /// before it is returned. Implement this method in a partial class to enable this behavior.
    /// </summary>
    /// <param name="json">The JsonNode that should be deserialized into this object.</param>
    partial void AfterFromJson(Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject json);

    /// <summary>
    /// <c>AfterToJson</c> will be called after the json serialization has finished, allowing customization of the <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject"
    /// /> before it is returned. Implement this method in a partial class to enable this behavior.
    /// </summary>
    /// <param name="container">The JSON container that the serialization result will be placed in.</param>
    partial void AfterToJson(ref Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject container);

    /// <summary>
    /// <c>BeforeFromJson</c> will be called before the json deserialization has commenced, allowing complete customization of
    /// the object before it is deserialized.
    /// If you wish to disable the default deserialization entirely, return <c>true</c> in the <paramref name="returnNow" /> output parameter.
    /// Implement this method in a partial class to enable this behavior.
    /// </summary>
    /// <param name="json">The JsonNode that should be deserialized into this object.</param>
    /// <param name="returnNow">Determines if the rest of the deserialization should be processed, or if the method should return
    /// instantly.</param>
    partial void BeforeFromJson(Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject json, ref bool returnNow);

    /// <summary>
    /// <c>BeforeToJson</c> will be called before the json serialization has commenced, allowing complete customization of the
    /// object before it is serialized.
    /// If you wish to disable the default serialization entirely, return <c>true</c> in the <paramref name="returnNow" /> output parameter.
    /// Implement this method in a partial class to enable this behavior.
    /// </summary>
    /// <param name="container">The JSON container that the serialization result will be placed in.</param>
    /// <param name="returnNow">Determines if the rest of the serialization should be processed, or if the method should return
    /// instantly.</param>
    partial void BeforeToJson(ref Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject container, ref bool returnNow);

    /// <summary>
    /// Deserializes a <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode"/> into an instance of Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Models.Api20200601.IPrivateEndpointConnectionProperties.
    /// </summary>
    /// <param name="node">a <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode" /> to deserialize from.</param>
    /// <returns>
    /// an instance of Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Models.Api20200601.IPrivateEndpointConnectionProperties.
    /// </returns>
    public static Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Models.Api20200601.IPrivateEndpointConnectionProperties FromJson(Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode node)
    {
        // Only JsonObject nodes are accepted; any other node kind yields null.
        return node is Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject json ? new PrivateEndpointConnectionProperties(json) : null;
    }

    /// <summary>
    /// Deserializes a Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject into a new instance of <see cref="PrivateEndpointConnectionProperties" />.
    /// </summary>
    /// <param name="json">A Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject instance to deserialize from.</param>
    internal PrivateEndpointConnectionProperties(Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject json)
    {
        bool returnNow = false;
        // Hook: a partial implementation may take over deserialization entirely.
        BeforeFromJson(json, ref returnNow);
        if (returnNow)
        {
            return;
        }
        // Each property falls back to its current value when the JSON key is absent.
        {_privateEndpoint = If( json?.PropertyT<Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject>("privateEndpoint"), out var __jsonPrivateEndpoint) ? Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Models.Api20200601.PrivateEndpoint.FromJson(__jsonPrivateEndpoint) : PrivateEndpoint;}
        {_privateLinkServiceConnectionState = If( json?.PropertyT<Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject>("privateLinkServiceConnectionState"), out var __jsonPrivateLinkServiceConnectionState) ? Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Models.Api20200601.PrivateLinkServiceConnectionState.FromJson(__jsonPrivateLinkServiceConnectionState) : PrivateLinkServiceConnectionState;}
        {_provisioningState = If( json?.PropertyT<Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonString>("provisioningState"), out var __jsonProvisioningState) ? (string)__jsonProvisioningState : (string)ProvisioningState;}
        AfterFromJson(json);
    }

    /// <summary>
    /// Serializes this instance of <see cref="PrivateEndpointConnectionProperties" /> into a <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode"
    /// />.
    /// </summary>
    /// <param name="container">The <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject"/> container to serialize this object into. If the caller
    /// passes in <c>null</c>, a new instance will be created and returned to the caller.</param>
    /// <param name="serializationMode">Allows the caller to choose the depth of the serialization. See <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.SerializationMode"/>.</param>
    /// <returns>
    /// a serialized instance of <see cref="PrivateEndpointConnectionProperties" /> as a <see cref="Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode" />.
    /// </returns>
    public Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode ToJson(Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject container, Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.SerializationMode serializationMode)
    {
        container = container ?? new Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonObject();

        bool returnNow = false;
        // Hook: a partial implementation may take over serialization entirely.
        BeforeToJson(ref container, ref returnNow);
        if (returnNow)
        {
            return container;
        }
        AddIf( null != this._privateEndpoint ? (Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode) this._privateEndpoint.ToJson(null,serializationMode) : null, "privateEndpoint" ,container.Add );
        AddIf( null != this._privateLinkServiceConnectionState ? (Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode) this._privateLinkServiceConnectionState.ToJson(null,serializationMode) : null, "privateLinkServiceConnectionState" ,container.Add );
        // provisioningState is read-only on the service side; only emitted when
        // the caller asks for read-only members.
        if (serializationMode.HasFlag(Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.SerializationMode.IncludeReadOnly))
        {
            AddIf( null != (((object)this._provisioningState)?.ToString()) ? (Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonNode) new Microsoft.Azure.PowerShell.Cmdlets.AppConfiguration.Runtime.Json.JsonString(this._provisioningState.ToString()) : null, "provisioningState" ,container.Add );
        }
        AfterToJson(ref container);
        return container;
    }
}
} | 78.816514 | 429 | 0.717379 | [
"MIT"
] | 3quanfeng/azure-powershell | src/AppConfiguration/generated/api/Models/Api20200601/PrivateEndpointConnectionProperties.json.cs | 8,483 | C# |
/******************************************************************************
* Spine Runtimes License Agreement
* Last updated January 1, 2020. Replaces all prior versions.
*
* Copyright (c) 2013-2020, Esoteric Software LLC
*
* Integration of the Spine Runtimes into software or otherwise creating
* derivative works of the Spine Runtimes is permitted under the terms and
* conditions of Section 2 of the Spine Editor License Agreement:
* http://esotericsoftware.com/spine-editor-license
*
* Otherwise, it is permitted to integrate the Spine Runtimes into software
* or otherwise create derivative works of the Spine Runtimes (collectively,
* "Products"), provided that each user of the Products must obtain their own
* Spine Editor license and redistribution of the Products in any form must
* include this license and copyright notice.
*
* THE SPINE RUNTIMES ARE PROVIDED BY ESOTERIC SOFTWARE LLC "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL ESOTERIC SOFTWARE LLC BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES,
* BUSINESS INTERRUPTION, OR LOSS OF USE, DATA, OR PROFITS) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THE SPINE RUNTIMES, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/
#if UNITY_2018_3 || UNITY_2019 || UNITY_2018_3_OR_NEWER
#define NEW_PREFAB_SYSTEM
#endif
#define SPINE_SKELETONMECANIM
using UnityEngine;
using UnityEditor;
using UnityEditorInternal;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.IO;
using Spine;
namespace Spine.Unity.Editor {
/// <summary>
/// [SUPPORTS]
/// Linear, Constant, and Bezier Curves*
/// Inverse Kinematics*
/// Inherit Rotation
/// Translate Timeline
/// Rotate Timeline
/// Scale Timeline**
/// Event Timeline***
/// Attachment Timeline
///
/// RegionAttachment
/// MeshAttachment (optionally Skinned)
///
/// [LIMITATIONS]
/// *Bezier Curves are baked into the animation at 60fps and are not realtime. Use bakeIncrement constant to adjust key density if desired.
/// *Inverse Kinematics is baked into the animation at 60fps and are not realtime. Use bakeIncrement constant to adjust key density if desired.
/// ***Events may only fire 1 type of data per event in Unity safely so priority to String data if present in Spine key, otherwise a Float is sent whether the Spine key was Int or Float with priority given to Int.
///
/// [DOES NOT SUPPORT]
/// FFD (Unity does not provide access to BlendShapes with code)
/// Color Keys (Maybe one day when Unity supports full FBX standard and provides access with code)
/// Draw Order Keyframes
/// </summary>
public static class SkeletonBaker {
#region SkeletonMecanim's Mecanim Clips
#if SPINE_SKELETONMECANIM
/// <summary>
/// Regenerates the Mecanim animation clips for the asset, but only when an
/// AnimatorController has already been assigned to it.
/// </summary>
public static void UpdateMecanimClips (SkeletonDataAsset skeletonDataAsset) {
	if (skeletonDataAsset.controller != null)
		GenerateMecanimAnimationClips(skeletonDataAsset);
}
/// <summary>
/// Creates or updates an AnimatorController next to the SkeletonDataAsset and
/// keeps one AnimationClip per Spine animation inside it: missing clips are
/// created, existing ones get their length and events refreshed, and clips
/// whose Spine animation no longer exists are destroyed.
/// </summary>
public static void GenerateMecanimAnimationClips (SkeletonDataAsset skeletonDataAsset) {
	var data = skeletonDataAsset.GetSkeletonData(true);
	if (data == null) {
		Debug.LogError("SkeletonData loading failed!", skeletonDataAsset);
		return;
	}

	// Controller lives beside the data asset by default: "<name>_Controller.controller".
	string dataPath = AssetDatabase.GetAssetPath(skeletonDataAsset);
	string controllerPath = dataPath.Replace(AssetUtility.SkeletonDataSuffix, "_Controller").Replace(".asset", ".controller");

	// Reuse the controller already referenced by the asset; otherwise load or
	// create one at the default path (asking before reusing an unknown file).
	UnityEditor.Animations.AnimatorController controller;
	if (skeletonDataAsset.controller != null) {
		controller = (UnityEditor.Animations.AnimatorController)skeletonDataAsset.controller;
		controllerPath = AssetDatabase.GetAssetPath(controller);
	} else {
		if (File.Exists(controllerPath)) {
			if (EditorUtility.DisplayDialog("Controller Overwrite Warning", "Unknown Controller already exists at: " + controllerPath, "Update", "Overwrite")) {
				controller = (UnityEditor.Animations.AnimatorController)AssetDatabase.LoadAssetAtPath(controllerPath, typeof(RuntimeAnimatorController));
			} else {
				controller = (UnityEditor.Animations.AnimatorController)UnityEditor.Animations.AnimatorController.CreateAnimatorControllerAtPath(controllerPath);
			}
		} else {
			controller = (UnityEditor.Animations.AnimatorController)UnityEditor.Animations.AnimatorController.CreateAnimatorControllerAtPath(controllerPath);
		}
	}

	skeletonDataAsset.controller = controller;
	EditorUtility.SetDirty(skeletonDataAsset);

	UnityEngine.Object[] objs = AssetDatabase.LoadAllAssetsAtPath(controllerPath);

	var unityAnimationClipTable = new Dictionary<string, AnimationClip>();
	var spineAnimationTable = new Dictionary<string, Spine.Animation>();

	// Index the clips already stored inside the controller asset.
	foreach (var o in objs) {
		//Debug.LogFormat("({0}){1} : {3} + {2} + {4}", o.GetType(), o.name, o.hideFlags, o.GetInstanceID(), o.GetHashCode());

		// There is a bug in Unity 5.3.3 (and likely before) that creates
		// a duplicate AnimationClip when you duplicate a Mecanim Animator State.
		// These duplicates seem to be identifiable by their HideFlags, so we'll exclude them.
		if (o is AnimationClip) {
			var clip = o as AnimationClip;
			if (!clip.HasFlag(HideFlags.HideInHierarchy)) {
				if (unityAnimationClipTable.ContainsKey(clip.name)) {
					Debug.LogWarningFormat("Duplicate AnimationClips were found named {0}", clip.name);
				}
				unityAnimationClipTable.Add(clip.name, clip);
			}
		}
	}

	// Create/refresh one clip per Spine animation; entries removed from the
	// table here survive the cleanup pass below.
	foreach (var animations in data.Animations) {
		string animationName = animations.Name; // Review for unsafe names. Requires runtime implementation too.
		spineAnimationTable.Add(animationName, animations);

		if (unityAnimationClipTable.ContainsKey(animationName) == false) {
			AnimationClip newClip = new AnimationClip {
				name = animationName
			};

			//AssetDatabase.CreateAsset(newClip, Path.GetDirectoryName(dataPath) + "/" + animationName + ".asset");
			AssetDatabase.AddObjectToAsset(newClip, controller);
			unityAnimationClipTable.Add(animationName, newClip);
		}

		AnimationClip clip = unityAnimationClipTable[animationName];
		// Dummy curve pins the clip length to the Spine animation's duration.
		clip.SetCurve("", typeof(GameObject), "dummy", AnimationCurve.Linear(0, 0, animations.Duration, 0));
		var settings = AnimationUtility.GetAnimationClipSettings(clip);
		settings.stopTime = animations.Duration;

		SetAnimationSettings(clip, settings);

		// Rebuild events from scratch from the Spine event timelines.
		AnimationUtility.SetAnimationEvents(clip, new AnimationEvent[0]);
		foreach (Timeline t in animations.Timelines) {
			if (t is EventTimeline)
				ParseEventTimeline((EventTimeline)t, clip, SendMessageOptions.DontRequireReceiver);
		}

		EditorUtility.SetDirty(clip);
		unityAnimationClipTable.Remove(animationName);
	}

	// Whatever is left has no matching Spine animation any more — destroy it.
	foreach (var clip in unityAnimationClipTable.Values) {
		AnimationClip.DestroyImmediate(clip, true);
	}

	AssetDatabase.Refresh();
	AssetDatabase.SaveAssets();
}
/// <summary>
/// True when every bit of <paramref name="flagToCheck"/> is set on the
/// object's hideFlags.
/// </summary>
static bool HasFlag (this UnityEngine.Object o, HideFlags flagToCheck) {
	HideFlags masked = o.hideFlags & flagToCheck;
	return masked == flagToCheck;
}
#endif
#endregion
#region Prefab and AnimationClip Baking
/// <summary>
/// Sampling interval, 1/60 of a unit of animation time, used when baking
/// Bezier curves, IK controlled bones, and Inherit Rotation affected bones
/// into discrete keyframes.
/// </summary>
const float BakeIncrement = 1 / 60f;
/// <summary>
/// Bakes the skeleton into plain-Unity assets: optionally one shared
/// AnimatorController with a clip per Spine animation, plus one prefab per
/// skin containing a bone Transform hierarchy, slot objects, and mesh
/// renderers for each attachment. Existing output assets at the same paths
/// are updated in place; orphaned sub-assets are destroyed.
/// </summary>
/// <param name="skeletonDataAsset">Source skeleton data; must be valid.</param>
/// <param name="skins">Skins to bake; one prefab is produced per skin.</param>
/// <param name="outputPath">Target folder; empty means "&lt;asset folder&gt;/Baked".</param>
/// <param name="bakeAnimations">Whether to bake animation clips at all.</param>
/// <param name="bakeIK">Forwarded to ExtractAnimation; bakes IK into keys.</param>
/// <param name="eventOptions">SendMessage options baked into animation events.</param>
public static void BakeToPrefab (SkeletonDataAsset skeletonDataAsset, ExposedList<Skin> skins, string outputPath = "", bool bakeAnimations = true, bool bakeIK = true, SendMessageOptions eventOptions = SendMessageOptions.DontRequireReceiver) {
	if (skeletonDataAsset == null || skeletonDataAsset.GetSkeletonData(true) == null) {
		Debug.LogError("Could not export Spine Skeleton because SkeletonDataAsset is null or invalid!");
		return;
	}

	if (outputPath == "") {
		outputPath = System.IO.Path.GetDirectoryName(AssetDatabase.GetAssetPath(skeletonDataAsset)).Replace('\\', '/') + "/Baked";
		System.IO.Directory.CreateDirectory(outputPath);
	}

	var skeletonData = skeletonDataAsset.GetSkeletonData(true);
	bool hasAnimations = bakeAnimations && skeletonData.Animations.Count > 0;
	UnityEditor.Animations.AnimatorController controller = null;

	// --- Phase 1: bake animation clips into a single shared controller. ---
	if (hasAnimations) {
		string controllerPath = outputPath + "/" + skeletonDataAsset.skeletonJSON.name + " Controller.controller";
		bool newAnimContainer = false;

		var runtimeController = AssetDatabase.LoadAssetAtPath(controllerPath, typeof(RuntimeAnimatorController));

		if (runtimeController != null) {
			controller = (UnityEditor.Animations.AnimatorController)runtimeController;
		} else {
			controller = UnityEditor.Animations.AnimatorController.CreateAnimatorControllerAtPath(controllerPath);
			newAnimContainer = true;
		}

		// Index clips already stored inside the controller so they can be
		// updated in place; names left in unusedClipNames get destroyed.
		var existingClipTable = new Dictionary<string, AnimationClip>();
		var unusedClipNames = new List<string>();
		Object[] animObjs = AssetDatabase.LoadAllAssetsAtPath(controllerPath);

		foreach (Object o in animObjs) {
			if (o is AnimationClip) {
				var clip = (AnimationClip)o;
				existingClipTable.Add(clip.name, clip);
				unusedClipNames.Add(clip.name);
			}
		}

		// slot index -> union of attachment names across all baked skins;
		// consumed by ExtractAnimation.
		Dictionary<int, List<string>> slotLookup = new Dictionary<int, List<string>>();

		int skinCount = skins.Count;

		for (int s = 0; s < skeletonData.Slots.Count; s++) {
			List<string> attachmentNames = new List<string>();
			for (int i = 0; i < skinCount; i++) {
				var skin = skins.Items[i];
				var skinEntries = new List<Skin.SkinEntry>();
				skin.GetAttachments(s, skinEntries);
				foreach (var entry in skinEntries) {
					if (!attachmentNames.Contains(entry.Name))
						attachmentNames.Add(entry.Name);
				}
			}
			slotLookup.Add(s, attachmentNames);
		}

		foreach (var anim in skeletonData.Animations) {

			AnimationClip clip = null;
			if (existingClipTable.ContainsKey(anim.Name)) {
				clip = existingClipTable[anim.Name];
			}

			clip = ExtractAnimation(anim.Name, skeletonData, slotLookup, bakeIK, eventOptions, clip);

			if (unusedClipNames.Contains(clip.name)) {
				unusedClipNames.Remove(clip.name);
			} else {
				// Newly extracted clip: attach it to the controller asset.
				AssetDatabase.AddObjectToAsset(clip, controller);
				controller.AddMotion(clip);
			}
		}

		if (newAnimContainer) {
			EditorUtility.SetDirty(controller);
			AssetDatabase.SaveAssets();
			AssetDatabase.ImportAsset(controllerPath, ImportAssetOptions.ForceUpdate);
			AssetDatabase.Refresh();
		} else {
			// Remove clips whose Spine animation no longer exists.
			foreach (string str in unusedClipNames) {
				AnimationClip.DestroyImmediate(existingClipTable[str], true);
			}

			EditorUtility.SetDirty(controller);
			AssetDatabase.SaveAssets();
			AssetDatabase.ImportAsset(controllerPath, ImportAssetOptions.ForceUpdate);
			AssetDatabase.Refresh();
		}
	}

	// --- Phase 2: build one prefab per skin. ---
	foreach (var skin in skins) {
		bool newPrefab = false;

		string prefabPath = outputPath + "/" + skeletonDataAsset.skeletonJSON.name + " (" + skin.Name + ").prefab";

		Object prefab = AssetDatabase.LoadAssetAtPath(prefabPath, typeof(GameObject));

		if (prefab == null) {
#if NEW_PREFAB_SYSTEM
			GameObject emptyGameObject = new GameObject();
			prefab = PrefabUtility.SaveAsPrefabAssetAndConnect(emptyGameObject, prefabPath, InteractionMode.AutomatedAction);
			GameObject.DestroyImmediate(emptyGameObject);
#else
			prefab = PrefabUtility.CreateEmptyPrefab(prefabPath);
#endif
			newPrefab = true;
		}

		// Index meshes already stored in the prefab so they can be reused;
		// names left in unusedMeshNames get destroyed at the end.
		Dictionary<string, Mesh> meshTable = new Dictionary<string, Mesh>();
		List<string> unusedMeshNames = new List<string>();
		Object[] assets = AssetDatabase.LoadAllAssetsAtPath(prefabPath);

		foreach (var obj in assets) {
			if (obj is Mesh) {
				meshTable.Add(obj.name, (Mesh)obj);
				unusedMeshNames.Add(obj.name);
			}
		}

		// Scene-side scratch hierarchy; destroyed after being saved as prefab.
		GameObject prefabRoot = EditorInstantiation.NewGameObject("root", true);

		Dictionary<string, Transform> slotTable = new Dictionary<string, Transform>();
		Dictionary<string, Transform> boneTable = new Dictionary<string, Transform>();
		List<Transform> boneList = new List<Transform>();

		//create bones
		// First pass: flat creation so parents can be looked up by name below.
		for (int i = 0; i < skeletonData.Bones.Count; i++) {
			var boneData = skeletonData.Bones.Items[i];
			Transform boneTransform = EditorInstantiation.NewGameObject(boneData.Name, true).transform;
			boneTransform.parent = prefabRoot.transform;
			boneTable.Add(boneTransform.name, boneTransform);
			boneList.Add(boneTransform);
		}

		// Second pass: reparent and apply setup-pose transforms, honoring
		// the bone's inherit-rotation/inherit-scale transform mode.
		for (int i = 0; i < skeletonData.Bones.Count; i++) {
			var boneData = skeletonData.Bones.Items[i];
			Transform boneTransform = boneTable[boneData.Name];
			Transform parentTransform = null;
			if (i > 0)
				parentTransform = boneTable[boneData.Parent.Name];
			else
				parentTransform = boneTransform.parent;

			boneTransform.parent = parentTransform;
			boneTransform.localPosition = new Vector3(boneData.X, boneData.Y, 0);
			var tm = boneData.TransformMode;
			if (tm.InheritsRotation())
				boneTransform.localRotation = Quaternion.Euler(0, 0, boneData.Rotation);
			else
				boneTransform.rotation = Quaternion.Euler(0, 0, boneData.Rotation);

			if (tm.InheritsScale())
				boneTransform.localScale = new Vector3(boneData.ScaleX, boneData.ScaleY, 1);
		}

		//create slots and attachments
		for (int slotIndex = 0; slotIndex < skeletonData.Slots.Count; slotIndex++) {
			var slotData = skeletonData.Slots.Items[slotIndex];
			Transform slotTransform = EditorInstantiation.NewGameObject(slotData.Name, true).transform;
			slotTransform.parent = prefabRoot.transform;
			slotTable.Add(slotData.Name, slotTransform);

			// Attachments come from the baked skin plus the default skin.
			var skinEntries = new List<Skin.SkinEntry>();
			skin.GetAttachments(slotIndex, skinEntries);
			if (skin != skeletonData.DefaultSkin)
				skeletonData.DefaultSkin.GetAttachments(slotIndex, skinEntries);

			for (int a = 0; a < skinEntries.Count; a++) {
				var attachment = skinEntries[a].Attachment;
				string attachmentName = skinEntries[a].Name;
				string attachmentMeshName = "[" + slotData.Name + "] " + attachmentName;
				Vector3 offset = Vector3.zero;
				float rotation = 0;
				Mesh mesh = null;
				Material material = null;
				bool isWeightedMesh = false;

				if (meshTable.ContainsKey(attachmentMeshName))
					mesh = meshTable[attachmentMeshName];
				if (attachment is RegionAttachment) {
					var regionAttachment = (RegionAttachment)attachment;
					offset.x = regionAttachment.X;
					offset.y = regionAttachment.Y;
					rotation = regionAttachment.Rotation;
					mesh = ExtractRegionAttachment(attachmentMeshName, regionAttachment, mesh);
					material = attachment.GetMaterial();
					unusedMeshNames.Remove(attachmentMeshName);
					if (newPrefab || meshTable.ContainsKey(attachmentMeshName) == false)
						AssetDatabase.AddObjectToAsset(mesh, prefab);
				} else if (attachment is MeshAttachment) {
					var meshAttachment = (MeshAttachment)attachment;
					isWeightedMesh = (meshAttachment.Bones != null);
					offset.x = 0;
					offset.y = 0;
					rotation = 0;

					if (isWeightedMesh)
						mesh = ExtractWeightedMeshAttachment(attachmentMeshName, meshAttachment, slotIndex, skeletonData, boneList, mesh);
					else
						mesh = ExtractMeshAttachment(attachmentMeshName, meshAttachment, mesh);

					material = attachment.GetMaterial();
					unusedMeshNames.Remove(attachmentMeshName);
					if (newPrefab || meshTable.ContainsKey(attachmentMeshName) == false)
						AssetDatabase.AddObjectToAsset(mesh, prefab);
				} else
					// Other attachment types (e.g. bounding boxes) are not baked.
					continue;

				Transform attachmentTransform = EditorInstantiation.NewGameObject(attachmentName, true).transform;
				attachmentTransform.parent = slotTransform;
				attachmentTransform.localPosition = offset;
				attachmentTransform.localRotation = Quaternion.Euler(0, 0, rotation);

				// Weighted meshes are skinned to the bone list; others use a
				// plain MeshFilter/MeshRenderer pair.
				if (isWeightedMesh) {
					attachmentTransform.position = Vector3.zero;
					attachmentTransform.rotation = Quaternion.identity;
					var skinnedMeshRenderer = attachmentTransform.gameObject.AddComponent<SkinnedMeshRenderer>();
					skinnedMeshRenderer.rootBone = boneList[0];
					skinnedMeshRenderer.bones = boneList.ToArray();
					skinnedMeshRenderer.sharedMesh = mesh;
				} else {
					attachmentTransform.gameObject.AddComponent<MeshFilter>().sharedMesh = mesh;
					attachmentTransform.gameObject.AddComponent<MeshRenderer>();
				}

				attachmentTransform.GetComponent<Renderer>().sharedMaterial = material;
				attachmentTransform.GetComponent<Renderer>().sortingOrder = slotIndex;

				// Only the slot's setup-pose attachment starts enabled.
				if (attachmentName != slotData.AttachmentName)
					attachmentTransform.gameObject.SetActive(false);
			}
		}

		// Reparent slot objects under their owning bones.
		foreach (var slotData in skeletonData.Slots) {
			Transform slotTransform = slotTable[slotData.Name];
			slotTransform.parent = boneTable[slotData.BoneData.Name];
			slotTransform.localPosition = Vector3.zero;
			slotTransform.localRotation = Quaternion.identity;
			slotTransform.localScale = Vector3.one;
		}

		if (hasAnimations) {
			var animator = prefabRoot.AddComponent<Animator>();
			animator.applyRootMotion = false;
			animator.runtimeAnimatorController = (RuntimeAnimatorController)controller;
			EditorGUIUtility.PingObject(controller);
		}

		if (newPrefab) {
#if NEW_PREFAB_SYSTEM
			PrefabUtility.SaveAsPrefabAssetAndConnect(prefabRoot, prefabPath, InteractionMode.AutomatedAction);
#else
			PrefabUtility.ReplacePrefab(prefabRoot, prefab, ReplacePrefabOptions.ConnectToPrefab);
#endif
		} else {
			// Remove meshes no attachment claimed this time around.
			foreach (string str in unusedMeshNames) {
				Mesh.DestroyImmediate(meshTable[str], true);
			}

#if NEW_PREFAB_SYSTEM
			PrefabUtility.SaveAsPrefabAssetAndConnect(prefabRoot, prefabPath, InteractionMode.AutomatedAction);
#else
			PrefabUtility.ReplacePrefab(prefabRoot, prefab, ReplacePrefabOptions.ReplaceNameBased);
#endif
		}

		EditorGUIUtility.PingObject(prefab);
		AssetDatabase.Refresh();
		AssetDatabase.SaveAssets();
		GameObject.DestroyImmediate(prefabRoot);
	}
}
#region Attachment Baking
static Bone DummyBone;
static Slot DummySlot;
/// <summary>
/// Returns a lazily created, cached single-bone skeleton's root bone. Used as a
/// neutral transform when computing world vertices for unweighted attachments.
/// Note the bone is shared: callers may mutate its X/Y before use.
/// </summary>
internal static Bone GetDummyBone () {
    if (DummyBone == null) {
        var dummySkeletonData = new SkeletonData();
        var dummyBoneData = new BoneData(0, "temp", null);
        dummyBoneData.ScaleX = 1;
        dummyBoneData.ScaleY = 1;
        dummyBoneData.Length = 100;
        dummySkeletonData.Bones.Add(dummyBoneData);
        var dummySkeleton = new Skeleton(dummySkeletonData);
        var newBone = new Bone(dummyBoneData, dummySkeleton, null);
        newBone.UpdateWorldTransform();
        DummyBone = newBone;
    }
    return DummyBone;
}
/// <summary>
/// Returns a lazily created, cached slot attached to the dummy bone. Used when a
/// mesh attachment needs a Slot to compute world vertices outside a real skeleton.
/// </summary>
internal static Slot GetDummySlot () {
    if (DummySlot == null) {
        Bone dummyBone = GetDummyBone();
        var dummySlotData = new SlotData(0, "temp", dummyBone.Data);
        DummySlot = new Slot(dummySlotData, dummyBone);
    }
    return DummySlot;
}
/// <summary>
/// Converts a Spine RegionAttachment (a textured quad) into a Unity Mesh.
/// </summary>
/// <param name="name">Name assigned to the generated mesh.</param>
/// <param name="attachment">Region attachment to convert.</param>
/// <param name="mesh">Optional mesh to reuse; a new one is created when null.</param>
/// <param name="centered">When true, the attachment's offset and rotation are removed so the quad sits centered at the origin.</param>
internal static Mesh ExtractRegionAttachment (string name, RegionAttachment attachment, Mesh mesh = null, bool centered = true) {
    var bone = GetDummyBone();
    // The dummy bone is cached and shared, so always assign X/Y here; otherwise a
    // non-centered call could pick up stale offsets left behind by a previous
    // centered extraction (ExtractMeshAttachment resets its bone the same way).
    bone.X = centered ? -attachment.X : 0;
    bone.Y = centered ? -attachment.Y : 0;
    bone.UpdateWorldTransform();

    Vector2[] uvs = ExtractUV(attachment.UVs);
    float[] floatVerts = new float[8]; // region attachments are quads: 4 verts * (x, y)
    attachment.ComputeWorldVertices(bone, floatVerts, 0);
    Vector3[] verts = ExtractVerts(floatVerts);

    if (centered) {
        // Unrotate verts now that they're centered.
        for (int i = 0; i < verts.Length; i++)
            verts[i] = Quaternion.Euler(0, 0, -attachment.Rotation) * verts[i];
    }

    int[] triangles = { 1, 3, 0, 2, 3, 1 };
    Color color = attachment.GetColor();

    if (mesh == null)
        mesh = new Mesh();
    mesh.triangles = new int[0]; // clear first so a shrinking vertex array cannot leave stale indices
    mesh.vertices = verts;
    mesh.uv = uvs;
    mesh.triangles = triangles;
    mesh.colors = new [] { color, color, color, color };
    mesh.RecalculateBounds();
    mesh.RecalculateNormals();
    mesh.name = name;
    return mesh;
}
/// <summary>
/// Converts an unweighted Spine MeshAttachment into a Unity Mesh, posed via the
/// shared dummy slot at the origin.
/// </summary>
/// <param name="name">Name assigned to the generated mesh.</param>
/// <param name="attachment">Mesh attachment to convert.</param>
/// <param name="mesh">Optional mesh to reuse; a new one is created when null.</param>
internal static Mesh ExtractMeshAttachment (string name, MeshAttachment attachment, Mesh mesh = null) {
    Slot dummySlot = GetDummySlot();
    // Reset the shared dummy bone so earlier extractions cannot offset this one.
    dummySlot.Bone.X = 0;
    dummySlot.Bone.Y = 0;
    dummySlot.Bone.UpdateWorldTransform();

    Vector2[] uvs = ExtractUV(attachment.UVs);
    float[] worldVerts = new float[attachment.WorldVerticesLength];
    attachment.ComputeWorldVertices(dummySlot, worldVerts);
    Vector3[] vertices = ExtractVerts(worldVerts);
    Color tint = attachment.GetColor();

    if (mesh == null)
        mesh = new Mesh();
    mesh.triangles = new int[0]; // clear first so a shrinking vertex array cannot leave stale indices
    mesh.vertices = vertices;
    mesh.uv = uvs;
    mesh.triangles = attachment.Triangles;
    Color[] vertexColors = new Color[vertices.Length];
    for (int i = vertexColors.Length - 1; i >= 0; i--)
        vertexColors[i] = tint;
    mesh.colors = vertexColors;
    mesh.RecalculateBounds();
    mesh.RecalculateNormals();
    mesh.name = name;
    return mesh;
}
/// <summary>
/// Accumulates (bone transform, weight) influences for a single vertex while
/// converting a weighted Spine mesh. Bones/weights are stored both as parallel
/// lists and as pairs so callers can sort influences by weight.
/// </summary>
public class BoneWeightContainer {
    /// <summary>A single bone influence: which transform, and how strongly.</summary>
    public struct Pair {
        public Transform bone;
        public float weight;
        public Pair (Transform bone, float weight) {
            this.bone = bone;
            this.weight = weight;
        }
    }

    public List<Transform> bones = new List<Transform>();
    public List<float> weights = new List<float>();
    public List<Pair> pairs = new List<Pair>();

    /// <summary>Records one influence in all three collections.</summary>
    public void Add (Transform transform, float weight) {
        bones.Add(transform);
        weights.Add(weight);
        pairs.Add(new Pair(transform, weight));
    }
}
/// <summary>
/// Builds a Unity Mesh from a bone-weighted Spine MeshAttachment, including
/// BoneWeights and bind poses so the result can drive a SkinnedMeshRenderer.
/// Unity supports at most 4 influences per vertex; extra influences are dropped
/// (weakest first) with a logged warning.
/// </summary>
/// <param name="name">Name assigned to the generated mesh.</param>
/// <param name="attachment">Weighted mesh attachment to convert; must be weighted.</param>
/// <param name="slotIndex">Index of the slot holding the attachment.</param>
/// <param name="skeletonData">Data used to pose a temporary skeleton for world vertices.</param>
/// <param name="boneList">Bone transforms indexed by the attachment's bone indices.</param>
/// <param name="mesh">Optional mesh to reuse; a new one is created when null.</param>
internal static Mesh ExtractWeightedMeshAttachment (string name, MeshAttachment attachment, int slotIndex, SkeletonData skeletonData, List<Transform> boneList, Mesh mesh = null) {
    if (!attachment.IsWeighted())
        throw new System.ArgumentException("Mesh is not weighted.", "attachment");

    // Pose a throwaway skeleton (setup pose) to compute world-space vertices.
    Skeleton skeleton = new Skeleton(skeletonData);
    skeleton.UpdateWorldTransform();
    float[] floatVerts = new float[attachment.WorldVerticesLength];
    attachment.ComputeWorldVertices(skeleton.Slots.Items[slotIndex], floatVerts);

    Vector2[] uvs = ExtractUV(attachment.UVs);
    Vector3[] verts = ExtractVerts(floatVerts);
    int[] triangles = attachment.Triangles;
    Color color = new Color(attachment.R, attachment.G, attachment.B, attachment.A);

    mesh = mesh ?? new Mesh();
    mesh.triangles = new int[0]; // clear first so a shrinking vertex array cannot leave stale indices
    mesh.vertices = verts;
    mesh.uv = uvs;
    mesh.triangles = triangles;
    Color[] colors = new Color[verts.Length];
    for (int i = 0; i < verts.Length; i++)
        colors[i] = color;
    mesh.colors = colors;
    mesh.name = name;
    mesh.RecalculateNormals();
    mesh.RecalculateBounds();

    // Handle weights and binding.
    // attachment.Bones is a packed stream: per vertex, [influenceCount, boneIndex, boneIndex, ...].
    // attachment.Vertices packs one [x, y, weight] triplet per influence (hence b += 3 and weights[b + 2]).
    // w advances by 2 per vertex, so w / 2 recovers the vertex index.
    var weightTable = new Dictionary<int, BoneWeightContainer>();
    var warningBuilder = new System.Text.StringBuilder();
    int[] bones = attachment.Bones;
    float[] weights = attachment.Vertices;
    for (int w = 0, v = 0, b = 0, n = bones.Length; v < n; w += 2) {
        int nn = bones[v++] + v; // end of this vertex's bone-index run
        for (; v < nn; v++, b += 3) {
            Transform boneTransform = boneList[bones[v]];
            int vIndex = w / 2;
            BoneWeightContainer container;
            if (weightTable.ContainsKey(vIndex))
                container = weightTable[vIndex];
            else {
                container = new BoneWeightContainer();
                weightTable.Add(vIndex, container);
            }
            float weight = weights[b + 2];
            container.Add(boneTransform, weight);
        }
    }

    // Convert the accumulated influences into Unity BoneWeights (max 4 per vertex,
    // strongest first; the remainder is ignored and reported).
    BoneWeight[] boneWeights = new BoneWeight[weightTable.Count];
    for (int i = 0; i < weightTable.Count; i++) {
        BoneWeight bw = new BoneWeight();
        var container = weightTable[i];
        var pairs = container.pairs.OrderByDescending(pair => pair.weight).ToList();
        for (int b = 0; b < pairs.Count; b++) {
            if (b > 3) {
                if (warningBuilder.Length == 0)
                    warningBuilder.Insert(0, "[Weighted Mesh: " + name + "]\r\nUnity only supports 4 weight influences per vertex! The 4 strongest influences will be used.\r\n");
                warningBuilder.AppendFormat("{0} ignored on vertex {1}!\r\n", pairs[b].bone.name, i);
                continue;
            }
            int boneIndex = boneList.IndexOf(pairs[b].bone);
            float weight = pairs[b].weight;
            switch (b) {
            case 0:
                bw.boneIndex0 = boneIndex;
                bw.weight0 = weight;
                break;
            case 1:
                bw.boneIndex1 = boneIndex;
                bw.weight1 = weight;
                break;
            case 2:
                bw.boneIndex2 = boneIndex;
                bw.weight2 = weight;
                break;
            case 3:
                bw.boneIndex3 = boneIndex;
                bw.weight3 = weight;
                break;
            }
        }
        boneWeights[i] = bw;
    }

    // Bind poses: inverse of each bone transform's current world matrix.
    Matrix4x4[] bindPoses = new Matrix4x4[boneList.Count];
    for (int i = 0; i < boneList.Count; i++) {
        bindPoses[i] = boneList[i].worldToLocalMatrix;
    }

    mesh.boneWeights = boneWeights;
    mesh.bindposes = bindPoses;

    string warningString = warningBuilder.ToString();
    if (warningString.Length > 0)
        Debug.LogWarning(warningString);

    return mesh;
}
/// <summary>Packs a flat [u0, v0, u1, v1, ...] array into Vector2s.</summary>
internal static Vector2[] ExtractUV (float[] floats) {
    var result = new Vector2[floats.Length / 2];
    for (int srcIndex = 0; srcIndex < floats.Length; srcIndex += 2)
        result[srcIndex / 2] = new Vector2(floats[srcIndex], floats[srcIndex + 1]);
    return result;
}
/// <summary>Packs a flat [x0, y0, x1, y1, ...] array into Vector3s with z = 0.</summary>
internal static Vector3[] ExtractVerts (float[] floats) {
    var result = new Vector3[floats.Length / 2];
    for (int srcIndex = 0; srcIndex < floats.Length; srcIndex += 2)
        result[srcIndex / 2] = new Vector3(floats[srcIndex], floats[srcIndex + 1], 0);
    return result;
}
#endregion
#region Animation Baking
/// <summary>
/// Converts the named Spine animation into a Unity AnimationClip by parsing its
/// timelines (scale, translate, rotate, attachment, event). Rotation of IK-driven
/// bones (when bakeIK is set) and of bones that do not inherit rotation is baked
/// by sampling instead of keyed directly; their rotate timelines are then skipped
/// to avoid conflicting curves on the same property.
/// </summary>
/// <param name="name">Animation name looked up in <paramref name="skeletonData"/>.</param>
/// <param name="skeletonData">Source skeleton data.</param>
/// <param name="slotLookup">Per-slot-index list of attachment names, used for attachment timelines.</param>
/// <param name="bakeIK">When true, IK-constrained bone rotation is sampled into explicit keys.</param>
/// <param name="eventOptions">SendMessage options stamped on generated AnimationEvents.</param>
/// <param name="clip">Optional clip to refill; curves and events are cleared first. A new clip is created when null.</param>
static AnimationClip ExtractAnimation (string name, SkeletonData skeletonData, Dictionary<int, List<string>> slotLookup, bool bakeIK, SendMessageOptions eventOptions, AnimationClip clip = null) {
    var animation = skeletonData.FindAnimation(name);
    var timelines = animation.Timelines;

    if (clip == null) {
        clip = new AnimationClip();
    } else {
        // Reuse: wipe previous curves and events before refilling.
        clip.ClearCurves();
        AnimationUtility.SetAnimationEvents(clip, new AnimationEvent[0]);
    }

    clip.name = name;

    Skeleton skeleton = new Skeleton(skeletonData);
    // Bone indexes whose rotation is baked below; their RotateTimelines are skipped.
    List<int> ignoreRotateTimelineIndexes = new List<int>();
    if (bakeIK) {
        foreach (IkConstraint i in skeleton.IkConstraints) {
            foreach (Bone b in i.Bones) {
                int index = skeleton.FindBoneIndex(b.Data.Name);
                ignoreRotateTimelineIndexes.Add(index);
                BakeBoneConstraints(b, animation, clip);
            }
        }
    }

    // Bones that don't inherit rotation also need sampled rotation keys.
    foreach (Bone b in skeleton.Bones) {
        if (!b.Data.TransformMode.InheritsRotation()) {
            int index = skeleton.FindBoneIndex(b.Data.Name);
            if (ignoreRotateTimelineIndexes.Contains(index) == false) {
                ignoreRotateTimelineIndexes.Add(index);
                BakeBoneConstraints(b, animation, clip);
            }
        }
    }

    foreach (Timeline t in timelines) {
        skeleton.SetToSetupPose();

        if (t is ScaleTimeline) {
            ParseScaleTimeline(skeleton, (ScaleTimeline)t, clip);
        } else if (t is TranslateTimeline) {
            ParseTranslateTimeline(skeleton, (TranslateTimeline)t, clip);
        } else if (t is RotateTimeline) {
            //bypass any rotation keys if they're going to get baked anyway to prevent localEulerAngles vs Baked collision
            if (ignoreRotateTimelineIndexes.Contains(((RotateTimeline)t).BoneIndex) == false)
                ParseRotateTimeline(skeleton, (RotateTimeline)t, clip);
        } else if (t is AttachmentTimeline) {
            ParseAttachmentTimeline(skeleton, (AttachmentTimeline)t, slotLookup, clip);
        } else if (t is EventTimeline) {
            ParseEventTimeline((EventTimeline)t, clip, eventOptions);
        }
    }

    var settings = AnimationUtility.GetAnimationClipSettings(clip);
    settings.loopTime = true;
    settings.stopTime = Mathf.Max(clip.length, 0.001f); // keep a nonzero length even for empty clips
    SetAnimationSettings(clip, settings);

    clip.EnsureQuaternionContinuity();
    EditorUtility.SetDirty(clip);
    return clip;
}
/// <summary>
/// Binary search over frame times: returns the index of the first frame whose
/// time is strictly greater than <paramref name="target"/> (never 0; always
/// returns 1 when there are only two values).
/// </summary>
static int BinarySearch (float[] values, float target) {
    int lo = 0;
    int hi = values.Length - 2;
    if (hi == 0)
        return 1;
    int mid = (int)((uint)hi >> 1);
    while (true) {
        if (values[mid + 1] <= target)
            lo = mid + 1;
        else
            hi = mid;
        if (lo == hi)
            return lo + 1;
        // Unsigned shift averages without overflow.
        mid = (int)((uint)(lo + hi) >> 1);
    }
}
/// <summary>
/// Bakes the rotation of a constrained bone (IK-driven or non-rotation-inheriting)
/// into explicit "localEulerAnglesBaked" z keys by sampling the animation at
/// BakeIncrement intervals. x and y are keyed with empty curves.
/// </summary>
/// <param name="bone">Bone whose applied rotation is sampled.</param>
/// <param name="animation">Animation to sample.</param>
/// <param name="clip">Clip that receives the baked curves.</param>
static void BakeBoneConstraints (Bone bone, Spine.Animation animation, AnimationClip clip) {
    Skeleton skeleton = bone.Skeleton;
    bool inheritRotation = bone.Data.TransformMode.InheritsRotation();
    animation.Apply(skeleton, 0, 0, false, null, 1f, MixBlend.Setup, MixDirection.In);
    skeleton.UpdateWorldTransform();
    float duration = animation.Duration;
    List<Keyframe> keys = new List<Keyframe>();

    float rotation = bone.AppliedRotation;
    if (!inheritRotation)
        rotation = GetUninheritedAppliedRotation(bone);
    keys.Add(new Keyframe(0, rotation, 0, 0));

    int listIndex = 1;
    float r = rotation;
    int steps = Mathf.CeilToInt(duration / BakeIncrement);
    float currentTime = 0;
    float angle = rotation;

    for (int i = 1; i <= steps; i++) {
        currentTime += BakeIncrement;
        if (i == steps)
            currentTime = duration; // land the last sample exactly on the animation's end
        animation.Apply(skeleton, 0, currentTime, true, null, 1f, MixBlend.Setup, MixDirection.In);
        skeleton.UpdateWorldTransform();
        int pIndex = listIndex - 1;
        // Note: the original code read keys[pIndex] twice in a row; the redundant
        // second assignment has been removed.
        Keyframe pk = keys[pIndex];
        rotation = inheritRotation ? bone.AppliedRotation : GetUninheritedAppliedRotation(bone);
        // Accumulate via DeltaAngle so the curve never jumps across the +/-180 seam.
        angle += Mathf.DeltaAngle(angle, rotation);
        r = angle;
        // Patch the previous key's outTangent toward the new key (linear slope).
        float rOut = (r - pk.value) / (currentTime - pk.time);
        pk.outTangent = rOut;
        keys.Add(new Keyframe(currentTime, r, rOut, 0));
        keys[pIndex] = pk;
        listIndex++;
    }

    AnimationCurve curve = EnsureCurveKeyCount(new AnimationCurve(keys.ToArray()));

    string path = GetPath(bone.Data);
    const string propertyName = "localEulerAnglesBaked";

    EditorCurveBinding xBind = EditorCurveBinding.FloatCurve(path, typeof(Transform), propertyName + ".x");
    AnimationUtility.SetEditorCurve(clip, xBind, new AnimationCurve());
    EditorCurveBinding yBind = EditorCurveBinding.FloatCurve(path, typeof(Transform), propertyName + ".y");
    AnimationUtility.SetEditorCurve(clip, yBind, new AnimationCurve());
    EditorCurveBinding zBind = EditorCurveBinding.FloatCurve(path, typeof(Transform), propertyName + ".z");
    AnimationUtility.SetEditorCurve(clip, zBind, curve);
}
/// <summary>
/// Bakes a Spine TranslateTimeline into localPosition.x/.y curves on the clip.
/// Frames are packed as [time, x, y] triplets relative to the bone's setup pose.
/// Linear segments become keys with matching linear tangents, stepped segments
/// become keys with infinite tangents, and bezier segments are sampled at
/// BakeIncrement intervals.
/// </summary>
static void ParseTranslateTimeline (Skeleton skeleton, TranslateTimeline timeline, AnimationClip clip) {
    var boneData = skeleton.Data.Bones.Items[timeline.BoneIndex];
    var bone = skeleton.Bones.Items[timeline.BoneIndex];

    AnimationCurve xCurve = new AnimationCurve();
    AnimationCurve yCurve = new AnimationCurve();
    AnimationCurve zCurve = new AnimationCurve(); // stays empty: z is never keyed here

    // Time of the last frame; each frame occupies 3 floats.
    float endTime = timeline.Frames[(timeline.FrameCount * 3) - 3];
    float currentTime = timeline.Frames[0];

    List<Keyframe> xKeys = new List<Keyframe>();
    List<Keyframe> yKeys = new List<Keyframe>();
    // Timeline values are offsets from setup pose, hence "+ boneData.X/Y".
    xKeys.Add(new Keyframe(timeline.Frames[0], timeline.Frames[1] + boneData.X, 0, 0));
    yKeys.Add(new Keyframe(timeline.Frames[0], timeline.Frames[2] + boneData.Y, 0, 0));

    int listIndex = 1;   // index where the next Unity key will land
    int frameIndex = 1;  // Spine frame currently being emitted
    int f = 3;           // float offset of that frame within Frames
    float[] frames = timeline.Frames;
    skeleton.SetToSetupPose();
    float lastTime = 0;
    while (currentTime < endTime) {
        int pIndex = listIndex - 1; // previous key; its outTangent is patched below
        float curveType = timeline.GetCurveType(frameIndex - 1);

        if (curveType == 0) {
            //linear: one key whose tangents match the straight-line slope
            Keyframe px = xKeys[pIndex];
            Keyframe py = yKeys[pIndex];

            float time = frames[f];
            float x = frames[f + 1] + boneData.X;
            float y = frames[f + 2] + boneData.Y;

            float xOut = (x - px.value) / (time - px.time);
            float yOut = (y - py.value) / (time - py.time);

            px.outTangent = xOut;
            py.outTangent = yOut;

            xKeys.Add(new Keyframe(time, x, xOut, 0));
            yKeys.Add(new Keyframe(time, y, yOut, 0));

            xKeys[pIndex] = px;
            yKeys[pIndex] = py;

            currentTime = time;
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            lastTime = time;
            listIndex++;
        } else if (curveType == 1) {
            //stepped: infinite tangents hold the previous value until the key
            Keyframe px = xKeys[pIndex];
            Keyframe py = yKeys[pIndex];

            float time = frames[f];
            float x = frames[f + 1] + boneData.X;
            float y = frames[f + 2] + boneData.Y;

            float xOut = float.PositiveInfinity;
            float yOut = float.PositiveInfinity;

            px.outTangent = xOut;
            py.outTangent = yOut;

            xKeys.Add(new Keyframe(time, x, xOut, 0));
            yKeys.Add(new Keyframe(time, y, yOut, 0));

            xKeys[pIndex] = px;
            yKeys[pIndex] = py;

            currentTime = time;
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            lastTime = time;
            listIndex++;
        } else if (curveType == 2) {
            //bezier: sample the timeline at fixed increments and key each sample
            Keyframe px = xKeys[pIndex];
            Keyframe py = yKeys[pIndex];

            float time = frames[f];

            int steps = Mathf.FloorToInt((time - px.time) / BakeIncrement);

            for (int i = 1; i <= steps; i++) {
                currentTime += BakeIncrement;
                if (i == steps)
                    currentTime = time; // land the last sample exactly on the frame time

                timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);

                px = xKeys[listIndex - 1];
                py = yKeys[listIndex - 1];

                float xOut = (bone.X - px.value) / (currentTime - px.time);
                float yOut = (bone.Y - py.value) / (currentTime - py.time);

                px.outTangent = xOut;
                py.outTangent = yOut;

                xKeys.Add(new Keyframe(currentTime, bone.X, xOut, 0));
                yKeys.Add(new Keyframe(currentTime, bone.Y, yOut, 0));

                xKeys[listIndex - 1] = px;
                yKeys[listIndex - 1] = py;

                listIndex++;
                lastTime = currentTime;
            }
        }

        frameIndex++;
        f += 3;
    }

    xCurve = EnsureCurveKeyCount(new AnimationCurve(xKeys.ToArray()));
    yCurve = EnsureCurveKeyCount(new AnimationCurve(yKeys.ToArray()));

    string path = GetPath(boneData);
    const string propertyName = "localPosition";

    clip.SetCurve(path, typeof(Transform), propertyName + ".x", xCurve);
    clip.SetCurve(path, typeof(Transform), propertyName + ".y", yCurve);
    clip.SetCurve(path, typeof(Transform), propertyName + ".z", zCurve);
}
/// <summary>
/// Bakes a Spine ScaleTimeline into localScale.x/.y curves on the clip.
/// Frames are packed as [time, scaleX, scaleY] triplets that multiply the bone's
/// setup-pose scale. Segment handling mirrors ParseTranslateTimeline: linear and
/// stepped segments map to single keys; bezier segments are sampled at
/// BakeIncrement intervals.
/// </summary>
static void ParseScaleTimeline (Skeleton skeleton, ScaleTimeline timeline, AnimationClip clip) {
    var boneData = skeleton.Data.Bones.Items[timeline.BoneIndex];
    var bone = skeleton.Bones.Items[timeline.BoneIndex];

    AnimationCurve xCurve = new AnimationCurve();
    AnimationCurve yCurve = new AnimationCurve();
    AnimationCurve zCurve = new AnimationCurve(); // stays empty: z scale is never keyed here

    // Time of the last frame; each frame occupies 3 floats.
    float endTime = timeline.Frames[(timeline.FrameCount * 3) - 3];
    float currentTime = timeline.Frames[0];

    List<Keyframe> xKeys = new List<Keyframe>();
    List<Keyframe> yKeys = new List<Keyframe>();
    // Timeline values multiply the setup-pose scale, hence "* boneData.ScaleX/Y".
    xKeys.Add(new Keyframe(timeline.Frames[0], timeline.Frames[1] * boneData.ScaleX, 0, 0));
    yKeys.Add(new Keyframe(timeline.Frames[0], timeline.Frames[2] * boneData.ScaleY, 0, 0));

    int listIndex = 1;   // index where the next Unity key will land
    int frameIndex = 1;  // Spine frame currently being emitted
    int f = 3;           // float offset of that frame within Frames
    float[] frames = timeline.Frames;
    skeleton.SetToSetupPose();
    float lastTime = 0;
    while (currentTime < endTime) {
        int pIndex = listIndex - 1; // previous key; its outTangent is patched below
        float curveType = timeline.GetCurveType(frameIndex - 1);

        if (curveType == 0) {
            //linear: one key whose tangents match the straight-line slope
            Keyframe px = xKeys[pIndex];
            Keyframe py = yKeys[pIndex];

            float time = frames[f];
            float x = frames[f + 1] * boneData.ScaleX;
            float y = frames[f + 2] * boneData.ScaleY;

            float xOut = (x - px.value) / (time - px.time);
            float yOut = (y - py.value) / (time - py.time);

            px.outTangent = xOut;
            py.outTangent = yOut;

            xKeys.Add(new Keyframe(time, x, xOut, 0));
            yKeys.Add(new Keyframe(time, y, yOut, 0));

            xKeys[pIndex] = px;
            yKeys[pIndex] = py;

            currentTime = time;
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            lastTime = time;
            listIndex++;
        } else if (curveType == 1) {
            //stepped: infinite tangents hold the previous value until the key
            Keyframe px = xKeys[pIndex];
            Keyframe py = yKeys[pIndex];

            float time = frames[f];
            float x = frames[f + 1] * boneData.ScaleX;
            float y = frames[f + 2] * boneData.ScaleY;

            float xOut = float.PositiveInfinity;
            float yOut = float.PositiveInfinity;

            px.outTangent = xOut;
            py.outTangent = yOut;

            xKeys.Add(new Keyframe(time, x, xOut, 0));
            yKeys.Add(new Keyframe(time, y, yOut, 0));

            xKeys[pIndex] = px;
            yKeys[pIndex] = py;

            currentTime = time;
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            lastTime = time;
            listIndex++;
        } else if (curveType == 2) {
            //bezier: sample the timeline at fixed increments and key each sample
            Keyframe px = xKeys[pIndex];
            Keyframe py = yKeys[pIndex];

            float time = frames[f];

            int steps = Mathf.FloorToInt((time - px.time) / BakeIncrement);

            for (int i = 1; i <= steps; i++) {
                currentTime += BakeIncrement;
                if (i == steps)
                    currentTime = time; // land the last sample exactly on the frame time

                timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);

                px = xKeys[listIndex - 1];
                py = yKeys[listIndex - 1];

                float xOut = (bone.ScaleX - px.value) / (currentTime - px.time);
                float yOut = (bone.ScaleY - py.value) / (currentTime - py.time);

                px.outTangent = xOut;
                py.outTangent = yOut;

                xKeys.Add(new Keyframe(currentTime, bone.ScaleX, xOut, 0));
                yKeys.Add(new Keyframe(currentTime, bone.ScaleY, yOut, 0));

                xKeys[listIndex - 1] = px;
                yKeys[listIndex - 1] = py;

                listIndex++;
                lastTime = currentTime;
            }
        }

        frameIndex++;
        f += 3;
    }

    xCurve = EnsureCurveKeyCount(new AnimationCurve(xKeys.ToArray()));
    yCurve = EnsureCurveKeyCount(new AnimationCurve(yKeys.ToArray()));

    string path = GetPath(boneData);
    string propertyName = "localScale";

    clip.SetCurve(path, typeof(Transform), propertyName + ".x", xCurve);
    clip.SetCurve(path, typeof(Transform), propertyName + ".y", yCurve);
    clip.SetCurve(path, typeof(Transform), propertyName + ".z", zCurve);
}
/// <summary>
/// Bakes a Spine RotateTimeline into a "localEulerAnglesBaked" z curve on the
/// clip (x and y receive empty curves). Frames are packed as [time, rotation]
/// pairs relative to the bone's setup-pose rotation. The running angle is
/// accumulated via Mathf.DeltaAngle so the curve stays continuous across the
/// +/-180 degree seam. Bezier segments are sampled at BakeIncrement intervals.
/// </summary>
static void ParseRotateTimeline (Skeleton skeleton, RotateTimeline timeline, AnimationClip clip) {
    var boneData = skeleton.Data.Bones.Items[timeline.BoneIndex];
    var bone = skeleton.Bones.Items[timeline.BoneIndex];

    var curve = new AnimationCurve();

    // Time of the last frame; each frame occupies 2 floats.
    float endTime = timeline.Frames[(timeline.FrameCount * 2) - 2];
    float currentTime = timeline.Frames[0];

    var keys = new List<Keyframe>();
    // Timeline values are offsets from the setup-pose rotation.
    float rotation = timeline.Frames[1] + boneData.Rotation;
    keys.Add(new Keyframe(timeline.Frames[0], rotation, 0, 0));

    int listIndex = 1;   // index where the next Unity key will land
    int frameIndex = 1;  // Spine frame currently being emitted
    int f = 2;           // float offset of that frame within Frames
    float[] frames = timeline.Frames;
    skeleton.SetToSetupPose();
    float lastTime = 0;
    float angle = rotation; // unwrapped running angle
    while (currentTime < endTime) {
        int pIndex = listIndex - 1; // previous key; its outTangent is patched below
        float curveType = timeline.GetCurveType(frameIndex - 1);

        if (curveType == 0) {
            //linear: one key whose tangents match the straight-line slope
            Keyframe pk = keys[pIndex];

            float time = frames[f];
            rotation = frames[f + 1] + boneData.Rotation;
            angle += Mathf.DeltaAngle(angle, rotation);
            float r = angle;

            float rOut = (r - pk.value) / (time - pk.time);

            pk.outTangent = rOut;

            keys.Add(new Keyframe(time, r, rOut, 0));

            keys[pIndex] = pk;

            currentTime = time;
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            lastTime = time;
            listIndex++;
        } else if (curveType == 1) {
            //stepped: infinite tangent holds the previous value until the key
            Keyframe pk = keys[pIndex];

            float time = frames[f];
            rotation = frames[f + 1] + boneData.Rotation;
            angle += Mathf.DeltaAngle(angle, rotation);
            float r = angle;

            float rOut = float.PositiveInfinity;

            pk.outTangent = rOut;

            keys.Add(new Keyframe(time, r, rOut, 0));

            keys[pIndex] = pk;

            currentTime = time;
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            lastTime = time;
            listIndex++;
        } else if (curveType == 2) {
            //bezier: sample the timeline at fixed increments and key each sample
            Keyframe pk = keys[pIndex];

            float time = frames[f];

            // NOTE(review): this pre-loop Apply passes currentTime (not the frame's
            // time) as the end time, unlike the translate/scale parsers — confirm
            // intentional before changing.
            timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
            skeleton.UpdateWorldTransform();
            rotation = frames[f + 1] + boneData.Rotation;
            angle += Mathf.DeltaAngle(angle, rotation);
            float r = angle;

            int steps = Mathf.FloorToInt((time - pk.time) / BakeIncrement);

            for (int i = 1; i <= steps; i++) {
                currentTime += BakeIncrement;
                if (i == steps)
                    currentTime = time; // land the last sample exactly on the frame time

                timeline.Apply(skeleton, lastTime, currentTime, null, 1, MixBlend.Setup, MixDirection.In);
                skeleton.UpdateWorldTransform();
                pk = keys[listIndex - 1];

                rotation = bone.Rotation;
                angle += Mathf.DeltaAngle(angle, rotation);
                r = angle;

                float rOut = (r - pk.value) / (currentTime - pk.time);

                pk.outTangent = rOut;

                keys.Add(new Keyframe(currentTime, r, rOut, 0));

                keys[listIndex - 1] = pk;

                listIndex++;
                lastTime = currentTime;
            }
        }

        frameIndex++;
        f += 2;
    }

    curve = EnsureCurveKeyCount(new AnimationCurve(keys.ToArray()));

    string path = GetPath(boneData);
    const string propertyName = "localEulerAnglesBaked";

    EditorCurveBinding xBind = EditorCurveBinding.FloatCurve(path, typeof(Transform), propertyName + ".x");
    AnimationUtility.SetEditorCurve(clip, xBind, new AnimationCurve());
    EditorCurveBinding yBind = EditorCurveBinding.FloatCurve(path, typeof(Transform), propertyName + ".y");
    AnimationUtility.SetEditorCurve(clip, yBind, new AnimationCurve());
    EditorCurveBinding zBind = EditorCurveBinding.FloatCurve(path, typeof(Transform), propertyName + ".z");
    AnimationUtility.SetEditorCurve(clip, zBind, curve);
}
/// <summary>
/// Converts a Spine EventTimeline into Unity AnimationEvents on the clip. The
/// event's function name is derived from the Spine event name per the user's
/// folder-name preference, and at most one parameter (string, then int, then
/// float) is forwarded.
/// </summary>
static void ParseEventTimeline (EventTimeline timeline, AnimationClip clip, SendMessageOptions eventOptions) {
    float[] frames = timeline.Frames;
    var spineEvents = timeline.Events;

    var animEvents = new List<AnimationEvent>();
    for (int frame = 0, frameCount = frames.Length; frame < frameCount; frame++) {
        var spineEvent = spineEvents[frame];

        string functionName = spineEvent.Data.Name;
        functionName = SpineEditorUtilities.Preferences.mecanimEventIncludeFolderName
            ? functionName.Replace("/", "")                                // calls method FolderNameEventName()
            : functionName.Substring(functionName.LastIndexOf('/') + 1);   // calls method EventName()

        var animEvent = new AnimationEvent {
            time = frames[frame],
            functionName = functionName,
            messageOptions = eventOptions
        };

        // Forward at most one payload; precedence: string, then int, then float.
        if (!string.IsNullOrEmpty(spineEvent.String))
            animEvent.stringParameter = spineEvent.String;
        else if (spineEvent.Int != 0)
            animEvent.intParameter = spineEvent.Int;
        else if (spineEvent.Float != 0)
            animEvent.floatParameter = spineEvent.Float;
        // else: parameterless function/Action.

        animEvents.Add(animEvent);
    }

    AnimationUtility.SetAnimationEvents(clip, animEvents.ToArray());
}
/// <summary>
/// Converts a Spine AttachmentTimeline into per-attachment GameObject visibility
/// curves ("m_IsActive") on the clip. Each known attachment name for the slot
/// gets one stepped 0/1 curve (infinite tangents) that is 1 only while that
/// attachment is active.
/// </summary>
static void ParseAttachmentTimeline (Skeleton skeleton, AttachmentTimeline timeline, Dictionary<int, List<string>> slotLookup, AnimationClip clip) {
    var attachmentNames = slotLookup[timeline.SlotIndex];

    // Curve paths follow the baked hierarchy: bonePath/slotName/attachmentName.
    string bonePath = GetPath(skeleton.Slots.Items[timeline.SlotIndex].Bone.Data);
    string slotPath = bonePath + "/" + skeleton.Slots.Items[timeline.SlotIndex].Data.Name;

    Dictionary<string, AnimationCurve> curveTable = new Dictionary<string, AnimationCurve>();

    foreach (string str in attachmentNames) {
        curveTable.Add(str, new AnimationCurve());
    }

    float[] frames = timeline.Frames;

    // If the timeline doesn't start at t=0, seed keys at t=0 from the slot's
    // setup-pose attachment so visibility is correct before the first frame.
    if (frames[0] != 0) {
        string startingName = skeleton.Slots.Items[timeline.SlotIndex].Data.AttachmentName;
        foreach (var pair in curveTable) {
            if (startingName == "" || startingName == null) {
                pair.Value.AddKey(new Keyframe(0, 0, float.PositiveInfinity, float.PositiveInfinity));
            } else {
                if (pair.Key == startingName) {
                    pair.Value.AddKey(new Keyframe(0, 1, float.PositiveInfinity, float.PositiveInfinity));
                } else {
                    pair.Value.AddKey(new Keyframe(0, 0, float.PositiveInfinity, float.PositiveInfinity));
                }
            }
        }
    }

    float currentTime = timeline.Frames[0];
    float endTime = frames[frames.Length - 1];
    int f = 0;
    while (currentTime < endTime) {
        float time = frames[f];

        int frameIndex = (time >= frames[frames.Length - 1] ? frames.Length : BinarySearch(frames, time)) - 1;

        // NOTE(review): a null attachment name would fall through to the "== name"
        // comparison below (all curves get 0) — presumably equivalent to "", but
        // only the empty string is checked explicitly; confirm null never occurs.
        string name = timeline.AttachmentNames[frameIndex];
        foreach (var pair in curveTable) {
            if (name == "") {
                pair.Value.AddKey(new Keyframe(time, 0, float.PositiveInfinity, float.PositiveInfinity));
            } else {
                if (pair.Key == name) {
                    pair.Value.AddKey(new Keyframe(time, 1, float.PositiveInfinity, float.PositiveInfinity));
                } else {
                    pair.Value.AddKey(new Keyframe(time, 0, float.PositiveInfinity, float.PositiveInfinity));
                }
            }
        }

        currentTime = time;
        f += 1;
    }

    foreach (var pair in curveTable) {
        string path = slotPath + "/" + pair.Key;
        string prop = "m_IsActive";

        clip.SetCurve(path, typeof(GameObject), prop, pair.Value);
    }
}
// A curve with a single key gets a duplicate key 0.25s later so it always has
// at least two keys.
static AnimationCurve EnsureCurveKeyCount (AnimationCurve curve) {
    if (curve.length == 1) {
        Keyframe only = curve.keys[0];
        curve.AddKey(only.time + 0.25f, only.value);
    }
    return curve;
}
// Subtracts every ancestor's applied rotation from the bone's own, giving the
// local value needed for a bone that does not inherit rotation from its parents.
static float GetUninheritedAppliedRotation (Bone b) {
    float angle = b.AppliedRotation;
    for (Bone ancestor = b.Parent; ancestor != null; ancestor = ancestor.Parent)
        angle -= ancestor.AppliedRotation;
    return angle;
}
#endregion
#endregion
#region Region Baking
/// <summary>
/// Bakes a single atlas region into a prefab holding a quad mesh plus its
/// material, stored at "&lt;atlas dir&gt;/&lt;atlas name&gt;/&lt;region&gt;.prefab".
/// Reuses an existing prefab/mesh at that path when present.
/// </summary>
/// <param name="atlasAsset">Atlas asset the region belongs to.</param>
/// <param name="region">Region to bake.</param>
/// <param name="autoSave">When true, saves assets and refreshes the AssetDatabase before returning.</param>
/// <returns>The baked prefab asset.</returns>
public static GameObject BakeRegion (SpineAtlasAsset atlasAsset, AtlasRegion region, bool autoSave = true) {
    atlasAsset.GetAtlas(); // Initializes atlasAsset.
    string atlasAssetPath = AssetDatabase.GetAssetPath(atlasAsset);
    string atlasAssetDirPath = Path.GetDirectoryName(atlasAssetPath).Replace('\\', '/');
    string bakedDirPath = Path.Combine(atlasAssetDirPath, atlasAsset.name);
    string bakedPrefabPath = Path.Combine(bakedDirPath, AssetUtility.GetPathSafeName(region.name) + ".prefab").Replace("\\", "/");

    GameObject prefab = (GameObject)AssetDatabase.LoadAssetAtPath(bakedPrefabPath, typeof(GameObject));
    GameObject root;
    Mesh mesh;
    bool isNewPrefab = false;

    if (!Directory.Exists(bakedDirPath))
        Directory.CreateDirectory(bakedDirPath);

    if (prefab == null) {
        // No previously baked prefab: create one from a temporary scene object.
        root = EditorInstantiation.NewGameObject("temp", true, typeof(MeshFilter), typeof(MeshRenderer));
#if NEW_PREFAB_SYSTEM
        prefab = PrefabUtility.SaveAsPrefabAsset(root, bakedPrefabPath);
#else
        prefab = PrefabUtility.CreatePrefab(bakedPrefabPath, root);
#endif
        isNewPrefab = true;
        Object.DestroyImmediate(root);
    }

    // Reuse the mesh sub-asset stored inside the prefab, if any.
    mesh = (Mesh)AssetDatabase.LoadAssetAtPath(bakedPrefabPath, typeof(Mesh));

    Material mat = null;
    mesh = atlasAsset.GenerateMesh(region.name, mesh, out mat);
    if (isNewPrefab) {
        AssetDatabase.AddObjectToAsset(mesh, prefab);
        prefab.GetComponent<MeshFilter>().sharedMesh = mesh;
    }

    EditorUtility.SetDirty(mesh);
    EditorUtility.SetDirty(prefab);

    if (autoSave) {
        AssetDatabase.SaveAssets();
        AssetDatabase.Refresh();
    }

    prefab.GetComponent<MeshRenderer>().sharedMaterial = mat;

    return prefab;
}
#endregion
// Animation-clip-relative transform path for a bone (no leading slash).
static string GetPath (BoneData b) {
    string absolute = GetPathRecurse(b);
    return absolute.Substring(1);
}
// Builds "/root/parent/.../bone" by walking up the bone hierarchy recursively.
static string GetPathRecurse (BoneData b) {
    if (b == null)
        return string.Empty;
    return GetPathRecurse(b.Parent) + "/" + b.Name;
}
// Thin wrapper over AnimationUtility.SetAnimationClipSettings.
static void SetAnimationSettings (AnimationClip clip, AnimationClipSettings settings) =>
    AnimationUtility.SetAnimationClipSettings(clip, settings);
}
}
| 32.262911 | 244 | 0.682064 | [
"MIT"
] | BaekNothing/Dalpangyeekiwoogi | Assets/Spine/Editor/spine-unity/Editor/Windows/SkeletonBaker.cs | 48,104 | C# |
namespace XenAdmin.Controls.Wlb
{
partial class WlbReportSubscriptionView
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    // Release designer-managed components only when disposing managed state.
    if (disposing)
        components?.Dispose();
    base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(WlbReportSubscriptionView));
this.btnDelete = new System.Windows.Forms.Button();
this.btnChange = new System.Windows.Forms.Button();
this.labelSubscription = new System.Windows.Forms.Label();
this.btnClose = new System.Windows.Forms.Button();
this.tableLayoutPanelSubscriptionDetails = new System.Windows.Forms.TableLayoutPanel();
this.pdSectionParameters = new XenAdmin.Controls.PDSection();
this.pdSectionGeneral = new XenAdmin.Controls.PDSection();
this.pdSectionDelivery = new XenAdmin.Controls.PDSection();
this.pdSectionSchedule = new XenAdmin.Controls.PDSection();
this.pdSectionHistory = new XenAdmin.Controls.PDSection();
this.panelTopControls = new System.Windows.Forms.Panel();
this.flowLayoutPanelTopButtons = new System.Windows.Forms.FlowLayoutPanel();
this.flowLayoutPanel1 = new System.Windows.Forms.FlowLayoutPanel();
this.flowLayoutPanelLowerButtons = new System.Windows.Forms.FlowLayoutPanel();
this.panelCenter = new System.Windows.Forms.Panel();
this.panel1 = new System.Windows.Forms.Panel();
this.tableLayoutPanelSubscriptionDetails.SuspendLayout();
this.panelTopControls.SuspendLayout();
this.flowLayoutPanelTopButtons.SuspendLayout();
this.flowLayoutPanelLowerButtons.SuspendLayout();
this.panelCenter.SuspendLayout();
this.panel1.SuspendLayout();
this.SuspendLayout();
//
// btnDelete
//
resources.ApplyResources(this.btnDelete, "btnDelete");
this.btnDelete.Name = "btnDelete";
this.btnDelete.UseVisualStyleBackColor = true;
this.btnDelete.Click += new System.EventHandler(this.btnDelete_Click);
//
// btnChange
//
resources.ApplyResources(this.btnChange, "btnChange");
this.btnChange.Name = "btnChange";
this.btnChange.UseVisualStyleBackColor = true;
this.btnChange.Click += new System.EventHandler(this.btnChange_Click);
//
// labelSubscription
//
resources.ApplyResources(this.labelSubscription, "labelSubscription");
this.labelSubscription.Name = "labelSubscription";
//
// btnClose
//
resources.ApplyResources(this.btnClose, "btnClose");
this.btnClose.Name = "btnClose";
this.btnClose.UseVisualStyleBackColor = true;
this.btnClose.Click += new System.EventHandler(this.btnClose_Click);
//
// tableLayoutPanelSubscriptionDetails
//
resources.ApplyResources(this.tableLayoutPanelSubscriptionDetails, "tableLayoutPanelSubscriptionDetails");
this.tableLayoutPanelSubscriptionDetails.BackColor = System.Drawing.Color.Transparent;
this.tableLayoutPanelSubscriptionDetails.Controls.Add(this.pdSectionParameters, 0, 1);
this.tableLayoutPanelSubscriptionDetails.Controls.Add(this.pdSectionGeneral, 0, 0);
this.tableLayoutPanelSubscriptionDetails.Controls.Add(this.pdSectionDelivery, 0, 2);
this.tableLayoutPanelSubscriptionDetails.Controls.Add(this.pdSectionSchedule, 0, 3);
this.tableLayoutPanelSubscriptionDetails.Controls.Add(this.pdSectionHistory, 0, 4);
this.tableLayoutPanelSubscriptionDetails.Name = "tableLayoutPanelSubscriptionDetails";
//
// pdSectionParameters
//
this.pdSectionParameters.BackColor = System.Drawing.Color.Gainsboro;
resources.ApplyResources(this.pdSectionParameters, "pdSectionParameters");
this.pdSectionParameters.MinimumSize = new System.Drawing.Size(0, 34);
this.pdSectionParameters.Name = "pdSectionParameters";
this.pdSectionParameters.ShowCellToolTips = false;
//
// pdSectionGeneral
//
this.pdSectionGeneral.BackColor = System.Drawing.Color.Gainsboro;
resources.ApplyResources(this.pdSectionGeneral, "pdSectionGeneral");
this.pdSectionGeneral.MinimumSize = new System.Drawing.Size(0, 34);
this.pdSectionGeneral.Name = "pdSectionGeneral";
this.pdSectionGeneral.ShowCellToolTips = false;
//
// pdSectionDelivery
//
this.pdSectionDelivery.BackColor = System.Drawing.Color.Gainsboro;
resources.ApplyResources(this.pdSectionDelivery, "pdSectionDelivery");
this.pdSectionDelivery.MinimumSize = new System.Drawing.Size(0, 34);
this.pdSectionDelivery.Name = "pdSectionDelivery";
this.pdSectionDelivery.ShowCellToolTips = false;
//
// pdSectionSchedule
//
this.pdSectionSchedule.BackColor = System.Drawing.Color.Gainsboro;
resources.ApplyResources(this.pdSectionSchedule, "pdSectionSchedule");
this.pdSectionSchedule.MinimumSize = new System.Drawing.Size(0, 34);
this.pdSectionSchedule.Name = "pdSectionSchedule";
this.pdSectionSchedule.ShowCellToolTips = false;
//
// pdSectionHistory
//
this.pdSectionHistory.BackColor = System.Drawing.Color.Gainsboro;
resources.ApplyResources(this.pdSectionHistory, "pdSectionHistory");
this.pdSectionHistory.MinimumSize = new System.Drawing.Size(0, 34);
this.pdSectionHistory.Name = "pdSectionHistory";
this.pdSectionHistory.ShowCellToolTips = false;
//
// panelTopControls
//
this.panelTopControls.Controls.Add(this.flowLayoutPanelTopButtons);
this.panelTopControls.Controls.Add(this.flowLayoutPanel1);
this.panelTopControls.Controls.Add(this.labelSubscription);
resources.ApplyResources(this.panelTopControls, "panelTopControls");
this.panelTopControls.Name = "panelTopControls";
//
// flowLayoutPanelTopButtons
//
resources.ApplyResources(this.flowLayoutPanelTopButtons, "flowLayoutPanelTopButtons");
this.flowLayoutPanelTopButtons.Controls.Add(this.btnDelete);
this.flowLayoutPanelTopButtons.Controls.Add(this.btnChange);
this.flowLayoutPanelTopButtons.Name = "flowLayoutPanelTopButtons";
//
// flowLayoutPanel1
//
resources.ApplyResources(this.flowLayoutPanel1, "flowLayoutPanel1");
this.flowLayoutPanel1.Name = "flowLayoutPanel1";
//
// flowLayoutPanelLowerButtons
//
resources.ApplyResources(this.flowLayoutPanelLowerButtons, "flowLayoutPanelLowerButtons");
this.flowLayoutPanelLowerButtons.Controls.Add(this.btnClose);
this.flowLayoutPanelLowerButtons.Name = "flowLayoutPanelLowerButtons";
//
// panelCenter
//
resources.ApplyResources(this.panelCenter, "panelCenter");
this.panelCenter.Controls.Add(this.tableLayoutPanelSubscriptionDetails);
this.panelCenter.Name = "panelCenter";
//
// panel1
//
this.panel1.Controls.Add(this.flowLayoutPanelLowerButtons);
resources.ApplyResources(this.panel1, "panel1");
this.panel1.Name = "panel1";
//
// WlbReportSubscriptionView
//
resources.ApplyResources(this, "$this");
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
this.Controls.Add(this.panelCenter);
this.Controls.Add(this.panelTopControls);
this.Controls.Add(this.panel1);
this.MinimumSize = new System.Drawing.Size(671, 278);
this.Name = "WlbReportSubscriptionView";
this.Load += new System.EventHandler(this.ReportSubscriptionView_Load);
this.Resize += new System.EventHandler(this.WlbReportSubscriptionView_Resize);
this.tableLayoutPanelSubscriptionDetails.ResumeLayout(false);
this.panelTopControls.ResumeLayout(false);
this.panelTopControls.PerformLayout();
this.flowLayoutPanelTopButtons.ResumeLayout(false);
this.flowLayoutPanelLowerButtons.ResumeLayout(false);
this.panelCenter.ResumeLayout(false);
this.panel1.ResumeLayout(false);
this.panel1.PerformLayout();
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
        // Action buttons exposed as 'internal' so other classes in the assembly
        // can enable/disable or wire them up.
        internal System.Windows.Forms.Button btnDelete;
        internal System.Windows.Forms.Button btnChange;
        // Remaining designer-managed controls, laid out by InitializeComponent.
        private System.Windows.Forms.Label labelSubscription;
        private System.Windows.Forms.Button btnClose;
        private System.Windows.Forms.TableLayoutPanel tableLayoutPanelSubscriptionDetails;
        // PDSection detail panels (General / Parameters / Delivery / Schedule / History).
        private PDSection pdSectionGeneral;
        private PDSection pdSectionParameters;
        private PDSection pdSectionDelivery;
        private PDSection pdSectionSchedule;
        private PDSection pdSectionHistory;
        // Layout containers for the top controls, centre area and lower button row.
        private System.Windows.Forms.Panel panelTopControls;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanelLowerButtons;
        private System.Windows.Forms.Panel panelCenter;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel1;
        private System.Windows.Forms.FlowLayoutPanel flowLayoutPanelTopButtons;
        private System.Windows.Forms.Panel panel1;
}
}
| 51.409302 | 158 | 0.63286 | [
"BSD-2-Clause"
] | ChrisH4rding/xenadmin | XenAdmin/Controls/Wlb/WlbReportSubscriptionView.Designer.cs | 11,053 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:2.0.50727.1433
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace EPiServer.Templates.AlloyTech.Forum.Pages {
    /// <summary>
    /// Designer-generated partial class for the Post page. It currently
    /// declares no members; control field declarations are added here by the
    /// designer tool when server controls are placed on the page.
    /// </summary>
    /// <remarks>
    /// Auto-generated class; do not edit by hand — changes will be lost when
    /// the file is regenerated.
    /// </remarks>
    public partial class Post {
    }
}
| 27.130435 | 80 | 0.445513 | [
"MIT"
] | Episerver-trainning/episerver6r2_sso | Templates/AlloyTech/Forum/Pages/Post.aspx.designer.cs | 624 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel.Design;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using ChangeLanguageVersionExtension.ProjectVersionServices;
using EnvDTE80;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.Win32;
using Task = System.Threading.Tasks.Task;
namespace ChangeLanguageVersionExtension
{
    /// <summary>
    /// Async Visual Studio package that contributes one menu command per C#
    /// language version and keeps the checked command in sync with the active
    /// project's configured language version. Auto-loads in the background
    /// whenever a C# project is open.
    /// </summary>
    [PackageRegistration(UseManagedResourcesOnly = true, AllowsBackgroundLoading = true)]
    [ProvideAutoLoad(VSConstants.UICONTEXT.CSharpProject_string, PackageAutoLoadFlags.BackgroundLoad)]
    [InstalledProductRegistration("#110", "#112", "1.0", IconResourceID = 400)]
    [ProvideMenuResource("Menus.ctmenu", 1)]
    [Guid(PackageGuids.guidChangeLanguageVersionCommandPackageString)]
    [SuppressMessage("StyleCop.CSharp.DocumentationRules", "SA1650:ElementDocumentationMustBeSpelledCorrectly", Justification = "pkgdef, VS and vsixmanifest are valid VS terms")]
    public sealed class ChangeLanguageVersionCommandPackage : AsyncPackage
    {
        // Reads and writes the language version of the current project.
        private IProjectVersionService projectVersionService;
        // One command object per entry in versionCommands; built in InitializeAsync.
        private List<ChangeLanguageVersionCommand> commands;
        // Maps each supported C# language version string to its menu command id.
        private readonly List<(string version, int commandId)> versionCommands = new List<(string version, int commandId)> {
            (LanguageVersions.Default,PackageIds.cmdSetToDefault),
            (LanguageVersions.Latest,PackageIds.cmdSetToLatest),
            (LanguageVersions.LatestMajor,PackageIds.cmdSetToLatestMajor),
            (LanguageVersions.CSharp5, PackageIds.cmdSetToCSharp5),
            (LanguageVersions.CSharp6, PackageIds.cmdSetToCSharp6),
            (LanguageVersions.CSharp7, PackageIds.cmdSetToCSharp7),
            (LanguageVersions.CSharp71, PackageIds.cmdSetToCSharp71),
            (LanguageVersions.CSharp72, PackageIds.cmdSetToCSharp72),
            (LanguageVersions.CSharp73, PackageIds.cmdSetToCSharp73),
            (LanguageVersions.CSharp8, PackageIds.cmdSetToCSharp8),
            (LanguageVersions.Preview, PackageIds.cmdSetToPreview),
        };
        /// <summary>
        /// Gives the version commands radio-button behaviour: whenever one
        /// command becomes checked, all of the other commands are unchecked.
        /// </summary>
        private void HandleChecked(object sender, EventArgs e)
        {
            foreach (var command in commands)
            {
                if (command != (ChangeLanguageVersionCommand)sender)
                {
                    command.Checked = false;
                }
            }
        }
        /// <summary>
        /// Creates the menu commands, pre-checks the one matching the project's
        /// current language version, and hides versions that are not available
        /// for the project.
        /// </summary>
        protected override async Task InitializeAsync(CancellationToken cancellationToken, IProgress<ServiceProgressData> progress)
        {
            // DTE and the menu command service are UI-affine, so hop to the
            // main thread before touching them.
            await this.JoinableTaskFactory.SwitchToMainThreadAsync(cancellationToken);
            projectVersionService = new ProjectVersionService((DTE2)GetGlobalService(typeof(SDTE)));
            var commandService = (OleMenuCommandService)(await GetServiceAsync((typeof(IMenuCommandService))));
            // Build one command object per known language version.
            commands = versionCommands.Select(c => new ChangeLanguageVersionCommand(this, projectVersionService, c.version, new CommandID(PackageGuids.guidChangeLanguageVersionCommandPackageCmdSet, c.commandId))).ToList();
            var currentLanguageVersion = projectVersionService.GetLanguageVersion();
            var availableVersions = projectVersionService.GetAvailableLanguageVersions();
            foreach (var command in commands)
            {
                // Reflect the project's current setting in the menu.
                if (command.LanguageVersion == currentLanguageVersion)
                {
                    command.Checked = true;
                }
                command.OnChecked += HandleChecked;
                // Only show versions the project can actually use.
                command.Visible = availableVersions.Contains(command.LanguageVersion);
                commandService.AddCommand(command);
            }
        }
    }
}
| 66.620253 | 223 | 0.527646 | [
"MIT"
] | conwid/ChangeLanguageVersionExtension | ChangeLanguageVersionExtension/ChangeLanguageVersionCommandPackage.cs | 5,265 | C# |
using AMaaS.Core.Sdk.Models.Utils;
using AMaaS.Core.Sdk.Transactions.Enums;
using AMaaS.Core.Sdk.Transactions.Models;
using System;
using System.Collections.Generic;
using System.Text;
using Xunit;
namespace AMaaS.Core.Sdk.Transactions.Tests
{
    /// <summary>
    /// Unit tests for the <see cref="Transaction"/> model: equality / hash-code
    /// semantics and JSON round-tripping.
    /// </summary>
    public class ModelTests
    {
        /// <summary>
        /// Equality must ignore audit fields (created/updated by/time) but be
        /// sensitive to business fields such as execution time and action.
        /// </summary>
        [Fact]
        public void EqualityTests()
        {
            var transaction1 = new Transaction
            {
                AssetManagerId = 103,
                TransactionId = "Tran123",
                AssetBookId = "Book123",
                SettlementCurrency = "USD",
                TransactionCurrency = "USD",
                TransactionAction = TransactionAction.ShortSell
            };

            // An instance must equal itself (reference-equality case).
            var transaction2 = transaction1;
            Assert.Equal(transaction1, transaction2);
            Assert.Equal(transaction1.GetHashCode(), transaction2.GetHashCode());

            // A distinct instance with the same business fields must compare
            // equal and, per the GetHashCode contract, hash identically.
            transaction2 = new Transaction
            {
                AssetManagerId = 103,
                TransactionId = "Tran123",
                AssetBookId = "Book123",
                SettlementCurrency = "USD",
                TransactionCurrency = "USD",
                TransactionAction = TransactionAction.ShortSell
            };
            Assert.Equal(transaction1, transaction2);
            Assert.Equal(transaction1.GetHashCode(), transaction2.GetHashCode());

            // Audit fields should be ignored by the equality check.
            transaction2.CreatedBy = "John";
            transaction2.UpdatedBy = "Watson";
            transaction2.CreatedTime = DateTime.Now;
            transaction2.UpdatedTime = DateTime.Now;
            Assert.Equal(transaction1, transaction2);
            Assert.Equal(transaction1.GetHashCode(), transaction2.GetHashCode());

            // Changing business fields must break equality.
            transaction2.ExecutionTime = DateTime.Now;
            transaction2.TransactionAction = TransactionAction.Sell;
            //TODO: Need to implement equality from AMaaSModel and have it work for all subclasses
            Assert.NotEqual(transaction1, transaction2);
            // Deliberately no Assert.NotEqual on the hash codes here: the
            // GetHashCode contract allows unequal objects to share a hash code,
            // so such an assertion could fail spuriously.
        }

        /// <summary>
        /// A transaction serialised to JSON and deserialised back must compare
        /// equal to the original instance.
        /// </summary>
        [Fact]
        public void TestJsonSerialization()
        {
            var transaction1 = new Transaction
            {
                AssetManagerId = 103,
                TransactionId = "Tran123",
                AssetBookId = "Book123",
                SettlementCurrency = "USD",
                TransactionCurrency = "USD",
                TransactionAction = TransactionAction.ShortSell,
                Parties = new Dictionary<string, Party> { { "Counterparty", new Party { PartyId = "P123", Version = 2 } } }
            };

            var jsonTransaction = SerializationUtils.ToJson(transaction1);
            var transaction2 = SerializationUtils.FromJson<Transaction>(jsonTransaction);
            Assert.Equal(transaction1, transaction2);
        }
    }
}
| 41.319444 | 124 | 0.58084 | [
"Apache-2.0"
] | amaas-fintech/amaas-core-sdk-dotnet | Tests/AMaaS.Core.Sdk.Transactions.Tests/ModelTests.cs | 2,977 | C# |
/*
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.RDS.Model
{
    /// <summary>
    /// Container for the parameters to the AddTagsToResource operation.
    /// <para>Adds metadata tags to a DB Instance. The tags can also be used with
    /// cost allocation reporting to track cost associated with a DB
    /// Instance.</para> <para>For an overview on tagging DB Instances, see DB Instance Tags.</para>
    /// </summary>
    /// <seealso cref="Amazon.RDS.AmazonRDS.AddTagsToResource"/>
    public class AddTagsToResourceRequest : AmazonWebServiceRequest
    {
        private string _resourceName;
        private List<Tag> _tags = new List<Tag>();

        /// <summary>
        /// The DB Instance the tags will be added to, expressed as an Amazon
        /// Resource Name (ARN). For information about creating an ARN, see <a
        /// href="http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Tagging.html#USER_Tagging.ARN"> Constructing an RDS Amazon Resource Name
        /// (ARN)</a>.
        /// </summary>
        public string ResourceName
        {
            get { return _resourceName; }
            set { _resourceName = value; }
        }

        /// <summary>
        /// Sets the ResourceName property and returns this request so that
        /// calls can be chained.
        /// </summary>
        /// <param name="resourceName">The value to set for the ResourceName property </param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AddTagsToResourceRequest WithResourceName(string resourceName)
        {
            ResourceName = resourceName;
            return this;
        }

        // True when a resource name has been assigned.
        internal bool IsSetResourceName()
        {
            return _resourceName != null;
        }

        /// <summary>
        /// The tags to be assigned to the DB Instance.
        /// </summary>
        public List<Tag> Tags
        {
            get { return _tags; }
            set { _tags = value; }
        }

        /// <summary>
        /// Adds the given tags to the Tags collection and returns this request
        /// so that calls can be chained.
        /// </summary>
        /// <param name="tags">The values to add to the Tags collection </param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AddTagsToResourceRequest WithTags(params Tag[] tags)
        {
            _tags.AddRange(tags);
            return this;
        }

        /// <summary>
        /// Adds the given tags to the Tags collection and returns this request
        /// so that calls can be chained.
        /// </summary>
        /// <param name="tags">The values to add to the Tags collection </param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AddTagsToResourceRequest WithTags(IEnumerable<Tag> tags)
        {
            _tags.AddRange(tags);
            return this;
        }

        // True when at least one tag has been added.
        internal bool IsSetTags()
        {
            return _tags.Count > 0;
        }
    }
}
| 36.760684 | 177 | 0.614508 | [
"Apache-2.0"
] | mahanthbeeraka/dataservices-sdk-dotnet | AWSSDK/Amazon.RDS/Model/AddTagsToResourceRequest.cs | 4,301 | C# |
namespace flier268.Win32API
{
    /// <summary>
    /// Managed counterpart of the Win32 <c>LDT_ENTRY</c> structure (winnt.h),
    /// which describes a local descriptor table entry and is used with APIs
    /// such as <c>GetThreadSelectorEntry</c>.
    /// </summary>
    /// <remarks>
    /// The sequential layout is declared explicitly (it is also the default
    /// for structs) so the type is unambiguously blittable for P/Invoke.
    /// Total marshalled size is 8 bytes: two 16-bit words plus one 32-bit word.
    /// </remarks>
    [System.Runtime.InteropServices.StructLayout(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public struct LDT_ENTRY
    {
        /// <summary>Low-order 16 bits of the segment limit (native WORD).</summary>
        public short LimitLow;
        /// <summary>Low-order 16 bits of the segment base address (native WORD).</summary>
        public short BaseLow;
        /// <summary>The packed <c>HighWord</c> union of the native structure
        /// (middle/high base bytes and descriptor flags).</summary>
        public int HighWord;
    }
}
"MIT"
] | flier268/Win32API | flier268.Win32API.Kernel32/LDT_ENTRY.cs | 163 | C# |
#pragma checksum "..\..\PositionForm.xaml" "{406ea660-64cf-4c82-b6f0-42d48172a799}" "C938A01CB87334E5F35351F436B9C0AF"
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using App_Manager;
using System;
using System.Diagnostics;
using System.Windows;
using System.Windows.Automation;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Markup;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Media.Effects;
using System.Windows.Media.Imaging;
using System.Windows.Media.Media3D;
using System.Windows.Media.TextFormatting;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Windows.Shell;
namespace App_Manager {
    /// <summary>
    /// PositionForm — designer-generated half of the window's partial class.
    /// The fields and the Connect() wiring below are emitted by the XAML build
    /// task from PositionForm.xaml; do not edit by hand.
    /// </summary>
    public partial class PositionForm : System.Windows.Window, System.Windows.Markup.IComponentConnector {
        // The fields below correspond to the named elements in
        // PositionForm.xaml and are assigned by Connect() during XAML load.
        #line 10 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBox Company;
        #line default
        #line hidden
        #line 11 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBox Position;
        #line default
        #line hidden
        #line 12 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBox Date_of_App;
        #line default
        #line hidden
        #line 13 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBox Requisition_ID;
        #line default
        #line hidden
        #line 14 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBox Addition_Info;
        #line default
        #line hidden
        #line 16 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBlock Company_Head;
        #line default
        #line hidden
        #line 17 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBlock Position_Head;
        #line default
        #line hidden
        #line 18 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBlock Date_Head;
        #line default
        #line hidden
        #line 19 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBlock Req_Head;
        #line default
        #line hidden
        #line 20 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.TextBlock Additional_Head;
        #line default
        #line hidden
        #line 22 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.Button closeButton;
        #line default
        #line hidden
        #line 23 "..\..\PositionForm.xaml"
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
        internal System.Windows.Controls.Button saveButton;
        #line default
        #line hidden
        // Guards against loading the XAML content more than once.
        private bool _contentLoaded;
        /// <summary>
        /// InitializeComponent — loads PositionForm.xaml exactly once.
        /// </summary>
        [System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [System.CodeDom.Compiler.GeneratedCodeAttribute("PresentationBuildTasks", "4.0.0.0")]
        public void InitializeComponent() {
            if (_contentLoaded) {
                return;
            }
            _contentLoaded = true;
            System.Uri resourceLocater = new System.Uri("/App_Manager;component/positionform.xaml", System.UriKind.Relative);
            #line 1 "..\..\PositionForm.xaml"
            System.Windows.Application.LoadComponent(this, resourceLocater);
            #line default
            #line hidden
        }
        // Invoked by the XAML loader once per named element / event hook;
        // connectionId identifies which element is being connected.
        [System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [System.CodeDom.Compiler.GeneratedCodeAttribute("PresentationBuildTasks", "4.0.0.0")]
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Design", "CA1033:InterfaceMethodsShouldBeCallableByChildTypes")]
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
        [System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1800:DoNotCastUnnecessarily")]
        void System.Windows.Markup.IComponentConnector.Connect(int connectionId, object target) {
            switch (connectionId)
            {
            case 1:
            this.Company = ((System.Windows.Controls.TextBox)(target));
            return;
            case 2:
            this.Position = ((System.Windows.Controls.TextBox)(target));
            return;
            case 3:
            this.Date_of_App = ((System.Windows.Controls.TextBox)(target));
            return;
            case 4:
            this.Requisition_ID = ((System.Windows.Controls.TextBox)(target));
            return;
            case 5:
            this.Addition_Info = ((System.Windows.Controls.TextBox)(target));
            return;
            case 6:
            this.Company_Head = ((System.Windows.Controls.TextBlock)(target));
            return;
            case 7:
            this.Position_Head = ((System.Windows.Controls.TextBlock)(target));
            return;
            case 8:
            this.Date_Head = ((System.Windows.Controls.TextBlock)(target));
            return;
            case 9:
            this.Req_Head = ((System.Windows.Controls.TextBlock)(target));
            return;
            case 10:
            this.Additional_Head = ((System.Windows.Controls.TextBlock)(target));
            return;
            case 11:
            this.closeButton = ((System.Windows.Controls.Button)(target));
            #line 22 "..\..\PositionForm.xaml"
            this.closeButton.Click += new System.Windows.RoutedEventHandler(this.close_Click);
            #line default
            #line hidden
            return;
            case 12:
            this.saveButton = ((System.Windows.Controls.Button)(target));
            #line 23 "..\..\PositionForm.xaml"
            this.saveButton.Click += new System.Windows.RoutedEventHandler(this.save_Click);
            #line default
            #line hidden
            return;
            }
            this._contentLoaded = true;
        }
    }
}
| 38.040359 | 141 | 0.620771 | [
"MIT"
] | BadJukeBox/Application-Manager | App_Manager/App_Manager/obj/Debug/PositionForm.g.cs | 8,485 | C# |
/*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2.1
* Contact: devcenter@docusign.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = DocuSign.eSign.Client.SwaggerDateConverter;
namespace DocuSign.eSign.Model
{
    /// <summary>
    /// Filter — a set of optional criteria (date range, folders, status,
    /// free-text search, ordering) used to narrow envelope/template queries.
    /// NOTE(review): several of the generated member docs below were wrong
    /// copy-paste from an OAuth token model and have been corrected/hedged;
    /// confirm exact field semantics against the DocuSign eSignature REST API
    /// reference.
    /// </summary>
    [DataContract]
    public partial class Filter :  IEquatable<Filter>, IValidatableObject
    {
        public Filter()
        {
            // Parameterless constructor — presumably required for
            // DataContract/JSON deserialisation; do not remove.
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Filter" /> class.
        /// </summary>
        /// <param name="ActionRequired">When true, restricts results to items that are awaiting an action from the user — TODO confirm against the DocuSign API reference..</param>
        /// <param name="Expires">Expires.</param>
        /// <param name="FolderIds">Id(s) of the folder(s) to search in — confirm exact format with the API reference..</param>
        /// <param name="FromDateTime">Start of the date/time range to filter on..</param>
        /// <param name="IsTemplate">IsTemplate.</param>
        /// <param name="Order">Order.</param>
        /// <param name="OrderBy">OrderBy.</param>
        /// <param name="SearchTarget">SearchTarget.</param>
        /// <param name="SearchText">Free text to search for..</param>
        /// <param name="Status">Indicates the envelope status. Valid values are: * sent - The envelope is sent to the recipients. * created - The envelope is saved as a draft and can be modified and sent later..</param>
        /// <param name="ToDateTime">End of the date/time range to filter on (the generated "bearer" text was copy-paste noise)..</param>
        public Filter(string ActionRequired = default(string), string Expires = default(string), string FolderIds = default(string), string FromDateTime = default(string), string IsTemplate = default(string), string Order = default(string), string OrderBy = default(string), string SearchTarget = default(string), string SearchText = default(string), string Status = default(string), string ToDateTime = default(string))
        {
            this.ActionRequired = ActionRequired;
            this.Expires = Expires;
            this.FolderIds = FolderIds;
            this.FromDateTime = FromDateTime;
            this.IsTemplate = IsTemplate;
            this.Order = Order;
            this.OrderBy = OrderBy;
            this.SearchTarget = SearchTarget;
            this.SearchText = SearchText;
            this.Status = Status;
            this.ToDateTime = ToDateTime;
        }

        /// <summary>
        /// When true, restricts results to items awaiting the user's action —
        /// TODO confirm against the DocuSign API reference.
        /// </summary>
        /// <value>Filters for items that require the user's action.</value>
        [DataMember(Name="actionRequired", EmitDefaultValue=false)]
        public string ActionRequired { get; set; }
        /// <summary>
        /// Gets or Sets Expires
        /// </summary>
        [DataMember(Name="expires", EmitDefaultValue=false)]
        public string Expires { get; set; }
        /// <summary>
        /// Gets or Sets FolderIds
        /// </summary>
        [DataMember(Name="folderIds", EmitDefaultValue=false)]
        public string FolderIds { get; set; }
        /// <summary>
        /// Gets or Sets FromDateTime (start of the date/time range to filter on)
        /// </summary>
        [DataMember(Name="fromDateTime", EmitDefaultValue=false)]
        public string FromDateTime { get; set; }
        /// <summary>
        /// Gets or Sets IsTemplate
        /// </summary>
        [DataMember(Name="isTemplate", EmitDefaultValue=false)]
        public string IsTemplate { get; set; }
        /// <summary>
        /// Gets or Sets Order
        /// </summary>
        [DataMember(Name="order", EmitDefaultValue=false)]
        public string Order { get; set; }
        /// <summary>
        /// Gets or Sets OrderBy
        /// </summary>
        [DataMember(Name="orderBy", EmitDefaultValue=false)]
        public string OrderBy { get; set; }
        /// <summary>
        /// Gets or Sets SearchTarget
        /// </summary>
        [DataMember(Name="searchTarget", EmitDefaultValue=false)]
        public string SearchTarget { get; set; }
        /// <summary>
        /// Gets or Sets SearchText
        /// </summary>
        [DataMember(Name="searchText", EmitDefaultValue=false)]
        public string SearchText { get; set; }
        /// <summary>
        /// Indicates the envelope status. Valid values are: * sent - The envelope is sent to the recipients. * created - The envelope is saved as a draft and can be modified and sent later.
        /// </summary>
        /// <value>Indicates the envelope status. Valid values are: * sent - The envelope is sent to the recipients. * created - The envelope is saved as a draft and can be modified and sent later.</value>
        [DataMember(Name="status", EmitDefaultValue=false)]
        public string Status { get; set; }
        /// <summary>
        /// End of the date/time range to filter on. (The generated "Must be set
        /// to \"bearer\"" text was copy-paste noise from an OAuth token model.)
        /// </summary>
        /// <value>End of the date/time range to filter on.</value>
        [DataMember(Name="toDateTime", EmitDefaultValue=false)]
        public string ToDateTime { get; set; }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class Filter {\n");
            sb.Append("  ActionRequired: ").Append(ActionRequired).Append("\n");
            sb.Append("  Expires: ").Append(Expires).Append("\n");
            sb.Append("  FolderIds: ").Append(FolderIds).Append("\n");
            sb.Append("  FromDateTime: ").Append(FromDateTime).Append("\n");
            sb.Append("  IsTemplate: ").Append(IsTemplate).Append("\n");
            sb.Append("  Order: ").Append(Order).Append("\n");
            sb.Append("  OrderBy: ").Append(OrderBy).Append("\n");
            sb.Append("  SearchTarget: ").Append(SearchTarget).Append("\n");
            sb.Append("  SearchText: ").Append(SearchText).Append("\n");
            sb.Append("  Status: ").Append(Status).Append("\n");
            sb.Append("  ToDateTime: ").Append(ToDateTime).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }
        
        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            // credit: http://stackoverflow.com/a/10454552/677735
            return this.Equals(obj as Filter);
        }

        /// <summary>
        /// Returns true if Filter instances are equal
        /// </summary>
        /// <param name="other">Instance of Filter to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(Filter other)
        {
            // credit: http://stackoverflow.com/a/10454552/677735
            // Field-by-field comparison; each clause treats two nulls as equal.
            if (other == null)
                return false;

            return 
                (
                    this.ActionRequired == other.ActionRequired ||
                    this.ActionRequired != null &&
                    this.ActionRequired.Equals(other.ActionRequired)
                ) && 
                (
                    this.Expires == other.Expires ||
                    this.Expires != null &&
                    this.Expires.Equals(other.Expires)
                ) && 
                (
                    this.FolderIds == other.FolderIds ||
                    this.FolderIds != null &&
                    this.FolderIds.Equals(other.FolderIds)
                ) && 
                (
                    this.FromDateTime == other.FromDateTime ||
                    this.FromDateTime != null &&
                    this.FromDateTime.Equals(other.FromDateTime)
                ) && 
                (
                    this.IsTemplate == other.IsTemplate ||
                    this.IsTemplate != null &&
                    this.IsTemplate.Equals(other.IsTemplate)
                ) && 
                (
                    this.Order == other.Order ||
                    this.Order != null &&
                    this.Order.Equals(other.Order)
                ) && 
                (
                    this.OrderBy == other.OrderBy ||
                    this.OrderBy != null &&
                    this.OrderBy.Equals(other.OrderBy)
                ) && 
                (
                    this.SearchTarget == other.SearchTarget ||
                    this.SearchTarget != null &&
                    this.SearchTarget.Equals(other.SearchTarget)
                ) && 
                (
                    this.SearchText == other.SearchText ||
                    this.SearchText != null &&
                    this.SearchText.Equals(other.SearchText)
                ) && 
                (
                    this.Status == other.Status ||
                    this.Status != null &&
                    this.Status.Equals(other.Status)
                ) && 
                (
                    this.ToDateTime == other.ToDateTime ||
                    this.ToDateTime != null &&
                    this.ToDateTime.Equals(other.ToDateTime)
                );
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            // credit: http://stackoverflow.com/a/263416/677735
            // Combines all fields used by Equals with the standard 41/59
            // prime multiply-add scheme; null fields are skipped.
            unchecked // Overflow is fine, just wrap
            {
                int hash = 41;
                // Suitable nullity checks etc, of course :)
                if (this.ActionRequired != null)
                    hash = hash * 59 + this.ActionRequired.GetHashCode();
                if (this.Expires != null)
                    hash = hash * 59 + this.Expires.GetHashCode();
                if (this.FolderIds != null)
                    hash = hash * 59 + this.FolderIds.GetHashCode();
                if (this.FromDateTime != null)
                    hash = hash * 59 + this.FromDateTime.GetHashCode();
                if (this.IsTemplate != null)
                    hash = hash * 59 + this.IsTemplate.GetHashCode();
                if (this.Order != null)
                    hash = hash * 59 + this.Order.GetHashCode();
                if (this.OrderBy != null)
                    hash = hash * 59 + this.OrderBy.GetHashCode();
                if (this.SearchTarget != null)
                    hash = hash * 59 + this.SearchTarget.GetHashCode();
                if (this.SearchText != null)
                    hash = hash * 59 + this.SearchText.GetHashCode();
                if (this.Status != null)
                    hash = hash * 59 + this.Status.GetHashCode();
                if (this.ToDateTime != null)
                    hash = hash * 59 + this.ToDateTime.GetHashCode();
                return hash;
            }
        }

        /// <summary>
        /// No model-level validation rules are defined; always yields nothing.
        /// </summary>
        public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
        {
            yield break;
        }
    }

}
| 41.928571 | 420 | 0.536542 | [
"MIT"
] | MaxMood96/docusign-esign-csharp-client | sdk/src/DocuSign.eSign/Model/Filter.cs | 11,740 | C# |
using System.Text.RegularExpressions;
namespace AmplaData.Display
{
    /// <summary>
    /// String extension helpers for producing human-readable display text.
    /// </summary>
    public static class DisplayStringExtensions
    {
        // Matches an upper-case letter followed by a lower-case letter, i.e.
        // the start of each capitalised word in a PascalCase identifier.
        private static readonly Regex WordStartPattern = new Regex("([A-Z][a-z])", RegexOptions.Compiled);

        /// <summary>
        /// Splits a PascalCase string into space-separated words, e.g.
        /// "OrderNumber" becomes "Order Number". Strings without a
        /// capital-lowercase pair are returned unchanged (aside from trimming).
        /// </summary>
        public static string ToSeparatedWords(this string value)
        {
            string separated = WordStartPattern.Replace(value, " $1");
            return separated.Trim();
        }
    }
}
"MIT"
] | Ampla/Ampla-Data | src/AmplaData/Display/DisplayStringExtensions.cs | 396 | C# |
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using V308CMS.Data.Metadata;
using V308CMS.Data.Models;
namespace V308CMS.Data.Models
{
    /// <summary>
    /// Security role entity (table "role"). A role groups a set of
    /// <see cref="Permission"/>s and can be held by <see cref="Admin"/> accounts.
    /// </summary>
    [Table("role")]
    [MetadataType(typeof(RoleMetadata))]
    public class Role
    {
        public Role()
        {
            // Initialise both navigation collections so callers can Add()
            // without a null check. (Previously only Permissions was
            // initialised, leaving AdminAccounts null by default.)
            Permissions = new HashSet<Permission>();
            AdminAccounts = new HashSet<Admin>();
        }
        // Identifier (primary key by EF naming convention).
        public int Id { get; set; }
        // Display name of the role.
        public string Name { get; set; }
        // Free-text description of the role.
        public string Description { get; set; }
        // Status flag; exact values are application-defined — confirm against usage.
        public byte Status { get; set; }
        // Permissions granted to this role.
        public virtual ICollection<Permission> Permissions { get; set; }
        // Admin accounts that hold this role.
        public virtual ICollection<Admin> AdminAccounts { get; set; }
    }
}
"Unlicense"
] | giaiphapictcom/mamoo.vn | V308CMS.Data/Models/Role.cs | 731 | C# |
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.UnitTests.Config
{
using NUnit.Framework;
#if !NUNIT
using SetUp = Microsoft.VisualStudio.TestTools.UnitTesting.TestInitializeAttribute;
using TestFixture = Microsoft.VisualStudio.TestTools.UnitTesting.TestClassAttribute;
using Test = Microsoft.VisualStudio.TestTools.UnitTesting.TestMethodAttribute;
using TearDown = Microsoft.VisualStudio.TestTools.UnitTesting.TestCleanupAttribute;
#endif
[TestFixture]
public class CaseSensitivityTests : NLogTestBase
{
[Test]
public void LowerCaseTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog>
<targets><target name='debug' type='debug' layout='${message}' /></targets>
<rules>
<logger name='*' minlevel='info' appendto='debug'>
<filters>
<whencontains layout='${message}' substring='msg' action='ignore' />
</filters>
</logger>
</rules>
</nlog>");
Logger logger = LogManager.GetLogger("A");
logger.Debug("msg");
logger.Info("msg");
logger.Warn("msg");
logger.Error("msg");
logger.Fatal("msg");
logger.Debug("message");
AssertDebugCounter("debug", 0);
logger.Info("message");
AssertDebugCounter("debug", 1);
logger.Warn("message");
AssertDebugCounter("debug", 2);
logger.Error("message");
AssertDebugCounter("debug", 3);
logger.Fatal("message");
AssertDebugCounter("debug", 4);
}
[Test]
public void UpperCaseTest()
{
LogManager.Configuration = CreateConfigurationFromString(@"
<nlog throwExceptions='true'>
<TARGETS><TARGET NAME='DEBUG' TYPE='DEBUG' LAYOUT='${MESSAGE}' /></TARGETS>
<RULES>
<LOGGER NAME='*' MINLEVEL='INFO' APPENDTO='DEBUG'>
<FILTERS>
<WHENCONTAINS LAYOUT='${MESSAGE}' SUBSTRING='msg' ACTION='IGNORE' />
</FILTERS>
</LOGGER>
</RULES>
</nlog>");
Logger logger = LogManager.GetLogger("A");
logger.Debug("msg");
logger.Info("msg");
logger.Warn("msg");
logger.Error("msg");
logger.Fatal("msg");
logger.Debug("message");
AssertDebugCounter("debug", 0);
logger.Info("message");
AssertDebugCounter("debug", 1);
logger.Warn("message");
AssertDebugCounter("debug", 2);
logger.Error("message");
AssertDebugCounter("debug", 3);
logger.Fatal("message");
AssertDebugCounter("debug", 4);
}
}
} | 37.762295 | 96 | 0.602344 | [
"BSD-3-Clause"
] | SNBnani/NLog | tests/NLog.UnitTests/Config/CaseSensitivityTests.cs | 4,607 | C# |
using EawXBuild.Core;
namespace EawXBuild.Configuration.Lua.v1
{
public class LuaProject
{
private readonly IBuildComponentFactory _factory;
public LuaProject(string name, IBuildComponentFactory factory)
{
_factory = factory;
Project = factory.MakeProject();
Project.Name = name;
}
public IProject Project { get; }
public LuaJob job(string jobName)
{
IJob job = _factory.MakeJob(jobName);
Project.AddJob(job);
return new LuaJob(job);
}
}
} | 23.64 | 70 | 0.583756 | [
"MIT"
] | AlamoEngine-Tools/eaw-ci | eawx-build/Configuration/Lua/v1/LuaProject.cs | 591 | C# |
using Machine.Specifications;
using PlainElastic.Net.IndexSettings;
using PlainElastic.Net.Utils;
namespace PlainElastic.Net.Tests.Builders.IndexSettings
{
[Subject(typeof(NGramTokenizer))]
class When_empty_NGramTokenizer_built
{
Because of = () => result = new NGramTokenizer()
.Name("name")
.ToString();
It should_return_correct_result = () => result.ShouldEqual("'name': { 'type': 'nGram' }".AltQuote());
private static string result;
}
} | 31.833333 | 109 | 0.589878 | [
"MIT"
] | AAATechGuy/PlainElastic.Net | src/PlainElastic.Net.Tests/Builders/Analysis/Tokenizers/NGram/When_empty_NGramTokenizer_built.cs | 575 | C# |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Memcache.V1Beta2.Snippets
{
// [START memcache_v1beta2_generated_CloudMemcache_ListInstances_async]
using Google.Api.Gax;
using Google.Api.Gax.ResourceNames;
using Google.Cloud.Memcache.V1Beta2;
using System;
using System.Linq;
using System.Threading.Tasks;
public sealed partial class GeneratedCloudMemcacheClientSnippets
{
/// <summary>Snippet for ListInstancesAsync</summary>
/// <remarks>
/// This snippet has been automatically generated for illustrative purposes only.
/// It may require modifications to work in your environment.
/// </remarks>
public async Task ListInstancesRequestObjectAsync()
{
// Create client
CloudMemcacheClient cloudMemcacheClient = await CloudMemcacheClient.CreateAsync();
// Initialize request argument(s)
ListInstancesRequest request = new ListInstancesRequest
{
ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
Filter = "",
OrderBy = "",
};
// Make the request
PagedAsyncEnumerable<ListInstancesResponse, Instance> response = cloudMemcacheClient.ListInstancesAsync(request);
// Iterate over all response items, lazily performing RPCs as required
await response.ForEachAsync((Instance item) =>
{
// Do something with each item
Console.WriteLine(item);
});
// Or iterate over pages (of server-defined size), performing one RPC per page
await response.AsRawResponses().ForEachAsync((ListInstancesResponse page) =>
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (Instance item in page)
{
// Do something with each item
Console.WriteLine(item);
}
});
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<Instance> singlePage = await response.ReadPageAsync(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (Instance item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
}
}
// [END memcache_v1beta2_generated_CloudMemcache_ListInstances_async]
}
| 41.710843 | 125 | 0.63056 | [
"Apache-2.0"
] | AlexandrTrf/google-cloud-dotnet | apis/Google.Cloud.Memcache.V1Beta2/Google.Cloud.Memcache.V1Beta2.GeneratedSnippets/CloudMemcacheClient.ListInstancesRequestObjectAsyncSnippet.g.cs | 3,462 | C# |
using System.Threading.Tasks;
using Abp.Dependency;
using Castle.Core.Logging;
namespace Afonsoft.Ranking.Net.Sms
{
public class SmsSender : ISmsSender, ITransientDependency
{
public ILogger Logger { get; set; }
public SmsSender()
{
Logger = NullLogger.Instance;
}
public Task SendAsync(string number, string message)
{
/* Implement this service to send SMS to users (can be used for two factor auth). */
Logger.Warn("Sending SMS is not implemented! Message content:");
Logger.Warn("Number : " + number);
Logger.Warn("Message : " + message);
return Task.FromResult(0);
}
}
}
| 25.785714 | 96 | 0.596953 | [
"MIT"
] | afonsoft/Ranking | src/Afonsoft.Ranking.Core/Net/Sms/SmsSender.cs | 724 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the config-2014-11-12.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Net;
using System.Text;
using System.Xml.Serialization;
using Amazon.ConfigService.Model;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Transform;
using Amazon.Runtime.Internal.Util;
using ThirdParty.Json.LitJson;
namespace Amazon.ConfigService.Model.Internal.MarshallTransformations
{
/// <summary>
/// Response Unmarshaller for ValidationException Object
/// </summary>
public class ValidationExceptionUnmarshaller : IErrorResponseUnmarshaller<ValidationException, JsonUnmarshallerContext>
{
/// <summary>
/// Unmarshaller the response from the service to the response class.
/// </summary>
/// <param name="context"></param>
/// <returns></returns>
public ValidationException Unmarshall(JsonUnmarshallerContext context)
{
return this.Unmarshall(context, new ErrorResponse());
}
/// <summary>
/// Unmarshaller the response from the service to the response class.
/// </summary>
/// <param name="context"></param>
/// <param name="errorResponse"></param>
/// <returns></returns>
public ValidationException Unmarshall(JsonUnmarshallerContext context, ErrorResponse errorResponse)
{
context.Read();
ValidationException unmarshalledObject = new ValidationException(errorResponse.Message, errorResponse.InnerException,
errorResponse.Type, errorResponse.Code, errorResponse.RequestId, errorResponse.StatusCode);
int targetDepth = context.CurrentDepth;
while (context.ReadAtDepth(targetDepth))
{
}
return unmarshalledObject;
}
private static ValidationExceptionUnmarshaller _instance = new ValidationExceptionUnmarshaller();
/// <summary>
/// Gets the singleton.
/// </summary>
public static ValidationExceptionUnmarshaller Instance
{
get
{
return _instance;
}
}
}
} | 35.305882 | 130 | 0.647118 | [
"Apache-2.0"
] | philasmar/aws-sdk-net | sdk/src/Services/ConfigService/Generated/Model/Internal/MarshallTransformations/ValidationExceptionUnmarshaller.cs | 3,001 | C# |
using Microsoft.VisualBasic.Activities;
using OpenRPA.Interfaces;
using OpenRPA.Interfaces.Selector;
using System;
using System.Activities;
using System.Activities.Expressions;
using System.Activities.Presentation.Model;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;
namespace OpenRPA.Activities
{
public partial class DetectorDesigner : INotifyPropertyChanged
{
public DetectorDesigner()
{
InitializeComponent();
}
public event PropertyChangedEventHandler PropertyChanged;
private void NotifyPropertyChanged(String propertyName)
{
PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
}
public System.Collections.ObjectModel.ObservableCollection<IDetectorPlugin> detectorPlugins
{
get
{
return Plugins.detectorPlugins;
}
}
}
} | 26.825 | 99 | 0.707363 | [
"MPL-2.0"
] | igaisin/openrpa | OpenRPA/Activities/DetectorDesigner.xaml.cs | 1,075 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PlayerController : MonoBehaviour
{
private const float zeroF = 0.0f;
[SerializeField]
public LaserController currentLaserSelected;
[SerializeField]
private LaserController lastLaserSelected;
[SerializeField]
bool pointerHit = false;
private float rotationSpeed = 100.0f;
private float panSpeed = 5.0f;
void Start()
{
}
// Update is called once per frame
void Update()
{
InputControls();
var moveX = GetAxisSpeed("Horizontal", panSpeed);
var moveZ = GetAxisSpeed("Vertical", panSpeed);
transform.Translate(moveX, zeroF, moveZ);
}
float GetDeltaSpeed(float a)
{
return (Time.deltaTime * a);
}
float GetAxisSpeed(string type, float speed)
{
return Input.GetAxis(type) * GetDeltaSpeed(speed);
}
void SelectObjects()
{
Ray ray = GetComponentInChildren<Camera>().ScreenPointToRay(Input.mousePosition);
RaycastHit hit;
if (Physics.Raycast(ray, out hit))
{
// the object identified by hit.transform was clicked
// do whatever you want
currentLaserSelected = hit.collider.GetComponent<LaserController>();
if (currentLaserSelected == null)
{
if (lastLaserSelected != null)
{
lastLaserSelected.Selected(false);
lastLaserSelected = null;
}
pointerHit = false;
}
if (lastLaserSelected == null && currentLaserSelected != null)
{
currentLaserSelected.Selected(true);
lastLaserSelected = currentLaserSelected;
pointerHit = true;
}
if (currentLaserSelected != lastLaserSelected)
{
currentLaserSelected.Selected(true);
lastLaserSelected.Selected(false);
lastLaserSelected = currentLaserSelected;
pointerHit = true;
}
}
else
{
if (currentLaserSelected != null)
{
currentLaserSelected.Selected(false);
}
currentLaserSelected = null;
lastLaserSelected = null;
pointerHit = false;
}
}
void RotatePlayerView(Transform t)
{
var x = GetAxisSpeed("Mouse X", rotationSpeed);
var y = GetAxisSpeed("Mouse Y", -1* rotationSpeed);
t.Rotate(y, x, zeroF);
t.rotation = Quaternion.Euler(t.rotation.eulerAngles.x, t.rotation.eulerAngles.y, zeroF);
}
void InputControls()
{
if (Input.GetMouseButtonDown(0))
{
SelectObjects();
}
if (Input.GetKey(KeyCode.Mouse1))
{
RotatePlayerView(transform);
}
if (Input.GetKeyDown(KeyCode.Q) && pointerHit)
{
currentLaserSelected.transform.Rotate(0, -90.0f, 0);
}
if (Input.GetKeyDown(KeyCode.E) && pointerHit)
{
currentLaserSelected.transform.Rotate(0, 90.0f, 0);
}
}
}
| 27.125 | 97 | 0.558525 | [
"MIT"
] | Xamdes/Electrogen | Assets/Scripts/PlayerController.cs | 3,257 | C# |
// Copyright 2013 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml.Linq;
namespace DocumentVersions
{
/// <summary>
/// Tool to annotate an XML documentation file with the version in which each member was introduced,
/// and whether or not it's present in the PCL build.
/// </summary>
internal static class Program
{
private static int Main(string[] args)
{
if (args.Length != 5)
{
Console.WriteLine("Usage: DocumentVersions <desktop XML file> <desktop reference XML file> <PCL XML file> <previous versions directory> <output history file>");
Console.WriteLine("The reference XML file is generated by MRefBuilder.exe (part of Sandcastle)");
return 1;
}
var desktopXml = XDocument.Load(args[0]);
var referenceXml = XDocument.Load(args[1]);
var currentVersion = DetermineCurrentVersion(referenceXml);
var publicMembers = FindPublicMembers(referenceXml);
var pclMembers = LoadMembers(args[2]);
var versionMap = FindFirstVersions(Path.GetFileName(args[0]), args[3]);
// Remove any information from a previous run
desktopXml.Descendants("since").Remove();
desktopXml.Descendants("pcl").Remove();
foreach (var member in desktopXml.Descendants("member"))
{
string name = member.Attribute("name").Value;
if (!publicMembers.Contains(name))
{
continue;
}
member.Add(new XElement("pcl", new XAttribute("supported", pclMembers.Contains(name))));
string firstVersion;
if (!versionMap.TryGetValue(name, out firstVersion))
{
versionMap[name] = currentVersion;
firstVersion = currentVersion;
}
member.Add(new XElement("since", firstVersion));
}
desktopXml.Save(args[0]);
var memberByVersion = versionMap.GroupBy(pair => pair.Value, pair => pair.Key);
using (var historyWriter = File.CreateText(args[4]))
{
foreach (var group in memberByVersion.OrderByDescending(g => g.Key))
{
historyWriter.WriteLine(group.Key);
foreach (var name in group.OrderBy(name => name.Substring(2)))
{
historyWriter.WriteLine(name);
}
historyWriter.WriteLine();
}
}
return 0;
}
private static string DetermineCurrentVersion(XDocument referenceXml)
{
return referenceXml.Descendants("type")
.Where(t => (string)t.Attribute("api") == "T:System.Reflection.AssemblyInformationalVersionAttribute")
.Select(t => t.Parent.Descendants("value").Single().Value)
.Single();
}
private static HashSet<string> FindPublicMembers(XDocument referenceXml)
{
// TODO: Include protected and protected internal methods of public types.
var ids = referenceXml.Descendants("api")
.Where(t => (string) t.Elements("apidata").Attributes("group").Select(a => (string)a).FirstOrDefault() != "namespace")
.Where(t => (string) t.Elements().Attributes("visibility").Single().Value == "public")
.Select(t => (string) t.Attribute("id"));
return new HashSet<string>(ids);
}
private static Dictionary<string, string> FindFirstVersions(string baseName, string directory)
{
// We really have files like foo.xml-1.0.0, so let's look for the - as well.
baseName += "-";
Dictionary<string, string> firstVersions = new Dictionary<string, string>();
// Load the files in reverse order, so that later versions are replaced by earlier ones.
foreach (var file in Directory.GetFiles(directory).Where(file => Path.GetFileName(file).StartsWith(baseName)).OrderByDescending(x => x))
{
string version = Path.GetFileName(file).Substring(baseName.Length);
var members = LoadMembers(file);
foreach (var member in members)
{
firstVersions[member] = version;
}
}
return firstVersions;
}
private static HashSet<string> LoadMembers(string file)
{
XDocument doc = XDocument.Load(file);
var members = doc.Descendants("member").Select(m => m.Attribute("name").Value);
return new HashSet<string>(members);
}
}
}
| 44.166667 | 176 | 0.571599 | [
"Apache-2.0"
] | ivandrofly/nodatime | build/DocumentVersions/Program.cs | 5,037 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Reflection.PortableExecutable;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.CodeAnalysis.PdbSourceDocument
{
internal interface ISourceLinkService
{
Task<SourceFilePathResult?> GetSourceFilePathAsync(string url, string relativePath, CancellationToken cancellationToken);
Task<PdbFilePathResult?> GetPdbFilePathAsync(string dllPath, PEReader peReader, bool useDefaultSymbolServers, CancellationToken cancellationToken);
}
// The following types mirror types in Microsoft.VisualStudio.Debugger.Contracts which cannot be referenced at this layer
/// <summary>
/// The result of findding a PDB file
/// </summary>
/// <param name="PdbFilePath">The path to the PDB file in the debugger cache</param>
internal record PdbFilePathResult(string PdbFilePath);
/// <summary>
/// The result of finding a source file via SourceLink
/// </summary>
/// <param name="SourceFilePath">The path to the source file in the debugger cache</param>
internal record SourceFilePathResult(string SourceFilePath);
}
| 41 | 155 | 0.754573 | [
"MIT"
] | AlexanderSemenyak/roslyn | src/Features/Core/Portable/PdbSourceDocument/ISourceLinkService.cs | 1,314 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.EditAndContinue;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Test.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.EditAndContinue.UnitTests
{
[UseExportProvider]
public class EmitSolutionUpdateResultsTests
{
[Fact]
public async Task GetHotReloadDiagnostics()
{
using var workspace = new TestWorkspace(composition: FeaturesTestCompositions.Features);
var sourcePath = Path.Combine(TempRoot.Root, "x", "a.cs");
var razorPath = Path.Combine(TempRoot.Root, "a.razor");
var document = workspace.CurrentSolution.
AddProject("proj", "proj", LanguageNames.CSharp).
WithMetadataReferences(TargetFrameworkUtil.GetReferences(TargetFramework.Standard)).
AddDocument(sourcePath, SourceText.From("class C {}", Encoding.UTF8), filePath: Path.Combine(TempRoot.Root, sourcePath));
var solution = document.Project.Solution;
var diagnosticData = ImmutableArray.Create(
new DiagnosticData(
id: "CS0001",
category: "Test",
message: "warning",
enuMessageForBingSearch: "test2 message format",
severity: DiagnosticSeverity.Warning,
defaultSeverity: DiagnosticSeverity.Warning,
isEnabledByDefault: true,
warningLevel: 0,
customTags: ImmutableArray.Create("Test2"),
properties: ImmutableDictionary<string, string?>.Empty,
document.Project.Id,
new DiagnosticDataLocation(document.Id, new TextSpan(1, 2), "a.cs", 0, 0, 0, 5, "a.razor", 10, 10, 10, 15),
language: "C#",
title: "title",
description: "description",
helpLink: "http://link"),
new DiagnosticData(
id: "CS0012",
category: "Test",
message: "error",
enuMessageForBingSearch: "test2 message format",
severity: DiagnosticSeverity.Error,
defaultSeverity: DiagnosticSeverity.Warning,
isEnabledByDefault: true,
warningLevel: 0,
customTags: ImmutableArray.Create("Test2"),
properties: ImmutableDictionary<string, string?>.Empty,
document.Project.Id,
new DiagnosticDataLocation(document.Id, new TextSpan(1, 2), originalFilePath: sourcePath, 0, 0, 0, 5, mappedFilePath: @"..\a.razor", 10, 10, 10, 15),
language: "C#",
title: "title",
description: "description",
helpLink: "http://link"));
var rudeEdits = ImmutableArray.Create(
(document.Id, ImmutableArray.Create(new RudeEditDiagnostic(RudeEditKind.Insert, TextSpan.FromBounds(1, 10), 123, new[] { "a" }))),
(document.Id, ImmutableArray.Create(new RudeEditDiagnostic(RudeEditKind.Delete, TextSpan.FromBounds(1, 10), 123, new[] { "b" }))));
var actual = await EmitSolutionUpdateResults.GetHotReloadDiagnosticsAsync(solution, diagnosticData, rudeEdits, CancellationToken.None);
AssertEx.Equal(new[]
{
$@"Error CS0012: {razorPath} (10,10)-(10,15): error",
$@"Error ENC0021: {sourcePath} (0,1)-(0,10): {string.Format(FeaturesResources.Adding_0_will_prevent_the_debug_session_from_continuing, "a")}",
$@"Error ENC0033: {sourcePath} (0,1)-(0,10): {string.Format(FeaturesResources.Deleting_0_will_prevent_the_debug_session_from_continuing, "b")}"
}, actual.Select(d => $"{d.Severity} {d.Id}: {d.FilePath} {d.Span.GetDebuggerDisplay()}: {d.Message}"));
}
}
}
| 48.902174 | 169 | 0.60858 | [
"MIT"
] | Acidburn0zzz/roslyn | src/EditorFeatures/Test/EditAndContinue/EmitSolutionUpdateResultsTests.cs | 4,501 | C# |
// Copyright (c) @asmichi (https://github.com/asmichi). Licensed under the MIT License. See LICENCE in the project root for details.
using System;
using System.Collections.Generic;
using System.Linq;
using Asmichi.Utilities;
using Xunit;
using Xunit.Sdk;
using static Asmichi.ProcessManagement.ChildProcessExecutionTestUtil;
using KV = System.Collections.Generic.KeyValuePair<string, string>;
namespace Asmichi.ProcessManagement
{
// NOTE: These tests will fail if the current process has "A" or "BB" as environment variables.
public sealed class ChildProcessTest_EnvironmentVariables
{
// Assumes we do not change the environment variables of the current process.
[Fact]
public void InheritsEnvironmentVariables()
{
var expected = GetProcessEnvVars();
AssertEnvironmentVariables(expected, null, Array.Empty<KV>(), true);
}
[Fact]
public void CanAddEnvironmentVariables()
{
var extraEnvVars = new KV[]
{
new("A", "A"),
new("BB", "BB"),
};
var expected = GetProcessEnvVars().Concat(extraEnvVars);
AssertEnvironmentVariables(expected, null, extraEnvVars, true);
}
[Fact]
public void CanRemoveEnvironmentVariables()
{
var extraEnvVars = new KV[]
{
new("A", null!),
new("BB", ""),
};
var processEnvVars = GetProcessEnvVars();
var contextEnvVars = new Dictionary<string, string>(processEnvVars)
{
{ "A", "A" },
{ "BB", "BB" },
};
var context = new ChildProcessCreationContext(contextEnvVars);
AssertEnvironmentVariables(processEnvVars, context, extraEnvVars, true);
}
[Fact]
public void CanDisableEnvironmentVariableInheritance()
{
var nonEmptyProcessEnvVars = GetProcessEnvVars().Where(x => !string.IsNullOrEmpty(x.Value)).ToArray();
var contextEnvVars = new Dictionary<string, string>(nonEmptyProcessEnvVars)
{
{ "A", "A" },
{ "BB", "BB" },
};
var context = new ChildProcessCreationContext(contextEnvVars);
AssertEnvironmentVariables(nonEmptyProcessEnvVars, context, nonEmptyProcessEnvVars.ToArray(), false);
}
private static void AssertEnvironmentVariables(
IEnumerable<KV> expected,
ChildProcessCreationContext? context,
KV[] extraEnvVars,
bool inheritFromContext)
{
var actual = ExecuteForEnvironmentVariables(context, extraEnvVars, inheritFromContext);
var orderedExpected = expected.OrderBy(x => x, EnvironmentVariablePairNameComparer.DefaultThenOrdinal).ToArray();
if (!orderedExpected.SequenceEqual(actual))
{
// To diagnose environment-dependent issues, print all environment variables.
var message =
$"Expected: {ToString(orderedExpected)}\n" +
$"Actual: {ToString(actual)}";
throw new XunitException(message);
}
static string ToString(KV[] kvs) => "[" + string.Join(", ", kvs.Select(x => $"{x.Key}={x.Value}")) + "]";
}
private static KV[] ExecuteForEnvironmentVariables(
ChildProcessCreationContext? context,
KV[] extraEnvVars,
bool inheritFromContext)
{
var si = new ChildProcessStartInfo(TestUtil.TestChildNativePath, "DumpEnvironmentVariables")
{
StdOutputRedirection = OutputRedirection.OutputPipe,
Flags = inheritFromContext ? ChildProcessFlags.None : ChildProcessFlags.DisableEnvironmentVariableInheritance,
CreationContext = context,
ExtraEnvironmentVariables = extraEnvVars,
};
var output = ExecuteForStandardOutput(si);
var childEnvVars =
output.Split(new char[] { '\0' }, StringSplitOptions.RemoveEmptyEntries)
.Select(ToKeyValuePair)
.ToArray();
return childEnvVars;
static KV ToKeyValuePair(string envVar)
{
int index = envVar.IndexOf('=', StringComparison.Ordinal);
var name = envVar.Substring(0, index);
var value = envVar.Substring(index + 1);
return new(name, value);
}
}
private static ArraySegment<KV> GetProcessEnvVars() => EnvironmentVariableListUtil.ToSortedDistinctKeyValuePairs(Environment.GetEnvironmentVariables());
}
}
| 37.80315 | 160 | 0.595084 | [
"MIT"
] | asmichi/ChildProcess | src/ChildProcess.Test/ProcessManagement/ChildProcessTest_EnvironmentVariables.cs | 4,801 | C# |
#region Apache License Version 2.0
/*----------------------------------------------------------------
Copyright 2018 Jeffrey Su & Suzhou Senparc Network Technology Co.,Ltd.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
Detail: https://github.com/JeffreySu/WeiXinMPSDK/blob/master/license.md
----------------------------------------------------------------*/
#endregion Apache License Version 2.0
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Senparc.Weixin.MP.Entities.Request;
namespace Senparc.Weixin.MP.Test.Entities.Request
{
[TestClass]
public class PostModelTest
{
[TestMethod]
public void SetSecretInfoTest()
{
var postModel = new PostModel();
postModel.SetSecretInfo("A","B","C");
Assert.AreEqual("A", postModel.Token);
Assert.AreEqual("B", postModel.EncodingAESKey);
Assert.AreEqual("C", postModel.AppId);
}
}
}
| 34.25 | 90 | 0.66357 | [
"Apache-2.0"
] | 007008aabb/WeiXinMPSDK | src/Senparc.Weixin.MP/Senparc.Weixin.MP.Test/Entities/Request/PostModelTest.cs | 1,509 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using Azure.Functions.Cli.Common;
using Colors.Net;
using static Azure.Functions.Cli.Common.OutputTheme;
using static Colors.Net.StringStaticMethods;
namespace Azure.Functions.Cli.Helpers
{
public static class PythonHelpers
{
        // Pinned Azure Functions Python library/worker package versions (not referenced in the visible
        // portion of this file; presumably consumed elsewhere — TODO confirm before removing).
        private static readonly string[] _workerPackages = new[] { "azure-functions==1.0.0b5", "azure-functions-worker==1.0.0b11" };
        // App setting that lets users override the Python executable the worker launches with.
        private static readonly string _pythonDefaultExecutableVar = "languageWorkers:python:defaultExecutablePath";
        // True when the CLI is running inside an activated Python virtual environment.
        private static bool InVirtualEnvironment => !string.IsNullOrEmpty(VirtualEnvironmentPath);
        // Path of the active virtual environment (from VIRTUAL_ENV), or null/empty when none is active.
        public static string VirtualEnvironmentPath => Environment.GetEnvironmentVariable("VIRTUAL_ENV");
        /// <summary>
        /// Scaffolds the Python prerequisites for a new project: checks the local Python version
        /// (warning only — does not fail on old/missing Python), creates requirements.txt when
        /// missing, and ensures the active virtual environment is listed in .funcignore.
        /// </summary>
        public static async Task SetupPythonProject()
        {
            // errorOutIfOld: false — project creation should proceed even without a suitable Python.
            await ValidatePythonVersion(errorOutIfOld: false);
            CreateRequirements();
            await EnsureVirtualEnvrionmentIgnored();
        }
public static async Task EnsureVirtualEnvrionmentIgnored()
{
if (InVirtualEnvironment)
{
try
{
var virtualEnvName = Path.GetFileNameWithoutExtension(VirtualEnvironmentPath);
if (FileSystemHelpers.DirectoryExists(Path.Join(Environment.CurrentDirectory, virtualEnvName)))
{
var funcIgnorePath = Path.Join(Environment.CurrentDirectory, Constants.FuncIgnoreFile);
// If .funcignore exists and already has the venv name, we are done here
if (FileSystemHelpers.FileExists(funcIgnorePath))
{
var rawfuncIgnoreContents = await FileSystemHelpers.ReadAllTextFromFileAsync(funcIgnorePath);
if (rawfuncIgnoreContents.Contains(Environment.NewLine + virtualEnvName))
{
return;
}
}
// Write the current env to .funcignore
ColoredConsole.WriteLine($"Writing {Constants.FuncIgnoreFile}");
using (var fileStream = FileSystemHelpers.OpenFile(funcIgnorePath, FileMode.Append, FileAccess.Write))
using (var streamWriter = new StreamWriter(fileStream))
{
await streamWriter.WriteAsync(Environment.NewLine + virtualEnvName);
await streamWriter.FlushAsync();
}
}
}
catch (Exception)
{
// Safe execution, we aren't harmed by failures here
}
}
}
private static void CreateRequirements()
{
if (!FileSystemHelpers.FileExists(Constants.RequirementsTxt))
{
FileSystemHelpers.WriteAllTextToFile(Constants.RequirementsTxt, Constants.PythonFunctionsLibrary);
}
else
{
ColoredConsole.WriteLine($"{Constants.RequirementsTxt} already exists. Skipped!");
}
}
public static async Task<string> ValidatePythonVersion(bool setWorkerExecutable = false, bool errorIfNoExactMatch = false, bool errorOutIfOld = true)
{
// If users are overriding this value, we don't have to worry about verification
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable(_pythonDefaultExecutableVar)))
{
return Environment.GetEnvironmentVariable(_pythonDefaultExecutableVar);
}
const string pythonStr = "python";
const string python3Str = "python3";
const string python36Str = "python3.6";
const string py36Str = "3.6";
const string py3Str = "3.";
const string warningMessage = "Python 3.6.x is recommended, and used in Azure Functions. You are using Python version {0}.";
const string errorIfNotExactMessage = "Python 3.6.x is required, and used in Azure Functions. You are using Python version {0}. "
+ "Please install Python 3.6, and use a virtual environment to switch to Python 3.6.";
const string errorMessageOldPy = "Python 3.x (recommended version 3.6.x) is required. Found python versions ({0}).";
const string errorMessageNoPy = "Python 3.x (recommended version 3.6.x) is required. No Python versions were found.";
var pythonExeVersionTask = VerifyVersion(pythonStr);
var python3ExeVersionTask = VerifyVersion(python3Str);
var python36ExeVersionTask = VerifyVersion(python36Str);
var pythonExeVersion = await Utilities.SafeExecution(async () => await pythonExeVersionTask) ?? string.Empty;
var python3ExeVersion = await Utilities.SafeExecution(async () => await python3ExeVersionTask) ?? string.Empty;
var python36ExeVersion = await Utilities.SafeExecution(async () => await python36ExeVersionTask) ?? string.Empty;
var exeToVersion = new Dictionary<string, string>()
{
{ pythonStr, pythonExeVersion },
{ python3Str, python3ExeVersion },
{ python36Str, python36ExeVersion }
};
// If any of the possible python executables had 3.6, we are good.
var exeWith36KeyPair = exeToVersion.Where(kv => kv.Value.Contains(py36Str)).ToList();
if (exeWith36KeyPair.Count() != 0)
{
SetWorkerPathIfNeeded(setWorkerExecutable, exeWith36KeyPair[0].Key);
return exeWith36KeyPair[0].Key;
}
// If any of the possible python executables are 3.x, we warn them and go ahead.
var exeWith3KeyPair = exeToVersion.Where(kv => kv.Value.Contains(py3Str)).ToList();
if (exeWith3KeyPair.Count() != 0)
{
if (errorIfNoExactMatch) throw new CliException(string.Format(errorIfNotExactMessage, exeWith3KeyPair[0].Value));
SetWorkerPathIfNeeded(setWorkerExecutable, exeWith3KeyPair[0].Key);
ColoredConsole.WriteLine(WarningColor(string.Format(warningMessage, exeWith3KeyPair[0].Value)));
return exeWith3KeyPair[0].Key;
}
// If we found any python versions at all, we warn or error out if flag enabled.
var anyPyVersions = exeToVersion.Where(kv => !string.IsNullOrEmpty(kv.Value)).Select(kv => kv.Value).ToList();
if (anyPyVersions.Count != 0)
{
if (errorIfNoExactMatch) throw new CliException(string.Format(errorIfNotExactMessage, exeWith3KeyPair[0].Value));
if (errorOutIfOld) throw new CliException(string.Format(errorMessageOldPy, string.Join(", ", anyPyVersions)));
else ColoredConsole.WriteLine(WarningColor(string.Format(errorMessageOldPy, string.Join(", ", anyPyVersions))));
}
// If we didn't find python at all, we warn or error out if flag enabled.
else
{
if (errorOutIfOld) throw new CliException(errorMessageNoPy);
else ColoredConsole.WriteLine(WarningColor(errorMessageNoPy));
}
return null;
}
private static void SetWorkerPathIfNeeded(bool setWorker, string pyExe)
{
if (setWorker)
{
Environment.SetEnvironmentVariable(_pythonDefaultExecutableVar, pyExe, EnvironmentVariableTarget.Process);
if (StaticSettings.IsDebug)
{
ColoredConsole.WriteLine(VerboseColor($"{_pythonDefaultExecutableVar} set to {pyExe}"));
}
}
}
public static async Task<string> VerifyVersion(string pythonExe = "python")
{
var exe = new Executable(pythonExe, "--version");
var sb = new StringBuilder();
int exitCode = -1;
try
{
exitCode = await exe.RunAsync(l => sb.AppendLine(l), e => sb.AppendLine(e));
}
catch (Exception)
{
throw new CliException("Unable to verify Python version. Please make sure you have Python 3.6 installed.");
}
if (exitCode == 0)
{
var trials = 0;
// this delay to make sure the output
while (string.IsNullOrWhiteSpace(sb.ToString()) && trials < 5)
{
trials++;
await Task.Delay(TimeSpan.FromMilliseconds(200));
}
return sb.ToString().Trim();
}
else
{
throw new CliException($"Error running {exe.Command}");
}
}
        /// <summary>
        /// Converts a zip archive stream into a squashfs image by running a conversion
        /// script inside a Linux Python docker container. Returns a read/write stream over
        /// a temp file that is deleted when the stream is closed.
        /// </summary>
        /// <param name="stream">Zip content to convert; disposed by this method.</param>
        public static async Task<Stream> ZipToSquashfsStream(Stream stream)
        {
            // Persist the incoming zip to a temp file so it can be docker-copied.
            var tmpFile = Path.GetTempFileName();
            using (stream)
            using (var fileStream = FileSystemHelpers.OpenFile(tmpFile, FileMode.OpenOrCreate, FileAccess.Write))
            {
                stream.Seek(0, SeekOrigin.Begin);
                await stream.CopyToAsync(fileStream);
            }
            string containerId = null;
            try
            {
                // Keep a throwaway container alive so we can copy files in/out and exec commands.
                containerId = await DockerHelpers.DockerRun(Constants.DockerImages.LinuxPythonImageAmd64, command: "sleep infinity");
                await DockerHelpers.CopyToContainer(containerId, tmpFile, $"/file.zip");
                // The conversion script ships as an embedded resource; normalize line endings
                // so it runs under the container's shell.
                var scriptFilePath = Path.GetTempFileName();
                await FileSystemHelpers.WriteAllTextToFileAsync(scriptFilePath, (await StaticResources.ZipToSquashfsScript).Replace("\r\n", "\n"));
                await DockerHelpers.CopyToContainer(containerId, scriptFilePath, Constants.StaticResourcesNames.ZipToSquashfs);
                await DockerHelpers.ExecInContainer(containerId, $"chmod +x /{Constants.StaticResourcesNames.ZipToSquashfs}");
                await DockerHelpers.ExecInContainer(containerId, $"/{Constants.StaticResourcesNames.ZipToSquashfs}");
                // Overwrite the temp file with the converted squashfs image.
                await DockerHelpers.CopyFromContainer(containerId, $"/file.squashfs", tmpFile);
                const int defaultBufferSize = 4096;
                // DeleteOnClose ensures the temp file is cleaned up when the caller disposes the stream.
                return new FileStream(tmpFile, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite, defaultBufferSize, FileOptions.DeleteOnClose);
            }
            finally
            {
                if (!string.IsNullOrEmpty(containerId))
                {
                    await DockerHelpers.KillContainer(containerId, ignoreError: true);
                }
            }
        }
private static async Task<bool> ArePackagesInSync(string requirementsTxt, string pythonPackages)
{
var md5File = Path.Combine(pythonPackages, $"{Constants.RequirementsTxt}.md5");
if (!FileSystemHelpers.FileExists(md5File))
{
return false;
}
var packagesMd5 = await FileSystemHelpers.ReadAllTextFromFileAsync(md5File);
var requirementsTxtMd5 = SecurityHelpers.CalculateMd5(requirementsTxt);
return packagesMd5 == requirementsTxtMd5;
}
        /// <summary>
        /// Builds the deployment zip for a python function app: restores third-party
        /// packages (locally, via docker, or deferred to the server for remote build)
        /// and zips them together with the app files.
        /// </summary>
        /// <param name="files">App files (relative to <paramref name="functionAppRoot"/>) to include.</param>
        /// <param name="functionAppRoot">Root directory of the function app.</param>
        /// <param name="buildNativeDeps">Use docker to build native dependencies; mutually exclusive with remote build.</param>
        /// <param name="buildOption">Build strategy; Remote skips local restore entirely.</param>
        /// <param name="additionalPackages">Extra apt packages to install in the docker build container.</param>
        /// <exception cref="CliException">Missing requirements.txt, conflicting flags, or docker unavailable.</exception>
        internal static async Task<Stream> GetPythonDeploymentPackage(IEnumerable<string> files, string functionAppRoot, bool buildNativeDeps, BuildOption buildOption, string additionalPackages)
        {
            var reqTxtFile = Path.Combine(functionAppRoot, Constants.RequirementsTxt);
            if (!FileSystemHelpers.FileExists(reqTxtFile))
            {
                throw new CliException($"{Constants.RequirementsTxt} is not found. " +
                $"{Constants.RequirementsTxt} is required for python function apps. Please make sure to generate one before publishing.");
            }
            var packagesLocation = Path.Combine(functionAppRoot, Constants.ExternalPythonPackages);
            if (FileSystemHelpers.DirectoryExists(packagesLocation))
            {
                // Only update packages if checksum of requirements.txt does not match
                // If build option is remote, we don't need to verify if packages are in sync, as we need to delete them regardless
                if (buildOption != BuildOption.Remote && await ArePackagesInSync(reqTxtFile, packagesLocation))
                {
                    ColoredConsole.WriteLine(Yellow($"Directory {Constants.ExternalPythonPackages} already in sync with {Constants.RequirementsTxt}. Skipping restoring dependencies..."));
                    return ZipHelper.CreateZip(files.Union(FileSystemHelpers.GetFiles(packagesLocation)), functionAppRoot);
                }
                ColoredConsole.WriteLine($"Deleting the old {Constants.ExternalPythonPackages} directory");
                FileSystemHelpers.DeleteDirectorySafe(packagesLocation);
            }
            FileSystemHelpers.EnsureDirectory(packagesLocation);
            // Only one of the remote build or build-native-deps flag can be chosen
            if (buildNativeDeps && buildOption == BuildOption.Remote)
            {
                throw new CliException("Cannot perform '--build-native-deps' along with '--build remote'");
            }
            if (buildNativeDeps)
            {
                if (CommandChecker.CommandExists("docker") && await DockerHelpers.VerifyDockerAccess())
                {
                    await RestorePythonRequirementsDocker(functionAppRoot, packagesLocation, additionalPackages);
                }
                else
                {
                    throw new CliException("Docker is required to build native dependencies for python function apps");
                }
            }
            else if (buildOption == BuildOption.Remote)
            {
                // No-ops, python packages will be resolved on the server side
            }
            else
            {
                await RestorePythonRequirementsPackapp(functionAppRoot, packagesLocation);
            }
            // No need to generate and compare .md5 when using remote build
            if (buildOption != BuildOption.Remote)
            {
                // Store a checksum of requirements.txt
                var md5FilePath = Path.Combine(packagesLocation, $"{Constants.RequirementsTxt}.md5");
                await FileSystemHelpers.WriteAllTextToFileAsync(md5FilePath, SecurityHelpers.CalculateMd5(reqTxtFile));
            }
            return ZipHelper.CreateZip(files.Union(FileSystemHelpers.GetFiles(packagesLocation)), functionAppRoot);
        }
        /// <summary>
        /// Restores python dependencies with the bundled "packapp" tool, using the
        /// locally validated python interpreter.
        /// </summary>
        /// <exception cref="CliException">packapp exited non-zero (other than the native-deps case, which exits the process).</exception>
        private static async Task RestorePythonRequirementsPackapp(string functionAppRoot, string packagesLocation)
        {
            // packapp ships next to the CLI binary under tools/python.
            var packApp = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "tools", "python", "packapp");
            var pythonExe = await ValidatePythonVersion(errorOutIfOld: true);
            var exe = new Executable(pythonExe, $"\"{packApp}\" --platform linux --python-version 36 --packages-dir-name {Constants.ExternalPythonPackages} \"{functionAppRoot}\" --verbose");
            var sbErrors = new StringBuilder();
            var exitCode = await exe.RunAsync(o => ColoredConsole.WriteLine(o), e => sbErrors.AppendLine(e));
            if (exitCode != 0)
            {
                var errorMessage = "There was an error restoring dependencies. " + sbErrors.ToString();
                // If --build-native-deps if required, we exit with this specific code to notify other toolings
                // If this exit code changes, partner tools must be updated
                if (exitCode == ExitCodes.BuildNativeDepsRequired)
                {
                    ColoredConsole.WriteLine(ErrorColor(errorMessage));
                    Environment.Exit(ExitCodes.BuildNativeDepsRequired);
                }
                throw new CliException(errorMessage);
            }
        }
        /// <summary>
        /// Restores python dependencies (including native ones) by running a build script
        /// inside a Linux Python docker container and copying the result back out.
        /// The image, pull behavior, and run command are overridable via environment variables.
        /// </summary>
        private static async Task RestorePythonRequirementsDocker(string functionAppRoot, string packagesLocation, string additionalPackages)
        {
            // Configurable settings
            var pythonDockerImageSetting = Environment.GetEnvironmentVariable(Constants.PythonDockerImageVersionSetting);
            var dockerSkipPullFlagSetting = Environment.GetEnvironmentVariable(Constants.PythonDockerImageSkipPull);
            var dockerRunSetting = Environment.GetEnvironmentVariable(Constants.PythonDockerRunCommand);
            var dockerImage = string.IsNullOrEmpty(pythonDockerImageSetting)
                ? Constants.DockerImages.LinuxPythonImageAmd64
                : pythonDockerImageSetting;
            // Pull unless the skip flag is explicitly set to "true"/"1".
            if (string.IsNullOrEmpty(dockerSkipPullFlagSetting) ||
                !(dockerSkipPullFlagSetting.Equals("true", StringComparison.OrdinalIgnoreCase) || dockerSkipPullFlagSetting == "1"))
            {
                await DockerHelpers.DockerPull(dockerImage);
            }
            var containerId = string.Empty;
            try
            {
                if (string.IsNullOrEmpty(dockerRunSetting))
                {
                    // Keep the container alive so we can copy files and exec commands against it.
                    containerId = await DockerHelpers.DockerRun(dockerImage, command: "sleep infinity");
                }
                else
                {
                    (var output, _, _) = await DockerHelpers.RunDockerCommand(dockerRunSetting);
                    containerId = output.ToString().Trim();
                }
                // NOTE(review): this copies the relative path Constants.RequirementsTxt rather than
                // one rooted at functionAppRoot — presumably the CLI's working directory is the app
                // root at this point; verify against callers.
                await DockerHelpers.CopyToContainer(containerId, Constants.RequirementsTxt, $"/{Constants.RequirementsTxt}");
                // Ship the embedded build script into the container with normalized line endings.
                var scriptFilePath = Path.GetTempFileName();
                await FileSystemHelpers.WriteAllTextToFileAsync(scriptFilePath, (await StaticResources.PythonDockerBuildScript).Replace("\r\n", "\n"));
                if (!string.IsNullOrWhiteSpace(additionalPackages))
                {
                    // Give the container time to start up
                    await Task.Delay(TimeSpan.FromSeconds(3));
                    await DockerHelpers.ExecInContainer(containerId, $"apt-get update");
                    await DockerHelpers.ExecInContainer(containerId, $"apt-get install -y {additionalPackages}");
                }
                await DockerHelpers.CopyToContainer(containerId, scriptFilePath, Constants.StaticResourcesNames.PythonDockerBuild);
                await DockerHelpers.ExecInContainer(containerId, $"chmod +x /{Constants.StaticResourcesNames.PythonDockerBuild}");
                await DockerHelpers.ExecInContainer(containerId, $"/{Constants.StaticResourcesNames.PythonDockerBuild}");
                // Copy the restored packages back to the host packages directory.
                await DockerHelpers.CopyFromContainer(containerId, $"/{Constants.ExternalPythonPackages}/.", packagesLocation);
            }
            finally
            {
                if (!string.IsNullOrEmpty(containerId))
                {
                    await DockerHelpers.KillContainer(containerId, ignoreError: true);
                }
            }
        }
private static string CopyToTemp(IEnumerable<string> files, string rootPath)
{
var tmp = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
FileSystemHelpers.EnsureDirectory(tmp);
foreach (var file in files)
{
var relativeFileName = file.Replace(rootPath, string.Empty).Trim(Path.DirectorySeparatorChar);
var relativeDirName = Path.GetDirectoryName(relativeFileName);
FileSystemHelpers.EnsureDirectory(Path.Combine(tmp, relativeDirName));
FileSystemHelpers.Copy(file, Path.Combine(tmp, relativeFileName));
}
return tmp;
}
}
}
| 49.770574 | 194 | 0.610632 | [
"MIT"
] | kashimiz/azure-functions-core-tools | src/Azure.Functions.Cli/Helpers/PythonHelpers.cs | 19,960 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Threading.Tasks;
using System.Web;
using System.Web.Mvc;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
using Microsoft.Owin.Security;
using EmpleoDotNet.Models;
namespace EmpleoDotNet.Controllers
{
    /// <summary>
    /// MVC controller for user accounts: registration, local and external (OAuth)
    /// sign-in, password management, and linking/unlinking of external logins.
    /// Built on ASP.NET Identity's UserManager.
    /// </summary>
    [Authorize]
    public class AccountController : Controller
    {
        // Default ctor wires an EF-backed UserManager against the app's DbContext.
        public AccountController()
            : this(new UserManager<ApplicationUser>(new UserStore<ApplicationUser>(new ApplicationDbContext())))
        {
        }
        // Injection-friendly ctor, also used by tests.
        public AccountController(UserManager<ApplicationUser> userManager)
        {
            UserManager = userManager;
        }
        public UserManager<ApplicationUser> UserManager { get; private set; }
        //
        // GET: /Account/Login
        [AllowAnonymous]
        public ActionResult Login(string returnUrl)
        {
            ViewBag.ReturnUrl = returnUrl;
            return View();
        }
        //
        // POST: /Account/Login
        // Validates the credentials and signs the user in; on failure the form is redisplayed.
        [HttpPost]
        [AllowAnonymous]
        [ValidateAntiForgeryToken]
        public async Task<ActionResult> Login(LoginViewModel model, string returnUrl)
        {
            if (ModelState.IsValid)
            {
                var user = await UserManager.FindAsync(model.UserName, model.Password);
                if (user != null)
                {
                    await SignInAsync(user, model.RememberMe);
                    return RedirectToLocal(returnUrl);
                }
                else
                {
                    ModelState.AddModelError("", "Invalid username or password.");
                }
            }
            // If we got this far, something failed, redisplay form
            return View(model);
        }
        //
        // GET: /Account/Register
        [AllowAnonymous]
        public ActionResult Register()
        {
            return View();
        }
        //
        // POST: /Account/Register
        // Creates a local account and signs the new user in immediately.
        [HttpPost]
        [AllowAnonymous]
        [ValidateAntiForgeryToken]
        public async Task<ActionResult> Register(RegisterViewModel model)
        {
            if (ModelState.IsValid)
            {
                var user = new ApplicationUser() { UserName = model.UserName };
                var result = await UserManager.CreateAsync(user, model.Password);
                if (result.Succeeded)
                {
                    await SignInAsync(user, isPersistent: false);
                    return RedirectToAction("Index", "Home");
                }
                else
                {
                    AddErrors(result);
                }
            }
            // If we got this far, something failed, redisplay form
            return View(model);
        }
        //
        // POST: /Account/Disassociate
        // Removes an external login (e.g. Google/Facebook) from the current user.
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<ActionResult> Disassociate(string loginProvider, string providerKey)
        {
            ManageMessageId? message = null;
            IdentityResult result = await UserManager.RemoveLoginAsync(User.Identity.GetUserId(), new UserLoginInfo(loginProvider, providerKey));
            if (result.Succeeded)
            {
                message = ManageMessageId.RemoveLoginSuccess;
            }
            else
            {
                message = ManageMessageId.Error;
            }
            return RedirectToAction("Manage", new { Message = message });
        }
        //
        // GET: /Account/Manage
        // Shows the account-management page; the optional message describes the
        // outcome of a previous management action.
        public ActionResult Manage(ManageMessageId? message)
        {
            ViewBag.StatusMessage =
                message == ManageMessageId.ChangePasswordSuccess ? "Your password has been changed."
                : message == ManageMessageId.SetPasswordSuccess ? "Your password has been set."
                : message == ManageMessageId.RemoveLoginSuccess ? "The external login was removed."
                : message == ManageMessageId.Error ? "An error has occurred."
                : "";
            ViewBag.HasLocalPassword = HasPassword();
            ViewBag.ReturnUrl = Url.Action("Manage");
            return View();
        }
        //
        // POST: /Account/Manage
        // Changes the local password, or sets one for users that only have an
        // external login (in which case OldPassword is not required).
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<ActionResult> Manage(ManageUserViewModel model)
        {
            bool hasPassword = HasPassword();
            ViewBag.HasLocalPassword = hasPassword;
            ViewBag.ReturnUrl = Url.Action("Manage");
            if (hasPassword)
            {
                if (ModelState.IsValid)
                {
                    IdentityResult result = await UserManager.ChangePasswordAsync(User.Identity.GetUserId(), model.OldPassword, model.NewPassword);
                    if (result.Succeeded)
                    {
                        return RedirectToAction("Manage", new { Message = ManageMessageId.ChangePasswordSuccess });
                    }
                    else
                    {
                        AddErrors(result);
                    }
                }
            }
            else
            {
                // User does not have a password so remove any validation errors caused by a missing OldPassword field
                ModelState state = ModelState["OldPassword"];
                if (state != null)
                {
                    state.Errors.Clear();
                }
                if (ModelState.IsValid)
                {
                    IdentityResult result = await UserManager.AddPasswordAsync(User.Identity.GetUserId(), model.NewPassword);
                    if (result.Succeeded)
                    {
                        return RedirectToAction("Manage", new { Message = ManageMessageId.SetPasswordSuccess });
                    }
                    else
                    {
                        AddErrors(result);
                    }
                }
            }
            // If we got this far, something failed, redisplay form
            return View(model);
        }
        //
        // POST: /Account/ExternalLogin
        [HttpPost]
        [AllowAnonymous]
        [ValidateAntiForgeryToken]
        public ActionResult ExternalLogin(string provider, string returnUrl)
        {
            // Request a redirect to the external login provider
            return new ChallengeResult(provider, Url.Action("ExternalLoginCallback", "Account", new { ReturnUrl = returnUrl }));
        }
        //
        // GET: /Account/ExternalLoginCallback
        // Entry point the external provider redirects back to after authenticating.
        [AllowAnonymous]
        public async Task<ActionResult> ExternalLoginCallback(string returnUrl)
        {
            var loginInfo = await AuthenticationManager.GetExternalLoginInfoAsync();
            if (loginInfo == null)
            {
                return RedirectToAction("Login");
            }
            // Sign in the user with this external login provider if the user already has a login
            var user = await UserManager.FindAsync(loginInfo.Login);
            if (user != null)
            {
                await SignInAsync(user, isPersistent: false);
                return RedirectToLocal(returnUrl);
            }
            else
            {
                // If the user does not have an account, then prompt the user to create an account
                ViewBag.ReturnUrl = returnUrl;
                ViewBag.LoginProvider = loginInfo.Login.LoginProvider;
                return View("ExternalLoginConfirmation", new ExternalLoginConfirmationViewModel { UserName = loginInfo.DefaultUserName });
            }
        }
        //
        // POST: /Account/LinkLogin
        [HttpPost]
        [ValidateAntiForgeryToken]
        public ActionResult LinkLogin(string provider)
        {
            // Request a redirect to the external login provider to link a login for the current user
            return new ChallengeResult(provider, Url.Action("LinkLoginCallback", "Account"), User.Identity.GetUserId());
        }
        //
        // GET: /Account/LinkLoginCallback
        // Completes linking an external login to the signed-in user; the XSRF key
        // ties the callback to the user that initiated it.
        public async Task<ActionResult> LinkLoginCallback()
        {
            var loginInfo = await AuthenticationManager.GetExternalLoginInfoAsync(XsrfKey, User.Identity.GetUserId());
            if (loginInfo == null)
            {
                return RedirectToAction("Manage", new { Message = ManageMessageId.Error });
            }
            var result = await UserManager.AddLoginAsync(User.Identity.GetUserId(), loginInfo.Login);
            if (result.Succeeded)
            {
                return RedirectToAction("Manage");
            }
            return RedirectToAction("Manage", new { Message = ManageMessageId.Error });
        }
        //
        // POST: /Account/ExternalLoginConfirmation
        // Creates a local account for a first-time external login and attaches the
        // external login to it.
        [HttpPost]
        [AllowAnonymous]
        [ValidateAntiForgeryToken]
        public async Task<ActionResult> ExternalLoginConfirmation(ExternalLoginConfirmationViewModel model, string returnUrl)
        {
            if (User.Identity.IsAuthenticated)
            {
                return RedirectToAction("Manage");
            }
            if (ModelState.IsValid)
            {
                // Get the information about the user from the external login provider
                var info = await AuthenticationManager.GetExternalLoginInfoAsync();
                if (info == null)
                {
                    return View("ExternalLoginFailure");
                }
                var user = new ApplicationUser() { UserName = model.UserName };
                var result = await UserManager.CreateAsync(user);
                if (result.Succeeded)
                {
                    result = await UserManager.AddLoginAsync(user.Id, info.Login);
                    if (result.Succeeded)
                    {
                        await SignInAsync(user, isPersistent: false);
                        return RedirectToLocal(returnUrl);
                    }
                }
                AddErrors(result);
            }
            ViewBag.ReturnUrl = returnUrl;
            return View(model);
        }
        //
        // POST: /Account/LogOff
        [HttpPost]
        [ValidateAntiForgeryToken]
        public ActionResult LogOff()
        {
            AuthenticationManager.SignOut();
            return RedirectToAction("Index", "Home");
        }
        //
        // GET: /Account/ExternalLoginFailure
        [AllowAnonymous]
        public ActionResult ExternalLoginFailure()
        {
            return View();
        }
        // Partial view listing the user's linked external accounts; the remove button
        // is hidden when removing would leave the user with no way to sign in.
        [ChildActionOnly]
        public ActionResult RemoveAccountList()
        {
            var linkedAccounts = UserManager.GetLogins(User.Identity.GetUserId());
            ViewBag.ShowRemoveButton = HasPassword() || linkedAccounts.Count > 1;
            return (ActionResult)PartialView("_RemoveAccountPartial", linkedAccounts);
        }
        protected override void Dispose(bool disposing)
        {
            if (disposing && UserManager != null)
            {
                UserManager.Dispose();
                UserManager = null;
            }
            base.Dispose(disposing);
        }
        #region Helpers
        // Used for XSRF protection when adding external logins
        private const string XsrfKey = "XsrfId";
        // OWIN authentication middleware for the current request.
        private IAuthenticationManager AuthenticationManager
        {
            get
            {
                return HttpContext.GetOwinContext().Authentication;
            }
        }
        // Issues the application cookie for the user, clearing any external cookie first.
        private async Task SignInAsync(ApplicationUser user, bool isPersistent)
        {
            AuthenticationManager.SignOut(DefaultAuthenticationTypes.ExternalCookie);
            var identity = await UserManager.CreateIdentityAsync(user, DefaultAuthenticationTypes.ApplicationCookie);
            AuthenticationManager.SignIn(new AuthenticationProperties() { IsPersistent = isPersistent }, identity);
        }
        // Copies Identity errors into ModelState so the view can render them.
        private void AddErrors(IdentityResult result)
        {
            foreach (var error in result.Errors)
            {
                ModelState.AddModelError("", error);
            }
        }
        // True when the current user has a local password (as opposed to external-only login).
        private bool HasPassword()
        {
            var user = UserManager.FindById(User.Identity.GetUserId());
            if (user != null)
            {
                return user.PasswordHash != null;
            }
            return false;
        }
        // Outcome codes surfaced on the Manage page.
        public enum ManageMessageId
        {
            ChangePasswordSuccess,
            SetPasswordSuccess,
            RemoveLoginSuccess,
            Error
        }
        // Redirects only to local URLs to prevent open-redirect attacks.
        private ActionResult RedirectToLocal(string returnUrl)
        {
            if (Url.IsLocalUrl(returnUrl))
            {
                return Redirect(returnUrl);
            }
            else
            {
                return RedirectToAction("Index", "Home");
            }
        }
        // Result that triggers the OWIN challenge flow for an external login provider.
        private class ChallengeResult : HttpUnauthorizedResult
        {
            public ChallengeResult(string provider, string redirectUri) : this(provider, redirectUri, null)
            {
            }
            public ChallengeResult(string provider, string redirectUri, string userId)
            {
                LoginProvider = provider;
                RedirectUri = redirectUri;
                UserId = userId;
            }
            public string LoginProvider { get; set; }
            public string RedirectUri { get; set; }
            public string UserId { get; set; }
            public override void ExecuteResult(ControllerContext context)
            {
                var properties = new AuthenticationProperties() { RedirectUri = RedirectUri };
                if (UserId != null)
                {
                    // Carry the user id through the round-trip so the callback can verify it.
                    properties.Dictionary[XsrfKey] = UserId;
                }
                context.HttpContext.GetOwinContext().Authentication.Challenge(properties, LoginProvider);
            }
        }
        #endregion
    }
} | 34.257908 | 147 | 0.54098 | [
"Unlicense"
] | gavilanch/empleo-dot-net | EmpleoDotNet/Controllers/AccountController.cs | 14,082 | C# |
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading.Tasks;
using Pulumi.Serialization;
namespace Pulumi.AzureNative.Storage.V20210201.Inputs
{
    /// <summary>
    /// The service properties for soft delete.
    /// </summary>
    /// <remarks>
    /// This type lives in a generator-produced file (see the header warning); keep
    /// manual edits out of the generated members.
    /// </remarks>
    public sealed class DeleteRetentionPolicyArgs : Pulumi.ResourceArgs
    {
        /// <summary>
        /// Indicates the number of days that the deleted item should be retained. The minimum specified value can be 1 and the maximum value can be 365.
        /// </summary>
        [Input("days")]
        public Input<int>? Days { get; set; }
        /// <summary>
        /// Indicates whether DeleteRetentionPolicy is enabled.
        /// </summary>
        [Input("enabled")]
        public Input<bool>? Enabled { get; set; }
        public DeleteRetentionPolicyArgs()
        {
        }
    }
}
| 29.914286 | 153 | 0.641834 | [
"Apache-2.0"
] | polivbr/pulumi-azure-native | sdk/dotnet/Storage/V20210201/Inputs/DeleteRetentionPolicyArgs.cs | 1,047 | C# |
using System;
using Microsoft.AspNetCore.Components.Rendering;
using Microsoft.AspNetCore.Components;
using Microsoft.JSInterop;
namespace StableCube.Bulzor.Components.MediaPlayer
{
    /// <summary>
    /// Blazor component that renders an HTML &lt;track&gt; element (text tracks such
    /// as subtitles or captions) for use inside a media element.
    /// </summary>
    public class BulMediaTrack : BulComponentBase
    {
        // Location of the track file (maps to the "src" attribute).
        [Parameter]
        public Uri SrcUri { get; set; }
        // Intended use of the track, e.g. "subtitles" or "captions" — maps to "kind".
        [Parameter]
        public string Kind { get; set; }
        // User-visible title of the track (maps to "label").
        [Parameter]
        public string Label { get; set; }
        // Language of the track text (maps to "srclang").
        [Parameter]
        public string SrcLang { get; set; }
        // When true, the track is enabled by default (maps to the "default" attribute).
        [Parameter]
        public bool IsDefault { get; set; }
        protected override void BuildBulma()
        {
            // No Bulma CSS classes apply to a bare <track> element.
        }
        protected override void BuildRenderTree(RenderTreeBuilder builder)
        {
            BuildBulma();
            // Sequence numbers must be stable literals so Blazor's diffing works correctly.
            builder.OpenElement(0, "track");
            builder.AddAttribute(1, "src", SrcUri);
            builder.AddAttribute(2, "kind", Kind);
            builder.AddAttribute(3, "label", Label);
            builder.AddAttribute(4, "srclang", SrcLang);
            builder.AddAttribute(5, "default", IsDefault);
            builder.CloseElement();
        }
    }
} | 27.547619 | 75 | 0.569576 | [
"MIT"
] | StableCube/Bulzor | Components.MediaPlayer/src/Components/BulMediaTrack.cs | 1,159 | C# |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.Formatting;
namespace Microsoft.NetCore.Analyzers.Performance
{
    /// <summary>
    /// CA1827: Do not use Count()/LongCount() when Any() can be used.
    /// CA1828: Do not use CountAsync()/LongCountAsync() when AnyAsync() can be used.
    /// </summary>
    public abstract class DoNotUseCountWhenAnyCanBeUsedFixer : CodeFixProvider
    {
        private const string AsyncMethodName = "AnyAsync";
        private const string SyncMethodName = "Any";
        /// <summary>
        /// A list of diagnostic IDs that this provider can provider fixes for.
        /// </summary>
        /// <value>The fixable diagnostic ids.</value>
        public override ImmutableArray<string> FixableDiagnosticIds { get; } =
            ImmutableArray.Create(
                UseCountProperlyAnalyzer.CA1827,
                UseCountProperlyAnalyzer.CA1828);
        /// <summary>
        /// Gets an optional <see cref="FixAllProvider" /> that can fix all/multiple occurrences of diagnostics fixed by this code fix provider.
        /// Return null if the provider doesn't support fix all/multiple occurrences.
        /// Otherwise, you can return any of the well known fix all providers from <see cref="WellKnownFixAllProviders" /> or implement your own fix all provider.
        /// </summary>
        /// <returns>FixAllProvider.</returns>
        public sealed override FixAllProvider GetFixAllProvider()
        {
            // See https://github.com/dotnet/roslyn/blob/master/docs/analyzers/FixAllProvider.md for more information on Fix All Providers
            return WellKnownFixAllProviders.BatchFixer;
        }
        /// <summary>
        /// Computes one or more fixes for the specified <see cref="CodeFixContext" />.
        /// </summary>
        /// <param name="context">A <see cref="CodeFixContext" /> containing context information about the diagnostics to fix.
        /// The context must only contain diagnostics with a <see cref="Diagnostic.Id" /> included in the <see cref="CodeFixProvider.FixableDiagnosticIds" />
        /// for the current provider.</param>
        /// <returns>A <see cref="Task" /> that represents the asynchronous operation.</returns>
        public sealed override async Task RegisterCodeFixesAsync(CodeFixContext context)
        {
            // NOTE(review): GetSyntaxRootAsync can return null for documents without
            // syntax trees — presumably this fixer is only invoked for C#/VB documents;
            // verify before relying on it.
            var root = await context.Document.GetSyntaxRootAsync(context.CancellationToken).ConfigureAwait(false);
            var node = root.FindNode(context.Span);
            // Diagnostic properties carry how the fix should be shaped (negation/async/operation).
            var properties = context.Diagnostics[0].Properties;
            var shouldNegateKey = properties.ContainsKey(UseCountProperlyAnalyzer.ShouldNegateKey);
            var isAsync = properties.ContainsKey(UseCountProperlyAnalyzer.IsAsyncKey) ||
                context.Diagnostics[0].Id == UseCountProperlyAnalyzer.CA1828;
            if (node is object &&
                properties.TryGetValue(UseCountProperlyAnalyzer.OperationKey, out var operation) &&
                this.TryGetFixer(node, operation, isAsync, out var expression, out var arguments))
            {
                context.RegisterCodeFix(
                    new DoNotUseCountWhenAnyCanBeUsedCodeAction(isAsync, context.Document, node, expression, arguments, shouldNegateKey),
                    context.Diagnostics);
            }
        }
        /// <summary>
        /// Tries to get a fixer for the specified <paramref name="node" />.
        /// </summary>
        /// <param name="node">The node to get a fixer for.</param>
        /// <param name="operation">The operation to get the fixer from.</param>
        /// <param name="isAsync"><see langword="true" /> if it's an asynchronous method; <see langword="false" /> otherwise.</param>
        /// <param name="expression">If this method returns <see langword="true" />, contains the expression to be used to invoke <c>Any</c>.</param>
        /// <param name="arguments">If this method returns <see langword="true" />, contains the arguments from <c>Any</c> to be used on <c>Count</c>.</param>
        /// <returns><see langword="true" /> if a fixer was found., <see langword="false" /> otherwise.</returns>
        protected abstract bool TryGetFixer(
            SyntaxNode node,
            string operation,
            bool isAsync,
            [NotNullWhen(returnValue: true)] out SyntaxNode? expression,
            [NotNullWhen(returnValue: true)] out IEnumerable<SyntaxNode>? arguments);
        /// <summary>
        /// Code action that replaces the flagged pattern with a call to Any()/AnyAsync(),
        /// optionally negated, preserving the original node's trivia.
        /// </summary>
        private class DoNotUseCountWhenAnyCanBeUsedCodeAction : CodeAction
        {
            private readonly bool _isAsync;
            private readonly Document _document;
            private readonly SyntaxNode _pattern;
            private readonly SyntaxNode _expression;
            private readonly IEnumerable<SyntaxNode> _arguments;
            private readonly bool _shouldNegateKey;
            public DoNotUseCountWhenAnyCanBeUsedCodeAction(
                bool isAsync,
                Document document,
                SyntaxNode pattern,
                SyntaxNode expression,
                IEnumerable<SyntaxNode> arguments,
                bool shouldNegateKey)
            {
                this._isAsync = isAsync;
                this._document = document;
                this._pattern = pattern;
                this._expression = expression;
                this._arguments = arguments;
                this._shouldNegateKey = shouldNegateKey;
                var title = !isAsync ?
                    MicrosoftNetCoreAnalyzersResources.DoNotUseCountWhenAnyCanBeUsedTitle :
                    MicrosoftNetCoreAnalyzersResources.DoNotUseCountAsyncWhenAnyAsyncCanBeUsedTitle;
                this.Title = title;
                this.EquivalenceKey = title;
            }
            public override string Title { get; }
            public override string EquivalenceKey { get; }
            protected override async Task<Document> GetChangedDocumentAsync(CancellationToken cancellationToken)
            {
                var editor = await DocumentEditor.CreateAsync(this._document, cancellationToken).ConfigureAwait(false);
                var generator = editor.Generator;
                // Build expression.Any(args) (or AnyAsync), then wrap with await/negation as needed.
                var memberAccess = generator.MemberAccessExpression(this._expression.WithoutTrailingTrivia(), this._isAsync ? AsyncMethodName : SyncMethodName);
                var replacementSyntax = generator.InvocationExpression(memberAccess, _arguments);
                if (this._isAsync)
                {
                    replacementSyntax = generator.AwaitExpression(replacementSyntax);
                }
                if (this._shouldNegateKey)
                {
                    replacementSyntax = generator.LogicalNotExpression(replacementSyntax);
                }
                // Keep formatting/trivia of the original pattern so the edit is minimal.
                replacementSyntax = replacementSyntax
                    .WithAdditionalAnnotations(Formatter.Annotation)
                    .WithTriviaFrom(this._pattern);
                editor.ReplaceNode(this._pattern, replacementSyntax);
                return editor.GetChangedDocument();
            }
        }
    }
}
| 49.854305 | 162 | 0.64373 | [
"Apache-2.0"
] | Atrejoe/roslyn-analyzers | src/NetAnalyzers/Core/Microsoft.NetCore.Analyzers/Performance/DoNotUseCountWhenAnyCanBeUsed.Fixer.cs | 7,530 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text;
namespace Data.Models
{
    /// <summary>
    /// EF entity representing a sign-in token issued to a user.
    /// </summary>
    public class SigninToken
    {
        // Primary key: the opaque token value itself.
        [Key]
        public Guid Token { get; set; }
        // Foreign key to the owning user.
        public Guid UserId { get; set; }
        [ForeignKey("UserId")]
        public ApplicationUser User { get; set; }
        // New tokens start out acceptable; set to UnAcceptable to revoke.
        public TokenStatus TokenStatus { get; set; } = TokenStatus.Acceptable;
    }
    /// <summary>
    /// Validity state of a <see cref="SigninToken"/>.
    /// </summary>
    public enum TokenStatus
    {
        // Token may be used for sign-in.
        Acceptable,
        // Token has been revoked. NOTE(review): spelling kept as-is ("UnAcceptable") —
        // renaming the member would break callers and any persisted values.
        UnAcceptable
    }
}
| 21.961538 | 78 | 0.65324 | [
"Apache-2.0"
] | elmurphy/WebAPISeed | Data/Models/SigninToken.cs | 573 | C# |
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
namespace Episerver.Labs.Divvy.Webhooks
{
public class DivvyWebhookManager : Queue<DivvyWebhook>
{
private System.Timers.Timer timer;
private bool working;
public static DivvyWebhookManager Instance;
static DivvyWebhookManager()
{
Instance = new DivvyWebhookManager();
}
public DivvyWebhookManager()
{
timer = new System.Timers.Timer
{
Interval = DivvyManager.Settings.WebhookTimerInterval
};
timer.Elapsed += (s, e) =>
{
if (!working)
{
Process();
}
};
timer.Start();
DivvyLogManager.Log($"Webhook Manager Initialized", new { timer.Interval });
}
public void Add(DivvyWebhook webhook)
{
DivvyLogManager.Log($"Enqueing Webhook", webhook);
Enqueue(webhook);
}
private void Process()
{
if (!this.Any())
{
return;
}
DivvyLogManager.Log($"Processing Webhook Queue. {this.Count()} item(s)");
working = true;
while (this.Any())
{
var webhook = Dequeue();
Execute(webhook);
Thread.Sleep(DivvyManager.Settings.WebhookTimerInterval);
};
working = false;
DivvyLogManager.Log($"Webhook Queue Processing Complete");
}
private void Execute(DivvyWebhook webhook)
{
DivvyLogManager.Log($"Executing Webhook", webhook);
var sw = Stopwatch.StartNew();
var httpClient = new HttpClient();
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Token", DivvyManager.Settings.AccessToken.ToString());
var result = httpClient.PostAsJsonAsync<object>(webhook.Uri.AbsoluteUri, webhook.Data).Result;
DivvyLogManager.Log($"Webhook Executed in {sw.ElapsedMilliseconds}ms", new { result.StatusCode });
}
}
} | 29.5 | 146 | 0.558018 | [
"MIT"
] | episerver/episerver-divvy | src/Webhooks/DivvyWebhookManager.cs | 2,303 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.ApplicationModel;
using Windows.ApplicationModel.Activation;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Microsoft.UI.Xaml;
using Microsoft.UI.Xaml.Controls;
using Microsoft.UI.Xaml.Controls.Primitives;
using Microsoft.UI.Xaml.Data;
using Microsoft.UI.Xaml.Input;
using Microsoft.UI.Xaml.Media;
using Microsoft.UI.Xaml.Navigation;
// To learn more about WinUI, the WinUI project structure,
// and more about our project templates, see: http://aka.ms/winui-project-info.
namespace OBRVisualizer
{
/// <summary>
/// Provides application-specific behavior to supplement the default Application class.
/// </summary>
    sealed partial class App : Application
    {
        /// <summary>
        /// Initializes the singleton application object. This is the first line of authored code
        /// executed, and as such is the logical equivalent of main() or WinMain().
        /// </summary>
        public App()
        {
            this.InitializeComponent();
            // Persist app state before the OS suspends the process (see OnSuspending).
            this.Suspending += OnSuspending;
        }
        /// <summary>
        /// Invoked when the application is launched normally by the end user. Other entry points
        /// will be used such as when the application is launched to open a specific file.
        /// </summary>
        /// <param name="e">Details about the launch request and process.</param>
        protected override void OnLaunched(Microsoft.UI.Xaml.LaunchActivatedEventArgs e)
        {
            // NOTE(review): this flow relies on Window.Current being populated, which
            // matches UWP-style hosting; in WinUI 3 desktop apps Window.Current can be
            // null — confirm the hosting model before reusing this template code.
            Frame rootFrame = Window.Current.Content as Frame;
            // Do not repeat app initialization when the Window already has content,
            // just ensure that the window is active
            if (rootFrame == null)
            {
                // Create a Frame to act as the navigation context and navigate to the first page
                rootFrame = new Frame();
                rootFrame.NavigationFailed += OnNavigationFailed;
                if (e.UWPLaunchActivatedEventArgs.PreviousExecutionState == ApplicationExecutionState.Terminated)
                {
                    //TODO: Load state from previously suspended application
                }
                // Place the frame in the current Window
                Window.Current.Content = rootFrame;
            }
            // Prelaunch activations must not show UI; only activate on a real launch.
            if (e.UWPLaunchActivatedEventArgs.PrelaunchActivated == false)
            {
                if (rootFrame.Content == null)
                {
                    // When the navigation stack isn't restored navigate to the first page,
                    // configuring the new page by passing required information as a navigation
                    // parameter
                    rootFrame.Navigate(typeof(MainPage), e.Arguments);
                }
                // Ensure the current window is active
                Window.Current.Activate();
            }
        }
        /// <summary>
        /// Invoked when Navigation to a certain page fails
        /// </summary>
        /// <param name="sender">The Frame which failed navigation</param>
        /// <param name="e">Details about the navigation failure</param>
        void OnNavigationFailed(object sender, NavigationFailedEventArgs e)
        {
            throw new Exception("Failed to load Page " + e.SourcePageType.FullName);
        }
        /// <summary>
        /// Invoked when application execution is being suspended. Application state is saved
        /// without knowing whether the application will be terminated or resumed with the contents
        /// of memory still intact.
        /// </summary>
        /// <param name="sender">The source of the suspend request.</param>
        /// <param name="e">Details about the suspend request.</param>
        private void OnSuspending(object sender, SuspendingEventArgs e)
        {
            // The deferral keeps the suspension pending until Complete() is called,
            // allowing asynchronous save work to finish first.
            var deferral = e.SuspendingOperation.GetDeferral();
            //TODO: Save application state and stop any background activity
            deferral.Complete();
        }
    }
}
| 39.932692 | 113 | 0.625813 | [
"MIT"
] | Joey35233/OBRVisualizer | App.xaml.cs | 4,155 | C# |
//
// ManagerTest.cs
//
// Author:
// Zachary Gramana <zack@xamarin.com>
//
// Copyright (c) 2014 Xamarin Inc
// Copyright (c) 2014 .NET Foundation
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//
// Copyright (c) 2014 Couchbase, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Couchbase.Lite.Db;
using NUnit.Framework;
using Couchbase.Lite.Storage.SQLCipher;
namespace Couchbase.Lite
{
[TestFixture("ForestDB")]
public class ManagerTest : LiteTestCase
{
public ManagerTest(string storageType) : base(storageType) {}
[Test]
public void TestServer()
{
//to ensure this test is easily repeatable we will explicitly remove
//any stale foo.cblite
var mustExist = true;
Database old = manager.GetDatabase("foo", mustExist);
if (old != null)
{
old.Delete();
}
mustExist = false;
var db = manager.GetDatabase("foo", mustExist);
Assert.IsNotNull(db);
Assert.AreEqual("foo", db.Name);
Assert.IsTrue(db.DbDirectory.StartsWith(GetServerPath()));
Assert.IsFalse(db.Exists());
// because foo doesn't exist yet
List<string> databaseNames = manager.AllDatabaseNames.ToList();
Assert.IsTrue(!databaseNames.Contains("foo"));
Assert.DoesNotThrow(db.Open);
Assert.IsTrue(db.Exists());
databaseNames = manager.AllDatabaseNames.ToList();
Assert.IsTrue(databaseNames.Contains("foo"));
db.Close();
db.Delete();
}
/// <exception cref="System.Exception"></exception>
[Test]
public void TestUpgradeOldDatabaseFiles()
{
var testDirName = "test-directory-" + DateTime.UtcNow.MillisecondsSinceEpoch();
var rootDirPath = RootDirectory.FullName;
var testDirPath = Path.Combine(rootDirPath, testDirName);
var testDirInfo = Directory.CreateDirectory(testDirPath);
var dbStream = GetAsset("withattachments.cblite");
var destStream = File.OpenWrite(Path.Combine(testDirPath, "withattachments" + Manager.DatabaseSuffixv1));
dbStream.CopyTo(destStream);
dbStream.Dispose();
destStream.Dispose();
var attStream = GetAsset("attachment.blob");
Directory.CreateDirectory(Path.Combine(testDirPath, "withattachments/attachments"));
destStream = File.OpenWrite(Path.Combine(testDirPath, "withattachments/attachments/356a192b7913b04c54574d18c28d46e6395428ab.blob"));
attStream.CopyTo(destStream);
destStream.Dispose();
attStream.Dispose();
StopCBLite();
manager = new Manager(testDirInfo, Manager.DefaultOptions);
var db = manager.GetDatabase("withattachments", true);
int version = DatabaseUpgraderFactory.SchemaVersion(Path.Combine(db.DbDirectory, "db.sqlite3"));
Assert.IsTrue(version >= 101, "Upgrade failed");
Assert.IsFalse(Directory.Exists(Path.Combine(testDirPath, "withattachments/attachments")), "Failed to remove old attachments dir");
Assert.IsTrue(Directory.Exists(db.AttachmentStorePath), "Failed to create new attachments dir");
}
[Test]
public void TestReplaceDatabaseNamedNoAttachments() {
//Copy database from assets to local storage
var dbStream = GetAsset("noattachments.cblite");
manager.ReplaceDatabase("replaced", dbStream, null);
dbStream.Dispose();
//Now validate the number of files in the DB
var db = manager.GetDatabase("replaced");
Assert.AreEqual(10, db.GetDocumentCount());
db.Dispose();
}
[Test]
public void TestReplaceDatabaseNamedWithAttachments() {
var dbStream = GetAsset("withattachments.cblite");
var attachments = new Dictionary<string, Stream>();
attachments["356a192b7913b04c54574d18c28d46e6395428ab.blob"] = GetAsset("attachment.blob");
manager.ReplaceDatabase("replaced", dbStream, attachments);
dbStream.Dispose();
//Validate the number of files in the DB
Assert.AreEqual(1, manager.GetDatabase("replaced").GetDocumentCount());
var doc = manager.GetDatabase("replaced").GetExistingDocument("168e0c56-4588-4df4-8700-4d5115fa9c74");
Assert.IsNotNull(doc);
Assert.IsNotNull(doc.CurrentRevision.Attachments.ElementAt(0));
Assert.IsNotNull(doc.CurrentRevision.Attachments.ElementAt(0).Content);
}
[Test]
public void TestReplaceWithIosDatabase() {
if (_storageType == "SQLite") {
using (var assetStream = GetAsset("ios104.zip")) {
manager.ReplaceDatabase("iosdb", assetStream, true);
}
var db = manager.GetExistingDatabase("iosdb");
Assert.IsNotNull(db, "Failed to import database");
var doc = db.GetExistingDocument("BC38EA44-E153-429A-A698-0CBE6B0090C4");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 2, "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
var view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("BC38EA44-E153-429A-A698-0CBE6B0090C4")) {
emit(d["_id"], null);
}
}, "1");
var result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
db.Dispose();
using (var assetStream = GetAsset("ios110.zip")) {
manager.ReplaceDatabase("iosdb", assetStream, true);
}
db = manager.GetExistingDatabase("iosdb");
Assert.IsNotNull(db, "Failed to import database");
doc = db.GetExistingDocument("-iTji_n2zmHpmgYecaRHqZE");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 2, "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("-iTji_n2zmHpmgYecaRHqZE")) {
emit(d["_id"], null);
}
}, "1");
result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
db.Dispose();
using (var assetStream = GetAsset("ios120.zip")) {
manager.ReplaceDatabase("iosdb", assetStream, true);
}
db = manager.GetExistingDatabase("iosdb");
Assert.IsNotNull(db, "Failed to import database");
doc = db.GetExistingDocument("doc1");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(2, doc.CurrentRevision.AttachmentNames.Count(), "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("doc1")) {
emit(d["_id"], null);
}
}, "1");
result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
} else {
using (var assetStream = GetAsset("ios120-forestdb.zip")) {
manager.ReplaceDatabase("iosdb", assetStream, true);
}
var db = manager.GetExistingDatabase("iosdb");
Assert.IsNotNull(db, "Failed to import database");
var doc = db.GetExistingDocument("doc1");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(1, doc.CurrentRevision.AttachmentNames.Count(), "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
doc = db.GetExistingDocument("doc2");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(1, doc.CurrentRevision.AttachmentNames.Count(), "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
var view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("doc1")) {
emit(d["_id"], null);
}
}, "1");
var result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
}
}
[Test]
public void TestReplaceWithAndroidDatabase() {
if (_storageType == "SQLite") {
using (var assetStream = GetAsset("android104.zip")) {
manager.ReplaceDatabase("androiddb", assetStream, true);
}
var db = manager.GetExistingDatabase("androiddb");
Assert.IsNotNull(db, "Failed to import database");
var doc = db.GetExistingDocument("66ac306d-de93-46c8-b60f-946c16ac4a1d");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 1, "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
var view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("66ac306d-de93-46c8-b60f-946c16ac4a1d")) {
emit(d["_id"], null);
}
}, "1");
var result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
db.Dispose();
using (var assetStream = GetAsset("android110.zip")) {
manager.ReplaceDatabase("androiddb", assetStream, true);
}
db = manager.GetExistingDatabase("androiddb");
Assert.IsNotNull(db, "Failed to import database");
doc = db.GetExistingDocument("d3e80747-2568-47c8-81e8-a04ba1b5c5d4");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 1, "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("d3e80747-2568-47c8-81e8-a04ba1b5c5d4")) {
emit(d["_id"], null);
}
}, "1");
result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
db.Dispose();
using (var assetStream = GetAsset("android120.zip")) {
manager.ReplaceDatabase("androiddb", assetStream, true);
}
db = manager.GetExistingDatabase("androiddb");
Assert.IsNotNull(db, "Failed to import database");
doc = db.GetExistingDocument("doc1");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 1, "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
doc = db.GetExistingDocument("doc2");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 1, "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
Assert.IsNotNull(db.GetExistingLocalDocument("local1"));
view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("doc1")) {
emit(d["_id"], null);
}
}, "1");
result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
} else {
using (var assetStream = GetAsset("android120-forestdb.zip")) {
manager.ReplaceDatabase("androiddb", assetStream, true);
}
var db = manager.GetExistingDatabase("androiddb");
Assert.IsNotNull(db, "Failed to import database");
var doc = db.GetExistingDocument("doc1");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(1, doc.CurrentRevision.AttachmentNames.Count(), "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
doc = db.GetExistingDocument("doc2");
Assert.IsNotNull(doc, "Failed to get doc from imported database");
Assert.AreEqual(1, doc.CurrentRevision.AttachmentNames.Count(), "Failed to get attachments from imported database");
using (var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
}
var view = db.GetView("view");
view.SetMap((d, emit) =>
{
if (d["_id"].Equals("doc1")) {
emit(d["_id"], null);
}
}, "1");
var result = view.CreateQuery().Run();
Assert.AreEqual(1, result.Count);
}
}
[Test]
public void TestReplaceFailure()
{
var doc = database.CreateDocument();
doc.PutProperties(new Dictionary<string, object> {
{ "foo", "bar" }
});
Assert.Throws(typeof(ArgumentException), () =>
{
using (var assetStream = GetAsset("android104.zip")) {
manager.ReplaceDatabase(database.Name, assetStream, false);
}
});
// Verify that the original DB is intact
doc = database.GetExistingDocument(doc.Id);
Assert.IsNotNull(doc, "Failed to get original document");
Assert.AreEqual("bar", doc.UserProperties["foo"]);
}
}
}
| 46.10101 | 144 | 0.570004 | [
"Apache-2.0"
] | APXLabs/couchbase-lite-net | src/Couchbase.Lite.Tests.Shared/ManagerTest.cs | 18,256 | C# |
using System;
using System.Security.Cryptography;
namespace StupidFirewallManager.Common.Encryption
{
public static class EncryptionUtils
{
public const Int32 saltSize = 8;
public static byte[] GenerateRandomSalt()
{
using var csp = new RNGCryptoServiceProvider();
byte[] salt = new byte[saltSize];
csp.GetBytes(salt);
return salt;
}
public static ICryptoTransform GetEncryptorFromPassword(
this Aes aes,
string password,
byte[] salt)
{
using var pdb = new PasswordDeriveBytes(password, salt);
var key = pdb.GetBytes(32);
var IV = pdb.GetBytes(16);
return aes.CreateEncryptor(key, IV);
}
public static ICryptoTransform GetDecryptorFromPassword(
this Aes aes,
string password,
byte[] salt)
{
using var pdb = new PasswordDeriveBytes(password, salt);
var key = pdb.GetBytes(32);
var IV = pdb.GetBytes(16);
return aes.CreateDecryptor(key, IV);
}
}
}
| 29.243902 | 69 | 0.546289 | [
"MIT"
] | AlkampferOpenSource/StupidFirewallManager | src/StupidFirewallManager.Common/Encryption/EncryptionUtils.cs | 1,201 | C# |
using CodeWars;
using NUnit.Framework;
namespace CodeWarsTests
{
[TestFixture]
public class ConvertIntegerToBinaryTests
{
[Test]
public void BasicTests()
{
Assert.AreEqual("10", ConvertIntegerToBinary.ToBinary(2));
Assert.AreEqual("11", ConvertIntegerToBinary.ToBinary(3));
Assert.AreEqual("100", ConvertIntegerToBinary.ToBinary(4));
Assert.AreEqual("101", ConvertIntegerToBinary.ToBinary(5));
}
}
} | 27.611111 | 71 | 0.637827 | [
"MIT"
] | a-kozhanov/codewars-csharp | CodeWarsTests/7kyu/ConvertIntegerToBinaryTests.cs | 499 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Locust.Collections
{
public class CharBuffer
{
private int bufferSize;
private char[] buffer;
private int position;
public CharBuffer(): this(16)
{
}
public CharBuffer(int bufferSize)
{
if (bufferSize <= 0)
bufferSize = 16;
this.bufferSize = bufferSize;
this.buffer = new char[bufferSize];
}
public int Length
{
get { return position; }
}
public int BufferLength
{
get { return buffer.Length; }
}
public void Reset()
{
position = 0;
}
public void Append(char ch)
{
if (position == buffer.Length)
{
Array.Resize(ref buffer, buffer.Length + bufferSize);
}
buffer[position] = ch;
position++;
}
public void Append(string s)
{
foreach (var ch in s)
{
Append(ch);
}
}
public void AppendFormat(string format, params object[] args)
{
var s = string.Format(format, args);
foreach (var ch in s)
{
Append(ch);
}
}
public override string ToString()
{
return new string(buffer, 0, position);
}
}
}
| 21.04 | 69 | 0.465779 | [
"MIT"
] | mansoor-omrani/Locust.NET | Library/Locust.Collections/CharBuffer.cs | 1,580 | C# |
#region License
//
// Copyright 2002-2019 Drew Noakes
// Ported from Java to C# by Yakov Danilov for Imazen LLC in 2014
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// More information about this project is available at:
//
// https://github.com/drewnoakes/metadata-extractor-dotnet
// https://drewnoakes.com/code/exif/
//
#endregion
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
namespace MetadataExtractor.Formats.Jpeg
{
/// <summary>An enumeration of the known segment types found in JPEG files.</summary>
/// <remarks>
/// <list type="bullet">
/// <item>http://www.ozhiker.com/electronics/pjmt/jpeg_info/app_segments.html</item>
/// <item>http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/JPEG.html</item>
/// <item>http://lad.dsc.ufcg.edu.br/multimidia/jpegmarker.pdf</item>
/// <item>http://dev.exiv2.org/projects/exiv2/wiki/The_Metadata_in_JPEG_files</item>
/// </list>
/// </remarks>
/// <author>Drew Noakes https://drewnoakes.com</author>
    [SuppressMessage("ReSharper", "UnusedMember.Global")]
    public enum JpegSegmentType : byte
    {
        // Each value is the second byte of the segment's two-byte marker
        // (the marker appears in the stream as 0xFF followed by this value).
        /// <summary>For temporary use in arithmetic coding.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Tem = 0x01,
        /// <summary>Start Of Image segment. Begins the compressed JPEG data stream.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Soi = 0xD8,
        /// <summary>Define Quantization Table.</summary>
        /// <remarks>Specifies one or more quantization tables.</remarks>
        Dqt = 0xDB,
        /// <summary>Start-of-Frame, non-differential Huffman coding frame, baseline DCT.</summary>
        /// <remarks>
        /// Indicates that this is a baseline DCT-based JPEG, and specifies the width,
        /// height, number of components, and component subsampling (e.g., 4:2:0).
        /// </remarks>
        Sof0 = 0xC0,
        /// <summary>Start-of-Frame, non-differential Huffman coding frame, extended sequential DCT.</summary>
        Sof1 = 0xC1,
        /// <summary>Start-of-Frame, non-differential Huffman coding frame, progressive DCT.</summary>
        /// <remarks>
        /// Indicates that this is a progressive DCT-based JPEG, and specifies the width,
        /// height, number of components, and component subsampling (e.g., 4:2:0).
        /// </remarks>
        Sof2 = 0xC2,
        /// <summary>Start-of-Frame, non-differential Huffman coding frame, lossless sequential.</summary>
        Sof3 = 0xC3,
        /// <summary>Define Huffman Table(s).</summary>
        /// <remarks>Specifies one or more Huffman tables.</remarks>
        Dht = 0xC4,
        /// <summary>Start-of-Frame, differential Huffman coding frame, differential sequential DCT.</summary>
        Sof5 = 0xC5,
        /// <summary>Start-of-Frame, differential Huffman coding frame, differential progressive DCT.</summary>
        Sof6 = 0xC6,
        /// <summary>Start-of-Frame, differential Huffman coding frame, differential lossless.</summary>
        Sof7 = 0xC7,
        /// <summary>Start-of-Frame, non-differential arithmetic coding frame, extended sequential DCT.</summary>
        Sof9 = 0xC9,
        /// <summary>Start-of-Frame, non-differential arithmetic coding frame, progressive DCT.</summary>
        Sof10 = 0xCA,
        /// <summary>Start-of-Frame, non-differential arithmetic coding frame, lossless sequential.</summary>
        Sof11 = 0xCB,
        /// <summary>Define Arithmetic Coding table(s).</summary>
        Dac = 0xCC,
        /// <summary>Start-of-Frame, differential arithmetic coding frame, differential sequential DCT.</summary>
        Sof13 = 0xCD,
        /// <summary>Start-of-Frame, differential arithmetic coding frame, differential progressive DCT.</summary>
        Sof14 = 0xCE,
        /// <summary>Start-of-Frame, differential arithmetic coding frame, differential lossless.</summary>
        Sof15 = 0xCF,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst0 = 0xD0,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst1 = 0xD1,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst2 = 0xD2,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst3 = 0xD3,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst4 = 0xD4,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst5 = 0xD5,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst6 = 0xD6,
        /// <summary>Restart.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Rst7 = 0xD7,
        /// <summary>End-of-Image. Terminates the JPEG compressed data stream that started at <see cref="Soi"/>.</summary>
        /// <remarks>No length or parameter sequence follows this marker.</remarks>
        Eoi = 0xD9,
        /// <summary>Start-of-Scan.</summary>
        /// <remarks>
        /// Begins a top-to-bottom scan of the image.
        /// In baseline DCT JPEG images, there is generally a single scan.
        /// Progressive DCT JPEG images usually contain multiple scans.
        /// This marker specifies which slice of data it will contain, and is
        /// immediately followed by entropy-coded data.
        /// </remarks>
        Sos = 0xDA,
        /// <summary>Define Number of Lines.</summary>
        Dnl = 0xDC,
        /// <summary>Define Restart Interval.</summary>
        /// <remarks>
        /// Specifies the interval between RSTn markers, in macroblocks.
        /// This marker is followed by two bytes indicating the fixed size so
        /// it can be treated like any other variable size segment.
        /// </remarks>
        Dri = 0xDD,
        /// <summary>Define Hierarchical Progression.</summary>
        Dhp = 0xDE,
        /// <summary>Expand reference components.</summary>
        Exp = 0xDF,
        /// <summary>Application specific, type 0. Commonly contains JFIF, JFXX.</summary>
        App0 = 0xE0,
        /// <summary>Application specific, type 1. Commonly contains Exif. XMP data is also kept in here, though usually in a second instance.</summary>
        App1 = 0xE1,
        /// <summary>Application specific, type 2. Commonly contains ICC.</summary>
        App2 = 0xE2,
        /// <summary>Application specific, type 3.</summary>
        App3 = 0xE3,
        /// <summary>Application specific, type 4.</summary>
        App4 = 0xE4,
        /// <summary>Application specific, type 5.</summary>
        App5 = 0xE5,
        /// <summary>Application specific, type 6.</summary>
        App6 = 0xE6,
        /// <summary>Application specific, type 7.</summary>
        App7 = 0xE7,
        /// <summary>Application specific, type 8.</summary>
        App8 = 0xE8,
        /// <summary>Application specific, type 9.</summary>
        App9 = 0xE9,
        /// <summary>Application specific, type A. Can contain Unicode comments, though <see cref="Com"/> is more commonly used for comments.</summary>
        AppA = 0xEA,
        /// <summary>Application specific, type B.</summary>
        AppB = 0xEB,
        /// <summary>Application specific, type C.</summary>
        AppC = 0xEC,
        /// <summary>Application specific, type D. Commonly contains IPTC, Photoshop data.</summary>
        AppD = 0xED,
        /// <summary>Application specific, type E. Commonly contains Adobe data.</summary>
        AppE = 0xEE,
        /// <summary>Application specific, type F.</summary>
        AppF = 0xEF,
        /// <summary>JPEG comment (text).</summary>
        Com = 0xFE
    }
/// <summary>
/// Extension methods for <see cref="JpegSegmentType"/> enum.
/// </summary>
public static class JpegSegmentTypeExtensions
{
/// <summary>Gets whether this JPEG segment type might contain metadata.</summary>
/// <remarks>Used to exclude large image-data-only segment from certain types of processing.</remarks>
public static bool CanContainMetadata(this JpegSegmentType type)
{
switch (type)
{
case JpegSegmentType.Soi:
case JpegSegmentType.Dqt:
case JpegSegmentType.Dht:
return false;
default:
return true;
}
}
/// <summary>Gets JPEG segment types that might contain metadata.</summary>
#if NET35
public static IEnumerable<JpegSegmentType> CanContainMetadataTypes { get; }
#else
public static IReadOnlyList<JpegSegmentType> CanContainMetadataTypes { get; }
#endif
= Enum.GetValues(typeof(JpegSegmentType)).Cast<JpegSegmentType>().Where(type => type.CanContainMetadata()).ToList();
/// <summary>Gets whether this JPEG segment type's marker is followed by a length indicator.</summary>
public static bool ContainsPayload(this JpegSegmentType type)
{
// ReSharper disable once SwitchStatementMissingSomeCases
switch (type)
{
case JpegSegmentType.Soi:
case JpegSegmentType.Eoi:
case JpegSegmentType.Rst0:
case JpegSegmentType.Rst1:
case JpegSegmentType.Rst2:
case JpegSegmentType.Rst3:
case JpegSegmentType.Rst4:
case JpegSegmentType.Rst5:
case JpegSegmentType.Rst6:
case JpegSegmentType.Rst7:
return false;
default:
return true;
}
}
/// <summary>Gets whether this JPEG segment is intended to hold application specific data.</summary>
public static bool IsApplicationSpecific(this JpegSegmentType type) => type >= JpegSegmentType.App0 && type <= JpegSegmentType.AppF;
}
}
| 39.120996 | 152 | 0.625125 | [
"Apache-2.0"
] | XelaNimed/metadata-extractor-dotnet | MetadataExtractor/Formats/Jpeg/JpegSegmentType.cs | 10,993 | C# |
namespace TestNetCore.Microsoft.Extensions.DependencyInjection.Helpers
{
    /// <summary>
    /// Trivial concrete implementation of <see cref="IInterface"/>.
    /// NOTE(review): presumably a helper used to exercise dependency-injection
    /// registration/resolution (the namespace suggests DI tests) — confirm usage.
    /// </summary>
    public class Class : IInterface
    {
    }
} | 19.833333 | 72 | 0.764706 | [
"Apache-2.0"
] | DrunkRussianGun/CSharpTests | TestAspNetCore/Microsoft/Extensions/DependencyInjection/Helpers/Service.cs | 121 | C# |
using QuanLyNhaSach.Objects;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace QuanLyNhaSach.Bus
{
    /// <summary>
    /// Deletion helpers that keep related records and stock counts consistent
    /// when bills or "added book" lists are removed.
    /// </summary>
    public class DeleteData
    {
        /// <summary>
        /// Deletes a persisted bill: returns each bill item's quantity to the
        /// book's stock, then removes the bill's items and the bill itself.
        /// </summary>
        public static void DeleteOldBill(Bill bill)
        {
            // ID <= 0 means the bill was never saved, so there is nothing to delete.
            if (bill.ID > 0)
            {
                foreach (var item in bill.BillItems)
                {
                    if (item.Number>0)
                    {
                        // Restock: add the billed quantity back onto the book's count.
                        Adapters.BookAdapter.UpdateNumber(item.Book.ID, item.Book.Number + item.Number);
                    }
                }
                // Remove child rows first, then the bill record itself.
                Adapters.BillItemAdapter.ClearBillItems(bill.ID);
                Adapters.BillAdapter.DeleteBill(bill.ID);
            }
        }
        /// <summary>
        /// Deletes a "list of added books" manager record; if it still holds
        /// entries, each entry is flagged deleted and saved before the record
        /// itself is removed.
        /// </summary>
        public static void DeleteManagerListAddedBook(ManagerListAddedBook mlab)
        {
            if (mlab.ListAddedBook.Count <= 0)
                Adapters.ManagerListAddedBookAdapter.Delete(mlab);
            else
            {
                foreach (AddedBook item in mlab.ListAddedBook)
                {
                    item.IsDeletedItem = true;
                    // NOTE(review): SaveChangesListAddedBook(mlab) runs once per item;
                    // a single save after the loop looks sufficient — confirm intent.
                    Bus.SaveChanges.SaveChangesListAddedBook(mlab);
                }
                // Both branches end by deleting the manager record itself.
                Adapters.ManagerListAddedBookAdapter.Delete(mlab);
            }
        }
    }
}
| 29.340909 | 104 | 0.529822 | [
"MIT"
] | HCB2-NPT/QuanLyNhaSach | QuanLyNhaSach/Bus/DeleteData.cs | 1,293 | C# |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Hooker.WebApi.Areas.HelpPage.SampleGeneration
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
        public HelpPageSampleGenerator()
        {
            // Start with empty sample/type registries; callers populate them via
            // the public dictionary properties.
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // Seed the factory pipeline with the default object generator; per the
            // property docs, overrides go at index 0 and fallbacks are appended.
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langref="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException(nameof(api));
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
var enumerable = parameterNames as string[] ?? parameterNames.ToArray();
Type type = ResolveType(api, controllerName, actionName, enumerable, sampleDirection, out formatters);
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, enumerable, sampleDirection);
var samples = actionSamples.ToDictionary(actionSample => actionSample.Key.MediaType, actionSample => WrapSampleIfString(actionSample.Value));
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, enumerable, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException(nameof(sampleDirection), (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException(nameof(api));
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter?.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns></returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException(nameof(formatter));
}
if (mediaType == null)
{
throw new ArgumentNullException(nameof(mediaType));
}
object sample;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
ms?.Dispose();
content?.Dispose();
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
return aggregateException != null ? aggregateException.Flatten().InnerException : exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
return from sample in ActionSamples let sampleKey = sample.Key where String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection select sample;
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
} | 48.933806 | 229 | 0.590657 | [
"MIT"
] | TimMurphy/debug-http-post | source/Hooker.WebApi/Areas/HelpPage/SampleGeneration/HelpPageSampleGenerator.cs | 20,699 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using ILRuntime.CLR.TypeSystem;
using ILRuntime.CLR.Method;
using ILRuntime.Runtime.Enviorment;
using ILRuntime.Runtime.Intepreter;
using ILRuntime.Runtime.Stack;
using ILRuntime.Reflection;
using ILRuntime.CLR.Utils;
namespace ILRuntime.Runtime.Generated
{
    // Auto-generated ILRuntime CLR binding for ETModel.EventAttribute.
    // Redirects calls made from hot-update (interpreted) code to the real CLR member,
    // avoiding reflection on every invocation.
    unsafe class ETModel_EventAttribute_Binding
    {
        // Registers the redirection for the EventAttribute.Type property getter
        // ("get_Type") with the ILRuntime app domain.
        public static void Register(ILRuntime.Runtime.Enviorment.AppDomain app)
        {
            BindingFlags flag = BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static | BindingFlags.DeclaredOnly;
            MethodBase method;
            Type[] args;
            Type type = typeof(ETModel.EventAttribute);
            args = new Type[]{};
            method = type.GetMethod("get_Type", flag, null, args, null);
            app.RegisterCLRMethodRedirection(method, get_Type_0);
        }
        // Redirection target for EventAttribute.get_Type: pops the instance from the
        // interpreter's evaluation stack, invokes the property, and pushes the result back.
        static StackObject* get_Type_0(ILIntepreter __intp, StackObject* __esp, IList<object> __mStack, CLRMethod __method, bool isNewObj)
        {
            ILRuntime.Runtime.Enviorment.AppDomain __domain = __intp.AppDomain;
            StackObject* ptr_of_this_method;
            // One slot (the 'this' reference) is consumed; the return value reuses it.
            StackObject* __ret = ILIntepreter.Minus(__esp, 1);
            ptr_of_this_method = ILIntepreter.Minus(__esp, 1);
            ETModel.EventAttribute instance_of_this_method = (ETModel.EventAttribute)typeof(ETModel.EventAttribute).CheckCLRTypes(StackObject.ToObject(ptr_of_this_method, __domain, __mStack));
            __intp.Free(ptr_of_this_method);
            var result_of_this_method = instance_of_this_method.Type;
            return ILIntepreter.PushObject(__ret, __mStack, result_of_this_method);
        }
    }
}
| 34.134615 | 192 | 0.710423 | [
"MIT"
] | 13294029724/ET | Unity/Assets/Model/ILBinding/ETModel_EventAttribute_Binding.cs | 1,775 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the codeartifact-2018-09-22.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using System.Net;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.CodeArtifact.Model
{
/// <summary>
/// Container for the parameters to the DisposePackageVersions operation.
/// Deletes the assets in package versions and sets the package versions' status to <code>Disposed</code>.
/// A disposed package version cannot be restored in your repository because its assets
/// are deleted.
///
///
/// <para>
/// To view all disposed package versions in a repository, use <a href="https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html">
/// <code>ListPackageVersions</code> </a> and set the <a href="https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_ListPackageVersions.html#API_ListPackageVersions_RequestSyntax">
/// <code>status</code> </a> parameter to <code>Disposed</code>.
/// </para>
///
/// <para>
/// To view information about a disposed package version, use <a href="https://docs.aws.amazon.com/codeartifact/latest/APIReference/API_DescribePackageVersion.html">
/// <code>DescribePackageVersion</code> </a>..
/// </para>
/// </summary>
public partial class DisposePackageVersionsRequest : AmazonCodeArtifactRequest
{
private string _domain;
private string _domainOwner;
private PackageVersionStatus _expectedStatus;
private PackageFormat _format;
private string _awsNamespace;
private string _package;
private string _repository;
private Dictionary<string, string> _versionRevisions = new Dictionary<string, string>();
private List<string> _versions = new List<string>();
/// <summary>
/// Gets and sets the property Domain.
/// <para>
/// The name of the domain that contains the repository you want to dispose.
/// </para>
/// </summary>
[AWSProperty(Required=true, Min=2, Max=50)]
public string Domain
{
get { return this._domain; }
set { this._domain = value; }
}
// Check to see if Domain property is set
internal bool IsSetDomain()
{
return this._domain != null;
}
/// <summary>
/// Gets and sets the property DomainOwner.
/// <para>
/// The 12-digit account number of the AWS account that owns the domain. It does not
/// include dashes or spaces.
/// </para>
/// </summary>
[AWSProperty(Min=12, Max=12)]
public string DomainOwner
{
get { return this._domainOwner; }
set { this._domainOwner = value; }
}
// Check to see if DomainOwner property is set
internal bool IsSetDomainOwner()
{
return this._domainOwner != null;
}
/// <summary>
/// Gets and sets the property ExpectedStatus.
/// <para>
/// The expected status of the package version to dispose. Valid values are:
/// </para>
/// <ul> <li>
/// <para>
/// <code>Published</code>
/// </para>
/// </li> <li>
/// <para>
/// <code>Unfinished</code>
/// </para>
/// </li> <li>
/// <para>
/// <code>Unlisted</code>
/// </para>
/// </li> <li>
/// <para>
/// <code>Archived</code>
/// </para>
/// </li> <li>
/// <para>
/// <code>Disposed</code>
/// </para>
/// </li> </ul>
/// </summary>
public PackageVersionStatus ExpectedStatus
{
get { return this._expectedStatus; }
set { this._expectedStatus = value; }
}
// Check to see if ExpectedStatus property is set
internal bool IsSetExpectedStatus()
{
return this._expectedStatus != null;
}
/// <summary>
/// Gets and sets the property Format.
/// <para>
/// A format that specifies the type of package versions you want to dispose. The valid
/// values are:
/// </para>
/// <ul> <li>
/// <para>
/// <code>npm</code>
/// </para>
/// </li> <li>
/// <para>
/// <code>pypi</code>
/// </para>
/// </li> <li>
/// <para>
/// <code>maven</code>
/// </para>
/// </li> </ul>
/// </summary>
[AWSProperty(Required=true)]
public PackageFormat Format
{
get { return this._format; }
set { this._format = value; }
}
// Check to see if Format property is set
internal bool IsSetFormat()
{
return this._format != null;
}
/// <summary>
/// Gets and sets the property Namespace.
/// <para>
/// The namespace of the package. The package component that specifies its namespace
/// depends on its type. For example:
/// </para>
/// <ul> <li>
/// <para>
/// The namespace of a Maven package is its <code>groupId</code>.
/// </para>
/// </li> <li>
/// <para>
/// The namespace of an npm package is its <code>scope</code>.
/// </para>
/// </li> <li>
/// <para>
/// A Python package does not contain a corresponding component, so Python packages do
/// not have a namespace.
/// </para>
/// </li> </ul>
/// </summary>
[AWSProperty(Min=1, Max=255)]
public string Namespace
{
get { return this._awsNamespace; }
set { this._awsNamespace = value; }
}
// Check to see if Namespace property is set
internal bool IsSetNamespace()
{
return this._awsNamespace != null;
}
/// <summary>
/// Gets and sets the property Package.
/// <para>
/// The name of the package with the versions you want to dispose.
/// </para>
/// </summary>
[AWSProperty(Required=true, Min=1, Max=255)]
public string Package
{
get { return this._package; }
set { this._package = value; }
}
// Check to see if Package property is set
internal bool IsSetPackage()
{
return this._package != null;
}
/// <summary>
/// Gets and sets the property Repository.
/// <para>
/// The name of the repository that contains the package versions you want to dispose.
///
/// </para>
/// </summary>
[AWSProperty(Required=true, Min=2, Max=100)]
public string Repository
{
get { return this._repository; }
set { this._repository = value; }
}
// Check to see if Repository property is set
internal bool IsSetRepository()
{
return this._repository != null;
}
/// <summary>
/// Gets and sets the property VersionRevisions.
/// <para>
/// The revisions of the package versions you want to dispose.
/// </para>
/// </summary>
public Dictionary<string, string> VersionRevisions
{
get { return this._versionRevisions; }
set { this._versionRevisions = value; }
}
// Check to see if VersionRevisions property is set
internal bool IsSetVersionRevisions()
{
return this._versionRevisions != null && this._versionRevisions.Count > 0;
}
/// <summary>
/// Gets and sets the property Versions.
/// <para>
/// The versions of the package you want to dispose.
/// </para>
/// </summary>
[AWSProperty(Required=true)]
public List<string> Versions
{
get { return this._versions; }
set { this._versions = value; }
}
// Check to see if Versions property is set
internal bool IsSetVersions()
{
return this._versions != null && this._versions.Count > 0;
}
}
} | 32.30742 | 196 | 0.54457 | [
"Apache-2.0"
] | altso/aws-sdk-net | sdk/src/Services/CodeArtifact/Generated/Model/DisposePackageVersionsRequest.cs | 9,143 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated from a template.
//
// Manual changes to this file may cause unexpected behavior in your application.
// Manual changes to this file will be overwritten if the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace MVCStokTakibi.Models.Entity
{
using System;
using System.Collections.Generic;
    // Entity Framework entity for the TBLKATEGORILER (categories) table.
    // Partial so hand-written members can live in a separate file without being
    // overwritten when the model is regenerated.
    public partial class TBLKATEGORILER
    {
        // The navigation property below is virtual (for EF lazy-loading proxies), so
        // assigning it in the constructor triggers CA2214; the suppression is deliberate.
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
        public TBLKATEGORILER()
        {
            this.TBLURUNLER = new HashSet<TBLURUNLER>();
        }
        public short KATEGORIID { get; set; }    // primary key
        public string KATEGORIAD { get; set; }   // category display name
        // Navigation property: products belonging to this category (one-to-many).
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public virtual ICollection<TBLURUNLER> TBLURUNLER { get; set; }
    }
}
| 37.2 | 128 | 0.58871 | [
"MIT"
] | mhmtlger12/MVC_Stok_Takibi | MVCStokTakibi/Models/Entity/TBLKATEGORILER.cs | 1,116 | C# |
using System;
using System.Web.Http;
using System.Web.Mvc;
using NetFxWithDITest.Areas.HelpPage.ModelDescriptions;
using NetFxWithDITest.Areas.HelpPage.Models;
namespace NetFxWithDITest.Areas.HelpPage.Controllers
{
/// <summary>
/// The controller that will handle requests for the help page.
/// </summary>
public class HelpController : Controller
{
private const string ErrorViewName = "Error";
public HelpController()
: this(GlobalConfiguration.Configuration)
{
}
public HelpController(HttpConfiguration config)
{
Configuration = config;
}
public HttpConfiguration Configuration { get; private set; }
public ActionResult Index()
{
ViewBag.DocumentationProvider = Configuration.Services.GetDocumentationProvider();
return View(Configuration.Services.GetApiExplorer().ApiDescriptions);
}
public ActionResult Api(string apiId)
{
if (!String.IsNullOrEmpty(apiId))
{
HelpPageApiModel apiModel = Configuration.GetHelpPageApiModel(apiId);
if (apiModel != null)
{
return View(apiModel);
}
}
return View(ErrorViewName);
}
public ActionResult ResourceModel(string modelName)
{
if (!String.IsNullOrEmpty(modelName))
{
ModelDescriptionGenerator modelDescriptionGenerator = Configuration.GetModelDescriptionGenerator();
ModelDescription modelDescription;
if (modelDescriptionGenerator.GeneratedModels.TryGetValue(modelName, out modelDescription))
{
return View(modelDescription);
}
}
return View(ErrorViewName);
}
}
} | 30.222222 | 115 | 0.600315 | [
"Apache-2.0"
] | JonasSyrstad/Stardust.Rest | NetFxWithDITest/Areas/HelpPage/Controllers/HelpController.cs | 1,904 | C# |
using AltV.Net;
using AltV.Net.Data;
using AltV.Net.Enums;
using altvtutorial.Database;
using altvtutorial.MyEntitys;
using System.Text.RegularExpressions;
namespace altvtutorial {
class PlayerEvents : IScript {
//[ScriptEvent(ScriptEventType.EventName)] -> ServerEvent
//Alt.Emit() -> Server to Server
//Alt.EmitAllClient() -> Server to all Clients
//player.Emit() -> Server to Client
//[ClientEvent("eventname")] -> Client to Server
[ScriptEvent(ScriptEventType.PlayerConnect)]
public void OnPlayerConnect(MyPlayer player, string reason) {
player.Model = (uint)PedModel.FreemodeMale01;
player.Spawn(new Position(0, 0, 75), 0);
player.Emit("altvtutorial:configflags");
//Alt.Emit("eventname");
/*if (PlayerDatabase.DoesPlayerNameExists(player.Name)) {
player.LoadPlayer(player.Name);
} else {
player.CreatePlayer(player.Name, "1234");
}
player.SendNotification($"Cash: ~b~{player.Cash}$");*/
}
[ScriptEvent(ScriptEventType.PlayerDisconnect)]
public void OnPlayerDisconnect(MyPlayer player, string reason) {
if (player.IsLoggedIn) player.Save();
}
[ScriptEvent(ScriptEventType.PlayerEnterVehicle)]
public void OnPlayerEnterVehicle(MyVehicle vehicle, MyPlayer player, byte seat) {
vehicle.SecondaryColorRgb = new Rgba(255, 0, 0, 255);
player.SendNotification("Fahrzeug betreten!");
vehicle.RadioStation = (uint)RadioStation.FlyloFm;
}
[ScriptEvent(ScriptEventType.PlayerLeaveVehicle)]
public void OnPlayerLeaveVehicle(MyVehicle vehicle, MyPlayer player, byte seat) {
vehicle.SecondaryColorRgb = new Rgba(255, 255, 255, 255);
player.SendNotification("Fahrzeug verlassen!");
}
//[ServerEvent("eventname")]
[ClientEvent("alttutorial:loginAttempt")]
public void OnLoginAttempt(MyPlayer player, string username, string password) {
if (player.IsLoggedIn || username.Length < 4 || password.Length < 4) return;
//Vorname_Nachname
Regex regex = new Regex(@"([a-zA-Z]+)_([a-zA-Z]+)");
if(!regex.IsMatch(username)) {
player.Emit("alttutorial:loginError", 1, "Der Name muss dem Format: Vorname_Nachname entsprechen.");
return;
}
if(!PlayerDatabase.DoesPlayerNameExists(username)) {
player.Emit("alttutorial:loginError", 1, "Der Name ist nicht vergeben!");
return;
}
if(PlayerDatabase.CheckLoginDetails(username, password)) {
//Passwort ist korrekt
player.LoadPlayer(username);
player.Spawn(new Position(0, 0, 72), 0);
player.Emit("alttutorial:loginSuccess");
player.SendNotification("Erfolgreich eingeloggt!");
if (player.HasData("alttutorial:loginattempts")) player.DeleteData("alttutorial:loginattempts");
} else {
//Passwort ist nicht korrekt
player.Emit("alttutorial:loginError", 1, "Login Daten stimmen nicht überein!");
int attempts = 1;
if(player.HasData("alttutorial:loginattempts")) {
player.GetData("alttutorial:loginattempts", out attempts);
if (attempts == 2) player.Kick("Zu viele Loginversuche.");
else attempts++;
}
player.SetData("alttutorial:loginattempts", attempts);
}
}
[ClientEvent("alttutorial:registerAttempt")]
public void OnRegisterAttempt(MyPlayer player, string username, string password) {
if (player.IsLoggedIn || username.Length < 4 || password.Length < 4) return;
//Vorname_Nachname
Regex regex = new Regex(@"([a-zA-Z]+)_([a-zA-Z]+)");
if (!regex.IsMatch(username)) {
player.Emit("alttutorial:loginError", 2, "Der Name muss dem Format: Vorname_Nachname entsprechen.");
return;
}
if(PlayerDatabase.DoesPlayerNameExists(username)) {
player.Emit("alttutorial:loginError", 2, "Name ist bereits vergeben!");
} else {
player.CreatePlayer(username, password);
player.Spawn(new Position(0, 0, 72), 0);
player.Emit("alttutorial:loginSuccess");
}
}
}
}
| 37.362903 | 116 | 0.58774 | [
"Unlicense"
] | Flashrex/altv-tutorial | altv-tutorial/altvtutorial/altvtutorial/PlayerEvents.cs | 4,636 | C# |
namespace Mistletoe.Dispatcher
{
using NLog;
    // Internal holder for process-wide shared state of the dispatcher.
    class Helper
    {
        // Shared NLog logger; assigned elsewhere in the assembly before use.
        internal static Logger LoggerInstance;
    }
}
| 12.9 | 46 | 0.635659 | [
"MIT"
] | mossandlichens/Mistletoe | Mistletoe.Dispatcher/Helper.cs | 131 | C# |
//
// Copyright (c) .NET Foundation and Contributors
// Portions Copyright (c) Microsoft Corporation. All rights reserved.
// See LICENSE file in the project root for full license information.
//
namespace System
{
using Runtime.CompilerServices;
/// <summary>
/// Represents a time interval.
/// </summary>
/// <remarks>
/// A <see cref="TimeSpan "/> object represents a time interval (duration of time or elapsed time) that is measured as a positive or negative number of days, hours, minutes, seconds, and fractions of a second. The <see cref="TimeSpan "/> structure can also be used to represent the time of day, but only if the time is unrelated to a particular date. Otherwise, the <see cref="DateTime "/> structure should be used instead.
/// The value of a <see cref="TimeSpan"/> object is the number of ticks that equal the represented time interval. A tick is equal to 100 nanoseconds, or one ten-millionth of a second. The value of a <see cref="TimeSpan"/> object can range from <see cref="TimeSpan.MinValue"/> to <see cref="TimeSpan.MaxValue"/>.
/// </remarks>
[Serializable]
public struct TimeSpan
{
internal long _ticks;
internal const long MaxMilliSeconds = Int64.MaxValue / TicksPerMillisecond;
internal const long MinMilliSeconds = Int64.MinValue / TicksPerMillisecond;
/// <summary>
/// Represents the number of ticks in 1 millisecond. This field is constant.
/// </summary>
public const long TicksPerMillisecond = 10000;
private const double MillisecondsPerTick = 1.0 / TicksPerMillisecond;
/// <summary>
/// Represents the number of ticks in 1 second.
/// </summary>
public const long TicksPerSecond = TicksPerMillisecond * 1000; // 10.000.000
private const double SecondsPerTick = 1.0 / TicksPerSecond; // 0.0001
/// <summary>
/// Represents the number of ticks in 1 minute. This field is constant.
/// </summary>
public const long TicksPerMinute = TicksPerSecond * 60; // 600.000.000
private const double MinutesPerTick = 1.0 / TicksPerMinute; // 1.6666666666667e-9
/// <summary>
/// Represents the number of ticks in 1 hour. This field is constant.
/// </summary>
public const long TicksPerHour = TicksPerMinute * 60; // 36.000.000.000
private const double HoursPerTick = 1.0 / TicksPerHour; // 2.77777777777777778e-11
/// <summary>
/// Represents the number of ticks in 1 day. This field is constant.
/// </summary>
public const long TicksPerDay = TicksPerHour * 24; // 864.000.000.000
private const double DaysPerTick = 1.0 / TicksPerDay; // 1.1574074074074074074e-12
/// <summary>
/// Represents the zero <see cref="TimeSpan"/> value. This field is read-only.
/// </summary>
public static readonly TimeSpan Zero = new TimeSpan(0);
/// <summary>
/// Represents the maximum <see cref="TimeSpan"/> value. This field is read-only.
/// </summary>
public static readonly TimeSpan MaxValue = new TimeSpan(Int64.MaxValue);
/// <summary>
/// Represents the minimum <see cref="TimeSpan"/> value. This field is read-only.
/// </summary>
public static readonly TimeSpan MinValue = new TimeSpan(Int64.MinValue);
/// <summary>
/// Initializes a new instance of the <see cref="TimeSpan"/> structure to the specified number of ticks.
/// </summary>
/// <param name="ticks">A time period expressed in 100-nanosecond units.</param>
public TimeSpan(long ticks)
{
_ticks = ticks;
}
/// <summary>
/// Initializes a new instance of the <see cref="TimeSpan"/> structure to a specified number of hours, minutes, and seconds.
/// </summary>
/// <param name="hours">Number of hours.</param>
/// <param name="minutes">Number of minutes.</param>
/// <param name="seconds">Number of seconds.</param>
[MethodImpl(MethodImplOptions.InternalCall)]
public extern TimeSpan(int hours, int minutes, int seconds);
/// <summary>
/// Initializes a new instance of the <see cref="TimeSpan"/> structure to a specified number of days, hours, minutes, and seconds.
/// </summary>
/// <param name="days">Number of days.</param>
/// <param name="hours">Number of hours.</param>
/// <param name="minutes">Number of minutes.</param>
/// <param name="seconds">Number of seconds.</param>
[MethodImpl(MethodImplOptions.InternalCall)]
public extern TimeSpan(int days, int hours, int minutes, int seconds);
/// <summary>
/// Initializes a new instance of the <see cref="TimeSpan"/> structure to a specified number of days, hours, minutes, seconds and milliseconds.
/// </summary>
/// <param name="days">Number of days.</param>
/// <param name="hours">Number of hours.</param>
/// <param name="minutes">Number of minutes.</param>
/// <param name="seconds">Number of seconds.</param>
/// <param name="milliseconds">Number of milliseconds.</param>
[MethodImpl(MethodImplOptions.InternalCall)]
public extern TimeSpan(int days, int hours, int minutes, int seconds, int milliseconds);
/// <summary>
/// Gets the number of ticks that represent the value of the current <see cref="TimeSpan"/> structure.
/// </summary>
/// <value>The number of ticks contained in this instance. </value>
public long Ticks => _ticks;
/// <summary>
/// Gets the days component of the time interval represented by the current <see cref="TimeSpan"/> structure.
/// </summary>
/// <value>The day component of this instance. The return value can be positive or negative.</value>
public int Days => (int)(_ticks / TicksPerDay);
/// <summary>
/// Gets the hours component of the time interval represented by the current <see cref="TimeSpan"/> structure.
/// </summary>
/// <value>The hour component of this instance. The return value ranges from -23 through 23.</value>
public int Hours => (int)((_ticks / TicksPerHour) % 24);
/// <summary>
/// Gets the milliseconds component of the time interval represented by the current <see cref="TimeSpan"/> structure.
/// </summary>
/// <value>The millisecond component of this instance. The return value ranges from -999 through 999.</value>
public int Milliseconds => (int)((_ticks / TicksPerMillisecond) % 1000);
/// <summary>
/// Gets the minutes component of the time interval represented by the current <see cref="TimeSpan"/> structure.
/// </summary>
/// <value>The minute component of this instance. The return value ranges from -59 through 59.</value>
public int Minutes => (int)((_ticks / TicksPerMinute) % 60);
/// <summary>
/// Gets the seconds component of the time interval represented by the current <see cref="TimeSpan"/> structure.
/// </summary>
/// <value>The second component of this instance. The return value ranges from -59 through 59.</value>
public int Seconds => (int)((_ticks / TicksPerSecond) % 60);
/// <summary>
/// Gets the value of the current <see cref="TimeSpan"/> structure expressed in whole and fractional days.
/// </summary>
/// <value>The total number of days represented by this instance.</value>
public double TotalDays => (_ticks) * DaysPerTick;
/// <summary>
/// Gets the value of the current <see cref="TimeSpan"/> structure expressed in whole and fractional hours.
/// </summary>
/// <value>The total number of hours represented by this instance.</value>
public double TotalHours => _ticks * HoursPerTick;
/// <summary>
/// Gets the value of the current <see cref="TimeSpan"/> structure expressed in whole and fractional milliseconds.
/// </summary>
/// <value>The total number of milliseconds represented by this instance.</value>
public double TotalMilliseconds
{
get
{
double temp = _ticks * MillisecondsPerTick;
if (temp > MaxMilliSeconds)
return MaxMilliSeconds;
if (temp < MinMilliSeconds)
return MinMilliSeconds;
return temp;
}
}
/// <summary>
/// Gets the value of the current <see cref="TimeSpan"/> structure expressed in whole and fractional minutes.
/// </summary>
/// <value>The total number of minutes represented by this instance.</value>
public double TotalMinutes => _ticks * MinutesPerTick;
/// <summary>
/// Gets the value of the current <see cref="TimeSpan"/> structure expressed in whole and fractional seconds.
/// </summary>
/// <value>The total number of seconds represented by this instance.</value>
public double TotalSeconds => _ticks * SecondsPerTick;
/// <summary>
/// Returns a new <see cref="TimeSpan"/> object whose value is the sum of the specified <see cref="TimeSpan"/> object and this instance.
/// </summary>
/// <param name="ts">The time interval to add.</param>
/// <returns>A new object that represents the value of this instance plus the value of ts.</returns>
public TimeSpan Add(TimeSpan ts) => new TimeSpan(_ticks + ts._ticks);
/// <summary>
/// Compares two <see cref="TimeSpan"/> values and returns an integer that indicates whether the first value is shorter than, equal to, or longer than the second value.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>One of the following values :
/// <para>-1 if t1 is shorter than t2.</para>
/// <para>0 if t1 is equal to t2.</para>
/// <para>1 if t1 is longer than t2.</para>
/// </returns>
[MethodImpl(MethodImplOptions.InternalCall)]
public static extern int Compare(TimeSpan t1, TimeSpan t2);
/// <summary>
/// Compares this instance to a specified object and returns an integer that indicates whether this instance is shorter than, equal to, or longer than the specified object.
/// </summary>
/// <param name="value">An object to compare, or null.</param>
/// <returns>One of the following values :
/// <para>-1 if This instance is shorter than value.</para>
/// <para>0 if This instance is equal to value.</para>
/// <para>1 if This instance is longer than value or value is null.</para>
/// </returns>
[MethodImpl(MethodImplOptions.InternalCall)]
public extern int CompareTo(Object value);
/// <summary>
/// Returns a new <see cref="TimeSpan"/> object whose value is the absolute value of the current <see cref="TimeSpan"/> object.
/// </summary>
/// <returns>A new object whose value is the absolute value of the current <see cref="TimeSpan"/> object.</returns>
public TimeSpan Duration() => new TimeSpan(_ticks >= 0 ? _ticks : -_ticks);
/// <summary>
/// Returns a value indicating whether this instance is equal to a specified object.
/// </summary>
/// <param name="value">An object to compare with this instance.</param>
/// <returns>true if value is a <see cref="TimeSpan"/> object that represents the same time interval as the current <see cref="TimeSpan"/> structure; otherwise, false.</returns>
[MethodImpl(MethodImplOptions.InternalCall)]
public override extern bool Equals(Object value);
/// <summary>
/// Returns a value that indicates whether two specified instances of <see cref="TimeSpan"/> are equal.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the values of t1 and t2 are equal; otherwise, false.</returns>
[MethodImpl(MethodImplOptions.InternalCall)]
public static extern bool Equals(TimeSpan t1, TimeSpan t2);
/// <summary>
/// Returns a new <see cref="TimeSpan"/> object whose value is the negated value of this instance.
/// </summary>
/// <returns>A new object with the same numeric value as this instance, but with the opposite sign.</returns>
public TimeSpan Negate() => new TimeSpan(-_ticks);
/// <summary>
/// Returns a new <see cref="TimeSpan"/> object whose value is the difference between the specified <see cref="TimeSpan"/> object and this instance.
/// </summary>
/// <param name="ts">The time interval to be subtracted.</param>
/// <returns>A new time interval whose value is the result of the value of this instance minus the value of ts.</returns>
public TimeSpan Subtract(TimeSpan ts) => new TimeSpan(_ticks - ts._ticks);
/// <summary>
/// Returns a <see cref="TimeSpan"/> that represents a specified time, where the specification is in units of ticks.
/// </summary>
/// <param name="value">A number of ticks that represent a time.</param>
/// <returns>An object that represents value.</returns>
public static TimeSpan FromTicks(long value) => new TimeSpan(value);
/// <summary>
/// Returns a <see cref="TimeSpan"/> that represents a specified time, where the specification is in units of milliseconds.
/// </summary>
/// <param name="value">A number of milliseconds that represent a time.</param>
/// <returns>An object that represents value.</returns>
public static TimeSpan FromMilliseconds(long value) => new TimeSpan(TimeSpan.TicksPerMillisecond * value);
/// <summary>
/// Returns a <see cref="TimeSpan"/> that represents a specified time, where the specification is in units of seconds.
/// </summary>
/// <param name="value">A number of seconds that represent a time.</param>
/// <returns>An object that represents value.</returns>
public static TimeSpan FromSeconds(long value) => new TimeSpan(TimeSpan.TicksPerSecond * value);
/// <summary>
/// Returns a <see cref="TimeSpan"/> that represents a specified time, where the specification is in units of minute.
/// </summary>
/// <param name="value">A number of minute that represent a time.</param>
/// <returns>An object that represents value.</returns>
public static TimeSpan FromMinutes(long value) => new TimeSpan(TimeSpan.TicksPerMinute * value);
/// <summary>
/// Returns a <see cref="TimeSpan"/> that represents a specified time, where the specification is in units of hours.
/// </summary>
/// <param name="value">A number of hours that represent a time.</param>
/// <returns>An object that represents value.</returns>
public static TimeSpan FromHours(long value) => new TimeSpan(TimeSpan.TicksPerHour * value);
/// <summary>
/// Returns a <see cref="TimeSpan"/> that represents a specified time, where the specification is in units of days.
/// </summary>
/// <param name="value">A number of days that represent a time.</param>
/// <returns>An object that represents value.</returns>
public static TimeSpan FromDays(long value) => new TimeSpan(TimeSpan.TicksPerDay * value);
/// <summary>
/// Converts the value of the current <see cref="TimeSpan"/> object to its equivalent string representation.
/// </summary>
/// <returns>The string representation of the current <see cref="TimeSpan"/> value.</returns>
/// <remarks>The returned string is formatted with the "c" format specifier and has the following format: [-][d.]hh:mm:ss[.fffffff]</remarks>
[MethodImpl(MethodImplOptions.InternalCall)]
public override extern String ToString();
/// <summary>
/// Returns a <see cref="TimeSpan"/> whose value is the negated value of the specified instance.
/// </summary>
/// <param name="t">The time interval to be negated.</param>
/// <returns>An object that has the same numeric value as this instance, but the opposite sign.</returns>
public static TimeSpan operator -(TimeSpan t) => new TimeSpan(-t._ticks);
/// <summary>
/// Subtracts a specified <see cref="TimeSpan"/> from another specified TimeSpan.
/// </summary>
/// <param name="t1">The minuend.</param>
/// <param name="t2">The subtrahend.</param>
/// <returns>An object whose value is the result of the value of t1 minus the value of t2.</returns>
public static TimeSpan operator -(TimeSpan t1, TimeSpan t2) => new TimeSpan(t1._ticks - t2._ticks);
/// <summary>
/// Returns the specified instance of TimeSpan.
/// </summary>
/// <param name="t">The time interval to return.</param>
/// <returns>The time interval specified by t.</returns>
public static TimeSpan operator +(TimeSpan t) => t;
/// <summary>
/// Adds two specified <see cref="TimeSpan"/> instances.
/// </summary>
/// <param name="t1">The first time interval to add.</param>
/// <param name="t2">The second time interval to add.</param>
/// <returns>An object whose value is the sum of the values of t1 and t2.</returns>
public static TimeSpan operator +(TimeSpan t1, TimeSpan t2) => new TimeSpan(t1._ticks + t2._ticks);
/// <summary>
/// Indicates whether two <see cref="TimeSpan"/> instances are equal
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the values of t1 and t2 are equal; otherwise, false.</returns>
public static bool operator ==(TimeSpan t1, TimeSpan t2) => t1._ticks == t2._ticks;
/// <summary>
/// Indicates whether two <see cref="TimeSpan"/> instances are not equal.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the values of t1 and t2 are not equal; otherwise, false.</returns>
public static bool operator !=(TimeSpan t1, TimeSpan t2) => t1._ticks != t2._ticks;
/// <summary>
/// Indicates whether a specified <see cref="TimeSpan"/> is less than another specified TimeSpan.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the value of t1 is less than the value of t2; otherwise, false.</returns>
public static bool operator <(TimeSpan t1, TimeSpan t2) => t1._ticks < t2._ticks;
/// <summary>
/// Indicates whether a specified <see cref="TimeSpan"/> is less than or equal to another specified TimeSpan.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the value of t1 is less than or equal to the value of t2; otherwise, false.</returns>
public static bool operator <=(TimeSpan t1, TimeSpan t2) => t1._ticks <= t2._ticks;
/// <summary>
/// Indicates whether a specified <see cref="TimeSpan"/> is greater than another specified TimeSpan.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the value of t1 is greater than the value of t2; otherwise, false.</returns>
public static bool operator >(TimeSpan t1, TimeSpan t2) => t1._ticks > t2._ticks;
/// <summary>
/// Indicates whether a specified <see cref="TimeSpan"/> is greater than or equal to another specified TimeSpan.
/// </summary>
/// <param name="t1">The first time interval to compare.</param>
/// <param name="t2">The second time interval to compare.</param>
/// <returns>true if the value of t1 is greater than or equal to the value of t2; otherwise, false.</returns>
public static bool operator >=(TimeSpan t1, TimeSpan t2) => t1._ticks >= t2._ticks;
/// <summary>
/// Returns the hash code for this instance.
/// </summary>
/// <returns>A 32-bit signed integer hash code.</returns>
public override int GetHashCode()
{
return (int)_ticks ^ (int)(_ticks >> 32);
}
}
}
| 54.188295 | 427 | 0.627958 | [
"MIT"
] | Eclo/lib-CoreLibrary | nanoFramework.CoreLibrary/System/TimeSpan.cs | 21,296 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Text;
namespace WebResume.Models
{
/// <summary>
/// A simple named string entry with an optional free-text description.
/// Carries data-annotation attributes ([Key], [Required]) for validation/keying.
/// </summary>
public class StringItem
{
    /// <summary>Unique identifier (annotated as the key).</summary>
    [Key]
    public int Id { get; set; }

    /// <summary>Name of the item; required by model validation.</summary>
    [Required]
    public string Name { get; set; }

    /// <summary>Optional description; may be null.</summary>
    public string Description { get; set; }
}
}
| 20.176471 | 47 | 0.638484 | [
"MIT"
] | adamrebacz/WebResume | WebResume.Models/StringItem.cs | 345 | C# |
namespace Nager.Country.CountryInfo
{
/// <summary>
/// Lesotho
/// </summary>
public class LesothoInfo : ICountryInfo
{
    /// <summary>Common (short) country name.</summary>
    public string CommonName => "Lesotho";

    /// <summary>Official (long-form) country name.</summary>
    public string OfficialName => "Kingdom of Lesotho";

    /// <summary>Country-name translations, one entry per supported language code.</summary>
    public Translation[] Translations => new []
    {
        new Translation(LanguageCode.AR, "ليسوتو"),
        new Translation(LanguageCode.AZ, "Lesoto"),
        new Translation(LanguageCode.BE, "Лесота"),
        new Translation(LanguageCode.BG, "Лесото"),
        new Translation(LanguageCode.BS, "Lesoto"),
        new Translation(LanguageCode.CA, "Lesotho"),
        new Translation(LanguageCode.CS, "Lesotho"),
        new Translation(LanguageCode.DA, "Lesotho"),
        new Translation(LanguageCode.DE, "Lesotho"),
        new Translation(LanguageCode.EL, "Βασίλειο του Λεσότο"),
        new Translation(LanguageCode.EN, "Lesotho"),
        new Translation(LanguageCode.ES, "Lesotho"),
        new Translation(LanguageCode.ET, "Lesotho"),
        new Translation(LanguageCode.FA, "لسوتو"),
        new Translation(LanguageCode.FI, "Lesotho"),
        new Translation(LanguageCode.FR, "Lesotho"),
        new Translation(LanguageCode.HE, "לסוטו"),
        new Translation(LanguageCode.HR, "Lesoto"),
        new Translation(LanguageCode.HU, "Lesotho"),
        new Translation(LanguageCode.HY, "Լեսոտո"),
        new Translation(LanguageCode.ID, "Lesotho"),
        new Translation(LanguageCode.IT, "Lesotho"),
        new Translation(LanguageCode.JA, "レソト"),
        new Translation(LanguageCode.KA, "ლესოთო"),
        new Translation(LanguageCode.KK, "Лесото"),
        new Translation(LanguageCode.KO, "레소토"),
        new Translation(LanguageCode.KY, "Лесото"),
        new Translation(LanguageCode.LT, "Lesotas"),
        new Translation(LanguageCode.LV, "Lesoto"),
        new Translation(LanguageCode.MK, "Лесото"),
        new Translation(LanguageCode.MN, "Лесото"),
        new Translation(LanguageCode.NB, "Lesotho"),
        new Translation(LanguageCode.NL, "Lesotho"),
        new Translation(LanguageCode.NN, "Lesotho"),
        new Translation(LanguageCode.PL, "Lesotho"),
        new Translation(LanguageCode.PT, "Lesoto"),
        new Translation(LanguageCode.RO, "Lesotho"),
        new Translation(LanguageCode.RU, "Лесото"),
        new Translation(LanguageCode.SK, "Lesotho"),
        new Translation(LanguageCode.SL, "Lesoto"),
        new Translation(LanguageCode.SR, "Лесото"),
        new Translation(LanguageCode.SV, "Lesotho"),
        new Translation(LanguageCode.TR, "Lesoto"),
        new Translation(LanguageCode.UK, "Лесото"),
        new Translation(LanguageCode.UZ, "Lesoto"),
        new Translation(LanguageCode.ZH, "赖索托"),
    };

    /// <summary>ISO 3166-1 alpha-2 code.</summary>
    public Alpha2Code Alpha2Code => Alpha2Code.LS;

    /// <summary>ISO 3166-1 alpha-3 code.</summary>
    public Alpha3Code Alpha3Code => Alpha3Code.LSO;

    /// <summary>ISO 3166-1 numeric code.</summary>
    public int NumericCode => 426;

    /// <summary>Top-level internet domain(s).</summary>
    public string[] TLD => new [] { ".ls" };

    /// <summary>Continental region.</summary>
    public Region Region => Region.Africa;

    /// <summary>Sub-region within the continent.</summary>
    public SubRegion SubRegion => SubRegion.SouthernAfrica;

    /// <summary>Alpha-2 codes of bordering countries (Lesotho is an enclave of South Africa).</summary>
    public Alpha2Code[] BorderCountrys => new Alpha2Code[]
    {
        Alpha2Code.ZA,
    };

    /// <summary>ISO 4217 currency codes in use.</summary>
    public string[] Currencies => new [] { "LSL", "ZAR" };

    /// <summary>International telephone calling code(s).</summary>
    public string[] CallingCodes => new [] { "266" };
}
}
| 45.103896 | 68 | 0.600346 | [
"MIT"
] | MrGrabazu/Nager.Country | src/Nager.Country/CountryInfo/LesothoInfo.cs | 3,596 | C# |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading.Tasks;
using GraphQL.Language.AST;
using GraphQL.Resolvers;
using GraphQL.Types;
using static GraphQL.Execution.ExecutionHelper;
namespace GraphQL.Execution
{
/// <summary>
/// Base class for GraphQL execution strategies. Builds a tree of
/// <c>ExecutionNode</c>s for the requested operation, delegates the
/// tree traversal to <see cref="ExecuteNodeTreeAsync"/> (ordering/parallelism
/// is the subclass's choice), then materializes the result data.
/// </summary>
public abstract class ExecutionStrategy : IExecutionStrategy
{
    /// <summary>
    /// Executes the operation: resolves the operation's root type, builds the
    /// root execution node, runs the node tree, and converts it to result data.
    /// </summary>
    public virtual async Task<ExecutionResult> ExecuteAsync(ExecutionContext context)
    {
        var rootType = GetOperationRootType(context.Document, context.Schema, context.Operation);
        var rootNode = BuildExecutionRootNode(context, rootType);

        await ExecuteNodeTreeAsync(context, rootNode)
            .ConfigureAwait(false);

        // After the entire node tree has been executed, get the values
        var data = rootNode.ToValue();

        return new ExecutionResult
        {
            Data = data
        }.With(context);
    }

    /// <summary>
    /// Strategy hook: executes every node in the tree rooted at
    /// <paramref name="rootNode"/>. Subclasses define the traversal order.
    /// </summary>
    protected abstract Task ExecuteNodeTreeAsync(ExecutionContext context, ObjectExecutionNode rootNode);

    /// <summary>
    /// Creates the root node for the operation, seeded with the context's root
    /// value, and populates its immediate child field nodes.
    /// </summary>
    public static RootExecutionNode BuildExecutionRootNode(ExecutionContext context, IObjectGraphType rootType)
    {
        var root = new RootExecutionNode(rootType)
        {
            Result = context.RootValue
        };

        var fields = CollectFields(
            context,
            rootType,
            context.Operation.SelectionSet);

        SetSubFieldNodes(context, root, fields);

        return root;
    }

    /// <summary>
    /// Collects the fields selected on <paramref name="parent"/>'s selection set
    /// and builds a child node for each.
    /// </summary>
    public static void SetSubFieldNodes(ExecutionContext context, ObjectExecutionNode parent)
    {
        var fields = CollectFields(context, parent.GetObjectGraphType(context.Schema), parent.Field?.SelectionSet);
        SetSubFieldNodes(context, parent, fields);
    }

    /// <summary>
    /// Builds one child execution node per collected field and attaches them to
    /// <paramref name="parent"/>. Fields skipped by directives (@skip/@include)
    /// or without a matching field definition are silently omitted.
    /// </summary>
    public static void SetSubFieldNodes(ExecutionContext context, ObjectExecutionNode parent, Dictionary<string, Field> fields)
    {
        var parentType = parent.GetObjectGraphType(context.Schema);

        // Presized to avoid rehashing; keyed by the field's response key.
        var subFields = new Dictionary<string, ExecutionNode>(fields.Count);

        foreach (var kvp in fields)
        {
            var name = kvp.Key;
            var field = kvp.Value;

            if (!ShouldIncludeNode(context, field.Directives))
                continue;

            var fieldDefinition = GetFieldDefinition(context.Document, context.Schema, parentType, field);

            if (fieldDefinition == null)
                continue;

            var node = BuildExecutionNode(parent, fieldDefinition.ResolvedType, field, fieldDefinition);

            if (node == null)
                continue;

            subFields[kvp.Key] = node;
        }

        parent.SubFields = subFields;
    }

    /// <summary>
    /// Builds one child node per element of <paramref name="parent"/>'s resolved
    /// list result. Null elements under a non-null item type produce an
    /// execution error; null elements under a nullable item type become null
    /// value nodes.
    /// </summary>
    public static void SetArrayItemNodes(ExecutionContext context, ArrayExecutionNode parent)
    {
        var listType = (ListGraphType)parent.GraphType;
        var itemType = listType.ResolvedType;

        // Unwrap NonNull to get the element type used for child node construction.
        if (itemType is NonNullGraphType nonNullGraphType)
            itemType = nonNullGraphType.ResolvedType;

        if (!(parent.Result is IEnumerable data))
        {
            var error = new ExecutionError("User error: expected an IEnumerable list though did not find one.");
            throw error;
        }

        var index = 0;

        // Presize when the element count is cheaply available.
        var arrayItems = (data is ICollection collection)
            ? new List<ExecutionNode>(collection.Count)
            : new List<ExecutionNode>();

        foreach (var d in data)
        {
            var path = AppendPath(parent.Path, (index++).ToString());

            if (d != null)
            {
                var node = BuildExecutionNode(parent, itemType, parent.Field, parent.FieldDefinition, path);
                node.Result = d;

                if (node is ObjectExecutionNode objectNode)
                {
                    SetSubFieldNodes(context, objectNode);
                }
                else if (node is ArrayExecutionNode arrayNode)
                {
                    SetArrayItemNodes(context, arrayNode);
                }

                arrayItems.Add(node);
            }
            else
            {
                if (listType.ResolvedType is NonNullGraphType)
                {
                    // A null element inside a [T!] list aborts the rest of the list.
                    var error = new ExecutionError(
                        "Cannot return null for non-null type."
                        + $" Field: {parent.Name}, Type: {parent.FieldDefinition.ResolvedType}.");

                    error.AddLocation(parent.Field, context.Document);
                    error.Path = path;
                    context.Errors.Add(error);
                    return;
                }

                var valueExecutionNode = new ValueExecutionNode(parent, itemType, parent.Field, parent.FieldDefinition, path)
                {
                    Result = null
                };

                arrayItems.Add(valueExecutionNode);
            }
        }

        parent.Items = arrayItems;
    }

    /// <summary>
    /// Creates the execution node matching the (unwrapped) graph type: array
    /// nodes for lists, object nodes for object and abstract types, value nodes
    /// for scalars.
    /// </summary>
    public static ExecutionNode BuildExecutionNode(ExecutionNode parent, IGraphType graphType, Field field, FieldType fieldDefinition, string[] path = null)
    {
        // Default the path to parent-path + field name when not supplied.
        path = path ?? AppendPath(parent.Path, field.Name);

        if (graphType is NonNullGraphType nonNullFieldType)
            graphType = nonNullFieldType.ResolvedType;

        switch (graphType)
        {
            case ListGraphType listGraphType:
                return new ArrayExecutionNode(parent, graphType, field, fieldDefinition, path);

            // Object and abstract (interface/union) types both map to object nodes;
            // the concrete type of an abstract result is resolved later.
            case IObjectGraphType objectGraphType:
                return new ObjectExecutionNode(parent, graphType, field, fieldDefinition, path);

            case IAbstractGraphType abstractType:
                return new ObjectExecutionNode(parent, graphType, field, fieldDefinition, path);

            case ScalarGraphType scalarType:
                return new ValueExecutionNode(parent, graphType, field, fieldDefinition, path);

            default:
                throw new InvalidOperationException($"Unexpected type: {graphType}");
        }
    }

    /// <summary>
    /// Execute a single node
    /// </summary>
    /// <remarks>
    /// Builds child nodes, but does not execute them.
    /// Resolver failures are converted to <see cref="ExecutionError"/>s on the
    /// context (unless ThrowOnUnhandledException is set); the node's result is
    /// then null.
    /// </remarks>
    protected virtual async Task<ExecutionNode> ExecuteNodeAsync(ExecutionContext context, ExecutionNode node)
    {
        context.CancellationToken.ThrowIfCancellationRequested();

        // Skip nodes that already have a result (e.g. re-entrancy).
        if (node.IsResultSet)
            return node;

        ResolveFieldContext resolveContext = null;

        try
        {
            var arguments = GetArgumentValues(context.Schema, node.FieldDefinition.Arguments, node.Field.Arguments, context.Variables);
            var subFields = SubFieldsFor(context, node.FieldDefinition.ResolvedType, node.Field);

            resolveContext = new ResolveFieldContext
            {
                FieldName = node.Field.Name,
                FieldAst = node.Field,
                FieldDefinition = node.FieldDefinition,
                ReturnType = node.FieldDefinition.ResolvedType,
                ParentType = node.GetParentType(context.Schema),
                Arguments = arguments,
                Source = node.Source,
                Schema = context.Schema,
                Document = context.Document,
                Fragments = context.Fragments,
                RootValue = context.RootValue,
                UserContext = context.UserContext,
                Operation = context.Operation,
                Variables = context.Variables,
                CancellationToken = context.CancellationToken,
                Metrics = context.Metrics,
                Errors = context.Errors,
                Path = node.Path,
                SubFields = subFields
            };

            // Fall back to NameFieldResolver when the field has no explicit resolver.
            var resolver = node.FieldDefinition.Resolver ?? new NameFieldResolver();
            var result = resolver.Resolve(resolveContext);

            // Unwrap Task results from async resolvers.
            if (result is Task task)
            {
                await task.ConfigureAwait(false);
                result = task.GetResult();
            }

            node.Result = result;

            ValidateNodeResult(context, node);

            // Build child nodes
            if (node.Result != null)
            {
                if (node is ObjectExecutionNode objectNode)
                {
                    SetSubFieldNodes(context, objectNode);
                }
                else if (node is ArrayExecutionNode arrayNode)
                {
                    SetArrayItemNodes(context, arrayNode);
                }
            }
        }
        catch (ExecutionError error)
        {
            error.AddLocation(node.Field, context.Document);
            error.Path = node.Path;
            context.Errors.Add(error);

            node.Result = null;
        }
        catch (Exception ex)
        {
            if (context.ThrowOnUnhandledException)
                throw;

            // Give the host a chance to inspect/replace the exception before wrapping it.
            if (context.UnhandledExceptionDelegate != null)
            {
                var exceptionContext = new UnhandledExceptionContext(context, resolveContext, ex);
                context.UnhandledExceptionDelegate(exceptionContext);
                ex = exceptionContext.Exception;
            }

            var error = new ExecutionError($"Error trying to resolve {node.Name}.", ex);
            error.AddLocation(node.Field, context.Document);
            error.Path = node.Path;
            context.Errors.Add(error);

            node.Result = null;
        }

        return node;
    }

    /// <summary>
    /// Validates a resolved node value against its graph type: non-null types
    /// reject null, abstract types must resolve to a valid possible object
    /// type, and an object type's IsTypeOf check (when present) must pass.
    /// Throws <see cref="ExecutionError"/> on violation.
    /// </summary>
    protected virtual void ValidateNodeResult(ExecutionContext context, ExecutionNode node)
    {
        var result = node.Result;

        IGraphType fieldType = node.FieldDefinition.ResolvedType;
        var objectType = fieldType as IObjectGraphType;

        if (fieldType is NonNullGraphType nonNullType)
        {
            if (result == null)
            {
                throw new ExecutionError("Cannot return null for non-null type."
                    + $" Field: {node.Name}, Type: {nonNullType}.");
            }

            objectType = nonNullType.ResolvedType as IObjectGraphType;
        }

        if (result == null)
        {
            return;
        }

        if (fieldType is IAbstractGraphType abstractType)
        {
            objectType = abstractType.GetObjectType(result, context.Schema);

            if (objectType == null)
            {
                throw new ExecutionError(
                    $"Abstract type {abstractType.Name} must resolve to an Object type at " +
                    $"runtime for field {node.Parent.GraphType.Name}.{node.Name} " +
                    $"with value '{result}', received 'null'.");
            }

            if (!abstractType.IsPossibleType(objectType))
            {
                throw new ExecutionError($"Runtime Object type \"{objectType}\" is not a possible type for \"{abstractType}\".");
            }
        }

        if (objectType?.IsTypeOf != null && !objectType.IsTypeOf(result))
        {
            throw new ExecutionError($"Expected value of type \"{objectType}\" for \"{objectType.Name}\" but got: {result}.");
        }
    }

    /// <summary>
    /// Notifies all registered document listeners before the strategy awaits
    /// the next execution step.
    /// </summary>
    protected virtual async Task OnBeforeExecutionStepAwaitedAsync(ExecutionContext context)
    {
        foreach (var listener in context.Listeners)
        {
            await listener.BeforeExecutionStepAwaitedAsync(context.UserContext, context.CancellationToken)
                .ConfigureAwait(false);
        }
    }
}
}
| 36.949102 | 160 | 0.535775 | [
"MIT"
] | MikeyFriedChicken/graphql-dotnet | src/GraphQL/Execution/ExecutionStrategy.cs | 12,341 | C# |
using System;
using System.Collections.Generic;
using System.Data;
namespace SVFileMapper.Extensions
{
/// <summary>
/// Describes a single column whose value could not be converted during parsing.
/// </summary>
public class FailedColumnConversion
{
    /// <summary>The column that failed to convert; null when unknown.</summary>
    public DataColumn? Column { get; set; }

    /// <summary>Human-readable failure reason; empty string by default.</summary>
    public string? Reason { get; set; } = "";
}
/// <summary>
/// A source row that could not be mapped, together with the per-column
/// conversion failures that caused it.
/// </summary>
public class UnmatchedResult
{
    /// <summary>The row that failed to map; null when unavailable.</summary>
    public DataRow? Row { get; set; }

    /// <summary>Details of each failed column conversion; empty by default.</summary>
    public IEnumerable<FailedColumnConversion> FailedColumnConversions { get; set; } =
        Array.Empty<FailedColumnConversion>();
}
/// <summary>
/// Outcome of a parse run: the objects that mapped successfully plus the rows
/// that did not. Supports deconstruction into (parsed, failed).
/// </summary>
public sealed class ParseResults<T>
{
    /// <summary>Rows successfully converted into <typeparamref name="T"/> instances.</summary>
    public IEnumerable<T> Matched { get; }

    /// <summary>Rows that could not be converted, with failure details.</summary>
    public IEnumerable<UnmatchedResult> UnmatchedLines { get; }

    /// <summary>Wraps the matched and unmatched results of a parse run.</summary>
    public ParseResults(IEnumerable<T> matched, IEnumerable<UnmatchedResult> unmatchedLines)
        => (Matched, UnmatchedLines) = (matched, unmatchedLines);

    /// <summary>Deconstructs into the matched objects and the unmatched rows.</summary>
    public void Deconstruct(out IEnumerable<T> parsed, out IEnumerable<UnmatchedResult> failed)
        => (parsed, failed) = (Matched, UnmatchedLines);
}
/// <summary>
/// Result of attempting to cast one <see cref="DataRow"/> into a
/// <typeparamref name="T"/>: the parsed object on success, or the failure
/// details otherwise.
/// </summary>
public sealed class CastResult<T>
{
    /// <summary>True when the row converted cleanly.</summary>
    public bool Success { get; }

    /// <summary>The converted object (meaningful only when <see cref="Success"/> is true).</summary>
    public T ParsedObject { get; }

    /// <summary>The source row the cast was attempted on.</summary>
    public DataRow Row { get; }

    /// <summary>Per-column failures; never null (defaults to empty).</summary>
    public IEnumerable<FailedColumnConversion> FailedColumnConversions { get; }

    /// <summary>Overall failure reason; empty string by default.</summary>
    public string FailureReason { get; }

    /// <summary>Captures the outcome of a single row cast.</summary>
    public CastResult(bool success, T parsedObject, DataRow row,
        IEnumerable<FailedColumnConversion>? failedColumnConversions = null,
        string failureReason = "")
    {
        Success = success;
        ParsedObject = parsedObject;
        Row = row;
        // Normalize a null collection to an empty one so callers never null-check.
        FailedColumnConversions = failedColumnConversions ?? Array.Empty<FailedColumnConversion>();
        FailureReason = failureReason;
    }
}
internal static class Extensions
{
    /// <summary>
    /// Splits <paramref name="source"/> into two lists: the elements that
    /// satisfy <paramref name="predicate"/> and those that do not.
    /// Input order is preserved within each list.
    /// </summary>
    public static (IReadOnlyList<TSource> Satisfied, IReadOnlyList<TSource> Falsified) Partition<TSource>(
        this IEnumerable<TSource> source, Func<TSource, bool> predicate)
    {
        var matching = new List<TSource>();
        var rest = new List<TSource>();

        foreach (TSource item in source)
        {
            if (predicate(item))
                matching.Add(item);
            else
                rest.Add(item);
        }

        return (matching, rest);
    }
}
} | 32.585714 | 110 | 0.624288 | [
"MIT"
] | simon-curtis/SVFileMapper | SVFileMapper/Extensions/Extensions.cs | 2,283 | C# |
// (C) Copyright 2021 by
//
using System;
using Autodesk.AutoCAD.Runtime;
using Autodesk.AutoCAD.ApplicationServices;
using Autodesk.AutoCAD.DatabaseServices;
using Autodesk.AutoCAD.Geometry;
using Autodesk.AutoCAD.EditorInput;
using System.Collections;
using System.Linq;
using System.Text.RegularExpressions;
using UtilityClass;
using System.Xml;
using System.IO;
// This line is not mandatory, but improves loading performances
[assembly: CommandClass(typeof(SetViewPort.MyCommands))]
namespace SetViewPort
{
/// <summary>
/// Converts a model-space view size and a paper-space sheet size into scale
/// ratios and their annotation strings ("1:N").
/// </summary>
public class Scale
{
    // Set once at construction; all scale values derive from their ratio.
    private readonly double _viewSize;
    private readonly double _sheetSize;

    /// <summary>Creates a scale from a view (model) size and a sheet (paper) size.</summary>
    public Scale(double ViewSize, double SheetSize)
    {
        _viewSize = ViewSize;
        _sheetSize = SheetSize;
    }

    /// <summary>Raw paper/model ratio: sheet size divided by view size.</summary>
    public double ScaleValue => _sheetSize / _viewSize;

    /// <summary>Returns the raw paper/model ratio (same value as <see cref="ScaleValue"/>).</summary>
    public double GetScale() => _sheetSize / _viewSize;

    /// <summary>"1:N" string with a 1000x factor applied to the inverse ratio before rounding.</summary>
    public string GetCivilScale() => string.Format("1:{0}", Math.Round(1000 / (_sheetSize / _viewSize)).ToString());

    /// <summary>"1:N" string from the rounded inverse of the raw ratio.</summary>
    public string GetBuildingScale() => string.Format("1:{0}", Math.Round(1 / (_sheetSize / _viewSize)).ToString());
}
public class MyCommands
{
Database db = Application.DocumentManager.MdiActiveDocument.Database;
Editor ed = Application.DocumentManager.MdiActiveDocument.Editor;
//Direction parameter added: when ToRight is true the viewport extends to the right (+X); otherwise to the left (-X)
private Viewport GetViewport(ViewTableRecord InputView, Point2d BasePoint, double Scale,bool ToRight)
{
Viewport NewViewport = new Viewport();
if(ToRight)
{
NewViewport.CenterPoint = new Point3d(BasePoint.X + (InputView.Width / 2) * Scale, BasePoint.Y + (InputView.Height / 2) * Scale, 0);
}
else
{
NewViewport.CenterPoint = new Point3d(BasePoint.X - (InputView.Width / 2) * Scale, BasePoint.Y + (InputView.Height / 2) * Scale, 0);
}
NewViewport.Height = InputView.Height * Scale;
NewViewport.Width = InputView.Width * Scale;
NewViewport.ViewCenter = InputView.CenterPoint;
NewViewport.ViewDirection = InputView.ViewDirection;
NewViewport.ViewHeight = InputView.Height;
NewViewport.ViewTarget = InputView.Target;
NewViewport.TwistAngle = InputView.ViewTwist;
return NewViewport;
}
private string GetSheetSize(string DstPath,string SheetPath)
{
string SheetSize = "NotFound";
XmlDocument SheetSet = DstViewer.DstToXml(DstPath);
string Xpath1 = string.Format("/ AcSmDatabase / AcSmSheetSet / AcSmSubset / AcSmSheet[AcSmAcDbLayoutReference[AcSmProp = '{0}']]", SheetPath);
string Xpath2 = "AcSmCustomPropertyBag/AcSmCustomPropertyValue[@propname='图幅']/AcSmProp[@propname='Value']";
XmlNodeList NodeList = SheetSet.SelectNodes(Xpath1);
if(NodeList == null)
{
return SheetSize;
}
XmlNode Node = NodeList[0].SelectSingleNode(Xpath2);
if(Node == null || Node.InnerText == "")
{
return SheetSize;
}
SheetSize = Node.InnerText;
return SheetSize;
}
// Modal Command with localized name
[CommandMethod("SetViewPort")]
public void MyCommand() // This method can have any name
{
// Put your command code here
#region 获取CAD核心变量
#endregion
#region 定义及初始化变量
string SheetSize = "NotFound";
string ScaleFlag = "1";
Hashtable FlagTable = new Hashtable();
FlagTable.Add("0", "无比例");
FlagTable.Add("1", "建筑比例");
FlagTable.Add("2", "市政比例");
double SheetLength = 1;
string ClipLayerName = "TK-视口";
string DwgFile = db.OriginalFileName;
string DstFile = Path.GetDirectoryName(DwgFile) + @"\图纸集数据文件.dst";
#endregion
#region 获取图幅数据
if (!new FileInfo(DstFile).Exists)
{
ed.WriteMessage("\n未找到图纸集数据文件<图纸集数据文件.dst>,请检查图纸集数据文件是否与图纸文件在同一个文件夹内,以及文件名是否正确。");
return;
}
try
{
SheetSize = GetSheetSize(DstFile, DwgFile);
if (SheetSize == "NotFound")
{
ed.WriteMessage("\n未找到图幅的图纸自定义属性,请检查图纸集文件中的自定义属性内容。");
return;
}
ed.WriteMessage("\n当前图纸图幅为<{0}>", SheetSize);
}
catch (Autodesk.AutoCAD.Runtime.Exception EX)
{
ed.WriteMessage("\n出错啦!{0}", EX.ToString());
return;
}
#endregion
#region 读取设置文件
string inipath = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location) + "\\config.ini";
INIReader inifile = new INIReader(inipath);
if (inifile.ExistINIFile())
{
try
{
string SheetList = inifile.IniReadValue("SheetList", "List");
if (!SheetList.Contains(SheetSize))
{
ed.WriteMessage("\n配置文件的图幅列表中不包含本图的图幅格式,请检查图纸图幅属性是否正确或在配置文件中添加相关数据。");
System.Diagnostics.Process.Start("notepad.exe", inipath);
ed.WriteMessage("\n已打开配置文件!");
return;
}
SheetLength = Convert.ToDouble(inifile.IniReadValue("Size", SheetSize));
ClipLayerName = inifile.IniReadValue("ClipLayer", "name");
ScaleFlag = inifile.IniReadValue("DefaultScaleType", "type");
}
catch (System.Exception EX)
{
ed.WriteMessage("出错了!" + EX.ToString());
return;
}
}
else
{
ed.WriteMessage("\n未找到配置文件config.ini!,请检查插件所在文件夹。");
return;
}
#endregion
LayerStateManager layerState = db.LayerStateManager;
using (Transaction Trans = db.TransactionManager.StartTransaction())
{
try
{
//获取布局列表(剔除模型空间)
DBDictionary Layouts = Trans.GetObject(db.LayoutDictionaryId, OpenMode.ForRead) as DBDictionary;
ArrayList Layoutlist = new ArrayList();
foreach (DBDictionaryEntry item in Layouts)
{
if (item.Key != "Model")
{
Layout layoutobject = Trans.GetObject(item.Value, OpenMode.ForRead) as Layout;
Layoutlist.Add(layoutobject);
}
}
//获取view列表,注意哦,是symbotable,不能用viewtable,会闪退
SymbolTable MyVt = Trans.GetObject(db.ViewTableId, OpenMode.ForRead) as SymbolTable;
ArrayList viewlist = new ArrayList();
foreach (ObjectId viewID in MyVt)
{
ViewTableRecord VR = Trans.GetObject(viewID, OpenMode.ForRead) as ViewTableRecord;
viewlist.Add(VR);
}
if(viewlist.Count == 0)
{
ed.WriteMessage("\n未发现存储的视图!");
return;
}
PromptKeywordOptions keyops = new PromptKeywordOptions("\n选择比例类别[无比例(0)/建筑比例(1)/市政比例(2)]","0 1 2");
keyops.Keywords.Default = ScaleFlag;
keyops.AllowNone = true;
PromptResult keyres = ed.GetKeywords(keyops);
if(keyres.Status == PromptStatus.OK)
{
ScaleFlag = keyres.StringResult;
}
XmlDocument SheetSet = DstViewer.DstToXml(DstFile);
for (int i = 0; i < Layoutlist.Count; i++)
{
if (i == viewlist.Count)
{
break;
}
Layout LT = Layoutlist[i] as Layout;
string[] split = LT.LayoutName.Split(' ');
Regex patten = new Regex(@"^0*\d{1,}");
Regex replace = new Regex("^0*");
string match = replace.Replace(split[0], "");
var query = from ViewTableRecord view in viewlist
where replace.Replace(view.Name, "") == match
select view;
if(!query.Any())
{
ed.WriteMessage("\n布局“{0}”未找到匹配的视图,没有成功生成视口!", LT.LayoutName);
continue;
}
ViewTableRecord VR = query.First() ;
Scale SheetScale = new Scale(VR.Width, SheetLength);
Viewport VP = GetViewport(VR, new Point2d(0, 0), SheetScale.ScaleValue, false);
#region 创建比例文字,写入图纸集中
string Xpath1 = string.Format("/ AcSmDatabase / AcSmSheetSet / AcSmSubset / AcSmSheet[AcSmAcDbLayoutReference[AcSmProp = '{0}']]", LT.LayoutName);
string Xpath2 = "AcSmCustomPropertyBag/AcSmCustomPropertyValue[@propname='出图比例']/AcSmProp[@propname='Value']";
XmlNode SheetNode = SheetSet.SelectSingleNode(Xpath1);
XmlNode ScaleNode =SheetNode.SelectSingleNode(Xpath2);
string ScaleText = "";
switch (ScaleFlag)
{
case "0":
ScaleText = "- -";
break;
case "1":
ScaleText = SheetScale.GetBuildingScale();
break;
case "2":
ScaleText = SheetScale.GetCivilScale();
break;
}
ScaleNode.InnerText = ScaleText;
/*
if(SheetNode == null)
{
ed.WriteMessage("\n找不到sheetnode!\n");
}
if(ScaleNode == null)
{
ed.WriteMessage("\n找不到scalenode!\n");
}
ed.WriteMessage("\n{0}----=》{1}", LT.LayoutName, ScaleText);
/*
TextStyleTable Tst = (TextStyleTable)Trans.GetObject(db.TextStyleTableId, OpenMode.ForRead);
ObjectId TextStyleID = new ObjectId();
string StyleName = "TK-字段";
if(Tst.Has(StyleName))
{
TextStyleID = Tst[StyleName];
}
else
{
TextStyleID = Tst["Standard"];
}
DBText ScaleMark = new DBText();
if(ScaleFlag == "2")
{
ScaleMark.TextString = SheetScale.GetCivilScale();
}
else
{
ScaleMark.TextString = SheetScale.GetBuildingScale();
}
ScaleMark.TextStyleId = TextStyleID;
ScaleMark.Position = new Point3d(-5, 18, 0);
ScaleMark.Height = 2.5;
ScaleMark.WidthFactor = 0.7;
ScaleMark.HorizontalMode = TextHorizontalMode.TextMid;
ScaleMark.VerticalMode = TextVerticalMode.TextBase;
ScaleMark.AlignmentPoint = new Point3d(-5, 18, 0);
*/
#endregion
BlockTableRecord BTR = Trans.GetObject(LT.BlockTableRecordId, OpenMode.ForWrite) as BlockTableRecord;
//BTR.AppendEntity(ScaleMark);
//Trans.AddNewlyCreatedDBObject(ScaleMark, true);
BTR.AppendEntity(VP);
Trans.AddNewlyCreatedDBObject(VP, true);
LayoutManager.Current.SetCurrentLayoutId(LT.Id);
VP.On = true;
//恢复视图的图层状态
// ed.WriteMessage("\n<" + VR.LayerState + ">");
if(VR.LayerState != "")
{
layerState.RestoreLayerState(VR.LayerState, VP.Id, 1, LayerStateMasks.CurrentViewport);
}
//开始选择多段线裁剪视口
TypedValue[] Filter = new TypedValue[]
{
new TypedValue((int)DxfCode.Operator,"<and"),
new TypedValue((int)DxfCode.LayoutName,LT.LayoutName),
new TypedValue((int)DxfCode.LayerName,ClipLayerName),
new TypedValue((int)DxfCode.Operator,"<or"),
new TypedValue((int)DxfCode.Start,"POLYLINE"),
new TypedValue((int)DxfCode.Start,"POLYLINE3D"),
new TypedValue((int)DxfCode.Start,"POLYLINE2D"),
new TypedValue((int)DxfCode.Start,"LWPOLYLINE"),
new TypedValue((int)DxfCode.Operator,"or>"),
new TypedValue((int)DxfCode.Operator,"and>")
};
PromptSelectionResult selresult = ed.SelectAll(new SelectionFilter(Filter));
if (selresult.Status == PromptStatus.OK)
{
ObjectId[] IDs = selresult.Value.GetObjectIds();
VP.NonRectClipEntityId = IDs[0];
VP.NonRectClipOn = true;
//ed.WriteMessage("\n呵呵,剪裁了视口哦");
}
else
{
ed.WriteMessage("\n布局“{0}”中未找到裁剪视口的多义线!视口裁剪失败!", LT.LayoutName);
}
}
DstViewer.XmlToDst(SheetSet, DstFile);
Trans.Commit();
}
catch (Autodesk.AutoCAD.Runtime.Exception Ex)
{
ed.WriteMessage("\n出错啦!{0}", Ex.ToString());
}
finally
{
Trans.Dispose();
}
}
}
/*
[CommandMethod("restart")]
public void TestCommand() // This method can have any name
{
Document doc = Application.DocumentManager.MdiActiveDocument;
Editor ed = doc.Editor;
ed.WriteMessage("\n中断了哦");
}
*/
}
}
| 40.848485 | 170 | 0.463093 | [
"MIT"
] | sonicg83/AutoCad.net | SetViewPort/SetViewPort/myCommands.cs | 16,990 | C# |
using System;
using System.Collections.Generic;
using System.Text;
using NUnit.Framework;
using Transloadit;
using Transloadit.Assembly;
namespace Tests
{
[TestFixture]
class OptionalSettingTests
{
[Test]
public void InvokeAssemblyWithNotifyUrl()
{
ITransloadit transloadit = new Transloadit.Transloadit("YOUR-PUBLIC-API-KEY", "YOUR-SECRET-KEY");
IAssemblyBuilder assembly = new AssemblyBuilder();
IStep step = new Transloadit.Assembly.Step();
assembly.SetNotifyURL("http://my.localhost");
step.SetOption("robot", "/image/resize");
assembly.AddStep("step", step);
TransloaditResponse response = transloadit.InvokeAssembly(assembly);
Assert.IsTrue((string)response.Data["ok"] == "ASSEMBLY_COMPLETED" || (string)response.Data["ok"] == "ASSEMBLY_EXECUTING");
}
}
}
| 28.53125 | 134 | 0.650602 | [
"MIT"
] | igofed/transloadit-csharp-sdk | test/OptionalSettingsTests.cs | 915 | C# |
using Entities;
using System.Threading.Tasks;
namespace DataAccess.Interfaces
{
    /// <summary>
    /// Read-side data access for <see cref="User"/> entities, keyed by email address.
    /// </summary>
    public interface IUserRepository
    {
        /// <summary>Returns true when a user with the given email address exists.</summary>
        Task<bool> ExistsUser(string email);
        /// <summary>
        /// Fetches the user with the given email address.
        /// NOTE(review): behaviour when no user matches is not visible here — presumably
        /// returns null or throws; confirm against the implementation.
        /// </summary>
        Task<User> GetUserByEmail(string email);
    }
}
| 18.916667 | 48 | 0.700441 | [
"MIT"
] | BootcampTeamNet/ApiRestaurants | ApiRestaurants/DataAccess/Interfaces/IUserRepository.cs | 229 | C# |
using BenchmarkDotNet.Running;
namespace DotNet.Glob.Benchmarks
{
    /// <summary>
    /// Console entry point for the benchmark project: runs the baseline Regex
    /// benchmark suites via BenchmarkDotNet, one after another.
    /// </summary>
    public class Program
    {
        public static void Main(string[] args)
        {
            // Each call runs one BenchmarkDotNet suite to completion before the next starts.
            BenchmarkRunner.Run<BaselineRegexGlobCompileBenchmarks>();
            BenchmarkRunner.Run<BaselineRegexIsMatchTrueBenchmarks>();
            BenchmarkRunner.Run<BaselineRegexIsMatchFalseBenchmarks>();
            // The DotNet.Glob suite is currently disabled.
            //BenchmarkRunner.Run<GlobBenchmarks>();
        }
    }
}
| 27.5625 | 71 | 0.664399 | [
"MIT"
] | Alanoll/DotNet.Glob | src/DotNet.Glob.Benchmarks/Program.cs | 443 | C# |
using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("AppDomainTest")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("AppDomainTest")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("155020ba-b857-45be-89b8-4e47cb208cfb")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")] | 38.685714 | 84 | 0.745199 | [
"Unlicense"
] | jingyiliu/MEFInAnAppDomain | AppDomainTest/Properties/AssemblyInfo.cs | 1,357 | C# |
using System.Reflection;
using System.Runtime.CompilerServices;
using Android.App;
// Information about this assembly is defined by the following attributes.
// Change them to the values specific to your project.
[assembly: AssemblyTitle("Refit")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("")]
[assembly: AssemblyCopyright("paul")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}".
// The form "{Major}.{Minor}.*" will automatically update the build and revision,
// and "{Major}.{Minor}.{Build}.*" will update just the revision.
[assembly: AssemblyVersion("1.0.0")]
// The following attributes are used to specify the signing key for the assembly,
// if desired. See the Mono documentation for more information about signing.
//[assembly: AssemblyDelaySign(false)]
//[assembly: AssemblyKeyFile("")]
| 41.208333 | 82 | 0.747219 | [
"MIT"
] | balauru/refit | Refit-Tests/Properties/AssemblyInfo.cs | 989 | C# |
// <auto-generated/>
// MIT License
//
// Copyright(c) 2020 Jordan Peck (jordan.me2@gmail.com)
// Copyright(c) 2020 Contributors
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// .'',;:cldxkO00KKXXNNWWWNNXKOkxdollcc::::::;:::ccllloooolllllllllooollc:,'... ...........',;cldxkO000Okxdlc::;;;,,;;;::cclllllll
// ..',;:ldxO0KXXNNNNNNNNXXK0kxdolcc::::::;;;,,,,,,;;;;;;;;;;:::cclllllc:;'.... ...........',;:ldxO0KXXXK0Okxdolc::;;;;::cllodddddo
// ...',:loxO0KXNNNNNXXKK0Okxdolc::;::::::::;;;,,'''''.....''',;:clllllc:;,'............''''''''',;:loxO0KXNNNNNXK0Okxdollccccllodxxxxxxd
// ....';:ldkO0KXXXKK00Okxdolcc:;;;;;::cclllcc:;;,''..... ....',;clooddolcc:;;;;,,;;;;;::::;;;;;;:cloxk0KXNWWWWWWNXKK0Okxddoooddxxkkkkkxx
// .....';:ldxkOOOOOkxxdolcc:;;;,,,;;:cllooooolcc:;'... ..,:codxkkkxddooollloooooooollcc:::::clodkO0KXNWWWWWWNNXK00Okxxxxxxxxkkkkxxx
// . ....';:cloddddo___________,,,,;;:clooddddoolc:,... ..,:ldx__00OOOkkk___kkkkkkxxdollc::::cclodkO0KXXNNNNNNXXK0OOkxxxxxxxxxxxxddd
// .......',;:cccc:| |,,,;;:cclooddddoll:;'.. ..';cox| \KKK000| |KK00OOkxdocc___;::clldxxkO0KKKKK00Okkxdddddddddddddddoo
// .......'',,,,,''| ________|',,;;::cclloooooolc:;'......___:ldk| \KK000| |XKKK0Okxolc| |;;::cclodxxkkkkxxdoolllcclllooodddooooo
// ''......''''....| | ....'',,,,;;;::cclloooollc:;,''.'| |oxk| \OOO0| |KKK00Oxdoll|___|;;;;;::ccllllllcc::;;,,;;;:cclloooooooo
// ;;,''.......... | |_____',,;;;____:___cllo________.___| |___| \xkk| |KK_______ool___:::;________;;;_______...'',;;:ccclllloo
// c:;,''......... | |:::/ ' |lo/ | | \dx| |0/ \d| |cc/ |'/ \......',,;;:ccllo
// ol:;,'..........| _____|ll/ __ |o/ ______|____ ___| | \o| |/ ___ \| |o/ ______|/ ___ \ .......'',;:clo
// dlc;,...........| |::clooo| / | |x\___ \KXKKK0| |dol| |\ \| | | | | |d\___ \..| | / / ....',:cl
// xoc;'... .....'| |llodddd| \__| |_____\ \KKK0O| |lc:| |'\ | |___| | |_____\ \.| |_/___/... ...',;:c
// dlc;'... ....',;| |oddddddo\ | |Okkx| |::;| |..\ |\ /| | | \ |... ....',;:c
// ol:,'.......',:c|___|xxxddollc\_____,___|_________/ddoll|___|,,,|___|...\_____|:\ ______/l|___|_________/...\________|'........',;::cc
// c:;'.......';:codxxkkkkxxolc::;::clodxkOO0OOkkxdollc::;;,,''''',,,,''''''''''',,'''''',;:loxkkOOkxol:;,'''',,;:ccllcc:;,'''''',;::ccll
// ;,'.......',:codxkOO0OOkxdlc:;,,;;:cldxxkkxxdolc:;;,,''.....'',;;:::;;,,,'''''........,;cldkO0KK0Okdoc::;;::cloodddoolc:;;;;;::ccllooo
// .........',;:lodxOO0000Okdoc:,,',,;:clloddoolc:;,''.......'',;:clooollc:;;,,''.......',:ldkOKXNNXX0Oxdolllloddxxxxxxdolccccccllooodddd
// . .....';:cldxkO0000Okxol:;,''',,;::cccc:;,,'.......'',;:cldxxkkxxdolc:;;,'.......';coxOKXNWWWNXKOkxddddxxkkkkkkxdoollllooddxxxxkkk
// ....',;:codxkO000OOxdoc:;,''',,,;;;;,''.......',,;:clodkO00000Okxolc::;,,''..',;:ldxOKXNWWWNNK0OkkkkkkkkkkkxxddooooodxxkOOOOO000
// ....',;;clodxkkOOOkkdolc:;,,,,,,,,'..........,;:clodxkO0KKXKK0Okxdolcc::;;,,,;;:codkO0XXNNNNXKK0OOOOOkkkkxxdoollloodxkO0KKKXXXXX
//
// VERSION: 1.0.1
// https://github.com/Auburn/FastNoise
namespace Aster.Client.World
{
using System;
using System.Runtime.CompilerServices;
// Switch between using floats or doubles for input position
using FNLfloat = System.Single;
//using FNLfloat = System.Double;
public class FastNoiseLite
{
private const short INLINE = 256; // MethodImplOptions.AggressiveInlining;
private const short OPTIMISE = 512; // MethodImplOptions.AggressiveOptimization;
public enum NoiseType
{
OpenSimplex2,
OpenSimplex2S,
Cellular,
Perlin,
ValueCubic,
Value
};
public enum RotationType3D
{
None,
ImproveXYPlanes,
ImproveXZPlanes
};
public enum FractalType
{
None,
FBm,
Ridged,
PingPong,
DomainWarpProgressive,
DomainWarpIndependent
};
public enum CellularDistanceFunction
{
Euclidean,
EuclideanSq,
Manhattan,
Hybrid
};
public enum CellularReturnType
{
CellValue,
Distance,
Distance2,
Distance2Add,
Distance2Sub,
Distance2Mul,
Distance2Div
};
public enum DomainWarpType
{
OpenSimplex2,
OpenSimplex2Reduced,
BasicGrid
};
private enum TransformType3D
{
None,
ImproveXYPlanes,
ImproveXZPlanes,
DefaultOpenSimplex2
};
private int mSeed = 1337;
private float mFrequency = 0.01f;
private NoiseType mNoiseType = NoiseType.OpenSimplex2;
private RotationType3D mRotationType3D = RotationType3D.None;
private TransformType3D mTransformType3D = TransformType3D.DefaultOpenSimplex2;
private FractalType mFractalType = FractalType.None;
private int mOctaves = 3;
private float mLacunarity = 2.0f;
private float mGain = 0.5f;
private float mWeightedStrength = 0.0f;
private float mPingPongStength = 2.0f;
private float mFractalBounding = 1 / 1.75f;
private CellularDistanceFunction mCellularDistanceFunction = CellularDistanceFunction.EuclideanSq;
private CellularReturnType mCellularReturnType = CellularReturnType.Distance;
private float mCellularJitterModifier = 1.0f;
private DomainWarpType mDomainWarpType = DomainWarpType.OpenSimplex2;
private TransformType3D mWarpTransformType3D = TransformType3D.DefaultOpenSimplex2;
private float mDomainWarpAmp = 1.0f;
/// <summary>
/// Create new FastNoise object with optional seed
/// </summary>
public FastNoiseLite(int seed = 1337)
{
SetSeed(seed);
}
/// <summary>
/// Sets seed used for all noise types
/// </summary>
/// <remarks>
/// Default: 1337
/// </remarks>
public void SetSeed(int seed)
{
mSeed = seed;
}
/// <summary>
/// Sets frequency for all noise types
/// </summary>
/// <remarks>
/// Default: 0.01
/// </remarks>
public void SetFrequency(float frequency)
{
mFrequency = frequency;
}
/// <summary>
/// Sets noise algorithm used for GetNoise(...)
/// </summary>
/// <remarks>
/// Default: OpenSimplex2
/// </remarks>
public void SetNoiseType(NoiseType noiseType)
{
mNoiseType = noiseType;
UpdateTransformType3D();
}
/// <summary>
/// Sets domain rotation type for 3D Noise and 3D DomainWarp.
/// Can aid in reducing directional artifacts when sampling a 2D plane in 3D
/// </summary>
/// <remarks>
/// Default: None
/// </remarks>
public void SetRotationType3D(RotationType3D rotationType3D)
{
mRotationType3D = rotationType3D;
UpdateTransformType3D();
UpdateWarpTransformType3D();
}
/// <summary>
/// Sets method for combining octaves in all fractal noise types
/// </summary>
/// <remarks>
/// Default: None
/// Note: FractalType.DomainWarp... only affects DomainWarp(...)
/// </remarks>
public void SetFractalType(FractalType fractalType)
{
mFractalType = fractalType;
}
/// <summary>
/// Sets octave count for all fractal noise types
/// </summary>
/// <remarks>
/// Default: 3
/// </remarks>
public void SetFractalOctaves(int octaves)
{
mOctaves = octaves;
CalculateFractalBounding();
}
/// <summary>
/// Sets octave lacunarity for all fractal noise types
/// </summary>
/// <remarks>
/// Default: 2.0
/// </remarks>
public void SetFractalLacunarity(float lacunarity)
{
mLacunarity = lacunarity;
}
/// <summary>
/// Sets octave gain for all fractal noise types
/// </summary>
/// <remarks>
/// Default: 0.5
/// </remarks>
public void SetFractalGain(float gain)
{
mGain = gain;
CalculateFractalBounding();
}
/// <summary>
/// Sets octave weighting for all none DomainWarp fratal types
/// </summary>
/// <remarks>
/// Default: 0.0
/// Note: Keep between 0...1 to maintain -1...1 output bounding
/// </remarks>
public void SetFractalWeightedStrength(float weightedStrength)
{
mWeightedStrength = weightedStrength;
}
/// <summary>
/// Sets strength of the fractal ping pong effect
/// </summary>
/// <remarks>
/// Default: 2.0
/// </remarks>
public void SetFractalPingPongStrength(float pingPongStrength)
{
mPingPongStength = pingPongStrength;
}
/// <summary>
/// Sets distance function used in cellular noise calculations
/// </summary>
/// <remarks>
/// Default: Distance
/// </remarks>
public void SetCellularDistanceFunction(CellularDistanceFunction cellularDistanceFunction)
{
mCellularDistanceFunction = cellularDistanceFunction;
}
/// <summary>
/// Sets return type from cellular noise calculations
/// </summary>
/// <remarks>
/// Default: EuclideanSq
/// </remarks>
public void SetCellularReturnType(CellularReturnType cellularReturnType)
{
mCellularReturnType = cellularReturnType;
}
/// <summary>
/// Sets the maximum distance a cellular point can move from it's grid position
/// </summary>
/// <remarks>
/// Default: 1.0
/// Note: Setting this higher than 1 will cause artifacts
/// </remarks>
public void SetCellularJitter(float cellularJitter)
{
mCellularJitterModifier = cellularJitter;
}
/// <summary>
/// Sets the warp algorithm when using DomainWarp(...)
/// </summary>
/// <remarks>
/// Default: OpenSimplex2
/// </remarks>
public void SetDomainWarpType(DomainWarpType domainWarpType)
{
mDomainWarpType = domainWarpType;
UpdateWarpTransformType3D();
}
/// <summary>
/// Sets the maximum warp distance from original position when using DomainWarp(...)
/// </summary>
/// <remarks>
/// Default: 1.0
/// </remarks>
public void SetDomainWarpAmp(float domainWarpAmp)
{
mDomainWarpAmp = domainWarpAmp;
}
/// <summary>
/// 2D noise at given position using current settings
/// </summary>
/// <returns>
/// Noise output bounded between -1...1
/// </returns>
[MethodImpl(OPTIMISE)]
public float GetNoise(FNLfloat x, FNLfloat y)
{
TransformNoiseCoordinate(ref x, ref y);
switch (mFractalType)
{
default:
return GenNoiseSingle(mSeed, x, y);
case FractalType.FBm:
return GenFractalFBm(x, y);
case FractalType.Ridged:
return GenFractalRidged(x, y);
case FractalType.PingPong:
return GenFractalPingPong(x, y);
}
}
/// <summary>
/// 3D noise at given position using current settings
/// </summary>
/// <returns>
/// Noise output bounded between -1...1
/// </returns>
[MethodImpl(OPTIMISE)]
public float GetNoise(FNLfloat x, FNLfloat y, FNLfloat z)
{
TransformNoiseCoordinate(ref x, ref y, ref z);
switch (mFractalType)
{
default:
return GenNoiseSingle(mSeed, x, y, z);
case FractalType.FBm:
return GenFractalFBm(x, y, z);
case FractalType.Ridged:
return GenFractalRidged(x, y, z);
case FractalType.PingPong:
return GenFractalPingPong(x, y, z);
}
}
/// <summary>
/// 2D warps the input position using current domain warp settings
/// </summary>
/// <example>
/// Example usage with GetNoise
/// <code>DomainWarp(ref x, ref y)
/// noise = GetNoise(x, y)</code>
/// </example>
[MethodImpl(OPTIMISE)]
public void DomainWarp(ref FNLfloat x, ref FNLfloat y)
{
switch (mFractalType)
{
default:
DomainWarpSingle(ref x, ref y);
break;
case FractalType.DomainWarpProgressive:
DomainWarpFractalProgressive(ref x, ref y);
break;
case FractalType.DomainWarpIndependent:
DomainWarpFractalIndependent(ref x, ref y);
break;
}
}
/// <summary>
/// 3D warps the input position using current domain warp settings
/// </summary>
/// <example>
/// Example usage with GetNoise
/// <code>DomainWarp(ref x, ref y, ref z)
/// noise = GetNoise(x, y, z)</code>
/// </example>
[MethodImpl(OPTIMISE)]
public void DomainWarp(ref FNLfloat x, ref FNLfloat y, ref FNLfloat z)
{
switch (mFractalType)
{
default:
DomainWarpSingle(ref x, ref y, ref z);
break;
case FractalType.DomainWarpProgressive:
DomainWarpFractalProgressive(ref x, ref y, ref z);
break;
case FractalType.DomainWarpIndependent:
DomainWarpFractalIndependent(ref x, ref y, ref z);
break;
}
}
private static readonly float[] Gradients2D =
{
0.130526192220052f, 0.99144486137381f, 0.38268343236509f, 0.923879532511287f, 0.608761429008721f, 0.793353340291235f,
0.793353340291235f, 0.608761429008721f,
0.923879532511287f, 0.38268343236509f, 0.99144486137381f, 0.130526192220051f, 0.99144486137381f, -0.130526192220051f,
0.923879532511287f, -0.38268343236509f,
0.793353340291235f, -0.60876142900872f, 0.608761429008721f, -0.793353340291235f, 0.38268343236509f, -0.923879532511287f,
0.130526192220052f, -0.99144486137381f,
-0.130526192220052f, -0.99144486137381f, -0.38268343236509f, -0.923879532511287f, -0.608761429008721f, -0.793353340291235f,
-0.793353340291235f, -0.608761429008721f,
-0.923879532511287f, -0.38268343236509f, -0.99144486137381f, -0.130526192220052f, -0.99144486137381f, 0.130526192220051f,
-0.923879532511287f, 0.38268343236509f,
-0.793353340291235f, 0.608761429008721f, -0.608761429008721f, 0.793353340291235f, -0.38268343236509f, 0.923879532511287f,
-0.130526192220052f, 0.99144486137381f,
0.130526192220052f, 0.99144486137381f, 0.38268343236509f, 0.923879532511287f, 0.608761429008721f, 0.793353340291235f,
0.793353340291235f, 0.608761429008721f,
0.923879532511287f, 0.38268343236509f, 0.99144486137381f, 0.130526192220051f, 0.99144486137381f, -0.130526192220051f,
0.923879532511287f, -0.38268343236509f,
0.793353340291235f, -0.60876142900872f, 0.608761429008721f, -0.793353340291235f, 0.38268343236509f, -0.923879532511287f,
0.130526192220052f, -0.99144486137381f,
-0.130526192220052f, -0.99144486137381f, -0.38268343236509f, -0.923879532511287f, -0.608761429008721f, -0.793353340291235f,
-0.793353340291235f, -0.608761429008721f,
-0.923879532511287f, -0.38268343236509f, -0.99144486137381f, -0.130526192220052f, -0.99144486137381f, 0.130526192220051f,
-0.923879532511287f, 0.38268343236509f,
-0.793353340291235f, 0.608761429008721f, -0.608761429008721f, 0.793353340291235f, -0.38268343236509f, 0.923879532511287f,
-0.130526192220052f, 0.99144486137381f,
0.130526192220052f, 0.99144486137381f, 0.38268343236509f, 0.923879532511287f, 0.608761429008721f, 0.793353340291235f,
0.793353340291235f, 0.608761429008721f,
0.923879532511287f, 0.38268343236509f, 0.99144486137381f, 0.130526192220051f, 0.99144486137381f, -0.130526192220051f,
0.923879532511287f, -0.38268343236509f,
0.793353340291235f, -0.60876142900872f, 0.608761429008721f, -0.793353340291235f, 0.38268343236509f, -0.923879532511287f,
0.130526192220052f, -0.99144486137381f,
-0.130526192220052f, -0.99144486137381f, -0.38268343236509f, -0.923879532511287f, -0.608761429008721f, -0.793353340291235f,
-0.793353340291235f, -0.608761429008721f,
-0.923879532511287f, -0.38268343236509f, -0.99144486137381f, -0.130526192220052f, -0.99144486137381f, 0.130526192220051f,
-0.923879532511287f, 0.38268343236509f,
-0.793353340291235f, 0.608761429008721f, -0.608761429008721f, 0.793353340291235f, -0.38268343236509f, 0.923879532511287f,
-0.130526192220052f, 0.99144486137381f,
0.130526192220052f, 0.99144486137381f, 0.38268343236509f, 0.923879532511287f, 0.608761429008721f, 0.793353340291235f,
0.793353340291235f, 0.608761429008721f,
0.923879532511287f, 0.38268343236509f, 0.99144486137381f, 0.130526192220051f, 0.99144486137381f, -0.130526192220051f,
0.923879532511287f, -0.38268343236509f,
0.793353340291235f, -0.60876142900872f, 0.608761429008721f, -0.793353340291235f, 0.38268343236509f, -0.923879532511287f,
0.130526192220052f, -0.99144486137381f,
-0.130526192220052f, -0.99144486137381f, -0.38268343236509f, -0.923879532511287f, -0.608761429008721f, -0.793353340291235f,
-0.793353340291235f, -0.608761429008721f,
-0.923879532511287f, -0.38268343236509f, -0.99144486137381f, -0.130526192220052f, -0.99144486137381f, 0.130526192220051f,
-0.923879532511287f, 0.38268343236509f,
-0.793353340291235f, 0.608761429008721f, -0.608761429008721f, 0.793353340291235f, -0.38268343236509f, 0.923879532511287f,
-0.130526192220052f, 0.99144486137381f,
0.130526192220052f, 0.99144486137381f, 0.38268343236509f, 0.923879532511287f, 0.608761429008721f, 0.793353340291235f,
0.793353340291235f, 0.608761429008721f,
0.923879532511287f, 0.38268343236509f, 0.99144486137381f, 0.130526192220051f, 0.99144486137381f, -0.130526192220051f,
0.923879532511287f, -0.38268343236509f,
0.793353340291235f, -0.60876142900872f, 0.608761429008721f, -0.793353340291235f, 0.38268343236509f, -0.923879532511287f,
0.130526192220052f, -0.99144486137381f,
-0.130526192220052f, -0.99144486137381f, -0.38268343236509f, -0.923879532511287f, -0.608761429008721f, -0.793353340291235f,
-0.793353340291235f, -0.608761429008721f,
-0.923879532511287f, -0.38268343236509f, -0.99144486137381f, -0.130526192220052f, -0.99144486137381f, 0.130526192220051f,
-0.923879532511287f, 0.38268343236509f,
-0.793353340291235f, 0.608761429008721f, -0.608761429008721f, 0.793353340291235f, -0.38268343236509f, 0.923879532511287f,
-0.130526192220052f, 0.99144486137381f,
0.38268343236509f, 0.923879532511287f, 0.923879532511287f, 0.38268343236509f, 0.923879532511287f, -0.38268343236509f,
0.38268343236509f, -0.923879532511287f,
-0.38268343236509f, -0.923879532511287f, -0.923879532511287f, -0.38268343236509f, -0.923879532511287f, 0.38268343236509f,
-0.38268343236509f, 0.923879532511287f,
};
private static readonly float[] RandVecs2D =
{
-0.2700222198f, -0.9628540911f, 0.3863092627f, -0.9223693152f, 0.04444859006f, -0.999011673f, -0.5992523158f, -0.8005602176f,
-0.7819280288f, 0.6233687174f, 0.9464672271f, 0.3227999196f, -0.6514146797f, -0.7587218957f, 0.9378472289f, 0.347048376f,
-0.8497875957f, -0.5271252623f, -0.879042592f, 0.4767432447f, -0.892300288f, -0.4514423508f, -0.379844434f, -0.9250503802f,
-0.9951650832f, 0.0982163789f, 0.7724397808f, -0.6350880136f, 0.7573283322f, -0.6530343002f, -0.9928004525f, -0.119780055f,
-0.0532665713f, 0.9985803285f, 0.9754253726f, -0.2203300762f, -0.7665018163f, 0.6422421394f, 0.991636706f, 0.1290606184f,
-0.994696838f, 0.1028503788f, -0.5379205513f, -0.84299554f, 0.5022815471f, -0.8647041387f, 0.4559821461f, -0.8899889226f,
-0.8659131224f, -0.5001944266f, 0.0879458407f, -0.9961252577f, -0.5051684983f, 0.8630207346f, 0.7753185226f, -0.6315704146f,
-0.6921944612f, 0.7217110418f, -0.5191659449f, -0.8546734591f, 0.8978622882f, -0.4402764035f, -0.1706774107f, 0.9853269617f,
-0.9353430106f, -0.3537420705f, -0.9992404798f, 0.03896746794f, -0.2882064021f, -0.9575683108f, -0.9663811329f, 0.2571137995f,
-0.8759714238f, -0.4823630009f, -0.8303123018f, -0.5572983775f, 0.05110133755f, -0.9986934731f, -0.8558373281f, -0.5172450752f,
0.09887025282f, 0.9951003332f, 0.9189016087f, 0.3944867976f, -0.2439375892f, -0.9697909324f, -0.8121409387f, -0.5834613061f,
-0.9910431363f, 0.1335421355f, 0.8492423985f, -0.5280031709f, -0.9717838994f, -0.2358729591f, 0.9949457207f, 0.1004142068f,
0.6241065508f, -0.7813392434f, 0.662910307f, 0.7486988212f, -0.7197418176f, 0.6942418282f, -0.8143370775f, -0.5803922158f,
0.104521054f, -0.9945226741f, -0.1065926113f, -0.9943027784f, 0.445799684f, -0.8951327509f, 0.105547406f, 0.9944142724f,
-0.992790267f, 0.1198644477f, -0.8334366408f, 0.552615025f, 0.9115561563f, -0.4111755999f, 0.8285544909f, -0.5599084351f,
0.7217097654f, -0.6921957921f, 0.4940492677f, -0.8694339084f, -0.3652321272f, -0.9309164803f, -0.9696606758f, 0.2444548501f,
0.08925509731f, -0.996008799f, 0.5354071276f, -0.8445941083f, -0.1053576186f, 0.9944343981f, -0.9890284586f, 0.1477251101f,
0.004856104961f, 0.9999882091f, 0.9885598478f, 0.1508291331f, 0.9286129562f, -0.3710498316f, -0.5832393863f, -0.8123003252f,
0.3015207509f, 0.9534596146f, -0.9575110528f, 0.2883965738f, 0.9715802154f, -0.2367105511f, 0.229981792f, 0.9731949318f,
0.955763816f, -0.2941352207f, 0.740956116f, 0.6715534485f, -0.9971513787f, -0.07542630764f, 0.6905710663f, -0.7232645452f,
-0.290713703f, -0.9568100872f, 0.5912777791f, -0.8064679708f, -0.9454592212f, -0.325740481f, 0.6664455681f, 0.74555369f,
0.6236134912f, 0.7817328275f, 0.9126993851f, -0.4086316587f, -0.8191762011f, 0.5735419353f, -0.8812745759f, -0.4726046147f,
0.9953313627f, 0.09651672651f, 0.9855650846f, -0.1692969699f, -0.8495980887f, 0.5274306472f, 0.6174853946f, -0.7865823463f,
0.8508156371f, 0.52546432f, 0.9985032451f, -0.05469249926f, 0.1971371563f, -0.9803759185f, 0.6607855748f, -0.7505747292f,
-0.03097494063f, 0.9995201614f, -0.6731660801f, 0.739491331f, -0.7195018362f, -0.6944905383f, 0.9727511689f, 0.2318515979f,
0.9997059088f, -0.0242506907f, 0.4421787429f, -0.8969269532f, 0.9981350961f, -0.061043673f, -0.9173660799f, -0.3980445648f,
-0.8150056635f, -0.5794529907f, -0.8789331304f, 0.4769450202f, 0.0158605829f, 0.999874213f, -0.8095464474f, 0.5870558317f,
-0.9165898907f, -0.3998286786f, -0.8023542565f, 0.5968480938f, -0.5176737917f, 0.8555780767f, -0.8154407307f, -0.5788405779f,
0.4022010347f, -0.9155513791f, -0.9052556868f, -0.4248672045f, 0.7317445619f, 0.6815789728f, -0.5647632201f, -0.8252529947f,
-0.8403276335f, -0.5420788397f, -0.9314281527f, 0.363925262f, 0.5238198472f, 0.8518290719f, 0.7432803869f, -0.6689800195f,
-0.985371561f, -0.1704197369f, 0.4601468731f, 0.88784281f, 0.825855404f, 0.5638819483f, 0.6182366099f, 0.7859920446f,
0.8331502863f, -0.553046653f, 0.1500307506f, 0.9886813308f, -0.662330369f, -0.7492119075f, -0.668598664f, 0.743623444f,
0.7025606278f, 0.7116238924f, -0.5419389763f, -0.8404178401f, -0.3388616456f, 0.9408362159f, 0.8331530315f, 0.5530425174f,
-0.2989720662f, -0.9542618632f, 0.2638522993f, 0.9645630949f, 0.124108739f, -0.9922686234f, -0.7282649308f, -0.6852956957f,
0.6962500149f, 0.7177993569f, -0.9183535368f, 0.3957610156f, -0.6326102274f, -0.7744703352f, -0.9331891859f, -0.359385508f,
-0.1153779357f, -0.9933216659f, 0.9514974788f, -0.3076565421f, -0.08987977445f, -0.9959526224f, 0.6678496916f, 0.7442961705f,
0.7952400393f, -0.6062947138f, -0.6462007402f, -0.7631674805f, -0.2733598753f, 0.9619118351f, 0.9669590226f, -0.254931851f,
-0.9792894595f, 0.2024651934f, -0.5369502995f, -0.8436138784f, -0.270036471f, -0.9628500944f, -0.6400277131f, 0.7683518247f,
-0.7854537493f, -0.6189203566f, 0.06005905383f, -0.9981948257f, -0.02455770378f, 0.9996984141f, -0.65983623f, 0.751409442f,
-0.6253894466f, -0.7803127835f, -0.6210408851f, -0.7837781695f, 0.8348888491f, 0.5504185768f, -0.1592275245f, 0.9872419133f,
0.8367622488f, 0.5475663786f, -0.8675753916f, -0.4973056806f, -0.2022662628f, -0.9793305667f, 0.9399189937f, 0.3413975472f,
0.9877404807f, -0.1561049093f, -0.9034455656f, 0.4287028224f, 0.1269804218f, -0.9919052235f, -0.3819600854f, 0.924178821f,
0.9754625894f, 0.2201652486f, -0.3204015856f, -0.9472818081f, -0.9874760884f, 0.1577687387f, 0.02535348474f, -0.9996785487f,
0.4835130794f, -0.8753371362f, -0.2850799925f, -0.9585037287f, -0.06805516006f, -0.99768156f, -0.7885244045f, -0.6150034663f,
0.3185392127f, -0.9479096845f, 0.8880043089f, 0.4598351306f, 0.6476921488f, -0.7619021462f, 0.9820241299f, 0.1887554194f,
0.9357275128f, -0.3527237187f, -0.8894895414f, 0.4569555293f, 0.7922791302f, 0.6101588153f, 0.7483818261f, 0.6632681526f,
-0.7288929755f, -0.6846276581f, 0.8729032783f, -0.4878932944f, 0.8288345784f, 0.5594937369f, 0.08074567077f, 0.9967347374f,
0.9799148216f, -0.1994165048f, -0.580730673f, -0.8140957471f, -0.4700049791f, -0.8826637636f, 0.2409492979f, 0.9705377045f,
0.9437816757f, -0.3305694308f, -0.8927998638f, -0.4504535528f, -0.8069622304f, 0.5906030467f, 0.06258973166f, 0.9980393407f,
-0.9312597469f, 0.3643559849f, 0.5777449785f, 0.8162173362f, -0.3360095855f, -0.941858566f, 0.697932075f, -0.7161639607f,
-0.002008157227f, -0.9999979837f, -0.1827294312f, -0.9831632392f, -0.6523911722f, 0.7578824173f, -0.4302626911f, -0.9027037258f,
-0.9985126289f, -0.05452091251f, -0.01028102172f, -0.9999471489f, -0.4946071129f, 0.8691166802f, -0.2999350194f, 0.9539596344f,
0.8165471961f, 0.5772786819f, 0.2697460475f, 0.962931498f, -0.7306287391f, -0.6827749597f, -0.7590952064f, -0.6509796216f,
-0.907053853f, 0.4210146171f, -0.5104861064f, -0.8598860013f, 0.8613350597f, 0.5080373165f, 0.5007881595f, -0.8655698812f,
-0.654158152f, 0.7563577938f, -0.8382755311f, -0.545246856f, 0.6940070834f, 0.7199681717f, 0.06950936031f, 0.9975812994f,
0.1702942185f, -0.9853932612f, 0.2695973274f, 0.9629731466f, 0.5519612192f, -0.8338697815f, 0.225657487f, -0.9742067022f,
0.4215262855f, -0.9068161835f, 0.4881873305f, -0.8727388672f, -0.3683854996f, -0.9296731273f, -0.9825390578f, 0.1860564427f,
0.81256471f, 0.5828709909f, 0.3196460933f, -0.9475370046f, 0.9570913859f, 0.2897862643f, -0.6876655497f, -0.7260276109f,
-0.9988770922f, -0.047376731f, -0.1250179027f, 0.992154486f, -0.8280133617f, 0.560708367f, 0.9324863769f, -0.3612051451f,
0.6394653183f, 0.7688199442f, -0.01623847064f, -0.9998681473f, -0.9955014666f, -0.09474613458f, -0.81453315f, 0.580117012f,
0.4037327978f, -0.9148769469f, 0.9944263371f, 0.1054336766f, -0.1624711654f, 0.9867132919f, -0.9949487814f, -0.100383875f,
-0.6995302564f, 0.7146029809f, 0.5263414922f, -0.85027327f, -0.5395221479f, 0.841971408f, 0.6579370318f, 0.7530729462f,
0.01426758847f, -0.9998982128f, -0.6734383991f, 0.7392433447f, 0.639412098f, -0.7688642071f, 0.9211571421f, 0.3891908523f,
-0.146637214f, -0.9891903394f, -0.782318098f, 0.6228791163f, -0.5039610839f, -0.8637263605f, -0.7743120191f, -0.6328039957f,
};
private static readonly float[] Gradients3D =
{
0, 1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0,
1, 0, 1, 0, -1, 0, 1, 0, 1, 0, -1, 0, -1, 0, -1, 0,
1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0, 0,
0, 1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0,
1, 0, 1, 0, -1, 0, 1, 0, 1, 0, -1, 0, -1, 0, -1, 0,
1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0, 0,
0, 1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0,
1, 0, 1, 0, -1, 0, 1, 0, 1, 0, -1, 0, -1, 0, -1, 0,
1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0, 0,
0, 1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0,
1, 0, 1, 0, -1, 0, 1, 0, 1, 0, -1, 0, -1, 0, -1, 0,
1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0, 0,
0, 1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0,
1, 0, 1, 0, -1, 0, 1, 0, 1, 0, -1, 0, -1, 0, -1, 0,
1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0, -1, -1, 0, 0,
1, 1, 0, 0, 0, -1, 1, 0, -1, 1, 0, 0, 0, -1, -1, 0
};
private static readonly float[] RandVecs3D =
{
-0.7292736885f, -0.6618439697f, 0.1735581948f, 0, 0.790292081f, -0.5480887466f, -0.2739291014f, 0, 0.7217578935f, 0.6226212466f,
-0.3023380997f, 0, 0.565683137f, -0.8208298145f, -0.0790000257f, 0, 0.760049034f, -0.5555979497f, -0.3370999617f, 0,
0.3713945616f, 0.5011264475f, 0.7816254623f, 0, -0.1277062463f, -0.4254438999f, -0.8959289049f, 0, -0.2881560924f,
-0.5815838982f, 0.7607405838f, 0,
0.5849561111f, -0.662820239f, -0.4674352136f, 0, 0.3307171178f, 0.0391653737f, 0.94291689f, 0, 0.8712121778f, -0.4113374369f,
-0.2679381538f, 0, 0.580981015f, 0.7021915846f, 0.4115677815f, 0, 0.503756873f, 0.6330056931f, -0.5878203852f, 0, 0.4493712205f,
0.601390195f, 0.6606022552f, 0, -0.6878403724f, 0.09018890807f, -0.7202371714f, 0, -0.5958956522f, -0.6469350577f, 0.475797649f,
0,
-0.5127052122f, 0.1946921978f, -0.8361987284f, 0, -0.9911507142f, -0.05410276466f, -0.1212153153f, 0, -0.2149721042f,
0.9720882117f, -0.09397607749f, 0, -0.7518650936f, -0.5428057603f, 0.3742469607f, 0, 0.5237068895f, 0.8516377189f,
-0.02107817834f, 0, 0.6333504779f, 0.1926167129f, -0.7495104896f, 0, -0.06788241606f, 0.3998305789f, 0.9140719259f, 0,
-0.5538628599f, -0.4729896695f, -0.6852128902f, 0,
-0.7261455366f, -0.5911990757f, 0.3509933228f, 0, -0.9229274737f, -0.1782808786f, 0.3412049336f, 0, -0.6968815002f,
0.6511274338f, 0.3006480328f, 0, 0.9608044783f, -0.2098363234f, -0.1811724921f, 0, 0.06817146062f, -0.9743405129f,
0.2145069156f, 0, -0.3577285196f, -0.6697087264f, -0.6507845481f, 0, -0.1868621131f, 0.7648617052f, -0.6164974636f, 0,
-0.6541697588f, 0.3967914832f, 0.6439087246f, 0,
0.6993340405f, -0.6164538506f, 0.3618239211f, 0, -0.1546665739f, 0.6291283928f, 0.7617583057f, 0, -0.6841612949f,
-0.2580482182f, -0.6821542638f, 0, 0.5383980957f, 0.4258654885f, 0.7271630328f, 0, -0.5026987823f, -0.7939832935f,
-0.3418836993f, 0, 0.3202971715f, 0.2834415347f, 0.9039195862f, 0, 0.8683227101f, -0.0003762656404f, -0.4959995258f, 0,
0.791120031f, -0.08511045745f, 0.6057105799f, 0,
-0.04011016052f, -0.4397248749f, 0.8972364289f, 0, 0.9145119872f, 0.3579346169f, -0.1885487608f, 0, -0.9612039066f,
-0.2756484276f, 0.01024666929f, 0, 0.6510361721f, -0.2877799159f, -0.7023778346f, 0, -0.2041786351f, 0.7365237271f,
0.644859585f, 0, -0.7718263711f, 0.3790626912f, 0.5104855816f, 0, -0.3060082741f, -0.7692987727f, 0.5608371729f, 0,
0.454007341f, -0.5024843065f, 0.7357899537f, 0,
0.4816795475f, 0.6021208291f, -0.6367380315f, 0, 0.6961980369f, -0.3222197429f, 0.641469197f, 0, -0.6532160499f, -0.6781148932f,
0.3368515753f, 0, 0.5089301236f, -0.6154662304f, -0.6018234363f, 0, -0.1635919754f, -0.9133604627f, -0.372840892f, 0,
0.52408019f, -0.8437664109f, 0.1157505864f, 0, 0.5902587356f, 0.4983817807f, -0.6349883666f, 0, 0.5863227872f, 0.494764745f,
0.6414307729f, 0,
0.6779335087f, 0.2341345225f, 0.6968408593f, 0, 0.7177054546f, -0.6858979348f, 0.120178631f, 0, -0.5328819713f, -0.5205125012f,
0.6671608058f, 0, -0.8654874251f, -0.0700727088f, -0.4960053754f, 0, -0.2861810166f, 0.7952089234f, 0.5345495242f, 0,
-0.04849529634f, 0.9810836427f, -0.1874115585f, 0, -0.6358521667f, 0.6058348682f, 0.4781800233f, 0, 0.6254794696f,
-0.2861619734f, 0.7258696564f, 0,
-0.2585259868f, 0.5061949264f, -0.8227581726f, 0, 0.02136306781f, 0.5064016808f, -0.8620330371f, 0, 0.200111773f, 0.8599263484f,
0.4695550591f, 0, 0.4743561372f, 0.6014985084f, -0.6427953014f, 0, 0.6622993731f, -0.5202474575f, -0.5391679918f, 0,
0.08084972818f, -0.6532720452f, 0.7527940996f, 0, -0.6893687501f, 0.0592860349f, 0.7219805347f, 0, -0.1121887082f,
-0.9673185067f, 0.2273952515f, 0,
0.7344116094f, 0.5979668656f, -0.3210532909f, 0, 0.5789393465f, -0.2488849713f, 0.7764570201f, 0, 0.6988182827f, 0.3557169806f,
-0.6205791146f, 0, -0.8636845529f, -0.2748771249f, -0.4224826141f, 0, -0.4247027957f, -0.4640880967f, 0.777335046f, 0,
0.5257722489f, -0.8427017621f, 0.1158329937f, 0, 0.9343830603f, 0.316302472f, -0.1639543925f, 0, -0.1016836419f, -0.8057303073f,
-0.5834887393f, 0,
-0.6529238969f, 0.50602126f, -0.5635892736f, 0, -0.2465286165f, -0.9668205684f, -0.06694497494f, 0, -0.9776897119f,
-0.2099250524f, -0.007368825344f, 0, 0.7736893337f, 0.5734244712f, 0.2694238123f, 0, -0.6095087895f, 0.4995678998f,
0.6155736747f, 0, 0.5794535482f, 0.7434546771f, 0.3339292269f, 0, -0.8226211154f, 0.08142581855f, 0.5627293636f, 0,
-0.510385483f, 0.4703667658f, 0.7199039967f, 0,
-0.5764971849f, -0.07231656274f, -0.8138926898f, 0, 0.7250628871f, 0.3949971505f, -0.5641463116f, 0, -0.1525424005f,
0.4860840828f, -0.8604958341f, 0, -0.5550976208f, -0.4957820792f, 0.667882296f, 0, -0.1883614327f, 0.9145869398f, 0.357841725f,
0, 0.7625556724f, -0.5414408243f, -0.3540489801f, 0, -0.5870231946f, -0.3226498013f, -0.7424963803f, 0, 0.3051124198f,
0.2262544068f, -0.9250488391f, 0,
0.6379576059f, 0.577242424f, -0.5097070502f, 0, -0.5966775796f, 0.1454852398f, -0.7891830656f, 0, -0.658330573f, 0.6555487542f,
-0.3699414651f, 0, 0.7434892426f, 0.2351084581f, 0.6260573129f, 0, 0.5562114096f, 0.8264360377f, -0.0873632843f, 0,
-0.3028940016f, -0.8251527185f, 0.4768419182f, 0, 0.1129343818f, -0.985888439f, -0.1235710781f, 0, 0.5937652891f,
-0.5896813806f, 0.5474656618f, 0,
0.6757964092f, -0.5835758614f, -0.4502648413f, 0, 0.7242302609f, -0.1152719764f, 0.6798550586f, 0, -0.9511914166f,
0.0753623979f, -0.2992580792f, 0, 0.2539470961f, -0.1886339355f, 0.9486454084f, 0, 0.571433621f, -0.1679450851f, -0.8032795685f,
0, -0.06778234979f, 0.3978269256f, 0.9149531629f, 0, 0.6074972649f, 0.733060024f, -0.3058922593f, 0, -0.5435478392f,
0.1675822484f, 0.8224791405f, 0,
-0.5876678086f, -0.3380045064f, -0.7351186982f, 0, -0.7967562402f, 0.04097822706f, -0.6029098428f, 0, -0.1996350917f,
0.8706294745f, 0.4496111079f, 0, -0.02787660336f, -0.9106232682f, -0.4122962022f, 0, -0.7797625996f, -0.6257634692f,
0.01975775581f, 0, -0.5211232846f, 0.7401644346f, -0.4249554471f, 0, 0.8575424857f, 0.4053272873f, -0.3167501783f, 0,
0.1045223322f, 0.8390195772f, -0.5339674439f, 0,
0.3501822831f, 0.9242524096f, -0.1520850155f, 0, 0.1987849858f, 0.07647613266f, 0.9770547224f, 0, 0.7845996363f, 0.6066256811f,
-0.1280964233f, 0, 0.09006737436f, -0.9750989929f, -0.2026569073f, 0, -0.8274343547f, -0.542299559f, 0.1458203587f, 0,
-0.3485797732f, -0.415802277f, 0.840000362f, 0, -0.2471778936f, -0.7304819962f, -0.6366310879f, 0, -0.3700154943f,
0.8577948156f, 0.3567584454f, 0,
0.5913394901f, -0.548311967f, -0.5913303597f, 0, 0.1204873514f, -0.7626472379f, -0.6354935001f, 0, 0.616959265f, 0.03079647928f,
0.7863922953f, 0, 0.1258156836f, -0.6640829889f, -0.7369967419f, 0, -0.6477565124f, -0.1740147258f, -0.7417077429f, 0,
0.6217889313f, -0.7804430448f, -0.06547655076f, 0, 0.6589943422f, -0.6096987708f, 0.4404473475f, 0, -0.2689837504f,
-0.6732403169f, -0.6887635427f, 0,
-0.3849775103f, 0.5676542638f, 0.7277093879f, 0, 0.5754444408f, 0.8110471154f, -0.1051963504f, 0, 0.9141593684f, 0.3832947817f,
0.131900567f, 0, -0.107925319f, 0.9245493968f, 0.3654593525f, 0, 0.377977089f, 0.3043148782f, 0.8743716458f, 0, -0.2142885215f,
-0.8259286236f, 0.5214617324f, 0, 0.5802544474f, 0.4148098596f, -0.7008834116f, 0, -0.1982660881f, 0.8567161266f,
-0.4761596756f, 0,
-0.03381553704f, 0.3773180787f, -0.9254661404f, 0, -0.6867922841f, -0.6656597827f, 0.2919133642f, 0, 0.7731742607f,
-0.2875793547f, -0.5652430251f, 0, -0.09655941928f, 0.9193708367f, -0.3813575004f, 0, 0.2715702457f, -0.9577909544f,
-0.09426605581f, 0, 0.2451015704f, -0.6917998565f, -0.6792188003f, 0, 0.977700782f, -0.1753855374f, 0.1155036542f, 0,
-0.5224739938f, 0.8521606816f, 0.02903615945f, 0,
-0.7734880599f, -0.5261292347f, 0.3534179531f, 0, -0.7134492443f, -0.269547243f, 0.6467878011f, 0, 0.1644037271f, 0.5105846203f,
-0.8439637196f, 0, 0.6494635788f, 0.05585611296f, 0.7583384168f, 0, -0.4711970882f, 0.5017280509f, -0.7254255765f, 0,
-0.6335764307f, -0.2381686273f, -0.7361091029f, 0, -0.9021533097f, -0.270947803f, -0.3357181763f, 0, -0.3793711033f,
0.872258117f, 0.3086152025f, 0,
-0.6855598966f, -0.3250143309f, 0.6514394162f, 0, 0.2900942212f, -0.7799057743f, -0.5546100667f, 0, -0.2098319339f, 0.85037073f,
0.4825351604f, 0, -0.4592603758f, 0.6598504336f, -0.5947077538f, 0, 0.8715945488f, 0.09616365406f, -0.4807031248f, 0,
-0.6776666319f, 0.7118504878f, -0.1844907016f, 0, 0.7044377633f, 0.312427597f, 0.637304036f, 0, -0.7052318886f, -0.2401093292f,
-0.6670798253f, 0,
0.081921007f, -0.7207336136f, -0.6883545647f, 0, -0.6993680906f, -0.5875763221f, -0.4069869034f, 0, -0.1281454481f,
0.6419895885f, 0.7559286424f, 0, -0.6337388239f, -0.6785471501f, -0.3714146849f, 0, 0.5565051903f, -0.2168887573f,
-0.8020356851f, 0, -0.5791554484f, 0.7244372011f, -0.3738578718f, 0, 0.1175779076f, -0.7096451073f, 0.6946792478f, 0,
-0.6134619607f, 0.1323631078f, 0.7785527795f, 0,
0.6984635305f, -0.02980516237f, -0.715024719f, 0, 0.8318082963f, -0.3930171956f, 0.3919597455f, 0, 0.1469576422f,
0.05541651717f, -0.9875892167f, 0, 0.708868575f, -0.2690503865f, 0.6520101478f, 0, 0.2726053183f, 0.67369766f, -0.68688995f, 0,
-0.6591295371f, 0.3035458599f, -0.6880466294f, 0, 0.4815131379f, -0.7528270071f, 0.4487723203f, 0, 0.9430009463f, 0.1675647412f,
-0.2875261255f, 0,
0.434802957f, 0.7695304522f, -0.4677277752f, 0, 0.3931996188f, 0.594473625f, 0.7014236729f, 0, 0.7254336655f, -0.603925654f,
0.3301814672f, 0, 0.7590235227f, -0.6506083235f, 0.02433313207f, 0, -0.8552768592f, -0.3430042733f, 0.3883935666f, 0,
-0.6139746835f, 0.6981725247f, 0.3682257648f, 0, -0.7465905486f, -0.5752009504f, 0.3342849376f, 0, 0.5730065677f, 0.810555537f,
-0.1210916791f, 0,
-0.9225877367f, -0.3475211012f, -0.167514036f, 0, -0.7105816789f, -0.4719692027f, -0.5218416899f, 0, -0.08564609717f,
0.3583001386f, 0.929669703f, 0, -0.8279697606f, -0.2043157126f, 0.5222271202f, 0, 0.427944023f, 0.278165994f, 0.8599346446f, 0,
0.5399079671f, -0.7857120652f, -0.3019204161f, 0, 0.5678404253f, -0.5495413974f, -0.6128307303f, 0, -0.9896071041f,
0.1365639107f, -0.04503418428f, 0,
-0.6154342638f, -0.6440875597f, 0.4543037336f, 0, 0.1074204368f, -0.7946340692f, 0.5975094525f, 0, -0.3595449969f,
-0.8885529948f, 0.28495784f, 0, -0.2180405296f, 0.1529888965f, 0.9638738118f, 0, -0.7277432317f, -0.6164050508f, -0.3007234646f,
0, 0.7249729114f, -0.00669719484f, 0.6887448187f, 0, -0.5553659455f, -0.5336586252f, 0.6377908264f, 0, 0.5137558015f,
0.7976208196f, -0.3160000073f, 0,
-0.3794024848f, 0.9245608561f, -0.03522751494f, 0, 0.8229248658f, 0.2745365933f, -0.4974176556f, 0, -0.5404114394f,
0.6091141441f, 0.5804613989f, 0, 0.8036581901f, -0.2703029469f, 0.5301601931f, 0, 0.6044318879f, 0.6832968393f, 0.4095943388f,
0, 0.06389988817f, 0.9658208605f, -0.2512108074f, 0, 0.1087113286f, 0.7402471173f, -0.6634877936f, 0, -0.713427712f,
-0.6926784018f, 0.1059128479f, 0,
0.6458897819f, -0.5724548511f, -0.5050958653f, 0, -0.6553931414f, 0.7381471625f, 0.159995615f, 0, 0.3910961323f, 0.9188871375f,
-0.05186755998f, 0, -0.4879022471f, -0.5904376907f, 0.6429111375f, 0, 0.6014790094f, 0.7707441366f, -0.2101820095f, 0,
-0.5677173047f, 0.7511360995f, 0.3368851762f, 0, 0.7858573506f, 0.226674665f, 0.5753666838f, 0, -0.4520345543f, -0.604222686f,
-0.6561857263f, 0,
0.002272116345f, 0.4132844051f, -0.9105991643f, 0, -0.5815751419f, -0.5162925989f, 0.6286591339f, 0, -0.03703704785f,
0.8273785755f, 0.5604221175f, 0, -0.5119692504f, 0.7953543429f, -0.3244980058f, 0, -0.2682417366f, -0.9572290247f,
-0.1084387619f, 0, -0.2322482736f, -0.9679131102f, -0.09594243324f, 0, 0.3554328906f, -0.8881505545f, 0.2913006227f, 0,
0.7346520519f, -0.4371373164f, 0.5188422971f, 0,
0.9985120116f, 0.04659011161f, -0.02833944577f, 0, -0.3727687496f, -0.9082481361f, 0.1900757285f, 0, 0.91737377f,
-0.3483642108f, 0.1925298489f, 0, 0.2714911074f, 0.4147529736f, -0.8684886582f, 0, 0.5131763485f, -0.7116334161f, 0.4798207128f,
0, -0.8737353606f, 0.18886992f, -0.4482350644f, 0, 0.8460043821f, -0.3725217914f, 0.3814499973f, 0, 0.8978727456f,
-0.1780209141f, -0.4026575304f, 0,
0.2178065647f, -0.9698322841f, -0.1094789531f, 0, -0.1518031304f, -0.7788918132f, -0.6085091231f, 0, -0.2600384876f,
-0.4755398075f, -0.8403819825f, 0, 0.572313509f, -0.7474340931f, -0.3373418503f, 0, -0.7174141009f, 0.1699017182f,
-0.6756111411f, 0, -0.684180784f, 0.02145707593f, -0.7289967412f, 0, -0.2007447902f, 0.06555605789f, -0.9774476623f, 0,
-0.1148803697f, -0.8044887315f, 0.5827524187f, 0,
-0.7870349638f, 0.03447489231f, 0.6159443543f, 0, -0.2015596421f, 0.6859872284f, 0.6991389226f, 0, -0.08581082512f,
-0.10920836f, -0.9903080513f, 0, 0.5532693395f, 0.7325250401f, -0.396610771f, 0, -0.1842489331f, -0.9777375055f, -0.1004076743f,
0, 0.0775473789f, -0.9111505856f, 0.4047110257f, 0, 0.1399838409f, 0.7601631212f, -0.6344734459f, 0, 0.4484419361f,
-0.845289248f, 0.2904925424f, 0
};
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float FastMin(float a, float b)
{
    // Smaller of two floats; same comparison as the original ternary form.
    if (a < b)
        return a;
    return b;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float FastMax(float a, float b)
{
    // Larger of two floats; same comparison as the original ternary form.
    if (a > b)
        return a;
    return b;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float FastAbs(float f)
{
    // Absolute value via sign test, avoiding a Math.Abs call.
    if (f < 0)
        return -f;
    return f;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float FastSqrt(float f)
{
    // Double-precision sqrt narrowed back to float.
    double root = Math.Sqrt(f);
    return (float)root;
}
[MethodImpl(INLINE)]
private static int FastFloor(FNLfloat f)
{
    // Truncation equals floor for non-negative input; negatives step down one.
    // NOTE: for exact negative integers this returns value - 1, matching the
    // original implementation's behavior.
    int truncated = (int)f;
    if (f >= 0)
        return truncated;
    return truncated - 1;
}
[MethodImpl(INLINE)]
private static int FastRound(FNLfloat f)
{
    // Round-half-away-from-zero via an offset then truncation.
    if (f >= 0)
        return (int)(f + 0.5f);
    return (int)(f - 0.5f);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float Lerp(float a, float b, float t)
{
    // Linear interpolation: a at t = 0, b at t = 1 (unclamped outside [0, 1]).
    float span = b - a;
    return a + t * span;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float InterpHermite(float t)
{
    // Hermite smoothstep 3t^2 - 2t^3; keeps the original (t*t)*(...) evaluation order.
    float tt = t * t;
    return tt * (3 - 2 * t);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float InterpQuintic(float t)
{
    // Quintic fade 6t^5 - 15t^4 + 10t^3; ((t*t)*t) grouping matches the original.
    float t3 = t * t * t;
    return t3 * (t * (t * 6 - 15) + 10);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float CubicLerp(float a, float b, float c, float d, float t)
{
    // Cubic interpolation across four samples: returns b at t = 0 and c at t = 1.
    float p = (d - c) - (a - b);
    float t2 = t * t;
    return t2 * t * p + t2 * ((a - b) - p) + t * (c - a) + b;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float PingPong(float t)
{
    // Fold t into a triangle wave of period 2: rises 0..1, then falls back to 0.
    t -= (int)(t * 0.5f) * 2;
    if (t < 1)
        return t;
    return 2 - t;
}
private void CalculateFractalBounding()
{
    // Sum the geometric series of per-octave amplitudes (first octave = 1) and
    // store its reciprocal, so a full fractal sum is scaled back into the
    // single-octave output range.
    float gain = FastAbs(mGain);
    float octaveAmp = gain;
    float totalAmp = 1.0f;
    for (int octave = 1; octave < mOctaves; octave++)
    {
        totalAmp += octaveAmp;
        octaveAmp *= gain;
    }
    mFractalBounding = 1 / totalAmp;
}
// Hashing
// Large prime multipliers applied to integer lattice coordinates before they
// are combined in Hash()/GradCoord() (see e.g. `i *= PrimeX` in SingleSimplex),
// so the per-axis contributions decorrelate.
private const int PrimeX = 501125321;
private const int PrimeY = 1136930381;
private const int PrimeZ = 1720413743;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int Hash(int seed, int xPrimed, int yPrimed)
{
    // XOR-fold seed with the prime-multiplied coordinates, then scramble
    // with an integer multiply (unchecked wrap-around is intended).
    int h = seed ^ xPrimed;
    h ^= yPrimed;
    h *= 0x27d4eb2d;
    return h;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int Hash(int seed, int xPrimed, int yPrimed, int zPrimed)
{
    // 3D variant of Hash: fold all three primed coordinates into the seed,
    // then scramble with the same multiplier (unchecked wrap-around intended).
    int h = seed ^ xPrimed;
    h ^= yPrimed;
    h ^= zPrimed;
    h *= 0x27d4eb2d;
    return h;
}
[MethodImpl(INLINE)]
private static float ValCoord(int seed, int xPrimed, int yPrimed)
{
    // Value noise at one lattice point: square and xorshift the hash to spread
    // its bits, then map the full int range onto roughly [-1, 1).
    int h = Hash(seed, xPrimed, yPrimed);
    h *= h;
    h ^= h << 19;
    return h * (1 / 2147483648.0f);
}
[MethodImpl(INLINE)]
private static float ValCoord(int seed, int xPrimed, int yPrimed, int zPrimed)
{
    // 3D variant: identical bit-mixing as the 2D overload, on the 3D hash.
    int h = Hash(seed, xPrimed, yPrimed, zPrimed);
    h *= h;
    h ^= h << 19;
    return h * (1 / 2147483648.0f);
}
[MethodImpl(INLINE)]
private static float GradCoord(int seed, int xPrimed, int yPrimed, float xd, float yd)
{
    // Select one of 128 gradient (x, y) pairs from Gradients2D and return its
    // dot product with the offset (xd, yd). The mask 127 << 1 keeps even indices.
    int hash = Hash(seed, xPrimed, yPrimed);
    hash ^= hash >> 15;
    hash &= 127 << 1;
    return xd * Gradients2D[hash] + yd * Gradients2D[hash | 1];
}
[MethodImpl(INLINE)]
private static float GradCoord(int seed, int xPrimed, int yPrimed, int zPrimed, float xd, float yd, float zd)
{
    // Select one of 64 gradient entries (stride 4: x, y, z, padding) from
    // Gradients3D and dot it with the offset (xd, yd, zd).
    int hash = Hash(seed, xPrimed, yPrimed, zPrimed);
    hash ^= hash >> 15;
    hash &= 63 << 2;
    return xd * Gradients3D[hash] + yd * Gradients3D[hash | 1] + zd * Gradients3D[hash | 2];
}
[MethodImpl(INLINE)]
private static void GradCoordOut(int seed, int xPrimed, int yPrimed, out float xo, out float yo)
{
    // Emit one of 256 random 2D vectors (interleaved x, y pairs in RandVecs2D).
    int index = Hash(seed, xPrimed, yPrimed) & (255 << 1);
    xo = RandVecs2D[index];
    yo = RandVecs2D[index | 1];
}
[MethodImpl(INLINE)]
private static void GradCoordOut(int seed, int xPrimed, int yPrimed, int zPrimed, out float xo, out float yo, out float zo)
{
    // Emit one of 256 random 3D vectors (stride-4 entries in RandVecs3D;
    // the fourth float of each entry is padding).
    int index = Hash(seed, xPrimed, yPrimed, zPrimed) & (255 << 2);
    xo = RandVecs3D[index];
    yo = RandVecs3D[index | 1];
    zo = RandVecs3D[index | 2];
}
[MethodImpl(INLINE)]
private static void GradCoordDual(int seed, int xPrimed, int yPrimed, float xd, float yd, out float xo, out float yo)
{
    // Dot the offset with one Gradients2D entry, then scale a second random
    // direction (RandVecs2D, chosen from higher hash bits) by that magnitude.
    int hash = Hash(seed, xPrimed, yPrimed);
    int gradIndex = hash & (127 << 1);
    int vecIndex = (hash >> 7) & (255 << 1);
    float magnitude = xd * Gradients2D[gradIndex] + yd * Gradients2D[gradIndex | 1];
    xo = magnitude * RandVecs2D[vecIndex];
    yo = magnitude * RandVecs2D[vecIndex | 1];
}
[MethodImpl(INLINE)]
private static void GradCoordDual(
    int seed, int xPrimed, int yPrimed, int zPrimed, float xd, float yd, float zd, out float xo, out float yo, out float zo
)
{
    // 3D variant: dot the offset with one Gradients3D entry, then scale a
    // second random direction (RandVecs3D, from higher hash bits) by that value.
    int hash = Hash(seed, xPrimed, yPrimed, zPrimed);
    int gradIndex = hash & (63 << 2);
    int vecIndex = (hash >> 6) & (255 << 2);
    float magnitude = xd * Gradients3D[gradIndex] + yd * Gradients3D[gradIndex | 1] + zd * Gradients3D[gradIndex | 2];
    xo = magnitude * RandVecs3D[vecIndex];
    yo = magnitude * RandVecs3D[vecIndex | 1];
    zo = magnitude * RandVecs3D[vecIndex | 2];
}
// Generic noise gen
// Dispatches one octave of 2D noise to the generator for the configured type.
private float GenNoiseSingle(int seed, FNLfloat x, FNLfloat y)
{
    if (mNoiseType == NoiseType.OpenSimplex2)
        return SingleSimplex(seed, x, y);
    if (mNoiseType == NoiseType.OpenSimplex2S)
        return SingleOpenSimplex2S(seed, x, y);
    if (mNoiseType == NoiseType.Cellular)
        return SingleCellular(seed, x, y);
    if (mNoiseType == NoiseType.Perlin)
        return SinglePerlin(seed, x, y);
    if (mNoiseType == NoiseType.ValueCubic)
        return SingleValueCubic(seed, x, y);
    if (mNoiseType == NoiseType.Value)
        return SingleValue(seed, x, y);
    return 0;
}
// Dispatches one octave of 3D noise to the generator for the configured type.
private float GenNoiseSingle(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    if (mNoiseType == NoiseType.OpenSimplex2)
        return SingleOpenSimplex2(seed, x, y, z);
    if (mNoiseType == NoiseType.OpenSimplex2S)
        return SingleOpenSimplex2S(seed, x, y, z);
    if (mNoiseType == NoiseType.Cellular)
        return SingleCellular(seed, x, y, z);
    if (mNoiseType == NoiseType.Perlin)
        return SinglePerlin(seed, x, y, z);
    if (mNoiseType == NoiseType.ValueCubic)
        return SingleValueCubic(seed, x, y, z);
    if (mNoiseType == NoiseType.Value)
        return SingleValue(seed, x, y, z);
    return 0;
}
// Noise Coordinate Transforms (frequency, and possible skew or rotation)
[MethodImpl(INLINE)]
private void TransformNoiseCoordinate(ref FNLfloat x, ref FNLfloat y)
{
    // Scale into noise-frequency space first.
    x *= mFrequency;
    y *= mFrequency;
    // Simplex-family noise skews the square grid into simplex space here,
    // so the single-octave generators can skip the skew step.
    if (mNoiseType == NoiseType.OpenSimplex2 || mNoiseType == NoiseType.OpenSimplex2S)
    {
        const FNLfloat SQRT3 = (FNLfloat)1.7320508075688772935274463415059;
        const FNLfloat F2 = 0.5f * (SQRT3 - 1);
        FNLfloat skew = (x + y) * F2;
        x += skew;
        y += skew;
    }
}
// Applies frequency scaling plus the configured 3D rotation/skew before sampling.
[MethodImpl(INLINE)]
private void TransformNoiseCoordinate(ref FNLfloat x, ref FNLfloat y, ref FNLfloat z)
{
    x *= mFrequency;
    y *= mFrequency;
    z *= mFrequency;
    switch (mTransformType3D)
    {
        case TransformType3D.ImproveXYPlanes:
        {
            // Rotation tuned for sampling along XY planes. Statement order matters:
            // z is rescaled before being subtracted from x and y.
            FNLfloat xy = x + y;
            FNLfloat s2 = xy * -(FNLfloat)0.211324865405187;
            z *= (FNLfloat)0.577350269189626;
            x += s2 - z;
            y = y + s2 - z;
            z += xy * (FNLfloat)0.577350269189626;
        }
        break;
        case TransformType3D.ImproveXZPlanes:
        {
            // Same rotation with the roles of y and z swapped (XZ-plane sampling).
            FNLfloat xz = x + z;
            FNLfloat s2 = xz * -(FNLfloat)0.211324865405187;
            y *= (FNLfloat)0.577350269189626;
            x += s2 - y;
            z += s2 - y;
            y += xz * (FNLfloat)0.577350269189626;
        }
        break;
        case TransformType3D.DefaultOpenSimplex2:
        {
            // Standard OpenSimplex2 lattice orientation.
            const FNLfloat R3 = (FNLfloat)(2.0 / 3.0);
            FNLfloat r = (x + y + z) * R3; // Rotation, not skew
            x = r - x;
            y = r - y;
            z = r - z;
        }
        break;
        default:
            break;
    }
}
private void UpdateTransformType3D()
{
    // An explicit rotation choice wins; otherwise OpenSimplex2 variants get
    // their default rotation and everything else gets none.
    if (mRotationType3D == RotationType3D.ImproveXYPlanes)
        mTransformType3D = TransformType3D.ImproveXYPlanes;
    else if (mRotationType3D == RotationType3D.ImproveXZPlanes)
        mTransformType3D = TransformType3D.ImproveXZPlanes;
    else if (mNoiseType == NoiseType.OpenSimplex2 || mNoiseType == NoiseType.OpenSimplex2S)
        mTransformType3D = TransformType3D.DefaultOpenSimplex2;
    else
        mTransformType3D = TransformType3D.None;
}
// Domain Warp Coordinate Transforms
[MethodImpl(INLINE)]
private void TransformDomainWarpCoordinate(ref FNLfloat x, ref FNLfloat y)
{
    // OpenSimplex2-style warps skew the square lattice exactly like the noise path.
    if (mDomainWarpType == DomainWarpType.OpenSimplex2 || mDomainWarpType == DomainWarpType.OpenSimplex2Reduced)
    {
        const FNLfloat SQRT3 = (FNLfloat)1.7320508075688772935274463415059;
        const FNLfloat F2 = 0.5f * (SQRT3 - 1);
        FNLfloat skew = (x + y) * F2;
        x += skew;
        y += skew;
    }
}
// Applies the configured 3D rotation/skew to domain-warp input coordinates.
// Mirrors TransformNoiseCoordinate's 3D cases, but keyed on mWarpTransformType3D
// and without frequency scaling.
[MethodImpl(INLINE)]
private void TransformDomainWarpCoordinate(ref FNLfloat x, ref FNLfloat y, ref FNLfloat z)
{
    switch (mWarpTransformType3D)
    {
        case TransformType3D.ImproveXYPlanes:
        {
            // Statement order matters: z is rescaled before being subtracted.
            FNLfloat xy = x + y;
            FNLfloat s2 = xy * -(FNLfloat)0.211324865405187;
            z *= (FNLfloat)0.577350269189626;
            x += s2 - z;
            y = y + s2 - z;
            z += xy * (FNLfloat)0.577350269189626;
        }
        break;
        case TransformType3D.ImproveXZPlanes:
        {
            // Same rotation with y and z roles swapped.
            FNLfloat xz = x + z;
            FNLfloat s2 = xz * -(FNLfloat)0.211324865405187;
            y *= (FNLfloat)0.577350269189626;
            x += s2 - y;
            z += s2 - y;
            y += xz * (FNLfloat)0.577350269189626;
        }
        break;
        case TransformType3D.DefaultOpenSimplex2:
        {
            const FNLfloat R3 = (FNLfloat)(2.0 / 3.0);
            FNLfloat r = (x + y + z) * R3; // Rotation, not skew
            x = r - x;
            y = r - y;
            z = r - z;
        }
        break;
        default:
            break;
    }
}
private void UpdateWarpTransformType3D()
{
    // Same precedence as UpdateTransformType3D, keyed on the warp type instead.
    if (mRotationType3D == RotationType3D.ImproveXYPlanes)
        mWarpTransformType3D = TransformType3D.ImproveXYPlanes;
    else if (mRotationType3D == RotationType3D.ImproveXZPlanes)
        mWarpTransformType3D = TransformType3D.ImproveXZPlanes;
    else if (mDomainWarpType == DomainWarpType.OpenSimplex2 || mDomainWarpType == DomainWarpType.OpenSimplex2Reduced)
        mWarpTransformType3D = TransformType3D.DefaultOpenSimplex2;
    else
        mWarpTransformType3D = TransformType3D.None;
}
// Fractal FBm
// Sums mOctaves octaves of 2D noise; each octave's frequency grows by
// mLacunarity and its amplitude shrinks by mGain (optionally weighted by the
// previous octave's value via mWeightedStrength).
private float GenFractalFBm(FNLfloat x, FNLfloat y)
{
    int octaveSeed = mSeed;
    float total = 0;
    float amplitude = mFractalBounding;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        float signal = GenNoiseSingle(octaveSeed++, x, y);
        total += signal * amplitude;
        amplitude *= Lerp(1.0f, FastMin(signal + 1, 2) * 0.5f, mWeightedStrength);
        x *= mLacunarity;
        y *= mLacunarity;
        amplitude *= mGain;
    }
    return total;
}
// Sums mOctaves octaves of 3D noise; each octave's frequency grows by
// mLacunarity and its amplitude shrinks by mGain (optionally weighted by the
// previous octave's value via mWeightedStrength).
private float GenFractalFBm(FNLfloat x, FNLfloat y, FNLfloat z)
{
    int seed = mSeed;
    float sum = 0;
    float amp = mFractalBounding;
    for (int i = 0; i < mOctaves; i++)
    {
        float noise = GenNoiseSingle(seed++, x, y, z);
        sum += noise * amp;
        // Clamp (noise + 1) to 2 so weighted strength cannot over-amplify an
        // octave when a generator returns values above 1 — this matches the 2D
        // overload, which uses FastMin(noise + 1, 2); the previous 3D code
        // omitted the clamp.
        amp *= Lerp(1.0f, FastMin(noise + 1, 2) * 0.5f, mWeightedStrength);
        x *= mLacunarity;
        y *= mLacunarity;
        z *= mLacunarity;
        amp *= mGain;
    }
    return sum;
}
// Fractal Ridged
// Ridged fractal in 2D: each octave contributes 1 - 2*|noise|, turning
// zero-crossings of the base noise into sharp ridges.
private float GenFractalRidged(FNLfloat x, FNLfloat y)
{
    int octaveSeed = mSeed;
    float total = 0;
    float amplitude = mFractalBounding;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        float folded = FastAbs(GenNoiseSingle(octaveSeed++, x, y));
        total += (folded * -2 + 1) * amplitude;
        amplitude *= Lerp(1.0f, 1 - folded, mWeightedStrength);
        x *= mLacunarity;
        y *= mLacunarity;
        amplitude *= mGain;
    }
    return total;
}
// Ridged fractal in 3D: each octave contributes 1 - 2*|noise|.
private float GenFractalRidged(FNLfloat x, FNLfloat y, FNLfloat z)
{
    int octaveSeed = mSeed;
    float total = 0;
    float amplitude = mFractalBounding;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        float folded = FastAbs(GenNoiseSingle(octaveSeed++, x, y, z));
        total += (folded * -2 + 1) * amplitude;
        amplitude *= Lerp(1.0f, 1 - folded, mWeightedStrength);
        x *= mLacunarity;
        y *= mLacunarity;
        z *= mLacunarity;
        amplitude *= mGain;
    }
    return total;
}
// Fractal PingPong
/// <summary>
/// 2D ping-pong fractal: each octave's value is bounced through PingPong
/// (scaled by mPingPongStength) before being accumulated, giving a banded
/// look.
/// </summary>
private float GenFractalPingPong(FNLfloat x, FNLfloat y)
{
    int octaveSeed = mSeed;
    float amplitude = mFractalBounding;
    float total = 0;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        float bounced = PingPong((GenNoiseSingle(octaveSeed++, x, y) + 1) * mPingPongStength);
        // Re-center the [0,1] ping-pong output to [-1,1].
        total += (bounced - 0.5f) * 2 * amplitude;
        amplitude *= Lerp(1.0f, bounced, mWeightedStrength);
        x *= mLacunarity;
        y *= mLacunarity;
        amplitude *= mGain;
    }
    return total;
}
/// <summary>
/// 3D ping-pong fractal: each octave's value is bounced through PingPong
/// (scaled by mPingPongStength) before being accumulated, giving a banded
/// look.
/// </summary>
private float GenFractalPingPong(FNLfloat x, FNLfloat y, FNLfloat z)
{
    int octaveSeed = mSeed;
    float amplitude = mFractalBounding;
    float total = 0;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        float bounced = PingPong((GenNoiseSingle(octaveSeed++, x, y, z) + 1) * mPingPongStength);
        // Re-center the [0,1] ping-pong output to [-1,1].
        total += (bounced - 0.5f) * 2 * amplitude;
        amplitude *= Lerp(1.0f, bounced, mWeightedStrength);
        x *= mLacunarity;
        y *= mLacunarity;
        z *= mLacunarity;
        amplitude *= mGain;
    }
    return total;
}
// Simplex/OpenSimplex2 Noise
/// <summary>
/// 2D simplex noise for a single octave. Input coordinates are expected
/// to be pre-skewed (see the comment below); returns roughly [-1, 1].
/// </summary>
private float SingleSimplex(int seed, FNLfloat x, FNLfloat y)
{
    // 2D OpenSimplex2 case uses the same algorithm as ordinary Simplex.
    const float SQRT3 = 1.7320508075688772935274463415059f;
    const float G2 = (3 - SQRT3) / 6;
    /*
     * --- Skew moved to TransformNoiseCoordinate method ---
     * const FNfloat F2 = 0.5f * (SQRT3 - 1);
     * FNfloat s = (x + y) * F2;
     * x += s; y += s;
     */
    // Locate the containing skewed unit cell, then unskew the in-cell
    // offset back into noise space.
    int i = FastFloor(x);
    int j = FastFloor(y);
    float xi = (float)(x - i);
    float yi = (float)(y - j);
    float t = (xi + yi) * G2;
    float x0 = (float)(xi - t);
    float y0 = (float)(yi - t);
    // Convert lattice coords to primed hash coordinates.
    i *= PrimeX;
    j *= PrimeY;
    // n0/n1/n2: contributions from the three corners of the simplex.
    float n0, n1, n2;
    float a = 0.5f - x0 * x0 - y0 * y0;
    if (a <= 0) n0 = 0;
    else
    {
        n0 = (a * a) * (a * a) * GradCoord(seed, i, j, x0, y0);
    }
    // Falloff for the far corner, derived algebraically from 'a' and 't'.
    float c = (float)(2 * (1 - 2 * G2) * (1 / G2 - 2)) * t + ((float)(-2 * (1 - 2 * G2) * (1 - 2 * G2)) + a);
    if (c <= 0) n2 = 0;
    else
    {
        float x2 = x0 + (2 * (float)G2 - 1);
        float y2 = y0 + (2 * (float)G2 - 1);
        n2 = (c * c) * (c * c) * GradCoord(seed, i + PrimeX, j + PrimeY, x2, y2);
    }
    // Middle corner depends on which triangle of the cell we are in.
    if (y0 > x0)
    {
        float x1 = x0 + (float)G2;
        float y1 = y0 + ((float)G2 - 1);
        float b = 0.5f - x1 * x1 - y1 * y1;
        if (b <= 0) n1 = 0;
        else
        {
            n1 = (b * b) * (b * b) * GradCoord(seed, i, j + PrimeY, x1, y1);
        }
    }
    else
    {
        float x1 = x0 + ((float)G2 - 1);
        float y1 = y0 + (float)G2;
        float b = 0.5f - x1 * x1 - y1 * y1;
        if (b <= 0) n1 = 0;
        else
        {
            n1 = (b * b) * (b * b) * GradCoord(seed, i + PrimeX, j, x1, y1);
        }
    }
    // Scale summed corner contributions into roughly [-1, 1].
    return (n0 + n1 + n2) * 99.83685446303647f;
}
/// <summary>
/// 3D OpenSimplex2 noise for a single octave. Input coordinates are
/// expected to be pre-rotated (see the comment below); returns roughly
/// [-1, 1].
/// </summary>
private float SingleOpenSimplex2(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    // 3D OpenSimplex2 case uses two offset rotated cube grids.
    /*
     * --- Rotation moved to TransformNoiseCoordinate method ---
     * const FNfloat R3 = (FNfloat)(2.0 / 3.0);
     * FNfloat r = (x + y + z) * R3; // Rotation, not skew
     * x = r - x; y = r - y; z = r - z;
     */
    // Nearest lattice point and signed offsets from it.
    int i = FastRound(x);
    int j = FastRound(y);
    int k = FastRound(z);
    float x0 = (float)(x - i);
    float y0 = (float)(y - j);
    float z0 = (float)(z - k);
    // Per-axis sign (-1 or 1) of the offset, and its absolute value.
    int xNSign = (int)(-1.0f - x0) | 1;
    int yNSign = (int)(-1.0f - y0) | 1;
    int zNSign = (int)(-1.0f - z0) | 1;
    float ax0 = xNSign * -x0;
    float ay0 = yNSign * -y0;
    float az0 = zNSign * -z0;
    i *= PrimeX;
    j *= PrimeY;
    k *= PrimeZ;
    float value = 0;
    float a = (0.6f - x0 * x0) - (y0 * y0 + z0 * z0);
    // Two iterations: one per offset cube grid. State (offsets, signs,
    // primed coords, seed) is re-derived for the second grid at loop end.
    for (int l = 0;; l++)
    {
        if (a > 0)
        {
            value += (a * a) * (a * a) * GradCoord(seed, i, j, k, x0, y0, z0);
        }
        // Contribution from the neighbor along the dominant axis.
        if (ax0 >= ay0 && ax0 >= az0)
        {
            float b = a + ax0 + ax0;
            if (b > 1)
            {
                b -= 1;
                value += (b * b) * (b * b) * GradCoord(seed, i - xNSign * PrimeX, j, k, x0 + xNSign, y0, z0);
            }
        }
        else if (ay0 > ax0 && ay0 >= az0)
        {
            float b = a + ay0 + ay0;
            if (b > 1)
            {
                b -= 1;
                value += (b * b) * (b * b) * GradCoord(seed, i, j - yNSign * PrimeY, k, x0, y0 + yNSign, z0);
            }
        }
        else
        {
            float b = a + az0 + az0;
            if (b > 1)
            {
                b -= 1;
                value += (b * b) * (b * b) * GradCoord(seed, i, j, k - zNSign * PrimeZ, x0, y0, z0 + zNSign);
            }
        }
        if (l == 1) break;
        // Switch to the second (offset) grid: mirror offsets about the
        // half-cell, step the primed coords, flip signs, decorrelate seed.
        ax0 = 0.5f - ax0;
        ay0 = 0.5f - ay0;
        az0 = 0.5f - az0;
        x0 = xNSign * ax0;
        y0 = yNSign * ay0;
        z0 = zNSign * az0;
        a += (0.75f - ax0) - (ay0 + az0);
        i += (xNSign >> 1) & PrimeX;
        j += (yNSign >> 1) & PrimeY;
        k += (zNSign >> 1) & PrimeZ;
        xNSign = -xNSign;
        yNSign = -yNSign;
        zNSign = -zNSign;
        seed = ~seed;
    }
    // Scale into roughly [-1, 1].
    return value * 32.69428253173828125f;
}
// OpenSimplex2S Noise
/// <summary>
/// 2D OpenSimplex2S ("smooth") noise for a single octave. Input is
/// expected to be pre-skewed (see comment below); returns roughly [-1, 1].
/// </summary>
private float SingleOpenSimplex2S(int seed, FNLfloat x, FNLfloat y)
{
    // 2D OpenSimplex2S case is a modified 2D simplex noise.
    const FNLfloat SQRT3 = (FNLfloat)1.7320508075688772935274463415059;
    const FNLfloat G2 = (3 - SQRT3) / 6;
    /*
     * --- Skew moved to TransformNoiseCoordinate method ---
     * const FNfloat F2 = 0.5f * (SQRT3 - 1);
     * FNfloat s = (x + y) * F2;
     * x += s; y += s;
     */
    // Containing skewed unit cell and the in-cell offset.
    int i = FastFloor(x);
    int j = FastFloor(y);
    float xi = (float)(x - i);
    float yi = (float)(y - j);
    i *= PrimeX;
    j *= PrimeY;
    int i1 = i + PrimeX;
    int j1 = j + PrimeY;
    // Unskew to noise space; the base and far corners always contribute
    // (their falloff never reaches zero inside the cell).
    float t = (xi + yi) * (float)G2;
    float x0 = xi - t;
    float y0 = yi - t;
    float a0 = (2.0f / 3.0f) - x0 * x0 - y0 * y0;
    float value = (a0 * a0) * (a0 * a0) * GradCoord(seed, i, j, x0, y0);
    float a1 = (float)(2 * (1 - 2 * G2) * (1 / G2 - 2)) * t + ((float)(-2 * (1 - 2 * G2) * (1 - 2 * G2)) + a0);
    float x1 = x0 - (float)(1 - 2 * G2);
    float y1 = y0 - (float)(1 - 2 * G2);
    value += (a1 * a1) * (a1 * a1) * GradCoord(seed, i1, j1, x1, y1);
    // Nested conditionals were faster than compact bit logic/arithmetic.
    // Pick the two remaining contributing vertices by cell region.
    float xmyi = xi - yi;
    if (t > G2)
    {
        if (xi + xmyi > 1)
        {
            float x2 = x0 + (float)(3 * G2 - 2);
            float y2 = y0 + (float)(3 * G2 - 1);
            float a2 = (2.0f / 3.0f) - x2 * x2 - y2 * y2;
            if (a2 > 0)
            {
                value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i + (PrimeX << 1), j + PrimeY, x2, y2);
            }
        }
        else
        {
            float x2 = x0 + (float)G2;
            float y2 = y0 + (float)(G2 - 1);
            float a2 = (2.0f / 3.0f) - x2 * x2 - y2 * y2;
            if (a2 > 0)
            {
                value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i, j + PrimeY, x2, y2);
            }
        }
        if (yi - xmyi > 1)
        {
            float x3 = x0 + (float)(3 * G2 - 1);
            float y3 = y0 + (float)(3 * G2 - 2);
            float a3 = (2.0f / 3.0f) - x3 * x3 - y3 * y3;
            if (a3 > 0)
            {
                value += (a3 * a3) * (a3 * a3) * GradCoord(seed, i + PrimeX, j + (PrimeY << 1), x3, y3);
            }
        }
        else
        {
            float x3 = x0 + (float)(G2 - 1);
            float y3 = y0 + (float)G2;
            float a3 = (2.0f / 3.0f) - x3 * x3 - y3 * y3;
            if (a3 > 0)
            {
                value += (a3 * a3) * (a3 * a3) * GradCoord(seed, i + PrimeX, j, x3, y3);
            }
        }
    }
    else
    {
        if (xi + xmyi < 0)
        {
            float x2 = x0 + (float)(1 - G2);
            float y2 = y0 - (float)G2;
            float a2 = (2.0f / 3.0f) - x2 * x2 - y2 * y2;
            if (a2 > 0)
            {
                value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i - PrimeX, j, x2, y2);
            }
        }
        else
        {
            float x2 = x0 + (float)(G2 - 1);
            float y2 = y0 + (float)G2;
            float a2 = (2.0f / 3.0f) - x2 * x2 - y2 * y2;
            if (a2 > 0)
            {
                value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i + PrimeX, j, x2, y2);
            }
        }
        if (yi < xmyi)
        {
            float x2 = x0 - (float)G2;
            float y2 = y0 - (float)(G2 - 1);
            float a2 = (2.0f / 3.0f) - x2 * x2 - y2 * y2;
            if (a2 > 0)
            {
                value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i, j - PrimeY, x2, y2);
            }
        }
        else
        {
            float x2 = x0 + (float)G2;
            float y2 = y0 + (float)(G2 - 1);
            float a2 = (2.0f / 3.0f) - x2 * x2 - y2 * y2;
            if (a2 > 0)
            {
                value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i, j + PrimeY, x2, y2);
            }
        }
    }
    // Scale into roughly [-1, 1].
    return value * 18.24196194486065f;
}
/// <summary>
/// 3D OpenSimplex2S ("smooth") noise for a single octave. Input is
/// expected to be pre-rotated (see comment below); returns roughly
/// [-1, 1]. Evaluates up to 14 lattice-vertex contributions across two
/// offset cube grids (seed vs seed2), selecting vertices with per-axis
/// sign masks (xNMask etc. are 0 or -1).
/// </summary>
private float SingleOpenSimplex2S(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    // 3D OpenSimplex2S case uses two offset rotated cube grids.
    /*
     * --- Rotation moved to TransformNoiseCoordinate method ---
     * const FNfloat R3 = (FNfloat)(2.0 / 3.0);
     * FNfloat r = (x + y + z) * R3; // Rotation, not skew
     * x = r - x; y = r - y; z = r - z;
     */
    int i = FastFloor(x);
    int j = FastFloor(y);
    int k = FastFloor(z);
    float xi = (float)(x - i);
    float yi = (float)(y - j);
    float zi = (float)(z - k);
    i *= PrimeX;
    j *= PrimeY;
    k *= PrimeZ;
    // Second grid uses a decorrelated seed.
    int seed2 = seed + 1293373;
    // -1 when the fractional part is below 0.5, else 0.
    int xNMask = (int)(-0.5f - xi);
    int yNMask = (int)(-0.5f - yi);
    int zNMask = (int)(-0.5f - zi);
    // Offset from the nearest vertex of grid 1.
    float x0 = xi + xNMask;
    float y0 = yi + yNMask;
    float z0 = zi + zNMask;
    float a0 = 0.75f - x0 * x0 - y0 * y0 - z0 * z0;
    float value = (a0 * a0) * (a0 * a0) * GradCoord(seed, i + (xNMask & PrimeX), j + (yNMask & PrimeY), k + (zNMask & PrimeZ), x0, y0, z0);
    // Offset from the cell-center vertex of grid 2.
    float x1 = xi - 0.5f;
    float y1 = yi - 0.5f;
    float z1 = zi - 0.5f;
    float a1 = 0.75f - x1 * x1 - y1 * y1 - z1 * z1;
    value += (a1 * a1) * (a1 * a1) * GradCoord(seed2, i + PrimeX, j + PrimeY, k + PrimeZ, x1, y1, z1);
    // Precomputed falloff adjustments for flipping one axis on each grid.
    float xAFlipMask0 = ((xNMask | 1) << 1) * x1;
    float yAFlipMask0 = ((yNMask | 1) << 1) * y1;
    float zAFlipMask0 = ((zNMask | 1) << 1) * z1;
    float xAFlipMask1 = (-2 - (xNMask << 2)) * x1 - 1.0f;
    float yAFlipMask1 = (-2 - (yNMask << 2)) * y1 - 1.0f;
    float zAFlipMask1 = (-2 - (zNMask << 2)) * z1 - 1.0f;
    // X-axis neighbor candidates (skip flags suppress later duplicates).
    bool skip5 = false;
    float a2 = xAFlipMask0 + a0;
    if (a2 > 0)
    {
        float x2 = x0 - (xNMask | 1);
        float y2 = y0;
        float z2 = z0;
        value += (a2 * a2) * (a2 * a2) * GradCoord(seed, i + (~xNMask & PrimeX), j + (yNMask & PrimeY), k + (zNMask & PrimeZ), x2, y2, z2);
    }
    else
    {
        float a3 = yAFlipMask0 + zAFlipMask0 + a0;
        if (a3 > 0)
        {
            float x3 = x0;
            float y3 = y0 - (yNMask | 1);
            float z3 = z0 - (zNMask | 1);
            value += (a3 * a3) * (a3 * a3) * GradCoord(seed, i + (xNMask & PrimeX), j + (~yNMask & PrimeY), k + (~zNMask & PrimeZ), x3, y3, z3);
        }
        float a4 = xAFlipMask1 + a1;
        if (a4 > 0)
        {
            float x4 = (xNMask | 1) + x1;
            float y4 = y1;
            float z4 = z1;
            value += (a4 * a4) * (a4 * a4) * GradCoord(seed2, i + (xNMask & (PrimeX * 2)), j + PrimeY, k + PrimeZ, x4, y4, z4);
            skip5 = true;
        }
    }
    // Y-axis neighbor candidates.
    bool skip9 = false;
    float a6 = yAFlipMask0 + a0;
    if (a6 > 0)
    {
        float x6 = x0;
        float y6 = y0 - (yNMask | 1);
        float z6 = z0;
        value += (a6 * a6) * (a6 * a6) * GradCoord(seed, i + (xNMask & PrimeX), j + (~yNMask & PrimeY), k + (zNMask & PrimeZ), x6, y6, z6);
    }
    else
    {
        float a7 = xAFlipMask0 + zAFlipMask0 + a0;
        if (a7 > 0)
        {
            float x7 = x0 - (xNMask | 1);
            float y7 = y0;
            float z7 = z0 - (zNMask | 1);
            value += (a7 * a7) * (a7 * a7) * GradCoord(seed, i + (~xNMask & PrimeX), j + (yNMask & PrimeY), k + (~zNMask & PrimeZ), x7, y7, z7);
        }
        float a8 = yAFlipMask1 + a1;
        if (a8 > 0)
        {
            float x8 = x1;
            float y8 = (yNMask | 1) + y1;
            float z8 = z1;
            value += (a8 * a8) * (a8 * a8) * GradCoord(seed2, i + PrimeX, j + (yNMask & (PrimeY << 1)), k + PrimeZ, x8, y8, z8);
            skip9 = true;
        }
    }
    // Z-axis neighbor candidates.
    bool skipD = false;
    float aA = zAFlipMask0 + a0;
    if (aA > 0)
    {
        float xA = x0;
        float yA = y0;
        float zA = z0 - (zNMask | 1);
        value += (aA * aA) * (aA * aA) * GradCoord(seed, i + (xNMask & PrimeX), j + (yNMask & PrimeY), k + (~zNMask & PrimeZ), xA, yA, zA);
    }
    else
    {
        float aB = xAFlipMask0 + yAFlipMask0 + a0;
        if (aB > 0)
        {
            float xB = x0 - (xNMask | 1);
            float yB = y0 - (yNMask | 1);
            float zB = z0;
            value += (aB * aB) * (aB * aB) * GradCoord(seed, i + (~xNMask & PrimeX), j + (~yNMask & PrimeY), k + (zNMask & PrimeZ), xB, yB, zB);
        }
        float aC = zAFlipMask1 + a1;
        if (aC > 0)
        {
            float xC = x1;
            float yC = y1;
            float zC = (zNMask | 1) + z1;
            value += (aC * aC) * (aC * aC) * GradCoord(seed2, i + PrimeX, j + PrimeY, k + (zNMask & (PrimeZ << 1)), xC, yC, zC);
            skipD = true;
        }
    }
    // Remaining grid-2 diagonal vertices, unless already covered above.
    if (!skip5)
    {
        float a5 = yAFlipMask1 + zAFlipMask1 + a1;
        if (a5 > 0)
        {
            float x5 = x1;
            float y5 = (yNMask | 1) + y1;
            float z5 = (zNMask | 1) + z1;
            value += (a5 * a5) * (a5 * a5) * GradCoord(seed2, i + PrimeX, j + (yNMask & (PrimeY << 1)), k + (zNMask & (PrimeZ << 1)), x5, y5, z5);
        }
    }
    if (!skip9)
    {
        float a9 = xAFlipMask1 + zAFlipMask1 + a1;
        if (a9 > 0)
        {
            float x9 = (xNMask | 1) + x1;
            float y9 = y1;
            float z9 = (zNMask | 1) + z1;
            value += (a9 * a9) * (a9 * a9) * GradCoord(seed2, i + (xNMask & (PrimeX * 2)), j + PrimeY, k + (zNMask & (PrimeZ << 1)), x9, y9, z9);
        }
    }
    if (!skipD)
    {
        float aD = xAFlipMask1 + yAFlipMask1 + a1;
        if (aD > 0)
        {
            float xD = (xNMask | 1) + x1;
            float yD = (yNMask | 1) + y1;
            float zD = z1;
            value += (aD * aD) * (aD * aD) * GradCoord(seed2, i + (xNMask & (PrimeX << 1)), j + (yNMask & (PrimeY << 1)), k + PrimeZ, xD, yD, zD);
        }
    }
    // Scale into roughly [-1, 1].
    return value * 9.046026385208288f;
}
// Cellular Noise
/// <summary>
/// 2D cellular (Worley) noise for a single octave. Scans the 3x3 cell
/// neighborhood around the query point, tracking the nearest (distance0)
/// and second-nearest (distance1) jittered feature points, then maps them
/// through the configured distance function and return type.
/// </summary>
private float SingleCellular(int seed, FNLfloat x, FNLfloat y)
{
    int xr = FastRound(x);
    int yr = FastRound(y);
    float distance0 = float.MaxValue;
    float distance1 = float.MaxValue;
    int closestHash = 0;
    // 0.43701595 keeps max jitter inside the neighboring-cell search radius.
    float cellularJitter = 0.43701595f * mCellularJitterModifier;
    int xPrimed = (xr - 1) * PrimeX;
    int yPrimedBase = (yr - 1) * PrimeY;
    // Each case repeats the same scan with a different distance metric.
    switch (mCellularDistanceFunction)
    {
        default:
        case CellularDistanceFunction.Euclidean:
        case CellularDistanceFunction.EuclideanSq:
            for (int xi = xr - 1; xi <= xr + 1; xi++)
            {
                int yPrimed = yPrimedBase;
                for (int yi = yr - 1; yi <= yr + 1; yi++)
                {
                    int hash = Hash(seed, xPrimed, yPrimed);
                    // (255 << 1): even index into the interleaved (x, y)
                    // RandVecs2D table.
                    int idx = hash & (255 << 1);
                    float vecX = (float)(xi - x) + RandVecs2D[idx] * cellularJitter;
                    float vecY = (float)(yi - y) + RandVecs2D[idx | 1] * cellularJitter;
                    float newDistance = vecX * vecX + vecY * vecY;
                    // Keep distance1 as the second-smallest seen so far.
                    distance1 = FastMax(FastMin(distance1, newDistance), distance0);
                    if (newDistance < distance0)
                    {
                        distance0 = newDistance;
                        closestHash = hash;
                    }
                    yPrimed += PrimeY;
                }
                xPrimed += PrimeX;
            }
            break;
        case CellularDistanceFunction.Manhattan:
            for (int xi = xr - 1; xi <= xr + 1; xi++)
            {
                int yPrimed = yPrimedBase;
                for (int yi = yr - 1; yi <= yr + 1; yi++)
                {
                    int hash = Hash(seed, xPrimed, yPrimed);
                    int idx = hash & (255 << 1);
                    float vecX = (float)(xi - x) + RandVecs2D[idx] * cellularJitter;
                    float vecY = (float)(yi - y) + RandVecs2D[idx | 1] * cellularJitter;
                    float newDistance = FastAbs(vecX) + FastAbs(vecY);
                    distance1 = FastMax(FastMin(distance1, newDistance), distance0);
                    if (newDistance < distance0)
                    {
                        distance0 = newDistance;
                        closestHash = hash;
                    }
                    yPrimed += PrimeY;
                }
                xPrimed += PrimeX;
            }
            break;
        case CellularDistanceFunction.Hybrid:
            for (int xi = xr - 1; xi <= xr + 1; xi++)
            {
                int yPrimed = yPrimedBase;
                for (int yi = yr - 1; yi <= yr + 1; yi++)
                {
                    int hash = Hash(seed, xPrimed, yPrimed);
                    int idx = hash & (255 << 1);
                    float vecX = (float)(xi - x) + RandVecs2D[idx] * cellularJitter;
                    float vecY = (float)(yi - y) + RandVecs2D[idx | 1] * cellularJitter;
                    // Hybrid: Manhattan plus squared Euclidean.
                    float newDistance = (FastAbs(vecX) + FastAbs(vecY)) + (vecX * vecX + vecY * vecY);
                    distance1 = FastMax(FastMin(distance1, newDistance), distance0);
                    if (newDistance < distance0)
                    {
                        distance0 = newDistance;
                        closestHash = hash;
                    }
                    yPrimed += PrimeY;
                }
                xPrimed += PrimeX;
            }
            break;
    }
    // Euclidean distances were accumulated squared; take roots only when
    // a distance-based return type actually needs them.
    if (mCellularDistanceFunction == CellularDistanceFunction.Euclidean && mCellularReturnType >= CellularReturnType.Distance)
    {
        distance0 = FastSqrt(distance0);
        if (mCellularReturnType >= CellularReturnType.Distance2)
        {
            distance1 = FastSqrt(distance1);
        }
    }
    switch (mCellularReturnType)
    {
        case CellularReturnType.CellValue:
            // Map the 32-bit hash of the nearest cell into [-1, 1).
            return closestHash * (1 / 2147483648.0f);
        case CellularReturnType.Distance:
            return distance0 - 1;
        case CellularReturnType.Distance2:
            return distance1 - 1;
        case CellularReturnType.Distance2Add:
            return (distance1 + distance0) * 0.5f - 1;
        case CellularReturnType.Distance2Sub:
            return distance1 - distance0 - 1;
        case CellularReturnType.Distance2Mul:
            return distance1 * distance0 * 0.5f - 1;
        case CellularReturnType.Distance2Div:
            return distance0 / distance1 - 1;
        default:
            return 0;
    }
}
/// <summary>
/// 3D cellular (Worley) noise for a single octave. Scans the 3x3x3 cell
/// neighborhood around the query point, tracking the nearest (distance0)
/// and second-nearest (distance1) jittered feature points, then maps them
/// through the configured distance function and return type.
/// </summary>
private float SingleCellular(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    int xr = FastRound(x);
    int yr = FastRound(y);
    int zr = FastRound(z);
    float distance0 = float.MaxValue;
    float distance1 = float.MaxValue;
    int closestHash = 0;
    // 0.39614353 keeps max jitter inside the neighboring-cell search radius.
    float cellularJitter = 0.39614353f * mCellularJitterModifier;
    int xPrimed = (xr - 1) * PrimeX;
    int yPrimedBase = (yr - 1) * PrimeY;
    int zPrimedBase = (zr - 1) * PrimeZ;
    // Each case repeats the same scan with a different distance metric.
    switch (mCellularDistanceFunction)
    {
        case CellularDistanceFunction.Euclidean:
        case CellularDistanceFunction.EuclideanSq:
            for (int xi = xr - 1; xi <= xr + 1; xi++)
            {
                int yPrimed = yPrimedBase;
                for (int yi = yr - 1; yi <= yr + 1; yi++)
                {
                    int zPrimed = zPrimedBase;
                    for (int zi = zr - 1; zi <= zr + 1; zi++)
                    {
                        int hash = Hash(seed, xPrimed, yPrimed, zPrimed);
                        // (255 << 2): index of an (x, y, z) triple in the
                        // interleaved RandVecs3D table.
                        int idx = hash & (255 << 2);
                        float vecX = (float)(xi - x) + RandVecs3D[idx] * cellularJitter;
                        float vecY = (float)(yi - y) + RandVecs3D[idx | 1] * cellularJitter;
                        float vecZ = (float)(zi - z) + RandVecs3D[idx | 2] * cellularJitter;
                        float newDistance = vecX * vecX + vecY * vecY + vecZ * vecZ;
                        // Keep distance1 as the second-smallest seen so far.
                        distance1 = FastMax(FastMin(distance1, newDistance), distance0);
                        if (newDistance < distance0)
                        {
                            distance0 = newDistance;
                            closestHash = hash;
                        }
                        zPrimed += PrimeZ;
                    }
                    yPrimed += PrimeY;
                }
                xPrimed += PrimeX;
            }
            break;
        case CellularDistanceFunction.Manhattan:
            for (int xi = xr - 1; xi <= xr + 1; xi++)
            {
                int yPrimed = yPrimedBase;
                for (int yi = yr - 1; yi <= yr + 1; yi++)
                {
                    int zPrimed = zPrimedBase;
                    for (int zi = zr - 1; zi <= zr + 1; zi++)
                    {
                        int hash = Hash(seed, xPrimed, yPrimed, zPrimed);
                        int idx = hash & (255 << 2);
                        float vecX = (float)(xi - x) + RandVecs3D[idx] * cellularJitter;
                        float vecY = (float)(yi - y) + RandVecs3D[idx | 1] * cellularJitter;
                        float vecZ = (float)(zi - z) + RandVecs3D[idx | 2] * cellularJitter;
                        float newDistance = FastAbs(vecX) + FastAbs(vecY) + FastAbs(vecZ);
                        distance1 = FastMax(FastMin(distance1, newDistance), distance0);
                        if (newDistance < distance0)
                        {
                            distance0 = newDistance;
                            closestHash = hash;
                        }
                        zPrimed += PrimeZ;
                    }
                    yPrimed += PrimeY;
                }
                xPrimed += PrimeX;
            }
            break;
        case CellularDistanceFunction.Hybrid:
            for (int xi = xr - 1; xi <= xr + 1; xi++)
            {
                int yPrimed = yPrimedBase;
                for (int yi = yr - 1; yi <= yr + 1; yi++)
                {
                    int zPrimed = zPrimedBase;
                    for (int zi = zr - 1; zi <= zr + 1; zi++)
                    {
                        int hash = Hash(seed, xPrimed, yPrimed, zPrimed);
                        int idx = hash & (255 << 2);
                        float vecX = (float)(xi - x) + RandVecs3D[idx] * cellularJitter;
                        float vecY = (float)(yi - y) + RandVecs3D[idx | 1] * cellularJitter;
                        float vecZ = (float)(zi - z) + RandVecs3D[idx | 2] * cellularJitter;
                        // Hybrid: Manhattan plus squared Euclidean.
                        float newDistance = (FastAbs(vecX) + FastAbs(vecY) + FastAbs(vecZ)) +
                                            (vecX * vecX + vecY * vecY + vecZ * vecZ);
                        distance1 = FastMax(FastMin(distance1, newDistance), distance0);
                        if (newDistance < distance0)
                        {
                            distance0 = newDistance;
                            closestHash = hash;
                        }
                        zPrimed += PrimeZ;
                    }
                    yPrimed += PrimeY;
                }
                xPrimed += PrimeX;
            }
            break;
        default:
            break;
    }
    // Euclidean distances were accumulated squared; take roots only when
    // a distance-based return type actually needs them.
    if (mCellularDistanceFunction == CellularDistanceFunction.Euclidean && mCellularReturnType >= CellularReturnType.Distance)
    {
        distance0 = FastSqrt(distance0);
        if (mCellularReturnType >= CellularReturnType.Distance2)
        {
            distance1 = FastSqrt(distance1);
        }
    }
    switch (mCellularReturnType)
    {
        case CellularReturnType.CellValue:
            // Map the 32-bit hash of the nearest cell into [-1, 1).
            return closestHash * (1 / 2147483648.0f);
        case CellularReturnType.Distance:
            return distance0 - 1;
        case CellularReturnType.Distance2:
            return distance1 - 1;
        case CellularReturnType.Distance2Add:
            return (distance1 + distance0) * 0.5f - 1;
        case CellularReturnType.Distance2Sub:
            return distance1 - distance0 - 1;
        case CellularReturnType.Distance2Mul:
            return distance1 * distance0 * 0.5f - 1;
        case CellularReturnType.Distance2Div:
            return distance0 / distance1 - 1;
        default:
            return 0;
    }
}
// Perlin Noise
/// <summary>
/// 2D Perlin gradient noise for a single octave; returns roughly [-1, 1].
/// </summary>
private float SinglePerlin(int seed, FNLfloat x, FNLfloat y)
{
    // Lattice cell containing (x, y).
    int cellX = FastFloor(x);
    int cellY = FastFloor(y);
    // Offsets from the low corner, and from the high corner.
    float dx0 = (float)(x - cellX);
    float dy0 = (float)(y - cellY);
    float dx1 = dx0 - 1;
    float dy1 = dy0 - 1;
    // Quintic fade curves give C2-continuous interpolation.
    float fadeX = InterpQuintic(dx0);
    float fadeY = InterpQuintic(dy0);
    // Convert lattice coords to primed hash coordinates.
    cellX *= PrimeX;
    cellY *= PrimeY;
    int cellX1 = cellX + PrimeX;
    int cellY1 = cellY + PrimeY;
    float row0 = Lerp(GradCoord(seed, cellX, cellY, dx0, dy0), GradCoord(seed, cellX1, cellY, dx1, dy0), fadeX);
    float row1 = Lerp(GradCoord(seed, cellX, cellY1, dx0, dy1), GradCoord(seed, cellX1, cellY1, dx1, dy1), fadeX);
    // Final blend across y, scaled into roughly [-1, 1].
    return Lerp(row0, row1, fadeY) * 1.4247691104677813f;
}
/// <summary>
/// 3D Perlin gradient noise for a single octave; returns roughly [-1, 1].
/// </summary>
private float SinglePerlin(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    // Lattice cell containing (x, y, z).
    int cellX = FastFloor(x);
    int cellY = FastFloor(y);
    int cellZ = FastFloor(z);
    // Offsets from the low corner, and from the high corner.
    float dx0 = (float)(x - cellX);
    float dy0 = (float)(y - cellY);
    float dz0 = (float)(z - cellZ);
    float dx1 = dx0 - 1;
    float dy1 = dy0 - 1;
    float dz1 = dz0 - 1;
    // Quintic fade curves give C2-continuous interpolation.
    float fadeX = InterpQuintic(dx0);
    float fadeY = InterpQuintic(dy0);
    float fadeZ = InterpQuintic(dz0);
    // Convert lattice coords to primed hash coordinates.
    cellX *= PrimeX;
    cellY *= PrimeY;
    cellZ *= PrimeZ;
    int cellX1 = cellX + PrimeX;
    int cellY1 = cellY + PrimeY;
    int cellZ1 = cellZ + PrimeZ;
    // Blend the eight corner gradients: first along x, then y, then z.
    float edge00 = Lerp(GradCoord(seed, cellX, cellY, cellZ, dx0, dy0, dz0), GradCoord(seed, cellX1, cellY, cellZ, dx1, dy0, dz0), fadeX);
    float edge10 = Lerp(GradCoord(seed, cellX, cellY1, cellZ, dx0, dy1, dz0), GradCoord(seed, cellX1, cellY1, cellZ, dx1, dy1, dz0), fadeX);
    float edge01 = Lerp(GradCoord(seed, cellX, cellY, cellZ1, dx0, dy0, dz1), GradCoord(seed, cellX1, cellY, cellZ1, dx1, dy0, dz1), fadeX);
    float edge11 = Lerp(GradCoord(seed, cellX, cellY1, cellZ1, dx0, dy1, dz1), GradCoord(seed, cellX1, cellY1, cellZ1, dx1, dy1, dz1), fadeX);
    float face0 = Lerp(edge00, edge10, fadeY);
    float face1 = Lerp(edge01, edge11, fadeY);
    // Final blend across z, scaled into roughly [-1, 1].
    return Lerp(face0, face1, fadeZ) * 0.964921414852142333984375f;
}
// Value Cubic Noise
/// <summary>
/// 2D cubic value noise for a single octave: bicubic interpolation over a
/// 4x4 grid of hashed lattice values surrounding the query point.
/// </summary>
private float SingleValueCubic(int seed, FNLfloat x, FNLfloat y)
{
    int baseX = FastFloor(x);
    int baseY = FastFloor(y);
    float tx = (float)(x - baseX);
    float ty = (float)(y - baseY);
    // Primed hash coordinates for the 4x4 neighborhood columns/rows:
    // one cell below the base up to two cells above.
    baseX *= PrimeX;
    baseY *= PrimeY;
    int xm1 = baseX - PrimeX;
    int ym1 = baseY - PrimeY;
    int xp1 = baseX + PrimeX;
    int yp1 = baseY + PrimeY;
    int xp2 = baseX + unchecked(PrimeX * 2);
    int yp2 = baseY + unchecked(PrimeY * 2);
    // Cubically interpolate each lattice row along x...
    float row0 = CubicLerp(ValCoord(seed, xm1, ym1), ValCoord(seed, baseX, ym1), ValCoord(seed, xp1, ym1), ValCoord(seed, xp2, ym1), tx);
    float row1 = CubicLerp(ValCoord(seed, xm1, baseY), ValCoord(seed, baseX, baseY), ValCoord(seed, xp1, baseY), ValCoord(seed, xp2, baseY), tx);
    float row2 = CubicLerp(ValCoord(seed, xm1, yp1), ValCoord(seed, baseX, yp1), ValCoord(seed, xp1, yp1), ValCoord(seed, xp2, yp1), tx);
    float row3 = CubicLerp(ValCoord(seed, xm1, yp2), ValCoord(seed, baseX, yp2), ValCoord(seed, xp1, yp2), ValCoord(seed, xp2, yp2), tx);
    // ...then across y; 1/1.5^2 renormalizes the cubic overshoot.
    return CubicLerp(row0, row1, row2, row3, ty) * (1 / (1.5f * 1.5f));
}
/// <summary>
/// 3D cubic value noise for a single octave: tricubic interpolation over a
/// 4x4x4 grid of hashed lattice values surrounding the query point.
/// </summary>
private float SingleValueCubic(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    int baseX = FastFloor(x);
    int baseY = FastFloor(y);
    int baseZ = FastFloor(z);
    float tx = (float)(x - baseX);
    float ty = (float)(y - baseY);
    float tz = (float)(z - baseZ);
    // Primed hash coordinates for the 4x4x4 neighborhood: one cell below
    // the base up to two cells above, per axis.
    baseX *= PrimeX;
    baseY *= PrimeY;
    baseZ *= PrimeZ;
    int xm1 = baseX - PrimeX;
    int ym1 = baseY - PrimeY;
    int zm1 = baseZ - PrimeZ;
    int xp1 = baseX + PrimeX;
    int yp1 = baseY + PrimeY;
    int zp1 = baseZ + PrimeZ;
    int xp2 = baseX + unchecked(PrimeX * 2);
    int yp2 = baseY + unchecked(PrimeY * 2);
    int zp2 = baseZ + unchecked(PrimeZ * 2);
    // For each z slice, interpolate the 4x4 value grid along x (rows),
    // then along y, yielding one value per slice.
    float slice0 = CubicLerp(
        CubicLerp(ValCoord(seed, xm1, ym1, zm1), ValCoord(seed, baseX, ym1, zm1), ValCoord(seed, xp1, ym1, zm1), ValCoord(seed, xp2, ym1, zm1), tx),
        CubicLerp(ValCoord(seed, xm1, baseY, zm1), ValCoord(seed, baseX, baseY, zm1), ValCoord(seed, xp1, baseY, zm1), ValCoord(seed, xp2, baseY, zm1), tx),
        CubicLerp(ValCoord(seed, xm1, yp1, zm1), ValCoord(seed, baseX, yp1, zm1), ValCoord(seed, xp1, yp1, zm1), ValCoord(seed, xp2, yp1, zm1), tx),
        CubicLerp(ValCoord(seed, xm1, yp2, zm1), ValCoord(seed, baseX, yp2, zm1), ValCoord(seed, xp1, yp2, zm1), ValCoord(seed, xp2, yp2, zm1), tx),
        ty);
    float slice1 = CubicLerp(
        CubicLerp(ValCoord(seed, xm1, ym1, baseZ), ValCoord(seed, baseX, ym1, baseZ), ValCoord(seed, xp1, ym1, baseZ), ValCoord(seed, xp2, ym1, baseZ), tx),
        CubicLerp(ValCoord(seed, xm1, baseY, baseZ), ValCoord(seed, baseX, baseY, baseZ), ValCoord(seed, xp1, baseY, baseZ), ValCoord(seed, xp2, baseY, baseZ), tx),
        CubicLerp(ValCoord(seed, xm1, yp1, baseZ), ValCoord(seed, baseX, yp1, baseZ), ValCoord(seed, xp1, yp1, baseZ), ValCoord(seed, xp2, yp1, baseZ), tx),
        CubicLerp(ValCoord(seed, xm1, yp2, baseZ), ValCoord(seed, baseX, yp2, baseZ), ValCoord(seed, xp1, yp2, baseZ), ValCoord(seed, xp2, yp2, baseZ), tx),
        ty);
    float slice2 = CubicLerp(
        CubicLerp(ValCoord(seed, xm1, ym1, zp1), ValCoord(seed, baseX, ym1, zp1), ValCoord(seed, xp1, ym1, zp1), ValCoord(seed, xp2, ym1, zp1), tx),
        CubicLerp(ValCoord(seed, xm1, baseY, zp1), ValCoord(seed, baseX, baseY, zp1), ValCoord(seed, xp1, baseY, zp1), ValCoord(seed, xp2, baseY, zp1), tx),
        CubicLerp(ValCoord(seed, xm1, yp1, zp1), ValCoord(seed, baseX, yp1, zp1), ValCoord(seed, xp1, yp1, zp1), ValCoord(seed, xp2, yp1, zp1), tx),
        CubicLerp(ValCoord(seed, xm1, yp2, zp1), ValCoord(seed, baseX, yp2, zp1), ValCoord(seed, xp1, yp2, zp1), ValCoord(seed, xp2, yp2, zp1), tx),
        ty);
    float slice3 = CubicLerp(
        CubicLerp(ValCoord(seed, xm1, ym1, zp2), ValCoord(seed, baseX, ym1, zp2), ValCoord(seed, xp1, ym1, zp2), ValCoord(seed, xp2, ym1, zp2), tx),
        CubicLerp(ValCoord(seed, xm1, baseY, zp2), ValCoord(seed, baseX, baseY, zp2), ValCoord(seed, xp1, baseY, zp2), ValCoord(seed, xp2, baseY, zp2), tx),
        CubicLerp(ValCoord(seed, xm1, yp1, zp2), ValCoord(seed, baseX, yp1, zp2), ValCoord(seed, xp1, yp1, zp2), ValCoord(seed, xp2, yp1, zp2), tx),
        CubicLerp(ValCoord(seed, xm1, yp2, zp2), ValCoord(seed, baseX, yp2, zp2), ValCoord(seed, xp1, yp2, zp2), ValCoord(seed, xp2, yp2, zp2), tx),
        ty);
    // Interpolate across z; 1/1.5^3 renormalizes the cubic overshoot.
    return CubicLerp(slice0, slice1, slice2, slice3, tz) * (1 / (1.5f * 1.5f * 1.5f));
}
// Value Noise
/// <summary>
/// 2D value noise for a single octave: bilinear (Hermite-smoothed)
/// interpolation of the four hashed corner values of the containing cell.
/// </summary>
private float SingleValue(int seed, FNLfloat x, FNLfloat y)
{
    int cellX = FastFloor(x);
    int cellY = FastFloor(y);
    // Hermite-smoothed interpolation weights.
    float weightX = InterpHermite((float)(x - cellX));
    float weightY = InterpHermite((float)(y - cellY));
    // Convert lattice coords to primed hash coordinates.
    cellX *= PrimeX;
    cellY *= PrimeY;
    int cellX1 = cellX + PrimeX;
    int cellY1 = cellY + PrimeY;
    float row0 = Lerp(ValCoord(seed, cellX, cellY), ValCoord(seed, cellX1, cellY), weightX);
    float row1 = Lerp(ValCoord(seed, cellX, cellY1), ValCoord(seed, cellX1, cellY1), weightX);
    return Lerp(row0, row1, weightY);
}
/// <summary>
/// 3D value noise for a single octave: trilinear (Hermite-smoothed)
/// interpolation of the eight hashed corner values of the containing cell.
/// </summary>
private float SingleValue(int seed, FNLfloat x, FNLfloat y, FNLfloat z)
{
    int cellX = FastFloor(x);
    int cellY = FastFloor(y);
    int cellZ = FastFloor(z);
    // Hermite-smoothed interpolation weights.
    float weightX = InterpHermite((float)(x - cellX));
    float weightY = InterpHermite((float)(y - cellY));
    float weightZ = InterpHermite((float)(z - cellZ));
    // Convert lattice coords to primed hash coordinates.
    cellX *= PrimeX;
    cellY *= PrimeY;
    cellZ *= PrimeZ;
    int cellX1 = cellX + PrimeX;
    int cellY1 = cellY + PrimeY;
    int cellZ1 = cellZ + PrimeZ;
    // Blend the eight corners: along x, then y, then z.
    float edge00 = Lerp(ValCoord(seed, cellX, cellY, cellZ), ValCoord(seed, cellX1, cellY, cellZ), weightX);
    float edge10 = Lerp(ValCoord(seed, cellX, cellY1, cellZ), ValCoord(seed, cellX1, cellY1, cellZ), weightX);
    float edge01 = Lerp(ValCoord(seed, cellX, cellY, cellZ1), ValCoord(seed, cellX1, cellY, cellZ1), weightX);
    float edge11 = Lerp(ValCoord(seed, cellX, cellY1, cellZ1), ValCoord(seed, cellX1, cellY1, cellZ1), weightX);
    float face0 = Lerp(edge00, edge10, weightY);
    float face1 = Lerp(edge01, edge11, weightY);
    return Lerp(face0, face1, weightZ);
}
// Domain Warp
/// <summary>
/// Dispatches one 2D domain-warp step to the configured algorithm.
/// The amplitude factors normalize each algorithm's gradient magnitude.
/// </summary>
private void DoSingleDomainWarp(int seed, float amp, float freq, FNLfloat x, FNLfloat y, ref FNLfloat xr, ref FNLfloat yr)
{
    if (mDomainWarpType == DomainWarpType.OpenSimplex2)
    {
        SingleDomainWarpSimplexGradient(seed, amp * 38.283687591552734375f, freq, x, y, ref xr, ref yr, false);
    }
    else if (mDomainWarpType == DomainWarpType.OpenSimplex2Reduced)
    {
        SingleDomainWarpSimplexGradient(seed, amp * 16.0f, freq, x, y, ref xr, ref yr, true);
    }
    else if (mDomainWarpType == DomainWarpType.BasicGrid)
    {
        SingleDomainWarpBasicGrid(seed, amp, freq, x, y, ref xr, ref yr);
    }
}
/// <summary>
/// Dispatches one 3D domain-warp step to the configured algorithm.
/// The amplitude factors normalize each algorithm's gradient magnitude.
/// </summary>
private void DoSingleDomainWarp(
    int seed, float amp, float freq, FNLfloat x, FNLfloat y, FNLfloat z, ref FNLfloat xr, ref FNLfloat yr, ref FNLfloat zr
)
{
    if (mDomainWarpType == DomainWarpType.OpenSimplex2)
    {
        SingleDomainWarpOpenSimplex2Gradient(seed, amp * 32.69428253173828125f, freq, x, y, z, ref xr, ref yr, ref zr, false);
    }
    else if (mDomainWarpType == DomainWarpType.OpenSimplex2Reduced)
    {
        SingleDomainWarpOpenSimplex2Gradient(seed, amp * 7.71604938271605f, freq, x, y, z, ref xr, ref yr, ref zr, true);
    }
    else if (mDomainWarpType == DomainWarpType.BasicGrid)
    {
        SingleDomainWarpBasicGrid(seed, amp, freq, x, y, z, ref xr, ref yr, ref zr);
    }
}
// Domain Warp Single Wrapper
/// <summary>
/// Applies a single 2D domain-warp pass at base amplitude/frequency.
/// A transformed copy samples the gradient field; the offset is added to
/// the original coordinates in place.
/// </summary>
private void DomainWarpSingle(ref FNLfloat x, ref FNLfloat y)
{
    FNLfloat sampleX = x;
    FNLfloat sampleY = y;
    TransformDomainWarpCoordinate(ref sampleX, ref sampleY);
    DoSingleDomainWarp(mSeed, mDomainWarpAmp * mFractalBounding, mFrequency, sampleX, sampleY, ref x, ref y);
}
/// <summary>
/// Applies a single 3D domain-warp pass at base amplitude/frequency.
/// A transformed copy samples the gradient field; the offset is added to
/// the original coordinates in place.
/// </summary>
private void DomainWarpSingle(ref FNLfloat x, ref FNLfloat y, ref FNLfloat z)
{
    FNLfloat sampleX = x;
    FNLfloat sampleY = y;
    FNLfloat sampleZ = z;
    TransformDomainWarpCoordinate(ref sampleX, ref sampleY, ref sampleZ);
    DoSingleDomainWarp(mSeed, mDomainWarpAmp * mFractalBounding, mFrequency, sampleX, sampleY, sampleZ, ref x, ref y, ref z);
}
// Domain Warp Fractal Progressive
/// <summary>
/// Progressive 2D fractal domain warp: each octave re-transforms and warps
/// the output of the previous octave.
/// </summary>
private void DomainWarpFractalProgressive(ref FNLfloat x, ref FNLfloat y)
{
    int octaveSeed = mSeed;
    float amplitude = mDomainWarpAmp * mFractalBounding;
    float frequency = mFrequency;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        // Sample from the already-warped coordinates of prior octaves.
        FNLfloat sampleX = x;
        FNLfloat sampleY = y;
        TransformDomainWarpCoordinate(ref sampleX, ref sampleY);
        DoSingleDomainWarp(octaveSeed, amplitude, frequency, sampleX, sampleY, ref x, ref y);
        octaveSeed++;
        amplitude *= mGain;
        frequency *= mLacunarity;
    }
}
/// <summary>
/// Progressive 3D fractal domain warp: each octave re-transforms and warps
/// the output of the previous octave.
/// </summary>
private void DomainWarpFractalProgressive(ref FNLfloat x, ref FNLfloat y, ref FNLfloat z)
{
    int octaveSeed = mSeed;
    float amplitude = mDomainWarpAmp * mFractalBounding;
    float frequency = mFrequency;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        // Sample from the already-warped coordinates of prior octaves.
        FNLfloat sampleX = x;
        FNLfloat sampleY = y;
        FNLfloat sampleZ = z;
        TransformDomainWarpCoordinate(ref sampleX, ref sampleY, ref sampleZ);
        DoSingleDomainWarp(octaveSeed, amplitude, frequency, sampleX, sampleY, sampleZ, ref x, ref y, ref z);
        octaveSeed++;
        amplitude *= mGain;
        frequency *= mLacunarity;
    }
}
// Domain Warp Fractal Independant
/// <summary>
/// Independent 2D fractal domain warp: every octave samples the same
/// (once-transformed) input coordinates, so octaves do not feed back.
/// </summary>
private void DomainWarpFractalIndependent(ref FNLfloat x, ref FNLfloat y)
{
    FNLfloat sampleX = x;
    FNLfloat sampleY = y;
    TransformDomainWarpCoordinate(ref sampleX, ref sampleY);
    int octaveSeed = mSeed;
    float amplitude = mDomainWarpAmp * mFractalBounding;
    float frequency = mFrequency;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        DoSingleDomainWarp(octaveSeed, amplitude, frequency, sampleX, sampleY, ref x, ref y);
        octaveSeed++;
        amplitude *= mGain;
        frequency *= mLacunarity;
    }
}
/// <summary>
/// Independent 3D fractal domain warp: every octave samples the same
/// (once-transformed) input coordinates, so octaves do not feed back.
/// </summary>
private void DomainWarpFractalIndependent(ref FNLfloat x, ref FNLfloat y, ref FNLfloat z)
{
    FNLfloat sampleX = x;
    FNLfloat sampleY = y;
    FNLfloat sampleZ = z;
    TransformDomainWarpCoordinate(ref sampleX, ref sampleY, ref sampleZ);
    int octaveSeed = mSeed;
    float amplitude = mDomainWarpAmp * mFractalBounding;
    float frequency = mFrequency;
    for (int octave = 0; octave < mOctaves; octave++)
    {
        DoSingleDomainWarp(octaveSeed, amplitude, frequency, sampleX, sampleY, sampleZ, ref x, ref y, ref z);
        octaveSeed++;
        amplitude *= mGain;
        frequency *= mLacunarity;
    }
}
// Domain Warp Basic Grid
/// <summary>
/// 2D basic-grid domain warp: bilinearly interpolates pseudo-random offset
/// vectors from the surrounding lattice cell and adds the scaled result to
/// (xr, yr).
/// </summary>
private void SingleDomainWarpBasicGrid(
    int seed, float warpAmp, float frequency, FNLfloat x, FNLfloat y, ref FNLfloat xr, ref FNLfloat yr
)
{
    FNLfloat fx = x * frequency;
    FNLfloat fy = y * frequency;
    int cellX = FastFloor(fx);
    int cellY = FastFloor(fy);
    // Hermite-smoothed interpolation weights.
    float weightX = InterpHermite((float)(fx - cellX));
    float weightY = InterpHermite((float)(fy - cellY));
    cellX *= PrimeX;
    cellY *= PrimeY;
    int cellX1 = cellX + PrimeX;
    int cellY1 = cellY + PrimeY;
    // Hash selects an (x, y) pair from the interleaved RandVecs2D table.
    int hashA = Hash(seed, cellX, cellY) & (255 << 1);
    int hashB = Hash(seed, cellX1, cellY) & (255 << 1);
    float topX = Lerp(RandVecs2D[hashA], RandVecs2D[hashB], weightX);
    float topY = Lerp(RandVecs2D[hashA | 1], RandVecs2D[hashB | 1], weightX);
    hashA = Hash(seed, cellX, cellY1) & (255 << 1);
    hashB = Hash(seed, cellX1, cellY1) & (255 << 1);
    float bottomX = Lerp(RandVecs2D[hashA], RandVecs2D[hashB], weightX);
    float bottomY = Lerp(RandVecs2D[hashA | 1], RandVecs2D[hashB | 1], weightX);
    xr += Lerp(topX, bottomX, weightY) * warpAmp;
    yr += Lerp(topY, bottomY, weightY) * warpAmp;
}
/// <summary>
/// 3D basic-grid domain warp: trilinearly interpolates pseudo-random
/// offset vectors from the surrounding lattice cell and adds the scaled
/// result to (xr, yr, zr). Note lx0x/ly0x/lz0x etc. are intentionally
/// reused for the upper z plane after the lower plane is folded into
/// lx0y/ly0y/lz0y.
/// </summary>
private void SingleDomainWarpBasicGrid(
    int seed, float warpAmp, float frequency, FNLfloat x, FNLfloat y, FNLfloat z, ref FNLfloat xr, ref FNLfloat yr, ref FNLfloat zr
)
{
    FNLfloat xf = x * frequency;
    FNLfloat yf = y * frequency;
    FNLfloat zf = z * frequency;
    int x0 = FastFloor(xf);
    int y0 = FastFloor(yf);
    int z0 = FastFloor(zf);
    // Hermite-smoothed interpolation weights.
    float xs = InterpHermite((float)(xf - x0));
    float ys = InterpHermite((float)(yf - y0));
    float zs = InterpHermite((float)(zf - z0));
    x0 *= PrimeX;
    y0 *= PrimeY;
    z0 *= PrimeZ;
    int x1 = x0 + PrimeX;
    int y1 = y0 + PrimeY;
    int z1 = z0 + PrimeZ;
    // Hash selects an (x, y, z) triple from the interleaved RandVecs3D table.
    int hash0 = Hash(seed, x0, y0, z0) & (255 << 2);
    int hash1 = Hash(seed, x1, y0, z0) & (255 << 2);
    float lx0x = Lerp(RandVecs3D[hash0], RandVecs3D[hash1], xs);
    float ly0x = Lerp(RandVecs3D[hash0 | 1], RandVecs3D[hash1 | 1], xs);
    float lz0x = Lerp(RandVecs3D[hash0 | 2], RandVecs3D[hash1 | 2], xs);
    hash0 = Hash(seed, x0, y1, z0) & (255 << 2);
    hash1 = Hash(seed, x1, y1, z0) & (255 << 2);
    float lx1x = Lerp(RandVecs3D[hash0], RandVecs3D[hash1], xs);
    float ly1x = Lerp(RandVecs3D[hash0 | 1], RandVecs3D[hash1 | 1], xs);
    float lz1x = Lerp(RandVecs3D[hash0 | 2], RandVecs3D[hash1 | 2], xs);
    // Lower z plane blended along y.
    float lx0y = Lerp(lx0x, lx1x, ys);
    float ly0y = Lerp(ly0x, ly1x, ys);
    float lz0y = Lerp(lz0x, lz1x, ys);
    // Reuse the row variables for the upper z plane.
    hash0 = Hash(seed, x0, y0, z1) & (255 << 2);
    hash1 = Hash(seed, x1, y0, z1) & (255 << 2);
    lx0x = Lerp(RandVecs3D[hash0], RandVecs3D[hash1], xs);
    ly0x = Lerp(RandVecs3D[hash0 | 1], RandVecs3D[hash1 | 1], xs);
    lz0x = Lerp(RandVecs3D[hash0 | 2], RandVecs3D[hash1 | 2], xs);
    hash0 = Hash(seed, x0, y1, z1) & (255 << 2);
    hash1 = Hash(seed, x1, y1, z1) & (255 << 2);
    lx1x = Lerp(RandVecs3D[hash0], RandVecs3D[hash1], xs);
    ly1x = Lerp(RandVecs3D[hash0 | 1], RandVecs3D[hash1 | 1], xs);
    lz1x = Lerp(RandVecs3D[hash0 | 2], RandVecs3D[hash1 | 2], xs);
    // Final blend across z; add the scaled offset to the output coords.
    xr += Lerp(lx0y, Lerp(lx0x, lx1x, ys), zs) * warpAmp;
    yr += Lerp(ly0y, Lerp(ly0x, ly1x, ys), zs) * warpAmp;
    zr += Lerp(lz0y, Lerp(lz0x, lz1x, ys), zs) * warpAmp;
}
// Domain Warp Simplex/OpenSimplex2
/// <summary>
/// 2D simplex-based domain warp. Accumulates a weighted sum of per-corner
/// gradient vectors (vx, vy) over the simplex containing the point, then
/// adds the scaled offset to (xr, yr). When outGradOnly is true only the
/// hashed output gradient is used (the "Reduced" variant); otherwise the
/// dual gradient incorporating the corner offset is used.
/// </summary>
private void SingleDomainWarpSimplexGradient(
    int seed, float warpAmp, float frequency, FNLfloat x, FNLfloat y, ref FNLfloat xr, ref FNLfloat yr, bool outGradOnly
)
{
    const float SQRT3 = 1.7320508075688772935274463415059f;
    const float G2 = (3 - SQRT3) / 6;
    x *= frequency;
    y *= frequency;
    /*
     * --- Skew moved to TransformNoiseCoordinate method ---
     * const FNfloat F2 = 0.5f * (SQRT3 - 1);
     * FNfloat s = (x + y) * F2;
     * x += s; y += s;
     */
    // Containing skewed unit cell and unskewed in-cell offset
    // (same lattice setup as SingleSimplex).
    int i = FastFloor(x);
    int j = FastFloor(y);
    float xi = (float)(x - i);
    float yi = (float)(y - j);
    float t = (xi + yi) * G2;
    float x0 = (float)(xi - t);
    float y0 = (float)(yi - t);
    i *= PrimeX;
    j *= PrimeY;
    // Accumulated warp vector.
    float vx, vy;
    vx = vy = 0;
    // Base corner contribution.
    float a = 0.5f - x0 * x0 - y0 * y0;
    if (a > 0)
    {
        float aaaa = (a * a) * (a * a);
        float xo, yo;
        if (outGradOnly)
            GradCoordOut(seed, i, j, out xo, out yo);
        else
            GradCoordDual(seed, i, j, x0, y0, out xo, out yo);
        vx += aaaa * xo;
        vy += aaaa * yo;
    }
    // Far corner falloff, derived algebraically from 'a' and 't'.
    float c = (float)(2 * (1 - 2 * G2) * (1 / G2 - 2)) * t + ((float)(-2 * (1 - 2 * G2) * (1 - 2 * G2)) + a);
    if (c > 0)
    {
        float x2 = x0 + (2 * (float)G2 - 1);
        float y2 = y0 + (2 * (float)G2 - 1);
        float cccc = (c * c) * (c * c);
        float xo, yo;
        if (outGradOnly)
            GradCoordOut(seed, i + PrimeX, j + PrimeY, out xo, out yo);
        else
            GradCoordDual(seed, i + PrimeX, j + PrimeY, x2, y2, out xo, out yo);
        vx += cccc * xo;
        vy += cccc * yo;
    }
    // Middle corner depends on which triangle of the cell we are in.
    if (y0 > x0)
    {
        float x1 = x0 + (float)G2;
        float y1 = y0 + ((float)G2 - 1);
        float b = 0.5f - x1 * x1 - y1 * y1;
        if (b > 0)
        {
            float bbbb = (b * b) * (b * b);
            float xo, yo;
            if (outGradOnly)
                GradCoordOut(seed, i, j + PrimeY, out xo, out yo);
            else
                GradCoordDual(seed, i, j + PrimeY, x1, y1, out xo, out yo);
            vx += bbbb * xo;
            vy += bbbb * yo;
        }
    }
    else
    {
        float x1 = x0 + ((float)G2 - 1);
        float y1 = y0 + (float)G2;
        float b = 0.5f - x1 * x1 - y1 * y1;
        if (b > 0)
        {
            float bbbb = (b * b) * (b * b);
            float xo, yo;
            if (outGradOnly)
                GradCoordOut(seed, i + PrimeX, j, out xo, out yo);
            else
                GradCoordDual(seed, i + PrimeX, j, x1, y1, out xo, out yo);
            vx += bbbb * xo;
            vy += bbbb * yo;
        }
    }
    // Apply the scaled warp offset.
    xr += vx * warpAmp;
    yr += vy * warpAmp;
}
/// <summary>
/// Single 3D OpenSimplex2-style domain-warp step: accumulates falloff-weighted
/// gradient vectors from nearby lattice vertices and adds the result, scaled
/// by <paramref name="warpAmp"/>, into (xr, yr, zr).
/// </summary>
/// <param name="seed">Hash seed used for gradient selection; altered for the second pass below.</param>
/// <param name="warpAmp">Scale applied to the computed warp vector.</param>
/// <param name="frequency">Frequency multiplier applied to the input coordinates.</param>
/// <param name="x">Input x; per the comment below, the lattice rotation was already applied upstream.</param>
/// <param name="y">Input y (pre-rotated upstream).</param>
/// <param name="z">Input z (pre-rotated upstream).</param>
/// <param name="xr">Warped x coordinate, updated in place.</param>
/// <param name="yr">Warped y coordinate, updated in place.</param>
/// <param name="zr">Warped z coordinate, updated in place.</param>
/// <param name="outGradOnly">
/// When true, vertex contributions come from GradCoordOut; otherwise from
/// GradCoordDual (which also receives the vertex-relative offsets).
/// </param>
private void SingleDomainWarpOpenSimplex2Gradient(
    int seed, float warpAmp, float frequency, FNLfloat x, FNLfloat y, FNLfloat z, ref FNLfloat xr, ref FNLfloat yr, ref FNLfloat zr,
    bool outGradOnly
)
{
    x *= frequency;
    y *= frequency;
    z *= frequency;
    /*
     * --- Rotation moved to TransformDomainWarpCoordinate method ---
     * const FNfloat R3 = (FNfloat)(2.0 / 3.0);
     * FNfloat r = (x + y + z) * R3; // Rotation, not skew
     * x = r - x; y = r - y; z = r - z;
     */
    // Nearest lattice vertex and the signed offsets from it.
    int i = FastRound(x);
    int j = FastRound(y);
    int k = FastRound(z);
    float x0 = (float)x - i;
    float y0 = (float)y - j;
    float z0 = (float)z - k;
    // Per-axis sign of the offset (+1/-1) and the corresponding absolute offsets.
    int xNSign = (int)(-x0 - 1.0f) | 1;
    int yNSign = (int)(-y0 - 1.0f) | 1;
    int zNSign = (int)(-z0 - 1.0f) | 1;
    float ax0 = xNSign * -x0;
    float ay0 = yNSign * -y0;
    float az0 = zNSign * -z0;
    i *= PrimeX;
    j *= PrimeY;
    k *= PrimeZ;
    // Accumulated warp vector.
    float vx, vy, vz;
    vx = vy = vz = 0;
    float a = (0.6f - x0 * x0) - (y0 * y0 + z0 * z0);
    // The loop body runs exactly twice (see `if (l == 1) break;`): once for the
    // nearest vertex pair, once for the mirrored pair with a shifted seed.
    for (int l = 0;; l++)
    {
        // Contribution of the current base vertex, weighted by a^4.
        if (a > 0)
        {
            float aaaa = (a * a) * (a * a);
            float xo, yo, zo;
            if (outGradOnly)
                GradCoordOut(seed, i, j, k, out xo, out yo, out zo);
            else
                GradCoordDual(seed, i, j, k, x0, y0, z0, out xo, out yo, out zo);
            vx += aaaa * xo;
            vy += aaaa * yo;
            vz += aaaa * zo;
        }
        // Secondary vertex: step along the axis with the largest absolute offset.
        float b = a;
        int i1 = i;
        int j1 = j;
        int k1 = k;
        float x1 = x0;
        float y1 = y0;
        float z1 = z0;
        if (ax0 >= ay0 && ax0 >= az0)
        {
            x1 += xNSign;
            b = b + ax0 + ax0;
            i1 -= xNSign * PrimeX;
        }
        else if (ay0 > ax0 && ay0 >= az0)
        {
            y1 += yNSign;
            b = b + ay0 + ay0;
            j1 -= yNSign * PrimeY;
        }
        else
        {
            z1 += zNSign;
            b = b + az0 + az0;
            k1 -= zNSign * PrimeZ;
        }
        if (b > 1)
        {
            b -= 1;
            float bbbb = (b * b) * (b * b);
            float xo, yo, zo;
            if (outGradOnly)
                GradCoordOut(seed, i1, j1, k1, out xo, out yo, out zo);
            else
                GradCoordDual(seed, i1, j1, k1, x1, y1, z1, out xo, out yo, out zo);
            vx += bbbb * xo;
            vy += bbbb * yo;
            vz += bbbb * zo;
        }
        if (l == 1) break;
        // Mirror the working point into the opposite half-cell for pass two.
        ax0 = 0.5f - ax0;
        ay0 = 0.5f - ay0;
        az0 = 0.5f - az0;
        x0 = xNSign * ax0;
        y0 = yNSign * ay0;
        z0 = zNSign * az0;
        a += (0.75f - ax0) - (ay0 + az0);
        i += (xNSign >> 1) & PrimeX;
        j += (yNSign >> 1) & PrimeY;
        k += (zNSign >> 1) & PrimeZ;
        xNSign = -xNSign;
        yNSign = -yNSign;
        zNSign = -zNSign;
        // Different seed for the second pass so its gradients differ.
        seed += 1293373;
    }
    // Apply the accumulated, amplitude-scaled warp offset.
    xr += vx * warpAmp;
    yr += vy * warpAmp;
    zr += vz * warpAmp;
}
}
}
| 41.295652 | 140 | 0.472561 | [
"MIT"
] | XalacRlyeh/Aster | src/Aster.Client/World/Noise.cs | 123,476 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http.Authentication;
using Microsoft.AspNetCore.Identity;
namespace <%= namespace %>.Models.ManageViewModels
{
/// <summary>
/// View model for the "manage logins" page. Based on the property names,
/// it carries the external logins already linked to the user and the
/// remaining authentication schemes — confirm against the controller.
/// </summary>
public class ManageLoginsViewModel
{
    // External logins (provider + provider key) associated with the user.
    public IList<UserLoginInfo> CurrentLogins { get; set; }

    // Other available external authentication schemes.
    public IList<AuthenticationDescription> OtherLogins { get; set; }
}
}
| 25.235294 | 73 | 0.750583 | [
"Apache-2.0"
] | OmniSharp/generator-aspnet | templates/projects/mvc/Models/ManageViewModels/ManageLoginsViewModel.cs | 431 | C# |
// Auto Generated, do not edit.
using EosSharp.Core;
using EosSharp.Core.Api.v1;
using EosSharp.Core.Providers;
using EosSharp.Unity3D;
using Newtonsoft.Json;
using System;
using System.Threading.Tasks;
namespace EosSharp.UnitTests.Unity3D
{
/// <summary>
/// Console-reporting smoke tests for the EOS chain/history API. Each public
/// method runs one case from <see cref="ApiUnitTestCases"/> against the
/// endpoint configured in the constructor and prints success or failure;
/// exceptions never propagate to the caller.
/// </summary>
public class ApiUnitTests
{
    ApiUnitTestCases ApiUnitTestCases;

    public ApiUnitTests()
    {
        var eosConfig = new EosConfigurator()
        {
            SignProvider = new DefaultSignProvider("5K57oSZLpfzePvQNpsLS6NfKXLhhRARNU13q6u2ZPQCGHgKLbTA"),

            //HttpEndpoint = "https://nodes.eos42.io", //Mainnet
            //ChainId = "aca376f206b8fc25a6ed44dbdc66547c36c6c33e3a119ffbeaef943642f0e906"

            HttpEndpoint = "https://nodeos01.btuga.io",
            ChainId = "cf057bbfb72640471fd910bcb67639c22df9f92470936cddc1ade0e2f2e7dc4f"
        };
        var eosApi = new EosApi(eosConfig, new HttpHandler());

        ApiUnitTestCases = new ApiUnitTestCases(eosConfig, eosApi);
    }

    /// <summary>
    /// Shared test driver: awaits <paramref name="testCase"/>, serializes any
    /// exception to the console as JSON, then prints a success/failure line.
    /// Replaces the identical try/catch/report block that was previously
    /// repeated in every test method.
    /// </summary>
    /// <param name="testName">Name printed in the result line.</param>
    /// <param name="testCase">The test case delegate to execute.</param>
    private static async Task RunTestAsync(string testName, Func<Task> testCase)
    {
        bool success = false;
        try
        {
            await testCase();
            success = true;
        }
        catch (Exception ex)
        {
            Console.WriteLine(JsonConvert.SerializeObject(ex));
        }

        if (success)
            Console.WriteLine("Test " + testName + " run successfully.");
        else
            Console.WriteLine("Test " + testName + " run failed.");
    }

    public Task GetInfo() => RunTestAsync(nameof(GetInfo), ApiUnitTestCases.GetInfo);

    public Task GetAccount() => RunTestAsync(nameof(GetAccount), ApiUnitTestCases.GetAccount);

    public Task GetCode() => RunTestAsync(nameof(GetCode), ApiUnitTestCases.GetCode);

    public Task GetAbi() => RunTestAsync(nameof(GetAbi), ApiUnitTestCases.GetAbi);

    public Task GetRawCodeAndAbi() => RunTestAsync(nameof(GetRawCodeAndAbi), ApiUnitTestCases.GetRawCodeAndAbi);

    public Task GetRawAbi() => RunTestAsync(nameof(GetRawAbi), ApiUnitTestCases.GetRawAbi);

    public Task AbiJsonToBin() => RunTestAsync(nameof(AbiJsonToBin), ApiUnitTestCases.AbiJsonToBin);

    public Task AbiBinToJson() => RunTestAsync(nameof(AbiBinToJson), ApiUnitTestCases.AbiBinToJson);

    public Task GetRequiredKeys() => RunTestAsync(nameof(GetRequiredKeys), ApiUnitTestCases.GetRequiredKeys);

    public Task GetBlock() => RunTestAsync(nameof(GetBlock), ApiUnitTestCases.GetBlock);

    public Task GetBlockHeaderState() => RunTestAsync(nameof(GetBlockHeaderState), ApiUnitTestCases.GetBlockHeaderState);

    public Task GetTableRows() => RunTestAsync(nameof(GetTableRows), ApiUnitTestCases.GetTableRows);

    public Task GetTableByScope() => RunTestAsync(nameof(GetTableByScope), ApiUnitTestCases.GetTableByScope);

    public Task GetCurrencyBalance() => RunTestAsync(nameof(GetCurrencyBalance), ApiUnitTestCases.GetCurrencyBalance);

    public Task GetCurrencyStats() => RunTestAsync(nameof(GetCurrencyStats), ApiUnitTestCases.GetCurrencyStats);

    public Task GetProducers() => RunTestAsync(nameof(GetProducers), ApiUnitTestCases.GetProducers);

    public Task GetProducerSchedule() => RunTestAsync(nameof(GetProducerSchedule), ApiUnitTestCases.GetProducerSchedule);

    public Task GetScheduledTransactions() => RunTestAsync(nameof(GetScheduledTransactions), ApiUnitTestCases.GetScheduledTransactions);

    public Task PushTransaction() => RunTestAsync(nameof(PushTransaction), ApiUnitTestCases.PushTransaction);

    public Task GetActions() => RunTestAsync(nameof(GetActions), ApiUnitTestCases.GetActions);

    public Task GetTransaction() => RunTestAsync(nameof(GetTransaction), ApiUnitTestCases.GetTransaction);

    public Task GetKeyAccounts() => RunTestAsync(nameof(GetKeyAccounts), ApiUnitTestCases.GetKeyAccounts);

    public Task GetControlledAccounts() => RunTestAsync(nameof(GetControlledAccounts), ApiUnitTestCases.GetControlledAccounts);

    /// <summary>Runs every test case sequentially.</summary>
    public async Task TestAll()
    {
        await GetInfo();
        await GetAccount();
        await GetCode();
        await GetAbi();
        await GetRawCodeAndAbi();
        await GetRawAbi();
        await AbiJsonToBin();
        await AbiBinToJson();
        await GetRequiredKeys();
        await GetBlock();
        await GetBlockHeaderState();
        await GetTableRows();
        await GetTableByScope();
        await GetCurrencyBalance();
        await GetCurrencyStats();
        await GetProducers();
        await GetProducerSchedule();
        await GetScheduledTransactions();
        await PushTransaction();
        await GetActions();
        await GetTransaction();
        await GetKeyAccounts();
        await GetControlledAccounts();
    }
}
} | 27.924051 | 110 | 0.530523 | [
"MIT"
] | BTCTrader/eos-sharp | EosSharp/EosSharp.UnitTests.Unity3D/ApiUnitTests.cs | 13,238 | C# |
using System;
namespace Bridge.Contract
{
/// <summary>
/// Contract for an emitter block. Judging by its members it covers three
/// concerns: tracking local-variable name maps per scope, managing a stack
/// of output writers (Push/Pop/Save/RestoreWriter), and low-level token and
/// keyword writing helpers used while generating output.
/// </summary>
public interface IAbstractEmitterBlock
{
    // Local-variable bookkeeping (name maps per scope).
    string AddLocal(string name, ICSharpCode.NRefactory.CSharp.AstType type);
    void AddLocals(System.Collections.Generic.IEnumerable<ICSharpCode.NRefactory.CSharp.ParameterDeclaration> declarations, ICSharpCode.NRefactory.CSharp.AstNode statement);
    void BeginBlock();
    System.Collections.Generic.Dictionary<string, string> BuildLocalsMap();
    void ClearLocalsMap(System.Collections.Generic.Dictionary<string, string> prevMap = null);
    System.Collections.Generic.Dictionary<string, string> BuildLocalsNamesMap();
    void ClearLocalsNamesMap(System.Collections.Generic.Dictionary<string, string> prevMap = null);
    void Emit();
    void EmitBlockOrIndentedLine(ICSharpCode.NRefactory.CSharp.AstNode node);
    // The emitter this block writes through.
    Bridge.Contract.IEmitter Emitter { get; set; }
    void EndBlock();
    void EnsureComma(bool newLine = true);
    int GetNumberOfEmptyLinesAtEnd();
    void Indent();
    bool IsOnlyWhitespaceOnPenultimateLine(bool lastTwoLines = true);
    // Output writer stack management.
    System.Text.StringBuilder NewWriter();
    bool NoValueableSiblings(ICSharpCode.NRefactory.CSharp.AstNode node);
    void Outdent();
    void PopLocals();
    string PopWriter(bool preventWrite = false);
    void PushLocals();
    void PushWriter(string format, Action callback = null);
    bool RemovePenultimateEmptyLines(bool withLast = false);
    void ResetLocals();
    bool RestoreWriter(IWriterInfo writer);
    IWriterInfo SaveWriter();
    // Low-level writing helpers; each WriteX member presumably emits the
    // corresponding token or keyword (confirm in the implementing class).
    void Write(params object[] values);
    void Write(object value);
    void WriteCatch();
    void WriteCloseBrace();
    void WriteCloseBrace(bool addSpace);
    void WriteCloseBracket();
    void WriteCloseBracket(bool addSpace);
    void WriteCloseParentheses();
    void WriteCloseParentheses(bool addSpace);
    void WriteColon();
    void WriteComma();
    void WriteComma(bool newLine);
    void WriteDo();
    void WriteDot();
    void WriteElse();
    void WriteFinally();
    void WriteFor();
    void WriteFunction();
    void WriteIf();
    void WriteIndent();
    string WriteIndentToString(string value);
    void WriteNew();
    void WriteNewLine();
    void WriteOpenBrace();
    void WriteOpenBrace(bool addSpace);
    void WriteOpenBracket();
    void WriteOpenBracket(bool addSpace);
    void WriteOpenCloseBrace();
    void WriteOpenCloseParentheses();
    void WriteOpenCloseParentheses(bool addSpace);
    void WriteOpenParentheses();
    void WriteOpenParentheses(bool addSpace);
    void WriteReturn(bool addSpace);
    void WriteScript(object value);
    void WriteSemiColon();
    void WriteSemiColon(bool newLine);
    void WriteSpace();
    void WriteSpace(bool addSpace);
    void WriteSwitch();
    void WriteThis();
    void WriteThrow();
    void WriteTry();
    void WriteVar(bool ignoreAsync = false);
    void WriteWhile();
}
}
| 40.461538 | 177 | 0.663815 | [
"MIT"
] | Oaz/bridgedotnet_Builder | Contract/IAbstractEmitterBlock.cs | 3,158 | C# |
using Application.Filters;
using Application.Interfaces.Repositories;
using Application.Wrappers;
using AutoMapper;
using MediatR;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace Application.Features
{
/// <summary>
/// MediatR query for a filtered, sorted, paged list of AdlerCards bundles.
/// </summary>
public class GetAllAdlerCardsBundlesQuery : IRequest<FilteredPagedResponse<IEnumerable<GetAllAdlerCardsBundlesViewModel>>>
{
    // Paging (ignored when NoPaging is true — presumably; confirm in repository).
    public int PageNumber { get; set; }
    public int PageSize { get; set; }
    // Filters: exact values, ranges, value lists, and free-text search,
    // each keyed by field name.
    public Dictionary<string, string> FilterValue { get; set; }
    public Dictionary<string, string> FilterRange { get; set; }
    public Dictionary<string, List<string>> FilterArray { get; set; }
    public Dictionary<string, string> FilterSearch { get; set; }
    // Sorting: field name and direction.
    public string SortBy { get; set; }
    public string SortType { get; set; }
    public bool NoPaging { get; set; }
}
/// <summary>
/// Handles <see cref="GetAllAdlerCardsBundlesQuery"/>: maps the query to a
/// repository filter, fetches the matching page and the total count, and
/// returns the mapped view models wrapped in a paged response.
/// </summary>
public class GetAllAdlerCardsBundlesQueryHandler : IRequestHandler<GetAllAdlerCardsBundlesQuery, FilteredPagedResponse<IEnumerable<GetAllAdlerCardsBundlesViewModel>>>
{
    private readonly IAdlerCardsBundleRepositoryAsync _adlercardsbundleRepository;
    private readonly IMapper _mapper;

    public GetAllAdlerCardsBundlesQueryHandler(IAdlerCardsBundleRepositoryAsync adlercardsbundleRepository, IMapper mapper)
    {
        _adlercardsbundleRepository = adlercardsbundleRepository;
        _mapper = mapper;
    }

    /// <param name="request">Filter/sort/paging options for the lookup.</param>
    /// <param name="cancellationToken">Not forwarded: the repository methods called here take no token.</param>
    public async Task<FilteredPagedResponse<IEnumerable<GetAllAdlerCardsBundlesViewModel>>> Handle(GetAllAdlerCardsBundlesQuery request, CancellationToken cancellationToken)
    {
        var validFilter = _mapper.Map<GetAllAdlerCardsBundlesParameter>(request);

        // Removed dead code: a FilteredRequestParameter was previously built
        // via Reflection.CopyProperties and then never used.

        // Total (unpaged) count plus the requested page of entities.
        int count = _adlercardsbundleRepository.GetCount(validFilter);
        var adlercardsbundle = await _adlercardsbundleRepository.GetPagedReponseAsync(validFilter);
        var adlercardsbundleViewModel = _mapper.Map<IEnumerable<GetAllAdlerCardsBundlesViewModel>>(adlercardsbundle);

        return new Wrappers.FilteredPagedResponse<IEnumerable<GetAllAdlerCardsBundlesViewModel>>(adlercardsbundleViewModel, validFilter, count);
    }
}
}
| 48.836735 | 177 | 0.751358 | [
"MIT"
] | Devsquares/AdlerZentrum-BackEnd | Application/Features/AdlerCardsBundle/Queries/GetAllAdlerCardsBundles/GetAllAdlerCardsBundlesQuery.cs | 2,393 | C# |
/*
* Copyright(c) 2021 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using Tizen.NUI.BaseComponents;
using Tizen.NUI.Binding;
using Tizen.NUI.Binding.Internals;
namespace Tizen.NUI.EXaml
{
/// <summary>
/// EXaml operation that attaches a gathered method as a handler for an
/// event on a gathered instance.
/// </summary>
internal class AddEvent : Operation
{
    public AddEvent(int instanceIndex, int elementIndex, int eventIndex, int valueIndex)
    {
        this.instanceIndex = instanceIndex;
        this.elementIndex = elementIndex;
        this.eventIndex = eventIndex;
        this.valueIndex = valueIndex;
    }

    /// <summary>
    /// Resolves the event target, the handler owner and the event metadata
    /// from the gathered tables, then subscribes the handler. A signature
    /// mismatch between method and event delegate type is logged, not thrown.
    /// </summary>
    public void Do()
    {
        var target = LoadEXaml.GatheredInstances[instanceIndex];
        var handlerOwner = LoadEXaml.GatheredInstances[elementIndex];
        var gatheredEvent = GatherEvent.GatheredEvents[eventIndex];

        try
        {
            var handlerMethod = GatherMethod.GatheredMethods[valueIndex];
            var handler = handlerMethod.CreateDelegate(gatheredEvent.EventHandlerType, handlerOwner);
            gatheredEvent.AddEventHandler(target, handler);
        }
        catch (ArgumentException ae)
        {
            Tizen.Log.Fatal("EXaml", ae.ToString());
        }
    }

    private int instanceIndex;
    private int elementIndex;
    private int eventIndex;
    private int valueIndex;
}
}
| 32.711864 | 116 | 0.660622 | [
"Apache-2.0",
"MIT"
] | kimcinoo/TizenFX | src/Tizen.NUI/src/internal/EXaml/Operation/AddEvent.cs | 1,932 | C# |
using System.Collections.Generic;
namespace Ladybug.Core
{
/// <summary>
/// Represents a snapshot of a context of a thread that is running in a debuggee process.
/// Changes made through the registers are not applied until <see cref="Flush"/> is called.
/// </summary>
public interface IThreadContext
{
    /// <summary>
    /// Gets a collection of all top level registers and their values.
    /// </summary>
    /// <returns>The top-level registers of the thread.</returns>
    IEnumerable<IRegister> GetTopLevelRegisters();
    /// <summary>
    /// Gets a collection of all registers and their values. This includes all nested registers as well.
    /// </summary>
    /// <returns>All registers of the thread, including nested sub-registers.</returns>
    IEnumerable<IRegister> GetAllRegisters();
    /// <summary>
    /// Gets a register by its name.
    /// </summary>
    /// <param name="name">The name of the register.</param>
    /// <returns>The register</returns>
    IRegister GetRegisterByName(string name);
    /// <summary>
    /// Commits all changes of the context, updating the state of the running thread.
    /// </summary>
    void Flush();
}
} | 32.617647 | 109 | 0.579802 | [
"MIT"
] | Washi1337/Ladybug | src/Core/Ladybug.Core/IThreadContext.cs | 1,111 | C# |
using System;
namespace WebApiUtil.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Use this attribute to change the name of the <see cref="ModelDescription"/> generated for a type.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Enum, AllowMultiple = false, Inherited = false)]
public sealed class ModelNameAttribute : Attribute
{
    public ModelNameAttribute(string name) => Name = name;

    /// <summary>The display name to use for the annotated model type.</summary>
    public string Name { get; private set; }
}
} | 31.166667 | 136 | 0.666667 | [
"MIT"
] | Ericmas001/WebApiUtil | WebApiUtil/Areas/HelpPage/ModelDescriptions/ModelNameAttribute.cs | 561 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ivs-2020-07-14.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using System.Net;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.IVS.Model
{
/// <summary>
/// Container for the parameters to the DeleteChannel operation.
/// Deletes the specified channel and its associated stream keys.
///
///
/// <para>
/// If you try to delete a live channel, you will get an error (409 ConflictException).
/// To delete a channel that is live, call <a>StopStream</a>, wait for the Amazon EventBridge
/// "Stream End" event (to verify that the stream's state was changed from Live to Offline),
/// then call DeleteChannel. (See <a href="https://docs.aws.amazon.com/ivs/latest/userguide/eventbridge.html">
/// Using EventBridge with Amazon IVS</a>.)
/// </para>
/// </summary>
public partial class DeleteChannelRequest : AmazonIVSRequest
{
    // Backing field; null means "not set" (see IsSetArn).
    private string _arn;

    /// <summary>
    /// Gets and sets the property Arn.
    /// <para>
    /// ARN of the channel to be deleted.
    /// </para>
    /// </summary>
    [AWSProperty(Required=true, Min=1, Max=128)]
    public string Arn
    {
        get => this._arn;
        set => this._arn = value;
    }

    // Check to see if Arn property is set
    internal bool IsSetArn() => this._arn != null;
}
} | 32.088235 | 114 | 0.644821 | [
"Apache-2.0"
] | ChristopherButtars/aws-sdk-net | sdk/src/Services/IVS/Generated/Model/DeleteChannelRequest.cs | 2,182 | C# |
namespace GroupDocs.Demo.Annotation.Mvc.Options
{
/// <summary>
/// Granularity of text selection.
/// </summary>
public enum TextSelectionMode
{
    // Select text word by word.
    ByWords = 0,
    // Select text character by character.
    ByChars
}
} | 16.875 | 48 | 0.622222 | [
"MIT"
] | atirtahirgroupdocs/GroupDocs.Annotation-for-.NET | Plugins/GroupDocs_Annotation_SharePointPlugin/GroupDocs.Demo.Annotation.Mvc/Options/TextSelectionMode.cs | 137 | C# |
using System;
using System.Globalization;
using System.Resources;
using System.Windows.Forms;
using TestPlatform.Views.MainForms;
using TestPlatform.Models;
namespace TestPlatform.Views.SidebarUserControls
{
/// <summary>
/// Sidebar control offering new/edit/delete/recover actions for reaction
/// test programs. Each action swaps the corresponding editor/management
/// control into the main form's content panel.
/// </summary>
public partial class ReactionControl : DefaultUserControl
{
    private ResourceManager LocRM = new ResourceManager("TestPlatform.Resources.Localizations.LocalizedResources", typeof(FormMain).Assembly);
    private CultureInfo currentCulture = CultureInfo.CurrentUICulture;

    public ReactionControl()
    {
        this.Dock = DockStyle.Fill;
        InitializeComponent();
    }

    /// <summary>
    /// If a reaction-test editor (FormTRConfig) occupies the content panel,
    /// asks the user whether to save pending work. Returns true when the
    /// panel was cleared and navigation may proceed; returns false after a
    /// successful save (the panel is then left as-is).
    /// </summary>
    private bool checkSave()
    {
        bool result = false;
        if (FileManipulation.GlobalFormMain._contentPanel.Controls[0] is FormTRConfig)
        {
            DialogResult dialogResult = MessageBox.Show(LocRM.GetString("savePending", currentCulture), LocRM.GetString("savePendingTitle", currentCulture), MessageBoxButtons.YesNo);
            if (dialogResult == DialogResult.Yes)
            {
                FormTRConfig programToSave = (FormTRConfig)(FileManipulation.GlobalFormMain._contentPanel.Controls[0]);
                result = programToSave.save();
            }
            else
            {
                // User declined saving: discard the editor and proceed.
                FileManipulation.GlobalFormMain._contentPanel.Controls.Clear();
                return true;
            }
        }
        if (result == false)
        {
            FileManipulation.GlobalFormMain._contentPanel.Controls.Clear();
            return true;
        }
        else
        {
            return false;
        }
    }

    /// <summary>
    /// Shared gate for all four button handlers (previously duplicated in
    /// each): true when the content panel is empty, or checkSave allowed it
    /// to be released.
    /// </summary>
    private bool ensureContentPanelAvailable()
    {
        if (FileManipulation.GlobalFormMain._contentPanel.Controls.Count > 0)
        {
            return checkSave();
        }
        return true;
    }

    // Opens a blank reaction-test editor.
    private void newReactButton_Click(object sender, EventArgs e)
    {
        try
        {
            if (newReactButton.Checked && ensureContentPanelAvailable())
            {
                FormTRConfig configureProgram = new FormTRConfig("false");
                FileManipulation.GlobalFormMain._contentPanel.Controls.Add(configureProgram);
                newReactButton.Checked = false;
            }
        }
        catch (Exception ex) { MessageBox.Show(ex.Message); }
    }

    // Lets the user pick an existing program and opens it in the editor.
    private void editReactButton_Click(object sender, EventArgs e)
    {
        if (editReactButton.Checked && ensureContentPanelAvailable())
        {
            try
            {
                FormDefine defineProgram = new FormDefine(LocRM.GetString("editProgram", currentCulture), ReactionProgram.GetProgramsPath(), "prg", "program", false, false);
                DialogResult result = defineProgram.ShowDialog();
                if (result == DialogResult.OK)
                {
                    string editProgramName = defineProgram.ReturnValue;
                    FormTRConfig configureProgram = new FormTRConfig(editProgramName);
                    configureProgram.PrgName = editProgramName;
                    FileManipulation.GlobalFormMain._contentPanel.Controls.Add(configureProgram);
                    editReactButton.Checked = false;
                }
                else
                {
                    /* user cancelled the program selection - nothing to do */
                }
            }
            catch (Exception ex) { MessageBox.Show(ex.Message); }
        }
    }

    // Shows the file-management view for deleting programs (moves them to backup).
    private void deleteReactButton_Click(object sender, EventArgs e)
    {
        try
        {
            if (deleteReactButton.Checked && ensureContentPanelAvailable())
            {
                FileManagment deleteProgram = new FileManagment(ReactionProgram.GetProgramsPath(), FileManipulation.ReactionTestFilesBackupPath, 'd', LocRM.GetString("reactionTest", currentCulture));
                FileManipulation.GlobalFormMain._contentPanel.Controls.Add(deleteProgram);
                deleteReactButton.Checked = false;
            }
        }
        catch (Exception ex) { MessageBox.Show(ex.Message); }
    }

    // Shows the file-management view for restoring programs from backup.
    private void recoverReactButton_Click(object sender, EventArgs e)
    {
        try
        {
            if (recoverReactButton.Checked && ensureContentPanelAvailable())
            {
                FileManagment recoverProgram = new FileManagment(FileManipulation.ReactionTestFilesBackupPath, ReactionProgram.GetProgramsPath(), 'r', LocRM.GetString("reactionTest", currentCulture));
                FileManipulation.GlobalFormMain._contentPanel.Controls.Add(recoverProgram);
                recoverReactButton.Checked = false;
            }
        }
        catch (Exception ex) { MessageBox.Show(ex.Message); }
    }
}
}
| 39.341615 | 208 | 0.517682 | [
"MIT"
] | lab-neuro-comp/Test-Plataform | StroopTest/Views/SidebarUserControls/ReactionControl.cs | 6,336 | C# |
using DG.Tweening;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
namespace GuiBaseUI
{
/// <summary>
/// Animates a pooled UI icon from one screen position to another using a
/// linear DOTween move. The pool, root container and prefab are lazily
/// created on first use and are static for the application lifetime.
/// </summary>
public class IconMove
{
    // Parent transform under the first Canvas found in the scene.
    static Transform root;
    // Template image object; instantiated per pooled icon.
    static GameObject prefab;
    // Idle icons (scaled to zero) awaiting reuse.
    static List<IconItem> pool = new List<IconItem>();

    /// <summary>
    /// Moves an icon showing <paramref name="sprite"/> from
    /// <paramref name="start"/> to <paramref name="target"/> over
    /// <paramref name="duration"/> seconds, then returns it to the pool.
    /// </summary>
    public static void Move(Vector3 start, Vector3 target, float duration = .5f, Sprite sprite = null)
    {
        IconItem item;
        if (pool.Count > 0)
        {
            item = pool[0];
            pool.RemoveAt(0);
        }
        else
        {
            item = GetNewIcon();
        }
        item.image.sprite = sprite;
        item.transform.localScale = Vector3.one;
        item.transform.position = start;
        var tweener = item.transform.DOMove(target, duration);
        tweener.SetEase(Ease.Linear);
        tweener.onComplete = delegate () {
            // Unity's overloaded bool conversion: false if the transform was
            // destroyed (e.g. scene unload) while the tween ran. In that case
            // the item is silently dropped instead of returning to the pool.
            if (item.transform)
            {
                item.transform.localScale = Vector3.zero;
                pool.Add(item);
            }
        };
    }

    /// <summary>
    /// Creates a pooled icon, lazily building the root container and prefab
    /// on first call. NOTE(review): `root == null` will not detect a root
    /// destroyed by a scene change until Unity's fake-null kicks in — verify
    /// behavior across scene loads.
    /// </summary>
    static IconItem GetNewIcon()
    {
        if (root == null)
        {
            GameObject go = new GameObject();
            go.name = "IconMove";
            root = go.transform;
            root.SetParent(Object.FindObjectOfType<Canvas>().transform,false);
            root.position = Vector3.zero;
            prefab = CreateUI.NewImage();
            prefab.transform.SetParent(root);
            prefab.transform.localScale = Vector3.zero;
        }
        IconItem item = new IconItem(Object.Instantiate(prefab));
        return item;
    }

    // Lightweight handle pairing the icon's transform with its Image component.
    struct IconItem
    {
        public Transform transform;
        public Image image;

        public IconItem(GameObject go)
        {
            transform = go.transform;
            image = go.GetComponent<Image>();
            // New icons start parented under root and hidden (zero scale).
            transform.SetParent(root);
            transform.localScale = Vector3.zero;
        }
    }
}
} | 30.695652 | 106 | 0.495279 | [
"MIT"
] | 410998623/Taiwu_mods | GuiBaseUI/IconMove.cs | 2,118 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace ProvaBlazor.Data
{
/// <summary>
/// Base type for an operator (staff member): identity plus name data.
/// </summary>
public class Operatore
{
    // Unique identifier.
    public int Id { get; set; }
    public string Name { get; set; }
    public string Surname { get; set; }
}
/// <summary>
/// Meteorologist operator; adds a field of competence ("competenza").
/// </summary>
public class Metereologo : Operatore
{
    public string Competenza { get; set; }
}
/// <summary>
/// Presenter operator; adds the language ("lingua") they present in.
/// </summary>
public class Presentatore : Operatore
{
    public string Lingua { get; set; }
}
}
| 19.56 | 46 | 0.619632 | [
"MIT"
] | FITSTIC/Hackathon_SampleCode_19_21 | ProvaBlazor/ProvaBlazor/Data/Operatore.cs | 491 | C# |
namespace BehaviourTree.Decorators
{
/// <summary>
/// Decorator that keeps ticking its child until the child fails, at which
/// point this node succeeds; while the child does not fail, this node
/// reports Running.
/// </summary>
public sealed class UntilFailed<TContext> : DecoratorBehaviour<TContext>
{
    public UntilFailed(IBehaviour<TContext> child) : this("UntilFailed", child)
    {
    }

    public UntilFailed(string name, IBehaviour<TContext> child) : base(name, child)
    {
    }

    protected override BehaviourStatus Update(TContext context)
    {
        if (Child.Tick(context) == BehaviourStatus.Failed)
        {
            return BehaviourStatus.Succeeded;
        }

        return BehaviourStatus.Running;
    }
}
}
| 29.285714 | 112 | 0.62439 | [
"MIT"
] | muscla87/wordle-telegram-bot | BehaviourTree/Decorators/UntilFailed.cs | 617 | C# |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.