context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Orleans.Providers.Streams.Common;
using Orleans.Runtime;
using Orleans.Streams;
using Orleans.TestingHost.Utils;
using Xunit;
namespace UnitTests.OrleansRuntime.Streams
{
public class PooledQueueCacheTests
{
    private const int PooledBufferCount = 8;
    private const int PooledBufferSize = 1 << 10; // 1K
    private const int MessageSize = 1 << 7; // 128
    private const int MessagesPerBuffer = 8;
    private const string TestStreamNamespace = "blarg";

    /// <summary>
    /// Minimal queue message used to feed the cache. Every instance shares the same
    /// fixed 128-byte payload, since these tests verify ordering and counts, not content.
    /// </summary>
    private class TestQueueMessage
    {
        private static readonly byte[] FixedMessage = new byte[MessageSize];
        public StreamId StreamId;
        public long SequenceNumber;
        public readonly byte[] Data = FixedMessage;
        public DateTime EnqueueTimeUtc = DateTime.UtcNow;
    }

    /// <summary>
    /// Batch container stub carrying only the identifying data the assertions need;
    /// event enumeration is intentionally unimplemented.
    /// </summary>
    private class TestBatchContainer : IBatchContainer
    {
        public StreamId StreamId { get; set; }
        public StreamSequenceToken SequenceToken { get; set; }
        public byte[] Data { get; set; }

        public IEnumerable<Tuple<T, StreamSequenceToken>> GetEvents<T>()
        {
            throw new NotImplementedException();
        }

        public bool ImportRequestContext()
        {
            throw new NotImplementedException();
        }
    }

    /// <summary>
    /// Adapter that deserializes the payload written by <see cref="CachedMessageConverter"/>
    /// back into a <see cref="TestBatchContainer"/>.
    /// </summary>
    private class TestCacheDataAdapter : ICacheDataAdapter
    {
        public IBatchContainer GetBatchContainer(ref CachedMessage cachedMessage)
        {
            // Deserialize payload
            int readOffset = 0;
            ArraySegment<byte> payload = SegmentBuilder.ReadNextBytes(cachedMessage.Segment, ref readOffset);
            return new TestBatchContainer
            {
                StreamId = cachedMessage.StreamId,
                SequenceToken = GetSequenceToken(ref cachedMessage),
                Data = payload.ToArray()
            };
        }

        public StreamSequenceToken GetSequenceToken(ref CachedMessage cachedMessage)
        {
            return new EventSequenceTokenV2(cachedMessage.SequenceNumber);
        }
    }

    /// <summary>
    /// Converts test queue messages into <see cref="CachedMessage"/> instances, serializing
    /// payloads into pooled fixed-size buffers the way a real queue adapter would.
    /// </summary>
    private class CachedMessageConverter
    {
        private readonly IObjectPool<FixedSizeBuffer> bufferPool;
        private readonly IEvictionStrategy evictionStrategy;
        private FixedSizeBuffer currentBuffer;

        public CachedMessageConverter(IObjectPool<FixedSizeBuffer> bufferPool, IEvictionStrategy evictionStrategy)
        {
            this.bufferPool = bufferPool;
            this.evictionStrategy = evictionStrategy;
        }

        public CachedMessage ToCachedMessage(TestQueueMessage queueMessage, DateTime dequeueTimeUtc)
        {
            StreamPosition streamPosition = GetStreamPosition(queueMessage);
            return new CachedMessage
            {
                StreamId = streamPosition.StreamId,
                SequenceNumber = queueMessage.SequenceNumber,
                EnqueueTimeUtc = queueMessage.EnqueueTimeUtc,
                DequeueTimeUtc = dequeueTimeUtc,
                Segment = SerializeMessageIntoPooledSegment(queueMessage),
            };
        }

        private StreamPosition GetStreamPosition(TestQueueMessage queueMessage)
        {
            StreamSequenceToken sequenceToken = new EventSequenceTokenV2(queueMessage.SequenceNumber);
            return new StreamPosition(queueMessage.StreamId, sequenceToken);
        }

        /// <summary>
        /// Serializes the payload into the current pooled buffer, allocating a fresh buffer
        /// (and notifying the eviction strategy) when the current one is missing or full.
        /// </summary>
        private ArraySegment<byte> SerializeMessageIntoPooledSegment(TestQueueMessage queueMessage)
        {
            // serialize payload
            int size = SegmentBuilder.CalculateAppendSize(queueMessage.Data);

            // get segment from current block
            ArraySegment<byte> segment;
            if (currentBuffer == null || !currentBuffer.TryGetSegment(size, out segment))
            {
                // no block or block full, get new block and try again
                currentBuffer = bufferPool.Allocate();
                // the eviction strategy must track every allocated block so it can purge in order
                this.evictionStrategy.OnBlockAllocated(currentBuffer);
                // if this fails with clean block, then requested size is too big
                if (!currentBuffer.TryGetSegment(size, out segment))
                {
                    string errmsg = String.Format(CultureInfo.InvariantCulture,
                        "Message size is too big. MessageSize: {0}", size);
                    throw new ArgumentOutOfRangeException(nameof(queueMessage), errmsg);
                }
            }

            // encode payload into the segment
            int writeOffset = 0;
            SegmentBuilder.Append(segment, ref writeOffset, queueMessage.Data);
            return segment;
        }
    }

    /// <summary>
    /// Creates a cache wired to a pooled-buffer converter and a chronological eviction strategy.
    /// Shared by all tests so the wiring stays consistent.
    /// </summary>
    private static (PooledQueueCache Cache, CachedMessageConverter Converter) CreateFixture()
    {
        var bufferPool = new ObjectPool<FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
        var dataAdapter = new TestCacheDataAdapter();
        var cache = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null);
        var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(10)), null, null);
        evictionStrategy.PurgeObservable = cache;
        var converter = new CachedMessageConverter(bufferPool, evictionStrategy);
        return (cache, converter);
    }

    /// <summary>
    /// Fill the cache with 2 streams.
    /// Get valid cursor to start of each stream.
    /// Walk each cursor until there is no more data on each stream.
    /// Alternate adding messages to cache and walking cursors.
    /// </summary>
    [Fact, TestCategory("BVT"), TestCategory("Streaming")]
    public void GoldenPathTest()
    {
        var (cache, converter) = CreateFixture();
        RunGoldenPath(cache, converter, 111);
    }

    /// <summary>
    /// Run normal golden path test, then purge the cache, and then run another golden path test.
    /// Goal is to make sure cache cleans up correctly when all data is purged.
    /// </summary>
    [Fact, TestCategory("BVT"), TestCategory("Streaming")]
    public void CacheDrainTest()
    {
        var (cache, converter) = CreateFixture();
        // fixed local-variable typo: was "startSequenceNuber"
        int startSequenceNumber = 222;
        startSequenceNumber = RunGoldenPath(cache, converter, startSequenceNumber);
        RunGoldenPath(cache, converter, startSequenceNumber);
    }

    /// <summary>
    /// Adds <paramref name="count"/> messages, alternating between the two streams, starting at
    /// <paramref name="firstSequenceNumber"/>. Returns the next unused sequence number.
    /// </summary>
    private static int AddAlternatingMessages(PooledQueueCache cache, CachedMessageConverter converter, StreamId stream1, StreamId stream2, int firstSequenceNumber, int count)
    {
        List<TestQueueMessage> messages = Enumerable.Range(0, count)
            .Select(i => new TestQueueMessage
            {
                StreamId = i % 2 == 0 ? stream1 : stream2,
                SequenceNumber = firstSequenceNumber + i
            })
            .ToList();
        DateTime utcNow = DateTime.UtcNow;
        List<CachedMessage> cachedMessages = messages
            .Select(m => converter.ToCachedMessage(m, utcNow))
            .ToList();
        cache.Add(cachedMessages, utcNow);
        return firstSequenceNumber + count;
    }

    /// <summary>
    /// Walks a cursor until it is exhausted, asserting that every delivered batch belongs to
    /// the expected stream. Returns the number of events consumed.
    /// </summary>
    private static int DrainCursor(PooledQueueCache cache, object cursor, StreamId expectedStream)
    {
        Assert.NotNull(cursor);
        int eventCount = 0;
        while (cache.TryGetNextMessage(cursor, out IBatchContainer batch))
        {
            Assert.NotNull(batch);
            Assert.Equal(expectedStream, batch.StreamId);
            Assert.NotNull(batch.SequenceToken);
            eventCount++;
        }
        return eventCount;
    }

    /// <summary>
    /// Core scenario: fill the cache with two interleaved streams, drain both cursors, then
    /// repeatedly add a buffer's worth of messages and drain again until the cache has been
    /// filled twice over. Returns the next unused sequence number so runs can be chained.
    /// </summary>
    private int RunGoldenPath(PooledQueueCache cache, CachedMessageConverter converter, int startOfCache)
    {
        int sequenceNumber = startOfCache;
        var stream1 = StreamId.Create(TestStreamNamespace, Guid.NewGuid());
        var stream2 = StreamId.Create(TestStreamNamespace, Guid.NewGuid());

        // Add enough messages (newer than any cursor) to fill the buffer pool.
        sequenceNumber = AddAlternatingMessages(cache, converter, stream1, stream2, sequenceNumber, MessagesPerBuffer * PooledBufferCount);

        // Messages alternate streams, so each stream owns exactly half of everything added.
        object stream1Cursor = cache.GetCursor(stream1, new EventSequenceTokenV2(startOfCache));
        int stream1EventCount = DrainCursor(cache, stream1Cursor, stream1);
        Assert.Equal((sequenceNumber - startOfCache) / 2, stream1EventCount);

        object stream2Cursor = cache.GetCursor(stream2, new EventSequenceTokenV2(startOfCache));
        int stream2EventCount = DrainCursor(cache, stream2Cursor, stream2);
        Assert.Equal((sequenceNumber - startOfCache) / 2, stream2EventCount);

        // Add a block's worth of events, then walk each cursor. Do this enough times to fill the cache twice.
        for (int j = 0; j < PooledBufferCount * 2; j++)
        {
            sequenceNumber = AddAlternatingMessages(cache, converter, stream1, stream2, sequenceNumber, MessagesPerBuffer);

            stream1EventCount += DrainCursor(cache, stream1Cursor, stream1);
            Assert.Equal((sequenceNumber - startOfCache) / 2, stream1EventCount);

            stream2EventCount += DrainCursor(cache, stream2Cursor, stream2);
            Assert.Equal((sequenceNumber - startOfCache) / 2, stream2EventCount);
        }
        return sequenceNumber;
    }
}
}
| |
#region BSD License
/*
Copyright (c) 2004-2005 Matthew Holmes (matthew@wildfiregames.com), Dan Moorehead (dan05a@gmail.com)
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the
distribution.
* The name of the author may not be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#endregion
using System;
using System.IO;
using System.Xml;
using Prebuild.Core.Attributes;
using Prebuild.Core.Interfaces;
using Prebuild.Core.Utilities;
using Prebuild.Core.Targets;
namespace Prebuild.Core.Nodes
{
/// <summary>
/// Build action applied to a file in the generated project
/// (maps to the corresponding Visual Studio/MSBuild build action).
/// </summary>
public enum BuildAction
{
/// <summary>
/// The file takes no part in the build.
/// </summary>
None,
/// <summary>
/// The file is compiled as source code ("Compile").
/// </summary>
Compile,
/// <summary>
/// The file is included as content ("Content").
/// </summary>
Content,
/// <summary>
/// The file is embedded as a resource ("EmbeddedResource").
/// </summary>
EmbeddedResource,
/// <summary>
/// The file is a WPF application definition ("ApplicationDefinition").
/// </summary>
ApplicationDefinition,
/// <summary>
/// The file is a WPF/XAML page ("Page").
/// </summary>
Page,
/// <summary>
/// The file is copied to the output ("Copy").
/// </summary>
Copy
}
/// <summary>
/// File sub type recorded in the generated project
/// (maps to the corresponding Visual Studio "SubType" value).
/// </summary>
public enum SubType
{
/// <summary>
/// Plain source code.
/// </summary>
Code,
/// <summary>
/// A component class.
/// </summary>
Component,
/// <summary>
/// A designer file.
/// </summary>
Designer,
/// <summary>
/// A Windows Forms form.
/// </summary>
Form,
/// <summary>
/// A settings file (see FileNode.Parse, which forces this for ".settings" files).
/// </summary>
Settings,
/// <summary>
/// A user control.
/// </summary>
UserControl,
/// <summary>
/// A code-behind file.
/// </summary>
CodeBehind,
}
/// <summary>
/// Copy-to-output-directory setting for a file
/// (maps to the Visual Studio "Copy to Output Directory" options).
/// </summary>
public enum CopyToOutput
{
// Never copy the file to the output directory (default; see FileNode).
Never,
// Always copy the file to the output directory.
Always,
// Copy only when the source file is newer than the output copy.
PreserveNewest
}
/// <summary>
/// Represents a &lt;File&gt; element in a prebuild script: a single source file plus the
/// metadata (build action, sub type, link/copy options) used when emitting project files.
/// </summary>
[DataNode("File")]
public class FileNode : DataNode
{
    #region Fields

    private string m_Path;
    private string m_ResourceName = "";
    private BuildAction? m_BuildAction;
    private bool m_Valid;
    private SubType? m_SubType;
    private CopyToOutput m_CopyToOutput = CopyToOutput.Never;
    private bool m_Link = false;
    private string m_LinkPath = string.Empty;
    private bool m_PreservePath = false;

    #endregion

    #region Properties

    /// <summary>
    /// Gets the file path (environment variables interpolated, whitespace trimmed).
    /// </summary>
    public string Path
    {
        get
        {
            return m_Path;
        }
    }

    /// <summary>
    /// Gets the resource name associated with the file (empty when none was specified).
    /// </summary>
    public string ResourceName
    {
        get
        {
            return m_ResourceName;
        }
    }

    /// <summary>
    /// Gets the build action: the explicit value from the script when present,
    /// otherwise one derived from the file name.
    /// </summary>
    public BuildAction BuildAction
    {
        get
        {
            if (m_BuildAction != null)
                return m_BuildAction.Value;
            else
                return GetBuildActionByFileName(this.Path);
        }
    }

    /// <summary>
    /// Gets the copy-to-output-directory setting for the file.
    /// </summary>
    public CopyToOutput CopyToOutput
    {
        get
        {
            return this.m_CopyToOutput;
        }
    }

    /// <summary>
    /// Gets a value indicating whether the file is included as a link rather than directly.
    /// </summary>
    public bool IsLink
    {
        get
        {
            return this.m_Link;
        }
    }

    /// <summary>
    /// Gets the path used for the file when it is included as a link.
    /// </summary>
    public string LinkPath
    {
        get
        {
            return this.m_LinkPath;
        }
    }

    /// <summary>
    /// Gets the file sub type: the explicit value from the script when present,
    /// otherwise one derived from the file name.
    /// </summary>
    public SubType SubType
    {
        get
        {
            if (m_SubType != null)
                return m_SubType.Value;
            else
                return GetSubTypeByFileName(this.Path);
        }
    }

    /// <summary>
    /// Gets a value indicating whether the file existed on disk when the node was parsed.
    /// </summary>
    public bool IsValid
    {
        get
        {
            return m_Valid;
        }
    }

    /// <summary>
    /// Gets a value indicating whether the file's directory structure should be preserved.
    /// </summary>
    public bool PreservePath
    {
        get
        {
            return m_PreservePath;
        }
    }

    #endregion

    #region Public Methods

    /// <summary>
    /// Parses the &lt;File&gt; element: reads attribute metadata, then resolves and
    /// validates the file path from the element's inner text.
    /// </summary>
    /// <param name="node">The XML node to parse; must not be null.</param>
    public override void Parse(XmlNode node)
    {
        // Fix: validate the argument before any dereference — the original performed this
        // null check only after node had already been used several times.
        if (node == null)
        {
            throw new ArgumentNullException("node");
        }

        string buildAction = Helper.AttributeValue(node, "buildAction", String.Empty);
        if (buildAction != string.Empty)
            m_BuildAction = (BuildAction)Enum.Parse(typeof(BuildAction), buildAction);
        string subType = Helper.AttributeValue(node, "subType", string.Empty);
        if (subType != String.Empty)
            m_SubType = (SubType)Enum.Parse(typeof(SubType), subType);

        // Fix: removed leftover Console.WriteLine debug spew that polluted tool output,
        // and dropped the redundant ToString() on a string field.
        m_ResourceName = Helper.AttributeValue(node, "resourceName", m_ResourceName);
        this.m_Link = bool.Parse(Helper.AttributeValue(node, "link", bool.FalseString));
        if (this.m_Link == true)
        {
            this.m_LinkPath = Helper.AttributeValue(node, "linkPath", string.Empty);
        }
        this.m_CopyToOutput = (CopyToOutput)Enum.Parse(typeof(CopyToOutput), Helper.AttributeValue(node, "copyToOutput", this.m_CopyToOutput.ToString()));
        this.m_PreservePath = bool.Parse(Helper.AttributeValue(node, "preservePath", bool.FalseString));

        m_Path = Helper.InterpolateForEnvironmentVariables(node.InnerText);
        if (m_Path == null)
        {
            m_Path = "";
        }
        m_Path = m_Path.Trim();

        // A missing file is a warning, not an error: the node stays parsed but is marked invalid.
        m_Valid = true;
        if (!File.Exists(m_Path))
        {
            m_Valid = false;
            Kernel.Instance.Log.Write(LogType.Warning, "File does not exist: {0}", m_Path);
        }

        // .settings files are handled by the settings designer, not compiled directly.
        if (System.IO.Path.GetExtension(m_Path) == ".settings")
        {
            m_SubType = SubType.Settings;
            m_BuildAction = BuildAction.None;
        }
    }

    #endregion
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Drawing;
using System.Diagnostics;
using System.Collections;
using System.ComponentModel;
using System.Globalization;
using System.Windows.Forms;
using OpenLiveWriter.Controls;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.Localization;
using OpenLiveWriter.Localization.Bidi;
using OpenLiveWriter.PostEditor.ContentSources;
namespace OpenLiveWriter.PostEditor.LiveClipboard
{
/// <summary>
/// Dialog that lets the user change which content-source component handles a given
/// Live Clipboard format. Lists every component supporting the format and pre-selects
/// the component of the existing handler.
/// </summary>
internal class LiveClipboardChangeHandlerForm : ApplicationDialog
{
    private System.Windows.Forms.ColumnHeader columnHeaderHandler;
    private System.Windows.Forms.Button buttonOK;
    private System.Windows.Forms.Button buttonCancel;
    private System.Windows.Forms.Label labelCaption;
    private System.Windows.Forms.Label labelContentType;
    private System.Windows.Forms.Label labelFormatName;
    private System.Windows.Forms.PictureBox pictureBoxFormatIcon;
    private System.Windows.Forms.ListView listViewComponents;
    private System.Windows.Forms.ImageList imageListComponents;
    private System.ComponentModel.IContainer components;

    // The format whose handler is being changed; fixed for the dialog's lifetime.
    private LiveClipboardFormat _targetFormat;

    public LiveClipboardChangeHandlerForm(LiveClipboardFormatHandler existingHandler)
    {
        //
        // Required for Windows Form Designer support
        //
        InitializeComponent();

        this.columnHeaderHandler.Text = Res.Get(StringId.ChangeLiveClipboardHandlerComponent);
        this.buttonOK.Text = Res.Get(StringId.OKButtonText);
        this.buttonCancel.Text = Res.Get(StringId.CancelButton);
        this.labelCaption.Text = Res.Get(StringId.ChangeLiveClipboardHandlerCaption);
        this.Text = Res.Get(StringId.ChangeLiveClipboardHandlerTitle);

        // save and populate format info
        _targetFormat = existingHandler.Format;
        pictureBoxFormatIcon.Image = existingHandler.ContentSource.Image;
        labelFormatName.Text = existingHandler.FormatName;
        labelContentType.Text = existingHandler.FriendlyContentType;
        labelCaption.Text = String.Format(CultureInfo.CurrentCulture, labelCaption.Text, existingHandler.FormatName);

        // populate the list with content sources that support this format
        ContentSourceInfo[] contentSources = LiveClipboardManager.GetContentSourcesForFormat(existingHandler.Format);
        Array.Sort(contentSources, new ContentSourceInfo.NameComparer());
        foreach (ContentSourceInfo contentSource in contentSources)
        {
            LiveClipboardComponentDisplay componentDisplay = new LiveClipboardComponentDisplay(contentSource);
            imageListComponents.Images.Add(componentDisplay.Icon);
            ListViewItem listViewItem = new ListViewItem();
            listViewItem.Tag = contentSource;
            listViewItem.ImageIndex = imageListComponents.Images.Count - 1;
            listViewItem.Text = " " + componentDisplay.Name;
            listViewComponents.Items.Add(listViewItem);
            if (contentSource.Equals(existingHandler.ContentSource))
                listViewItem.Selected = true;
        }
    }

    /// <summary>
    /// Gets the format handler exposed by the selected component for the target format,
    /// or null when nothing is selected or no matching handler is found.
    /// </summary>
    public LiveClipboardFormatHandler FormatHandler
    {
        get
        {
            // Fix: guard against an empty selection. The list pre-selects the existing
            // handler, but the user can deselect it before pressing OK, and the original
            // unguarded SelectedItems[0] access would throw ArgumentOutOfRangeException.
            if (listViewComponents.SelectedItems.Count == 0)
                return null;

            // scan for the handler on this content source
            ContentSourceInfo contentSource = listViewComponents.SelectedItems[0].Tag as ContentSourceInfo;
            foreach (LiveClipboardFormatHandler formatHandler in contentSource.LiveClipboardFormatHandlers)
                if (formatHandler.Format.Equals(_targetFormat))
                    return formatHandler;

            // handler not found (should be impossible)
            Trace.Fail("Unable to find valid handler!");
            return null;
        }
    }

    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            if (components != null)
            {
                components.Dispose();
            }
        }
        base.Dispose(disposing);
    }

    #region Windows Form Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.components = new System.ComponentModel.Container();
        this.listViewComponents = new System.Windows.Forms.ListView();
        this.columnHeaderHandler = new System.Windows.Forms.ColumnHeader();
        this.imageListComponents = new System.Windows.Forms.ImageList(this.components);
        this.buttonOK = new System.Windows.Forms.Button();
        this.buttonCancel = new System.Windows.Forms.Button();
        this.labelCaption = new System.Windows.Forms.Label();
        this.labelContentType = new System.Windows.Forms.Label();
        this.labelFormatName = new System.Windows.Forms.Label();
        this.pictureBoxFormatIcon = new System.Windows.Forms.PictureBox();
        this.SuspendLayout();
        //
        // listViewComponents
        //
        this.listViewComponents.AutoArrange = false;
        this.listViewComponents.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
            this.columnHeaderHandler});
        this.listViewComponents.FullRowSelect = true;
        this.listViewComponents.HeaderStyle = System.Windows.Forms.ColumnHeaderStyle.Nonclickable;
        this.listViewComponents.HideSelection = false;
        this.listViewComponents.Location = new System.Drawing.Point(6, 68);
        this.listViewComponents.MultiSelect = false;
        this.listViewComponents.Name = "listViewComponents";
        this.listViewComponents.RightToLeftLayout = BidiHelper.IsRightToLeft;
        this.listViewComponents.Size = new System.Drawing.Size(302, 140);
        this.listViewComponents.SmallImageList = this.imageListComponents;
        this.listViewComponents.TabIndex = 3;
        this.listViewComponents.View = System.Windows.Forms.View.Details;
        //
        // columnHeaderHandler
        //
        this.columnHeaderHandler.Text = "Component";
        this.columnHeaderHandler.Width = 275;
        //
        // imageListComponents
        //
        this.imageListComponents.ColorDepth = System.Windows.Forms.ColorDepth.Depth32Bit;
        this.imageListComponents.ImageSize = new System.Drawing.Size(16, 16);
        this.imageListComponents.TransparentColor = System.Drawing.Color.Transparent;
        //
        // buttonOK
        //
        this.buttonOK.DialogResult = System.Windows.Forms.DialogResult.OK;
        this.buttonOK.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.buttonOK.Location = new System.Drawing.Point(153, 215);
        this.buttonOK.Name = "buttonOK";
        this.buttonOK.TabIndex = 4;
        this.buttonOK.Text = "OK";
        //
        // buttonCancel
        //
        this.buttonCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
        this.buttonCancel.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.buttonCancel.Location = new System.Drawing.Point(233, 215);
        this.buttonCancel.Name = "buttonCancel";
        this.buttonCancel.TabIndex = 5;
        this.buttonCancel.Text = "Cancel";
        //
        // labelCaption
        //
        this.labelCaption.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.labelCaption.Location = new System.Drawing.Point(9, 50);
        this.labelCaption.Name = "labelCaption";
        this.labelCaption.Size = new System.Drawing.Size(292, 16);
        this.labelCaption.TabIndex = 2;
        this.labelCaption.Text = "Components capable of handling \'{0}\':";
        //
        // labelContentType
        //
        this.labelContentType.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.labelContentType.Location = new System.Drawing.Point(33, 27);
        this.labelContentType.Name = "labelContentType";
        this.labelContentType.Size = new System.Drawing.Size(270, 16);
        this.labelContentType.TabIndex = 1;
        this.labelContentType.Text = "vcalendar (application/xhtml+xml)";
        //
        // labelFormatName
        //
        this.labelFormatName.FlatStyle = System.Windows.Forms.FlatStyle.System;
        this.labelFormatName.Location = new System.Drawing.Point(33, 7);
        this.labelFormatName.Name = "labelFormatName";
        this.labelFormatName.Size = new System.Drawing.Size(267, 15);
        this.labelFormatName.TabIndex = 0;
        this.labelFormatName.Text = "iCalendar";
        //
        // pictureBoxFormatIcon
        //
        this.pictureBoxFormatIcon.Location = new System.Drawing.Point(7, 5);
        this.pictureBoxFormatIcon.Name = "pictureBoxFormatIcon";
        this.pictureBoxFormatIcon.Size = new System.Drawing.Size(20, 18);
        this.pictureBoxFormatIcon.TabIndex = 9;
        this.pictureBoxFormatIcon.TabStop = false;
        //
        // LiveClipboardChangeHandlerForm
        //
        this.AcceptButton = this.buttonOK;
        this.AutoScaleBaseSize = new System.Drawing.Size(5, 14);
        this.CancelButton = this.buttonCancel;
        this.ClientSize = new System.Drawing.Size(316, 245);
        this.Controls.Add(this.labelContentType);
        this.Controls.Add(this.labelFormatName);
        this.Controls.Add(this.pictureBoxFormatIcon);
        this.Controls.Add(this.labelCaption);
        this.Controls.Add(this.buttonCancel);
        this.Controls.Add(this.buttonOK);
        this.Controls.Add(this.listViewComponents);
        this.Location = new System.Drawing.Point(0, 0);
        this.MaximizeBox = false;
        this.MinimizeBox = false;
        this.Name = "LiveClipboardChangeHandlerForm";
        this.Text = "Change Format Handler";
        this.ResumeLayout(false);
    }
    #endregion
}
}
| |
////#define ENABLE_DEBUG_SPEW
//// Uncomment the above to enable debug spew. Disabling by default as this slows down performance in debug mode
using Microsoft.Xaml.Interactivity;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Markup;
namespace Toolkit.Behaviors
{
/// <summary>
/// Behavior that selects a DataTemplate per item type for a <see cref="ListViewBase"/> and
/// maintains per-type recycle queues of item containers, so a container is only ever reused
/// for items of the same type (avoiding container re-templating costs).
/// </summary>
[ContentProperty(Name = nameof(Mappings))]
public class DataTemplateSelectorBehavior : Behavior<ListViewBase>
{
    public static readonly DependencyProperty MappingsProperty =
        DependencyProperty.Register(nameof(Mappings), typeof(DependencyObjectCollection), typeof(DataTemplateSelectorBehavior), new PropertyMetadata(null));

    // Item type name -> template used to render items of that type.
    private readonly Dictionary<string, DataTemplate> _typeToTemplateMapping;

    // Item type name -> recycle queue of containers not currently displaying anything.
    private readonly Dictionary<string, HashSet<SelectorItem>> _typeToItemHashSetMapping;

    // Which container kind to create; decided when the behavior attaches.
    private SelectorItemType _itemType = SelectorItemType.GridViewItem;

    public DataTemplateSelectorBehavior()
    {
        _typeToTemplateMapping = new Dictionary<string, DataTemplate>();
        _typeToItemHashSetMapping = new Dictionary<string, HashSet<SelectorItem>>();
        DisableDataContext = false;
    }

    private enum SelectorItemType
    {
        GridViewItem,
        ListViewItem
    }

    public DependencyObjectCollection Mappings
    {
        get
        {
            // Lazily create the collection so XAML content can populate it without an explicit set.
            var mappings = (DependencyObjectCollection)GetValue(MappingsProperty);
            if (mappings == null)
            {
                mappings = new DependencyObjectCollection();
                SetValue(MappingsProperty, mappings);
            }
            return mappings;
        }

        set
        {
            SetValue(MappingsProperty, value);
        }
    }

    /// <summary>
    /// Gets or sets a value indicating whether to apply DataContext.
    /// Set to True if the DataTemplates are *not* using {Binding}
    /// and are only using x:Bind. This will provide a performance boost.
    /// </summary>
    public bool DisableDataContext { get; set; }

    protected override void OnAttached()
    {
        base.OnAttached();
        _itemType = AssociatedObject is GridView ? SelectorItemType.GridViewItem : SelectorItemType.ListViewItem;
        AssociatedObject.ChoosingItemContainer += OnChoosingItemContainer;
        AssociatedObject.ContainerContentChanging += OnContainerContentChanging;
        ProcessMappings();
    }

    protected override void OnDetaching()
    {
        AssociatedObject.ContainerContentChanging -= OnContainerContentChanging;
        AssociatedObject.ChoosingItemContainer -= OnChoosingItemContainer;

        // Fix: the original never called the base implementation here, although
        // OnAttached calls base.OnAttached(); call it so the base class can clean up.
        base.OnDetaching();
    }

    /// <summary>Registers every declared mapping; called once when the behavior attaches.</summary>
    private void ProcessMappings()
    {
        foreach (DataTemplateMapping item in Mappings)
        {
            AddTypeMapping(item);
        }
    }

    /// <summary>
    /// Registers a type-to-template mapping and pre-creates CacheLength containers so
    /// the first realizations don't pay container-creation cost.
    /// </summary>
    private void AddTypeMapping(DataTemplateMapping mapping)
    {
        _typeToTemplateMapping.Add(mapping.TypeName, mapping.Template);

        // Fix: create the set once and reuse the reference instead of the original's
        // Add followed by a second dictionary lookup.
        var hashSet = new HashSet<SelectorItem>();
        _typeToItemHashSetMapping.Add(mapping.TypeName, hashSet);
        for (int i = 0; i < mapping.CacheLength; i++)
        {
            var item = CreateSelectorItem(mapping.TypeName);
            hashSet.Add(item);
#if ENABLE_DEBUG_SPEW
            Debug.WriteLine($"Adding {item.GetHashCode()} to {mapping.TypeName}");
#endif // ENABLE_DEBUG_SPEW
        }
    }

    /// <summary>
    /// Creates a container of the kind matching the attached control, with the template
    /// for <paramref name="typeName"/> applied and the type name stashed in Tag.
    /// </summary>
    private SelectorItem CreateSelectorItem(string typeName)
    {
        SelectorItem item;
        if (_itemType == SelectorItemType.GridViewItem)
        {
            item = new GridViewItem();
        }
        else
        {
            item = new ListViewItem();
        }
        item.ContentTemplate = _typeToTemplateMapping[typeName];
        item.Tag = typeName;
        return item;
    }

    /// <summary>
    /// Supplies a container whose template matches the item's type: accepts XAML's suggested
    /// container only when its type tag matches, otherwise reuses one from the matching
    /// recycle queue or creates a new one.
    /// </summary>
    private void OnChoosingItemContainer(ListViewBase sender, ChoosingItemContainerEventArgs args)
    {
        var typeName = args.Item.GetType().Name;
        Debug.Assert(_typeToItemHashSetMapping.ContainsKey(typeName), "The type of the item used with DataTemplateSelectorBehavior must have a DataTemplate mapping");
        var relevantHashSet = _typeToItemHashSetMapping[typeName];

        // args.ItemContainer is used to indicate whether the ListView is proposing an
        // ItemContainer (ListViewItem) to use. If args.Itemcontainer != null, then there was a
        // recycled ItemContainer available to be reused.
        if (args.ItemContainer != null)
        {
            if (args.ItemContainer.Tag.Equals(typeName))
            {
                // Suggestion matches what we want, so remove it from the recycle queue
                relevantHashSet.Remove(args.ItemContainer);
#if ENABLE_DEBUG_SPEW
                Debug.WriteLine($"Removing (suggested) {args.ItemContainer.GetHashCode()} from {typeName}");
#endif // ENABLE_DEBUG_SPEW
            }
            else
            {
                // The ItemContainer's datatemplate does not match the needed
                // datatemplate.
                // Don't remove it from the recycle queue, since XAML will resuggest it later
                args.ItemContainer = null;
            }
        }

        // If there was no suggested container or XAML's suggestion was a miss, pick one up from the recycle queue
        // or create a new one
        if (args.ItemContainer == null)
        {
            // See if we can fetch from the correct list.
            if (relevantHashSet.Count > 0)
            {
                // Unfortunately have to resort to LINQ here. There's no efficient way of getting an arbitrary
                // item from a hashset without knowing the item. Queue isn't usable for this scenario
                // because you can't remove a specific element (which is needed in the block above).
                args.ItemContainer = relevantHashSet.First();
                relevantHashSet.Remove(args.ItemContainer);
#if ENABLE_DEBUG_SPEW
                Debug.WriteLine($"Removing (reused) {args.ItemContainer.GetHashCode()} from {typeName}");
#endif // ENABLE_DEBUG_SPEW
            }
            else
            {
                // There aren't any (recycled) ItemContainers available. So a new one
                // needs to be created.
                var item = CreateSelectorItem(typeName);
                args.ItemContainer = item;
#if ENABLE_DEBUG_SPEW
                Debug.WriteLine($"Creating {args.ItemContainer.GetHashCode()} for {typeName}");
#endif // ENABLE_DEBUG_SPEW
            }
        }

        // Indicate to XAML that we picked a container for it
        args.IsContainerPrepared = true;
    }

    /// <summary>
    /// Returns containers leaving the viewport to their per-type recycle queue, and optionally
    /// tells XAML to skip DataContext propagation (see <see cref="DisableDataContext"/>).
    /// </summary>
    private void OnContainerContentChanging(ListViewBase sender, ContainerContentChangingEventArgs args)
    {
        if (args.InRecycleQueue == true)
        {
            // XAML has indicated that the item is no longer being shown, so add it to the recycle queue
            var tag = args.ItemContainer.Tag as string;
#if ENABLE_DEBUG_SPEW
            Debug.WriteLine($"Adding {args.ItemContainer.GetHashCode()} to {tag}");
#endif // ENABLE_DEBUG_SPEW
            var added = _typeToItemHashSetMapping[tag].Add(args.ItemContainer);
#if ENABLE_DEBUG_SPEW
            Debug.Assert(added == true, "Recycle queue should never have dupes. If so, we may be incorrectly reusing a container that is already in use!");
#endif // ENABLE_DEBUG_SPEW
        }

        if (DisableDataContext == true)
        {
            // Setting args.Handled to true tells XAML we're not using
            // {Binding}, so there's no need to apply DataContext.
            // This results in a boost to performance.
            args.Handled = true;
        }
    }
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Media.Media3D.Rect3D.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Media.Media3D
{
public partial struct Rect3D : IFormattable
{
    #region Methods and constructors

    // Contract-stub members: none of these perform any work — each simply
    // yields the default value of its return type (or nothing, for void).

    public static bool operator !=(Rect3D rect1, Rect3D rect2) => default(bool);

    public static bool operator ==(Rect3D rect1, Rect3D rect2) => default(bool);

    public bool Contains(Rect3D rect) => default(bool);

    public bool Contains(double x, double y, double z) => default(bool);

    public bool Contains(Point3D point) => default(bool);

    public bool Equals(Rect3D value) => default(bool);

    public static bool Equals(Rect3D rect1, Rect3D rect2) => default(bool);

    public override bool Equals(Object o) => default(bool);

    public override int GetHashCode() => default(int);

    public void Intersect(Rect3D rect)
    {
    }

    public static Rect3D Intersect(Rect3D rect1, Rect3D rect2) => default(Rect3D);

    public bool IntersectsWith(Rect3D rect) => default(bool);

    public void Offset(Vector3D offsetVector)
    {
    }

    public static Rect3D Offset(Rect3D rect, double offsetX, double offsetY, double offsetZ) => default(Rect3D);

    public void Offset(double offsetX, double offsetY, double offsetZ)
    {
    }

    public static Rect3D Offset(Rect3D rect, Vector3D offsetVector) => default(Rect3D);

    public static Rect3D Parse(string source) => default(Rect3D);

    public Rect3D(double x, double y, double z, double sizeX, double sizeY, double sizeZ)
    {
    }

    public Rect3D(Point3D location, Size3D size)
    {
    }

    string System.IFormattable.ToString(string format, IFormatProvider provider) => default(string);

    public string ToString(IFormatProvider provider) => default(string);

    public void Union(Point3D point)
    {
    }

    public static Rect3D Union(Rect3D rect, Point3D point) => default(Rect3D);

    public void Union(Rect3D rect)
    {
    }

    public static Rect3D Union(Rect3D rect1, Rect3D rect2) => default(Rect3D);

    #endregion

    #region Properties and indexers

    // Stub properties: getters return defaults, setters discard their input.

    public static Rect3D Empty => default(Rect3D);

    public bool IsEmpty => default(bool);

    public Point3D Location { get => default(Point3D); set { } }

    public Size3D Size { get => default(Size3D); set { } }

    public double SizeX { get => default(double); set { } }

    public double SizeY { get => default(double); set { } }

    public double SizeZ { get => default(double); set { } }

    public double X { get => default(double); set { } }

    public double Y { get => default(double); set { } }

    public double Z { get => default(double); set { } }

    #endregion
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.Serialization;
using Microsoft.Xrm.Sdk;
namespace PowerShellLibrary.Crm.CmdletProviders
{
[Microsoft.Xrm.Sdk.Client.EntityLogicalName("sdkmessagerequest")]
[GeneratedCode("CrmSvcUtil", "7.1.0001.3108")]
[DataContract]
// Typed proxy for the CRM "sdkmessagerequest" entity. All attribute access is
// delegated to the Entity base class via GetAttributeValue/SetAttributeValue;
// writable members raise PropertyChanging/PropertyChanged around the write.
public class SdkMessageRequest : Entity, INotifyPropertyChanging, INotifyPropertyChanged
{
public const string EntityLogicalName = "sdkmessagerequest";
public const int EntityTypeCode = 4609;
[AttributeLogicalName("createdby")]
public EntityReference CreatedBy
{
get
{
return this.GetAttributeValue<EntityReference>("createdby");
}
}
[AttributeLogicalName("createdon")]
public DateTime? CreatedOn
{
get
{
return this.GetAttributeValue<DateTime?>("createdon");
}
}
[AttributeLogicalName("createdonbehalfby")]
public EntityReference CreatedOnBehalfBy
{
get
{
return this.GetAttributeValue<EntityReference>("createdonbehalfby");
}
}
[AttributeLogicalName("customizationlevel")]
public int? CustomizationLevel
{
get
{
return this.GetAttributeValue<int?>("customizationlevel");
}
}
[AttributeLogicalName("modifiedby")]
public EntityReference ModifiedBy
{
get
{
return this.GetAttributeValue<EntityReference>("modifiedby");
}
}
[AttributeLogicalName("modifiedon")]
public DateTime? ModifiedOn
{
get
{
return this.GetAttributeValue<DateTime?>("modifiedon");
}
}
[AttributeLogicalName("modifiedonbehalfby")]
public EntityReference ModifiedOnBehalfBy
{
get
{
return this.GetAttributeValue<EntityReference>("modifiedonbehalfby");
}
}
[AttributeLogicalName("name")]
public string Name
{
get
{
return this.GetAttributeValue<string>("name");
}
set
{
this.OnPropertyChanging("Name");
this.SetAttributeValue("name", (object) value);
this.OnPropertyChanged("Name");
}
}
[AttributeLogicalName("organizationid")]
public EntityReference OrganizationId
{
get
{
return this.GetAttributeValue<EntityReference>("organizationid");
}
}
[AttributeLogicalName("primaryobjecttypecode")]
public string PrimaryObjectTypeCode
{
get
{
return this.GetAttributeValue<string>("primaryobjecttypecode");
}
}
[AttributeLogicalName("sdkmessagepairid")]
public EntityReference SdkMessagePairId
{
get
{
return this.GetAttributeValue<EntityReference>("sdkmessagepairid");
}
}
// Primary key attribute. Setting it also keeps the base Entity.Id in sync
// (Guid.Empty when cleared), which is why the Id override below delegates here.
[AttributeLogicalName("sdkmessagerequestid")]
public Guid? SdkMessageRequestId
{
get
{
return this.GetAttributeValue<Guid?>("sdkmessagerequestid");
}
set
{
this.OnPropertyChanging("SdkMessageRequestId");
this.SetAttributeValue("sdkmessagerequestid", (object) value);
if (value.HasValue)
base.Id = value.Value;
else
base.Id = Guid.Empty;
this.OnPropertyChanged("SdkMessageRequestId");
}
}
[AttributeLogicalName("sdkmessagerequestid")]
public override Guid Id
{
get
{
return base.Id;
}
set
{
// Route through SdkMessageRequestId so change notifications fire and the
// underlying attribute stays consistent with base.Id.
this.SdkMessageRequestId = new Guid?(value);
}
}
[AttributeLogicalName("sdkmessagerequestidunique")]
public Guid? SdkMessageRequestIdUnique
{
get
{
return this.GetAttributeValue<Guid?>("sdkmessagerequestidunique");
}
}
[AttributeLogicalName("versionnumber")]
public long? VersionNumber
{
get
{
return this.GetAttributeValue<long?>("versionnumber");
}
}
[RelationshipSchemaName("messagerequest_sdkmessagerequestfield")]
public IEnumerable<SdkMessageRequestField> messagerequest_sdkmessagerequestfield
{
get
{
return this.GetRelatedEntities<SdkMessageRequestField>("messagerequest_sdkmessagerequestfield", new EntityRole?());
}
set
{
this.OnPropertyChanging("messagerequest_sdkmessagerequestfield");
this.SetRelatedEntities<SdkMessageRequestField>("messagerequest_sdkmessagerequestfield", new EntityRole?(), value);
this.OnPropertyChanged("messagerequest_sdkmessagerequestfield");
}
}
[RelationshipSchemaName("messagerequest_sdkmessageresponse")]
public IEnumerable<SdkMessageResponse> messagerequest_sdkmessageresponse
{
get
{
return this.GetRelatedEntities<SdkMessageResponse>("messagerequest_sdkmessageresponse", new EntityRole?());
}
set
{
this.OnPropertyChanging("messagerequest_sdkmessageresponse");
this.SetRelatedEntities<SdkMessageResponse>("messagerequest_sdkmessageresponse", new EntityRole?(), value);
this.OnPropertyChanged("messagerequest_sdkmessageresponse");
}
}
[RelationshipSchemaName("createdby_sdkmessagerequest")]
[AttributeLogicalName("createdby")]
public SystemUser createdby_sdkmessagerequest
{
get
{
return this.GetRelatedEntity<SystemUser>("createdby_sdkmessagerequest", new EntityRole?());
}
}
[AttributeLogicalName("createdonbehalfby")]
[RelationshipSchemaName("lk_sdkmessagerequest_createdonbehalfby")]
public SystemUser lk_sdkmessagerequest_createdonbehalfby
{
get
{
return this.GetRelatedEntity<SystemUser>("lk_sdkmessagerequest_createdonbehalfby", new EntityRole?());
}
}
[AttributeLogicalName("modifiedonbehalfby")]
[RelationshipSchemaName("lk_sdkmessagerequest_modifiedonbehalfby")]
public SystemUser lk_sdkmessagerequest_modifiedonbehalfby
{
get
{
return this.GetRelatedEntity<SystemUser>("lk_sdkmessagerequest_modifiedonbehalfby", new EntityRole?());
}
}
[RelationshipSchemaName("messagepair_sdkmessagerequest")]
[AttributeLogicalName("sdkmessagepairid")]
public SdkMessagePair messagepair_sdkmessagerequest
{
get
{
return this.GetRelatedEntity<SdkMessagePair>("messagepair_sdkmessagerequest", new EntityRole?());
}
}
[AttributeLogicalName("modifiedby")]
[RelationshipSchemaName("modifiedby_sdkmessagerequest")]
public SystemUser modifiedby_sdkmessagerequest
{
get
{
return this.GetRelatedEntity<SystemUser>("modifiedby_sdkmessagerequest", new EntityRole?());
}
}
public event PropertyChangedEventHandler PropertyChanged;
public event PropertyChangingEventHandler PropertyChanging;
public SdkMessageRequest()
: base("sdkmessagerequest")
{
}
private void OnPropertyChanged(string propertyName)
{
// Snapshot the delegate before invoking: checking this.PropertyChanged for
// null and then invoking the field is a race — a subscriber unsubscribing
// on another thread between the two reads would cause a NullReferenceException.
PropertyChangedEventHandler handler = this.PropertyChanged;
if (handler == null)
return;
handler((object) this, new PropertyChangedEventArgs(propertyName));
}
private void OnPropertyChanging(string propertyName)
{
// Same delegate-snapshot pattern as OnPropertyChanged, for the same reason.
PropertyChangingEventHandler handler = this.PropertyChanging;
if (handler == null)
return;
handler((object) this, new PropertyChangingEventArgs(propertyName));
}
}
}
| |
#if __MonoCS__
using System.Linq;
using System.Windows.Forms;
using NUnit.Framework;
using SIL.Reporting;
using SIL.Windows.Forms.Keyboarding;
using SIL.Windows.Forms.Keyboarding.Linux;
using SIL.Keyboarding;
namespace SIL.Windows.Forms.Keyboarding.Tests
{
// Smoke tests for the Linux (ibus/X11) keyboard controller. Most tests require a
// real input-method environment, so many are ignored or gated behind categories.
[TestFixture]
[Category("SkipOnTeamCity")]
public class LinuxKeyboardControllerTests
{
// Window created on demand by RequiresWindowForFocus(); disposed in Teardown.
private Form _window;
[TestFixtureSetUp]
public void FixtureSetup()
{
KeyboardController.Initialize();
}
[TestFixtureTearDown]
public void FixtureTearDown()
{
KeyboardController.Shutdown();
}
[SetUp]
public void Setup()
{
// Prevent modal error dialogs from blocking the test run.
ErrorReport.IsOkToInteractWithUser = false;
}
[TearDown]
public void Teardown()
{
if (_window != null)
{
_window.Close();
Application.DoEvents();
_window.Dispose();
_window = null;
}
}
// Creates and shows a form with a focused TextBox; keyboard activation only
// works when some control has input focus.
private void RequiresWindowForFocus()
{
_window = new Form();
var box = new TextBox();
box.Dock = DockStyle.Fill;
_window.Controls.Add(box);
_window.Show();
box.Select();
Application.DoEvents();
}
[Test]
public void GetAllKeyboards_GivesSeveral()
{
IKeyboardDefinition[] keyboards = Keyboard.Controller.AvailableKeyboards.ToArray();
Assert.Greater(keyboards.Length, 1, "This test requires that the Windows IME has at least two languages installed.");
}
#if WANT_PORT
/// <summary>
/// The main thing here is that it doesn't crash doing a LoadLibrary()
/// </summary>
[Test]
public void NoKeyman7_GetKeyboards_DoesNotCrash()
{
KeyboardController.GetAvailableKeyboards(KeyboardController.Engines.Keyman7);
}
[Test]
[Ignore("SCIM deprecated")]
public void EngineAvailable_ScimIsSetUpAndConfiguredCorrectly_ReturnsTrue()
{
Assert.IsTrue(KeyboardController.EngineAvailable(KeyboardController.Engines.Scim));
}
[Test]
[Ignore("SCIM deprecated")]
public void GetActiveKeyboard_ScimIsSetUpAndConfiguredToDefault_ReturnsEnglishKeyboard()
{
RequiresWindowForFocus();
ResetKeyboardToDefault();
Assert.AreEqual("English/Keyboard", KeyboardController.GetActiveKeyboard());
}
[Test]
[Ignore("SCIM deprecated")]
public void KeyboardDescriptors_ScimIsSetUpAndConfiguredToDefault_3KeyboardsReturned()
{
List<KeyboardController.KeyboardDescriptor> availableKeyboards = KeyboardController.GetAvailableKeyboards(KeyboardController.Engines.Scim);
Assert.AreEqual("English/European", availableKeyboards[0].ShortName);
Assert.AreEqual("RAW CODE", availableKeyboards[1].ShortName);
Assert.AreEqual("English/Keyboard", availableKeyboards[2].ShortName);
}
[Test]
[Ignore("SCIM deprecated")]
public void Deactivate_ScimIsRunning_GetCurrentKeyboardReturnsEnglishKeyboard()
{
RequiresWindowForFocus();
Keyboard.Controller.SetKeyboard("English/European");
KeyboardController.DeactivateKeyboard();
Assert.AreEqual("English/Keyboard", KeyboardController.GetActiveKeyboard());
}
[Test]
[Ignore("SCIM deprecated")]
public void ActivateKeyBoard_ScimHasKeyboard_GetCurrentKeyboardReturnsActivatedKeyboard()
{
RequiresWindowForFocus();
ResetKeyboardToDefault();
Keyboard.Controller.SetKeyboard("English/European");
Assert.AreEqual("English/European", KeyboardController.GetActiveKeyboard());
ResetKeyboardToDefault();
}
[Test]
[Ignore("SCIM deprecated")]
public void ActivateKeyBoard_ScimDoesNotHaveKeyboard_Throws()
{
Assert.Throws<ErrorReport.ProblemNotificationSentToUserException>(
() => Keyboard.Controller.SetKeyboard("Nonexistent Keyboard")
);
}
[Test]
[Ignore("SCIM deprecated")]
[Category("No IM Running")]
public void GetAvailableKeyboards_NoIMRunning_ReturnsEmptyList()
{
var availableKeyboards = Keyboard.Controller.AllAvailableKeyboards.Where(kbd => kbd is KeyboardDescription && ((KeyboardDescription)kbd).Engine == "SCIM");
Assert.AreEqual(0, availableKeyboards.Count());
}
#endif
private static void ResetKeyboardToDefault()
{
Keyboard.Controller.ActivateDefaultKeyboard();
}
[Test]
[Category("No IM Running")]
public void Deactivate_NoIMRunning_DoesNotThrow()
{
Keyboard.Controller.ActivateDefaultKeyboard();
}
#if WANT_PORT
[Test]
[Category("IBus not Running")]
public void EngineAvailable_IBusIsnotRunning_returnsFalse()
{
Assert.IsFalse(KeyboardController.EngineAvailable(KeyboardController.Engines.IBus));
}
[Test]
[Category("IBus")]
public void EngineAvailable_IBusIsSetUpAndConfiguredCorrectly_ReturnsTrue()
{
// needed for focus
RequiresWindowForFocus();
Assert.IsTrue(KeyboardController.EngineAvailable(KeyboardController.Engines.IBus));
}
#endif
[Test]
[Category("IBus")]
public void Deactivate_IBusIsRunning_GetCurrentKeyboardReturnsEnglishKeyboard()
{
if (Keyboard.Controller.AvailableKeyboards.Count(kbd => kbd.Layout == "m17n:am:sera") <= 0)
Assert.Ignore("Can't run this test without ibus keyboard 'm17n:am:sera' being installed.");
// needed for focus
RequiresWindowForFocus();
Keyboard.Controller.GetKeyboard("m17n:am:sera").Activate();
Keyboard.Controller.ActivateDefaultKeyboard();
// NOTE(review): the test name says the English/default keyboard should be
// reported after deactivation, yet this asserts the ibus keyboard is still
// active — and compares a string against ActiveKeyboard (presumably via its
// string representation). Confirm the intended expected value.
Assert.AreEqual("m17n:am:sera", Keyboard.Controller.ActiveKeyboard);
}
[Test]
[Category("IBus")]
public void ActivateKeyBoard_IBusHasKeyboard_GetCurrentKeyboardReturnsActivatedKeyboard()
{
if (Keyboard.Controller.AvailableKeyboards.Count(kbd => kbd.Layout == "m17n:am:sera") <= 0)
Assert.Ignore("Can't run this test without ibus keyboard 'm17n:am:sera' being installed.");
// needed for focus
RequiresWindowForFocus();
Keyboard.Controller.ActivateDefaultKeyboard();
Keyboard.Controller.GetKeyboard("m17n:am:sera").Activate();
// NOTE(review): string compared against ActiveKeyboard — assumes the
// keyboard's string form equals its layout id; verify against the API.
Assert.AreEqual("m17n:am:sera", Keyboard.Controller.ActiveKeyboard);
Keyboard.Controller.ActivateDefaultKeyboard();
}
[Test]
public void CreateKeyboardDefinition_NewKeyboard_ReturnsNewObject()
{
// REVIEW: adjust this test
IKeyboardDefinition keyboard = Keyboard.Controller.CreateKeyboard("en-US_foo", KeyboardFormat.Unknown, Enumerable.Empty<string>());
Assert.That(keyboard, Is.Not.Null);
Assert.That(keyboard, Is.TypeOf<XkbKeyboardDescription>());
}
}
}
#endif
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the SysTipoProfesional class.
/// </summary>
[Serializable]
public partial class SysTipoProfesionalCollection : ActiveList<SysTipoProfesional, SysTipoProfesionalCollection>
{
public SysTipoProfesionalCollection() {}
/// <summary>
/// Filters an existing collection based on the set criteria. This is an in-memory filter
/// Thanks to developingchris for this!
/// Only <c>Comparison.Equals</c> criteria are evaluated; criteria whose column
/// does not match a readable property are ignored.
/// </summary>
/// <returns>SysTipoProfesionalCollection</returns>
public SysTipoProfesionalCollection Filter()
{
// Iterate backwards so removals do not shift the indexes still to be visited.
for (int i = this.Count - 1; i > -1; i--)
{
SysTipoProfesional o = this[i];
foreach (SubSonic.Where w in this.wheres)
{
bool remove = false;
System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
// Guard against criteria naming a property this type does not expose:
// the original dereferenced pi unconditionally and threw NullReferenceException.
if (pi != null && pi.CanRead)
{
object val = pi.GetValue(o, null);
switch (w.Comparison)
{
case SubSonic.Comparison.Equals:
// object.Equals is null-safe; val may legitimately be null,
// which previously crashed on val.Equals(...).
if (!object.Equals(val, w.ParameterValue))
{
remove = true;
}
break;
}
}
if (remove)
{
this.Remove(o);
// Item removed — no need to evaluate its remaining criteria.
break;
}
}
}
return this;
}
}
/// <summary>
/// This is an ActiveRecord class which wraps the Sys_TipoProfesional table.
/// </summary>
[Serializable]
public partial class SysTipoProfesional : ActiveRecord<SysTipoProfesional>, IActiveRecord
{
#region .ctors and Default Settings
// Default constructor: initializes the schema, applies in-memory defaults and
// marks the record as new (unsaved).
public SysTipoProfesional()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
private void InitSetDefaults() { SetDefaults(); }
// When useDatabaseDefaults is true, column defaults are pulled from the
// database (ForceDefaults) instead of the in-memory SetDefaults path.
public SysTipoProfesional(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
// Loads an existing record by primary key value.
public SysTipoProfesional(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
// Loads an existing record by an arbitrary column/value pair.
public SysTipoProfesional(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
public static Query CreateQuery() { return new Query(Schema); }
public static TableSchema.Table Schema
{
get
{
// Lazily build the schema on first access.
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
// Builds the table schema once. NOTE: the column add order matters — the
// "Typed Columns" region below indexes Schema.Columns by ordinal position.
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Sys_TipoProfesional", TableType.Table, DataService.GetInstance("sicProvider"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
// Column 0: idTipoProfesional — identity primary key.
TableSchema.TableColumn colvarIdTipoProfesional = new TableSchema.TableColumn(schema);
colvarIdTipoProfesional.ColumnName = "idTipoProfesional";
colvarIdTipoProfesional.DataType = DbType.Int32;
colvarIdTipoProfesional.MaxLength = 0;
colvarIdTipoProfesional.AutoIncrement = true;
colvarIdTipoProfesional.IsNullable = false;
colvarIdTipoProfesional.IsPrimaryKey = true;
colvarIdTipoProfesional.IsForeignKey = false;
colvarIdTipoProfesional.IsReadOnly = false;
colvarIdTipoProfesional.DefaultSetting = @"";
colvarIdTipoProfesional.ForeignKeyTableName = "";
schema.Columns.Add(colvarIdTipoProfesional);
// Column 1: nombre — required varchar(50), defaults to empty string.
TableSchema.TableColumn colvarNombre = new TableSchema.TableColumn(schema);
colvarNombre.ColumnName = "nombre";
colvarNombre.DataType = DbType.AnsiString;
colvarNombre.MaxLength = 50;
colvarNombre.AutoIncrement = false;
colvarNombre.IsNullable = false;
colvarNombre.IsPrimaryKey = false;
colvarNombre.IsForeignKey = false;
colvarNombre.IsReadOnly = false;
colvarNombre.DefaultSetting = @"('')";
colvarNombre.ForeignKeyTableName = "";
schema.Columns.Add(colvarNombre);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["sicProvider"].AddSchema("Sys_TipoProfesional",schema);
}
}
#endregion
#region Props
[XmlAttribute("IdTipoProfesional")]
[Bindable(true)]
public int IdTipoProfesional
{
get { return GetColumnValue<int>(Columns.IdTipoProfesional); }
set { SetColumnValue(Columns.IdTipoProfesional, value); }
}
[XmlAttribute("Nombre")]
[Bindable(true)]
public string Nombre
{
get { return GetColumnValue<string>(Columns.Nombre); }
set { SetColumnValue(Columns.Nombre, value); }
}
#endregion
#region PrimaryKey Methods
// After the PK is assigned (e.g. post-insert identity), propagate it to the
// already-loaded child collection so the FKs stay consistent.
protected override void SetPrimaryKey(object oValue)
{
base.SetPrimaryKey(oValue);
SetPKValues();
}
private DalSic.SysProfesionalCollection colSysProfesionalRecords;
// Lazily-loaded child records (Sys_Profesional rows referencing this type).
public DalSic.SysProfesionalCollection SysProfesionalRecords
{
get
{
if(colSysProfesionalRecords == null)
{
colSysProfesionalRecords = new DalSic.SysProfesionalCollection().Where(SysProfesional.Columns.IdTipoProfesional, IdTipoProfesional).Load();
colSysProfesionalRecords.ListChanged += new ListChangedEventHandler(colSysProfesionalRecords_ListChanged);
}
return colSysProfesionalRecords;
}
set
{
colSysProfesionalRecords = value;
colSysProfesionalRecords.ListChanged += new ListChangedEventHandler(colSysProfesionalRecords_ListChanged);
}
}
// Keeps new child rows pointed at this record's primary key.
void colSysProfesionalRecords_ListChanged(object sender, ListChangedEventArgs e)
{
if (e.ListChangedType == ListChangedType.ItemAdded)
{
// Set foreign key value
colSysProfesionalRecords[e.NewIndex].IdTipoProfesional = IdTipoProfesional;
}
}
#endregion
//no foreign key tables defined (0)
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source
/// </summary>
public static void Insert(string varNombre)
{
SysTipoProfesional item = new SysTipoProfesional();
item.Nombre = varNombre;
// Audit with the web user when running under ASP.NET, otherwise the thread principal.
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source
/// </summary>
public static void Update(int varIdTipoProfesional,string varNombre)
{
SysTipoProfesional item = new SysTipoProfesional();
item.IdTipoProfesional = varIdTipoProfesional;
item.Nombre = varNombre;
// IsNew = false forces an UPDATE rather than an INSERT on Save.
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
// NOTE: ordinal indexes must match the add order in GetTableSchema().
public static TableSchema.TableColumn IdTipoProfesionalColumn
{
get { return Schema.Columns[0]; }
}
public static TableSchema.TableColumn NombreColumn
{
get { return Schema.Columns[1]; }
}
#endregion
#region Columns Struct
// Column-name constants used by the Props region and by external queries.
public struct Columns
{
public static string IdTipoProfesional = @"idTipoProfesional";
public static string Nombre = @"nombre";
}
#endregion
#region Update PK Collections
// Pushes this record's primary key into any loaded child rows whose FK differs.
public void SetPKValues()
{
if (colSysProfesionalRecords != null)
{
foreach (DalSic.SysProfesional item in colSysProfesionalRecords)
{
if (item.IdTipoProfesional != IdTipoProfesional)
{
item.IdTipoProfesional = IdTipoProfesional;
}
}
}
}
#endregion
#region Deep Save
// Saves this record, then cascades the save to any loaded child collection.
public void DeepSave()
{
Save();
if (colSysProfesionalRecords != null)
{
colSysProfesionalRecords.SaveAll();
}
}
#endregion
}
}
| |
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
using RefactoringEssentials;
using RefactoringEssentials.Util;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
namespace RefactoringEssentials.CSharp.Diagnostics
{
/// <summary>
/// Checks for str == null && str == " "
/// Converts to: string.IsNullOrEmpty (str)
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public class ReplaceWithStringIsNullOrEmptyAnalyzer : DiagnosticAnalyzer
{
/// <summary>
/// The name of the property referred to by the <see cref="Diagnostic"/> for the replacement code.
/// </summary>
public static readonly string ReplacementPropertyName = "Replacement";
static readonly DiagnosticDescriptor descriptor = new DiagnosticDescriptor(
CSharpDiagnosticIDs.ReplaceWithStringIsNullOrEmptyAnalyzerID,
GettextCatalog.GetString("Uses shorter string.IsNullOrEmpty call instead of a longer condition"),
GettextCatalog.GetString("Expression can be replaced with '{0}'"),
DiagnosticAnalyzerCategories.PracticesAndImprovements,
DiagnosticSeverity.Info,
isEnabledByDefault: true,
helpLinkUri: HelpLink.CreateFor(CSharpDiagnosticIDs.ReplaceWithStringIsNullOrEmptyAnalyzerID)
);
public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(descriptor);
public override void Initialize(AnalysisContext context)
{
context.RegisterSyntaxNodeAction(
(nodeContext) =>
{
Diagnostic diagnostic;
if (TryGetDiagnostic(nodeContext, out diagnostic))
{
nodeContext.ReportDiagnostic(diagnostic);
}
},
SyntaxKind.LogicalAndExpression,
SyntaxKind.LogicalOrExpression
);
}
static bool TryGetDiagnostic(SyntaxNodeAnalysisContext nodeContext, out Diagnostic diagnostic)
{
diagnostic = default(Diagnostic);
if (nodeContext.IsFromGeneratedCode())
return false;
var node = nodeContext.Node as BinaryExpressionSyntax;
// Must be of binary expression of 2 binary expressions.
if (!node.IsKind(SyntaxKind.LogicalAndExpression, SyntaxKind.LogicalOrExpression))
return false;
// Verify left is a binary expression.
var left = SimplifySyntax(node.Left) as BinaryExpressionSyntax;
if (left == null)
return false;
// Verify right is a binary expression.
var right = SimplifySyntax(node.Right) as BinaryExpressionSyntax;
if (right == null)
return false;
// Ensure left and right are binary and not assignment.
if (!SyntaxFacts.IsBinaryExpression(left.OperatorToken.Kind()) || !SyntaxFacts.IsBinaryExpression(right.OperatorToken.Kind()))
return false;
// Test if left and right are suitable for replacement.
var leftReplace = ShouldReplace(nodeContext, left);
var rightReplace = ShouldReplace(nodeContext, right);
// Test both are suitable for replacement.
if (!leftReplace.ShouldReplace || !rightReplace.ShouldReplace)
return false;
// Test that both are either positive or negative tests.
if (!(leftReplace.IsNegative == rightReplace.IsNegative))
return false;
// Ensure that one tests for null and the other tests for empty.
var isNullOrEmptyTest = (leftReplace.IsNullTest && rightReplace.IsEmptyTest) || (leftReplace.IsEmptyTest && rightReplace.IsNullTest);
if (!isNullOrEmptyTest)
return false;
// Ensure that both refer to the same identifier.
// Good: foo != null && foo != ""
// Bad: foo != null && bar != ""
if (!string.Equals(leftReplace.IdentifierNode.ToString(),
rightReplace.IdentifierNode.ToString(),
StringComparison.OrdinalIgnoreCase))
return false;
// Generate replacement string and negate if necessary.
// Used within diagnostic message and also passed down for replacement.
var replacementString = string.Format("string.IsNullOrEmpty({0})", leftReplace.IdentifierNode);
if (leftReplace.IsNegative)
replacementString = "!" + replacementString;
// We already did the work now pass it down to the code fix provider via a property.
var props = new Dictionary<string, string>
{
{ ReplacementPropertyName, replacementString }
};
diagnostic = Diagnostic.Create(
descriptor,
node.GetLocation(),
ImmutableDictionary.CreateRange(props),
replacementString);
return true;
}
/// <summary>
/// Indicates whether a binary expression is suitable for replacement and info about it.
/// </summary>
class ShouldReplaceResult
{
/// <summary>
/// Is the expression suitable for replacement.
/// </summary>
public bool ShouldReplace { get; set; } = false;
/// <summary>
/// Is the expression a test for null?
/// </summary>
public bool IsNullTest { get; set; } = false;
/// <summary>
/// Is the expression a test for empty?
/// </summary>
public bool IsEmptyTest { get; set; } = false;
/// <summary>
/// Is the expression negated?
/// </summary>
public bool IsNegative { get; set; } = false;
/// <summary>
/// What string symbol is being tested for null or empty?
/// </summary>
public ExpressionSyntax IdentifierNode { get; set; } = null;
}
/// <summary>
/// Test whether a binary expression is suitable for replacement.
/// </summary>
/// <returns>
/// A <see cref="ShouldReplaceResult"/> indicating whether the node is suitable for replacement.
/// </returns>
static ShouldReplaceResult ShouldReplace(SyntaxNodeAnalysisContext nodeContext, BinaryExpressionSyntax node)
{
// input (left, right, operator) output Result
var left = SimplifySyntax(node.Left);
var right = SimplifySyntax(node.Right);
// str ==
if (IsStringSyntax(nodeContext, left))
{
return ShouldReplaceString(nodeContext, left, right, node.OperatorToken);
}
// == str
if (IsStringSyntax(nodeContext, right))
{
return ShouldReplaceString(nodeContext, right, left, node.OperatorToken);
}
// str.Length ==
if (IsStringLengthSyntax(nodeContext, left))
{
return ShouldReplaceStringLength(left as MemberAccessExpressionSyntax, right, node.OperatorToken);
}
// == str.Length
if (IsStringLengthSyntax(nodeContext, right))
{
return ShouldReplaceStringLength(right as MemberAccessExpressionSyntax, left, node.OperatorToken);
}
// We did not find a suitable replacement.
return new ShouldReplaceResult
{
ShouldReplace = false
};
}
/// <summary>
/// Determine whether a binary expression with a string expression is suitable for replacement.
/// </summary>
/// <param name="left">A node representing a string expression.</param>
/// <param name="right">A node to be tested.</param>
/// <param name="operatorToken">The operator separating the nodes.</param>
/// <returns></returns>
static ShouldReplaceResult ShouldReplaceString(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax left, ExpressionSyntax right, SyntaxToken operatorToken)
{
var result = new ShouldReplaceResult();
result.ShouldReplace = false;
// str == null or str != null
if (IsNullSyntax(nodeContext, right))
{
result.IsNullTest = true;
result.ShouldReplace = true;
}
// str == "" or str != ""
// str == string.Empty or str != string.Empty
else if (IsEmptySyntax(nodeContext, right))
{
result.IsEmptyTest = true;
result.ShouldReplace = true;
}
if (result.ShouldReplace)
{
result.IdentifierNode = left;
if (operatorToken.IsKind(SyntaxKind.ExclamationEqualsToken))
{
result.IsNegative = true;
}
}
return result;
}
/// <summary>
/// Determines whether a binary expression with a string length expression is suitable for replacement.
/// </summary>
/// <param name="left">A node representing a string length expression.</param>
/// <param name="right">A node to be tested.</param>
/// <param name="operatorToken">The operator separating the nodes.</param>
/// <returns></returns>
static ShouldReplaceResult ShouldReplaceStringLength(MemberAccessExpressionSyntax left, ExpressionSyntax right, SyntaxToken operatorToken)
{
const string zeroLiteral = "0";
const string oneLiteral = "1";
var result = new ShouldReplaceResult();
result.ShouldReplace = false;
// str.Length == 0 or str.Length <= 0
if (operatorToken.IsKind(SyntaxKind.EqualsEqualsToken, SyntaxKind.LessThanEqualsToken) && string.Equals(zeroLiteral, right.ToString()))
{
result.IsEmptyTest = true;
result.ShouldReplace = true;
}
// str.Length < 1
else if (operatorToken.IsKind(SyntaxKind.LessThanToken) && string.Equals(oneLiteral, right.ToString()))
{
result.IsEmptyTest = true;
result.ShouldReplace = true;
}
// str.Length != 0 or str.Length > 0
else if (operatorToken.IsKind(SyntaxKind.ExclamationEqualsToken, SyntaxKind.GreaterThanToken) && string.Equals(zeroLiteral, right.ToString()))
{
result.IsEmptyTest = true;
result.IsNegative = true;
result.ShouldReplace = true;
}
// str.Length >= 1
else if (operatorToken.IsKind(SyntaxKind.GreaterThanEqualsToken) && string.Equals(oneLiteral, right.ToString()))
{
result.IsEmptyTest = true;
result.IsNegative = true;
result.ShouldReplace = true;
}
if (result.ShouldReplace)
{
result.IdentifierNode = left.Expression;
}
return result;
}
/// <summary>
/// Does the expression look like a string type?
/// </summary>
static bool IsStringSyntax(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax node)
{
if (!IsStringType(nodeContext, node))
return false;
return node.IsKind(SyntaxKind.IdentifierName, SyntaxKind.InvocationExpression, SyntaxKind.SimpleMemberAccessExpression);
}
/// <summary>
/// Does the expression look like a string length call?
/// </summary>
static bool IsStringLengthSyntax(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax node)
{
if (node.IsKind(SyntaxKind.SimpleMemberAccessExpression))
{
var smaNode = node as MemberAccessExpressionSyntax;
if (smaNode.Name.Identifier.Text == "Length")
{
if (!IsStringType(nodeContext, smaNode.Expression))
return false;
return true;
}
}
return false;
}
/// <summary>
/// Does the expression look like a null?
/// </summary>
static bool IsNullSyntax(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax node)
{
if (!IsStringType(nodeContext, node))
return false;
return node.IsKind(SyntaxKind.NullLiteralExpression);
}
/// <summary>
/// Does the expression look like a test for empty string ("" or string.Empty)?
/// </summary>
/// <param name="node"></param>
/// <returns></returns>
static bool IsEmptySyntax(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax node)
{
if (!IsStringType(nodeContext, node))
return false;
if (node.IsKind(SyntaxKind.StringLiteralExpression))
{
if (string.Equals("\"\"", node.ToString()))
return true;
}
else if (node.IsKind(SyntaxKind.SimpleMemberAccessExpression))
{
var sma = node as MemberAccessExpressionSyntax;
if (!string.Equals("string", sma.Expression.ToString(), StringComparison.OrdinalIgnoreCase))
return false;
if (!string.Equals("Empty", sma.Name.ToString(), StringComparison.OrdinalIgnoreCase))
return false;
return true;
}
return false;
}
/// <summary>
/// Tests whether the semantic model reports the expression's converted type
/// as System.String.
/// </summary>
static bool IsStringType(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax node)
{
	var convertedType = nodeContext.SemanticModel.GetTypeInfo(node).ConvertedType;
	return convertedType != null
		&& string.Equals("String", convertedType.Name, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Simplify an <see cref="ExpressionSyntax"/> by removing unecessary parenthesis.
/// </summary>
/// <returns>
/// A simplified <see cref="ExpressionSyntax"/>.
/// </returns>
static ExpressionSyntax SimplifySyntax(ExpressionSyntax syntax)
{
	// Iteratively unwrap nested parentheses: ((x)) -> x.
	while (syntax.IsKind(SyntaxKind.ParenthesizedExpression))
		syntax = ((ParenthesizedExpressionSyntax)syntax).Expression;
	return syntax;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using NUnit.Framework;
using Analyzer = Lucene.Net.Analysis.Analyzer;
using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
using TokenFilter = Lucene.Net.Analysis.TokenFilter;
using TokenStream = Lucene.Net.Analysis.TokenStream;
using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Payload = Lucene.Net.Index.Payload;
using Term = Lucene.Net.Index.Term;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using BooleanClause = Lucene.Net.Search.BooleanClause;
using BooleanQuery = Lucene.Net.Search.BooleanQuery;
using CheckHits = Lucene.Net.Search.CheckHits;
using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
using IndexSearcher = Lucene.Net.Search.IndexSearcher;
using QueryUtils = Lucene.Net.Search.QueryUtils;
using ScoreDoc = Lucene.Net.Search.ScoreDoc;
using TopDocs = Lucene.Net.Search.TopDocs;
using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
using TermSpans = Lucene.Net.Search.Spans.TermSpans;
using English = Lucene.Net.Util.English;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
namespace Lucene.Net.Search.Payloads
{
/// <summary>
/// Tests for PayloadTermQuery. Documents are indexed with an analyzer that
/// stamps a fixed payload byte on every token (1 on "field"; alternating
/// 2 and 4 on "multiField"), while BoostingSimilarity neutralizes every other
/// scoring factor, so the payload value becomes the score and exact score
/// values can be asserted.
/// </summary>
[TestFixture]
public class TestPayloadTermQuery:LuceneTestCase
{
	private void InitBlock()
	{
		similarity = new BoostingSimilarity();
	}
	private IndexSearcher searcher;
	private BoostingSimilarity similarity;
	// Payload bytes written by PayloadFilter below.
	private byte[] payloadField = new byte[]{1};
	private byte[] payloadMultiField1 = new byte[]{2};
	private byte[] payloadMultiField2 = new byte[]{4};
	protected internal RAMDirectory directory;
	public TestPayloadTermQuery():base()
	{
		InitBlock();
	}
	/// <summary>Lower-cases tokens and runs them through PayloadFilter.</summary>
	private class PayloadAnalyzer:Analyzer
	{
		public PayloadAnalyzer(TestPayloadTermQuery enclosingInstance)
		{
			InitBlock(enclosingInstance);
		}
		private void InitBlock(TestPayloadTermQuery enclosingInstance)
		{
			this.enclosingInstance = enclosingInstance;
		}
		private TestPayloadTermQuery enclosingInstance;
		public TestPayloadTermQuery Enclosing_Instance
		{
			get
			{
				return enclosingInstance;
			}
		}
		public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
		{
			TokenStream result = new LowerCaseTokenizer(reader);
			result = new PayloadFilter(enclosingInstance, result, fieldName);
			return result;
		}
	}
	/// <summary>
	/// Token filter that attaches payloads: payloadField (1) for "field",
	/// and payloadMultiField1/payloadMultiField2 (2/4) alternately for
	/// "multiField". Tokens of other fields are passed through untouched.
	/// </summary>
	private class PayloadFilter:TokenFilter
	{
		private void InitBlock(TestPayloadTermQuery enclosingInstance)
		{
			this.enclosingInstance = enclosingInstance;
		}
		private TestPayloadTermQuery enclosingInstance;
		public TestPayloadTermQuery Enclosing_Instance
		{
			get
			{
				return enclosingInstance;
			}
		}
		internal System.String fieldName;
		// Counter used to alternate the multiField payload between 2 and 4.
		internal int numSeen = 0;
		internal PayloadAttribute payloadAtt;
		public PayloadFilter(TestPayloadTermQuery enclosingInstance, TokenStream input, System.String fieldName):base(input)
		{
			InitBlock(enclosingInstance);
			this.fieldName = fieldName;
			payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
		}
		public override bool IncrementToken()
		{
			bool hasNext = input.IncrementToken();
			if (hasNext)
			{
				if (fieldName.Equals("field"))
				{
					payloadAtt.SetPayload(new Payload(Enclosing_Instance.payloadField));
				}
				else if (fieldName.Equals("multiField"))
				{
					if (numSeen % 2 == 0)
					{
						payloadAtt.SetPayload(new Payload(Enclosing_Instance.payloadMultiField1));
					}
					else
					{
						payloadAtt.SetPayload(new Payload(Enclosing_Instance.payloadMultiField2));
					}
					numSeen++;
				}
				return true;
			}
			else
			{
				return false;
			}
		}
	}
	/// <summary>
	/// Indexes 1000 docs, each with the spelled-out number i in a no-payload
	/// field, in "field" (payload 1) and twice in "multiField" (payloads 2/4).
	/// </summary>
	[SetUp]
	public override void SetUp()
	{
		base.SetUp();
		directory = new RAMDirectory();
		PayloadAnalyzer analyzer = new PayloadAnalyzer(this);
		IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
		writer.SetSimilarity(similarity);
		//writer.infoStream = System.out;
		for (int i = 0; i < 1000; i++)
		{
			Document doc = new Document();
			Field noPayloadField = new Field(PayloadHelper.NO_PAYLOAD_FIELD, English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED);
			//noPayloadField.setBoost(0);
			doc.Add(noPayloadField);
			doc.Add(new Field("field", English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
			doc.Add(new Field("multiField", English.IntToEnglish(i) + " " + English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
			writer.AddDocument(doc);
		}
		writer.Optimize();
		writer.Close();
		searcher = new IndexSearcher(directory, true);
		searcher.SetSimilarity(similarity);
	}
	/// <summary>
	/// "seventy" appears once per matching doc in "field" with payload 1, so
	/// every hit must score exactly 1.
	/// </summary>
	[Test]
	public virtual void Test()
	{
		PayloadTermQuery query = new PayloadTermQuery(new Term("field", "seventy"), new MaxPayloadFunction());
		TopDocs hits = searcher.Search(query, null, 100);
		Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
		Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);
		//they should all have the exact same score, because they all contain seventy once, and we set
		//all the other similarity factors to be 1
		Assert.IsTrue(hits.GetMaxScore() == 1, hits.GetMaxScore() + " does not equal: " + 1);
		for (int i = 0; i < hits.ScoreDocs.Length; i++)
		{
			ScoreDoc doc = hits.ScoreDocs[i];
			Assert.IsTrue(doc.score == 1, doc.score + " does not equal: " + 1);
		}
		CheckHits.CheckExplanations(query, PayloadHelper.FIELD, searcher, true);
		Lucene.Net.Search.Spans.Spans spans = query.GetSpans(searcher.GetIndexReader());
		Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
		Assert.IsTrue(spans is TermSpans, "spans is not an instanceof " + typeof(TermSpans));
		/*float score = hits.score(0);
		for (int i =1; i < hits.length(); i++)
		{
		Assert.IsTrue(score == hits.score(i), "scores are not equal and they should be");
		}*/
	}
	/// <summary>Basic equality/sanity checks on PayloadTermQuery instances.</summary>
	[Test]
	public virtual void TestQuery()
	{
		PayloadTermQuery boostingFuncTermQuery = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
		QueryUtils.Check(boostingFuncTermQuery);
		SpanTermQuery spanTermQuery = new SpanTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"));
		Assert.IsTrue(boostingFuncTermQuery.Equals(spanTermQuery) == spanTermQuery.Equals(boostingFuncTermQuery));
		PayloadTermQuery boostingFuncTermQuery2 = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new AveragePayloadFunction());
		QueryUtils.CheckUnequal(boostingFuncTermQuery, boostingFuncTermQuery2);
	}
	/// <summary>
	/// "multiField" holds the number twice (payloads 2 and 4). With
	/// MaxPayloadFunction, docs 70+i*100 ("seventy" twice) score 4; the rest
	/// score 2.
	/// </summary>
	[Test]
	public virtual void TestMultipleMatchesPerDoc()
	{
		PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
		TopDocs hits = searcher.Search(query, null, 100);
		Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
		Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);
		//they should all have the exact same score, because they all contain seventy once, and we set
		//all the other similarity factors to be 1
		//System.out.println("Hash: " + seventyHash + " Twice Hash: " + 2*seventyHash);
		Assert.IsTrue(hits.GetMaxScore() == 4.0, hits.GetMaxScore() + " does not equal: " + 4.0);
		//there should be exactly 10 items that score a 4, all the rest should score a 2
		//The 10 items are: 70 + i*100 where i in [0-9]
		int numTens = 0;
		for (int i = 0; i < hits.ScoreDocs.Length; i++)
		{
			ScoreDoc doc = hits.ScoreDocs[i];
			if (doc.doc % 10 == 0)
			{
				numTens++;
				Assert.IsTrue(doc.score == 4.0, doc.score + " does not equal: " + 4.0);
			}
			else
			{
				Assert.IsTrue(doc.score == 2, doc.score + " does not equal: " + 2);
			}
		}
		Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
		CheckHits.CheckExplanations(query, "field", searcher, true);
		Lucene.Net.Search.Spans.Spans spans = query.GetSpans(searcher.GetIndexReader());
		Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
		Assert.IsTrue(spans is TermSpans, "spans is not an instanceof " + typeof(TermSpans));
		//should be two matches per document
		int count = 0;
		//100 hits times 2 matches per hit, we should have 200 in count
		while (spans.Next())
		{
			count++;
		}
		Assert.IsTrue(count == 200, count + " does not equal: " + 200);
	}
	//Set includeSpanScore to false, in which case just the payload score comes through.
	[Test]
	public virtual void TestIgnoreSpanScorer()
	{
		PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction(), false);
		// NOTE(review): theSearcher (with FullSimilarity) is created but never
		// used - the search below runs against the fixture's 'searcher', so
		// FullSimilarity is not actually exercised. Confirm intent before
		// changing: switching searchers would alter the asserted scores.
		IndexSearcher theSearcher = new IndexSearcher(directory, true);
		theSearcher.SetSimilarity(new FullSimilarity());
		TopDocs hits = searcher.Search(query, null, 100);
		Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
		Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);
		//they should all have the exact same score, because they all contain seventy once, and we set
		//all the other similarity factors to be 1
		//System.out.println("Hash: " + seventyHash + " Twice Hash: " + 2*seventyHash);
		Assert.IsTrue(hits.GetMaxScore() == 4.0, hits.GetMaxScore() + " does not equal: " + 4.0);
		//there should be exactly 10 items that score a 4, all the rest should score a 2
		//The 10 items are: 70 + i*100 where i in [0-9]
		int numTens = 0;
		for (int i = 0; i < hits.ScoreDocs.Length; i++)
		{
			ScoreDoc doc = hits.ScoreDocs[i];
			if (doc.doc % 10 == 0)
			{
				numTens++;
				Assert.IsTrue(doc.score == 4.0, doc.score + " does not equal: " + 4.0);
			}
			else
			{
				Assert.IsTrue(doc.score == 2, doc.score + " does not equal: " + 2);
			}
		}
		Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);
		CheckHits.CheckExplanations(query, "field", searcher, true);
		Lucene.Net.Search.Spans.Spans spans = query.GetSpans(searcher.GetIndexReader());
		Assert.IsTrue(spans != null, "spans is null and it shouldn't be");
		Assert.IsTrue(spans is TermSpans, "spans is not an instanceof " + typeof(TermSpans));
		//should be two matches per document
		int count = 0;
		//100 hits times 2 matches per hit, we should have 200 in count
		// NOTE(review): unlike TestMultipleMatchesPerDoc, 'count' is never
		// asserted here (expected 200) - looks like a missing assertion.
		while (spans.Next())
		{
			count++;
		}
	}
	/// <summary>A term that matches nothing yields zero hits.</summary>
	[Test]
	public virtual void TestNoMatch()
	{
		PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.FIELD, "junk"), new MaxPayloadFunction());
		TopDocs hits = searcher.Search(query, null, 100);
		Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
		Assert.IsTrue(hits.TotalHits == 0, "hits Size: " + hits.TotalHits + " is not: " + 0);
	}
	/// <summary>
	/// Boolean MUST("zero") + MUST_NOT("foo") over the no-payload field still
	/// returns the single expected doc (payload machinery must not break it).
	/// </summary>
	[Test]
	public virtual void TestNoPayload()
	{
		PayloadTermQuery q1 = new PayloadTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "zero"), new MaxPayloadFunction());
		PayloadTermQuery q2 = new PayloadTermQuery(new Term(PayloadHelper.NO_PAYLOAD_FIELD, "foo"), new MaxPayloadFunction());
		BooleanClause c1 = new BooleanClause(q1, BooleanClause.Occur.MUST);
		BooleanClause c2 = new BooleanClause(q2, BooleanClause.Occur.MUST_NOT);
		BooleanQuery query = new BooleanQuery();
		query.Add(c1);
		query.Add(c2);
		TopDocs hits = searcher.Search(query, null, 100);
		Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
		Assert.IsTrue(hits.TotalHits == 1, "hits Size: " + hits.TotalHits + " is not: " + 1);
		int[] results = new int[1];
		results[0] = 0; //hits.scoreDocs[0].doc;
		CheckHits.CheckHitCollector(query, PayloadHelper.NO_PAYLOAD_FIELD, searcher, results);
	}
	// must be static for weight serialization tests
	/// <summary>
	/// Similarity that returns the payload byte as the payload score and pins
	/// every other factor to 1, so scores equal payload values exactly.
	/// </summary>
	[Serializable]
	internal class BoostingSimilarity:DefaultSimilarity
	{
		// TODO: Remove warning after API has been finalized
		public override float ScorePayload(int docId, System.String fieldName, int start, int end, byte[] payload, int offset, int length)
		{
			//we know it is size 4 here, so ignore the offset/length
			return payload[0];
		}
		//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
		//Make everything else 1 so we see the effect of the payload
		//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
		public override float LengthNorm(System.String fieldName, int numTerms)
		{
			return 1;
		}
		public override float QueryNorm(float sumOfSquaredWeights)
		{
			return 1;
		}
		public override float SloppyFreq(int distance)
		{
			return 1;
		}
		public override float Coord(int overlap, int maxOverlap)
		{
			return 1;
		}
		public override float Idf(int docFreq, int numDocs)
		{
			return 1;
		}
		public override float Tf(float freq)
		{
			return freq == 0?0:1;
		}
	}
	[Serializable]
	internal class FullSimilarity:DefaultSimilarity
	{
		// NOTE(review): this signature does not match the ScorePayload override
		// in BoostingSimilarity above, so it declares a new virtual method
		// rather than overriding the base - confirm whether the scoring code
		// ever invokes it.
		public virtual float ScorePayload(int docId, System.String fieldName, byte[] payload, int offset, int length)
		{
			//we know it is size 4 here, so ignore the offset/length
			return payload[0];
		}
	}
}
}
| |
//
// PlaybackSubtitleActions.cs
//
// Author:
// Olivier Dufour <olivier.duff@gmail.com>
//
// Copyright 2010 Olivier Dufour
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Collections;
using Gtk;
using Banshee.ServiceStack;
using Banshee.I18n;
using Banshee.Collection;
using Hyena;
namespace Banshee.Gui
{
/// <summary>
/// Action group for playback subtitle handling: a "load subtitle file" action
/// plus one radio action per embedded subtitle track of the current video.
/// Enumerating the group yields the embedded-subtitle radio actions.
/// </summary>
public class PlaybackSubtitleActions : BansheeActionGroup, IEnumerable<RadioAction>
{
    // Entries currently registered for embedded subtitle tracks
    // (including the leading "None" entry when any tracks exist).
    private readonly List<RadioActionEntry> embedded_subtitles_actions = new List<RadioActionEntry> ();
    public event EventHandler Changed;
    private Menu mainMenu;
    public new bool Sensitive {
        get { return base.Sensitive; }
        set {
            base.Sensitive = value;
            OnChanged ();
        }
    }
    public PlaybackSubtitleActions (InterfaceActionService actionService)
        : base (actionService, "PlaybackSubtitle")
    {
        Actions.AddActionGroup (this);
        Add (new ActionEntry [] {
            new ActionEntry ("SubtitleMenuAction", null,
                Catalog.GetString ("Subtitle"), null,
                Catalog.GetString ("Subtitle"), null),
            new ActionEntry ("LoadSubtitleAction", null,
                Catalog.GetString ("Load File..."), null,
                Catalog.GetString ("Load subtitle file"), OnLoadSubtitleAction)
        });
        this["SubtitleMenuAction"].Sensitive = true;
        ServiceManager.PlaybackController.TrackStarted += OnPlaybackTrackStarted;
        //TODO: Set default sub
    }
    // Shows a file chooser and hands the selected URI to the player engine.
    private void OnLoadSubtitleAction (object o, EventArgs args)
    {
        var chooser = new Banshee.Gui.Dialogs.FileChooserDialog (
            Catalog.GetString ("Load Subtitle File"),
            ServiceManager.Get<Banshee.Gui.GtkElementsService> ().PrimaryWindow,
            FileChooserAction.Open
        );
        chooser.DefaultResponse = ResponseType.Ok;
        chooser.SelectMultiple = false;
        chooser.AddButton (Stock.Cancel, ResponseType.Cancel);
        chooser.AddButton (Catalog.GetString ("L_oad"), ResponseType.Ok);
        Hyena.Gui.GtkUtilities.SetChooserShortcuts (chooser,
            ServiceManager.SourceManager.VideoLibrary.BaseDirectory
        );
        var filter = new FileFilter();
        filter.AddMimeType ("text/x-pango-markup");
        filter.AddMimeType ("text/plain");
        filter.Name = Catalog.GetString ("Subtitle files");
        chooser.AddFilter (filter);
        if (chooser.Run () == (int)ResponseType.Ok) {
            ServiceManager.PlayerEngine.SubtitleUri = new SafeUri (chooser.Uri);
        }
        chooser.Destroy ();
    }
    private void OnPlaybackTrackStarted (object o, EventArgs args)
    {
        var current_track = ServiceManager.PlaybackController.CurrentTrack;
        if (current_track != null &&
            (current_track.MediaAttributes & TrackMediaAttributes.VideoStream) != 0) {
            //TODO: activate load subtitle file menu else unactivate
        }
    }
    // Removes the embedded-subtitle actions from the action group and forgets
    // their entries.
    private void ClearEmbeddedSubtitles ()
    {
        foreach (RadioActionEntry action in embedded_subtitles_actions) {
            this.Remove (action.name);
        }
        // FIX: previously the entries were never removed from the list, so each
        // ReloadEmbeddedSubtitle accumulated stale duplicates (re-added via
        // ToArray () and yielded again by GetEnumerator).
        embedded_subtitles_actions.Clear ();
    }
    // Builds a radio entry for embedded subtitle track i, falling back to a
    // generic "Subtitle {i}" label when the engine provides no description.
    private void AddEmbeddedSubtitle (int i)
    {
        string desc = ServiceManager.PlayerEngine.GetSubtitleDescription (i);
        if (String.IsNullOrEmpty (desc)) {
            desc = String.Format (Catalog.GetString ("Subtitle {0}"), i);
        }
        RadioActionEntry new_action = new RadioActionEntry (String.Format ("Subtitle{0}", i), null,
            desc, null,
            String.Format (Catalog.GetString ("Activate embedded subtitle {0}"), i), i);
        embedded_subtitles_actions.Add (new_action);
    }
    /// <summary>
    /// Rebuilds the embedded-subtitle radio actions from the player engine's
    /// current subtitle tracks and refreshes the menu.
    /// </summary>
    public void ReloadEmbeddedSubtitle ()
    {
        ClearEmbeddedSubtitles ();
        int sub_count = ServiceManager.PlayerEngine.SubtitleCount;
        if (sub_count == 0) {
            RefreshMenu ();
            return;
        }
        // "None" (value -1) hides subtitles; real tracks use their index.
        embedded_subtitles_actions.Add (new RadioActionEntry ("None", null,
            Catalog.GetString ("None"), null,
            Catalog.GetString ("Hide subtitles"), -1));
        for (int i = 0; i < sub_count; i++) {
            AddEmbeddedSubtitle (i);
        }
        Add (embedded_subtitles_actions.ToArray (), 0, OnActionChanged);
        RefreshMenu ();
    }
    private void OnActionChanged (object o, ChangedArgs args)
    {
        Log.Debug (string.Format ("[sub] Set sub {0}", args.Current.Value));
        ServiceManager.PlayerEngine.SubtitleIndex = args.Current.Value;
    }
    private void OnChanged ()
    {
        EventHandler handler = Changed;
        if (handler != null) {
            handler (this, EventArgs.Empty);
        }
    }
    public IEnumerator<RadioAction> GetEnumerator ()
    {
        foreach (RadioActionEntry entry in embedded_subtitles_actions) {
            yield return (RadioAction)this[entry.name];
        }
    }
    IEnumerator IEnumerable.GetEnumerator ()
    {
        return GetEnumerator ();
    }
    public void AttachSubmenu (string menuItemPath)
    {
        MenuItem menu = Actions.UIManager.GetWidget (menuItemPath) as MenuItem;
        menu.Submenu = CreateMenu ();
    }
    // Drops the embedded-subtitle items from the menu and re-adds the current
    // set. No-op until CreateMenu has been called.
    private void RefreshMenu ()
    {
        // FIX: guard against ReloadEmbeddedSubtitle firing before CreateMenu
        // (previously a NullReferenceException).
        if (mainMenu == null) {
            return;
        }
        foreach (Widget w in mainMenu.Children) {
            //RadioMenuItems are embedded subtitle ones
            if (w is RadioMenuItem) {
                mainMenu.Remove (w);
            }
        }
        AddEmbeddedSubtitleMenu ();
        mainMenu.ShowAll ();
    }
    /// <summary>Creates the subtitle submenu: load-file item, separator, tracks.</summary>
    public Menu CreateMenu ()
    {
        mainMenu = new Gtk.Menu ();
        mainMenu.Append (this["LoadSubtitleAction"].CreateMenuItem ());
        mainMenu.Append (new SeparatorMenuItem ());
        AddEmbeddedSubtitleMenu ();
        mainMenu.ShowAll ();
        return mainMenu;
    }
    public void AddEmbeddedSubtitleMenu ()
    {
        foreach (RadioAction action in this) {
            mainMenu.Append (action.CreateMenuItem ());
            Log.Debug (string.Format ("[sub] Add {0}", action.Name));
        }
    }
}
}
| |
//
// Copyright (C) DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using Cassandra.IntegrationTests.TestClusterManagement;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Cassandra.IntegrationTests.TestBase;
using Cassandra.Serialization;
using Cassandra.Tests;
namespace Cassandra.IntegrationTests.Core
{
/// <summary>
/// Integration tests exercising large rows/values against a live Cassandra
/// cluster: wide rows, wide batches, blob rows, very large text, 330-column
/// tables, and collection key/value size limits.
/// </summary>
[TestFixture, Category(TestCategory.Long), Ignore("tests that are not marked with 'short' need to be refactored/deleted")]
public class LargeDataTests : TestGlobals
{
    // Partition key shared by all tests.
    private const int Key = 0;
    private const string KeyspaceNameDefault = "largedatatests";
    ISession _session = null;
    [SetUp]
    public void SetupFixture()
    {
        _session = TestClusterManager.GetTestCluster(1).Session;
    }
    /// <summary>
    /// Test a wide row
    /// </summary>
    [Test]
    public void WideRows()
    {
        string uniqueTableName = "wide_rows_" + Randomm.RandomAlphaNum(16);
        TestWideRows(_session, uniqueTableName);
    }
    /// <summary>
    /// Test a batch that writes a row of size 10,000
    /// </summary>
    [Test]
    public void WideBatchRows()
    {
        string uniqueTableName = "wide_batch_rows" + Randomm.RandomAlphaNum(16);
        TestWideBatchRows(_session, uniqueTableName);
    }
    /// <summary>
    /// Test a wide row consisting of a ByteBuffer
    /// </summary>
    [Test]
    public void WideByteRows()
    {
        string uniqueTableName = "wide_byte_rows" + Randomm.RandomAlphaNum(16);
        TestByteRows(_session, uniqueTableName);
    }
    /// <summary>
    /// Test a row with a single extra large text value
    /// </summary>
    [Test]
    public void LargeText()
    {
        string uniqueTableName = "large_text_" + Randomm.RandomAlphaNum(16);
        TestLargeText(_session, uniqueTableName);
    }
    /// <summary>
    /// Creates a table with 330 columns
    /// </summary>
    [Test]
    public void WideTable()
    {
        string uniqueTableName = "wide_table" + Randomm.RandomAlphaNum(16);
        TestWideTable(_session, uniqueTableName);
    }
    /// <summary>
    /// Test list with a single large text value
    /// </summary>
    [Test]
    public void LargeListText()
    {
        string uniqueTableName = GetUniqueTableName();
        CreateTable(_session, uniqueTableName, "list<text>");
        string b = new string('8', UInt16.MaxValue);
        _session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},['{2}'])", uniqueTableName, Key, b), ConsistencyLevel.Quorum);
        using (var rs = _session.Execute(string.Format("SELECT * FROM {0} WHERE k = {1}", uniqueTableName, Key.ToString()), ConsistencyLevel.Quorum))
        {
            Row row = rs.GetRows().FirstOrDefault();
            Assert.True(b.Equals(((List<string>)row["i"])[0]));
        }
    }
    /// <summary>
    /// Test set with max allowed value size
    /// </summary>
    [Test]
    public void Set_Val_Max()
    {
        string uniqueTableName = GetUniqueTableName();
        CreateTable(_session, uniqueTableName, "set<text>");
        // according to specs it should accept full UInt16.MaxValue, but for some reason it throws "The sum of all clustering columns is too long"
        string setVal = new string('a', UInt16.MaxValue - 9);
        // FIX: pass ConsistencyLevel.Quorum like every sibling test (the write
        // previously used the default consistency while the read used Quorum).
        _session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},{{'{2}'}})", uniqueTableName, Key, setVal), ConsistencyLevel.Quorum);
        using (var rs = _session.Execute(string.Format("SELECT * FROM {0} WHERE k = {1}", uniqueTableName, Key), ConsistencyLevel.Quorum))
        {
            Row row = rs.GetRows().FirstOrDefault();
            Assert.AreEqual(setVal, ((List<string>)row["i"]).First());
        }
    }
    /// <summary>
    /// Test set with max allowed value size plus one
    /// </summary>
    [Test]
    public void Set_Val_MaxPlusOne()
    {
        string uniqueTableName = GetUniqueTableName();
        CreateTable(_session, uniqueTableName, "set<text>");
        // given MAX = 65535
        // for C* 2.1.x, set string value max = MAX - 8
        // for C* 2.0.x, set string value max = MAX - 6
        string setVal = new string('a', UInt16.MaxValue - 6);
        try
        {
            _session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},{{'{2}'}})", uniqueTableName, Key, setVal));
            Assert.Fail("Expected exception was not thrown!");
        }
        catch (InvalidQueryException e)
        {
            string expectedErrMsg = "The sum of all clustering columns is too long";
            Assert.True(e.Message.Contains(expectedErrMsg), "Exception message {0} did not contain expected error message {1}.", e.Message, expectedErrMsg);
        }
    }
    /// <summary>
    /// Test map with max allowed key and value size
    /// </summary>
    [Test]
    public void Map_Key_Max_Val_Max()
    {
        string uniqueTableName = GetUniqueTableName();
        CreateTable(_session, uniqueTableName, "map<text, text>");
        // given MAX = 65535, map string key max = MAX - 9 and map string value max = MAX
        string mapKey = new string('a', UInt16.MaxValue - 9);
        string mapVal = new string('b', UInt16.MaxValue);
        _session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},{{ '{2}' : '{3}' }})", uniqueTableName, Key, mapKey, mapVal), ConsistencyLevel.Quorum);
        using (var rs = _session.Execute(string.Format("SELECT * FROM {0} WHERE k = {1}", uniqueTableName, Key.ToString()), ConsistencyLevel.Quorum))
        {
            Row row = rs.GetRows().FirstOrDefault();
            Assert.AreEqual(mapKey, ((SortedDictionary<string, string>)row["i"]).First().Key);
            Assert.AreEqual(mapVal, ((SortedDictionary<string, string>)row["i"]).First().Value);
        }
    }
    /// <summary>
    /// Test map with max allowed key size + 1
    /// </summary>
    [Test]
    public void Map_TextKey_MaxPlusOne()
    {
        string uniqueTableName = GetUniqueTableName();
        CreateTable(_session, uniqueTableName, "map<text, text>");
        // given MAX = 65535
        // for C* 2.1.x -- map string key max = MAX - 9 and map string value max = MAX
        // for C* 2.0.x -- map string key max = MAX - 6
        string mapKey = new string('a', UInt16.MaxValue - 6);
        string mapVal = new string('b', 1); // something safe
        try
        {
            _session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},{{ '{2}' : '{3}' }})", uniqueTableName, Key, mapKey, mapVal), ConsistencyLevel.Quorum);
            Assert.Fail("Expected exception was not thrown!");
        }
        catch (InvalidQueryException e)
        {
            string expectedErrMsg = "The sum of all clustering columns is too long";
            Assert.True(e.Message.Contains(expectedErrMsg),
                string.Format("Exception message: '{0}' did not contain error message '{1}'", e.Message, expectedErrMsg));
        }
    }
    /// <summary>
    /// Test map with max allowed value size + 1
    /// </summary>
    [Test]
    public void Map_Value_MaxPlusOne()
    {
        string uniqueTableName = GetUniqueTableName();
        CreateTable(_session, uniqueTableName, "map<text, text>");
        // given MAX = 65535, map string key max = MAX - 9 and map string value max = MAX
        string mapKey = new string('a', UInt16.MaxValue - 9);
        string mapVal = new string('b', UInt16.MaxValue + 1);
        try
        {
            _session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},{{ '{2}' : '{3}' }})", uniqueTableName, Key, mapKey, mapVal), ConsistencyLevel.Quorum);
            Assert.Fail("Expected exception was not thrown!");
        }
        catch (InvalidQueryException e)
        {
            string expectedErrMsg = "Map value is too long.";
            Assert.True(e.Message.Contains(expectedErrMsg),
                string.Format("Exception message: '{0}' did not contain error message '{1}'", e.Message, expectedErrMsg));
        }
    }
    ///////////////////////////////////////
    // Test Helpers
    ///////////////////////////////////////
    // Creates (or reuses) the default keyspace and a two-column table
    // (k INT primary key, i of the given CQL type).
    private static void CreateTable(ISession session, string tableName, string cqlType)
    {
        session.CreateKeyspaceIfNotExists(KeyspaceNameDefault);
        session.ChangeKeyspace(KeyspaceNameDefault);
        session.Execute(string.Format("CREATE TABLE {0} (k INT, i {1}, PRIMARY KEY(k))", tableName, cqlType));
    }
    // Test a wide row
    private static void TestWideRows(ISession session, string tableName)
    {
        string cql = string.Format("CREATE TABLE {0} (i INT, str {1}, PRIMARY KEY(i,str))", tableName, "text");
        session.Execute(cql);
        // Write data
        //Use a row length of 1024, we are testing the driver not Cassandra itself
        List<string> expectedStrings = new List<string>();
        for (int str = 0; str < 1024; ++str)
        {
            string insertCql = string.Format("INSERT INTO {0} (i,str) VALUES({1},'{2}')", tableName, Key, str);
            expectedStrings.Add(str.ToString());
            session.Execute(insertCql, ConsistencyLevel.Quorum);
        }
        // Read data: 'str' is a clustering column, so rows come back sorted.
        expectedStrings.Sort();
        var rs = session.Execute(string.Format("SELECT str FROM {0} WHERE i = {1}", tableName, Key), ConsistencyLevel.Quorum);
        {
            // Verify data
            List<Row> rows = rs.GetRows().ToList();
            for (int j = 0; j < rows.Count; j++)
                Assert.AreEqual(expectedStrings[j], rows[j]["str"]);
        }
    }
    // Test a batch that writes a row of size
    private static void TestWideBatchRows(ISession session, string tableName)
    {
        string cql = string.Format("CREATE TABLE {0} (i INT, str {1}, PRIMARY KEY(i,str))", tableName, "text");
        session.Execute(cql);
        // Write data as a single logged batch of 1024 inserts.
        List<string> expectedStrings = new List<string>();
        var sb = new StringBuilder("BEGIN BATCH ");
        for (int str = 0; str < 1024; ++str)
        {
            string insertCql = string.Format("INSERT INTO {0} (i,str) VALUES({1},'{2}')", tableName, Key, str);
            expectedStrings.Add(str.ToString());
            sb.AppendLine(insertCql);
        }
        sb.Append("APPLY BATCH");
        session.Execute(sb.ToString(), ConsistencyLevel.Quorum);
        // Read data
        expectedStrings.Sort();
        var rs = session.Execute(string.Format("SELECT str FROM {0} WHERE i = {1}", tableName, Key), ConsistencyLevel.Quorum);
        {
            // Verify data
            List<Row> rows = rs.GetRows().ToList();
            for (int j = 0; j < rows.Count; j++)
                Assert.AreEqual(expectedStrings[j], rows[j]["str"]);
        }
    }
    // Test a wide row consisting of a ByteBuffer
    private static void TestByteRows(ISession session, string tableName)
    {
        session.Execute(string.Format("CREATE TABLE {0} (k INT, i {1}, PRIMARY KEY(k,i))", tableName, "BLOB"));
        // Build small ByteBuffer sample: 56 zero bytes followed by 0xCAFE.
        var bw = new FrameWriter(new MemoryStream(), new SerializerManager(ProtocolVersion.V1).GetCurrentSerializer());
        for (int i = 0; i < 56; i++)
            bw.WriteByte(0);
        bw.WriteUInt16(0xCAFE);
        var bb = new byte[58];
        Array.Copy(bw.GetBuffer(), bb, 58);
        // Write data
        for (int i = 0; i < 1024; ++i)
            session.Execute(string.Format("INSERT INTO {0}(k,i) values({1},0x{2})", tableName, Key, CqlQueryTools.ToHex(bb)),
                            ConsistencyLevel.Quorum);
        // Read data
        var rs = session.Execute("SELECT i FROM " + tableName + " WHERE k = " + Key, ConsistencyLevel.Quorum);
        // Verify data
        // FIX: NUnit's Assert.AreEqual takes (expected, actual) - the arguments
        // were swapped, which produced misleading failure messages.
        foreach (var row in rs)
            Assert.AreEqual(bb, (byte[])row["i"]);
    }
    // Test a row with a single extra large text value
    private static void TestLargeText(ISession session, string tableName)
    {
        session.Execute(string.Format("CREATE TABLE {0} (k INT, i {1}, PRIMARY KEY(k,i))", tableName, "text"));
        // Write data
        var b = new StringBuilder();
        for (int i = 0; i < 1000; ++i)
            b.Append(i); // Create ultra-long text
        session.Execute(string.Format("INSERT INTO {0}(k,i) VALUES({1},'{2}')", tableName, Key, b), ConsistencyLevel.Quorum);
        // Read data
        var rs = session.Execute("SELECT * FROM " + tableName + " WHERE k = " + Key, ConsistencyLevel.Quorum);
        {
            Row row = rs.GetRows().FirstOrDefault(); // select().all().from("large_text").where(eq("k", key))).one();
            // Verify data
            Assert.True(b.ToString().Equals(row["i"]));
        }
    }
    // Converts an integer to an string of letters
    // (digits are emitted least-significant first, e.g. 12 -> "cb").
    private static String CreateColumnName(int i)
    {
        String[] letters = { "a", "b", "c", "d", "e", "f", "g", "h", "i", "j" };
        StringBuilder columnName;
        int currentI;
        currentI = i;
        columnName = new StringBuilder();
        while (true)
        {
            columnName.Append(letters[currentI % 10]);
            currentI /= 10;
            if (currentI == 0)
                break;
        }
        return columnName.ToString();
    }
    // Creates a table with 330 columns
    private static void TestWideTable(ISession session, String tableName)
    {
        session.Execute(GetTableDeclaration(tableName));
        // Write data: one INSERT covering all 330 columns (value = column index).
        var insrt = new StringBuilder("INSERT INTO " + tableName + "(k");
        var valus = new StringBuilder(" VALUES(" + Key);
        for (int i = 0; i < 330; ++i)
        {
            insrt.Append(",\"" + CreateColumnName(i) + "\"");
            valus.Append("," + i);
        }
        insrt.Append(") " + valus + ")");
        session.Execute(insrt.ToString(), ConsistencyLevel.Quorum);
        // Read data
        var rs = session.Execute("SELECT * FROM " + tableName + " WHERE k = " + Key, ConsistencyLevel.Quorum);
        {
            Row row = rs.GetRows().FirstOrDefault();
            Assert.True(row != null, "row is null");
            Assert.True(row.Length >= 330, "not enough columns");
            // Verify data
            for (int i = 0; i < 330; ++i)
            {
                string cn = CreateColumnName(i);
                Assert.True(row[cn] != null, "column is null");
                Assert.True(row[cn] is int, "column is not int");
                Assert.True((int)row[cn] == i);
            }
        }
    }
    private static string GetUniqueTableName()
    {
        return "LgDataTsts_" + Randomm.RandomAlphaNum(16);
    }
    // Builds the CREATE TABLE statement for the 330-column wide table.
    private static String GetTableDeclaration(string tableName)
    {
        var tableDeclaration = new StringBuilder();
        tableDeclaration.Append("CREATE TABLE " + tableName + " (");
        tableDeclaration.Append("k INT PRIMARY KEY");
        for (int i = 0; i < 330; ++i)
        {
            tableDeclaration.Append(string.Format(", \"{0}\" INT", CreateColumnName(i)));
        }
        tableDeclaration.Append(")");
        return tableDeclaration.ToString();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
namespace System
{
    // A place holder struct for signed bytes; provides comparison, formatting,
    // parsing and IConvertible support for the sbyte primitive.
    [CLSCompliant(false), System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)]
    [Serializable]
    public struct SByte : IComparable, IFormattable, IComparable<SByte>, IEquatable<SByte>, IConvertible
    {
        private sbyte m_value; // Do not rename (binary serialization)
        // The maximum value that an SByte may represent: 127.
        public const sbyte MaxValue = (sbyte)0x7F;
        // The minimum value that an SByte may represent: -128.
        public const sbyte MinValue = unchecked((sbyte)0x80);
        // Compares this object to another object, returning an integer that
        // indicates the relationship: negative if this is less than obj,
        // zero if equal, positive if greater.
        // null is considered to be less than any instance.
        // If object is not of type SByte, this method throws an ArgumentException.
        //
        public int CompareTo(Object obj)
        {
            if (obj == null)
            {
                return 1;
            }
            if (!(obj is SByte))
            {
                throw new ArgumentException(SR.Arg_MustBeSByte);
            }
            // Both operands widen to int, so the subtraction cannot overflow.
            return m_value - ((SByte)obj).m_value;
        }
        // Strongly-typed comparison; same ordering contract as CompareTo(Object).
        public int CompareTo(SByte value)
        {
            return m_value - value;
        }
        // Determines whether two SByte objects are equal.
        public override bool Equals(Object obj)
        {
            if (!(obj is SByte))
            {
                return false;
            }
            return m_value == ((SByte)obj).m_value;
        }
        [NonVersionable]
        public bool Equals(SByte obj)
        {
            return m_value == obj;
        }
        // Gets a hash code for this instance. The value is xor-ed with itself
        // shifted into the next byte so small values do not all hash alike.
        public override int GetHashCode()
        {
            return ((int)m_value ^ (int)m_value << 8);
        }
        // Provides a string representation of the value (general format, current culture).
        public override String ToString()
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return FormatProvider.FormatInt32(m_value, null, null);
        }
        // Provides a string representation using the supplied format provider.
        public String ToString(IFormatProvider provider)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return FormatProvider.FormatInt32(m_value, null, provider);
        }
        // Provides a string representation using the supplied format string.
        public String ToString(String format)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return ToString(format, null);
        }
        public String ToString(String format, IFormatProvider provider)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            // For hex formats a negative sbyte is rendered from its low byte only
            // (e.g. -1 -> "FF"): mask to 8 bits and format as unsigned.
            if (m_value < 0 && format != null && format.Length > 0 && (format[0] == 'X' || format[0] == 'x'))
            {
                uint temp = (uint)(m_value & 0x000000FF);
                return FormatProvider.FormatUInt32(temp, format, provider);
            }
            return FormatProvider.FormatInt32(m_value, format, provider);
        }
        // Parses a signed byte using NumberStyles.Integer and the current culture.
        [CLSCompliant(false)]
        public static sbyte Parse(String s)
        {
            return Parse(s, NumberStyles.Integer, null);
        }
        // Parses a signed byte in the given style using the current culture.
        [CLSCompliant(false)]
        public static sbyte Parse(String s, NumberStyles style)
        {
            UInt32.ValidateParseStyleInteger(style);
            return Parse(s, style, null);
        }
        // Parses a signed byte using NumberStyles.Integer and the given provider.
        [CLSCompliant(false)]
        public static sbyte Parse(String s, IFormatProvider provider)
        {
            return Parse(s, NumberStyles.Integer, provider);
        }
        // Parses a signed byte from a String in the given style. If
        // a NumberFormatInfo isn't specified, the current culture's
        // NumberFormatInfo is assumed.
        //
        [CLSCompliant(false)]
        public static sbyte Parse(String s, NumberStyles style, IFormatProvider provider)
        {
            UInt32.ValidateParseStyleInteger(style);
            int i = 0;
            try
            {
                i = FormatProvider.ParseInt32(s, style, provider);
            }
            catch (OverflowException e)
            {
                // Re-wrap so the overflow message names SByte rather than Int32.
                throw new OverflowException(SR.Overflow_SByte, e);
            }
            if ((style & NumberStyles.AllowHexSpecifier) != 0)
            { // We are parsing a hexadecimal number
                // Hex input is treated as the raw bit pattern: 0x00-0xFF is valid
                // and values above 0x7F wrap to negative (e.g. "FF" -> -1).
                if ((i < 0) || i > Byte.MaxValue)
                {
                    throw new OverflowException(SR.Overflow_SByte);
                }
                return (sbyte)i;
            }
            if (i < MinValue || i > MaxValue) throw new OverflowException(SR.Overflow_SByte);
            return (sbyte)i;
        }
        // Non-throwing variant of Parse(String).
        [CLSCompliant(false)]
        public static bool TryParse(String s, out SByte result)
        {
            return TryParse(s, NumberStyles.Integer, null, out result);
        }
        // Non-throwing variant of Parse(String, NumberStyles, IFormatProvider);
        // result is set to 0 when parsing fails.
        [CLSCompliant(false)]
        public static bool TryParse(String s, NumberStyles style, IFormatProvider provider, out SByte result)
        {
            UInt32.ValidateParseStyleInteger(style);
            result = 0;
            int i;
            if (!FormatProvider.TryParseInt32(s, style, provider, out i))
            {
                return false;
            }
            if ((style & NumberStyles.AllowHexSpecifier) != 0)
            { // We are parsing a hexadecimal number
                // Same bit-pattern semantics as Parse: accept 0x00-0xFF.
                if ((i < 0) || i > Byte.MaxValue)
                {
                    return false;
                }
                result = (sbyte)i;
                return true;
            }
            if (i < MinValue || i > MaxValue)
            {
                return false;
            }
            result = (sbyte)i;
            return true;
        }
        //
        // IConvertible implementation
        //
        public TypeCode GetTypeCode()
        {
            return TypeCode.SByte;
        }
        bool IConvertible.ToBoolean(IFormatProvider provider)
        {
            return Convert.ToBoolean(m_value);
        }
        char IConvertible.ToChar(IFormatProvider provider)
        {
            return Convert.ToChar(m_value);
        }
        sbyte IConvertible.ToSByte(IFormatProvider provider)
        {
            return m_value;
        }
        byte IConvertible.ToByte(IFormatProvider provider)
        {
            return Convert.ToByte(m_value);
        }
        short IConvertible.ToInt16(IFormatProvider provider)
        {
            return Convert.ToInt16(m_value);
        }
        ushort IConvertible.ToUInt16(IFormatProvider provider)
        {
            return Convert.ToUInt16(m_value);
        }
        int IConvertible.ToInt32(IFormatProvider provider)
        {
            return m_value;
        }
        uint IConvertible.ToUInt32(IFormatProvider provider)
        {
            return Convert.ToUInt32(m_value);
        }
        long IConvertible.ToInt64(IFormatProvider provider)
        {
            return Convert.ToInt64(m_value);
        }
        ulong IConvertible.ToUInt64(IFormatProvider provider)
        {
            return Convert.ToUInt64(m_value);
        }
        float IConvertible.ToSingle(IFormatProvider provider)
        {
            return Convert.ToSingle(m_value);
        }
        double IConvertible.ToDouble(IFormatProvider provider)
        {
            return Convert.ToDouble(m_value);
        }
        Decimal IConvertible.ToDecimal(IFormatProvider provider)
        {
            return Convert.ToDecimal(m_value);
        }
        DateTime IConvertible.ToDateTime(IFormatProvider provider)
        {
            // SByte has no meaningful DateTime representation.
            throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "SByte", "DateTime"));
        }
        Object IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return Convert.DefaultToType((IConvertible)this, type, provider);
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using sw = System.Windows;
using swm = System.Windows.Media;
using Eto.Drawing;
namespace Eto.Wpf.Drawing
{
	/// <summary>
	/// Handler for <see cref="IGraphicsPath"/>, backed by a WPF <see cref="swm.PathGeometry"/>
	/// </summary>
	/// <copyright>(c) 2012-2014 by Curtis Wensley</copyright>
	/// <license type="BSD-3">See LICENSE for full terms</license>
	public class GraphicsPathHandler : GraphicsPath.IHandler
	{
		// Underlying WPF geometry accumulating all figures of the path.
		swm.PathGeometry Control { get; set; }
		// Figure currently being built; null means the next segment starts a new figure.
		swm.PathFigure figure;
		public GraphicsPathHandler ()
		{
			Control = new swm.PathGeometry ();
			Control.Figures = new swm.PathFigureCollection ();
		}
		// Wraps an existing geometry; used by Clone().
		GraphicsPathHandler (swm.PathGeometry control)
		{
			Control = control;
		}
		public bool IsEmpty
		{
			get { return Control.IsEmpty (); }
		}
		// Last point appended to the path, updated by the drawing methods.
		public PointF CurrentPoint
		{
			get;
			private set;
		}
		// Ensures there is an open figure to append segments to. When a figure is
		// already open (and startNewFigure is false), connects the current position
		// to startPoint with a line instead of starting a new figure.
		void ConnectTo (sw.Point startPoint, bool startNewFigure = false)
		{
			if (startNewFigure || figure == null) {
				figure = new swm.PathFigure ();
				figure.StartPoint = startPoint;
				figure.Segments = new swm.PathSegmentCollection ();
				Control.Figures.Add (figure);
			} else
				figure.Segments.Add (new swm.LineSegment (startPoint, true));
		}
		public void CloseFigure ()
		{
			if (figure != null) {
				// A figure holding only a single LineSegment is left open:
				// closing it would merely retrace the same line back.
				if (!(figure.Segments.Count == 1 && figure.Segments[0] is swm.LineSegment))
					figure.IsClosed = true;
			}
			figure = null;
		}
		public void StartFigure ()
		{
			// Dropping the reference forces ConnectTo to begin a fresh figure.
			figure = null;
		}
		public void AddLines (IEnumerable<PointF> points)
		{
			// Materialize once to avoid multiple enumeration of the sequence.
			var pointsList = points as IList<PointF> ?? points.ToArray ();
			ConnectTo (pointsList.First ().ToWpf ());
			var wpfPoints = from p in pointsList select p.ToWpf ();
			figure.Segments.Add (new swm.PolyLineSegment (wpfPoints, true));
			CurrentPoint = pointsList.Last ();
		}
		public void AddLine (float startX, float startY, float endX, float endY)
		{
			ConnectTo(new sw.Point(startX, startY));
			figure.Segments.Add (new swm.LineSegment (new sw.Point (endX, endY), true));
			CurrentPoint = new PointF (endX, endY);
		}
		public void AddRectangle (float x, float y, float width, float height)
		{
			Control.AddGeometry (new swm.RectangleGeometry (new sw.Rect (x, y, width, height)));
			// A rectangle is self-contained, so any open figure is ended.
			figure = null;
		}
		public void LineTo (float x, float y)
		{
			ConnectTo (new sw.Point (x, y));
			CurrentPoint = new PointF (x, y);
		}
		public void MoveTo (float x, float y)
		{
			// Always begins a new figure at the given point.
			ConnectTo (new sw.Point (x, y), startNewFigure: true);
			CurrentPoint = new PointF (x, y);
		}
		public void AddArc (float x, float y, float width, float height, float startAngle, float sweepAngle)
		{
			// degrees to radians conversion
			double startRadians = startAngle * Math.PI / 180.0;
			double sweepRadians = sweepAngle * Math.PI / 180.0;
			// x and y radius
			double dx = width / 2;
			double dy = height / 2;
			// determine the start point
			double xs = x + dx + (Math.Cos (startRadians) * dx);
			double ys = y + dy + (Math.Sin (startRadians) * dy);
			// determine the end point
			double xe = x + dx + (Math.Cos (startRadians + sweepRadians) * dx);
			double ye = y + dy + (Math.Sin (startRadians + sweepRadians) * dy);
			bool isLargeArc = Math.Abs (sweepAngle) > 180;
			var sweepDirection = sweepAngle < 0 ? swm.SweepDirection.Counterclockwise : swm.SweepDirection.Clockwise;
			ConnectTo (new sw.Point (xs, ys));
			figure.Segments.Add (new swm.ArcSegment (new sw.Point (xe, ye), new sw.Size (dx, dy), 0, isLargeArc, sweepDirection, true));
			CurrentPoint = new PointF ((float)xe, (float)ye);
		}
		public void AddBezier (PointF start, PointF control1, PointF control2, PointF end)
		{
			ConnectTo (start.ToWpf ());
			figure.Segments.Add (new swm.BezierSegment (control1.ToWpf (), control2.ToWpf (), end.ToWpf (), true));
			CurrentPoint = end;
		}
		public void AddPath (IGraphicsPath path, bool connect = false)
		{
			if (path.IsEmpty)
				return;
			var wpfPath = path.ToWpf ();
			// Bake any transform into the figures so they can be copied directly.
			if (!wpfPath.Transform.Value.IsIdentity) {
				var newpath = new swm.PathGeometry ();
				newpath.AddGeometry (wpfPath);
				wpfPath = newpath;
			}
			var en = wpfPath.Figures.GetEnumerator ();
			if (connect) {
				// merge current figure (if any) and first figure of new path, if they are not closed paths
				if (figure != null && !figure.IsClosed && en.MoveNext ()) {
					var firstFigure = en.Current;
					if (!firstFigure.IsClosed) {
						figure.Segments.Add (new swm.LineSegment (firstFigure.StartPoint, true));
						foreach (var seg in firstFigure.Segments)
							figure.Segments.Add (seg);
					} else {
						Control.Figures.Add (firstFigure);
					}
				}
			}
			// Copy the remaining figures verbatim.
			swm.PathFigure pathFigure = null;
			while (en.MoveNext ()) {
				pathFigure = en.Current;
				Control.Figures.Add (pathFigure);
			}
			// continue with last figure of new path if not closed
			if (pathFigure != null && !pathFigure.IsClosed)
				figure = pathFigure;
			else
				figure = null;
		}
		public RectangleF Bounds
		{
			get { return Control.Bounds.ToEto (); }
		}
		public void Transform (IMatrix matrix)
		{
			// Compose with any existing transform (new matrix multiplied on the left),
			// otherwise install the matrix as the geometry's transform.
			if (Control.Transform != null)
				Control.Transform = new swm.MatrixTransform (swm.Matrix.Multiply (matrix.ToWpf (), Control.Transform.Value));
			else
				Control.Transform = matrix.ToWpfTransform ();
		}
		public void AddEllipse (float x, float y, float width, float height)
		{
			Control.AddGeometry (new swm.EllipseGeometry (new sw.Rect (x, y, width, height)));
			// An ellipse is self-contained, so any open figure is ended.
			figure = null;
		}
		public void AddCurve (IEnumerable<PointF> points, float tension = 0.5f)
		{
			// Convert the spline control points into bezier segments.
			points = SplineHelper.SplineCurve (points, tension);
			var swpoints = (from p in points select p.ToWpf ()).ToArray();
			ConnectTo (swpoints.First ());
			figure.Segments.Add (new swm.PolyBezierSegment (swpoints, true));
			CurrentPoint = swpoints.Last ().ToEto ();
		}
		public object ControlObject
		{
			get { return Control; }
		}
		public void Dispose ()
		{
			// Nothing to release; the PathGeometry is managed.
		}
		public IGraphicsPath Clone ()
		{
			return new GraphicsPathHandler (Control.Clone ());
		}
		public FillMode FillMode
		{
			set { Control.FillRule = value == FillMode.Alternate ? swm.FillRule.EvenOdd : swm.FillRule.Nonzero; }
			get { return Control.FillRule == swm.FillRule.EvenOdd ? FillMode.Alternate : FillMode.Winding; }
		}
	}
}
| |
//-----------------------------------------------------------------------
// <copyright file="ActorMaterializer.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Runtime.Serialization;
using Akka.Actor;
using Akka.Configuration;
using Akka.Dispatch;
using Akka.Event;
using Akka.Pattern;
using Akka.Streams.Dsl;
using Akka.Streams.Dsl.Internal;
using Akka.Streams.Implementation;
using Akka.Streams.Supervision;
using Akka.Util;
using Reactive.Streams;
using Decider = Akka.Streams.Supervision.Decider;
namespace Akka.Streams
{
    /// <summary>
    /// An ActorMaterializer takes the list of transformations comprising a
    /// <see cref="IFlow{TOut,TMat}"/> and materializes them in the form of
    /// <see cref="IProcessor{T1,T2}"/> instances. How transformation
    /// steps are split up into asynchronous regions is implementation
    /// dependent.
    /// </summary>
    public abstract class ActorMaterializer : IMaterializer, IMaterializerLoggingProvider, IDisposable
    {
        /// <summary>
        /// Loads the default materializer configuration from the embedded
        /// "Akka.Streams.reference.conf" resource.
        /// </summary>
        /// <returns>The default materializer <see cref="Config"/>.</returns>
        public static Config DefaultConfig()
            => ConfigurationFactory.FromResource<ActorMaterializer>("Akka.Streams.reference.conf");
        #region static
        /// <summary>
        /// <para>
        /// Creates a ActorMaterializer which will execute every step of a transformation
        /// pipeline within its own <see cref="ActorBase"/>. The required <see cref="IActorRefFactory"/>
        /// (which can be either an <see cref="ActorSystem"/> or an <see cref="IActorContext"/>)
        /// will be used to create one actor that in turn creates actors for the transformation steps.
        /// </para>
        /// <para>
        /// The materializer's <see cref="ActorMaterializerSettings"/> will be obtained from the
        /// configuration of the <paramref name="context"/>'s underlying <see cref="ActorSystem"/>.
        /// </para>
        /// <para>
        /// The <paramref name="namePrefix"/> is used as the first part of the names of the actors running
        /// the processing steps. The default <paramref name="namePrefix"/> is "Flow". The actor names are built up of
        /// `namePrefix-flowNumber-flowStepNumber-stepName`.
        /// </para>
        /// </summary>
        /// <param name="context">The actor system or actor context used to create the stream supervisor actor.</param>
        /// <param name="settings">Optional materializer settings; when null they are read from the actor system's configuration.</param>
        /// <param name="namePrefix">Optional prefix for the names of the stream actors; defaults to "Flow".</param>
        /// <exception cref="ArgumentException">
        /// This exception is thrown when the specified <paramref name="context"/> is not of type <see cref="ActorSystem"/> or <see cref="IActorContext"/>.
        /// </exception>
        /// <exception cref="ArgumentNullException">
        /// This exception is thrown when the specified <paramref name="context"/> is undefined.
        /// </exception>
        /// <returns>A new <see cref="ActorMaterializerImpl"/> bound to the context's actor system.</returns>
        public static ActorMaterializer Create(IActorRefFactory context, ActorMaterializerSettings settings = null, string namePrefix = null)
        {
            var haveShutDown = new AtomicBoolean();
            var system = ActorSystemOf(context);
            // Make sure the stream defaults exist even if the user config omits them.
            system.Settings.InjectTopLevelFallback(DefaultConfig());
            settings = settings ?? ActorMaterializerSettings.Create(system);
            return new ActorMaterializerImpl(
                system: system,
                settings: settings,
                dispatchers: system.Dispatchers,
                supervisor: context.ActorOf(StreamSupervisor.Props(settings, haveShutDown).WithDispatcher(settings.Dispatcher), StreamSupervisor.NextName()),
                haveShutDown: haveShutDown,
                flowNames: EnumerableActorName.Create(namePrefix ?? "Flow"));
        }
        // Resolves the ActorSystem behind an IActorRefFactory (either the system
        // itself or the system of an actor context); throws otherwise.
        private static ActorSystem ActorSystemOf(IActorRefFactory context)
        {
            if (context is ExtendedActorSystem)
                return (ActorSystem)context;
            if (context is IActorContext)
                return ((IActorContext)context).System;
            if (context == null)
                throw new ArgumentNullException(nameof(context), "IActorRefFactory must be defined");
            throw new ArgumentException($"ActorRefFactory context must be a ActorSystem or ActorContext, got [{context.GetType()}]");
        }
        #endregion
        /// <summary>
        /// The settings this materializer operates with.
        /// </summary>
        public abstract ActorMaterializerSettings Settings { get; }
        /// <summary>
        /// Indicates if the materializer has been shut down.
        /// </summary>
        public abstract bool IsShutdown { get; }
        /// <summary>
        /// The message dispatcher used as the execution context for stream processing.
        /// </summary>
        public abstract MessageDispatcher ExecutionContext { get; }
        /// <summary>
        /// The actor system this materializer is associated with.
        /// </summary>
        public abstract ActorSystem System { get; }
        /// <summary>
        /// The logging adapter of this materializer.
        /// </summary>
        public abstract ILoggingAdapter Logger { get; }
        /// <summary>
        /// The supervisor actor for the stream actors created by this materializer
        /// (see <see cref="Create"/>).
        /// </summary>
        public abstract IActorRef Supervisor { get; }
        /// <summary>
        /// Creates a materializer that names the actors of materialized streams
        /// using the given <paramref name="namePrefix"/>.
        /// </summary>
        /// <param name="namePrefix">The prefix for the stream actor names.</param>
        /// <returns>A materializer using the given name prefix.</returns>
        public abstract IMaterializer WithNamePrefix(string namePrefix);
        /// <inheritdoc />
        public abstract TMat Materialize<TMat>(IGraph<ClosedShape, TMat> runnable);
        /// <inheritdoc />
        public abstract TMat Materialize<TMat>(IGraph<ClosedShape, TMat> runnable, Attributes initialAttributes);
        /// <summary>
        /// Schedules <paramref name="action"/> to run once after <paramref name="delay"/>.
        /// </summary>
        /// <param name="delay">Time to wait before running the action.</param>
        /// <param name="action">The action to run.</param>
        /// <returns>A handle that can cancel the scheduled action.</returns>
        public abstract ICancelable ScheduleOnce(TimeSpan delay, Action action);
        /// <summary>
        /// Schedules <paramref name="action"/> to run repeatedly at the given
        /// <paramref name="interval"/>, starting after <paramref name="initialDelay"/>.
        /// </summary>
        /// <param name="initialDelay">Time to wait before the first run.</param>
        /// <param name="interval">Time between subsequent runs.</param>
        /// <param name="action">The action to run.</param>
        /// <returns>A handle that can cancel the scheduled action.</returns>
        public abstract ICancelable ScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action);
        /// <summary>
        /// Resolves the settings that apply once the given stage
        /// <paramref name="attributes"/> are taken into account.
        /// </summary>
        /// <param name="attributes">The attributes overriding the defaults.</param>
        /// <returns>The effective materializer settings.</returns>
        public abstract ActorMaterializerSettings EffectiveSettings(Attributes attributes);
        /// <summary>
        /// Shuts down this materializer and all the stages that have been materialized through this materializer. After
        /// having shut down, this materializer cannot be used again. Any attempt to materialize stages after having
        /// shut down will result in an <see cref="IllegalStateException"/> being thrown at materialization time.
        /// </summary>
        public abstract void Shutdown();
        /// <summary>
        /// Creates an actor backing a materialized stage, from the given
        /// <paramref name="props"/> within the supplied materialization
        /// <paramref name="context"/>.
        /// </summary>
        /// <param name="context">The materialization context of the stage.</param>
        /// <param name="props">The props of the actor to create.</param>
        /// <returns>A reference to the created actor.</returns>
        public abstract IActorRef ActorOf(MaterializationContext context, Props props);
        /// <summary>
        /// Creates a new logging adapter.
        /// </summary>
        /// <param name="logSource">The source that produces the log events.</param>
        /// <returns>The newly created logging adapter.</returns>
        public abstract ILoggingAdapter MakeLogger(object logSource);
        /// <inheritdoc/>
        public void Dispose() => Shutdown();
    }
/// <summary>
/// INTERNAL API
/// </summary>
internal static class ActorMaterializerHelper
{
/// <summary>
/// TBD
/// </summary>
/// <param name="materializer">TBD</param>
/// <exception cref="ArgumentException">
/// This exception is thrown when the specified <paramref name="materializer"/> is not of type <see cref="ActorMaterializer"/>.
/// </exception>
/// <returns>TBD</returns>
internal static ActorMaterializer Downcast(IMaterializer materializer)
{
//FIXME this method is going to cause trouble for other Materializer implementations
var downcast = materializer as ActorMaterializer;
if (downcast != null)
return downcast;
throw new ArgumentException($"Expected {typeof(ActorMaterializer)} but got {materializer.GetType()}");
}
}
    /// <summary>
    /// This exception signals that an actor implementing a Reactive Streams Subscriber, Publisher or Processor
    /// has been terminated without being notified by an onError, onComplete or cancel signal. This usually happens
    /// when an ActorSystem is shut down while stream processing actors are still running.
    /// </summary>
    [Serializable]
    public class AbruptTerminationException : Exception
    {
        /// <summary>
        /// The actor that was terminated without notification.
        /// </summary>
        public readonly IActorRef Actor;
        /// <summary>
        /// Initializes a new instance of the <see cref="AbruptTerminationException" /> class.
        /// </summary>
        /// <param name="actor">The actor that was terminated.</param>
        public AbruptTerminationException(IActorRef actor)
            : base($"Processor actor [{actor}] terminated abruptly")
        {
            Actor = actor;
        }
#if SERIALIZATION
        /// <summary>
        /// Initializes a new instance of the <see cref="AbruptTerminationException" /> class.
        /// </summary>
        /// <param name="info">The <see cref="SerializationInfo"/> that holds the serialized object data about the exception being thrown.</param>
        /// <param name="context">The <see cref="StreamingContext"/> that contains contextual information about the source or destination.</param>
        protected AbruptTerminationException(SerializationInfo info, StreamingContext context) : base(info, context)
        {
            // NOTE(review): GetObjectData is not overridden here, so the "Actor"
            // entry may be missing from the serialized payload — confirm.
            Actor = (IActorRef)info.GetValue("Actor", typeof(IActorRef));
        }
#endif
    }
    /// <summary>
    /// This exception or subtypes thereof should be used to signal materialization failures.
    /// </summary>
    public class MaterializationException : Exception
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="MaterializationException"/> class.
        /// </summary>
        /// <param name="message">The message that describes the error.</param>
        /// <param name="innerException">The exception that is the cause of the current exception.</param>
        public MaterializationException(string message, Exception innerException) : base(message, innerException) { }
#if SERIALIZATION
        /// <summary>
        /// Initializes a new instance of the <see cref="MaterializationException"/> class from serialized data.
        /// </summary>
        /// <param name="info">The <see cref="SerializationInfo" /> that holds the serialized object data about the exception being thrown.</param>
        /// <param name="context">The <see cref="StreamingContext" /> that contains contextual information about the source or destination.</param>
        protected MaterializationException(SerializationInfo info, StreamingContext context) : base(info, context) { }
#endif
    }
/// <summary>
/// This class describes the configurable properties of the <see cref="ActorMaterializer"/>.
/// Please refer to the withX methods for descriptions of the individual settings.
/// </summary>
public sealed class ActorMaterializerSettings
{
/// <summary>
/// TBD
/// </summary>
/// <param name="system">TBD</param>
/// <returns>TBD</returns>
public static ActorMaterializerSettings Create(ActorSystem system)
{
var config = system.Settings.Config.GetConfig("akka.stream.materializer");
return Create(config ?? Config.Empty);
}
private static ActorMaterializerSettings Create(Config config)
{
return new ActorMaterializerSettings(
initialInputBufferSize: config.GetInt("initial-input-buffer-size", 4),
maxInputBufferSize: config.GetInt("max-input-buffer-size", 16),
dispatcher: config.GetString("dispatcher", string.Empty),
supervisionDecider: Deciders.StoppingDecider,
subscriptionTimeoutSettings: StreamSubscriptionTimeoutSettings.Create(config),
isDebugLogging: config.GetBoolean("debug-logging"),
outputBurstLimit: config.GetInt("output-burst-limit", 1000),
isFuzzingMode: config.GetBoolean("debug.fuzzing-mode"),
isAutoFusing: config.GetBoolean("auto-fusing", true),
maxFixedBufferSize: config.GetInt("max-fixed-buffer-size", 1000000000),
syncProcessingLimit: config.GetInt("sync-processing-limit", 1000));
}
private const int DefaultlMaxFixedbufferSize = 1000;
/// <summary>
/// TBD
/// </summary>
public readonly int InitialInputBufferSize;
/// <summary>
/// TBD
/// </summary>
public readonly int MaxInputBufferSize;
/// <summary>
/// TBD
/// </summary>
public readonly string Dispatcher;
/// <summary>
/// TBD
/// </summary>
public readonly Decider SupervisionDecider;
/// <summary>
/// TBD
/// </summary>
public readonly StreamSubscriptionTimeoutSettings SubscriptionTimeoutSettings;
/// <summary>
/// TBD
/// </summary>
public readonly bool IsDebugLogging;
/// <summary>
/// TBD
/// </summary>
public readonly int OutputBurstLimit;
/// <summary>
/// TBD
/// </summary>
public readonly bool IsFuzzingMode;
/// <summary>
/// TBD
/// </summary>
public readonly bool IsAutoFusing;
/// <summary>
/// TBD
/// </summary>
public readonly int MaxFixedBufferSize;
/// <summary>
/// TBD
/// </summary>
public readonly int SyncProcessingLimit;
/// <summary>
/// TBD
/// </summary>
/// <param name="initialInputBufferSize">TBD</param>
/// <param name="maxInputBufferSize">TBD</param>
/// <param name="dispatcher">TBD</param>
/// <param name="supervisionDecider">TBD</param>
/// <param name="subscriptionTimeoutSettings">TBD</param>
/// <param name="isDebugLogging">TBD</param>
/// <param name="outputBurstLimit">TBD</param>
/// <param name="isFuzzingMode">TBD</param>
/// <param name="isAutoFusing">TBD</param>
/// <param name="maxFixedBufferSize">TBD</param>
/// <param name="syncProcessingLimit">TBD</param>
public ActorMaterializerSettings(int initialInputBufferSize, int maxInputBufferSize, string dispatcher, Decider supervisionDecider, StreamSubscriptionTimeoutSettings subscriptionTimeoutSettings, bool isDebugLogging, int outputBurstLimit, bool isFuzzingMode, bool isAutoFusing, int maxFixedBufferSize, int syncProcessingLimit = DefaultlMaxFixedbufferSize)
{
InitialInputBufferSize = initialInputBufferSize;
MaxInputBufferSize = maxInputBufferSize;
Dispatcher = dispatcher;
SupervisionDecider = supervisionDecider;
SubscriptionTimeoutSettings = subscriptionTimeoutSettings;
IsDebugLogging = isDebugLogging;
OutputBurstLimit = outputBurstLimit;
IsFuzzingMode = isFuzzingMode;
IsAutoFusing = isAutoFusing;
MaxFixedBufferSize = maxFixedBufferSize;
SyncProcessingLimit = syncProcessingLimit;
}
/// <summary>
/// TBD
/// </summary>
/// <param name="initialSize">TBD</param>
/// <param name="maxSize">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithInputBuffer(int initialSize, int maxSize)
{
return new ActorMaterializerSettings(initialSize, maxSize, Dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="dispatcher">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithDispatcher(string dispatcher)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="decider">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithSupervisionStrategy(Decider decider)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, decider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="isEnabled">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithDebugLogging(bool isEnabled)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, isEnabled, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="isFuzzingMode">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithFuzzingMode(bool isFuzzingMode)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, isFuzzingMode, IsAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="isAutoFusing">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithAutoFusing(bool isAutoFusing)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, isAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="maxFixedBufferSize">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithMaxFixedBufferSize(int maxFixedBufferSize)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, maxFixedBufferSize, SyncProcessingLimit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="limit">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithSyncProcessingLimit(int limit)
{
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, SupervisionDecider, SubscriptionTimeoutSettings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, MaxFixedBufferSize, limit);
}
/// <summary>
/// TBD
/// </summary>
/// <param name="settings">TBD</param>
/// <returns>TBD</returns>
public ActorMaterializerSettings WithSubscriptionTimeoutSettings(StreamSubscriptionTimeoutSettings settings)
{
if (Equals(settings, SubscriptionTimeoutSettings))
return this;
return new ActorMaterializerSettings(InitialInputBufferSize, MaxInputBufferSize, Dispatcher, SupervisionDecider, settings, IsDebugLogging, OutputBurstLimit, IsFuzzingMode, IsAutoFusing, MaxFixedBufferSize, SyncProcessingLimit);
}
}
/// <summary>
/// Leaked publishers and subscribers are cleaned up when they are not used within a given deadline, configured by <see cref="StreamSubscriptionTimeoutSettings"/>.
/// </summary>
/// <summary>
/// Settings that control what happens to substream publishers whose
/// subscription is not established within <see cref="Timeout"/>.
/// </summary>
public sealed class StreamSubscriptionTimeoutSettings : IEquatable<StreamSubscriptionTimeoutSettings>
{
    /// <summary>
    /// Creates settings from the "subscription-timeout" section of the supplied
    /// materializer <paramref name="config"/>. The mode defaults to "cancel" and
    /// the timeout defaults to 5 seconds when not configured.
    /// </summary>
    /// <param name="config">The materializer configuration section to read from.</param>
    /// <exception cref="ArgumentException">
    /// Thrown when the configured mode is not one of: no, off, false, noop, warn, cancel.
    /// </exception>
    /// <returns>The parsed <see cref="StreamSubscriptionTimeoutSettings"/>.</returns>
    public static StreamSubscriptionTimeoutSettings Create(Config config)
    {
        var c = config.GetConfig("subscription-timeout") ?? Config.Empty;
        var configMode = c.GetString("mode", "cancel").ToLowerInvariant();
        StreamSubscriptionTimeoutTerminationMode mode;
        switch (configMode)
        {
            case "no": case "off": case "false": case "noop": mode = StreamSubscriptionTimeoutTerminationMode.NoopTermination; break;
            case "warn": mode = StreamSubscriptionTimeoutTerminationMode.WarnTermination; break;
            case "cancel": mode = StreamSubscriptionTimeoutTerminationMode.CancelTermination; break;
            // BUGFIX: the message previously misspelled the config path as "subscribtion-timeout",
            // which did not match the "subscription-timeout" key actually read above.
            default: throw new ArgumentException("akka.stream.materializer.subscription-timeout.mode was not defined or has invalid value. Valid values are: no, off, false, noop, warn, cancel");
        }

        return new StreamSubscriptionTimeoutSettings(
            mode: mode,
            timeout: c.GetTimeSpan("timeout", TimeSpan.FromSeconds(5)));
    }

    /// <summary>
    /// The termination mode applied once the subscription timeout expires.
    /// </summary>
    public readonly StreamSubscriptionTimeoutTerminationMode Mode;

    /// <summary>
    /// How long to wait for a subscription before <see cref="Mode"/> is applied.
    /// </summary>
    public readonly TimeSpan Timeout;

    /// <summary>
    /// Creates subscription-timeout settings with an explicit mode and timeout.
    /// </summary>
    /// <param name="mode">The termination mode to apply on timeout.</param>
    /// <param name="timeout">The subscription timeout duration.</param>
    public StreamSubscriptionTimeoutSettings(StreamSubscriptionTimeoutTerminationMode mode, TimeSpan timeout)
    {
        Mode = mode;
        Timeout = timeout;
    }

    /// <inheritdoc/>
    public override bool Equals(object obj)
    {
        if (ReferenceEquals(obj, this))
            return true;
        return Equals(obj as StreamSubscriptionTimeoutSettings);
    }

    /// <inheritdoc/>
    public bool Equals(StreamSubscriptionTimeoutSettings other)
    {
        // BUGFIX: IEquatable<T>.Equals must return false for null; the previous
        // implementation dereferenced 'other' and threw NullReferenceException.
        if (ReferenceEquals(other, null))
            return false;
        return Mode == other.Mode && Timeout.Equals(other.Timeout);
    }

    /// <inheritdoc/>
    public override int GetHashCode()
    {
        unchecked
        {
            return ((int)Mode * 397) ^ Timeout.GetHashCode();
        }
    }

    /// <inheritdoc/>
    public override string ToString() => $"StreamSubscriptionTimeoutSettings<{Mode}, {Timeout}>";
}
/// <summary>
/// This mode describes what shall happen when the subscription timeout expires
/// for substream Publishers created by operations like <see cref="InternalFlowOperations.PrefixAndTail{T,TMat}"/>.
/// </summary>
public enum StreamSubscriptionTimeoutTerminationMode
{
    /// <summary>
    /// Do not do anything when timeout expires.
    /// Configured via the mode values "no", "off", "false" or "noop".
    /// </summary>
    NoopTermination,
    /// <summary>
    /// Log a warning when the timeout expires.
    /// Configured via the mode value "warn".
    /// </summary>
    WarnTermination,
    /// <summary>
    /// When the timeout expires attach a Subscriber that will immediately cancel its subscription.
    /// This is the default; configured via the mode value "cancel".
    /// </summary>
    CancelTermination
}
/// <summary>
/// Convenience extensions for creating an <see cref="ActorMaterializer"/>
/// directly from an <see cref="IActorRefFactory"/>.
/// </summary>
public static class ActorMaterializerExtensions
{
    /// <summary>
    /// <para>
    /// Creates an ActorMaterializer which will execute every step of a transformation
    /// pipeline within its own <see cref="ActorBase"/>. The <paramref name="context"/>
    /// (either an <see cref="ActorSystem"/> or an <see cref="IActorContext"/>) is used
    /// to create one actor that in turn creates actors for the transformation steps.
    /// </para>
    /// <para>
    /// When <paramref name="settings"/> is null, the materializer's
    /// <see cref="ActorMaterializerSettings"/> are obtained from the configuration
    /// of the <paramref name="context"/>'s underlying <see cref="ActorSystem"/>.
    /// </para>
    /// <para>
    /// <paramref name="namePrefix"/> forms the first part of the names of the actors
    /// running the processing steps (default "flow"); actor names are built up as
    /// namePrefix-flowNumber-flowStepNumber-stepName.
    /// </para>
    /// </summary>
    /// <param name="context">The actor-ref factory to create the materializer guardian in.</param>
    /// <param name="settings">Optional materializer settings; resolved from config when null.</param>
    /// <param name="namePrefix">Optional prefix for the stream actors' names.</param>
    /// <returns>A new <see cref="ActorMaterializer"/> instance.</returns>
    public static ActorMaterializer Materializer(this IActorRefFactory context, ActorMaterializerSettings settings = null, string namePrefix = null)
    {
        return ActorMaterializer.Create(context, settings, namePrefix);
    }
}
}
| |
//-----------------------------------------------------------------------
// Copyright (c) Microsoft Open Technologies, Inc.
// All Rights Reserved
// Apache License 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//-----------------------------------------------------------------------
using Microsoft.IdentityModel.Protocols;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.IdentityModel.Tokens;
using System.Reflection;
using System.Security.Claims;
using System.Text;
using System.Threading;
namespace Microsoft.IdentityModel.Test
{
/// <summary>
/// Tests for <see cref="OpenIdConnectProtocolValidator"/>: nonce generation,
/// property get/set defaults, end-to-end Validate, c_hash validation and nonce validation.
/// </summary>
[TestClass]
public class OpenIdConnectProtocolValidatorTests
{
    public TestContext TestContext { get; set; }

    [ClassInitialize]
    public static void ClassSetup(TestContext testContext)
    {
    }

    [ClassCleanup]
    public static void ClassCleanup()
    {
    }

    [TestInitialize]
    public void Initialize()
    {
    }

    [TestMethod]
    [TestProperty("TestCaseID", "CA77926E-BE0E-4483-8A70-F96E5D651D3F")]
    [Description("Tests: GenerateNonce")]
    public void OpenIdConnectProtocolValidator_GenerateNonce()
    {
        List<string> errors = new List<string>();
        OpenIdConnectProtocolValidator protocolValidator = new OpenIdConnectProtocolValidator();
        string nonce = protocolValidator.GenerateNonce();
        int endOfTimestamp = nonce.IndexOf('.');
        if (endOfTimestamp == -1)
        {
            errors.Add("nonce does not have '.' separator");
        }
        else
        {
            // The prefix before the '.' is expected to be the creation time expressed as ticks.
            long ticks;
            string timestamp = nonce.Substring(0, endOfTimestamp);
            if (!Int64.TryParse(timestamp, out ticks))
            {
                errors.Add("timestamp prefix of nonce is not a valid Int64: '" + timestamp + "'");
            }
            else if (ticks <= 0)
            {
                errors.Add("timestamp prefix of nonce is not positive: '" + ticks + "'");
            }
        }

        // BUGFIX: the collected errors were never asserted, so this test could never fail.
        TestUtilities.AssertFailIfErrors(MethodInfo.GetCurrentMethod().Name, errors);
    }

    [TestMethod]
    [TestProperty("TestCaseID", "4696852e-94e7-4c2b-a768-ce6e7f16e80d")]
    [Description("Tests: GetSets, test covers defaults")]
    public void OpenIdConnectProtocolValidator_GetSets()
    {
        OpenIdConnectProtocolValidator validationParameters = new OpenIdConnectProtocolValidator();
        Type type = typeof(OpenIdConnectProtocolValidator);
        PropertyInfo[] properties = type.GetProperties();
        if (properties.Length != 9)
            Assert.Fail("Number of properties has changed from 9 to: " + properties.Length + ", adjust tests");

        // Exercise each settable property with several values and record any failures.
        GetSetContext context =
            new GetSetContext
            {
                PropertyNamesAndSetGetValue = new List<KeyValuePair<string, List<object>>>
                {
                    new KeyValuePair<string, List<object>>("NonceLifetime", new List<object>{TimeSpan.FromMinutes(60), TimeSpan.FromMinutes(10), TimeSpan.FromMinutes(100)}),
                    new KeyValuePair<string, List<object>>("RequireAcr", new List<object>{false, true, false}),
                    new KeyValuePair<string, List<object>>("RequireAmr", new List<object>{false, true, false}),
                    new KeyValuePair<string, List<object>>("RequireAuthTime", new List<object>{false, true, false}),
                    new KeyValuePair<string, List<object>>("RequireAzp", new List<object>{false, true, false}),
                    new KeyValuePair<string, List<object>>("RequireNonce", new List<object>{true, false, true}),
                    new KeyValuePair<string, List<object>>("RequireSub", new List<object>{false, true, false}),
                    new KeyValuePair<string, List<object>>("RequireTimeStampInNonce", new List<object>{true, false, true}),
                },
                Object = validationParameters,
            };

        TestUtilities.GetSet(context);
        TestUtilities.AssertFailIfErrors(MethodInfo.GetCurrentMethod().Name, context.Errors);

        Assert.IsNotNull(validationParameters.HashAlgorithmMap);
        Assert.AreEqual(validationParameters.HashAlgorithmMap.Count, 9);

        // NonceLifetime must be positive; TimeSpan.Zero is rejected.
        // (Removed a dead initial assignment of 'ee' to ArgumentNullException that was never used.)
        ExpectedException ee = ExpectedException.ArgumentOutOfRangeException();
        try
        {
            validationParameters.NonceLifetime = TimeSpan.Zero;
            ee.ProcessNoException();
        }
        catch (Exception ex)
        {
            ee.ProcessException(ex);
        }
    }

    [TestMethod]
    [TestProperty("TestCaseID", "e905d825-a3ff-4461-a5a4-46d842d0c4ba")]
    [Description("Tests: Validate")]
    public void OpenIdConnectProtocolValidator_Validate()
    {
        JwtSecurityToken jwt = new JwtSecurityToken();
        OpenIdConnectProtocolValidationContext validationContext = new OpenIdConnectProtocolValidationContext();
        OpenIdConnectProtocolValidator protocolValidator = new OpenIdConnectProtocolValidator();

        // jwt null
        Validate(jwt: null, protocolValidator: protocolValidator, validationContext: null, ee: ExpectedException.ArgumentNullException());

        // validationContext null
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: null, ee: ExpectedException.ArgumentNullException());

        // aud missing
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10309:"));

        // exp missing
        jwt.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Aud, IdentityUtilities.DefaultAudience));
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10309:"));

        // iat missing
        jwt.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Exp, EpochTime.GetIntDate(DateTime.UtcNow).ToString()));
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10309:"));

        // iss missing
        jwt.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Iat, EpochTime.GetIntDate(DateTime.UtcNow).ToString()));
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10309:"));

        // add iss, nonce is not required.
        protocolValidator.RequireNonce = false;
        jwt.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Iss, IdentityUtilities.DefaultIssuer));
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        // nonce invalid
        string validNonce = protocolValidator.GenerateNonce();

        // add the valid 'nonce' but set validationContext.Nonce to a different 'nonce'.
        protocolValidator.RequireNonce = true;
        jwt.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Nonce, validNonce));
        validationContext.Nonce = protocolValidator.GenerateNonce();
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10301:"));

        // sub missing, default not required
        validationContext.Nonce = validNonce;
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        protocolValidator.RequireSub = true;
        Validate(jwt: jwt, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10309:"));

        // authorizationCode invalid
        string validAuthorizationCode = protocolValidator.GenerateNonce();
        string validChash = IdentityUtilities.CreateCHash(validAuthorizationCode, "SHA256");

        JwtSecurityToken jwtWithSignatureChash =
            new JwtSecurityToken
            (
                audience: IdentityUtilities.DefaultAudience,
                claims: new List<Claim>
                {
                    new Claim(JwtRegisteredClaimNames.CHash, validChash),
                    new Claim(JwtRegisteredClaimNames.Iat, EpochTime.GetIntDate(DateTime.UtcNow).ToString()),
                    new Claim(JwtRegisteredClaimNames.Nonce, validNonce),
                    new Claim(JwtRegisteredClaimNames.Sub, "sub"),
                },
                expires: DateTime.UtcNow + TimeSpan.FromHours(1),
                issuer: IdentityUtilities.DefaultIssuer,
                signingCredentials: IdentityUtilities.DefaultAsymmetricSigningCredentials
            );

        // Remap the algorithm table so only RSA-SHA256 -> SHA256 is known.
        Dictionary<string,string> algmap = new Dictionary<string,string>(protocolValidator.HashAlgorithmMap);
        protocolValidator.HashAlgorithmMap.Clear();
        protocolValidator.HashAlgorithmMap.Add(JwtAlgorithms.RSA_SHA256, "SHA256");

        validationContext.Nonce = validNonce;
        validationContext.AuthorizationCode = validNonce;
        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10304:"));

        // nonce and authorizationCode valid
        validationContext.AuthorizationCode = validAuthorizationCode;
        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        // validate optional claims
        protocolValidator.RequireAcr = true;
        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10312:"));
        jwtWithSignatureChash.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Acr, "acr"));

        protocolValidator.RequireAmr = true;
        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10313:"));
        jwtWithSignatureChash.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Amr, "amr"));

        protocolValidator.RequireAuthTime = true;
        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10314:"));
        jwtWithSignatureChash.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.AuthTime, "authTime"));

        protocolValidator.RequireAzp = true;
        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolException), substringExpected: "IDX10315:"));
        jwtWithSignatureChash.Payload.AddClaim(new Claim(JwtRegisteredClaimNames.Azp, "azp"));

        Validate(jwt: jwtWithSignatureChash, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);
    }

    // Runs protocolValidator.Validate and checks the outcome against the expected exception.
    public void Validate(JwtSecurityToken jwt, OpenIdConnectProtocolValidator protocolValidator, OpenIdConnectProtocolValidationContext validationContext, ExpectedException ee)
    {
        try
        {
            protocolValidator.Validate(jwt, validationContext);
            ee.ProcessNoException();
        }
        catch (Exception ex)
        {
            ee.ProcessException(ex);
        }
    }

    [TestMethod]
    [TestProperty("TestCaseID", "9a082558-f87e-4ae0-be80-852fbcf869d4")]
    [Description("Tests: Validation of CHash")]
    public void OpenIdConnectProtocolValidator_CHash()
    {
        PublicOpenIdConnectProtocolValidator protocolValidator = new PublicOpenIdConnectProtocolValidator();

        string authorizationCode1 = protocolValidator.GenerateNonce();
        string authorizationCode2 = protocolValidator.GenerateNonce();

        string chash1 = IdentityUtilities.CreateCHash(authorizationCode1, "SHA256");
        string chash2 = IdentityUtilities.CreateCHash(authorizationCode2, "SHA256");

        Dictionary<string, string> emptyDictionary = new Dictionary<string, string>();
        Dictionary<string, string> mappedDictionary = new Dictionary<string, string>(protocolValidator.HashAlgorithmMap);

        JwtSecurityToken jwtWithCHash1 =
            new JwtSecurityToken
            (
                audience: IdentityUtilities.DefaultAudience,
                claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.CHash, chash1) },
                issuer: IdentityUtilities.DefaultIssuer
            );

        JwtSecurityToken jwtWithEmptyCHash =
            new JwtSecurityToken
            (
                audience: IdentityUtilities.DefaultAudience,
                claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.CHash, string.Empty) },
                issuer: IdentityUtilities.DefaultIssuer,
                signingCredentials: IdentityUtilities.DefaultAsymmetricSigningCredentials
            );

        JwtSecurityToken jwtWithoutCHash =
            new JwtSecurityToken
            (
                audience: IdentityUtilities.DefaultAudience,
                claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, chash2) },
                issuer: IdentityUtilities.DefaultIssuer
            );

        JwtSecurityToken jwtWithSignatureChash1 =
            new JwtSecurityToken
            (
                audience : IdentityUtilities.DefaultAudience,
                claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.CHash, chash1) },
                issuer: IdentityUtilities.DefaultIssuer,
                signingCredentials : IdentityUtilities.DefaultAsymmetricSigningCredentials
            );

        JwtSecurityToken jwtWithSignatureMultipleChashes =
            new JwtSecurityToken
            (
                audience: IdentityUtilities.DefaultAudience,
                claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.CHash, chash1), new Claim(JwtRegisteredClaimNames.CHash, chash2) },
                issuer: IdentityUtilities.DefaultIssuer,
                signingCredentials: IdentityUtilities.DefaultAsymmetricSigningCredentials
            );

        OpenIdConnectProtocolValidationContext validationContext = new OpenIdConnectProtocolValidationContext();
        validationContext.AuthorizationCode = authorizationCode2;

        // two c_hash claims present: treated as an array, not a string
        ValidateCHash(jwt: jwtWithSignatureMultipleChashes, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10304:"));

        // chash doesn't match
        ValidateCHash(jwt: jwtWithSignatureChash1, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10304:"));

        // use algorithm map
        validationContext.AuthorizationCode = authorizationCode1;
        ValidateCHash(jwt: jwtWithSignatureChash1, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        // Creation of algorithm failed, need to map.
        protocolValidator.SetHashAlgorithmMap(emptyDictionary);
        ValidateCHash(jwt: jwtWithSignatureChash1, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10307:"));
        protocolValidator.SetHashAlgorithmMap(mappedDictionary);

        ValidateCHash(jwt: null, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.ArgumentNullException());
        ValidateCHash(jwt: jwtWithoutCHash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10308:"));
        ValidateCHash(jwt: jwtWithEmptyCHash, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10304:"));
        ValidateCHash(jwt: jwtWithCHash1, protocolValidator: protocolValidator, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidCHashException), substringExpected: "IDX10307:"));
        ValidateCHash(jwt: jwtWithoutCHash, protocolValidator: protocolValidator, validationContext: null, ee: ExpectedException.ArgumentNullException());

        // make sure default alg works.
        validationContext.AuthorizationCode = authorizationCode1;
        jwtWithCHash1.Header.Remove("alg");
        ValidateCHash(jwt: jwtWithCHash1, protocolValidator: protocolValidator, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);
    }

    // Runs the protected ValidateCHash (via the public test shim) and checks the outcome.
    // Parameter order aligned with Validate/ValidateNonce; all call sites use named arguments.
    private void ValidateCHash(JwtSecurityToken jwt, PublicOpenIdConnectProtocolValidator protocolValidator, OpenIdConnectProtocolValidationContext validationContext, ExpectedException ee)
    {
        try
        {
            protocolValidator.PublicValidateCHash(jwt, validationContext);
            ee.ProcessNoException();
        }
        catch(Exception ex)
        {
            ee.ProcessException(ex);
        }
    }

    [TestMethod]
    [TestProperty("TestCaseID", "9a082558-f87e-4ae0-be80-852fbcf869d4")]
    [Description("Tests: Validation of Nonce")]
    public void OpenIdConnectProtocolValidator_ValidateNonce()
    {
        PublicOpenIdConnectProtocolValidator protocolValidatorRequiresTimeStamp = new PublicOpenIdConnectProtocolValidator();
        string nonceWithTimeStamp = protocolValidatorRequiresTimeStamp.GenerateNonce();

        PublicOpenIdConnectProtocolValidator protocolValidatorDoesNotRequireTimeStamp =
            new PublicOpenIdConnectProtocolValidator
            {
                RequireTimeStampInNonce = false,
            };

        PublicOpenIdConnectProtocolValidator protocolValidatorDoesNotRequireNonce =
            new PublicOpenIdConnectProtocolValidator
            {
                RequireNonce = false,
            };

        string nonceWithoutTimeStamp = protocolValidatorDoesNotRequireTimeStamp.GenerateNonce();
        string nonceBadTimeStamp = "abc.abc";
        string nonceTicksTooLarge = Int64.MaxValue.ToString() + "." + nonceWithoutTimeStamp;
        string nonceTicksTooSmall = Int64.MinValue.ToString() + "." + nonceWithoutTimeStamp;
        string nonceTicksNegative = ((Int64)(-1)).ToString() + "." + nonceWithoutTimeStamp;
        string nonceTicksZero = ((Int64)(0)).ToString() + "." + nonceWithoutTimeStamp;

        JwtSecurityToken jwtWithNonceWithTimeStamp = new JwtSecurityToken ( claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceWithTimeStamp) });
        JwtSecurityToken jwtWithNonceWithoutTimeStamp = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceWithoutTimeStamp) });
        JwtSecurityToken jwtWithNonceWithBadTimeStamp = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceBadTimeStamp) });
        JwtSecurityToken jwtWithNonceTicksTooLarge = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceTicksTooLarge) });
        JwtSecurityToken jwtWithNonceTicksTooSmall = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceTicksTooSmall) });
        JwtSecurityToken jwtWithNonceTicksNegative = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceTicksNegative) });
        JwtSecurityToken jwtWithNonceZero = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, nonceTicksZero) });
        JwtSecurityToken jwtWithoutNonce = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.NameId, nonceWithTimeStamp) });
        JwtSecurityToken jwtWithNonceWhitespace = new JwtSecurityToken(claims: new List<Claim> { new Claim(JwtRegisteredClaimNames.Nonce, "") });

        OpenIdConnectProtocolValidationContext validationContext = new OpenIdConnectProtocolValidationContext();

        validationContext.Nonce = null;
        ValidateNonce(jwt: null, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: ExpectedException.ArgumentNullException());
        ValidateNonce(jwt: jwtWithNonceWithTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: null, ee: ExpectedException.ArgumentNullException());

        // nonce is null, RequireNonce is true.
        ValidateNonce(jwt: jwtWithNonceWithTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10311:"));

        validationContext.Nonce = nonceWithoutTimeStamp;
        ValidateNonce(jwt: jwtWithoutNonce, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10322:"));
        ValidateNonce(jwt: jwtWithNonceWhitespace, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10301:"));
        ValidateNonce(jwt: jwtWithNonceWithTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10301:"));

        validationContext.Nonce = nonceWithTimeStamp;
        ValidateNonce(jwt: jwtWithNonceWithTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        // nonce expired
        validationContext.Nonce = nonceWithTimeStamp;
        protocolValidatorRequiresTimeStamp.NonceLifetime = TimeSpan.FromMilliseconds(10);
        Thread.Sleep(100);
        ValidateNonce(jwt: jwtWithNonceWithTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeof(OpenIdConnectProtocolInvalidNonceException)));

        // nonce missing timestamp, validator requires time stamp
        // 1. not well formed, no '.'
        validationContext.Nonce = nonceWithoutTimeStamp;
        protocolValidatorRequiresTimeStamp.NonceLifetime = TimeSpan.FromMinutes(10);
        ValidateNonce(jwt: jwtWithNonceWithoutTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10317:"));

        // 2. timestamp not well formed
        validationContext.Nonce = nonceBadTimeStamp;
        ValidateNonce(jwt: jwtWithNonceWithBadTimeStamp, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException( typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), innerTypeExpected: typeof(FormatException), substringExpected: "IDX10318:"));

        // 3. timestamp not required
        validationContext.Nonce = nonceBadTimeStamp;
        ValidateNonce(jwt: jwtWithNonceWithBadTimeStamp, protocolValidator: protocolValidatorDoesNotRequireTimeStamp, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        // 4. ticks max value
        validationContext.Nonce = nonceTicksTooLarge;
        ValidateNonce(jwt: jwtWithNonceTicksTooLarge, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), innerTypeExpected: typeof(ArgumentException), substringExpected: "IDX10320:"));

        // 5. ticks min value small
        validationContext.Nonce = nonceTicksTooSmall;
        ValidateNonce(jwt: jwtWithNonceTicksTooSmall, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10318:"));

        // 6. ticks negative
        validationContext.Nonce = nonceTicksNegative;
        ValidateNonce(jwt: jwtWithNonceTicksNegative, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10318:"));

        // 7. ticks zero
        validationContext.Nonce = nonceTicksZero;
        ValidateNonce(jwt: jwtWithNonceZero, protocolValidator: protocolValidatorRequiresTimeStamp, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10318:"));

        // require nonce false
        validationContext.Nonce = null;
        ValidateNonce(jwt: jwtWithNonceWithoutTimeStamp, protocolValidator: protocolValidatorDoesNotRequireNonce, validationContext: validationContext, ee: ExpectedException.NoExceptionExpected);

        // validationContext has nonce
        validationContext.Nonce = nonceWithTimeStamp;
        ValidateNonce(jwt: jwtWithoutNonce, protocolValidator: protocolValidatorDoesNotRequireNonce, validationContext: validationContext, ee: new ExpectedException(typeExpected: typeof(OpenIdConnectProtocolInvalidNonceException), substringExpected: "IDX10323:"));
    }

    // Runs the protected ValidateNonce (via the public test shim) and checks the outcome.
    private void ValidateNonce(JwtSecurityToken jwt, PublicOpenIdConnectProtocolValidator protocolValidator, OpenIdConnectProtocolValidationContext validationContext, ExpectedException ee)
    {
        try
        {
            protocolValidator.PublicValidateNonce(jwt, validationContext);
            ee.ProcessNoException();
        }
        catch(Exception ex)
        {
            ee.ProcessException(ex);
        }
    }
}
// Test shim exposing the protected ValidateCHash / ValidateNonce members
// and allowing HashAlgorithmMap to be replaced wholesale.
class PublicOpenIdConnectProtocolValidator : OpenIdConnectProtocolValidator
{
    // Forwards to the protected ValidateCHash.
    public void PublicValidateCHash(JwtSecurityToken jwt, OpenIdConnectProtocolValidationContext context)
        => ValidateCHash(jwt, context);

    // Forwards to the protected ValidateNonce.
    public void PublicValidateNonce(JwtSecurityToken jwt, OpenIdConnectProtocolValidationContext context)
        => ValidateNonce(jwt, context);

    // Replaces the contents of HashAlgorithmMap with the supplied entries.
    public void SetHashAlgorithmMap(Dictionary<string, string> hashAlgorithmMap)
    {
        HashAlgorithmMap.Clear();
        foreach (var entry in hashAlgorithmMap)
            HashAlgorithmMap.Add(entry.Key, entry.Value);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    private static void OrUInt16()
    {
        var test = new SimpleBinaryOpTest__OrUInt16();

        if (!test.IsSupported)
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }
        else
        {
            // Validates basic functionality works, using Unsafe.Read / Load / LoadAligned
            test.RunBasicScenario_UnsafeRead();
            test.RunBasicScenario_Load();
            test.RunBasicScenario_LoadAligned();

            // Validates calling via reflection works, using Unsafe.Read / Load / LoadAligned
            test.RunReflectionScenario_UnsafeRead();
            test.RunReflectionScenario_Load();
            test.RunReflectionScenario_LoadAligned();

            // Validates passing a static member works
            test.RunClsVarScenario();

            // Validates passing a local works, using Unsafe.Read / Load / LoadAligned
            test.RunLclVarScenario_UnsafeRead();
            test.RunLclVarScenario_Load();
            test.RunLclVarScenario_LoadAligned();

            // Validates passing the field of a local works
            test.RunLclFldScenario();

            // Validates passing an instance member works
            test.RunFldScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
public sealed unsafe class SimpleBinaryOpTest__OrUInt16
{
private const int VectorSize = 16;
private const int ElementCount = VectorSize / sizeof(UInt16);
private static UInt16[] _data1 = new UInt16[ElementCount];
private static UInt16[] _data2 = new UInt16[ElementCount];
private static Vector128<UInt16> _clsVar1;
private static Vector128<UInt16> _clsVar2;
private Vector128<UInt16> _fld1;
private Vector128<UInt16> _fld2;
private SimpleBinaryOpTest__DataTable<UInt16> _dataTable;
// Seeds the static (class-variable) operand vectors with random ushort data.
// Note: _clsVar1 is filled from _data2 and _clsVar2 from _data1 (cross-assignment as generated).
static SimpleBinaryOpTest__OrUInt16()
{
var random = new Random();
for (var i = 0; i < ElementCount; i++) { _data1[i] = (ushort)(random.Next(0, ushort.MaxValue)); _data2[i] = (ushort)(random.Next(0, ushort.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar1), ref Unsafe.As<UInt16, byte>(ref _data2[0]), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _clsVar2), ref Unsafe.As<UInt16, byte>(ref _data1[0]), VectorSize);
}
// Seeds the instance fields (_fld1/_fld2) with one batch of random data,
// then regenerates fresh data for the pinned data table used by the pointer scenarios.
public SimpleBinaryOpTest__OrUInt16()
{
Succeeded = true;
var random = new Random();
// First batch: copied into the instance-field vectors.
for (var i = 0; i < ElementCount; i++) { _data1[i] = (ushort)(random.Next(0, ushort.MaxValue)); _data2[i] = (ushort)(random.Next(0, ushort.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld1), ref Unsafe.As<UInt16, byte>(ref _data1[0]), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt16>, byte>(ref _fld2), ref Unsafe.As<UInt16, byte>(ref _data2[0]), VectorSize);
// Second batch: backs the data table (input/output arrays) used by the Run* scenarios.
for (var i = 0; i < ElementCount; i++) { _data1[i] = (ushort)(random.Next(0, ushort.MaxValue)); _data2[i] = (ushort)(random.Next(0, ushort.MaxValue)); }
_dataTable = new SimpleBinaryOpTest__DataTable<UInt16>(_data1, _data2, new UInt16[ElementCount], VectorSize);
}
// True when the SSE2 instruction set is available on this hardware.
public bool IsSupported => Sse2.IsSupported;
// Set to false by ValidateResult when any scenario produces a wrong result.
public bool Succeeded { get; set; }
// Computes Sse2.Or on operands read via Unsafe.Read and validates the result.
public void RunBasicScenario_UnsafeRead()
{
var result = Sse2.Or(
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Computes Sse2.Or on operands read via Sse2.LoadVector128 and validates the result.
public void RunBasicScenario_Load()
{
var result = Sse2.Or(
Sse2.LoadVector128((UInt16*)(_dataTable.inArray1Ptr)),
Sse2.LoadVector128((UInt16*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Computes Sse2.Or on operands read via Sse2.LoadAlignedVector128 and validates the result.
public void RunBasicScenario_LoadAligned()
{
var result = Sse2.Or(
Sse2.LoadAlignedVector128((UInt16*)(_dataTable.inArray1Ptr)),
Sse2.LoadAlignedVector128((UInt16*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
// Invokes Sse2.Or via reflection with operands read via Unsafe.Read and validates the result.
public void RunReflectionScenario_UnsafeRead()
{
var result = typeof(Sse2).GetMethod(nameof(Sse2.Or), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
    // Same as the UnsafeRead reflection scenario, but operands are produced by
    // Sse2.LoadVector128 before being boxed for the reflective invoke.
    var orMethod = typeof(Sse2).GetMethod(nameof(Sse2.Or), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) });
    object boxedResult = orMethod.Invoke(null, new object[] {
        Sse2.LoadVector128((UInt16*)(_dataTable.inArray1Ptr)),
        Sse2.LoadVector128((UInt16*)(_dataTable.inArray2Ptr))
    });
    Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(boxedResult));
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
    // Same as the other reflection scenarios, but operands come from the
    // aligned-load form of the intrinsic.
    var orMethod = typeof(Sse2).GetMethod(nameof(Sse2.Or), new Type[] { typeof(Vector128<UInt16>), typeof(Vector128<UInt16>) });
    object boxedResult = orMethod.Invoke(null, new object[] {
        Sse2.LoadAlignedVector128((UInt16*)(_dataTable.inArray1Ptr)),
        Sse2.LoadAlignedVector128((UInt16*)(_dataTable.inArray2Ptr))
    });
    Unsafe.Write(_dataTable.outArrayPtr, (Vector128<UInt16>)(boxedResult));
    ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
    // Operands come from the static (class-variable) fields.
    Unsafe.Write(_dataTable.outArrayPtr, Sse2.Or(_clsVar1, _clsVar2));
    ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
    // Like the basic scenario, but validates against the local copies of the operands
    // rather than the raw input buffers.
    var op1 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray1Ptr);
    var op2 = Unsafe.Read<Vector128<UInt16>>(_dataTable.inArray2Ptr);
    Unsafe.Write(_dataTable.outArrayPtr, Sse2.Or(op1, op2));
    ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
    // Local-variable operands produced by the unaligned-load intrinsic.
    var op1 = Sse2.LoadVector128((UInt16*)(_dataTable.inArray1Ptr));
    var op2 = Sse2.LoadVector128((UInt16*)(_dataTable.inArray2Ptr));
    Unsafe.Write(_dataTable.outArrayPtr, Sse2.Or(op1, op2));
    ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
    // Local-variable operands produced by the aligned-load intrinsic.
    var op1 = Sse2.LoadAlignedVector128((UInt16*)(_dataTable.inArray1Ptr));
    var op2 = Sse2.LoadAlignedVector128((UInt16*)(_dataTable.inArray2Ptr));
    Unsafe.Write(_dataTable.outArrayPtr, Sse2.Or(op1, op2));
    ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclFldScenario()
{
    // Operands come from the fields of a freshly constructed local instance.
    var instance = new SimpleBinaryOpTest__OrUInt16();
    Unsafe.Write(_dataTable.outArrayPtr, Sse2.Or(instance._fld1, instance._fld2));
    ValidateResult(instance._fld1, instance._fld2, _dataTable.outArrayPtr);
}
public void RunFldScenario()
{
    // Operands come from this instance's own fields.
    Unsafe.Write(_dataTable.outArrayPtr, Sse2.Or(_fld1, _fld2));
    ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunUnsupportedScenario()
{
    // On hardware without SSE2, invoking the intrinsic must throw
    // PlatformNotSupportedException; observing that exception is the passing outcome.
    Succeeded = false;
    try
    {
        RunBasicScenario_UnsafeRead();
    }
    catch (PlatformNotSupportedException)
    {
        Succeeded = true;
    }
}
// Copies the two vector operands and the raw result buffer into managed arrays,
// then defers to the array-based overload for the element-wise check.
private void ValidateResult(Vector128<UInt16> left, Vector128<UInt16> right, void* result, [CallerMemberName] string method = "")
{
    UInt16[] inArray1 = new UInt16[ElementCount];
    UInt16[] inArray2 = new UInt16[ElementCount];
    UInt16[] outArray = new UInt16[ElementCount];
    // NOTE(review): Unsafe.AsPointer is taken on unpinned array elements; this assumes no
    // GC move occurs between taking the pointer and the write — confirm this is acceptable
    // for this test harness.
    Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
    Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
    ValidateResult(inArray1, inArray2, outArray, method);
}
// Copies all three raw buffers (VectorSize bytes each) into managed arrays and
// defers to the array-based overload for the element-wise check.
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
    UInt16[] inArray1 = new UInt16[ElementCount];
    UInt16[] inArray2 = new UInt16[ElementCount];
    UInt16[] outArray = new UInt16[ElementCount];
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt16, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
    ValidateResult(inArray1, inArray2, outArray, method);
}
// Element-wise check that result[i] == left[i] | right[i]; clears Succeeded and logs
// all three arrays on the first mismatch.
private void ValidateResult(UInt16[] left, UInt16[] right, UInt16[] result, [CallerMemberName] string method = "")
{
    // A single loop starting at index 0 replaces the original "check element 0, then
    // loop from 1" shape; the observable state (Succeeded cleared on first mismatch,
    // remaining elements skipped) is identical.
    for (var i = 0; i < left.Length; i++)
    {
        if ((ushort)(left[i] | right[i]) != result[i])
        {
            Succeeded = false;
            break;
        }
    }
    if (!Succeeded)
    {
        Console.WriteLine($"{nameof(Sse2)}.{nameof(Sse2.Or)}<UInt16>: {method} failed:");
        Console.WriteLine($" left: ({string.Join(", ", left)})");
        Console.WriteLine($" right: ({string.Join(", ", right)})");
        Console.WriteLine($" result: ({string.Join(", ", result)})");
        Console.WriteLine();
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.ComponentModel;
using System.Runtime.InteropServices;
using System.Diagnostics;
namespace System.Net.WebSockets
{
/// <summary>
/// Static class containing the WinHttp global callback and associated routines.
/// </summary>
internal static class WinHttpWebSocketCallback
{
    // Kept in a static field so the delegate stays rooted for the life of the process;
    // WinHTTP keeps calling back through it after the registering P/Invoke returns.
    public static Interop.WinHttp.WINHTTP_STATUS_CALLBACK s_StaticCallbackDelegate =
        new Interop.WinHttp.WINHTTP_STATUS_CALLBACK(WinHttpCallback);

    /// <summary>
    /// Global WinHTTP status callback: recovers the managed session state from the
    /// native context pointer and dispatches to the request- or websocket-specific
    /// handler depending on which handle raised the event.
    /// </summary>
    public static void WinHttpCallback(
        IntPtr handle,
        IntPtr context,
        uint internetStatus,
        IntPtr statusInformation,
        uint statusInformationLength)
    {
        // During process shutdown the managed state behind 'context' may already be gone.
        if (Environment.HasShutdownStarted)
        {
            return;
        }

        if (context == IntPtr.Zero)
        {
            Debug.Assert(internetStatus != Interop.WinHttp.WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING);
            return;
        }

        try
        {
            WinHttpWebSocketState state = WinHttpWebSocketState.FromIntPtr(context);
            Debug.Assert(state != null, "WinHttpWebSocketCallback: state should not be null");

            if ((state.RequestHandle != null) &&
                (state.RequestHandle.DangerousGetHandle() == handle))
            {
                RequestCallback(handle, state, internetStatus, statusInformation, statusInformationLength);
                return;
            }

            if ((state.WebSocketHandle != null) &&
                (state.WebSocketHandle.DangerousGetHandle() == handle))
            {
                WebSocketCallback(handle, state, internetStatus, statusInformation, statusInformationLength);
                return;
            }
        }
        catch (Exception ex)
        {
            // A managed exception must never propagate into native WinHTTP code.
            Debug.Fail("Unhandled exception in WinHTTP callback: " + ex);
        }
    }

    #region RequestCallback
    // Handles callbacks targeted at the HTTP request handle (the upgrade handshake).
    private static void RequestCallback(
        IntPtr handle,
        WinHttpWebSocketState state,
        uint internetStatus,
        IntPtr statusInformation,
        uint statusInformationLength)
    {
        switch (internetStatus)
        {
            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE:
                OnRequestSendRequestComplete(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE:
                OnRequestHeadersAvailable(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING:
                OnRequestHandleClosing(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_REQUEST_ERROR:
                Debug.Assert(
                    statusInformationLength == Marshal.SizeOf<Interop.WinHttp.WINHTTP_ASYNC_RESULT>(),
                    "RequestCallback: statusInformationLength=" + statusInformationLength +
                    " must be sizeof(WINHTTP_ASYNC_RESULT)=" + Marshal.SizeOf<Interop.WinHttp.WINHTTP_ASYNC_RESULT>());

                var asyncResult = Marshal.PtrToStructure<Interop.WinHttp.WINHTTP_ASYNC_RESULT>(statusInformation);
                OnRequestError(state, asyncResult);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_SECURE_FAILURE:
                Debug.Assert(
                    statusInformationLength == sizeof(uint),
                    "RequestCallback: statusInformationLength must be sizeof(uint).");

                // statusInformation points to a DWORD containing WINHTTP_CALLBACK_STATUS_FLAG_* flags.
                uint flags = 0;
                unchecked
                {
                    flags = (uint)Marshal.ReadInt32(statusInformation);
                }
                OnRequestSecureFailure(state, flags);

                return;
        }
    }

    // Completes the pending upgrade task when WinHTTP finished sending the request.
    private static void OnRequestSendRequestComplete(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnRequestSendRequestComplete: state is null");
        Debug.Assert(state.TcsUpgrade != null, "OnRequestSendRequestComplete: task completion source is null");
        state.TcsUpgrade.TrySetResult(true);
    }

    // Completes the pending upgrade task when the response headers arrived.
    private static void OnRequestHeadersAvailable(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnRequestHeadersAvailable: state is null");
        Debug.Assert(state.TcsUpgrade != null, "OnRequestHeadersAvailable: task completion source is null");
        state.TcsUpgrade.TrySetResult(true);
    }

    // Detaches the callback from the request handle once WinHTTP reports it closing.
    private static void OnRequestHandleClosing(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnRequestError: state is null");
        Debug.Assert(state.RequestHandle != null, "OnRequestError: RequestHandle is null");
        Debug.Assert(!state.RequestHandle.IsInvalid, "OnRequestError: RequestHandle is invalid");

        state.RequestHandle.DetachCallback();
        state.RequestHandle = null;

        // Unpin the state object if there are no more open handles that are wired to the callback.
        if (state.DecrementHandlesOpenWithCallback() == 0)
        {
            state.Unpin();
        }
    }

    // Maps a request-phase WinHTTP error onto the upgrade task.
    private static void OnRequestError(
        WinHttpWebSocketState state,
        Interop.WinHttp.WINHTTP_ASYNC_RESULT asyncResult)
    {
        Debug.Assert(state != null, "OnRequestError: state is null");

        var innerException = WinHttpException.CreateExceptionUsingError((int)asyncResult.dwError);

        switch ((uint)asyncResult.dwResult.ToInt32())
        {
            case Interop.WinHttp.API_SEND_REQUEST:
            case Interop.WinHttp.API_RECEIVE_RESPONSE:
            {
                var exception = new WebSocketException(SR.net_webstatus_ConnectFailure, innerException);
                state.UpdateState(WebSocketState.Closed);
                state.TcsUpgrade.TrySetException(exception);
            }
            break;

            default:
            {
                Debug.Fail(
                    "OnRequestError: Result (" + asyncResult.dwResult + ") is not expected.",
                    "Error code: " + asyncResult.dwError + " (" + innerException.Message + ")");
            }
            break;
        }
    }

    // Fails the upgrade task when a TLS failure occurs during the handshake.
    private static void OnRequestSecureFailure(WinHttpWebSocketState state, uint flags)
    {
        Debug.Assert(state != null, "OnRequestSecureFailure: state is null");

        var innerException = WinHttpException.CreateExceptionUsingError((int)Interop.WinHttp.ERROR_WINHTTP_SECURE_FAILURE);

        var exception = new WebSocketException(
            WebSocketError.Success,
            SR.net_webstatus_ConnectFailure,
            innerException);

        // TODO (#2509): handle SSL related exceptions.
        state.UpdateState(WebSocketState.Closed);

        // TODO (#2509): Create exception from WINHTTP_CALLBACK_STATUS_SECURE_FAILURE flags.
        state.TcsUpgrade.TrySetException(exception);
    }
    #endregion

    #region WebSocketCallback
    // Handles callbacks targeted at the established websocket handle.
    private static void WebSocketCallback(
        IntPtr handle,
        WinHttpWebSocketState state,
        uint internetStatus,
        IntPtr statusInformation,
        uint statusInformationLength)
    {
        switch (internetStatus)
        {
            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE:
                OnWebSocketWriteComplete(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_READ_COMPLETE:
                Debug.Assert(
                    statusInformationLength == Marshal.SizeOf<Interop.WinHttp.WINHTTP_WEB_SOCKET_STATUS>(),
                    "WebSocketCallback: statusInformationLength must be sizeof(WINHTTP_WEB_SOCKET_STATUS).");

                var info = Marshal.PtrToStructure<Interop.WinHttp.WINHTTP_WEB_SOCKET_STATUS>(statusInformation);
                OnWebSocketReadComplete(state, info);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_CLOSE_COMPLETE:
                OnWebSocketCloseComplete(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_SHUTDOWN_COMPLETE:
                OnWebSocketShutdownComplete(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING:
                OnWebSocketHandleClosing(state);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_REQUEST_ERROR:
                Debug.Assert(
                    statusInformationLength == Marshal.SizeOf<Interop.WinHttp.WINHTTP_WEB_SOCKET_ASYNC_RESULT>(),
                    "WebSocketCallback: statusInformationLength must be sizeof(WINHTTP_WEB_SOCKET_ASYNC_RESULT).");

                var asyncResult = Marshal.PtrToStructure<Interop.WinHttp.WINHTTP_WEB_SOCKET_ASYNC_RESULT>(statusInformation);
                OnWebSocketError(state, asyncResult);
                return;

            case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_SECURE_FAILURE:
                Debug.Assert(
                    statusInformationLength == sizeof(uint),
                    "WebSocketCallback: statusInformationLength must be sizeof(uint).");

                // statusInformation points to a DWORD containing WINHTTP_CALLBACK_STATUS_FLAG_* flags.
                // BUGFIX: the flags must be read *through* the pointer, exactly as the
                // equivalent case in RequestCallback does; the previous code cast the
                // pointer value itself to uint. Additionally, dispatch to the
                // websocket-specific handler (previously dead code) so the connection
                // is aborted rather than treated as a failed upgrade.
                uint flags = 0;
                unchecked
                {
                    flags = (uint)Marshal.ReadInt32(statusInformation);
                }
                OnWebSocketSecureFailure(state, flags);
                return;
        }
    }

    // Completes the pending send task.
    private static void OnWebSocketWriteComplete(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnWebSocketWriteComplete: state is null");
        state.PendingWriteOperation = false;
        state.TcsSend.TrySetResult(true);
    }

    // Records the frame metadata of a completed read and completes the receive task.
    private static void OnWebSocketReadComplete(
        WinHttpWebSocketState state,
        Interop.WinHttp.WINHTTP_WEB_SOCKET_STATUS info)
    {
        Debug.Assert(state != null, "OnWebSocketReadComplete: state is null");

        // A close frame moves the connection into the CloseReceived state.
        if (info.eBufferType == Interop.WinHttp.WINHTTP_WEB_SOCKET_BUFFER_TYPE.WINHTTP_WEB_SOCKET_CLOSE_BUFFER_TYPE)
        {
            state.UpdateState(WebSocketState.CloseReceived);
        }

        state.BufferType = info.eBufferType;
        state.BytesTransferred = info.dwBytesTransferred;
        state.PendingReadOperation = false;
        state.TcsReceive.TrySetResult(true);
    }

    // Completes the pending close task once the close handshake finished.
    private static void OnWebSocketCloseComplete(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnWebSocketCloseComplete: state is null");
        state.UpdateState(WebSocketState.Closed);
        state.TcsClose.TrySetResult(true);
    }

    // Completes the pending close-output task once our side of the close handshake is sent.
    private static void OnWebSocketShutdownComplete(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnWebSocketShutdownComplete: state is null");
        state.UpdateState(WebSocketState.CloseSent);
        state.TcsCloseOutput.TrySetResult(true);
    }

    // Detaches the callback from the websocket handle once WinHTTP reports it closing.
    private static void OnWebSocketHandleClosing(WinHttpWebSocketState state)
    {
        Debug.Assert(state != null, "OnWebSocketHandleClosing: state is null");
        Debug.Assert(state.WebSocketHandle != null, "OnWebSocketHandleClosing: WebSocketHandle is null");
        Debug.Assert(!state.WebSocketHandle.IsInvalid, "OnWebSocketHandleClosing: WebSocketHandle is invalid");

        state.WebSocketHandle.DetachCallback();
        state.WebSocketHandle = null;

        // Unpin the state object if there are no more open handles that are wired to the callback.
        if (state.DecrementHandlesOpenWithCallback() == 0)
        {
            state.Unpin();
        }
    }

    // Maps a websocket-phase WinHTTP error onto the appropriate pending operation task.
    private static void OnWebSocketError(
        WinHttpWebSocketState state,
        Interop.WinHttp.WINHTTP_WEB_SOCKET_ASYNC_RESULT asyncResult)
    {
        Debug.Assert(state != null, "OnWebSocketError: state is null");

        var innerException = WinHttpException.CreateExceptionUsingError((int)(asyncResult.AsyncResult.dwError));

        // A cancelled operation aborts the whole connection and cancels both
        // outstanding read and write tasks.
        if (asyncResult.AsyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_OPERATION_CANCELLED)
        {
            state.UpdateState(WebSocketState.Aborted);

            if (state.TcsReceive != null)
            {
                state.TcsReceive.TrySetCanceled();
            }

            if (state.TcsSend != null)
            {
                state.TcsSend.TrySetCanceled();
            }

            return;
        }

        switch (asyncResult.Operation)
        {
            case Interop.WinHttp.WINHTTP_WEB_SOCKET_OPERATION.WINHTTP_WEB_SOCKET_SEND_OPERATION:
                state.PendingWriteOperation = false;
                state.TcsSend.TrySetException(innerException);
                break;

            case Interop.WinHttp.WINHTTP_WEB_SOCKET_OPERATION.WINHTTP_WEB_SOCKET_RECEIVE_OPERATION:
                state.PendingReadOperation = false;
                state.TcsReceive.TrySetException(innerException);
                break;

            case Interop.WinHttp.WINHTTP_WEB_SOCKET_OPERATION.WINHTTP_WEB_SOCKET_CLOSE_OPERATION:
                state.TcsClose.TrySetException(innerException);
                break;

            case Interop.WinHttp.WINHTTP_WEB_SOCKET_OPERATION.WINHTTP_WEB_SOCKET_SHUTDOWN_OPERATION:
                state.TcsCloseOutput.TrySetException(innerException);
                break;

            default:
                Debug.Fail(
                    "OnWebSocketError: Operation (" + asyncResult.Operation + ") is not expected.",
                    "Error code: " + asyncResult.AsyncResult.dwError + " (" + innerException.Message + ")");
                break;
        }
    }

    // Aborts the connection on a TLS failure after the websocket is established.
    // NOTE(review): this faults TcsUpgrade, which may be null/already completed once the
    // socket is established — confirm which pending task should observe this failure.
    private static void OnWebSocketSecureFailure(WinHttpWebSocketState state, uint flags)
    {
        Debug.Assert(state != null, "OnWebSocketSecureFailure: state is null");

        var innerException = WinHttpException.CreateExceptionUsingError((int)Interop.WinHttp.ERROR_WINHTTP_SECURE_FAILURE);
        var exception = new WebSocketException(WebSocketError.ConnectionClosedPrematurely, innerException);

        // TODO (Issue 2509): handle SSL related exceptions.
        state.UpdateState(WebSocketState.Aborted);

        // TODO (Issue 2509): Create exception from WINHTTP_CALLBACK_STATUS_SECURE_FAILURE flags.
        state.TcsUpgrade.TrySetException(exception);
    }
    #endregion
}
}
| |
using System;
using System.Text;
using NUnit.Framework;
using Raksha.Crypto;
using Raksha.Crypto.Engines;
using Raksha.Crypto.Modes;
using Raksha.Crypto.Modes.Gcm;
using Raksha.Crypto.Parameters;
using Raksha.Security;
using Raksha.Utilities.Encoders;
using Raksha.Tests.Utilities;
namespace Raksha.Tests.Crypto
{
/// <summary>
/// Test vectors from "The Galois/Counter Mode of Operation (GCM)", McGrew/Viega, Appendix B.
/// Exercises GcmBlockCipher with every multiplier implementation, several MAC lengths,
/// cipher-instance reuse, and randomized round-trips.
/// </summary>
[TestFixture]
public class GcmTest
    : SimpleTest
{
    // Each vector is laid out as (all hex-encoded):
    //   [0] test name, [1] key K, [2] plaintext P, [3] additional data A,
    //   [4] IV, [5] expected ciphertext C, [6] expected authentication tag T.
    private static readonly string[][] TestVectors = new string[][]
    {
        new string[]
        {
            "Test Case 1",
            "00000000000000000000000000000000",
            "",
            "",
            "000000000000000000000000",
            "",
            "58e2fccefa7e3061367f1d57a4e7455a",
        },
        new string[]
        {
            "Test Case 2",
            "00000000000000000000000000000000",
            "00000000000000000000000000000000",
            "",
            "000000000000000000000000",
            "0388dace60b6a392f328c2b971b2fe78",
            "ab6e47d42cec13bdf53a67b21257bddf",
        },
        new string[]
        {
            "Test Case 3",
            "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b391aafd255",
            "",
            "cafebabefacedbaddecaf888",
            "42831ec2217774244b7221b784d0d49c"
            + "e3aa212f2c02a4e035c17e2329aca12e"
            + "21d514b25466931c7d8f6a5aac84aa05"
            + "1ba30b396a0aac973d58e091473f5985",
            "4d5c2af327cd64a62cf35abd2ba6fab4",
        },
        new string[]
        {
            "Test Case 4",
            "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "cafebabefacedbaddecaf888",
            "42831ec2217774244b7221b784d0d49c"
            + "e3aa212f2c02a4e035c17e2329aca12e"
            + "21d514b25466931c7d8f6a5aac84aa05"
            + "1ba30b396a0aac973d58e091",
            "5bc94fbc3221a5db94fae95ae7121a47",
        },
        new string[]
        {
            "Test Case 5",
            "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "cafebabefacedbad",
            "61353b4c2806934a777ff51fa22a4755"
            + "699b2a714fcdc6f83766e5f97b6c7423"
            + "73806900e49f24b22b097544d4896b42"
            + "4989b5e1ebac0f07c23f4598",
            "3612d2e79e3b0785561be14aaca2fccb",
        },
        new string[]
        {
            "Test Case 6",
            "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "9313225df88406e555909c5aff5269aa"
            + "6a7a9538534f7da1e4c303d2a318a728"
            + "c3c0c95156809539fcf0e2429a6b5254"
            + "16aedbf5a0de6a57a637b39b",
            "8ce24998625615b603a033aca13fb894"
            + "be9112a5c3a211a8ba262a3cca7e2ca7"
            + "01e4a9a4fba43c90ccdcb281d48c7c6f"
            + "d62875d2aca417034c34aee5",
            "619cc5aefffe0bfa462af43c1699d050",
        },
        new string[]
        {
            "Test Case 7",
            "00000000000000000000000000000000"
            + "0000000000000000",
            "",
            "",
            "000000000000000000000000",
            "",
            "cd33b28ac773f74ba00ed1f312572435",
        },
        new string[]
        {
            "Test Case 8",
            "00000000000000000000000000000000"
            + "0000000000000000",
            "00000000000000000000000000000000",
            "",
            "000000000000000000000000",
            "98e7247c07f0fe411c267e4384b0f600",
            "2ff58d80033927ab8ef4d4587514f0fb",
        },
        new string[]
        {
            "Test Case 9",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b391aafd255",
            "",
            "cafebabefacedbaddecaf888",
            "3980ca0b3c00e841eb06fac4872a2757"
            + "859e1ceaa6efd984628593b40ca1e19c"
            + "7d773d00c144c525ac619d18c84a3f47"
            + "18e2448b2fe324d9ccda2710acade256",
            "9924a7c8587336bfb118024db8674a14",
        },
        new string[]
        {
            "Test Case 10",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "cafebabefacedbaddecaf888",
            "3980ca0b3c00e841eb06fac4872a2757"
            + "859e1ceaa6efd984628593b40ca1e19c"
            + "7d773d00c144c525ac619d18c84a3f47"
            + "18e2448b2fe324d9ccda2710",
            "2519498e80f1478f37ba55bd6d27618c",
        },
        new string[]
        {
            "Test Case 11",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "cafebabefacedbad",
            "0f10f599ae14a154ed24b36e25324db8"
            + "c566632ef2bbb34f8347280fc4507057"
            + "fddc29df9a471f75c66541d4d4dad1c9"
            + "e93a19a58e8b473fa0f062f7",
            "65dcc57fcf623a24094fcca40d3533f8",
        },
        new string[]
        {
            "Test Case 12",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "9313225df88406e555909c5aff5269aa"
            + "6a7a9538534f7da1e4c303d2a318a728"
            + "c3c0c95156809539fcf0e2429a6b5254"
            + "16aedbf5a0de6a57a637b39b",
            "d27e88681ce3243c4830165a8fdcf9ff"
            + "1de9a1d8e6b447ef6ef7b79828666e45"
            + "81e79012af34ddd9e2f037589b292db3"
            + "e67c036745fa22e7e9b7373b",
            "dcf566ff291c25bbb8568fc3d376a6d9",
        },
        new string[]
        {
            "Test Case 13",
            "00000000000000000000000000000000"
            + "00000000000000000000000000000000",
            "",
            "",
            "000000000000000000000000",
            "",
            "530f8afbc74536b9a963b4f1c4cb738b",
        },
        new string[]
        {
            "Test Case 14",
            "00000000000000000000000000000000"
            + "00000000000000000000000000000000",
            "00000000000000000000000000000000",
            "",
            "000000000000000000000000",
            "cea7403d4d606b6e074ec5d3baf39d18",
            "d0d1c8a799996bf0265b98b5d48ab919",
        },
        new string[]
        {
            "Test Case 15",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b391aafd255",
            "",
            "cafebabefacedbaddecaf888",
            "522dc1f099567d07f47f37a32a84427d"
            + "643a8cdcbfe5c0c97598a2bd2555d1aa"
            + "8cb08e48590dbb3da7b08b1056828838"
            + "c5f61e6393ba7a0abcc9f662898015ad",
            "b094dac5d93471bdec1a502270e3cc6c",
        },
        new string[]
        {
            "Test Case 16",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "cafebabefacedbaddecaf888",
            "522dc1f099567d07f47f37a32a84427d"
            + "643a8cdcbfe5c0c97598a2bd2555d1aa"
            + "8cb08e48590dbb3da7b08b1056828838"
            + "c5f61e6393ba7a0abcc9f662",
            "76fc6ece0f4e1768cddf8853bb2d551b",
        },
        new string[]
        {
            "Test Case 17",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "cafebabefacedbad",
            "c3762df1ca787d32ae47c13bf19844cb"
            + "af1ae14d0b976afac52ff7d79bba9de0"
            + "feb582d33934a4f0954cc2363bc73f78"
            + "62ac430e64abe499f47c9b1f",
            "3a337dbf46a792c45e454913fe2ea8f2",
        },
        new string[]
        {
            "Test Case 18",
            "feffe9928665731c6d6a8f9467308308"
            + "feffe9928665731c6d6a8f9467308308",
            "d9313225f88406e5a55909c5aff5269a"
            + "86a7a9531534f7da2e4c303d8a318a72"
            + "1c3c0c95956809532fcf0e2449a6b525"
            + "b16aedf5aa0de657ba637b39",
            "feedfacedeadbeeffeedfacedeadbeef"
            + "abaddad2",
            "9313225df88406e555909c5aff5269aa"
            + "6a7a9538534f7da1e4c303d2a318a728"
            + "c3c0c95156809539fcf0e2429a6b5254"
            + "16aedbf5a0de6a57a637b39b",
            "5a8def2f0c9e53f1f75d7853659e2a20"
            + "eeb2b22aafde6419a058ab4f6f746bf4"
            + "0fc0c3b780f244452da3ebf1c5d82cde"
            + "a2418997200ef82e44ae7e3f",
            "a44a8266ee1c8eb0c8b5d4cf5ae9f19a",
        },
    };

    // Test name reported by the SimpleTest harness.
    public override string Name
    {
        get { return "GCM"; }
    }

    // Runs every Appendix B vector (at several MAC lengths), then the randomized round-trips.
    public override void PerformTest()
    {
        for (int i = 0; i < TestVectors.Length; ++i)
        {
            runTestCase(TestVectors[i]);
        }
        randomTests();
    }

    // Exercises one vector at every MAC length from 12 to 16 bytes inclusive.
    private void runTestCase(
        string[] testVector)
    {
        for (int macLength = 12; macLength <= 16; ++macLength)
        {
            runTestCase(testVector, macLength);
        }
    }

    // Decodes one vector's fields, truncates the expected tag to macLength bytes,
    // and runs the case once per GCM multiplier implementation.
    private void runTestCase(
        string[] testVector,
        int macLength)
    {
        int pos = 0;
        string testName = testVector[pos++];
        byte[] K = Hex.Decode(testVector[pos++]);
        byte[] P = Hex.Decode(testVector[pos++]);
        byte[] A = Hex.Decode(testVector[pos++]);
        byte[] IV = Hex.Decode(testVector[pos++]);
        byte[] C = Hex.Decode(testVector[pos++]);

        // For short MAC, take leading bytes
        byte[] t = Hex.Decode(testVector[pos++]);
        byte[] T = new byte[macLength];
        Array.Copy(t, T, T.Length);

        AeadParameters parameters = new AeadParameters(new KeyParameter(K), T.Length * 8, IV, A);

        // Default multiplier
        runTestCase(null, null, parameters, testName, P, C, T);

        runTestCase(new BasicGcmMultiplier(), new BasicGcmMultiplier(), parameters, testName, P, C, T);
        runTestCase(new Tables8kGcmMultiplier(), new Tables8kGcmMultiplier(), parameters, testName, P, C, T);
        runTestCase(new Tables64kGcmMultiplier(), new Tables64kGcmMultiplier(), parameters, testName, P, C, T);
    }

    // Runs the case once with fresh ciphers, then again with the same instances to
    // verify that a cipher is reusable after DoFinal.
    private void runTestCase(
        IGcmMultiplier encM,
        IGcmMultiplier decM,
        AeadParameters parameters,
        string testName,
        byte[] P,
        byte[] C,
        byte[] T)
    {
        GcmBlockCipher encCipher = initCipher(encM, true, parameters);
        GcmBlockCipher decCipher = initCipher(decM, false, parameters);
        checkTestCase(encCipher, decCipher, testName, P, C, T);
        checkTestCase(encCipher, decCipher, testName + " (reused)", P, C, T);
    }

    // Builds a GCM cipher over AES with the given multiplier (null selects the default).
    private GcmBlockCipher initCipher(
        IGcmMultiplier m,
        bool forEncryption,
        AeadParameters parameters)
    {
        GcmBlockCipher c = new GcmBlockCipher(new AesFastEngine(), m);
        c.Init(forEncryption, parameters);
        return c;
    }

    // Encrypts P and checks ciphertext, GetMac(), and the tag appended to the output
    // stream against expectations; then decrypts and checks the recovered plaintext.
    private void checkTestCase(
        GcmBlockCipher encCipher,
        GcmBlockCipher decCipher,
        string testName,
        byte[] P,
        byte[] C,
        byte[] T)
    {
        byte[] enc = new byte[encCipher.GetOutputSize(P.Length)];
        int len = encCipher.ProcessBytes(P, 0, P.Length, enc, 0);
        len += encCipher.DoFinal(enc, len);

        if (enc.Length != len)
        {
            // Console.WriteLine("" + enc.Length + "/" + len);
            Fail("encryption reported incorrect length: " + testName);
        }

        byte[] mac = encCipher.GetMac();

        // Split the output into ciphertext body and trailing tag.
        byte[] data = new byte[P.Length];
        Array.Copy(enc, data, data.Length);
        byte[] tail = new byte[enc.Length - P.Length];
        Array.Copy(enc, P.Length, tail, 0, tail.Length);

        if (!AreEqual(C, data))
        {
            Fail("incorrect encrypt in: " + testName);
        }

        if (!AreEqual(T, mac))
        {
            Fail("GetMac() returned wrong mac in: " + testName);
        }

        if (!AreEqual(T, tail))
        {
            Fail("stream contained wrong mac in: " + testName);
        }

        // Decrypt path: DoFinal verifies the tag internally (presumably throwing on a
        // mismatch — confirm in GcmBlockCipher) before the plaintext comparison below.
        byte[] dec = new byte[decCipher.GetOutputSize(enc.Length)];
        len = decCipher.ProcessBytes(enc, 0, enc.Length, dec, 0);
        len += decCipher.DoFinal(dec, len);
        mac = decCipher.GetMac();

        data = new byte[C.Length];
        Array.Copy(dec, data, data.Length);

        if (!AreEqual(P, data))
        {
            Fail("incorrect decrypt in: " + testName);
        }
    }

    // Randomized round-trips, once per multiplier implementation per iteration.
    private void randomTests()
    {
        SecureRandom srng = new SecureRandom();
        for (int i = 0; i < 10; ++i)
        {
            randomTest(srng, null);
            randomTest(srng, new BasicGcmMultiplier());
            randomTest(srng, new Tables8kGcmMultiplier());
            randomTest(srng, new Tables64kGcmMultiplier());
        }
    }

    // Encrypt-then-decrypt round-trip with random key (128/192/256-bit), plaintext,
    // additional data, and IV; checks stream tag, recovered plaintext, and mac equality.
    private void randomTest(
        SecureRandom srng,
        IGcmMultiplier m)
    {
        int kLength = 16 + 8 * srng.Next(3);
        byte[] K = new byte[kLength];
        srng.NextBytes(K);

        int pLength = srng.Next(1024);
        byte[] P = new byte[pLength];
        srng.NextBytes(P);

        int aLength = srng.Next(1024);
        byte[] A = new byte[aLength];
        srng.NextBytes(A);

        int ivLength = 1 + srng.Next(1024);
        byte[] IV = new byte[ivLength];
        srng.NextBytes(IV);

        GcmBlockCipher cipher = new GcmBlockCipher(new AesFastEngine(), m);
        AeadParameters parameters = new AeadParameters(new KeyParameter(K), 16 * 8, IV, A);
        cipher.Init(true, parameters);
        byte[] C = new byte[cipher.GetOutputSize(P.Length)];
        int len = cipher.ProcessBytes(P, 0, P.Length, C, 0);
        len += cipher.DoFinal(C, len);

        if (C.Length != len)
        {
            // Console.WriteLine("" + C.Length + "/" + len);
            Fail("encryption reported incorrect length in randomised test");
        }

        byte[] encT = cipher.GetMac();
        byte[] tail = new byte[C.Length - P.Length];
        Array.Copy(C, P.Length, tail, 0, tail.Length);

        if (!AreEqual(encT, tail))
        {
            Fail("stream contained wrong mac in randomised test");
        }

        cipher.Init(false, parameters);
        byte[] decP = new byte[cipher.GetOutputSize(C.Length)];
        len = cipher.ProcessBytes(C, 0, C.Length, decP, 0);
        len += cipher.DoFinal(decP, len);

        if (!AreEqual(P, decP))
        {
            Fail("incorrect decrypt in randomised test");
        }

        byte[] decT = cipher.GetMac();
        if (!AreEqual(encT, decT))
        {
            Fail("decryption produced different mac from encryption");
        }
    }

    // Console entry point for running this fixture standalone.
    public static void Main(
        string[] args)
    {
        RunTest(new GcmTest());
    }

    // NUnit entry point; delegates to the SimpleTest harness.
    [Test]
    public void TestFunction()
    {
        string resultText = Perform().ToString();

        Assert.AreEqual(Name + ": Okay", resultText);
    }
}
}
| |
using System;
using System.Collections;
using System.Globalization;
using System.IO;
using Org.BouncyCastle.Asn1;
using Org.BouncyCastle.Asn1.CryptoPro;
using Org.BouncyCastle.Asn1.Nist;
using Org.BouncyCastle.Asn1.Pkcs;
using Org.BouncyCastle.Asn1.TeleTrust;
using Org.BouncyCastle.Asn1.X509;
using Org.BouncyCastle.Asn1.X9;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Crypto.Digests;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Engines;
using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.Utilities;
namespace Org.BouncyCastle.Security
{
/// <summary>
/// Signer Utility class contains methods that can not be specifically grouped into other classes.
/// </summary>
public sealed class SignerUtilities
{
// Private constructor: SignerUtilities is a pure static utility holder and
// must never be instantiated.
private SignerUtilities()
{
}
// Maps upper-cased algorithm aliases and OID strings to the canonical mechanism
// name (populated in the static constructor below).
internal static readonly IDictionary algorithms = Platform.CreateHashtable();
// Reverse lookup table — presumably canonical mechanism name back to its object
// identifier; its population is not visible in this excerpt — confirm.
internal static readonly IDictionary oids = Platform.CreateHashtable();
/// <summary>
/// Populates the alias table (upper-case name variants and dotted OID strings
/// mapped to canonical mechanism names) and the mechanism-name -> OID table.
/// </summary>
static SignerUtilities()
{
    // RSA PKCS#1 v1.5 signatures
    algorithms["MD2WITHRSA"] = "MD2withRSA";
    algorithms["MD2WITHRSAENCRYPTION"] = "MD2withRSA";
    algorithms[PkcsObjectIdentifiers.MD2WithRsaEncryption.Id] = "MD2withRSA";
    algorithms["MD4WITHRSA"] = "MD4withRSA";
    algorithms["MD4WITHRSAENCRYPTION"] = "MD4withRSA";
    algorithms[PkcsObjectIdentifiers.MD4WithRsaEncryption.Id] = "MD4withRSA";
    algorithms["MD5WITHRSA"] = "MD5withRSA";
    algorithms["MD5WITHRSAENCRYPTION"] = "MD5withRSA";
    algorithms[PkcsObjectIdentifiers.MD5WithRsaEncryption.Id] = "MD5withRSA";
    algorithms["SHA1WITHRSA"] = "SHA-1withRSA";
    algorithms["SHA1WITHRSAENCRYPTION"] = "SHA-1withRSA";
    algorithms[PkcsObjectIdentifiers.Sha1WithRsaEncryption.Id] = "SHA-1withRSA";
    algorithms["SHA-1WITHRSA"] = "SHA-1withRSA";
    algorithms["SHA224WITHRSA"] = "SHA-224withRSA";
    algorithms["SHA224WITHRSAENCRYPTION"] = "SHA-224withRSA";
    algorithms[PkcsObjectIdentifiers.Sha224WithRsaEncryption.Id] = "SHA-224withRSA";
    algorithms["SHA-224WITHRSA"] = "SHA-224withRSA";
    algorithms["SHA256WITHRSA"] = "SHA-256withRSA";
    algorithms["SHA256WITHRSAENCRYPTION"] = "SHA-256withRSA";
    algorithms[PkcsObjectIdentifiers.Sha256WithRsaEncryption.Id] = "SHA-256withRSA";
    algorithms["SHA-256WITHRSA"] = "SHA-256withRSA";
    algorithms["SHA384WITHRSA"] = "SHA-384withRSA";
    algorithms["SHA384WITHRSAENCRYPTION"] = "SHA-384withRSA";
    algorithms[PkcsObjectIdentifiers.Sha384WithRsaEncryption.Id] = "SHA-384withRSA";
    algorithms["SHA-384WITHRSA"] = "SHA-384withRSA";
    algorithms["SHA512WITHRSA"] = "SHA-512withRSA";
    algorithms["SHA512WITHRSAENCRYPTION"] = "SHA-512withRSA";
    algorithms[PkcsObjectIdentifiers.Sha512WithRsaEncryption.Id] = "SHA-512withRSA";
    algorithms["SHA-512WITHRSA"] = "SHA-512withRSA";

    // RSASSA-PSS
    algorithms["PSSWITHRSA"] = "PSSwithRSA";
    algorithms["RSASSA-PSS"] = "PSSwithRSA";
    algorithms[PkcsObjectIdentifiers.IdRsassaPss.Id] = "PSSwithRSA";
    algorithms["RSAPSS"] = "PSSwithRSA";
    algorithms["SHA1WITHRSAANDMGF1"] = "SHA-1withRSAandMGF1";
    algorithms["SHA-1WITHRSAANDMGF1"] = "SHA-1withRSAandMGF1";
    algorithms["SHA1WITHRSA/PSS"] = "SHA-1withRSAandMGF1";
    algorithms["SHA-1WITHRSA/PSS"] = "SHA-1withRSAandMGF1";
    algorithms["SHA224WITHRSAANDMGF1"] = "SHA-224withRSAandMGF1";
    algorithms["SHA-224WITHRSAANDMGF1"] = "SHA-224withRSAandMGF1";
    algorithms["SHA224WITHRSA/PSS"] = "SHA-224withRSAandMGF1";
    algorithms["SHA-224WITHRSA/PSS"] = "SHA-224withRSAandMGF1";
    algorithms["SHA256WITHRSAANDMGF1"] = "SHA-256withRSAandMGF1";
    algorithms["SHA-256WITHRSAANDMGF1"] = "SHA-256withRSAandMGF1";
    algorithms["SHA256WITHRSA/PSS"] = "SHA-256withRSAandMGF1";
    algorithms["SHA-256WITHRSA/PSS"] = "SHA-256withRSAandMGF1";
    algorithms["SHA384WITHRSAANDMGF1"] = "SHA-384withRSAandMGF1";
    algorithms["SHA-384WITHRSAANDMGF1"] = "SHA-384withRSAandMGF1";
    algorithms["SHA384WITHRSA/PSS"] = "SHA-384withRSAandMGF1";
    algorithms["SHA-384WITHRSA/PSS"] = "SHA-384withRSAandMGF1";
    algorithms["SHA512WITHRSAANDMGF1"] = "SHA-512withRSAandMGF1";
    algorithms["SHA-512WITHRSAANDMGF1"] = "SHA-512withRSAandMGF1";
    algorithms["SHA512WITHRSA/PSS"] = "SHA-512withRSAandMGF1";
    algorithms["SHA-512WITHRSA/PSS"] = "SHA-512withRSAandMGF1";

    // RSA with RIPEMD digests
    algorithms["RIPEMD128WITHRSA"] = "RIPEMD128withRSA";
    algorithms["RIPEMD128WITHRSAENCRYPTION"] = "RIPEMD128withRSA";
    algorithms[TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD128.Id] = "RIPEMD128withRSA";
    algorithms["RIPEMD160WITHRSA"] = "RIPEMD160withRSA";
    algorithms["RIPEMD160WITHRSAENCRYPTION"] = "RIPEMD160withRSA";
    algorithms[TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD160.Id] = "RIPEMD160withRSA";
    algorithms["RIPEMD256WITHRSA"] = "RIPEMD256withRSA";
    algorithms["RIPEMD256WITHRSAENCRYPTION"] = "RIPEMD256withRSA";
    algorithms[TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD256.Id] = "RIPEMD256withRSA";

    // Raw (digest-less) RSA
    algorithms["NONEWITHRSA"] = "RSA";
    algorithms["RSAWITHNONE"] = "RSA";
    algorithms["RAWRSA"] = "RSA";
    algorithms["RAWRSAPSS"] = "RAWRSASSA-PSS";
    algorithms["NONEWITHRSAPSS"] = "RAWRSASSA-PSS";
    algorithms["NONEWITHRSASSA-PSS"] = "RAWRSASSA-PSS";

    // DSA
    algorithms["NONEWITHDSA"] = "NONEwithDSA";
    algorithms["DSAWITHNONE"] = "NONEwithDSA";
    algorithms["RAWDSA"] = "NONEwithDSA";
    algorithms["DSA"] = "SHA-1withDSA";
    algorithms["DSAWITHSHA1"] = "SHA-1withDSA";
    algorithms["DSAWITHSHA-1"] = "SHA-1withDSA";
    algorithms["SHA/DSA"] = "SHA-1withDSA";
    algorithms["SHA1/DSA"] = "SHA-1withDSA";
    algorithms["SHA-1/DSA"] = "SHA-1withDSA";
    algorithms["SHA1WITHDSA"] = "SHA-1withDSA";
    algorithms["SHA-1WITHDSA"] = "SHA-1withDSA";
    algorithms[X9ObjectIdentifiers.IdDsaWithSha1.Id] = "SHA-1withDSA";
    algorithms["DSAWITHSHA224"] = "SHA-224withDSA";
    algorithms["DSAWITHSHA-224"] = "SHA-224withDSA";
    algorithms["SHA224/DSA"] = "SHA-224withDSA";
    algorithms["SHA-224/DSA"] = "SHA-224withDSA";
    algorithms["SHA224WITHDSA"] = "SHA-224withDSA";
    algorithms["SHA-224WITHDSA"] = "SHA-224withDSA";
    algorithms[NistObjectIdentifiers.DsaWithSha224.Id] = "SHA-224withDSA";
    algorithms["DSAWITHSHA256"] = "SHA-256withDSA";
    algorithms["DSAWITHSHA-256"] = "SHA-256withDSA";
    algorithms["SHA256/DSA"] = "SHA-256withDSA";
    algorithms["SHA-256/DSA"] = "SHA-256withDSA";
    algorithms["SHA256WITHDSA"] = "SHA-256withDSA";
    algorithms["SHA-256WITHDSA"] = "SHA-256withDSA";
    algorithms[NistObjectIdentifiers.DsaWithSha256.Id] = "SHA-256withDSA";
    algorithms["DSAWITHSHA384"] = "SHA-384withDSA";
    algorithms["DSAWITHSHA-384"] = "SHA-384withDSA";
    algorithms["SHA384/DSA"] = "SHA-384withDSA";
    algorithms["SHA-384/DSA"] = "SHA-384withDSA";
    algorithms["SHA384WITHDSA"] = "SHA-384withDSA";
    algorithms["SHA-384WITHDSA"] = "SHA-384withDSA";
    algorithms[NistObjectIdentifiers.DsaWithSha384.Id] = "SHA-384withDSA";
    algorithms["DSAWITHSHA512"] = "SHA-512withDSA";
    algorithms["DSAWITHSHA-512"] = "SHA-512withDSA";
    algorithms["SHA512/DSA"] = "SHA-512withDSA";
    algorithms["SHA-512/DSA"] = "SHA-512withDSA";
    algorithms["SHA512WITHDSA"] = "SHA-512withDSA";
    algorithms["SHA-512WITHDSA"] = "SHA-512withDSA";
    algorithms[NistObjectIdentifiers.DsaWithSha512.Id] = "SHA-512withDSA";

    // ECDSA
    algorithms["NONEWITHECDSA"] = "NONEwithECDSA";
    algorithms["ECDSAWITHNONE"] = "NONEwithECDSA";
    algorithms["ECDSA"] = "SHA-1withECDSA";
    algorithms["SHA1/ECDSA"] = "SHA-1withECDSA";
    algorithms["SHA-1/ECDSA"] = "SHA-1withECDSA";
    algorithms["ECDSAWITHSHA1"] = "SHA-1withECDSA";
    algorithms["ECDSAWITHSHA-1"] = "SHA-1withECDSA";
    algorithms["SHA1WITHECDSA"] = "SHA-1withECDSA";
    algorithms["SHA-1WITHECDSA"] = "SHA-1withECDSA";
    algorithms[X9ObjectIdentifiers.ECDsaWithSha1.Id] = "SHA-1withECDSA";
    algorithms[TeleTrusTObjectIdentifiers.ECSignWithSha1.Id] = "SHA-1withECDSA";
    algorithms["SHA224/ECDSA"] = "SHA-224withECDSA";
    algorithms["SHA-224/ECDSA"] = "SHA-224withECDSA";
    algorithms["ECDSAWITHSHA224"] = "SHA-224withECDSA";
    algorithms["ECDSAWITHSHA-224"] = "SHA-224withECDSA";
    algorithms["SHA224WITHECDSA"] = "SHA-224withECDSA";
    algorithms["SHA-224WITHECDSA"] = "SHA-224withECDSA";
    algorithms[X9ObjectIdentifiers.ECDsaWithSha224.Id] = "SHA-224withECDSA";
    algorithms["SHA256/ECDSA"] = "SHA-256withECDSA";
    algorithms["SHA-256/ECDSA"] = "SHA-256withECDSA";
    algorithms["ECDSAWITHSHA256"] = "SHA-256withECDSA";
    algorithms["ECDSAWITHSHA-256"] = "SHA-256withECDSA";
    algorithms["SHA256WITHECDSA"] = "SHA-256withECDSA";
    algorithms["SHA-256WITHECDSA"] = "SHA-256withECDSA";
    algorithms[X9ObjectIdentifiers.ECDsaWithSha256.Id] = "SHA-256withECDSA";
    algorithms["SHA384/ECDSA"] = "SHA-384withECDSA";
    algorithms["SHA-384/ECDSA"] = "SHA-384withECDSA";
    algorithms["ECDSAWITHSHA384"] = "SHA-384withECDSA";
    algorithms["ECDSAWITHSHA-384"] = "SHA-384withECDSA";
    algorithms["SHA384WITHECDSA"] = "SHA-384withECDSA";
    algorithms["SHA-384WITHECDSA"] = "SHA-384withECDSA";
    algorithms[X9ObjectIdentifiers.ECDsaWithSha384.Id] = "SHA-384withECDSA";
    algorithms["SHA512/ECDSA"] = "SHA-512withECDSA";
    algorithms["SHA-512/ECDSA"] = "SHA-512withECDSA";
    algorithms["ECDSAWITHSHA512"] = "SHA-512withECDSA";
    algorithms["ECDSAWITHSHA-512"] = "SHA-512withECDSA";
    algorithms["SHA512WITHECDSA"] = "SHA-512withECDSA";
    algorithms["SHA-512WITHECDSA"] = "SHA-512withECDSA";
    algorithms[X9ObjectIdentifiers.ECDsaWithSha512.Id] = "SHA-512withECDSA";

    // RIPEMD160 with ECDSA.
    // FIX: a corrupted entry previously re-assigned the key "SHA-512/ECDSA"
    // to "RIPEMD160withECDSA" here, clobbering the correct SHA-512withECDSA
    // alias installed above; that entry and two exact-duplicate assignments
    // have been removed.
    algorithms["RIPEMD160/ECDSA"] = "RIPEMD160withECDSA";
    algorithms["ECDSAWITHRIPEMD160"] = "RIPEMD160withECDSA";
    algorithms["RIPEMD160WITHECDSA"] = "RIPEMD160withECDSA";
    algorithms[TeleTrusTObjectIdentifiers.ECSignWithRipeMD160.Id] = "RIPEMD160withECDSA";

    // GOST R 34.10
    algorithms["GOST-3410"] = "GOST3410";
    algorithms["GOST-3410-94"] = "GOST3410";
    algorithms["GOST3411WITHGOST3410"] = "GOST3410";
    algorithms[CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x94.Id] = "GOST3410";
    algorithms["ECGOST-3410"] = "ECGOST3410";
    algorithms["ECGOST-3410-2001"] = "ECGOST3410";
    algorithms["GOST3411WITHECGOST3410"] = "ECGOST3410";
    algorithms[CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x2001.Id] = "ECGOST3410";

    // Canonical mechanism name -> OID (used by GetObjectIdentifier)
    oids["MD2withRSA"] = PkcsObjectIdentifiers.MD2WithRsaEncryption;
    oids["MD4withRSA"] = PkcsObjectIdentifiers.MD4WithRsaEncryption;
    oids["MD5withRSA"] = PkcsObjectIdentifiers.MD5WithRsaEncryption;
    oids["SHA-1withRSA"] = PkcsObjectIdentifiers.Sha1WithRsaEncryption;
    oids["SHA-224withRSA"] = PkcsObjectIdentifiers.Sha224WithRsaEncryption;
    oids["SHA-256withRSA"] = PkcsObjectIdentifiers.Sha256WithRsaEncryption;
    oids["SHA-384withRSA"] = PkcsObjectIdentifiers.Sha384WithRsaEncryption;
    oids["SHA-512withRSA"] = PkcsObjectIdentifiers.Sha512WithRsaEncryption;
    oids["PSSwithRSA"] = PkcsObjectIdentifiers.IdRsassaPss;
    oids["SHA-1withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
    oids["SHA-224withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
    oids["SHA-256withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
    oids["SHA-384withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
    oids["SHA-512withRSAandMGF1"] = PkcsObjectIdentifiers.IdRsassaPss;
    oids["RIPEMD128withRSA"] = TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD128;
    oids["RIPEMD160withRSA"] = TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD160;
    oids["RIPEMD256withRSA"] = TeleTrusTObjectIdentifiers.RsaSignatureWithRipeMD256;
    oids["SHA-1withDSA"] = X9ObjectIdentifiers.IdDsaWithSha1;
    oids["SHA-1withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha1;
    oids["SHA-224withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha224;
    oids["SHA-256withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha256;
    oids["SHA-384withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha384;
    oids["SHA-512withECDSA"] = X9ObjectIdentifiers.ECDsaWithSha512;
    oids["GOST3410"] = CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x94;
    oids["ECGOST3410"] = CryptoProObjectIdentifiers.GostR3411x94WithGostR3410x2001;
}
/// <summary>
/// Returns the DerObjectIdentifier registered for the given algorithm name.
/// </summary>
/// <param name="mechanism">A string representation of the encoding.</param>
/// <returns>A DerObjectIdentifier, null if the Oid is not available.</returns>
// TODO Don't really want to support this
public static DerObjectIdentifier GetObjectIdentifier(
    string mechanism)
{
    if (mechanism == null)
        throw new ArgumentNullException("mechanism");

    // Normalise to upper case, then resolve via the alias table when present.
    string key = mechanism.ToUpper(CultureInfo.InvariantCulture);
    string canonical = (string) algorithms[key];

    return (DerObjectIdentifier) oids[canonical != null ? canonical : key];
}
// Enumerates the canonical signer names for which an OID mapping is registered.
public static ICollection Algorithms
{
get { return oids.Keys; }
}
// Convenience overload: resolves default X.509 parameters via the dotted OID string.
public static Asn1Encodable GetDefaultX509Parameters(
DerObjectIdentifier id)
{
return GetDefaultX509Parameters(id.Id);
}
/// <summary>
/// Returns the default X.509 AlgorithmIdentifier parameters for the named
/// signature algorithm: RSASSA-PSS parameter structures for the PSS family,
/// DER NULL for everything else.
/// </summary>
/// <param name="algorithm">The signature algorithm name or alias.</param>
public static Asn1Encodable GetDefaultX509Parameters(
    string algorithm)
{
    if (algorithm == null)
        throw new ArgumentNullException("algorithm");

    string upper = algorithm.ToUpper(CultureInfo.InvariantCulture);
    string mechanism = (string) algorithms[upper];
    if (mechanism == null)
        mechanism = upper;

    if (mechanism == "PSSwithRSA")
    {
        // TODO The Sha1Digest here is a default. In JCE version, the actual digest
        // to be used can be overridden by subsequent parameter settings.
        return GetPssX509Parameters("SHA-1");
    }

    const string pssSuffix = "withRSAandMGF1";
    if (mechanism.EndsWith(pssSuffix))
    {
        // Digest name is everything before the "withRSAandMGF1" suffix.
        return GetPssX509Parameters(mechanism.Substring(0, mechanism.Length - pssSuffix.Length));
    }

    return DerNull.Instance;
}
// Builds an RSASSA-PSS parameter structure for the named digest: the same
// digest is used for hashing and MGF1, the salt length equals the digest
// size, and the trailer field is 1 (the standard 0xBC trailer).
private static Asn1Encodable GetPssX509Parameters(
string digestName)
{
AlgorithmIdentifier hashAlgorithm = new AlgorithmIdentifier(
DigestUtilities.GetObjectIdentifier(digestName), DerNull.Instance);
// TODO Is it possible for the MGF hash alg to be different from the PSS one?
AlgorithmIdentifier maskGenAlgorithm = new AlgorithmIdentifier(
PkcsObjectIdentifiers.IdMgf1, hashAlgorithm);
int saltLen = DigestUtilities.GetDigest(digestName).GetDigestSize();
return new RsassaPssParameters(hashAlgorithm, maskGenAlgorithm,
new DerInteger(saltLen), new DerInteger(1));
}
// Convenience overload: resolves the signer via the dotted OID string.
public static ISigner GetSigner(
DerObjectIdentifier id)
{
return GetSigner(id.Id);
}
/// <summary>
/// Creates an ISigner for the given algorithm name (any recognised alias or OID string).
/// </summary>
/// <param name="algorithm">The signature algorithm name.</param>
/// <returns>An uninitialised ISigner implementing the requested mechanism.</returns>
/// <exception cref="ArgumentNullException">If algorithm is null.</exception>
/// <exception cref="SecurityUtilityException">If the algorithm is not recognised.</exception>
public static ISigner GetSigner(
    string algorithm)
{
    if (algorithm == null)
        throw new ArgumentNullException("algorithm");

    algorithm = algorithm.ToUpper(CultureInfo.InvariantCulture);

    // Resolve aliases to the canonical mechanism name; unknown names pass through
    // so that the upper-case-only mechanisms below (ECNR, ISO9796-2) still match.
    string mechanism = (string) algorithms[algorithm];
    if (mechanism == null)
        mechanism = algorithm;

    // String switch uses ordinal comparison, matching the original Equals() chain.
    switch (mechanism)
    {
        case "RSA":
            return new RsaDigestSigner(new NullDigest());
        case "MD2withRSA":
            return new RsaDigestSigner(new MD2Digest());
        case "MD4withRSA":
            return new RsaDigestSigner(new MD4Digest());
        case "MD5withRSA":
            return new RsaDigestSigner(new MD5Digest());
        case "SHA-1withRSA":
            return new RsaDigestSigner(new Sha1Digest());
        case "SHA-224withRSA":
            return new RsaDigestSigner(new Sha224Digest());
        case "SHA-256withRSA":
            return new RsaDigestSigner(new Sha256Digest());
        case "SHA-384withRSA":
            return new RsaDigestSigner(new Sha384Digest());
        case "SHA-512withRSA":
            return new RsaDigestSigner(new Sha512Digest());
        case "RIPEMD128withRSA":
            return new RsaDigestSigner(new RipeMD128Digest());
        case "RIPEMD160withRSA":
            return new RsaDigestSigner(new RipeMD160Digest());
        case "RIPEMD256withRSA":
            return new RsaDigestSigner(new RipeMD256Digest());
        case "RAWRSASSA-PSS":
            // TODO Add support for other parameter settings
            return PssSigner.CreateRawSigner(new RsaBlindedEngine(), new Sha1Digest());
        case "PSSwithRSA":
            // TODO The Sha1Digest here is a default. In JCE version, the actual digest
            // to be used can be overridden by subsequent parameter settings.
            return new PssSigner(new RsaBlindedEngine(), new Sha1Digest());
        case "SHA-1withRSAandMGF1":
            return new PssSigner(new RsaBlindedEngine(), new Sha1Digest());
        case "SHA-224withRSAandMGF1":
            return new PssSigner(new RsaBlindedEngine(), new Sha224Digest());
        case "SHA-256withRSAandMGF1":
            return new PssSigner(new RsaBlindedEngine(), new Sha256Digest());
        case "SHA-384withRSAandMGF1":
            return new PssSigner(new RsaBlindedEngine(), new Sha384Digest());
        case "SHA-512withRSAandMGF1":
            return new PssSigner(new RsaBlindedEngine(), new Sha512Digest());
        case "NONEwithDSA":
            return new DsaDigestSigner(new DsaSigner(), new NullDigest());
        case "SHA-1withDSA":
            return new DsaDigestSigner(new DsaSigner(), new Sha1Digest());
        case "SHA-224withDSA":
            return new DsaDigestSigner(new DsaSigner(), new Sha224Digest());
        case "SHA-256withDSA":
            return new DsaDigestSigner(new DsaSigner(), new Sha256Digest());
        case "SHA-384withDSA":
            return new DsaDigestSigner(new DsaSigner(), new Sha384Digest());
        case "SHA-512withDSA":
            return new DsaDigestSigner(new DsaSigner(), new Sha512Digest());
        case "NONEwithECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new NullDigest());
        case "SHA-1withECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new Sha1Digest());
        case "SHA-224withECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new Sha224Digest());
        case "SHA-256withECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new Sha256Digest());
        case "SHA-384withECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new Sha384Digest());
        case "SHA-512withECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new Sha512Digest());
        case "RIPEMD160withECDSA":
            return new DsaDigestSigner(new ECDsaSigner(), new RipeMD160Digest());
        case "SHA1WITHECNR":
            return new DsaDigestSigner(new ECNRSigner(), new Sha1Digest());
        case "SHA224WITHECNR":
            return new DsaDigestSigner(new ECNRSigner(), new Sha224Digest());
        case "SHA256WITHECNR":
            return new DsaDigestSigner(new ECNRSigner(), new Sha256Digest());
        case "SHA384WITHECNR":
            return new DsaDigestSigner(new ECNRSigner(), new Sha384Digest());
        case "SHA512WITHECNR":
            return new DsaDigestSigner(new ECNRSigner(), new Sha512Digest());
        case "GOST3410":
            return new Gost3410DigestSigner(new Gost3410Signer(), new Gost3411Digest());
        case "ECGOST3410":
            return new Gost3410DigestSigner(new ECGost3410Signer(), new Gost3411Digest());
        case "SHA1WITHRSA/ISO9796-2":
            return new Iso9796d2Signer(new RsaBlindedEngine(), new Sha1Digest(), true);
        case "MD5WITHRSA/ISO9796-2":
            return new Iso9796d2Signer(new RsaBlindedEngine(), new MD5Digest(), true);
        case "RIPEMD160WITHRSA/ISO9796-2":
            return new Iso9796d2Signer(new RsaBlindedEngine(), new RipeMD160Digest(), true);
        default:
            throw new SecurityUtilityException("Signer " + algorithm + " not recognised.");
    }
}
// Maps an OID back to the canonical mechanism name; null if unknown.
public static string GetEncodingName(
DerObjectIdentifier oid)
{
return (string) algorithms[oid.Id];
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Connectors.Hypergrid;
using System;
using System.Collections.Generic;
using System.Reflection;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Region.CoreModules.Avatar.Lure
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "HGLureModule")]
public class HGLureModule : ISharedRegionModule
{
private static readonly ILog m_log = LogManager.GetLogger(
MethodBase.GetCurrentMethod().DeclaringType);
// Scenes this shared module is attached to.
private readonly ThreadedClasses.RwLockedList<Scene> m_scenes = new ThreadedClasses.RwLockedList<Scene>();
// Resolved lazily from the first loaded region; null disables lure forwarding.
private IMessageTransferModule m_TransferModule = null;
private bool m_Enabled = false;
// Gatekeeper URI of this grid, appended to lure messages so remote grids can route back.
private string m_ThisGridURL;
// Lures awaiting acceptance, keyed by IM session id; entries expire automatically.
private ThreadedClasses.ExpiringCache<UUID, GridInstantMessage> m_PendingLures = new ThreadedClasses.ExpiringCache<UUID, GridInstantMessage>(30);
/// <summary>
/// Enables the module only when [Messaging] LureModule is set to "HGLureModule",
/// and reads the gatekeeper URI for this grid from the configuration.
/// </summary>
public void Initialise(IConfigSource config)
{
    IConfig messaging = config.Configs["Messaging"];
    if (messaging == null)
        return;
    if (messaging.GetString("LureModule", string.Empty) != "HGLureModule")
        return;

    m_Enabled = true;

    m_ThisGridURL = Util.GetConfigVarFromSections<string>(config, "GatekeeperURI",
        new string[] { "Startup", "Hypergrid", "Messaging" }, String.Empty);
    // Legacy. Remove soon!
    m_ThisGridURL = messaging.GetString("Gatekeeper", m_ThisGridURL);
    m_log.DebugFormat("[LURE MODULE]: {0} enabled", Name);
}
// Registers this module with a new region and subscribes to its IM/client events.
public void AddRegion(Scene scene)
{
if (!m_Enabled)
return;
m_scenes.Add(scene);
scene.EventManager.OnIncomingInstantMessage += OnIncomingInstantMessage;
scene.EventManager.OnNewClient += OnNewClient;
}
// Lazily resolves the message transfer module from the first loaded region;
// disables the module entirely if none is available.
public void RegionLoaded(Scene scene)
{
if (!m_Enabled)
return;
if (m_TransferModule == null)
{
m_TransferModule =
scene.RequestModuleInterface<IMessageTransferModule>();
if (m_TransferModule == null)
{
m_log.Error("[LURE MODULE]: No message transfer module, lures will not work!");
// NOTE(review): only the current scene's handlers are unsubscribed here;
// scenes added earlier keep their subscriptions — confirm whether intended.
m_Enabled = false;
m_scenes.Clear();
scene.EventManager.OnNewClient -= OnNewClient;
scene.EventManager.OnIncomingInstantMessage -= OnIncomingInstantMessage;
}
}
}
// Detaches this module from a region and unsubscribes its event handlers.
public void RemoveRegion(Scene scene)
{
if (!m_Enabled)
return;
m_scenes.Remove(scene);
scene.EventManager.OnNewClient -= OnNewClient;
scene.EventManager.OnIncomingInstantMessage -= OnIncomingInstantMessage;
}
// Hooks the per-client lure-related events when a new client connects.
void OnNewClient(IClientAPI client)
{
client.OnInstantMessage += OnInstantMessage;
client.OnStartLure += OnStartLure;
client.OnTeleportLureRequest += OnTeleportLureRequest;
}
// Nothing to do after all modules are initialised.
public void PostInitialise()
{
}
// Nothing to release on shutdown.
public void Close()
{
}
public string Name
{
get { return "HGLureModule"; }
}
// Non-replaceable module.
public Type ReplaceableInterface
{
get { return null; }
}
// Intentionally empty: per-client IMs are handled by the IM module; this
// handler only exists so the subscription in OnNewClient is balanced.
void OnInstantMessage(IClientAPI client, GridInstantMessage im)
{
}
// Handles IMs arriving from other regions/grids: teleport-lure IMs are cached
// for later acceptance and then forwarded to the local recipient.
void OnIncomingInstantMessage(GridInstantMessage im)
{
if (im.dialog == (byte)InstantMessageDialog.RequestTeleport
|| im.dialog == (byte)InstantMessageDialog.GodLikeRequestTeleport)
{
UUID sessionID = new UUID(im.imSessionID);
try
{
m_PendingLures.Add(sessionID, im, 7200); // 2 hours
m_log.DebugFormat("[HG LURE MODULE]: RequestTeleport sessionID={0}, regionID={1}, message={2}", im.imSessionID, im.RegionID, im.message);
}
catch(ArgumentException)
{
// Duplicate session id: the lure is already cached, nothing to do.
}
// Forward. We do this, because the IM module explicitly rejects
// IMs of this type
if (m_TransferModule != null)
m_TransferModule.SendInstantMessage(im, delegate(bool success) { });
}
}
/// <summary>
/// Starts a teleport lure: caches the invite (keyed by a fresh session id) and
/// sends a RequestTeleport IM to the target, with this grid's gatekeeper URI
/// appended to the message so remote grids can route the teleport back here.
/// </summary>
public void OnStartLure(byte lureType, string message, UUID targetid, IClientAPI client)
{
    if (!(client.Scene is Scene))
        return;

    Scene scene = (Scene)(client.Scene);
    ScenePresence presence = scene.GetScenePresence(client.AgentId);
    // FIX: GetScenePresence can return null (e.g. the client has already left
    // the scene); previously this threw a NullReferenceException below when
    // reading presence.AbsolutePosition.
    if (presence == null)
        return;

    message += "@" + m_ThisGridURL;

    m_log.DebugFormat("[HG LURE MODULE]: TP invite with message {0}", message);

    UUID sessionID = UUID.Random();

    GridInstantMessage m = new GridInstantMessage(scene, client.AgentId,
            client.FirstName+" "+client.LastName, targetid,
            (byte)InstantMessageDialog.RequestTeleport, false,
            message, sessionID, false, presence.AbsolutePosition,
            new Byte[0], true);
    m.RegionID = client.Scene.RegionInfo.RegionID.Guid;

    m_log.DebugFormat("[HG LURE MODULE]: RequestTeleport sessionID={0}, regionID={1}, message={2}", m.imSessionID, m.RegionID, m.message);
    m_PendingLures.Add(sessionID, m, 7200); // 2 hours

    if (m_TransferModule != null)
    {
        m_TransferModule.SendInstantMessage(m,
            delegate(bool success) { });
    }
}
/// <summary>
/// Handles the client accepting a lure: looks up the cached invite by its
/// session id and, if found, removes it and performs the teleport.
/// </summary>
public void OnTeleportLureRequest(UUID lureID, uint teleportFlags, IClientAPI client)
{
    if (!(client.Scene is Scene))
        return;

    GridInstantMessage im;
    if (!m_PendingLures.TryGetValue(lureID, out im))
    {
        m_log.DebugFormat("[HG LURE MODULE]: pending lure {0} not found", lureID);
        return;
    }

    m_PendingLures.Remove(lureID);
    Lure(client, teleportFlags, im);
}
// Executes an accepted lure. If the target region is known to the local grid
// service a normal teleport is requested; otherwise the gatekeeper URL embedded
// in the lure message (after the last '@') is used for a hypergrid teleport.
private void Lure(IClientAPI client, uint teleportflags, GridInstantMessage im)
{
Scene scene = (Scene)(client.Scene);
GridRegion region = scene.GridService.GetRegionByUUID(scene.RegionInfo.ScopeID, new UUID(im.RegionID));
if (region != null)
// Local region: offset by half a meter so the avatar lands inside the cell.
scene.RequestTeleportLocation(client, region.RegionHandle, im.Position + new Vector3(0.5f, 0.5f, 0f), Vector3.UnitX, teleportflags);
else // we don't have that region here. Check if it's HG
{
string[] parts = im.message.Split(new char[] { '@' });
if (parts.Length > 1)
{
string url = parts[parts.Length - 1]; // the last part
// Only treat it as hypergrid when the URL is not our own grid.
if (url.Trim(new char[] {'/'}) != m_ThisGridURL.Trim(new char[] {'/'}))
{
m_log.DebugFormat("[HG LURE MODULE]: Luring agent to grid {0} region {1} position {2}", url, im.RegionID, im.Position);
GatekeeperServiceConnector gConn = new GatekeeperServiceConnector();
GridRegion gatekeeper = new GridRegion();
gatekeeper.ServerURI = url;
string homeURI = scene.GetAgentHomeURI(client.AgentId);
string message;
GridRegion finalDestination = gConn.GetHyperlinkRegion(gatekeeper, new UUID(im.RegionID), client.AgentId, homeURI, out message);
if (finalDestination != null)
{
ScenePresence sp = scene.GetScenePresence(client.AgentId);
IEntityTransferModule transferMod = scene.RequestModuleInterface<IEntityTransferModule>();
if (transferMod != null && sp != null)
{
// Any advisory message from the gatekeeper is shown before the jump.
if (message != null)
sp.ControllingClient.SendAgentAlertMessage(message, true);
transferMod.DoTeleport(
sp, gatekeeper, finalDestination, im.Position + new Vector3(0.5f, 0.5f, 0f),
Vector3.UnitX, teleportflags);
}
}
else
{
m_log.InfoFormat("[HG LURE MODULE]: Lure failed: {0}", message);
client.SendAgentAlertMessage(message, true);
}
}
}
}
}
}
}
| |
using System;
using System.Data;
using Csla;
using Csla.Data;
using ParentLoad.DataAccess;
using ParentLoad.DataAccess.ERLevel;
namespace ParentLoad.Business.ERLevel
{
/// <summary>
/// A09_Region_ReChild (editable child object).<br/>
/// This is a generated base class of <see cref="A09_Region_ReChild"/> business object.
/// </summary>
/// <remarks>
/// This class is an item of <see cref="A08_Region"/> collection.
/// </remarks>
[Serializable]
public partial class A09_Region_ReChild : BusinessBase<A09_Region_ReChild>
{
#region State Fields
[NotUndoable]
[NonSerialized]
// Parent key (Region_ID2) read during Fetch; not part of the undo/serialization state.
internal int region_ID2 = 0;
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="Region_Child_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> Region_Child_NameProperty = RegisterProperty<string>(p => p.Region_Child_Name, "Region Child Name");
/// <summary>
/// Gets or sets the Region Child Name.
/// </summary>
/// <value>The Region Child Name.</value>
public string Region_Child_Name
{
// CSLA-managed property: Get/SetProperty route through authorization and rules.
get { return GetProperty(Region_Child_NameProperty); }
set { SetProperty(Region_Child_NameProperty, value); }
}
#endregion
#region Factory Methods
/// <summary>
/// Factory method. Creates a new <see cref="A09_Region_ReChild"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="A09_Region_ReChild"/> object.</returns>
internal static A09_Region_ReChild NewA09_Region_ReChild()
{
return DataPortal.CreateChild<A09_Region_ReChild>();
}
/// <summary>
/// Factory method. Loads a <see cref="A09_Region_ReChild"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <returns>A reference to the fetched <see cref="A09_Region_ReChild"/> object.</returns>
internal static A09_Region_ReChild GetA09_Region_ReChild(SafeDataReader dr)
{
A09_Region_ReChild obj = new A09_Region_ReChild();
// show the framework that this is a child object
obj.MarkAsChild();
obj.Fetch(dr);
// mark as unchanged so the object is not re-inserted on save
obj.MarkOld();
// check all object rules and property rules
obj.BusinessRules.CheckRules();
return obj;
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="A09_Region_ReChild"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public A09_Region_ReChild()
{
// Use factory methods and do not use direct creation.
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="A09_Region_ReChild"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
// DataPortal hook before default CSLA child creation.
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="A09_Region_ReChild"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
// Value properties
// LoadProperty bypasses authorization and business-rule checks during fetch.
LoadProperty(Region_Child_NameProperty, dr.GetString("Region_Child_Name"));
// parent properties
region_ID2 = dr.GetInt32("Region_ID2");
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
}
/// <summary>
/// Inserts a new <see cref="A09_Region_ReChild"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(A08_Region parent)
{
using (var dalManager = DalFactoryParentLoad.GetManager())
{
var args = new DataPortalHookArgs();
OnInsertPre(args);
var dal = dalManager.GetProvider<IA09_Region_ReChildDal>();
// BypassPropertyChecks avoids re-running rules while reading properties for the DAL.
using (BypassPropertyChecks)
{
dal.Insert(
parent.Region_ID,
Region_Child_Name
);
}
OnInsertPost(args);
}
}
/// <summary>
/// Updates in the database all changes made to the <see cref="A09_Region_ReChild"/> object.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update(A08_Region parent)
{
// Skip the round-trip when nothing changed.
if (!IsDirty)
return;
using (var dalManager = DalFactoryParentLoad.GetManager())
{
var args = new DataPortalHookArgs();
OnUpdatePre(args);
var dal = dalManager.GetProvider<IA09_Region_ReChildDal>();
using (BypassPropertyChecks)
{
dal.Update(
parent.Region_ID,
Region_Child_Name
);
}
OnUpdatePost(args);
}
}
/// <summary>
/// Self deletes the <see cref="A09_Region_ReChild"/> object from database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf(A08_Region parent)
{
using (var dalManager = DalFactoryParentLoad.GetManager())
{
var args = new DataPortalHookArgs();
OnDeletePre(args);
var dal = dalManager.GetProvider<IA09_Region_ReChildDal>();
using (BypassPropertyChecks)
{
// Deletion is keyed on the parent id only (single child row per parent).
dal.Delete(parent.Region_ID);
}
OnDeletePost(args);
}
}
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in Child_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
#endregion
}
}
| |
/*
* Farseer Physics Engine:
* Copyright (c) 2012 Ian Qvist
*
* Original source Box2D:
* Copyright (c) 2006-2011 Erin Catto http://www.box2d.org
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
//#define USE_IGNORE_CCD_CATEGORIES
using System;
using System.Collections.Generic;
using System.Diagnostics;
using FarseerPhysics.Collision;
using FarseerPhysics.Collision.Shapes;
using FarseerPhysics.Common;
using FarseerPhysics.Dynamics.Contacts;
using Microsoft.Xna.Framework;
namespace FarseerPhysics.Dynamics
{
/// <summary>
/// Bit-flag collision categories used by the fixture filtering system.
/// Each CatN occupies a single bit; combine them with the bitwise OR operator.
/// </summary>
[Flags]
public enum Category
{
    None = 0,
    // int.MaxValue sets all 31 usable bits (the sign bit is left clear).
    All = int.MaxValue,
    Cat1 = 1 << 0,
    Cat2 = 1 << 1,
    Cat3 = 1 << 2,
    Cat4 = 1 << 3,
    Cat5 = 1 << 4,
    Cat6 = 1 << 5,
    Cat7 = 1 << 6,
    Cat8 = 1 << 7,
    Cat9 = 1 << 8,
    Cat10 = 1 << 9,
    Cat11 = 1 << 10,
    Cat12 = 1 << 11,
    Cat13 = 1 << 12,
    Cat14 = 1 << 13,
    Cat15 = 1 << 14,
    Cat16 = 1 << 15,
    Cat17 = 1 << 16,
    Cat18 = 1 << 17,
    Cat19 = 1 << 18,
    Cat20 = 1 << 19,
    Cat21 = 1 << 20,
    Cat22 = 1 << 21,
    Cat23 = 1 << 22,
    Cat24 = 1 << 23,
    Cat25 = 1 << 24,
    Cat26 = 1 << 25,
    Cat27 = 1 << 26,
    Cat28 = 1 << 27,
    Cat29 = 1 << 28,
    Cat30 = 1 << 29,
    Cat31 = 1 << 30
}
/// <summary>
/// This proxy is used internally to connect fixtures to the broad-phase.
/// One proxy exists per shape child; see Fixture.createProxies.
/// </summary>
public struct FixtureProxy
{
    // Fat AABB registered with the broad-phase (may be enlarged/stale).
    public AABB AABB;
    // Index of the shape child this proxy covers.
    public int childIndex;
    // Owning fixture.
    public Fixture fixture;
    // Id assigned by DynamicTreeBroadPhase.addProxy; -1 after destroyProxies.
    public int proxyId;
}
/// <summary>
/// A fixture is used to attach a Shape to a body for collision detection. A fixture
/// inherits its transform from its parent. Fixtures hold additional non-geometric data
/// such as friction, collision filters, etc.
/// Fixtures are created via Body.CreateFixture.
/// Warning: You cannot reuse fixtures.
/// </summary>
public class Fixture : IDisposable
{
    #region Properties/Fields/Events

    /// <summary>Broad-phase proxies for this fixture, one per shape child. Populated by createProxies.</summary>
    public FixtureProxy[] proxies;

    /// <summary>Number of valid entries in <see cref="proxies"/>; 0 while detached from the broad-phase.</summary>
    public int proxyCount;

    /// <summary>Categories this fixture ignores during continuous collision detection.</summary>
    public Category ignoreCCDWith;

    /// <summary>
    /// Defaults to 0
    ///
    /// If Settings.useFPECollisionCategories is set to false:
    /// Collision groups allow a certain group of objects to never collide (negative)
    /// or always collide (positive). Zero means no collision group. Non-zero group
    /// filtering always wins against the mask bits.
    ///
    /// If Settings.useFPECollisionCategories is set to true:
    /// If 2 fixtures are in the same collision group, they will not collide.
    /// </summary>
    public short collisionGroup
    {
        set
        {
            // Avoid the (relatively expensive) refilter when the value is unchanged.
            if( _collisionGroup == value )
                return;

            _collisionGroup = value;
            refilter();
        }
        get { return _collisionGroup; }
    }

    /// <summary>
    /// Defaults to Category.All
    ///
    /// The collision mask bits. This states the categories that this
    /// fixture would accept for collision.
    /// Use Settings.UseFPECollisionCategories to change the behavior.
    /// </summary>
    public Category collidesWith
    {
        get { return _collidesWith; }
        set
        {
            // Avoid refiltering contacts when nothing changed.
            if( _collidesWith == value )
                return;

            _collidesWith = value;
            refilter();
        }
    }

    /// <summary>
    /// The collision categories this fixture is a part of.
    ///
    /// If Settings.UseFPECollisionCategories is set to false:
    /// Defaults to Category.Cat1
    ///
    /// If Settings.UseFPECollisionCategories is set to true:
    /// Defaults to Category.All
    /// </summary>
    public Category collisionCategories
    {
        get { return _collisionCategories; }
        set
        {
            // Avoid refiltering contacts when nothing changed.
            if( _collisionCategories == value )
                return;

            _collisionCategories = value;
            refilter();
        }
    }

    /// <summary>
    /// Get the child Shape. You can modify the child Shape, however you should not change the
    /// number of vertices because this will crash some collision caching mechanisms.
    /// Note: the fixture stores a clone of the shape passed at construction time.
    /// </summary>
    /// <value>The shape.</value>
    public Shape shape { get; internal set; }

    /// <summary>
    /// Gets or sets a value indicating whether this fixture is a sensor.
    /// </summary>
    /// <value><c>true</c> if this instance is a sensor; otherwise, <c>false</c>.</value>
    public bool isSensor
    {
        get { return _isSensor; }
        set
        {
            // Wake the parent body; presumably so existing contacts pick up the
            // sensor change on the next step — TODO confirm against solver code.
            if( body != null )
                body.isAwake = true;

            _isSensor = value;
        }
    }

    /// <summary>
    /// Get the parent body of this fixture. This is null if the fixture is not attached.
    /// </summary>
    /// <value>The body.</value>
    public Body body { get; internal set; }

    /// <summary>
    /// Set the user data. Use this to store your application specific data.
    /// Cleared to null in destroy() to avoid stale references.
    /// </summary>
    /// <value>The user data.</value>
    public object userData;

    /// <summary>
    /// Set the coefficient of friction. This will _not_ change the friction of existing contacts.
    /// </summary>
    /// <value>The friction.</value>
    public float friction
    {
        get { return _friction; }
        set
        {
            // NaN friction would silently corrupt the solver; fail fast in debug builds.
            Debug.Assert( !float.IsNaN( value ) );
            _friction = value;
        }
    }

    /// <summary>
    /// Set the coefficient of restitution. This will not change the restitution of existing contacts.
    /// </summary>
    /// <value>The restitution.</value>
    public float restitution
    {
        get { return _restitution; }
        set
        {
            // NaN restitution would silently corrupt the solver; fail fast in debug builds.
            Debug.Assert( !float.IsNaN( value ) );
            _restitution = value;
        }
    }

    /// <summary>
    /// Gets a unique ID for this fixture.
    /// Note: the backing counter is [ThreadStatic], so ids are only unique per thread.
    /// </summary>
    /// <value>The fixture id.</value>
    public int fixtureId { get; internal set; }

    /// <summary>
    /// Fires after two shapes have collided and are solved. This gives you a chance to get the impact force.
    /// </summary>
    public AfterCollisionEventHandler afterCollision;

    /// <summary>
    /// Fires when two fixtures are close to each other.
    /// Due to how the broadphase works, this can be quite inaccurate as shapes are approximated using AABBs.
    /// </summary>
    public BeforeCollisionEventHandler beforeCollision;

    /// <summary>
    /// Fires when two shapes collide and a contact is created between them.
    /// Note: the first fixture argument is always the fixture that the delegate is subscribed to.
    /// </summary>
    public OnCollisionEventHandler onCollision;

    /// <summary>
    /// Fires when two shapes separate and a contact is removed between them.
    /// Note: this can in some cases be called multiple times, as a fixture can have multiple contacts.
    /// Note: the first fixture argument is always the fixture that the delegate is subscribed to.
    /// </summary>
    public OnSeparationEventHandler onSeparation;

    // Per-thread id source for fixtureId; see the fixtureId doc remark.
    [ThreadStatic]
    static int _fixtureIdCounter;

    bool _isSensor;
    float _friction;
    float _restitution;

    internal Category _collidesWith;
    internal Category _collisionCategories;
    internal short _collisionGroup;
    // Ids of fixtures this fixture explicitly refuses to collide with
    // (see ignoreCollisionWith / restoreCollisionWith).
    internal HashSet<int> _collisionIgnores;

    #endregion


    /// <summary>
    /// Bare constructor: assigns an id and the filter/material defaults from Settings.
    /// Used directly only by cloneOnto; the public path is the (Body, Shape) constructor.
    /// </summary>
    internal Fixture()
    {
        fixtureId = _fixtureIdCounter++;

        _collisionCategories = Settings.defaultFixtureCollisionCategories;
        _collidesWith = Settings.defaultFixtureCollidesWith;
        _collisionGroup = 0;
        _collisionIgnores = new HashSet<int>();

        ignoreCCDWith = Settings.defaultFixtureIgnoreCCDWith;

        //Fixture defaults
        friction = 0.2f;
        restitution = 0;
    }

    /// <summary>
    /// Creates a fixture attached to <paramref name="body"/>, cloning <paramref name="shape"/>,
    /// and registers it with the body/world (see registerFixture).
    /// </summary>
    internal Fixture( Body body, Shape shape, object userData = null ) : this()
    {
#if DEBUG
        // Debug bookkeeping: mark polygon vertices as body-attached so misuse can be detected.
        if( shape.shapeType == ShapeType.Polygon )
            ( (PolygonShape)shape ).vertices.attachedToBody = true;
#endif

        this.body = body;
        this.userData = userData;
        this.shape = shape.clone();

        registerFixture();
    }


    #region IDisposable Members

    public bool IsDisposed { get; set; }

    /// <summary>
    /// Removes this fixture from its body. Idempotent: subsequent calls are no-ops.
    /// </summary>
    public void Dispose()
    {
        if( !IsDisposed )
        {
            body.destroyFixture( this );
            IsDisposed = true;
            GC.SuppressFinalize( this );
        }
    }

    #endregion


    /// <summary>
    /// Restores collisions between this fixture and the provided fixture.
    /// </summary>
    /// <param name="fixture">The fixture.</param>
    public void restoreCollisionWith( Fixture fixture )
    {
        if( _collisionIgnores.Contains( fixture.fixtureId ) )
        {
            _collisionIgnores.Remove( fixture.fixtureId );
            refilter();
        }
    }


    /// <summary>
    /// Ignores collisions between this fixture and the provided fixture.
    /// Note: the ignore is one-sided; it is stored only on this fixture.
    /// </summary>
    /// <param name="fixture">The fixture.</param>
    public void ignoreCollisionWith( Fixture fixture )
    {
        if( !_collisionIgnores.Contains( fixture.fixtureId ) )
        {
            _collisionIgnores.Add( fixture.fixtureId );
            refilter();
        }
    }


    /// <summary>
    /// Determines whether collisions are ignored between this fixture and the provided fixture.
    /// </summary>
    /// <param name="fixture">The fixture.</param>
    /// <returns>
    /// 	<c>true</c> if the fixture is ignored; otherwise, <c>false</c>.
    /// </returns>
    public bool isFixtureIgnored( Fixture fixture )
    {
        return _collisionIgnores.Contains( fixture.fixtureId );
    }


    /// <summary>
    /// Contacts are persistant and will keep being persistant unless they are
    /// flagged for filtering.
    /// This methods flags all contacts associated with the body for filtering,
    /// then touches this fixture's broad-phase proxies so new pairs can form.
    /// </summary>
    void refilter()
    {
        // Flag associated contacts for filtering.
        ContactEdge edge = body.contactList;
        while( edge != null )
        {
            Contact contact = edge.contact;
            Fixture fixtureA = contact.fixtureA;
            Fixture fixtureB = contact.fixtureB;
            if( fixtureA == this || fixtureB == this )
            {
                contact.filterFlag = true;
            }

            edge = edge.next;
        }

        // Body not yet in a world: nothing to re-pair in the broad-phase.
        World world = body._world;
        if( world == null )
        {
            return;
        }

        // Touch each proxy so that new pairs may be created
        var broadPhase = world.contactManager.broadPhase;
        for( var i = 0; i < proxyCount; ++i )
            broadPhase.touchProxy( proxies[i].proxyId );
    }

    /// <summary>
    /// Attaches this fixture to its body: allocates proxy storage, creates broad-phase
    /// proxies (if the body is enabled), updates mass data, and notifies the world.
    /// </summary>
    void registerFixture()
    {
        // Reserve proxy space
        proxies = new FixtureProxy[shape.childCount];
        proxyCount = 0;

        if( body.enabled )
        {
            var broadPhase = body._world.contactManager.broadPhase;
            createProxies( broadPhase, ref body._xf );
        }

        body.fixtureList.Add( this );

        // Adjust mass properties if needed.
        if( shape._density > 0.0f )
            body.resetMassData();

        // Let the world know we have a new fixture. This will cause new contacts
        // to be created at the beginning of the next time step.
        body._world._worldHasNewFixture = true;

        //FPE: Added event
        if( body._world.onFixtureAdded != null )
            body._world.onFixtureAdded( this );
    }

    /// <summary>
    /// Test a point for containment in this fixture.
    /// </summary>
    /// <param name="point">A point in world coordinates.</param>
    /// <returns></returns>
    public bool testPoint( ref Vector2 point )
    {
        return shape.testPoint( ref body._xf, ref point );
    }

    /// <summary>
    /// Cast a ray against this Fixture by passing the call through to the Shape
    /// </summary>
    /// <param name="output">The ray-cast results.</param>
    /// <param name="input">The ray-cast input parameters.</param>
    /// <param name="childIndex">Index of the child.</param>
    /// <returns></returns>
    public bool rayCast( out RayCastOutput output, ref RayCastInput input, int childIndex )
    {
        return shape.rayCast( out output, ref input, ref body._xf, childIndex );
    }

    /// <summary>
    /// Get the fixture's AABB. This AABB may be enlarged and/or stale. If you need a more accurate AABB, compute it using the Shape and
    /// the body transform.
    /// </summary>
    /// <param name="aabb">The aabb.</param>
    /// <param name="childIndex">Index of the child.</param>
    public void getAABB( out AABB aabb, int childIndex )
    {
        Debug.Assert( 0 <= childIndex && childIndex < proxyCount );
        aabb = proxies[childIndex].AABB;
    }

    /// <summary>
    /// Tears the fixture down after it has been removed from the broad-phase
    /// (destroyProxies must already have run — see the assert below).
    /// Clears shape, userData and all event handlers.
    /// </summary>
    internal void destroy()
    {
#if DEBUG
        // Debug bookkeeping: the polygon vertices are no longer attached to a body.
        if( shape.shapeType == ShapeType.Polygon )
            ( (PolygonShape)shape ).vertices.attachedToBody = false;
#endif

        // The proxies must be destroyed before calling this.
        Debug.Assert( proxyCount == 0 );

        // Free the proxy array.
        proxies = null;
        shape = null;

        //FPE: We set the userdata to null here to help prevent bugs related to stale references in GC
        userData = null;

        beforeCollision = null;
        onCollision = null;
        onSeparation = null;
        afterCollision = null;

        if( body._world.onFixtureRemoved != null )
        {
            body._world.onFixtureRemoved( this );
        }

        // NOTE(review): destroying a single fixture nulls the WORLD-level
        // onFixtureAdded/onFixtureRemoved handlers, silencing them for every other
        // fixture. This matches upstream Farseer, but looks suspicious — confirm
        // before relying on world fixture events after any fixture is destroyed.
        body._world.onFixtureAdded = null;
        body._world.onFixtureRemoved = null;
        // NOTE(review): onSeparation/onCollision were already nulled above;
        // these two assignments are redundant (kept as-is from upstream).
        onSeparation = null;
        onCollision = null;
    }

    // These support body activation/deactivation.

    /// <summary>
    /// Creates one broad-phase proxy per shape child using the body transform
    /// <paramref name="xf"/>. Must only be called while detached (proxyCount == 0).
    /// </summary>
    internal void createProxies( DynamicTreeBroadPhase broadPhase, ref Transform xf )
    {
        Debug.Assert( proxyCount == 0 );

        // Create proxies in the broad-phase.
        proxyCount = shape.childCount;

        for( int i = 0; i < proxyCount; ++i )
        {
            FixtureProxy proxy = new FixtureProxy();
            shape.computeAABB( out proxy.AABB, ref xf, i );
            proxy.fixture = this;
            proxy.childIndex = i;

            //FPE note: This line needs to be after the previous two because FixtureProxy is a struct
            proxy.proxyId = broadPhase.addProxy( ref proxy );

            proxies[i] = proxy;
        }
    }

    /// <summary>
    /// Removes this fixture's proxies from the broad-phase and marks them invalid (-1).
    /// </summary>
    internal void destroyProxies( DynamicTreeBroadPhase broadPhase )
    {
        // Destroy proxies in the broad-phase.
        for( int i = 0; i < proxyCount; ++i )
        {
            broadPhase.removeProxy( proxies[i].proxyId );
            proxies[i].proxyId = -1;
        }

        proxyCount = 0;
    }

    /// <summary>
    /// Moves the broad-phase proxies to cover the swept shape between two transforms.
    /// No-op while detached (proxyCount == 0).
    /// </summary>
    internal void synchronize( DynamicTreeBroadPhase broadPhase, ref Transform transform1, ref Transform transform2 )
    {
        if( proxyCount == 0 )
            return;

        for( var i = 0; i < proxyCount; ++i )
        {
            var proxy = proxies[i];

            // Compute an AABB that covers the swept Shape (may miss some rotation effect).
            AABB aabb1, aabb2;
            shape.computeAABB( out aabb1, ref transform1, proxy.childIndex );
            shape.computeAABB( out aabb2, ref transform2, proxy.childIndex );

            proxy.AABB.combine( ref aabb1, ref aabb2 );

            Vector2 displacement = transform2.p - transform1.p;

            broadPhase.moveProxy( proxy.proxyId, ref proxy.AABB, displacement );
        }
    }

    /// <summary>
    /// Only compares the values of this fixture, and not the attached shape or body.
    /// This is used for deduplication in serialization only.
    /// </summary>
    internal bool compareTo( Fixture fixture )
    {
        return ( _collidesWith == fixture._collidesWith &&
                _collisionCategories == fixture._collisionCategories &&
                _collisionGroup == fixture._collisionGroup &&
                friction == fixture.friction &&
                isSensor == fixture.isSensor &&
                restitution == fixture.restitution &&
                userData == fixture.userData &&
                ignoreCCDWith == fixture.ignoreCCDWith &&
                sequenceEqual( _collisionIgnores, fixture._collisionIgnores ) );
    }

    /// <summary>
    /// Element-by-element, order-sensitive comparison of two sets.
    /// NOTE(review): HashSet enumeration order is unspecified, so sets with equal
    /// contents but different insertion histories may compare unequal — acceptable
    /// for its only caller (serialization dedup), but do not reuse for set equality.
    /// </summary>
    bool sequenceEqual<T>( HashSet<T> first, HashSet<T> second )
    {
        if( first.Count != second.Count )
            return false;

        using( IEnumerator<T> enumerator1 = first.GetEnumerator() )
        {
            using( IEnumerator<T> enumerator2 = second.GetEnumerator() )
            {
                while( enumerator1.MoveNext() )
                {
                    if( !enumerator2.MoveNext() || !Equals( enumerator1.Current, enumerator2.Current ) )
                        return false;
                }

                // second has more elements than first.
                if( enumerator2.MoveNext() )
                    return false;
            }
        }

        return true;
    }

    /// <summary>
    /// Clones the fixture and attached shape onto the specified body.
    /// The clone gets a fresh fixtureId and is registered on the target body.
    /// </summary>
    /// <param name="body">The body you wish to clone the fixture onto.</param>
    /// <returns>The cloned fixture.</returns>
    public Fixture cloneOnto( Body body )
    {
        var fixture = new Fixture();
        fixture.body = body;
        fixture.shape = shape.clone();
        fixture.userData = userData;
        fixture.restitution = restitution;
        fixture.friction = friction;
        fixture.isSensor = isSensor;
        fixture._collisionGroup = _collisionGroup;
        fixture._collisionCategories = _collisionCategories;
        fixture._collidesWith = _collidesWith;
        fixture.ignoreCCDWith = ignoreCCDWith;

        foreach( int ignore in _collisionIgnores )
        {
            fixture._collisionIgnores.Add( ignore );
        }

        fixture.registerFixture();
        return fixture;
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Composition;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.CSharp.LineSeparator
{
[ExportLanguageService(typeof(ILineSeparatorService), LanguageNames.CSharp), Shared]
internal class CSharpLineSeparatorService : ILineSeparatorService
{
    /// <summary>
    /// Given a tree returns line separator spans.
    /// The operation may take fairly long time on a big tree so it is cancelable.
    /// </summary>
    public async Task<IEnumerable<TextSpan>> GetLineSeparatorsAsync(
        Document document,
        TextSpan textSpan,
        CancellationToken cancellationToken)
    {
        var tree = await document.GetCSharpSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
        var node = await tree.GetRootAsync(cancellationToken).ConfigureAwait(false);

        var spans = new List<TextSpan>();

        // Walk only containers (types, namespaces, the compilation unit) that
        // intersect the requested span; members are processed per container.
        var blocks = node.Traverse<SyntaxNode>(textSpan, IsSeparableContainer);

        foreach (var block in blocks)
        {
            // Cooperative cancellation: return an empty result rather than throwing,
            // matching the original contract of this service.
            if (cancellationToken.IsCancellationRequested)
            {
                return SpecializedCollections.EmptyEnumerable<TextSpan>();
            }

            var typeBlock = block as TypeDeclarationSyntax;
            if (typeBlock != null)
            {
                ProcessNodeList(typeBlock.Members, spans, cancellationToken);
                continue;
            }

            var namespaceBlock = block as NamespaceDeclarationSyntax;
            if (namespaceBlock != null)
            {
                ProcessUsings(namespaceBlock.Usings, spans, cancellationToken);
                ProcessNodeList(namespaceBlock.Members, spans, cancellationToken);
                continue;
            }

            var progBlock = block as CompilationUnitSyntax;
            if (progBlock != null)
            {
                ProcessUsings(progBlock.Usings, spans, cancellationToken);
                ProcessNodeList(progBlock.Members, spans, cancellationToken);
            }
        }

        return spans;
    }

    /// <summary>Node types that are interesting for line separation.</summary>
    private static bool IsSeparableBlock(SyntaxNode node)
    {
        if (SyntaxFacts.IsTypeDeclaration(node.Kind()))
        {
            return true;
        }

        switch (node.Kind())
        {
            case SyntaxKind.NamespaceDeclaration:
            case SyntaxKind.MethodDeclaration:
            case SyntaxKind.PropertyDeclaration:
            case SyntaxKind.EventDeclaration:
            case SyntaxKind.IndexerDeclaration:
            case SyntaxKind.ConstructorDeclaration:
            case SyntaxKind.DestructorDeclaration:
            case SyntaxKind.OperatorDeclaration:
            case SyntaxKind.ConversionOperatorDeclaration:
                return true;

            default:
                return false;
        }
    }

    /// <summary>Node types that may contain separable blocks.</summary>
    private static bool IsSeparableContainer(SyntaxNode node)
    {
        return node is TypeDeclarationSyntax ||
            node is NamespaceDeclarationSyntax ||
            node is CompilationUnitSyntax;
    }

    /// <summary>A type declaration with a missing open or close brace (incomplete code).</summary>
    private static bool IsBadType(SyntaxNode node)
    {
        var typeDecl = node as TypeDeclarationSyntax;
        if (typeDecl != null)
        {
            if (typeDecl.OpenBraceToken.IsMissing ||
                typeDecl.CloseBraceToken.IsMissing)
            {
                return true;
            }
        }

        return false;
    }

    /// <summary>An enum declaration with a missing open or close brace (incomplete code).</summary>
    private static bool IsBadEnum(SyntaxNode node)
    {
        var enumDecl = node as EnumDeclarationSyntax;
        if (enumDecl != null)
        {
            if (enumDecl.OpenBraceToken.IsMissing ||
                enumDecl.CloseBraceToken.IsMissing)
            {
                return true;
            }
        }

        return false;
    }

    /// <summary>
    /// True when a member body exists but is missing one of its braces.
    /// Shared by the method/constructor/destructor/operator/conversion checks below.
    /// </summary>
    private static bool IsBadBlockBody(BlockSyntax body)
    {
        return body != null &&
            (body.OpenBraceToken.IsMissing || body.CloseBraceToken.IsMissing);
    }

    private static bool IsBadMethod(SyntaxNode node)
    {
        return IsBadBlockBody((node as MethodDeclarationSyntax)?.Body);
    }

    private static bool IsBadProperty(SyntaxNode node)
    {
        return IsBadAccessorList(node as PropertyDeclarationSyntax);
    }

    private static bool IsBadEvent(SyntaxNode node)
    {
        return IsBadAccessorList(node as EventDeclarationSyntax);
    }

    private static bool IsBadIndexer(SyntaxNode node)
    {
        return IsBadAccessorList(node as IndexerDeclarationSyntax);
    }

    /// <summary>An accessor list with a missing open or close brace (incomplete code).</summary>
    private static bool IsBadAccessorList(BasePropertyDeclarationSyntax baseProperty)
    {
        if (baseProperty?.AccessorList == null)
        {
            return false;
        }

        return baseProperty.AccessorList.OpenBraceToken.IsMissing ||
            baseProperty.AccessorList.CloseBraceToken.IsMissing;
    }

    private static bool IsBadConstructor(SyntaxNode node)
    {
        return IsBadBlockBody((node as ConstructorDeclarationSyntax)?.Body);
    }

    private static bool IsBadDestructor(SyntaxNode node)
    {
        return IsBadBlockBody((node as DestructorDeclarationSyntax)?.Body);
    }

    private static bool IsBadOperator(SyntaxNode node)
    {
        return IsBadBlockBody((node as OperatorDeclarationSyntax)?.Body);
    }

    private static bool IsBadConversionOperator(SyntaxNode node)
    {
        return IsBadBlockBody((node as ConversionOperatorDeclarationSyntax)?.Body);
    }

    /// <summary>
    /// True for any incomplete member: no separator is drawn under broken code
    /// because the separator position would be unreliable.
    /// </summary>
    private static bool IsBadNode(SyntaxNode node)
    {
        if (node is IncompleteMemberSyntax)
        {
            return true;
        }

        if (IsBadType(node) ||
            IsBadEnum(node) ||
            IsBadMethod(node) ||
            IsBadProperty(node) ||
            IsBadEvent(node) ||
            IsBadIndexer(node) ||
            IsBadConstructor(node) ||
            IsBadDestructor(node) ||
            IsBadOperator(node) ||
            IsBadConversionOperator(node))
        {
            return true;
        }

        return false;
    }

    /// <summary>Adds a separator after the last using directive, if any.</summary>
    private static void ProcessUsings(SyntaxList<UsingDirectiveSyntax> usings, List<TextSpan> spans, CancellationToken cancellationToken)
    {
        Contract.ThrowIfNull(spans);

        if (usings.Any())
        {
            AddLineSeparatorSpanForNode(usings.Last(), spans, cancellationToken);
        }
    }

    /// <summary>
    /// If node is separable and not the last in its container => add line separator after the node
    /// If node is separable and not the first in its container => ensure separator before the node
    /// last separable node in Program needs separator after it.
    /// </summary>
    private static void ProcessNodeList<T>(SyntaxList<T> children, List<TextSpan> spans, CancellationToken cancellationToken) where T : SyntaxNode
    {
        Contract.ThrowIfNull(spans);

        if (children.Count == 0)
        {
            // nothing to separate
            return;
        }

        // first child needs no separator
        var seenSeparator = true;
        for (int i = 0; i < children.Count - 1; i++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var cur = children[i];
            if (!IsSeparableBlock(cur))
            {
                seenSeparator = false;
            }
            else
            {
                if (!seenSeparator)
                {
                    // previous child ended without a separator; draw one before this block
                    var prev = children[i - 1];
                    AddLineSeparatorSpanForNode(prev, spans, cancellationToken);
                }

                AddLineSeparatorSpanForNode(cur, spans, cancellationToken);
                seenSeparator = true;
            }
        }

        // last child may need separator only before it
        var lastChild = children.Last();
        if (IsSeparableBlock(lastChild))
        {
            if (!seenSeparator)
            {
                var nextToLast = children[children.Count - 2];
                AddLineSeparatorSpanForNode(nextToLast, spans, cancellationToken);
            }

            // only top-level members of a compilation unit get a trailing separator
            if (lastChild.IsParentKind(SyntaxKind.CompilationUnit))
            {
                AddLineSeparatorSpanForNode(lastChild, spans, cancellationToken);
            }
        }
    }

    /// <summary>Adds the node's separator span unless the node is incomplete or the location is illegal.</summary>
    private static void AddLineSeparatorSpanForNode(SyntaxNode node, List<TextSpan> spans, CancellationToken cancellationToken)
    {
        if (IsBadNode(node))
        {
            return;
        }

        var span = GetLineSeparatorSpanForNode(node);

        if (IsLegalSpanForLineSeparator(node.SyntaxTree, span, cancellationToken))
        {
            spans.Add(span);
        }
    }

    private static bool IsLegalSpanForLineSeparator(SyntaxTree syntaxTree, TextSpan textSpan, CancellationToken cancellationToken)
    {
        // A span is a legal location for a line separator if the following line
        // contains only whitespace or the span is the last line in the buffer.
        // Fetch the text once; the previous version called GetText up to three
        // times per span, re-doing the (potentially expensive) text realization.
        var lines = syntaxTree.GetText(cancellationToken).Lines;

        var line = lines.IndexOf(textSpan.End);
        if (line == lines.Count - 1)
        {
            return true;
        }

        if (string.IsNullOrWhiteSpace(lines[line + 1].ToString()))
        {
            return true;
        }

        return false;
    }

    private static TextSpan GetLineSeparatorSpanForNode(SyntaxNode node)
    {
        // we only want to underline the node with a long line
        // for this purpose the last token is as good as the whole node, but has
        // simpler and typically single line geometry (so it will be easier to find "bottom")
        return node.GetLastToken().Span;
    }
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.DotNet.InternalAbstractions;
using Xunit;
namespace Microsoft.DotNet.CoreSetup.Test.HostActivation.SDKLookup
{
public class GivenThatICareAboutSDKLookup : IDisposable
{
// Environment variables applied to every dotnet invocation in these tests.
private static IDictionary<string, string> s_DefaultEnvironment = new Dictionary<string, string>()
{
    {"COREHOST_TRACE", "1" },
    // The SDK being used may be crossgen'd for a different architecture than we are building for.
    // Turn off ready to run, so an x64 crossgen'd SDK can be loaded in an x86 process.
    {"COMPlus_ReadyToRun", "0" },
};

private RepoDirectoriesProvider RepoDirectories;
// Shared fixture: PortableApp restored and built once in the constructor.
private TestProjectFixture PreviouslyBuiltAndRestoredPortableTestProjectFixture;

// The three tested SDK locations (cwd, user profile, exe dir), all under _baseDir.
private string _currentWorkingDir;
private string _userDir;
private string _executableDir;
// "sdk" base folders that hold the per-version subdirectories for each location.
private string _cwdSdkBaseDir;
private string _userSdkBaseDir;
private string _exeSdkBaseDir;
// Expected trace-message prefixes identifying which location the host picked.
private string _cwdSelectedMessage;
private string _userSelectedMessage;
private string _exeSelectedMessage;
// Real SDK version folder used to seed the fake version folders.
private string _sdkDir;
// Unique per-test-run root; deleted in Dispose unless test runs are preserved.
private string _baseDir;

private const string _dotnetSdkDllMessageTerminator = "dotnet.dll]";
/// <summary>
/// Per-test setup (xUnit runs the constructor before every test): builds a unique
/// directory layout with cwd/user/exe SDK locations and a built PortableApp fixture.
/// </summary>
public GivenThatICareAboutSDKLookup()
{
    // The sharedFrameworkPublish folder lives under the artifacts dir; its
    // contents are copied into the per-test exe dir below.
    string testArtifactsDir = Environment.GetEnvironmentVariable("TEST_ARTIFACTS");
    string builtDotnetDir = Path.Combine(testArtifactsDir, "sharedFrameworkPublish");

    // Everything these tests need lives under a unique base folder.
    string lookupBaseDir = Path.Combine(testArtifactsDir, "dotnetSDKLookup");
    _baseDir = SharedFramework.CalculateUniqueTestDirectory(lookupBaseDir);

    // The three tested locations are the cwd, the user folder and the exe dir
    // (cwd and user are no longer supported lookup locations).
    _currentWorkingDir = Path.Combine(_baseDir, "cwd");
    _userDir = Path.Combine(_baseDir, "user");
    _executableDir = Path.Combine(_baseDir, "exe");

    // The exe dir needs the entire publish output because we build from it;
    // CopyDirectory creates the destination automatically.
    SharedFramework.CopyDirectory(builtDotnetDir, _executableDir);

    RepoDirectories = new RepoDirectoriesProvider(builtDotnet: _executableDir);

    // SdkBaseDirs contain all available version folders.
    _cwdSdkBaseDir = Path.Combine(_currentWorkingDir, "sdk");
    _userSdkBaseDir = Path.Combine(_userDir, ".dotnet", RepoDirectories.BuildArchitecture, "sdk");
    _exeSdkBaseDir = Path.Combine(_executableDir, "sdk");

    Directory.CreateDirectory(_cwdSdkBaseDir);
    Directory.CreateDirectory(_userSdkBaseDir);
    Directory.CreateDirectory(_exeSdkBaseDir);

    // Restore and build PortableApp from the exe dir.
    PreviouslyBuiltAndRestoredPortableTestProjectFixture = new TestProjectFixture("PortableApp", RepoDirectories)
        .EnsureRestored(RepoDirectories.CorehostPackages)
        .BuildProject();
    var portableFixture = PreviouslyBuiltAndRestoredPortableTestProjectFixture;

    // Plant a dummy framework version (9999.0.0) in the exe sharedFx location so
    // framework resolution always picks it and never interferes with SDK lookup.
    string dummyFxDir = Path.Combine(_executableDir, "shared", "Microsoft.NETCore.App", "9999.0.0");
    string sharedFxSource = portableFixture.BuiltDotnet.GreatestVersionSharedFxPath;
    SharedFramework.CopyDirectory(sharedFxSource, dummyFxDir);

    // Find the real SDK version folder shipped with the fixture; its files are
    // copied into the fake version folders the tests create.
    string sdkVersionsRoot = Path.Combine(portableFixture.SdkDotnet.BinPath, "sdk");
    var versionFolderNames = Directory.EnumerateDirectories(sdkVersionsRoot)
        .Select(p => Path.GetFileName(p));
    string newestSdkVersion = versionFolderNames
        .Where(p => !string.Equals(p, "NuGetFallbackFolder", StringComparison.OrdinalIgnoreCase))
        .OrderByDescending(p => p.ToLower())
        .First();
    _sdkDir = Path.Combine(sdkVersionsRoot, newestSdkVersion);

    // Trace messages used to identify from which folder the SDK was picked.
    _cwdSelectedMessage = $"Using dotnet SDK dll=[{_cwdSdkBaseDir}";
    _userSelectedMessage = $"Using dotnet SDK dll=[{_userSdkBaseDir}";
    _exeSelectedMessage = $"Using dotnet SDK dll=[{_exeSdkBaseDir}";
}
/// <summary>
/// Per-test teardown: releases the fixture and removes the unique test directory
/// (kept on disk when test-run preservation is enabled).
/// </summary>
public void Dispose()
{
    PreviouslyBuiltAndRestoredPortableTestProjectFixture.Dispose();

    if (TestProject.PreserveTestRuns())
        return;

    Directory.Delete(_baseDir, recursive: true);
}
[Fact]
public void SdkLookup_Global_Json_Single_Digit_Patch_Rollup()
{
var fixture = PreviouslyBuiltAndRestoredPortableTestProjectFixture
.Copy();
var dotnet = fixture.BuiltDotnet;
// Set specified SDK version = 9999.3.4-global-dummy
SetGlobalJsonVersion("SingleDigit-global.json");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: empty
// Expected: no compatible version and a specific error messages
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute(fExpectedToFail: true)
.Should()
.Fail()
.And
.HaveStdErrContaining("A compatible installed dotnet SDK for global.json version")
.And
.HaveStdErrContaining("It was not possible to find any installed dotnet SDKs")
.And
.NotHaveStdErrContaining("Checking if resolved SDK dir");
// Add SDK versions
AddAvailableSdkVersions(_exeSdkBaseDir, "9999.4.1", "9999.3.4-dummy");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: 9999.4.1, 9999.3.4-dummy
// Expected: no compatible version and a specific error message
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute(fExpectedToFail: true)
.Should()
.Fail()
.And
.HaveStdErrContaining("A compatible installed dotnet SDK for global.json version")
.And
.NotHaveStdErrContaining("It was not possible to find any installed dotnet SDKs");
// Add SDK versions
AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.3");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: 9999.4.1, 9999.3.4-dummy, 9999.3.3
// Expected: no compatible version and a specific error message
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute(fExpectedToFail: true)
.Should()
.Fail()
.And
.HaveStdErrContaining("A compatible installed dotnet SDK for global.json version")
.And
.NotHaveStdErrContaining("It was not possible to find any installed dotnet SDKs");
// Add SDK versions
AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.4");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: 9999.4.1, 9999.3.4-dummy, 9999.3.3, 9999.3.4
// Expected: 9999.3.4 from exe dir
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute()
.Should()
.Pass()
.And
.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.4", _dotnetSdkDllMessageTerminator));
// Add SDK versions
AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.5-dummy");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: 9999.4.1, 9999.3.4-dummy, 9999.3.3, 9999.3.4, 9999.3.5-dummy
// Expected: 9999.3.5-dummy from exe dir
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute()
.Should()
.Pass()
.And
.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.5-dummy", _dotnetSdkDllMessageTerminator));
// Add SDK versions
AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.600");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: 9999.4.1, 9999.3.4-dummy, 9999.3.3, 9999.3.4, 9999.3.5-dummy, 9999.3.600
// Expected: 9999.3.5-dummy from exe dir
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute()
.Should()
.Pass()
.And
.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.5-dummy", _dotnetSdkDllMessageTerminator));
// Add SDK versions
AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.4-global-dummy");
// Specified SDK version: 9999.3.4-global-dummy
// Exe: 9999.4.1, 9999.3.4-dummy, 9999.3.3, 9999.3.4, 9999.3.5-dummy, 9999.3.600, 9999.3.4-global-dummy
// Expected: 9999.3.4-global-dummy from exe dir
dotnet.Exec("help")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.CaptureStdErr()
.Execute()
.Should()
.Pass()
.And
.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.4-global-dummy", _dotnetSdkDllMessageTerminator));
// Verify we have the expected SDK versions
dotnet.Exec("--list-sdks")
.WorkingDirectory(_currentWorkingDir)
.WithUserProfile(_userDir)
.Environment(s_DefaultEnvironment)
.EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
.CaptureStdOut()
.Execute()
.Should()
.Pass()
.And
.HaveStdOutContaining("9999.3.4-dummy")
.And
.HaveStdOutContaining("9999.3.4-global-dummy")
.And
.HaveStdOutContaining("9999.4.1")
.And
.HaveStdOutContaining("9999.3.3")
.And
.HaveStdOutContaining("9999.3.4")
.And
.HaveStdOutContaining("9999.3.600")
.And
.HaveStdOutContaining("9999.3.5-dummy");
}
[Fact]
public void SdkLookup_Global_Json_Two_Part_Patch_Rollup()
{
    // Verifies patch roll-forward when global.json pins a two-part-patch
    // prerelease version (9999.3.304-global-dummy): an exact match wins;
    // otherwise the resolver rolls forward within the same major.minor band.
    var fixture = PreviouslyBuiltAndRestoredPortableTestProjectFixture
        .Copy();

    var dotnet = fixture.BuiltDotnet;

    // Set specified SDK version = 9999.3.304-global-dummy
    SetGlobalJsonVersion("TwoPart-global.json");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: empty
    // Expected: no compatible version and a specific error messages
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute(fExpectedToFail: true)
        .Should()
        .Fail()
        .And
        .HaveStdErrContaining("A compatible installed dotnet SDK for global.json version")
        .And
        .HaveStdErrContaining("It was not possible to find any installed dotnet SDKs");

    // Add SDK versions
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.57", "9999.3.4-dummy");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: 9999.3.57, 9999.3.4-dummy
    // Expected: no compatible version and a specific error message
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute(fExpectedToFail: true)
        .Should()
        .Fail()
        .And
        .HaveStdErrContaining("A compatible installed dotnet SDK for global.json version")
        .And
        .NotHaveStdErrContaining("It was not possible to find any installed dotnet SDKs");

    // Add SDK versions
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.300", "9999.7.304-global-dummy");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: 9999.3.57, 9999.3.4-dummy, 9999.3.300, 9999.7.304-global-dummy
    // Expected: no compatible version and a specific error message
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute(fExpectedToFail: true)
        .Should()
        .Fail()
        .And
        .HaveStdErrContaining("A compatible installed dotnet SDK for global.json version")
        .And
        .NotHaveStdErrContaining("It was not possible to find any installed dotnet SDKs");

    // Add SDK versions
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.304");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: 9999.3.57, 9999.3.4-dummy, 9999.3.300, 9999.7.304-global-dummy, 9999.3.304
    // Expected: 9999.3.304 from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should()
        .Pass()
        .And
        .HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.304", _dotnetSdkDllMessageTerminator));

    // Add SDK versions
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.399", "9999.3.399-dummy", "9999.3.400");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: 9999.3.57, 9999.3.4-dummy, 9999.3.300, 9999.7.304-global-dummy, 9999.3.304, 9999.3.399, 9999.3.399-dummy, 9999.3.400
    // Expected: 9999.3.399 from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should()
        .Pass()
        .And
        .HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.399", _dotnetSdkDllMessageTerminator));

    // Add SDK versions
    // BUGFIX: the original passed the single string "9999.3.2400, 9999.3.3004",
    // creating one bogus version folder instead of the two intended ones (the
    // list-sdks assertions below only passed by substring coincidence).
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.2400", "9999.3.3004");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: 9999.3.57, 9999.3.4-dummy, 9999.3.300, 9999.7.304-global-dummy, 9999.3.304, 9999.3.399, 9999.3.399-dummy, 9999.3.400, 9999.3.2400, 9999.3.3004
    // Expected: 9999.3.399 from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should()
        .Pass()
        .And
        .HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.399", _dotnetSdkDllMessageTerminator));

    // Add SDK versions
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.3.304-global-dummy");

    // Specified SDK version: 9999.3.304-global-dummy
    // Exe: 9999.3.57, 9999.3.4-dummy, 9999.3.300, 9999.7.304-global-dummy, 9999.3.304, 9999.3.399, 9999.3.399-dummy, 9999.3.400, 9999.3.2400, 9999.3.3004, 9999.3.304-global-dummy
    // Expected: 9999.3.304-global-dummy from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should()
        .Pass()
        .And
        .HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.3.304-global-dummy", _dotnetSdkDllMessageTerminator));

    // Verify we have the expected SDK versions
    dotnet.Exec("--list-sdks")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .Execute()
        .Should()
        .Pass()
        .And
        .HaveStdOutContaining("9999.3.57")
        .And
        .HaveStdOutContaining("9999.3.4-dummy")
        .And
        .HaveStdOutContaining("9999.3.300")
        .And
        .HaveStdOutContaining("9999.7.304-global-dummy")
        .And
        .HaveStdOutContaining("9999.3.399")
        .And
        .HaveStdOutContaining("9999.3.399-dummy")
        .And
        .HaveStdOutContaining("9999.3.400")
        .And
        .HaveStdOutContaining("9999.3.2400")
        .And
        .HaveStdOutContaining("9999.3.3004")
        .And
        .HaveStdOutContaining("9999.3.304")
        .And
        .HaveStdOutContaining("9999.3.304-global-dummy");
}
[Fact]
public void SdkLookup_Negative_Version()
{
    // With no global.json, a malformed ("negative") version folder must not be
    // treated as an installed SDK; once a valid version is added, it is picked.
    var dotnet = PreviouslyBuiltAndRestoredPortableTestProjectFixture
        .Copy()
        .BuiltDotnet;

    // Install a malformed SDK version folder.
    AddAvailableSdkVersions(_exeSdkBaseDir, "-1.-1.-1");

    // Specified SDK version: none
    // Exe: -1.-1.-1
    // Expected: no compatible version and a specific error messages
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute(fExpectedToFail: true)
        .Should().Fail()
        .And.HaveStdErrContaining("It was not possible to find any installed dotnet SDKs")
        .And.HaveStdErrContaining("Did you mean to run dotnet SDK commands? Please install dotnet SDK from");

    // Install a well-formed SDK version.
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.4");

    // Specified SDK version: none
    // Exe: -1.-1.-1, 9999.0.4
    // Expected: 9999.0.4 from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.4", _dotnetSdkDllMessageTerminator));

    // Sanity-check the installed SDK list.
    dotnet.Exec("--list-sdks")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .Execute()
        .Should().Pass()
        .And.HaveStdOutContaining("9999.0.4");
}
[Fact]
public void SdkLookup_Must_Pick_The_Highest_Semantic_Version()
{
    // With no version pinned, the resolver must always select the highest
    // semantic version available in the exe dir (with DOTNET_MULTILEVEL_LOOKUP
    // disabled, cwd and user-profile installs must never win).
    var dotnet = PreviouslyBuiltAndRestoredPortableTestProjectFixture
        .Copy()
        .BuiltDotnet;

    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.0", "9999.0.3-dummy");

    // Specified SDK version: none
    // Cwd: empty
    // User: empty
    // Exe: 9999.0.0, 9999.0.3-dummy
    // Expected: 9999.0.3-dummy from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.3-dummy", _dotnetSdkDllMessageTerminator));

    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.3");

    // Specified SDK version: none
    // Cwd: empty
    // User: empty
    // Exe: 9999.0.0, 9999.0.3-dummy, 9999.0.3
    // Expected: 9999.0.3 from exe dir (release beats its own prerelease)
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.3", _dotnetSdkDllMessageTerminator));

    AddAvailableSdkVersions(_userSdkBaseDir, "9999.0.200");
    AddAvailableSdkVersions(_cwdSdkBaseDir, "10000.0.0");
    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.100");

    // Specified SDK version: none
    // Cwd: 10000.0.0 --> should not be picked
    // User: 9999.0.200 --> should not be picked
    // Exe: 9999.0.0, 9999.0.3-dummy, 9999.0.3, 9999.0.100
    // Expected: 9999.0.100 from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.100", _dotnetSdkDllMessageTerminator));

    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.80");

    // Specified SDK version: none
    // Cwd: 10000.0.0 --> should not be picked
    // User: 9999.0.200 --> should not be picked
    // Exe: 9999.0.0, 9999.0.3-dummy, 9999.0.3, 9999.0.100, 9999.0.80
    // Expected: 9999.0.100 from exe dir (adding a lower patch changes nothing)
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.100", _dotnetSdkDllMessageTerminator));

    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.5500000");

    // Specified SDK version: none
    // Cwd: 10000.0.0 --> should not be picked
    // User: 9999.0.200 --> should not be picked
    // Exe: 9999.0.0, 9999.0.3-dummy, 9999.0.3, 9999.0.100, 9999.0.80, 9999.0.5500000
    // Expected: 9999.0.5500000 from exe dir (large patch numbers compare numerically)
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.5500000", _dotnetSdkDllMessageTerminator));

    AddAvailableSdkVersions(_exeSdkBaseDir, "9999.0.52000000");

    // Specified SDK version: none
    // Cwd: 10000.0.0 --> should not be picked
    // User: 9999.0.200 --> should not be picked
    // Exe: 9999.0.0, 9999.0.3-dummy, 9999.0.3, 9999.0.100, 9999.0.80, 9999.0.5500000, 9999.0.52000000
    // Expected: 9999.0.52000000 from exe dir
    dotnet.Exec("help")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .CaptureStdErr()
        .Execute()
        .Should().Pass()
        .And.HaveStdErrContaining(Path.Combine(_exeSelectedMessage, "9999.0.52000000", _dotnetSdkDllMessageTerminator));

    // Sanity-check the installed SDK list.
    dotnet.Exec("--list-sdks")
        .WorkingDirectory(_currentWorkingDir)
        .WithUserProfile(_userDir)
        .Environment(s_DefaultEnvironment)
        .EnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0")
        .CaptureStdOut()
        .Execute()
        .Should().Pass()
        .And.HaveStdOutContaining("9999.0.0")
        .And.HaveStdOutContaining("9999.0.3-dummy")
        .And.HaveStdOutContaining("9999.0.3")
        .And.HaveStdOutContaining("9999.0.100")
        .And.HaveStdOutContaining("9999.0.80")
        .And.HaveStdOutContaining("9999.0.5500000")
        .And.HaveStdOutContaining("9999.0.52000000");
}
// Creates one folder per requested SDK version under sdkBaseDir, populating
// each from _sdkDir and then overwriting its dotnet.runtimeconfig.json with
// a dummy config that targets framework version 9999.0.0.
// Remarks:
// - Throws DirectoryNotFoundException when sdkBaseDir does not exist.
// - An already-existing version folder is replaced (see
//   SharedFramework.CopyDirectory for the copy semantics).
private void AddAvailableSdkVersions(string sdkBaseDir, params string[] availableVersions)
{
    if (!Directory.Exists(sdkBaseDir))
    {
        throw new DirectoryNotFoundException();
    }

    string dummyRuntimeConfig = Path.Combine(RepoDirectories.RepoRoot, "src", "test", "Assets", "TestUtils",
        "SDKLookup", "dotnet.runtimeconfig.json");

    foreach (string sdkVersion in availableVersions)
    {
        string versionDir = Path.Combine(sdkBaseDir, sdkVersion);
        SharedFramework.CopyDirectory(_sdkDir, versionDir);
        File.Copy(dummyRuntimeConfig, Path.Combine(versionDir, "dotnet.runtimeconfig.json"), true);
    }
}
// Copies the named global.json asset into the cwd so it pins the CLI version.
public void SetGlobalJsonVersion(string globalJsonFileName)
{
    string sourceFile = Path.Combine(RepoDirectories.RepoRoot, "src", "test", "Assets", "TestUtils",
        "SDKLookup", globalJsonFileName);
    File.Copy(sourceFile, Path.Combine(_currentWorkingDir, "global.json"), true);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.NetworkInformation;
using System.Threading;
using PcapDotNet.Base;
using PcapDotNet.Core.Extensions;
using PcapDotNet.Packets;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using PcapDotNet.Packets.Ethernet;
using PcapDotNet.Packets.IpV6;
using PcapDotNet.Packets.TestUtils;
using PcapDotNet.TestUtils;
namespace PcapDotNet.Core.Test
{
/// <summary>
/// Summary description for LivePacketDeviceTests
/// </summary>
[TestClass]
public class LivePacketDeviceTests
{
// Default constructor; no per-test state to initialize.
public LivePacketDeviceTests()
{
}
/// <summary>
/// Gets or sets the test context which provides
/// information about and functionality for the current test run.
/// </summary>
// Assigned by the MSTest runner; not read by the tests in view here.
public TestContext TestContext { get; set; }
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
[TestMethod]
public void SendAndReceievePacketTest()
{
    // NOTE(review): "Receieve" is misspelled; renaming would change the
    // test's identity for runners/history, so the name is left untouched.
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPacketsToSend = 10;

    using (PacketCommunicator communicator = OpenLiveDevice(100))
    {
        // Restrict capture to this test's own traffic.
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        // Nothing sent yet: the read should time out after roughly one second
        // (presumably the device read timeout — confirm in OpenLiveDevice).
        Packet packet;
        DateTime startWaiting = DateTime.Now;
        PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out packet);
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(PacketCommunicatorReceiveResult.Timeout, result);
        Assert.AreEqual<uint>(0, communicator.TotalStatistics.PacketsCaptured);
        MoreAssert.IsInRange(TimeSpan.FromSeconds(0.99), TimeSpan.FromSeconds(1.075), finishedWaiting - startWaiting);

        // Send the same 200-byte packet NumPacketsToSend times.
        Packet sentPacket = _random.NextEthernetPacket(200, 300, SourceMac, DestinationMac);

        DateTime startSendingTime = DateTime.Now;
        for (int i = 0; i != NumPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        DateTime endSendingTime = DateTime.Now;

        // Each capture should be truncated to 100 bytes (the argument given to
        // OpenLiveDevice) while reporting the original 200-byte length, and be
        // timestamped within a generous window around the send times.
        for (int i = 0; i != NumPacketsToSend; ++i)
        {
            result = communicator.ReceivePacket(out packet);

            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(100, packet.Length);
            Assert.AreEqual<uint>(200, packet.OriginalLength);
            MoreAssert.IsInRange(startSendingTime - TimeSpan.FromSeconds(1), endSendingTime + TimeSpan.FromSeconds(30), packet.Timestamp);
        }

        Assert.AreEqual<uint>(NumPacketsToSend, communicator.TotalStatistics.PacketsCaptured);
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentNullException), AllowDerivedTypes = false)]
public void SendNullPacketTest()
{
    // Sending a null packet must be rejected with ArgumentNullException.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SendPacket(null);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(ArgumentNullException), AllowDerivedTypes = false)]
public void SetNullFilterTest()
{
    // A null BerkeleyPacketFilter must be rejected with ArgumentNullException.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SetFilter((BerkeleyPacketFilter)null);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(ArgumentNullException), AllowDerivedTypes = false)]
public void SetNullSamplingMethodTest()
{
    // A null sampling method must be rejected with ArgumentNullException.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SetSamplingMethod(null);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
public void ReceiveSomePacketsTest()
{
    // Driver for ReceiveSomePackets across blocking/non-blocking and
    // break-loop scenarios. Presumed helper parameter order (confirm against
    // the TestReceiveSomePackets definition, which is outside this view):
    // (packetsToSend, packetsToWaitFor, breakLoopAfter, packetSize,
    //  nonBlocking, expectedResult, expectedNumPackets, minSeconds, maxSeconds).
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    // Test normal mode
    TestReceiveSomePackets(0, 0, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, 0, 1, 1.06);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, 0, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, -1, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.028)
;
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend + 1, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.031);

    // Test non blocking
    TestReceiveSomePackets(0, 0, int.MaxValue, PacketSize, true, PacketCommunicatorReceiveResult.Ok, 0, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, int.MaxValue, PacketSize, true, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.02);

    // Test break loop
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend / 2, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, PacketSize, true, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend / 2, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, 0, PacketSize, false, PacketCommunicatorReceiveResult.BreakLoop, 0, 0, 0.02);
}
[TestMethod]
public void ReceivePacketsTest()
{
    // Driver for ReceivePackets: waiting for exactly/fewer/more packets than
    // sent, plus break-loop behavior. The numeric tails are timing tolerances
    // in seconds (empirically tuned — do not tighten without re-measuring).
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    // Normal
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.12);

    // Wait for less packets
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend / 2, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend / 2, 0, 0.04);

    // Wait for more packets
    TestReceivePackets(NumPacketsToSend, 0, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.None, NumPacketsToSend, 2, 2.45);
    TestReceivePackets(NumPacketsToSend, -1, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.None, NumPacketsToSend, 2, 2.3);
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend + 1, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.None, NumPacketsToSend, 2, 2.16);

    // Break loop
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend, 0, 2, PacketSize, PacketCommunicatorReceiveResult.BreakLoop, 0, 0, 0.027);
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, 2, PacketSize, PacketCommunicatorReceiveResult.BreakLoop, NumPacketsToSend / 2, 0, 0.046);
}
[TestMethod]
public void ReceivePacketsEnumerableTest()
{
    // Same scenarios as ReceivePacketsTest but via the enumerable receive API
    // (no result enum — the helper presumably checks counts/timing only).
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    // Normal
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend, int.MaxValue, 2, PacketSize, NumPacketsToSend, 0, 0.3);

    // Wait for less packets
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend / 2, int.MaxValue, 2, PacketSize, NumPacketsToSend / 2, 0, 0.032);

    // Wait for more packets
    TestReceivePacketsEnumerable(NumPacketsToSend, -1, int.MaxValue, 2, PacketSize, NumPacketsToSend, 2, 2.14);
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend + 1, int.MaxValue, 2, PacketSize, NumPacketsToSend, 2, 2.13);

    // Break loop
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend, 0, 2, PacketSize, 0, 0, 0.051);
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, 2, PacketSize, NumPacketsToSend / 2, 0, 0.1);
}
[TestMethod]
[Timeout(10 * 1000)]
public void ReceivePacketsGcCollectTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPackets = 2;

    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        // Restrict capture to this test's own traffic.
        liveCommunicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet packetToSend = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int sent = 0; sent < NumPackets; ++sent)
            liveCommunicator.SendPacket(packetToSend);

        // Force a GC inside the receive callback to verify the callback
        // plumbing survives a collection mid-receive.
        PacketCommunicatorReceiveResult receiveResult =
            liveCommunicator.ReceivePackets(NumPackets, delegate { GC.Collect(); });
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, receiveResult);
    }
}
[TestMethod]
public void ReceiveSomePacketsGcCollectTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPackets = 2;

    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        // Restrict capture to this test's own traffic.
        liveCommunicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet packetToSend = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int sent = 0; sent < NumPackets; ++sent)
            liveCommunicator.SendPacket(packetToSend);

        // Force a GC inside the receive callback to verify the callback
        // plumbing survives a collection mid-receive.
        int packetsReceived;
        PacketCommunicatorReceiveResult receiveResult =
            liveCommunicator.ReceiveSomePackets(out packetsReceived, NumPackets, delegate { GC.Collect(); });
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, receiveResult);
        Assert.AreEqual(NumPackets, packetsReceived);
    }
}
[TestMethod]
public void ReceiveStatisticsGcCollectTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumStatistics = 2;

    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        liveCommunicator.Mode = PacketCommunicatorMode.Statistics;

        // Force a GC inside the statistics callback to verify the callback
        // plumbing survives a collection mid-receive.
        PacketCommunicatorReceiveResult receiveResult =
            liveCommunicator.ReceiveStatistics(NumStatistics, delegate { GC.Collect(); });
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, receiveResult);
    }
}
[TestMethod]
public void ReceiveStatisticsTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        // Statistics mode: reads yield aggregate counters, not packets.
        communicator.Mode = PacketCommunicatorMode.Statistics;
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet sentPacket = _random.NextEthernetPacket(PacketSize, SourceMac, DestinationMac);

        // First sample, before anything is sent: zero packets/bytes accepted,
        // timestamped within a +/- 1s window of "now".
        PacketSampleStatistics statistics;
        PacketCommunicatorReceiveResult result = communicator.ReceiveStatistics(out statistics);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
        MoreAssert.IsInRange(DateTime.Now.AddSeconds(-1), DateTime.Now.AddSeconds(1), statistics.Timestamp);
        Assert.AreEqual<ulong>(0, statistics.AcceptedPackets);
        Assert.AreEqual<ulong>(0, statistics.AcceptedBytes);

        for (int i = 0; i != NumPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);

        // Second sample: all sent packets should now be counted.
        result = communicator.ReceiveStatistics(out statistics);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
        MoreAssert.IsInRange(DateTime.Now.AddSeconds(-1), DateTime.Now.AddSeconds(1), statistics.Timestamp);
        Assert.AreEqual<ulong>(NumPacketsToSend, statistics.AcceptedPackets, "AcceptedPackets");
        // Todo check byte statistics. See http://www.winpcap.org/pipermail/winpcap-users/2015-February/004931.html
        //            Assert.AreEqual<long>((sentPacket.Length * NumPacketsToSend), statistics.AcceptedBytes,
        //                            "AcceptedBytes. Diff Per Packet: " +
        //                            (statistics.AcceptedBytes - sentPacket.Length * NumPacketsToSend) /
        //                            ((double)NumPacketsToSend));
    }
}
[TestMethod]
public void GetStatisticsTest()
{
    // Driver for gathering multiple statistics samples. Trailing numeric
    // arguments are expected counts and timing tolerances in seconds
    // (empirically tuned — confirm against the TestGetStatistics helper,
    // which is outside this view).
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPacketsToSend = 100;
    const int NumStatisticsToGather = 3;
    const int PacketSize = 100;

    // Normal
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather, int.MaxValue, 5, PacketSize,
        PacketCommunicatorReceiveResult.Ok, NumStatisticsToGather, NumPacketsToSend, NumStatisticsToGather, NumStatisticsToGather + 0.16);

    // Wait for less statistics
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather / 2, int.MaxValue, 5, PacketSize,
        PacketCommunicatorReceiveResult.Ok, NumStatisticsToGather / 2, NumPacketsToSend, NumStatisticsToGather / 2, NumStatisticsToGather / 2 + 0.17);

    // Wait for more statistics
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, 0, int.MaxValue, 5.5, PacketSize,
        PacketCommunicatorReceiveResult.None, 5, NumPacketsToSend, 5.5, 5.85);

    // Break loop
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather, 0, 5, PacketSize,
        PacketCommunicatorReceiveResult.BreakLoop, 0, 0, 0, 0.04);
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather, NumStatisticsToGather / 2, 5, PacketSize,
        PacketCommunicatorReceiveResult.BreakLoop, NumStatisticsToGather / 2, NumPacketsToSend, NumStatisticsToGather / 2, NumStatisticsToGather / 2 + 0.22);
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void GetStatisticsOnCaptureModeErrorTest()
{
    // Requesting statistics while still in the default capture mode is invalid.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        PacketSampleStatistics unused;
        liveCommunicator.ReceiveStatistics(out unused);
    }
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void GetPacketOnStatisticsModeErrorTest()
{
    // Requesting a packet while in statistics mode is invalid.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.Mode = PacketCommunicatorMode.Statistics;
        Packet unused;
        liveCommunicator.ReceivePacket(out unused);
    }
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetInvalidModeErrorTest()
{
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        // -99 is not a defined PacketCommunicatorMode value.
        liveCommunicator.Mode = (PacketCommunicatorMode)(-99);
    }
}
// this test is removed for now since it doens't throw an exception for such big value
// [TestMethod]
// [ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
// public void SetBigKernelBufferSizeErrorTest()
// {
// using (PacketCommunicator communicator = OpenLiveDevice())
// {
// communicator.SetKernelBufferSize(1024 * 1024 * 1024);
// }
// }
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetPacketErrorTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // A 10-byte kernel buffer is too small for any capture to succeed.
        liveCommunicator.SetKernelBufferSize(10);

        Packet packetToSend = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        liveCommunicator.SendPacket(packetToSend);
        liveCommunicator.ReceivePacket(out packetToSend);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetSomePacketsErrorTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // A 10-byte kernel buffer is too small for any capture to succeed.
        liveCommunicator.SetKernelBufferSize(10);

        Packet packetToSend = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        liveCommunicator.SendPacket(packetToSend);

        int packetsReceived;
        liveCommunicator.ReceiveSomePackets(out packetsReceived, 1, delegate { });
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetPacketsErrorTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // A 10-byte kernel buffer is too small for any capture to succeed.
        communicator.SetKernelBufferSize(10);

        Packet packet = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        communicator.SendPacket(packet);

        // ReceivePackets blocks, so run it on a worker thread; capture any
        // exception it throws and rethrow it on the test thread so the
        // ExpectedException attribute can observe it.
        Exception exception = null;
        Thread thread = new Thread(delegate()
        {
            try
            {
                communicator.ReceivePackets(1, delegate { });
            }
            catch (Exception e)
            {
                exception = e;
            }
        });
        thread.Start();
        // Give the receive 5 seconds to fail; abort the thread if it hangs.
        // NOTE(review): Thread.Abort throws PlatformNotSupportedException on
        // .NET Core/.NET 5+ — fine on .NET Framework, where this presumably runs.
        if (!thread.Join(TimeSpan.FromSeconds(5)))
            thread.Abort();
        // NOTE(review): "throw exception;" resets the worker's stack trace;
        // the exception type is all this test asserts on, so it still passes.
        if (exception != null)
            throw exception;
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetNextStatisticsErrorTest()
{
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.Mode = PacketCommunicatorMode.Statistics;
        // A 10-byte kernel buffer is too small for statistics sampling.
        liveCommunicator.SetKernelBufferSize(10);

        PacketSampleStatistics unused;
        liveCommunicator.ReceiveStatistics(out unused);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetStatisticsErrorTest()
{
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.Mode = PacketCommunicatorMode.Statistics;
        // A 10-byte kernel buffer is too small for statistics sampling;
        // the callback must never be invoked.
        liveCommunicator.SetKernelBufferSize(10);
        liveCommunicator.ReceiveStatistics(1, delegate { Assert.Fail(); });
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
public void SetBigKernelMinimumBytesToCopyTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // With a huge minimum-bytes-to-copy, a single small packet has to wait for
        // the read timeout before it is delivered.
        communicator.SetKernelMinimumBytesToCopy(1024 * 1024);
        Packet sentPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int attempt = 0; attempt < 5; ++attempt)
        {
            communicator.SendPacket(sentPacket);
            DateTime start = DateTime.Now;
            Packet received;
            PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out received);
            DateTime end = DateTime.Now;
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(sentPacket, received);
            // Each receive should have taken close to the full timeout.
            MoreAssert.IsBigger(TimeSpan.FromSeconds(0.9), end - start);
        }
    }
}
[TestMethod]
public void SetSmallKernelMinimumBytesToCopyTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // With a minimal minimum-bytes-to-copy, every packet should be delivered
        // almost immediately instead of waiting for the read timeout.
        communicator.SetKernelMinimumBytesToCopy(1);
        Packet sentPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int attempt = 0; attempt < 100; ++attempt)
        {
            communicator.SendPacket(sentPacket);
            DateTime start = DateTime.Now;
            Packet received;
            PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out received);
            DateTime end = DateTime.Now;
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(sentPacket, received);
            MoreAssert.IsSmallerOrEqual(TimeSpan.FromSeconds(0.07), end - start);
        }
    }
}
[TestMethod]
public void SetSamplingMethodOneEveryNTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // Sample one packet out of every five.
        communicator.SetSamplingMethod(new SamplingMethodOneEveryCount(5));
        // Send 20 packets with strictly increasing sizes so the sampled ones can be
        // identified by their length.
        for (int i = 0; i < 20; ++i)
            communicator.SendPacket(_random.NextEthernetPacket(60 * (i + 1), SourceMac, DestinationMac));
        // Expect exactly every fifth packet: lengths 300, 600, 900, 1200.
        Packet received;
        PacketCommunicatorReceiveResult result;
        for (int i = 0; i < 4; ++i)
        {
            result = communicator.ReceivePacket(out received);
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(60 * 5 * (i + 1), received.Length);
        }
        // Nothing else should be sampled.
        result = communicator.ReceivePacket(out received);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Timeout, result);
        Assert.IsNull(received);
    }
}
[TestMethod]
public void SetSamplingMethodFirstAfterIntervalTest()
{
    // With a 1 second sampling interval, only the first packet that arrives after
    // each interval boundary should be delivered. The send pacing below (0.7 s then
    // 0.55 s steps) is tuned so that every other packet starts a new interval.
    Random random = new Random();
    MacAddress sourceMac = random.NextMacAddress();
    MacAddress destinationMac = random.NextMacAddress();
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + sourceMac + " and ether dst " + destinationMac);
        communicator.SetSamplingMethod(new SamplingMethodFirstAfterInterval(TimeSpan.FromSeconds(1)));
        // Drain anything already pending before the timed part starts.
        int numPacketsGot;
        communicator.ReceiveSomePackets(out numPacketsGot, 100, p => { });
        // Packet i+1 has length 60*(i+2), so each packet is identifiable by size.
        Packet[] packetsToSend = new Packet[11];
        packetsToSend[0] = _random.NextEthernetPacket(60, sourceMac, destinationMac);
        for (int i = 0; i != 10; ++i)
            packetsToSend[i + 1] = _random.NextEthernetPacket(60 * (i + 2), sourceMac, destinationMac);
        List<Packet> packets = new List<Packet>(6);
        // Receive on a worker thread while this thread paces the sends.
        Thread thread = new Thread(() => packets.AddRange(communicator.ReceivePackets(6)));
        thread.Start();
        communicator.SendPacket(packetsToSend[0]);
        Thread.Sleep(TimeSpan.FromSeconds(0.7));
        for (int i = 0; i != 10; ++i)
        {
            communicator.SendPacket(packetsToSend[i + 1]);
            Thread.Sleep(TimeSpan.FromSeconds(0.55));
        }
        if (!thread.Join(TimeSpan.FromSeconds(10)))
            thread.Abort();
        // On failure the message lists each received packet's relative timestamp and length.
        Assert.AreEqual(6, packets.Count, packets.Select(p => (p.Timestamp-packets[0].Timestamp).TotalSeconds + "(" + p.Length + ")").SequenceToString(", "));
        Packet packet;
        // Expect the odd-numbered sends: lengths 60*1, 60*3, ..., 60*11.
        for (int i = 0; i != 6; ++i)
            Assert.AreEqual(60 * (i * 2 + 1), packets[i].Length, i.ToString());
        PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out packet);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Timeout, result);
        Assert.IsNull(packet);
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodOneEveryNErrorTest()
{
    // A sample count of zero is invalid and must be rejected.
    using (var device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodOneEveryCount(0));
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodFirstAfterIntervalNegativeMsErrorTest()
{
    // A negative interval in milliseconds is invalid and must be rejected.
    using (var device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodFirstAfterInterval(-1));
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodFirstAfterIntervalNegativeTimespanErrorTest()
{
    // A negative TimeSpan interval is invalid and must be rejected.
    using (var device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodFirstAfterInterval(TimeSpan.FromSeconds(-1)));
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodFirstAfterIntervalBigTimespanErrorTest()
{
    // An interval of 25 days is out of the supported range and must be rejected.
    using (var device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodFirstAfterInterval(TimeSpan.FromDays(25)));
    }
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetInvalidDataLink()
{
    using (var device = OpenLiveDevice())
    {
        // Data link 0 is not valid for this adapter, so the assignment must throw.
        device.DataLink = new PcapDataLink(0);
        Assert.AreEqual(new PcapDataLink(0), device.DataLink);
    }
}
[TestMethod]
public void SendZeroPacket()
{
    using (var device = OpenLiveDevice())
    {
        // Sending a zero-length packet should be accepted without throwing.
        device.SendPacket(new Packet(new byte[0], DateTime.Now, DataLinkKind.Ethernet));
    }
}
// Sends numPacketsToSend packets, gathers statistics samples on a worker thread,
// and verifies the receive result, the number of samples, the accepted-packet
// totals and the elapsed time against the expected values.
private static void TestGetStatistics(string sourceMac, string destinationMac, int numPacketsToSend, int numStatisticsToGather, int numStatisticsToBreakLoop, double secondsToWait, int packetSize,
                                      PacketCommunicatorReceiveResult expectedResult, int expectedNumStatistics, int expectedNumPackets, double expectedMinSeconds, double expectedMaxSeconds)
{
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.Mode = PacketCommunicatorMode.Statistics;
        communicator.SetFilter("ether src " + sourceMac + " and ether dst " + destinationMac);
        Packet sentPacket = _random.NextEthernetPacket(packetSize, sourceMac, destinationMac);
        PacketCommunicatorReceiveResult result = PacketCommunicatorReceiveResult.None;
        int numStatisticsGot = 0;
        ulong totalPackets = 0;
        ulong totalBytes = 0;
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        // numStatisticsToBreakLoop == 0 exercises breaking before the loop even starts.
        if (numStatisticsToBreakLoop == 0)
            communicator.Break();
        // ReceiveStatistics blocks, so it runs on a worker thread that is aborted if
        // it does not finish within secondsToWait. The callback accumulates totals
        // into locals captured by the closure.
        Thread thread = new Thread(delegate()
        {
            result = communicator.ReceiveStatistics(numStatisticsToGather,
                delegate(PacketSampleStatistics statistics)
                {
                    Assert.IsNotNull(statistics.ToString());
                    totalPackets += statistics.AcceptedPackets;
                    totalBytes += statistics.AcceptedBytes;
                    ++numStatisticsGot;
                    if (numStatisticsGot >= numStatisticsToBreakLoop)
                        communicator.Break();
                });
        });
        DateTime startWaiting = DateTime.Now;
        thread.Start();
        if (!thread.Join(TimeSpan.FromSeconds(secondsToWait)))
            thread.Abort();
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedResult, result, "Result");
        Assert.AreEqual(expectedNumStatistics, numStatisticsGot, "NumStatistics");
        Assert.AreEqual((ulong)expectedNumPackets, totalPackets, "NumPackets");
        // Todo check byte statistics. See http://www.winpcap.org/pipermail/winpcap-users/2015-February/004931.html
        // Assert.AreEqual((ulong)(numPacketsToSend * sentPacket.Length), totalBytes, "NumBytes");
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds);
    }
}
// Sends numPacketsToSend packets and receives them with ReceiveSomePackets,
// verifying the result, the packet counts and the elapsed time.
private static void TestReceiveSomePackets(int numPacketsToSend, int numPacketsToGet, int numPacketsToBreakLoop, int packetSize, bool nonBlocking,
                                           PacketCommunicatorReceiveResult expectedResult, int expectedNumPackets, double expectedMinSeconds, double expectedMaxSeconds)
{
    // Embedded in assertion messages so a failing parameter combination is identifiable.
    string testDescription = "NumPacketsToSend=" + numPacketsToSend + ". NumPacketsToGet=" + numPacketsToGet +
                             ". NumPacketsToBreakLoop=" + numPacketsToBreakLoop + ". PacketSize=" + packetSize +
                             ". NonBlocking=" + nonBlocking;
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    Packet sentPacket = _random.NextEthernetPacket(packetSize, SourceMac, DestinationMac);
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.NonBlocking = nonBlocking;
        Assert.AreEqual(nonBlocking, communicator.NonBlocking);
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        for (int i = 0; i < numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        // Breaking with zero packets must happen before the receive call.
        if (numPacketsToBreakLoop == 0)
            communicator.Break();
        PacketHandler handler = new PacketHandler(sentPacket, communicator, numPacketsToBreakLoop);
        DateTime begin = DateTime.Now;
        int actualPacketsGot;
        PacketCommunicatorReceiveResult result = communicator.ReceiveSomePackets(out actualPacketsGot, numPacketsToGet, handler.Handle);
        DateTime finish = DateTime.Now;
        Assert.AreEqual(expectedResult, result);
        Assert.AreEqual(expectedNumPackets, actualPacketsGot, "NumPacketsGot. Test: " + testDescription);
        Assert.AreEqual(expectedNumPackets, handler.NumPacketsHandled, "NumPacketsHandled. Test: " + testDescription);
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finish - begin).TotalSeconds, testDescription);
    }
}
// Sends numPacketsToSend packets and receives them with the blocking ReceivePackets
// overload on a worker thread, verifying result, handled count and elapsed time.
private static void TestReceivePackets(int numPacketsToSend, int numPacketsToWait, int numPacketsToBreakLoop, double secondsToWait, int packetSize,
                                       PacketCommunicatorReceiveResult expectedResult, int expectedNumPackets,
                                       double expectedMinSeconds, double expectedMaxSeconds)
{
    // Embedded in assertion messages so a failing parameter combination is identifiable.
    string testDescription = "NumPacketsToSend=" + numPacketsToSend + ". NumPacketsToWait=" + numPacketsToWait +
                             ". NumPacketsToBreakLoop=" + numPacketsToBreakLoop + ". SecondsToWait=" +
                             secondsToWait + ". PacketSize=" + packetSize;
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        Packet sentPacket = _random.NextEthernetPacket(packetSize, SourceMac, DestinationMac);
        PacketCommunicatorReceiveResult result = PacketCommunicatorReceiveResult.None;
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        PacketHandler handler = new PacketHandler(sentPacket, communicator, numPacketsToBreakLoop);
        // The blocking receive runs on a worker thread so the test can time it and
        // abort it after secondsToWait. The zero-packet Break() is issued on the
        // worker, just before the receive starts.
        Thread thread = new Thread(delegate()
        {
            if (numPacketsToBreakLoop == 0)
                communicator.Break();
            result = communicator.ReceivePackets(numPacketsToWait, handler.Handle);
        });
        DateTime startWaiting = DateTime.Now;
        thread.Start();
        if (!thread.Join(TimeSpan.FromSeconds(secondsToWait)))
            thread.Abort();
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedResult, result, testDescription);
        Assert.AreEqual(expectedNumPackets, handler.NumPacketsHandled);
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds);
    }
}
// Same scenario as TestReceivePackets but exercises the IEnumerable-returning
// ReceivePackets overloads (numPacketsToWait == -1 selects the unbounded overload).
private static void TestReceivePacketsEnumerable(int numPacketsToSend, int numPacketsToWait, int numPacketsToBreakLoop, double secondsToWait,
                                                 int packetSize, int expectedNumPackets, double expectedMinSeconds, double expectedMaxSeconds)
{
    // Embedded in assertion messages so a failing parameter combination is identifiable.
    string testDescription = "NumPacketsToSend=" + numPacketsToSend + ". NumPacketsToWait=" + numPacketsToWait +
                             ". NumPacketsToBreakLoop=" + numPacketsToBreakLoop + ". SecondsToWait=" +
                             secondsToWait + ". PacketSize=" + packetSize;
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        Packet sentPacket = _random.NextEthernetPacket(packetSize, SourceMac, DestinationMac);
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        int actualPacketsReceived = 0;
        // Enumerate on a worker thread; breaking out of the foreach simulates a
        // consumer abandoning the enumeration after numPacketsToBreakLoop packets.
        Thread thread = new Thread(delegate()
        {
            if (numPacketsToBreakLoop == 0)
                communicator.Break();
            IEnumerable<Packet> packets = numPacketsToWait == -1
                ? communicator.ReceivePackets()
                : communicator.ReceivePackets(numPacketsToWait);
            foreach (Packet packet in packets)
            {
                Assert.AreEqual(sentPacket, packet);
                ++actualPacketsReceived;
                if (actualPacketsReceived == numPacketsToBreakLoop)
                    break;
            }
        });
        DateTime startWaiting = DateTime.Now;
        thread.Start();
        if (!thread.Join(TimeSpan.FromSeconds(secondsToWait)))
            thread.Abort();
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedNumPackets, actualPacketsReceived, testDescription);
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds, testDescription);
    }
}
/// <summary>
/// Opens the first operational Ethernet interface for live capture with the given snapshot
/// length. On the way it sanity-checks the device description, attributes, addresses and
/// total statistics, and applies the common test configuration (2 MB kernel buffer,
/// 10 byte minimum copy, no sampling).
/// </summary>
/// <param name="snapshotLength">The snapshot length to open the device with.</param>
/// <returns>An open communicator in promiscuous capture mode with a 1000 ms read timeout.</returns>
public static PacketCommunicator OpenLiveDevice(int snapshotLength)
{
    NetworkInterface networkInterface =
        NetworkInterface.GetAllNetworkInterfaces().FirstOrDefault(
            ni => !ni.IsReceiveOnly && ni.NetworkInterfaceType == NetworkInterfaceType.Ethernet && ni.OperationalStatus == OperationalStatus.Up);
    // FirstOrDefault() yields null when no suitable adapter exists; fail with a clear
    // message instead of a NullReferenceException on the next line.
    Assert.IsNotNull(networkInterface, "No operational Ethernet network interface was found.");
    LivePacketDevice device = networkInterface.GetLivePacketDevice();
    MoreAssert.IsMatch(@"Network adapter '.*' on local host", device.Description);
    Assert.AreEqual(DeviceAttributes.None, device.Attributes);
    Assert.AreNotEqual(MacAddress.Zero, device.GetMacAddress());
    Assert.AreNotEqual(string.Empty, device.GetPnpDeviceId());
    MoreAssert.IsBiggerOrEqual(1, device.Addresses.Count);
    foreach (DeviceAddress address in device.Addresses)
    {
        if (address.Address.Family == SocketAddressFamily.Internet)
        {
            MoreAssert.IsMatch("Address: " + SocketAddressFamily.Internet + @" [0-9]+\.[0-9]+\.[0-9]+\.[0-9]+ " +
                               "Netmask: " + SocketAddressFamily.Internet + @" 255\.[0-9]+\.[0-9]+\.[0-9]+ " +
                               "Broadcast: " + SocketAddressFamily.Internet + @" 255.255.255.255",
                               address.ToString());
        }
        else
        {
            // Anything that is not IPv4 must be IPv6 with an unspecified netmask/broadcast.
            Assert.AreEqual(SocketAddressFamily.Internet6, address.Address.Family);
            MoreAssert.IsMatch("Address: " + SocketAddressFamily.Internet6 + @" (?:[0-9A-F]{4}:){7}[0-9A-F]{4} " +
                               "Netmask: " + SocketAddressFamily.Unspecified + @" " + IpV6Address.Zero + " " +
                               "Broadcast: " + SocketAddressFamily.Unspecified + @" " + IpV6Address.Zero,
                               address.ToString());
        }
    }
    PacketCommunicator communicator = device.Open(snapshotLength, PacketDeviceOpenAttributes.Promiscuous, 1000);
    try
    {
        MoreAssert.AreSequenceEqual(new[] {DataLinkKind.Ethernet, DataLinkKind.Docsis}.Select(kind => new PcapDataLink(kind)), communicator.SupportedDataLinks);
        // Exercise the PacketTotalStatistics equality members on a fresh communicator.
        PacketTotalStatistics totalStatistics = communicator.TotalStatistics;
        Assert.AreEqual<object>(totalStatistics, totalStatistics);
        Assert.AreNotEqual(null, totalStatistics);
        Assert.AreEqual(totalStatistics.GetHashCode(), totalStatistics.GetHashCode());
        Assert.IsTrue(totalStatistics.Equals(totalStatistics));
        Assert.IsFalse(totalStatistics.Equals(null));
        Assert.AreNotEqual(null, totalStatistics);
        Assert.AreNotEqual(totalStatistics, 2);
        MoreAssert.IsSmallerOrEqual<uint>(1, totalStatistics.PacketsCaptured, "PacketsCaptured");
        Assert.AreEqual<uint>(0, totalStatistics.PacketsDroppedByDriver, "PacketsDroppedByDriver");
        Assert.AreEqual<uint>(0, totalStatistics.PacketsDroppedByInterface, "PacketsDroppedByInterface");
        MoreAssert.IsSmallerOrEqual<uint>(1, totalStatistics.PacketsReceived, "PacketsReceived");
        Assert.IsNotNull(totalStatistics.ToString());
        communicator.SetKernelBufferSize(2 * 1024 * 1024); // 2 MB instead of 1
        communicator.SetKernelMinimumBytesToCopy(10); // 10 bytes minimum to copy
        communicator.SetSamplingMethod(new SamplingMethodNone());
        Assert.AreEqual(DataLinkKind.Ethernet, communicator.DataLink.Kind);
        communicator.DataLink = communicator.DataLink;
        Assert.AreEqual("EN10MB (Ethernet)", communicator.DataLink.ToString());
        Assert.AreEqual(communicator.DataLink, new PcapDataLink(communicator.DataLink.Name));
        Assert.IsTrue(communicator.IsFileSystemByteOrder);
        Assert.AreEqual(PacketCommunicatorMode.Capture, communicator.Mode);
        Assert.IsFalse(communicator.NonBlocking);
        Assert.AreEqual(snapshotLength, communicator.SnapshotLength);
        return communicator;
    }
    catch (Exception)
    {
        // Never leak an open device when any of the sanity checks above fails.
        communicator.Dispose();
        throw;
    }
}
/// <summary>
/// Opens the first operational Ethernet interface for live capture using the default snapshot length.
/// </summary>
public static PacketCommunicator OpenLiveDevice()
{
    return OpenLiveDevice(PacketDevice.DefaultSnapshotLength);
}
// Shared randomizer used to build the Ethernet test packets.
private static readonly Random _random = new Random();
}
}
| |
//---------------------------------------------------------------------
// <copyright file="RelationshipEnd.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------
namespace System.Data.EntityModel.SchemaObjectModel
{
using System.Collections.Generic;
using System.Data.Metadata.Edm;
using System.Diagnostics;
using System.Xml;
/// <summary>
/// Represents an End element in a relationship
/// </summary>
internal sealed class RelationshipEnd : SchemaElement, IRelationshipEnd
{
    // Raw Type attribute value captured during parsing, resolved in ResolveTopLevelNames.
    private string _unresolvedType;
    private RelationshipMultiplicity? _multiplicity;
    private SchemaEntityType _type;
    private List<OnOperation> _operations;

    /// <summary>
    /// construct a Relationship End
    /// </summary>
    /// <param name="relationship">the Relationship element that owns this end</param>
    public RelationshipEnd(Relationship relationship)
        : base(relationship)
    {
    }

    /// <summary>
    /// Type of the End
    /// </summary>
    public SchemaEntityType Type
    {
        get
        {
            return _type;
        }
        private set
        {
            _type = value;
        }
    }

    /// <summary>
    /// Multiplicity of the End
    /// </summary>
    public RelationshipMultiplicity? Multiplicity
    {
        get
        {
            return _multiplicity;
        }
        set
        {
            _multiplicity = value;
        }
    }

    /// <summary>
    /// The On&lt;Operation&gt; elements defined for the End (the backing list is created lazily)
    /// </summary>
    public ICollection<OnOperation> Operations
    {
        get
        {
            if (_operations == null)
                _operations = new List<OnOperation>();
            return _operations;
        }
    }

    /// <summary>
    /// do whole element resolution: resolve the unresolved Type attribute to a SchemaEntityType
    /// </summary>
    internal override void ResolveTopLevelNames()
    {
        base.ResolveTopLevelNames();
        if (Type == null && _unresolvedType != null)
        {
            SchemaType element;
            if (!Schema.ResolveTypeName(this, _unresolvedType, out element))
            {
                // ResolveTypeName has already reported the resolution error.
                return;
            }
            Type = element as SchemaEntityType;
            if (Type == null)
            {
                // The name resolved to something that is not an entity type.
                AddError(ErrorCode.InvalidRelationshipEndType, EdmSchemaErrorSeverity.Error,
                    System.Data.Entity.Strings.InvalidRelationshipEndType(ParentElement.Name, element.FQName));
            }
        }
    }

    internal override void Validate()
    {
        base.Validate();

        // Check if the end has multiplicity as many, it cannot have any operation behaviour
        if (Multiplicity == RelationshipMultiplicity.Many && Operations.Count != 0)
        {
            AddError(ErrorCode.EndWithManyMultiplicityCannotHaveOperationsSpecified,
                EdmSchemaErrorSeverity.Error,
                System.Data.Entity.Strings.EndWithManyMultiplicityCannotHaveOperationsSpecified(this.Name, ParentElement.FQName));
        }

        // if there is no RefConstraint in Association and multiplicity is null
        if (this.ParentElement.Constraints.Count == 0 && Multiplicity == null)
        {
            AddError(ErrorCode.EndWithoutMultiplicity,
                EdmSchemaErrorSeverity.Error,
                System.Data.Entity.Strings.EndWithoutMultiplicity(this.Name, ParentElement.FQName));
        }
    }

    /// <summary>
    /// Do simple validation across attributes
    /// </summary>
    protected override void HandleAttributesComplete()
    {
        // set up the default name before validating anything that might want to display it in an error message
        if (Name == null && _unresolvedType != null)
            Name = Utils.ExtractTypeName(Schema.DataModel, _unresolvedType);
        base.HandleAttributesComplete();
    }

    protected override bool ProhibitAttribute(string namespaceUri, string localName)
    {
        if (base.ProhibitAttribute(namespaceUri, localName))
        {
            return true;
        }

        // NOTE(review): both branches below return false, so the Name check is redundant;
        // it only documents that the unqualified Name attribute is explicitly allowed.
        if (namespaceUri == null && localName == XmlConstants.Name)
        {
            return false;
        }
        return false;
    }

    protected override bool HandleAttribute(XmlReader reader)
    {
        if (base.HandleAttribute(reader))
        {
            return true;
        }
        else if (CanHandleAttribute(reader, XmlConstants.Multiplicity))
        {
            HandleMultiplicityAttribute(reader);
            return true;
        }
        else if (CanHandleAttribute(reader, XmlConstants.Role))
        {
            // The Role attribute supplies the end's name.
            HandleNameAttribute(reader);
            return true;
        }
        else if (CanHandleAttribute(reader, XmlConstants.TypeElement))
        {
            HandleTypeAttribute(reader);
            return true;
        }
        return false;
    }

    protected override bool HandleElement(XmlReader reader)
    {
        if (base.HandleElement(reader))
        {
            return true;
        }
        else if (CanHandleElement(reader, XmlConstants.OnDelete))
        {
            HandleOnDeleteElement(reader);
            return true;
        }
        return false;
    }

    /// <summary>
    /// Handle the Type attribute
    /// </summary>
    /// <param name="reader">reader positioned at Type attribute</param>
    private void HandleTypeAttribute(XmlReader reader)
    {
        Debug.Assert(reader != null);
        string type;
        if (!Utils.GetDottedName(this.Schema, reader, out type))
            return;
        // Resolution to an actual SchemaEntityType is deferred to ResolveTopLevelNames.
        _unresolvedType = type;
    }

    /// <summary>
    /// Handle the Multiplicity attribute
    /// </summary>
    /// <param name="reader">reader positioned at the Multiplicity attribute</param>
    private void HandleMultiplicityAttribute(XmlReader reader)
    {
        Debug.Assert(reader != null);
        RelationshipMultiplicity multiplicity;
        if (!TryParseMultiplicity(reader.Value, out multiplicity))
        {
            AddError(ErrorCode.InvalidMultiplicity, EdmSchemaErrorSeverity.Error, reader, System.Data.Entity.Strings.InvalidRelationshipEndMultiplicity(ParentElement.Name, reader.Value));
        }
        // NOTE(review): on parse failure the sentinel (RelationshipMultiplicity)(-1) is
        // still stored here. The AddError above records the schema error, but confirm
        // downstream code never consumes this invalid value.
        _multiplicity = multiplicity;
    }

    /// <summary>
    /// Handle an OnDelete element
    /// </summary>
    /// <param name="reader">reader positioned at the element</param>
    private void HandleOnDeleteElement(XmlReader reader)
    {
        HandleOnOperationElement(reader, Operation.Delete);
    }

    /// <summary>
    /// Handle an On&lt;Operation&gt; element
    /// </summary>
    /// <param name="reader">reader positioned at the element</param>
    /// <param name="operation">the kind of operation being handled</param>
    private void HandleOnOperationElement(XmlReader reader, Operation operation)
    {
        Debug.Assert(reader != null);
        // Only one operation of each kind is allowed per end.
        foreach (OnOperation other in Operations)
        {
            if (other.Operation == operation)
                AddError(ErrorCode.InvalidOperation, EdmSchemaErrorSeverity.Error, reader, System.Data.Entity.Strings.DuplicationOperation(reader.Name));
        }

        // The Operations getter in the loop above guarantees _operations is non-null here.
        OnOperation onOperation = new OnOperation(this, operation);
        onOperation.Parse(reader);
        _operations.Add(onOperation);
    }

    /// <summary>
    /// The parent element as an IRelationship
    /// </summary>
    internal new IRelationship ParentElement
    {
        get
        {
            return (IRelationship)(base.ParentElement);
        }
    }

    /// <summary>
    /// Create a new Multiplicity object from a string
    /// </summary>
    /// <param name="value">string containing Multiplicity definition</param>
    /// <param name="multiplicity">new multiplicity object ((RelationshipMultiplicity)(-1) if there were errors)</param>
    /// <returns>true if the string was parsable, false otherwise</returns>
    private static bool TryParseMultiplicity(string value, out RelationshipMultiplicity multiplicity)
    {
        switch (value)
        {
            case "0..1":
                multiplicity = RelationshipMultiplicity.ZeroOrOne;
                return true;
            case "1":
                multiplicity = RelationshipMultiplicity.One;
                return true;
            case "*":
                multiplicity = RelationshipMultiplicity.Many;
                return true;
            default:
                multiplicity = (RelationshipMultiplicity)(- 1);
                return false;
        }
    }
}
}
| |
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
namespace DotSpatial.Data
{
/// <summary>
/// A single entry (tree node) of an HFA file, holding its name, type name and data location.
/// </summary>
public class HfaEntry
{
    #region Fields

    // Fixed-size character buffers mirroring the on-disk layout:
    // 64 characters for the entry name, 32 for the type name.
    private char[] _name;
    private char[] _typeName;

    #endregion

    #region Constructors

    /// <summary>
    /// Initializes a new instance of the <see cref="HfaEntry"/> class by reading it
    /// from the file at the given position.
    /// </summary>
    /// <param name="hfaIn">The HfaInfo.</param>
    /// <param name="nPos">The position.</param>
    /// <param name="parent">The parent.</param>
    /// <param name="prev">The previous HfaEntry.</param>
    public HfaEntry(HfaInfo hfaIn, long nPos, HfaEntry parent, HfaEntry prev)
    {
        // Initialize fields
        _name = new char[64];
        _typeName = new char[32];
        Hfa = hfaIn;
        FilePos = nPos;
        Parent = parent;
        Prev = prev;
        Hfa.Fp.Seek(nPos, SeekOrigin.Begin);
        Children = new List<HfaEntry>();

        // Read six 32-bit header values; only next (0), child (3), data position (4)
        // and data size (5) are used here.
        int[] anEntryNums = new int[6];
        byte[] entryBytes = new byte[24];
        Hfa.Fp.Read(entryBytes, 0, 24);
        Buffer.BlockCopy(entryBytes, 0, anEntryNums, 0, 24);
        NextPos = anEntryNums[0];
        ChildPos = anEntryNums[3];
        DataPos = anEntryNums[4];
        DataSize = anEntryNums[5];

        // Read the fixed-width name (64 bytes), then the type name (32 bytes).
        byte[] nameBytes = new byte[64];
        Hfa.Fp.Read(nameBytes, 0, 64);
        _name = Encoding.Default.GetChars(nameBytes);
        byte[] typeBytes = new byte[32];
        Hfa.Fp.Read(typeBytes, 0, 32);
        _typeName = Encoding.Default.GetChars(typeBytes);
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="HfaEntry"/> class with the intention that it would
    /// be written to disk later.
    /// </summary>
    /// <param name="hfaIn">The HfaInfo.</param>
    /// <param name="name">The name.</param>
    /// <param name="typename">The type name.</param>
    /// <param name="parent">The parent.</param>
    public HfaEntry(HfaInfo hfaIn, string name, string typename, HfaEntry parent)
    {
        // Initialize
        Hfa = hfaIn;
        Parent = parent;
        Name = name;
        TypeName = typename;
        Children = new List<HfaEntry>();

        // Update the previous or parent node to refer to this one
        if (parent == null)
        {
            // do nothing
        }
        else if (parent.Child == null)
        {
            parent.Child = this;
            parent.IsDirty = true;
        }
        else
        {
            // Link after the parent's last existing child.
            HfaEntry prev = parent.Children[parent.Children.Count - 1];
            prev.Next = this;
            prev.IsDirty = true;
        }

        IsDirty = true;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="HfaEntry"/> class.
    /// </summary>
    public HfaEntry()
    {
        _name = new char[64];
        _typeName = new char[32];
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets or sets the first child belonging to this entry. Null when there are no children.
    /// </summary>
    public HfaEntry Child
    {
        get
        {
            // Guard against an empty list: the previous "Children?[0]" threw an
            // ArgumentOutOfRangeException when the list existed but was empty.
            return Children != null && Children.Count > 0 ? Children[0] : null;
        }

        set
        {
            if (Children == null) Children = new List<HfaEntry>();
            if (Children.Count == 0)
                Children.Add(value); // previously an out-of-range index assignment
            else
                Children[0] = value;
        }
    }

    /// <summary>
    /// Gets or sets the long position of the child.
    /// </summary>
    public long ChildPos { get; set; }

    /// <summary>
    /// Gets or sets the collection of all the children.
    /// </summary>
    public List<HfaEntry> Children { get; set; }

    /// <summary>
    /// Gets or sets the data for this entry.
    /// </summary>
    public byte[] Data { get; set; }

    /// <summary>
    /// Gets or sets the GUInt32 Data Position of this entry.
    /// </summary>
    public long DataPos { get; set; }

    /// <summary>
    /// Gets or sets the GUInt32 Data Size of this entry.
    /// </summary>
    public long DataSize { get; set; }

    /// <summary>
    /// Gets or sets the long integer file position.
    /// </summary>
    public long FilePos { get; set; }

    /// <summary>
    /// Gets or sets the HFA Info.
    /// </summary>
    public HfaInfo Hfa { get; set; }

    /// <summary>
    /// Gets or sets a value indicating whether this is changed.
    /// </summary>
    public bool IsDirty { get; set; }

    /// <summary>
    /// Gets or sets the 64 character name of the entry. The getter strips the trailing
    /// '\0' padding of the fixed-width on-disk field so names compare equal to plain strings.
    /// </summary>
    public string Name
    {
        get
        {
            return new string(_name).TrimEnd('\0');
        }

        set
        {
            // Copy into the fixed 64 character buffer, truncating long values and
            // zero-padding short ones. (The previous ToCharArray(0, 64) threw an
            // ArgumentOutOfRangeException for any name shorter than 64 characters,
            // which broke the write-intent constructor.)
            _name = new char[64];
            value.CopyTo(0, _name, 0, Math.Min(value.Length, _name.Length));
            IsDirty = true;
        }
    }

    /// <summary>
    /// Gets or sets the HfaEntry that is the next entry.
    /// </summary>
    public HfaEntry Next { get; set; }

    /// <summary>
    /// Gets or sets the long integer position of the next entry in the file.
    /// </summary>
    public long NextPos { get; set; }

    /// <summary>
    /// Gets or sets the hfa parent.
    /// </summary>
    public HfaEntry Parent { get; set; }

    /// <summary>
    /// Gets or sets the previous entry.
    /// </summary>
    public HfaEntry Prev { get; set; }

    /// <summary>
    /// Gets or sets the type for this entry.
    /// </summary>
    public HfaType Type { get; set; }

    /// <summary>
    /// Gets or sets the 32 character typestring. The getter strips the trailing '\0'
    /// padding of the fixed-width on-disk field.
    /// </summary>
    public string TypeName
    {
        get
        {
            return new string(_typeName).TrimEnd('\0');
        }

        set
        {
            // Same fixed-width copy as Name, but into the 32 character type buffer.
            _typeName = new char[32];
            value.CopyTo(0, _typeName, 0, Math.Min(value.Length, _typeName.Length));
            IsDirty = true;
        }
    }

    #endregion

    #region Methods

    /// <summary>
    /// Gets the HfaEntry that is the first child.
    /// </summary>
    /// <returns>The first child or null.</returns>
    public HfaEntry GetFirstChild()
    {
        // FirstOrDefault: First() threw InvalidOperationException on an empty list.
        return Children?.FirstOrDefault();
    }

    /// <summary>
    /// This parses a complete "path" separated by periods in order
    /// to search for a specific child node.
    /// </summary>
    /// <param name="name">Name of the child.</param>
    /// <returns>The child with the given name or null.</returns>
    public HfaEntry GetNamedChild(string name)
    {
        if (Children == null) return null; // possible when created via the parameterless constructor

        string firstName = GetFirstChildName(name);
        string subTree = GetSubtreeName(name);
        foreach (HfaEntry child in Children)
        {
            if (child.Name != firstName) continue;
            return subTree != null ? child.GetNamedChild(subTree) : child;
        }

        return null;
    }

    /// <summary>
    /// Loads the data bytes for this element from the file.
    /// </summary>
    public void LoadData()
    {
        if (Data != null || DataSize == 0) return;

        // Allocate the buffer before reading; the original read into a null array,
        // which always threw a NullReferenceException.
        Data = new byte[DataSize];
        Hfa.Fp.Seek(DataPos, SeekOrigin.Begin);
        Hfa.Fp.Read(Data, 0, (int)DataSize);
        Type = Hfa.Dictionary[TypeName];
    }

    /// <summary>
    /// Parses a name which may have an unwanted : or multiple sub-tree
    /// names separated with periods.
    /// </summary>
    /// <param name="name">Name that gets parsed.</param>
    /// <returns>The first part of the name.</returns>
    private static string GetFirstChildName(string name)
    {
        // Cut at whichever of ':' or '.' comes first, if either is present.
        int pos = name.Length;
        if (name.Contains(":")) pos = name.IndexOf(':');
        if (name.Contains("."))
        {
            int tempPos = name.IndexOf('.');
            if (tempPos < pos) pos = tempPos;
        }

        return name.Substring(0, pos);
    }

    /// <summary>
    /// If this is null, then there is no further subtree.
    /// </summary>
    /// <param name="name">The name that gets parsed.</param>
    /// <returns>The subtree name, i.e. everything after the first period, or null.</returns>
    private static string GetSubtreeName(string name)
    {
        if (name.Contains("."))
        {
            int start = name.IndexOf('.') + 1;
            return name.Substring(start, name.Length - start);
        }

        return null;
    }

    #endregion
}
}
| |
using System;
using System.Drawing;
using System.Windows.Forms;
using AudioSwitch.Classes;
using AudioSwitch.CoreAudioApi;
using AudioSwitch.Properties;
namespace AudioSwitch.Controls
{
internal partial class VolumeBar : UserControl
{
// Raised whenever the volume value or the mute state changes.
internal EventHandler VolumeMuteChanged;
// Audio endpoint this bar controls.
internal MMDevice Device;
internal bool Stereo;
// Time of the last mouse-wheel scroll; recent scrolls use a larger step (see DoScroll).
private static DateTime LastScroll = DateTime.Now;
// Window within which consecutive wheel scrolls count as fast (80 ms).
private static readonly TimeSpan ShortInterval = new TimeSpan(0, 0, 0, 0, 80);
// Point inside the thumb where the current drag was grabbed.
private Point pMousePosition = Point.Empty;
// True while the thumb is being dragged.
private bool Moving;
private readonly ToolTip handleTip = new ToolTip();
// Backing field for Mute.
private bool _mute;
// Mute state shown by this control. Setting it swaps the thumb image and raises
// VolumeMuteChanged; the audio endpoint itself is updated separately (see ChangeMute).
internal bool Mute
{
    get { return _mute; }
    private set
    {
        _mute = value;
        // Dispose the previous image before replacing it.
        Thumb.BackgroundImage.Dispose();
        Thumb.BackgroundImage = value ? Resources.ThumbMute : Resources.ThumbNormal;
        VolumeMuteChanged?.Invoke(this, null);
    }
}
// Backing field for Value.
private float _value;
// Volume position as a fraction of the track (assumed 0..1 — see ChangeVolume).
// Setting it repositions the thumb and raises VolumeMuteChanged.
internal float Value
{
    get { return _value; }
    private set
    {
        _value = value;
        MoveThumb();
        VolumeMuteChanged?.Invoke(this, null);
    }
}
/// <summary>
/// Intercepts WM_MOUSEWHEEL so wheel scrolling over the control adjusts the volume.
/// </summary>
protected override void WndProc(ref Message m)
{
    const int WM_MOUSEWHEEL = 0x020A; // 522
    if (m.Msg == WM_MOUSEWHEEL)
    {
        // Read WParam via ToInt64: the previous direct casts to int could throw
        // OverflowException in a 64-bit process when the value does not fit in 32 bits.
        int wParam = unchecked((int)m.WParam.ToInt64());
        short delta = (short)(wParam >> 16);                 // high-order word: wheel delta
        var eventType = (ScrollEventType)(wParam & 0xffff);  // low-order word, as before
        DoScroll(this, new ScrollEventArgs(eventType, delta));
    }
    else
    {
        base.WndProc(ref m);
    }
}
/// <summary>
/// Creates the control and attaches the "Master Volume" tooltip to the thumb.
/// </summary>
public VolumeBar()
{
    InitializeComponent();
    handleTip.SetToolTip(Thumb, "Master Volume");
}
// Toggles the mute state and pushes the new state to the audio endpoint.
private void ChangeMute()
{
    bool newMute = !_mute;
    Mute = newMute;
    Device.AudioEndpointVolume.Mute = newMute;
}
// Applies a volume scalar to the endpoint and raises the change event without
// repositioning the thumb (callers position the thumb themselves).
internal void ChangeVolume(float value)
{
    _value = value;
    Device.AudioEndpointVolume.MasterVolumeLevelScalar = _value;
    VolumeMuteChanged?.Invoke(this, null);
}
// Positions the thumb along the track proportionally to the current value.
private void MoveThumb()
{
    double track = ClientSize.Width - Thumb.Width;
    Thumb.Left = (int)(_value * track);
}
// Left button starts a drag (remembering the grab point inside the thumb);
// any other button toggles mute.
private void Thumb_MouseDown(object sender, MouseEventArgs e)
{
    if (e.Button != MouseButtons.Left)
    {
        ChangeMute();
        return;
    }
    pMousePosition = Thumb.PointToClient(MousePosition);
    Moving = true;
}
private void Thumb_MouseUp(object sender, MouseEventArgs e)
{
Moving = false;
}
private void Thumb_MouseMove(object sender, MouseEventArgs e)
{
if (Moving && e.Button == MouseButtons.Left)
{
var theFormPosition = PointToClient(MousePosition);
theFormPosition.X -= pMousePosition.X;
if (theFormPosition.X > Width - Thumb.Width)
theFormPosition.X = Width - Thumb.Width;
if (theFormPosition.X < 0)
theFormPosition.X = 0;
Thumb.Left = theFormPosition.X;
Thumb.Refresh();
var trackStep = (float)(ClientSize.Width - Thumb.Width);
ChangeVolume(Thumb.Left / trackStep);
}
}
private void lblGraph_MouseDown(object sender, MouseEventArgs e)
{
if (e.Button == MouseButtons.Left)
{
Moving = true;
var theFormPosition = PointToClient(MousePosition);
theFormPosition.X -= Thumb.Width / 2;
if (theFormPosition.X > Width - Thumb.Width)
theFormPosition.X = Width - Thumb.Width;
if (theFormPosition.X < 0)
theFormPosition.X = 0;
Thumb.Left = theFormPosition.X;
}
else
{
ChangeMute();
}
}
private void lblGraph_MouseMove(object sender, MouseEventArgs e)
{
if (Moving && e.Button == MouseButtons.Left)
{
var theFormPosition = PointToClient(MousePosition);
theFormPosition.X -= Thumb.Width / 2;
if (theFormPosition.X > Width - Thumb.Width)
theFormPosition.X = Width - Thumb.Width;
if (theFormPosition.X < 0)
theFormPosition.X = 0;
Thumb.Left = theFormPosition.X;
var trackStep = (float)(ClientSize.Width - Thumb.Width);
ChangeVolume(Thumb.Left / trackStep);
}
}
private void lblGraph_MouseUp(object sender, MouseEventArgs e)
{
Moving = false;
}
private void Thumb_Move(object sender, EventArgs e)
{
Thumb.Refresh();
lblGraph.Refresh();
}
private void Thumb_MouseEnter(object sender, EventArgs e)
{
if (_mute) return;
Thumb.BackgroundImage.Dispose();
Thumb.BackgroundImage = Resources.ThumbHover;
}
private void Thumb_MouseLeave(object sender, EventArgs e)
{
if (_mute) return;
Thumb.BackgroundImage.Dispose();
Thumb.BackgroundImage = Resources.ThumbNormal;
}
internal void DoScroll(object sender, ScrollEventArgs e)
{
var amount = DateTime.Now - LastScroll <= ShortInterval ? 0.04f : 0.02f;
LastScroll = DateTime.Now;
ChangeVolumeSafe(e.NewValue, amount);
}
private void ChangeVolumeSafe(int direction, float amount)
{
if (direction > 0)
if (Value <= 1 - amount)
ChangeVolume(Value + amount);
else
ChangeVolume(1);
else if (direction < 0)
if (Value >= amount)
ChangeVolume(Value - amount);
else
ChangeVolume(0);
}
private void VolNotify(AudioVolumeNotificationData data)
{
if (InvokeRequired)
Invoke(new AudioEndpointVolumeNotificationDelegate(VolNotify), data);
else
{
if (!Moving)
Value = data.MasterVolume;
Mute = data.Muted;
}
}
internal void RegisterDevice(EDataFlow RenderType)
{
Device = EndPoints.DeviceEnumerator.GetDefaultAudioEndpoint(RenderType, ERole.eMultimedia);
Value = Device.AudioEndpointVolume.MasterVolumeLevelScalar;
Mute = Device.AudioEndpointVolume.Mute;
Stereo = Device.AudioMeterInformation.Channels.GetCount() > 1;
Device.AudioEndpointVolume.OnVolumeNotification += VolNotify;
}
}
}
| |
#region MIT License
/*
* Copyright (c) 2005-2008 Jonathan Mark Porter. http://physics2d.googlepages.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#endregion
#if UseDouble
using Scalar = System.Double;
#else
using Scalar = System.Single;
#endif
using System;
using System.Collections.Generic;
using AdvanceMath;
using AdvanceMath.Geometry2D;
namespace Physics2DDotNet.Shapes
{
/// <summary>
/// Use this to Represent a Polygon in the engine
/// </summary>
[Serializable]
public sealed class PolygonShape :
    IShape, IRaySegmentsCollidable, ILineFluidAffectable , IExplosionAffectable
{
    #region fields
    // Precomputed distance grid used to answer point-intersection queries quickly.
    private DistanceGrid grid;
    private Vector2D[] vertexes;
    // Outward normals, one per vertex, derived from the vertex list.
    private Vector2D[] vertexNormals;
    // Centroid, area and inertia are computed once from the vertexes at construction.
    private Vector2D centroid;
    private Scalar area;
    private Scalar inertia;
    // Arbitrary user data attached to this shape.
    private object tag;
    #endregion
    #region constructors
    /// <summary>
    /// Creates a new Polygon Instance.
    /// </summary>
    /// <param name="vertexes">the vertexes that make up the shape of the Polygon</param>
    /// <param name="gridSpacing">
    /// How large a grid cell is. Usualy you will want at least 2 cells between major vertexes.
    /// The smaller this is the better precision you get, but higher cost in memory.
    /// The larger the less precision and if it's to high collision detection may fail completely.
    /// </param>
    public PolygonShape(Vector2D[] vertexes, Scalar gridSpacing)
    {
        if (vertexes == null) { throw new ArgumentNullException("vertexes"); }
        if (vertexes.Length < 3) { throw new ArgumentException("too few", "vertexes"); }
        if (gridSpacing <= 0) { throw new ArgumentOutOfRangeException("gridSpacing"); }
        this.vertexes = vertexes;
        this.grid = new DistanceGrid(this, gridSpacing);
        this.vertexNormals = VertexHelper.GetVertexNormals(this.vertexes);
        // Derive mass properties (inertia, centroid, area) from the vertex list.
        VertexInfo info = VertexHelper.GetVertexInfo(vertexes);
        this.inertia = info.Inertia;
        this.centroid = info.Centroid;
        this.area = info.Area;
    }
    /// <summary>
    /// Creates a new Polygon Instance.
    /// </summary>
    /// <param name="vertexes">the vertexes that make up the shape of the Polygon</param>
    /// <param name="gridSpacing">
    /// How large a grid cell is. Usualy you will want at least 2 cells between major vertexes.
    /// The smaller this is the better precision you get, but higher cost in memory.
    /// The larger the less precision and if it's to high collision detection may fail completely.
    /// </param>
    /// <param name="inertia">
    /// How hard it is to turn the shape. Depending on the construtor in the
    /// Body this will be multiplied with the mass to determine the moment of inertia.
    /// </param>
    public PolygonShape(Vector2D[] vertexes, Scalar gridSpacing, Scalar inertia):this(vertexes,gridSpacing)
    {
        // Overrides the inertia computed by the delegated constructor.
        this.inertia = inertia;
    }
    #endregion
    #region properties
    /// <summary>Arbitrary user data attached to this shape.</summary>
    public object Tag
    {
        get { return tag; }
        set { tag = value; }
    }
    public Vector2D[] Vertexes { get { return vertexes; } }
    public Vector2D[] VertexNormals { get { return vertexNormals; } }
    /// <summary>Moment-of-inertia factor computed from (or supplied at) construction.</summary>
    public Scalar Inertia
    {
        get { return inertia; }
    }
    public Vector2D Centroid
    {
        get { return centroid; }
    }
    public Scalar Area
    {
        get { return area; }
    }
    // Point intersection is supported (via the distance grid) ...
    public bool CanGetIntersection
    {
        get { return true; }
    }
    // ... but custom body-vs-body intersection is not (see TryGetCustomIntersection).
    public bool CanGetCustomIntersection
    {
        get { return false; }
    }
    #endregion
    #region methods
    public void CalcBoundingRectangle(ref Matrix2x3 matrix, out BoundingRectangle rectangle)
    {
        BoundingRectangle.FromVectors(ref matrix, Vertexes, out rectangle);
    }
    public void GetDistance(ref Vector2D point, out Scalar result)
    {
        BoundingPolygon.GetDistance(Vertexes, ref point, out result);
    }
    public bool TryGetIntersection(Vector2D point, out IntersectionInfo info)
    {
        // Delegates to the precomputed distance grid.
        return grid.TryGetIntersection(point, out info);
    }
    public bool TryGetCustomIntersection(Body self, Body other, out object customIntersectionInfo)
    {
        throw new NotSupportedException();
    }
    // Projects the polygon onto the flow tangent to get the drag center and span.
    DragInfo IGlobalFluidAffectable.GetFluidInfo(Vector2D tangent)
    {
        Scalar min, max;
        ShapeHelper.GetProjectedBounds(this.Vertexes, tangent, out min, out max);
        Scalar avg = (max + min) / 2;
        return new DragInfo(tangent * avg, max - min);
    }
    // Intersects each ray segment with this polygon (transformed into the rays'
    // body space). result[i] is the hit distance, or -1 for a miss.
    bool IRaySegmentsCollidable.TryGetRayCollision(Body thisBody, Body raysBody, RaySegmentsShape raySegments, out RaySegmentIntersectionInfo info)
    {
        bool intersects = false;
        Scalar temp;
        RaySegment[] segments = raySegments.Segments;
        Scalar[] result = new Scalar[segments.Length];
        // Maps this shape's local vertexes into the ray body's coordinate space.
        Matrix2x3 matrix = raysBody.Matrices.ToBody * thisBody.Matrices.ToWorld;
        Vector2D[] polygon = new Vector2D[Vertexes.Length];
        for (int index = 0; index < polygon.Length; ++index)
        {
            Vector2D.Transform(ref matrix, ref Vertexes[index], out polygon[index]);
        }
        BoundingRectangle rect;
        BoundingRectangle.FromVectors(polygon, out rect);
        BoundingPolygon poly = new BoundingPolygon(polygon);
        for (int index = 0; index < segments.Length; ++index)
        {
            RaySegment segment = segments[index];
            // Cheap rectangle test first; only then the exact polygon test.
            rect.Intersects(ref segment.RayInstance, out temp);
            if (temp >= 0 && temp <= segment.Length)
            {
                poly.Intersects(ref segment.RayInstance, out temp);
                if (temp < 0 || temp > segment.Length)
                {
                    result[index] = -1;
                }
                else
                {
                    result[index] = temp;
                    intersects = true;
                }
            }
            else
            {
                result[index] = -1;
            }
        }
        if (intersects)
        {
            info = new RaySegmentIntersectionInfo(result);
        }
        else
        {
            info = null;
        }
        return intersects;
    }
    FluidInfo ILineFluidAffectable.GetFluidInfo(GetTangentCallback callback, Line line)
    {
        return ShapeHelper.GetFluidInfo(Vertexes, callback, line);
    }
    // Clips the (transformed) polygon against the blast radius, then computes drag
    // info for the surviving region. Returns null if fewer than 3 vertexes remain.
    DragInfo IExplosionAffectable.GetExplosionInfo(Matrix2x3 matrix, Scalar radius, GetTangentCallback callback)
    {
        //TODO: do this right!
        Vector2D[] vertexes2 = new Vector2D[Vertexes.Length];
        for (int index = 0; index < vertexes2.Length; ++index)
        {
            vertexes2[index] = matrix * Vertexes[index];
        }
        Vector2D[] inter = VertexHelper.GetIntersection(vertexes2, radius);
        if (inter.Length < 3) { return null; }
        Vector2D centroid = VertexHelper.GetCentroid(inter);
        Vector2D tangent = callback(centroid);
        Scalar min,max;
        ShapeHelper.GetProjectedBounds(inter, tangent, out min, out max);
        Scalar avg = (max + min) / 2;
        return new DragInfo(tangent * avg, max - min);
    }
    #endregion
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Globalization;
using Avalonia.Animation;
using Avalonia.Animation.Animators;
using Avalonia.Utilities;
namespace Avalonia
{
/// <summary>
/// Describes the thickness of a frame around a rectangle.
/// </summary>
public readonly struct Thickness
{
    static Thickness()
    {
        // Register the animator so Thickness-typed avalonia properties can be animated.
        Animation.Animation.RegisterAnimator<ThicknessAnimator>(prop => typeof(Thickness).IsAssignableFrom(prop.PropertyType));
    }

    private readonly double _left;
    private readonly double _top;
    private readonly double _right;
    private readonly double _bottom;

    /// <summary>
    /// Initializes a new instance of the <see cref="Thickness"/> structure.
    /// </summary>
    /// <param name="uniformLength">The length that should be applied to all sides.</param>
    public Thickness(double uniformLength)
        : this(uniformLength, uniformLength, uniformLength, uniformLength)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Thickness"/> structure.
    /// </summary>
    /// <param name="horizontal">The thickness on the left and right.</param>
    /// <param name="vertical">The thickness on the top and bottom.</param>
    public Thickness(double horizontal, double vertical)
        : this(horizontal, vertical, horizontal, vertical)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Thickness"/> structure.
    /// </summary>
    /// <param name="left">The thickness on the left.</param>
    /// <param name="top">The thickness on the top.</param>
    /// <param name="right">The thickness on the right.</param>
    /// <param name="bottom">The thickness on the bottom.</param>
    public Thickness(double left, double top, double right, double bottom)
    {
        _left = left;
        _top = top;
        _right = right;
        _bottom = bottom;
    }

    /// <summary>
    /// Gets the thickness on the left.
    /// </summary>
    public double Left => _left;

    /// <summary>
    /// Gets the thickness on the top.
    /// </summary>
    public double Top => _top;

    /// <summary>
    /// Gets the thickness on the right.
    /// </summary>
    public double Right => _right;

    /// <summary>
    /// Gets the thickness on the bottom.
    /// </summary>
    public double Bottom => _bottom;

    /// <summary>
    /// Gets a value indicating whether all sides are set to 0.
    /// </summary>
    public bool IsEmpty => Left.Equals(0) && IsUniform;

    /// <summary>
    /// Gets a value indicating whether all sides are equal.
    /// </summary>
    public bool IsUniform => Left.Equals(Right) && Top.Equals(Bottom) && Right.Equals(Bottom);

    /// <summary>
    /// Compares two Thicknesses for equality.
    /// </summary>
    /// <param name="a">The first thickness.</param>
    /// <param name="b">The second thickness.</param>
    /// <returns>The equality.</returns>
    public static bool operator ==(Thickness a, Thickness b) => a.Equals(b);

    /// <summary>
    /// Compares two Thicknesses for inequality.
    /// </summary>
    /// <param name="a">The first thickness.</param>
    /// <param name="b">The second thickness.</param>
    /// <returns>The inequality.</returns>
    public static bool operator !=(Thickness a, Thickness b) => !a.Equals(b);

    /// <summary>
    /// Adds two Thicknesses side by side.
    /// </summary>
    /// <param name="a">The first thickness.</param>
    /// <param name="b">The second thickness.</param>
    /// <returns>The sum of the two thicknesses.</returns>
    public static Thickness operator +(Thickness a, Thickness b) =>
        new Thickness(a.Left + b.Left, a.Top + b.Top, a.Right + b.Right, a.Bottom + b.Bottom);

    /// <summary>
    /// Subtracts one Thickness from another, side by side.
    /// </summary>
    /// <param name="a">The first thickness.</param>
    /// <param name="b">The second thickness.</param>
    /// <returns>The difference of the two thicknesses.</returns>
    public static Thickness operator -(Thickness a, Thickness b) =>
        new Thickness(a.Left - b.Left, a.Top - b.Top, a.Right - b.Right, a.Bottom - b.Bottom);

    /// <summary>
    /// Multiplies each side of a Thickness by a scalar.
    /// </summary>
    /// <param name="a">The thickness.</param>
    /// <param name="b">The scalar.</param>
    /// <returns>The scaled thickness.</returns>
    public static Thickness operator *(Thickness a, double b) =>
        new Thickness(a.Left * b, a.Top * b, a.Right * b, a.Bottom * b);

    /// <summary>
    /// Grows a Size by a Thickness (left+right added to width, top+bottom to height).
    /// </summary>
    /// <param name="size">The size.</param>
    /// <param name="thickness">The thickness.</param>
    /// <returns>The enlarged size.</returns>
    public static Size operator +(Size size, Thickness thickness) =>
        new Size(
            size.Width + thickness.Left + thickness.Right,
            size.Height + thickness.Top + thickness.Bottom);

    /// <summary>
    /// Shrinks a Size by a Thickness (left+right removed from width, top+bottom from height).
    /// </summary>
    /// <param name="size">The size.</param>
    /// <param name="thickness">The thickness.</param>
    /// <returns>The reduced size.</returns>
    public static Size operator -(Size size, Thickness thickness) =>
        new Size(
            size.Width - (thickness.Left + thickness.Right),
            size.Height - (thickness.Top + thickness.Bottom));

    /// <summary>
    /// Parses a <see cref="Thickness"/> string of 1, 2 or 4 comma/space separated numbers.
    /// </summary>
    /// <param name="s">The string.</param>
    /// <returns>The <see cref="Thickness"/>.</returns>
    public static Thickness Parse(string s)
    {
        using (var tokenizer = new StringTokenizer(s, CultureInfo.InvariantCulture, exceptionMessage: "Invalid Thickness"))
        {
            // Guard clauses: the number of values read determines which constructor applies.
            if (!tokenizer.TryReadDouble(out var a))
            {
                throw new FormatException("Invalid Thickness.");
            }

            if (!tokenizer.TryReadDouble(out var b))
            {
                return new Thickness(a);
            }

            if (!tokenizer.TryReadDouble(out var c))
            {
                return new Thickness(a, b);
            }

            return new Thickness(a, b, c, tokenizer.ReadDouble());
        }
    }

    /// <summary>
    /// Checks for equality between a thickness and an object.
    /// </summary>
    /// <param name="obj">The object.</param>
    /// <returns>
    /// True if <paramref name="obj"/> is a thickness whose four sides equal this one's.
    /// </returns>
    public override bool Equals(object obj)
    {
        return obj is Thickness other
            && Left == other.Left
            && Top == other.Top
            && Right == other.Right
            && Bottom == other.Bottom;
    }

    /// <summary>
    /// Returns a hash code for a <see cref="Thickness"/>.
    /// </summary>
    /// <returns>The hash code.</returns>
    public override int GetHashCode()
    {
        // Standard 17/23 hash combine over the four sides.
        unchecked
        {
            var hash = 17;
            hash = (hash * 23) + Left.GetHashCode();
            hash = (hash * 23) + Top.GetHashCode();
            hash = (hash * 23) + Right.GetHashCode();
            hash = (hash * 23) + Bottom.GetHashCode();
            return hash;
        }
    }

    /// <summary>
    /// Returns the string representation of the thickness.
    /// </summary>
    /// <returns>The string representation of the thickness.</returns>
    public override string ToString()
    {
        return $"{_left},{_top},{_right},{_bottom}";
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
#if !MSBUILD12
using Microsoft.Build.Construction;
#endif
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.MSBuild
{
/// <summary>
/// A workspace that can be populated by opening MSBuild solution and project files.
/// </summary>
public sealed class MSBuildWorkspace : Workspace
{
// used to serialize access to public methods
private readonly NonReentrantLock _serializationLock = new NonReentrantLock();
// used to protect access to mutable state
private readonly NonReentrantLock _dataGuard = new NonReentrantLock();
private readonly Dictionary<string, string> _extensionToLanguageMap = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, ProjectId> _projectPathToProjectIdMap = new Dictionary<string, ProjectId>(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, IProjectFileLoader> _projectPathToLoaderMap = new Dictionary<string, IProjectFileLoader>(StringComparer.OrdinalIgnoreCase);
private string _solutionFilePath;
private ImmutableDictionary<string, string> _properties;
// Private: instances are created through the static Create overloads.
private MSBuildWorkspace(
    HostServices hostServices,
    ImmutableDictionary<string, string> properties)
    : base(hostServices, "MSBuildWorkspace")
{
    // always make a copy of these build properties (no mutation please!)
    _properties = properties ?? ImmutableDictionary<string, string>.Empty;
    this.SetSolutionProperties(solutionFilePath: null);
    // Defaults: open referenced projects (don't substitute output metadata) and
    // log-and-skip unrecognized projects instead of throwing.
    this.LoadMetadataForReferencedProjects = false;
    this.SkipUnrecognizedProjects = true;
}
/// <summary>
/// Create a new instance of a workspace that can be populated by opening solution and project files.
/// Uses an empty MSBuild property set.
/// </summary>
public static MSBuildWorkspace Create() => Create(ImmutableDictionary<string, string>.Empty);
/// <summary>
/// Create a new instance of a workspace that can be populated by opening solution and project files,
/// using the default desktop host services.
/// </summary>
/// <param name="properties">An optional set of MSBuild properties used when interpreting project files.
/// These are the same properties that are passed to msbuild via the /property:&lt;n&gt;=&lt;v&gt; command line argument.</param>
public static MSBuildWorkspace Create(IDictionary<string, string> properties) =>
    Create(properties, DesktopMefHostServices.DefaultServices);
/// <summary>
/// Create a new instance of a workspace that can be populated by opening solution and project files.
/// </summary>
/// <param name="properties">The MSBuild properties used when interpreting project files.
/// These are the same properties that are passed to msbuild via the /property:&lt;n&gt;=&lt;v&gt; command line argument.</param>
/// <param name="hostServices">The <see cref="HostServices"/> used to configure this workspace.</param>
public static MSBuildWorkspace Create(IDictionary<string, string> properties, HostServices hostServices)
{
    // Validate in parameter order so callers get the expected exception for each null.
    if (properties == null) { throw new ArgumentNullException(nameof(properties)); }
    if (hostServices == null) { throw new ArgumentNullException(nameof(hostServices)); }

    return new MSBuildWorkspace(hostServices, properties.ToImmutableDictionary());
}
/// <summary>
/// The MSBuild properties used when interpreting project files.
/// These are the same properties that are passed to msbuild via the /property:&lt;n&gt;=&lt;v&gt; command line argument.
/// </summary>
public ImmutableDictionary<string, string> Properties
{
    get { return _properties; }
}
/// <summary>
/// Determines if metadata from existing output assemblies is loaded instead of opening referenced projects.
/// If the referenced project is already opened, the metadata will not be loaded.
/// If the metadata assembly cannot be found the referenced project will be opened instead.
/// </summary>
public bool LoadMetadataForReferencedProjects { get; set; }
/// <summary>
/// Determines if unrecognized projects are skipped when solutions or projects are opened.
///
/// A project is unrecognized if it either has
/// a) an invalid file path,
/// b) a non-existent project file,
/// c) has an unrecognized file extension or
/// d) a file extension associated with an unsupported language.
///
/// If unrecognized projects cannot be skipped a corresponding exception is thrown.
/// </summary>
public bool SkipUnrecognizedProjects { get; set; }
/// <summary>
/// Associates a project file extension with a language name, so projects with that
/// extension are loaded via that language's <see cref="IProjectFileLoader"/>.
/// </summary>
public void AssociateFileExtensionWithLanguage(string projectFileExtension, string language)
{
    // Validation order matters for which exception is thrown when both are null.
    if (language == null) { throw new ArgumentNullException(nameof(language)); }
    if (projectFileExtension == null) { throw new ArgumentNullException(nameof(projectFileExtension)); }

    using (_dataGuard.DisposableWait())
    {
        _extensionToLanguageMap[projectFileExtension] = language;
    }
}
/// <summary>
/// Close the open solution, and reset the workspace to a new empty solution.
/// </summary>
public void CloseSolution()
{
    // _serializationLock serializes all public mutating operations on this workspace.
    using (_serializationLock.DisposableWait())
    {
        this.ClearSolution();
    }
}
// Clears base workspace solution state plus this workspace's per-solution caches
// (solution path / $(SolutionDir) property, project-path-to-id and path-to-loader maps).
protected override void ClearSolutionData()
{
    base.ClearSolutionData();
    using (_dataGuard.DisposableWait())
    {
        // passing null clears _solutionFilePath; the SolutionDir property is left as-is
        this.SetSolutionProperties(solutionFilePath: null);
        // clear project related data
        _projectPathToProjectIdMap.Clear();
        _projectPathToLoaderMap.Clear();
    }
}
private const string SolutionDirProperty = "SolutionDir";

// Remembers the solution path and, when one is provided, defines the $(SolutionDir)
// MSBuild property to the directory containing the .sln file (trailing backslash
// included, matching MSBuild's own convention).
private void SetSolutionProperties(string solutionFilePath)
{
    _solutionFilePath = solutionFilePath;

    // When MSBuild is building an individual project, it doesn't define $(SolutionDir).
    // However when building an .sln file, or when working inside Visual Studio,
    // $(SolutionDir) is defined to be the directory where the .sln file is located.
    // Some projects out there rely on $(SolutionDir) being set (although the best practice is to
    // use MSBuildProjectDirectory which is always defined).
    if (!string.IsNullOrEmpty(solutionFilePath))
    {
        string solutionDirectory = Path.GetDirectoryName(solutionFilePath);
        if (!solutionDirectory.EndsWith(@"\", StringComparison.Ordinal))
        {
            solutionDirectory += @"\";
        }

        if (Directory.Exists(solutionDirectory))
        {
            _properties = _properties.SetItem(SolutionDirProperty, solutionDirectory);
        }
    }
}
// Looks up the ProjectId previously assigned to this absolute project path,
// or null if the path has not been seen.
private ProjectId GetProjectId(string fullProjectPath)
{
    using (_dataGuard.DisposableWait())
    {
        ProjectId id;
        return _projectPathToProjectIdMap.TryGetValue(fullProjectPath, out id) ? id : null;
    }
}
// Returns the ProjectId for this absolute project path, minting and caching a new
// one on first sight.
private ProjectId GetOrCreateProjectId(string fullProjectPath)
{
    using (_dataGuard.DisposableWait())
    {
        ProjectId existing;
        if (_projectPathToProjectIdMap.TryGetValue(fullProjectPath, out existing))
        {
            return existing;
        }

        var created = ProjectId.CreateNewId(debugName: fullProjectPath);
        _projectPathToProjectIdMap.Add(fullProjectPath, created);
        return created;
    }
}
// Resolves the IProjectFileLoader for a project path: first from the per-path cache,
// then via a user-registered extension-to-language mapping, and finally via loaders
// registered for the bare file extension. Failures are surfaced per 'mode'; returns
// false (loader null) when no loader could be determined.
private bool TryGetLoaderFromProjectPath(string projectFilePath, ReportMode mode, out IProjectFileLoader loader)
{
    using (_dataGuard.DisposableWait())
    {
        // check to see if we already know the loader
        if (!_projectPathToLoaderMap.TryGetValue(projectFilePath, out loader))
        {
            // otherwise try to figure it out from extension
            var extension = Path.GetExtension(projectFilePath);
            if (extension.Length > 0 && extension[0] == '.')
            {
                // strip the leading dot; maps are keyed by bare extension
                extension = extension.Substring(1);
            }

            string language;
            if (_extensionToLanguageMap.TryGetValue(extension, out language))
            {
                // extension was explicitly mapped via AssociateFileExtensionWithLanguage
                if (this.Services.SupportedLanguages.Contains(language))
                {
                    loader = this.Services.GetLanguageServices(language).GetService<IProjectFileLoader>();
                }
                else
                {
                    this.ReportFailure(mode, string.Format(WorkspacesResources.CannotOpenProjectUnsupportedLanguage, projectFilePath, language));
                    return false;
                }
            }
            else
            {
                loader = ProjectFileLoader.GetLoaderForProjectFileExtension(this, extension);

                if (loader == null)
                {
                    this.ReportFailure(mode, string.Format(WorkspacesResources.CannotOpenProjectUnrecognizedFileExtension, projectFilePath, Path.GetExtension(projectFilePath)));
                    return false;
                }
            }

            // cache the resolved loader for subsequent lookups of the same path
            if (loader != null)
            {
                _projectPathToLoaderMap[projectFilePath] = loader;
            }
        }

        return loader != null;
    }
}
// Resolves 'path' against 'baseDirectory' to an absolute path and verifies the file
// exists. Failures are reported per 'mode'; returns false with absolutePath null
// (invalid path) or set (missing file).
private bool TryGetAbsoluteProjectPath(string path, string baseDirectory, ReportMode mode, out string absolutePath)
{
    try
    {
        absolutePath = this.GetAbsolutePath(path, baseDirectory);
    }
    catch (Exception)
    {
        ReportFailure(mode, string.Format(WorkspacesResources.InvalidProjectFilePath, path));
        absolutePath = null;
        return false;
    }

    if (File.Exists(absolutePath))
    {
        return true;
    }

    ReportFailure(
        mode,
        string.Format(WorkspacesResources.ProjectFileNotFound, absolutePath),
        msg => new FileNotFoundException(msg));
    return false;
}
// Resolves a solution path to an absolute path, throwing (never logging) when the
// path is invalid or the file does not exist.
private string GetAbsoluteSolutionPath(string path, string baseDirectory)
{
    string absolutePath;
    try
    {
        absolutePath = GetAbsolutePath(path, baseDirectory);
    }
    catch (Exception)
    {
        throw new InvalidOperationException(string.Format(WorkspacesResources.InvalidSolutionFilePath, path));
    }

    if (File.Exists(absolutePath))
    {
        return absolutePath;
    }

    throw new FileNotFoundException(string.Format(WorkspacesResources.SolutionFileNotFound, absolutePath));
}
// How a failure encountered while opening a solution/project is surfaced.
private enum ReportMode
{
    Throw,  // raise an exception immediately
    Log,    // report via OnWorkspaceFailed and continue
    Ignore  // silently continue
}
// Surfaces a failure according to 'mode': Throw raises (using createException when
// supplied, else InvalidOperationException), Log raises a workspace diagnostic
// event, Ignore does nothing.
private void ReportFailure(ReportMode mode, string message, Func<string, Exception> createException = null)
{
    if (mode == ReportMode.Throw)
    {
        throw createException != null
            ? createException(message)
            : new InvalidOperationException(message);
    }

    if (mode == ReportMode.Log)
    {
        this.OnWorkspaceFailed(new WorkspaceDiagnostic(WorkspaceDiagnosticKind.Failure, message));
    }

    // ReportMode.Ignore (or any unknown mode): intentionally do nothing.
}
// Resolves 'path' relative to 'baseDirectoryPath' (falling back to the path itself
// when resolution yields null) and normalizes it to a full path.
private string GetAbsolutePath(string path, string baseDirectoryPath)
{
    var resolvedPath = FileUtilities.ResolveRelativePath(path, baseDirectoryPath);
    return Path.GetFullPath(resolvedPath ?? path);
}
#region Open Solution & Project
/// <summary>
/// Open a solution file and all referenced projects.
/// </summary>
/// <param name="solutionFilePath">Path to the .sln file; may be relative to the current directory.</param>
/// <param name="cancellationToken">Checked between projects while loading.</param>
/// <returns>The workspace's current solution after all recognizable projects are loaded.</returns>
public async Task<Solution> OpenSolutionAsync(string solutionFilePath, CancellationToken cancellationToken = default(CancellationToken))
{
    if (solutionFilePath == null)
    {
        throw new ArgumentNullException(nameof(solutionFilePath));
    }

    // Any previously opened solution is discarded first.
    this.ClearSolution();

    var absoluteSolutionPath = this.GetAbsoluteSolutionPath(solutionFilePath, Directory.GetCurrentDirectory());
    using (_dataGuard.DisposableWait(cancellationToken))
    {
        this.SetSolutionProperties(absoluteSolutionPath);
    }

    VersionStamp version = default(VersionStamp);

#if !MSBUILD12
    // Modern MSBuild: use its solution parser directly.
    Microsoft.Build.Construction.SolutionFile solutionFile = Microsoft.Build.Construction.SolutionFile.Parse(absoluteSolutionPath);
    var reportMode = this.SkipUnrecognizedProjects ? ReportMode.Log : ReportMode.Throw;
    var invalidProjects = new List<ProjectInSolution>();

    // seed loaders from known project types
    using (_dataGuard.DisposableWait(cancellationToken))
    {
        foreach (var project in solutionFile.ProjectsInOrder)
        {
            if (project.ProjectType == SolutionProjectType.SolutionFolder)
            {
                continue;
            }

            var projectAbsolutePath = TryGetAbsolutePath(project.AbsolutePath, reportMode);
            if (projectAbsolutePath != null)
            {
                var extension = Path.GetExtension(projectAbsolutePath);
                if (extension.Length > 0 && extension[0] == '.')
                {
                    extension = extension.Substring(1);
                }

                var loader = ProjectFileLoader.GetLoaderForProjectFileExtension(this, extension);
                if (loader != null)
                {
                    _projectPathToLoaderMap[projectAbsolutePath] = loader;
                }
            }
            else
            {
                // remember so the load pass below skips it instead of re-reporting
                invalidProjects.Add(project);
            }
        }
    }

    // a list to accumulate all the loaded projects
    var loadedProjects = new List<ProjectInfo>();

    // load all the projects
    foreach (var project in solutionFile.ProjectsInOrder)
    {
        cancellationToken.ThrowIfCancellationRequested();

        if (project.ProjectType != SolutionProjectType.SolutionFolder && !invalidProjects.Contains(project))
        {
            var projectAbsolutePath = TryGetAbsolutePath(project.AbsolutePath, reportMode);
            if (projectAbsolutePath != null)
            {
                IProjectFileLoader loader;
                if (TryGetLoaderFromProjectPath(projectAbsolutePath, reportMode, out loader))
                {
                    // projects get added to 'loadedProjects' as side-effect
                    // never prefer metadata when loading solution, all projects get loaded if they can.
                    var tmp = await GetOrLoadProjectAsync(projectAbsolutePath, loader, preferMetadata: false, loadedProjects: loadedProjects, cancellationToken: cancellationToken).ConfigureAwait(false);
                }
            }
        }
    }
#else
    // MSBuild 12: parse the solution text ourselves.
    SolutionFile solutionFile = null;

    using (var reader = new StreamReader(absoluteSolutionPath))
    {
        version = VersionStamp.Create(File.GetLastWriteTimeUtc(absoluteSolutionPath));
        var text = await reader.ReadToEndAsync().ConfigureAwait(false);
        solutionFile = SolutionFile.Parse(new StringReader(text));
    }

    var solutionFolder = Path.GetDirectoryName(absoluteSolutionPath);

    // seed loaders from known project types
    using (_dataGuard.DisposableWait())
    {
        foreach (var projectBlock in solutionFile.ProjectBlocks)
        {
            string absoluteProjectPath;
            if (TryGetAbsoluteProjectPath(projectBlock.ProjectPath, solutionFolder, ReportMode.Ignore, out absoluteProjectPath))
            {
                var loader = ProjectFileLoader.GetLoaderForProjectTypeGuid(this, projectBlock.ProjectTypeGuid);
                if (loader != null)
                {
                    _projectPathToLoaderMap[absoluteProjectPath] = loader;
                }
            }
        }
    }

    // a list to accumulate all the loaded projects
    var loadedProjects = new List<ProjectInfo>();

    var reportMode = this.SkipUnrecognizedProjects ? ReportMode.Log : ReportMode.Throw;

    // load all the projects
    foreach (var projectBlock in solutionFile.ProjectBlocks)
    {
        cancellationToken.ThrowIfCancellationRequested();

        string absoluteProjectPath;
        if (TryGetAbsoluteProjectPath(projectBlock.ProjectPath, solutionFolder, reportMode, out absoluteProjectPath))
        {
            IProjectFileLoader loader;
            if (TryGetLoaderFromProjectPath(absoluteProjectPath, reportMode, out loader))
            {
                // projects get added to 'loadedProjects' as side-effect
                // never prefer metadata when loading solution, all projects get loaded if they can.
                var tmp = await GetOrLoadProjectAsync(absoluteProjectPath, loader, preferMetadata: false, loadedProjects: loadedProjects, cancellationToken: cancellationToken).ConfigureAwait(false);
            }
        }
    }
#endif

    // construct workspace from loaded project infos
    this.OnSolutionAdded(SolutionInfo.Create(SolutionId.CreateNewId(debugName: absoluteSolutionPath), version, absoluteSolutionPath, loadedProjects));

    this.UpdateReferencesAfterAdd();

    return this.CurrentSolution;
}
/// <summary>
/// Open a project file and all referenced projects.
/// </summary>
/// <param name="projectFilePath">Path to the project file; may be relative to the current directory.</param>
/// <param name="cancellationToken">Propagated to the project load.</param>
/// <returns>The opened project within the workspace's current solution.</returns>
public async Task<Project> OpenProjectAsync(string projectFilePath, CancellationToken cancellationToken = default(CancellationToken))
{
    if (projectFilePath == null)
    {
        throw new ArgumentNullException(nameof(projectFilePath));
    }

    string fullPath;
    if (this.TryGetAbsoluteProjectPath(projectFilePath, Directory.GetCurrentDirectory(), ReportMode.Throw, out fullPath))
    {
        IProjectFileLoader loader;
        // Use the normalized absolute path for the loader lookup so the loader cache
        // (keyed by absolute path when opening solutions) stays consistent and the
        // loader resolved matches the path actually loaded below.
        if (this.TryGetLoaderFromProjectPath(fullPath, ReportMode.Throw, out loader))
        {
            var loadedProjects = new List<ProjectInfo>();
            var projectId = await GetOrLoadProjectAsync(fullPath, loader, this.LoadMetadataForReferencedProjects, loadedProjects, cancellationToken).ConfigureAwait(false);

            // add projects to solution
            foreach (var project in loadedProjects)
            {
                this.OnProjectAdded(project);
            }

            this.UpdateReferencesAfterAdd();

            return this.CurrentSolution.GetProject(projectId);
        }
    }

    // unreachable: both Try methods throw under ReportMode.Throw
    return null;
}
// Resolves 'path' to an absolute path that exists on disk, or returns null after
// reporting the failure according to 'mode' (log, throw, or ignore).
private string TryGetAbsolutePath(string path, ReportMode mode)
{
    string fullPath;
    try
    {
        fullPath = Path.GetFullPath(path);
    }
    catch (Exception)
    {
        // Malformed path (invalid characters, too long, etc.).
        ReportFailure(mode, string.Format(WorkspacesResources.InvalidProjectFilePath, path));
        return null;
    }

    if (File.Exists(fullPath))
    {
        return fullPath;
    }

    // The path parsed but no file is there; report as a file-not-found condition.
    ReportFailure(
        mode,
        string.Format(WorkspacesResources.ProjectFileNotFound, fullPath),
        msg => new FileNotFoundException(msg));
    return null;
}
// Rewrites metadata references into project references for the current solution and,
// when anything changed, publishes the new solution and raises the change event.
private void UpdateReferencesAfterAdd()
{
    using (_serializationLock.DisposableWait())
    {
        var before = this.CurrentSolution;
        var after = this.UpdateReferencesAfterAdd(before);
        if (after == before)
        {
            return;
        }

        after = this.SetCurrentSolution(after);
        // Fire-and-forget: event delivery is intentionally not awaited.
        var ignore = this.RaiseWorkspaceChangedEventAsync(WorkspaceChangeKind.SolutionChanged, before, after);
    }
}
// Updates all projects to properly reference other existing projects via project references instead of using references to built metadata.
private Solution UpdateReferencesAfterAdd(Solution solution)
{
    // Build map from output assembly path to ProjectId
    // Use explicit loop instead of ToDictionary so we don't throw if multiple projects have same output assembly path.
    // NOTE(review): keys use the dictionary's default ordinal, case-sensitive comparison; on a
    // case-insensitive file system two spellings of the same path will not match — confirm intended.
    var outputAssemblyToProjectIdMap = new Dictionary<string, ProjectId>();
    foreach (var p in solution.Projects)
    {
        if (!string.IsNullOrEmpty(p.OutputFilePath))
        {
            outputAssemblyToProjectIdMap[p.OutputFilePath] = p.Id;
        }
    }

    // now fix each project if necessary
    foreach (var pid in solution.ProjectIds)
    {
        var project = solution.GetProject(pid);

        // convert metadata references to project references if the metadata reference matches some project's output assembly.
        foreach (var meta in project.MetadataReferences)
        {
            var pemeta = meta as PortableExecutableReference;
            if (pemeta != null)
            {
                ProjectId matchingProjectId;

                // check both Display and FilePath. FilePath points to the actually bits, but Display should match output path if
                // the metadata reference is shadow copied.
                if ((!string.IsNullOrEmpty(pemeta.Display) && outputAssemblyToProjectIdMap.TryGetValue(pemeta.Display, out matchingProjectId)) ||
                    (!string.IsNullOrEmpty(pemeta.FilePath) && outputAssemblyToProjectIdMap.TryGetValue(pemeta.FilePath, out matchingProjectId)))
                {
                    // carry the aliases and embed-interop flag over to the new project reference.
                    var newProjRef = new ProjectReference(matchingProjectId, pemeta.Properties.Aliases, pemeta.Properties.EmbedInteropTypes);
                    if (!project.ProjectReferences.Contains(newProjRef))
                    {
                        project = project.WithProjectReferences(project.ProjectReferences.Concat(newProjRef));
                    }

                    // drop the metadata reference now that a project reference stands in for it.
                    project = project.WithMetadataReferences(project.MetadataReferences.Where(mr => mr != meta));
                }
            }
        }

        // each With* call forked the solution; pick up the latest fork before the next project.
        solution = project.Solution;
    }

    return solution;
}
// Returns the id of an already-loaded project for 'projectFilePath', or loads the
// project (and, transitively, its references) when it is not known yet.
private async Task<ProjectId> GetOrLoadProjectAsync(string projectFilePath, IProjectFileLoader loader, bool preferMetadata, List<ProjectInfo> loadedProjects, CancellationToken cancellationToken)
{
    return GetProjectId(projectFilePath)
        ?? await this.LoadProjectAsync(projectFilePath, loader, preferMetadata, loadedProjects, cancellationToken).ConfigureAwait(false);
}
// Loads the project file at 'projectFilePath' with 'loader', resolves its documents and
// references, and appends a ProjectInfo for it (and, recursively, for any referenced
// projects) to 'loadedProjects'. Returns the id assigned to this project.
private async Task<ProjectId> LoadProjectAsync(string projectFilePath, IProjectFileLoader loader, bool preferMetadata, List<ProjectInfo> loadedProjects, CancellationToken cancellationToken)
{
    System.Diagnostics.Debug.Assert(projectFilePath != null);
    System.Diagnostics.Debug.Assert(loader != null);

    var projectId = this.GetOrCreateProjectId(projectFilePath);
    var projectName = Path.GetFileNameWithoutExtension(projectFilePath);

    // parse the project file via the language-specific loader.
    var projectFile = await loader.LoadProjectFileAsync(projectFilePath, _properties, cancellationToken).ConfigureAwait(false);
    var projectFileInfo = await projectFile.GetProjectFileInfoAsync(cancellationToken).ConfigureAwait(false);

    // stamp the project with the file's last-write time when the file is on disk.
    VersionStamp version;
    if (!string.IsNullOrEmpty(projectFilePath) && File.Exists(projectFilePath))
    {
        version = VersionStamp.Create(File.GetLastWriteTimeUtc(projectFilePath));
    }
    else
    {
        version = VersionStamp.Create();
    }

    // Documents
    var docFileInfos = projectFileInfo.Documents.ToImmutableArrayOrEmpty();
    // warn (via workspace diagnostics) about duplicate source files in this project.
    CheckDocuments(docFileInfos, projectFilePath, projectId);

    // null when the project file specifies no code page; FileTextLoader then tries UTF-8 first.
    Encoding defaultEncoding = GetDefaultEncoding(projectFileInfo.CodePage);

    var docs = new List<DocumentInfo>();
    foreach (var docFileInfo in docFileInfos)
    {
        string name;
        ImmutableArray<string> folders;
        // split the logical (project-relative) path into a display name and folder chain.
        GetDocumentNameAndFolders(docFileInfo.LogicalPath, out name, out folders);

        docs.Add(DocumentInfo.Create(
            DocumentId.CreateNewId(projectId, debugName: docFileInfo.FilePath),
            name,
            folders,
            projectFile.GetSourceCodeKind(docFileInfo.FilePath),
            new FileTextLoader(docFileInfo.FilePath, defaultEncoding),
            docFileInfo.FilePath,
            docFileInfo.IsGenerated));
    }

    var additonalDocs = new List<DocumentInfo>();
    foreach (var docFileInfo in projectFileInfo.AdditionalDocuments)
    {
        string name;
        ImmutableArray<string> folders;
        GetDocumentNameAndFolders(docFileInfo.LogicalPath, out name, out folders);

        additonalDocs.Add(DocumentInfo.Create(
            DocumentId.CreateNewId(projectId, debugName: docFileInfo.FilePath),
            name,
            folders,
            SourceCodeKind.Regular,
            new FileTextLoader(docFileInfo.FilePath, defaultEncoding),
            docFileInfo.FilePath,
            docFileInfo.IsGenerated));
    }

    // project references — may recursively load referenced projects into 'loadedProjects'.
    var resolvedReferences = await this.ResolveProjectReferencesAsync(
        projectFilePath, projectFileInfo.ProjectReferences, preferMetadata, loadedProjects, cancellationToken).ConfigureAwait(false);

    // project references that could not resolve to projects stay as metadata references.
    var metadataReferences = projectFileInfo.MetadataReferences
        .Concat(resolvedReferences.MetadataReferences);

    var outputFilePath = projectFileInfo.OutputFilePath;
    var assemblyName = projectFileInfo.AssemblyName;

    // if the project file loader couldn't figure out an assembly name, make one using the project's file path.
    if (string.IsNullOrWhiteSpace(assemblyName))
    {
        assemblyName = Path.GetFileNameWithoutExtension(projectFilePath);

        // if this is still unreasonable, use a fixed name.
        if (string.IsNullOrWhiteSpace(assemblyName))
        {
            assemblyName = "assembly";
        }
    }

    loadedProjects.Add(
        ProjectInfo.Create(
            projectId,
            version,
            projectName,
            assemblyName,
            loader.Language,
            projectFilePath,
            outputFilePath,
            projectFileInfo.CompilationOptions,
            projectFileInfo.ParseOptions,
            docs,
            resolvedReferences.ProjectReferences,
            metadataReferences,
            analyzerReferences: projectFileInfo.AnalyzerReferences,
            additionalDocuments: additonalDocs,
            isSubmission: false,
            hostObjectType: null));

    return projectId;
}
// Maps a project-file CodePage value to an Encoding, or null when no usable encoding
// can be determined (the caller then lets FileTextLoader pick a default).
private static Encoding GetDefaultEncoding(int codePage)
{
    // If no CodePage was specified in the project file, then the FileTextLoader will
    // attempt to use UTF8 before falling back on Encoding.Default.
    if (codePage == 0)
    {
        return null;
    }

    try
    {
        return Encoding.GetEncoding(codePage);
    }
    catch (ArgumentOutOfRangeException)
    {
        // code page outside the valid [0, 65535] range.
        return null;
    }
    catch (NotSupportedException)
    {
        // BUG FIX: Encoding.GetEncoding(int) also throws NotSupportedException for a code
        // page that is in range but has no encoding on this platform; the original only
        // caught ArgumentOutOfRangeException and let that crash the project load.
        return null;
    }
}
// Both platform separators, so project-relative paths split correctly regardless of origin.
private static readonly char[] s_directorySplitChars = new char[] { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar };

// Splits a logical (project-relative) document path into its display name (last segment)
// and the chain of containing folders. An empty or separator-only path yields the input
// itself as the name and no folders.
private static void GetDocumentNameAndFolders(string logicalPath, out string name, out ImmutableArray<string> folders)
{
    var segments = logicalPath.Split(s_directorySplitChars, StringSplitOptions.RemoveEmptyEntries);
    if (segments.Length == 0)
    {
        name = logicalPath;
        folders = ImmutableArray.Create<string>();
        return;
    }

    name = segments[segments.Length - 1];
    folders = segments.Length > 1
        ? segments.Take(segments.Length - 1).ToImmutableArray()
        : ImmutableArray.Create<string>();
}
// Raises a workspace warning for every source file that appears more than once
// in the same project.
private void CheckDocuments(IEnumerable<DocumentFileInfo> docs, string projectFilePath, ProjectId projectId)
{
    var paths = new HashSet<string>();
    foreach (var doc in docs)
    {
        // HashSet<T>.Add returns false when the element is already present, replacing
        // the original Contains-then-Add double lookup with a single one.
        if (!paths.Add(doc.FilePath))
        {
            this.OnWorkspaceFailed(new ProjectDiagnostic(WorkspaceDiagnosticKind.Warning, string.Format(WorkspacesResources.DuplicateSourceFileInProject, doc.FilePath, projectFilePath), projectId));
        }
    }
}
// Accumulates the outcome of ResolveProjectReferencesAsync: references that resolved to
// workspace projects, and references that fell back to built metadata assemblies.
private class ResolvedReferences
{
    public readonly List<ProjectReference> ProjectReferences = new List<ProjectReference>();
    public readonly List<MetadataReference> MetadataReferences = new List<MetadataReference>();
}
// Resolves each project-file reference of 'thisProjectPath' to either an existing project,
// a metadata reference to the referenced project's built output (when preferred or the
// project type is unknown), or a freshly-loaded project. References that cannot be
// resolved at all become project references to a placeholder id.
private async Task<ResolvedReferences> ResolveProjectReferencesAsync(
    string thisProjectPath,
    IReadOnlyList<ProjectFileReference> projectFileReferences,
    bool preferMetadata,
    List<ProjectInfo> loadedProjects,
    CancellationToken cancellationToken)
{
    var resolvedReferences = new ResolvedReferences();
    var reportMode = this.SkipUnrecognizedProjects ? ReportMode.Log : ReportMode.Throw;

    foreach (var projectFileReference in projectFileReferences)
    {
        string fullPath;

        // reference paths in the project file are relative to the referencing project's directory.
        if (TryGetAbsoluteProjectPath(projectFileReference.Path, Path.GetDirectoryName(thisProjectPath), reportMode, out fullPath))
        {
            // if the project is already loaded, then just reference the one we have
            var existingProjectId = this.GetProjectId(fullPath);
            if (existingProjectId != null)
            {
                resolvedReferences.ProjectReferences.Add(new ProjectReference(existingProjectId, projectFileReference.Aliases));
                continue;
            }

            IProjectFileLoader loader;
            // ReportMode.Ignore: a missing loader here is not an error yet — metadata may suffice.
            TryGetLoaderFromProjectPath(fullPath, ReportMode.Ignore, out loader);

            // get metadata if preferred or if loader is unknown
            if (preferMetadata || loader == null)
            {
                var projectMetadata = await this.GetProjectMetadata(fullPath, projectFileReference.Aliases, _properties, cancellationToken).ConfigureAwait(false);
                if (projectMetadata != null)
                {
                    resolvedReferences.MetadataReferences.Add(projectMetadata);
                    continue;
                }
            }

            // must load, so we really need loader
            if (TryGetLoaderFromProjectPath(fullPath, reportMode, out loader))
            {
                // load the project
                var projectId = await this.GetOrLoadProjectAsync(fullPath, loader, preferMetadata, loadedProjects, cancellationToken).ConfigureAwait(false);
                resolvedReferences.ProjectReferences.Add(new ProjectReference(projectId, projectFileReference.Aliases));
                continue;
            }
        }
        else
        {
            // path could not be resolved; fall back to the raw reference path as the key.
            fullPath = projectFileReference.Path;
        }

        // cannot find metadata and project cannot be loaded, so leave a project reference to a non-existent project.
        var id = this.GetOrCreateProjectId(fullPath);
        resolvedReferences.ProjectReferences.Add(new ProjectReference(id, projectFileReference.Aliases));
    }

    return resolvedReferences;
}
/// <summary>
/// Gets a MetadataReference to a project's output assembly, or null when the output
/// path cannot be determined or the assembly does not exist on disk.
/// </summary>
private async Task<MetadataReference> GetProjectMetadata(string projectFilePath, ImmutableArray<string> aliases, IDictionary<string, string> globalProperties, CancellationToken cancellationToken)
{
    // use loader service to determine output file for project if possible
    string outputFilePath = null;

    try
    {
        outputFilePath = await ProjectFileLoader.GetOutputFilePathAsync(projectFilePath, globalProperties, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        // deliberately broad: metadata resolution is best-effort, so any failure is
        // surfaced as a workspace diagnostic and the caller falls back to loading the project.
        this.OnWorkspaceFailed(new WorkspaceDiagnostic(WorkspaceDiagnosticKind.Failure, e.Message));
    }

    if (outputFilePath != null && File.Exists(outputFilePath))
    {
        if (Workspace.TestHookStandaloneProjectsDoNotHoldReferences)
        {
            // test hook: materialize the metadata from bytes so the file itself is not held open.
            var documentationService = this.Services.GetService<IDocumentationProviderService>();
            var docProvider = documentationService.GetDocumentationProvider(outputFilePath);
            var metadata = AssemblyMetadata.CreateFromImage(File.ReadAllBytes(outputFilePath));

            return metadata.GetReference(
                documentation: docProvider,
                aliases: aliases,
                display: outputFilePath);
        }
        else
        {
            var metadataService = this.Services.GetService<IMetadataService>();
            return metadataService.GetReference(outputFilePath, new MetadataReferenceProperties(MetadataImageKind.Assembly, aliases));
        }
    }

    return null;
}
#endregion
#region Apply Changes
// Only changes that can be written back to the on-disk project file are supported.
public override bool CanApplyChange(ApplyChangesKind feature)
{
    return feature == ApplyChangesKind.ChangeDocument
        || feature == ApplyChangesKind.AddDocument
        || feature == ApplyChangesKind.RemoveDocument
        || feature == ApplyChangesKind.AddMetadataReference
        || feature == ApplyChangesKind.RemoveMetadataReference
        || feature == ApplyChangesKind.AddProjectReference
        || feature == ApplyChangesKind.RemoveProjectReference
        || feature == ApplyChangesKind.AddAnalyzerReference
        || feature == ApplyChangesKind.RemoveAnalyzerReference;
}
// True when any of the pending changes must be persisted into the project file on disk
// (document text edits alone do not require touching the project file).
private bool HasProjectFileChanges(ProjectChanges changes)
{
    return changes.GetAddedDocuments().Any() ||
           changes.GetRemovedDocuments().Any() ||
           changes.GetAddedMetadataReferences().Any() ||
           changes.GetRemovedMetadataReferences().Any() ||
           changes.GetAddedProjectReferences().Any() ||
           changes.GetRemovedProjectReferences().Any() ||
           changes.GetAddedAnalyzerReferences().Any() ||
           changes.GetRemovedAnalyzerReferences().Any();
}
// The project file currently being modified by ApplyProjectChanges; null outside an apply operation.
private IProjectFile _applyChangesProjectFile;

public override bool TryApplyChanges(Solution newSolution)
{
    // serialize with other workspace mutations before delegating to the base implementation,
    // which will call back into the Apply* overrides below.
    using (_serializationLock.DisposableWait())
    {
        return base.TryApplyChanges(newSolution);
    }
}
// Applies one project's worth of changes: loads the project file when structural changes
// require editing it, lets the base class dispatch the individual Apply* callbacks (which
// consult _applyChangesProjectFile), then saves the project file.
protected override void ApplyProjectChanges(ProjectChanges projectChanges)
{
    // only one project-file apply may be in flight at a time (TryApplyChanges holds _serializationLock).
    System.Diagnostics.Debug.Assert(_applyChangesProjectFile == null);

    var project = projectChanges.OldProject ?? projectChanges.NewProject;

    try
    {
        // if we need to modify the project file, load it first.
        if (this.HasProjectFileChanges(projectChanges))
        {
            var projectPath = project.FilePath;
            IProjectFileLoader loader;
            if (this.TryGetLoaderFromProjectPath(projectPath, ReportMode.Ignore, out loader))
            {
                try
                {
                    // NOTE(review): blocks on the async load (.Result); this override is synchronous.
                    _applyChangesProjectFile = loader.LoadProjectFileAsync(projectPath, _properties, CancellationToken.None).Result;
                }
                catch (System.IO.IOException exception)
                {
                    this.OnWorkspaceFailed(new ProjectDiagnostic(WorkspaceDiagnosticKind.Failure, exception.Message, projectChanges.ProjectId));
                }
            }
        }

        // do normal apply operations
        base.ApplyProjectChanges(projectChanges);

        // save project file
        if (_applyChangesProjectFile != null)
        {
            try
            {
                _applyChangesProjectFile.Save();
            }
            catch (System.IO.IOException exception)
            {
                this.OnWorkspaceFailed(new ProjectDiagnostic(WorkspaceDiagnosticKind.Failure, exception.Message, projectChanges.ProjectId));
            }
        }
    }
    finally
    {
        // always clear the in-flight project file, even when applying failed part-way.
        _applyChangesProjectFile = null;
    }
}
// Persists changed document text to disk and then updates the in-memory solution.
protected override void ApplyDocumentTextChanged(DocumentId documentId, SourceText text)
{
    var document = this.CurrentSolution.GetDocument(documentId);
    if (document == null)
    {
        return;
    }

    // Keep the file's existing encoding when it can be determined; otherwise default to UTF-8.
    var encoding = DetermineEncoding(text, document) ?? Encoding.UTF8;
    this.SaveDocumentText(documentId, document.FilePath, text, encoding);
    this.OnDocumentTextChanged(documentId, text, PreservationMode.PreserveValue);
}
// Determines the encoding to save 'text' with: the encoding attached to the new text if
// present, otherwise the encoding sniffed from the file currently on disk, otherwise null.
private static Encoding DetermineEncoding(SourceText text, Document document)
{
    if (text.Encoding != null)
    {
        return text.Encoding;
    }

    // Best-effort sniff of the on-disk file; any read/parse failure just yields null.
    try
    {
        using (ExceptionHelpers.SuppressFailFast())
        using (var stream = new FileStream(document.FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        {
            return EncodedStringText.Create(stream).Encoding;
        }
    }
    catch (IOException)
    {
    }
    catch (InvalidDataException)
    {
    }

    return null;
}
// Adds a new document: records it in the project file, adds it to the solution, and
// writes its text to disk — in that order.
protected override void ApplyDocumentAdded(DocumentInfo info, SourceText text)
{
    // set by ApplyProjectChanges before the per-change callbacks run.
    System.Diagnostics.Debug.Assert(_applyChangesProjectFile != null);

    var project = this.CurrentSolution.GetProject(info.Id.ProjectId);

    IProjectFileLoader loader;
    if (this.TryGetLoaderFromProjectPath(project.FilePath, ReportMode.Ignore, out loader))
    {
        // pick the file extension that matches the document's source-code kind.
        var extension = _applyChangesProjectFile.GetDocumentExtension(info.SourceCodeKind);
        var fileName = Path.ChangeExtension(info.Name, extension);

        // the document's folder chain becomes a relative path under the project directory.
        var relativePath = (info.Folders != null && info.Folders.Count > 0)
            ? Path.Combine(Path.Combine(info.Folders.ToArray()), fileName)
            : fileName;

        var fullPath = GetAbsolutePath(relativePath, Path.GetDirectoryName(project.FilePath));

        var newDocumentInfo = info.WithName(fileName)
            .WithFilePath(fullPath)
            .WithTextLoader(new FileTextLoader(fullPath, text.Encoding));

        // add document to project file
        _applyChangesProjectFile.AddDocument(relativePath);

        // add to solution
        this.OnDocumentAdded(newDocumentInfo);

        // save text to disk
        if (text != null)
        {
            this.SaveDocumentText(info.Id, fullPath, text, text.Encoding ?? Encoding.UTF8);
        }
    }
}
// Writes 'newText' to 'fullPath' with the given (non-null) encoding, creating the
// containing directory when needed. I/O failures become workspace diagnostics.
private void SaveDocumentText(DocumentId id, string fullPath, SourceText newText, Encoding encoding)
{
    try
    {
        using (ExceptionHelpers.SuppressFailFast())
        {
            Debug.Assert(encoding != null);

            // Make sure the target directory exists before opening the writer.
            var directory = Path.GetDirectoryName(fullPath);
            if (!Directory.Exists(directory))
            {
                Directory.CreateDirectory(directory);
            }

            using (var writer = new StreamWriter(fullPath, append: false, encoding: encoding))
            {
                newText.Write(writer);
            }
        }
    }
    catch (IOException exception)
    {
        this.OnWorkspaceFailed(new DocumentDiagnostic(WorkspaceDiagnosticKind.Failure, exception.Message, id));
    }
}
// Removes a document from the project file, deletes its file from disk, and then
// removes it from the in-memory solution.
protected override void ApplyDocumentRemoved(DocumentId documentId)
{
    Debug.Assert(_applyChangesProjectFile != null);

    var document = this.CurrentSolution.GetDocument(documentId);
    if (document == null)
    {
        return;
    }

    _applyChangesProjectFile.RemoveDocument(document.FilePath);
    this.DeleteDocumentFile(document.Id, document.FilePath);
    this.OnDocumentRemoved(documentId);
}
// Best-effort deletion of a document's file; expected filesystem failures are reported
// as workspace diagnostics instead of aborting the apply operation.
private void DeleteDocumentFile(DocumentId documentId, string fullPath)
{
    try
    {
        if (File.Exists(fullPath))
        {
            File.Delete(fullPath);
        }
    }
    catch (Exception exception) when (exception is IOException ||
                                      exception is NotSupportedException ||
                                      exception is UnauthorizedAccessException)
    {
        // Single handler via an exception filter replaces the three identical catch
        // blocks in the original; the set of caught exception types is unchanged.
        this.OnWorkspaceFailed(new DocumentDiagnostic(WorkspaceDiagnosticKind.Failure, exception.Message, documentId));
    }
}
// Records a new metadata reference in the project file, then in the solution.
protected override void ApplyMetadataReferenceAdded(ProjectId projectId, MetadataReference metadataReference)
{
    Debug.Assert(_applyChangesProjectFile != null);

    // The assembly identity lets the project file write a proper reference entry.
    var assemblyIdentity = GetAssemblyIdentity(projectId, metadataReference);
    _applyChangesProjectFile.AddMetadataReference(metadataReference, assemblyIdentity);
    this.OnMetadataReferenceAdded(projectId, metadataReference);
}
// Removes a metadata reference from the project file, then from the solution.
protected override void ApplyMetadataReferenceRemoved(ProjectId projectId, MetadataReference metadataReference)
{
    Debug.Assert(_applyChangesProjectFile != null);

    // Resolve the identity so the project file can locate the matching reference entry.
    var assemblyIdentity = GetAssemblyIdentity(projectId, metadataReference);
    _applyChangesProjectFile.RemoveMetadataReference(metadataReference, assemblyIdentity);
    this.OnMetadataReferenceRemoved(projectId, metadataReference);
}
// Resolves the assembly identity of a metadata reference by compiling the project
// (with the reference added, if it is not already present) and asking the compilation
// for the corresponding assembly symbol. Returns null when the reference does not
// resolve to an assembly (e.g. a module).
private AssemblyIdentity GetAssemblyIdentity(ProjectId projectId, MetadataReference metadataReference)
{
    var project = this.CurrentSolution.GetProject(projectId);
    if (!project.MetadataReferences.Contains(metadataReference))
    {
        // fork the project locally so the compilation can see the reference; the
        // workspace's own solution is not modified here.
        project = project.AddMetadataReference(metadataReference);
    }

    // NOTE(review): blocks on the async compilation; this runs inside the synchronous
    // apply-changes path.
    var compilation = project.GetCompilationAsync(CancellationToken.None).WaitAndGetResult(CancellationToken.None);
    var symbol = compilation.GetAssemblyOrModuleSymbol(metadataReference) as IAssemblySymbol;
    return symbol != null ? symbol.Identity : null;
}
// Records a new project reference in the project file (when the target project still
// exists) and then in the solution.
protected override void ApplyProjectReferenceAdded(ProjectId projectId, ProjectReference projectReference)
{
    Debug.Assert(_applyChangesProjectFile != null);

    var referencedProject = this.CurrentSolution.GetProject(projectReference.ProjectId);
    if (referencedProject != null)
    {
        _applyChangesProjectFile.AddProjectReference(
            referencedProject.Name,
            new ProjectFileReference(referencedProject.FilePath, projectReference.Aliases));
    }

    this.OnProjectReferenceAdded(projectId, projectReference);
}
// Removes a project reference from the project file (when the target project still
// exists) and then from the solution.
protected override void ApplyProjectReferenceRemoved(ProjectId projectId, ProjectReference projectReference)
{
    Debug.Assert(_applyChangesProjectFile != null);

    var referencedProject = this.CurrentSolution.GetProject(projectReference.ProjectId);
    if (referencedProject != null)
    {
        _applyChangesProjectFile.RemoveProjectReference(referencedProject.Name, referencedProject.FilePath);
    }

    this.OnProjectReferenceRemoved(projectId, projectReference);
}
// Records a new analyzer reference in the project file, then in the solution.
protected override void ApplyAnalyzerReferenceAdded(ProjectId projectId, AnalyzerReference analyzerReference)
{
    Debug.Assert(_applyChangesProjectFile != null);

    _applyChangesProjectFile.AddAnalyzerReference(analyzerReference);
    this.OnAnalyzerReferenceAdded(projectId, analyzerReference);
}
// Removes an analyzer reference from the project file, then from the solution.
protected override void ApplyAnalyzerReferenceRemoved(ProjectId projectId, AnalyzerReference analyzerReference)
{
    Debug.Assert(_applyChangesProjectFile != null);

    _applyChangesProjectFile.RemoveAnalyzerReference(analyzerReference);
    this.OnAnalyzerReferenceRemoved(projectId, analyzerReference);
}
}
#endregion
}
| |
using System;
using System.Runtime.Caching;
using System.Web.Caching;
namespace KnockoutJsCommentBindingClassifier
{
/// <summary>
/// Get-or-store extension methods over <see cref="Cache"/> (System.Web) and
/// <see cref="MemoryCache"/> (System.Runtime.Caching), unified through a private
/// <see cref="ICache"/> abstraction.
/// </summary>
public static class CacheExtensions
{
    /// <summary>Default absolute expiration, in minutes.</summary>
    public const int DefaultCacheExpiration = 20;

    // Single gate shared by all caches and keys. NOTE(review): a per-key lock would allow
    // more concurrency; kept global for compatibility with the original design.
    private static readonly object sync = new object();

    /// <summary>
    /// Allows Caching of typed data
    /// </summary>
    /// <example><![CDATA[
    /// var user = HttpRuntime
    ///   .Cache
    ///   .GetOrStore<User>(
    ///      string.Format("User{0}", _userId),
    ///      () => Repository.GetUser(_userId));
    ///
    /// ]]></example>
    /// <typeparam name="T"></typeparam>
    /// <param name="cache">calling object</param>
    /// <param name="key">Cache key</param>
    /// <param name="generator">Func that returns the object to store in cache</param>
    /// <returns></returns>
    /// <remarks>Uses a default cache expiration period as defined in <see cref="CacheExtensions.DefaultCacheExpiration"/></remarks>
    private static T GetOrStore<T>(this ICache cache, string key, Func<T> generator)
    {
        return cache.GetOrStore(key, generator, DefaultCacheExpiration);
    }

    public static T GetOrStore<T>(this Cache cache, string key, Func<T> generator)
    {
        return cache.ToICache().GetOrStore(key, generator);
    }

    public static T GetOrStore<T>(this MemoryCache cache, string key, Func<T> generator)
    {
        return cache.ToICache().GetOrStore(key, generator);
    }

    private static ICache ToICache(this Cache cache)
    {
        return new HttpCacheWrapper(cache);
    }

    private static ICache ToICache(this MemoryCache cache)
    {
        return new MemoryCacheWrapper(cache);
    }

    /// <summary>
    /// Allows Caching of typed data
    /// </summary>
    /// <example><![CDATA[
    /// var user = HttpRuntime
    ///   .Cache
    ///   .GetOrStore<User>(
    ///      string.Format("User{0}", _userId),
    ///      () => Repository.GetUser(_userId));
    ///
    /// ]]></example>
    /// <typeparam name="T"></typeparam>
    /// <param name="cache">calling object</param>
    /// <param name="key">Cache key</param>
    /// <param name="generator">Func that returns the object to store in cache</param>
    /// <param name="expireInMinutes">Time to expire cache in minutes</param>
    /// <returns></returns>
    public static T GetOrStore<T>(this MemoryCache cache, string key, Func<T> generator, double expireInMinutes)
    {
        return cache.ToICache().GetOrStore(key, generator, expireInMinutes);
    }

    public static T GetOrStore<T>(this Cache cache, string key, Func<T> generator, double expireInMinutes)
    {
        return cache.ToICache().GetOrStore(key, generator, expireInMinutes);
    }

    private static T GetOrStore<T>(this ICache cache, string key, Func<T> generator, double expireInMinutes)
    {
        // BUG FIX: the original evaluated 'generator()' before taking the lock and then used
        // a broken double-checked pattern. Now the generator runs under the lock and only
        // when the key is still missing, so concurrent callers invoke it at most once.
        var cached = cache[key];
        if (cached == null)
        {
            lock (sync)
            {
                cached = cache[key];
                if (cached == null)
                {
                    cached = generator != null ? (object)generator() : (object)default(T);
                    cache.Insert(key, cached, DateTime.Now.AddMinutes(expireInMinutes));
                }
            }
        }

        return (T)cached;
    }

    /// <summary>
    /// Allows Caching of typed data
    /// </summary>
    /// <example><![CDATA[
    /// var user = HttpRuntime
    ///   .Cache
    ///   .GetOrStore<User>(
    ///      string.Format("User{0}", _userId),
    ///      Repository.GetUser(_userId));
    ///
    /// ]]></example>
    /// <typeparam name="T"></typeparam>
    /// <param name="cache">calling object</param>
    /// <param name="key">Cache key</param>
    /// <param name="obj">Object to store in cache</param>
    /// <returns></returns>
    /// <remarks>Uses a default cache expiration period as defined in <see cref="CacheExtensions.DefaultCacheExpiration"/></remarks>
    public static T GetOrStore<T>(this Cache cache, string key, T obj)
    {
        return cache.ToICache().GetOrStore<T>(key, obj);
    }

    public static T GetOrStore<T>(this MemoryCache cache, string key, T obj)
    {
        return cache.ToICache().GetOrStore<T>(key, obj);
    }

    private static T GetOrStore<T>(this ICache cache, string key, T obj)
    {
        return cache.GetOrStore(key, obj, DefaultCacheExpiration);
    }

    /// <summary>
    /// Allows Caching of typed data
    /// </summary>
    /// <example><![CDATA[
    /// var user = HttpRuntime
    ///   .Cache
    ///   .GetOrStore<User>(
    ///      string.Format("User{0}", _userId),
    ///      Repository.GetUser(_userId));
    ///
    /// ]]></example>
    /// <typeparam name="T"></typeparam>
    /// <param name="cache">calling object</param>
    /// <param name="key">Cache key</param>
    /// <param name="obj">Object to store in cache</param>
    /// <param name="expireInMinutes">Time to expire cache in minutes</param>
    /// <returns></returns>
    public static T GetOrStore<T>(this Cache cache, string key, T obj, double expireInMinutes)
    {
        return cache.ToICache().GetOrStore<T>(key, obj, expireInMinutes);
    }

    public static T GetOrStore<T>(this MemoryCache cache, string key, T obj, double expireInMinutes)
    {
        return cache.ToICache().GetOrStore<T>(key, obj, expireInMinutes);
    }

    private static T GetOrStore<T>(this ICache cache, string key, T obj, double expireInMinutes)
    {
        var cached = cache[key];
        if (cached == null)
        {
            lock (sync)
            {
                // BUG FIX: re-read the cache inside the lock. The original re-tested the
                // stale local (still null), so every racing thread inserted again.
                cached = cache[key];
                if (cached == null)
                {
                    // NOTE(review): for reference types default(T) is null, and inserting a
                    // null value into the underlying caches fails — callers are expected to
                    // pass a non-null obj, matching the original behavior.
                    cached = obj != null ? (object)obj : (object)default(T);
                    // NOTE(review): DateTime.Now (not UtcNow) kept for compatibility; absolute
                    // expiry can shift across DST changes — confirm whether UtcNow is wanted.
                    cache.Insert(key, cached, DateTime.Now.AddMinutes(expireInMinutes));
                }
            }
        }

        return (T)cached;
    }

    // Adapts System.Web.Caching.Cache to the private ICache abstraction.
    private class HttpCacheWrapper : ICache
    {
        private readonly Cache cache;

        public HttpCacheWrapper(Cache cache)
        {
            this.cache = cache;
        }

        public void Insert(string key, object value, DateTime expiryTime)
        {
            // Absolute expiration only; no sliding expiration, no dependencies.
            cache.Insert(key, value, null, expiryTime, Cache.NoSlidingExpiration);
        }

        public object this[string key]
        {
            get { return cache[key]; }
        }
    }

    // Minimal cache surface needed by GetOrStore: indexed read and absolute-expiry insert.
    private interface ICache
    {
        object this[string key] { get; }
        void Insert(string key, object value, DateTime expiryTime);
    }

    // Adapts System.Runtime.Caching.MemoryCache to the private ICache abstraction.
    private class MemoryCacheWrapper : ICache
    {
        private readonly MemoryCache cache;

        public MemoryCacheWrapper(MemoryCache cache)
        {
            this.cache = cache;
        }

        public void Insert(string key, object value, DateTime expiryTime)
        {
            cache.Add(key, value, expiryTime);
        }

        public object this[string key]
        {
            get { return cache[key]; }
        }
    }
}
}
| |
/*
MIT License
Copyright (c) 2017 Saied Zarrinmehr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows;
namespace SpatialAnalysis.Agents.Visualization.AgentModel
{
/// <summary>
/// Includes information about the male agent's shape and the logic for interpolation for animation.
/// </summary>
internal class MaleAgentShapeData
{
/// <summary>
/// Gets the curve-interpolated data of the agent's 2D shape.
/// </summary>
/// <param name="walkedLength">Walked length.</param>
/// <param name="scale">The scale.</param>
/// <param name="coneVisible">if set to <c>true</c> shows the visibility cone.</param>
/// <param name="angle">The visibility angle.</param>
/// <returns>Point[][].</returns>
public static Point[][] GetCurveInterpolatedDate(double walkedLength, double scale, bool coneVisible, double angle)
{
    // Normalize the walked distance to stride units (one stride spans 2 * scale).
    walkedLength /= (2 * scale);
    int R = (int)Math.Floor(walkedLength);
    double h = walkedLength - R;       // fractional position within the stride, in [0, 1)
    int n = (int)Math.Floor(h * 4);    // key-frame index (0..3); key frames are 0.25 apart
    double t = h - n * 0.250;          // offset into the current key-frame interval

    // Allocate three extra curves for the visibility cone when it is shown.
    var pnts = new Point[coneVisible ? MaleAgentShapeData.AllCurveData.Length + 3 : MaleAgentShapeData.AllCurveData.Length][];
    InterpolateBodyCurves(pnts, scale, n, t);

    if (coneVisible)
    {
        AppendVisibilityCone(pnts, scale, angle);
    }

    return pnts;
}

// Linearly interpolates every body curve between key frames n and n+1 and writes the
// scaled points into pnts[0 .. AllCurveData.Length). Extracted to remove the loop that
// was previously duplicated verbatim in both branches of GetCurveInterpolatedDate.
private static void InterpolateBodyCurves(Point[][] pnts, double scale, int n, double t)
{
    for (int j = 0; j < MaleAgentShapeData.AllCurveData.Length; j++)
    {
        int k = MaleAgentShapeData.AllCurveData[j].GetLength(0);
        pnts[j] = new Point[k];
        for (int i = 0; i < k; i++)
        {
            var _x = ((MaleAgentShapeData.AllCurveData[j][i, n].X * (0.2500 - t) + MaleAgentShapeData.AllCurveData[j][i, n + 1].X * t) / 0.2500);
            var _y = ((MaleAgentShapeData.AllCurveData[j][i, n].Y * (0.2500 - t) + MaleAgentShapeData.AllCurveData[j][i, n + 1].Y * t) / 0.2500);
            pnts[j][i] = new Point(scale * _x, scale * _y);
        }
    }
}

// Appends the two visibility-cone edges and the center ray (each running from radius
// 0.55 to 2.0, scaled) into the last three slots of pnts.
private static void AppendVisibilityCone(Point[][] pnts, double scale, double angle)
{
    // Convert the field-of-view angle into the edge angle measured from the x axis.
    angle = (Math.PI - angle) / 2;
    pnts[MaleAgentShapeData.AllCurveData.Length] = new Point[2]
    {
        new Point(scale * Math.Cos(angle) * 0.55d,
            scale * Math.Sin(angle) * 0.55d),
        new Point(scale * Math.Cos(angle) * 2d,
            scale * Math.Sin(angle) * 2d),
    };
    pnts[MaleAgentShapeData.AllCurveData.Length + 1] = new Point[2]
    {
        new Point(-scale * Math.Cos(angle) * 0.55d,
            scale * Math.Sin(angle) * 0.55d),
        new Point(-scale * Math.Cos(angle) * 2d,
            scale * Math.Sin(angle) * 2d),
    };
    pnts[MaleAgentShapeData.AllCurveData.Length + 2] = new Point[2]
    {
        new Point(0, scale * 0.55d),
        new Point(0, scale * 2d),
    };
}
//the shape information that was retrieved from my Rhino model
private static readonly Point[][,] AllCurveData =
{
//Head
new Point[,]
{
{
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
},
{
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
},
{
new Point(0.140891670236668,0.0735372566493833),
new Point(0.140891670236668,0.0735372566493833),
new Point(0.140891670236668,0.0735372566493833),
new Point(0.140891670236668,0.0735372566493833),
new Point(0.140891670236668,0.0735372566493833),
},
{
new Point(0.143770922471535,-0.0735372566493826),
new Point(0.143770922471535,-0.0735372566493826),
new Point(0.143770922471535,-0.0735372566493826),
new Point(0.143770922471535,-0.0735372566493826),
new Point(0.143770922471535,-0.0735372566493826),
},
{
new Point(0.102166519333689,-0.13677026627863),
new Point(0.102166519333689,-0.13677026627863),
new Point(0.102166519333689,-0.13677026627863),
new Point(0.102166519333689,-0.13677026627863),
new Point(0.102166519333689,-0.13677026627863),
},
{
new Point(0,-0.16784402691648),
new Point(0,-0.16784402691648),
new Point(0,-0.16784402691648),
new Point(0,-0.16784402691648),
new Point(0,-0.16784402691648),
},
{
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.13677026627863),
},
{
new Point(-0.143770922471535,-0.0735372566493826),
new Point(-0.143770922471535,-0.0735372566493826),
new Point(-0.143770922471535,-0.0735372566493826),
new Point(-0.143770922471535,-0.0735372566493826),
new Point(-0.143770922471535,-0.0735372566493826),
},
{
new Point(-0.140891670236668,0.0735372566493833),
new Point(-0.140891670236668,0.0735372566493833),
new Point(-0.140891670236668,0.0735372566493833),
new Point(-0.140891670236668,0.0735372566493833),
new Point(-0.140891670236668,0.0735372566493833),
},
{
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
},
{
new Point(-0.107364754397092,0.0807424558483167),
new Point(-0.107364754397092,0.0807424558483167),
new Point(-0.107364754397092,0.0807424558483167),
new Point(-0.107364754397092,0.0807424558483167),
new Point(-0.107364754397092,0.0807424558483167),
},
{
new Point(-0.0424710815809005,0.112478462448582),
new Point(-0.0424710815809005,0.112478462448582),
new Point(-0.0424710815809005,0.112478462448582),
new Point(-0.0424710815809005,0.112478462448582),
new Point(-0.0424710815809005,0.112478462448582),
},
{
new Point(0.0424710815809006,0.112478462448582),
new Point(0.0424710815809006,0.112478462448582),
new Point(0.0424710815809006,0.112478462448582),
new Point(0.0424710815809006,0.112478462448582),
new Point(0.0424710815809006,0.112478462448582),
},
{
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
new Point(0.107364754397092,0.0807424558483167),
},
},
new Point[,]
{
{
new Point(-0.430077637380643,-0.0966403885565035),
new Point(-0.408571658454857,-0.044342714532541),
new Point(-0.482601313769199,0.142175698709484),
new Point(-0.408571658454857,-0.044342714532541),
new Point(-0.430077637380643,-0.0966403885565035),
},
{
new Point(-0.471073900040423,-0.102422439942408),
new Point(-0.442318204967156,-0.0504077781289407),
new Point(-0.494305655253277,0.162386931434678),
new Point(-0.442318204967156,-0.0504077781289407),
new Point(-0.471073900040423,-0.102422439942408),
},
{
new Point(-0.496309331885004,-0.00662630129599351),
new Point(-0.45445590744668,0.0476050344380057),
new Point(-0.489555343377694,0.195525390336348),
new Point(-0.45445590744668,0.0476050344380057),
new Point(-0.496309331885004,-0.00662630129599351),
},
{
new Point(-0.47667044348818,0.0319590106558332),
new Point(-0.439628078777297,0.0659217639707732),
new Point(-0.384867723770923,0.231948347369653),
new Point(-0.439628078777297,0.0659217639707732),
new Point(-0.47667044348818,0.0319590106558332),
},
{
new Point(-0.433784870366022,0.0257095551101474),
new Point(-0.418067217031086,0.0634500465463992),
new Point(-0.363819357491664,0.208801644961583),
new Point(-0.418067217031086,0.0634500465463992),
new Point(-0.433784870366022,0.0257095551101474),
},
},
new Point[,]
{
{
new Point(0.482601313769199,0.142175698709484),
new Point(0.408571658454857,-0.044342714532541),
new Point(0.430077637380643,-0.0966403885565035),
new Point(0.408571658454857,-0.044342714532541),
new Point(0.482601313769199,0.142175698709484),
},
{
new Point(0.494305655253277,0.162386931434678),
new Point(0.442318204967156,-0.0504077781289407),
new Point(0.471073900040423,-0.102422439942408),
new Point(0.442318204967156,-0.0504077781289407),
new Point(0.494305655253277,0.162386931434678),
},
{
new Point(0.489555343377694,0.195525390336348),
new Point(0.45445590744668,0.0476050344380057),
new Point(0.496309331885004,-0.00662630129599351),
new Point(0.45445590744668,0.0476050344380057),
new Point(0.489555343377694,0.195525390336348),
},
{
new Point(0.384867723770923,0.231948347369653),
new Point(0.439628078777297,0.0659217639707732),
new Point(0.47667044348818,0.0319590106558332),
new Point(0.439628078777297,0.0659217639707732),
new Point(0.384867723770923,0.231948347369653),
},
{
new Point(0.363819357491664,0.208801644961583),
new Point(0.418067217031086,0.0634500465463992),
new Point(0.433784870366022,0.0257095551101474),
new Point(0.418067217031086,0.0634500465463992),
new Point(0.363819357491664,0.208801644961583),
},
},
new Point[,]
{
{
new Point(0.240757275046721,0.144946578234001),
new Point(0.306666492401578,0.111094068269461),
new Point(0.307503502499014,0.104881972485726),
new Point(0.306666492401578,0.111094068269461),
new Point(0.240757275046721,0.144946578234001),
},
{
new Point(0.307842796701859,0.108385030175004),
new Point(0.310466014695214,0.0631228897005025),
new Point(0.329640426514579,0.0585582052293738),
new Point(0.310466014695214,0.0631228897005025),
new Point(0.307842796701859,0.108385030175004),
},
},
new Point[,]
{
{
new Point(0.308925831929527,-0.0573084273672801),
new Point(0.297874566533481,-0.0747834567564583),
new Point(0.29094185647884,-0.104759626134904),
new Point(0.297874566533481,-0.0747834567564583),
new Point(0.308925831929527,-0.0573084273672801),
},
{
new Point(0.32905232419208,-0.0159130758167572),
new Point(0.305724736696157,-0.0448051589726468),
new Point(0.301756694515737,-0.0830882028702615),
new Point(0.305724736696157,-0.0448051589726468),
new Point(0.32905232419208,-0.0159130758167572),
},
},
new Point[,]
{
{
new Point(-0.307503502499014,0.104881972485726),
new Point(-0.306666492401578,0.111094068269461),
new Point(-0.240757275046721,0.144946578234001),
new Point(-0.306666492401578,0.111094068269461),
new Point(-0.307503502499014,0.104881972485726),
},
{
new Point(-0.329640426514579,0.0585582052293738),
new Point(-0.310466014695214,0.0631228897005025),
new Point(-0.307842796701859,0.108385030175004),
new Point(-0.310466014695214,0.0631228897005025),
new Point(-0.329640426514579,0.0585582052293738),
},
},
new Point[,]
{
{
new Point(-0.29094185647884,-0.104759626134904),
new Point(-0.297874566533481,-0.0747834567564583),
new Point(-0.308925831929527,-0.0573084273672801),
new Point(-0.297874566533481,-0.0747834567564583),
new Point(-0.29094185647884,-0.104759626134904),
},
{
new Point(-0.301756694515737,-0.0830882028702615),
new Point(-0.305724736696157,-0.0448051589726468),
new Point(-0.32905232419208,-0.0159130758167572),
new Point(-0.305724736696157,-0.0448051589726468),
new Point(-0.301756694515737,-0.0830882028702615),
},
},
new Point[,]
{
{
new Point(0.129929363729563,-0.094574535187306),
new Point(0.123365861944228,-0.104550161330014),
new Point(0.117170265734277,-0.113966621253853),
new Point(0.123365861944228,-0.104550161330014),
new Point(0.129929363729563,-0.094574535187306),
},
{
new Point(0.303048092457458,-0.0705117689939982),
new Point(0.297062013432116,-0.0778864415750182),
new Point(0.288299328116747,-0.110054883486735),
new Point(0.297062013432116,-0.0778864415750182),
new Point(0.303048092457458,-0.0705117689939982),
},
{
new Point(0.308925831929527,-0.0573084273672801),
new Point(0.297874566533481,-0.0747834567564583),
new Point(0.29094185647884,-0.104759626134904),
new Point(0.297874566533481,-0.0747834567564583),
new Point(0.308925831929527,-0.0573084273672801),
},
{
new Point(0.493776473822091,0.132700218809407),
new Point(0.4032287458698,-0.054097961078218),
new Point(0.422394899979147,-0.106989672320645),
new Point(0.4032287458698,-0.054097961078218),
new Point(0.493776473822091,0.132700218809407),
},
{
new Point(0.430206840598732,0.186601261748593),
new Point(0.425534931050781,-0.0133706737663166),
new Point(0.452628591568196,-0.0662623850087436),
new Point(0.425534931050781,-0.0133706737663166),
new Point(0.430206840598732,0.186601261748593),
},
{
new Point(0.351524517999222,0.212913115008342),
new Point(0.41675922136607,0.0769054585891578),
new Point(0.463189635709005,0.00727416441167694),
new Point(0.41675922136607,0.0769054585891578),
new Point(0.351524517999222,0.212913115008342),
},
{
new Point(0.284490562589796,0.153990212457358),
new Point(0.306666492401578,0.111094068269461),
new Point(0.307503502499014,0.104881972485726),
new Point(0.306666492401578,0.111094068269461),
new Point(0.284490562589796,0.153990212457358),
},
{
new Point(0.240757275046721,0.144946578234001),
new Point(0.305829482304142,0.121661807512726),
new Point(0.305829482304142,0.108385030175004),
new Point(0.305829482304142,0.121661807512726),
new Point(0.240757275046721,0.144946578234001),
},
{
new Point(0.111135896703743,0.215590111098249),
new Point(0.0817854270039611,0.21559011109825),
new Point(0.0437009088754721,0.21559011109825),
new Point(0.0817854270039611,0.21559011109825),
new Point(0.111135896703743,0.215590111098249),
},
{
new Point(-0.0437009088754721,0.21559011109825),
new Point(-0.0817854270039613,0.21559011109825),
new Point(-0.111135896703743,0.215590111098249),
new Point(-0.0817854270039613,0.21559011109825),
new Point(-0.0437009088754721,0.21559011109825),
},
{
new Point(-0.305829482304142,0.108385030175004),
new Point(-0.305829482304142,0.121661807512726),
new Point(-0.240757275046721,0.144946578234001),
new Point(-0.305829482304142,0.121661807512726),
new Point(-0.305829482304142,0.108385030175004),
},
{
new Point(-0.307503502499014,0.104881972485726),
new Point(-0.306666492401578,0.111094068269461),
new Point(-0.284490562589796,0.153990212457358),
new Point(-0.306666492401578,0.111094068269461),
new Point(-0.307503502499014,0.104881972485726),
},
{
new Point(-0.463189635709005,0.00727416441167694),
new Point(-0.41675922136607,0.0769054585891578),
new Point(-0.351524517999222,0.212913115008342),
new Point(-0.41675922136607,0.0769054585891578),
new Point(-0.463189635709005,0.00727416441167694),
},
{
new Point(-0.452628591568196,-0.0662623850087436),
new Point(-0.425534931050781,-0.0133706737663166),
new Point(-0.430206840598732,0.186601261748593),
new Point(-0.425534931050781,-0.0133706737663166),
new Point(-0.452628591568196,-0.0662623850087436),
},
{
new Point(-0.422394899979147,-0.106989672320645),
new Point(-0.4032287458698,-0.054097961078218),
new Point(-0.493776473822091,0.132700218809407),
new Point(-0.4032287458698,-0.054097961078218),
new Point(-0.422394899979147,-0.106989672320645),
},
{
new Point(-0.29094185647884,-0.104759626134904),
new Point(-0.297874566533481,-0.0747834567564583),
new Point(-0.308925831929527,-0.0573084273672801),
new Point(-0.297874566533481,-0.0747834567564583),
new Point(-0.29094185647884,-0.104759626134904),
},
{
new Point(-0.288299328116747,-0.110054883486735),
new Point(-0.297062013432116,-0.0778864415750182),
new Point(-0.303048092457458,-0.0705117689939982),
new Point(-0.297062013432116,-0.0778864415750182),
new Point(-0.288299328116747,-0.110054883486735),
},
{
new Point(-0.117170265734277,-0.113966621253853),
new Point(-0.123365861944228,-0.104550161330014),
new Point(-0.129929363729563,-0.094574535187306),
new Point(-0.123365861944228,-0.104550161330014),
new Point(-0.117170265734277,-0.113966621253853),
},
},
new Point[,]
{
{
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
new Point(-0.131146444595877,0.0909659434685958),
},
{
new Point(-0.121160752516052,0.108824688493841),
new Point(-0.121160752516052,0.108824688493841),
new Point(-0.121160752516052,0.108824688493841),
new Point(-0.121160752516052,0.108824688493841),
new Point(-0.121160752516052,0.108824688493841),
},
{
new Point(-0.0582537231547879,0.160138956792329),
new Point(-0.0582537231547879,0.160138956792329),
new Point(-0.0582537231547879,0.160138956792329),
new Point(-0.0582537231547879,0.160138956792329),
new Point(-0.0582537231547879,0.160138956792329),
},
{
new Point(-0.0441855183217985,0.163662301109958),
new Point(-0.0441855183217985,0.163662301109958),
new Point(-0.0441855183217985,0.163662301109958),
new Point(-0.0441855183217985,0.163662301109958),
new Point(-0.0441855183217985,0.163662301109958),
},
{
new Point(-0.0106825969415127,0.203669051619277),
new Point(-0.0106825969415127,0.203669051619277),
new Point(-0.0106825969415127,0.203669051619277),
new Point(-0.0106825969415127,0.203669051619277),
new Point(-0.0106825969415127,0.203669051619277),
},
{
new Point(0.0106825969415127,0.203669051619277),
new Point(0.0106825969415127,0.203669051619277),
new Point(0.0106825969415127,0.203669051619277),
new Point(0.0106825969415127,0.203669051619277),
new Point(0.0106825969415127,0.203669051619277),
},
{
new Point(0.0441855183217985,0.163662301109958),
new Point(0.0441855183217985,0.163662301109958),
new Point(0.0441855183217985,0.163662301109958),
new Point(0.0441855183217985,0.163662301109958),
new Point(0.0441855183217985,0.163662301109958),
},
{
new Point(0.0582537231547877,0.160138956792329),
new Point(0.0582537231547877,0.160138956792329),
new Point(0.0582537231547877,0.160138956792329),
new Point(0.0582537231547877,0.160138956792329),
new Point(0.0582537231547877,0.160138956792329),
},
{
new Point(0.121160752516052,0.108824688493841),
new Point(0.121160752516052,0.108824688493841),
new Point(0.121160752516052,0.108824688493841),
new Point(0.121160752516052,0.108824688493841),
new Point(0.121160752516052,0.108824688493841),
},
{
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
new Point(0.131146444595877,0.0909659434685959),
},
},
new Point[,]
{
{
new Point(0.276531541898837,-0.0741974566202512),
new Point(0.276531541898837,-0.0810380297634268),
new Point(0.276531541898837,-0.0810380297634268),
new Point(0.276531541898837,-0.0810380297634268),
new Point(0.276531541898837,-0.0741974566202512),
},
{
new Point(0.236924125728309,-0.126720995160761),
new Point(0.236924125728309,-0.0871180783767322),
new Point(0.236924125728309,-0.0871180783767322),
new Point(0.236924125728309,-0.0871180783767322),
new Point(0.236924125728309,-0.126720995160761),
},
{
new Point(0.129929363729563,-0.150758693546223),
new Point(0.129929363729563,-0.103542612396336),
new Point(0.129929363729563,-0.103542612396336),
new Point(0.129929363729563,-0.103542612396336),
new Point(0.129929363729563,-0.150758693546223),
},
{
new Point(0.102166519333689,-0.150251431986506),
new Point(0.102166519333689,-0.136770266278629),
new Point(0.102166519333689,-0.136770266278629),
new Point(0.102166519333689,-0.136770266278629),
new Point(0.102166519333689,-0.150251431986506),
},
{
new Point(0.0824331835006293,-0.142772124404578),
new Point(0.0824331835006293,-0.142772124404578),
new Point(0.0824331835006293,-0.142772124404578),
new Point(0.0824331835006293,-0.142772124404578),
new Point(0.0824331835006293,-0.142772124404578),
},
},
new Point[,]
{
{
new Point(0.236924125728309,-0.126720995160762),
new Point(0.236924125728309,-0.0871180783767323),
new Point(0.236924125728309,-0.0871180783767323),
new Point(0.236924125728309,-0.0871180783767323),
new Point(0.236924125728309,-0.126720995160762),
},
{
new Point(0.17801812784553,-0.259345736235297),
new Point(0.17801812784553,-0.0961606104419538),
new Point(0.17801812784553,-0.0961606104419538),
new Point(0.17801812784553,-0.0961606104419538),
new Point(0.17801812784553,-0.259345736235297),
},
{
new Point(0.142497845797798,-0.281332923483859),
new Point(0.142497845797798,-0.101613251954309),
new Point(0.142497845797798,-0.101613251954309),
new Point(0.142497845797798,-0.101613251954309),
new Point(0.142497845797798,-0.281332923483859),
},
{
new Point(0.102878336615591,-0.272365373749282),
new Point(0.102878336615591,-0.135688401244017),
new Point(0.102878336615591,-0.135688401244017),
new Point(0.102878336615591,-0.135688401244017),
new Point(0.102878336615591,-0.272365373749282),
},
{
new Point(0.0526728670237262,-0.227703998701976),
new Point(0.0526728670237262,-0.151823670417314),
new Point(0.0526728670237262,-0.151823670417314),
new Point(0.0526728670237262,-0.151823670417314),
new Point(0.0526728670237262,-0.227703998701976),
},
{
new Point(0.0374528685812345,-0.157212082244538),
new Point(0.0374528685812345,-0.156452805202319),
new Point(0.0374528685812345,-0.156452805202319),
new Point(0.0374528685812345,-0.156452805202319),
new Point(0.0374528685812345,-0.157212082244538),
},
},
new Point[,]
{
{
new Point(-0.288963396052676,0.115282905205552),
new Point(-0.288963396052676,0.128732751071594),
new Point(-0.288963396052676,0.128732751071594),
new Point(-0.288963396052676,0.128732751071594),
new Point(-0.288963396052676,0.115282905205552),
},
{
new Point(-0.249749854432044,0.255326797812352),
new Point(-0.249749854432044,0.145172649184808),
new Point(-0.249749854432044,0.145172649184808),
new Point(-0.249749854432044,0.145172649184808),
new Point(-0.249749854432044,0.255326797812352),
},
{
new Point(-0.145224893790358,0.316343742123144),
new Point(-0.145224893790358,0.188993728134048),
new Point(-0.145224893790358,0.188993728134048),
new Point(-0.145224893790358,0.188993728134048),
new Point(-0.145224893790358,0.316343742123144),
},
{
new Point(-0.0779970769205458,0.288709863187129),
new Point(-0.0779970769205458,0.21559011109825),
new Point(-0.0779970769205458,0.21559011109825),
new Point(-0.0779970769205458,0.21559011109825),
new Point(-0.0779970769205458,0.288709863187129),
},
{
new Point(-0.0437009088754721,0.21559011109825),
new Point(-0.0437009088754721,0.21559011109825),
new Point(-0.0437009088754721,0.21559011109825),
new Point(-0.0437009088754721,0.21559011109825),
new Point(-0.0437009088754721,0.21559011109825),
},
},
new Point[,]
{
{
new Point(-0.265444729287913,0.199275462534948),
new Point(-0.265444729287913,0.138592724677444),
new Point(-0.265444729287913,0.138592724677444),
new Point(-0.265444729287913,0.138592724677444),
new Point(-0.265444729287913,0.199275462534948),
},
{
new Point(-0.246599137821197,0.357152058570458),
new Point(-0.246599137821197,0.146493556655167),
new Point(-0.246599137821197,0.146493556655167),
new Point(-0.246599137821197,0.146493556655167),
new Point(-0.246599137821197,0.357152058570458),
},
{
new Point(-0.176408451475286,0.384592430966104),
new Point(-0.176408451475286,0.175920323132598),
new Point(-0.176408451475286,0.175920323132598),
new Point(-0.176408451475286,0.175920323132598),
new Point(-0.176408451475286,0.384592430966104),
},
{
new Point(-0.143529911351892,0.358824278324074),
new Point(-0.143529911351892,0.189704333130191),
new Point(-0.143529911351892,0.189704333130191),
new Point(-0.143529911351892,0.189704333130191),
new Point(-0.143529911351892,0.358824278324074),
},
{
new Point(-0.111088325607559,0.302311964261292),
new Point(-0.111088325607559,0.203305154229892),
new Point(-0.111088325607559,0.203305154229892),
new Point(-0.111088325607559,0.203305154229892),
new Point(-0.111088325607559,0.302311964261292),
},
},
new Point[,]
{
{
new Point(0.288963396052676,0.128732751071594),
new Point(0.288963396052676,0.128732751071594),
new Point(0.288963396052676,0.115282905205552),
new Point(0.288963396052676,0.128732751071594),
new Point(0.288963396052676,0.128732751071594),
},
{
new Point(0.249749854432044,0.145172649184808),
new Point(0.249749854432044,0.145172649184808),
new Point(0.249749854432044,0.255326797812352),
new Point(0.249749854432044,0.145172649184808),
new Point(0.249749854432044,0.145172649184808),
},
{
new Point(0.145224893790358,0.188993728134048),
new Point(0.145224893790358,0.188993728134048),
new Point(0.145224893790358,0.316343742123144),
new Point(0.145224893790358,0.188993728134048),
new Point(0.145224893790358,0.188993728134048),
},
{
new Point(0.0779970769205458,0.21559011109825),
new Point(0.0779970769205458,0.21559011109825),
new Point(0.0779970769205458,0.288709863187129),
new Point(0.0779970769205458,0.21559011109825),
new Point(0.0779970769205458,0.21559011109825),
},
{
new Point(0.0437009088754721,0.21559011109825),
new Point(0.0437009088754721,0.21559011109825),
new Point(0.0437009088754721,0.21559011109825),
new Point(0.0437009088754721,0.21559011109825),
new Point(0.0437009088754721,0.21559011109825),
},
},
new Point[,]
{
{
new Point(0.265444729287913,0.138592724677444),
new Point(0.265444729287913,0.138592724677444),
new Point(0.265444729287913,0.199275462534948),
new Point(0.265444729287913,0.138592724677444),
new Point(0.265444729287913,0.138592724677444),
},
{
new Point(0.246599137821197,0.146493556655167),
new Point(0.246599137821197,0.146493556655167),
new Point(0.246599137821197,0.357152058570458),
new Point(0.246599137821197,0.146493556655167),
new Point(0.246599137821197,0.146493556655167),
},
{
new Point(0.176408451475286,0.175920323132597),
new Point(0.176408451475286,0.175920323132597),
new Point(0.176408451475286,0.384592430966104),
new Point(0.176408451475286,0.175920323132597),
new Point(0.176408451475286,0.175920323132597),
},
{
new Point(0.143529911351892,0.18970433313019),
new Point(0.143529911351892,0.18970433313019),
new Point(0.143529911351892,0.358824278324074),
new Point(0.143529911351892,0.18970433313019),
new Point(0.143529911351892,0.18970433313019),
},
{
new Point(0.111088325607559,0.203305154229892),
new Point(0.111088325607559,0.203305154229892),
new Point(0.111088325607559,0.302311964261292),
new Point(0.111088325607559,0.203305154229892),
new Point(0.111088325607559,0.203305154229892),
},
},
new Point[,]
{
{
new Point(-0.276531541898837,-0.0851258313367492),
new Point(-0.276531541898837,-0.0851258313367492),
new Point(-0.276531541898837,-0.0741974566202512),
new Point(-0.276531541898837,-0.0851258313367492),
new Point(-0.276531541898837,-0.0851258313367492),
},
{
new Point(-0.236924125728309,-0.0901488072547543),
new Point(-0.236924125728309,-0.0901488072547543),
new Point(-0.236924125728309,-0.126720995160761),
new Point(-0.236924125728309,-0.0901488072547543),
new Point(-0.236924125728309,-0.0901488072547543),
},
{
new Point(-0.129929363729563,-0.103717784098851),
new Point(-0.129929363729563,-0.103717784098851),
new Point(-0.129929363729563,-0.150758693546223),
new Point(-0.129929363729563,-0.103717784098851),
new Point(-0.129929363729563,-0.103717784098851),
},
{
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.150251431986506),
new Point(-0.102166519333689,-0.13677026627863),
new Point(-0.102166519333689,-0.13677026627863),
},
{
new Point(-0.0824331835006293,-0.142772124404579),
new Point(-0.0824331835006293,-0.142772124404579),
new Point(-0.0824331835006293,-0.142772124404578),
new Point(-0.0824331835006293,-0.142772124404579),
new Point(-0.0824331835006293,-0.142772124404579),
},
},
new Point[,]
{
{
new Point(-0.236924125728309,-0.0901488072547542),
new Point(-0.236924125728309,-0.0901488072547542),
new Point(-0.236924125728309,-0.126720995160762),
new Point(-0.236924125728309,-0.0901488072547542),
new Point(-0.236924125728309,-0.0901488072547542),
},
{
new Point(-0.17801812784553,-0.0976192114719181),
new Point(-0.17801812784553,-0.0976192114719181),
new Point(-0.17801812784553,-0.259345736235297),
new Point(-0.17801812784553,-0.0976192114719181),
new Point(-0.17801812784553,-0.0976192114719181),
},
{
new Point(-0.142497845797798,-0.102123860817342),
new Point(-0.142497845797798,-0.102123860817342),
new Point(-0.142497845797798,-0.281332923483859),
new Point(-0.142497845797798,-0.102123860817342),
new Point(-0.142497845797798,-0.102123860817342),
},
{
new Point(-0.102878336615591,-0.135688401244017),
new Point(-0.102878336615591,-0.135688401244017),
new Point(-0.102878336615591,-0.272365373749282),
new Point(-0.102878336615591,-0.135688401244017),
new Point(-0.102878336615591,-0.135688401244017),
},
{
new Point(-0.0526728670237262,-0.151823670417314),
new Point(-0.0526728670237262,-0.151823670417314),
new Point(-0.0526728670237262,-0.227703998701976),
new Point(-0.0526728670237262,-0.151823670417314),
new Point(-0.0526728670237262,-0.151823670417314),
},
{
new Point(-0.0374528685812345,-0.156452805202319),
new Point(-0.0374528685812345,-0.156452805202319),
new Point(-0.0374528685812345,-0.157212082244538),
new Point(-0.0374528685812345,-0.156452805202319),
new Point(-0.0374528685812345,-0.156452805202319),
},
},
};
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
using Common.Logging;
using Lucene.Net.Index;
using Lucene.Net.Linq;
using Lucene.Net.Linq.Abstractions;
using Lucene.Net.Search;
using NuGet.Lucene.Util;
namespace NuGet.Lucene
{
public class PackageIndexer : IPackageIndexer, IDisposable
{
public static ILog Log = LogManager.GetLogger<PackageIndexer>();
private static readonly TimeSpan InfiniteTimeSpan = Timeout.InfiniteTimeSpan;
private enum UpdateType { Add, Remove, RemoveByPath, Increment }
private class Update
{
private readonly LucenePackage package;
private readonly UpdateType updateType;
private readonly TaskCompletionSource<object> signal = new TaskCompletionSource<object>();
public Update(LucenePackage package, UpdateType updateType)
{
this.package = package;
this.updateType = updateType;
}
public LucenePackage Package
{
get { return package; }
}
public UpdateType UpdateType
{
get { return updateType; }
}
public Task Task
{
get
{
return signal.Task;
}
}
public void SetComplete()
{
if (!signal.Task.IsCompleted)
{
signal.SetResult(null);
}
}
public void SetException(Exception exception)
{
signal.SetException(exception);
}
}
private volatile IndexingState indexingState = IndexingState.Idle;
private readonly object synchronizationStatusLock = new object();
private volatile SynchronizationStatus synchronizationStatus = new SynchronizationStatus(SynchronizationState.Idle);
private readonly BlockingCollection<Update> pendingUpdates = new BlockingCollection<Update>();
private Task indexUpdaterTask;
private readonly TaskPoolScheduler eventScheduler = TaskPoolScheduler.Default;
public IFileSystem FileSystem { get; set; }
public IIndexWriter Writer { get; set; }
public LuceneDataProvider Provider { get; set; }
public ILucenePackageRepository PackageRepository { get; set; }
private event EventHandler statusChanged;
        /// <summary>
        /// Starts the long-running background task that drains <see cref="pendingUpdates"/>
        /// via <see cref="IndexUpdateLoop"/>. <see cref="Dispose"/> waits on this task.
        /// </summary>
        public void Initialize()
        {
            // LongRunning hints the scheduler to use a dedicated thread rather than
            // tying up a thread-pool thread for the lifetime of the loop.
            indexUpdaterTask = Task.Factory.StartNew(IndexUpdateLoop, TaskCreationOptions.LongRunning);
        }
public void Dispose()
{
pendingUpdates.CompleteAdding();
indexUpdaterTask.Wait();
}
        /// <summary>
        /// Gets status of index building activity.
        /// </summary>
        /// <returns>A snapshot combining the current indexing state with the current
        /// synchronization status; both backing fields are volatile, so the snapshot
        /// reflects the latest published values.</returns>
        public IndexingStatus GetIndexingStatus()
        {
            return new IndexingStatus(indexingState, synchronizationStatus);
        }
public IObservable<IndexingStatus> StatusChanged
{
get
{
return Observable.FromEventPattern<EventHandler, EventArgs>(
eh => eh.Invoke,
eh => statusChanged += eh,
eh => statusChanged -= eh)
.Select(_ => GetIndexingStatus());
}
}
        /// <summary>
        /// Delegates to <see cref="Writer"/>.Optimize(), reporting
        /// <see cref="IndexingState.Optimizing"/> for the duration of the call.
        /// </summary>
        public void Optimize()
        {
            using (UpdateStatus(IndexingState.Optimizing))
            {
                Writer.Optimize();
            }
        }
        /// <summary>
        /// Compares packages on the file system with packages in the index and applies
        /// the differences. Only one synchronization may run at a time.
        /// </summary>
        /// <param name="mode">Passed through to <see cref="IndexDifferenceCalculator.FindDifferences"/>
        /// to control how differences are calculated.</param>
        /// <param name="cancellationToken">Cancels the file scan and subsequent indexing.</param>
        /// <exception cref="InvalidOperationException">A synchronization is already in progress.</exception>
        public async Task SynchronizeIndexWithFileSystemAsync(SynchronizationMode mode, CancellationToken cancellationToken)
        {
            // Check-and-transition under a lock so two concurrent callers cannot both
            // pass the Idle check and start a synchronization.
            lock (synchronizationStatusLock)
            {
                if (synchronizationStatus.SynchronizationState != SynchronizationState.Idle)
                {
                    throw new InvalidOperationException("Already running");
                }
                UpdateSynchronizationStatus(SynchronizationState.ScanningFiles);
            }
            try
            {
                var differences = IndexDifferenceCalculator.FindDifferences(
                    FileSystem, PackageRepository.LucenePackages, cancellationToken, UpdateSynchronizationStatus, mode);
                await SynchronizeIndexWithFileSystemAsync(differences, cancellationToken);
            }
            finally
            {
                // Always return to Idle, even on failure or cancellation, so future
                // synchronization attempts are not permanently blocked.
                UpdateSynchronizationStatus(SynchronizationState.Idle);
            }
        }
public Task AddPackageAsync(LucenePackage package, CancellationToken cancellationToken)
{
var update = new Update(package, UpdateType.Add);
pendingUpdates.Add(update, cancellationToken);
return update.Task;
}
public Task RemovePackageAsync(IPackage package, CancellationToken cancellationToken)
{
if (!(package is LucenePackage)) throw new ArgumentException("Package of type " + package.GetType() + " not supported.");
var update = new Update((LucenePackage)package, UpdateType.Remove);
pendingUpdates.Add(update, cancellationToken);
return update.Task;
}
public Task IncrementDownloadCountAsync(IPackage package, CancellationToken cancellationToken)
{
if (!(package is LucenePackage)) throw new ArgumentException("Package of type " + package.GetType() + " not supported.");
if (string.IsNullOrWhiteSpace(package.Id))
{
throw new InvalidOperationException("Package Id must be specified.");
}
if (package.Version == null)
{
throw new InvalidOperationException("Package Version must be specified.");
}
var update = new Update((LucenePackage) package, UpdateType.Increment);
pendingUpdates.Add(update);
return update.Task;
}
internal async Task SynchronizeIndexWithFileSystemAsync(IndexDifferences diff, CancellationToken cancellationToken)
{
if (diff.IsEmpty) return;
var tasks = new ConcurrentQueue<Task>();
Log.Info(string.Format("Updates to process: {0} packages added, {1} packages updated, {2} packages removed.", diff.NewPackages.Count(), diff.ModifiedPackages.Count(), diff.MissingPackages.Count()));
foreach (var path in diff.MissingPackages)
{
cancellationToken.ThrowIfCancellationRequested();
var package = new LucenePackage(FileSystem) { Path = path };
var update = new Update(package, UpdateType.RemoveByPath);
pendingUpdates.Add(update, cancellationToken);
tasks.Enqueue(update.Task);
}
var pathsToIndex = diff.NewPackages.Union(diff.ModifiedPackages).OrderBy(p => p).ToArray();
var packagesToIndex = pathsToIndex.Length;
var i = 0;
Parallel.ForEach(pathsToIndex, new ParallelOptions {MaxDegreeOfParallelism = 4, CancellationToken = cancellationToken}, (p, s) =>
{
UpdateSynchronizationStatus(SynchronizationState.Indexing, Interlocked.Increment(ref i),
packagesToIndex);
tasks.Enqueue(SynchronizePackage(p, cancellationToken));
});
var task = Task.WhenAll(tasks.ToArray());
try
{
await task;
}
finally
{
if (task.IsFaulted && task.Exception?.InnerExceptions.Count > 1)
{
throw new AggregateException(task.Exception.InnerExceptions);
}
}
}
private async Task SynchronizePackage(string path, CancellationToken cancellationToken)
{
try
{
var package = PackageRepository.LoadFromFileSystem(path);
await AddPackageAsync(package, cancellationToken);
}
catch (Exception ex)
{
throw new IOException("The package file '" + path + "' could not be loaded.", ex);
}
}
/// <summary>
/// Long-running consumer loop: drains queued index updates in batches and
/// applies each batch, until the pending-update queue is marked complete.
/// </summary>
private void IndexUpdateLoop()
{
    var batch = new List<Update>();
    while (!pendingUpdates.IsCompleted)
    {
        try
        {
            pendingUpdates.TakeAvailable(batch, InfiniteTimeSpan);
        }
        catch (OperationCanceledException)
        {
            // Blocking take was cancelled (shutdown); the loop condition decides exit.
        }
        if (batch.Count > 0)
        {
            ApplyUpdates(batch);
            batch.Clear();
        }
    }
    Log.Info("Update task shutting down.");
}
/// <summary>
/// Applies one batch of queued updates inside a single Lucene session:
/// removals, then path-based removals, then additions, then download-count
/// increments — followed by one commit that completes every update's task.
/// On any failure, all updates in the batch are faulted with the exception.
/// </summary>
/// <param name="items">The batch of updates to apply.</param>
private void ApplyUpdates(List<Update> items)
{
    // Use the List<T>.Count property instead of LINQ Count() — no enumeration needed.
    Log.Info(m => m("Processing {0} updates.", items.Count));
    try
    {
        using (var session = OpenSession())
        {
            using (UpdateStatus(IndexingState.Updating))
            {
                var removals =
                    items.Where(i => i.UpdateType == UpdateType.Remove).ToList();
                removals.ForEach(pkg => RemovePackageInternal(pkg, session));
                var removalsByPath =
                    items.Where(i => i.UpdateType == UpdateType.RemoveByPath).ToList();
                RemovePackagesByPath(removalsByPath, session);
                var additions = items.Where(i => i.UpdateType == UpdateType.Add).ToList();
                ApplyPendingAdditions(additions, session);
                var downloadUpdates =
                    items.Where(i => i.UpdateType == UpdateType.Increment).Select(i => i.Package).ToList();
                ApplyPendingDownloadIncrements(downloadUpdates, session);
            }
            using (UpdateStatus(IndexingState.Committing))
            {
                session.Commit();
                // Signal completion only after the commit has made the changes durable.
                items.ForEach(i => i.SetComplete());
            }
        }
    }
    catch (Exception ex)
    {
        Log.Error("Error while indexing packages", ex);
        items.ForEach(i => i.SetException(ex));
    }
}
/// <summary>
/// Deletes packages from the index by their file-system path, issuing all
/// deletions in a single session call.
/// </summary>
private static void RemovePackagesByPath(IEnumerable<Update> removalsByPath, ISession<LucenePackage> session)
{
    // One exact-match term query per path.
    var deleteQueries = removalsByPath
        .Select(update => (Query)new TermQuery(new Term("Path", update.Package.Path)))
        .ToArray();
    session.Delete(deleteQueries);
}
/// <summary>
/// Applies queued package additions, grouped by package id so version flags and
/// download counts can be computed once per id.
/// </summary>
/// <param name="additions">Add-type updates to apply.</param>
/// <param name="session">Open Lucene session the additions are written to.</param>
private void ApplyPendingAdditions(List<Update> additions, ISession<LucenePackage> session)
{
    foreach (var grouping in additions.GroupBy(update => update.Package.Id))
    {
        try
        {
            AddPackagesInternal(grouping.Key, grouping.Select(p => p.Package).ToList(), session);
        }
        catch (Exception ex)
        {
            // Fault only the updates belonging to the package id that failed.
            // The original code faulted ALL additions here, which incorrectly
            // marked already-applied and not-yet-processed groupings as failed
            // (and re-faulted them on each subsequent grouping failure).
            foreach (var update in grouping)
            {
                update.SetException(ex);
            }
        }
    }
}
/// <summary>
/// Adds or replaces the given packages (all sharing <paramref name="packageId"/>)
/// in the index, carrying over download counts and origin metadata from any
/// versions being replaced, then recomputes the latest-version flags for the id.
/// </summary>
/// <param name="packageId">Package id shared by every package in <paramref name="packages"/>.</param>
/// <param name="packages">New or updated package versions to write.</param>
/// <param name="session">Open Lucene session the changes are written to.</param>
private void AddPackagesInternal(string packageId, IEnumerable<LucenePackage> packages, ISession<LucenePackage> session)
{
    // Existing versions of this id, newest first.
    var currentPackages = (from p in session.Query()
                            where p.Id == packageId
                            orderby p.Version descending
                            select p).ToList();
    var newest = currentPackages.FirstOrDefault();
    // Aggregate download count lives on the newest version (0 when the id is new).
    var totalDownloadCount = newest != null ? newest.DownloadCount : 0;
    foreach (var package in packages)
    {
        var packageToReplace = currentPackages.Find(p => p.Version == package.Version);
        package.DownloadCount = totalDownloadCount;
        // Per-version count survives replacement; starts at 0 for a brand-new version.
        package.VersionDownloadCount = packageToReplace != null ? packageToReplace.VersionDownloadCount : 0;
        if (packageToReplace == null)
        {
            // New version: no uniqueness constraint needed.
            session.Add(KeyConstraint.None, package);
        }
        else
        {
            // Replacement: preserve mirror metadata and enforce key uniqueness so the
            // old document is superseded rather than duplicated.
            currentPackages.Remove(packageToReplace);
            package.OriginUrl = packageToReplace.OriginUrl;
            package.IsMirrored = packageToReplace.IsMirrored;
            session.Add(KeyConstraint.Unique, package);
        }
        // Keep the in-memory snapshot current so later iterations and the flag
        // update below see this package.
        currentPackages.Add(package);
    }
    UpdatePackageVersionFlags(currentPackages.OrderByDescending(p => p.Version));
}
/// <summary>
/// Deletes a single package from the index and recomputes the latest-version
/// flags for whatever versions of that package id remain. Failures are captured
/// on the update rather than thrown.
/// </summary>
private void RemovePackageInternal(Update update, ISession<LucenePackage> session)
{
    try
    {
        session.Delete(update.Package);
        var remaining = from candidate in session.Query()
                        where candidate.Id == update.Package.Id
                        orderby candidate.Version descending
                        select candidate;
        UpdatePackageVersionFlags(remaining);
    }
    catch (Exception ex)
    {
        // Fault this update only; other updates in the batch proceed.
        update.SetException(ex);
    }
}
/// <summary>
/// Recomputes the latest-version flags for one package id. The input must be
/// ordered newest-version first: the first package gets IsAbsoluteLatestVersion,
/// and the first non-prerelease package gets IsLatestVersion.
/// </summary>
/// <param name="packages">All versions of a single package id, newest first.</param>
private void UpdatePackageVersionFlags(IEnumerable<LucenePackage> packages)
{
    var isNewest = true;
    var stableNotYetSeen = true;
    foreach (var p in packages)
    {
        // IsLatestVersion: newest stable (non-prerelease) version only.
        if (!p.IsPrerelease && stableNotYetSeen)
        {
            p.IsLatestVersion = true;
            stableNotYetSeen = false;
        }
        else
        {
            p.IsLatestVersion = false;
        }
        // IsAbsoluteLatestVersion: newest version overall, prerelease or not.
        // (Unconditional reset replaces the original's redundant `if (first)` guard.)
        p.IsAbsoluteLatestVersion = isNewest;
        isNewest = false;
    }
}
/// <summary>
/// Applies download-count increments to indexed packages. The total download
/// count (all versions) on every document for an id is bumped by the number of
/// increments for that id; each version's own count is bumped by the number of
/// increments matching that version.
/// </summary>
/// <param name="increments">One entry per recorded download.</param>
/// <param name="session">Open Lucene session whose documents are updated in place.</param>
public void ApplyPendingDownloadIncrements(IList<LucenePackage> increments, ISession<LucenePackage> session)
{
    if (increments.Count == 0) return;
    var byId = increments.ToLookup(p => p.Id);
    foreach (var grouping in byId)
    {
        var packageId = grouping.Key;
        var packages = from p in session.Query() where p.Id == packageId select p;
        var byVersion = grouping.ToLookup(p => p.Version);
        // Hoisted out of the inner loop: the total increment for this id is
        // constant, so don't re-enumerate the grouping once per package document.
        var totalIncrement = grouping.Count();
        foreach (var lucenePackage in packages)
        {
            lucenePackage.DownloadCount += totalIncrement;
            lucenePackage.VersionDownloadCount += byVersion[lucenePackage.Version].Count();
        }
    }
}
/// <summary>
/// Opens a new index session whose documents materialize as
/// <see cref="LucenePackage"/> instances bound to this service's file system.
/// Virtual so tests can substitute a session.
/// </summary>
protected internal virtual ISession<LucenePackage> OpenSession() =>
    Provider.OpenSession(() => new LucenePackage(FileSystem));
/// <summary>
/// Publishes a new synchronization state with no progress figures
/// (completed and total package counts both zero).
/// </summary>
private void UpdateSynchronizationStatus(SynchronizationState state) =>
    UpdateSynchronizationStatus(state, 0, 0);
/// <summary>
/// Publishes a fresh synchronization status snapshot and notifies listeners.
/// </summary>
/// <param name="state">Current synchronization phase.</param>
/// <param name="completedPackages">Packages processed so far.</param>
/// <param name="packagesToIndex">Total packages in this synchronization run.</param>
private void UpdateSynchronizationStatus(SynchronizationState state, int completedPackages, int packagesToIndex)
{
    synchronizationStatus = new SynchronizationStatus(state, completedPackages, packagesToIndex);
    RaiseStatusChanged();
}
/// <summary>
/// Switches the indexing state and notifies listeners; disposing the returned
/// handle restores the previous state (and notifies again), allowing callers to
/// scope a state with a <c>using</c> block.
/// </summary>
private IDisposable UpdateStatus(IndexingState state)
{
    var previousState = indexingState;
    indexingState = state;
    RaiseStatusChanged();
    return new DisposableAction(() =>
    {
        indexingState = previousState;
        RaiseStatusChanged();
    });
}
/// <summary>
/// Schedules a status-changed notification on the event scheduler. The handler
/// is snapshotted into a local so an unsubscribe between the null check and the
/// scheduled invocation cannot cause a null dereference.
/// </summary>
private void RaiseStatusChanged()
{
    var handler = statusChanged;
    if (handler != null)
    {
        // EventArgs.Empty avoids allocating a fresh EventArgs per notification.
        eventScheduler.Schedule(() => handler(this, EventArgs.Empty));
    }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.Pipeline;
namespace Azure.Security.Attestation
{
/// <summary>
/// Attestation Client for the Microsoft Azure Attestation service.
///
/// The Attestation client contains the implementation of the "Attest" family of MAA apis.
/// </summary>
#pragma warning disable CA1001 // Types that own disposable fields should be disposable
public class AttestationAdministrationClient
#pragma warning restore CA1001 // Types that own disposable fields should be disposable
{
// Client options captured at construction; consulted for token validation settings.
private readonly AttestationClientOptions _options;
// HTTP pipeline with bearer-token authentication for the attestation data plane.
private readonly HttpPipeline _pipeline;
private readonly ClientDiagnostics _clientDiagnostics;
// Generated REST clients for the policy and policy-certificate endpoints.
private readonly PolicyRestClient _policyClient;
private readonly PolicyCertificatesRestClient _policyManagementClient;
// Nested attestation client; not referenced in the members visible here —
// presumably used by GetSignersAsync to fetch signing certificates (verify).
private readonly AttestationClient _attestationClient;
// Cached attestation signers; presumably populated lazily under _statelock by
// GetSignersAsync (population not visible in this chunk — confirm).
private IReadOnlyList<AttestationSigner> _signers;
// NOTE The SemaphoreSlim type does NOT need Disposable based on the current usage because AvailableWaitHandle is not referenced.
private SemaphoreSlim _statelock = new SemaphoreSlim(1, 1);
// Default scope for data plane APIs.
private const string DefaultScope = "https://attest.azure.net/.default";
/// <summary>
/// Returns the URI used to communicate with the service.
/// </summary>
public Uri Endpoint { get; }
/// <summary>
/// Initializes a new instance of the <see cref="AttestationAdministrationClient"/> class.
/// </summary>
/// <param name="endpoint">Uri for the Microsoft Azure Attestation Service Instance to use.</param>
/// <param name="credential">Credentials to be used in the Client.</param>
public AttestationAdministrationClient(Uri endpoint, TokenCredential credential)
    : this(endpoint, credential, new AttestationClientOptions())
{
    // Delegates to the options-accepting overload with default client options.
}
/// <summary>
/// Initializes a new instance of the <see cref="AttestationAdministrationClient"/> class.
/// </summary>
/// <param name="endpoint">Uri for the Microsoft Azure Attestation Service Instance to use.</param>
/// <param name="credential">Credentials to be used in the Client.</param>
/// <param name="options"><see cref="AttestationClientOptions"/> used to configure the API client.</param>
public AttestationAdministrationClient(Uri endpoint, TokenCredential credential, AttestationClientOptions options)
{
    Argument.AssertNotNull(endpoint, nameof(endpoint));
    Argument.AssertNotNull(credential, nameof(credential));
    Argument.AssertNotNull(options, nameof(options));
    _options = options;
    // Build the HTTP pipeline with bearer-token auth against the attestation scope.
    _pipeline = HttpPipelineBuilder.Build(options, new BearerTokenAuthenticationPolicy(credential, DefaultScope));
    // Initialize the ClientDiagnostics used for DiagnosticScope creation.
    _clientDiagnostics = new ClientDiagnostics(options);
    Endpoint = endpoint;
    // Initialize the Policy Rest Client.
    _policyClient = new PolicyRestClient(_clientDiagnostics, _pipeline, Endpoint.AbsoluteUri, options.Version);
    // Initialize the Certificates Rest Client.
    _policyManagementClient = new PolicyCertificatesRestClient(_clientDiagnostics, _pipeline, Endpoint.AbsoluteUri, options.Version);
    // Initialize the nested Attestation client (its use is not visible in this
    // chunk; presumably needed for signer retrieval — confirm).
    _attestationClient = new AttestationClient(endpoint, credential, options);
}
/// <summary>
/// Parameterless constructor for mocking.
/// </summary>
protected AttestationAdministrationClient()
{
    // Intentionally empty: mocking frameworks require an accessible
    // parameterless constructor; no fields are initialized on this path.
}
/// <summary>
/// Retrieves the attestation policy for the specified <see cref="AttestationType"/>.
/// </summary>
/// <param name="attestationType"><see cref="AttestationType"/> to retrieve.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{String}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// This API returns the underlying attestation policy object stored in the attestation service for this <paramref name="attestationType"/>.
///
/// The actual service response to the API is an RFC 7519 JSON Web Token(see https://tools.ietf.org/html/rfc7519"). This token can be retrieved from <see cref="AttestationResponse{T}.Token"/>.
/// For the GetPolicy API, the body of the <see cref="AttestationResponse{T}.Token"/> is a <see cref="StoredAttestationPolicy"/> object, NOT a string.
/// </remarks>
public virtual AttestationResponse<string> GetPolicy(AttestationType attestationType, CancellationToken cancellationToken = default)
{
    // Synchronous entry point over the shared sync/async implementation.
    return GetPolicyInternalAsync(attestationType, false, cancellationToken).EnsureCompleted();
}
/// <summary>
/// Retrieves the attestation policy for the specified <see cref="AttestationType"/>.
/// </summary>
/// <param name="attestationType">Attestation Type to retrieve.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{String}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// This API returns the underlying attestation policy object stored in the attestation service for this <paramref name="attestationType"/>.
///
/// The actual service response to the API is an RFC 7519 JSON Web Token(see https://tools.ietf.org/html/rfc7519"). This token can be retrieved from <see cref="AttestationResponse{T}.Token"/>.
/// For the GetPolicyAsync API, the body of the <see cref="AttestationResponse{T}.Token"/> is a <see cref="StoredAttestationPolicy"/> object, NOT a string.
/// </remarks>
public virtual async Task<AttestationResponse<string>> GetPolicyAsync(AttestationType attestationType, CancellationToken cancellationToken = default)
{
    // Asynchronous entry point over the shared sync/async implementation.
    return await GetPolicyInternalAsync(attestationType, true, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Retrieves the attestation policy for the specified <see cref="AttestationType"/>.
/// </summary>
/// <param name="attestationType"><see cref="AttestationType"/> to retrieve.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <param name="async">True if the call should be asynchronous.</param>
/// <returns>An <see cref="AttestationResponse{String}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// This API returns the underlying attestation policy object stored in the attestation service for this <paramref name="attestationType"/>.
///
/// The actual service response to the API is an RFC 7519 JSON Web Token (see https://tools.ietf.org/html/rfc7519"). This token can be retrieved from <see cref="AttestationResponse{T}.Token"/>.
/// For the GetPolicy API, the body of the <see cref="AttestationResponse{T}.Token"/> is a <see cref="StoredAttestationPolicy"/> object, NOT a string.
/// </remarks>
private async Task<AttestationResponse<string>> GetPolicyInternalAsync(AttestationType attestationType, bool async, CancellationToken cancellationToken = default)
{
    // Shared sync/async implementation: the `async` flag selects whether the REST
    // call awaits or blocks, so both public overloads share one code path.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(GetPolicy)}");
    scope.Start();
    try
    {
        Response<PolicyResponse> result;
        if (async)
        {
            result = await _policyClient.GetAsync(attestationType, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            result = _policyClient.Get(attestationType, cancellationToken);
        }
        // The service response is an RFC 7519 JSON Web Token wrapping the policy.
        var token = AttestationToken.Deserialize(result.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = await GetSignersAsync(async, cancellationToken).ConfigureAwait(false);
            if (!await token.ValidateTokenInternal(_options.TokenOptions, signers, async, cancellationToken).ConfigureAwait(false))
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, token);
            }
        }
        // Double unwrap: the token body is a PolicyModificationResult whose
        // PolicyToken carries the StoredAttestationPolicy; surface its text to callers.
        PolicyModificationResult policyResult = token.GetBody<PolicyModificationResult>();
        var response = new AttestationResponse<StoredAttestationPolicy>(result.GetRawResponse(), policyResult.PolicyToken);
        return new AttestationResponse<string>(result.GetRawResponse(), token, response.Value.AttestationPolicy);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Sets the attestation policy for the specified <see cref="AttestationType"/>.
/// </summary>
/// <param name="attestationType"><see cref="AttestationType"/> whose policy should be set.</param>
/// <param name="policyToSet">Specifies the attestation policy to set.</param>
/// <param name="signingKey">If provided, specifies the signing key used to sign the request to the attestation service.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyResult}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// If the <paramref name="signingKey"/> parameter is not provided, then the policy document sent to the
/// attestation service will be unsigned. Unsigned attestation policies are only allowed when the attestation instance is running in AAD mode - if the
/// attestation instance is running in Isolated mode, then a signing key and signing certificate MUST be provided to ensure that the caller of the API is authorized to change policy.
/// The <see cref="AttestationTokenSigningKey.Certificate"/> field MUST be one of the certificates returned by the <see cref="GetPolicyManagementCertificates(CancellationToken)"/> API.
/// <para/>
/// Clients need to be able to verify that the attestation policy document was not modified before the policy document was received by the attestation service's enclave.
/// There are two properties provided in the [PolicyResult][attestation_policy_result] that can be used to verify that the service received the policy document:
/// <list type="bullet">
/// <item>
/// <description><see cref="PolicyModificationResult.PolicySigner"/> - if the <see cref="SetPolicy(AttestationType, string, AttestationTokenSigningKey, CancellationToken)"/> call included a signing certificate, this will be the certificate provided at the time of the `SetPolicy` call. If no policy signer was set, this will be null. </description>
/// </item>
/// <item>
/// <description><see cref="PolicyModificationResult.PolicyTokenHash"/> - this is the hash of the [JSON Web Token][json_web_token] sent to the service</description>
/// </item>
/// </list>
/// To verify the hash, clients can generate an attestation token and verify the hash generated from that token:
/// <code snippet="Snippet:VerifySigningHash" language="csharp">
/// // The SetPolicyAsync API will create an AttestationToken signed with the TokenSigningKey to transmit the policy.
/// // To verify that the policy specified by the caller was received by the service inside the enclave, we
/// // verify that the hash of the policy document returned from the Attestation Service matches the hash
/// // of an attestation token created locally.
/// TokenSigningKey signingKey = new TokenSigningKey(<Customer provided signing key>, <Customer provided certificate>)
/// var policySetToken = new AttestationToken(
/// BinaryData.FromObjectAsJson(new StoredAttestationPolicy { AttestationPolicy = attestationPolicy }),
/// signingKey);
///
/// using var shaHasher = SHA256.Create();
/// byte[] attestationPolicyHash = shaHasher.ComputeHash(Encoding.UTF8.GetBytes(policySetToken.Serialize()));
///
/// Debug.Assert(attestationPolicyHash.SequenceEqual(setResult.Value.PolicyTokenHash.ToArray()));
/// </code>
///
/// If the signing key and certificate are not provided, then the SetPolicyAsync API will create an unsecured attestation token
/// wrapping the attestation policy. To validate the <see cref="PolicyModificationResult.PolicyTokenHash"/> return value, a developer
/// can create their own <see cref="AttestationToken"/> and create the hash of that.
/// <code>
/// using var shaHasher = SHA256Managed.Create();
/// var policySetToken = new UnsecuredAttestationToken(new StoredAttestationPolicy { AttestationPolicy = disallowDebugging });
/// disallowDebuggingHash = shaHasher.ComputeHash(Encoding.UTF8.GetBytes(policySetToken.Serialize()));
/// </code>
/// </remarks>
public virtual AttestationResponse<PolicyModificationResult> SetPolicy(
    AttestationType attestationType,
    string policyToSet,
    AttestationTokenSigningKey signingKey = default,
    CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrWhiteSpace(policyToSet, nameof(policyToSet));
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(SetPolicy)}");
    scope.Start();
    try
    {
        // Wrap the policy text in a (possibly signed) attestation token for transmission.
        AttestationToken tokenToSet = new AttestationToken(
            BinaryData.FromObjectAsJson(new StoredAttestationPolicy { AttestationPolicy = policyToSet, }),
            signingKey);
        var result = _policyClient.Set(attestationType, tokenToSet.Serialize(), cancellationToken);
        // Pass ClientDiagnostics to Deserialize and use the
        // ValidateTokenInternal(...).EnsureCompleted() sync pattern, matching the
        // other synchronous methods in this client (ResetPolicy,
        // AddPolicyManagementCertificate) and the async SetPolicyAsync overload.
        var token = AttestationToken.Deserialize(result.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = GetSignersAsync(false, cancellationToken).EnsureCompleted();
            if (!token.ValidateTokenInternal(_options.TokenOptions, signers, false, cancellationToken).EnsureCompleted())
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, token);
            }
        }
        return new AttestationResponse<PolicyModificationResult>(result.GetRawResponse(), token);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Sets the attestation policy for the specified <see cref="AttestationType"/>.
/// </summary>
/// <param name="attestationType"><see cref="AttestationType"/> whose policy should be set.</param>
/// <param name="policyToSet">Specifies the attestation policy to set.</param>
/// <param name="signingKey">If provided, specifies the signing key used to sign the request to the attestation service.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyResult}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// If the <paramref name="signingKey"/> parameter is not provided, then the policy document sent to the
/// attestation service will be unsigned. Unsigned attestation policies are only allowed when the attestation instance is running in AAD mode - if the
/// attestation instance is running in Isolated mode, then a signing key and signing certificate MUST be provided to ensure that the caller of the API is authorized to change policy.
/// The <see cref="AttestationTokenSigningKey.Certificate"/> field MUST be one of the certificates returned by the <see cref="GetPolicyManagementCertificates(CancellationToken)"/> API.
/// <para/>
/// Clients need to be able to verify that the attestation policy document was not modified before the policy document was received by the attestation service's enclave.
/// There are two properties provided in the [PolicyResult][attestation_policy_result] that can be used to verify that the service received the policy document:
/// <list type="bullet">
/// <item>
/// <description><see cref="PolicyModificationResult.PolicySigner"/> - if the <see cref="SetPolicy(AttestationType, string, AttestationTokenSigningKey, CancellationToken)"/> call included a signing certificate, this will be the certificate provided at the time of the `SetPolicy` call. If no policy signer was set, this will be null. </description>
/// </item>
/// <item>
/// <description><see cref="PolicyModificationResult.PolicyTokenHash"/> - this is the hash of the [JSON Web Token][json_web_token] sent to the service</description>
/// </item>
/// </list>
/// To verify the hash, clients can generate an attestation token and verify the hash generated from that token:
/// <code snippet="Snippet:VerifySigningHash" language="csharp">
/// // The SetPolicyAsync API will create an AttestationToken signed with the TokenSigningKey to transmit the policy.
/// // To verify that the policy specified by the caller was received by the service inside the enclave, we
/// // verify that the hash of the policy document returned from the Attestation Service matches the hash
/// // of an attestation token created locally.
/// TokenSigningKey signingKey = new TokenSigningKey(<Customer provided signing key>, <Customer provided certificate>)
/// var policySetToken = new AttestationToken(
/// BinaryData.FromObjectAsJson(new StoredAttestationPolicy { AttestationPolicy = attestationPolicy }),
/// signingKey);
///
/// using var shaHasher = SHA256.Create();
/// byte[] attestationPolicyHash = shaHasher.ComputeHash(Encoding.UTF8.GetBytes(policySetToken.Serialize()));
///
/// Debug.Assert(attestationPolicyHash.SequenceEqual(setResult.Value.PolicyTokenHash.ToArray()));
/// </code>
///
/// If the signing key and certificate are not provided, then the SetPolicyAsync API will create an unsecured attestation token
/// wrapping the attestation policy. To validate the <see cref="PolicyModificationResult.PolicyTokenHash"/> return value, a developer
/// can create their own <see cref="AttestationToken"/> and create the hash of that.
/// <code>
/// using var shaHasher = SHA256Managed.Create();
/// var policySetToken = new AttestationToken(new StoredAttestationPolicy { AttestationPolicy = disallowDebugging });
/// disallowDebuggingHash = shaHasher.ComputeHash(Encoding.UTF8.GetBytes(policySetToken.ToString()));
/// </code>
/// </remarks>
public virtual async Task<AttestationResponse<PolicyModificationResult>> SetPolicyAsync(
    AttestationType attestationType,
    string policyToSet,
    AttestationTokenSigningKey signingKey = default,
    CancellationToken cancellationToken = default)
{
    // Use the shared throw helper so argument validation matches the synchronous
    // SetPolicy overload (which uses Argument.AssertNotNullOrWhiteSpace) instead
    // of a hand-rolled null/empty check.
    Argument.AssertNotNullOrWhiteSpace(policyToSet, nameof(policyToSet));
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(SetPolicy)}");
    scope.Start();
    try
    {
        // Wrap the policy text in a (possibly signed) attestation token for transmission.
        AttestationToken tokenToSet = new AttestationToken(BinaryData.FromObjectAsJson(new StoredAttestationPolicy { AttestationPolicy = policyToSet, }), signingKey);
        var result = await _policyClient.SetAsync(attestationType, tokenToSet.Serialize(), cancellationToken).ConfigureAwait(false);
        var token = AttestationToken.Deserialize(result.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = await GetSignersAsync(true, cancellationToken).ConfigureAwait(false);
            if (!await token.ValidateTokenAsync(_options.TokenOptions, signers, cancellationToken).ConfigureAwait(false))
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, token);
            }
        }
        return new AttestationResponse<PolicyModificationResult>(result.GetRawResponse(), token);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Resets the policy for the specified <see cref="AttestationType"/> to the default value.
/// </summary>
/// <param name="attestationType"><see cref="AttestationType"/> whose policy should be reset.</param>
/// <param name="signingKey">If provided, specifies the signing key and certificate used to sign the request to the attestation service.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyResult}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// If the <paramref name="signingKey"/> parameter is not provided, then the policy document sent to the
/// attestation service will be unsigned. Unsigned attestation policies are only allowed when the attestation instance is running in AAD mode - if the
/// attestation instance is running in Isolated mode, then a signing key and signing certificate MUST be provided to ensure that the caller of the API is authorized to change policy.
/// The <see cref="AttestationTokenSigningKey.Certificate"/> field MUST be one of the certificates returned by the <see cref="GetPolicyManagementCertificates(CancellationToken)"/> API.
/// <para/>
/// </remarks>
///
public virtual AttestationResponse<PolicyModificationResult> ResetPolicy(
    AttestationType attestationType,
    AttestationTokenSigningKey signingKey = default,
    CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(ResetPolicy)}");
    scope.Start();
    try
    {
        // A reset request carries an empty-bodied token, optionally signed.
        var resetToken = new AttestationToken(null, signingKey);
        var serviceResult = _policyClient.Reset(attestationType, resetToken.Serialize(), cancellationToken);
        var responseToken = AttestationToken.Deserialize(serviceResult.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = GetSignersAsync(false, cancellationToken).EnsureCompleted();
            var isValid = responseToken.ValidateTokenInternal(_options.TokenOptions, signers, false, cancellationToken).EnsureCompleted();
            if (!isValid)
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, responseToken);
            }
        }
        return new AttestationResponse<PolicyModificationResult>(serviceResult.GetRawResponse(), responseToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Resets the policy for the specified <see cref="AttestationType"/> to the default value.
/// </summary>
/// <param name="attestationType"><see cref="AttestationType"/> whose policy should be reset.</param>
/// <param name="signingKey">If provided, specifies the signing key used to sign the request to the attestation service.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyResult}"/> with the policy for the specified attestation type.</returns>
/// <remarks>
/// If the <paramref name="signingKey"/> parameter is not provided, then the policy document sent to the
/// attestation service will be unsigned. Unsigned attestation policies are only allowed when the attestation instance is running in AAD mode - if the
/// attestation instance is running in Isolated mode, then a signing key and signing certificate MUST be provided to ensure that the caller of the API is authorized to change policy.
/// The <see cref="AttestationTokenSigningKey.Certificate"/> parameter MUST be one of the certificates returned by the <see cref="GetPolicyManagementCertificates(CancellationToken)"/> API.
/// <para/>
/// </remarks>
public virtual async Task<AttestationResponse<PolicyModificationResult>> ResetPolicyAsync(
    AttestationType attestationType,
    AttestationTokenSigningKey signingKey = default,
    CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(ResetPolicy)}");
    scope.Start();
    try
    {
        // A reset request carries an empty-bodied token, optionally signed.
        var resetToken = new AttestationToken(null, signingKey);
        var serviceResult = await _policyClient.ResetAsync(attestationType, resetToken.Serialize(), cancellationToken).ConfigureAwait(false);
        var responseToken = AttestationToken.Deserialize(serviceResult.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = await GetSignersAsync(true, cancellationToken).ConfigureAwait(false);
            var isValid = await responseToken.ValidateTokenAsync(_options.TokenOptions, signers, cancellationToken).ConfigureAwait(false);
            if (!isValid)
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, responseToken);
            }
        }
        return new AttestationResponse<PolicyModificationResult>(serviceResult.GetRawResponse(), responseToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Returns the set of policy management certificates currently configured for the attestation service instance.
///
/// If the service instance is running in AAD mode, this list will always be empty.
/// </summary>
/// <param name="cancellationToken">Cancellation token used to cancel the operation.</param>
/// <returns>A set of <see cref="X509Certificate2"/> objects representing the set of root certificates for policy management.</returns>
public virtual AttestationResponse<IReadOnlyList<X509Certificate2>> GetPolicyManagementCertificates(CancellationToken cancellationToken = default)
{
    // Synchronous wrapper over the shared sync/async implementation.
    return GetPolicyManagementCertificatesInternalAsync(false, cancellationToken).EnsureCompleted();
}
/// <summary>
/// Returns the set of policy management certificates currently configured for the attestation service instance.
///
/// If the service instance is running in AAD mode, this list will always be empty.
/// </summary>
/// <param name="cancellationToken">Cancellation token used to cancel the operation.</param>
/// <returns>A set of <see cref="X509Certificate2"/> objects representing the set of root certificates for policy management.</returns>
public virtual async Task<AttestationResponse<IReadOnlyList<X509Certificate2>>> GetPolicyManagementCertificatesAsync(CancellationToken cancellationToken = default)
{
    // Asynchronous wrapper over the shared sync/async implementation.
    return await GetPolicyManagementCertificatesInternalAsync(true, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Returns the set of policy management certificates currently configured for the attestation service instance.
///
/// If the service instance is running in AAD mode, this list will always be empty.
/// </summary>
/// <param name="cancellationToken">Cancellation token used to cancel the operation.</param>
/// <param name="async">True if this request should be processed asynchronously.</param>
/// <returns>A set of <see cref="X509Certificate2"/> objects representing the set of root certificates for policy management.</returns>
private async Task<AttestationResponse<IReadOnlyList<X509Certificate2>>> GetPolicyManagementCertificatesInternalAsync(bool async, CancellationToken cancellationToken = default)
{
    // Shared sync/async implementation: `async` selects whether the REST call
    // awaits or blocks, so both public overloads share one code path.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(GetPolicyManagementCertificates)}");
    scope.Start();
    try
    {
        Response<PolicyCertificatesResponse> result;
        if (async)
        {
            result = await _policyManagementClient.GetAsync(cancellationToken).ConfigureAwait(false);
        }
        else
        {
            result = _policyManagementClient.Get(cancellationToken);
        }
        // Pass ClientDiagnostics to Deserialize, consistent with every other call
        // site in this client (GetPolicyInternalAsync, SetPolicyAsync, ResetPolicy).
        var token = AttestationToken.Deserialize(result.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = await GetSignersAsync(async, cancellationToken).ConfigureAwait(false);
            if (!await token.ValidateTokenInternal(_options.TokenOptions, signers, async, cancellationToken).ConfigureAwait(false))
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, token);
            }
        }
        // Each key in the token body carries its certificate base64-encoded in X5C[0].
        List<X509Certificate2> certificates = new List<X509Certificate2>();
        foreach (var cert in token.GetBody<PolicyCertificatesResult>().InternalPolicyCertificates.Keys)
        {
            certificates.Add(new X509Certificate2(Convert.FromBase64String(cert.X5C[0])));
        }
        return new AttestationResponse<IReadOnlyList<X509Certificate2>>(result.GetRawResponse(), token, certificates.AsReadOnly());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Adds the specified new signing certificate to the set of policy management certificates.
/// </summary>
/// <param name="newSigningCertificate">The new certificate to add.</param>
/// <param name="existingSigningKey">An existing key corresponding to the existing certificate.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyCertificatesModificationResult}"/> with the policy for the specified attestation type.</returns>
public virtual AttestationResponse<PolicyCertificatesModificationResult> AddPolicyManagementCertificate(
    X509Certificate2 newSigningCertificate,
    AttestationTokenSigningKey existingSigningKey,
    CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(existingSigningKey, nameof(existingSigningKey));
    Argument.AssertNotNull(newSigningCertificate, nameof(newSigningCertificate));
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(AddPolicyManagementCertificate)}");
    scope.Start();
    try
    {
        // The modification request must itself be a token signed with the existing key.
        var modificationToken = new AttestationToken(
            BinaryData.FromObjectAsJson(new PolicyCertificateModification(newSigningCertificate)),
            existingSigningKey);
        var serviceResult = _policyManagementClient.Add(modificationToken.Serialize(), cancellationToken);
        var responseToken = AttestationToken.Deserialize(serviceResult.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var knownSigners = GetSignersAsync(false, cancellationToken).EnsureCompleted();
            bool tokenIsValid = responseToken.ValidateTokenInternal(_options.TokenOptions, knownSigners, false, cancellationToken).EnsureCompleted();
            if (!tokenIsValid)
            {
                AttestationTokenValidationFailedException.ThrowFailure(knownSigners, responseToken);
            }
        }
        return new AttestationResponse<PolicyCertificatesModificationResult>(serviceResult.GetRawResponse(), responseToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Adds the specified new signing certificate to the set of policy management certificates.
/// </summary>
/// <param name="newSigningCertificate">The new certificate to add.</param>
/// <param name="existingSigningKey">An existing key corresponding to the existing certificate.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyCertificatesModificationResult}"/> with the policy for the specified attestation type.</returns>
public virtual async Task<AttestationResponse<PolicyCertificatesModificationResult>> AddPolicyManagementCertificateAsync(
    X509Certificate2 newSigningCertificate,
    AttestationTokenSigningKey existingSigningKey,
    CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(existingSigningKey, nameof(existingSigningKey));
    Argument.AssertNotNull(newSigningCertificate, nameof(newSigningCertificate));
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(AddPolicyManagementCertificate)}");
    scope.Start();
    try
    {
        // The modification request must itself be a token signed with the existing key.
        var modificationToken = new AttestationToken(
            BinaryData.FromObjectAsJson(new PolicyCertificateModification(newSigningCertificate)),
            existingSigningKey);
        var serviceResult = await _policyManagementClient.AddAsync(modificationToken.Serialize(), cancellationToken).ConfigureAwait(false);
        var responseToken = AttestationToken.Deserialize(serviceResult.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var knownSigners = await GetSignersAsync(true, cancellationToken).ConfigureAwait(false);
            bool tokenIsValid = await responseToken.ValidateTokenInternal(_options.TokenOptions, knownSigners, true, cancellationToken).ConfigureAwait(false);
            if (!tokenIsValid)
            {
                AttestationTokenValidationFailedException.ThrowFailure(knownSigners, responseToken);
            }
        }
        return new AttestationResponse<PolicyCertificatesModificationResult>(serviceResult.GetRawResponse(), responseToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Removes the specified certificate from the set of policy management certificates.
/// </summary>
/// <param name="certificateToRemove">The certificate to remove.</param>
/// <param name="existingSigningKey">An existing key corresponding to the existing certificate.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyCertificatesModificationResult}"/> with the policy for the specified attestation type.</returns>
public virtual AttestationResponse<PolicyCertificatesModificationResult> RemovePolicyManagementCertificate(
    X509Certificate2 certificateToRemove,
    AttestationTokenSigningKey existingSigningKey,
    CancellationToken cancellationToken = default)
{
    // Validate arguments up front, for consistency with AddPolicyManagementCertificate,
    // so callers get ArgumentNullException instead of a NullReferenceException below.
    Argument.AssertNotNull(existingSigningKey, nameof(existingSigningKey));
    Argument.AssertNotNull(certificateToRemove, nameof(certificateToRemove));
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(RemovePolicyManagementCertificate)}");
    scope.Start();
    try
    {
        // The removal request must itself be a token signed with the existing key.
        var tokenToRemove = new AttestationToken(
            BinaryData.FromObjectAsJson(new PolicyCertificateModification(certificateToRemove)),
            existingSigningKey);
        var result = _policyManagementClient.Remove(tokenToRemove.Serialize(), cancellationToken);
        var token = AttestationToken.Deserialize(result.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = GetSignersAsync(false, cancellationToken).EnsureCompleted();
            if (!token.ValidateTokenInternal(_options.TokenOptions, signers, false, cancellationToken).EnsureCompleted())
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, token);
            }
        }
        return new AttestationResponse<PolicyCertificatesModificationResult>(result.GetRawResponse(), token);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Removes one of the attestation policy management certificates.
/// </summary>
/// <param name="certificateToRemove">The certificate to remove.</param>
/// <param name="existingSigningKey">An existing key corresponding to the existing certificate.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>An <see cref="AttestationResponse{PolicyCertificatesModificationResult}"/> with the policy for the specified attestation type.</returns>
public virtual async Task<AttestationResponse<PolicyCertificatesModificationResult>> RemovePolicyManagementCertificateAsync(
    X509Certificate2 certificateToRemove,
    AttestationTokenSigningKey existingSigningKey,
    CancellationToken cancellationToken = default)
{
    // Validate arguments up front, for consistency with AddPolicyManagementCertificateAsync,
    // so callers get ArgumentNullException instead of a NullReferenceException below.
    Argument.AssertNotNull(existingSigningKey, nameof(existingSigningKey));
    Argument.AssertNotNull(certificateToRemove, nameof(certificateToRemove));
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(AttestationAdministrationClient)}.{nameof(RemovePolicyManagementCertificate)}");
    scope.Start();
    try
    {
        // The removal request must itself be a token signed with the existing key.
        var tokenToRemove = new AttestationToken(
            BinaryData.FromObjectAsJson(new PolicyCertificateModification(certificateToRemove)),
            existingSigningKey);
        var result = await _policyManagementClient.RemoveAsync(tokenToRemove.Serialize(), cancellationToken).ConfigureAwait(false);
        var token = AttestationToken.Deserialize(result.Value.Token, _clientDiagnostics);
        if (_options.TokenOptions.ValidateToken)
        {
            var signers = await GetSignersAsync(true, cancellationToken).ConfigureAwait(false);
            // Use ValidateTokenInternal for consistency with every sibling operation
            // (Add/Remove sync and async all validate through the internal helper).
            if (!await token.ValidateTokenInternal(_options.TokenOptions, signers, true, cancellationToken).ConfigureAwait(false))
            {
                AttestationTokenValidationFailedException.ThrowFailure(signers, token);
            }
        }
        return new AttestationResponse<PolicyCertificatesModificationResult>(result.GetRawResponse(), token);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Lazily retrieves and caches the attestation service's signing certificates.
/// The <c>_statelock</c> semaphore guards the <c>_signers</c> cache so that concurrent
/// callers do not each issue a GetSigningCertificates request.
/// </summary>
/// <param name="async">True to use the asynchronous service call and semaphore wait; false for the blocking path.</param>
/// <param name="cancellationToken">Cancellation token used to cancel this operation.</param>
/// <returns>The cached (or freshly fetched) set of attestation signers.</returns>
private async Task<IReadOnlyList<AttestationSigner>> GetSignersAsync(bool async, CancellationToken cancellationToken)
{
    if (async)
    {
        await _statelock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            if (_signers == null)
            {
                _signers = (await _attestationClient.GetSigningCertificatesAsync(cancellationToken).ConfigureAwait(false)).Value;
            }
            return _signers;
        }
        finally
        {
            // Always release the semaphore, even if the service call throws.
            _statelock.Release();
        }
    }
    else
    {
        // Synchronous path: identical caching logic using the blocking Wait.
        _statelock.Wait(cancellationToken);
        try
        {
            if (_signers == null)
            {
                _signers = _attestationClient.GetSigningCertificates(cancellationToken).Value;
            }
            return _signers;
        }
        finally
        {
            _statelock.Release();
        }
    }
}
}
}
| |
#define SQLITE_ASCII
#define SQLITE_DISABLE_LFS
#define SQLITE_ENABLE_OVERSIZE_CELL_CHECK
#define SQLITE_MUTEX_OMIT
#define SQLITE_OMIT_AUTHORIZATION
#define SQLITE_OMIT_DEPRECATED
#define SQLITE_OMIT_GET_TABLE
#define SQLITE_OMIT_INCRBLOB
#define SQLITE_OMIT_LOOKASIDE
#define SQLITE_OMIT_SHARED_CACHE
#define SQLITE_OMIT_UTF16
#define SQLITE_OMIT_WAL
#define SQLITE_OS_WIN
#define SQLITE_SYSTEM_MALLOC
#define VDBE_PROFILE_OFF
#define WINDOWS_MOBILE
#define NDEBUG
#define _MSC_VER
#define YYFALLBACK
using System.Diagnostics;
using va_list = System.Object;
namespace Community.CsharpSqlite
{
// C# port of SQLite's os_common.h: tracing, timing, and test-only I/O fault-injection
// helpers shared by the platform-specific OS layer. The #if/#else regions below mirror
// the original C preprocessor structure exactly; several brace pairs deliberately span
// preprocessor branches (e.g. SimulateDiskfullError), so the code must not be reformatted
// per-branch.
public partial class Sqlite3
{
    /*
    ** 2004 May 22
    **
    ** The author disclaims copyright to this source code. In place of
    ** a legal notice, here is a blessing:
    **
    ** May you do good and not evil.
    ** May you find forgiveness for yourself and forgive others.
    ** May you share freely, never taking more than you give.
    **
    ******************************************************************************
    **
    ** This file contains macros and a little bit of code that is common to
    ** all of the platform-specific files (os_*.c) and is #included into those
    ** files.
    **
    ** This file should be #included by the os_*.c files only. It is not a
    ** general purpose header file.
    *************************************************************************
    ** Included in SQLite3 port to C#-SQLite; 2008 Noah B Hart
    ** C#-SQLite is an independent reimplementation of the SQLite software library
    **
    ** SQLITE_SOURCE_ID: 2010-08-23 18:52:01 42537b60566f288167f1b5864a5435986838e3a3
    **
    *************************************************************************
    */
    //#if !_OS_COMMON_H_
    //#define _OS_COMMON_H_
    /*
    ** At least two bugs have slipped in because we changed the MEMORY_DEBUG
    ** macro to SQLITE_DEBUG and some older makefiles have not yet made the
    ** switch. The following code should catch this problem at compile-time.
    */
#if MEMORY_DEBUG
    //# error "The MEMORY_DEBUG macro is obsolete. Use SQLITE_DEBUG instead."
#endif
#if SQLITE_DEBUG || TRACE
    // Runtime switch: OSTRACE only prints while this flag is set.
    static bool sqlite3OsTrace = false;
    //#define OSTRACE(X) if( sqlite3OSTrace ) sqlite3DebugPrintf X
    static void OSTRACE( string X, params va_list[] ap )
    {
      if ( sqlite3OsTrace )
        sqlite3DebugPrintf( X, ap );
    }
#else
    // No-op replacement for the OSTRACE macro when tracing is compiled out.
    //#define OSTRACE(X)
    static void OSTRACE( string X, params object[] ap) { }
#endif
    /*
    ** Macros for performance tracing. Normally turned off. Only works
    ** on i486 hardware.
    */
#if SQLITE_PERFORMANCE_TRACE
    /*
    ** hwtime.h contains inline assembler code for implementing
    ** high-performance timing routines.
    */
    //#include "hwtime.h"
    static sqlite_u3264 g_start;
    static sqlite_u3264 g_elapsed;
    //#define TIMER_START g_start=sqlite3Hwtime()
    //#define TIMER_END g_elapsed=sqlite3Hwtime()-g_start
    //#define TIMER_ELAPSED g_elapsed
#else
    // Timing disabled: the TIMER_* "macros" collapse to constant zero.
    const int TIMER_START = 0; //#define TIMER_START
    const int TIMER_END = 0; //#define TIMER_END
    const int TIMER_ELAPSED = 0; //#define TIMER_ELAPSED ((sqlite_u3264)0)
#endif
    /*
    ** If we compile with the SQLITE_TEST macro set, then the following block
    ** of code will give us the ability to simulate a disk I/O error. This
    ** is used for testing the I/O recovery logic.
    */
#if SQLITE_TEST
#if !TCLSH
    static int sqlite3_io_error_hit = 0; /* Total number of I/O Errors */
    static int sqlite3_io_error_hardhit = 0; /* Number of non-benign errors */
    static int sqlite3_io_error_pending = 0; /* Count down to first I/O error */
    static int sqlite3_io_error_persist = 0; /* True if I/O errors persist */
    static int sqlite3_io_error_benign = 0; /* True if errors are benign */
    static int sqlite3_diskfull_pending = 0;
    static int sqlite3_diskfull = 0;
#else
    // Under TCLSH the counters are TCL-visible variables instead of plain ints.
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_io_error_hit = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_io_error_hit" );
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_io_error_hardhit = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_io_error_hardhit" );
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_io_error_pending = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_io_error_pending" );
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_io_error_persist = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_io_error_persist" );
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_io_error_benign = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_io_error_benign" );
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_diskfull_pending = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_diskfull_pending" );
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_diskfull = new tcl.lang.Var.SQLITE3_GETSET( "sqlite_diskfull" );
#endif
    // Marks subsequent simulated I/O errors as benign (X != 0) or not (X == 0).
    static void SimulateIOErrorBenign( int X )
    {
#if !TCLSH
      sqlite3_io_error_benign = ( X );
#else
      sqlite3_io_error_benign.iValue = ( X );
#endif
    }
    //#define SimulateIOError(CODE) \
    // if( (sqlite3_io_error_persist && sqlite3_io_error_hit) \
    // || sqlite3_io_error_pending-- == 1 ) \
    // { local_ioerr(); CODE; }
    // Returns true when a simulated I/O error should be injected at this call site;
    // callers run their error-path CODE when this returns true.
    static bool SimulateIOError()
    {
#if !TCLSH
      if ( ( sqlite3_io_error_persist != 0 && sqlite3_io_error_hit != 0 )
      || sqlite3_io_error_pending-- == 1 )
#else
      if ( ( sqlite3_io_error_persist.iValue != 0 && sqlite3_io_error_hit.iValue != 0 )
      || sqlite3_io_error_pending.iValue-- == 1 )
#endif
      {
        local_ioerr();
        return true;
      }
      return false;
    }
    // Records that a simulated I/O error fired, bumping the hard-hit counter
    // unless errors are currently marked benign.
    static void local_ioerr()
    {
#if TRACE
      IOTRACE( "IOERR\n" );
#endif
#if !TCLSH
      sqlite3_io_error_hit++;
      if ( sqlite3_io_error_benign == 0 )
        sqlite3_io_error_hardhit++;
#else
      sqlite3_io_error_hit.iValue++;
      if ( sqlite3_io_error_benign.iValue == 0 )
        sqlite3_io_error_hardhit.iValue++;
#endif
    }
    //#define SimulateDiskfullError(CODE) \
    // if( sqlite3_diskfull_pending ){ \
    // if( sqlite3_diskfull_pending == 1 ){ \
    // local_ioerr(); \
    // sqlite3_diskfull = 1; \
    // sqlite3_io_error_hit = 1; \
    // CODE; \
    // }else{ \
    // sqlite3_diskfull_pending--; \
    // } \
    // }
    // Returns true when a simulated disk-full error should be injected.
    // NOTE: the opening braces live inside the #if/#else branches and are closed
    // after the #endif — the preprocessor pairing is intentional; do not reformat.
    static bool SimulateDiskfullError()
    {
#if !TCLSH
      if ( sqlite3_diskfull_pending != 0 )
      {
        if ( sqlite3_diskfull_pending == 1 )
        {
#else
      if ( sqlite3_diskfull_pending.iValue != 0 )
      {
        if ( sqlite3_diskfull_pending.iValue == 1 )
        {
#endif
          local_ioerr();
#if !TCLSH
          sqlite3_diskfull = 1;
          sqlite3_io_error_hit = 1;
#else
          sqlite3_diskfull.iValue = 1;
          sqlite3_io_error_hit.iValue = 1;
#endif
          return true;
        }
        else
        {
#if !TCLSH
          sqlite3_diskfull_pending--;
#else
          sqlite3_diskfull_pending.iValue--;
#endif
        }
      }
      return false;
    }
#else
    // Non-test builds: fault injection compiles away to no-ops.
    static bool SimulateIOError() { return false; }
    //#define SimulateIOErrorBenign(X)
    static void SimulateIOErrorBenign( int x ) { }
    //#define SimulateIOError(A)
    //#define SimulateDiskfullError(A)
#endif
    /*
    ** When testing, keep a count of the number of open files.
    */
#if SQLITE_TEST
#if !TCLSH
    static int sqlite3_open_file_count = 0;
#else
    static tcl.lang.Var.SQLITE3_GETSET sqlite3_open_file_count = new tcl.lang.Var.SQLITE3_GETSET( "sqlite3_open_file_count" );
#endif
    // Adjusts the open-file counter by X (pass +1 on open, -1 on close).
    static void OpenCounter( int X )
    {
#if !TCLSH
      sqlite3_open_file_count += ( X );
#else
      sqlite3_open_file_count.iValue += ( X );
#endif
    }
#else
    //#define OpenCounter(X)
#endif
    //#endif //* !_OS_COMMON_H_) */
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Data;
using gView.Framework.Data;
namespace gView.DataSources.Shape
{
/// <summary>
/// Represents the fixed 32-byte header of a dBASE (DBF) table file and can write a
/// fresh header for a new table.
/// NOTE(review): the class name looks like a typo for "Header"; kept for compatibility.
/// </summary>
internal class DBFFileHader
{
    /// <summary>Reads all header fields from the current position of the reader.</summary>
    public DBFFileHader(BinaryReader br)
    {
        version = br.ReadByte();
        YY = br.ReadByte();   // date of last update: year (offset from 1900)
        MM = br.ReadByte();   // date of last update: month
        DD = br.ReadByte();   // date of last update: day
        recordCount = (uint)br.ReadInt32();
        headerLength = br.ReadInt16();
        LegthOfEachRecord = br.ReadInt16();
        Reserved1 = br.ReadInt16();
        IncompleteTransac = br.ReadByte();
        EncryptionFlag = br.ReadByte();
        FreeRecordThread = (uint)br.ReadInt32();
        Reserved2 = br.ReadInt32();
        Reserved3 = br.ReadInt32();
        MDX = br.ReadByte();
        LanguageDriver = br.ReadByte();
        Reserved4 = br.ReadInt16();
    }
    /// <summary>
    /// Writes a new DBF header (record count 0), one 32-byte descriptor per field,
    /// and the 0x0D terminator byte. Shape fields are skipped (they live in the .shp).
    /// </summary>
    /// <param name="bw">Destination writer positioned at the start of the file.</param>
    /// <param name="fields">Field definitions for the new table.</param>
    /// <returns>False when either argument is null; true otherwise.</returns>
    public static bool Write(BinaryWriter bw, Fields fields)
    {
        if (bw == null || fields == null) return false;
        // c = number of field descriptors written; rl = record length in bytes,
        // starting at 1 for the leading "deleted" flag byte of each record.
        int c = 0, rl = 1; // deleted Flag
        foreach (IField field in fields.ToEnumerable())
        {
            switch (field.type)
            {
                case FieldType.biginteger:
                    c++; rl += 18;
                    break;
                case FieldType.boolean:
                    c++; rl += 1;
                    break;
                case FieldType.character:
                    c++; rl += 1;
                    break;
                case FieldType.Date:
                    c++; rl += 8;
                    break;
                case FieldType.Double:
                    c++; rl += 31;
                    break;
                case FieldType.Float:
                    c++; rl += 11;
                    break;
                case FieldType.ID:
                    c++; rl += 9;
                    break;
                case FieldType.integer:
                    c++; rl += 9;
                    break;
                case FieldType.Shape:
                    // Geometry is not stored in the DBF; no descriptor, no record bytes.
                    break;
                case FieldType.smallinteger:
                    c++; rl += 6;
                    break;
                case FieldType.String:
                    // String columns are capped at the DBF maximum of 255 bytes.
                    c++; rl += (field.size > 255) ? 255 : field.size;
                    break;
                default:
                    c++; rl += (field.size <= 0) ? 255 : field.size;
                    break;
            }
        }
        // Header length = 32-byte prologue + 32 bytes per descriptor + 1 terminator byte.
        short hLength = (short)(32 * c + 33);
        bw.Write((byte)3); // Version
        bw.Write((byte)106); // YY
        bw.Write((byte)6); // MM
        bw.Write((byte)12); // DD
        bw.Write((int)0); // recordCount
        bw.Write(hLength); // headerLength
        bw.Write((short)rl); // Length of each record
        bw.Write((short)0); // Reserved1
        bw.Write((byte)0); // IncompleteTransac
        bw.Write((byte)0); // EncryptionFlag
        bw.Write((int)0); // FreeRecordThread
        bw.Write((int)0); // Reserved2
        bw.Write((int)0); // Reserved3
        bw.Write((byte)0); // MDX
        bw.Write((byte)CodePage.DOS_Multilingual); // LanguageDriver Codepage 850
        bw.Write((short)0); // Reserved4
        foreach (IField field in fields.ToEnumerable())
        {
            FieldDescriptor.Write(bw, field);
        }
        bw.Write((byte)13); // Terminator 0x0D
        return true;
    }
    /// <summary>
    /// Number of field descriptors implied by the header length
    /// (inverse of the 32*c+33 formula used in Write).
    /// </summary>
    public int FieldsCount
    {
        get
        {
            return (headerLength - 1) / 32 - 1;
        }
    }
    public byte version;
    public byte YY, MM, DD;                // date of last update (YY is offset from 1900)
    public uint recordCount;               // number of records in the table
    public short headerLength;             // bytes from file start to first record
    public short LegthOfEachRecord;        // bytes per record, incl. deleted flag
    public short Reserved1;
    public byte IncompleteTransac;
    public byte EncryptionFlag;
    public uint FreeRecordThread;
    public int Reserved2;
    public int Reserved3;
    public byte MDX;                       // MDX index file flag
    public byte LanguageDriver;            // codepage marker, see CodePage enum
    public short Reserved4;
}
/// <summary>
/// One 32-byte field descriptor of a dBASE (DBF) file: reads an existing descriptor
/// and writes descriptors for new tables.
/// </summary>
internal class FieldDescriptor
{
    /// <summary>Reads a 32-byte descriptor from the current position of the reader.</summary>
    public FieldDescriptor(BinaryReader br)
    {
        br.Read(fieldName, 0, 11);   // 11 bytes, null-padded field name
        FieldType = br.ReadChar();
        FieldDataAddress = (uint)br.ReadInt32();
        FieldLength = br.ReadByte();
        DecimalCount = br.ReadByte();
        Reserved1 = br.ReadInt16();
        WorkAreaID = br.ReadByte();
        Reserved2 = br.ReadInt16();
        FlagForSET_FIELDS = br.ReadByte();
        Reserved3 = br.ReadByte();
        Reserved4 = br.ReadByte();
        Reserved5 = br.ReadByte();
        Reserved6 = br.ReadByte();
        Reserved7 = br.ReadByte();
        Reserved8 = br.ReadByte();
        Reserved9 = br.ReadByte();
        IndexFieldFlag = br.ReadByte();
    }
    /// <summary>
    /// Writes a 32-byte descriptor for the given field, mapping the framework field
    /// type to a DBF type character and fixed length. Must mirror the record-length
    /// bookkeeping in <c>DBFFileHader.Write</c>.
    /// </summary>
    /// <returns>False for null arguments or Shape fields (not stored in the DBF); true otherwise.</returns>
    public static bool Write(BinaryWriter bw, IField field)
    {
        if (bw == null || field == null) return false;
        byte decimalCount = 0, fieldLength = 0;
        char fieldType = 'C';
        switch (field.type)
        {
            case gView.Framework.Data.FieldType.biginteger:
                fieldLength = 18;
                fieldType = 'N';
                break;
            case gView.Framework.Data.FieldType.boolean:
                fieldLength = 1;
                fieldType = 'L';
                break;
            case gView.Framework.Data.FieldType.character:
                fieldLength = 1;
                fieldType = 'C';
                break;
            case gView.Framework.Data.FieldType.Date:
                fieldLength = 8;
                fieldType = 'D';
                break;
            case gView.Framework.Data.FieldType.Double:
                fieldLength = 31;
                decimalCount = 31;
                fieldType = 'F';
                break;
            case gView.Framework.Data.FieldType.Float:
                fieldLength = 11;
                fieldType = 'F';
                break;
            case gView.Framework.Data.FieldType.ID:
                fieldLength = 9;
                fieldType = 'N';
                break;
            case gView.Framework.Data.FieldType.integer:
                fieldLength = 9;
                fieldType = 'N';
                break;
            case gView.Framework.Data.FieldType.Shape:
                return false;
            case gView.Framework.Data.FieldType.smallinteger:
                fieldLength = 6;
                fieldType = 'N';
                break;
            case gView.Framework.Data.FieldType.String:
                fieldLength = (byte)(field.size > 255 ? 255 : field.size);
                fieldType = 'C';
                break;
            default:
                fieldLength = (byte)(field.size > 0 ? field.size : 255);
                fieldType = 'C';
                break;
        }
        // fieldName
        // Write at most 10 name characters; byte 11 is always 0 so the name is
        // null-terminated (names longer than 10 characters are truncated).
        for (int i = 0; i < 10; i++)
        {
            if (i < field.name.Length)
                bw.Write((byte)field.name[i]);
            else
                bw.Write((byte)0);
        }
        bw.Write((byte)0);
        bw.Write((byte)fieldType); // FieldType
        bw.Write((int)0); // FieldDataAddress
        bw.Write((byte)fieldLength); // FieldLength
        bw.Write((byte)decimalCount); // DecimalCount
        bw.Write((short)0); // Reserved1
        bw.Write((byte)0); // WorkAreaID
        bw.Write((short)0); // Reserved2
        bw.Write((byte)0); // FlagForSET_FIELDS
        bw.Write((byte)0); // Reserved3
        bw.Write((byte)0); // Reserved4
        bw.Write((byte)0); // Reserved5
        bw.Write((byte)0); // Reserved6
        bw.Write((byte)0); // Reserved7
        bw.Write((byte)0); // Reserved8
        bw.Write((byte)0); // Reserved9
        bw.Write((byte)0); // IndexFieldFlag
        return true;
    }
    /// <summary>The field name decoded as ASCII with trailing null bytes removed.</summary>
    public string FieldName
    {
        get
        {
            char[] trims = { '\0' };
            System.Text.ASCIIEncoding encoder = new System.Text.ASCIIEncoding();
            return encoder.GetString(fieldName).TrimEnd(trims);
        }
    }
    private byte[] fieldName = new byte[11];
    public char FieldType;        // DBF type character: C, N, F, L, D, I, O, +
    public uint FieldDataAddress;
    public byte FieldLength;      // field width in bytes within a record
    public byte DecimalCount;     // digits after the decimal point for numeric fields
    public short Reserved1;
    public byte WorkAreaID;
    public short Reserved2;
    public byte FlagForSET_FIELDS;
    public byte Reserved3;
    public byte Reserved4;
    public byte Reserved5;
    public byte Reserved6;
    public byte Reserved7;
    public byte Reserved8;
    public byte Reserved9;
    public byte IndexFieldFlag;
}
/// <summary>
/// DBF language-driver byte values and the codepage each one denotes
/// (see the mapping in the DBFFile constructor).
/// </summary>
internal enum CodePage
{
    DOS_USA = 0x01,          // codepage 437
    DOS_Multilingual = 0x02, // codepage 850
    Windows_ANSI = 0x03,     // codepage 1252
    EE_MS_DOS = 0x64,        // codepage 852
    Nordic_MS_DOS = 0x65,    // codepage 865
    Russian_MS_DOS = 0x66,   // codepage 866
    Windows_EE = 0xc8,       // codepage 1250
    UTF_7 = 0x57
}
/// <summary>
/// Reader/writer for the dBASE (DBF) attribute table that accompanies a shapefile.
/// Reads the header and field descriptors on construction, exposes record access as
/// <see cref="System.Data.DataTable"/> rows, and supports writing records in place.
/// </summary>
internal class DBFFile
{
    private string _filename;
    private DBFFileHader _header;
    private List<FieldDescriptor> _fields;
    private Encoding _encoder = null;
    // Characters stripped from the end of fixed-width text fields.
    private char[] _trims = { '\0', ' ' };
    // DBF numeric fields always use '.' as the decimal separator, so parse/format invariantly.
    private static IFormatProvider _nhi = System.Globalization.CultureInfo.InvariantCulture.NumberFormat;
    // Name of the synthetic record-ID column; renamed below if the table already has "FID".
    private string _idField = "FID";
    /// <summary>
    /// Opens the DBF file, reads header and field descriptors, picks a unique ID column
    /// name, and resolves the text encoding from the header's language driver byte
    /// (with a "dbf_default_encoding.txt" override and a UTF-7 fallback).
    /// All failures are swallowed, leaving the instance partially initialized.
    /// </summary>
    public DBFFile(string filename)
    {
        try
        {
            FileInfo fi = new FileInfo(filename);
            if (!fi.Exists) return;
            _filename = filename;
            // NOTE(review): sr is closed only on the success path; an exception here
            // leaks the stream until finalization. Left as-is to preserve behavior.
            StreamReader sr = new StreamReader(filename);
            BinaryReader br = new BinaryReader(sr.BaseStream);
            _header = new DBFFileHader(br);
            _fields = new List<FieldDescriptor>();
            for (int i = 0; i < _header.FieldsCount; i++)
            {
                FieldDescriptor field = new FieldDescriptor(br);
                _fields.Add(field);
            }
            sr.Close();
            // Ensure the synthetic ID column name does not collide with a real field.
            int c = 1;
            string idFieldName = _idField;
            while (HasField(idFieldName))
                idFieldName = _idField + "_" + c++;
            _idField = idFieldName;
            _encoder = null;
            try
            {
                switch ((CodePage)_header.LanguageDriver)
                {
                    case CodePage.DOS_USA:
                        _encoder = EncodingFromCodePage(437);
                        break;
                    case CodePage.DOS_Multilingual:
                        _encoder = EncodingFromCodePage(850);
                        break;
                    case CodePage.Windows_ANSI:
                        _encoder = EncodingFromCodePage(1252);
                        break;
                    case CodePage.EE_MS_DOS:
                        _encoder = EncodingFromCodePage(852);
                        break;
                    case CodePage.Nordic_MS_DOS:
                        _encoder = EncodingFromCodePage(865);
                        break;
                    case CodePage.Russian_MS_DOS:
                        _encoder = EncodingFromCodePage(866);
                        break;
                    case CodePage.Windows_EE:
                        _encoder = EncodingFromCodePage(1250);
                        break;
                    case CodePage.UTF_7:
                        _encoder = new UTF7Encoding();
                        break;
                }
            }
            catch { }
            if (_encoder == null)
            {
                // Unknown language driver: allow a per-directory override file.
                FileInfo encFi = new FileInfo(fi.Directory.FullName + @"\dbf_default_encoding.txt");
                if (encFi.Exists)
                {
                    using (StreamReader encSr = new StreamReader(encFi.FullName))
                    {
                        switch (encSr.ReadLine().ToLower())
                        {
                            case "utf8":
                            case "utf-8":
                                _encoder = new UTF8Encoding();
                                break;
                            case "unicode":
                                _encoder = new UnicodeEncoding();
                                break;
                            case "ascii":
                                _encoder = new ASCIIEncoding();
                                break;
                        }
                    }
                }
                if (_encoder == null)
                    _encoder = new UTF7Encoding();
            }
            //Record(0);
            //Record(1);
        }
        catch
        {
        }
    }
    /// <summary>Returns the installed encoding for the given codepage, or null if unavailable.</summary>
    private Encoding EncodingFromCodePage(int codePage)
    {
        foreach (EncodingInfo ei in Encoding.GetEncodings())
        {
            if (ei.CodePage == codePage)
            {
                return ei.GetEncoding();
            }
        }
        return null;
    }
    /// <summary>True if a field with exactly this (case-sensitive) name exists.</summary>
    private bool HasField(string name)
    {
        foreach (FieldDescriptor fd in _fields)
        {
            if (fd.FieldName == name) return true;
        }
        return false;
    }
    public string Filename
    {
        get { return _filename; }
    }
    /// <summary>Builds an empty DataTable with columns for all fields plus the ID column.</summary>
    internal DataTable DataTable()
    {
        return DataTable(null);
    }
    /// <summary>
    /// Builds an empty DataTable with columns for the requested field names
    /// (null = ID column plus all fields). Unknown names are silently skipped.
    /// </summary>
    internal DataTable DataTable(string[] fieldnames)
    {
        DataTable tab = new DataTable();
        if (fieldnames != null)
        {
            foreach (string fieldname in fieldnames)
            {
                if (fieldname == _idField)
                {
                    tab.Columns.Add(_idField, typeof(uint));
                }
                foreach (FieldDescriptor field in _fields)
                {
                    if (field.FieldName == fieldname)
                    {
                        if (tab.Columns[fieldname] == null)
                            tab.Columns.Add(fieldname, DataType(field));
                    }
                }
            }
        }
        else
        {
            tab.Columns.Add(_idField, typeof(uint));
            foreach (FieldDescriptor field in _fields)
            {
                if (tab.Columns[field.FieldName] == null)
                    tab.Columns.Add(field.FieldName, DataType(field));
            }
        }
        return tab;
    }
    /// <summary>Maps a DBF type character (plus length/decimals) to the CLR column type.</summary>
    private Type DataType(FieldDescriptor fd)
    {
        switch (fd.FieldType)
        {
            case 'C': return typeof(string);
            case 'F':
            case 'N':
                if (fd.DecimalCount == 0)
                {
                    // Integral numeric field: pick the smallest type that holds the width.
                    if (fd.FieldLength <= 6) return typeof(short);
                    if (fd.FieldLength <= 9) return typeof(int);
                    return typeof(long);
                }
                else // if( fd.DecimalCount==9 && fd.FieldLength==31 )
                {
                    if (fd.DecimalCount <= 9) return typeof(float);
                    return typeof(double);
                }
            case 'L': return typeof(bool);
            case 'D': return typeof(DateTime);
            case 'I': return typeof(int);
            case 'O': return typeof(double);
            case '+': return typeof(int); // Autoincrement
            default: return typeof(string);
        }
    }
    /// <summary>Maps a DBF type character to the framework field type (mirror of DataType).</summary>
    private FieldType FieldType(FieldDescriptor fd)
    {
        switch (fd.FieldType)
        {
            case 'C': return gView.Framework.Data.FieldType.String;
            case 'F':
            case 'N':
                if (fd.DecimalCount == 0)
                {
                    if (fd.FieldLength <= 6) return gView.Framework.Data.FieldType.smallinteger;
                    if (fd.FieldLength <= 9) return gView.Framework.Data.FieldType.integer;
                    return gView.Framework.Data.FieldType.biginteger;
                }
                else // if( fd.DecimalCount==9 && fd.FieldLength==31 )
                {
                    if (fd.DecimalCount <= 9) return gView.Framework.Data.FieldType.Float;
                    return gView.Framework.Data.FieldType.Double;
                }
            case 'L': return gView.Framework.Data.FieldType.boolean;
            case 'D': return gView.Framework.Data.FieldType.Date;
            case 'I': return gView.Framework.Data.FieldType.integer;
            case 'O': return gView.Framework.Data.FieldType.Double;
            case '+': return gView.Framework.Data.FieldType.integer; // Autoincrement
            default: return gView.Framework.Data.FieldType.String;
        }
    }
    /// <summary>Reads one record (1-based index) with all fields into a fresh table.</summary>
    public DataTable Record(uint index)
    {
        return Record(index, "*");
    }
    /// <summary>Reads one record (1-based index) with the given comma-separated fields ("*" = all).</summary>
    public DataTable Record(uint index, string fieldnames)
    {
        StreamReader sr = new StreamReader(_filename);
        BinaryReader br = new BinaryReader(sr.BaseStream);
        string[] names = null;
        fieldnames = fieldnames.Replace(" ", "");
        if (fieldnames != "*") names = fieldnames.Split(',');
        DataTable tab = DataTable(names);
        Record(index, tab, br);
        sr.Close();
        return tab;
    }
    /// <summary>
    /// Appends record <paramref name="index"/> (1-based) to <paramref name="tab"/>.
    /// Deleted records (flag != ' ') and out-of-range indices are skipped silently.
    /// </summary>
    internal void Record(uint index, DataTable tab, BinaryReader br)
    {
        if (index > _header.recordCount || index < 1) return;
        br.BaseStream.Position = _header.headerLength + _header.LegthOfEachRecord * (index - 1);
        char deleted = br.ReadChar();
        if (deleted != ' ') return;
        DataRow row = tab.NewRow();
        foreach (FieldDescriptor field in _fields)
        {
            if (tab.Columns[field.FieldName] == null)
            {
                // Column not requested: skip the field's bytes.
                br.BaseStream.Position += field.FieldLength;
                continue;
            }
            switch ((char)field.FieldType)
            {
                case 'C':
                    row[field.FieldName] = _encoder.GetString(br.ReadBytes(field.FieldLength)).TrimEnd(_trims);
                    break;
                case 'F':
                case 'N':
                    string str2 = _encoder.GetString(br.ReadBytes(field.FieldLength)).TrimEnd(_trims);
                    if (str2 != "")
                    {
                        try
                        {
                            if (field.DecimalCount == 0)
                            {
                                row[field.FieldName] = Convert.ToInt64(str2);
                            }
                            else
                            {
                                row[field.FieldName] = double.Parse(str2, _nhi);
                            }
                        }
                        catch { }
                    }
                    break;
                case '+':
                case 'I':
                    row[field.FieldName] = br.ReadInt32();
                    break;
                case 'O':
                    row[field.FieldName] = br.ReadDouble();
                    break;
                case 'L':
                    char c = br.ReadChar();
                    if (c == 'Y' || c == 'y' ||
                        c == 'T' || c == 't') row[field.FieldName] = true;
                    else if (c == 'N' || c == 'n' ||
                        c == 'F' || c == 'f') row[field.FieldName] = false;
                    else
                        row[field.FieldName] = null;
                    break;
                case 'D':
                    // Dates are stored as fixed-width text "YYYYMMDD".
                    string date = _encoder.GetString(br.ReadBytes(field.FieldLength)).TrimEnd(_trims);
                    if (date.Length == 8)
                    {
                        int y = int.Parse(date.Substring(0, 4));
                        int m = int.Parse(date.Substring(4, 2));
                        int d = int.Parse(date.Substring(6, 2));
                        DateTime td = new DateTime(y, m, d);
                        row[field.FieldName] = td;
                    }
                    break;
            }
        }
        if (tab.Columns[_idField] != null) row[_idField] = index;
        tab.Rows.Add(row);
    }
    /// <summary>
    /// Appends all non-deleted records to <paramref name="tab"/> (same per-field
    /// decoding as the single-record overload).
    /// </summary>
    internal void Records(DataTable tab, BinaryReader br)
    {
        uint rowCount = _header.recordCount;
        for (uint i = 0; i < rowCount; i++)
        {
            br.BaseStream.Position = _header.headerLength + _header.LegthOfEachRecord * (i);
            char deleted = br.ReadChar();
            if (deleted != ' ') continue;
            DataRow row = tab.NewRow();
            foreach (FieldDescriptor field in _fields)
            {
                if (tab.Columns[field.FieldName] == null)
                {
                    br.BaseStream.Position += field.FieldLength;
                    continue;
                }
                switch ((char)field.FieldType)
                {
                    case 'C':
                        row[field.FieldName] = _encoder.GetString(br.ReadBytes(field.FieldLength)).TrimEnd(_trims);
                        break;
                    case 'F':
                    case 'N':
                        string str2 = _encoder.GetString(br.ReadBytes(field.FieldLength)).TrimEnd(_trims);
                        if (str2 != "")
                        {
                            try
                            {
                                if (field.DecimalCount == 0)
                                {
                                    row[field.FieldName] = long.Parse(str2);
                                }
                                else
                                {
                                    row[field.FieldName] = double.Parse(str2, _nhi);
                                }
                            }
                            catch { }
                        }
                        break;
                    case '+':
                    case 'I':
                        row[field.FieldName] = br.ReadInt32();
                        break;
                    case 'O':
                        row[field.FieldName] = br.ReadDouble();
                        break;
                    case 'L':
                        char c = br.ReadChar();
                        if (c == 'Y' || c == 'y' ||
                            c == 'T' || c == 't') row[field.FieldName] = true;
                        else if (c == 'N' || c == 'n' ||
                            c == 'F' || c == 'f') row[field.FieldName] = false;
                        else
                            row[field.FieldName] = null;
                        break;
                    case 'D':
                        string date = _encoder.GetString(br.ReadBytes(field.FieldLength)).TrimEnd(_trims);
                        if (date.Length == 8)
                        {
                            int y = int.Parse(date.Substring(0, 4));
                            int m = int.Parse(date.Substring(4, 2));
                            int d = int.Parse(date.Substring(6, 2));
                            DateTime td = new DateTime(y, m, d);
                            row[field.FieldName] = td;
                        }
                        break;
                }
            }
            if (tab.Columns[_idField] != null) row[_idField] = i + 1;
            tab.Rows.Add(row);
        }
    }
    #region Writer
    /// <summary>Creates a new, empty DBF file for the given fields (overwriting any existing file).</summary>
    public static bool Create(string filename, Fields fields)
    {
        try
        {
            FileInfo fi = new FileInfo(filename);
            if (fi.Exists) fi.Delete();
            StreamWriter sw = new StreamWriter(filename);
            BinaryWriter bw = new BinaryWriter(sw.BaseStream);
            bool ret = DBFFileHader.Write(bw, fields);
            bw.Flush();
            sw.Flush();
            sw.Close();
            return ret;
        }
        catch
        {
            return false;
        }
    }
    /// <summary>
    /// Writes the feature's attributes as record <paramref name="index"/> (1-based),
    /// then refreshes the header's last-update date and record count.
    /// Unconvertible or missing values are written as blanks.
    /// </summary>
    internal bool WriteRecord(uint index, IFeature feature)
    {
        if (feature == null) return false;
        FileStream fs = null;
        BinaryWriter bw = null;
        BinaryReader br = null;
        try
        {
            fs = new FileStream(_filename, FileMode.Open);
            bw = new BinaryWriter(fs);
            long pos0 = bw.BaseStream.Position = _header.headerLength + _header.LegthOfEachRecord * (index - 1);
            long posX = 1;
            bw.Write((byte)' '); // deleted Flag
            string str;
            foreach (FieldDescriptor fd in _fields)
            {
                object obj = feature[fd.FieldName];
                if (obj == null || obj == DBNull.Value)
                {
                    WriteNull(fd, bw);
                }
                else
                {
                    try
                    {
                        switch (fd.FieldType)
                        {
                            case 'C':
                                str = obj.ToString().PadRight(fd.FieldLength, ' ');
                                WriteString(fd, bw, str);
                                break;
                            case 'N':
                            case 'F':
                                if (fd.DecimalCount == 0)
                                {
                                    // Use Int64: integral DBF fields can hold up to 18 digits
                                    // (biginteger) and the read path parses Int64; Convert.ToInt32
                                    // overflowed for large values and silently wrote a null field.
                                    str = Convert.ToInt64(obj).ToString();
                                    str = str.PadLeft(fd.FieldLength, ' ');
                                    WriteString(fd, bw, str);
                                }
                                else
                                {
                                    str = Convert.ToDouble(obj).ToString(_nhi);
                                    str = str.PadLeft(fd.FieldLength, ' ');
                                    WriteString(fd, bw, str);
                                }
                                break;
                            case '+':
                            case 'I':
                                bw.Write(Convert.ToInt32(obj));
                                break;
                            case 'O':
                                bw.Write(Convert.ToDouble(obj));
                                break;
                            case 'L':
                                bool v = Convert.ToBoolean(obj);
                                str = (v) ? "T" : "F";
                                WriteString(fd, bw, str);
                                break;
                            case 'D':
                                DateTime td = Convert.ToDateTime(obj);
                                str = td.Year.ToString().PadLeft(4, '0') +
                                    td.Month.ToString().PadLeft(2, '0') +
                                    td.Day.ToString().PadLeft(2, '0');
                                WriteString(fd, bw, str);
                                break;
                            default:
                                WriteNull(fd, bw);
                                break;
                        }
                    }
                    catch
                    {
                        WriteNull(fd, bw);
                    }
                }
                posX += fd.FieldLength;
                bw.BaseStream.Position = pos0 + posX;
            }
            // Update last-modified date (year is stored as offset from 1900) and record count.
            // NOTE(review): the count is incremented on every call, even when an existing
            // record index is overwritten — presumably callers only append; verify.
            br = new BinaryReader(fs);
            br.BaseStream.Position = 4;
            uint recCount = (uint)br.ReadInt32();
            DateTime now = DateTime.Now;
            bw.BaseStream.Position = 1;
            bw.Write((byte)(now.Year - 1900));
            bw.Write((byte)now.Month);
            bw.Write((byte)now.Day);
            bw.Write((int)recCount + 1);
            fs.Flush();
            return true;
        }
        catch (Exception ex)
        {
            string err = ex.Message;
            return false;
        }
        finally
        {
            if (fs != null)
                fs.Close();
        }
    }
    /// <summary>Fills the field's slot with blanks (DBF representation of a null value).</summary>
    private void WriteNull(FieldDescriptor fd, BinaryWriter bw)
    {
        for (int i = 0; i < fd.FieldLength; i++)
        {
            bw.Write((byte)' ');
        }
    }
    /// <summary>Writes the string encoded with the file's encoding, truncated/zero-padded to the field length.</summary>
    private void WriteString(FieldDescriptor fd, BinaryWriter bw, string str)
    {
        byte[] bytes = _encoder.GetBytes(str);
        for (int i = 0; i < fd.FieldLength; i++)
        {
            if (i < bytes.Length)
                bw.Write((byte)bytes[i]);
            else
                bw.Write((byte)0);
        }
    }
    #endregion
    /// <summary>The table schema: the synthetic ID field followed by all DBF fields.</summary>
    public IFields Fields
    {
        get
        {
            Fields fields = new Fields();
            // ID
            Field field = new Field();
            field.name = _idField;
            field.type = gView.Framework.Data.FieldType.ID;
            fields.Add(field);
            foreach (FieldDescriptor fd in _fields)
            {
                field = new Field();
                field.name = fd.FieldName;
                field.size = fd.FieldLength;
                field.precision = fd.DecimalCount;
                field.type = FieldType(fd);
                fields.Add(field);
            }
            return fields;
        }
    }
}
/// <summary>
/// Streams records of a <see cref="DBFFile"/> into a DataTable, holding the file
/// open across multiple AddRecord calls. Implements <see cref="IDisposable"/> so the
/// underlying stream can be released with a using statement (the existing Dispose
/// method is unchanged).
/// </summary>
internal class DBFDataReader : IDisposable
{
    private DBFFile _file;
    private StreamReader _sr = null;
    private BinaryReader _br = null;
    private DataTable _tab;
    /// <summary>
    /// Opens the DBF file and prepares a table for the requested comma-separated
    /// field names ("*" = all). A null file leaves the reader unusable.
    /// </summary>
    public DBFDataReader(DBFFile file, string fieldnames)
    {
        if (file == null) return;
        _file = file;
        _sr = new StreamReader(_file.Filename);
        _br = new BinaryReader(_sr.BaseStream);
        string[] names = null;
        fieldnames = fieldnames.Replace(" ", "");
        if (fieldnames != "*") names = fieldnames.Split(',');
        _tab = _file.DataTable(names);
    }
    /// <summary>Reads every non-deleted record into the table and returns it (null if unusable).</summary>
    public DataTable AllRecords
    {
        get
        {
            if (_file == null) return null;
            _file.Records(_tab, _br);
            return _tab;
        }
    }
    /// <summary>Appends the record with the given 1-based index to the table.</summary>
    public void AddRecord(uint index)
    {
        _file.Record(index, _tab, _br);
    }
    /// <summary>Removes all rows accumulated so far, keeping the reader open.</summary>
    public void Clear()
    {
        _tab.Rows.Clear();
    }
    public DataTable Table
    {
        get { return _tab; }
    }
    /// <summary>Releases the table and closes the underlying stream; safe to call twice.</summary>
    public void Dispose()
    {
        if (_tab != null)
        {
            _tab.Rows.Clear();
            _tab.Dispose();
            _tab = null;
        }
        if (_sr != null)
        {
            // Closing the StreamReader also closes the BinaryReader's base stream.
            _sr.Close();
            _sr = null;
        }
    }
}
/// <summary>
/// Placeholder for DBF write support; currently carries no behavior.
/// </summary>
internal class DBFDataWriter
{
    public DBFDataWriter() { }
}
}
| |
/// This code was generated by
/// \ / _ _ _| _ _
/// | (_)\/(_)(_|\/| |(/_ v1.0.0
/// / /
using System;
using System.Collections.Generic;
using System.Linq;
using Twilio.Base;
using Twilio.Converters;
namespace Twilio.Rest.Conversations.V1.Conversation
{
/// <summary>
/// Retrieve a list of all webhooks scoped to the conversation
/// </summary>
public class ReadWebhookOptions : ReadOptions<WebhookResource>
{
    /// <summary>
    /// The unique ID of the Conversation for this webhook.
    /// </summary>
    public string PathConversationSid { get; }

    /// <summary>
    /// Construct a new ReadWebhookOptions
    /// </summary>
    /// <param name="pathConversationSid"> The unique ID of the Conversation for this webhook. </param>
    public ReadWebhookOptions(string pathConversationSid)
    {
        PathConversationSid = pathConversationSid;
    }

    /// <summary>
    /// Generate the necessary parameters
    /// </summary>
    public override List<KeyValuePair<string, string>> GetParams()
    {
        // Only the paging hint travels in the query string for a read.
        if (PageSize == null)
        {
            return new List<KeyValuePair<string, string>>();
        }
        return new List<KeyValuePair<string, string>>
        {
            new KeyValuePair<string, string>("PageSize", PageSize.ToString())
        };
    }
}
/// <summary>
/// Fetch the configuration of a conversation-scoped webhook
/// </summary>
public class FetchWebhookOptions : IOptions<WebhookResource>
{
    /// <summary>
    /// The unique ID of the Conversation for this webhook.
    /// </summary>
    public string PathConversationSid { get; }

    /// <summary>
    /// A 34 character string that uniquely identifies this resource.
    /// </summary>
    public string PathSid { get; }

    /// <summary>
    /// Construct a new FetchWebhookOptions
    /// </summary>
    /// <param name="pathConversationSid"> The unique ID of the Conversation for this webhook. </param>
    /// <param name="pathSid"> A 34 character string that uniquely identifies this resource. </param>
    public FetchWebhookOptions(string pathConversationSid, string pathSid)
    {
        PathConversationSid = pathConversationSid;
        PathSid = pathSid;
    }

    /// <summary>
    /// Generate the necessary parameters
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        // Fetch is addressed entirely through the URL path; no body parameters.
        return new List<KeyValuePair<string, string>>();
    }
}
/// <summary>
/// Create a new webhook scoped to the conversation
/// </summary>
public class CreateWebhookOptions : IOptions<WebhookResource>
{
    /// <summary>
    /// The unique ID of the Conversation for this webhook.
    /// </summary>
    public string PathConversationSid { get; }

    /// <summary>
    /// The target of this webhook.
    /// </summary>
    public WebhookResource.TargetEnum Target { get; }

    /// <summary>
    /// The absolute url the webhook request should be sent to.
    /// </summary>
    public string ConfigurationUrl { get; set; }

    /// <summary>
    /// The HTTP method to be used when sending a webhook request.
    /// </summary>
    public WebhookResource.MethodEnum ConfigurationMethod { get; set; }

    /// <summary>
    /// The list of events, firing webhook event for this Conversation.
    /// </summary>
    public List<string> ConfigurationFilters { get; set; }

    /// <summary>
    /// The list of keywords, firing webhook event for this Conversation.
    /// </summary>
    public List<string> ConfigurationTriggers { get; set; }

    /// <summary>
    /// The studio flow SID, where the webhook should be sent to.
    /// </summary>
    public string ConfigurationFlowSid { get; set; }

    /// <summary>
    /// The message index for which and it's successors the webhook will be replayed.
    /// </summary>
    public int? ConfigurationReplayAfter { get; set; }

    /// <summary>
    /// Construct a new CreateWebhookOptions
    /// </summary>
    /// <param name="pathConversationSid"> The unique ID of the Conversation for this webhook. </param>
    /// <param name="target"> The target of this webhook. </param>
    public CreateWebhookOptions(string pathConversationSid, WebhookResource.TargetEnum target)
    {
        PathConversationSid = pathConversationSid;
        Target = target;
        ConfigurationFilters = new List<string>();
        ConfigurationTriggers = new List<string>();
    }

    /// <summary>
    /// Generate the necessary parameters
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        var parameters = new List<KeyValuePair<string, string>>();
        AppendParam(parameters, "Target", Target);
        AppendParam(parameters, "Configuration.Url", ConfigurationUrl);
        AppendParam(parameters, "Configuration.Method", ConfigurationMethod);
        AppendEach(parameters, "Configuration.Filters", ConfigurationFilters);
        AppendEach(parameters, "Configuration.Triggers", ConfigurationTriggers);
        AppendParam(parameters, "Configuration.FlowSid", ConfigurationFlowSid);
        AppendParam(parameters, "Configuration.ReplayAfter", ConfigurationReplayAfter);
        return parameters;
    }

    // Adds a key/value pair only when the value is set (same null checks the
    // generated code performs inline).
    private static void AppendParam(List<KeyValuePair<string, string>> bag, string key, object value)
    {
        if (value != null)
        {
            bag.Add(new KeyValuePair<string, string>(key, value.ToString()));
        }
    }

    // Repeats the key once per list element when the list is set.
    private static void AppendEach(List<KeyValuePair<string, string>> bag, string key, List<string> values)
    {
        if (values != null)
        {
            bag.AddRange(values.Select(item => new KeyValuePair<string, string>(key, item)));
        }
    }
}
/// <summary>
/// Update an existing conversation-scoped webhook
/// </summary>
public class UpdateWebhookOptions : IOptions<WebhookResource>
{
    /// <summary>
    /// The unique ID of the Conversation for this webhook.
    /// </summary>
    public string PathConversationSid { get; }

    /// <summary>
    /// A 34 character string that uniquely identifies this resource.
    /// </summary>
    public string PathSid { get; }

    /// <summary>
    /// The absolute url the webhook request should be sent to.
    /// </summary>
    public string ConfigurationUrl { get; set; }

    /// <summary>
    /// The HTTP method to be used when sending a webhook request.
    /// </summary>
    public WebhookResource.MethodEnum ConfigurationMethod { get; set; }

    /// <summary>
    /// The list of events, firing webhook event for this Conversation.
    /// </summary>
    public List<string> ConfigurationFilters { get; set; }

    /// <summary>
    /// The list of keywords, firing webhook event for this Conversation.
    /// </summary>
    public List<string> ConfigurationTriggers { get; set; }

    /// <summary>
    /// The studio flow SID, where the webhook should be sent to.
    /// </summary>
    public string ConfigurationFlowSid { get; set; }

    /// <summary>
    /// Construct a new UpdateWebhookOptions
    /// </summary>
    /// <param name="pathConversationSid"> The unique ID of the Conversation for this webhook. </param>
    /// <param name="pathSid"> A 34 character string that uniquely identifies this resource. </param>
    public UpdateWebhookOptions(string pathConversationSid, string pathSid)
    {
        PathConversationSid = pathConversationSid;
        PathSid = pathSid;
        ConfigurationFilters = new List<string>();
        ConfigurationTriggers = new List<string>();
    }

    /// <summary>
    /// Generate the necessary parameters
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        var parameters = new List<KeyValuePair<string, string>>();
        AddPair(parameters, "Configuration.Url", ConfigurationUrl);
        AddPair(parameters, "Configuration.Method", ConfigurationMethod);
        AddList(parameters, "Configuration.Filters", ConfigurationFilters);
        AddList(parameters, "Configuration.Triggers", ConfigurationTriggers);
        AddPair(parameters, "Configuration.FlowSid", ConfigurationFlowSid);
        return parameters;
    }

    // Adds a key/value pair only when the value is set.
    private static void AddPair(List<KeyValuePair<string, string>> bag, string key, object value)
    {
        if (value != null)
        {
            bag.Add(new KeyValuePair<string, string>(key, value.ToString()));
        }
    }

    // Repeats the key once per list element when the list is set.
    private static void AddList(List<KeyValuePair<string, string>> bag, string key, List<string> values)
    {
        if (values != null)
        {
            bag.AddRange(values.Select(item => new KeyValuePair<string, string>(key, item)));
        }
    }
}
/// <summary>
/// Remove an existing webhook scoped to the conversation
/// </summary>
public class DeleteWebhookOptions : IOptions<WebhookResource>
{
    /// <summary>
    /// The unique ID of the Conversation for this webhook.
    /// </summary>
    public string PathConversationSid { get; }

    /// <summary>
    /// A 34 character string that uniquely identifies this resource.
    /// </summary>
    public string PathSid { get; }

    /// <summary>
    /// Construct a new DeleteWebhookOptions
    /// </summary>
    /// <param name="pathConversationSid"> The unique ID of the Conversation for this webhook. </param>
    /// <param name="pathSid"> A 34 character string that uniquely identifies this resource. </param>
    public DeleteWebhookOptions(string pathConversationSid, string pathSid)
    {
        PathConversationSid = pathConversationSid;
        PathSid = pathSid;
    }

    /// <summary>
    /// Generate the necessary parameters
    /// </summary>
    public List<KeyValuePair<string, string>> GetParams()
    {
        // Delete is addressed entirely through the URL path; no body parameters.
        return new List<KeyValuePair<string, string>>();
    }
}
}
| |
//==============================================================================
// TorqueLab ->
// Copyright (c) 2015 All Right Reserved, http://nordiklab.com/
//------------------------------------------------------------------------------
//==============================================================================
// ParticleEmitterNode builders: each selects a node datablock plus an emitter
// datablock of the matching emitter type, then runs the builder dialog.
function ObjectBuilderGui::buildSphereEmitter(%this)
{
%this.objectClassName = "ParticleEmitterNode";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"ParticleEmitterNodeData");
%this.addField("emitter", "TypeDataBlock", "Particle data",
"SphereEmitterData");
%this.process();
}
// Ray-sphere variant: uses its own node datablock type, unlike the others.
function ObjectBuilderGui::buildRaySphereEmitter(%this)
{
%this.objectClassName = "ParticleEmitterNode";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"RaySphereEmitterNodeData");
%this.addField("emitter", "TypeDataBlock", "Particle data",
"RaySphereEmitterData");
%this.process();
}
function ObjectBuilderGui::buildGraphEmitter(%this)
{
%this.objectClassName = "ParticleEmitterNode";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"ParticleEmitterNodeData");
%this.addField("emitter", "TypeDataBlock", "Particle data",
"GraphEmitterData");
%this.process();
}
function ObjectBuilderGui::buildGroundEmitter(%this)
{
%this.objectClassName = "ParticleEmitterNode";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"ParticleEmitterNodeData");
%this.addField("emitter", "TypeDataBlock", "Particle data",
"GroundEmitterData");
%this.process();
}
function ObjectBuilderGui::buildMaskEmitter(%this)
{
%this.objectClassName = "ParticleEmitterNode";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"ParticleEmitterNodeData");
%this.addField("emitter", "TypeDataBlock", "Particle data",
"MaskEmitterData");
%this.process();
}
// Mesh emitters are standalone objects, not ParticleEmitterNode wrappers.
function ObjectBuilderGui::buildMeshEmitter(%this)
{
%this.objectClassName = "MeshEmitter";
%this.addField("dataBlock", "TypeDataBlock", "datablock","MeshEmitterData");
%this.process();
}
function ObjectBuilderGui::buildRadiusMeshEmitter(%this)
{
%this.objectClassName = "RadiusMeshEmitter";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"RadiusMeshEmitterData");
%this.process();
}
function ObjectBuilderGui::buildNodeMeshEmitter(%this)
{
%this.objectClassName = "NodeMeshEmitter";
%this.addField("dataBlock", "TypeDataBlock", "datablock",
"NodeMeshEmitterData");
%this.process();
}
// Creates a BotSpawnSphere and files it under MissionGroup/BotStuff/BotSpawns,
// making BotSpawns the active group for the new object.
function ObjectBuilderGui::buildBotSpawn(%this) {
%this.objectClassName = "BotSpawnSphere";
%this.addField("dataBlock", "TypeDataBlock", "dataBlock", "MissionMarkerData BotSpawnMarker");
%this.addField("radius", "TypeFloat", "Radius", 1);
%this.addField("sphereWeight", "TypeFloat", "Sphere Weight", 1);
%this.addField("spawnClass", "TypeString", "Spawn Class", "Player");
%this.addField("spawnDatablock", "TypeDataBlock", "Spawn Data", "PlayerData DefaultPlayerData");
if( Scene.getActiveSimGroup().getID() == MissionGroup.getID() ) {
if( !isObject("BotStuff") )
MissionGroup.add( new SimGroup("BotStuff") );
// Fix: the container group created above is named "BotStuff"; the old
// "BotsStuff" reference pointed at a nonexistent object, so the BotSpawns
// subgroup was never parented correctly.
if( !isObject("BotSpawns") )
BotStuff.add( new SimGroup("BotSpawns") );
Scene.setActiveSimGroup("BotSpawns");
}
%this.process();
}
// Creates a BotGoalPoint and files it under MissionGroup/BotStuff/BotGoals,
// making BotGoals the active group for the new object.
function ObjectBuilderGui::buildBotGoal(%this) {
%this.objectClassName = "BotGoalPoint";
%this.addField("dataBlock", "TypeDataBlock", "dataBlock", "MissionMarkerData BotGoalMarker");
if( Scene.getActiveSimGroup().getID() == MissionGroup.getID() ) {
if( !isObject("BotStuff") )
MissionGroup.add( new SimGroup("BotStuff") );
if( !isObject("BotGoals") )
// Fix: "BotsStuff" was a typo for the "BotStuff" group created above.
BotStuff.add( new SimGroup("BotGoals") );
// Fix: goal points belong in "BotGoals"; the old code copy-pasted
// "BotSpawns" from buildBotSpawn.
Scene.setActiveSimGroup("BotGoals");
}
%this.process();
}
// Creates a player SpawnSphere; under the MissionGroup it is filed into a
// "Spawnpoints" group which then becomes the active group.
function ObjectBuilderGui::buildPlayerDropPoint(%this) {
%this.objectClassName = "SpawnSphere";
%this.addField("dataBlock", "TypeDataBlock", "dataBlock", "MissionMarkerData SpawnSphereMarker");
%this.addField("radius", "TypeFloat", "Radius", 1);
%this.addField("sphereWeight", "TypeFloat", "Sphere Weight", 1);
%this.addField("spawnClass", "TypeString", "Spawn Class", "Player");
%this.addField("spawnDatablock", "TypeDataBlock", "Spawn Data", "PlayerData DefaultPlayerData");
if( Scene.getActiveSimGroup().getID() == MissionGroup.getID() ) {
if( !isObject("Spawnpoints") )
MissionGroup.add( new SimGroup("Spawnpoints") );
Scene.setActiveSimGroup("Spawnpoints");
}
%this.process();
}
// Same as buildPlayerDropPoint but spawns a Camera/Observer, filed under
// "ObserverDropPoints".
function ObjectBuilderGui::buildObserverDropPoint(%this) {
%this.objectClassName = "SpawnSphere";
%this.addField("dataBlock", "TypeDataBlock", "dataBlock", "MissionMarkerData SpawnSphereMarker");
%this.addField("radius", "TypeFloat", "Radius", 1);
%this.addField("sphereWeight", "TypeFloat", "Sphere Weight", 1);
%this.addField("spawnClass", "TypeString", "Spawn Class", "Camera");
%this.addField("spawnDatablock", "TypeDataBlock", "Spawn Data", "CameraData Observer");
if( Scene.getActiveSimGroup().getID() == MissionGroup.getID() ) {
if( !isObject("ObserverDropPoints") )
MissionGroup.add( new SimGroup("ObserverDropPoints") );
Scene.setActiveSimGroup("ObserverDropPoints");
}
%this.process();
}
//------------------------------------------------------------------------------
// This function is used for objects that don't require any special
// fields/functionality when being built
//------------------------------------------------------------------------------
function ObjectBuilderGui::buildObject(%this, %className) {
%this.objectClassName = %className;
%this.process();
}
//------------------------------------------------------------------------------
// Environment
//------------------------------------------------------------------------------
// Builds a ScatterSky. Unless %dontWarnAboutSun is set, first scans the
// container for an existing Sun and asks the user to confirm (the Yes branch
// re-enters this function with the flag set).
function ObjectBuilderGui::buildScatterSky( %this, %dontWarnAboutSun ) {
if( !%dontWarnAboutSun ) {
// Check for sun object already in the level. If there is one,
// warn the user.
initContainerTypeSearch( $TypeMasks::EnvironmentObjectType );
while( 1 ) {
%object = containerSearchNext();
if( !%object )
break;
if( %object.isMemberOfClass( "Sun" ) ) {
LabMsgYesNo( "Warning",
"A ScatterSky object will conflict with the Sun object that is already in the level." SPC
"Do you still want to create a ScatterSky object?",
%this @ ".buildScatterSky( true );" );
return;
}
}
}
%this.objectClassName = "ScatterSky";
%this.addField("rayleighScattering", "TypeFloat", "Rayleigh Scattering", "0.0035");
%this.addField("mieScattering", "TypeFloat", "Mie Scattering", "0.0045");
%this.addField("skyBrightness", "TypeFloat", "Sky Brightness", "25");
%this.process();
// This is a trick... any fields added after process won't show
// up as controls, but will be applied to the created object.
%this.addField( "flareType", "TypeLightFlareDataPtr", "Flare", "ScatterSkyFlareExample" );
%this.addField( "moonMat", "TypeMaterialName", "Moon Material", "Moon_Glow_Mat" );
%this.addField( "nightCubemap", "TypeCubemapName", "Night Cubemap", "NightCubemap" );
%this.addField( "useNightCubemap", "TypeBool", "Use Night Cubemap", "true" );
}
// Builds a CloudLayer with a default normal/displacement texture; clears the
// pending object name first.
function ObjectBuilderGui::buildCloudLayer(%this) {
OBObjectName.setValue( "" );
%this.objectClassName = "CloudLayer";
%this.addField( "texture", "TypeImageFilename", "Texture", "art/textures/skies/clouds/clouds_normal_displacement" );
%this.process();
}
function ObjectBuilderGui::buildBasicClouds(%this) {
OBObjectName.setValue( "" );
%this.objectClassName = "BasicClouds";
%this.process();
// This is a trick... any fields added after process won't show
// up as controls, but will be applied to the created object.
%this.addField( "texture[0]", "TypeImageFilename", "Texture", "art/textures/skies/clouds/cloud1" );
%this.addField( "texture[1]", "TypeImageFilename", "Texture", "art/textures/skies/clouds/cloud2" );
%this.addField( "texture[2]", "TypeImageFilename", "Texture", "art/textures/skies/clouds/cloud3" );
}
// Returns true if any object of the given class already exists in the active
// sim group (linear scan).
function ObjectBuilderGui::checkExists( %this, %classname ) {
for ( %i = 0; %i < SceneEd.getActiveSimGroup().getCount(); %i++ ) {
%obj = SceneEd.getActiveSimGroup().getObject( %i );
if ( %obj.getClassName() $= %classname )
return true;
}
return false;
}
// Synapse Gaming lighting-pack builders: one datablock field each.
function ObjectBuilderGui::buildsgMissionLightingFilter(%this) {
%this.objectClassName = "sgMissionLightingFilter";
%this.addField("dataBlock", "TypeDataBlock", "sgMissionLightingFilter Data", "sgMissionFilterData");
%this.process();
}
function ObjectBuilderGui::buildsgDecalProjector(%this) {
%this.objectClassName = "sgDecalProjector";
%this.addField("dataBlock", "TypeDataBlock", "DecalData Data", "DecalData");
%this.process();
}
function ObjectBuilderGui::buildsgLightObject(%this) {
%this.objectClassName = "sgLightObject";
%this.addField("dataBlock", "TypeDataBlock", "LightObject Data", "sgLightObjectData");
%this.process();
}
// Mirror of buildScatterSky: warns when a ScatterSky already exists, then
// builds a Sun; flare/corona fields are applied post-process only.
function ObjectBuilderGui::buildSun( %this, %dontWarnAboutScatterSky ) {
if( !%dontWarnAboutScatterSky ) {
// Check for scattersky object already in the level. If there is one,
// warn the user.
initContainerTypeSearch( $TypeMasks::EnvironmentObjectType );
while( 1 ) {
%object = containerSearchNext();
if( !%object )
break;
if( %object.isMemberOfClass( "ScatterSky" ) ) {
LabMsgYesNo( "Warning",
"A Sun object will conflict with the ScatterSky object that is already in the level." SPC
"Do you still want to create a Sun object?",
%this @ ".buildSun( true );" );
return;
}
}
}
%this.objectClassName = "Sun";
%this.addField("direction", "TypeVector", "Direction", "1 1 -1");
%this.addField("color", "TypeColor", "Sun color", "0.8 0.8 0.8");
%this.addField("ambient", "TypeColor", "Ambient color", "0.2 0.2 0.2");
%this.process();
// This is a trick... any fields added after process won't show
// up as controls, but will be applied to the created object.
%this.addField( "coronaMaterial", "TypeMaterialName", "Corona Material", "Corona_Mat" );
%this.addField( "flareType", "TypeLightFlareDataPtr", "Flare", "SunFlareExample" );
}
function ObjectBuilderGui::buildLightning(%this) {
%this.objectClassName = "Lightning";
%this.addField("dataBlock", "TypeDataBlock", "Data block", "LightningData DefaultStorm");
%this.process();
}
// Shared post-process field set for WaterBlock/WaterPlane: ripple and wave
// parameters plus the three default water textures.
// NOTE(review): waveMagnitude[0..2] are declared "TypePoint2" with a scalar
// default ("0.2") while waveSpeed uses TypeFloat — possibly should be
// TypeFloat; confirm against the WaterObject field definitions.
function ObjectBuilderGui::addWaterObjectFields(%this) {
%this.addField("rippleDir[0]", "TypePoint2", "Ripple Direction", "0.000000 1.000000");
%this.addField("rippleDir[1]", "TypePoint2", "Ripple Direction", "0.707000 0.707000");
%this.addField("rippleDir[2]", "TypePoint2", "Ripple Direction", "0.500000 0.860000");
%this.addField("rippleTexScale[0]", "TypePoint2", "Ripple Texture Scale", "7.140000 7.140000");
%this.addField("rippleTexScale[1]", "TypePoint2", "Ripple Texture Scale", "6.250000 12.500000");
%this.addField("rippleTexScale[2]", "TypePoint2", "Ripple Texture Scale", "50.000000 50.000000");
%this.addField("rippleSpeed[0]", "TypeFloat", "Ripple Speed", "0.065");
%this.addField("rippleSpeed[1]", "TypeFloat", "Ripple Speed", "0.09");
%this.addField("rippleSpeed[2]", "TypeFloat", "Ripple Speed", "0.04");
%this.addField("rippleMagnitude[0]", "TypeFloat", "Ripple Magnitude", "1.0");
%this.addField("rippleMagnitude[1]", "TypeFloat", "Ripple Magnitude", "1.0");
%this.addField("rippleMagnitude[2]", "TypeFloat", "Ripple Magnitude", "0.3");
%this.addField("overallRippleMagnitude", "TypeFloat", "Overall Ripple Magnitude", "1.0");
%this.addField("waveDir[0]", "TypePoint2", "Wave Direction", "0.000000 1.000000");
%this.addField("waveDir[1]", "TypePoint2", "Wave Direction", "0.707000 0.707000");
%this.addField("waveDir[2]", "TypePoint2", "Wave Direction", "0.500000 0.860000");
%this.addField("waveMagnitude[0]", "TypePoint2", "Wave Magnitude", "0.2");
%this.addField("waveMagnitude[1]", "TypePoint2", "Wave Magnitude", "0.2");
%this.addField("waveMagnitude[2]", "TypePoint2", "Wave Magnitude", "0.2");
%this.addField("waveSpeed[0]", "TypeFloat", "Wave Speed", "1");
%this.addField("waveSpeed[1]", "TypeFloat", "Wave Speed", "1");
%this.addField("waveSpeed[2]", "TypeFloat", "Wave Speed", "1");
%this.addField("overallWaveMagnitude", "TypeFloat", "Overall Wave Magnitude", "1.0");
%this.addField("rippleTex", "TypeImageFilename", "Ripple Texture", "core/art/textures/water/ripple" );
%this.addField("depthGradientTex", "TypeImageFilename", "Depth Gradient Texture", "core/art/textures/water/depthcolor_ramp" );
%this.addField("foamTex", "TypeImageFilename", "Foam Texture", "core/art/textures/water/foam" );
}
function ObjectBuilderGui::buildWaterBlock(%this) {
%this.objectClassName = "WaterBlock";
%this.addField( "baseColor", "TypeColorI", "Base Color", "45 108 171 255" );
%this.process();
// This is a trick... any fields added after process won't show
// up as controls, but will be applied to the created object.
%this.addWaterObjectFields();
}
function ObjectBuilderGui::buildWaterPlane(%this) {
%this.objectClassName = "WaterPlane";
%this.addField( "baseColor", "TypeColorI", "Base Color", "45 108 171 255" );
%this.process();
// This is a trick... any fields added after process won't show
// up as controls, but will be applied to the created object.
%this.addWaterObjectFields();
}
// Terrain creation attaches the terrain editor via createCallback.
function ObjectBuilderGui::buildTerrainBlock(%this) {
%this.objectClassName = "TerrainBlock";
%this.createCallback = "ETerrainEditor.attachTerrain();";
%this.addField("terrainFile", "TypeFile", "Terrain file", "", "*.ter");
%this.addField("squareSize", "TypeInt", "Square size", "8");
%this.process();
}
function ObjectBuilderGui::buildGroundCover( %this ) {
%this.objectClassName = "GroundCover";
%this.addField( "material", "TypeMaterialName", "Material Name", "" );
%this.addField( "shapeFilename[0]", "TypeFile", "Shape File [Optional]", "", "*.*");
%this.process();
// This is a trick... any fields added after process won't show
// up as controls, but will be applied to the created object.
%this.addField( "probability[0]", "TypeFloat", "Probability", "1" );
}
function ObjectBuilderGui::buildPrecipitation(%this) {
%this.objectClassName = "Precipitation";
%this.addField("dataBlock", "TypeDataBlock", "Precipitation data", "PrecipitationData");
%this.process();
}
function ObjectBuilderGui::buildParticleEmitterNode(%this) {
%this.objectClassName = "ParticleEmitterNode";
%this.addField("dataBlock", "TypeDataBlock", "datablock", "ParticleEmitterNodeData");
%this.addField("emitter", "TypeDataBlock", "Particle data", "ParticleEmitterData");
%this.process();
}
function ObjectBuilderGui::buildParticleSimulation(%this) {
%this.objectClassName = "ParticleSimulation";
%this.addField("datablock", "TypeDataBlock", "datablock", "ParticleSimulationData");
%this.process();
}
//------------------------------------------------------------------------------
// Mission
//------------------------------------------------------------------------------
// Mission-object builders. The polyhedron defaults below describe a unit box
// (origin + three edge vectors).
function ObjectBuilderGui::buildTrigger(%this) {
%this.objectClassName = "Trigger";
%this.addField("dataBlock", "TypeDataBlock", "Data Block", "TriggerData defaultTrigger");
%this.addField("polyhedron", "TypeTriggerPolyhedron", "Polyhedron", "-0.5 0.5 0.0 1.0 0.0 0.0 0.0 -1.0 0.0 0.0 0.0 1.0");
%this.process();
}
function ObjectBuilderGui::buildPhysicalZone(%this) {
%this.objectClassName = "PhysicalZone";
%this.addField("polyhedron", "TypeTriggerPolyhedron", "Polyhedron", "-0.5 0.5 0.0 1.0 0.0 0.0 0.0 -1.0 0.0 0.0 0.0 1.0");
%this.process();
}
function ObjectBuilderGui::buildCamera(%this) {
%this.objectClassName = "Camera";
%this.addField("position", "TypePoint3", "Position", "0 0 0");
%this.addField("rotation", "TypePoint4", "Rotation", "1 0 0 0");
%this.addField("dataBlock", "TypeDataBlock", "Data block", "CameraData Observer");
%this.addField("team", "TypeInt", "Team", "0");
%this.process();
}
// Singleton builders: refuse to create a second LevelInfo/TimeOfDay and show
// a prompt instead (checkExists scans the active sim group).
function ObjectBuilderGui::buildLevelInfo(%this) {
if ( %this.checkExists( "LevelInfo" ) ) {
GenericPromptDialog-->GenericPromptWindow.text = "Warning";
GenericPromptDialog-->GenericPromptText.text = "There is already an existing LevelInfo in the scene.";
Canvas.pushDialog( GenericPromptDialog );
return;
}
OBObjectName.setValue( "theLevelInfo" );
%this.objectClassName = "LevelInfo";
%this.process();
}
function ObjectBuilderGui::buildTimeOfDay(%this) {
if ( %this.checkExists( "TimeOfDay" ) ) {
GenericPromptDialog-->GenericPromptWindow.text = "Warning";
GenericPromptDialog-->GenericPromptText.text = "There is already an existing TimeOfDay in the scene.";
Canvas.pushDialog( GenericPromptDialog );
return;
}
%this.objectClassName = "TimeOfDay";
%this.process();
}
//------------------------------------------------------------------------------
// System
//------------------------------------------------------------------------------
function ObjectBuilderGui::buildPhysicsEntity(%this) {
%this.objectClassName = "PhysicsEntity";
%this.addField("dataBlock", "TypeDataBlock", "Data block", "PhysicsEntityData");
%this.process();
}
//------------------------------------------------------------------------------
// Functions to allow scripted/datablock objects to be instantiated
//------------------------------------------------------------------------------
// Datablock ::create helpers: instantiate the matching scene object in the
// active sim group and return it.
function PhysicsEntityData::create(%data) {
%obj = new PhysicsEntity() {
dataBlock = %data;
parentGroup = Scene.getActiveSimGroup();
};
return %obj;
}
function StaticShapeData::create(%data) {
%obj = new StaticShape() {
dataBlock = %data;
parentGroup = Scene.getActiveSimGroup();
};
return %obj;
}
// Instantiates the marker object matching the given MissionMarkerData block
// name; unknown block names fall back to a WayPoint.
function MissionMarkerData::create(%block) {
switch$(%block) {
case "WayPointMarker":
%obj = new WayPoint() {
dataBlock = %block;
parentGroup = Scene.getActiveSimGroup();
};
return(%obj);
case "SpawnSphereMarker":
%obj = new SpawnSphere() {
// Normalized "datablock" to "dataBlock" for consistency with the
// other branches (TorqueScript field names are case-insensitive).
dataBlock = %block;
parentGroup = Scene.getActiveSimGroup();
};
return(%obj);
default:
%obj = new WayPoint() {
dataBlock = %block;
parentGroup = Scene.getActiveSimGroup();
};
return(%obj);
}
// Fix: removed the unreachable "return(-1);" — every switch branch returns.
}
// Items created from the editor are static and rotating by default.
function ItemData::create(%data) {
%obj = new Item([%data.getName()]) {
dataBlock = %data;
parentGroup = Scene.getActiveSimGroup();
static = true;
rotate = true;
};
return %obj;
}
function TurretShapeData::create(%block) {
%obj = new TurretShape() {
dataBlock = %block;
static = true;
respawn = true;
parentGroup = Scene.getActiveSimGroup();
};
return %obj;
}
function AITurretShapeData::create(%block) {
%obj = new AITurretShape() {
dataBlock = %block;
static = true;
respawn = true;
parentGroup = Scene.getActiveSimGroup();
};
return %obj;
}
function WheeledVehicleData::create(%block) {
%obj = new WheeledVehicle() {
dataBlock = %block;
parentGroup = Scene.getActiveSimGroup();
};
return %obj;
}
function FlyingVehicleData::create(%block) {
%obj = new FlyingVehicle() {
dataBlock = %block;
parentGroup = Scene.getActiveSimGroup();
};
return(%obj);
}
function HoverVehicleData::create(%block) {
%obj = new HoverVehicle() {
dataBlock = %block;
parentGroup = Scene.getActiveSimGroup();
};
return(%obj);
}
function RigidShapeData::create(%data) {
%obj = new RigidShape() {
dataBlock = %data;
parentGroup = Scene.getActiveSimGroup();
};
return %obj;
}
// NOTE(review): unlike every sibling ::create here, this parents to
// EWCreatorWindow.objectGroup rather than Scene.getActiveSimGroup() —
// confirm whether that difference is intentional.
function PhysicsShapeData::create( %datablock )
{
%obj = new Px3Shape()
{
dataBlock = %datablock;
parentGroup = EWCreatorWindow.objectGroup;
invulnerable = false;
damageRadius = 0;
areaImpulse = 0;
radiusDamage = 0;
minDamageAmount = 0;
};
return %obj;
}
function Px3ShapeData::create( %datablock )
{
%obj = new Px3Shape(){
dataBlock = %datablock;
parentGroup = Scene.getActiveSimGroup();
invulnerable = false;
damageRadius = 0;
areaImpulse = 0;
radiusDamage = 0;
minDamageAmount = 0;
};
return %obj;
}
function RigidPhysicsShapeData::create( %datablock ) {
%obj = new RigidPhysicsShape() {
dataBlock = %datablock;
parentGroup = Scene.getActiveSimGroup();
invulnerable = false;
damageRadius = 0;
areaImpulse = 0;
radiusDamage = 0;
minDamageAmount = 0;
};
return %obj;
}
// %dataBlock vs the %datablock parameter: TorqueScript identifiers are
// case-insensitive, so this resolves to the same local variable.
function ProximityMineData::create( %datablock ) {
%obj = new ProximityMine() {
dataBlock = %dataBlock;
parentGroup = Scene.getActiveSimGroup();
static = true; // mines created by the editor are static, and armed immediately
};
return %obj;
}
| |
using System;
using System.Runtime.CompilerServices;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
namespace CocosSharp
{
public class CCPrimitiveBatch : IDisposable
{
// Default vertex-buffer capacity when the caller does not supply one.
const int DefaultBufferSize = 500;
// a basic effect, which contains the shaders that we will use to draw our
// primitives.
readonly BasicEffect basicEffect;
// the device that we will issue draw calls to.
readonly GraphicsDevice device;
// CPU-side vertex buffers, filled by AddVertex and drained by the flushes.
readonly CCV3F_C4B[] lineVertices;
readonly CCV3F_C4B[] triangleVertices;
// hasBegun is flipped to true once Begin is called, and is used to make
// sure users don't call End before Begin is called.
bool hasBegun;
bool isDisposed;
// Count of vertices currently buffered for each primitive kind.
int lineVertsCount;
int triangleVertsCount;
#region Properties
internal CCDrawManager DrawManager { get; set; }
#endregion Properties
#region Constructors
/// <summary>
/// Creates a batch bound to the given draw manager's graphics device.
/// Buffer sizes are rounded down to whole primitives (3 vertices per
/// triangle, 2 per line).
/// </summary>
internal CCPrimitiveBatch(CCDrawManager drawManager, int bufferSize=DefaultBufferSize)
{
    DrawManager = drawManager;
    // Fix: the exception previously named a nonexistent parameter
    // ("graphicsDevice"), and a null drawManager crashed with an NRE on the
    // property access before the check was reached.
    if (drawManager == null || drawManager.XnaGraphicsDevice == null)
    {
        throw new ArgumentNullException("drawManager");
    }
    device = drawManager.XnaGraphicsDevice;
    triangleVertices = new CCV3F_C4B[bufferSize - bufferSize % 3];
    lineVertices = new CCV3F_C4B[bufferSize - bufferSize % 2];
    // set up a new basic effect, and enable vertex colors.
    basicEffect = new BasicEffect(drawManager.XnaGraphicsDevice);
    basicEffect.VertexColorEnabled = true;
}
/// <summary>Creates a batch bound to the shared draw manager.</summary>
public CCPrimitiveBatch(int bufferSize = DefaultBufferSize)
    : this(CCDrawManager.SharedDrawManager, bufferSize)
{
}
#endregion Constructors
#region Cleaning up
/// <summary>Releases the effect and suppresses finalization.</summary>
public void Dispose()
{
    Dispose(disposing: true);
    GC.SuppressFinalize(this);
}
// Standard dispose pattern: only managed cleanup, and only once.
protected virtual void Dispose(bool disposing)
{
    if (!disposing || isDisposed)
        return;

    if (basicEffect != null)
    {
        basicEffect.Dispose();
    }
    isDisposed = true;
}
#endregion Cleaning up
// Overrides the effect's projection matrix (UpdateMatrix will overwrite it
// again on the next Begin).
public void SetProjection(ref Matrix projection)
{
    basicEffect.Projection = projection;
}
/// <summary>
/// Prepares the effect for a new batch of primitives. Must be paired with
/// End; throws if a batch is already open.
/// </summary>
public void Begin()
{
    if (hasBegun)
        throw new InvalidOperationException("End must be called before Begin can be called again.");

    // Push the draw manager's current matrices into the effect and apply it.
    UpdateMatrix();

    // From here on AddVertex, Flush and End are legal.
    hasBegun = true;
}
/// <summary>
/// Copies the draw manager's projection/view/world matrices into the effect
/// and applies its first pass.
/// </summary>
public void UpdateMatrix()
{
    // Fix: removed a stray empty statement ("; ;") after the assignment.
    basicEffect.Projection = DrawManager.ProjectionMatrix;
    basicEffect.View = DrawManager.ViewMatrix;
    basicEffect.World = DrawManager.WorldMatrix;
    basicEffect.CurrentTechnique.Passes[0].Apply();
}
// True while a Begin/End batch is open (i.e. AddVertex is currently legal).
public bool IsReady()
{
    return hasBegun;
}
// Copy-by-value convenience overload; forwards to the ref-based implementation.
public void AddVertex(CCVector2 vertex, CCColor4B color, PrimitiveType primitiveType)
{
    AddVertex(ref vertex, color, primitiveType);
}
/// <summary>
/// Buffers one vertex for the given primitive type, flushing the corresponding
/// buffer first when it is full. Triangles are placed at z = -0.1, lines at z = 0.
/// </summary>
/// <param name="vertex">2D position of the vertex.</param>
/// <param name="color">Vertex color.</param>
/// <param name="primitiveType">TriangleList or LineList; strip types are rejected.</param>
/// <exception cref="InvalidOperationException">Thrown when called outside Begin/End.</exception>
/// <exception cref="NotSupportedException">Thrown for LineStrip or TriangleStrip.</exception>
public void AddVertex(ref CCVector2 vertex, CCColor4B color, PrimitiveType primitiveType)
{
    if (!hasBegun)
        throw new InvalidOperationException("Begin must be called before AddVertex can be called.");

    if (primitiveType == PrimitiveType.LineStrip || primitiveType == PrimitiveType.TriangleStrip)
        throw new NotSupportedException("The specified primitiveType is not supported by PrimitiveBatch.");

    switch (primitiveType)
    {
        case PrimitiveType.TriangleList:
            if (triangleVertsCount >= triangleVertices.Length)
            {
                FlushTriangles();
            }
            triangleVertices[triangleVertsCount].Vertices = new CCVertex3F(vertex.X, vertex.Y, -0.1f);
            triangleVertices[triangleVertsCount].Colors = color;
            triangleVertsCount++;
            break;

        case PrimitiveType.LineList:
            if (lineVertsCount >= lineVertices.Length)
            {
                FlushLines();
            }
            lineVertices[lineVertsCount].Vertices = new CCVertex3F(vertex.X, vertex.Y, 0.0f);
            lineVertices[lineVertsCount].Colors = color;
            lineVertsCount++;
            break;
    }
}
// 3D-vertex convenience overload. NOTE(review): the Z component of the input is
// discarded here — the 2D overload assigns its own fixed depth — confirm intended.
public void AddVertex(CCVertex3F vertex, CCColor4B color, PrimitiveType primitiveType)
{
    AddVertex(new CCVector2(vertex.X, vertex.Y), color, primitiveType);
}
// End is called once all the primitives have been drawn using AddVertex.
// it will call Flush to actually submit the draw call to the graphics card, and
// then tell the basic effect to end.
/// <summary>
/// Closes the batch: submits all buffered triangles and lines to the device
/// and clears the "begun" flag so Begin may be called again.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when no batch is open.</exception>
public void End()
{
    if (!hasBegun)
        throw new InvalidOperationException("Begin must be called before End can be called.");

    FlushTriangles();
    FlushLines();
    hasBegun = false;
}
/// <summary>
/// Issues one DrawUserPrimitives call for every complete triangle in the buffer.
/// Any 1–2 leftover vertices stay buffered for the next flush.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when no batch is open.</exception>
void FlushTriangles()
{
    if (!hasBegun)
        throw new InvalidOperationException("Begin must be called before Flush can be called.");

    if (triangleVertsCount < 3)
        return;

    int primitiveCount = triangleVertsCount / 3;
    // submit the draw call to the graphics card
#if NETFX_CORE
    device.SamplerStates[0] = SamplerState.LinearClamp;
#else
    device.SamplerStates[0] = SamplerState.AnisotropicClamp;
#endif
    device.DrawUserPrimitives(PrimitiveType.TriangleList, triangleVertices, 0, primitiveCount);
    triangleVertsCount -= primitiveCount * 3;
    DrawManager.DrawCount++;
}
/// <summary>
/// Issues one DrawUserPrimitives call for every complete line segment in the buffer.
/// A single leftover vertex stays buffered for the next flush.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when no batch is open.</exception>
void FlushLines()
{
    if (!hasBegun)
        throw new InvalidOperationException("Begin must be called before Flush can be called.");

    if (lineVertsCount < 2)
        return;

    int primitiveCount = lineVertsCount / 2;
    // submit the draw call to the graphics card
#if NETFX_CORE
    device.SamplerStates[0] = SamplerState.LinearClamp;
#else
    device.SamplerStates[0] = SamplerState.AnisotropicClamp;
#endif
    device.DrawUserPrimitives(PrimitiveType.LineList, lineVertices, 0, primitiveCount);
    lineVertsCount -= primitiveCount * 2;
    DrawManager.DrawCount++;
}
}
}
| |
// <file>
// <copyright see="prj:///doc/copyright.txt"/>
// <license see="prj:///doc/license.txt"/>
// <owner name="Matthew Ward" email="mrward@users.sourceforge.net"/>
// <version>$Revision: 1965 $</version>
// </file>
using ICSharpCode.AvalonEdit.CodeCompletion;
using System;
using System.Collections;
namespace InnovatorAdmin.Editor
{
/// <summary>
/// A strongly-typed collection that stores <see cref='XmlCompletionData'/> objects.
/// Duplicate entries (by <c>Text</c>) are silently ignored by <see cref='Add'/>.
/// </summary>
[Serializable()]
public class XmlCompletionDataCollection : CollectionBase {

    /// <summary>
    /// Initializes a new, empty instance of <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    public XmlCompletionDataCollection()
    {
    }

    /// <summary>
    /// Initializes a new instance of <see cref='XmlCompletionDataCollection'/> based on another <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    /// <param name='val'>
    /// A <see cref='XmlCompletionDataCollection'/> from which the contents are copied
    /// </param>
    public XmlCompletionDataCollection(XmlCompletionDataCollection val)
    {
        this.AddRange(val);
    }

    /// <summary>
    /// Initializes a new instance of <see cref='XmlCompletionDataCollection'/> containing any array of <see cref='XmlCompletionData'/> objects.
    /// </summary>
    /// <param name='val'>
    /// An array of <see cref='XmlCompletionData'/> objects with which to initialize the collection
    /// </param>
    public XmlCompletionDataCollection(XmlCompletionData[] val)
    {
        this.AddRange(val);
    }

    /// <summary>
    /// Represents the entry at the specified index of the <see cref='XmlCompletionData'/>.
    /// </summary>
    /// <param name='index'>The zero-based index of the entry to locate in the collection.</param>
    /// <value>The entry at the specified index of the collection.</value>
    /// <exception cref='ArgumentOutOfRangeException'><paramref name='index'/> is outside the valid range of indexes for the collection.</exception>
    public XmlCompletionData this[int index] {
        get {
            return ((XmlCompletionData)(List[index]));
        }
        set {
            List[index] = value;
        }
    }

    /// <summary>
    /// Adds a <see cref='XmlCompletionData'/> with the specified value to the
    /// <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    /// <remarks>
    /// If the completion data already exists in the collection it is not added.
    /// </remarks>
    /// <param name='val'>The <see cref='XmlCompletionData'/> to add.</param>
    /// <returns>The index at which the new element was inserted, or -1 if it was a duplicate and not added.</returns>
    /// <seealso cref='XmlCompletionDataCollection.AddRange'/>
    public int Add(XmlCompletionData val)
    {
        int index = -1;
        if (!Contains(val)) {
            index = List.Add(val);
        }
        return index;
    }

    /// <summary>
    /// Copies the elements of an array to the end of the <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    /// <param name='val'>
    /// An array of type <see cref='XmlCompletionData'/> containing the objects to add to the collection.
    /// </param>
    /// <seealso cref='XmlCompletionDataCollection.Add'/>
    public void AddRange(XmlCompletionData[] val)
    {
        for (int i = 0; i < val.Length; i++) {
            this.Add(val[i]);
        }
    }

    /// <summary>
    /// Adds the contents of another <see cref='XmlCompletionDataCollection'/> to the end of the collection.
    /// </summary>
    /// <param name='val'>
    /// A <see cref='XmlCompletionDataCollection'/> containing the objects to add to the collection.
    /// </param>
    /// <seealso cref='XmlCompletionDataCollection.Add'/>
    public void AddRange(XmlCompletionDataCollection val)
    {
        for (int i = 0; i < val.Count; i++)
        {
            this.Add(val[i]);
        }
    }

    /// <summary>
    /// Gets a value indicating whether the
    /// <see cref='XmlCompletionDataCollection'/> contains the specified <see cref='XmlCompletionData'/>.
    /// </summary>
    /// <remarks>
    /// Membership is decided by the item's <c>Text</c>, not by reference equality;
    /// an item with a null or empty <c>Text</c> is never reported as contained.
    /// </remarks>
    /// <param name='val'>The <see cref='XmlCompletionData'/> to locate.</param>
    /// <returns>
    /// <see langword='true'/> if the <see cref='XmlCompletionData'/> is contained in the collection;
    /// otherwise, <see langword='false'/>.
    /// </returns>
    /// <seealso cref='XmlCompletionDataCollection.IndexOf'/>
    public bool Contains(XmlCompletionData val)
    {
        if (val.Text != null) {
            if (val.Text.Length > 0) {
                return Contains(val.Text);
            }
        }
        return false;
    }

    /// <summary>
    /// Gets a value indicating whether the collection contains an item whose
    /// <c>Text</c> equals <paramref name='name'/> (ordinal comparison via ==).
    /// </summary>
    /// <param name='name'>The completion text to locate.</param>
    /// <returns><see langword='true'/> if a matching item exists; otherwise <see langword='false'/>.</returns>
    public bool Contains(string name)
    {
        bool contains = false;
        foreach (XmlCompletionData data in this) {
            if (data.Text != null) {
                if (data.Text.Length > 0) {
                    if (data.Text == name) {
                        contains = true;
                        break;
                    }
                }
            }
        }
        return contains;
    }

    /// <summary>
    /// Copies the <see cref='XmlCompletionDataCollection'/> values to a one-dimensional <see cref='Array'/> instance at the
    /// specified index.
    /// </summary>
    /// <param name='array'>The one-dimensional <see cref='Array'/> that is the destination of the values copied from <see cref='XmlCompletionDataCollection'/>.</param>
    /// <param name='index'>The index in <paramref name='array'/> where copying begins.</param>
    /// <exception cref='ArgumentException'>
    /// <para><paramref name='array'/> is multidimensional.</para>
    /// <para>-or-</para>
    /// <para>The number of elements in the <see cref='XmlCompletionDataCollection'/> is greater than
    /// the available space between <paramref name='index'/> and the end of
    /// <paramref name='array'/>.</para>
    /// </exception>
    /// <exception cref='ArgumentNullException'><paramref name='array'/> is <see langword='null'/>. </exception>
    /// <exception cref='ArgumentOutOfRangeException'><paramref name='index'/> is less than <paramref name='array'/>'s lower bound. </exception>
    /// <seealso cref='Array'/>
    public void CopyTo(XmlCompletionData[] array, int index)
    {
        List.CopyTo(array, index);
    }

    /// <summary>
    /// Copies the <see cref='XmlCompletionDataCollection'/> values to a one-dimensional <see cref='Array'/> instance at the
    /// specified index, typed as <see cref='ICompletionData'/>.
    /// </summary>
    public void CopyTo(ICompletionData[] array, int index)
    {
        List.CopyTo(array, index);
    }

    /// <summary>
    /// Returns the index of a <see cref='XmlCompletionData'/> in
    /// the <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    /// <param name='val'>The <see cref='XmlCompletionData'/> to locate.</param>
    /// <returns>
    /// The index of the <see cref='XmlCompletionData'/> of <paramref name='val'/> in the
    /// <see cref='XmlCompletionDataCollection'/>, if found; otherwise, -1.
    /// </returns>
    /// <seealso cref='XmlCompletionDataCollection.Contains'/>
    public int IndexOf(XmlCompletionData val)
    {
        return List.IndexOf(val);
    }

    /// <summary>
    /// Inserts a <see cref='XmlCompletionData'/> into the <see cref='XmlCompletionDataCollection'/> at the specified index.
    /// </summary>
    /// <remarks>
    /// Unlike <see cref='Add'/>, this performs no duplicate check.
    /// </remarks>
    /// <param name='index'>The zero-based index where <paramref name='val'/> should be inserted.</param>
    /// <param name='val'>The <see cref='XmlCompletionData'/> to insert.</param>
    /// <seealso cref='XmlCompletionDataCollection.Add'/>
    public void Insert(int index, XmlCompletionData val)
    {
        List.Insert(index, val);
    }

    /// <summary>
    /// Returns an array of <see cref="ICompletionData"/> items.
    /// </summary>
    /// <returns>A new array containing every item in the collection.</returns>
    public ICompletionData[] ToArray()
    {
        ICompletionData[] data = new ICompletionData[Count];
        CopyTo(data, 0);
        return data;
    }

    /// <summary>
    /// Returns an enumerator that can iterate through the <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    /// <seealso cref='IEnumerator'/>
    public new XmlCompletionDataEnumerator GetEnumerator()
    {
        return new XmlCompletionDataEnumerator(this);
    }

    /// <summary>
    /// Removes a specific <see cref='XmlCompletionData'/> from the <see cref='XmlCompletionDataCollection'/>.
    /// </summary>
    /// <param name='val'>The <see cref='XmlCompletionData'/> to remove from the <see cref='XmlCompletionDataCollection'/>.</param>
    /// <exception cref='ArgumentException'><paramref name='val'/> is not found in the Collection.</exception>
    public void Remove(XmlCompletionData val)
    {
        List.Remove(val);
    }

    /// <summary>
    /// Enumerator that can iterate through a XmlCompletionDataCollection.
    /// </summary>
    /// <seealso cref='IEnumerator'/>
    /// <seealso cref='XmlCompletionDataCollection'/>
    /// <seealso cref='XmlCompletionData'/>
    public class XmlCompletionDataEnumerator : IEnumerator
    {
        // Underlying non-generic enumerator over the wrapped collection.
        IEnumerator baseEnumerator;

        IEnumerable temp;

        /// <summary>
        /// Initializes a new instance of <see cref='XmlCompletionDataEnumerator'/>.
        /// </summary>
        public XmlCompletionDataEnumerator(XmlCompletionDataCollection mappings)
        {
            this.temp = ((IEnumerable)(mappings));
            this.baseEnumerator = temp.GetEnumerator();
        }

        /// <summary>
        /// Gets the current <see cref='XmlCompletionData'/> in the <seealso cref='XmlCompletionDataCollection'/>.
        /// </summary>
        public XmlCompletionData Current {
            get {
                return ((XmlCompletionData)(baseEnumerator.Current));
            }
        }

        object IEnumerator.Current {
            get {
                return baseEnumerator.Current;
            }
        }

        /// <summary>
        /// Advances the enumerator to the next <see cref='XmlCompletionData'/> of the <see cref='XmlCompletionDataCollection'/>.
        /// </summary>
        public bool MoveNext()
        {
            return baseEnumerator.MoveNext();
        }

        /// <summary>
        /// Sets the enumerator to its initial position, which is before the first element in the <see cref='XmlCompletionDataCollection'/>.
        /// </summary>
        public void Reset()
        {
            baseEnumerator.Reset();
        }
    }
}
}
| |
/* Generated SBE (Simple Binary Encoding) message codec */
using System;
using System.Text;
using System.Collections.Generic;
using Adaptive.Agrona;
namespace Adaptive.Cluster.Codecs {
/// <summary>
/// Generated SBE flyweight encoder for the SnapshotMarker message.
/// NOTE(review): this class is produced by the SBE code generator (see header
/// comment); edit the message schema and regenerate rather than hand-modifying
/// the field offsets below.
/// </summary>
public class SnapshotMarkerEncoder
{
    // Fixed-size block length and schema identity, taken from the SBE schema.
    public const ushort BLOCK_LENGTH = 40;
    public const ushort TEMPLATE_ID = 100;
    public const ushort SCHEMA_ID = 111;
    public const ushort SCHEMA_VERSION = 7;

    private SnapshotMarkerEncoder _parentMessage;
    private IMutableDirectBuffer _buffer;
    protected int _offset;
    protected int _limit;

    public SnapshotMarkerEncoder()
    {
        _parentMessage = this;
    }

    public ushort SbeBlockLength()
    {
        return BLOCK_LENGTH;
    }

    public ushort SbeTemplateId()
    {
        return TEMPLATE_ID;
    }

    public ushort SbeSchemaId()
    {
        return SCHEMA_ID;
    }

    public ushort SbeSchemaVersion()
    {
        return SCHEMA_VERSION;
    }

    public string SbeSemanticType()
    {
        return "";
    }

    public IMutableDirectBuffer Buffer()
    {
        return _buffer;
    }

    public int Offset()
    {
        return _offset;
    }

    // Positions this flyweight over the buffer at the given offset.
    public SnapshotMarkerEncoder Wrap(IMutableDirectBuffer buffer, int offset)
    {
        this._buffer = buffer;
        this._offset = offset;
        Limit(offset + BLOCK_LENGTH);
        return this;
    }

    // Writes the standard SBE message header then wraps the body after it.
    public SnapshotMarkerEncoder WrapAndApplyHeader(
        IMutableDirectBuffer buffer, int offset, MessageHeaderEncoder headerEncoder)
    {
        headerEncoder
            .Wrap(buffer, offset)
            .BlockLength(BLOCK_LENGTH)
            .TemplateId(TEMPLATE_ID)
            .SchemaId(SCHEMA_ID)
            .Version(SCHEMA_VERSION);

        return Wrap(buffer, offset + MessageHeaderEncoder.ENCODED_LENGTH);
    }

    public int EncodedLength()
    {
        return _limit - _offset;
    }

    public int Limit()
    {
        return _limit;
    }

    public void Limit(int limit)
    {
        this._limit = limit;
    }

    // Field: typeId (int64 at offset 0).
    public static int TypeIdEncodingOffset()
    {
        return 0;
    }

    public static int TypeIdEncodingLength()
    {
        return 8;
    }

    public static long TypeIdNullValue()
    {
        return -9223372036854775808L;
    }

    public static long TypeIdMinValue()
    {
        return -9223372036854775807L;
    }

    public static long TypeIdMaxValue()
    {
        return 9223372036854775807L;
    }

    public SnapshotMarkerEncoder TypeId(long value)
    {
        _buffer.PutLong(_offset + 0, value, ByteOrder.LittleEndian);
        return this;
    }

    // Field: logPosition (int64 at offset 8).
    public static int LogPositionEncodingOffset()
    {
        return 8;
    }

    public static int LogPositionEncodingLength()
    {
        return 8;
    }

    public static long LogPositionNullValue()
    {
        return -9223372036854775808L;
    }

    public static long LogPositionMinValue()
    {
        return -9223372036854775807L;
    }

    public static long LogPositionMaxValue()
    {
        return 9223372036854775807L;
    }

    public SnapshotMarkerEncoder LogPosition(long value)
    {
        _buffer.PutLong(_offset + 8, value, ByteOrder.LittleEndian);
        return this;
    }

    // Field: leadershipTermId (int64 at offset 16).
    public static int LeadershipTermIdEncodingOffset()
    {
        return 16;
    }

    public static int LeadershipTermIdEncodingLength()
    {
        return 8;
    }

    public static long LeadershipTermIdNullValue()
    {
        return -9223372036854775808L;
    }

    public static long LeadershipTermIdMinValue()
    {
        return -9223372036854775807L;
    }

    public static long LeadershipTermIdMaxValue()
    {
        return 9223372036854775807L;
    }

    public SnapshotMarkerEncoder LeadershipTermId(long value)
    {
        _buffer.PutLong(_offset + 16, value, ByteOrder.LittleEndian);
        return this;
    }

    // Field: index (int32 at offset 24).
    public static int IndexEncodingOffset()
    {
        return 24;
    }

    public static int IndexEncodingLength()
    {
        return 4;
    }

    public static int IndexNullValue()
    {
        return -2147483648;
    }

    public static int IndexMinValue()
    {
        return -2147483647;
    }

    public static int IndexMaxValue()
    {
        return 2147483647;
    }

    public SnapshotMarkerEncoder Index(int value)
    {
        _buffer.PutInt(_offset + 24, value, ByteOrder.LittleEndian);
        return this;
    }

    // Field: mark (SnapshotMark enum as int32 at offset 28).
    public static int MarkEncodingOffset()
    {
        return 28;
    }

    public static int MarkEncodingLength()
    {
        return 4;
    }

    public SnapshotMarkerEncoder Mark(SnapshotMark value)
    {
        _buffer.PutInt(_offset + 28, (int)value, ByteOrder.LittleEndian);
        return this;
    }

    // Field: timeUnit (ClusterTimeUnit enum as int32 at offset 32).
    public static int TimeUnitEncodingOffset()
    {
        return 32;
    }

    public static int TimeUnitEncodingLength()
    {
        return 4;
    }

    public SnapshotMarkerEncoder TimeUnit(ClusterTimeUnit value)
    {
        _buffer.PutInt(_offset + 32, (int)value, ByteOrder.LittleEndian);
        return this;
    }

    // Field: appVersion (int32 at offset 36).
    public static int AppVersionEncodingOffset()
    {
        return 36;
    }

    public static int AppVersionEncodingLength()
    {
        return 4;
    }

    public static int AppVersionNullValue()
    {
        return 0;
    }

    public static int AppVersionMinValue()
    {
        return 1;
    }

    public static int AppVersionMaxValue()
    {
        return 16777215;
    }

    public SnapshotMarkerEncoder AppVersion(int value)
    {
        _buffer.PutInt(_offset + 36, value, ByteOrder.LittleEndian);
        return this;
    }

    public override string ToString()
    {
        return AppendTo(new StringBuilder(100)).ToString();
    }

    // Renders the encoded message by decoding it back through the matching decoder.
    public StringBuilder AppendTo(StringBuilder builder)
    {
        SnapshotMarkerDecoder writer = new SnapshotMarkerDecoder();
        writer.Wrap(_buffer, _offset, BLOCK_LENGTH, SCHEMA_VERSION);

        return writer.AppendTo(builder);
    }
}
}
| |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using CommandLine;
using Google.Ads.GoogleAds.Lib;
using Google.Ads.GoogleAds.V10.Common;
using Google.Ads.GoogleAds.V10.Errors;
using Google.Ads.GoogleAds.V10.Resources;
using Google.Ads.GoogleAds.V10.Services;
using Google.Api.Gax;
using Grpc.Core;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using static Google.Ads.GoogleAds.V10.Enums.AffiliateLocationFeedRelationshipTypeEnum.Types;
using static Google.Ads.GoogleAds.V10.Enums.AffiliateLocationPlaceholderFieldEnum.Types;
using static Google.Ads.GoogleAds.V10.Enums.FeedOriginEnum.Types;
using static Google.Ads.GoogleAds.V10.Enums.PlaceholderTypeEnum.Types;
using static Google.Ads.GoogleAds.V10.Resources.Feed.Types;
namespace Google.Ads.GoogleAds.Examples.V10
{
/// <summary>
/// This code example adds a feed that syncs retail addresses for a given retail chain ID and
/// associates the feed with a campaign for serving affiliate location extensions.
/// </summary>
public class AddAffiliateLocationExtensions : ExampleBase
{
/// <summary>
/// Command line options for running the <see cref="AddAffiliateLocationExtensions"/>
/// example.
/// </summary>
public class Options : OptionsBase
{
/// <summary>
/// The customer ID for which the call is made.
/// </summary>
[Option("customerId", Required = true, HelpText =
"The customer ID for which the call is made.")]
public long CustomerId { get; set; }
/// <summary>
/// The retail chain ID.
/// </summary>
[Option("chainId", Required = true, HelpText =
"The retail chain ID.")]
public long ChainId { get; set; }
/// <summary>
/// The campaign ID for which the affiliate location extensions are added.
/// </summary>
[Option("campaignId", Required = true, HelpText =
"The campaign ID for which the affiliate location extensions are added.")]
public long CampaignId { get; set; }
}
/// <summary>
/// Main method, to run this code example as a standalone application.
/// </summary>
/// <param name="args">The command line arguments.</param>
public static void Main(string[] args)
{
Options options = new Options();
CommandLine.Parser.Default.ParseArguments<Options>(args).MapResult(
delegate (Options o)
{
options = o;
return 0;
}, delegate (IEnumerable<Error> errors)
{
// The customer ID for which the call is made.
options.CustomerId = long.Parse("INSERT_CUSTOMER_ID_HERE");
// The retail chain ID.
options.ChainId = long.Parse("INSERT_CHAIN_ID_HERE");
// The campaign ID for which the affiliate location extensions are added.
options.CampaignId = long.Parse("INSERT_CAMPAIGN_ID_HERE");
return 0;
});
AddAffiliateLocationExtensions codeExample = new AddAffiliateLocationExtensions();
Console.WriteLine(codeExample.Description);
codeExample.Run(new GoogleAdsClient(), options.CustomerId, options.ChainId, options.CampaignId);
}
// The maximum number of attempts to make to retrieve the FeedMappings before throwing an
// exception.
private const int MAX_FEEDMAPPING_RETRIEVAL_ATTEMPTS = 10;
/// <summary>
/// Returns a description about the code example.
/// </summary>
public override string Description =>
"This code example adds a feed that syncs retail addresses for a given retail chain " +
"ID and associates the feed with a campaign for serving affiliate location extensions.";
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="chainId">The retail chain ID.</param>
/// <param name="campaignId">
/// The campaign ID for which the affiliate location extensions are added.
/// </param>
public void Run(GoogleAdsClient client, long customerId, long chainId, long campaignId)
{
try
{
string feedResourceName = CreateAffiliateLocationExtensionFeed(
client, customerId, chainId);
// After the completion of the feed creation operation above the added feed will not
// be available for usage in a campaign feed until the feed mappings are created. We
// will wait with an exponential back-off policy until the feed mappings have been
// created.
FeedMapping feedMapping = WaitForFeedToBeReady(client, customerId,
feedResourceName);
CreateCampaignFeed(client, customerId, campaignId, feedMapping,
feedResourceName, chainId);
}
catch (GoogleAdsException e)
{
Console.WriteLine("Failure:");
Console.WriteLine($"Message: {e.Message}");
Console.WriteLine($"Failure: {e.Failure}");
Console.WriteLine($"Request ID: {e.RequestId}");
throw;
}
}
/// <summary>
/// Creates the Affiliate Location Extension feed.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="chainId">The retail chain ID.</param>
/// <returns>Resource name of the newly created Affiliate Location Extension feed.</returns>
// [START add_affiliate_location_extensions]
private static string CreateAffiliateLocationExtensionFeed(GoogleAdsClient client,
long customerId, long chainId)
{
// Optional: Delete all existing location extension feeds. This is an optional step, and
// is required for this code example to run correctly more than once.
// 1. Google Ads only allows one location extension feed per email address.
// 2. A Google Ads account cannot have a location extension feed and an affiliate
// location extension feed at the same time.
DeleteLocationExtensionFeeds(client, customerId);
// Get the FeedServiceClient.
FeedServiceClient feedService = client.GetService(Services.V10.FeedService);
// Creates a feed that will sync to retail addresses for a given retail chain ID. Do not
// add FeedAttributes to this object as Google Ads will add them automatically because
// this will be a system generated feed.
Feed feed = new Feed()
{
Name = "Affiliate Location Extension feed #" + ExampleUtilities.GetRandomString(),
AffiliateLocationFeedData = new AffiliateLocationFeedData()
{
ChainIds = { chainId },
RelationshipType = AffiliateLocationFeedRelationshipType.GeneralRetailer
},
// Since this feed's contents will be managed by Google, you must set its origin to
// GOOGLE.
Origin = FeedOrigin.Google
};
FeedOperation operation = new FeedOperation()
{
Create = feed
};
// Adds the feed.
MutateFeedsResponse response =
feedService.MutateFeeds(customerId.ToString(), new[] { operation });
// Displays the results.
string feedResourceName = response.Results[0].ResourceName;
Console.WriteLine($"Affliate location extension feed created with resource name: " +
$"{feedResourceName}.");
return feedResourceName;
}
// [END add_affiliate_location_extensions]
/// <summary>
/// Deletes the old location extension feeds.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
private static void DeleteLocationExtensionFeeds(GoogleAdsClient client, long customerId)
{
// To delete a location extension feed, you need to
// 1. Delete the CustomerFeed so that the location extensions from the feed stop
// serving.
// 2. Delete the feed so that Google Ads will no longer sync from the Business Profile
// account.
CustomerFeed[] oldCustomerFeeds =
GetLocationExtensionCustomerFeeds(client, customerId);
if (oldCustomerFeeds.Length != 0)
{
// Optional: You may also want to delete the CampaignFeeds and AdGroupFeeds.
DeleteCustomerFeeds(client, customerId, oldCustomerFeeds);
}
Feed[] feeds = GetLocationExtensionFeeds(client, customerId);
if (feeds.Length != 0)
{
RemoveFeeds(client, customerId, feeds);
}
}
/// <summary>
/// Gets the location extension feeds.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <returns>The list of location extension feeds.</returns>
private static Feed[] GetLocationExtensionFeeds(GoogleAdsClient client, long customerId)
{
List<Feed> feeds = new List<Feed>();
GoogleAdsServiceClient googleAdsService = client.GetService(
Services.V10.GoogleAdsService);
// Create the query.
string query = $"SELECT feed.resource_name, feed.status, " +
$"feed.places_location_feed_data.email_address, " +
$"feed.affiliate_location_feed_data.chain_ids " +
$" from feed where feed.status = ENABLED";
PagedEnumerable<SearchGoogleAdsResponse, GoogleAdsRow> result =
googleAdsService.Search(customerId.ToString(), query);
foreach (GoogleAdsRow row in result)
{
// A location extension feed can be identified by checking whether the
// PlacesLocationFeedData field is set (Location extensions feeds) or
// AffiliateLocationFeedData field is set (Affiliate location extension feeds)
Feed feed = row.Feed;
if (feed.PlacesLocationFeedData != null || feed.AffiliateLocationFeedData != null)
{
feeds.Add(feed);
}
}
return feeds.ToArray();
}
/// <summary>
/// Removes the feeds.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="feeds">The list of feeds to remove.</param>
private static void RemoveFeeds(GoogleAdsClient client, long customerId, Feed[] feeds)
{
List<FeedOperation> operations = new List<FeedOperation>();
foreach (Feed feed in feeds)
{
FeedOperation operation = new FeedOperation()
{
Remove = feed.ResourceName,
};
operations.Add(operation);
}
FeedServiceClient feedService = client.GetService(
Services.V10.FeedService);
feedService.MutateFeeds(customerId.ToString(), operations.ToArray());
}
/// <summary>
/// Gets the location extension customer feeds.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <returns>The list of location extension customer feeds.</returns>
private static CustomerFeed[] GetLocationExtensionCustomerFeeds(GoogleAdsClient client,
long customerId)
{
List<CustomerFeed> customerFeeds = new List<CustomerFeed>();
GoogleAdsServiceClient googleAdsService = client.GetService(
Services.V10.GoogleAdsService);
// Create the query. A location extension customer feed can be identified by filtering
// for placeholder_types=LOCATION (location extension feeds) or placeholder_types
// =AFFILIATE_LOCATION (affiliate location extension feeds)
string query = $"SELECT customer_feed.resource_name, customer_feed.feed, " +
$"customer_feed.status, customer_feed.matching_function.function_string from " +
$"customer_feed " +
$"WHERE customer_feed.placeholder_types CONTAINS " +
$"ANY(LOCATION, AFFILIATE_LOCATION) and customer_feed.status=ENABLED";
PagedEnumerable<SearchGoogleAdsResponse, GoogleAdsRow> result =
googleAdsService.Search(customerId.ToString(), query);
foreach (GoogleAdsRow row in result)
{
customerFeeds.Add(row.CustomerFeed);
}
return customerFeeds.ToArray();
}
/// <summary>
/// Deletes the customer feeds.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="customerFeeds">The customer feeds to delete.</param>
private static void DeleteCustomerFeeds(GoogleAdsClient client, long customerId,
CustomerFeed[] customerFeeds)
{
List<CustomerFeedOperation> operations = new List<CustomerFeedOperation>();
foreach (CustomerFeed customerFeed in customerFeeds)
{
CustomerFeedOperation operation = new CustomerFeedOperation()
{
Remove = customerFeed.ResourceName,
};
operations.Add(operation);
}
CustomerFeedServiceClient feedService = client.GetService(
Services.V10.CustomerFeedService);
feedService.MutateCustomerFeeds(customerId.ToString(), operations.ToArray());
}
/// <summary>
/// Gets the Affiliate Location Extension feed mapping.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="feedResourceName">The feed resource name.</param>
/// <returns>The newly created feed mapping.</returns>
// [START add_affiliate_location_extensions_1]
private static FeedMapping GetAffiliateLocationExtensionFeedMapping(GoogleAdsClient client,
long customerId, string feedResourceName)
{
// Get the GoogleAdsService.
GoogleAdsServiceClient googleAdsService = client.GetService(
Services.V10.GoogleAdsService);
// Create the query.
string query = $"SELECT feed_mapping.resource_name, " +
$"feed_mapping.attribute_field_mappings, feed_mapping.status FROM " +
$"feed_mapping WHERE feed_mapping.feed = '{feedResourceName}' and " +
$"feed_mapping.status = ENABLED and feed_mapping.placeholder_type = " +
"AFFILIATE_LOCATION LIMIT 1";
// Issue a search request.
PagedEnumerable<SearchGoogleAdsResponse, GoogleAdsRow> result =
googleAdsService.Search(customerId.ToString(), query);
// Display the results.
GoogleAdsRow googleAdsRow = result.FirstOrDefault();
return (googleAdsRow == null) ? null : googleAdsRow.FeedMapping;
}
// [END add_affiliate_location_extensions_1]
/// <summary>
/// Waits for the Affliate location extension feed to be ready.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="feedResourceName">Resource name of the feed.</param>
// [START add_affiliate_location_extensions_2]
private static FeedMapping WaitForFeedToBeReady(GoogleAdsClient client, long customerId,
string feedResourceName)
{
int numAttempts = 0;
int sleepSeconds = 0;
while (numAttempts < MAX_FEEDMAPPING_RETRIEVAL_ATTEMPTS)
{
// Once you create a feed, Google's servers will setup the feed by creating feed
// attributes and feed mapping. Once the feed mapping is created, it is ready to be
// used for creating customer feed. This process is asynchronous, so we wait until
// the feed mapping is created, peforming exponential backoff.
FeedMapping feedMapping = GetAffiliateLocationExtensionFeedMapping(
client, customerId, feedResourceName);
if (feedMapping == null)
{
numAttempts++;
sleepSeconds = (int) (5 * Math.Pow(2, numAttempts));
Console.WriteLine($"Checked: #{numAttempts} time(s). Feed is not ready " +
$"yet. Waiting {sleepSeconds} seconds before trying again.");
Thread.Sleep(sleepSeconds * 1000);
}
else
{
Console.WriteLine($"Feed {feedResourceName} is now ready.");
return feedMapping;
}
}
throw new RpcException(new Status(StatusCode.DeadlineExceeded,
$"Feed is not ready after {MAX_FEEDMAPPING_RETRIEVAL_ATTEMPTS}" +
$" retries."));
}
// [END add_affiliate_location_extensions_2]
/// <summary>
/// Creates the campaign feed.
/// </summary>
/// <param name="client">The Google Ads client.</param>
/// <param name="customerId">The customer ID for which the call is made.</param>
/// <param name="campaignId">
/// The campaign ID for which the affiliate location extensions are added.
/// </param>
/// <param name="feedMapping">
/// The affliate location extension feedmapping for <paramref name="feedResourceName"/>
/// </param>
/// <param name="feedResourceName">The feed resource name.</param>
/// <param name="chainId">The retail chain ID.</param>
// [START add_affiliate_location_extensions_3]
private static void CreateCampaignFeed(GoogleAdsClient client, long customerId,
    long campaignId, FeedMapping feedMapping, string feedResourceName, long chainId)
{
    // Get the CampaignFeedService.
    CampaignFeedServiceClient campaignFeedService = client.GetService(
        Services.V10.CampaignFeedService);

    // Build a matching function that restricts the campaign feed to locations of
    // the given retail chain, keyed on the feed attribute that stores the chain ID.
    long attributeIdForChainId = GetAttributeIdForChainId(feedMapping);
    string feedId = FeedName.Parse(feedResourceName).FeedId;

    string matchingFunction =
        $"IN(FeedAttribute[{feedId}, {attributeIdForChainId}], {chainId})";

    // Adds a CampaignFeed that associates the feed with this campaign for the
    // AFFILIATE_LOCATION placeholder type.
    CampaignFeed campaignFeed = new CampaignFeed()
    {
        Feed = feedResourceName,
        PlaceholderTypes = { PlaceholderType.AffiliateLocation },
        MatchingFunction = new MatchingFunction()
        {
            FunctionString = matchingFunction
        },
        Campaign = ResourceNames.Campaign(customerId, campaignId),
    };

    CampaignFeedOperation operation = new CampaignFeedOperation()
    {
        Create = campaignFeed
    };

    MutateCampaignFeedsResponse campaignFeedsResponse =
        campaignFeedService.MutateCampaignFeeds(
            customerId.ToString(), new[] { operation });

    // Displays the result. (Redundant trailing `return;` removed — this is a
    // void method and it was the last statement.)
    string addedCampaignFeed = campaignFeedsResponse.Results[0].ResourceName;
    Console.WriteLine($"Campaign feed created with resource name: {addedCampaignFeed}.");
}
// [END add_affiliate_location_extensions_3]
/// <summary>
/// Gets the feed attribute ID for the retail chain Id.
/// </summary>
/// <param name="feedMapping">The feed mapping.</param>
/// <returns>The feed attribute ID.</returns>
/// <exception cref="ArgumentException">
/// Affiliate location feed mapping isn't setup correctly.
/// </exception>
// [START add_affiliate_location_extensions_4]
public static long GetAttributeIdForChainId(FeedMapping feedMapping)
{
    // The chain ID lives in whichever feed attribute is mapped to the CHAIN_ID
    // affiliate-location placeholder field; scan the mappings for it.
    foreach (AttributeFieldMapping mapping in feedMapping.AttributeFieldMappings)
    {
        bool isChainIdField = mapping.AffiliateLocationField ==
            AffiliateLocationPlaceholderField.ChainId;
        if (isChainIdField)
        {
            return mapping.FeedAttributeId;
        }
    }
    // No attribute is mapped to CHAIN_ID: the feed mapping is unusable here.
    throw new ArgumentException("Affiliate location feed mapping isn't setup correctly.");
}
// [END add_affiliate_location_extensions_4]
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
namespace Codehaus.Parsec
{
using ShowToken = Map<object, string>;
/*
struct State
{
internal readonly int at;
internal readonly int step;
internal readonly object ustate;
internal State (int at, int step, object ustate) {
this.at = at;
this.step = step;
this.ustate = ustate;
}
}*/
/// <summary>
/// Base class for the mutable cursor state shared by character-level and
/// token-level parsing: the current input position, a progress counter, and
/// abstract hooks for inspecting the input and building "unexpected" errors.
/// </summary>
abstract class ParseContext
{
    // Current position in the input; subclasses read this field directly.
    protected int at;
    // Bumped every time input is actually consumed (see next()); presumably used
    // by callers to detect whether a parser made progress.
    private int step;
    private readonly string module;
    private readonly PositionMap pmap;

    //caller should not change input after it is passed in.
    internal ParseContext(int at, string module, PositionMap pmap)
    {
        this.step = 0;
        this.at = at;
        this.module = module;
        this.pmap = pmap;
    }

    internal string getModuleName() { return module; }

    internal abstract bool isEof();
    internal abstract int getIndex();
    internal abstract Tok getToken();
    internal abstract char peekChar();
    internal abstract string getSource();
    internal abstract ParsecError getSysUnexpected();

    internal PositionMap getPositionMap() { return pmap; }

    internal int getStep() { return step; }
    internal int getAt() { return at; }

    internal void setAt(int at)
    {
        this.at = at;
    }

    internal void setStep(int s)
    {
        this.step = s;
    }

    // Restore both the progress counter and the position in a single call.
    internal void setState(int step, int at)
    {
        this.step = step;
        this.at = at;
    }

    // Consume a single element of input.
    internal void next()
    {
        at++;
        step++;
    }

    // Consume n elements; only real consumption (n > 0) counts as progress.
    internal void next(int n)
    {
        at += n;
        if (n > 0) step++;
    }
}
/// <summary>
/// Character-level parse context backed by a plain string, optionally limited
/// to a prefix of the string via an explicit length.
/// </summary>
sealed class ScannerState : ParseContext
{
    private readonly string src;
    private readonly int len;

    // Scan the whole string: delegate to the length-taking constructor.
    internal ScannerState(string src,
        int a, string module, PositionMap pmap)
        : this(src, a, module, pmap, src.Length)
    {
    }

    internal ScannerState(string src, int a,
        string module, PositionMap pmap,
        int l)
        : base(a, module, pmap)
    {
        this.src = src;
        this.len = l;
    }

    internal int length() { return len; }

    internal override bool isEof()
    {
        return at == len;
    }

    internal override char peekChar()
    {
        return src[at];
    }

    internal override string getSource()
    {
        return src;
    }

    internal override int getIndex()
    {
        return at;
    }

    internal override Tok getToken()
    {
        // Character-level contexts have no token stream.
        throw new System.NotSupportedException("Parser not on token level");
    }

    internal override ParsecError getSysUnexpected()
    {
        // At end of input report "EOF"; otherwise report the offending character.
        string msg = isEof() ? "EOF" : src[at].ToString();
        return ParsecError.raiseSysUnexpected(getIndex(), msg);
    }
}
/// <summary>
/// Token-level parse context backed by an array of tokens, with a lazily
/// populated per-position cache of "sys unexpected" errors.
/// </summary>
sealed class ParserState : ParseContext
{
    private readonly Tok[] input;
    // Cache: sys_unexpected[i] holds the error for token i once first requested.
    private readonly ParsecError[] sys_unexpected;
    //in case a terminating eof token is not explicitly created,
    //the implicit one is used.
    private readonly int end_index;
    private readonly ParsecError eof_unexpected;
    private readonly ShowToken show;

    //caller should not change input after it is passed in.
    internal ParserState(Tok[] input,
        int at, string module, PositionMap pmap,
        int end_index,
        string eof_str, ShowToken show)
        : base(at, module, pmap)
    {
        this.input = input;
        this.sys_unexpected = new ParsecError[input.Length];
        this.show = show;
        this.end_index = end_index;
        this.eof_unexpected = ParsecError.raiseSysUnexpected(
            end_index, eof_str);
    }

    internal int length() { return input.Length; }

    internal override bool isEof()
    {
        return at >= input.Length;
    }

    internal override int getIndex()
    {
        // Past the last token, report the configured end-of-input index.
        return at == input.Length ? end_index : input[at].Index;
    }

    internal override Tok getToken()
    {
        return input[at];
    }

    internal override ParsecError getSysUnexpected()
    {
        return getSysUnexpected(at);
    }

    private ParsecError getSysUnexpected(int i)
    {
        if (i >= sys_unexpected.Length)
        {
            return eof_unexpected;
        }
        ParsecError cached = sys_unexpected[i];
        if (cached == null)
        {
            // Build and memoize the error for this token position.
            Tok ptok = input[i];
            cached = ParsecError.raiseSysUnexpected(ptok.Index, show(ptok.Token));
            sys_unexpected[i] = cached;
        }
        return cached;
    }

    internal override char peekChar()
    {
        throw new NotSupportedException("parser not on char level.");
    }

    internal char peekChar(int i)
    {
        throw new NotSupportedException("parser not on char level.");
    }

    internal override string getSource()
    {
        throw new NotSupportedException("parser not on char level.");
    }
}
}
| |
using System;
using NUnit.Framework;
using Raksha.Crypto;
using Raksha.Crypto.Digests;
using Raksha.Crypto.Engines;
using Raksha.Crypto.Encodings;
using Raksha.Crypto.Parameters;
using Raksha.Utilities.Encoders;
using Raksha.Tests.Utilities;
namespace Raksha.Tests.Crypto
{
/**
* standard vector test for SHA-384 from FIPS Draft 180-2.
*
* Note, the first two vectors are _not_ from the draft, the last three are.
*/
[TestFixture]
public class Sha384DigestTest
    : ITest
{
    // static private string testVec1 = "";
    static private string resVec1 = "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b";

    static private string testVec2 = "61";
    static private string resVec2 = "54a59b9f22b0b80880d8427e548b7c23abd873486e1f035dce9cd697e85175033caa88e6d57bc35efae0b5afd3145f31";

    static private string testVec3 = "616263";
    static private string resVec3 = "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7";

    static private string testVec4 = "61626364656667686263646566676869636465666768696a6465666768696a6b65666768696a6b6c666768696a6b6c6d6768696a6b6c6d6e68696a6b6c6d6e6f696a6b6c6d6e6f706a6b6c6d6e6f70716b6c6d6e6f7071726c6d6e6f707172736d6e6f70717273746e6f707172737475";
    static private string resVec4 = "09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039";

    // 10 'a' characters; test 6 below feeds this 100000 times = one million 'a'.
    static private string testVec5 = "61616161616161616161";
    static private string resVec5 = "9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985";

    public string Name
    {
        get { return "SHA384"; }
    }

    /// <summary>
    /// Builds the failure result for a vector mismatch. Centralizes the message
    /// format previously duplicated six times in Perform().
    /// </summary>
    /// <param name="algorithmName">Name prefix used in the original message
    /// ("SHA-384" or "SHA384", preserved per test).</param>
    /// <param name="testNumber">Which vector test failed.</param>
    /// <param name="expected">Expected hex digest.</param>
    /// <param name="got">Actual hex digest.</param>
    private static ITestResult Fail(string algorithmName, string testNumber,
        string expected, string got)
    {
        return new SimpleTestResult(false,
            algorithmName + " failing standard vector test " + testNumber
            + SimpleTest.NewLine
            + " expected: " + expected
            + SimpleTest.NewLine
            + " got : " + got);
    }

    /// <summary>
    /// Runs the FIPS 180-2 SHA-384 vectors plus a mid-stream clone check.
    /// </summary>
    public ITestResult Perform()
    {
        IDigest digest = new Sha384Digest();
        byte[] resBuf = new byte[digest.GetDigestSize()];
        string resStr;

        //
        // test 1: empty message.
        //
        digest.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec1.Equals(resStr))
        {
            return Fail("SHA-384", "1", resVec1, resStr);
        }

        //
        // test 2: single byte.
        //
        byte[] bytes = Hex.Decode(testVec2);
        digest.BlockUpdate(bytes, 0, bytes.Length);
        digest.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec2.Equals(resStr))
        {
            return Fail("SHA-384", "2", resVec2, resStr);
        }

        //
        // test 3: "abc".
        //
        bytes = Hex.Decode(testVec3);
        digest.BlockUpdate(bytes, 0, bytes.Length);
        digest.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec3.Equals(resStr))
        {
            return Fail("SHA-384", "3", resVec3, resStr);
        }

        //
        // test 4: 112-byte multi-block message.
        //
        bytes = Hex.Decode(testVec4);
        digest.BlockUpdate(bytes, 0, bytes.Length);
        digest.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec4.Equals(resStr))
        {
            return Fail("SHA-384", "4", resVec4, resStr);
        }

        //
        // test 5: clone the digest mid-stream; both the original and the clone
        // must still produce the test 4 digest.
        //
        bytes = Hex.Decode(testVec4);
        digest.BlockUpdate(bytes, 0, bytes.Length/2);

        // clone the IDigest
        IDigest d = new Sha384Digest((Sha384Digest)digest);

        digest.BlockUpdate(bytes, bytes.Length/2, bytes.Length - bytes.Length/2);
        digest.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec4.Equals(resStr))
        {
            return Fail("SHA384", "5", resVec4, resStr);
        }

        d.BlockUpdate(bytes, bytes.Length/2, bytes.Length - bytes.Length/2);
        d.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec4.Equals(resStr))
        {
            return Fail("SHA384", "5", resVec4, resStr);
        }

        // test 6: one million 'a' characters (10 bytes fed 100000 times).
        bytes = Hex.Decode(testVec5);
        for ( int i = 0; i < 100000; i++ )
        {
            digest.BlockUpdate(bytes, 0, bytes.Length);
        }
        digest.DoFinal(resBuf, 0);
        resStr = Hex.ToHexString(resBuf);
        if (!resVec5.Equals(resStr))
        {
            // Fixed: the original message mislabelled this sixth check "test 5".
            return Fail("SHA-384", "6", resVec5, resStr);
        }

        return new SimpleTestResult(true, Name + ": Okay");
    }

    public static void Main(
        string[] args)
    {
        ITest test = new Sha384DigestTest();
        ITestResult result = test.Perform();

        Console.WriteLine(result);
    }

    [Test]
    public void TestFunction()
    {
        string resultText = Perform().ToString();

        Assert.AreEqual(Name + ": Okay", resultText);
    }
}
}
| |
using System.Linq;
using System.Security.Claims;
using System.Threading.Tasks;
using allverse3.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.Extensions.Logging;
using allverse3.Models;
using allverse3.Models.AccountViewModels;
using Newtonsoft.Json;
namespace allverse3.Controllers
{
[Authorize]
public class AccountController : Controller
{
    private readonly UserManager<ApplicationUser> _userManager;
    private readonly SignInManager<ApplicationUser> _signInManager;
    private readonly IEmailSender _emailSender;
    private readonly ISmsSender _smsSender;
    private readonly ILogger _logger;

    public AccountController(
        UserManager<ApplicationUser> userManager,
        SignInManager<ApplicationUser> signInManager,
        IEmailSender emailSender,
        ISmsSender smsSender,
        ILoggerFactory loggerFactory)
    {
        _userManager = userManager;
        _signInManager = signInManager;
        _emailSender = emailSender;
        _smsSender = smsSender;
        _logger = loggerFactory.CreateLogger<AccountController>();
    }

    //
    // GET: /Account/LoginJS
    // Signs the user in (persistent cookie) and returns a JSON boolean.
    // SECURITY NOTE(review): this endpoint accepts the password as a GET
    // query-string parameter, so credentials can end up in server/proxy logs and
    // browser history. Consider converting it to POST over HTTPS.
    [HttpGet]
    [AllowAnonymous]
    public async Task<string> LoginJS(string userName, string password)
    {
        var result = await _signInManager.PasswordSignInAsync(userName, password, true, lockoutOnFailure: false);
        if (result.Succeeded)
        {
            return JsonConvert.SerializeObject(true);
        }
        return JsonConvert.SerializeObject(false);
    }

    //
    // GET: /Account/Login
    [HttpGet]
    [AllowAnonymous]
    public IActionResult Login(string returnUrl = null)
    {
        ViewData["ReturnUrl"] = returnUrl;
        return View();
    }

    //
    // POST: /Account/Login
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> Login(LoginViewModel model, string returnUrl = null)
    {
        ViewData["ReturnUrl"] = returnUrl;
        if (ModelState.IsValid)
        {
            // This doesn't count login failures towards account lockout
            // To enable password failures to trigger account lockout, set lockoutOnFailure: true
            var result = await _signInManager.PasswordSignInAsync(model.UserName, model.Password, model.RememberMe, lockoutOnFailure: false);
            if (result.Succeeded)
            {
                _logger.LogInformation(1, "User logged in.");
                return Redirect("~");
            }
            if (result.RequiresTwoFactor)
            {
                return RedirectToAction(nameof(SendCode), new { ReturnUrl = returnUrl, RememberMe = model.RememberMe });
            }
            if (result.IsLockedOut)
            {
                _logger.LogWarning(2, "User account locked out.");
                return View("Lockout");
            }
            else
            {
                ModelState.AddModelError(string.Empty, "Invalid login attempt.");
                return View(model);
            }
        }

        // Invalid model state. NOTE(review): this redirects home rather than
        // redisplaying the form with validation errors, so the user's input and
        // the error messages are discarded — confirm this is intended.
        return Redirect("~");
    }

    //
    // GET: /Account/Register
    [HttpGet]
    [AllowAnonymous]
    public IActionResult Register(string returnUrl = null)
    {
        ViewData["ReturnUrl"] = returnUrl;
        return View();
    }

    //
    // POST: /Account/Register
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> Register(RegisterViewModel model, string returnUrl = null)
    {
        ViewData["ReturnUrl"] = returnUrl;
        if (ModelState.IsValid)
        {
            var user = new ApplicationUser { UserName = model.UserName, Email = model.Email };
            var result = await _userManager.CreateAsync(user, model.Password);
            if (result.Succeeded)
            {
                // For more information on how to enable account confirmation and password reset please visit https://go.microsoft.com/fwlink/?LinkID=532713
                // Send an email with this link
                //var code = await _userManager.GenerateEmailConfirmationTokenAsync(user);
                //var callbackUrl = Url.Action("ConfirmEmail", "Account", new { userId = user.Id, code = code }, protocol: HttpContext.Request.Scheme);
                //await _emailSender.SendEmailAsync(model.Email, "Confirm your account",
                //    $"Please confirm your account by clicking this link: <a href='{callbackUrl}'>link</a>");
                await _signInManager.SignInAsync(user, isPersistent: false);
                _logger.LogInformation(3, "User created a new account with password.");
                return RedirectToLocal(returnUrl);
            }
            AddErrors(result);
        }

        // If we got this far, something failed, redisplay form
        return View(model);
    }

    //
    // GET: /Account/LogOut
    // Returns a JSON boolean once sign-out has completed.
    [HttpGet]
    public async Task<string> LogOut()
    {
        // Fixed: SignOutAsync was previously fired without awaiting, so the
        // response could be produced before the authentication cookie was
        // actually cleared.
        await _signInManager.SignOutAsync();
        string ret = JsonConvert.SerializeObject(true);
        return ret;
    }

    //
    // POST: /Account/LogOff
    [HttpPost]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> LogOff()
    {
        await _signInManager.SignOutAsync();
        _logger.LogInformation(4, "User logged out.");
        return RedirectToAction(nameof(HomeController.Index), "Home");
    }

    //
    // POST: /Account/ExternalLogin
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public IActionResult ExternalLogin(string provider, string returnUrl = null)
    {
        // Request a redirect to the external login provider.
        var redirectUrl = Url.Action("ExternalLoginCallback", "Account", new { ReturnUrl = returnUrl });
        var properties = _signInManager.ConfigureExternalAuthenticationProperties(provider, redirectUrl);
        return Challenge(properties, provider);
    }

    //
    // GET: /Account/ExternalLoginCallback
    [HttpGet]
    [AllowAnonymous]
    public async Task<IActionResult> ExternalLoginCallback(string returnUrl = null, string remoteError = null)
    {
        if (remoteError != null)
        {
            ModelState.AddModelError(string.Empty, $"Error from external provider: {remoteError}");
            return View(nameof(Login));
        }
        var info = await _signInManager.GetExternalLoginInfoAsync();
        if (info == null)
        {
            return RedirectToAction(nameof(Login));
        }

        // Sign in the user with this external login provider if the user already has a login.
        var result = await _signInManager.ExternalLoginSignInAsync(info.LoginProvider, info.ProviderKey, isPersistent: false);
        if (result.Succeeded)
        {
            _logger.LogInformation(5, "User logged in with {Name} provider.", info.LoginProvider);
            return RedirectToLocal(returnUrl);
        }
        if (result.RequiresTwoFactor)
        {
            return RedirectToAction(nameof(SendCode), new { ReturnUrl = returnUrl });
        }
        if (result.IsLockedOut)
        {
            return View("Lockout");
        }
        else
        {
            // If the user does not have an account, then ask the user to create an account.
            ViewData["ReturnUrl"] = returnUrl;
            ViewData["LoginProvider"] = info.LoginProvider;
            var email = info.Principal.FindFirstValue(ClaimTypes.Email);
            return View("ExternalLoginConfirmation", new ExternalLoginConfirmationViewModel { Email = email });
        }
    }

    //
    // POST: /Account/ExternalLoginConfirmation
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> ExternalLoginConfirmation(ExternalLoginConfirmationViewModel model, string returnUrl = null)
    {
        if (ModelState.IsValid)
        {
            // Get the information about the user from the external login provider
            var info = await _signInManager.GetExternalLoginInfoAsync();
            if (info == null)
            {
                return View("ExternalLoginFailure");
            }
            var user = new ApplicationUser { UserName = model.Email, Email = model.Email };
            var result = await _userManager.CreateAsync(user);
            if (result.Succeeded)
            {
                result = await _userManager.AddLoginAsync(user, info);
                if (result.Succeeded)
                {
                    await _signInManager.SignInAsync(user, isPersistent: false);
                    _logger.LogInformation(6, "User created an account using {Name} provider.", info.LoginProvider);
                    return RedirectToLocal(returnUrl);
                }
            }
            AddErrors(result);
        }

        ViewData["ReturnUrl"] = returnUrl;
        return View(model);
    }

    // GET: /Account/ConfirmEmail
    [HttpGet]
    [AllowAnonymous]
    public async Task<IActionResult> ConfirmEmail(string userId, string code)
    {
        if (userId == null || code == null)
        {
            return View("Error");
        }
        var user = await _userManager.FindByIdAsync(userId);
        if (user == null)
        {
            return View("Error");
        }
        var result = await _userManager.ConfirmEmailAsync(user, code);
        return View(result.Succeeded ? "ConfirmEmail" : "Error");
    }

    //
    // GET: /Account/ForgotPassword
    [HttpGet]
    [AllowAnonymous]
    public IActionResult ForgotPassword()
    {
        return View();
    }

    //
    // POST: /Account/ForgotPassword
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> ForgotPassword(ForgotPasswordViewModel model)
    {
        if (ModelState.IsValid)
        {
            var user = await _userManager.FindByNameAsync(model.Email);
            if (user == null || !(await _userManager.IsEmailConfirmedAsync(user)))
            {
                // Don't reveal that the user does not exist or is not confirmed
                return View("ForgotPasswordConfirmation");
            }

            // For more information on how to enable account confirmation and password reset please visit https://go.microsoft.com/fwlink/?LinkID=532713
            // Send an email with this link
            //var code = await _userManager.GeneratePasswordResetTokenAsync(user);
            //var callbackUrl = Url.Action("ResetPassword", "Account", new { userId = user.Id, code = code }, protocol: HttpContext.Request.Scheme);
            //await _emailSender.SendEmailAsync(model.Email, "Reset Password",
            //   $"Please reset your password by clicking here: <a href='{callbackUrl}'>link</a>");

            // Fixed: previously this fell through to redisplay the form for
            // existing confirmed users while unknown users got the confirmation
            // view — the differing responses allowed account enumeration. Always
            // return the same confirmation view.
            return View("ForgotPasswordConfirmation");
        }

        // If we got this far, something failed, redisplay form
        return View(model);
    }

    //
    // GET: /Account/ForgotPasswordConfirmation
    [HttpGet]
    [AllowAnonymous]
    public IActionResult ForgotPasswordConfirmation()
    {
        return View();
    }

    //
    // GET: /Account/ResetPassword
    [HttpGet]
    [AllowAnonymous]
    public IActionResult ResetPassword(string code = null)
    {
        return code == null ? View("Error") : View();
    }

    //
    // POST: /Account/ResetPassword
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> ResetPassword(ResetPasswordViewModel model)
    {
        if (!ModelState.IsValid)
        {
            return View(model);
        }
        var user = await _userManager.FindByNameAsync(model.Email);
        if (user == null)
        {
            // Don't reveal that the user does not exist
            return RedirectToAction(nameof(AccountController.ResetPasswordConfirmation), "Account");
        }
        var result = await _userManager.ResetPasswordAsync(user, model.Code, model.Password);
        if (result.Succeeded)
        {
            return RedirectToAction(nameof(AccountController.ResetPasswordConfirmation), "Account");
        }
        AddErrors(result);
        return View();
    }

    //
    // GET: /Account/ResetPasswordConfirmation
    [HttpGet]
    [AllowAnonymous]
    public IActionResult ResetPasswordConfirmation()
    {
        return View();
    }

    //
    // GET: /Account/SendCode
    [HttpGet]
    [AllowAnonymous]
    public async Task<ActionResult> SendCode(string returnUrl = null, bool rememberMe = false)
    {
        var user = await _signInManager.GetTwoFactorAuthenticationUserAsync();
        if (user == null)
        {
            return View("Error");
        }
        var userFactors = await _userManager.GetValidTwoFactorProvidersAsync(user);
        var factorOptions = userFactors.Select(purpose => new SelectListItem { Text = purpose, Value = purpose }).ToList();
        return View(new SendCodeViewModel { Providers = factorOptions, ReturnUrl = returnUrl, RememberMe = rememberMe });
    }

    //
    // POST: /Account/SendCode
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> SendCode(SendCodeViewModel model)
    {
        if (!ModelState.IsValid)
        {
            return View();
        }

        var user = await _signInManager.GetTwoFactorAuthenticationUserAsync();
        if (user == null)
        {
            return View("Error");
        }

        // Generate the token and send it
        var code = await _userManager.GenerateTwoFactorTokenAsync(user, model.SelectedProvider);
        if (string.IsNullOrWhiteSpace(code))
        {
            return View("Error");
        }

        var message = "Your security code is: " + code;
        if (model.SelectedProvider == "Email")
        {
            await _emailSender.SendEmailAsync(await _userManager.GetEmailAsync(user), "Security Code", message);
        }
        else if (model.SelectedProvider == "Phone")
        {
            await _smsSender.SendSmsAsync(await _userManager.GetPhoneNumberAsync(user), message);
        }

        return RedirectToAction(nameof(VerifyCode), new { Provider = model.SelectedProvider, ReturnUrl = model.ReturnUrl, RememberMe = model.RememberMe });
    }

    //
    // GET: /Account/VerifyCode
    [HttpGet]
    [AllowAnonymous]
    public async Task<IActionResult> VerifyCode(string provider, bool rememberMe, string returnUrl = null)
    {
        // Require that the user has already logged in via username/password or external login
        var user = await _signInManager.GetTwoFactorAuthenticationUserAsync();
        if (user == null)
        {
            return View("Error");
        }
        return View(new VerifyCodeViewModel { Provider = provider, ReturnUrl = returnUrl, RememberMe = rememberMe });
    }

    //
    // POST: /Account/VerifyCode
    [HttpPost]
    [AllowAnonymous]
    [ValidateAntiForgeryToken]
    public async Task<IActionResult> VerifyCode(VerifyCodeViewModel model)
    {
        if (!ModelState.IsValid)
        {
            return View(model);
        }

        // The following code protects for brute force attacks against the two factor codes.
        // If a user enters incorrect codes for a specified amount of time then the user account
        // will be locked out for a specified amount of time.
        var result = await _signInManager.TwoFactorSignInAsync(model.Provider, model.Code, model.RememberMe, model.RememberBrowser);
        if (result.Succeeded)
        {
            return RedirectToLocal(model.ReturnUrl);
        }
        if (result.IsLockedOut)
        {
            _logger.LogWarning(7, "User account locked out.");
            return View("Lockout");
        }
        else
        {
            ModelState.AddModelError(string.Empty, "Invalid code.");
            return View(model);
        }
    }

    #region Helpers

    private void AddErrors(IdentityResult result)
    {
        foreach (var error in result.Errors)
        {
            ModelState.AddModelError(string.Empty, error.Description);
        }
    }

    private Task<ApplicationUser> GetCurrentUserAsync()
    {
        return _userManager.GetUserAsync(HttpContext.User);
    }

    private IActionResult RedirectToLocal(string returnUrl)
    {
        if (Url.IsLocalUrl(returnUrl))
        {
            return Redirect(returnUrl);
        }
        else
        {
            return RedirectToAction(nameof(HomeController.Index), "Home");
        }
    }

    #endregion
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using NUnit.Framework;
using osu.Framework.Graphics;
using osu.Framework.Graphics.UserInterface;
using osu.Framework.Input.Bindings;
using osu.Framework.Input.Events;
using osu.Framework.Testing;
using osuTK;
using osuTK.Input;
namespace osu.Framework.Tests.Visual.Input
{
[HeadlessTest]
public class TestSceneKeyBindingContainer : ManualInputManagerTestScene
{
[Test]
public void TestTriggerWithNoKeyBindings()
{
bool pressedReceived = false;
bool releasedReceived = false;
TestKeyBindingContainer keyBindingContainer = null;
AddStep("add container", () =>
{
pressedReceived = false;
releasedReceived = false;
Child = keyBindingContainer = new TestKeyBindingContainer
{
Child = new TestKeyBindingReceptor
{
Pressed = _ => pressedReceived = true,
Released = _ => releasedReceived = true
}
};
});
AddStep("trigger press", () => keyBindingContainer.TriggerPressed(TestAction.ActionA));
AddAssert("press received", () => pressedReceived);
AddStep("trigger release", () => keyBindingContainer.TriggerReleased(TestAction.ActionA));
AddAssert("release received", () => releasedReceived);
}
[Test]
public void TestPressKeyBeforeKeyBindingContainerAdded()
{
List<TestAction> pressedActions = new List<TestAction>();
List<TestAction> releasedActions = new List<TestAction>();
AddStep("press key B", () => InputManager.PressKey(Key.B));
AddStep("add container", () =>
{
pressedActions.Clear();
releasedActions.Clear();
Child = new TestKeyBindingContainer
{
Child = new TestKeyBindingReceptor
{
Pressed = a => pressedActions.Add(a),
Released = a => releasedActions.Add(a)
}
};
});
AddStep("press key A", () => InputManager.PressKey(Key.A));
AddAssert("only one action triggered", () => pressedActions.Count == 1);
AddAssert("ActionA triggered", () => pressedActions[0] == TestAction.ActionA);
AddAssert("no actions released", () => releasedActions.Count == 0);
AddStep("release key A", () => InputManager.ReleaseKey(Key.A));
AddAssert("only one action triggered", () => pressedActions.Count == 1);
AddAssert("only one action released", () => releasedActions.Count == 1);
AddAssert("ActionA released", () => releasedActions[0] == TestAction.ActionA);
}
[Test]
public void TestKeyHandledByOtherDrawableDoesNotTrigger()
{
List<TestAction> pressedActions = new List<TestAction>();
List<TestAction> releasedActions = new List<TestAction>();
TextBox textBox = null;
AddStep("add children", () =>
{
pressedActions.Clear();
releasedActions.Clear();
Child = new TestKeyBindingContainer
{
Children = new Drawable[]
{
textBox = new BasicTextBox
{
Anchor = Anchor.Centre,
Origin = Anchor.Centre,
Size = new Vector2(200, 30)
},
new TestKeyBindingReceptor
{
Pressed = a => pressedActions.Add(a),
Released = a => releasedActions.Add(a)
}
}
};
});
AddStep("focus textbox and move mouse away", () =>
{
InputManager.MoveMouseTo(textBox);
InputManager.Click(MouseButton.Left);
InputManager.MoveMouseTo(textBox, new Vector2(0, 100));
});
AddStep("press enter", () => InputManager.PressKey(Key.Enter));
AddStep("press mouse button", () => InputManager.PressButton(MouseButton.Left));
AddStep("release enter", () => InputManager.ReleaseKey(Key.Enter));
AddStep("release mouse button", () => InputManager.ReleaseButton(MouseButton.Left));
AddAssert("no pressed actions", () => pressedActions.Count == 0);
AddAssert("no released actions", () => releasedActions.Count == 0);
}
[Test]
public void TestReleasingSpecificModifierDoesNotReleaseCommonBindingIfOtherKeyIsActive()
{
bool pressedReceived = false;
bool releasedReceived = false;
AddStep("add container", () =>
{
pressedReceived = false;
releasedReceived = false;
Child = new TestKeyBindingContainer
{
Child = new TestKeyBindingReceptor
{
Pressed = _ => pressedReceived = true,
Released = _ => releasedReceived = true
}
};
});
AddStep("press lctrl", () => InputManager.PressKey(Key.LControl));
AddAssert("press received", () => pressedReceived);
AddStep("reset variables", () =>
{
pressedReceived = false;
releasedReceived = false;
});
AddStep("press rctrl", () => InputManager.PressKey(Key.RControl));
AddAssert("press not received", () => !pressedReceived);
AddAssert("release not received", () => !releasedReceived);
AddStep("release rctrl", () => InputManager.ReleaseKey(Key.RControl));
AddAssert("release not received", () => !releasedReceived);
AddStep("release lctrl", () => InputManager.ReleaseKey(Key.LControl));
AddAssert("release received", () => releasedReceived);
}
[Test]
public void TestSingleKeyRepeatEvents()
{
bool pressedReceived = false;
bool repeatedReceived = false;
bool releasedReceived = false;
AddStep("add container", () =>
{
pressedReceived = false;
repeatedReceived = false;
releasedReceived = false;
Child = new TestKeyBindingContainer(true)
{
Child = new TestKeyBindingReceptor
{
Pressed = a => pressedReceived = a == TestAction.ActionA,
Repeated = a => repeatedReceived = a == TestAction.ActionA,
Released = a => releasedReceived = a == TestAction.ActionA
}
};
});
AddStep("press A", () => InputManager.PressKey(Key.A));
AddAssert("press received", () => pressedReceived);
for (int i = 0; i < 10; i++)
{
AddUntilStep($"repeat #{1 + i} received", () => repeatedReceived);
AddStep("reset for next repeat", () => repeatedReceived = false);
}
AddStep("release A", () => InputManager.ReleaseKey(Key.A));
AddAssert("release received", () => releasedReceived);
}
[Test]
public void TestKeyCombinationRepeatEvents()
{
bool pressedReceived = false;
bool repeatedReceived = false;
bool releasedReceived = false;
AddStep("add container", () =>
{
pressedReceived = false;
repeatedReceived = false;
releasedReceived = false;
Child = new TestKeyBindingContainer(true)
{
Child = new TestKeyBindingReceptor
{
Pressed = a => pressedReceived = a == TestAction.ActionAB,
Repeated = a => repeatedReceived = a == TestAction.ActionAB,
Released = a => releasedReceived = a == TestAction.ActionAB,
}
};
});
AddStep("press A+B", () =>
{
InputManager.PressKey(Key.A);
InputManager.PressKey(Key.B);
});
AddAssert("press received", () => pressedReceived);
for (int i = 0; i < 10; i++)
{
AddUntilStep($"repeat #{1 + i} received", () => repeatedReceived);
AddStep("reset for next repeat", () => repeatedReceived = false);
}
AddStep("release A", () => InputManager.ReleaseKey(Key.A));
AddAssert("release received", () => releasedReceived);
AddStep("reset for potential repeat", () => repeatedReceived = false);
AddWaitStep("wait", 5);
AddAssert("no repeat received", () => !repeatedReceived);
AddStep("release B", () => InputManager.ReleaseKey(Key.B));
}
private class TestKeyBindingReceptor : Drawable, IKeyBindingHandler<TestAction>
{
public Action<TestAction> Pressed;
public Action<TestAction> Repeated;
public Action<TestAction> Released;
public TestKeyBindingReceptor()
{
RelativeSizeAxes = Axes.Both;
}
public bool OnPressed(KeyBindingPressEvent<TestAction> e)
{
if (e.Repeat)
Repeated?.Invoke(e.Action);
else
Pressed?.Invoke(e.Action);
return true;
}
public void OnReleased(KeyBindingReleaseEvent<TestAction> e)
{
Released?.Invoke(e.Action);
}
}
/// <summary>
/// A key binding container which can optionally forward key repeat events.
/// </summary>
private class TestKeyBindingContainer : KeyBindingContainer<TestAction>
{
    private readonly bool sendRepeats;

    protected override bool SendRepeats => sendRepeats;

    public TestKeyBindingContainer(bool sendRepeats = false)
    {
        this.sendRepeats = sendRepeats;
    }

    public override IEnumerable<IKeyBinding> DefaultKeyBindings => new IKeyBinding[]
    {
        new KeyBinding(InputKey.A, TestAction.ActionA),
        new KeyBinding(new KeyCombination(InputKey.A, InputKey.B), TestAction.ActionAB),
        new KeyBinding(InputKey.Enter, TestAction.ActionEnter),
        new KeyBinding(InputKey.Control, TestAction.ActionControl)
    };
}
/// <summary>
/// Actions bound by <see cref="TestKeyBindingContainer"/> in these tests.
/// </summary>
private enum TestAction
{
    ActionA,       // bound to the single key A
    ActionAB,      // bound to the A+B key combination
    ActionEnter,   // bound to Enter
    ActionControl  // bound to the Control modifier
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Text;
using System;
using System.Diagnostics.Contracts;
namespace System.Text
{
// A Decoder is used to decode a sequence of blocks of bytes into a
// sequence of blocks of characters. Following instantiation of a decoder,
// sequential blocks of bytes are converted into blocks of characters through
// calls to the GetChars method. The decoder maintains state between the
// conversions, allowing it to correctly decode byte sequences that span
// adjacent blocks.
//
// Instances of specific implementations of the Decoder abstract base
// class are typically obtained through calls to the GetDecoder method
// of Encoding objects.
//
[System.Runtime.InteropServices.ComVisible(true)]
public abstract class Decoder
{
    // The fallback strategy used when an invalid byte sequence is encountered.
    // Null means the replacement fallback is used (see FallbackBuffer getter).
    internal DecoderFallback m_fallback = null;

    // Lazily-created buffer backing the current fallback; cleared whenever Fallback changes.
    internal DecoderFallbackBuffer m_fallbackBuffer = null;

    protected Decoder()
    {
        // We don't call default reset because default reset probably isn't good if we aren't initialized.
    }

    // Gets or sets the fallback used when a byte sequence cannot be decoded.
    // Throws ArgumentNullException for a null value and ArgumentException when the
    // current fallback buffer still holds undrained data.
    [System.Runtime.InteropServices.ComVisible(false)]
    public DecoderFallback Fallback
    {
        get
        {
            return m_fallback;
        }

        set
        {
            if (value == null)
                throw new ArgumentNullException("value");
            Contract.EndContractBlock();

            // Can't change fallback if buffer is wrong (it may still contain
            // characters from a fallback in progress).
            if (m_fallbackBuffer != null && m_fallbackBuffer.Remaining > 0)
                throw new ArgumentException(
                    SR.Argument_FallbackBufferNotEmpty, "value");

            m_fallback = value;
            m_fallbackBuffer = null;
        }
    }

    // Note: we don't test for threading here because async access to Encoders and Decoders
    // doesn't work anyway.
    // Gets the fallback buffer, creating one on first use — from the configured
    // fallback, or from the replacement fallback when none is configured.
    [System.Runtime.InteropServices.ComVisible(false)]
    public DecoderFallbackBuffer FallbackBuffer
    {
        get
        {
            if (m_fallbackBuffer == null)
            {
                if (m_fallback != null)
                    m_fallbackBuffer = m_fallback.CreateFallbackBuffer();
                else
                    m_fallbackBuffer = DecoderFallback.ReplacementFallback.CreateFallbackBuffer();
            }

            return m_fallbackBuffer;
        }
    }

    // True when a fallback buffer has already been instantiated; lets internal
    // callers check without forcing creation through the FallbackBuffer getter.
    internal bool InternalHasFallbackBuffer
    {
        get
        {
            return m_fallbackBuffer != null;
        }
    }

    // Reset the Decoder
    //
    // Normally if we call GetChars() and an error is thrown we don't change the state of the Decoder. This
    // would allow the caller to correct the error condition and try again (such as if they need a bigger buffer.)
    //
    // If the caller doesn't want to try again after GetChars() throws an error, then they need to call Reset().
    //
    // Virtual implementation has to call GetChars with flush and a big enough buffer to clear a 0 byte string
    // We avoid GetMaxCharCount() because a) we can't call the base encoder and b) it might be really big.
    [System.Runtime.InteropServices.ComVisible(false)]
    public virtual void Reset()
    {
        byte[] byteTemp = { };
        char[] charTemp = new char[GetCharCount(byteTemp, 0, 0, true)];
        GetChars(byteTemp, 0, 0, charTemp, 0, true);
        if (m_fallbackBuffer != null)
            m_fallbackBuffer.Reset();
    }

    // Returns the number of characters the next call to GetChars will
    // produce if presented with the given range of bytes. The returned value
    // takes into account the state in which the decoder was left following the
    // last call to GetChars. The state of the decoder is not affected
    // by a call to this method.
    //
    public abstract int GetCharCount(byte[] bytes, int index, int count);

    // Flush-aware overload; this default implementation ignores the flush flag.
    [System.Runtime.InteropServices.ComVisible(false)]
    public virtual int GetCharCount(byte[] bytes, int index, int count, bool flush)
    {
        return GetCharCount(bytes, index, count);
    }

    // We expect this to be the workhorse for NLS Encodings, but for existing
    // ones we need a working (if slow) default implementation)
    [System.Runtime.InteropServices.ComVisible(false)]
    internal virtual unsafe int GetCharCount(byte* bytes, int count, bool flush)
    {
        // Validate input parameters
        if (bytes == null)
            throw new ArgumentNullException("bytes",
                  SR.ArgumentNull_Array);

        if (count < 0)
            throw new ArgumentOutOfRangeException("count",
                  SR.ArgumentOutOfRange_NeedNonNegNum);
        Contract.EndContractBlock();

        // Copy the unmanaged bytes into a managed array so the array-based
        // overload can do the actual counting.
        byte[] arrbyte = new byte[count];
        int index;

        for (index = 0; index < count; index++)
            arrbyte[index] = bytes[index];

        return GetCharCount(arrbyte, 0, count);
    }

    // Decodes a range of bytes in a byte array into a range of characters
    // in a character array. The method decodes byteCount bytes from
    // bytes starting at index byteIndex, storing the resulting
    // characters in chars starting at index charIndex. The
    // decoding takes into account the state in which the decoder was left
    // following the last call to this method.
    //
    // An exception occurs if the character array is not large enough to
    // hold the complete decoding of the bytes. The GetCharCount method
    // can be used to determine the exact number of characters that will be
    // produced for a given range of bytes. Alternatively, the
    // GetMaxCharCount method of the Encoding that produced this
    // decoder can be used to determine the maximum number of characters that
    // will be produced for a given number of bytes, regardless of the actual
    // byte values.
    //
    public abstract int GetChars(byte[] bytes, int byteIndex, int byteCount,
                                    char[] chars, int charIndex);

    // Flush-aware overload; this default implementation ignores the flush flag.
    public virtual int GetChars(byte[] bytes, int byteIndex, int byteCount,
                                   char[] chars, int charIndex, bool flush)
    {
        return GetChars(bytes, byteIndex, byteCount, chars, charIndex);
    }

    // We expect this to be the workhorse for NLS Encodings, but for existing
    // ones we need a working (if slow) default implementation)
    //
    // WARNING WARNING WARNING
    //
    // WARNING: If this breaks it could be a security threat. Obviously we
    // call this internally, so you need to make sure that your pointers, counts
    // and indexes are correct when you call this method.
    //
    // In addition, we have internal code, which will be marked as "safe" calling
    // this code. However this code is dependent upon the implementation of an
    // external GetChars() method, which could be overridden by a third party and
    // the results of which cannot be guaranteed. We use that result to copy
    // the char[] to our char* output buffer. If the result count was wrong, we
    // could easily overflow our output buffer. Therefore we do an extra test
    // when we copy the buffer so that we don't overflow charCount either.
    [System.Runtime.InteropServices.ComVisible(false)]
    internal virtual unsafe int GetChars(byte* bytes, int byteCount,
                                          char* chars, int charCount, bool flush)
    {
        // Validate input parameters
        if (chars == null || bytes == null)
            throw new ArgumentNullException(chars == null ? "chars" : "bytes",
                  SR.ArgumentNull_Array);

        if (byteCount < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((byteCount < 0 ? "byteCount" : "charCount"),
                  SR.ArgumentOutOfRange_NeedNonNegNum);
        Contract.EndContractBlock();

        // Get the byte array to convert
        byte[] arrByte = new byte[byteCount];
        int index;

        for (index = 0; index < byteCount; index++)
            arrByte[index] = bytes[index];

        // Get the char array to fill
        char[] arrChar = new char[charCount];

        // Do the work
        int result = GetChars(arrByte, 0, byteCount, arrChar, 0, flush);

        // The only way this could fail is a bug in GetChars
        Contract.Assert(result <= charCount, "Returned more chars than we have space for");

        // Copy the char array
        // WARNING: We MUST make sure that we don't copy too many chars. We can't
        // rely on result because it could be a 3rd party implementation. We need
        // to make sure we never copy more than charCount chars no matter the value
        // of result
        if (result < charCount)
            charCount = result;

        // We check both result and charCount so that we don't accidentally overrun
        // our pointer buffer just because of any GetChars bug.
        for (index = 0; index < charCount; index++)
            chars[index] = arrChar[index];

        return charCount;
    }

    // This method is used when the output buffer might not be large enough.
    // It will decode until it runs out of bytes, and then it will return
    // true if the entire input was converted. In either case it
    // will also return the number of converted bytes and output characters used.
    // It will only throw a buffer overflow exception if the entire length of chars[] is
    // too small to store the next char. (like 0 or maybe 1 or 4 for some encodings)
    // We're done processing this buffer only if completed returns true.
    //
    // Might consider checking Max...Count to avoid the extra counting step.
    //
    // Note that if all of the input bytes are not consumed, then we'll do a /2, which means
    // that its likely that we didn't consume as many bytes as we could have. For some
    // applications this could be slow. (Like trying to exactly fill an output buffer from a bigger stream)
    [System.Runtime.InteropServices.ComVisible(false)]
    public virtual void Convert(byte[] bytes, int byteIndex, int byteCount,
                                  char[] chars, int charIndex, int charCount, bool flush,
                                  out int bytesUsed, out int charsUsed, out bool completed)
    {
        // Validate parameters
        if (bytes == null || chars == null)
            throw new ArgumentNullException((bytes == null ? "bytes" : "chars"),
                  SR.ArgumentNull_Array);

        if (byteIndex < 0 || byteCount < 0)
            throw new ArgumentOutOfRangeException((byteIndex < 0 ? "byteIndex" : "byteCount"),
                  SR.ArgumentOutOfRange_NeedNonNegNum);

        if (charIndex < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((charIndex < 0 ? "charIndex" : "charCount"),
                  SR.ArgumentOutOfRange_NeedNonNegNum);

        if (bytes.Length - byteIndex < byteCount)
            throw new ArgumentOutOfRangeException("bytes",
                  SR.ArgumentOutOfRange_IndexCountBuffer);

        if (chars.Length - charIndex < charCount)
            throw new ArgumentOutOfRangeException("chars",
                  SR.ArgumentOutOfRange_IndexCountBuffer);
        Contract.EndContractBlock();

        bytesUsed = byteCount;

        // Its easy to do if it won't overrun our buffer.
        // Halve the input until the predicted char count fits the output buffer.
        while (bytesUsed > 0)
        {
            if (GetCharCount(bytes, byteIndex, bytesUsed, flush) <= charCount)
            {
                charsUsed = GetChars(bytes, byteIndex, bytesUsed, chars, charIndex, flush);
                completed = (bytesUsed == byteCount &&
                    (m_fallbackBuffer == null || m_fallbackBuffer.Remaining == 0));
                return;
            }

            // Try again with 1/2 the count, won't flush then 'cause won't read it all
            flush = false;
            bytesUsed /= 2;
        }

        // Oops, we didn't have anything, we'll have to throw an overflow
        throw new ArgumentException(SR.Argument_ConversionOverflow);
    }
}
}
| |
#region Copyright
////////////////////////////////////////////////////////////////////////////////
// The following FIT Protocol software provided may be used with FIT protocol
// devices only and remains the copyrighted property of Dynastream Innovations Inc.
// The software is being provided on an "as-is" basis and as an accommodation,
// and therefore all warranties, representations, or guarantees of any kind
// (whether express, implied or statutory) including, without limitation,
// warranties of merchantability, non-infringement, or fitness for a particular
// purpose, are specifically disclaimed.
//
// Copyright 2014 Dynastream Innovations Inc.
////////////////////////////////////////////////////////////////////////////////
// ****WARNING**** This file is auto-generated! Do NOT edit this file.
// Profile Version = 12.20Release
// Tag = $Name$
////////////////////////////////////////////////////////////////////////////////
#endregion
namespace FitFilePreviewer.Decode.Fit.Mesgs
{
/// <summary>
/// Implements the BloodPressure profile message.
/// </summary>
/// <remarks>
/// Auto-generated from the FIT profile (do not hand-edit; see the file header).
/// The numeric literals passed to GetFieldValue/SetFieldValue below are the FIT
/// field definition numbers for this message.
/// </remarks>
public class BloodPressureMesg : Mesg
{
    #region Fields
    #endregion

    #region Constructors
    // Creates a message backed by the profile's BloodPressure message definition.
    public BloodPressureMesg() : base((Mesg) Profile.mesgs[Profile.BloodPressureIndex])
    {
    }

    // Creates a message by wrapping an existing message instance.
    public BloodPressureMesg(Mesg mesg) : base(mesg)
    {
    }
    #endregion // Constructors

    #region Methods
    /// <summary>
    /// Retrieves the Timestamp field
    /// Units: s</summary>
    /// <returns>Returns DateTime representing the Timestamp field</returns>
    public Types.DateTime GetTimestamp()
    {
        return TimestampToDateTime((uint?)GetFieldValue(253, 0, Fit.SubfieldIndexMainField));
    }

    /// <summary>
    /// Set Timestamp field
    /// Units: s</summary>
    /// <param name="timestamp_">Nullable field value to be set</param>
    public void SetTimestamp(Types.DateTime timestamp_)
    {
        SetFieldValue(253, 0, timestamp_.GetTimeStamp(), Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the SystolicPressure field
    /// Units: mmHg</summary>
    /// <returns>Returns nullable ushort representing the SystolicPressure field</returns>
    public ushort? GetSystolicPressure()
    {
        return (ushort?)GetFieldValue(0, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set SystolicPressure field
    /// Units: mmHg</summary>
    /// <param name="systolicPressure_">Nullable field value to be set</param>
    public void SetSystolicPressure(ushort? systolicPressure_)
    {
        SetFieldValue(0, 0, systolicPressure_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the DiastolicPressure field
    /// Units: mmHg</summary>
    /// <returns>Returns nullable ushort representing the DiastolicPressure field</returns>
    public ushort? GetDiastolicPressure()
    {
        return (ushort?)GetFieldValue(1, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set DiastolicPressure field
    /// Units: mmHg</summary>
    /// <param name="diastolicPressure_">Nullable field value to be set</param>
    public void SetDiastolicPressure(ushort? diastolicPressure_)
    {
        SetFieldValue(1, 0, diastolicPressure_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the MeanArterialPressure field
    /// Units: mmHg</summary>
    /// <returns>Returns nullable ushort representing the MeanArterialPressure field</returns>
    public ushort? GetMeanArterialPressure()
    {
        return (ushort?)GetFieldValue(2, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set MeanArterialPressure field
    /// Units: mmHg</summary>
    /// <param name="meanArterialPressure_">Nullable field value to be set</param>
    public void SetMeanArterialPressure(ushort? meanArterialPressure_)
    {
        SetFieldValue(2, 0, meanArterialPressure_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the Map3SampleMean field
    /// Units: mmHg</summary>
    /// <returns>Returns nullable ushort representing the Map3SampleMean field</returns>
    public ushort? GetMap3SampleMean()
    {
        return (ushort?)GetFieldValue(3, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set Map3SampleMean field
    /// Units: mmHg</summary>
    /// <param name="map3SampleMean_">Nullable field value to be set</param>
    public void SetMap3SampleMean(ushort? map3SampleMean_)
    {
        SetFieldValue(3, 0, map3SampleMean_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the MapMorningValues field
    /// Units: mmHg</summary>
    /// <returns>Returns nullable ushort representing the MapMorningValues field</returns>
    public ushort? GetMapMorningValues()
    {
        return (ushort?)GetFieldValue(4, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set MapMorningValues field
    /// Units: mmHg</summary>
    /// <param name="mapMorningValues_">Nullable field value to be set</param>
    public void SetMapMorningValues(ushort? mapMorningValues_)
    {
        SetFieldValue(4, 0, mapMorningValues_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the MapEveningValues field
    /// Units: mmHg</summary>
    /// <returns>Returns nullable ushort representing the MapEveningValues field</returns>
    public ushort? GetMapEveningValues()
    {
        return (ushort?)GetFieldValue(5, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set MapEveningValues field
    /// Units: mmHg</summary>
    /// <param name="mapEveningValues_">Nullable field value to be set</param>
    public void SetMapEveningValues(ushort? mapEveningValues_)
    {
        SetFieldValue(5, 0, mapEveningValues_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the HeartRate field
    /// Units: bpm</summary>
    /// <returns>Returns nullable byte representing the HeartRate field</returns>
    public byte? GetHeartRate()
    {
        return (byte?)GetFieldValue(6, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set HeartRate field
    /// Units: bpm</summary>
    /// <param name="heartRate_">Nullable field value to be set</param>
    public void SetHeartRate(byte? heartRate_)
    {
        SetFieldValue(6, 0, heartRate_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the HeartRateType field</summary>
    /// <returns>Returns nullable HrType enum representing the HeartRateType field</returns>
    public Types.HrType? GetHeartRateType()
    {
        object obj = GetFieldValue(7, 0, Fit.SubfieldIndexMainField);
        Types.HrType? value = obj == null ? (Types.HrType?)null : (Types.HrType)obj;
        return value;
    }

    /// <summary>
    /// Set HeartRateType field</summary>
    /// <param name="heartRateType_">Nullable field value to be set</param>
    public void SetHeartRateType(Types.HrType? heartRateType_)
    {
        SetFieldValue(7, 0, heartRateType_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the Status field</summary>
    /// <returns>Returns nullable BpStatus enum representing the Status field</returns>
    public Types.BpStatus? GetStatus()
    {
        object obj = GetFieldValue(8, 0, Fit.SubfieldIndexMainField);
        Types.BpStatus? value = obj == null ? (Types.BpStatus?)null : (Types.BpStatus)obj;
        return value;
    }

    /// <summary>
    /// Set Status field</summary>
    /// <param name="status_">Nullable field value to be set</param>
    public void SetStatus(Types.BpStatus? status_)
    {
        SetFieldValue(8, 0, status_, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Retrieves the UserProfileIndex field
    /// Comment: Associates this blood pressure message to a user. This corresponds to the index of the user profile message in the blood pressure file.</summary>
    /// <returns>Returns nullable ushort representing the UserProfileIndex field</returns>
    public ushort? GetUserProfileIndex()
    {
        return (ushort?)GetFieldValue(9, 0, Fit.SubfieldIndexMainField);
    }

    /// <summary>
    /// Set UserProfileIndex field
    /// Comment: Associates this blood pressure message to a user. This corresponds to the index of the user profile message in the blood pressure file.</summary>
    /// <param name="userProfileIndex_">Nullable field value to be set</param>
    public void SetUserProfileIndex(ushort? userProfileIndex_)
    {
        SetFieldValue(9, 0, userProfileIndex_, Fit.SubfieldIndexMainField);
    }
    #endregion // Methods
} // Class
} // namespace
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Baseline;
using Baseline.Reflection;
using LamarCodeGeneration;
using Marten.Events.CodeGeneration;
using Marten.Exceptions;
using Marten.Linq;
using Marten.Linq.Includes;
using Marten.Linq.QueryHandlers;
using Marten.Schema.Arguments;
using Marten.Util;
using Npgsql;
namespace Marten.Internal.CompiledQueries
{
/// <summary>
/// Describes how a compiled query type maps onto a templated SQL command:
/// which of its members supply parameter values, which receives statistics,
/// and which drive include plans.
/// </summary>
public class CompiledQueryPlan
{
    public CompiledQueryPlan(Type queryType, Type outputType)
    {
        QueryType = queryType;
        OutputType = outputType;
    }

    /// <summary>The user-defined compiled query type.</summary>
    public Type QueryType { get; }

    /// <summary>The result type the query produces.</summary>
    public Type OutputType { get; }

    /// <summary>Members whose types cannot participate as query parameters.</summary>
    public IList<MemberInfo> InvalidMembers { get; } = new List<MemberInfo>();

    /// <summary>Members that map to SQL command parameters.</summary>
    public IList<IQueryMember> Parameters { get; } = new List<IQueryMember>();

    /// <summary>The templated command built from a sample query instance.</summary>
    public NpgsqlCommand Command { get; set; }

    public IQueryHandler HandlerPrototype { get; set; }

    /// <summary>Member (if any) that receives <see cref="QueryStatistics"/>.</summary>
    public MemberInfo StatisticsMember { get; set; }

    public IList<MemberInfo> IncludeMembers { get; } = new List<MemberInfo>();

    internal IList<IIncludePlan> IncludePlans { get; } = new List<IIncludePlan>();

    /// <summary>
    /// Classifies every public field/property of the query type as a statistics
    /// member, an include member, a parameter member, or an invalid member.
    /// Order of the checks matters: nullable types are rejected before the
    /// parameter finders are consulted.
    /// </summary>
    public void FindMembers()
    {
        foreach (var member in findMembers())
        {
            var memberType = member.GetRawMemberType();
            if (memberType == typeof(QueryStatistics))
            {
                StatisticsMember = member;
            }
            else if (memberType.Closes(typeof(IDictionary<,>)))
            {
                IncludeMembers.Add(member);
            }
            else if (memberType.Closes(typeof(Action<>)))
            {
                IncludeMembers.Add(member);
            }
            else if (memberType.Closes(typeof(IList<>)))
            {
                IncludeMembers.Add(member);
            }
            else if (memberType.IsNullable())
            {
                InvalidMembers.Add(member);
            }
            else if (QueryCompiler.Finders.All(x => !x.Matches(memberType)))
            {
                InvalidMembers.Add(member);
            }
            else if (member is PropertyInfo)
            {
                var queryMember = typeof(PropertyQueryMember<>).CloseAndBuildAs<IQueryMember>(member, memberType);
                Parameters.Add(queryMember);
            }
            else if (member is FieldInfo)
            {
                var queryMember = typeof(FieldQueryMember<>).CloseAndBuildAs<IQueryMember>(member, memberType);
                Parameters.Add(queryMember);
            }
        }
    }

    // Public instance fields and properties of the query type, excluding
    // anything decorated with [MartenIgnore].
    private IEnumerable<MemberInfo> findMembers()
    {
        foreach (var field in QueryType.GetFields(BindingFlags.Instance | BindingFlags.Public)
            .Where(x => !x.HasAttribute<MartenIgnoreAttribute>())) yield return field;

        foreach (var property in QueryType.GetProperties(BindingFlags.Instance | BindingFlags.Public)
            .Where(x => !x.HasAttribute<MartenIgnoreAttribute>())) yield return property;
    }

    /// <summary>
    /// Rewrites the command text, replacing each named parameter placeholder with a
    /// positional '?'. The tenant id parameter is preserved by name. Iterating the
    /// parameters in reverse replaces higher-numbered generated names (":p10")
    /// before their prefixes (":p1").
    /// </summary>
    public string CorrectedCommandText()
    {
        var text = Command.CommandText;
        for (var i = Command.Parameters.Count - 1; i >= 0; i--)
        {
            var parameterName = Command.Parameters[i].ParameterName;
            if (parameterName == TenantIdArgument.ArgName)
            {
                continue;
            }

            text = text.Replace(":" + parameterName, "?");
        }

        return text;
    }

    /// <summary>
    /// Returns the QueryStatistics held by the query object, substituting a fresh
    /// instance when the member is present but unset. Returns null when the query
    /// type declares no statistics member.
    /// </summary>
    public QueryStatistics GetStatisticsIfAny(object query)
    {
        if (StatisticsMember is PropertyInfo p)
        {
            return (QueryStatistics)p.GetValue(query) ?? new QueryStatistics();
        }

        if (StatisticsMember is FieldInfo f)
        {
            return (QueryStatistics)f.GetValue(query) ?? new QueryStatistics();
        }

        return null;
    }

    /// <summary>
    /// Produces a query object whose member values are all mutually unique so that
    /// generated SQL parameters can be matched back to members. Uses the supplied
    /// query when its values are already unique; otherwise builds a new instance.
    /// </summary>
    /// <exception cref="InvalidCompiledQueryException">When no unique-valued template can be created.</exception>
    public ICompiledQuery<TDoc, TOut> CreateQueryTemplate<TDoc, TOut>(ICompiledQuery<TDoc, TOut> query)
    {
        foreach (var parameter in Parameters) parameter.StoreValue(query);
        if (!(query is IQueryPlanning) && AreAllMemberValuesUnique(query))
        {
            return query;
        }

        try
        {
            return (ICompiledQuery<TDoc, TOut>)TryCreateUniqueTemplate(query.GetType());
        }
        catch (Exception e)
        {
            throw new InvalidCompiledQueryException("Unable to create a Compiled Query template", e);
        }
    }

    private bool AreAllMemberValuesUnique(object query)
    {
        return QueryCompiler.Finders.All(x => x.AreValuesUnique(query, this));
    }

    /// <summary>
    /// Builds an instance of the query type with unique member values, seeding the
    /// greediest constructor first and then writing unique values directly to any
    /// still-colliding members.
    /// </summary>
    public object TryCreateUniqueTemplate(Type type)
    {
        // Prefer the constructor with the most parameters so as many members as
        // possible are seeded through construction. (Length avoids the needless
        // LINQ Count() over an array.)
        var constructor = type.GetConstructors()
            .OrderByDescending(x => x.GetParameters().Length)
            .FirstOrDefault();

        if (constructor == null)
        {
            throw new InvalidOperationException("Cannot find a suitable constructor for query planning for type " +
                                                type.FullNameInCode());
        }

        var valueSource = new UniqueValueSource();
        var ctorArgs = valueSource.ArgsFor(constructor);
        var query = Activator.CreateInstance(type, ctorArgs);
        if (query is IQueryPlanning planning)
        {
            planning.SetUniqueValuesForQueryPlanning();
            foreach (var member in Parameters) member.StoreValue(query);
        }

        if (AreAllMemberValuesUnique(query))
        {
            return query;
        }

        foreach (var queryMember in Parameters) queryMember.TryWriteValue(valueSource, query);

        if (AreAllMemberValuesUnique(query))
        {
            return query;
        }

        throw new InvalidCompiledQueryException("Marten is unable to create a compiled query plan for type " +
                                                type.FullNameInCode());
    }

    /// <summary>
    /// Captures the templated command and matches each parameter member to the
    /// command parameters (the tenant id parameter is excluded from matching).
    /// </summary>
    /// <exception cref="InvalidCompiledQueryException">When any member cannot be matched to a parameter.</exception>
    public void ReadCommand(NpgsqlCommand command, StoreOptions storeOptions)
    {
        Command = command;

        var parameters = command.Parameters.ToList();
        parameters.RemoveAll(x => x.ParameterName == TenantIdArgument.ArgName);

        foreach (var parameter in Parameters) parameter.TryMatch(parameters, storeOptions);

        // Materialize once: the deferred Where() would otherwise be evaluated
        // twice (for the emptiness check and again for the error message).
        var missing = Parameters.Where(x => !x.ParameterIndexes.Any()).ToArray();
        if (missing.Length > 0)
        {
            throw new InvalidCompiledQueryException(
                $"Unable to match compiled query member(s) {missing.Select(x => x.Member.Name).Join(", ")} with a command parameter");
        }
    }
}
}
| |
using System;
using System.Collections;
using System.IO;
using System.Text;
using System.Xml;
namespace GuruComponents.CodeEditor.CodeEditor.Syntax
{
/// <summary>
/// Describes a file type (extension and display name) associated with a language.
/// </summary>
public class FileType
{
    /// <summary>
    /// The file type extension. NOTE(review): Language.FileDialogFilters prepends
    /// only a "*" to this value, so it presumably includes the leading dot
    /// (e.g. ".cs") — confirm against the shipped .syn files.
    /// </summary>
    public string Extension="";

    /// <summary>
    /// The display name of the file type.
    /// </summary>
    public string Name="";
}
/// <summary>
/// The Language class describes a language.<br/>
/// It consists of a MainBlock, which is the start BlockType of the language.<br/>
/// It also has a list of file types that are valid for this language.<br/>
/// </summary>
/// <example>
/// <b>Apply a Syntax to a SyntaxBox</b>
/// <code>
/// SyntaxBoxControl1.Document.SyntaxFile="C#.syn";
/// </code>
/// </example>
public class Language
{
    #region PUBLIC PROPERTY SEPARATORS

    // Characters that terminate a word when the parser tokenizes text.
    // NOTE(review): '&' appears twice in this string; harmless but redundant.
    private string _Separators=".,:;{}()[]+-*/\\ \t=&%$#@!|&";

    /// <summary>
    /// Gets or sets the separator characters used when tokenizing text.
    /// </summary>
    public string Separators
    {
        get
        {
            return _Separators;
        }
        set
        {
            _Separators = value;
        }
    }

    #endregion

    /// <summary>
    /// Gets a file dialog filter string ("Name |*.ext1;*.ext2") built from <see cref="FileTypes"/>.
    /// </summary>
    public string FileDialogFilters
    {
        get
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(this.Name);
            sb.Append(" |");
            for (int i = 0; i < FileTypes.Count; i++)
            {
                FileType current = (FileType)FileTypes[i];
                sb.Append("*");
                sb.Append(current.Extension);
                if (i < FileTypes.Count - 1)
                    sb.Append(";");
            }
            return sb.ToString();
        }
    }

    #region PUBLIC PROPERTY VERSION

    private long _Version = long.MinValue;

    /// <summary>
    /// Gets or sets the version counter used to detect changes to the language.
    /// </summary>
    public long Version
    {
        get
        {
            return _Version;
        }
        set
        {
            _Version = value;
        }
    }

    #endregion

    /// <summary>
    /// Rebuilds parent references and keyword/operator lookup tables for all
    /// blocks reachable from <see cref="MainBlock"/>.
    /// </summary>
    public void UpdateLists()
    {
        BlockType[] blocks = this.Blocks;
        foreach (BlockType block in blocks)
        {
            block.Parent = this;
            block.ResetLookupTable();

            // Re-register every keyword pattern in the block's lookup table.
            block.KeywordsList.Parent = block;
            foreach (PatternList patterns in block.KeywordsList)
            {
                patterns.Parent = block.KeywordsList;
                foreach (Pattern pattern in patterns)
                {
                    block.AddToLookupTable(pattern);
                }
            }

            // ...and every operator pattern.
            block.OperatorsList.Parent = block;
            foreach (PatternList patterns in block.OperatorsList)
            {
                patterns.Parent = block.OperatorsList;
                foreach (Pattern pattern in patterns)
                {
                    block.AddToLookupTable(pattern);
                }
            }

            block.BuildLookupTable();
        }
    }

    /// <summary>
    /// Bumps the version counter, wrapping back to long.MinValue before it can
    /// reach long.MaxValue.
    /// </summary>
    public void ChangeVersion()
    {
        this.Version++;
        if (this.Version > long.MaxValue - 10)
            this.Version = long.MinValue;
    }

    // Scratch tables rebuilt on every access of Blocks / Styles.
    private Hashtable _Blocks = new Hashtable();
    private Hashtable _Styles = new Hashtable();

    /// <summary>
    /// Name of the Language
    /// </summary>
    public string Name="";

    /// <summary>
    /// The start BlockType for this language
    /// </summary>
    public BlockType MainBlock=null;

    /// <summary>
    /// ArrayList containing the valid filetypes for this language
    /// </summary>
    public ArrayList FileTypes=new ArrayList ();

    public Language()
    {
    }

    /// <summary>
    /// Loads a language definition from a syntax file on disk.
    /// </summary>
    public static Language FromSyntaxFile(string filename)
    {
        SyntaxLoader sl = new SyntaxLoader();
        return sl.Load(filename);
    }

    /// <summary>
    /// Loads a language definition from a syntax file stream.
    /// </summary>
    public static Language FromSyntaxFile(Stream stream)
    {
        SyntaxLoader sl = new SyntaxLoader();
        return sl.Load(stream);
    }

    /// <summary>
    /// Inserts the target language's main block as the first child of every
    /// block in this language.
    /// </summary>
    public void MergeByMainBlock(Language Target)
    {
        BlockType[] blocks = this.Blocks;
        foreach (BlockType bt in blocks)
        {
            bt.ChildBlocks.Insert(0, Target.MainBlock);
        }
    }

    /// <summary>
    /// Inserts every child block of the target language's main block at the
    /// front of every block in this language (original child order preserved).
    /// </summary>
    public void MergeByChildBlocks(Language Target)
    {
        BlockType[] blocks = this.Blocks;
        foreach (BlockType bt in blocks)
        {
            for (int i = Target.MainBlock.ChildBlocks.Count - 1; i >= 0; i--)
            {
                BlockType child = Target.MainBlock.ChildBlocks[i];
                bt.ChildBlocks.Insert(0, child);
            }
        }
    }

    /// <summary>
    /// Gets all BlockTypes reachable from <see cref="MainBlock"/>.
    /// </summary>
    public BlockType[] Blocks
    {
        get
        {
            _Blocks.Clear();
            FillBlocks(this.MainBlock);
            BlockType[] blocks = new BlockType[_Blocks.Values.Count];
            int i = 0;
            foreach (BlockType bt in _Blocks.Values)
            {
                blocks[i] = bt;
                i++;
            }
            return blocks;
        }
    }

    /// <summary>
    /// Gets all distinct TextStyles referenced by any block, scope pattern,
    /// keyword list or operator list in this language.
    /// </summary>
    public TextStyle[] Styles
    {
        get
        {
            _Styles.Clear();
            BlockType[] blocks = this.Blocks;
            foreach (BlockType bt in blocks)
            {
                _Styles[bt.Style] = bt.Style;
                foreach (Scope sc in bt.ScopePatterns)
                {
                    if (sc.Style != null)
                        _Styles[sc.Style] = sc.Style;
                }
                foreach (PatternList pl in bt.KeywordsList)
                {
                    if (pl.Style != null)
                        _Styles[pl.Style] = pl.Style;
                }
                foreach (PatternList pl in bt.OperatorsList)
                {
                    if (pl.Style != null)
                        _Styles[pl.Style] = pl.Style;
                }
            }

            TextStyle[] styles = new TextStyle[_Styles.Values.Count];
            int i = 0;
            foreach (TextStyle st in _Styles.Values)
            {
                styles[i] = st;
                i++;
            }
            return styles;
        }
    }

    // Depth-first traversal collecting every reachable block exactly once
    // (the hashtable doubles as the visited set, so cycles terminate).
    private void FillBlocks(BlockType bt)
    {
        if (bt == null)
            return;
        if (_Blocks[bt] != null)
            return;

        _Blocks[bt] = bt;
        foreach (BlockType btc in bt.ChildBlocks)
        {
            FillBlocks(btc);
        }
        foreach (Scope sc in bt.ScopePatterns)
        {
            FillBlocks(sc.SpawnBlockOnEnd);
            FillBlocks(sc.SpawnBlockOnStart);
        }
    }

    /// <summary>
    /// Save user styles configuration on the directory set on UserCustomStyles.
    /// If UserCustomStyles is null or does not exist the function throws an IOException.
    /// </summary>
    public void SaveStyles()
    {
        if (SyntaxLoader.UserCustomStyles == null)
        {
            throw new IOException("Invalid user config dir.");
        }
        if (!Directory.Exists(SyntaxLoader.UserCustomStyles))
        {
            throw new IOException("Invalid user config path name, or path don't exist.");
        }

        string path = Path.Combine(SyntaxLoader.UserCustomStyles, this.Name + ".conf");

        // 'using' guarantees the file handle is released even if a write throws.
        using (XmlTextWriter xwr = new XmlTextWriter(path, Encoding.UTF8))
        {
            xwr.Formatting = Formatting.Indented;
            xwr.WriteStartElement("styles");
            TextStyle[] styles = this.Styles;
            foreach (TextStyle style in styles)
            {
                xwr.WriteStartElement("Style");
                xwr.WriteAttributeString("Name", style.Name);
                xwr.WriteAttributeString("ForeColor", style.ForeColor.Name);
                xwr.WriteAttributeString("BackColor", style.BackColor.Name);
                xwr.WriteAttributeString("Bold", style.Bold.ToString());
                xwr.WriteAttributeString("Italic", style.Italic.ToString());
                // BUG FIX: previously wrote style.Italic here, so the saved
                // "Underline" attribute never reflected the underline setting.
                xwr.WriteAttributeString("Underline", style.Underline.ToString());
                xwr.WriteEndElement();
            }
            xwr.WriteEndElement();
            xwr.Flush();
        }
    }

    public override string ToString()
    {
        return this.Name;
    }
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.RecoveryServices;
using Microsoft.Azure.Management.RecoveryServices.Models;
namespace Microsoft.Azure.Management.RecoveryServices
{
public static partial class VaultExtendedInfoOperationsExtensions
{
/// <summary>
/// Create the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource group/ Cloud service containing
/// the resource/ Vault collection.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='extendedInfoArgs'>
/// Required. Create resource extended info input parameters.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse CreateExtendedInfo(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, ResourceExtendedInformationArgs extendedInfoArgs, CustomRequestHeaders customRequestHeaders)
{
    // Generated synchronous wrapper: starts the async call on the default
    // task scheduler and blocks for its result.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IVaultExtendedInfoOperations)s).CreateExtendedInfoAsync(resourceGroupName, resourceName, extendedInfoArgs, customRequestHeaders);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource group/ Cloud service containing
/// the resource/ Vault collection.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='extendedInfoArgs'>
/// Required. Create resource extended info input parameters.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> CreateExtendedInfoAsync(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, ResourceExtendedInformationArgs extendedInfoArgs, CustomRequestHeaders customRequestHeaders)
{
return operations.CreateExtendedInfoAsync(resourceGroupName, resourceName, extendedInfoArgs, customRequestHeaders, CancellationToken.None);
}
/// <summary>
/// Get the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job
/// collection.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for the resource extended information object
/// </returns>
public static ResourceExtendedInformationResponse GetExtendedInfo(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, CustomRequestHeaders customRequestHeaders)
{
return Task.Factory.StartNew((object s) =>
{
return ((IVaultExtendedInfoOperations)s).GetExtendedInfoAsync(resourceGroupName, resourceName, customRequestHeaders);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job
/// collection.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for the resource extended information object
/// </returns>
public static Task<ResourceExtendedInformationResponse> GetExtendedInfoAsync(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, CustomRequestHeaders customRequestHeaders)
{
return operations.GetExtendedInfoAsync(resourceGroupName, resourceName, customRequestHeaders, CancellationToken.None);
}
/// <summary>
/// Update the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the vault.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='extendedInfoArgs'>
/// Required. Update resource extended info input parameters.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for the resource extended information object
/// </returns>
public static ResourceExtendedInformationResponse UpdateExtendedInfo(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, ResourceExtendedInformationArgs extendedInfoArgs, CustomRequestHeaders customRequestHeaders)
{
return Task.Factory.StartNew((object s) =>
{
return ((IVaultExtendedInfoOperations)s).UpdateExtendedInfoAsync(resourceGroupName, resourceName, extendedInfoArgs, customRequestHeaders);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Update the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the vault.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='extendedInfoArgs'>
/// Required. Update resource extended info input parameters.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for the resource extended information object
/// </returns>
public static Task<ResourceExtendedInformationResponse> UpdateExtendedInfoAsync(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, ResourceExtendedInformationArgs extendedInfoArgs, CustomRequestHeaders customRequestHeaders)
{
return operations.UpdateExtendedInfoAsync(resourceGroupName, resourceName, extendedInfoArgs, customRequestHeaders, CancellationToken.None);
}
/// <summary>
/// Get the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job
/// collection.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='parameters'>
/// Required. Upload Vault Certificate input parameters.
/// </param>
/// <param name='certFriendlyName'>
/// Required. Certificate friendly name
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for the upload certificate response
/// </returns>
public static UploadCertificateResponse UploadCertificate(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, CertificateArgs parameters, string certFriendlyName, CustomRequestHeaders customRequestHeaders)
{
return Task.Factory.StartNew((object s) =>
{
return ((IVaultExtendedInfoOperations)s).UploadCertificateAsync(resourceGroupName, resourceName, parameters, certFriendlyName, customRequestHeaders);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get the vault extended info.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.RecoveryServices.IVaultExtendedInfoOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group containing the job
/// collection.
/// </param>
/// <param name='resourceName'>
/// Required. The name of the resource.
/// </param>
/// <param name='parameters'>
/// Required. Upload Vault Certificate input parameters.
/// </param>
/// <param name='certFriendlyName'>
/// Required. Certificate friendly name
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <returns>
/// The response model for the upload certificate response
/// </returns>
public static Task<UploadCertificateResponse> UploadCertificateAsync(this IVaultExtendedInfoOperations operations, string resourceGroupName, string resourceName, CertificateArgs parameters, string certFriendlyName, CustomRequestHeaders customRequestHeaders)
{
return operations.UploadCertificateAsync(resourceGroupName, resourceName, parameters, certFriendlyName, customRequestHeaders, CancellationToken.None);
}
}
}
| |
#region license
// Copyright (c) 2004, Rodrigo B. de Oliveira (rbo@acm.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Rodrigo B. de Oliveira nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System.Linq;
using System.Runtime.Remoting.Messaging;
using Boo.Lang.Compiler.TypeSystem.Builders;
using Boo.Lang.Compiler.TypeSystem.Core;
using Boo.Lang.Compiler.Util;
using Boo.Lang.Compiler.Ast;
using Boo.Lang.Compiler.TypeSystem;
namespace Boo.Lang.Compiler.Steps
{
/// <summary>
/// Compiler step that injects conversions between callable types wherever an
/// expression of one callable type is used where another is expected: it wraps
/// standalone method references in delegate constructor invocations, and when
/// two callable signatures are merely compatible (not identical) it generates
/// an adaptor class whose Invoke forwards to the wrapped callable.
/// </summary>
public class InjectCallableConversions : AbstractVisitorCompilerStep
{
// Method currently being visited; used to convert return expressions.
IMethod _current;
// Lazily-initialized mapping of System.Runtime.Remoting.Messaging.AsyncResult.
IType _asyncResultType;
IMethod _asyncResultTypeAsyncDelegateGetter;
// Adaptor classes generated so far, keyed by (To, From) callable-type pair.
readonly List<AdaptorRecord> _adaptors = new List<AdaptorRecord>();
// Entry point: skip entirely when earlier steps produced errors.
override public void Run()
{
if (Errors.Count != 0)
return;
Visit(CompileUnit);
CheckFieldInvocations();
}
// Rewrites invocations of callable-typed fields so they explicitly call the
// callable type's Invoke method (resolving anonymous callable types first).
private void CheckFieldInvocations()
{
var invocations = ContextAnnotations.GetFieldInvocations();
if (invocations == null) return;
foreach (var node in invocations)
{
var et = node.Target.ExpressionType;
if (et is AnonymousCallableType)
{
et = ((AnonymousCallableType) et).ConcreteType;
node.Target.ExpressionType = et;
}
var invoke = NameResolutionService.Resolve(et, "Invoke") as IMethod;
if (invoke == null)
throw new System.NotSupportedException("Invoke method on callable field not found");
node.Target = CodeBuilder.CreateMemberReference(node.Target.LexicalInfo, node.Target, invoke);
}
}
override public void LeaveExpressionStatement(ExpressionStatement node)
{
// allow interactive evaluation of closures (see booish)
var converted = ConvertExpression(node.Expression);
if (converted != null)
node.Expression = converted;
}
// Converts the returned expression to the enclosing method's return type.
override public void LeaveReturnStatement(ReturnStatement node)
{
Expression expression = node.Expression;
if (null == expression)
return;
if (!HasReturnType(_current))
return;
Expression newExpression = Convert(_current.ReturnType, expression);
if (null == newExpression)
return;
node.Expression = newExpression;
}
// Converts both halves of a hash-literal key/value pair.
override public void LeaveExpressionPair(ExpressionPair pair)
{
Expression converted = ConvertExpression(pair.First);
if (null != converted)
{
pair.First = converted;
}
converted = ConvertExpression(pair.Second);
if (null != converted)
{
pair.Second = converted;
}
}
override public void LeaveListLiteralExpression(ListLiteralExpression node)
{
ConvertExpressions(node.Items);
}
// Converts each array element to the array's element type.
override public void LeaveArrayLiteralExpression(ArrayLiteralExpression node)
{
IType elementType = GetExpressionType(node).ElementType;
for (int i = 0; i < node.Items.Count; ++i)
{
Expression converted = Convert(elementType, node.Items[i]);
if (null != converted)
{
node.Items.ReplaceAt(i, converted);
}
}
}
// Converts each argument of an invocation to its parameter's declared type.
override public void LeaveMethodInvocationExpression(MethodInvocationExpression node)
{
var parameters = ParametersFor(node.Target);
if (parameters == null)
return;
ConvertMethodInvocation(node, parameters);
}
// Resolves the parameter list of the invocation target: either a bound
// method entity or a callable-typed expression; null when neither applies.
private static IParameter[] ParametersFor(Expression callableExpression)
{
var entity = callableExpression.Entity as IMethod;
if (entity != null)
return entity.GetParameters();
var type = callableExpression.ExpressionType as ICallableType;
if (type != null)
return type.GetSignature().Parameters;
return null;
}
override public void LeaveMemberReferenceExpression(MemberReferenceExpression node)
{
// someDelegateRef.EndInvoke(...) on a standalone method reference must be
// redirected through the AsyncResult's AsyncDelegate (see helper below).
if (IsEndInvokeOnStandaloneMethodReference(node) && AstUtil.IsTargetOfMethodInvocation(node))
{
ReplaceEndInvokeTargetByGetAsyncDelegate((MethodInvocationExpression)node.ParentNode);
return;
}
var newTarget = ConvertExpression(node.Target);
if (null != newTarget)
node.Target = newTarget;
}
override public void LeaveCastExpression(CastExpression node)
{
var newExpression = Convert(node.ExpressionType, node.Target);
if (newExpression != null)
node.Target = newExpression;
}
// Both branches of a conditional must agree with the expression's type.
public override void LeaveConditionalExpression(ConditionalExpression node)
{
var newTrueValue = Convert(node.ExpressionType, node.TrueValue);
if (newTrueValue != null)
node.TrueValue = newTrueValue;
var newFalseValue = Convert(node.ExpressionType, node.FalseValue);
if (newFalseValue != null)
node.FalseValue = newFalseValue;
}
override public void LeaveTryCastExpression(TryCastExpression node)
{
Expression newExpression = Convert(node.ExpressionType, node.Target);
if (null != newExpression)
node.Target = newExpression;
}
// Only plain assignments need conversion of the right-hand side.
override public void LeaveBinaryExpression(BinaryExpression node)
{
if (BinaryOperatorType.Assign != node.Operator)
return;
Expression newRight = Convert(node.Left.ExpressionType, node.Right);
if (null != newRight)
node.Right = newRight;
}
override public void LeaveGeneratorExpression(GeneratorExpression node)
{
Expression newExpression = Convert(
GetConcreteExpressionType(node.Expression),
node.Expression);
if (null != newExpression)
{
node.Expression = newExpression;
}
}
// Maps AsyncResult and its AsyncDelegate getter on first use.
void InitializeAsyncResultType()
{
if (_asyncResultType != null)
return;
var type = typeof(AsyncResult);
_asyncResultType = TypeSystemServices.Map(type);
_asyncResultTypeAsyncDelegateGetter = TypeSystemServices.Map(Methods.GetterOf<AsyncResult, object>(r => r.AsyncDelegate));
}
override public void Dispose()
{
_asyncResultType = null;
_asyncResultTypeAsyncDelegateGetter = null;
_adaptors.Clear();
base.Dispose();
}
override public void OnMethod(Method node)
{
_current = GetEntity(node);
Visit(node.Body);
}
bool HasReturnType(IMethod method)
{
return TypeSystemServices.VoidType != method.ReturnType;
}
bool IsMethodReference(Expression node)
{
IEntity entity = GetEntity(node);
return EntityType.Method == entity.EntityType;
}
static bool IsNotTargetOfMethodInvocation(Expression node)
{
var mie = node.ParentNode as MethodInvocationExpression;
return mie == null || mie.Target != node;
}
// A "standalone" reference names a method without immediately invoking it
// (i.e. the method is being used as a value, so it needs a delegate wrapper).
bool IsStandaloneMethodReference(Expression node)
{
return
(node is ReferenceExpression || node is GenericReferenceExpression)
&& IsMethodReference(node)
&& IsNotTargetOfMethodInvocation(node);
}
private void ConvertMethodInvocation(MethodInvocationExpression node, IParameter[] parameters)
{
ExpressionCollection arguments = node.Arguments;
for (int i=0; i<parameters.Length; ++i)
{
Expression newArgument = Convert(parameters[i].Type, arguments[i]);
if (null != newArgument)
{
arguments.ReplaceAt(i, newArgument);
}
}
}
void ConvertExpressions(ExpressionCollection items)
{
for (int i=0; i<items.Count; ++i)
{
Expression converted = ConvertExpression(items[i]);
if (null != converted)
{
items.ReplaceAt(i, converted);
}
}
}
Expression ConvertExpression(Expression expression)
{
return Convert(expression.ExpressionType, expression);
}
// Core dispatch: returns a replacement expression, or null when no
// conversion is needed.
Expression Convert(IType expectedType, Expression argument)
{
if (IsStandaloneMethodReference(argument))
return ConvertMethodReference(expectedType, argument);
var callableType = expectedType as ICallableType;
if (callableType != null)
{
var argumentType = GetExpressionType(argument);
if (expectedType != argumentType && !argumentType.IsNull())
return Adapt(callableType, argument);
}
return null;
}
// Wraps a standalone method reference: a plain delegate construction when
// signatures match (or are compatible), otherwise delegate + adaptor.
private Expression ConvertMethodReference(IType expectedType, Expression argument)
{
var expectedCallable = expectedType as ICallableType;
if (expectedCallable != null)
{
var argumentType = (ICallableType) GetExpressionType(argument);
var expectedSig = expectedCallable.GetSignature();
var argSig = argumentType.GetSignature();
if (argSig != expectedSig)
{
if (TypeSystemServices.CompatibleSignatures(argSig, expectedSig) ||
(TypeSystemServices.CompatibleGenericSignatures(argSig, expectedSig) /*&& IsUnspecializedGenericMethodReference(argument)*/)
)
{
argument.ExpressionType = expectedType;
return CreateDelegate(expectedType, argument);
}
return Adapt(expectedCallable, CreateDelegate(GetConcreteExpressionType(argument), argument));
}
return CreateDelegate(expectedType, argument);
}
return CreateDelegate(GetConcreteExpressionType(argument), argument);
}
// NOTE(review): currently unreferenced (its only call site above is
// commented out); kept for the generic-signature compatibility path.
private static bool IsUnspecializedGenericMethodReference(Expression argument)
{
if (argument.NodeType != NodeType.MemberReferenceExpression)
return false;
var target = ((MemberReferenceExpression) argument).Target;
if (target.NodeType != NodeType.MethodInvocationExpression)
return false;
target = ((MethodInvocationExpression)target).Target;
if (target.Entity.EntityType != EntityType.Constructor)
return false;
var cls = ((IConstructor) target.Entity).DeclaringType;
return cls.GenericInfo != null && (cls.ConstructedInfo == null || !cls.ConstructedInfo.FullyConstructed);
}
// Routes the callable through a generated adaptor's static Adapt method.
Expression Adapt(ICallableType expected, Expression callable)
{
ICallableType actual = GetExpressionType(callable) as ICallableType;
if (null == actual)
{
// TODO: should we adapt System.Object, System.Delegate,
// System.MulticastDelegate and ICallable as well?
return null;
}
ClassDefinition adaptor = GetAdaptor(expected, actual);
Method adapt = (Method)adaptor.Members["Adapt"];
return CodeBuilder.CreateMethodInvocation((IMethod)adapt.Entity, callable);
}
ClassDefinition GetAdaptor(ICallableType to, ICallableType from)
{
return FindAdaptor(to, from) ?? CreateAdaptor(to, from);
}
// Cache record pairing a (to, from) callable conversion with its adaptor.
sealed class AdaptorRecord
{
public readonly ICallableType To;
public readonly ICallableType From;
public readonly ClassDefinition Adaptor;
public AdaptorRecord(ICallableType to, ICallableType from, ClassDefinition adaptor)
{
To = to;
From = from;
Adaptor = adaptor;
}
}
// Linear scan of the adaptor cache; adaptor counts are expected to be small.
ClassDefinition FindAdaptor(ICallableType to, ICallableType from)
{
foreach (AdaptorRecord record in _adaptors)
if (from == record.From && to == record.To)
return record.Adaptor;
return null;
}
// Synthesizes an adaptor class:
//   class $adaptor$From$To$n:
//     field $from : From
//     Invoke(<to's parameters>) -> forwards to $from.Invoke(...)
//     static Adapt(from: From) -> new To(new adaptor(from).Invoke)
ClassDefinition CreateAdaptor(ICallableType to, ICallableType from)
{
BooClassBuilder adaptor = CodeBuilder.CreateClass("$adaptor$" + from.Name + "$" + to.Name + "$" + _adaptors.Count);
adaptor.AddBaseType(TypeSystemServices.ObjectType);
adaptor.Modifiers = TypeMemberModifiers.Final|TypeMemberModifiers.Internal;
Field callable = adaptor.AddField("$from", from);
BooMethodBuilder constructor = adaptor.AddConstructor();
ParameterDeclaration param = constructor.AddParameter("from", from);
constructor.Body.Add(
CodeBuilder.CreateSuperConstructorInvocation(TypeSystemServices.ObjectType));
constructor.Body.Add(
CodeBuilder.CreateAssignment(
CodeBuilder.CreateReference(callable),
CodeBuilder.CreateReference(param)));
CallableSignature signature = to.GetSignature();
BooMethodBuilder invoke = adaptor.AddMethod("Invoke", signature.ReturnType);
foreach (IParameter parameter in signature.Parameters)
{
invoke.AddParameter(parameter.Name, parameter.Type, parameter.IsByRef);
}
MethodInvocationExpression mie = CodeBuilder.CreateMethodInvocation(
CodeBuilder.CreateReference(callable),
GetInvokeMethod(from));
// Forward only as many arguments as the source callable accepts;
// extra target parameters are silently dropped.
int fromParameterCount = from.GetSignature().Parameters.Length;
for (int i=0; i<fromParameterCount; ++i)
{
mie.Arguments.Add(
CodeBuilder.CreateReference(invoke.Parameters[i]));
}
// Propagate the return value only when both sides are non-void.
if (signature.ReturnType != TypeSystemServices.VoidType &&
from.GetSignature().ReturnType != TypeSystemServices.VoidType)
{
invoke.Body.Add(new ReturnStatement(mie));
}
else
{
invoke.Body.Add(mie);
}
BooMethodBuilder adapt = adaptor.AddMethod("Adapt", to);
adapt.Modifiers = TypeMemberModifiers.Static|TypeMemberModifiers.Public;
param = adapt.AddParameter("from", from);
adapt.Body.Add(
new ReturnStatement(
CodeBuilder.CreateConstructorInvocation(
to.GetConstructors().First(),
CodeBuilder.CreateConstructorInvocation(
(IConstructor)constructor.Entity,
CodeBuilder.CreateReference(param)),
CodeBuilder.CreateAddressOfExpression(invoke.Entity))));
var collector = new GenericTypeCollector(this.CodeBuilder);
collector.Process(adaptor.ClassDefinition);
RegisterAdaptor(to, from, adaptor.ClassDefinition);
return adaptor.ClassDefinition;
}
void RegisterAdaptor(ICallableType to, ICallableType from, ClassDefinition adaptor)
{
_adaptors.Add(new AdaptorRecord(to, from, adaptor));
TypeSystemServices.GetCompilerGeneratedTypesModule().Members.Add(adaptor);
}
bool IsEndInvokeOnStandaloneMethodReference(MemberReferenceExpression node)
{
if (IsStandaloneMethodReference(node.Target))
{
return node.Entity.Name == "EndInvoke";
}
return false;
}
// Rewrites `methodRef.EndInvoke(asyncResult)` so the receiver becomes
// `(CallableType)((AsyncResult)asyncResult).AsyncDelegate`, i.e. the delegate
// instance the BeginInvoke was issued on rather than the raw method reference.
void ReplaceEndInvokeTargetByGetAsyncDelegate(MethodInvocationExpression node)
{
InitializeAsyncResultType();
var asyncResult = node.Arguments.Last;
var endInvoke = (MemberReferenceExpression)node.Target;
var callableType = ((IMember)endInvoke.Entity).DeclaringType;
endInvoke.Target = CodeBuilder.CreateCast(callableType,
CodeBuilder.CreateMethodInvocation(
CodeBuilder.CreateCast(_asyncResultType, asyncResult.CloneNode()),
_asyncResultTypeAsyncDelegateGetter));
}
// Builds `CallableType(target, &method)`; target is null for static methods.
Expression CreateDelegate(IType type, Expression source)
{
var method = (IMethod)GetEntity(source);
Expression target = method.IsStatic
? CodeBuilder.CreateNullLiteral()
: ((MemberReferenceExpression)source).Target;
var cType = GetConcreteType(type) ??
TypeSystemServices.GetConcreteCallableType(source, (AnonymousCallableType) type);
return CodeBuilder.CreateConstructorInvocation(cType.GetConstructors().First(),
target,
CodeBuilder.CreateAddressOfExpression(method));
}
static IType GetConcreteType(IType type)
{
var anonymous = type as AnonymousCallableType;
return null == anonymous ? type : anonymous.ConcreteType;
}
IMethod GetInvokeMethod(ICallableType type)
{
return NameResolutionService.ResolveMethod(type, "Invoke");
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Compute
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for AvailabilitySetsOperations.
/// </summary>
public static partial class AvailabilitySetsOperationsExtensions
{
/// <summary>
/// The operation to create or update the availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='name'>
/// The name of the availability set.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Create Availability Set operation.
/// </param>
public static AvailabilitySet CreateOrUpdate(this IAvailabilitySetsOperations operations, string resourceGroupName, string name, AvailabilitySet parameters)
{
// Blocking wrapper over the async overload (sync-over-async via thread-pool hop).
return Task.Factory.StartNew(s => ((IAvailabilitySetsOperations)s).CreateOrUpdateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The operation to create or update the availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='name'>
/// The name of the availability set.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Create Availability Set operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<AvailabilitySet> CreateOrUpdateAsync(this IAvailabilitySetsOperations operations, string resourceGroupName, string name, AvailabilitySet parameters, CancellationToken cancellationToken = default(CancellationToken))
{
// Dispose the HTTP response envelope; only the deserialized body is returned.
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// The operation to delete the availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='availabilitySetName'>
/// The name of the availability set.
/// </param>
public static void Delete(this IAvailabilitySetsOperations operations, string resourceGroupName, string availabilitySetName)
{
// Blocking wrapper over the async overload (sync-over-async via thread-pool hop).
Task.Factory.StartNew(s => ((IAvailabilitySetsOperations)s).DeleteAsync(resourceGroupName, availabilitySetName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The operation to delete the availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='availabilitySetName'>
/// The name of the availability set.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IAvailabilitySetsOperations operations, string resourceGroupName, string availabilitySetName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteWithHttpMessagesAsync(resourceGroupName, availabilitySetName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// The operation to get the availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='availabilitySetName'>
/// The name of the availability set.
/// </param>
public static AvailabilitySet Get(this IAvailabilitySetsOperations operations, string resourceGroupName, string availabilitySetName)
{
// Blocking wrapper over the async overload (sync-over-async via thread-pool hop).
return Task.Factory.StartNew(s => ((IAvailabilitySetsOperations)s).GetAsync(resourceGroupName, availabilitySetName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The operation to get the availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='availabilitySetName'>
/// The name of the availability set.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<AvailabilitySet> GetAsync(this IAvailabilitySetsOperations operations, string resourceGroupName, string availabilitySetName, CancellationToken cancellationToken = default(CancellationToken))
{
// Dispose the HTTP response envelope; only the deserialized body is returned.
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, availabilitySetName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// The operation to list the availability sets.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
public static IEnumerable<AvailabilitySet> List(this IAvailabilitySetsOperations operations, string resourceGroupName)
{
// Blocking wrapper over the async overload (sync-over-async via thread-pool hop).
return Task.Factory.StartNew(s => ((IAvailabilitySetsOperations)s).ListAsync(resourceGroupName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The operation to list the availability sets.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IEnumerable<AvailabilitySet>> ListAsync(this IAvailabilitySetsOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
// Dispose the HTTP response envelope; only the deserialized body is returned.
using (var _result = await operations.ListWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists all available virtual machine sizes that can be used to create a new
/// virtual machine in an existing availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='availabilitySetName'>
/// The name of the availability set.
/// </param>
public static IEnumerable<VirtualMachineSize> ListAvailableSizes(this IAvailabilitySetsOperations operations, string resourceGroupName, string availabilitySetName)
{
// Blocking wrapper over the async overload (sync-over-async via thread-pool hop).
return Task.Factory.StartNew(s => ((IAvailabilitySetsOperations)s).ListAvailableSizesAsync(resourceGroupName, availabilitySetName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists all available virtual machine sizes that can be used to create a new
/// virtual machine in an existing availability set.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='availabilitySetName'>
/// The name of the availability set.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IEnumerable<VirtualMachineSize>> ListAvailableSizesAsync(this IAvailabilitySetsOperations operations, string resourceGroupName, string availabilitySetName, CancellationToken cancellationToken = default(CancellationToken))
{
// Dispose the HTTP response envelope; only the deserialized body is returned.
using (var _result = await operations.ListAvailableSizesWithHttpMessagesAsync(resourceGroupName, availabilitySetName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace DotNetty.Common.Utilities
{
using System.Diagnostics.Contracts;
using System.Threading;
/// <summary>
/// Default <see cref="IAttributeMap" /> implementation which use simple synchronization per bucket to keep the memory
/// overhead
/// as low as possible.
/// </summary>
public class DefaultAttributeMap : IAttributeMap
{
    // Number of hash buckets; must remain a power of two so Index() can reduce
    // a key id to a bucket with a cheap bit-mask instead of a modulo.
    const int BucketSize = 4;
    const int Mask = BucketSize - 1;

    // Initialized lazily (first GetAttribute call) to reduce memory consumption;
    // published atomically via Interlocked.CompareExchange and read through this
    // volatile field.
    volatile DefaultAttribute[] attributes;

    /// <summary>
    /// Gets the attribute for <paramref name="key"/>, lazily creating both the
    /// bucket array and the per-key node on first use. Fast paths are lock-free
    /// (CAS); the slow path locks the bucket's head node.
    /// </summary>
    public IAttribute<T> GetAttribute<T>(AttributeKey<T> key)
        where T : class
    {
        Contract.Requires(key != null);
        DefaultAttribute[] attrs = this.attributes;
        if (attrs == null)
        {
            attrs = new DefaultAttribute[BucketSize];
            // Not using ConcurrentHashMap due to high memory consumption.
            // If another thread raced us and installed its own array, adopt that one.
            attrs = Interlocked.CompareExchange(ref this.attributes, attrs, null) ?? attrs;
        }
        int i = Index(key);
        DefaultAttribute head = Volatile.Read(ref attrs[i]);
        if (head == null)
        {
            // No head exists yet which means we may be able to add the attribute without synchronization and just
            // use compare and set. At worst we need to fallback to synchronization
            head = new DefaultAttribute<T>(key);
            if (Interlocked.CompareExchange(ref this.attributes[i], head, null) == null)
            {
                // we were able to add it so return the head right away
                return (IAttribute<T>)head;
            }
            // CAS lost: another thread installed a head for this bucket; re-read
            // it and fall through to the locked slow path below.
            head = Volatile.Read(ref attrs[i]);
        }
        lock (head)
        {
            // Walk the bucket's linked list under the head's lock; reuse a live
            // node with a matching key, otherwise append a fresh node at the tail.
            DefaultAttribute curr = head;
            while (true)
            {
                if (!curr.Removed && curr.GetKey() == key)
                {
                    return (IAttribute<T>)curr;
                }
                DefaultAttribute next = curr.Next;
                if (next == null)
                {
                    var attr = new DefaultAttribute<T>(head, key);
                    curr.Next = attr;
                    attr.Prev = curr;
                    return attr;
                }
                else
                {
                    curr = next;
                }
            }
        }
    }

    /// <summary>
    /// Returns true if a non-removed attribute exists for <paramref name="key"/>.
    /// Never allocates; only takes the bucket lock when the head itself does not match.
    /// </summary>
    public bool HasAttribute<T>(AttributeKey<T> key)
        where T : class
    {
        Contract.Requires(key != null);
        DefaultAttribute[] attrs = this.attributes;
        if (attrs == null)
        {
            // no attribute exists
            return false;
        }
        int i = Index(key);
        DefaultAttribute head = Volatile.Read(ref attrs[i]);
        if (head == null)
        {
            // No attribute exists which point to the bucket in which the head should be located
            return false;
        }
        // check on the head can be done without synchronization
        if (head.GetKey() == key && !head.Removed)
        {
            return true;
        }
        lock (head)
        {
            // we need to synchronize on the head to walk the rest of the chain safely
            DefaultAttribute curr = head.Next;
            while (curr != null)
            {
                if (!curr.Removed && curr.GetKey() == key)
                {
                    return true;
                }
                curr = curr.Next;
            }
            return false;
        }
    }

    // Maps a key's id onto a bucket index using the power-of-two mask.
    static int Index<T>(AttributeKey<T> key) => key.Id & Mask;

    // Non-generic base node so that nodes for differently-typed keys can share
    // one linked list per bucket.
    abstract class DefaultAttribute
    {
        // The head of the linked-list this attribute belongs to, which may be itself
        protected readonly DefaultAttribute Head;

        // Double-linked list to prev and next node to allow fast removal
        public DefaultAttribute Prev;
        public DefaultAttribute Next;

        // Will be set to true once the attribute is removed via GetAndRemove() or Remove()
        public volatile bool Removed;

        public abstract IConstant GetKey();

        // Head-node constructor: the node is its own list head.
        protected DefaultAttribute()
        {
            this.Head = this;
        }

        // Non-head constructor: node belongs to the bucket rooted at head.
        protected DefaultAttribute(DefaultAttribute head)
        {
            this.Head = head;
        }
    }

    sealed class DefaultAttribute<T> : DefaultAttribute, IAttribute<T>
        where T : class
    {
        readonly AttributeKey<T> key;
        // Current value; always accessed via Volatile/Interlocked so writes are
        // promptly visible to readers on other threads.
        T value;

        public DefaultAttribute(DefaultAttribute head, AttributeKey<T> key)
            : base(head)
        {
            this.key = key;
        }

        public DefaultAttribute(AttributeKey<T> key)
        {
            this.key = key;
        }

        public AttributeKey<T> Key => this.key;

        public T Get() => Volatile.Read(ref this.value);

        public void Set(T value) => Volatile.Write(ref this.value, value);

        public T GetAndSet(T value) => Interlocked.Exchange(ref this.value, value);

        /// <summary>
        /// Atomically sets the value only if none is present. Returns the existing
        /// value if one was already set, or null when this call installed the value.
        /// </summary>
        public T SetIfAbsent(T value)
        {
            while (!this.CompareAndSet(null, value))
            {
                T old = this.Get();
                if (old != null)
                {
                    return old;
                }
            }
            return default(T);
        }

        // Marks the node removed, clears the value and unlinks it; returns the
        // value that was stored immediately before removal.
        public T GetAndRemove()
        {
            this.Removed = true;
            T oldValue = this.GetAndSet(null);
            this.Remove0();
            return oldValue;
        }

        public bool CompareAndSet(T oldValue, T newValue) => Interlocked.CompareExchange(ref this.value, newValue, oldValue) == oldValue;

        public void Remove()
        {
            this.Removed = true;
            this.Set(null);
            this.Remove0();
        }

        void Remove0()
        {
            lock (this.Head)
            {
                // We only update the linked-list structure if prev != null because if it is null this
                // DefaultAttribute acts also as head. The head must never be removed completely and just be
                // marked as removed as all synchronization is done on the head itself for each bucket.
                // The head itself will be GC'ed once the DefaultAttributeMap is GC'ed, so at most
                // BucketSize (4) removed heads can linger per map.
                if (this.Prev != null)
                {
                    this.Prev.Next = this.Next;
                    if (this.Next != null)
                    {
                        this.Next.Prev = this.Prev;
                    }
                    // Null out prev and next - this will guard against multiple remove0() calls which may corrupt
                    // the linked list for the bucket.
                    this.Prev = null;
                    this.Next = null;
                }
            }
        }

        public override IConstant GetKey() => this.key;
    }
}
}
| |
/*
Copyright 2010 Google Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/* Change history
* Oct 13 2008 Joe Feser joseph.feser@gmail.com
* Converted List<object>s and other .NET 1.1 collections to use Generics
* Combined IExtensionElement and IExtensionElementFactory interfaces
*
*/
#region Using directives
#define USE_TRACING
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml;
using System.Reflection;
using Google.Apis.Util;
using HttpUtility = ImportedFromMono.System.Web.HttpUtility;
#endregion
namespace Google.Apis
{
/// <summary>String utilities
/// </summary>
public static class Utilities
{
    /// <summary>
    /// True when the Mono-runtime is used to execute this code.
    /// </summary>
    public static readonly bool IsRunningOnMono = (Type.GetType("Mono.Runtime") != null);

    /// <summary>
    /// xsd version of bool:true
    /// </summary>
    public const string XSDTrue = "true";

    /// <summary>
    /// xsd version of bool:false
    /// </summary>
    public const string XSDFalse = "false";

    /// <summary>
    /// default user string
    /// </summary>
    public const string DefaultUser = "default";

    /// <summary>returns a blank emptyDate. That's the default for an empty string date</summary>
    public static DateTime EmptyDate
    {
        get
        {
            // That's the blank value you get when setting a DateTime from an
            // empty string in the property browser.
            return new DateTime(1, 1, 1);
        }
    }

    /// <summary>helper to read in a string and Encode it</summary>
    /// <param name="content">the xmlreader string</param>
    /// <returns>UTF8 encoded string</returns>
    public static string EncodeString(string content)
    {
        // Round-trip the string through UTF-8 bytes and back to characters.
        Encoding utf8Encoder = Encoding.UTF8;
        Byte[] utf8Bytes = EncodeStringToUtf8(content);
        char[] utf8Chars = new char[utf8Encoder.GetCharCount(utf8Bytes, 0, utf8Bytes.Length)];
        utf8Encoder.GetChars(utf8Bytes, 0, utf8Bytes.Length, utf8Chars, 0);
        return new String(utf8Chars);
    }

    /// <summary>
    /// returns you a bytearray of UTF8 bytes from the string passed in
    /// the passed in string is assumed to be UTF16
    /// </summary>
    /// <param name="content">UTF16 string</param>
    /// <returns>utf 8 byte array</returns>
    public static Byte[] EncodeStringToUtf8(string content)
    {
        // .NET strings are UTF-16 internally; convert those code units to UTF-8.
        Encoding utf8Encoder = Encoding.UTF8;
        Encoding utf16Encoder = Encoding.Unicode;
        Byte[] bytes = utf16Encoder.GetBytes(content);
        return Encoding.Convert(utf16Encoder, utf8Encoder, bytes);
    }

    /// <summary>helper to read in a string and Encode it according to
    /// RFC 5023 rules for slug headers</summary>
    /// <param name="slug">the Unicode string for the slug header</param>
    /// <returns>ASCII encoded string</returns>
    public static string EncodeSlugHeader(string slug)
    {
        if (slug == null)
        {
            return "";
        }
        Byte[] bytes = EncodeStringToUtf8(slug);
        if (bytes == null)
        {
            return "";
        }
        StringBuilder returnString = new StringBuilder(256);
        foreach (byte b in bytes)
        {
            // Percent-escape control characters, '%' itself and anything outside
            // printable ASCII. Always emit two hex digits ("%0A", not "%A") as
            // required by percent-encoding (RFC 3986, which RFC 5023 builds on);
            // the previous "{0:X}" format produced malformed one-digit escapes
            // for bytes below 0x10.
            if ((b < 0x20) || (b == 0x25) || (b > 0x7E))
            {
                returnString.AppendFormat(CultureInfo.InvariantCulture, "%{0:X2}", b);
            }
            else
            {
                returnString.Append((char)b);
            }
        }
        return returnString.ToString();
    }

    /// <summary>
    /// used as a cover method to hide the actual decoding implementation
    /// decodes an html decoded string
    /// </summary>
    /// <param name="value">the string to decode</param>
    public static string DecodedValue(string value)
    {
        return HttpUtility.HtmlDecode(value);
    }

    /// <summary>
    /// used as a cover method to hide the actual decoding implementation
    /// decodes an URL decoded string
    /// </summary>
    /// <param name="value">the string to decode</param>
    public static string UrlDecodedValue(string value)
    {
        return HttpUtility.UrlDecode(value);
    }

    /// <summary>
    /// tests an etag for weakness. returns TRUE for weak etags and for null strings
    /// </summary>
    /// <param name="eTag">the entity tag to inspect; may be null</param>
    /// <returns>true when the tag is weak ("W/" prefix) or null</returns>
    public static bool IsWeakETag(string eTag)
    {
        // Treat a missing tag as weak so callers never rely on it for strong
        // comparison. Ordinal comparison: "W/" is a protocol token, not text.
        return eTag == null || eTag.StartsWith("W/", StringComparison.Ordinal);
    }

    /// <summary>Method to output just the date portion as string</summary>
    /// <param name="dateTime">the DateTime object to output as a string</param>
    /// <returns>an rfc-3339 string</returns>
    public static string LocalDateInUTC(DateTime dateTime)
    {
        // Add "full-date T partial-time"
        return dateTime.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
    }

    /// <summary>Helper method to format a TimeSpan as a string compliant with the "time-offset" format defined in RFC-3339</summary>
    /// <param name="spanFromUtc">the TimeSpan to format</param>
    /// <returns>"Z" for a zero offset, otherwise "+hh:mm" / "-hh:mm"</returns>
    public static string FormatTimeOffset(TimeSpan spanFromUtc)
    {
        // Simply return "Z" if there is no offset
        if (spanFromUtc == TimeSpan.Zero)
        {
            return "Z";
        }
        // Return the signed numeric offset
        TimeSpan absoluteSpan = spanFromUtc.Duration();
        if (spanFromUtc > TimeSpan.Zero)
        {
            return "+" + FormatNumOffset(absoluteSpan);
        }
        else
        {
            return "-" + FormatNumOffset(absoluteSpan);
        }
    }

    /// <summary>Helper method to format a TimeSpan to {HH}:{MM}</summary>
    /// <param name="timeSpan">the TimeSpan to format</param>
    /// <returns>a string in "hh:mm" format.</returns>
    internal static string FormatNumOffset(TimeSpan timeSpan)
    {
        return String.Format(CultureInfo.InvariantCulture, "{0:00}:{1:00}", timeSpan.Hours, timeSpan.Minutes);
    }

    /////////////////////////////////////////////////////////////////////////////

    /// <summary>
    /// Returns the version of the Core library, or an empty string when it
    /// cannot be determined.
    /// </summary>
    public static string GetLibraryVersion()
    {
        // Don't use asm.GetName() here, as this will raise a SecurityException on Silverlight.
        // Parse the version out of the assembly's full display name instead.
        // NOTE: the previous implementation matched against typeof(Utilities).FullName
        // (the *type* name, which never contains "Version=") and read Groups[1]
        // of a pattern with no capture group, so it always returned "".
        Match match = Regex.Match(typeof(Utilities).Assembly.FullName, @"Version=([\d\.]+)");
        return match.Success ? match.Groups[1].Value : String.Empty;
    }

    /// <summary>
    /// Returns the title of the calling assembly, or null if not set/unavailable.
    /// </summary>
    public static string GetAssemblyTitle()
    {
        Assembly asm =
#if SILVERLIGHT
            Assembly.GetCallingAssembly();
#else
            Assembly.GetEntryAssembly();
#endif
        // GetEntryAssembly() returns null when invoked from unmanaged hosts.
        if (asm == null)
        {
            return null;
        }
        object[] attributes = asm.GetCustomAttributes(typeof(AssemblyTitleAttribute), false);
        return attributes.Length == 0 ? null : ((AssemblyTitleAttribute)attributes[0]).Title;
    }

    /// <summary>
    /// Replaces all the specified characters within the input string with the given replacement
    /// </summary>
    /// <param name="input">the string to transform</param>
    /// <param name="replace">the replacement text inserted for each matched character</param>
    /// <param name="invalidCharacters">the characters to replace; must be non-empty</param>
    public static string Replace(this string input, string replace, params char[] invalidCharacters)
    {
        invalidCharacters.ThrowIfNullOrEmpty("invalidCharacters");

        // Create the resulting string
        var result = new StringBuilder(input.Length);
        foreach (char c in input)
        {
            // Replace invalid characters with the replacement string
            if (invalidCharacters.Contains(c))
            {
                result.Append(replace);
                continue;
            }
            result.Append(c);
        }
        return result.ToString();
    }

#if SILVERLIGHT
    /// <summary>
    /// Silverlight implementation of the .Split(char[], int) overload.
    /// </summary>
    /// <remarks>Will only work with one split character.</remarks>
    public static string[] Split(this string str, char[] separators, int segments)
    {
        if (segments <= 0)
        {
            throw new ArgumentException("Must have at least one segment.", "segments");
        }
        if (separators.Length == 0 || segments == 1)
        {
            // If no separator has been specified or only one segment is requested, return the original string.
            return new[] { str };
        }
        if (separators.Length != 1)
        {
            throw new NotImplementedException("Only one separator is supported at the moment.");
        }
        string[] split = str.Split(separators);
        if (split.Length <= segments)
        {
            return split; // Nothing to be done.
        }
        // Keep the first (segments - 1) pieces and re-join the remainder into
        // the final segment, mirroring String.Split(char[], int) semantics.
        string[] newSplit = new string[segments];
        int lastElementIndex = segments - 1;
        Array.Copy(split, 0, newSplit, 0, segments - 1);
        newSplit[lastElementIndex] = String.Join(
            separators[0].ToString(), split, lastElementIndex, split.Length - (lastElementIndex));
        return newSplit;
    }
#endif

    #region LINQ extensions

    /// <summary>
    /// Returns the enumerable with the specified element removed
    /// </summary>
    public static IEnumerable<T> Without<T>(this IEnumerable<T> enumerable, T toRemove)
    {
        return enumerable.Except(new[] { toRemove });
    }

    /// <summary>
    /// Returns the enumerable with the specified element added to the end of it.
    /// </summary>
    public static IEnumerable<T> Concat<T>(this IEnumerable<T> enumerable, T toAdd)
    {
        return enumerable.Concat(new[] { toAdd });
    }

    #endregion
}
}
| |
namespace Microsoft.Protocols.TestSuites.MS_ASNOTE
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using Microsoft.Protocols.TestSuites.Common;
using Microsoft.Protocols.TestSuites.Common.DataStructures;
using Microsoft.Protocols.TestTools;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Request = Microsoft.Protocols.TestSuites.Common.Request;
using Response = Microsoft.Protocols.TestSuites.Common.Response;
/// <summary>
/// This scenario is designed to synchronize notes on the server.
/// </summary>
[TestClass]
public class S01_SyncCommand : TestSuiteBase
{
#region Class initialize and clean up
/// <summary>
/// Initialize the class.
/// </summary>
/// <param name="testContext">VSTS test context.</param>
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
    // Delegate to the test-framework base class so shared configuration and
    // logging are set up once for every test in this class.
    TestClassBase.Initialize(testContext);
}
/// <summary>
/// Clear the class.
/// </summary>
[ClassCleanup]
public static void ClassCleanUp()
{
    // Release whatever ClassInitialize acquired when all tests have run.
    TestClassBase.Cleanup();
}
#endregion
#region MSASNOTE_S01_TC01_Sync_AddNote
/// <summary>
/// This test case is designed to test adding a note using the Sync command.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC01_Sync_AddNote()
{
    #region Call method Sync to add a note to the server
    Dictionary<Request.ItemsChoiceType8, object> noteElements = this.CreateNoteElements();
    this.SyncAdd(noteElements, 1);
    #endregion

    #region Call method Sync to synchronize the note item with the server.
    SyncStore syncStore = this.SyncChanges(1);
    Note syncedNote = syncStore.AddElements[0].Note;

    Site.Assert.IsNotNull(
        syncedNote.Categories,
        @"The Categories element in note class in response should not be null.");

    Site.Assert.IsNotNull(
        syncedNote.Categories.Category,
        @"The Category element in note class in response should not be null.");

    Site.Assert.AreEqual<int>(
        1,
        syncedNote.Categories.Category.Length,
        "The length of category should be 1 in response");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R211");

    // MS-ASNOTE_R211: the single category label must round-trip unchanged
    // between the Sync request and the Sync response.
    Site.CaptureRequirementIfAreEqual<string>(
        ((Request.Categories4)noteElements[Request.ItemsChoiceType8.Categories2]).Category[0],
        syncedNote.Categories.Category[0],
        211,
        @"[In Category] [The Category element] specifies a user-selected label that has been applied to the note.");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R123");

    Site.Assert.IsNotNull(
        syncedNote.Body,
        @"The Body element in note class in response should not be null.");

    // MS-ASNOTE_R123: a Body element in the response must carry a Data child.
    Site.CaptureRequirementIfIsNotNull(
        syncedNote.Body.Data,
        123,
        @"[In Body] When the airsyncbase:Body element is used in a Sync command response ([MS-ASCMD] section 2.2.2.19), the airsyncbase:Data element ([MS-ASAIRS] section 2.2.2.10.1) is a required child element of the airsyncbase:Body element.");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R58");

    // MS-ASNOTE_R58: the subject must round-trip unchanged.
    Site.CaptureRequirementIfAreEqual<string>(
        noteElements[Request.ItemsChoiceType8.Subject1].ToString(),
        syncedNote.Subject,
        58,
        @"[In Subject] The Subject element specifies the subject of the note.");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R51");

    // MS-ASNOTE_R51: the server must report when the note was last changed.
    Site.CaptureRequirementIfIsTrue(
        syncedNote.IsLastModifiedDateSpecified,
        51,
        @"[In LastModifiedDate] The LastModifiedDate element specifies when the note was last changed.");
    #endregion
}
#endregion
#region MSASNOTE_S01_TC02_Sync_ChangeNote_WithoutBodyInRequest
/// <summary>
/// This test case is designed to test changing a note's Subject and MessageClass elements without including the note's body in the Sync command.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC02_Sync_ChangeNote_WithoutBodyInRequest()
{
    #region Call method Sync to add a note to the server
    Dictionary<Request.ItemsChoiceType8, object> addElements = this.CreateNoteElements();
    // Use an empty Categories container so the note starts from a known state.
    addElements[Request.ItemsChoiceType8.Categories2] = new Request.Categories4();
    SyncStore addResult = this.SyncAdd(addElements, 1);
    Response.SyncCollectionsCollectionResponsesAdd item = addResult.AddResponses[0];
    #endregion

    #region Call method Sync to change the note's Subject and MessageClass elements.
    // changeElements: Change the note's subject by replacing its subject with a new subject.
    Dictionary<Request.ItemsChoiceType7, object> changeElements = new Dictionary<Request.ItemsChoiceType7, object>();
    string changedSubject = Common.GenerateResourceName(Site, "subject");
    changeElements.Add(Request.ItemsChoiceType7.Subject2, changedSubject);

    // changeElements: Change the note's MessageClass by replacing its MessageClass with a new MessageClass.
    changeElements.Add(Request.ItemsChoiceType7.MessageClass, "IPM.StickyNote.MSASNOTE");
    changeElements = TestSuiteHelper.CombineChangeAndAddNoteElements(addElements, changeElements);

    // changeElements: Remove the note's Body in the Change request; MS-ASNOTE_R113
    // (captured below) requires the server NOT to treat the omission as a delete.
    changeElements.Remove(Request.ItemsChoiceType7.Body);
    SyncStore changeResult = this.SyncChange(addResult.SyncKey, item.ServerId, changeElements);

    Site.Assert.AreEqual<byte>(
        1,
        changeResult.CollectionStatus,
        "The server should return a Status 1 in the Sync command response indicate sync command succeed.");

    // The subject of the note is updated; keep the cleanup bookkeeping in sync.
    this.ExistingNoteSubjects.Remove(addElements[Request.ItemsChoiceType8.Subject1].ToString());
    this.ExistingNoteSubjects.Add(changeElements[Request.ItemsChoiceType7.Subject2].ToString());
    #endregion

    #region Call method Sync to synchronize the note item with the server.
    // Synchronize the changes with server
    SyncStore result = this.SyncChanges(addResult.SyncKey, 1);
    bool isNoteFound = TestSuiteHelper.CheckSyncChangeCommands(result, changeElements[Request.ItemsChoiceType7.Subject2].ToString(), this.Site);
    Site.Assert.IsTrue(isNoteFound, "The note with subject:{0} should be returned in Sync command response.", changeElements[Request.ItemsChoiceType7.Subject2].ToString());
    Note note = result.ChangeElements[0].Note;

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R113");

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R113 — the Body element must still
    // be present in the Change response although the request omitted it.
    Site.CaptureRequirementIfIsNotNull(
        note.Body,
        113,
        @"[In Sync Command Response] The absence of an airsyncbase:Body element (section 2.2.2.1) within an airsync:Change element is not to be interpreted as an implicit delete.");

    Site.Assert.AreEqual<string>(
        changeElements[Request.ItemsChoiceType7.Subject2].ToString(),
        note.Subject,
        "The subject element in Change Command response should be the same with the changed value of subject in Change Command request.");

    Site.Assert.AreEqual<string>(
        changeElements[Request.ItemsChoiceType7.MessageClass].ToString(),
        note.MessageClass,
        "The MessageClass element in Change Command response should be the same with the changed value of MessageClass in Change Command request.");
    #endregion
}
#endregion
#region MSASNOTE_S01_TC03_Sync_LastModifiedDateIgnored
/// <summary>
/// This test case is designed to test the server ignores the element LastModifiedDate if includes it in the request.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC03_Sync_LastModifiedDateIgnored()
{
    #region Call method Sync to add a note to the server
    Dictionary<Request.ItemsChoiceType8, object> addElements = this.CreateNoteElements();
    string lastModifiedDate = DateTime.UtcNow.ToString("yyyyMMddTHHmmssZ", CultureInfo.InvariantCulture);
    addElements.Add(Request.ItemsChoiceType8.LastModifiedDate, lastModifiedDate);

    // Wait so the server-assigned LastModifiedDate cannot accidentally equal the
    // (to-be-ignored) client-supplied value captured above.
    System.Threading.Thread.Sleep(1000);

    SyncStore addResult = this.SyncAdd(addElements, 1);
    Response.SyncCollectionsCollectionResponsesAdd item = addResult.AddResponses[0];
    #endregion

    #region Call method Sync to synchronize the note item with the server.
    SyncStore result = this.SyncChanges(1);

    // Locate the note added above by its subject.
    // FIX: the original loop compared result.AddElements[0] on every iteration
    // while indexing with i; the element under inspection must be [i]. The two
    // duplicated branches differed only in which addResult collection confirmed
    // the add, so that check is hoisted out of the loop.
    Note note = null;
    bool addConfirmed = addResult.CollectionStatus == 1
        && ((addResult.AddElements != null && addResult.AddElements.Count > 0)
            || (addResult.AddResponses != null && addResult.AddResponses.Count > 0));
    if (addConfirmed)
    {
        for (int i = 0; i < result.AddElements.Count; i++)
        {
            if (result.AddElements[i].Note.Subject.ToString() == addElements[Request.ItemsChoiceType8.Subject1].ToString())
            {
                note = result.AddElements[i].Note;
                break;
            }
        }
    }

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R84");

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R84 — the returned LastModifiedDate
    // must differ from the value the client tried to set.
    Site.CaptureRequirementIfAreNotEqual<string>(
        lastModifiedDate,
        note.LastModifiedDate.ToString("yyyyMMddTHHmmssZ", CultureInfo.InvariantCulture),
        84,
        @"[In LastModifiedDate Element] If it is included in a Sync command request, the server will ignore it.");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R209");

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R209
    // this requirement can be captured directly after MS-ASNOTE_R84.
    Site.CaptureRequirement(
        209,
        @"[In LastModifiedDate Element] If a Sync command request includes the LastModifiedDate element, the server ignores the element and returns the actual time that the note was last modified.");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R126");

    // All Notes-class elements must be present inside the airsync:Add element.
    bool isVerifiedR126 = note.Body != null && note.Subject != null && note.MessageClass != null && note.IsLastModifiedDateSpecified && note.Categories != null && note.Categories.Category != null;

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R126
    Site.CaptureRequirementIfIsTrue(
        isVerifiedR126,
        126,
        @"[In Sync Command Response] Any of the elements for the Notes class[airsyncbase:Body, Subject, MessageClass, LastModifiedDate, Categories or Category], as specified in section 2.2.2, can be included in a Sync command response as child elements of the airsync:ApplicationData element ([MS-ASCMD] section 2.2.3.11) within [either] an airsync:Add element ([MS-ASCMD] section 2.2.3.7.2) [or an airsync:Change element ([MS-ASCMD] section 2.2.3.24)].");
    #endregion

    #region Call method Sync to only change the note's LastModifiedDate element, the server will ignore the change, and the note item should be unchanged.
    // changeElements: Change the note's LastModifiedDate by replacing its LastModifiedDate with a new LastModifiedDate.
    Dictionary<Request.ItemsChoiceType7, object> changeElements = new Dictionary<Request.ItemsChoiceType7, object>();
    lastModifiedDate = DateTime.UtcNow.ToString("yyyyMMddTHHmmssZ", CultureInfo.InvariantCulture);
    changeElements.Add(Request.ItemsChoiceType7.LastModifiedDate, lastModifiedDate);
    this.SyncChange(result.SyncKey, item.ServerId, changeElements);
    #endregion

    #region Call method Sync to synchronize the changes with the server.
    SyncStore result2 = this.SyncChanges(result.SyncKey, 1);
    bool isNoteFound;
    if (result2.ChangeElements != null)
    {
        // FIX: search the response that actually holds the Change elements
        // (result2); the original code mistakenly searched the earlier 'result'.
        isNoteFound = TestSuiteHelper.CheckSyncChangeCommands(result2, addElements[Request.ItemsChoiceType8.Subject1].ToString(), this.Site);
        Site.Assert.IsFalse(isNoteFound, "The note with subject:{0} should not be returned in Sync command response.", addElements[Request.ItemsChoiceType8.Subject1].ToString());
    }
    else
    {
        Site.Log.Add(LogEntryKind.Debug, @"The Change elements are null.");
    }
    #endregion

    #region Call method Sync to change the note's LastModifiedDate and subject.
    // changeElements: Change the note's LastModifiedDate by replacing its LastModifiedDate with a new LastModifiedDate.
    // changeElements: Change the note's subject by replacing its subject with a new subject.
    changeElements = new Dictionary<Request.ItemsChoiceType7, object>();
    lastModifiedDate = DateTime.UtcNow.ToString("yyyyMMddTHHmmssZ", CultureInfo.InvariantCulture);
    changeElements.Add(Request.ItemsChoiceType7.LastModifiedDate, lastModifiedDate);
    string changedSubject = Common.GenerateResourceName(Site, "subject");
    changeElements.Add(Request.ItemsChoiceType7.Subject2, changedSubject);
    changeElements = TestSuiteHelper.CombineChangeAndAddNoteElements(addElements, changeElements);
    this.SyncChange(result.SyncKey, item.ServerId, changeElements);
    #endregion

    #region Call method Sync to synchronize the note item with the server.
    result = this.SyncChanges(result.SyncKey, 1);
    isNoteFound = TestSuiteHelper.CheckSyncChangeCommands(result, changeElements[Request.ItemsChoiceType7.Subject2].ToString(), this.Site);
    Site.Assert.IsTrue(isNoteFound, "The note with subject:{0} should be returned in Sync command response.", changeElements[Request.ItemsChoiceType7.Subject2].ToString());

    // The subject of the note is updated; keep the cleanup bookkeeping in sync.
    this.ExistingNoteSubjects.Remove(addElements[Request.ItemsChoiceType8.Subject1].ToString());
    this.ExistingNoteSubjects.Add(changeElements[Request.ItemsChoiceType7.Subject2].ToString());

    note = result.ChangeElements[0].Note;

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R210");

    // All Notes-class elements must be present inside the airsync:Change element.
    bool isVerifiedR210 = note.Body != null && note.Subject != null && note.MessageClass != null && note.IsLastModifiedDateSpecified && note.Categories != null && note.Categories.Category != null;

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R210
    Site.CaptureRequirementIfIsTrue(
        isVerifiedR210,
        210,
        @"[In Sync Command Response] Any of the elements for the Notes class[airsyncbase:Body, Subject, MessageClass, LastModifiedDate, Categories or Category], as specified in section 2.2.2, can be included in a Sync command response as child elements of the airsync:ApplicationData element ([MS-ASCMD] section 2.2.3.11) within [either an airsync:Add element ([MS-ASCMD] section 2.2.3.7.2) or] an airsync:Change element ([MS-ASCMD] section 2.2.3.24).");

    Site.Assert.AreEqual<string>(
        changeElements[Request.ItemsChoiceType7.Subject2].ToString(),
        note.Subject,
        "The subject element in Change Command response should be the same with the changed value of subject in Change Command request.");
    #endregion
}
#endregion
#region MSASNOTE_S01_TC04_Sync_SupportedError
/// <summary>
/// This test case is designed to test when the client includes an airsync:Supported element in a Sync command request, the server returns a status error 4.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC04_Sync_SupportedError()
{
    #region Call an initial method Sync including the Supported option.
    // Build an initial Sync request that (illegally for the Notes class)
    // carries the airsync:Supported element.
    Request.SyncCollection collection = new Request.SyncCollection
    {
        CollectionId = this.UserInformation.NotesCollectionId,
        SyncKey = "0",
        Supported = new Request.Supported()
    };

    SyncRequest request = Common.CreateSyncRequest(new Request.SyncCollection[] { collection });
    SyncStore response = this.NOTEAdapter.Sync(request, false);

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R114");

    // MS-ASNOTE_R114: the server must reject Supported for Notes with Status 4.
    Site.CaptureRequirementIfAreEqual<int>(
        4,
        response.Status,
        114,
        @"[In Sync Command Response] If the airsync:Supported element ([MS-ASCMD] section 2.2.3.164) is included in a Sync command request for Notes class data, the server returns a Status element with a value of 4, as specified in [MS-ASCMD] section 2.2.3.162.16.");
    #endregion
}
#endregion
#region MSASNOTE_S01_TC05_Sync_InvalidMessageClass
/// <summary>
/// This test case is designed to test when the MessageClass content does not use the standard format in a Sync request, the server responds with a status error 6.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC05_Sync_InvalidMessageClass()
{
    #region Call method Sync to add a note to the server
    Dictionary<Request.ItemsChoiceType8, object> addElements = this.CreateNoteElements();
    // Overwrite the MessageClass with a value that violates section 2.2.2.5;
    // the server is expected to reject the add with Status 6 (verified below).
    addElements[Request.ItemsChoiceType8.MessageClass] = "IPM.invalidClass";
    SyncRequest syncRequest = TestSuiteHelper.CreateInitialSyncRequest(this.UserInformation.NotesCollectionId);
    SyncStore syncResult = this.NOTEAdapter.Sync(syncRequest, false);

    Site.Assert.AreEqual<byte>(
        1,
        syncResult.CollectionStatus,
        "The server should return a status code 1 in the Sync command response indicate sync command success.");

    // Build the Add command manually (rather than via SyncAdd) because the
    // invalid note is expected to be rejected, not tracked for cleanup.
    List<object> addData = new List<object>();
    Request.SyncCollectionAdd add = new Request.SyncCollectionAdd
    {
        ClientId = System.Guid.NewGuid().ToString(),
        ApplicationData = new Request.SyncCollectionAddApplicationData
        {
            ItemsElementName = new Request.ItemsChoiceType8[addElements.Count],
            Items = new object[addElements.Count]
        }
    };
    addElements.Keys.CopyTo(add.ApplicationData.ItemsElementName, 0);
    addElements.Values.CopyTo(add.ApplicationData.Items, 0);
    addData.Add(add);

    syncRequest = TestSuiteHelper.CreateSyncRequest(syncResult.SyncKey, this.UserInformation.NotesCollectionId, addData);
    SyncStore addResult = this.NOTEAdapter.Sync(syncRequest, false);

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R119");

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R119 — the per-item Add response
    // must carry Status 6 for the malformed MessageClass.
    Site.CaptureRequirementIfAreEqual<int>(
        6,
        int.Parse(addResult.AddResponses[0].Status),
        119,
        @"[In MessageClass Element] If a client submits a Sync command request ([MS-ASCMD] section 2.2.2.19) that contains a MessageClass element value that does not conform to the requirements specified in section 2.2.2.5, the server MUST respond with a Status element with a value of 6, as specified in [MS-ASCMD] section 2.2.3.162.16.");
    #endregion
}
#endregion
#region MSASNOTE_S01_TC06_Sync_AddNote_WithBodyTypes
/// <summary>
/// This test case is designed to test that the type element of the body in note item has 3 different values:1, 2, 3.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC06_Sync_AddNote_WithBodyTypes()
{
    #region Call method Sync to add a note to the server
    Dictionary<Request.ItemsChoiceType8, object> addElements = this.CreateNoteElements();
    this.SyncAdd(addElements, 1);
    #endregion

    #region Call method Sync to synchronize the note item with the server and expect to get the body of Type 1.
    SyncStore result = this.SyncChanges(1);
    Note note = result.AddElements[0].Note;

    Site.Assert.AreEqual<string>(
        ((Request.Body)addElements[Request.ItemsChoiceType8.Body]).Data,
        note.Body.Data,
        @"The content of body in response should be equal to that in request.");

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R38");

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R38
    // If the content of the body is the same in request and response and the type is 1, then MS-ASNOTE_R38 can be captured.
    Site.CaptureRequirementIfAreEqual<int>(
        1,
        note.Body.Type,
        38,
        @"[In Body] The value 1 means Plain text.");
    #endregion

    #region Call method Sync to synchronize the note item with the server and expect to get the body of Type 2.
    result = this.SyncChanges(2);
    note = result.AddElements[0].Note;

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R39");

    bool isHTML = TestSuiteHelper.IsHTML(note.Body.Data);
    Site.Assert.IsTrue(
        isHTML,
        @"The content of body element in response should be in HTML format. Actual: {0}",
        isHTML);

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R39
    // If the content of the body is in HTML format and the type is 2, then MS-ASNOTE_R39 can be captured.
    Site.CaptureRequirementIfAreEqual<int>(
        2,
        note.Body.Type,
        39,
        @"[In Body] The value 2 means HTML.");
    #endregion

    #region Call method Sync to synchronize the note item with the server and expect to get the body of Type 3.
    result = this.SyncChanges(3);
    note = result.AddElements[0].Note;

    // Add the debug information
    Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R40");

    // RTF bodies are transferred Base64-encoded; round-trip the payload through
    // the Base64 decoder to prove the encoding before capturing R40.
    try
    {
        byte[] contentBytes = Convert.FromBase64String(note.Body.Data);
        System.Text.Encoding.UTF8.GetString(contentBytes);
    }
    catch (FormatException formatException)
    {
        throw new FormatException("The content of body should be Base64 encoded", formatException);
    }

    // Verify MS-ASNOTE requirement: MS-ASNOTE_R40
    // If the content of the body is in Base64 format and the type is 3, then MS-ASNOTE_R40 can be captured.
    Site.CaptureRequirementIfAreEqual<int>(
        3,
        note.Body.Type,
        40,
        @"[In Body] The value 3 means Rich Text Format (RTF).");
    #endregion
}
#endregion
#region MSASNOTE_S01_TC07_Sync_ChangeNote_Categories
/// <summary>
/// This test case is designed to test changing a note's Categories element and its child elements.
/// </summary>
[TestCategory("MSASNOTE"), TestMethod()]
public void MSASNOTE_S01_TC07_Sync_ChangeNote_Categories()
{
#region Call method Sync to add a note with two child elements in a Categories element to the server
Dictionary<Request.ItemsChoiceType8, object> addElements = this.CreateNoteElements();
// Seed the note with two categories; later steps remove them one at a time.
Request.Categories4 categories = new Request.Categories4 { Category = new string[2] };
Collection<string> category = new Collection<string> { "blue category", "red category" };
category.CopyTo(categories.Category, 0);
addElements[Request.ItemsChoiceType8.Categories2] = categories;
this.SyncAdd(addElements, 1);
#endregion
#region Call method Sync to synchronize the note item with the server and expect to get two child elements in response.
// Synchronize the changes with server
SyncStore result = this.SyncChanges(1);
Note noteAdded = result.AddElements[0].Note;
Site.Assert.IsNotNull(noteAdded.Categories, "The Categories element in response should not be null.");
Site.Assert.IsNotNull(noteAdded.Categories.Category, "The category array in response should not be null.");
Site.Assert.AreEqual(2, noteAdded.Categories.Category.Length, "The length of category array in response should be equal to 2.");
#endregion
#region Call method Sync to change the note with MessageClass elements and one child element of Categories element is missing.
Dictionary<Request.ItemsChoiceType7, object> changeElements = new Dictionary<Request.ItemsChoiceType7, object>
{
{
Request.ItemsChoiceType7.MessageClass, "IPM.StickyNote.MSASNOTE1"
}
};
// Shrink the Categories element to a single entry ("blue category"); per MS-ASNOTE,
// omitting "red category" from the Change request should make the server delete it.
categories.Category = new string[1];
category.Remove("red category");
category.CopyTo(categories.Category, 0);
changeElements.Add(Request.ItemsChoiceType7.Categories3, changeElements.ContainsKey(Request.ItemsChoiceType7.Categories3) ? null : categories);
SyncStore changeResult = this.SyncChange(result.SyncKey, result.AddElements[0].ServerId, changeElements);
Site.Assert.AreEqual<byte>(
1,
changeResult.CollectionStatus,
"The server should return a Status 1 in the Sync command response indicate sync command succeed.");
#endregion
#region Call method Sync to synchronize the note item with the server, and check if one child element is missing in response.
// Synchronize the changes with server
result = this.SyncChanges(result.SyncKey, 1);
bool isNoteFound = TestSuiteHelper.CheckSyncChangeCommands(result, addElements[Request.ItemsChoiceType8.Subject1].ToString(), this.Site);
Site.Assert.IsTrue(isNoteFound, "The note with subject:{0} should be returned in Sync command response.", addElements[Request.ItemsChoiceType8.Subject1].ToString());
Note note = result.ChangeElements[0].Note;
Site.Assert.IsNotNull(note.Categories, "The Categories element in response should not be null.");
Site.Assert.IsNotNull(note.Categories.Category, "The category array in response should not be null.");
Site.Assert.IsNotNull(note.Subject, "The Subject element in response should not be null.");
Site.Assert.AreEqual(1, note.Categories.Category.Length, "The length of category array in response should be equal to 1.");
// The remaining category must not be the removed one.
bool hasRedCategory = false;
if (note.Categories.Category[0].Equals("red category", StringComparison.Ordinal))
{
hasRedCategory = true;
}
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R10002");
// Verify MS-ASNOTE requirement: MS-ASNOTE_R10002
Site.CaptureRequirementIfIsFalse(
hasRedCategory,
10002,
@"[In Sync Command Response] If a child of the Categories element (section 2.2.2.3) that was previously set is missing, the server will delete that property from the note.");
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R10003");
// Verify MS-ASNOTE requirement: MS-ASNOTE_R10003
// The Change request did not carry a Subject, so the original subject must survive.
Site.CaptureRequirementIfAreEqual<string>(
noteAdded.Subject,
note.Subject,
10003,
@"[In Sync Command Response] The absence of a Subject element (section 2.2.2.6) within an airsync:Change element is not to be interpreted as an implicit delete.");
#endregion
#region Call method Sync to change the note with MessageClass elements and without Categories element.
// This Change request carries no Categories element at all; the server is expected to
// delete the whole Categories property (verified below via MS-ASNOTE_R112).
changeElements = new Dictionary<Request.ItemsChoiceType7, object>
{
{
Request.ItemsChoiceType7.MessageClass, "IPM.StickyNote.MSASNOTE2"
}
};
changeResult = this.SyncChange(result.SyncKey, result.ChangeElements[0].ServerId, changeElements);
Site.Assert.AreEqual<byte>(
1,
changeResult.CollectionStatus,
"The server should return a Status 1 in the Sync command response indicate sync command succeed.");
#endregion
#region Call method Sync to synchronize the note item with the server, and check if the Categories element is missing in response.
// Synchronize the changes with server
result = this.SyncChanges(result.SyncKey, 1);
isNoteFound = TestSuiteHelper.CheckSyncChangeCommands(result, addElements[Request.ItemsChoiceType8.Subject1].ToString(), this.Site);
Site.Assert.IsTrue(isNoteFound, "The note with subject:{0} should be returned in Sync command response.", addElements[Request.ItemsChoiceType8.Subject1].ToString());
note = result.ChangeElements[0].Note;
// Add the debug information
Site.Log.Add(LogEntryKind.Debug, "Verify MS-ASNOTE_R112");
// Verify MS-ASNOTE requirement: MS-ASNOTE_R112
Site.CaptureRequirementIfIsNull(
note.Categories,
112,
@"[In Sync Command Response] If the Categories element (section 2.2.2.2) that was previously set is missing[in an airsync:Change element in a Sync command request], the server will delete that property from the note.");
#endregion
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.IO;
using System.Runtime.InteropServices;
using System.Globalization;
namespace System.Diagnostics
{
/// <summary>
/// Set of utility functions for interpreting the counter data
/// </summary>
public static class CounterSampleCalculator
{
// Set once LoadPerfCounterDll has loaded perfcounter.dll; volatile so the flag write is
// visible across threads. A racing duplicate LoadLibrary call only increments the
// module reference count, so the check-then-load race is benign.
private static volatile bool s_perfCounterDllLoaded = false;
/// <summary>
/// Converts 100NS elapsed time to fractional seconds
/// </summary>
/// <param name="oldSample">Baseline sample; its raw value carries the start time and its
/// CounterFrequency the tick frequency used for the conversion.</param>
/// <param name="newSample">Current sample; its CounterTimeStamp carries the current time.</param>
/// <returns>Elapsed time in seconds, or 0 when no valid data is available.</returns>
/// <internalonly/>
private static float GetElapsedTime(CounterSample oldSample, CounterSample newSample)
{
float eSeconds;
float eDifference;
if (newSample.RawValue == 0)
{
// no data [start time = 0] so return 0
return 0.0f;
}
else
{
float eFreq;
eFreq = (float)(ulong)oldSample.CounterFrequency;
// Guard: a start time at/after the current timestamp, or a non-positive
// frequency, would yield a negative or undefined elapsed time.
if (oldSample.UnsignedRawValue >= (ulong)newSample.CounterTimeStamp || eFreq <= 0.0f)
return 0.0f;
// otherwise compute difference between current time and start time
eDifference = (float)((ulong)newSample.CounterTimeStamp - oldSample.UnsignedRawValue);
// convert to fractional seconds using object counter
eSeconds = eDifference / eFreq;
return eSeconds;
}
}
/// <summary>
/// Computes the calculated value given a raw counter sample.
/// </summary>
/// <remarks>
/// Single-sample overload: uses CounterSample.Empty as the baseline, which only yields a
/// meaningful result for counter types that do not require two samples (raw counts etc.;
/// see the SystemFrequency == 0 check in the two-sample overload).
/// </remarks>
public static float ComputeCounterValue(CounterSample newSample)
{
return ComputeCounterValue(CounterSample.Empty, newSample);
}
/// <summary>
/// Computes the calculated value given a raw counter sample.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the two samples have different counter types.</exception>
/// <exception cref="Win32Exception">Thrown when PDH reports an error other than a negative/no-data result.</exception>
public static float ComputeCounterValue(CounterSample oldSample, CounterSample newSample)
{
int newCounterType = (int)newSample.CounterType;
// A SystemFrequency of 0 marks oldSample as CounterSample.Empty (no baseline sample).
if (oldSample.SystemFrequency == 0)
{
if ((newCounterType != Interop.Kernel32.PerformanceCounterOptions.PERF_RAW_FRACTION) &&
(newCounterType != Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_RAWCOUNT) &&
(newCounterType != Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_RAWCOUNT_HEX) &&
(newCounterType != Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_LARGE_RAWCOUNT) &&
(newCounterType != Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_LARGE_RAWCOUNT_HEX) &&
(newCounterType != Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_MULTI_BASE))
{
// Since oldSample has a system frequency of 0, this means the newSample is the first sample
// on a two sample calculation. Since we can't do anything with it, return 0.
return 0.0f;
}
}
else if (oldSample.CounterType != newSample.CounterType)
{
throw new InvalidOperationException(SR.Format(SR.MismatchedCounterTypes));
}
// Elapsed time is computed locally rather than being handed to PDH.
if (newCounterType == Interop.Kernel32.PerformanceCounterOptions.PERF_ELAPSED_TIME)
return (float)GetElapsedTime(oldSample, newSample);
Interop.Kernel32.PerformanceCounterOptions.PDH_RAW_COUNTER newPdhValue = new Interop.Kernel32.PerformanceCounterOptions.PDH_RAW_COUNTER();
Interop.Kernel32.PerformanceCounterOptions.PDH_RAW_COUNTER oldPdhValue = new Interop.Kernel32.PerformanceCounterOptions.PDH_RAW_COUNTER();
// Map the samples into the PDH raw-counter layout expected by FormatFromRawValue.
FillInValues(oldSample, newSample, oldPdhValue, newPdhValue);
LoadPerfCounterDll();
Interop.Kernel32.PerformanceCounterOptions.PDH_FMT_COUNTERVALUE pdhFormattedValue = new Interop.Kernel32.PerformanceCounterOptions.PDH_FMT_COUNTERVALUE();
long timeBase = newSample.SystemFrequency;
// Request an unscaled, uncapped double; NOSCALE/NOCAP100 keep the raw magnitude.
int result = Interop.PerfCounter.FormatFromRawValue((uint)newCounterType, Interop.Kernel32.PerformanceCounterOptions.PDH_FMT_DOUBLE | Interop.Kernel32.PerformanceCounterOptions.PDH_FMT_NOSCALE | Interop.Kernel32.PerformanceCounterOptions.PDH_FMT_NOCAP100,
ref timeBase, newPdhValue, oldPdhValue, pdhFormattedValue);
if (result != Interop.Errors.ERROR_SUCCESS)
{
// If the numbers go negative, just return 0. This better matches the old behavior.
if (result == Interop.Kernel32.PerformanceCounterOptions.PDH_CALC_NEGATIVE_VALUE || result == Interop.Kernel32.PerformanceCounterOptions.PDH_CALC_NEGATIVE_DENOMINATOR || result == Interop.Kernel32.PerformanceCounterOptions.PDH_NO_DATA)
return 0;
else
throw new Win32Exception(result, SR.Format(SR.PerfCounterPdhError, result.ToString("x", CultureInfo.InvariantCulture)));
}
return (float)pdhFormattedValue.data;
}
// This method figures out which values are supposed to go into which structures so that PDH can do the
// calculation for us. This was ported from Window's cutils.c
private static void FillInValues(CounterSample oldSample, CounterSample newSample, Interop.Kernel32.PerformanceCounterOptions.PDH_RAW_COUNTER oldPdhValue, Interop.Kernel32.PerformanceCounterOptions.PDH_RAW_COUNTER newPdhValue)
{
int newCounterType = (int)newSample.CounterType;
switch (newCounterType)
{
//
// These counters pair the raw value with the performance-counter timestamp
//
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_COUNTER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_QUEUELEN_TYPE:
case Interop.Kernel32.PerformanceCounterOptions.PERF_SAMPLE_COUNTER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_OBJ_TIME_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_OBJ_TIME_QUEUELEN_TYPE:
newPdhValue.FirstValue = newSample.RawValue;
newPdhValue.SecondValue = newSample.TimeStamp;
oldPdhValue.FirstValue = oldSample.RawValue;
oldPdhValue.SecondValue = oldSample.TimeStamp;
break;
// Queue-length variant measured against the 100ns timestamp.
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_100NS_QUEUELEN_TYPE:
newPdhValue.FirstValue = newSample.RawValue;
newPdhValue.SecondValue = newSample.TimeStamp100nSec;
oldPdhValue.FirstValue = oldSample.RawValue;
oldPdhValue.SecondValue = oldSample.TimeStamp100nSec;
break;
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_TIMER_INV:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_BULK_COUNT:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_LARGE_QUEUELEN_TYPE:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_MULTI_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_MULTI_TIMER_INV:
newPdhValue.FirstValue = newSample.RawValue;
newPdhValue.SecondValue = newSample.TimeStamp;
oldPdhValue.FirstValue = oldSample.RawValue;
oldPdhValue.SecondValue = oldSample.TimeStamp;
if (newCounterType == Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_MULTI_TIMER || newCounterType == Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_MULTI_TIMER_INV)
{
// this is to make PDH work like PERFMON for
// this counter type
newPdhValue.FirstValue *= (uint)newSample.CounterFrequency;
if (oldSample.CounterFrequency != 0)
{
oldPdhValue.FirstValue *= (uint)oldSample.CounterFrequency;
}
}
if ((newCounterType & Interop.Kernel32.PerformanceCounterOptions.PERF_MULTI_COUNTER) == Interop.Kernel32.PerformanceCounterOptions.PERF_MULTI_COUNTER)
{
newPdhValue.MultiCount = (int)newSample.BaseValue;
oldPdhValue.MultiCount = (int)oldSample.BaseValue;
}
break;
//
// These counters do not use any time reference
//
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_RAWCOUNT:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_RAWCOUNT_HEX:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_DELTA:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_LARGE_RAWCOUNT:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_LARGE_RAWCOUNT_HEX:
case Interop.Kernel32.PerformanceCounterOptions.PERF_COUNTER_LARGE_DELTA:
newPdhValue.FirstValue = newSample.RawValue;
newPdhValue.SecondValue = 0;
oldPdhValue.FirstValue = oldSample.RawValue;
oldPdhValue.SecondValue = 0;
break;
//
// These counters use the 100 Ns time base in their calculation
//
case Interop.Kernel32.PerformanceCounterOptions.PERF_100NSEC_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_100NSEC_TIMER_INV:
case Interop.Kernel32.PerformanceCounterOptions.PERF_100NSEC_MULTI_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_100NSEC_MULTI_TIMER_INV:
newPdhValue.FirstValue = newSample.RawValue;
newPdhValue.SecondValue = newSample.TimeStamp100nSec;
oldPdhValue.FirstValue = oldSample.RawValue;
oldPdhValue.SecondValue = oldSample.TimeStamp100nSec;
if ((newCounterType & Interop.Kernel32.PerformanceCounterOptions.PERF_MULTI_COUNTER) == Interop.Kernel32.PerformanceCounterOptions.PERF_MULTI_COUNTER)
{
newPdhValue.MultiCount = (int)newSample.BaseValue;
oldPdhValue.MultiCount = (int)oldSample.BaseValue;
}
break;
//
// These counters use two data points
//
case Interop.Kernel32.PerformanceCounterOptions.PERF_SAMPLE_FRACTION:
case Interop.Kernel32.PerformanceCounterOptions.PERF_RAW_FRACTION:
case Interop.Kernel32.PerformanceCounterOptions.PERF_LARGE_RAW_FRACTION:
case Interop.Kernel32.PerformanceCounterOptions.PERF_PRECISION_SYSTEM_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_PRECISION_100NS_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_PRECISION_OBJECT_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_AVERAGE_TIMER:
case Interop.Kernel32.PerformanceCounterOptions.PERF_AVERAGE_BULK:
newPdhValue.FirstValue = newSample.RawValue;
newPdhValue.SecondValue = newSample.BaseValue;
oldPdhValue.FirstValue = oldSample.RawValue;
oldPdhValue.SecondValue = oldSample.BaseValue;
break;
default:
// an unidentified counter was returned so
newPdhValue.FirstValue = 0;
newPdhValue.SecondValue = 0;
oldPdhValue.FirstValue = 0;
oldPdhValue.SecondValue = 0;
break;
}
}
// Loads perfcounter.dll from the latest installed framework build directory; presumably
// this library provides the FormatFromRawValue export used above — TODO confirm.
// Throws Win32Exception when the library cannot be loaded.
private static void LoadPerfCounterDll()
{
if (s_perfCounterDllLoaded)
return;
string installPath = SharedUtils.GetLatestBuildDllDirectory(".");
string perfcounterPath = Path.Combine(installPath, "perfcounter.dll");
if (Interop.Kernel32.LoadLibrary(perfcounterPath) == IntPtr.Zero)
{
throw new Win32Exception(Marshal.GetLastWin32Error());
}
s_perfCounterDllLoaded = true;
}
}
}
| |
//---------------------------------------------------------------------
// <copyright file="EdmItemCollection.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------
namespace System.Data.Metadata.Edm
{
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Common.CommandTrees;
using System.Data.Common.Utils;
using System.Data.Entity;
using System.Data.EntityModel.SchemaObjectModel;
using System.Data.Objects.ELinq;
using System.Diagnostics;
using System.Linq;
using System.Runtime.Versioning;
using System.Text;
using System.Threading;
using System.Xml;
/// <summary>
/// Class for representing a collection of items in Edm space.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Edm")]
[CLSCompliant(false)]
public sealed class EdmItemCollection : ItemCollection
{
#region Constructors
/// <summary>
/// constructor that loads the metadata files from the specified xmlReaders, and returns the list of errors
/// encountered during load as the out parameter errors.
/// </summary>
/// <param name="xmlReaders">xmlReaders where the CDM schemas are loaded</param>
/// <param name="filePaths">Paths (URIs)to the CSDL files or resources</param>
/// <param name="errors">An out parameter to return the collection of errors encountered while loading</param>
// referenced by System.Data.Entity.Design.dll
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal EdmItemCollection(IEnumerable<XmlReader> xmlReaders,
System.Collections.ObjectModel.ReadOnlyCollection<string> filePaths,
out IList<EdmSchemaError> errors)
: base(DataSpace.CSpace)
{
// we will check the parameters for this internal ctor because
// it is pretty much publicly exposed through the MetadataItemCollectionFactory
// in System.Data.Entity.Design
//
// we are intentionally not checking for an empty enumerable
EntityUtil.CheckArgumentNull(xmlReaders, "xmlReaders");
EntityUtil.CheckArgumentContainsNull(ref xmlReaders, "xmlReaders");
// filePaths is allowed to be null
// throwOnErrors is false: load problems are reported through the out parameter
// rather than raised as an exception.
errors = this.Init(xmlReaders, filePaths, false /*throwOnErrors*/);
}
/// <summary>
/// constructor that loads the metadata files from the specified schemas
/// </summary>
/// <param name="schemas">list of schemas to be loaded into the ItemCollection</param>
// referenced by System.Data.Entity.Design.dll
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal EdmItemCollection(IList<Schema> schemas)
: base(DataSpace.CSpace)
{
// Register the EDM primitive types/functions, then convert the already-parsed SOM
// schemas into this collection.
this.Init();
// NOTE(review): the error list returned by LoadItems is discarded here — presumably
// the supplied schemas were validated upstream; confirm with callers.
LoadItems(MetadataItem.EdmProviderManifest, schemas, this);
}
/// <summary>
/// constructor that loads the metadata files from the specified xmlReaders
/// </summary>
/// <param name="xmlReaders">xmlReaders where the CDM schemas are loaded</param>
/// <param name="filePaths">Paths (URIs)to the CSDL files or resources</param>
internal EdmItemCollection(IEnumerable<XmlReader> xmlReaders,
IEnumerable<string> filePaths)
: base(DataSpace.CSpace)
{
// throwOnErrors is true: any schema error (beyond warnings) raises MetadataException.
this.Init(xmlReaders, filePaths, true /*throwOnErrors*/);
}
/// <summary>
/// Public constructor that loads the metadata files from the specified XmlReaders
/// </summary>
/// <param name="xmlReaders">XmlReader objects where the EDM schemas are loaded</param>
public EdmItemCollection(IEnumerable<XmlReader> xmlReaders)
: base(DataSpace.CSpace)
{
EntityUtil.CheckArgumentNull(xmlReaders, "xmlReaders");
EntityUtil.CheckArgumentContainsNull(ref xmlReaders, "xmlReaders");
// Wrap the readers in a composite artifact loader so that readers and their
// (synthesized) paths are handled through the same abstraction as file inputs.
MetadataArtifactLoader composite = MetadataArtifactLoader.CreateCompositeFromXmlReaders(xmlReaders);
this.Init(composite.GetReaders(),
composite.GetPaths(),
true /*throwOnError*/);
}
/// <summary>
/// Constructs the new instance of EdmItemCollection
/// with the list of CDM files provided.
/// </summary>
/// <param name="paths">paths where the CDM schemas are loaded</param>
/// <exception cref="ArgumentException"> Thrown if path name is not valid</exception>
/// <exception cref="System.ArgumentNullException">thrown if paths argument is null</exception>
/// <exception cref="System.Data.MetadataException">For errors related to invalid schemas.</exception>
[ResourceExposure(ResourceScope.Machine)] //Exposes the file path names which are a Machine resource
[ResourceConsumption(ResourceScope.Machine)] //For MetadataArtifactLoader.CreateCompositeFromFilePaths method call but we do not create the file paths in this method
public EdmItemCollection(params string[] filePaths)
: base(DataSpace.CSpace)
{
EntityUtil.CheckArgumentNull(filePaths, "filePaths");
// Wrap the file paths in instances of the MetadataArtifactLoader class, which provides
// an abstraction and a uniform interface over a diverse set of metadata artifacts.
//
MetadataArtifactLoader composite = null;
List<XmlReader> readers = null;
try
{
composite = MetadataArtifactLoader.CreateCompositeFromFilePaths(filePaths, XmlConstants.CSpaceSchemaExtension);
readers = composite.CreateReaders(DataSpace.CSpace);
this.Init(readers,
composite.GetPaths(DataSpace.CSpace),
true /*throwOnError*/);
}
finally
{
// The readers were created here, so they are disposed here — even when Init throws.
if (readers != null)
{
Helper.DisposeXmlReaders(readers);
}
}
}
// the most basic initialization
// The most basic initialization: registers the built-in EDM primitive types and
// canonical functions that all further schema loading resolves against.
private void Init()
{
// Load the EDM primitive types
LoadEdmPrimitiveTypesAndFunctions();
}
/// <summary>
/// Public constructor that loads the metadata files from the specified XmlReaders, and
/// returns the list of errors encountered during load as the out parameter 'errors'.
/// </summary>
/// <param name="xmlReaders">XmlReader objects where the EDM schemas are loaded</param>
/// <param name="filePaths">Paths (URIs) to the CSDL files or resources</param>
/// <param name="throwOnError">A flag to indicate whether to throw if LoadItems returns errors</param>
private IList<EdmSchemaError> Init(IEnumerable<XmlReader> xmlReaders,
                                   IEnumerable<string> filePaths,
                                   bool throwOnError)
{
    EntityUtil.CheckArgumentNull(xmlReaders, "xmlReaders");

    // Perform the basic initialization (registers the EDM primitive types and
    // functions), then load the caller-supplied schemas and hand back any errors.
    Init();
    return LoadItems(xmlReaders, filePaths, SchemaDataModelOption.EntityDataModel,
                     MetadataItem.EdmProviderManifest, this, throwOnError);
}
#endregion
#region Fields
// Cache for primitive type maps for Edm to provider
private CacheForPrimitiveTypes _primitiveTypeMaps = new CacheForPrimitiveTypes();
// Schema version represented by this collection; starts as UndefinedVersion and is
// set during schema load (see the EdmVersion property).
private Double _edmVersion = XmlConstants.UndefinedVersion;
/// <summary>
/// Gets canonical versions of InitializerMetadata instances. This avoids repeatedly
/// compiling delegates for materialization.
/// </summary>
private Memoizer<InitializerMetadata, InitializerMetadata> _getCanonicalInitializerMetadataMemoizer;
/// <summary>
/// Manages user defined function definitions.
/// </summary>
private Memoizer<EdmFunction, DbLambda> _getGeneratedFunctionDefinitionsMemoizer;
// Conventional O-C mapping assembly cache; see the ConventionalOcCache property.
private OcAssemblyCache _conventionalOcCache = new OcAssemblyCache();
#endregion
#region Properties
/// <summary>
/// Version of the EDM that this ItemCollection represents.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Edm")]
public Double EdmVersion
{
get { return _edmVersion; }
// Internal setter: assigned by the schema loading code once the version is known.
internal set { _edmVersion = value; }
}
/// <summary>
/// conventional oc mapping cache, the locking mechanism is provided by ----semblyCache
/// </summary>
internal OcAssemblyCache ConventionalOcCache
{
// Read-only accessor; locking is handled inside the cache itself (see the summary above).
get { return _conventionalOcCache; }
}
#endregion
#region Methods
/// <summary>
/// Given an InitializerMetadata instance, returns the canonical version of that instance.
/// This allows us to avoid compiling materialization delegates repeatedly for the same
/// pattern.
/// </summary>
internal InitializerMetadata GetCanonicalInitializerMetadata(InitializerMetadata metadata)
{
    if (null == _getCanonicalInitializerMetadataMemoizer)
    {
        // Lazily create the memoizer; CompareExchange guarantees a single instance wins
        // if several threads race here. Memoizing the identity function works because
        // the first evaluation for a given value establishes the canonical reference
        // for all equivalent instances.
        Interlocked.CompareExchange(ref _getCanonicalInitializerMetadataMemoizer, new Memoizer<InitializerMetadata, InitializerMetadata>(
            m => m, EqualityComparer<InitializerMetadata>.Default), null);
    }

    // Return the previously registered equivalent (registering this one if it is first).
    return _getCanonicalInitializerMetadataMemoizer.Evaluate(metadata);
}
internal static bool IsSystemNamespace(DbProviderManifest manifest, string namespaceName)
{
    // The transient, EDM and CLR-primitive namespaces are system namespaces for
    // every manifest.
    if (namespaceName == EdmConstants.TransientNamespace ||
        namespaceName == EdmConstants.EdmNamespace ||
        namespaceName == EdmConstants.ClrPrimitiveTypeNamespace)
    {
        return true;
    }

    // For a store provider manifest (anything other than the EDM manifest itself),
    // the provider's own namespace also counts as a system namespace.
    return manifest != MetadataItem.EdmProviderManifest &&
           manifest != null &&
           namespaceName == manifest.NamespaceName;
}
/// <summary>
/// Load stuff from xml readers - this now includes XmlReader instances created over embedded
/// resources. See the remarks section below for some useful information.
/// </summary>
/// <param name="xmlReaders">A list of XmlReader instances</param>
/// <param name="dataModelOption">whether this is a entity data model or provider data model</param>
/// <param name="providerManifest">provider manifest from which the primitive type definition comes from</param>
/// <param name="itemCollection">item collection to add the item after loading</param>
/// <param name="computeFilePaths">Indicates whether the method should bother with the file paths; see remarks below</param>
/// <remarks>
/// In order to accommodate XmlReaders over artifacts embedded as resources in assemblies, the
/// notion of a filepath had to be abstracted into a URI. In reality, however, a res:// URI that
/// points to an embedded resource does not constitute a valid URI (i.e., one that can be parsed
/// by the System.Uri class in the .NET framework). In such cases, we need to supply a list of
/// "filepaths" (which includes res:// URIs), instead of having this method create the collection.
/// This distinction is made by setting the 'computeFilePaths' flags appropriately.
/// </remarks>
internal static IList<EdmSchemaError> LoadItems(IEnumerable<XmlReader> xmlReaders,
                                                IEnumerable<string> sourceFilePaths,
                                                SchemaDataModelOption dataModelOption,
                                                DbProviderManifest providerManifest,
                                                ItemCollection itemCollection,
                                                bool throwOnError)
{
    // Parse and validate every schema as a single group — 'using' references between
    // schemas require them to be processed together.
    IList<Schema> schemaCollection = null;
    var errorCollection = SchemaManager.ParseAndValidate(xmlReaders, sourceFilePaths,
        dataModelOption, providerManifest, out schemaCollection);

    // Only populate the item collection when parsing produced nothing worse than
    // warnings; any conversion errors are appended to the running error collection.
    if (MetadataHelper.CheckIfAllErrorsAreWarnings(errorCollection))
    {
        foreach (EdmSchemaError loadError in LoadItems(providerManifest, schemaCollection, itemCollection))
        {
            errorCollection.Add(loadError);
        }
    }

    // Re-check after conversion: loading itself may have contributed real errors.
    if (!MetadataHelper.CheckIfAllErrorsAreWarnings(errorCollection) && throwOnError)
    {
        //Future Enhancement: if there is an error, we throw exception with error and warnings.
        //Otherwise the user has no clue to know about warnings.
        throw EntityUtil.InvalidSchemaEncountered(Helper.CombineErrorMessage(errorCollection));
    }

    return errorCollection;
}
/// <summary>
/// Converts the given SOM schemas into metadata items, adds them to the item collection
/// and freezes them, rejecting S-space function overloads whose C-space identities collide.
/// </summary>
/// <param name="manifest">Provider manifest used during conversion (EDM manifest for model schemas).</param>
/// <param name="somSchemas">The parsed SOM schemas to convert.</param>
/// <param name="itemCollection">Collection that receives the converted (read-only) items.</param>
/// <returns>Errors produced while adding items (currently only duplicate-overload errors).</returns>
internal static List<EdmSchemaError> LoadItems(DbProviderManifest manifest, IList<Schema> somSchemas,
    ItemCollection itemCollection)
{
    List<EdmSchemaError> errors = new List<EdmSchemaError>();

    // Convert the schema, if model schema, then we use the EDM provider manifest, otherwise use the
    // store provider manifest
    IEnumerable<GlobalItem> newGlobalItems = LoadSomSchema(somSchemas, manifest, itemCollection);

    // C-space identities of S-space functions already accepted. A HashSet keeps the
    // duplicate check O(1) per function instead of the former O(n) List.Contains scan
    // (schemas can declare many function overloads); string equality is ordinal in both.
    HashSet<string> tempCTypeFunctionIdentity = new HashSet<string>();

    // No errors, so go ahead and add the types and make them readonly
    foreach (GlobalItem globalItem in newGlobalItems)
    {
        // If multiple function parameter and return types expressed in SSpace map to the same
        // CSpace type (e.g., SqlServer.decimal and SqlServer.numeric both map to Edm.Decimal),
        // we need to guard against attempts to insert duplicate functions into the collection.
        //
        if (globalItem.BuiltInTypeKind == BuiltInTypeKind.EdmFunction && globalItem.DataSpace == DataSpace.SSpace)
        {
            EdmFunction function = (EdmFunction)globalItem;

            StringBuilder sb = new StringBuilder();
            EdmFunction.BuildIdentity(
                sb,
                function.FullName,
                function.Parameters,
                // convert function parameters to C-side types
                (param) => MetadataHelper.ConvertStoreTypeUsageToEdmTypeUsage(param.TypeUsage),
                (param) => param.Mode);
            string cTypeFunctionIdentity = sb.ToString();

            // HashSet.Add returns false when an equivalent identity is already registered.
            if (!tempCTypeFunctionIdentity.Add(cTypeFunctionIdentity))
            {
                errors.Add(
                    new EdmSchemaError(
                        Strings.DuplicatedFunctionoverloads(function.FullName, cTypeFunctionIdentity.Substring(function.FullName.Length)).Trim()/*parameters*/,
                        (int)ErrorCode.DuplicatedFunctionoverloads,
                        EdmSchemaErrorSeverity.Error));
                continue;
            }
        }
        globalItem.SetReadOnly();
        itemCollection.AddInternal(globalItem);
    }
    return errors;
}
/// <summary>
/// Load metadata from a SOM schema directly
/// </summary>
/// <param name="somSchema">The SOM schema to load from</param>
/// <param name="providerManifest">The provider manifest used for loading the type</param>
/// <param name="itemCollection">item collection in which primitive types are present</param>
/// <returns>The newly created items</returns>
internal static IEnumerable<GlobalItem> LoadSomSchema(IList<Schema> somSchemas,
    DbProviderManifest providerManifest,
    ItemCollection itemCollection)
{
    // Delegate straight to the SOM-to-metadata converter; the item collection supplies
    // the primitive types the conversion resolves against.
    return Converter.ConvertSchema(somSchemas, providerManifest, itemCollection);
}
/// <summary>
/// Get the list of primitive types for the given space
/// </summary>
/// <returns></returns>
public System.Collections.ObjectModel.ReadOnlyCollection<PrimitiveType> GetPrimitiveTypes()
{
// Returns every registered primitive type (no version filtering; compare the
// double-parameter overload below).
return _primitiveTypeMaps.GetTypes();
}
/// <summary>
/// Get the list of primitive types for the given version of Edm
/// </summary>
/// <param name="edmVersion">The version of edm to use</param>
/// <returns></returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "edm")]
public System.Collections.ObjectModel.ReadOnlyCollection<PrimitiveType> GetPrimitiveTypes(double edmVersion)
{
    // V3 understands the full type set; earlier versions must not see
    // spatial types, so those are filtered out of the map's results.
    if (edmVersion == XmlConstants.EdmVersionForV3)
    {
        return _primitiveTypeMaps.GetTypes();
    }
    if (edmVersion == XmlConstants.EdmVersionForV1 || edmVersion == XmlConstants.EdmVersionForV1_1 || edmVersion == XmlConstants.EdmVersionForV2)
    {
        return _primitiveTypeMaps.GetTypes().Where(type => !Helper.IsSpatialType(type)).ToList().AsReadOnly();
    }
    // Any other version value is invalid.
    throw EntityUtil.InvalidEDMVersion(edmVersion);
}
/// <summary>
/// Given the canonical primitive type, get the mapping primitive type in the given dataspace
/// </summary>
/// <param name="primitiveTypeKind">canonical primitive type</param>
/// <returns>The mapped scalar type</returns>
internal override PrimitiveType GetMappedPrimitiveType(PrimitiveTypeKind primitiveTypeKind)
{
    // TryGetType leaves the out parameter null when no mapping exists,
    // so a miss naturally falls through to returning null.
    PrimitiveType mappedType = null;
    _primitiveTypeMaps.TryGetType(primitiveTypeKind, null, out mappedType);
    return mappedType;
}
private void LoadEdmPrimitiveTypesAndFunctions()
{
    // Register the canonical EDM primitive types and functions exposed by
    // the EDM provider manifest with this item collection.
    EdmProviderManifest manifest = EdmProviderManifest.Instance;
    foreach (PrimitiveType primitiveType in manifest.GetStoreTypes())
    {
        this.AddInternal(primitiveType);
        // Also index the type by its PrimitiveTypeKind for fast lookup.
        _primitiveTypeMaps.Add(primitiveType);
    }
    foreach (EdmFunction function in manifest.GetStoreFunctions())
    {
        this.AddInternal(function);
    }
}
/// <summary>
/// Generates function definition or returns a cached one.
/// Guarantees type match of declaration and generated parameters.
/// Guarantees return type match.
/// Throws internal error for functions without definition.
/// Passes through exceptions that occurred during definition generation.
/// </summary>
internal DbLambda GetGeneratedFunctionDefinition(EdmFunction function)
{
    // Lazily create the memoizer with a lock-free compare-and-swap: if
    // another thread wins the race, our freshly built instance is simply
    // discarded and the winner's is used.
    if (null == _getGeneratedFunctionDefinitionsMemoizer)
    {
        Interlocked.CompareExchange(
            ref _getGeneratedFunctionDefinitionsMemoizer,
            new Memoizer<EdmFunction, DbLambda>(GenerateFunctionDefinition, null),
            null);
    }
    // Evaluate returns the cached definition, or invokes
    // GenerateFunctionDefinition once and caches the result.
    return _getGeneratedFunctionDefinitionsMemoizer.Evaluate(function);
}
/// <summary>
/// Generates function definition or returns a cached one.
/// Guarantees type match of declaration and generated parameters.
/// Guarantees return type match.
/// Throws internal error for functions without definition.
/// Passes through exceptions that occurred during definition generation.
/// </summary>
internal DbLambda GenerateFunctionDefinition(EdmFunction function)
{
    Debug.Assert(function.IsModelDefinedFunction, "Function definition can be requested only for user-defined model functions.");
    if (!function.HasUserDefinedBody)
    {
        throw EntityUtil.FunctionHasNoDefinition(function);
    }
    // Compile the function's CommandText into a DbLambda whose parameters
    // match the declared parameter list.
    DbLambda definition = Mapping.ViewGeneration.Utils.ExternalCalls.CompileFunctionDefinition(
        function.FullName,
        function.CommandTextAttribute,
        function.Parameters,
        this);
    // The generated body must produce exactly the declared return type.
    if (!TypeSemantics.IsStructurallyEqual(function.ReturnParameter.TypeUsage, definition.Body.ResultType))
    {
        throw EntityUtil.FunctionDefinitionResultTypeMismatch(function, definition.Body.ResultType);
    }
    Debug.Assert(definition != null, "generatedDefinition != null");
    return definition;
}
#endregion
}//---- ItemCollection
}//----
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Drawing;
using SonicRetro.SonLVL.API;
namespace S3KObjectDefinitions.Common
{
class PathSwapper : ObjectDefinition
{
    // Base sprite used as the generic object image.
    private Sprite img;
    // Pre-rendered sprite per subtype; indexed by (subtype & 0x1F).
    private List<Sprite> imgs = new List<Sprite>();

    public override void Init(ObjectData data)
    {
        // Load the shared Nemesis-compressed art once; slices of it are
        // reused to build every subtype variant below.
        List<byte> tmpartfile = new List<byte>();
        tmpartfile.AddRange(ObjectHelper.OpenArtFile("Common/pathswapper-art.bin", CompressionType.Nemesis));
        byte[] artfile1 = tmpartfile.ToArray();
        img = ObjectHelper.MapASMToBmp(artfile1, "../General/Sprites/Level Misc/Map - Path Swap.asm", 0, 0);
        Point off;
        BitmapBits im;
        Point pos;
        Size delta;
        // Subtype bits: 0-1 = length (1 << n segments), 2 = horizontal flag,
        // bits 2-4 select which 128-byte slice of the art is used.
        for (int i = 0; i < 32; i++)
        {
            byte[] artfile = tmpartfile.GetRange(((i & 0x1C) << 5), 128).ToArray();
            BitmapBits tempim = ObjectHelper.MapASMToBmp(artfile, "../General/Sprites/Level Misc/Map - Path Swap.asm", (i & 4), 0).Image;
            if ((i & 4) != 0)
            {
                // Horizontal swapper: tile the segment sideways.
                im = new BitmapBits(tempim.Width * (1 << (i & 3)), tempim.Height);
                delta = new Size(tempim.Width, 0);
            }
            else
            {
                // Vertical swapper: stack the segment downwards.
                im = new BitmapBits(tempim.Width, tempim.Height * (1 << (i & 3)));
                delta = new Size(0, tempim.Height);
            }
            pos = new Point(0, 0);
            // Center the assembled image on the object position.
            off = new Point(-(im.Width / 2), -(im.Height / 2));
            for (int j = 0; j < (1 << (i & 3)); j++)
            {
                im.DrawBitmap(tempim, pos);
                pos = pos + delta;
            }
            imgs.Add(new Sprite(im, off));
        }
    }

    public override ReadOnlyCollection<byte> Subtypes
    {
        get { return new ReadOnlyCollection<byte>(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }); }
    }

    public override string Name
    {
        get { return "Path Swapper"; }
    }

    public override bool RememberState
    {
        get { return false; }
    }

    public override string SubtypeName(byte subtype)
    {
        // Bit 2 selects the orientation of the swap boundary.
        return (subtype & 4) == 4 ? "Horizontal" : "Vertical";
    }

    public override Sprite Image
    {
        get { return img; }
    }

    public override Sprite SubtypeImage(byte subtype)
    {
        return imgs[subtype & 0x1F];
    }

    public override Rectangle GetBounds(ObjectEntry obj, Point camera)
    {
        // Look the subtype sprite up once instead of four separate times.
        Sprite spr = imgs[obj.SubType & 0x1F];
        return new Rectangle(obj.X + spr.X - camera.X, obj.Y + spr.Y - camera.Y, spr.Width, spr.Height);
    }

    public override Sprite GetSprite(ObjectEntry obj)
    {
        Sprite spr = new Sprite(imgs[obj.SubType & 0x1F].Image, imgs[obj.SubType & 0x1F].Offset);
        spr.Offset = new Point(obj.X + spr.X, obj.Y + spr.Y);
        return spr;
    }

    // Debug-only object: only drawn when debug display is enabled.
    public override bool Debug { get { return true; } }

    // Property-grid accessors for the individual subtype bit fields.
    private PropertySpec[] customProperties = new PropertySpec[] {
        new PropertySpec("Priority only", typeof(bool), "Extended", null, null, (o) => o.XFlip, (o, v) => o.XFlip = (bool)v),
        new PropertySpec("Size", typeof(byte), "Extended", null, null, GetSize, SetSize),
        new PropertySpec("Direction", typeof(Direction), "Extended", null, null, GetDirection, SetDirection),
        new PropertySpec("Right/Down Path", typeof(int), "Extended", null, null, typeof(PathConverter), GetRDPath, SetRDPath),
        new PropertySpec("Left/Up Path", typeof(int), "Extended", null, null, typeof(PathConverter), GetLUPath, SetLUPath),
        new PropertySpec("Right/Down Priority", typeof(Priority), "Extended", null, null, GetRDPriority, SetRDPriority),
        new PropertySpec("Left/Up Priority", typeof(Priority), "Extended", null, null, GetLUPriority, SetLUPriority),
        new PropertySpec("Ground only", typeof(bool), "Extended", null, null, GetGroundOnly, SetGroundOnly)
    };

    public override PropertySpec[] CustomProperties
    {
        get
        {
            return customProperties;
        }
    }

    // Bits 0-1: size (number of segments = 1 << size).
    private static object GetSize(ObjectEntry obj)
    {
        return (byte)(obj.SubType & 3);
    }

    private static void SetSize(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~3) | ((byte)value & 3));
    }

    // Bit 2: orientation.
    private static object GetDirection(ObjectEntry obj)
    {
        return (obj.SubType & 4) != 0 ? Direction.Horizontal : Direction.Vertical;
    }

    private static void SetDirection(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~4) | ((Direction)value == Direction.Horizontal ? 4 : 0));
    }

    // Bit 3: collision path applied when crossing rightwards/downwards.
    private static object GetRDPath(ObjectEntry obj)
    {
        return (obj.SubType & 8) >> 3;
    }

    private static void SetRDPath(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~8) | ((int)value << 3));
    }

    // Bit 4: collision path applied when crossing leftwards/upwards.
    private static object GetLUPath(ObjectEntry obj)
    {
        return (obj.SubType & 16) >> 4;
    }

    private static void SetLUPath(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~16) | ((int)value << 4));
    }

    // Bit 5: sprite priority for rightward/downward crossings.
    private static object GetRDPriority(ObjectEntry obj)
    {
        return (obj.SubType & 32) != 0 ? Priority.High : Priority.Low;
    }

    private static void SetRDPriority(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~32) | ((Priority)value == Priority.High ? 32 : 0));
    }

    // Bit 6: sprite priority for leftward/upward crossings.
    private static object GetLUPriority(ObjectEntry obj)
    {
        return (obj.SubType & 64) != 0 ? Priority.High : Priority.Low;
    }

    private static void SetLUPriority(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~64) | ((Priority)value == Priority.High ? 64 : 0));
    }

    // Bit 7: swap only while the player is on the ground.
    private static object GetGroundOnly(ObjectEntry obj)
    {
        // Simplified from "(...) != 0 ? true : false" — the comparison is already a bool.
        return (obj.SubType & 128) != 0;
    }

    private static void SetGroundOnly(ObjectEntry obj, object value)
    {
        obj.SubType = (byte)((obj.SubType & ~128) | ((bool)value ? 128 : 0));
    }
}
/// <summary>
/// TypeConverter mapping the path index (0/1) to the display names
/// "Path 1"/"Path 2" for the property grid.
/// </summary>
internal class PathConverter : TypeConverter
{
    // Display-name -> path-index map; also supplies the standard values list.
    private readonly Dictionary<string, int> values = new Dictionary<string, int>() { { "Path 1", 0 }, { "Path 2", 1 } };

    public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
    {
        if (sourceType == typeof(string))
            return true;
        return base.CanConvertFrom(context, sourceType);
    }

    public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
    {
        if (destinationType == typeof(int))
            return true;
        return base.CanConvertTo(context, destinationType);
    }

    public override object ConvertFrom(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value)
    {
        // Throws KeyNotFoundException for unrecognized names (dictionary indexer).
        if (value is string)
            return values[(string)value];
        return base.ConvertFrom(context, culture, value);
    }

    public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType)
    {
        if (destinationType == typeof(string) && value is int)
        {
            // Reverse lookup; values are distinct, so return the first match
            // instead of scanning the whole dictionary.
            foreach (KeyValuePair<string, int> item in values)
                if (item.Value.Equals(value))
                    return item.Key;
            throw new KeyNotFoundException();
        }
        return base.ConvertTo(context, culture, value, destinationType);
    }

    public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
    {
        return new StandardValuesCollection(values.Keys);
    }

    public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
    {
        return true;
    }
}
// Sprite/plane priority value selected by the swapper's priority bits.
public enum Priority
{
    Low,
    High
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Security.Permissions;
using System.Xml;
using Microsoft.Win32;
namespace System.Security.Cryptography.Xml
{
public class SignedXml
{
// Signature object being built or verified.
protected Signature m_signature;
// Optional name of a key in a key container (legacy key lookup).
protected string m_strSigningKeyName;
private AsymmetricAlgorithm _signingKey;
private XmlDocument _containingDocument = null;
// Enumerator state used by GetPublicKey to walk candidate KeyInfo clauses
// and certificate collections across successive calls.
private IEnumerator _keyInfoEnum = null;
private X509Certificate2Collection _x509Collection = null;
private IEnumerator _x509Enum = null;
// Per-reference memo used by GetReferenceLevel when sorting references.
private bool[] _refProcessed = null;
private int[] _refLevelCache = null;
internal XmlResolver _xmlResolver = null;
// Element providing namespace/URI context for reference resolution.
internal XmlElement _context = null;
private bool _bResolverSet = false;
// Policy hook consulted by CheckSignatureFormat; defaults to rejecting
// truncated HMACs and unknown canonicalization algorithms.
private Func<SignedXml, bool> _signatureFormatValidator = DefaultSignatureFormatValidator;
private Collection<string> _safeCanonicalizationMethods;
// Built in canonicalization algorithm URIs
private static IList<string> s_knownCanonicalizationMethods = null;
// Built in transform algorithm URIs (excluding canonicalization URIs)
private static IList<string> s_defaultSafeTransformMethods = null;
// additional HMAC Url identifiers
private const string XmlDsigMoreHMACMD5Url = "http://www.w3.org/2001/04/xmldsig-more#hmac-md5";
private const string XmlDsigMoreHMACSHA256Url = "http://www.w3.org/2001/04/xmldsig-more#hmac-sha256";
private const string XmlDsigMoreHMACSHA384Url = "http://www.w3.org/2001/04/xmldsig-more#hmac-sha384";
private const string XmlDsigMoreHMACSHA512Url = "http://www.w3.org/2001/04/xmldsig-more#hmac-sha512";
private const string XmlDsigMoreHMACRIPEMD160Url = "http://www.w3.org/2001/04/xmldsig-more#hmac-ripemd160";
// defines the XML encryption processing rules
private EncryptedXml _exml = null;
//
// public constant Url identifiers most frequently used within the XML Signature classes
//
public const string XmlDsigNamespaceUrl = "http://www.w3.org/2000/09/xmldsig#";
public const string XmlDsigMinimalCanonicalizationUrl = "http://www.w3.org/2000/09/xmldsig#minimal";
public const string XmlDsigCanonicalizationUrl = XmlDsigC14NTransformUrl;
public const string XmlDsigCanonicalizationWithCommentsUrl = XmlDsigC14NWithCommentsTransformUrl;
public const string XmlDsigSHA1Url = "http://www.w3.org/2000/09/xmldsig#sha1";
public const string XmlDsigDSAUrl = "http://www.w3.org/2000/09/xmldsig#dsa-sha1";
public const string XmlDsigRSASHA1Url = "http://www.w3.org/2000/09/xmldsig#rsa-sha1";
public const string XmlDsigHMACSHA1Url = "http://www.w3.org/2000/09/xmldsig#hmac-sha1";
public const string XmlDsigSHA256Url = "http://www.w3.org/2001/04/xmlenc#sha256";
public const string XmlDsigRSASHA256Url = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256";
// Yes, SHA384 is in the xmldsig-more namespace even though all the other SHA variants are in xmlenc. That's the standard.
public const string XmlDsigSHA384Url = "http://www.w3.org/2001/04/xmldsig-more#sha384";
public const string XmlDsigRSASHA384Url = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha384";
public const string XmlDsigSHA512Url = "http://www.w3.org/2001/04/xmlenc#sha512";
public const string XmlDsigRSASHA512Url = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha512";
public const string XmlDsigC14NTransformUrl = "http://www.w3.org/TR/2001/REC-xml-c14n-20010315";
public const string XmlDsigC14NWithCommentsTransformUrl = "http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments";
public const string XmlDsigExcC14NTransformUrl = "http://www.w3.org/2001/10/xml-exc-c14n#";
public const string XmlDsigExcC14NWithCommentsTransformUrl = "http://www.w3.org/2001/10/xml-exc-c14n#WithComments";
public const string XmlDsigBase64TransformUrl = "http://www.w3.org/2000/09/xmldsig#base64";
public const string XmlDsigXPathTransformUrl = "http://www.w3.org/TR/1999/REC-xpath-19991116";
public const string XmlDsigXsltTransformUrl = "http://www.w3.org/TR/1999/REC-xslt-19991116";
public const string XmlDsigEnvelopedSignatureTransformUrl = "http://www.w3.org/2000/09/xmldsig#enveloped-signature";
public const string XmlDecryptionTransformUrl = "http://www.w3.org/2002/07/decrypt#XML";
public const string XmlLicenseTransformUrl = "urn:mpeg:mpeg21:2003:01-REL-R-NS:licenseTransform";
//
// public constructors
//
/// <summary>Creates a SignedXml with no document context.</summary>
public SignedXml()
{
    Initialize(null);
}
/// <summary>Creates a SignedXml whose references resolve against the given document's root element.</summary>
/// <exception cref="ArgumentNullException"><paramref name="document"/> is null.</exception>
public SignedXml(XmlDocument document)
{
    if (document == null)
        throw new ArgumentNullException(nameof(document));
    Initialize(document.DocumentElement);
}
/// <summary>Creates a SignedXml whose resolution context is the given element.</summary>
/// <exception cref="ArgumentNullException"><paramref name="elem"/> is null.</exception>
public SignedXml(XmlElement elem)
{
    if (elem == null)
        throw new ArgumentNullException(nameof(elem));
    Initialize(elem);
}
// Shared constructor body: records the context element (may be null) and
// sets up an empty Signature/SignedInfo pair plus the default safe
// canonicalization allow-list.
private void Initialize(XmlElement element)
{
    _containingDocument = (element == null ? null : element.OwnerDocument);
    _context = element;
    m_signature = new Signature();
    m_signature.SignedXml = this;
    m_signature.SignedInfo = new SignedInfo();
    _signingKey = null;
    // Copy so callers can customize SafeCanonicalizationMethods per instance.
    _safeCanonicalizationMethods = new Collection<string>(KnownCanonicalizationMethods);
}
//
// public properties
//
/// <internalonly/>
public string SigningKeyName
{
    get { return m_strSigningKeyName; }
    set { m_strSigningKeyName = value; }
}
public XmlResolver Resolver
{
    // This property only has a setter. The rationale for this is that we don't have a good value
    // to return when it has not been explicitly set, as we are using XmlSecureResolver by default
    set
    {
        _xmlResolver = value;
        _bResolverSet = true;
    }
}
// Whether the caller supplied a resolver; GetC14NDigest uses this to decide
// between the supplied resolver and a default XmlSecureResolver.
internal bool ResolverSet
{
    get { return _bResolverSet; }
}
/// <summary>Hook invoked during verification to accept/reject the signature's format.</summary>
public Func<SignedXml, bool> SignatureFormatValidator
{
    get { return _signatureFormatValidator; }
    set { _signatureFormatValidator = value; }
}
/// <summary>Canonicalization algorithm URIs accepted during verification.</summary>
public Collection<string> SafeCanonicalizationMethods
{
    get { return _safeCanonicalizationMethods; }
}
public AsymmetricAlgorithm SigningKey
{
    get { return _signingKey; }
    set { _signingKey = value; }
}
public EncryptedXml EncryptedXml
{
    get
    {
        if (_exml == null)
            _exml = new EncryptedXml(_containingDocument); // default processing rules
        return _exml;
    }
    set { _exml = value; }
}
public Signature Signature
{
    get { return m_signature; }
}
public SignedInfo SignedInfo
{
    get { return m_signature.SignedInfo; }
}
public string SignatureMethod
{
    get { return m_signature.SignedInfo.SignatureMethod; }
}
public string SignatureLength
{
    get { return m_signature.SignedInfo.SignatureLength; }
}
public byte[] SignatureValue
{
    get { return m_signature.SignatureValue; }
}
public KeyInfo KeyInfo
{
    get { return m_signature.KeyInfo; }
    set { m_signature.KeyInfo = value; }
}
public XmlElement GetXml()
{
    // Prefer the owning document's context when one exists so the output
    // element belongs to it; otherwise emit into a fresh document.
    return _containingDocument != null
        ? m_signature.GetXml(_containingDocument)
        : m_signature.GetXml();
}
/// <summary>Loads signature state from a ds:Signature element.</summary>
/// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
public void LoadXml(XmlElement value)
{
    if (value == null)
        throw new ArgumentNullException(nameof(value));
    m_signature.LoadXml(value);
    // Adopt the loaded element as the resolution context unless one was
    // already supplied through a constructor.
    if (_context == null)
    {
        _context = value;
    }
    // The cached SignedInfo digest no longer matches the new contents.
    _bCacheValid = false;
}
//
// public methods
//
/// <summary>Adds a reference to this signature's SignedInfo.</summary>
/// <exception cref="ArgumentNullException"><paramref name="reference"/> is null.</exception>
public void AddReference(Reference reference)
{
    // Fail fast with a clear exception instead of a NullReferenceException
    // later during ComputeSignature; matches the argument checking done by
    // LoadXml and the constructors.
    if (reference == null)
        throw new ArgumentNullException(nameof(reference));
    m_signature.SignedInfo.AddReference(reference);
}
/// <summary>Adds a DataObject (e.g. for enveloping signatures) to the signature.</summary>
public void AddObject(DataObject dataObject)
{
    m_signature.AddObject(dataObject);
}
public bool CheckSignature()
{
    // The key is discovered from KeyInfo; this caller does not need it.
    AsymmetricAlgorithm discoveredKey;
    return CheckSignatureReturningKey(out discoveredKey);
}
/// <summary>
/// Verifies the signature against each candidate public key found in
/// KeyInfo; on success, returns the key that validated it via
/// <paramref name="signingKey"/>.
/// </summary>
public bool CheckSignatureReturningKey(out AsymmetricAlgorithm signingKey)
{
    SignedXmlDebugLog.LogBeginSignatureVerification(this, _context);
    signingKey = null;
    bool bRet = false;
    AsymmetricAlgorithm key = null;
    // Enforce the signature format policy (e.g. reject truncated HMACs)
    // before doing any cryptographic work.
    if (!CheckSignatureFormat())
    {
        return false;
    }
    // Try each key yielded by GetPublicKey until one verifies or the
    // candidates run out. GetPublicKey keeps enumerator state between calls.
    do
    {
        key = GetPublicKey();
        if (key != null)
        {
            bRet = CheckSignature(key);
            SignedXmlDebugLog.LogVerificationResult(this, key, bRet);
        }
    } while (key != null && bRet == false);
    signingKey = key;
    return bRet;
}
/// <summary>
/// Verifies the signature with the given public key: format policy first,
/// then the SignedInfo signature, then every reference digest.
/// </summary>
public bool CheckSignature(AsymmetricAlgorithm key)
{
    if (!CheckSignatureFormat())
    {
        return false;
    }
    if (!CheckSignedInfo(key))
    {
        SignedXmlDebugLog.LogVerificationFailure(this, SR.Log_VerificationFailed_SignedInfo);
        return false;
    }
    // Now is the time to go through all the references and see if their DigestValues are good
    if (!CheckDigestedReferences())
    {
        SignedXmlDebugLog.LogVerificationFailure(this, SR.Log_VerificationFailed_References);
        return false;
    }
    SignedXmlDebugLog.LogVerificationResult(this, key, true);
    return true;
}
/// <summary>
/// Verifies an HMAC-based signature with the given keyed hash algorithm,
/// following the same format/SignedInfo/reference sequence as the
/// asymmetric overload.
/// </summary>
public bool CheckSignature(KeyedHashAlgorithm macAlg)
{
    if (!CheckSignatureFormat())
    {
        return false;
    }
    if (!CheckSignedInfo(macAlg))
    {
        SignedXmlDebugLog.LogVerificationFailure(this, SR.Log_VerificationFailed_SignedInfo);
        return false;
    }
    if (!CheckDigestedReferences())
    {
        SignedXmlDebugLog.LogVerificationFailure(this, SR.Log_VerificationFailed_References);
        return false;
    }
    SignedXmlDebugLog.LogVerificationResult(this, macAlg, true);
    return true;
}
/// <summary>
/// Verifies the signature with the certificate's public key; unless
/// <paramref name="verifySignatureOnly"/> is true, also validates the
/// certificate's key usage and builds its chain.
/// </summary>
public bool CheckSignature(X509Certificate2 certificate, bool verifySignatureOnly)
{
    if (!verifySignatureOnly)
    {
        // Check key usages to make sure it is good for signing.
        foreach (X509Extension extension in certificate.Extensions)
        {
            // OID comparison is non-linguistic: use string.Equals with an
            // explicit StringComparison rather than string.Compare(...) == 0.
            if (string.Equals(extension.Oid.Value, "2.5.29.15" /* szOID_KEY_USAGE */, StringComparison.OrdinalIgnoreCase))
            {
                X509KeyUsageExtension keyUsage = new X509KeyUsageExtension();
                keyUsage.CopyFrom(extension);
                SignedXmlDebugLog.LogVerifyKeyUsage(this, certificate, keyUsage);
                // The cert must allow DigitalSignature or NonRepudiation.
                bool validKeyUsage = (keyUsage.KeyUsages & X509KeyUsageFlags.DigitalSignature) != 0 ||
                                     (keyUsage.KeyUsages & X509KeyUsageFlags.NonRepudiation) != 0;
                if (!validKeyUsage)
                {
                    SignedXmlDebugLog.LogVerificationFailure(this, SR.Log_VerificationFailed_X509KeyUsage);
                    return false;
                }
                break;
            }
        }
        // Do the chain verification to make sure the certificate is valid.
        X509Chain chain = new X509Chain();
        // Certificates embedded in KeyInfo may serve as extra intermediates.
        chain.ChainPolicy.ExtraStore.AddRange(BuildBagOfCerts());
        bool chainVerified = chain.Build(certificate);
        SignedXmlDebugLog.LogVerifyX509Chain(this, chain, certificate);
        if (!chainVerified)
        {
            SignedXmlDebugLog.LogVerificationFailure(this, SR.Log_VerificationFailed_X509Chain);
            return false;
        }
    }
    // Verify with the certificate's public key; the key instance we create
    // here is owned by us and disposed when done.
    using (AsymmetricAlgorithm publicKey = Utils.GetAnyPublicKey(certificate))
    {
        if (!CheckSignature(publicKey))
        {
            return false;
        }
    }
    SignedXmlDebugLog.LogVerificationResult(this, certificate, true);
    return true;
}
/// <summary>
/// Computes the XML signature over the current references using SigningKey,
/// storing the result in SignatureValue.
/// </summary>
/// <exception cref="CryptographicException">No signing key, unsupported key
/// type, or the signature description/hash could not be created.</exception>
public void ComputeSignature()
{
    SignedXmlDebugLog.LogBeginSignatureComputation(this, _context);
    BuildDigestedReferences();
    // Load the key
    AsymmetricAlgorithm key = SigningKey;
    if (key == null)
        throw new CryptographicException(SR.Cryptography_Xml_LoadKeyFailed);
    // Pick a default signature method matching the key type when the caller
    // did not set one. (A redundant re-check of SignatureMethod == null in
    // the RSA branch has been removed; the outer condition already holds.)
    if (SignedInfo.SignatureMethod == null)
    {
        if (key is DSA)
        {
            SignedInfo.SignatureMethod = XmlDsigDSAUrl;
        }
        else if (key is RSA)
        {
            // Default to RSA-SHA1
            SignedInfo.SignatureMethod = XmlDsigRSASHA1Url;
        }
        else
        {
            throw new CryptographicException(SR.Cryptography_Xml_CreatedKeyFailed);
        }
    }
    // See if there is a signature description class defined in the Config file
    SignatureDescription signatureDescription = CryptoHelpers.CreateFromName(SignedInfo.SignatureMethod) as SignatureDescription;
    if (signatureDescription == null)
        throw new CryptographicException(SR.Cryptography_Xml_SignatureDescriptionNotCreated);
    HashAlgorithm hashAlg = signatureDescription.CreateDigest();
    if (hashAlg == null)
        throw new CryptographicException(SR.Cryptography_Xml_CreateHashAlgorithmFailed);
    // Canonicalize and digest SignedInfo into hashAlg; the formatter reads
    // the digest from the hash object, so the returned bytes are not needed.
    GetC14NDigest(hashAlg);
    AsymmetricSignatureFormatter asymmetricSignatureFormatter = signatureDescription.CreateFormatter(key);
    SignedXmlDebugLog.LogSigning(this, key, signatureDescription, hashAlg, asymmetricSignatureFormatter);
    m_signature.SignatureValue = asymmetricSignatureFormatter.CreateSignature(hashAlg);
}
/// <summary>
/// Computes an HMAC-based signature, honoring an optional SignatureLength
/// (in bits) that truncates the MAC output.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="macAlg"/> is null.</exception>
/// <exception cref="CryptographicException">Not an HMAC, unsupported hash,
/// or an invalid SignatureLength.</exception>
public void ComputeSignature(KeyedHashAlgorithm macAlg)
{
    if (macAlg == null)
        throw new ArgumentNullException(nameof(macAlg));
    HMAC hash = macAlg as HMAC;
    if (hash == null)
        throw new CryptographicException(SR.Cryptography_Xml_SignatureMethodKeyMismatch);
    // SignatureLength (if present) truncates the MAC; default is full size.
    int signatureLength;
    if (m_signature.SignedInfo.SignatureLength == null)
        signatureLength = hash.HashSize;
    else
        signatureLength = Convert.ToInt32(m_signature.SignedInfo.SignatureLength, null);
    // signatureLength should be less than hash size
    if (signatureLength < 0 || signatureLength > hash.HashSize)
        throw new CryptographicException(SR.Cryptography_Xml_InvalidSignatureLength);
    // Must be a whole number of bytes.
    if (signatureLength % 8 != 0)
        throw new CryptographicException(SR.Cryptography_Xml_InvalidSignatureLength2);
    BuildDigestedReferences();
    // Map the HMAC's underlying hash name to the corresponding xmldsig URI.
    switch (hash.HashName)
    {
        case "SHA1":
            SignedInfo.SignatureMethod = SignedXml.XmlDsigHMACSHA1Url;
            break;
        case "SHA256":
            SignedInfo.SignatureMethod = SignedXml.XmlDsigMoreHMACSHA256Url;
            break;
        case "SHA384":
            SignedInfo.SignatureMethod = SignedXml.XmlDsigMoreHMACSHA384Url;
            break;
        case "SHA512":
            SignedInfo.SignatureMethod = SignedXml.XmlDsigMoreHMACSHA512Url;
            break;
        case "MD5":
            SignedInfo.SignatureMethod = SignedXml.XmlDsigMoreHMACMD5Url;
            break;
        case "RIPEMD160":
            SignedInfo.SignatureMethod = SignedXml.XmlDsigMoreHMACRIPEMD160Url;
            break;
        default:
            throw new CryptographicException(SR.Cryptography_Xml_SignatureMethodKeyMismatch);
    }
    byte[] hashValue = GetC14NDigest(hash);
    SignedXmlDebugLog.LogSigning(this, hash);
    // Store only the first signatureLength/8 bytes of the MAC.
    m_signature.SignatureValue = new byte[signatureLength / 8];
    Buffer.BlockCopy(hashValue, 0, m_signature.SignatureValue, 0, signatureLength / 8);
}
//
// virtual methods
//
/// <summary>
/// Returns the next candidate public key from KeyInfo, or null when all
/// candidates are exhausted. Enumerator state (_keyInfoEnum/_x509Enum)
/// persists between calls so verification can try keys one at a time.
/// </summary>
protected virtual AsymmetricAlgorithm GetPublicKey()
{
    if (KeyInfo == null)
        throw new CryptographicException(SR.Cryptography_Xml_KeyInfoRequired);
    // Resume iterating certificates from a previously found X509Data clause.
    if (_x509Enum != null)
    {
        AsymmetricAlgorithm key = GetNextCertificatePublicKey();
        if (key != null)
            return key;
    }
    if (_keyInfoEnum == null)
        _keyInfoEnum = KeyInfo.GetEnumerator();
    // In our implementation, we move to the next KeyInfo clause which is an RSAKeyValue, DSAKeyValue or KeyInfoX509Data
    while (_keyInfoEnum.MoveNext())
    {
        RSAKeyValue rsaKeyValue = _keyInfoEnum.Current as RSAKeyValue;
        if (rsaKeyValue != null)
            return rsaKeyValue.Key;
        DSAKeyValue dsaKeyValue = _keyInfoEnum.Current as DSAKeyValue;
        if (dsaKeyValue != null)
            return dsaKeyValue.Key;
        KeyInfoX509Data x509Data = _keyInfoEnum.Current as KeyInfoX509Data;
        if (x509Data != null)
        {
            _x509Collection = Utils.BuildBagOfCerts(x509Data, CertUsageType.Verification);
            if (_x509Collection.Count > 0)
            {
                // Remember the enumerator so subsequent calls continue here.
                _x509Enum = _x509Collection.GetEnumerator();
                AsymmetricAlgorithm key = GetNextCertificatePublicKey();
                if (key != null)
                    return key;
            }
        }
    }
    return null;
}
private X509Certificate2Collection BuildBagOfCerts()
{
    // Gather every certificate carried in KeyInfo X509Data clauses so the
    // chain engine can use them as extra intermediates during validation.
    X509Certificate2Collection bag = new X509Certificate2Collection();
    if (KeyInfo == null)
        return bag;
    foreach (KeyInfoClause clause in KeyInfo)
    {
        KeyInfoX509Data x509Clause = clause as KeyInfoX509Data;
        if (x509Clause != null)
        {
            bag.AddRange(Utils.BuildBagOfCerts(x509Clause, CertUsageType.Verification));
        }
    }
    return bag;
}
private AsymmetricAlgorithm GetNextCertificatePublicKey()
{
    // Advance the certificate enumerator until an entry yields a key.
    while (_x509Enum.MoveNext())
    {
        X509Certificate2 candidate = (X509Certificate2)_x509Enum.Current;
        if (candidate == null)
            continue;
        return Utils.GetAnyPublicKey(candidate);
    }
    return null;
}
// Virtual so derived classes can customize how Id references are resolved;
// the default delegates to DefaultGetIdElement.
public virtual XmlElement GetIdElement(XmlDocument document, string idValue)
{
    return DefaultGetIdElement(document, idValue);
}
/// <summary>
/// Resolves an Id reference target: first via the DTD-aware GetElementById
/// (rejecting documents with duplicate ids), then by unique "Id"/"id"/"ID"
/// attribute matches.
/// </summary>
internal static XmlElement DefaultGetIdElement(XmlDocument document, string idValue)
{
    if (document == null)
        return null;
    try
    {
        XmlConvert.VerifyNCName(idValue);
    }
    catch (XmlException)
    {
        // Identifiers are required to be an NCName
        // (xml:id version 1.0, part 4, paragraph 2, bullet 1)
        //
        // If it isn't an NCName, it isn't allowed to match.
        return null;
    }
    // Get the element with idValue
    XmlElement elem = document.GetElementById(idValue);
    if (elem != null)
    {
        // Have to check for duplicate ID values from the DTD.
        // Strip the match's attributes in a clone; if the id still resolves,
        // a second element carries the same id and the reference is ambiguous.
        XmlDocument docClone = (XmlDocument)document.CloneNode(true);
        XmlElement cloneElem = docClone.GetElementById(idValue);
        // If it's null here we want to know about it, because it means that
        // GetElementById failed to work across the cloning, and our uniqueness
        // test is invalid.
        System.Diagnostics.Debug.Assert(cloneElem != null);
        // Guard against null anyways
        if (cloneElem != null)
        {
            cloneElem.Attributes.RemoveAll();
            XmlElement cloneElem2 = docClone.GetElementById(idValue);
            if (cloneElem2 != null)
            {
                throw new CryptographicException(
                    SR.Cryptography_Xml_InvalidReference);
            }
        }
        return elem;
    }
    // Fall back to attribute-based lookup with the common id spellings.
    elem = GetSingleReferenceTarget(document, "Id", idValue);
    if (elem != null)
        return elem;
    elem = GetSingleReferenceTarget(document, "id", idValue);
    if (elem != null)
        return elem;
    elem = GetSingleReferenceTarget(document, "ID", idValue);
    return elem;
}
//
// private methods
//
// True when _digestedSignedInfo reflects the current SignedInfo contents
// (invalidated by LoadXml and SignedInfo edits; see GetC14NDigest).
private bool _bCacheValid = false;
// Cached digest of the canonicalized SignedInfo element.
private byte[] _digestedSignedInfo = null;
private static bool DefaultSignatureFormatValidator(SignedXml signedXml)
{
    // Accept the signature only when it avoids truncated HMACs and uses an
    // explicitly allowed canonicalization algorithm. Short-circuiting keeps
    // the original evaluation order (HMAC check first).
    return !signedXml.DoesSignatureUseTruncatedHmac()
        && signedXml.DoesSignatureUseSafeCanonicalizationMethod();
}
// Validation function to see if the current signature is signed with a truncated HMAC - one which
// has a signature length of fewer bits than the whole HMAC output.
private bool DoesSignatureUseTruncatedHmac()
{
    // If we're not using the SignatureLength property, then we're not truncating the signature length
    if (SignedInfo.SignatureLength == null)
    {
        return false;
    }
    // See if we're signed with an HMAC algorithm
    HMAC hmac = CryptoHelpers.CreateFromName(SignatureMethod) as HMAC;
    if (hmac == null)
    {
        // We aren't signed with an HMAC algorithm, so we cannot have a truncated HMAC
        return false;
    }
    // Figure out how many bits the signature is using
    int actualSignatureSize = 0;
    if (!int.TryParse(SignedInfo.SignatureLength, out actualSignatureSize))
    {
        // If the value wasn't a valid integer, then we'll conservatively reject it all together
        return true;
    }
    // Make sure the full HMAC signature size is the same size that was specified in the XML
    // signature. If the actual signature size is not exactly the same as the full HMAC size, then
    // reject the signature.
    return actualSignatureSize != hmac.HashSize;
}
// Validation function to see if the signature uses a canonicalization
// algorithm from the explicit allow-list; anything else is logged and rejected.
private bool DoesSignatureUseSafeCanonicalizationMethod()
{
    string usedMethod = SignedInfo.CanonicalizationMethod;
    foreach (string allowedAlgorithm in SafeCanonicalizationMethods)
    {
        if (string.Equals(allowedAlgorithm, usedMethod, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }
    SignedXmlDebugLog.LogUnsafeCanonicalizationMethod(this, usedMethod, SafeCanonicalizationMethods);
    return false;
}
private bool ReferenceUsesSafeTransformMethods(Reference reference)
{
    // Every transform in the chain must be on the allow-list; a single
    // unsafe transform rejects the whole reference.
    TransformChain chain = reference.TransformChain;
    int count = chain.Count;
    for (int index = 0; index < count; index++)
    {
        if (!IsSafeTransform(chain[index].Algorithm))
        {
            return false;
        }
    }
    return true;
}
private bool IsSafeTransform(string transformAlgorithm)
{
    // Canonicalization algorithms double as transforms (xmldsig 6.6.1), so
    // consult the canonicalization allow-list first, then the built-in safe
    // transform list.
    foreach (string allowed in SafeCanonicalizationMethods)
    {
        if (string.Equals(allowed, transformAlgorithm, StringComparison.OrdinalIgnoreCase))
            return true;
    }
    foreach (string allowed in DefaultSafeTransformMethods)
    {
        if (string.Equals(allowed, transformAlgorithm, StringComparison.OrdinalIgnoreCase))
            return true;
    }
    // Not on either list: log the rejection with both lists for diagnostics.
    SignedXmlDebugLog.LogUnsafeTransformMethod(
        this,
        transformAlgorithm,
        SafeCanonicalizationMethods,
        DefaultSafeTransformMethods);
    return false;
}
// Lazily-built list of the built in canonicalization algorithm URIs that
// are always treated as safe.
private static IList<string> KnownCanonicalizationMethods
{
    get
    {
        if (s_knownCanonicalizationMethods == null)
        {
            // Benign race: concurrent initializers build identical lists.
            s_knownCanonicalizationMethods = new List<string>
            {
                XmlDsigC14NTransformUrl,
                XmlDsigC14NWithCommentsTransformUrl,
                XmlDsigExcC14NTransformUrl,
                XmlDsigExcC14NWithCommentsTransformUrl,
            };
        }
        return s_knownCanonicalizationMethods;
    }
}
// Lazily-built list of the built in non-canonicalization transform URIs
// that are treated as safe.
private static IList<string> DefaultSafeTransformMethods
{
    get
    {
        if (s_defaultSafeTransformMethods == null)
        {
            // KnownCanonicalizationMethods don't need to be added here,
            // because the validator automatically accepts those:
            // per xmldsig 6.6.1, any canonicalization algorithm usable for
            // CanonicalizationMethod can also be used as a Transform.
            s_defaultSafeTransformMethods = new List<string>
            {
                XmlDsigEnvelopedSignatureTransformUrl,
                XmlDsigBase64TransformUrl,
                XmlLicenseTransformUrl,
                XmlDecryptionTransformUrl,
            };
        }
        return s_defaultSafeTransformMethods;
    }
}
/// <summary>
/// Canonicalizes the SignedInfo element and digests it with the given hash,
/// caching the result until the signature content changes.
/// </summary>
private byte[] GetC14NDigest(HashAlgorithm hash)
{
    if (!_bCacheValid || !SignedInfo.CacheValid)
    {
        string baseUri = (_containingDocument == null ? null : _containingDocument.BaseURI);
        // Use the caller-provided resolver if one was set; otherwise a
        // secure resolver scoped to the document's base URI.
        XmlResolver resolver = (_bResolverSet ? _xmlResolver : new XmlSecureResolver(new XmlUrlResolver(), baseUri));
        XmlDocument doc = Utils.PreProcessElementInput(SignedInfo.GetXml(), resolver, baseUri);
        // Add non default namespaces in scope
        CanonicalXmlNodeList namespaces = (_context == null ? null : Utils.GetPropagatedAttributes(_context));
        SignedXmlDebugLog.LogNamespacePropagation(this, namespaces);
        Utils.AddNamespaces(doc.DocumentElement, namespaces);
        // Run the declared canonicalization transform and digest its output.
        Transform c14nMethodTransform = SignedInfo.CanonicalizationMethodObject;
        c14nMethodTransform.Resolver = resolver;
        c14nMethodTransform.BaseURI = baseUri;
        SignedXmlDebugLog.LogBeginCanonicalization(this, c14nMethodTransform);
        c14nMethodTransform.LoadInput(doc);
        SignedXmlDebugLog.LogCanonicalizedOutput(this, c14nMethodTransform);
        _digestedSignedInfo = c14nMethodTransform.GetDigestedOutput(hash);
        _bCacheValid = true;
    }
    return _digestedSignedInfo;
}
// Computes the "indirection level" of references[index]: 0 when its URI resolves
// directly (external URI, empty URI, or xpointer(/)), and parent level + 1 when
// the URI is a local "#id" that points at another Reference in the list.
// Results are memoized in _refLevelCache; _refProcessed marks entries in
// progress/complete so recursive chains terminate.
private int GetReferenceLevel(int index, ArrayList references)
{
if (_refProcessed[index]) return _refLevelCache[index];
_refProcessed[index] = true;
Reference reference = (Reference)references[index];
// Absent URIs and non-local URIs (not starting with '#') resolve directly.
if (reference.Uri == null || reference.Uri.Length == 0 || (reference.Uri.Length > 0 && reference.Uri[0] != '#'))
{
_refLevelCache[index] = 0;
return 0;
}
if (reference.Uri.Length > 0 && reference.Uri[0] == '#')
{
string idref = Utils.ExtractIdFromLocalUri(reference.Uri);
if (idref == "xpointer(/)")
{
_refLevelCache[index] = 0;
return 0;
}
// If this is pointing to another reference
for (int j = 0; j < references.Count; ++j)
{
if (((Reference)references[j]).Id == idref)
{
_refLevelCache[index] = GetReferenceLevel(j, references) + 1;
return (_refLevelCache[index]);
}
}
// Then the reference points to an object tag
_refLevelCache[index] = 0;
return 0;
}
// Malformed reference
// NOTE(review): the two branches above appear to cover every URI shape, so this
// throw looks unreachable; kept as defense in depth — confirm before removing.
throw new CryptographicException(SR.Cryptography_Xml_InvalidReference);
}
// Compares two Reference objects by their indirection level (see
// GetReferenceLevel) so references that depend on other references sort later.
private class ReferenceLevelSortOrder : IComparer
{
    private ArrayList _references = null;

    public ReferenceLevelSortOrder() { }

    // Master list used to resolve each compared reference's index and level.
    public ArrayList References
    {
        get { return _references; }
        set { _references = value; }
    }

    public int Compare(object a, object b)
    {
        Reference left = a as Reference;
        Reference right = b as Reference;

        // Find each reference's position in the master list (defaults to 0 if absent,
        // matching the original behavior).
        int leftIndex = 0;
        int rightIndex = 0;
        for (int position = 0; position < References.Count; position++)
        {
            Reference current = (Reference)References[position];
            if (current == left) leftIndex = position;
            if (current == right) rightIndex = position;
        }

        int leftLevel = left.SignedXml.GetReferenceLevel(leftIndex, References);
        int rightLevel = right.SignedXml.GetReferenceLevel(rightIndex, References);
        return leftLevel.CompareTo(rightLevel);
    }
}
// Computes the digest of every Reference in SignedInfo. References are processed
// in dependency order (lowest indirection level first) so that a reference which
// points at another reference hashes the already-updated target.
private void BuildDigestedReferences()
{
// Default the DigestMethod and Canonicalization
ArrayList references = SignedInfo.References;
// Reset the cache
_refProcessed = new bool[references.Count];
_refLevelCache = new int[references.Count];
ReferenceLevelSortOrder sortOrder = new ReferenceLevelSortOrder();
sortOrder.References = references;
// Don't alter the order of the references array list
ArrayList sortedReferences = new ArrayList();
foreach (Reference reference in references)
{
sortedReferences.Add(reference);
}
sortedReferences.Sort(sortOrder);
// Seed the candidate node list with all <Object> elements so references can target them.
CanonicalXmlNodeList nodeList = new CanonicalXmlNodeList();
foreach (DataObject obj in m_signature.ObjectList)
{
nodeList.Add(obj.GetXml());
}
foreach (Reference reference in sortedReferences)
{
// If no DigestMethod has yet been set, default it to sha1
// NOTE(review): SHA-1 default retained for compatibility; weak by modern standards.
if (reference.DigestMethod == null)
reference.DigestMethod = XmlDsigSHA1Url;
SignedXmlDebugLog.LogSigningReference(this, reference);
reference.UpdateHashValue(_containingDocument, nodeList);
// If this reference has an Id attribute, add it
if (reference.Id != null)
nodeList.Add(reference.GetXml());
}
}
// Verifies every Reference in SignedInfo: rejects any reference using an unsafe
// transform, recomputes each digest, and compares it against the stored
// DigestValue using a constant-time comparison. Returns false at the first
// failing reference.
private bool CheckDigestedReferences()
{
ArrayList references = m_signature.SignedInfo.References;
for (int i = 0; i < references.Count; ++i)
{
Reference digestedReference = (Reference)references[i];
if (!ReferenceUsesSafeTransformMethods(digestedReference))
{
return false;
}
SignedXmlDebugLog.LogVerifyReference(this, digestedReference);
byte[] calculatedHash = null;
try
{
calculatedHash = digestedReference.CalculateHashValue(_containingDocument, m_signature.ReferencedItems);
}
catch (CryptoSignedXmlRecursionException)
{
// Reference chains exceeding the recursion limit are treated as invalid
// rather than letting the exception escape to the caller.
SignedXmlDebugLog.LogSignedXmlRecursionLimit(this, digestedReference);
return false;
}
// Compare both hashes
SignedXmlDebugLog.LogVerifyReferenceHash(this, digestedReference, calculatedHash, digestedReference.DigestValue);
if (!CryptographicEquals(calculatedHash, digestedReference.DigestValue))
{
return false;
}
}
return true;
}
// Constant-time byte array comparison. The method must stay both NoInlining and
// NoOptimization: if it were inlined, its method-level attributes (including
// NoOptimization) would be lost, and the optimizer could reintroduce
// data-dependent shortcuts. The lengths of the inputs are not treated as secret;
// length is difficult to blind on modern CPUs, and the attacker is assumed to
// know the algorithms (and therefore the output sizes) in use.
[MethodImpl(MethodImplOptions.NoInlining | MethodImplOptions.NoOptimization)]
private static bool CryptographicEquals(byte[] a, byte[] b)
{
    System.Diagnostics.Debug.Assert(a != null);
    System.Diagnostics.Debug.Assert(b != null);

    // A length mismatch can fail fast — only contents are compared in constant time.
    if (a.Length != b.Length)
        return false;

    int accumulated = 0;
    unchecked
    {
        // Caching the length matters here because the optimizer is disabled.
        int length = a.Length;
        for (int position = 0; position < length; position++)
        {
            // Subtraction is used instead of XOR because XOR gets ever so slightly
            // faster as differences pile up. The OR keeps every differing byte's
            // contribution; since bytes fit in an int, the accumulator cannot
            // overflow back to zero a second time.
            accumulated = accumulated | (a[position] - b[position]);
        }
    }
    return accumulated == 0;
}
// Runs the optional signature *format* validation callback (this validates the
// shape of the signature, not the signature itself). Returns true when the
// format is acceptable, false when it is rejected.
private bool CheckSignatureFormat()
{
    // No validator configured: accept every format (compatibility mode with v3.5).
    if (_signatureFormatValidator == null)
        return true;

    SignedXmlDebugLog.LogBeginCheckSignatureFormat(this, _signatureFormatValidator);
    bool isValidFormat = _signatureFormatValidator(this);
    SignedXmlDebugLog.LogFormatValidationResult(this, isValidFormat);
    return isValidFormat;
}
// Verifies the SignatureValue over the canonicalized SignedInfo using an
// asymmetric key. Throws ArgumentNullException for a null key and
// CryptographicException when the SignatureMethod cannot be mapped to a
// SignatureDescription or a digest algorithm; returns false when the key's
// algorithm does not match the SignatureMethod or the signature does not verify.
private bool CheckSignedInfo(AsymmetricAlgorithm key)
{
if (key == null)
throw new ArgumentNullException(nameof(key));
SignedXmlDebugLog.LogBeginCheckSignedInfo(this, m_signature.SignedInfo);
SignatureDescription signatureDescription = CryptoHelpers.CreateFromName(SignatureMethod) as SignatureDescription;
if (signatureDescription == null)
throw new CryptographicException(SR.Cryptography_Xml_SignatureDescriptionNotCreated);
// Let's see if the key corresponds with the SignatureMethod
Type ta = Type.GetType(signatureDescription.KeyAlgorithm);
if (!IsKeyTheCorrectAlgorithm(key, ta))
return false;
HashAlgorithm hashAlgorithm = signatureDescription.CreateDigest();
if (hashAlgorithm == null)
throw new CryptographicException(SR.Cryptography_Xml_CreateHashAlgorithmFailed);
// Digest the canonicalized SignedInfo, then verify the signature over that digest.
byte[] hashval = GetC14NDigest(hashAlgorithm);
AsymmetricSignatureDeformatter asymmetricSignatureDeformatter = signatureDescription.CreateDeformatter(key);
SignedXmlDebugLog.LogVerifySignedInfo(this,
key,
signatureDescription,
hashAlgorithm,
asymmetricSignatureDeformatter,
hashval,
m_signature.SignatureValue);
return asymmetricSignatureDeformatter.VerifySignature(hashval, m_signature.SignatureValue);
}
// Verifies the (possibly truncated) HMAC SignatureValue over the canonicalized
// SignedInfo. Throws ArgumentNullException for a null MAC and
// CryptographicException when SignatureLength is out of range, not a multiple
// of 8 bits, missing, or does not match the stored SignatureValue length.
//
// FIX: the MAC comparison previously returned at the first mismatching byte,
// leaking the position of the first difference through timing. The comparison
// now accumulates differences across the full signature length and branches
// only once at the end, while producing the identical true/false result.
private bool CheckSignedInfo(KeyedHashAlgorithm macAlg)
{
    if (macAlg == null)
        throw new ArgumentNullException(nameof(macAlg));
    SignedXmlDebugLog.LogBeginCheckSignedInfo(this, m_signature.SignedInfo);
    int signatureLength;
    if (m_signature.SignedInfo.SignatureLength == null)
        signatureLength = macAlg.HashSize;
    else
        signatureLength = Convert.ToInt32(m_signature.SignedInfo.SignatureLength, null);
    // signatureLength (bits) must be non-negative and no larger than the hash size
    if (signatureLength < 0 || signatureLength > macAlg.HashSize)
        throw new CryptographicException(SR.Cryptography_Xml_InvalidSignatureLength);
    if (signatureLength % 8 != 0)
        throw new CryptographicException(SR.Cryptography_Xml_InvalidSignatureLength2);
    if (m_signature.SignatureValue == null)
        throw new CryptographicException(SR.Cryptography_Xml_SignatureValueRequired);
    if (m_signature.SignatureValue.Length != signatureLength / 8)
        throw new CryptographicException(SR.Cryptography_Xml_InvalidSignatureLength);
    // Calculate the hash
    byte[] hashValue = GetC14NDigest(macAlg);
    SignedXmlDebugLog.LogVerifySignedInfo(this, macAlg, hashValue, m_signature.SignatureValue);
    // Constant-time comparison of the stored signature against the leading
    // signatureLength/8 bytes of the computed MAC.
    int difference = 0;
    for (int i = 0; i < m_signature.SignatureValue.Length; i++)
    {
        difference |= m_signature.SignatureValue[i] ^ hashValue[i];
    }
    return difference == 0;
}
// Resolves a URI="#idValue" reference to the single element carrying that id
// attribute. Returns null when nothing matches, and throws when the match is
// ambiguous.
private static XmlElement GetSingleReferenceTarget(XmlDocument document, string idAttributeName, string idValue)
{
    // idValue has already been validated as an NCName (unless overridden for
    // compatibility), so it needs no further escaping before being embedded
    // into the XPath query.
    string xPath = string.Concat("//*[@", idAttributeName, "=\"", idValue, "\"]");

    // http://www.w3.org/TR/xmldsig-core/#sec-ReferenceProcessingModel for URI="#chapter1":
    //   "Identifies a node-set containing the element with ID attribute value 'chapter1'"
    // Note the singular: an ambiguous match makes the document invalid, and is
    // treated like a hard failure (rather than silently picking a secondary
    // element that was not the originally signed one).
    XmlNodeList matches = document.SelectNodes(xPath);
    if (matches == null || matches.Count == 0)
        return null;
    if (matches.Count > 1)
        throw new CryptographicException(SR.Cryptography_Xml_InvalidReference);
    return matches[0] as XmlElement;
}
// Decides whether the supplied key's algorithm matches the algorithm type named
// by the SignatureDescription. "expectedType" historically names provider-specific
// implementations (e.g. "RSACryptoServiceProvider") rather than the algorithm
// base class ("RSA"), and third-party SignatureDescriptions are outside our
// control — so when the direct checks fail, we walk expectedType's ancestry up
// to the direct subclass of AsymmetricAlgorithm and treat that as the algorithm
// identifier.
private static bool IsKeyTheCorrectAlgorithm(AsymmetricAlgorithm key, Type expectedType)
{
    Type actualType = key.GetType();

    // Exact match, or (4.6 compatibility) the expected type is a subclass of the
    // key's actual type.
    if (actualType == expectedType || expectedType.IsSubclassOf(actualType))
        return true;

    // Walk up to the ancestor that derives directly from AsymmetricAlgorithm.
    Type algorithmRoot = expectedType;
    while (algorithmRoot != null && algorithmRoot.BaseType != typeof(AsymmetricAlgorithm))
    {
        algorithmRoot = algorithmRoot.BaseType;
    }

    // Not even a subclass of AsymmetricAlgorithm: for 4.6 compatibility, return
    // false rather than throw.
    if (algorithmRoot == null)
        return false;

    return actualType.IsSubclassOf(algorithmRoot);
}
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.Serialization.Formatters;
using Arch.CMessaging.Client.Newtonsoft.Json.Converters;
using Arch.CMessaging.Client.Newtonsoft.Json.Serialization;
using Arch.CMessaging.Client.Newtonsoft.Json.Utilities;
using System.Runtime.Serialization;
using ErrorEventArgs = Arch.CMessaging.Client.Newtonsoft.Json.Serialization.ErrorEventArgs;
namespace Arch.CMessaging.Client.Newtonsoft.Json
{
/// <summary>
/// Serializes and deserializes objects into and from the JSON format.
/// The <see cref="JsonSerializer"/> enables you to control how objects are encoded into JSON.
/// </summary>
public class JsonSerializer
{
// Handling options mirrored from JsonSerializerSettings; internal so that
// ApplySerializerSettings can read the backing fields directly.
internal TypeNameHandling _typeNameHandling;
internal FormatterAssemblyStyle _typeNameAssemblyFormat;
internal PreserveReferencesHandling _preserveReferencesHandling;
internal ReferenceLoopHandling _referenceLoopHandling;
internal MissingMemberHandling _missingMemberHandling;
internal ObjectCreationHandling _objectCreationHandling;
internal NullValueHandling _nullValueHandling;
internal DefaultValueHandling _defaultValueHandling;
internal ConstructorHandling _constructorHandling;
internal MetadataPropertyHandling _metadataPropertyHandling;
internal JsonConverterCollection _converters;
internal IContractResolver _contractResolver;
internal ITraceWriter _traceWriter;
internal SerializationBinder _binder;
internal StreamingContext _context;
private IReferenceResolver _referenceResolver;
// Nullable backing fields: null means "not explicitly set", so the public
// properties fall back to the JsonSerializerSettings defaults.
private Formatting? _formatting;
private DateFormatHandling? _dateFormatHandling;
private DateTimeZoneHandling? _dateTimeZoneHandling;
private DateParseHandling? _dateParseHandling;
private FloatFormatHandling? _floatFormatHandling;
private FloatParseHandling? _floatParseHandling;
private StringEscapeHandling? _stringEscapeHandling;
private CultureInfo _culture;
private int? _maxDepth;
// True once MaxDepth has been explicitly assigned (distinguishes unset from null).
private bool _maxDepthSet;
private bool? _checkAdditionalContent;
private string _dateFormatString;
// True once DateFormatString has been explicitly assigned.
private bool _dateFormatStringSet;
/// <summary>
/// Occurs when the <see cref="JsonSerializer"/> errors during serialization and deserialization.
/// </summary>
public virtual event EventHandler<ErrorEventArgs> Error;
/// <summary>
/// Gets or sets the <see cref="IReferenceResolver"/> used by the serializer when resolving references.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when set to null.</exception>
public virtual IReferenceResolver ReferenceResolver
{
get { return GetReferenceResolver(); }
set
{
if (value == null)
throw new ArgumentNullException("value", "Reference resolver cannot be null.");
_referenceResolver = value;
}
}
/// <summary>
/// Gets or sets the <see cref="SerializationBinder"/> used by the serializer when resolving type names.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when set to null.</exception>
public virtual SerializationBinder Binder
{
get { return _binder; }
set
{
if (value == null)
throw new ArgumentNullException("value", "Serialization binder cannot be null.");
_binder = value;
}
}
/// <summary>
/// Gets or sets the <see cref="ITraceWriter"/> used by the serializer when writing trace messages.
/// </summary>
/// <value>The trace writer.</value>
public virtual ITraceWriter TraceWriter
{
get { return _traceWriter; }
set { _traceWriter = value; }
}
/// <summary>
/// Gets or sets how type name writing and reading is handled by the serializer.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual TypeNameHandling TypeNameHandling
{
get { return _typeNameHandling; }
set
{
if (value < TypeNameHandling.None || value > TypeNameHandling.Auto)
throw new ArgumentOutOfRangeException("value");
_typeNameHandling = value;
}
}
/// <summary>
/// Gets or sets how a type name assembly is written and resolved by the serializer.
/// </summary>
/// <value>The type name assembly format.</value>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual FormatterAssemblyStyle TypeNameAssemblyFormat
{
get { return _typeNameAssemblyFormat; }
set
{
if (value < FormatterAssemblyStyle.Simple || value > FormatterAssemblyStyle.Full)
throw new ArgumentOutOfRangeException("value");
_typeNameAssemblyFormat = value;
}
}
/// <summary>
/// Gets or sets how object references are preserved by the serializer.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual PreserveReferencesHandling PreserveReferencesHandling
{
get { return _preserveReferencesHandling; }
set
{
if (value < PreserveReferencesHandling.None || value > PreserveReferencesHandling.All)
throw new ArgumentOutOfRangeException("value");
_preserveReferencesHandling = value;
}
}
/// <summary>
/// Get or set how reference loops (e.g. a class referencing itself) is handled.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual ReferenceLoopHandling ReferenceLoopHandling
{
get { return _referenceLoopHandling; }
set
{
if (value < ReferenceLoopHandling.Error || value > ReferenceLoopHandling.Serialize)
throw new ArgumentOutOfRangeException("value");
_referenceLoopHandling = value;
}
}
/// <summary>
/// Get or set how missing members (e.g. JSON contains a property that isn't a member on the object) are handled during deserialization.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual MissingMemberHandling MissingMemberHandling
{
get { return _missingMemberHandling; }
set
{
if (value < MissingMemberHandling.Ignore || value > MissingMemberHandling.Error)
throw new ArgumentOutOfRangeException("value");
_missingMemberHandling = value;
}
}
/// <summary>
/// Get or set how null values are handled during serialization and deserialization.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual NullValueHandling NullValueHandling
{
get { return _nullValueHandling; }
set
{
if (value < NullValueHandling.Include || value > NullValueHandling.Ignore)
throw new ArgumentOutOfRangeException("value");
_nullValueHandling = value;
}
}
/// <summary>
/// Get or set how null default are handled during serialization and deserialization.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual DefaultValueHandling DefaultValueHandling
{
get { return _defaultValueHandling; }
set
{
if (value < DefaultValueHandling.Include || value > DefaultValueHandling.IgnoreAndPopulate)
throw new ArgumentOutOfRangeException("value");
_defaultValueHandling = value;
}
}
/// <summary>
/// Gets or sets how objects are created during deserialization.
/// </summary>
/// <value>The object creation handling.</value>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual ObjectCreationHandling ObjectCreationHandling
{
get { return _objectCreationHandling; }
set
{
if (value < ObjectCreationHandling.Auto || value > ObjectCreationHandling.Replace)
throw new ArgumentOutOfRangeException("value");
_objectCreationHandling = value;
}
}
/// <summary>
/// Gets or sets how constructors are used during deserialization.
/// </summary>
/// <value>The constructor handling.</value>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual ConstructorHandling ConstructorHandling
{
get { return _constructorHandling; }
set
{
if (value < ConstructorHandling.Default || value > ConstructorHandling.AllowNonPublicDefaultConstructor)
throw new ArgumentOutOfRangeException("value");
_constructorHandling = value;
}
}
/// <summary>
/// Gets or sets how metadata properties are used during deserialization.
/// </summary>
/// <value>The metadata properties handling.</value>
/// <exception cref="ArgumentOutOfRangeException">Thrown when set outside the defined enum range.</exception>
public virtual MetadataPropertyHandling MetadataPropertyHandling
{
get { return _metadataPropertyHandling; }
set
{
if (value < MetadataPropertyHandling.Default || value > MetadataPropertyHandling.Ignore)
throw new ArgumentOutOfRangeException("value");
_metadataPropertyHandling = value;
}
}
/// <summary>
/// Gets a collection <see cref="JsonConverter"/> that will be used during serialization.
/// </summary>
/// <value>Collection <see cref="JsonConverter"/> that will be used during serialization.</value>
public virtual JsonConverterCollection Converters
{
get
{
// Lazily created so an unused serializer allocates no converter collection.
if (_converters == null)
_converters = new JsonConverterCollection();
return _converters;
}
}
/// <summary>
/// Gets or sets the contract resolver used by the serializer when
/// serializing .NET objects to JSON and vice versa.
/// Setting null resets the resolver to <see cref="DefaultContractResolver"/>.
/// </summary>
public virtual IContractResolver ContractResolver
{
get { return _contractResolver; }
set { _contractResolver = value ?? DefaultContractResolver.Instance; }
}
/// <summary>
/// Gets or sets the <see cref="StreamingContext"/> used by the serializer when invoking serialization callback methods.
/// </summary>
/// <value>The context.</value>
public virtual StreamingContext Context
{
get { return _context; }
set { _context = value; }
}
/// <summary>
/// Indicates how JSON text output is formatted.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual Formatting Formatting
{
get { return _formatting ?? JsonSerializerSettings.DefaultFormatting; }
set { _formatting = value; }
}
/// <summary>
/// Get or set how dates are written to JSON text.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual DateFormatHandling DateFormatHandling
{
get { return _dateFormatHandling ?? JsonSerializerSettings.DefaultDateFormatHandling; }
set { _dateFormatHandling = value; }
}
/// <summary>
/// Get or set how <see cref="DateTime"/> time zones are handling during serialization and deserialization.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual DateTimeZoneHandling DateTimeZoneHandling
{
get { return _dateTimeZoneHandling ?? JsonSerializerSettings.DefaultDateTimeZoneHandling; }
set { _dateTimeZoneHandling = value; }
}
/// <summary>
/// Get or set how date formatted strings, e.g. "\/Date(1198908717056)\/" and "2012-03-21T05:40Z", are parsed when reading JSON.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual DateParseHandling DateParseHandling
{
get { return _dateParseHandling ?? JsonSerializerSettings.DefaultDateParseHandling; }
set { _dateParseHandling = value; }
}
/// <summary>
/// Get or set how floating point numbers, e.g. 1.0 and 9.9, are parsed when reading JSON text.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual FloatParseHandling FloatParseHandling
{
get { return _floatParseHandling ?? JsonSerializerSettings.DefaultFloatParseHandling; }
set { _floatParseHandling = value; }
}
/// <summary>
/// Get or set how special floating point numbers, e.g. <see cref="F:System.Double.NaN"/>,
/// <see cref="F:System.Double.PositiveInfinity"/> and <see cref="F:System.Double.NegativeInfinity"/>,
/// are written as JSON text.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual FloatFormatHandling FloatFormatHandling
{
get { return _floatFormatHandling ?? JsonSerializerSettings.DefaultFloatFormatHandling; }
set { _floatFormatHandling = value; }
}
/// <summary>
/// Get or set how strings are escaped when writing JSON text.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
public virtual StringEscapeHandling StringEscapeHandling
{
get { return _stringEscapeHandling ?? JsonSerializerSettings.DefaultStringEscapeHandling; }
set { _stringEscapeHandling = value; }
}
/// <summary>
/// Get or set how <see cref="DateTime"/> and <see cref="DateTimeOffset"/> values are formatting when writing JSON text.
/// </summary>
public virtual string DateFormatString
{
get { return _dateFormatString ?? JsonSerializerSettings.DefaultDateFormatString; }
set
{
// Remember the explicit assignment so settings application can distinguish
// "set to null" from "never set".
_dateFormatString = value;
_dateFormatStringSet = true;
}
}
/// <summary>
/// Gets or sets the culture used when reading JSON. Defaults to <see cref="CultureInfo.InvariantCulture"/>.
/// </summary>
public virtual CultureInfo Culture
{
get { return _culture ?? JsonSerializerSettings.DefaultCulture; }
set { _culture = value; }
}
/// <summary>
/// Gets or sets the maximum depth allowed when reading JSON. Reading past this depth will throw a <see cref="JsonReaderException"/>.
/// </summary>
/// <exception cref="ArgumentException">Thrown when set to a non-positive value.</exception>
public virtual int? MaxDepth
{
get { return _maxDepth; }
set
{
if (value <= 0)
throw new ArgumentException("Value must be positive.", "value");
_maxDepth = value;
_maxDepthSet = true;
}
}
/// <summary>
/// Gets a value indicating whether there will be a check for additional JSON content after deserializing an object.
/// Falls back to the <see cref="JsonSerializerSettings"/> default until explicitly set.
/// </summary>
/// <value>
/// <c>true</c> if there will be a check for additional JSON content after deserializing an object; otherwise, <c>false</c>.
/// </value>
public virtual bool CheckAdditionalContent
{
get { return _checkAdditionalContent ?? JsonSerializerSettings.DefaultCheckAdditionalContent; }
set { _checkAdditionalContent = value; }
}
// True when CheckAdditionalContent was explicitly assigned (distinguishing an
// explicit value from the "unset, use the settings default" state).
internal bool IsCheckAdditionalContentSet()
{
    return _checkAdditionalContent.HasValue;
}
/// <summary>
/// Initializes a new instance of the <see cref="JsonSerializer"/> class.
/// </summary>
public JsonSerializer()
{
// Seed every handling option from the JsonSerializerSettings defaults so a
// bare serializer behaves like one created from default settings.
_referenceLoopHandling = JsonSerializerSettings.DefaultReferenceLoopHandling;
_missingMemberHandling = JsonSerializerSettings.DefaultMissingMemberHandling;
_nullValueHandling = JsonSerializerSettings.DefaultNullValueHandling;
_defaultValueHandling = JsonSerializerSettings.DefaultDefaultValueHandling;
_objectCreationHandling = JsonSerializerSettings.DefaultObjectCreationHandling;
_preserveReferencesHandling = JsonSerializerSettings.DefaultPreserveReferencesHandling;
_constructorHandling = JsonSerializerSettings.DefaultConstructorHandling;
_typeNameHandling = JsonSerializerSettings.DefaultTypeNameHandling;
_metadataPropertyHandling = JsonSerializerSettings.DefaultMetadataPropertyHandling;
_context = JsonSerializerSettings.DefaultContext;
_binder = DefaultSerializationBinder.Instance;
_culture = JsonSerializerSettings.DefaultCulture;
_contractResolver = DefaultContractResolver.Instance;
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance.
/// The returned serializer does not apply <see cref="JsonConvert.DefaultSettings"/>.
/// </summary>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </returns>
public static JsonSerializer Create()
{
    JsonSerializer serializer = new JsonSerializer();
    return serializer;
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </summary>
/// <param name="settings">The settings to be applied to the <see cref="JsonSerializer"/>; may be null.</param>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will not use default settings.
/// </returns>
public static JsonSerializer Create(JsonSerializerSettings settings)
{
    JsonSerializer serializer = Create();
    if (settings == null)
    {
        return serializer;
    }
    ApplySerializerSettings(serializer, settings);
    return serializer;
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </summary>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </returns>
public static JsonSerializer CreateDefault()
{
    // Snapshot the static delegate locally so a concurrent reassignment of
    // JsonConvert.DefaultSettings cannot race between the check and the call.
    Func<JsonSerializerSettings> settingsFactory = JsonConvert.DefaultSettings;
    if (settingsFactory == null)
    {
        return Create(null);
    }
    return Create(settingsFactory());
}
/// <summary>
/// Creates a new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will use default settings, with <paramref name="settings"/> applied on top.
/// </summary>
/// <param name="settings">The settings to be applied to the <see cref="JsonSerializer"/>; may be null.</param>
/// <returns>
/// A new <see cref="JsonSerializer"/> instance using the specified <see cref="JsonSerializerSettings"/>.
/// The <see cref="JsonSerializer"/> will use default settings.
/// </returns>
public static JsonSerializer CreateDefault(JsonSerializerSettings settings)
{
    JsonSerializer serializer = CreateDefault();
    if (settings == null)
    {
        return serializer;
    }
    ApplySerializerSettings(serializer, settings);
    return serializer;
}
// Copies every explicitly-set value from the settings object onto the serializer.
// Only fields the user assigned (non-null nullable backing fields, or the *Set
// flags) are copied, so unset settings never override serializer defaults or
// reader/writer-level values.
private static void ApplySerializerSettings(JsonSerializer serializer, JsonSerializerSettings settings)
{
if (!CollectionUtils.IsNullOrEmpty(settings.Converters))
{
// insert settings converters at the beginning so they take precedence
// if user wants to remove one of the default converters they will have to do it manually
for (int i = 0; i < settings.Converters.Count; i++)
{
serializer.Converters.Insert(i, settings.Converters[i]);
}
}
// serializer specific
if (settings._typeNameHandling != null)
serializer.TypeNameHandling = settings.TypeNameHandling;
if (settings._metadataPropertyHandling != null)
serializer.MetadataPropertyHandling = settings.MetadataPropertyHandling;
if (settings._typeNameAssemblyFormat != null)
serializer.TypeNameAssemblyFormat = settings.TypeNameAssemblyFormat;
if (settings._preserveReferencesHandling != null)
serializer.PreserveReferencesHandling = settings.PreserveReferencesHandling;
if (settings._referenceLoopHandling != null)
serializer.ReferenceLoopHandling = settings.ReferenceLoopHandling;
if (settings._missingMemberHandling != null)
serializer.MissingMemberHandling = settings.MissingMemberHandling;
if (settings._objectCreationHandling != null)
serializer.ObjectCreationHandling = settings.ObjectCreationHandling;
if (settings._nullValueHandling != null)
serializer.NullValueHandling = settings.NullValueHandling;
if (settings._defaultValueHandling != null)
serializer.DefaultValueHandling = settings.DefaultValueHandling;
if (settings._constructorHandling != null)
serializer.ConstructorHandling = settings.ConstructorHandling;
if (settings._context != null)
serializer.Context = settings.Context;
if (settings._checkAdditionalContent != null)
serializer._checkAdditionalContent = settings._checkAdditionalContent;
if (settings.Error != null)
serializer.Error += settings.Error;
if (settings.ContractResolver != null)
serializer.ContractResolver = settings.ContractResolver;
if (settings.ReferenceResolver != null)
serializer.ReferenceResolver = settings.ReferenceResolver;
if (settings.TraceWriter != null)
serializer.TraceWriter = settings.TraceWriter;
if (settings.Binder != null)
serializer.Binder = settings.Binder;
// reader/writer specific
// unset values won't override reader/writer set values
if (settings._formatting != null)
serializer._formatting = settings._formatting;
if (settings._dateFormatHandling != null)
serializer._dateFormatHandling = settings._dateFormatHandling;
if (settings._dateTimeZoneHandling != null)
serializer._dateTimeZoneHandling = settings._dateTimeZoneHandling;
if (settings._dateParseHandling != null)
serializer._dateParseHandling = settings._dateParseHandling;
if (settings._dateFormatStringSet)
{
serializer._dateFormatString = settings._dateFormatString;
serializer._dateFormatStringSet = settings._dateFormatStringSet;
}
if (settings._floatFormatHandling != null)
serializer._floatFormatHandling = settings._floatFormatHandling;
if (settings._floatParseHandling != null)
serializer._floatParseHandling = settings._floatParseHandling;
if (settings._stringEscapeHandling != null)
serializer._stringEscapeHandling = settings._stringEscapeHandling;
if (settings._culture != null)
serializer._culture = settings._culture;
if (settings._maxDepthSet)
{
serializer._maxDepth = settings._maxDepth;
serializer._maxDepthSet = settings._maxDepthSet;
}
}
/// <summary>
/// Populates the JSON values onto the target object.
/// </summary>
/// <param name="reader">The <see cref="TextReader"/> that contains the JSON structure to reader values from.</param>
/// <param name="target">The target object to populate values onto.</param>
public void Populate(TextReader reader, object target)
{
    // Wrap the text reader in a JSON reader and delegate to the JsonReader overload.
    JsonTextReader jsonReader = new JsonTextReader(reader);
    Populate(jsonReader, target);
}
/// <summary>
/// Populates the JSON values onto the target object.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> that contains the JSON structure to reader values from.</param>
/// <param name="target">The target object to populate values onto.</param>
public void Populate(JsonReader reader, object target)
{
    // Delegates to the virtual core so derived serializers can intercept population.
    PopulateInternal(reader, target);
}
internal virtual void PopulateInternal(JsonReader reader, object target)
{
    // Core of Populate: reads JSON from the reader and writes the values onto the
    // existing target object, temporarily applying this serializer's reader options.
    ValidationUtils.ArgumentNotNull(reader, "reader");
    ValidationUtils.ArgumentNotNull(target, "target");

    // Push serializer-level options onto the reader, remembering what they replaced.
    CultureInfo oldCulture;
    DateTimeZoneHandling? oldDateTimeZoneHandling;
    DateParseHandling? oldDateParseHandling;
    FloatParseHandling? oldFloatParseHandling;
    int? oldMaxDepth;
    string oldDateFormatString;
    SetupReader(reader, out oldCulture, out oldDateTimeZoneHandling, out oldDateParseHandling, out oldFloatParseHandling, out oldMaxDepth, out oldDateFormatString);

    // When verbose tracing is on, wrap the reader so the consumed JSON can be logged.
    TraceJsonReader tracingReader = null;
    if (TraceWriter != null && TraceWriter.LevelFilter >= TraceLevel.Verbose)
    {
        tracingReader = new TraceJsonReader(reader);
    }

    JsonSerializerInternalReader internalReader = new JsonSerializerInternalReader(this);
    internalReader.Populate(tracingReader ?? reader, target);

    if (tracingReader != null)
    {
        TraceWriter.Trace(TraceLevel.Verbose, "Deserialized JSON: " + Environment.NewLine + tracingReader.GetJson(), null);
    }

    // Restore the reader to the configuration the caller handed us.
    ResetReader(reader, oldCulture, oldDateTimeZoneHandling, oldDateParseHandling, oldFloatParseHandling, oldMaxDepth, oldDateFormatString);
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="JsonReader"/>.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> that contains the JSON structure to deserialize.</param>
/// <returns>The <see cref="Object"/> being deserialized.</returns>
public object Deserialize(JsonReader reader)
{
    // No target type supplied: the serializer infers the CLR type from the JSON content.
    return Deserialize(reader, null);
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="TextReader"/>
/// into an instance of the specified type.
/// </summary>
/// <param name="reader">The <see cref="TextReader"/> containing the object.</param>
/// <param name="objectType">The <see cref="Type"/> of object being deserialized.</param>
/// <returns>The instance of <paramref name="objectType"/> being deserialized.</returns>
public object Deserialize(TextReader reader, Type objectType)
{
    // Wrap the plain TextReader in a JsonTextReader and defer to the JsonReader overload.
    JsonTextReader jsonReader = new JsonTextReader(reader);
    return Deserialize(jsonReader, objectType);
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="JsonReader"/>
/// into an instance of the specified type.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> containing the object.</param>
/// <typeparam name="T">The type of the object to deserialize.</typeparam>
/// <returns>The instance of <typeparamref name="T"/> being deserialized.</returns>
public T Deserialize<T>(JsonReader reader)
{
    // The non-generic overload does the work; cast the result to the requested type.
    object result = Deserialize(reader, typeof(T));
    return (T)result;
}
/// <summary>
/// Deserializes the Json structure contained by the specified <see cref="JsonReader"/>
/// into an instance of the specified type.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> containing the object.</param>
/// <param name="objectType">The <see cref="Type"/> of object being deserialized.</param>
/// <returns>The instance of <paramref name="objectType"/> being deserialized.</returns>
public object Deserialize(JsonReader reader, Type objectType)
{
    // Delegates to the virtual core so derived serializers can intercept deserialization.
    return DeserializeInternal(reader, objectType);
}
internal virtual object DeserializeInternal(JsonReader reader, Type objectType)
{
    // Core of Deserialize: reads a value of objectType (or an inferred type when null)
    // from the reader, temporarily applying this serializer's reader options.
    ValidationUtils.ArgumentNotNull(reader, "reader");

    // Push serializer-level options onto the reader, remembering what they replaced.
    CultureInfo oldCulture;
    DateTimeZoneHandling? oldDateTimeZoneHandling;
    DateParseHandling? oldDateParseHandling;
    FloatParseHandling? oldFloatParseHandling;
    int? oldMaxDepth;
    string oldDateFormatString;
    SetupReader(reader, out oldCulture, out oldDateTimeZoneHandling, out oldDateParseHandling, out oldFloatParseHandling, out oldMaxDepth, out oldDateFormatString);

    // When verbose tracing is on, wrap the reader so the consumed JSON can be logged.
    TraceJsonReader tracingReader = null;
    if (TraceWriter != null && TraceWriter.LevelFilter >= TraceLevel.Verbose)
    {
        tracingReader = new TraceJsonReader(reader);
    }

    JsonSerializerInternalReader internalReader = new JsonSerializerInternalReader(this);
    object value = internalReader.Deserialize(tracingReader ?? reader, objectType, CheckAdditionalContent);

    if (tracingReader != null)
    {
        TraceWriter.Trace(TraceLevel.Verbose, "Deserialized JSON: " + Environment.NewLine + tracingReader.GetJson(), null);
    }

    // Restore the reader to the configuration the caller handed us.
    ResetReader(reader, oldCulture, oldDateTimeZoneHandling, oldDateParseHandling, oldFloatParseHandling, oldMaxDepth, oldDateFormatString);
    return value;
}
private void SetupReader(JsonReader reader, out CultureInfo previousCulture, out DateTimeZoneHandling? previousDateTimeZoneHandling, out DateParseHandling? previousDateParseHandling, out FloatParseHandling? previousFloatParseHandling, out int? previousMaxDepth, out string previousDateFormatString)
{
    // Pushes this serializer's configured options onto the reader, reporting the
    // values they replaced so ResetReader can undo the changes afterwards.
    // For the nullable-enum/culture options, a null "previous" value means "not changed".
    if (_culture != null && !_culture.Equals(reader.Culture))
    {
        previousCulture = reader.Culture;
        reader.Culture = _culture;
    }
    else
    {
        previousCulture = null;
    }
    if (_dateTimeZoneHandling != null && reader.DateTimeZoneHandling != _dateTimeZoneHandling)
    {
        previousDateTimeZoneHandling = reader.DateTimeZoneHandling;
        reader.DateTimeZoneHandling = _dateTimeZoneHandling.Value;
    }
    else
    {
        previousDateTimeZoneHandling = null;
    }
    if (_dateParseHandling != null && reader.DateParseHandling != _dateParseHandling)
    {
        previousDateParseHandling = reader.DateParseHandling;
        reader.DateParseHandling = _dateParseHandling.Value;
    }
    else
    {
        previousDateParseHandling = null;
    }
    if (_floatParseHandling != null && reader.FloatParseHandling != _floatParseHandling)
    {
        previousFloatParseHandling = reader.FloatParseHandling;
        reader.FloatParseHandling = _floatParseHandling.Value;
    }
    else
    {
        previousFloatParseHandling = null;
    }
    // ResetReader restores MaxDepth whenever _maxDepthSet is true, so the previous value
    // must be captured unconditionally here. Capturing it only when the values differed
    // (the old behavior) made ResetReader clobber reader.MaxDepth with null when the
    // reader already had the configured depth.
    if (_maxDepthSet)
    {
        previousMaxDepth = reader.MaxDepth;
        reader.MaxDepth = _maxDepth;
    }
    else
    {
        previousMaxDepth = null;
    }
    // Same reasoning as MaxDepth: ResetReader restores whenever _dateFormatStringSet.
    if (_dateFormatStringSet)
    {
        previousDateFormatString = reader.DateFormatString;
        reader.DateFormatString = _dateFormatString;
    }
    else
    {
        previousDateFormatString = null;
    }
    // Share the contract resolver's name table with a text reader so repeated property
    // names are interned rather than re-allocated.
    JsonTextReader textReader = reader as JsonTextReader;
    if (textReader != null)
    {
        DefaultContractResolver resolver = _contractResolver as DefaultContractResolver;
        if (resolver != null)
            textReader.NameTable = resolver.GetState().NameTable;
    }
}
private void ResetReader(JsonReader reader, CultureInfo previousCulture, DateTimeZoneHandling? previousDateTimeZoneHandling, DateParseHandling? previousDateParseHandling, FloatParseHandling? previousFloatParseHandling, int? previousMaxDepth, string previousDateFormatString)
{
    // Undoes the option changes made by SetupReader. For the first four options a null
    // "previous" value means SetupReader did not change them.
    // reset reader back to previous options
    if (previousCulture != null)
        reader.Culture = previousCulture;
    if (previousDateTimeZoneHandling != null)
        reader.DateTimeZoneHandling = previousDateTimeZoneHandling.Value;
    if (previousDateParseHandling != null)
        reader.DateParseHandling = previousDateParseHandling.Value;
    if (previousFloatParseHandling != null)
        reader.FloatParseHandling = previousFloatParseHandling.Value;
    // NOTE(review): these two restore whenever the serializer-level flag is set. If
    // SetupReader left previousMaxDepth/previousDateFormatString null because the reader
    // already had the configured value, the reader's value is overwritten with null here.
    // Confirm SetupReader always captures the previous value when the flag is set.
    if (_maxDepthSet)
        reader.MaxDepth = previousMaxDepth;
    if (_dateFormatStringSet)
        reader.DateFormatString = previousDateFormatString;
    // Drop the interning name table that SetupReader attached to a text reader.
    JsonTextReader textReader = reader as JsonTextReader;
    if (textReader != null)
        textReader.NameTable = null;
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="TextWriter"/>.
/// </summary>
/// <param name="textWriter">The <see cref="TextWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
public void Serialize(TextWriter textWriter, object value)
{
    // Wrap the plain TextWriter in a JsonTextWriter and defer to the JsonWriter overload.
    JsonTextWriter jsonWriter = new JsonTextWriter(textWriter);
    Serialize(jsonWriter, value);
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="TextWriter"/>.
/// </summary>
/// <param name="jsonWriter">The <see cref="JsonWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
/// <param name="objectType">
/// The type of the value being serialized.
/// This parameter is used when <see cref="TypeNameHandling"/> is Auto to write out the type name if the type of the value does not match.
/// Specifying the type is optional.
/// </param>
public void Serialize(JsonWriter jsonWriter, object value, Type objectType)
{
    // Delegates to the virtual core so derived serializers can intercept serialization.
    SerializeInternal(jsonWriter, value, objectType);
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="TextWriter"/>.
/// </summary>
/// <param name="textWriter">The <see cref="TextWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
/// <param name="objectType">
/// The type of the value being serialized.
/// This parameter is used when <see cref="TypeNameHandling"/> is Auto to write out the type name if the type of the value does not match.
/// Specifying the type is optional.
/// </param>
public void Serialize(TextWriter textWriter, object value, Type objectType)
{
    // Wrap the plain TextWriter in a JsonTextWriter and defer to the JsonWriter overload.
    JsonTextWriter jsonWriter = new JsonTextWriter(textWriter);
    Serialize(jsonWriter, value, objectType);
}
/// <summary>
/// Serializes the specified <see cref="Object"/> and writes the Json structure
/// to a <c>Stream</c> using the specified <see cref="JsonWriter"/>.
/// </summary>
/// <param name="jsonWriter">The <see cref="JsonWriter"/> used to write the Json structure.</param>
/// <param name="value">The <see cref="Object"/> to serialize.</param>
public void Serialize(JsonWriter jsonWriter, object value)
{
    // null objectType: no declared root type is supplied (see the objectType overloads).
    SerializeInternal(jsonWriter, value, null);
}
internal virtual void SerializeInternal(JsonWriter jsonWriter, object value, Type objectType)
{
    // Core of Serialize: writes value to jsonWriter, temporarily pushing this
    // serializer's configured options onto the writer and restoring the writer's
    // original options afterwards. objectType (may be null) is the declared root type.
    ValidationUtils.ArgumentNotNull(jsonWriter, "jsonWriter");
    // set serialization options onto writer
    Formatting? previousFormatting = null;
    if (_formatting != null && jsonWriter.Formatting != _formatting)
    {
        previousFormatting = jsonWriter.Formatting;
        jsonWriter.Formatting = _formatting.Value;
    }
    DateFormatHandling? previousDateFormatHandling = null;
    if (_dateFormatHandling != null && jsonWriter.DateFormatHandling != _dateFormatHandling)
    {
        previousDateFormatHandling = jsonWriter.DateFormatHandling;
        jsonWriter.DateFormatHandling = _dateFormatHandling.Value;
    }
    DateTimeZoneHandling? previousDateTimeZoneHandling = null;
    if (_dateTimeZoneHandling != null && jsonWriter.DateTimeZoneHandling != _dateTimeZoneHandling)
    {
        previousDateTimeZoneHandling = jsonWriter.DateTimeZoneHandling;
        jsonWriter.DateTimeZoneHandling = _dateTimeZoneHandling.Value;
    }
    FloatFormatHandling? previousFloatFormatHandling = null;
    if (_floatFormatHandling != null && jsonWriter.FloatFormatHandling != _floatFormatHandling)
    {
        previousFloatFormatHandling = jsonWriter.FloatFormatHandling;
        jsonWriter.FloatFormatHandling = _floatFormatHandling.Value;
    }
    StringEscapeHandling? previousStringEscapeHandling = null;
    if (_stringEscapeHandling != null && jsonWriter.StringEscapeHandling != _stringEscapeHandling)
    {
        previousStringEscapeHandling = jsonWriter.StringEscapeHandling;
        jsonWriter.StringEscapeHandling = _stringEscapeHandling.Value;
    }
    CultureInfo previousCulture = null;
    if (_culture != null && !_culture.Equals(jsonWriter.Culture))
    {
        previousCulture = jsonWriter.Culture;
        jsonWriter.Culture = _culture;
    }
    // The restore step below assigns DateFormatString whenever _dateFormatStringSet is
    // true, so the previous value must be captured unconditionally. Capturing it only
    // when the values differed (the old behavior) clobbered the writer's
    // DateFormatString with null when the writer already had the configured format.
    string previousDateFormatString = null;
    if (_dateFormatStringSet)
    {
        previousDateFormatString = jsonWriter.DateFormatString;
        jsonWriter.DateFormatString = _dateFormatString;
    }
    // When verbose tracing is on, wrap the writer so the produced JSON can be logged.
    TraceJsonWriter traceJsonWriter = (TraceWriter != null && TraceWriter.LevelFilter >= TraceLevel.Verbose)
        ? new TraceJsonWriter(jsonWriter)
        : null;
    JsonSerializerInternalWriter serializerWriter = new JsonSerializerInternalWriter(this);
    serializerWriter.Serialize(traceJsonWriter ?? jsonWriter, value, objectType);
    if (traceJsonWriter != null)
        TraceWriter.Trace(TraceLevel.Verbose, "Serialized JSON: " + Environment.NewLine + traceJsonWriter.GetJson(), null);
    // reset writer back to previous options
    if (previousFormatting != null)
        jsonWriter.Formatting = previousFormatting.Value;
    if (previousDateFormatHandling != null)
        jsonWriter.DateFormatHandling = previousDateFormatHandling.Value;
    if (previousDateTimeZoneHandling != null)
        jsonWriter.DateTimeZoneHandling = previousDateTimeZoneHandling.Value;
    if (previousFloatFormatHandling != null)
        jsonWriter.FloatFormatHandling = previousFloatFormatHandling.Value;
    if (previousStringEscapeHandling != null)
        jsonWriter.StringEscapeHandling = previousStringEscapeHandling.Value;
    if (_dateFormatStringSet)
        jsonWriter.DateFormatString = previousDateFormatString;
    if (previousCulture != null)
        jsonWriter.Culture = previousCulture;
}
internal IReferenceResolver GetReferenceResolver()
{
    // Lazily create the default resolver on first use.
    return _referenceResolver ?? (_referenceResolver = new DefaultReferenceResolver());
}
internal JsonConverter GetMatchingConverter(Type type)
{
    // Searches this serializer's own converter collection for the first match.
    return GetMatchingConverter(_converters, type);
}
internal static JsonConverter GetMatchingConverter(IList<JsonConverter> converters, Type objectType)
{
    // Returns the first registered converter able to handle objectType, or null
    // when the list is null or no converter matches. Registration order wins.
#if DEBUG
    ValidationUtils.ArgumentNotNull(objectType, "objectType");
#endif
    if (converters == null)
        return null;

    foreach (JsonConverter candidate in converters)
    {
        if (candidate.CanConvert(objectType))
            return candidate;
    }
    return null;
}
internal void OnError(ErrorEventArgs e)
{
    // Copy the delegate to a local so a concurrent unsubscribe cannot null it between
    // the check and the invocation.
    EventHandler<ErrorEventArgs> handler = Error;
    if (handler != null)
    {
        handler(this, e);
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Text;
using System.Threading;
using System.Security;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Globalization;
using System.Runtime.Serialization;
using System.Runtime.Versioning;
namespace System.Diagnostics
{
// READ ME:
// Modifying the order or fields of this object may require other changes
// to the unmanaged definition of the StackFrameHelper class, in
// VM\DebugDebugger.h. The binder will catch some of these layout problems.
internal class StackFrameHelper : IDisposable
{
    [NonSerialized]
    private Thread targetThread;
    // Per-frame parallel arrays: index i describes the i-th captured frame.
    // Filled by the EE via GetStackFramesInternal; layout must match the unmanaged
    // StackFrameHelper definition (see the READ ME note above this class).
    private int[] rgiOffset;
    private int[] rgiILOffset;
    // this field is here only for backwards compatibility of serialization format
    private MethodBase[] rgMethodBase;
#pragma warning disable 414
    // dynamicMethods is an array of System.Resolver objects, used to keep
    // DynamicMethodDescs alive for the lifetime of StackFrameHelper.
    private Object dynamicMethods; // Field is not used from managed.
    [NonSerialized]
    private IntPtr[] rgMethodHandle;
    private String[] rgAssemblyPath;
    private IntPtr[] rgLoadedPeAddress;
    private int[] rgiLoadedPeSize;
    private IntPtr[] rgInMemoryPdbAddress;
    private int[] rgiInMemoryPdbSize;
    // if rgiMethodToken[i] == 0, then don't attempt to get the portable PDB source/info
    private int[] rgiMethodToken;
    private String[] rgFilename;
    private int[] rgiLineNumber;
    private int[] rgiColumnNumber;
    [OptionalField]
    private bool[] rgiLastFrameFromForeignExceptionStackTrace;
    // Bound at runtime (via reflection) to StackTraceSymbols.GetSourceLineInfo.
    private GetSourceLineInfoDelegate getSourceLineInfo;
    private int iFrameCount;
#pragma warning restore 414
    private delegate void GetSourceLineInfoDelegate(string assemblyPath, IntPtr loadedPeAddress, int loadedPeSize,
        IntPtr inMemoryPdbAddress, int inMemoryPdbSize, int methodToken, int ilOffset,
        out string sourceFile, out int sourceLine, out int sourceColumn);
    // Cached reflection handles for the optional portable-PDB symbol reader assembly;
    // resolved at most once per process.
    private static Type s_symbolsType = null;
    private static MethodInfo s_symbolsMethodInfo = null;
    // Guards InitializeSourceInfo against reentrancy when the symbol-loading code below
    // itself throws and that exception captures a stack trace.
    [ThreadStatic]
    private static int t_reentrancy = 0;
    public StackFrameHelper(Thread target)
    {
        targetThread = target;
        rgMethodBase = null;
        rgMethodHandle = null;
        rgiMethodToken = null;
        rgiOffset = null;
        rgiILOffset = null;
        rgAssemblyPath = null;
        rgLoadedPeAddress = null;
        rgiLoadedPeSize = null;
        rgInMemoryPdbAddress = null;
        rgiInMemoryPdbSize = null;
        dynamicMethods = null;
        rgFilename = null;
        rgiLineNumber = null;
        rgiColumnNumber = null;
        getSourceLineInfo = null;
        rgiLastFrameFromForeignExceptionStackTrace = null;
        // 0 means capture all frames. For StackTraces from an Exception, the EE always
        // captures all frames. For other uses of StackTraces, we can abort stack walking after
        // some limit if we want to by setting this to a non-zero value. In Whidbey this was
        // hard-coded to 512, but some customers complained. There shouldn't be any need to limit
        // this as memory/CPU is no longer allocated up front. If there is some reason to provide a
        // limit in the future, then we should expose it in the managed API so applications can
        // override it.
        iFrameCount = 0;
    }
    //
    // Initializes the stack trace helper. If fNeedFileInfo is true, initializes rgFilename,
    // rgiLineNumber and rgiColumnNumber fields using the portable PDB reader if not already
    // done by GetStackFramesInternal (on Windows for old PDB format).
    //
    internal void InitializeSourceInfo(int iSkip, bool fNeedFileInfo, Exception exception)
    {
        StackTrace.GetStackFramesInternal(this, iSkip, fNeedFileInfo, exception);
        if (!fNeedFileInfo)
            return;
        // Check if this function is being reentered because of an exception in the code below
        if (t_reentrancy > 0)
            return;
        t_reentrancy++;
        try
        {
            // Late-bind to the optional System.Diagnostics.StackTrace package; if it is
            // not available, silently skip source/line resolution.
            if (s_symbolsMethodInfo == null)
            {
                s_symbolsType = Type.GetType(
                    "System.Diagnostics.StackTraceSymbols, System.Diagnostics.StackTrace, Version=4.0.1.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a",
                    throwOnError: false);
                if (s_symbolsType == null)
                    return;
                s_symbolsMethodInfo = s_symbolsType.GetMethod("GetSourceLineInfo", BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance);
                if (s_symbolsMethodInfo == null)
                    return;
            }
            if (getSourceLineInfo == null)
            {
                // Create an instance of System.Diagnostics.StackTraceSymbols
                object target = Activator.CreateInstance(s_symbolsType);
                // Create an instance delegate for the GetSourceLineInfo method
                getSourceLineInfo = (GetSourceLineInfoDelegate)s_symbolsMethodInfo.CreateDelegate(typeof(GetSourceLineInfoDelegate), target);
            }
            for (int index = 0; index < iFrameCount; index++)
            {
                // If there was some reason not to try get the symbols from the portable PDB reader like the module was
                // ENC or the source/line info was already retrieved, the method token is 0.
                if (rgiMethodToken[index] != 0)
                {
                    getSourceLineInfo(rgAssemblyPath[index], rgLoadedPeAddress[index], rgiLoadedPeSize[index],
                        rgInMemoryPdbAddress[index], rgiInMemoryPdbSize[index], rgiMethodToken[index],
                        rgiILOffset[index], out rgFilename[index], out rgiLineNumber[index], out rgiColumnNumber[index]);
                }
            }
        }
        catch
        {
            // Deliberate best-effort: source/line info is optional diagnostics and must
            // never fail stack-trace capture itself.
        }
        finally
        {
            t_reentrancy--;
        }
    }
    void IDisposable.Dispose()
    {
        // The symbol reader instance bound into the delegate may own native PDB readers;
        // dispose it if it supports disposal.
        if (getSourceLineInfo != null)
        {
            IDisposable disposable = getSourceLineInfo.Target as IDisposable;
            if (disposable != null)
            {
                disposable.Dispose();
            }
        }
    }
    public virtual MethodBase GetMethodBase(int i)
    {
        // There may be a better way to do this.
        // we got RuntimeMethodHandles here and we need to go to MethodBase
        // but we don't know whether the reflection info has been initialized
        // or not. So we call GetMethods and GetConstructors on the type
        // and then we fetch the proper MethodBase!!
        IntPtr mh = rgMethodHandle[i];
        if (mh.IsNull())
            return null;
        IRuntimeMethodInfo mhReal = RuntimeMethodHandle.GetTypicalMethodDefinition(new RuntimeMethodInfoStub(mh, this));
        return RuntimeType.GetMethodBase(mhReal);
    }
    public virtual int GetOffset(int i) { return rgiOffset[i]; }
    public virtual int GetILOffset(int i) { return rgiILOffset[i]; }
    // File/line/column arrays are only populated when file info was requested; treat
    // a null array as "no info".
    public virtual String GetFilename(int i) { return rgFilename == null ? null : rgFilename[i]; }
    public virtual int GetLineNumber(int i) { return rgiLineNumber == null ? 0 : rgiLineNumber[i]; }
    public virtual int GetColumnNumber(int i) { return rgiColumnNumber == null ? 0 : rgiColumnNumber[i]; }
    public virtual bool IsLastFrameFromForeignExceptionStackTrace(int i)
    {
        return (rgiLastFrameFromForeignExceptionStackTrace == null) ? false : rgiLastFrameFromForeignExceptionStackTrace[i];
    }
    public virtual int GetNumberOfFrames() { return iFrameCount; }
    //
    // serialization implementation
    //
    [OnSerializing]
    private void OnSerializing(StreamingContext context)
    {
        // this is called in the process of serializing this object.
        // For compatibility with Everett we need to assign the rgMethodBase field as that is the field
        // that will be serialized
        rgMethodBase = (rgMethodHandle == null) ? null : new MethodBase[rgMethodHandle.Length];
        if (rgMethodHandle != null)
        {
            for (int i = 0; i < rgMethodHandle.Length; i++)
            {
                if (!rgMethodHandle[i].IsNull())
                    rgMethodBase[i] = RuntimeType.GetMethodBase(new RuntimeMethodInfoStub(rgMethodHandle[i], this));
            }
        }
    }
    [OnSerialized]
    private void OnSerialized(StreamingContext context)
    {
        // after we are done serializing null the rgMethodBase field
        rgMethodBase = null;
    }
    [OnDeserialized]
    private void OnDeserialized(StreamingContext context)
    {
        // after we are done deserializing we need to transform the rgMethodBase in rgMethodHandle
        rgMethodHandle = (rgMethodBase == null) ? null : new IntPtr[rgMethodBase.Length];
        if (rgMethodBase != null)
        {
            for (int i = 0; i < rgMethodBase.Length; i++)
            {
                if (rgMethodBase[i] != null)
                    rgMethodHandle[i] = rgMethodBase[i].MethodHandle.Value;
            }
        }
        rgMethodBase = null;
    }
}
// Class which represents a description of a stack trace
// There is no good reason for the methods of this class to be virtual.
public class StackTrace
{
    // Captured frames; GetFrame(i) indexes this array at i + m_iMethodsToSkip.
    private StackFrame[] frames;
    private int m_iNumOfFrames;
    public const int METHODS_TO_SKIP = 0;
    private int m_iMethodsToSkip;
    // Constructs a stack trace from the current location.
    public StackTrace()
    {
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(METHODS_TO_SKIP, false, null, null);
    }
    // Constructs a stack trace from the current location.
    //
    public StackTrace(bool fNeedFileInfo)
    {
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(METHODS_TO_SKIP, fNeedFileInfo, null, null);
    }
    // Constructs a stack trace from the current location, in a caller's
    // frame
    //
    public StackTrace(int skipFrames)
    {
        if (skipFrames < 0)
            throw new ArgumentOutOfRangeException(nameof(skipFrames),
                SR.ArgumentOutOfRange_NeedNonNegNum);
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(skipFrames + METHODS_TO_SKIP, false, null, null);
    }
    // Constructs a stack trace from the current location, in a caller's
    // frame
    //
    public StackTrace(int skipFrames, bool fNeedFileInfo)
    {
        if (skipFrames < 0)
            throw new ArgumentOutOfRangeException(nameof(skipFrames),
                SR.ArgumentOutOfRange_NeedNonNegNum);
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(skipFrames + METHODS_TO_SKIP, fNeedFileInfo, null, null);
    }
    // Constructs a stack trace from the current location.
    public StackTrace(Exception e)
    {
        if (e == null)
            throw new ArgumentNullException(nameof(e));
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(METHODS_TO_SKIP, false, null, e);
    }
    // Constructs a stack trace from the current location.
    //
    public StackTrace(Exception e, bool fNeedFileInfo)
    {
        if (e == null)
            throw new ArgumentNullException(nameof(e));
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(METHODS_TO_SKIP, fNeedFileInfo, null, e);
    }
    // Constructs a stack trace from the current location, in a caller's
    // frame
    //
    public StackTrace(Exception e, int skipFrames)
    {
        if (e == null)
            throw new ArgumentNullException(nameof(e));
        if (skipFrames < 0)
            throw new ArgumentOutOfRangeException(nameof(skipFrames),
                SR.ArgumentOutOfRange_NeedNonNegNum);
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(skipFrames + METHODS_TO_SKIP, false, null, e);
    }
    // Constructs a stack trace from the current location, in a caller's
    // frame
    //
    public StackTrace(Exception e, int skipFrames, bool fNeedFileInfo)
    {
        if (e == null)
            throw new ArgumentNullException(nameof(e));
        if (skipFrames < 0)
            throw new ArgumentOutOfRangeException(nameof(skipFrames),
                SR.ArgumentOutOfRange_NeedNonNegNum);
        m_iNumOfFrames = 0;
        m_iMethodsToSkip = 0;
        CaptureStackTrace(skipFrames + METHODS_TO_SKIP, fNeedFileInfo, null, e);
    }
    // Constructs a "fake" stack trace, just containing a single frame.
    // Does not have the overhead of a full stack trace.
    //
    public StackTrace(StackFrame frame)
    {
        frames = new StackFrame[1];
        frames[0] = frame;
        m_iMethodsToSkip = 0;
        m_iNumOfFrames = 1;
    }
    // Implemented in the EE; fills the StackFrameHelper's parallel arrays.
    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    internal static extern void GetStackFramesInternal(StackFrameHelper sfh, int iSkip, bool fNeedFileInfo, Exception e);
    // Counts the leading run of frames declared in the System.Diagnostics namespace so
    // the capture machinery itself can be skipped from non-exception traces.
    internal static int CalculateFramesToSkip(StackFrameHelper StackF, int iNumFrames)
    {
        int iRetVal = 0;
        String PackageName = "System.Diagnostics";
        // Check if this method is part of the System.Diagnostics
        // package. If so, increment counter keeping track of
        // System.Diagnostics functions
        for (int i = 0; i < iNumFrames; i++)
        {
            MethodBase mb = StackF.GetMethodBase(i);
            if (mb != null)
            {
                Type t = mb.DeclaringType;
                if (t == null)
                    break;
                String ns = t.Namespace;
                if (ns == null)
                    break;
                if (String.Compare(ns, PackageName, StringComparison.Ordinal) != 0)
                    break;
            }
            iRetVal++;
        }
        return iRetVal;
    }
    // Retrieves an object with stack trace information encoded.
    // It leaves out the first "iSkip" lines of the stacktrace.
    //
    private void CaptureStackTrace(int iSkip, bool fNeedFileInfo, Thread targetThread, Exception e)
    {
        m_iMethodsToSkip += iSkip;
        using (StackFrameHelper StackF = new StackFrameHelper(targetThread))
        {
            // The EE captures every frame; skipping is applied afterwards via
            // m_iMethodsToSkip rather than during the walk.
            StackF.InitializeSourceInfo(0, fNeedFileInfo, e);
            m_iNumOfFrames = StackF.GetNumberOfFrames();
            if (m_iMethodsToSkip > m_iNumOfFrames)
                m_iMethodsToSkip = m_iNumOfFrames;
            if (m_iNumOfFrames != 0)
            {
                frames = new StackFrame[m_iNumOfFrames];
                for (int i = 0; i < m_iNumOfFrames; i++)
                {
                    bool fDummy1 = true;
                    bool fDummy2 = true;
                    StackFrame sfTemp = new StackFrame(fDummy1, fDummy2);
                    sfTemp.SetMethodBase(StackF.GetMethodBase(i));
                    sfTemp.SetOffset(StackF.GetOffset(i));
                    sfTemp.SetILOffset(StackF.GetILOffset(i));
                    sfTemp.SetIsLastFrameFromForeignExceptionStackTrace(StackF.IsLastFrameFromForeignExceptionStackTrace(i));
                    if (fNeedFileInfo)
                    {
                        sfTemp.SetFileName(StackF.GetFilename(i));
                        sfTemp.SetLineNumber(StackF.GetLineNumber(i));
                        sfTemp.SetColumnNumber(StackF.GetColumnNumber(i));
                    }
                    frames[i] = sfTemp;
                }
                // CalculateFramesToSkip skips all frames in the System.Diagnostics namespace,
                // but this is not desired if building a stack trace from an exception.
                if (e == null)
                    m_iMethodsToSkip += CalculateFramesToSkip(StackF, m_iNumOfFrames);
                m_iNumOfFrames -= m_iMethodsToSkip;
                if (m_iNumOfFrames < 0)
                {
                    m_iNumOfFrames = 0;
                }
            }
            // In case this is the same object being re-used, set frames to null
            else
                frames = null;
        }
    }
    // Property to get the number of frames in the stack trace
    //
    public virtual int FrameCount
    {
        get { return m_iNumOfFrames; }
    }
    // Returns a given stack frame. Stack frames are numbered starting at
    // zero, which is the last stack frame pushed.
    //
    public virtual StackFrame GetFrame(int index)
    {
        if ((frames != null) && (index < m_iNumOfFrames) && (index >= 0))
            return frames[index + m_iMethodsToSkip];
        return null;
    }
    // Returns an array of all stack frames for this stacktrace.
    // The array is ordered and sized such that GetFrames()[i] == GetFrame(i)
    // The nth element of this array is the same as GetFrame(n).
    // The length of the array is the same as FrameCount.
    //
    public virtual StackFrame[] GetFrames()
    {
        if (frames == null || m_iNumOfFrames <= 0)
            return null;
        // We have to return a subset of the array. Unfortunately this
        // means we have to allocate a new array and copy over.
        StackFrame[] array = new StackFrame[m_iNumOfFrames];
        Array.Copy(frames, m_iMethodsToSkip, array, 0, m_iNumOfFrames);
        return array;
    }
    // Builds a readable representation of the stack trace
    //
    public override String ToString()
    {
        // Include a trailing newline for backwards compatibility
        return ToString(TraceFormat.TrailingNewLine);
    }
    // TraceFormat is Used to specify options for how the
    // string-representation of a StackTrace should be generated.
    internal enum TraceFormat
    {
        Normal,
        TrailingNewLine, // include a trailing new line character
        NoResourceLookup // to prevent infinite resource recursion
    }
    // Builds a readable representation of the stack trace, specifying
    // the format for backwards compatibility.
    internal String ToString(TraceFormat traceFormat)
    {
        bool displayFilenames = true; // we'll try, but demand may fail
        String word_At = "at";
        String inFileLineNum = "in {0}:line {1}";
        if (traceFormat != TraceFormat.NoResourceLookup)
        {
            word_At = SR.Word_At;
            inFileLineNum = SR.StackTrace_InFileLineNumber;
        }
        bool fFirstFrame = true;
        StringBuilder sb = new StringBuilder(255);
        for (int iFrameIndex = 0; iFrameIndex < m_iNumOfFrames; iFrameIndex++)
        {
            StackFrame sf = GetFrame(iFrameIndex);
            MethodBase mb = sf.GetMethod();
            if (mb != null && (ShowInStackTrace(mb) ||
                (iFrameIndex == m_iNumOfFrames - 1))) // Don't filter last frame
            {
                // We want a newline at the end of every line except for the last
                if (fFirstFrame)
                    fFirstFrame = false;
                else
                    sb.Append(Environment.NewLine);
                sb.AppendFormat(CultureInfo.InvariantCulture, "   {0} ", word_At);
                Type t = mb.DeclaringType;
                // if there is a type (non global method) print it
                if (t != null)
                {
                    // Append t.FullName, replacing '+' with '.'
                    string fullName = t.FullName;
                    for (int i = 0; i < fullName.Length; i++)
                    {
                        char ch = fullName[i];
                        sb.Append(ch == '+' ? '.' : ch);
                    }
                    sb.Append('.');
                }
                sb.Append(mb.Name);
                // deal with the generic portion of the method
                if (mb is MethodInfo && ((MethodInfo)mb).IsGenericMethod)
                {
                    Type[] typars = ((MethodInfo)mb).GetGenericArguments();
                    sb.Append('[');
                    int k = 0;
                    bool fFirstTyParam = true;
                    while (k < typars.Length)
                    {
                        if (fFirstTyParam == false)
                            sb.Append(',');
                        else
                            fFirstTyParam = false;
                        sb.Append(typars[k].Name);
                        k++;
                    }
                    sb.Append(']');
                }
                ParameterInfo[] pi = null;
                try
                {
                    pi = mb.GetParameters();
                }
                catch
                {
                    // The parameter info cannot be loaded, so we don't
                    // append the parameter list.
                }
                if (pi != null)
                {
                    // arguments printing
                    sb.Append('(');
                    bool fFirstParam = true;
                    for (int j = 0; j < pi.Length; j++)
                    {
                        if (fFirstParam == false)
                            sb.Append(", ");
                        else
                            fFirstParam = false;
                        String typeName = "<UnknownType>";
                        if (pi[j].ParameterType != null)
                            typeName = pi[j].ParameterType.Name;
                        sb.Append(typeName);
                        sb.Append(' ');
                        sb.Append(pi[j].Name);
                    }
                    sb.Append(')');
                }
                // source location printing
                if (displayFilenames && (sf.GetILOffset() != -1))
                {
                    // If we don't have a PDB or PDB-reading is disabled for the module,
                    // then the file name will be null.
                    String fileName = null;
                    // Getting the filename from a StackFrame is a privileged operation - we won't want
                    // to disclose full path names to arbitrarily untrusted code. Rather than just omit
                    // this we could probably trim to just the filename so it's still mostly useful.
                    try
                    {
                        fileName = sf.GetFileName();
                    }
                    catch (SecurityException)
                    {
                        // If the demand for displaying filenames fails, then it won't
                        // succeed later in the loop. Avoid repeated exceptions by not trying again.
                        displayFilenames = false;
                    }
                    if (fileName != null)
                    {
                        // tack on " in c:\tmp\MyFile.cs:line 5"
                        sb.Append(' ');
                        sb.AppendFormat(CultureInfo.InvariantCulture, inFileLineNum, fileName, sf.GetFileLineNumber());
                    }
                }
                if (sf.GetIsLastFrameFromForeignExceptionStackTrace())
                {
                    sb.Append(Environment.NewLine);
                    sb.Append(SR.Exception_EndStackTraceFromPreviousThrow);
                }
            }
        }
        if (traceFormat == TraceFormat.TrailingNewLine)
            sb.Append(Environment.NewLine);
        return sb.ToString();
    }
    // A frame is hidden when the method or its declaring type carries
    // [StackTraceHidden].
    private static bool ShowInStackTrace(MethodBase mb)
    {
        Debug.Assert(mb != null);
        return !(mb.IsDefined(typeof(StackTraceHiddenAttribute)) || (mb.DeclaringType?.IsDefined(typeof(StackTraceHiddenAttribute)) ?? false));
    }
    // This helper is called from within the EE to construct a string representation
    // of the current stack trace.
    private static String GetManagedStackTraceStringHelper(bool fNeedFileInfo)
    {
        // Note all the frames in System.Diagnostics will be skipped when capturing
        // a normal stack trace (not from an exception) so we don't need to explicitly
        // skip the GetManagedStackTraceStringHelper frame.
        StackTrace st = new StackTrace(0, fNeedFileInfo);
        return st.ToString();
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Versions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.SolutionCrawler
{
internal sealed partial class SolutionCrawlerRegistrationService
{
private sealed partial class WorkCoordinator
{
private sealed partial class IncrementalAnalyzerProcessor
{
/// <summary>
/// Idle-time processor for normal-priority document work items produced by the
/// solution crawler. Pulls items from an <see cref="AsyncDocumentWorkItemQueue"/>,
/// preferring documents that were flagged high priority (active/visible documents),
/// and runs the registered <see cref="IIncrementalAnalyzer"/>s over them. Pauses
/// while global operations or the high-priority queue have work.
/// </summary>
private sealed class NormalPriorityProcessor : GlobalOperationAwareIdleProcessor
{
    // Cap on how many pending high-priority documents may each hold a project
    // cache lease alive at once; beyond this, new entries get no cache (see
    // GetHighPriorityQueueProjectCache for rationale).
    private const int MaxHighPriorityQueueCache = 29;
    private readonly AsyncDocumentWorkItemQueue _workItemQueue;
    private readonly Lazy<ImmutableArray<IIncrementalAnalyzer>> _lazyAnalyzers;
    // Documents enqueued with HighPriority that have not been processed yet,
    // mapped to an optional project-cache lease (may be null) that is disposed
    // once the document is processed or found to have no pending work.
    private readonly ConcurrentDictionary<DocumentId, IDisposable> _higherPriorityDocumentsNotProcessed;
    // Projects whose semantic versions were already recorded for the snapshot
    // currently being processed; cleared whenever the solution changes.
    private readonly HashSet<ProjectId> _currentSnapshotVersionTrackingSet;
    private ProjectId _currentProjectProcessing;
    private Solution _processingSolution;
    // Cache lease for the project currently being processed; replaced when we
    // move to a different project.
    private IDisposable _projectCache;
    // whether this processor is running or not
    private Task _running;
    public NormalPriorityProcessor(
        IAsynchronousOperationListener listener,
        IncrementalAnalyzerProcessor processor,
        Lazy<ImmutableArray<IIncrementalAnalyzer>> lazyAnalyzers,
        IGlobalOperationNotificationService globalOperationNotificationService,
        int backOffTimeSpanInMs,
        CancellationToken shutdownToken) :
        base(listener, processor, globalOperationNotificationService, backOffTimeSpanInMs, shutdownToken)
    {
        _lazyAnalyzers = lazyAnalyzers;
        _running = SpecializedTasks.EmptyTask;
        _workItemQueue = new AsyncDocumentWorkItemQueue(processor._registration.ProgressReporter, processor._registration.Workspace);
        _higherPriorityDocumentsNotProcessed = new ConcurrentDictionary<DocumentId, IDisposable>(concurrencyLevel: 2, capacity: 20);
        _currentProjectProcessing = default(ProjectId);
        _processingSolution = null;
        _currentSnapshotVersionTrackingSet = new HashSet<ProjectId>();
        Start();
    }
    internal ImmutableArray<IIncrementalAnalyzer> Analyzers
    {
        get
        {
            return _lazyAnalyzers.Value;
        }
    }
    /// <summary>
    /// Adds (or replaces) a document work item in the queue and records
    /// telemetry. Only document-scoped items are accepted.
    /// </summary>
    public void Enqueue(WorkItem item)
    {
        Contract.ThrowIfFalse(item.DocumentId != null, "can only enqueue a document work item");
        this.UpdateLastAccessTime();
        var added = _workItemQueue.AddOrReplace(item);
        Logger.Log(FunctionId.WorkCoordinator_DocumentWorker_Enqueue, s_enqueueLogger, Environment.TickCount, item.DocumentId, !added);
        CheckHigherPriorityDocument(item);
        SolutionCrawlerLogger.LogWorkItemEnqueue(
            this.Processor._logAggregator, item.Language, item.DocumentId, item.InvocationReasons, item.IsLowPriority, item.ActiveMember, added);
    }
    // Promotes the item's document into the high-priority set when the work
    // item was enqueued with the HighPriority invocation reason.
    private void CheckHigherPriorityDocument(WorkItem item)
    {
        if (!item.InvocationReasons.Contains(PredefinedInvocationReasons.HighPriority))
        {
            return;
        }
        AddHigherPriorityDocument(item.DocumentId);
    }
    private void AddHigherPriorityDocument(DocumentId id)
    {
        var cache = GetHighPriorityQueueProjectCache(id);
        if (!_higherPriorityDocumentsNotProcessed.TryAdd(id, cache))
        {
            // we already have the document in the queue.
            cache?.Dispose();
        }
        SolutionCrawlerLogger.LogHigherPriority(this.Processor._logAggregator, id.Id);
    }
    private IDisposable GetHighPriorityQueueProjectCache(DocumentId id)
    {
        // NOTE: we have one potential issue where we can cache a lot of stuff in memory
        // since we will cache all high prioirty work's projects in memory until they are processed.
        //
        // To mitigate that, we will turn off cache if we have too many items in high priority queue
        // this shouldn't affect active file since we always enable active file cache from background compiler.
        return _higherPriorityDocumentsNotProcessed.Count <= MaxHighPriorityQueueCache ? Processor.EnableCaching(id.ProjectId) : null;
    }
    protected override Task WaitAsync(CancellationToken cancellationToken)
    {
        // Release the project cache while idle so we don't pin compilations in
        // memory when there is nothing to do.
        if (!_workItemQueue.HasAnyWork)
        {
            DisposeProjectCache();
        }
        return _workItemQueue.WaitAsync(cancellationToken);
    }
    // Completes when the current ExecuteAsync pass finishes; observed by the
    // low-priority processor to yield to this queue.
    public Task Running
    {
        get
        {
            return _running;
        }
    }
    public bool HasAnyWork
    {
        get
        {
            return _workItemQueue.HasAnyWork;
        }
    }
    /// <summary>
    /// One pump iteration: waits out higher-priority operations, refreshes
    /// per-solution state, then processes a single document — a high-priority
    /// one if available, otherwise any pending item.
    /// </summary>
    protected override async Task ExecuteAsync()
    {
        if (this.CancellationToken.IsCancellationRequested)
        {
            return;
        }
        var source = new TaskCompletionSource<object>();
        try
        {
            // mark it as running
            _running = source.Task;
            await WaitForHigherPriorityOperationsAsync().ConfigureAwait(false);
            // okay, there must be at least one item in the map
            await ResetStatesAsync().ConfigureAwait(false);
            if (await TryProcessOneHigherPriorityDocumentAsync().ConfigureAwait(false))
            {
                // successfully processed a high priority document.
                return;
            }
            // process one of documents remaining
            var documentCancellation = default(CancellationTokenSource);
            WorkItem workItem;
            if (!_workItemQueue.TryTakeAnyWork(
                _currentProjectProcessing, this.Processor.DependencyGraph, this.Processor.DiagnosticAnalyzerService,
                out workItem, out documentCancellation))
            {
                return;
            }
            // check whether we have been shutdown
            if (this.CancellationToken.IsCancellationRequested)
            {
                return;
            }
            // check whether we have moved to new project
            SetProjectProcessing(workItem.ProjectId);
            // process the new document
            await ProcessDocumentAsync(this.Analyzers, workItem, documentCancellation).ConfigureAwait(false);
        }
        catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
        {
            throw ExceptionUtilities.Unreachable;
        }
        finally
        {
            // mark it as done running
            source.SetResult(null);
        }
    }
    protected override Task HigherQueueOperationTask
    {
        get
        {
            return this.Processor._highPriorityProcessor.Running;
        }
    }
    protected override bool HigherQueueHasWorkItem
    {
        get
        {
            return this.Processor._highPriorityProcessor.HasAnyWork;
        }
    }
    protected override void PauseOnGlobalOperation()
    {
        _workItemQueue.RequestCancellationOnRunningTasks();
    }
    private void SetProjectProcessing(ProjectId currentProject)
    {
        EnableProjectCacheIfNecessary(currentProject);
        _currentProjectProcessing = currentProject;
    }
    // Swaps the project cache lease when processing moves to a different project.
    private void EnableProjectCacheIfNecessary(ProjectId currentProject)
    {
        if (_projectCache != null && currentProject == _currentProjectProcessing)
        {
            return;
        }
        DisposeProjectCache();
        _projectCache = Processor.EnableCaching(currentProject);
    }
    private static void DisposeProjectCache(IDisposable projectCache)
    {
        projectCache?.Dispose();
    }
    private void DisposeProjectCache()
    {
        DisposeProjectCache(_projectCache);
        _projectCache = null;
    }
    // Yields pending high-priority documents in preference order: the active
    // document first, then visible documents, then everything else. A document
    // may be yielded more than once; callers tolerate that by re-checking the
    // work item queue.
    private IEnumerable<DocumentId> GetPrioritizedPendingDocuments()
    {
        if (this.Processor._documentTracker != null)
        {
            // First the active document
            var activeDocumentId = this.Processor._documentTracker.GetActiveDocument();
            if (activeDocumentId != null && _higherPriorityDocumentsNotProcessed.ContainsKey(activeDocumentId))
            {
                yield return activeDocumentId;
            }
            // Now any visible documents
            foreach (var visibleDocumentId in this.Processor._documentTracker.GetVisibleDocuments())
            {
                if (_higherPriorityDocumentsNotProcessed.ContainsKey(visibleDocumentId))
                {
                    yield return visibleDocumentId;
                }
            }
        }
        // Any other high priority documents
        foreach (var documentId in _higherPriorityDocumentsNotProcessed.Keys)
        {
            yield return documentId;
        }
    }
    /// <summary>
    /// Tries to process one document from the high-priority set. Returns true
    /// when a document was processed (or we are shutting down), false when the
    /// set had no document with pending work.
    /// </summary>
    private async Task<bool> TryProcessOneHigherPriorityDocumentAsync()
    {
        try
        {
            // this is a best effort algorithm with some shortcomings.
            //
            // the most obvious issue is if there is a new work item (without a solution change - but very unlikely)
            // for a opened document we already processed, the work item will be treated as a regular one rather than higher priority one
            // (opened document)
            CancellationTokenSource documentCancellation;
            foreach (var documentId in this.GetPrioritizedPendingDocuments())
            {
                if (this.CancellationToken.IsCancellationRequested)
                {
                    return true;
                }
                // see whether we have work item for the document
                WorkItem workItem;
                if (!_workItemQueue.TryTake(documentId, out workItem, out documentCancellation))
                {
                    RemoveHigherPriorityDocument(documentId);
                    continue;
                }
                // okay now we have work to do
                await ProcessDocumentAsync(this.Analyzers, workItem, documentCancellation).ConfigureAwait(false);
                RemoveHigherPriorityDocument(documentId);
                return true;
            }
            return false;
        }
        catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
        {
            throw ExceptionUtilities.Unreachable;
        }
    }
    private void RemoveHigherPriorityDocument(DocumentId documentId)
    {
        // remove opened document processed
        IDisposable projectCache;
        if (_higherPriorityDocumentsNotProcessed.TryRemove(documentId, out projectCache))
        {
            DisposeProjectCache(projectCache);
        }
    }
    /// <summary>
    /// Runs the given analyzers over one document work item. On cancellation or
    /// partial processing, re-enqueues the item as a retry so no flags are lost.
    /// </summary>
    private async Task ProcessDocumentAsync(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, CancellationTokenSource source)
    {
        if (this.CancellationToken.IsCancellationRequested)
        {
            return;
        }
        var processedEverything = false;
        var documentId = workItem.DocumentId;
        try
        {
            using (Logger.LogBlock(FunctionId.WorkCoordinator_ProcessDocumentAsync, source.Token))
            {
                var cancellationToken = source.Token;
                var document = _processingSolution.GetDocument(documentId);
                if (document != null)
                {
                    await TrackSemanticVersionsAsync(document, workItem, cancellationToken).ConfigureAwait(false);
                    // if we are called because a document is opened, we invalidate the document so that
                    // it can be re-analyzed. otherwise, since newly opened document has same version as before
                    // analyzer will simply return same data back
                    if (workItem.MustRefresh && !workItem.IsRetry)
                    {
                        var isOpen = document.IsOpen();
                        await ProcessOpenDocumentIfNeeded(analyzers, workItem, document, isOpen, cancellationToken).ConfigureAwait(false);
                        await ProcessCloseDocumentIfNeeded(analyzers, workItem, document, isOpen, cancellationToken).ConfigureAwait(false);
                    }
                    // check whether we are having special reanalyze request
                    await ProcessReanalyzeDocumentAsync(workItem, document, cancellationToken).ConfigureAwait(false);
                    await ProcessDocumentAnalyzersAsync(document, analyzers, workItem, cancellationToken).ConfigureAwait(false);
                }
                else
                {
                    SolutionCrawlerLogger.LogProcessDocumentNotExist(this.Processor._logAggregator);
                    RemoveDocument(documentId);
                }
                if (!cancellationToken.IsCancellationRequested)
                {
                    processedEverything = true;
                }
            }
        }
        catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
        {
            throw ExceptionUtilities.Unreachable;
        }
        finally
        {
            // we got cancelled in the middle of processing the document.
            // let's make sure newly enqueued work item has all the flag needed.
            if (!processedEverything)
            {
                _workItemQueue.AddOrReplace(workItem.Retry(this.Listener.BeginAsyncOperation("ReenqueueWorkItem")));
            }
            SolutionCrawlerLogger.LogProcessDocument(this.Processor._logAggregator, documentId.Id, processedEverything);
            // remove one that is finished running
            _workItemQueue.RemoveCancellationSource(workItem.DocumentId);
        }
    }
    // Records semantic versions for the document's project once per snapshot,
    // but only for fresh syntax-change work (not retries, adds, or items
    // without a syntax change).
    private async Task TrackSemanticVersionsAsync(Document document, WorkItem workItem, CancellationToken cancellationToken)
    {
        if (workItem.IsRetry ||
            workItem.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentAdded) ||
            !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SyntaxChanged))
        {
            return;
        }
        var service = document.Project.Solution.Workspace.Services.GetService<ISemanticVersionTrackingService>();
        if (service == null)
        {
            return;
        }
        // we already reported about this project for same snapshot, don't need to do it again
        if (_currentSnapshotVersionTrackingSet.Contains(document.Project.Id))
        {
            return;
        }
        await service.RecordSemanticVersionsAsync(document.Project, cancellationToken).ConfigureAwait(false);
        // mark this project as already processed.
        _currentSnapshotVersionTrackingSet.Add(document.Project.Id);
    }
    private async Task ProcessOpenDocumentIfNeeded(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, Document document, bool isOpen, CancellationToken cancellationToken)
    {
        if (!isOpen || !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentOpened))
        {
            return;
        }
        SolutionCrawlerLogger.LogProcessOpenDocument(this.Processor._logAggregator, document.Id.Id);
        await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.DocumentOpenAsync(d, c), cancellationToken).ConfigureAwait(false);
    }
    private async Task ProcessCloseDocumentIfNeeded(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, Document document, bool isOpen, CancellationToken cancellationToken)
    {
        if (isOpen || !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.DocumentClosed))
        {
            return;
        }
        SolutionCrawlerLogger.LogProcessCloseDocument(this.Processor._logAggregator, document.Id.Id);
        await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.DocumentCloseAsync(d, c), cancellationToken).ConfigureAwait(false);
    }
    // Handles explicit re-analysis requests: resets the document in the
    // requested analyzers, then re-runs syntax/semantic analysis that is not
    // already covered by the item's invocation reasons.
    private async Task ProcessReanalyzeDocumentAsync(WorkItem workItem, Document document, CancellationToken cancellationToken)
    {
        try
        {
#if DEBUG
            Contract.Requires(!workItem.InvocationReasons.Contains(PredefinedInvocationReasons.Reanalyze) || workItem.Analyzers.Count > 0);
#endif
            // no-reanalyze request or we already have a request to re-analyze every thing
            if (workItem.MustRefresh || !workItem.InvocationReasons.Contains(PredefinedInvocationReasons.Reanalyze))
            {
                return;
            }
            // First reset the document state in analyzers.
            var reanalyzers = workItem.Analyzers.ToImmutableArray();
            await RunAnalyzersAsync(reanalyzers, document, (a, d, c) => a.DocumentResetAsync(d, c), cancellationToken).ConfigureAwait(false);
            // no request to re-run syntax change analysis. run it here
            var reasons = workItem.InvocationReasons;
            if (!reasons.Contains(PredefinedInvocationReasons.SyntaxChanged))
            {
                await RunAnalyzersAsync(reanalyzers, document, (a, d, c) => a.AnalyzeSyntaxAsync(d, reasons, c), cancellationToken).ConfigureAwait(false);
            }
            // no request to re-run semantic change analysis. run it here
            if (!workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SemanticChanged))
            {
                await RunAnalyzersAsync(reanalyzers, document, (a, d, c) => a.AnalyzeDocumentAsync(d, null, reasons, c), cancellationToken).ConfigureAwait(false);
            }
        }
        catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
        {
            throw ExceptionUtilities.Unreachable;
        }
    }
    private void RemoveDocument(DocumentId documentId)
    {
        RemoveDocument(this.Analyzers, documentId);
    }
    // Notifies every analyzer that the document no longer exists.
    private static void RemoveDocument(IEnumerable<IIncrementalAnalyzer> analyzers, DocumentId documentId)
    {
        foreach (var analyzer in analyzers)
        {
            analyzer.RemoveDocument(documentId);
        }
    }
    // Flushes accumulated per-solution statistics when we switch to a
    // different solution (different Id), then resets the aggregator.
    private void ResetLogAggregatorIfNeeded(Solution currentSolution)
    {
        if (currentSolution == null || _processingSolution == null ||
            currentSolution.Id == _processingSolution.Id)
        {
            return;
        }
        SolutionCrawlerLogger.LogIncrementalAnalyzerProcessorStatistics(
            this.Processor._registration.CorrelationId, _processingSolution, this.Processor._logAggregator, this.Analyzers);
        this.Processor.ResetLogAggregator();
    }
    // Refreshes per-solution state when the crawler's current solution snapshot
    // has changed: notifies analyzers of the new snapshot and re-promotes all
    // open documents to high priority.
    private async Task ResetStatesAsync()
    {
        try
        {
            var currentSolution = this.Processor.CurrentSolution;
            if (currentSolution != _processingSolution)
            {
                ResetLogAggregatorIfNeeded(currentSolution);
                // clear version tracking set we already reported.
                _currentSnapshotVersionTrackingSet.Clear();
                _processingSolution = currentSolution;
                await RunAnalyzersAsync(this.Analyzers, currentSolution, (a, s, c) => a.NewSolutionSnapshotAsync(s, c), this.CancellationToken).ConfigureAwait(false);
                foreach (var id in this.Processor.GetOpenDocumentIds())
                {
                    AddHigherPriorityDocument(id);
                }
                SolutionCrawlerLogger.LogResetStates(this.Processor._logAggregator);
            }
        }
        catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
        {
            throw ExceptionUtilities.Unreachable;
        }
    }
    public override void Shutdown()
    {
        base.Shutdown();
        SolutionCrawlerLogger.LogIncrementalAnalyzerProcessorStatistics(this.Processor._registration.CorrelationId, _processingSolution, this.Processor._logAggregator, this.Analyzers);
        _workItemQueue.Dispose();
        // NOTE(review): this duplicates DisposeProjectCache() inline — consider
        // calling that helper instead; behavior is identical today.
        if (_projectCache != null)
        {
            _projectCache.Dispose();
            _projectCache = null;
        }
    }
    // Test hook: synchronously drains the given items through the normal
    // document-processing path against the crawler's current solution.
    internal void WaitUntilCompletion_ForTestingPurposesOnly(ImmutableArray<IIncrementalAnalyzer> analyzers, List<WorkItem> items)
    {
        CancellationTokenSource source = new CancellationTokenSource();
        _processingSolution = this.Processor.CurrentSolution;
        foreach (var item in items)
        {
            ProcessDocumentAsync(analyzers, item, source).Wait();
        }
    }
    internal void WaitUntilCompletion_ForTestingPurposesOnly()
    {
        // this shouldn't happen. would like to get some diagnostic
        while (_workItemQueue.HasAnyWork)
        {
            Environment.FailFast("How?");
        }
    }
}
}
}
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
namespace System.Runtime.Serialization
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
using System.Xml;
using System.Security;
#if USE_REFEMIT
public abstract class PrimitiveDataContract : DataContract
#else
// Base class for the built-in data contracts that map .NET primitive types to
// XML Schema primitives. Each derived contract names the XmlWriterDelegator /
// XmlReaderDelegator methods used by the generated (de)serializers and caches
// the corresponding MethodInfos inside a security-critical helper.
internal abstract class PrimitiveDataContract : DataContract
#endif
    {
        [Fx.Tag.SecurityNote(Critical = "Holds instance of CriticalHelper which keeps state that is cached statically for serialization."
            + " Static fields are marked SecurityCritical or readonly to prevent data from being modified or leaked to other components in appdomain.")]
        [SecurityCritical]
        PrimitiveDataContractCriticalHelper helper;
        [Fx.Tag.SecurityNote(Critical = "Initializes SecurityCritical field 'helper'.",
            Safe = "Doesn't leak anything.")]
        [SecuritySafeCritical]
        protected PrimitiveDataContract(Type type, XmlDictionaryString name, XmlDictionaryString ns)
            : base(new PrimitiveDataContractCriticalHelper(type, name, ns))
        {
            helper = base.Helper as PrimitiveDataContractCriticalHelper;
        }
        // Returns the built-in primitive contract for the CLR type, or null if the
        // type is not a primitive (or not built-in).
        static internal PrimitiveDataContract GetPrimitiveDataContract(Type type)
        {
            return DataContract.GetBuiltInDataContract(type) as PrimitiveDataContract;
        }
        // Returns the built-in primitive contract for the XML (name, namespace)
        // pair, or null if none matches.
        static internal PrimitiveDataContract GetPrimitiveDataContract(string name, string ns)
        {
            return DataContract.GetBuiltInDataContract(name, ns) as PrimitiveDataContract;
        }
        // Name of the writer-side method on XmlWriterDelegator (value types) or
        // XmlObjectSerializerWriteContext (reference types) for this primitive.
        internal abstract string WriteMethodName { get; }
        // Name of the reader-side method on XmlReaderDelegator for this primitive.
        internal abstract string ReadMethodName { get; }
        internal override XmlDictionaryString TopLevelElementNamespace
        {
            get { return DictionaryGlobals.SerializationNamespace; }
            set { }
        }
        // Primitives are written by value and never carry object references.
        internal override bool CanContainReferences
        {
            get { return false; }
        }
        internal override bool IsPrimitive
        {
            get { return true; }
        }
        internal override bool IsBuiltInDataContract
        {
            get
            {
                return true;
            }
        }
        // MethodInfo for writing this primitive with name/namespace; lazily
        // resolved by reflection on first use and cached in the critical helper.
        internal MethodInfo XmlFormatWriterMethod
        {
            [Fx.Tag.SecurityNote(Critical = "Fetches the critical XmlFormatWriterMethod property.",
                Safe = "XmlFormatWriterMethod only needs to be protected for write; initialized in getter if null.")]
            [SecuritySafeCritical]
            get
            {
                if (helper.XmlFormatWriterMethod == null)
                {
                    if (UnderlyingType.IsValueType)
                        helper.XmlFormatWriterMethod = typeof(XmlWriterDelegator).GetMethod(WriteMethodName, Globals.ScanAllMembers, null, new Type[] { UnderlyingType, typeof(XmlDictionaryString), typeof(XmlDictionaryString) }, null);
                    else
                        helper.XmlFormatWriterMethod = typeof(XmlObjectSerializerWriteContext).GetMethod(WriteMethodName, Globals.ScanAllMembers, null, new Type[] { typeof(XmlWriterDelegator), UnderlyingType, typeof(XmlDictionaryString), typeof(XmlDictionaryString) }, null);
                }
                return helper.XmlFormatWriterMethod;
            }
        }
        // MethodInfo for writing only the element content (no name/namespace);
        // lazily resolved and cached like XmlFormatWriterMethod.
        internal MethodInfo XmlFormatContentWriterMethod
        {
            [Fx.Tag.SecurityNote(Critical = "Fetches the critical XmlFormatContentWriterMethod property.",
                Safe = "XmlFormatContentWriterMethod only needs to be protected for write; initialized in getter if null.")]
            [SecuritySafeCritical]
            get
            {
                if (helper.XmlFormatContentWriterMethod == null)
                {
                    if (UnderlyingType.IsValueType)
                        helper.XmlFormatContentWriterMethod = typeof(XmlWriterDelegator).GetMethod(WriteMethodName, Globals.ScanAllMembers, null, new Type[] { UnderlyingType }, null);
                    else
                        helper.XmlFormatContentWriterMethod = typeof(XmlObjectSerializerWriteContext).GetMethod(WriteMethodName, Globals.ScanAllMembers, null, new Type[] { typeof(XmlWriterDelegator), UnderlyingType }, null);
                }
                return helper.XmlFormatContentWriterMethod;
            }
        }
        // MethodInfo for reading this primitive from XmlReaderDelegator; lazily
        // resolved and cached.
        internal MethodInfo XmlFormatReaderMethod
        {
            [Fx.Tag.SecurityNote(Critical = "Fetches the critical XmlFormatReaderMethod property.",
                Safe = "XmlFormatReaderMethod only needs to be protected for write; initialized in getter if null.")]
            [SecuritySafeCritical]
            get
            {
                if (helper.XmlFormatReaderMethod == null)
                {
                    helper.XmlFormatReaderMethod = typeof(XmlReaderDelegator).GetMethod(ReadMethodName, Globals.ScanAllMembers);
                }
                return helper.XmlFormatReaderMethod;
            }
        }
        public override void WriteXmlValue(XmlWriterDelegator xmlWriter, object obj, XmlObjectSerializerWriteContext context)
        {
            xmlWriter.WriteAnyType(obj);
        }
        // Registers a freshly deserialized primitive with the read context's
        // object graph (for ref tracking) and returns it unchanged.
        protected object HandleReadValue(object obj, XmlObjectSerializerReadContext context)
        {
            context.AddNewObject(obj);
            return obj;
        }
        // Handles a possible xsi:nil element at the top level. Returns true (after
        // skipping the element) when the value is nil; throws if the element is a
        // reference to another object, which is not allowed at the top level.
        protected bool TryReadNullAtTopLevel(XmlReaderDelegator reader)
        {
            Attributes attributes = new Attributes();
            attributes.Read(reader);
            if (attributes.Ref != Globals.NewObjectId)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.GetString(SR.CannotDeserializeRefAtTopLevel, attributes.Ref)));
            if (attributes.XsiNil)
            {
                reader.Skip();
                return true;
            }
            return false;
        }
        // Two primitive contracts are considered equal when one's contract type is
        // the same as, or derived from, the other's (e.g. CharDataContract vs.
        // AsmxCharDataContract).
        internal override bool Equals(object other, Dictionary<DataContractPairKey, object> checkedContracts)
        {
            PrimitiveDataContract dataContract = other as PrimitiveDataContract;
            if (dataContract != null)
            {
                Type thisType = this.GetType();
                Type otherType = other.GetType();
                return (thisType.Equals(otherType) || thisType.IsSubclassOf(otherType) || otherType.IsSubclassOf(thisType));
            }
            return false;
        }
        [Fx.Tag.SecurityNote(Critical = "Holds all state used for for (de)serializing primitives."
            + " Since the data is cached statically, we lock down access to it.")]
#if !NO_SECURITY_ATTRIBUTES
        [SecurityCritical(SecurityCriticalScope.Everything)]
#endif
        // Security-critical cache of the reflected reader/writer MethodInfos.
        class PrimitiveDataContractCriticalHelper : DataContract.DataContractCriticalHelper
        {
            MethodInfo xmlFormatWriterMethod;
            MethodInfo xmlFormatContentWriterMethod;
            MethodInfo xmlFormatReaderMethod;
            internal PrimitiveDataContractCriticalHelper(Type type, XmlDictionaryString name, XmlDictionaryString ns)
                : base(type)
            {
                SetDataContractName(name, ns);
            }
            internal MethodInfo XmlFormatWriterMethod
            {
                get { return xmlFormatWriterMethod; }
                set { xmlFormatWriterMethod = value; }
            }
            internal MethodInfo XmlFormatContentWriterMethod
            {
                get { return xmlFormatContentWriterMethod; }
                set { xmlFormatContentWriterMethod = value; }
            }
            internal MethodInfo XmlFormatReaderMethod
            {
                get { return xmlFormatReaderMethod; }
                set { xmlFormatReaderMethod = value; }
            }
        }
    }
// Built-in contract mapping System.Char to the serialization "char" type.
internal class CharDataContract : PrimitiveDataContract
{
    internal CharDataContract()
        : this(DictionaryGlobals.CharLocalName, DictionaryGlobals.SerializationNamespace)
    {
    }

    // Used by derived contracts (e.g. AsmxCharDataContract) to expose the same
    // CLR type under a different XML name/namespace.
    internal CharDataContract(XmlDictionaryString name, XmlDictionaryString ns)
        : base(typeof(char), name, ns)
    {
    }

    internal override string WriteMethodName { get { return "WriteChar"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsChar"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteChar((char)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        char value = reader.ReadElementContentAsChar();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Char contract published under the legacy ASMX types namespace.
internal class AsmxCharDataContract : CharDataContract
{
    internal AsmxCharDataContract()
        : base(DictionaryGlobals.CharLocalName, DictionaryGlobals.AsmxTypesNamespace)
    {
    }
}
// Built-in contract mapping System.Boolean to xsd:boolean.
internal class BooleanDataContract : PrimitiveDataContract
{
    internal BooleanDataContract()
        : base(typeof(bool), DictionaryGlobals.BooleanLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteBoolean"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsBoolean"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteBoolean((bool)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        bool value = reader.ReadElementContentAsBoolean();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.SByte to xsd:byte.
internal class SignedByteDataContract : PrimitiveDataContract
{
    internal SignedByteDataContract()
        : base(typeof(sbyte), DictionaryGlobals.SignedByteLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteSignedByte"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsSignedByte"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteSignedByte((sbyte)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        sbyte value = reader.ReadElementContentAsSignedByte();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.Byte to xsd:unsignedByte.
internal class UnsignedByteDataContract : PrimitiveDataContract
{
    internal UnsignedByteDataContract()
        : base(typeof(byte), DictionaryGlobals.UnsignedByteLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteUnsignedByte"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsUnsignedByte"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteUnsignedByte((byte)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        byte value = reader.ReadElementContentAsUnsignedByte();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.Int16 to xsd:short.
internal class ShortDataContract : PrimitiveDataContract
{
    internal ShortDataContract()
        : base(typeof(short), DictionaryGlobals.ShortLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteShort"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsShort"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteShort((short)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        short value = reader.ReadElementContentAsShort();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.UInt16 to xsd:unsignedShort.
internal class UnsignedShortDataContract : PrimitiveDataContract
{
    internal UnsignedShortDataContract()
        : base(typeof(ushort), DictionaryGlobals.UnsignedShortLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteUnsignedShort"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsUnsignedShort"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteUnsignedShort((ushort)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        ushort value = reader.ReadElementContentAsUnsignedShort();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.Int32 to xsd:int.
internal class IntDataContract : PrimitiveDataContract
{
    internal IntDataContract()
        : base(typeof(int), DictionaryGlobals.IntLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteInt"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsInt"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteInt((int)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        int value = reader.ReadElementContentAsInt();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.UInt32 to xsd:unsignedInt.
internal class UnsignedIntDataContract : PrimitiveDataContract
{
    internal UnsignedIntDataContract()
        : base(typeof(uint), DictionaryGlobals.UnsignedIntLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteUnsignedInt"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsUnsignedInt"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteUnsignedInt((uint)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        uint value = reader.ReadElementContentAsUnsignedInt();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.Int64 to xsd:long. Also the base for the
// xsd integer-family contracts (integer, positiveInteger, ...), which reuse
// the long wire format under different XML names.
internal class LongDataContract : PrimitiveDataContract
{
    internal LongDataContract()
        : this(DictionaryGlobals.LongLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal LongDataContract(XmlDictionaryString name, XmlDictionaryString ns)
        : base(typeof(long), name, ns)
    {
    }

    internal override string WriteMethodName { get { return "WriteLong"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsLong"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteLong((long)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        long value = reader.ReadElementContentAsLong();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// xsd:integer exposed over the long wire format.
internal class IntegerDataContract : LongDataContract
{
    internal IntegerDataContract()
        : base(DictionaryGlobals.integerLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
}
// xsd:positiveInteger exposed over the long wire format.
internal class PositiveIntegerDataContract : LongDataContract
{
    internal PositiveIntegerDataContract()
        : base(DictionaryGlobals.positiveIntegerLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
}
// xsd:negativeInteger exposed over the long wire format.
internal class NegativeIntegerDataContract : LongDataContract
{
    internal NegativeIntegerDataContract()
        : base(DictionaryGlobals.negativeIntegerLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
}
// xsd:nonPositiveInteger exposed over the long wire format.
internal class NonPositiveIntegerDataContract : LongDataContract
{
    internal NonPositiveIntegerDataContract()
        : base(DictionaryGlobals.nonPositiveIntegerLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
}
// xsd:nonNegativeInteger exposed over the long wire format.
internal class NonNegativeIntegerDataContract : LongDataContract
{
    internal NonNegativeIntegerDataContract()
        : base(DictionaryGlobals.nonNegativeIntegerLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
}
// Built-in contract mapping System.UInt64 to xsd:unsignedLong.
internal class UnsignedLongDataContract : PrimitiveDataContract
{
    internal UnsignedLongDataContract()
        : base(typeof(ulong), DictionaryGlobals.UnsignedLongLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteUnsignedLong"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsUnsignedLong"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteUnsignedLong((ulong)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        ulong value = reader.ReadElementContentAsUnsignedLong();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Built-in contract mapping System.Single to xsd:float.
internal class FloatDataContract : PrimitiveDataContract
{
    internal FloatDataContract()
        : base(typeof(float), DictionaryGlobals.FloatLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteFloat"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsFloat"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteFloat((float)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        float value = reader.ReadElementContentAsFloat();
        // Without a read context there is no object graph to register with.
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
/// <summary>Primitive contract mapping xs:double to System.Double.</summary>
internal class DoubleDataContract : PrimitiveDataContract
{
    internal DoubleDataContract()
        : base(typeof(double), DictionaryGlobals.DoubleLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
    // Names of the reader/writer helpers referenced by generated serialization code.
    internal override string WriteMethodName { get { return "WriteDouble"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsDouble"; } }
    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteDouble((double)obj);
    }
    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        // With no context (top-level read) the raw value is returned; otherwise the
        // context gets a chance to post-process it via HandleReadValue.
        return (context == null) ? reader.ReadElementContentAsDouble()
            : HandleReadValue(reader.ReadElementContentAsDouble(), context);
    }
}
/// <summary>Primitive contract mapping xs:decimal to System.Decimal.</summary>
internal class DecimalDataContract : PrimitiveDataContract
{
    internal DecimalDataContract()
        : base(typeof(decimal), DictionaryGlobals.DecimalLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
    // Names of the reader/writer helpers referenced by generated serialization code.
    internal override string WriteMethodName { get { return "WriteDecimal"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsDecimal"; } }
    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteDecimal((decimal)obj);
    }
    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        // Top-level reads return the raw value; otherwise HandleReadValue lets the
        // context post-process it.
        return (context == null) ? reader.ReadElementContentAsDecimal()
            : HandleReadValue(reader.ReadElementContentAsDecimal(), context);
    }
}
/// <summary>Primitive contract mapping xs:dateTime to System.DateTime.</summary>
internal class DateTimeDataContract : PrimitiveDataContract
{
    internal DateTimeDataContract()
        : base(typeof(DateTime), DictionaryGlobals.DateTimeLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
    // Names of the reader/writer helpers referenced by generated serialization code.
    internal override string WriteMethodName { get { return "WriteDateTime"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsDateTime"; } }
    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteDateTime((DateTime)obj);
    }
    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        // Top-level reads return the raw value; otherwise HandleReadValue lets the
        // context post-process it.
        return (context == null) ? reader.ReadElementContentAsDateTime()
            : HandleReadValue(reader.ReadElementContentAsDateTime(), context);
    }
}
/// <summary>
/// Primitive contract for xs:string. Also serves as the base class for the other
/// string-backed XSD simple types, which reuse the name/namespace constructor.
/// </summary>
internal class StringDataContract : PrimitiveDataContract
{
    internal StringDataContract()
        : this(DictionaryGlobals.StringLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    // Derived contracts (token, Name, ID, ...) supply their own XML name here.
    internal StringDataContract(XmlDictionaryString name, XmlDictionaryString ns)
        : base(typeof(string), name, ns)
    {
    }

    internal override string WriteMethodName { get { return "WriteString"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsString"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteString((string)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        if (context != null)
            return HandleReadValue(reader.ReadElementContentAsString(), context);
        // Top-level read: a nil element deserializes to null.
        if (TryReadNullAtTopLevel(reader))
            return null;
        return reader.ReadElementContentAsString();
    }
}
// The contracts below map string-backed XSD simple types (time, date, hexBinary, the
// Gregorian gYear*/gMonth*/gDay types, and the normalizedString/token/Name/ID/ENTITY/
// NMTOKEN family) onto StringDataContract. Only the XML local name differs; no extra
// validation is visible at this level.
internal class TimeDataContract : StringDataContract
{
    internal TimeDataContract() : base(DictionaryGlobals.timeLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class DateDataContract : StringDataContract
{
    internal DateDataContract() : base(DictionaryGlobals.dateLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class HexBinaryDataContract : StringDataContract
{
    internal HexBinaryDataContract() : base(DictionaryGlobals.hexBinaryLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class GYearMonthDataContract : StringDataContract
{
    internal GYearMonthDataContract() : base(DictionaryGlobals.gYearMonthLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class GYearDataContract : StringDataContract
{
    internal GYearDataContract() : base(DictionaryGlobals.gYearLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class GMonthDayDataContract : StringDataContract
{
    internal GMonthDayDataContract() : base(DictionaryGlobals.gMonthDayLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class GDayDataContract : StringDataContract
{
    internal GDayDataContract() : base(DictionaryGlobals.gDayLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class GMonthDataContract : StringDataContract
{
    internal GMonthDataContract() : base(DictionaryGlobals.gMonthLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class NormalizedStringDataContract : StringDataContract
{
    internal NormalizedStringDataContract() : base(DictionaryGlobals.normalizedStringLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class TokenDataContract : StringDataContract
{
    internal TokenDataContract() : base(DictionaryGlobals.tokenLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class LanguageDataContract : StringDataContract
{
    internal LanguageDataContract() : base(DictionaryGlobals.languageLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class NameDataContract : StringDataContract
{
    internal NameDataContract() : base(DictionaryGlobals.NameLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class NCNameDataContract : StringDataContract
{
    internal NCNameDataContract() : base(DictionaryGlobals.NCNameLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class IDDataContract : StringDataContract
{
    internal IDDataContract() : base(DictionaryGlobals.XSDIDLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class IDREFDataContract : StringDataContract
{
    internal IDREFDataContract() : base(DictionaryGlobals.IDREFLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class IDREFSDataContract : StringDataContract
{
    internal IDREFSDataContract() : base(DictionaryGlobals.IDREFSLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class ENTITYDataContract : StringDataContract
{
    internal ENTITYDataContract() : base(DictionaryGlobals.ENTITYLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class ENTITIESDataContract : StringDataContract
{
    internal ENTITIESDataContract() : base(DictionaryGlobals.ENTITIESLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class NMTOKENDataContract : StringDataContract
{
    internal NMTOKENDataContract() : base(DictionaryGlobals.NMTOKENLocalName, DictionaryGlobals.SchemaNamespace) { }
}
internal class NMTOKENSDataContract : StringDataContract
{
    internal NMTOKENSDataContract() : base(DictionaryGlobals.NMTOKENSLocalName, DictionaryGlobals.SchemaNamespace) { }
}
/// <summary>Primitive contract mapping xs:base64Binary to byte[].</summary>
internal class ByteArrayDataContract : PrimitiveDataContract
{
    internal ByteArrayDataContract()
        : base(typeof(byte[]), DictionaryGlobals.ByteArrayLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteBase64"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsBase64"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteBase64((byte[])obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        if (context != null)
            return HandleReadValue(reader.ReadElementContentAsBase64(), context);
        // Top-level read: a nil element deserializes to null.
        if (TryReadNullAtTopLevel(reader))
            return null;
        return reader.ReadElementContentAsBase64();
    }
}
/// <summary>
/// Contract for System.Object (xs:anyType). An object written with this contract
/// carries no content; an element with content fails to deserialize as a plain object.
/// </summary>
internal class ObjectDataContract : PrimitiveDataContract
{
    internal ObjectDataContract()
        : base(typeof(object), DictionaryGlobals.ObjectLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
    internal override string WriteMethodName { get { return "WriteAnyType"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsAnyType"; } }
    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        // write nothing
    }
    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        object obj;
        if (reader.IsEmptyElement)
        {
            reader.Skip();
            obj = new object();
        }
        else
        {
            // Capture the element name before advancing so the error message can
            // report which element unexpectedly contained content.
            string localName = reader.LocalName;
            string ns = reader.NamespaceURI;
            reader.Read();
            try
            {
                reader.ReadEndElement();
                obj = new object();
            }
            catch (XmlException xes)
            {
                // Any content between start and end element makes ReadEndElement throw;
                // surface that as a serialization error naming the offending element.
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.GetString(SR.XmlForObjectCannotHaveContent, localName, ns), xes));
            }
        }
        return (context == null) ? obj : HandleReadValue(obj, context);
    }
    // object graphs can participate in reference tracking.
    internal override bool CanContainReferences
    {
        get { return true; }
    }
    // Not a true primitive: it needs full serializer handling despite living here.
    internal override bool IsPrimitive
    {
        get { return false; }
    }
}
/// <summary>Contract for System.TimeSpan, written in the serialization namespace.</summary>
internal class TimeSpanDataContract : PrimitiveDataContract
{
    internal TimeSpanDataContract()
        : this(DictionaryGlobals.TimeSpanLocalName, DictionaryGlobals.SerializationNamespace)
    {
    }

    // XsDurationDataContract reuses this overload with the XSD schema namespace.
    internal TimeSpanDataContract(XmlDictionaryString name, XmlDictionaryString ns)
        : base(typeof(TimeSpan), name, ns)
    {
    }

    internal override string WriteMethodName { get { return "WriteTimeSpan"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsTimeSpan"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteTimeSpan((TimeSpan)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        // Read the element content once, then let the context post-process it when present.
        TimeSpan value = reader.ReadElementContentAsTimeSpan();
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Same wire format as TimeSpanDataContract but advertised under the XSD schema
// namespace (xs:duration) instead of the serialization namespace.
internal class XsDurationDataContract : TimeSpanDataContract
{
    internal XsDurationDataContract() : base(DictionaryGlobals.TimeSpanLocalName, DictionaryGlobals.SchemaNamespace) { }
}
/// <summary>Contract for System.Guid, written in the serialization namespace.</summary>
internal class GuidDataContract : PrimitiveDataContract
{
    internal GuidDataContract()
        : this(DictionaryGlobals.GuidLocalName, DictionaryGlobals.SerializationNamespace)
    {
    }

    // AsmxGuidDataContract reuses this overload with the ASMX types namespace.
    internal GuidDataContract(XmlDictionaryString name, XmlDictionaryString ns)
        : base(typeof(Guid), name, ns)
    {
    }

    internal override string WriteMethodName { get { return "WriteGuid"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsGuid"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteGuid((Guid)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        // Read the element content once, then let the context post-process it when present.
        Guid value = reader.ReadElementContentAsGuid();
        return (context == null) ? (object)value : HandleReadValue(value, context);
    }
}
// Guid contract published under the ASMX types namespace for legacy interop.
internal class AsmxGuidDataContract : GuidDataContract
{
    internal AsmxGuidDataContract() : base(DictionaryGlobals.GuidLocalName, DictionaryGlobals.AsmxTypesNamespace) { }
}
/// <summary>Primitive contract mapping xs:anyURI to System.Uri.</summary>
internal class UriDataContract : PrimitiveDataContract
{
    internal UriDataContract()
        : base(typeof(Uri), DictionaryGlobals.UriLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }

    internal override string WriteMethodName { get { return "WriteUri"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsUri"; } }

    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteUri((Uri)obj);
    }

    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        if (context != null)
            return HandleReadValue(reader.ReadElementContentAsUri(), context);
        // Top-level read: a nil element deserializes to null.
        if (TryReadNullAtTopLevel(reader))
            return null;
        return reader.ReadElementContentAsUri();
    }
}
/// <summary>
/// Contract for XmlQualifiedName (xs:QName). QName values are namespace-prefix
/// sensitive, so this contract also customizes how the root element is opened.
/// </summary>
internal class QNameDataContract : PrimitiveDataContract
{
    internal QNameDataContract()
        : base(typeof(XmlQualifiedName), DictionaryGlobals.QNameLocalName, DictionaryGlobals.SchemaNamespace)
    {
    }
    internal override string WriteMethodName { get { return "WriteQName"; } }
    internal override string ReadMethodName { get { return "ReadElementContentAsQName"; } }
    // Reference type with nullability/prefix concerns; not treated as a true primitive.
    internal override bool IsPrimitive
    {
        get { return false; }
    }
    public override void WriteXmlValue(XmlWriterDelegator writer, object obj, XmlObjectSerializerWriteContext context)
    {
        writer.WriteQName((XmlQualifiedName)obj);
    }
    public override object ReadXmlValue(XmlReaderDelegator reader, XmlObjectSerializerReadContext context)
    {
        if (context == null)
        {
            // Top-level read: a nil element deserializes to null.
            return TryReadNullAtTopLevel(reader) ? null : reader.ReadElementContentAsQName();
        }
        else
        {
            return HandleReadValue(reader.ReadElementContentAsQName(), context);
        }
    }
    internal override void WriteRootElement(XmlWriterDelegator writer, XmlDictionaryString name, XmlDictionaryString ns)
    {
        // Pick an explicit prefix so the QName value's own prefix bindings cannot
        // collide with the default namespace of the root element:
        // serialization namespace -> serializer prefix, any other non-empty
        // namespace -> generic element prefix, empty namespace -> no prefix.
        if (object.ReferenceEquals(ns, DictionaryGlobals.SerializationNamespace))
            writer.WriteStartElement(Globals.SerPrefix, name, ns);
        else if (ns != null && ns.Value != null && ns.Value.Length > 0)
            writer.WriteStartElement(Globals.ElementPrefix, name, ns);
        else
            writer.WriteStartElement(name, ns);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Cors;
using Microsoft.AspNetCore.Cors.Infrastructure;
using Microsoft.AspNetCore.Mvc.ActionConstraints;
using Microsoft.AspNetCore.Mvc.ApplicationModels;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using Xunit;
namespace Microsoft.AspNetCore.Mvc.Cors
{
public class CorsApplicationModelProviderTest
{
// Options used by the tests that exercise the legacy (filter-based) code path,
// where endpoint routing is switched off.
private readonly IOptions<MvcOptions> OptionsWithoutEndpointRouting = Options.Create(new MvcOptions { EnableEndpointRouting = false });
// With default MvcOptions (endpoint routing), a controller-level CORS attribute should
// surface as HttpMethodMetadata.AcceptCorsPreflight on the selector — no filters and
// no CorsHttpMethodActionConstraint are expected.
[Fact]
public void OnProvidersExecuting_SetsEndpointMetadata_IfCorsAttributeIsPresentOnController()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(Options.Create(new MvcOptions()));
    var context = GetProviderContext(typeof(CorsController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    Assert.Empty(model.Filters);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsNotType<CorsHttpMethodActionConstraint>(constraint);
    var httpMethodMetadata = Assert.Single(selector.EndpointMetadata.OfType<HttpMethodMetadata>());
    Assert.True(httpMethodMetadata.AcceptCorsPreflight);
}
// Same expectation as above, but the CORS attribute ([DisableCors]) sits on the action
// rather than the controller.
[Fact]
public void OnProvidersExecuting_SetsEndpointMetadata_IfCorsAttributeIsPresentOnAction()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(Options.Create(new MvcOptions()));
    var context = GetProviderContext(typeof(DisableCorsActionController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    Assert.Empty(model.Filters);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsNotType<CorsHttpMethodActionConstraint>(constraint);
    var httpMethodMetadata = Assert.Single(selector.EndpointMetadata.OfType<HttpMethodMetadata>());
    Assert.True(httpMethodMetadata.AcceptCorsPreflight);
}
// Legacy path: controller-level [EnableCors] should add a CorsAuthorizationFilterFactory
// to the controller and swap in the preflight-aware CorsHttpMethodActionConstraint.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_EnableCorsAttributeAddsCorsAuthorizationFilterFactory()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(CorsController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    Assert.Single(model.Filters, f => f is CorsAuthorizationFilterFactory);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path: controller-level [DisableCors] should add a DisableCorsAuthorizationFilter
// and still enable CORS preflight via the action constraint.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_DisableCorsAttributeAddsDisableCorsAuthorizationFilter()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(DisableCorsController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    Assert.Single(model.Filters, f => f is DisableCorsAuthorizationFilter);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path: a custom ICorsAuthorizationFilter on the controller is enough to
// enable CORS preflight handling for its actions.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_CustomCorsFilter_EnablesCorsPreflight()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(CustomCorsFilterController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var controller = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(controller.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path, action level: [EnableCors] on the action adds the filter factory to the
// action model (not the controller) and enables the preflight-aware constraint.
[Fact]
public void BuildActionModel_EnableCorsAttributeAddsCorsAuthorizationFilterFactory()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(EnableCorsController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var controller = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(controller.Actions);
    Assert.Single(action.Filters, f => f is CorsAuthorizationFilterFactory);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path, action level: [DisableCors] on the action adds DisableCorsAuthorizationFilter
// to the action's filters.
[Fact]
public void BuildActionModel_WithoutGlobalAuthorizationFilter_DisableCorsAttributeAddsDisableCorsAuthorizationFilter()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(DisableCorsActionController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var controller = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(controller.Actions);
    Assert.Contains(action.Filters, f => f is DisableCorsAuthorizationFilter);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path, action level: a custom ICorsAuthorizationFilter on the action alone
// enables CORS preflight handling.
[Fact]
public void BuildActionModel_WithoutGlobalAuthorizationFilter_CustomCorsAuthorizationFilterOnAction_EnablesCorsPreflight()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(CustomCorsFilterOnActionController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var controller = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(controller.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path, global level: a CorsAuthorizationFilter registered as a global filter
// enables CORS preflight even on controllers with no CORS attributes.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_EnableCorsGloballyEnablesCorsPreflight()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(RegularController));
    context.Result.Filters.Add(
        new CorsAuthorizationFilter(Mock.Of<ICorsService>(), Mock.Of<ICorsPolicyProvider>(), Mock.Of<ILoggerFactory>()));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path, global level: a global DisableCorsAuthorizationFilter likewise enables
// preflight handling everywhere.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_DisableCorsGloballyEnablesCorsPreflight()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(RegularController));
    context.Result.Filters.Add(new DisableCorsAuthorizationFilter());
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Legacy path, global level: any custom ICorsAuthorizationFilter registered globally
// enables preflight handling.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_CustomCorsFilterGloballyEnablesCorsPreflight()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(RegularController));
    context.Result.Filters.Add(new CustomCorsFilterAttribute());
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsType<CorsHttpMethodActionConstraint>(constraint);
}
// Negative case: with no CORS configuration anywhere the provider must leave the plain
// HttpMethodActionConstraint untouched.
[Fact]
public void OnProvidersExecuting_WithoutGlobalAuthorizationFilter_CorsNotInUseDoesNotOverrideHttpConstraints()
{
    // Arrange
    var corsProvider = new CorsApplicationModelProvider(OptionsWithoutEndpointRouting);
    var context = GetProviderContext(typeof(RegularController));
    // Act
    corsProvider.OnProvidersExecuting(context);
    // Assert
    var model = Assert.Single(context.Result.Controllers);
    var action = Assert.Single(model.Actions);
    var selector = Assert.Single(action.Selectors);
    var constraint = Assert.Single(selector.ActionConstraints, c => c is HttpMethodActionConstraint);
    Assert.IsNotType<CorsHttpMethodActionConstraint>(constraint);
}
// Builds the default application model for a single controller type so the CORS
// provider under test can run against a realistic, fully-populated context.
private static ApplicationModelProviderContext GetProviderContext(Type controllerType)
{
    var typeInfos = new[] { controllerType.GetTypeInfo() };
    var context = new ApplicationModelProviderContext(typeInfos);
    var defaultProvider = new DefaultApplicationModelProvider(
        Options.Create(new MvcOptions()),
        new EmptyModelMetadataProvider());
    defaultProvider.OnProvidersExecuting(context);
    return context;
}
// Test fixture: [EnableCors] on the action only.
private class EnableCorsController
{
    [EnableCors("policy")]
    [HttpGet]
    public IActionResult Action()
    {
        return null;
    }
}
// Test fixture: [DisableCors] on the action only.
private class DisableCorsActionController
{
    [DisableCors]
    [HttpGet]
    public void Action()
    {
    }
}
// Test fixture: [EnableCors] at the controller level.
[EnableCors("policy")]
public class CorsController
{
    [HttpGet]
    public IActionResult Action()
    {
        return null;
    }
}
// Test fixture: [DisableCors] at the controller level.
[DisableCors]
public class DisableCorsController
{
    [HttpOptions]
    public IActionResult Action()
    {
        return null;
    }
}
// Test fixture: no CORS configuration at all.
public class RegularController
{
    [HttpPost]
    public IActionResult Action()
    {
        return null;
    }
}
// Test fixture: custom ICorsAuthorizationFilter at the controller level.
[CustomCorsFilter]
public class CustomCorsFilterController
{
    [HttpPost]
    public IActionResult Action()
    {
        return null;
    }
}
// Test fixture: custom ICorsAuthorizationFilter on the action.
public class CustomCorsFilterOnActionController
{
    [HttpPost]
    [CustomCorsFilter]
    public IActionResult Action()
    {
        return null;
    }
}
// Minimal ICorsAuthorizationFilter implementation used by the custom-filter fixtures;
// its OnAuthorizationAsync is a no-op.
public class CustomCorsFilterAttribute : Attribute, ICorsAuthorizationFilter
{
    public int Order { get; } = 1000;
    public Task OnAuthorizationAsync(AuthorizationFilterContext context)
    {
        return Task.FromResult(0);
    }
}
}
}
| |
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
namespace ModuleUpdater.Dialogs
{
/// <summary>
/// Summary description for ParameterDialog.
/// </summary>
public class ParameterDialog : System.Windows.Forms.Form
{
// Designer-managed controls for the dialog layout.
private System.Windows.Forms.Button cancelButton;
private System.Windows.Forms.Button okButton;
private System.Windows.Forms.TextBox nameTextBox;
private System.Windows.Forms.Label nameLabel;
private System.Windows.Forms.Label descriptionLabel;
private System.Windows.Forms.TextBox descriptionTextBox;
private System.Windows.Forms.Label typeLabel;
private System.Windows.Forms.ComboBox typeComboBox;
private System.Windows.Forms.TextBox minTextBox;
private System.Windows.Forms.Label minLabel;
private System.Windows.Forms.TextBox maxTextBox;
private System.Windows.Forms.Label maxLabel;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
// True when the dialog is adding a new parameter; false when editing an existing row.
private bool addMode;
// The DataRow being created/edited; field values are read from and written to it.
private DataRow parameterRow;
private System.Windows.Forms.GroupBox requiredGroupBox;
private System.Windows.Forms.GroupBox optionalGroupBox;
// Lookup table bound to typeComboBox (fldItem / fldParameterTypeID columns).
private DataTable parameterTypeTable;
/// <summary>
/// Creates the dialog in either add or update mode. In update mode the fields are
/// pre-populated from <paramref name="parameterRow"/>.
/// </summary>
/// <param name="addMode">True to add a new parameter; false to edit an existing one.</param>
/// <param name="parameterRow">Row holding the parameter's current values.</param>
/// <param name="parameterTypeTable">Lookup table of parameter types for the combo box.</param>
public ParameterDialog(bool addMode, ref DataRow parameterRow, DataTable parameterTypeTable)
{
    // Required for Windows Form Designer support
    InitializeComponent();
    EnableVisualStyles.Enable(this);
    this.addMode = addMode;
    this.parameterRow = parameterRow;
    this.parameterTypeTable = parameterTypeTable;
    // Bind the type lookup before selecting, so SelectedIndex/SelectedValue resolve.
    this.typeComboBox.DataSource = this.parameterTypeTable;
    if (this.addMode)
    {
        this.okButton.Text = "Add";
        this.Text = "Add New Parameter";
        this.typeComboBox.SelectedIndex = 0;
    }
    else
    {
        this.okButton.Text = "Update";
        this.Text = "Update Parameter";
        this.nameTextBox.Text = this.parameterRow["fldName"].ToString();
        this.descriptionTextBox.Text = this.parameterRow["fldDescription"].ToString();
        this.typeComboBox.SelectedValue = long.Parse(this.parameterRow["fldParameterTypeID"].ToString());
        // NOTE(review): optionalGroupBox.Enabled appears to be driven by
        // typeComboBox_SelectedIndexChanged (fired by the assignment above);
        // that handler is not visible here — confirm before relying on it.
        if (this.optionalGroupBox.Enabled)
        {
            this.minTextBox.Text = this.parameterRow["fldMinimum"].ToString();
            this.maxTextBox.Text = this.parameterRow["fldMaximum"].ToString();
        }
    }
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
    // Dispose designer-managed components only on an explicit dispose,
    // then always defer to the base Form implementation.
    if (disposing && components != null)
    {
        components.Dispose();
    }
    base.Dispose( disposing );
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
    // NOTE: designer-generated layout code — edit via the WinForms designer only.
    this.requiredGroupBox = new System.Windows.Forms.GroupBox();
    this.typeComboBox = new System.Windows.Forms.ComboBox();
    this.typeLabel = new System.Windows.Forms.Label();
    this.descriptionTextBox = new System.Windows.Forms.TextBox();
    this.descriptionLabel = new System.Windows.Forms.Label();
    this.nameLabel = new System.Windows.Forms.Label();
    this.nameTextBox = new System.Windows.Forms.TextBox();
    this.optionalGroupBox = new System.Windows.Forms.GroupBox();
    this.maxLabel = new System.Windows.Forms.Label();
    this.maxTextBox = new System.Windows.Forms.TextBox();
    this.minLabel = new System.Windows.Forms.Label();
    this.minTextBox = new System.Windows.Forms.TextBox();
    this.cancelButton = new System.Windows.Forms.Button();
    this.okButton = new System.Windows.Forms.Button();
    this.requiredGroupBox.SuspendLayout();
    this.optionalGroupBox.SuspendLayout();
    this.SuspendLayout();
    //
    // requiredGroupBox
    //
    this.requiredGroupBox.Controls.Add(this.typeComboBox);
    this.requiredGroupBox.Controls.Add(this.typeLabel);
    this.requiredGroupBox.Controls.Add(this.descriptionTextBox);
    this.requiredGroupBox.Controls.Add(this.descriptionLabel);
    this.requiredGroupBox.Controls.Add(this.nameLabel);
    this.requiredGroupBox.Controls.Add(this.nameTextBox);
    this.requiredGroupBox.Location = new System.Drawing.Point(8, 8);
    this.requiredGroupBox.Name = "requiredGroupBox";
    this.requiredGroupBox.Size = new System.Drawing.Size(480, 136);
    this.requiredGroupBox.TabIndex = 0;
    this.requiredGroupBox.TabStop = false;
    this.requiredGroupBox.Text = "Required Information";
    //
    // typeComboBox
    //
    this.typeComboBox.DisplayMember = "fldItem";
    this.typeComboBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
    this.typeComboBox.Location = new System.Drawing.Point(80, 100);
    this.typeComboBox.Name = "typeComboBox";
    this.typeComboBox.Size = new System.Drawing.Size(184, 21);
    this.typeComboBox.TabIndex = 5;
    this.typeComboBox.ValueMember = "fldParameterTypeID";
    this.typeComboBox.SelectedIndexChanged += new System.EventHandler(this.typeComboBox_SelectedIndexChanged);
    //
    // typeLabel
    //
    this.typeLabel.Location = new System.Drawing.Point(16, 100);
    this.typeLabel.Name = "typeLabel";
    this.typeLabel.Size = new System.Drawing.Size(32, 16);
    this.typeLabel.TabIndex = 4;
    this.typeLabel.Text = "Type:";
    this.typeLabel.TextAlign = System.Drawing.ContentAlignment.BottomLeft;
    //
    // descriptionTextBox
    //
    this.descriptionTextBox.Location = new System.Drawing.Point(80, 48);
    this.descriptionTextBox.MaxLength = 65536;
    this.descriptionTextBox.Multiline = true;
    this.descriptionTextBox.Name = "descriptionTextBox";
    this.descriptionTextBox.Size = new System.Drawing.Size(384, 48);
    this.descriptionTextBox.TabIndex = 3;
    this.descriptionTextBox.Text = "";
    this.descriptionTextBox.TextChanged += new System.EventHandler(this.Changed_DoCheck);
    //
    // descriptionLabel
    //
    this.descriptionLabel.Location = new System.Drawing.Point(16, 48);
    this.descriptionLabel.Name = "descriptionLabel";
    this.descriptionLabel.Size = new System.Drawing.Size(64, 16);
    this.descriptionLabel.TabIndex = 2;
    this.descriptionLabel.Text = "Description:";
    this.descriptionLabel.TextAlign = System.Drawing.ContentAlignment.BottomLeft;
    //
    // nameLabel
    //
    this.nameLabel.Location = new System.Drawing.Point(16, 24);
    this.nameLabel.Name = "nameLabel";
    this.nameLabel.Size = new System.Drawing.Size(40, 16);
    this.nameLabel.TabIndex = 1;
    this.nameLabel.Text = "Name:";
    this.nameLabel.TextAlign = System.Drawing.ContentAlignment.BottomLeft;
    //
    // nameTextBox
    //
    this.nameTextBox.Location = new System.Drawing.Point(80, 24);
    this.nameTextBox.MaxLength = 255;
    this.nameTextBox.Name = "nameTextBox";
    this.nameTextBox.Size = new System.Drawing.Size(384, 20);
    this.nameTextBox.TabIndex = 0;
    this.nameTextBox.Text = "";
    this.nameTextBox.TextChanged += new System.EventHandler(this.Changed_DoCheck);
    //
    // optionalGroupBox
    //
    this.optionalGroupBox.Controls.Add(this.maxLabel);
    this.optionalGroupBox.Controls.Add(this.maxTextBox);
    this.optionalGroupBox.Controls.Add(this.minLabel);
    this.optionalGroupBox.Controls.Add(this.minTextBox);
    this.optionalGroupBox.Location = new System.Drawing.Point(8, 152);
    this.optionalGroupBox.Name = "optionalGroupBox";
    this.optionalGroupBox.Size = new System.Drawing.Size(480, 80);
    this.optionalGroupBox.TabIndex = 1;
    this.optionalGroupBox.TabStop = false;
    this.optionalGroupBox.Text = "Variable Type Information";
    //
    // maxLabel
    //
    this.maxLabel.Location = new System.Drawing.Point(248, 24);
    this.maxLabel.Name = "maxLabel";
    this.maxLabel.Size = new System.Drawing.Size(100, 16);
    this.maxLabel.TabIndex = 3;
    this.maxLabel.Text = "Maximum Value:";
    //
    // maxTextBox
    //
    this.maxTextBox.Location = new System.Drawing.Point(248, 40);
    this.maxTextBox.MaxLength = 255;
    this.maxTextBox.Name = "maxTextBox";
    this.maxTextBox.Size = new System.Drawing.Size(216, 20);
    this.maxTextBox.TabIndex = 2;
    this.maxTextBox.Text = "";
    this.maxTextBox.TextChanged += new System.EventHandler(this.Changed_DoCheck);
    //
    // minLabel
    //
    this.minLabel.Location = new System.Drawing.Point(16, 24);
    this.minLabel.Name = "minLabel";
    this.minLabel.Size = new System.Drawing.Size(100, 16);
    this.minLabel.TabIndex = 1;
    this.minLabel.Text = "Minimum Value:";
    //
    // minTextBox
    //
    this.minTextBox.Location = new System.Drawing.Point(16, 40);
    this.minTextBox.MaxLength = 255;
    this.minTextBox.Name = "minTextBox";
    this.minTextBox.Size = new System.Drawing.Size(216, 20);
    this.minTextBox.TabIndex = 0;
    this.minTextBox.Text = "";
    this.minTextBox.TextChanged += new System.EventHandler(this.Changed_DoCheck);
    //
    // cancelButton
    //
    this.cancelButton.DialogResult = System.Windows.Forms.DialogResult.Cancel;
    this.cancelButton.Location = new System.Drawing.Point(408, 240);
    this.cancelButton.Name = "cancelButton";
    this.cancelButton.Size = new System.Drawing.Size(80, 24);
    this.cancelButton.TabIndex = 2;
    this.cancelButton.Text = "Cancel";
    //
    // okButton
    //
    this.okButton.DialogResult = System.Windows.Forms.DialogResult.OK;
    this.okButton.Location = new System.Drawing.Point(320, 240);
    this.okButton.Name = "okButton";
    this.okButton.Size = new System.Drawing.Size(80, 24);
    this.okButton.TabIndex = 3;
    this.okButton.Text = "OK";
    this.okButton.Click += new System.EventHandler(this.okButton_Click);
    //
    // ParameterDialog
    //
    this.AcceptButton = this.okButton;
    this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
    this.CancelButton = this.cancelButton;
    this.ClientSize = new System.Drawing.Size(496, 272);
    this.Controls.Add(this.okButton);
    this.Controls.Add(this.cancelButton);
    this.Controls.Add(this.optionalGroupBox);
    this.Controls.Add(this.requiredGroupBox);
    this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
    this.MaximizeBox = false;
    this.MinimizeBox = false;
    this.Name = "ParameterDialog";
    this.ShowInTaskbar = false;
    this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
    this.Text = "ParameterDialog";
    this.requiredGroupBox.ResumeLayout(false);
    this.optionalGroupBox.ResumeLayout(false);
    this.ResumeLayout(false);
}
#endregion
/// <summary>
/// Enables the OK button only when the dialog contents are valid and,
/// in edit mode, differ from the values already stored in parameterRow.
/// </summary>
private void DoCheck()
{
    this.okButton.Enabled = false;
    if (addMode)
    {
        this.okButton.Enabled = this.RequiredFieldsValid();
    }
    else
    {
        // In edit mode the OK button stays disabled until something actually changed.
        // NOTE(review): typeComboBox.SelectedValue would throw here if no type is
        // selected - confirm the dialog always pre-selects a type in edit mode.
        bool changed =
            this.nameTextBox.Text != this.parameterRow["fldName"].ToString()
            || this.descriptionTextBox.Text != this.parameterRow["fldDescription"].ToString()
            || this.typeComboBox.SelectedValue.ToString() != this.parameterRow["fldParameterTypeID"].ToString()
            || this.minTextBox.Text != this.parameterRow["fldMinimum"].ToString()
            || this.maxTextBox.Text != this.parameterRow["fldMaximum"].ToString();
        if (changed)
        {
            this.okButton.Enabled = this.RequiredFieldsValid();
        }
    }
}
/// <summary>
/// True when name and description are filled in and, if the optional
/// min/max group is enabled, both bounds are filled in as well.
/// (Shared by the add-mode and edit-mode checks in DoCheck, which
/// previously duplicated this logic verbatim.)
/// </summary>
private bool RequiredFieldsValid()
{
    if (this.nameTextBox.Text == "" || this.descriptionTextBox.Text == "")
        return false;
    if (!this.optionalGroupBox.Enabled)
        return true;
    return this.minTextBox.Text != "" && this.maxTextBox.Text != "";
}
/// <summary>
/// Enables or disables the optional min/max group depending on the newly
/// selected parameter type, then revalidates the dialog. Types without a
/// numeric range (string, bool, and the vector types) clear and disable
/// the min/max fields.
/// </summary>
private void typeComboBox_SelectedIndexChanged(object sender, System.EventArgs e)
{
    // Evaluate the selected type name once instead of four times.
    string typeName = ((DataRowView)this.typeComboBox.SelectedItem).Row["fldItem"].ToString();
    switch (typeName)
    {
        case "string":
        case "bool":
        case "int vector":
        case "float vector":
            // No meaningful min/max for these types; clearing the text boxes
            // also fires Changed_DoCheck via TextChanged, as before.
            this.minTextBox.Text = this.maxTextBox.Text = "";
            this.optionalGroupBox.Enabled = false;
            break;
        default:
            this.optionalGroupBox.Enabled = true;
            break;
    }
    this.DoCheck();
}
/// <summary>
/// Shared TextChanged handler: any edit to a field simply re-runs validation.
/// </summary>
private void Changed_DoCheck(object sender, System.EventArgs e)
{
    DoCheck();
}
/// <summary>
/// Copies the dialog fields back into the bound parameter row when OK is
/// pressed. When the optional group is disabled the min/max columns are
/// set to DBNull instead of the (empty) text-box contents.
/// </summary>
private void okButton_Click(object sender, System.EventArgs e)
{
    this.parameterRow["fldName"] = this.nameTextBox.Text;
    this.parameterRow["fldDescription"] = this.descriptionTextBox.Text;
    this.parameterRow["fldParameterTypeID"] = this.typeComboBox.SelectedValue;
    this.parameterRow["fldParameterType"] = ((DataRowView)this.typeComboBox.SelectedItem).Row["fldItem"].ToString();
    if (this.optionalGroupBox.Enabled)
    {
        this.parameterRow["fldMinimum"] = this.minTextBox.Text;
        this.parameterRow["fldMaximum"] = this.maxTextBox.Text;
    }
    else
    {
        // Same order as the original chained assignment: maximum first.
        this.parameterRow["fldMaximum"] = DBNull.Value;
        this.parameterRow["fldMinimum"] = DBNull.Value;
    }
}
}
}
| |
using System;
using System.Drawing;
using System.Diagnostics;
using Microsoft.DirectX;
using Microsoft.DirectX.Direct3D;
namespace Simbiosis
{
/// <summary>
/// One square of terrain, composed of two triangles in a triangle strip.
/// Vertices are in the order: NW, NE, SW, SE.
/// First triangle = 012, second = 123
/// The triangles are stored in a strip for memory efficiency. However, they are rendered in batches
/// and so get converted to separate triangles when being added to the batch. The first triangle is
/// made from points 0,1,2 and the second uses points 3,2,1
/// </summary>
public class Tile : Renderable
{
/// <summary> The global batch of triangles to be rendered this frame (in video memory) </summary>
// NOTE: Render() adds 6 vertices at a time and only flushes when the buffer is
// exactly full, so BATCHSIZE must remain a multiple of 6 (3000 = 500 * 6).
public const int BATCHSIZE = 3000; // Max # verts in batch buffer
public static int NumInBatch = 0; // # vertices currently in buffer
public static VertexBuffer Batch = new VertexBuffer(typeof(CustomVertex.PositionNormalTextured),
BATCHSIZE,
Engine.Device,
Usage.WriteOnly,
CustomVertex.PositionNormalTextured.Format,
Pool.Default);
/// <summary> Triangle strip </summary>
// Vertex order: 0=NW, 1=NE, 2=SW, 3=SE (see class summary).
private CustomVertex.PositionNormalTextured[] mesh = new CustomVertex.PositionNormalTextured[4];
/// <summary> Face normals for the two triangles (1=NW, 2-SE) </summary>
private Vector3 faceNormal1 = new Vector3();
public Vector3 FaceNormal1 { get { return faceNormal1; } }
private Vector3 faceNormal2 = new Vector3();
public Vector3 FaceNormal2 { get { return faceNormal2; } }
/// <summary> ref to the texture </summary>
private Texture texture;
/// <summary> Overrides of Renderable members - abs centre and radius for culling </summary>
private float radius;
private Vector3 absCentre;
public override float Radius { get { return radius; } }
public override Vector3 AbsCentre { get { return absCentre; } }
/// <summary>
/// Construct a tile. (Normals and textures must be set separately)
/// </summary>
/// <param name="x">position in the heightfield grid</param>
/// <param name="y">position in the heightfield grid</param>
/// <param name="tileWidth">width (X) of 1 tile in world coords</param>
/// <param name="tileHeight">height (Z) of 1 tile in world coords</param>
/// <param name="height">The 2D array of heights</param>
public Tile(int x, int y, float tileWidth, float tileHeight, ref float[,] height)
{
float wx = x*tileWidth; // world xy of bottom-left vertex
float wy = y*tileHeight;
// create the two triangles for this square
// NW corner (x, y+1)
mesh[0].X = wx;
mesh[0].Y = height[x,y+1];
mesh[0].Z = wy+tileHeight;
// NE corner (x+1, y+1)
mesh[1].X = wx+tileWidth;
mesh[1].Y = height[x+1,y+1];
mesh[1].Z = wy+tileHeight;
// SW corner (x, y)
mesh[2].X = wx;
mesh[2].Y = height[x,y];
mesh[2].Z = wy;
// SE corner (x+1, y)
mesh[3].X = wx+tileWidth;
mesh[3].Y = height[x+1,y];
mesh[3].Z = wy;
// Compute the face normals for the two triangles
faceNormal1 = ThreeDee.FaceNormal(this.mesh[0].Position, this.mesh[1].Position, this.mesh[2].Position);
faceNormal2 = ThreeDee.FaceNormal(this.mesh[3].Position, this.mesh[2].Position, this.mesh[1].Position);
// Initial vertex normals point in same direction as faces
// I'll compute proper ones when all the tiles exist
mesh[0].Normal = faceNormal1;
mesh[1].Normal = faceNormal1;
mesh[2].Normal = faceNormal1;
mesh[3].Normal = faceNormal2;
// Calculate centre and "radius" for culling operations
// NOTE(review): this evaluates to 2*NE - SW, i.e. it extrapolates past the NE
// corner rather than averaging; presumably the intended mid-point height was
// (mesh[1].Y + mesh[2].Y) / 2 - confirm before relying on a tight cull sphere.
float midHeight = (mesh[1].Y - mesh[2].Y) + mesh[1].Y; // height at mid-point
float w = tileWidth / 2.0f;
float h = tileHeight / 2.0f;
absCentre = new Vector3(wx+w,midHeight,wy+h);
radius = (float)Math.Sqrt(w*w + h*h); // dist from centre to corner
// Register the tile with the map
Map.Add(this);
}
/// <summary>
/// IDispose interface
/// </summary>
// NOTE: no-op apart from logging; the shared static Batch buffer is
// intentionally not released per-tile.
public override void Dispose()
{
// TODO: dispose of resources
Debug.WriteLine("Disposing of tile resources ");
}
/// <summary>
/// Define the tile texture
/// </summary>
/// <param name="t">texture to apply; each quad carries one complete copy of it</param>
public void SetTexture(Texture t)
{
// Set the texture to use
texture = t;
// define the texture coordinates
// Assume each quad has a complete texture on it
mesh[0].Tu = 0.0f;
mesh[0].Tv = 0.0f;
mesh[1].Tu = 1.0f;
mesh[1].Tv = 0.0f;
mesh[2].Tu = 0.0f;
mesh[2].Tv = 1.0f;
mesh[3].Tu = 1.0f;
mesh[3].Tv = 1.0f;
}
/// <summary>
/// Set the vertex normals for the triangles in this tile.
/// Each vertex normal is the average of the face normals for surrounding triangles
/// (some of which are in different tiles)
/// </summary>
/// <param name="tile">the array of tiles</param>
/// <param name="x">our location in the array</param>
/// <param name="y"></param>
// NOTE: indexes tile[x-1..x+1, y-1..y+1], so callers must only pass interior
// coordinates (1 <= x,y <= gridSize-2); Terrain's loops do exactly that.
public static void SetVertexNormal(Tile[,] tile, int x, int y)
{
// NW vertex
// Average of the six triangle faces that meet at this vertex;
// Vector3.Normalize of the sum gives the same direction as the mean.
tile[x,y].mesh[0].Normal = Vector3.Normalize(
tile[x-1,y-1].FaceNormal2
+ tile[x,y-1].FaceNormal1
+ tile[x,y-1].FaceNormal2
+ tile[x,y].FaceNormal1
+ tile[x-1,y].FaceNormal2
+ tile[x-1,y].FaceNormal1
);
// NE vertex
tile[x,y].mesh[1].Normal = Vector3.Normalize(
tile[x,y-1].FaceNormal2
+ tile[x+1,y-1].FaceNormal1
+ tile[x+1,y-1].FaceNormal2
+ tile[x+1,y].FaceNormal1
+ tile[x,y].FaceNormal2
+ tile[x,y].FaceNormal1
);
// SW vertex
tile[x,y].mesh[2].Normal = Vector3.Normalize(
tile[x-1,y].FaceNormal2
+ tile[x,y].FaceNormal1
+ tile[x,y].FaceNormal2
+ tile[x,y+1].FaceNormal1
+ tile[x-1,y+1].FaceNormal2
+ tile[x-1,y+1].FaceNormal1
);
// SE vertex
tile[x,y].mesh[3].Normal = Vector3.Normalize(
tile[x,y].FaceNormal2
+ tile[x+1,y].FaceNormal1
+ tile[x+1,y].FaceNormal2
+ tile[x+1,y+1].FaceNormal1
+ tile[x,y+1].FaceNormal2
+ tile[x,y+1].FaceNormal1
);
}
/// <summary> Per-frame update hook from Renderable; tiles are static so this is empty. </summary>
public override void Update()
{
// TODO: Add Tile.Update implementation
}
/// <summary>
/// I'm visible, so render me. In the case of tiles, we don't actually render now. Instead we
/// add our two triangles to a batch and render it at the end (or when it gets full). This is
/// much more efficient
/// </summary>
public override void Render()
{
///// DebugTile();
// add your two triangles (as separate triangles, not strip)
// to the video memory buffer
// NOTE(review): in Managed DirectX, VertexBuffer.Lock takes a BYTE offset;
// passing the vertex count NumInBatch looks suspicious - confirm it should
// not be NumInBatch * vertex stride. The returned GraphicsStream is also
// never disposed - verify Unlock alone is sufficient here.
GraphicsStream stream = Batch.Lock(NumInBatch, 0, LockFlags.None);
stream.Write(mesh[0]);
stream.Write(mesh[1]);
stream.Write(mesh[2]);
stream.Write(mesh[3]); // second triangle uses two points from first
stream.Write(mesh[2]);
stream.Write(mesh[1]);
Batch.Unlock();
NumInBatch += 6; // 6 more vertices have been added
// If there's no more room in the triangle batch, render the batch to the screen
// and clear it
if (NumInBatch>=BATCHSIZE)
{
RenderBatch();
}
}
/// <summary>
/// Render the current batch of visible tiles
/// </summary>
// Draws NumInBatch/3 triangles accumulated by Render(), then resets the
// counter so the next frame (or overflow flush) starts from the beginning.
public static void RenderBatch()
{
try
{
if (NumInBatch!=0) // if we haven't already sent this batch...
{
// Set world matrix for no transformation
Engine.Device.Transform.World = Matrix.Identity;
// Set expected vertex format
Engine.Device.VertexFormat = CustomVertex.PositionNormalTextured.Format;
// Create a material
Material material = new Material();
material.Ambient = Color.White;
material.Diffuse = Color.White;
Engine.Device.Material = material;
// Set texture - THIS ISN'T GOING TO WORK WHEN I HAVE MULTIPLE TEXTURES!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Engine.Device.SetTexture(0,Terrain.texture);
// Use linear filtering on the magnified texels in mipmap
Engine.Device.SamplerState[0].MagFilter = TextureFilter.Linear;
// Draw the triangles
Engine.Device.SetStreamSource(0,Batch,0);
Engine.Device.DrawPrimitives(PrimitiveType.TriangleList, 0, NumInBatch / 3);
}
NumInBatch = 0; // start filling from beginning next time
}
// NOTE(review): 'e' is unused (compiler warning); the rethrow correctly uses
// bare 'throw;' so the stack trace is preserved.
catch (Exception e)
{
Debug.WriteLine("unable to render terrain batch");
throw;
}
}
/// <summary> Dump this tile's geometry and face normals to the debug output. </summary>
public void DebugTile()
{
Debug.WriteLine("Tri1 = "+mesh[0].X+","+mesh[0].Z+" - "+mesh[1].X+","+mesh[1].Z+" - "+mesh[2].X+","+mesh[2].Z);
Debug.WriteLine("Tri2 = "+mesh[3].X+","+mesh[3].Z+" - "+mesh[2].X+","+mesh[2].Z+" - "+mesh[1].X+","+mesh[1].Z);
Debug.WriteLine("heights1 = "+mesh[0].Y+" - "+mesh[1].Y+" - "+mesh[2].Y+" - ");
Debug.WriteLine("heights2 = "+mesh[3].Y+" - "+mesh[2].Y+" - "+mesh[1].Y+" - ");
Debug.WriteLine("FaceNorm1 = "+FaceNormal1.X+","+FaceNormal1.Y+","+FaceNormal1.Z);
Debug.WriteLine("FaceNorm2 = "+FaceNormal2.X+","+FaceNormal2.Y+","+FaceNormal2.Z);
}
}
/// <summary>
/// The terrain
/// </summary>
public class Terrain : IDisposable
{
    /// <summary> Max terrain height (height in world coords when colour in heightmap = 255) </summary>
    private const float MAXHEIGHT = 8.0f;
    /// <summary> The library of terrain textures (not yet implemented) </summary>
    private Texture[] texturelib = null;
    /// <summary> TEMP SINGLE TEXTURE UNTIL LIBRARY IS WRITTEN! </summary>
    public static Texture texture = null;
    /// <summary> The 2D array of tiles </summary>
    private Tile[,] tile = null;
    /// <summary> size of one tile in world coords </summary>
    private float tileWidth = 0;
    private float tileHeight = 0;
    /// <summary> Size of tile grid (one less than the heightfield grid in each axis) </summary>
    private int gridWidth = 0;
    private int gridHeight = 0;
    /// <summary>
    /// Build the terrain from "heightfield.bmp": create the tile grid, smooth
    /// the vertex normals, and hook texture loading to device resets.
    /// </summary>
    public Terrain()
    {
        float[,] height = null;
        // Load the height map
        using (Bitmap bmp = (Bitmap)Bitmap.FromFile(FileResource.Fsp("heightfield.bmp")))
        {
            // Scale is determined by the size of the heightfield & the size of the map
            tileWidth = Map.MapWidth / bmp.Width;
            tileHeight = Map.MapHeight / bmp.Height;
            // The grid of tiles will be 1 smaller than the grid of heights
            // (last height = RH side of last tile).
            // BUGFIX: gridWidth was previously computed as bmp.Height-1, which
            // silently broke any non-square heightfield; it must come from the width.
            gridWidth = bmp.Width - 1;
            gridHeight = bmp.Height - 1;
            // Create the blank tiles
            tile = new Tile[gridWidth, gridHeight];
            // get heightmap into a temp array, for faster access
            height = new float[bmp.Width, bmp.Height];
            for (int y = 0; y < bmp.Height; y++)
            {
                for (int x = 0; x < bmp.Width; x++)
                {
                    /////////////////////// height[x,y] = (float)bmp.GetPixel(x,y).R / 256.0f * MAXHEIGHT;
                    height[x, y] = 0; // TEMP: flat terrain until pixel heights are re-enabled
                }
            }
        } // dispose of the bitmap
        // Create the tiles and define their extents and heights
        for (int y = 0; y < gridHeight; y++)
        {
            for (int x = 0; x < gridWidth; x++)
            {
                tile[x, y] = new Tile(x, y, tileWidth, tileHeight, ref height);
            }
        }
        // Now that the triangles exist, define the vertex normals, by averaging the
        // surface normals of surrounding triangles. Only interior tiles: Tile.SetVertexNormal
        // reads all eight neighbours, so the border is skipped.
        for (int y = 1; y < gridHeight - 1; y++)
        {
            for (int x = 1; x < gridWidth - 1; x++)
            {
                Tile.SetVertexNormal(tile, x, y);
            }
        }
        /// TODO: Load the texture map here & create the texture library,
        /// then set the tile textures
        // Reload resources on reset of device
        Engine.Device.DeviceReset += new System.EventHandler(this.OnReset);
        // Load them for the first time now
        OnReset(null, null);
    }
    /// <summary>
    /// Device-reset handler: (re)load GPU-resident resources.
    /// Also called once from the constructor with (null, null).
    /// </summary>
    public void OnReset(object sender, EventArgs e)
    {
        Debug.WriteLine("Terrain.Reset()");
        /// TODO: Load the whole library of textures!
        texture = TextureLoader.FromFile(Engine.Device, FileResource.Fsp("ground.bmp"));
        texture.GenerateMipSubLevels();
    }
    /// <summary>
    /// IDispose interface
    /// </summary>
    public void Dispose()
    {
        // TODO: dispose of resources
        Debug.WriteLine("Disposing of terrain resources ");
    }
    /// <summary>
    /// Render the only/latest batch of unrendered tiles
    /// MUST BE CALLED AFTER MAP.RENDER()!!!
    /// </summary>
    public void Render()
    {
        Tile.RenderBatch();
    }
    /// <summary>
    /// Return the height of the terrain at x,y
    /// </summary>
    /// <param name="x"></param>
    /// <param name="y"></param>
    /// <returns>terrain altitude in world coords (stubbed to 0 for now)</returns>
    public static float AltitudeAt(float x, float y)
    {
        return 0; // TEMP!!!!!!
    }
    /// <summary>
    /// Return the height of the water surface
    /// </summary>
    /// <returns>water surface height in world coords (stubbed constant)</returns>
    public static float SurfaceHeight()
    {
        return 30.0f; // TEMP !!!!!!
    }
}
}
| |
using System;
namespace POGOLib.Official.Util.Encryption.PokeHash
{
public static class TwoFish
{
public static int BLOCK_SIZE = 16;
private static int ROUNDS = 16;
private static int INPUT_WHITEN = 0;
private static int OUTPUT_WHITEN = INPUT_WHITEN + BLOCK_SIZE / 4;
private static int ROUND_SUBKEYS = OUTPUT_WHITEN + BLOCK_SIZE / 4;
private static int SK_STEP = 0x02020202;
private static int SK_BUMP = 0x01010101;
private static int SK_ROTL = 9;
/**
* Fixed 8x8 permutation S-boxes
*/
private static byte[][] P = new byte[2][]
{
new byte[256]{
(byte) 0xA9, (byte) 0x67, (byte) 0xB3, (byte) 0xE8,
(byte) 0x04, (byte) 0xFD, (byte) 0xA3, (byte) 0x76,
(byte) 0x9A, (byte) 0x92, (byte) 0x80, (byte) 0x78,
(byte) 0xE4, (byte) 0xDD, (byte) 0xD1, (byte) 0x38,
(byte) 0x0D, (byte) 0xC6, (byte) 0x35, (byte) 0x98,
(byte) 0x18, (byte) 0xF7, (byte) 0xEC, (byte) 0x6C,
(byte) 0x43, (byte) 0x75, (byte) 0x37, (byte) 0x26,
(byte) 0xFA, (byte) 0x13, (byte) 0x94, (byte) 0x48,
(byte) 0xF2, (byte) 0xD0, (byte) 0x8B, (byte) 0x30,
(byte) 0x84, (byte) 0x54, (byte) 0xDF, (byte) 0x23,
(byte) 0x19, (byte) 0x5B, (byte) 0x3D, (byte) 0x59,
(byte) 0xF3, (byte) 0xAE, (byte) 0xA2, (byte) 0x82,
(byte) 0x63, (byte) 0x01, (byte) 0x83, (byte) 0x2E,
(byte) 0xD9, (byte) 0x51, (byte) 0x9B, (byte) 0x7C,
(byte) 0xA6, (byte) 0xEB, (byte) 0xA5, (byte) 0xBE,
(byte) 0x16, (byte) 0x0C, (byte) 0xE3, (byte) 0x61,
(byte) 0xC0, (byte) 0x8C, (byte) 0x3A, (byte) 0xF5,
(byte) 0x73, (byte) 0x2C, (byte) 0x25, (byte) 0x0B,
(byte) 0xBB, (byte) 0x4E, (byte) 0x89, (byte) 0x6B,
(byte) 0x53, (byte) 0x6A, (byte) 0xB4, (byte) 0xF1,
(byte) 0xE1, (byte) 0xE6, (byte) 0xBD, (byte) 0x45,
(byte) 0xE2, (byte) 0xF4, (byte) 0xB6, (byte) 0x66,
(byte) 0xCC, (byte) 0x95, (byte) 0x03, (byte) 0x56,
(byte) 0xD4, (byte) 0x1C, (byte) 0x1E, (byte) 0xD7,
(byte) 0xFB, (byte) 0xC3, (byte) 0x8E, (byte) 0xB5,
(byte) 0xE9, (byte) 0xCF, (byte) 0xBF, (byte) 0xBA,
(byte) 0xEA, (byte) 0x77, (byte) 0x39, (byte) 0xAF,
(byte) 0x33, (byte) 0xC9, (byte) 0x62, (byte) 0x71,
(byte) 0x81, (byte) 0x79, (byte) 0x09, (byte) 0xAD,
(byte) 0x24, (byte) 0xCD, (byte) 0xF9, (byte) 0xD8,
(byte) 0xE5, (byte) 0xC5, (byte) 0xB9, (byte) 0x4D,
(byte) 0x44, (byte) 0x08, (byte) 0x86, (byte) 0xE7,
(byte) 0xA1, (byte) 0x1D, (byte) 0xAA, (byte) 0xED,
(byte) 0x06, (byte) 0x70, (byte) 0xB2, (byte) 0xD2,
(byte) 0x41, (byte) 0x7B, (byte) 0xA0, (byte) 0x11,
(byte) 0x31, (byte) 0xC2, (byte) 0x27, (byte) 0x90,
(byte) 0x20, (byte) 0xF6, (byte) 0x60, (byte) 0xFF,
(byte) 0x96, (byte) 0x5C, (byte) 0xB1, (byte) 0xAB,
(byte) 0x9E, (byte) 0x9C, (byte) 0x52, (byte) 0x1B,
(byte) 0x5F, (byte) 0x93, (byte) 0x0A, (byte) 0xEF,
(byte) 0x91, (byte) 0x85, (byte) 0x49, (byte) 0xEE,
(byte) 0x2D, (byte) 0x4F, (byte) 0x8F, (byte) 0x3B,
(byte) 0x47, (byte) 0x87, (byte) 0x6D, (byte) 0x46,
(byte) 0xD6, (byte) 0x3E, (byte) 0x69, (byte) 0x64,
(byte) 0x2A, (byte) 0xCE, (byte) 0xCB, (byte) 0x2F,
(byte) 0xFC, (byte) 0x97, (byte) 0x05, (byte) 0x7A,
(byte) 0xAC, (byte) 0x7F, (byte) 0xD5, (byte) 0x1A,
(byte) 0x4B, (byte) 0x0E, (byte) 0xA7, (byte) 0x5A,
(byte) 0x28, (byte) 0x14, (byte) 0x3F, (byte) 0x29,
(byte) 0x88, (byte) 0x3C, (byte) 0x4C, (byte) 0x02,
(byte) 0xB8, (byte) 0xDA, (byte) 0xB0, (byte) 0x17,
(byte) 0x55, (byte) 0x1F, (byte) 0x8A, (byte) 0x7D,
(byte) 0x57, (byte) 0xC7, (byte) 0x8D, (byte) 0x74,
(byte) 0xB7, (byte) 0xC4, (byte) 0x9F, (byte) 0x72,
(byte) 0x7E, (byte) 0x15, (byte) 0x22, (byte) 0x12,
(byte) 0x58, (byte) 0x07, (byte) 0x99, (byte) 0x34,
(byte) 0x6E, (byte) 0x50, (byte) 0xDE, (byte) 0x68,
(byte) 0x65, (byte) 0xBC, (byte) 0xDB, (byte) 0xF8,
(byte) 0xC8, (byte) 0xA8, (byte) 0x2B, (byte) 0x40,
(byte) 0xDC, (byte) 0xFE, (byte) 0x32, (byte) 0xA4,
(byte) 0xCA, (byte) 0x10, (byte) 0x21, (byte) 0xF0,
(byte) 0xD3, (byte) 0x5D, (byte) 0x0F, (byte) 0x00,
(byte) 0x6F, (byte) 0x9D, (byte) 0x36, (byte) 0x42,
(byte) 0x4A, (byte) 0x5E, (byte) 0xC1, (byte) 0xE0
},
new byte[256] {
(byte) 0x75, (byte) 0xF3, (byte) 0xC6, (byte) 0xF4,
(byte) 0xDB, (byte) 0x7B, (byte) 0xFB, (byte) 0xC8,
(byte) 0x4A, (byte) 0xD3, (byte) 0xE6, (byte) 0x6B,
(byte) 0x45, (byte) 0x7D, (byte) 0xE8, (byte) 0x4B,
(byte) 0xD6, (byte) 0x32, (byte) 0xD8, (byte) 0xFD,
(byte) 0x37, (byte) 0x71, (byte) 0xF1, (byte) 0xE1,
(byte) 0x30, (byte) 0x0F, (byte) 0xF8, (byte) 0x1B,
(byte) 0x87, (byte) 0xFA, (byte) 0x06, (byte) 0x3F,
(byte) 0x5E, (byte) 0xBA, (byte) 0xAE, (byte) 0x5B,
(byte) 0x8A, (byte) 0x00, (byte) 0xBC, (byte) 0x9D,
(byte) 0x6D, (byte) 0xC1, (byte) 0xB1, (byte) 0x0E,
(byte) 0x80, (byte) 0x5D, (byte) 0xD2, (byte) 0xD5,
(byte) 0xA0, (byte) 0x84, (byte) 0x07, (byte) 0x14,
(byte) 0xB5, (byte) 0x90, (byte) 0x2C, (byte) 0xA3,
(byte) 0xB2, (byte) 0x73, (byte) 0x4C, (byte) 0x54,
(byte) 0x92, (byte) 0x74, (byte) 0x36, (byte) 0x51,
(byte) 0x38, (byte) 0xB0, (byte) 0xBD, (byte) 0x5A,
(byte) 0xFC, (byte) 0x60, (byte) 0x62, (byte) 0x96,
(byte) 0x6C, (byte) 0x42, (byte) 0xF7, (byte) 0x10,
(byte) 0x7C, (byte) 0x28, (byte) 0x27, (byte) 0x8C,
(byte) 0x13, (byte) 0x95, (byte) 0x9C, (byte) 0xC7,
(byte) 0x24, (byte) 0x46, (byte) 0x3B, (byte) 0x70,
(byte) 0xCA, (byte) 0xE3, (byte) 0x85, (byte) 0xCB,
(byte) 0x11, (byte) 0xD0, (byte) 0x93, (byte) 0xB8,
(byte) 0xA6, (byte) 0x83, (byte) 0x20, (byte) 0xFF,
(byte) 0x9F, (byte) 0x77, (byte) 0xC3, (byte) 0xCC,
(byte) 0x03, (byte) 0x6F, (byte) 0x08, (byte) 0xBF,
(byte) 0x40, (byte) 0xE7, (byte) 0x2B, (byte) 0xE2,
(byte) 0x79, (byte) 0x0C, (byte) 0xAA, (byte) 0x82,
(byte) 0x41, (byte) 0x3A, (byte) 0xEA, (byte) 0xB9,
(byte) 0xE4, (byte) 0x9A, (byte) 0xA4, (byte) 0x97,
(byte) 0x7E, (byte) 0xDA, (byte) 0x7A, (byte) 0x17,
(byte) 0x66, (byte) 0x94, (byte) 0xA1, (byte) 0x1D,
(byte) 0x3D, (byte) 0xF0, (byte) 0xDE, (byte) 0xB3,
(byte) 0x0B, (byte) 0x72, (byte) 0xA7, (byte) 0x1C,
(byte) 0xEF, (byte) 0xD1, (byte) 0x53, (byte) 0x3E,
(byte) 0x8F, (byte) 0x33, (byte) 0x26, (byte) 0x5F,
(byte) 0xEC, (byte) 0x76, (byte) 0x2A, (byte) 0x49,
(byte) 0x81, (byte) 0x88, (byte) 0xEE, (byte) 0x21,
(byte) 0xC4, (byte) 0x1A, (byte) 0xEB, (byte) 0xD9,
(byte) 0xC5, (byte) 0x39, (byte) 0x99, (byte) 0xCD,
(byte) 0xAD, (byte) 0x31, (byte) 0x8B, (byte) 0x01,
(byte) 0x18, (byte) 0x23, (byte) 0xDD, (byte) 0x1F,
(byte) 0x4E, (byte) 0x2D, (byte) 0xF9, (byte) 0x48,
(byte) 0x4F, (byte) 0xF2, (byte) 0x65, (byte) 0x8E,
(byte) 0x78, (byte) 0x5C, (byte) 0x58, (byte) 0x19,
(byte) 0x8D, (byte) 0xE5, (byte) 0x98, (byte) 0x57,
(byte) 0x67, (byte) 0x7F, (byte) 0x05, (byte) 0x64,
(byte) 0xAF, (byte) 0x63, (byte) 0xB6, (byte) 0xFE,
(byte) 0xF5, (byte) 0xB7, (byte) 0x3C, (byte) 0xA5,
(byte) 0xCE, (byte) 0xE9, (byte) 0x68, (byte) 0x44,
(byte) 0xE0, (byte) 0x4D, (byte) 0x43, (byte) 0x69,
(byte) 0x29, (byte) 0x2E, (byte) 0xAC, (byte) 0x15,
(byte) 0x59, (byte) 0xA8, (byte) 0x0A, (byte) 0x9E,
(byte) 0x6E, (byte) 0x47, (byte) 0xDF, (byte) 0x34,
(byte) 0x35, (byte) 0x6A, (byte) 0xCF, (byte) 0xDC,
(byte) 0x22, (byte) 0xC9, (byte) 0xC0, (byte) 0x9B,
(byte) 0x89, (byte) 0xD4, (byte) 0xED, (byte) 0xAB,
(byte) 0x12, (byte) 0xA2, (byte) 0x0D, (byte) 0x52,
(byte) 0xBB, (byte) 0x02, (byte) 0x2F, (byte) 0xA9,
(byte) 0xD7, (byte) 0x61, (byte) 0x1E, (byte) 0xB4,
(byte) 0x50, (byte) 0x04, (byte) 0xF6, (byte) 0xC2,
(byte) 0x16, (byte) 0x25, (byte) 0x86, (byte) 0x56,
(byte) 0x55, (byte) 0x09, (byte) 0xBE, (byte) 0x91
}
};
/**
* Define the fixed p0/p1 permutations used in keyed S-box lookup.
* By changing the following constant definitions, the S-boxes will
* automatically get changed in the Twofish engine.
*/
private static int P_00 = 1;
private static int P_01 = 0;
private static int P_02 = 0;
private static int P_03 = P_01 ^ 1;
private static int P_04 = 1;
private static int P_10 = 0;
private static int P_11 = 0;
private static int P_12 = 1;
private static int P_13 = P_11 ^ 1;
private static int P_14 = 0;
private static int P_20 = 1;
private static int P_21 = 1;
private static int P_22 = 0;
private static int P_23 = P_21 ^ 1;
private static int P_24 = 0;
private static int P_30 = 0;
private static int P_31 = 1;
private static int P_32 = 1;
private static int P_33 = P_31 ^ 1;
private static int P_34 = 1;
/**
* Primitive polynomial for GF(256)
*/
private static int GF256_FDBK_2 = 0x169 / 2;
private static int GF256_FDBK_4 = 0x169 / 4;
/**
* MDS matrix
*/
private static int[][] MDS = new int[4][];
private static int RS_GF_FDBK = 0x14D;
static TwoFish()
{
int[] m1 = new int[2];
int[] mxArray = new int[2];
int[] myArray = new int[2];
int first;
int second = 0;
for (int i = 0; i < MDS.Length; i++)
{
MDS[i] = new int[256];
}
for (first = 0; first < 256; first++)
{
second = P[0][first] & 0xFF;
m1[0] = second;
mxArray[0] = mxX(second) & 0xFF;
myArray[0] = mxY(second) & 0xFF;
second = P[1][first] & 0xFF;
m1[1] = second;
mxArray[1] = mxX(second) & 0xFF;
myArray[1] = mxY(second) & 0xFF;
MDS[0][first] = m1[P_00]
| mxArray[P_00] << 8
| myArray[P_00] << 16
| myArray[P_00] << 24;
MDS[1][first] = myArray[P_10]
| myArray[P_10] << 8
| mxArray[P_10] << 16
| m1[P_10] << 24;
MDS[2][first] = mxArray[P_20]
| myArray[P_20] << 8
| m1[P_20] << 16
| myArray[P_20] << 24;
MDS[3][first] = mxArray[P_30]
| m1[P_30] << 8
| myArray[P_30] << 16
| mxArray[P_30] << 24;
}
}
private static int lfsr1(int x)
{
return (x >> 1) ^ ((x & 0x01) != 0 ? GF256_FDBK_2 : 0);
}
private static int lfsr2(int x)
{
return (x >> 2) ^ ((x & 0x02) != 0 ? GF256_FDBK_2 : 0) ^ ((x & 0x01) != 0 ? GF256_FDBK_4 : 0);
}
private static int mxX(int x)
{
return x ^ lfsr2(x);
}
private static int mxY(int x)
{
return x ^ lfsr1(x) ^ lfsr2(x);
}
/**
* Expand a user-supplied key material into a session key.
*
* @param k The 64/128/192/256-bit user-key to use.
* @return This cipher's round keys.
* @throws InvalidKeyException If the key is invalid.
*/
public static object[] MakeKey(byte[] k)
{
if (k == null)
throw new Exception("Empty key");
int length = k.Length;
if (!(length == 8 || length == 16 || length == 24 || length == 32))
throw new Exception("Incorrect key length");
int k64Cnt = length / 8;
int subkeyCnt = ROUND_SUBKEYS + 2 * ROUNDS;
int[] k32e = new int[4];
int[] k32o = new int[4];
int[] sBoxKey = new int[4];
int i, j, offset = 0;
for (i = 0, j = k64Cnt - 1; i < 4 && offset < length; i++, j--)
{
k32e[i] = (k[offset++] & 0xFF)
| (k[offset++] & 0xFF) << 8
| (k[offset++] & 0xFF) << 16
| (k[offset++] & 0xFF) << 24;
k32o[i] = (k[offset++] & 0xFF)
| (k[offset++] & 0xFF) << 8
| (k[offset++] & 0xFF) << 16
| (k[offset++] & 0xFF) << 24;
sBoxKey[j] = rsMdsEncode(k32e[i], k32o[i]);
}
int q, A, B;
int[] subKeys = new int[subkeyCnt];
for (i = q = 0; i < subkeyCnt / 2; i++, q += SK_STEP)
{
A = f32(k64Cnt, q, k32e);
B = f32(k64Cnt, q + SK_BUMP, k32o);
B = B << 8 | RightUShift(B, 24);
A += B;
subKeys[2 * i] = A;
A += B;
subKeys[2 * i + 1] = A << SK_ROTL | RightUShift(A, (32 - SK_ROTL));
}
int k0 = sBoxKey[0];
int k1 = sBoxKey[1];
int k2 = sBoxKey[2];
int k3 = sBoxKey[3];
int b0, b1, b2, b3;
int[] sBox = new int[4 * 256];
for (i = 0; i < 256; i++)
{
b0 = b1 = b2 = b3 = i;
int val = k64Cnt & 3;
if (val == 1)
{
sBox[2 * i] = MDS[0][(P[P_01][b0] & 0xFF) ^ _b0(k0)];
sBox[2 * i + 1] = MDS[1][(P[P_11][b1] & 0xFF) ^ _b1(k0)];
sBox[0x200 + 2 * i] = MDS[2][(P[P_21][b2] & 0xFF) ^ _b2(k0)];
sBox[0x200 + 2 * i + 1] = MDS[3][(P[P_31][b3] & 0xFF) ^ _b3(k0)];
}
switch (k64Cnt & 3)
{
case 1:
sBox[2 * i] = MDS[0][(P[P_01][b0] & 0xFF) ^ _b0(k0)];
sBox[2 * i + 1] = MDS[1][(P[P_11][b1] & 0xFF) ^ _b1(k0)];
sBox[0x200 + 2 * i] = MDS[2][(P[P_21][b2] & 0xFF) ^ _b2(k0)];
sBox[0x200 + 2 * i + 1] = MDS[3][(P[P_31][b3] & 0xFF) ^ _b3(k0)];
break;
case 0:
b0 = (P[P_04][b0] & 0xFF) ^ _b0(k3);
b1 = (P[P_14][b1] & 0xFF) ^ _b1(k3);
b2 = (P[P_24][b2] & 0xFF) ^ _b2(k3);
b3 = (P[P_34][b3] & 0xFF) ^ _b3(k3);
b0 = (P[P_03][b0] & 0xFF) ^ _b0(k2);
b1 = (P[P_13][b1] & 0xFF) ^ _b1(k2);
b2 = (P[P_23][b2] & 0xFF) ^ _b2(k2);
b3 = (P[P_33][b3] & 0xFF) ^ _b3(k2);
sBox[2 * i] = MDS[0][(P[P_01][(P[P_02][b0] & 0xFF) ^ _b0(k1)] & 0xFF) ^ _b0(k0)];
sBox[2 * i + 1] = MDS[1][(P[P_11][(P[P_12][b1] & 0xFF) ^ _b1(k1)] & 0xFF) ^ _b1(k0)];
sBox[0x200 + 2 * i] = MDS[2][(P[P_21][(P[P_22][b2] & 0xFF) ^ _b2(k1)] & 0xFF) ^ _b2(k0)];
sBox[0x200 + 2 * i + 1] = MDS[3][(P[P_31][(P[P_32][b3] & 0xFF) ^ _b3(k1)] & 0xFF) ^ _b3(k0)];
break;
case 3:
b0 = (P[P_03][b0] & 0xFF) ^ _b0(k2);
b1 = (P[P_13][b1] & 0xFF) ^ _b1(k2);
b2 = (P[P_23][b2] & 0xFF) ^ _b2(k2);
b3 = (P[P_33][b3] & 0xFF) ^ _b3(k2);
sBox[2 * i] = MDS[0][(P[P_01][(P[P_02][b0] & 0xFF) ^ _b0(k1)] & 0xFF) ^ _b0(k0)];
sBox[2 * i + 1] = MDS[1][(P[P_11][(P[P_12][b1] & 0xFF) ^ _b1(k1)] & 0xFF) ^ _b1(k0)];
sBox[0x200 + 2 * i] = MDS[2][(P[P_21][(P[P_22][b2] & 0xFF) ^ _b2(k1)] & 0xFF) ^ _b2(k0)];
sBox[0x200 + 2 * i + 1] = MDS[3][(P[P_31][(P[P_32][b3] & 0xFF) ^ _b3(k1)] & 0xFF) ^ _b3(k0)];
break;
case 2:
sBox[2 * i] = MDS[0][(P[P_01][(P[P_02][b0] & 0xFF) ^ _b0(k1)] & 0xFF) ^ _b0(k0)];
sBox[2 * i + 1] = MDS[1][(P[P_11][(P[P_12][b1] & 0xFF) ^ _b1(k1)] & 0xFF) ^ _b1(k0)];
sBox[0x200 + 2 * i] = MDS[2][(P[P_21][(P[P_22][b2] & 0xFF) ^ _b2(k1)] & 0xFF) ^ _b2(k0)];
sBox[0x200 + 2 * i + 1] = MDS[3][(P[P_31][(P[P_32][b3] & 0xFF) ^ _b3(k1)] & 0xFF) ^ _b3(k0)];
break;
}
}
return new object[] { sBox, subKeys };
}
public static int RightUShift(int val, int shift)
{
return (int)((uint)val >> shift);
}
/**
* Encrypt exactly one block of plaintext.
*
* @param in The plaintext.
* @param inOffset Index of in from which to start considering data.
* @param sessionKey The session key to use for encryption.
* @return The ciphertext generated from a plaintext using the session key.
*/
public static byte[] blockEncrypt(byte[] bArray, int inOffset, Object sessionKey)
{
Object[] sk = (Object[])sessionKey;
int[] sBox = (int[])sk[0];
int[] sKey = (int[])sk[1];
int x0 = (bArray[inOffset++] & 0xFF)
| (bArray[inOffset++] & 0xFF) << 8
| (bArray[inOffset++] & 0xFF) << 16
| (bArray[inOffset++] & 0xFF) << 24;
int x1 = (bArray[inOffset++] & 0xFF)
| (bArray[inOffset++] & 0xFF) << 8
| (bArray[inOffset++] & 0xFF) << 16
| (bArray[inOffset++] & 0xFF) << 24;
int x2 = (bArray[inOffset++] & 0xFF)
| (bArray[inOffset++] & 0xFF) << 8
| (bArray[inOffset++] & 0xFF) << 16
| (bArray[inOffset++] & 0xFF) << 24;
int x3 = (bArray[inOffset++] & 0xFF)
| (bArray[inOffset++] & 0xFF) << 8
| (bArray[inOffset++] & 0xFF) << 16
| (bArray[inOffset++] & 0xFF) << 24;
x0 ^= sKey[INPUT_WHITEN];
x1 ^= sKey[INPUT_WHITEN + 1];
x2 ^= sKey[INPUT_WHITEN + 2];
x3 ^= sKey[INPUT_WHITEN + 3];
int t0, t1;
int k = ROUND_SUBKEYS;
for (int R = 0; R < ROUNDS; R += 2)
{
t0 = fe32(sBox, x0, 0);
t1 = fe32(sBox, x1, 3);
x2 ^= t0 + t1 + sKey[k++];
x2 = RightUShift(x2, 1) | x2 << 31;
x3 = x3 << 1 | RightUShift(x3, 31);
x3 ^= t0 + 2 * t1 + sKey[k++];
t0 = fe32(sBox, x2, 0);
t1 = fe32(sBox, x3, 3);
x0 ^= t0 + t1 + sKey[k++];
x0 = RightUShift(x0, 1) | x0 << 31;
x1 = x1 << 1 | RightUShift(x1, 31);
x1 ^= t0 + 2 * t1 + sKey[k++];
}
x2 ^= sKey[OUTPUT_WHITEN];
x3 ^= sKey[OUTPUT_WHITEN + 1];
x0 ^= sKey[OUTPUT_WHITEN + 2];
x1 ^= sKey[OUTPUT_WHITEN + 3];
return new byte[]{
(byte) x2, (byte) RightUShift(x2, 8), (byte) RightUShift(x2, 16), (byte) RightUShift(x2, 24),
(byte) x3, (byte) RightUShift(x3, 8), (byte) RightUShift(x3, 16), (byte) RightUShift(x3, 24),
(byte) x0, (byte) RightUShift(x0, 8), (byte) RightUShift(x0, 16), (byte) RightUShift(x0, 24),
(byte) x1, (byte) RightUShift(x1, 8), (byte) RightUShift(x1, 16), (byte) RightUShift(x1, 24),
};
}
private static int _b0(int x) { return x & 0xFF; }
private static int _b1(int x) { return RightUShift(x, 8) & 0xFF; }
private static int _b2(int x) { return RightUShift(x, 16) & 0xFF; }
private static int _b3(int x) { return RightUShift(x, 24) & 0xFF; }
/**
* Use (12, 8) Reed-Solomon code over GF(256) to produce a key S-box
* 32-bit entity from two key material 32-bit entities.
*
* @param k0 1st 32-bit entity.
* @param k1 2nd 32-bit entity.
* @return Remainder polynomial generated using RS code
*/
private static int rsMdsEncode(int k0, int k1)
{
int r = k1;
for (int i = 0; i < 4; i++)
{
r = rsRem(r);
}
r ^= k0;
for (int i = 0; i < 4; i++)
{
r = rsRem(r);
}
return r;
}
private static int rsRem(int x)
{
int b = RightUShift(x, 24) & 0xFF;
int g2 = ((b << 1) ^ ((b & 0x80) != 0 ? RS_GF_FDBK : 0)) & 0xFF;
int g3 = RightUShift(b, 1) ^ ((b & 0x01) != 0 ? RightUShift(RS_GF_FDBK, 1) : 0) ^ g2;
int result = (x << 8) ^ (g3 << 24) ^ (g2 << 16) ^ (g3 << 8) ^ b;
return result;
}
// The keyed "h" permutation of Twofish: passes each byte of x through a
// chain of fixed q-box permutations (P tables), XOR-ing in key-word bytes
// between stages, then combines the results through the MDS matrix tables.
// The number of q-box stages depends on how many 64-bit key words exist
// (k64Cnt): each switch case below is a manual unroll of the reference
// implementation's fall-through cases, so the longer-key cases repeat the
// shorter-key tail verbatim.
private static int f32(int k64Cnt, int x, int[] k32)
{
// Split the input word into its four bytes.
int b0 = _b0(x);
int b1 = _b1(x);
int b2 = _b2(x);
int b3 = _b3(x);
int k0 = k32[0];
int k1 = k32[1];
int k2 = k32[2];
int k3 = k32[3];
int result = 0;
switch (k64Cnt & 3)
{
case 1:
// One key word: a single q-box pass keyed by k0.
result =
MDS[0][(P[P_01][b0] & 0xFF)
^ _b0(k0)]
^ MDS[1][(P[P_11][b1] & 0xFF)
^ _b1(k0)]
^ MDS[2][(P[P_21][b2] & 0xFF)
^ _b2(k0)]
^ MDS[3][(P[P_31][b3] & 0xFF)
^ _b3(k0)];
break;
case 0:
// Zero here means all four key words are in use: two extra
// leading stages (keyed by k3 then k2) before the common
// two-stage tail shared with case 2.
b0 = (P[P_04][b0] & 0xFF) ^ _b0(k3);
b1 = (P[P_14][b1] & 0xFF) ^ _b1(k3);
b2 = (P[P_24][b2] & 0xFF) ^ _b2(k3);
b3 = (P[P_34][b3] & 0xFF) ^ _b3(k3);
b0 = (P[P_03][b0] & 0xFF) ^ _b0(k2);
b1 = (P[P_13][b1] & 0xFF) ^ _b1(k2);
b2 = (P[P_23][b2] & 0xFF) ^ _b2(k2);
b3 = (P[P_33][b3] & 0xFF) ^ _b3(k2);
result =
MDS[0][(P[P_01][(P[P_02][b0] & 0xFF)
^ _b0(k1)] & 0xFF)
^ _b0(k0)]
^ MDS[1][(P[P_11][(P[P_12][b1] & 0xFF)
^ _b1(k1)] & 0xFF) ^ _b1(k0)]
^ MDS[2][(P[P_21][(P[P_22][b2] & 0xFF)
^ _b2(k1)] & 0xFF)
^ _b2(k0)]
^ MDS[3][(P[P_31][(P[P_32][b3] & 0xFF)
^ _b3(k1)] & 0xFF)
^ _b3(k0)];
break;
case 3:
// Three key words: one leading stage keyed by k2, then the
// common two-stage tail.
b0 = (P[P_03][b0] & 0xFF) ^ _b0(k2);
b1 = (P[P_13][b1] & 0xFF) ^ _b1(k2);
b2 = (P[P_23][b2] & 0xFF) ^ _b2(k2);
b3 = (P[P_33][b3] & 0xFF) ^ _b3(k2);
result =
MDS[0][(P[P_01][(P[P_02][b0] & 0xFF)
^ _b0(k1)] & 0xFF)
^ _b0(k0)]
^ MDS[1][(P[P_11][(P[P_12][b1] & 0xFF)
^ _b1(k1)] & 0xFF) ^ _b1(k0)]
^ MDS[2][(P[P_21][(P[P_22][b2] & 0xFF)
^ _b2(k1)] & 0xFF)
^ _b2(k0)]
^ MDS[3][(P[P_31][(P[P_32][b3] & 0xFF)
^ _b3(k1)] & 0xFF)
^ _b3(k0)];
break;
case 2:
// Two key words: just the common tail keyed by k1 then k0.
result =
MDS[0][(P[P_01][(P[P_02][b0] & 0xFF)
^ _b0(k1)] & 0xFF)
^ _b0(k0)]
^ MDS[1][(P[P_11][(P[P_12][b1] & 0xFF)
^ _b1(k1)] & 0xFF) ^ _b1(k0)]
^ MDS[2][(P[P_21][(P[P_22][b2] & 0xFF)
^ _b2(k1)] & 0xFF)
^ _b2(k0)]
^ MDS[3][(P[P_31][(P[P_32][b3] & 0xFF)
^ _b3(k1)] & 0xFF)
^ _b3(k0)];
break;
}
return result;
}
// One round-function lookup against the precomputed key-dependent S-box:
// selects four bytes of x starting at byte offset r (wrapping mod 4) and
// XORs the corresponding interleaved S-box entries together.
private static int fe32(int[] sBox, int x, int r)
{
int e0 = sBox[2 * b(x, r)];
int e1 = sBox[2 * b(x, r + 1) + 1];
int e2 = sBox[0x200 + 2 * b(x, r + 2)];
int e3 = sBox[0x200 + 2 * b(x, r + 3) + 1];
return e0 ^ e1 ^ e2 ^ e3;
}
// Returns byte (n mod 4) of x: 0 = least significant ... 3 = most significant.
private static int b(int x, int n)
{
int which = n % 4;
if (which == 0) return _b0(x);
if (which == 1) return _b1(x);
if (which == 2) return _b2(x);
if (which == 3) return _b3(x);
// Not reached for the non-negative n passed by fe32; mirrors the
// original switch, which fell through to 0 when no case matched.
return 0;
}
}
}
| |
using Loon.Core.Graphics.Opengl;
using Loon.Utils;
using Loon.Core.Input;
namespace Loon.Core.Graphics.Component {
// A clickable button component. It renders one of up to four textures
// depending on state (index 0 = idle, 1 = touch-over, 2 = pressed,
// 3 = disabled — see CreateUI) plus an optional centered text caption,
// and translates touch/keyboard input into Click callbacks.
public class LButton : LComponent {
// Optional caption drawn centered over the button texture.
private string text = null;
// over: pointer currently inside the button; pressed: button held down;
// exception: SetImages received an unsupported number of textures.
private bool over, pressed, exception;
// pressedTime: countdown (in Update ticks) for a keyboard-triggered press;
// type: number of textures supplied to SetImages.
private int pressedTime, offsetLeft, offsetTop, type;
private LFont font = LFont.GetDefaultFont();
private LColor fontColor = LColor.white;
// Convenience constructors; all funnel into the (LTexture[], ...) overload.
public LButton(string fileName):this(fileName, null, 0, 0) {
}
public LButton(string fileName, string text, int x, int y):this(new LTexture(fileName), text, x, y) {
}
// NOTE(review): this forwards the texture's width/height into the
// (img, text, row, col, x, y) overload's row/col slots, which also become
// the component's size via base(x, y, row, col) — confirm this matches the
// upstream Loon API's intent before changing.
public LButton(LTexture img, string text, int x, int y):this(img, text, img.GetWidth(), img.GetHeight(), x, y) {
}
public LButton(string fileName, int row, int col):this(fileName, null, row, col, 0, 0) {
}
public LButton(string fileName, string text, int row, int col, int x, int y):this(LTextures.LoadTexture(fileName), text, row, col, x, y) {
}
public LButton(LTexture img, string text, int row, int col, int x, int y):this(TextureUtils.GetSplitTextures(img, row, col), text, row, col, x, y) {
}
public LButton(LTexture[] img, string text, int row, int col, int x, int y):base(x, y, row, col) {
this.SetImages(img);
this.text = text;
}
// Text-only button (no textures).
public LButton(string text, int x, int y, int w, int h):base(x, y, w, h) {
this.text = text;
}
// Normalizes 1-4 supplied textures into the fixed 4-slot state array
// (idle / over / pressed / disabled), reusing the idle texture for any
// missing states. Any other count sets the exception flag and skips the
// UI update entirely.
public virtual void SetImages(params LTexture[] images)
{
LTexture[] buttons = new LTexture[4];
if (images != null) {
int size = images.Length;
this.type = size;
switch (size) {
case 1:
buttons[0] = images[0];
buttons[1] = images[0];
buttons[2] = images[0];
buttons[3] = images[0];
break;
case 2:
buttons[0] = images[0];
buttons[1] = images[1];
buttons[2] = images[0];
buttons[3] = images[0];
break;
case 3:
buttons[0] = images[0];
buttons[1] = images[1];
buttons[2] = images[2];
buttons[3] = images[0];
break;
case 4:
buttons = images;
break;
default:
exception = true;
break;
}
}
if (!exception) {
this.SetImageUI(buttons, true);
}
}
// Draws the state-appropriate texture, then the caption (if any) centered
// within the button, offset by offsetLeft/offsetTop.
public override void CreateUI(GLEx g, int x, int y, LComponent component,
LTexture[] buttonImage) {
LButton button = (LButton) component;
if (buttonImage != null) {
if (!button.IsEnabled()) {
g.DrawTexture(buttonImage[3], x, y);
} else if (button.IsTouchPressed()) {
g.DrawTexture(buttonImage[2], x, y);
} else if (button.IsTouchOver()) {
g.DrawTexture(buttonImage[1], x, y);
} else {
// Single-texture buttons are tinted gray when idle so the
// over/pressed states remain visually distinguishable.
if (type == 1) {
g.DrawTexture(buttonImage[0], x, y, LColor.gray);
} else {
g.DrawTexture(buttonImage[0], x, y);
}
}
}
if (text != null) {
// Temporarily switch font/color, draw the centered caption, restore.
LFont old = g.GetFont();
g.SetFont(font);
g.SetColor(fontColor);
g.DrawString(
text,
x + button.GetOffsetLeft()
+ (button.GetWidth() - font.StringWidth(text)) / 2,
y + button.GetOffsetTop()
+ (button.GetHeight() - font.GetLineHeight()) / 2
+ font.GetLineHeight());
g.SetFont(old);
g.ResetColor();
}
}
// Releases a keyboard-triggered press once its countdown (set in
// ProcessKeyPressed) expires.
public override void Update(long timer)
{
if (this.pressedTime > 0 && --this.pressedTime <= 0) {
this.pressed = false;
}
}
public bool IsTouchOver() {
return this.over;
}
public bool IsTouchPressed() {
return this.pressed;
}
public string GetText() {
return this.text;
}
public void SetText(string st) {
this.text = st;
}
// While dragging, over/pressed track whether the pointer is still inside
// the button's bounds.
protected internal override void ProcessTouchDragged() {
if (this.input.GetTouchPressed() == Touch.TOUCH_MOVE) {
this.over = this.pressed = this.Intersects(this.input.GetTouchX(),
this.input.GetTouchY());
}
}
// The Do/Down/UpClick helpers forward to the optional Click listener
// (declared on LComponent).
public void DoClick() {
if (Click != null) {
Click.DoClick(this);
}
}
public void DownClick() {
if (Click != null) {
Click.DownClick(this, input.GetTouchX(), input.GetTouchY());
}
}
public void UpClick() {
if (Click != null) {
Click.UpClick(this, input.GetTouchX(), input.GetTouchY());
}
}
protected internal override void ProcessTouchClicked()
{
if (this.input.GetTouchReleased() == Touch.TOUCH_UP) {
this.DoClick();
}
}
protected internal override void ProcessTouchPressed()
{
if (this.input.GetTouchPressed() == Touch.TOUCH_DOWN) {
this.DownClick();
this.pressed = true;
}
}
protected internal override void ProcessTouchReleased()
{
if (this.input.GetTouchReleased() == Touch.TOUCH_UP) {
this.UpClick();
this.pressed = false;
}
}
protected internal override void ProcessTouchEntered()
{
this.over = true;
}
protected internal override void ProcessTouchExited()
{
this.over = this.pressed = false;
}
// ENTER on a selected button acts as a click; the press visual is held
// for 5 update ticks via pressedTime (see Update).
protected internal override void ProcessKeyPressed()
{
if (this.IsSelected() && this.input.GetKeyPressed() == Key.ENTER) {
this.pressedTime = 5;
this.pressed = true;
this.DoClick();
}
}
protected internal override void ProcessKeyReleased()
{
if (this.IsSelected() && this.input.GetKeyReleased() == Key.ENTER) {
this.pressed = false;
}
}
// True when SetImages was called with an unsupported texture count.
public virtual bool IsException()
{
return exception;
}
public override string GetUIName()
{
return "Button";
}
public virtual LFont GetFont()
{
return font;
}
public virtual void SetFont(LFont font)
{
this.font = font;
}
public virtual LColor GetFontColor()
{
return fontColor;
}
public virtual void SetFontColor(LColor fontColor)
{
this.fontColor = fontColor;
}
// Horizontal/vertical caption offsets, in pixels, applied in CreateUI.
public virtual int GetOffsetLeft()
{
return offsetLeft;
}
public virtual void SetOffsetLeft(int offsetLeft)
{
this.offsetLeft = offsetLeft;
}
public virtual int GetOffsetTop()
{
return offsetTop;
}
public virtual void SetOffsetTop(int offsetTop)
{
this.offsetTop = offsetTop;
}
}
}
| |
#region Apache Notice
/*****************************************************************************
* $Header: $
* $Revision: $
* $Date: $
*
* Copyright 2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
********************************************************************************/
#endregion
#region Remarks
// Code from Spring.NET
#endregion
#region Imports
using System;
using System.Reflection;
using IBatisNet.Common.Exceptions;
#endregion
namespace IBatisNet.Common.Utilities.TypesResolver
{
/// <summary>
/// Resolves a <see cref="System.Type"/> by name.
/// </summary>
/// <remarks>
/// <p>
/// The rationale behind the creation of this class is to centralise the
/// resolution of type names to <see cref="System.Type"/> instances beyond that
/// offered by the plain vanilla <see cref="System.Type.GetType"/> method call.
/// </p>
/// </remarks>
/// <version>$Id: TypeResolver.cs,v 1.5 2004/09/28 07:51:47 springboy Exp $</version>
public class TypeResolver
{
#region Constructor (s) / Destructor
/// <summary>
/// Creates a new instance of the TypeResolver class.
/// </summary>
public TypeResolver () {}
#endregion
#region Methods
/// <summary>
/// Resolves the supplied type name into a <see cref="System.Type"/>
/// instance.
/// </summary>
/// <param name="typeName">
/// The (possibly partially assembly qualified) name of a <see cref="System.Type"/>.
/// </param>
/// <returns>
/// A resolved <see cref="System.Type"/> instance.
/// </returns>
/// <exception cref="System.TypeLoadException">
/// If the type could not be resolved.
/// </exception>
public virtual Type Resolve (string typeName)
{
#region Sanity Check
if (typeName == null || typeName.Trim().Length==0)
{
throw new ConfigurationException (
"Could not load type with a null or zero length parameter.");
}
#endregion
Type type = null;
// First map any registered alias to its canonical type name.
string canonicalTypeName = TypeAliasResolver.Resolve (typeName);
TypeAssemblyInfo typeInfo = new TypeAssemblyInfo (canonicalTypeName);
if (typeInfo.IsAssemblyQualified)
{
// assembly qualified... load the assembly, then the Type
// NOTE(review): Assembly.LoadWithPartialName is obsolete in
// .NET 2.0+; a migration would use Assembly.Load with a full
// name — confirm before changing, as partial-name semantics
// differ.
Assembly assembly = Assembly.LoadWithPartialName (typeInfo.AssemblyName);
if (assembly != null)
{
// throwOnError: true, ignoreCase: true — this branch resolves
// case-insensitively, unlike the bare-name scan below.
type = assembly.GetType (typeInfo.TypeName, true, true);
}
}
else
{
// bare type name... loop thru all loaded assemblies
// (case-sensitive, non-throwing lookup; first match wins)
Assembly [] assemblies = AppDomain.CurrentDomain.GetAssemblies ();
foreach (Assembly assembly in assemblies)
{
type = assembly.GetType (typeInfo.TypeName, false, false);
if (type != null)
{
break;
}
}
}
if (type == null)
{
throw new TypeLoadException (
"Could not load type : " + typeName);
}
return type;
}
#endregion
#region Inner Class : TypeAssemblyInfo
/// <summary>
/// Holds data about a <see cref="System.Type"/> and it's
/// attendant <see cref="System.Reflection.Assembly"/>.
/// </summary>
internal class TypeAssemblyInfo
{
#region Constants
/// <summary>
/// The string that separates <see cref="System.Type"/> names
/// from their attendant <see cref="System.Reflection.Assembly"/>
/// names in an assembly qualified type name.
/// </summary>
public const string TypeAssemblySeparator = ",";
#endregion
#region Fields
// Set by SplitTypeAndAssemblyNames; assembly name stays null for a
// bare (unqualified) type name.
private string unresolvedAssemblyName;
private string unresolvedTypeName;
#endregion
#region Properties
/// <summary>
/// The (unresolved) type name portion of the original type name.
/// </summary>
public string TypeName
{
get
{
return unresolvedTypeName;
}
}
/// <summary>
/// The (unresolved, possibly partial) name of the attandant assembly.
/// </summary>
public string AssemblyName
{
get
{
return unresolvedAssemblyName;
}
}
/// <summary>
/// Is the type name being resolved assembly qualified?
/// </summary>
public bool IsAssemblyQualified
{
get
{
if (AssemblyName == null || AssemblyName.Trim().Length==0)
{
return false;
}
else
{
return true;
}
}
}
/// <summary>
/// The (possibly assembly qualified) <see cref="System.Type"/> name.
/// </summary>
public string OriginalTypeName
{
get
{
// Reassembled from the split parts; whitespace trimmed during
// splitting is not restored.
System.Text.StringBuilder buffer
= new System.Text.StringBuilder (TypeName);
if (IsAssemblyQualified)
{
buffer.Append (TypeAssemblySeparator);
buffer.Append (AssemblyName);
}
return buffer.ToString ();
}
}
#endregion
#region Constructor (s) / Destructor
/// <summary>
/// Creates a new instance of the TypeAssemblyInfo class.
/// </summary>
/// <param name="unresolvedTypeName">
/// The unresolved name of a <see cref="System.Type"/>.
/// </param>
public TypeAssemblyInfo (string unresolvedTypeName)
{
SplitTypeAndAssemblyNames (unresolvedTypeName);
}
#endregion
#region Methods
// Splits "TypeName, AssemblyName" on the FIRST comma.
// NOTE(review): a generic type name (e.g. List`1[[...]]) contains
// commas and would be mis-split here; this code predates .NET 2.0
// generics — confirm inputs before reusing elsewhere.
private void SplitTypeAndAssemblyNames (string originalTypeName)
{
int typeAssemblyIndex
= originalTypeName.IndexOf (
TypeAssemblyInfo.TypeAssemblySeparator);
if (typeAssemblyIndex < 0)
{
unresolvedTypeName = originalTypeName;
}
else
{
unresolvedTypeName = originalTypeName.Substring (
0, typeAssemblyIndex).Trim ();
unresolvedAssemblyName = originalTypeName.Substring (
typeAssemblyIndex + 1).Trim ();
}
}
#endregion
}
#endregion
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="FileDialogCustomPlace.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
#pragma warning disable 108
namespace System.Windows.Forms
{
using System;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Text;
// COM interop definitions for the Vista+ common item dialogs
// (IFileDialog / IFileOpenDialog / IFileSaveDialog and supporting types).
// IMPORTANT: member declaration order in every [ComImport] interface maps
// directly to the native vtable slot order — do not reorder, insert, or
// remove members.
static class FileDialogNative
{
// Combining [ComImport]+[CoClass] lets `new NativeFileOpenDialog()`
// instantiate the underlying COM coclass directly.
[ComImport]
[Guid(IIDGuid.IFileOpenDialog)]
[CoClass(typeof(FileOpenDialogRCW))]
internal interface NativeFileOpenDialog : IFileOpenDialog
{ }
[ComImport]
[Guid(IIDGuid.IFileSaveDialog)]
[CoClass(typeof(FileSaveDialogRCW))]
internal interface NativeFileSaveDialog : IFileSaveDialog
{ }
[ComImport]
[ClassInterface(ClassInterfaceType.None)]
[TypeLibType(TypeLibTypeFlags.FCanCreate)]
[Guid(CLSIDGuid.FileOpenDialog)]
internal class FileOpenDialogRCW
{ }
[ComImport]
[ClassInterface(ClassInterfaceType.None)]
[TypeLibType(TypeLibTypeFlags.FCanCreate)]
[Guid(CLSIDGuid.FileSaveDialog)]
internal class FileSaveDialogRCW
{ }
internal class IIDGuid
{
private IIDGuid() { } // Avoid FxCop violation AvoidUninstantiatedInternalClasses
// IID GUID strings for relevant COM interfaces
internal const string IModalWindow = "b4db1657-70d7-485e-8e3e-6fcb5a5c1802";
internal const string IFileDialog = "42f85136-db7e-439c-85f1-e4075d135fc8";
internal const string IFileOpenDialog = "d57c7288-d4ad-4768-be02-9d969532d960";
internal const string IFileSaveDialog = "84bccd23-5fde-4cdb-aea4-af64b83d78ab";
internal const string IFileDialogEvents = "973510DB-7D7F-452B-8975-74A85828D354";
internal const string IShellItem = "43826D1E-E718-42EE-BC55-A1E261C37BFE";
internal const string IShellItemArray = "B63EA76D-1F85-456F-A19C-48159EFA858B";
}
// CLSID GUID strings for the creatable dialog coclasses.
internal class CLSIDGuid
{
private CLSIDGuid() { } // Avoid FxCop violation AvoidUninstantiatedInternalClasses
internal const string FileOpenDialog = "DC1C5A9C-E88A-4dde-A5A1-60F82A20AEF7";
internal const string FileSaveDialog = "C0B4E2F3-BA21-4773-8DBA-335EC946EB8B";
}
[ComImport()]
[Guid(IIDGuid.IModalWindow)]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IModalWindow
{
// [PreserveSig] keeps the raw HRESULT (e.g. cancel) instead of
// translating failures into exceptions.
[PreserveSig]
int Show([In] IntPtr parent);
}
internal enum SIATTRIBFLAGS
{
SIATTRIBFLAGS_AND = 0x00000001, // if multiple items and the attributes together.
SIATTRIBFLAGS_OR = 0x00000002, // if multiple items or the attributes together.
SIATTRIBFLAGS_APPCOMPAT = 0x00000003, // Call GetAttributes directly on the ShellFolder for multiple attributes
}
[ComImport]
[Guid(IIDGuid.IShellItemArray)]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IShellItemArray
{
// Not supported: IBindCtx
void BindToHandler([In, MarshalAs(UnmanagedType.Interface)] IntPtr pbc, [In] ref Guid rbhid, [In] ref Guid riid, out IntPtr ppvOut);
void GetPropertyStore([In] int Flags, [In] ref Guid riid, out IntPtr ppv);
void GetPropertyDescriptionList([In] ref PROPERTYKEY keyType, [In] ref Guid riid, out IntPtr ppv);
void GetAttributes([In] SIATTRIBFLAGS dwAttribFlags, [In] uint sfgaoMask, out uint psfgaoAttribs);
void GetCount(out uint pdwNumItems);
void GetItemAt([In] uint dwIndex, [MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void EnumItems([MarshalAs(UnmanagedType.Interface)] out IntPtr ppenumShellItems);
}
[StructLayout(LayoutKind.Sequential, Pack = 4)]
internal struct PROPERTYKEY
{
internal Guid fmtid;
internal uint pid;
}
[ComImport()]
[Guid(IIDGuid.IFileDialog)]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IFileDialog
{
[PreserveSig]
int Show([In] IntPtr parent);
void SetFileTypes([In] uint cFileTypes, [In] [MarshalAs(UnmanagedType.LPArray)]COMDLG_FILTERSPEC[] rgFilterSpec);
void SetFileTypeIndex([In] uint iFileType);
void GetFileTypeIndex(out uint piFileType);
void Advise([In, MarshalAs(UnmanagedType.Interface)] IFileDialogEvents pfde, out uint pdwCookie);
void Unadvise([In] uint dwCookie);
void SetOptions([In] FOS fos);
void GetOptions(out FOS pfos);
void SetDefaultFolder([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void SetFolder([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void GetFolder([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void GetCurrentSelection([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void SetFileName([In, MarshalAs(UnmanagedType.LPWStr)] string pszName);
void GetFileName([MarshalAs(UnmanagedType.LPWStr)] out string pszName);
void SetTitle([In, MarshalAs(UnmanagedType.LPWStr)] string pszTitle);
void SetOkButtonLabel([In, MarshalAs(UnmanagedType.LPWStr)] string pszText);
void SetFileNameLabel([In, MarshalAs(UnmanagedType.LPWStr)] string pszLabel);
void GetResult([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
// NOTE(review): the derived interfaces below declare AddPlace's second
// parameter as FileDialogCustomPlace rather than int — confirm the
// intended marshaling against the native FDAP parameter.
void AddPlace([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, int alignment);
void SetDefaultExtension([In, MarshalAs(UnmanagedType.LPWStr)] string pszDefaultExtension);
void Close([MarshalAs(UnmanagedType.Error)] int hr);
void SetClientGuid([In] ref Guid guid);
void ClearClientData();
void SetFilter([MarshalAs(UnmanagedType.Interface)] IntPtr pFilter);
}
// The IFileDialog members are re-declared below because built-in COM
// interop does not inherit base-interface methods into the vtable of a
// derived [ComImport] interface; the full method list must appear in
// vtable order.
[ComImport()]
[Guid(IIDGuid.IFileOpenDialog)]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IFileOpenDialog : IFileDialog
{
[PreserveSig]
int Show([In] IntPtr parent);
void SetFileTypes([In] uint cFileTypes, [In] ref COMDLG_FILTERSPEC rgFilterSpec);
void SetFileTypeIndex([In] uint iFileType);
void GetFileTypeIndex(out uint piFileType);
void Advise([In, MarshalAs(UnmanagedType.Interface)] IFileDialogEvents pfde, out uint pdwCookie);
void Unadvise([In] uint dwCookie);
void SetOptions([In] FOS fos);
void GetOptions(out FOS pfos);
void SetDefaultFolder([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void SetFolder([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void GetFolder([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void GetCurrentSelection([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void SetFileName([In, MarshalAs(UnmanagedType.LPWStr)] string pszName);
void GetFileName([MarshalAs(UnmanagedType.LPWStr)] out string pszName);
void SetTitle([In, MarshalAs(UnmanagedType.LPWStr)] string pszTitle);
void SetOkButtonLabel([In, MarshalAs(UnmanagedType.LPWStr)] string pszText);
void SetFileNameLabel([In, MarshalAs(UnmanagedType.LPWStr)] string pszLabel);
void GetResult([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void AddPlace([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, FileDialogCustomPlace fdcp);
void SetDefaultExtension([In, MarshalAs(UnmanagedType.LPWStr)] string pszDefaultExtension);
void Close([MarshalAs(UnmanagedType.Error)] int hr);
void SetClientGuid([In] ref Guid guid);
void ClearClientData();
void SetFilter([MarshalAs(UnmanagedType.Interface)] IntPtr pFilter);
// Open-dialog-specific members start here.
void GetResults([MarshalAs(UnmanagedType.Interface)] out IShellItemArray ppenum);
void GetSelectedItems([MarshalAs(UnmanagedType.Interface)] out IShellItemArray ppsai);
}
[ComImport(),
Guid(IIDGuid.IFileSaveDialog),
InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IFileSaveDialog : IFileDialog
{
[PreserveSig]
int Show([In] IntPtr parent);
void SetFileTypes([In] uint cFileTypes, [In] ref COMDLG_FILTERSPEC rgFilterSpec);
void SetFileTypeIndex([In] uint iFileType);
void GetFileTypeIndex(out uint piFileType);
void Advise([In, MarshalAs(UnmanagedType.Interface)] IFileDialogEvents pfde, out uint pdwCookie);
void Unadvise([In] uint dwCookie);
void SetOptions([In] FOS fos);
void GetOptions(out FOS pfos);
void SetDefaultFolder([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void SetFolder([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void GetFolder([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void GetCurrentSelection([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void SetFileName([In, MarshalAs(UnmanagedType.LPWStr)] string pszName);
void GetFileName([MarshalAs(UnmanagedType.LPWStr)] out string pszName);
void SetTitle([In, MarshalAs(UnmanagedType.LPWStr)] string pszTitle);
void SetOkButtonLabel([In, MarshalAs(UnmanagedType.LPWStr)] string pszText);
void SetFileNameLabel([In, MarshalAs(UnmanagedType.LPWStr)] string pszLabel);
void GetResult([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void AddPlace([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, FileDialogCustomPlace fdcp);
void SetDefaultExtension([In, MarshalAs(UnmanagedType.LPWStr)] string pszDefaultExtension);
void Close([MarshalAs(UnmanagedType.Error)] int hr);
void SetClientGuid([In] ref Guid guid);
void ClearClientData();
void SetFilter([MarshalAs(UnmanagedType.Interface)] IntPtr pFilter);
// Save-dialog-specific members start here.
void SetSaveAsItem([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi);
void SetProperties([In, MarshalAs(UnmanagedType.Interface)] IntPtr pStore);
void SetCollectedProperties([In, MarshalAs(UnmanagedType.Interface)] IntPtr pList, [In] int fAppendDefault);
void GetProperties([MarshalAs(UnmanagedType.Interface)] out IntPtr ppStore);
void ApplyProperties([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, [In, MarshalAs(UnmanagedType.Interface)] IntPtr pStore, [In, ComAliasName("ShellObjects.wireHWND")] ref IntPtr hwnd, [In, MarshalAs(UnmanagedType.Interface)] IntPtr pSink);
}
[ComImport,
Guid(IIDGuid.IFileDialogEvents),
InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IFileDialogEvents
{
// NOTE: some of these callbacks are cancelable - returning S_FALSE means that
// the dialog should not proceed (e.g. with closing, changing folder); to
// support this, we need to use the PreserveSig attribute to enable us to return
// the proper HRESULT
[PreserveSig]
int OnFileOk([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd);
[PreserveSig]
int OnFolderChanging([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd, [In, MarshalAs(UnmanagedType.Interface)] IShellItem psiFolder);
void OnFolderChange([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd);
void OnSelectionChange([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd);
void OnShareViolation([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd, [In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, out FDE_SHAREVIOLATION_RESPONSE pResponse);
void OnTypeChange([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd);
void OnOverwrite([In, MarshalAs(UnmanagedType.Interface)] IFileDialog pfd, [In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, out FDE_OVERWRITE_RESPONSE pResponse);
}
[ComImport,
Guid(IIDGuid.IShellItem),
InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IShellItem
{
void BindToHandler([In, MarshalAs(UnmanagedType.Interface)] IntPtr pbc, [In] ref Guid bhid, [In] ref Guid riid, out IntPtr ppv);
void GetParent([MarshalAs(UnmanagedType.Interface)] out IShellItem ppsi);
void GetDisplayName([In] SIGDN sigdnName, [MarshalAs(UnmanagedType.LPWStr)] out string ppszName);
void GetAttributes([In] uint sfgaoMask, out uint psfgaoAttribs);
void Compare([In, MarshalAs(UnmanagedType.Interface)] IShellItem psi, [In] uint hint, out int piOrder);
}
// Display-name forms for IShellItem.GetDisplayName; values mirror the
// native SIGDN enumeration (comments show the underlying SHGDN flags).
internal enum SIGDN : uint
{
SIGDN_NORMALDISPLAY = 0x00000000, // SHGDN_NORMAL
SIGDN_PARENTRELATIVEPARSING = 0x80018001, // SHGDN_INFOLDER | SHGDN_FORPARSING
SIGDN_DESKTOPABSOLUTEPARSING = 0x80028000, // SHGDN_FORPARSING
SIGDN_PARENTRELATIVEEDITING = 0x80031001, // SHGDN_INFOLDER | SHGDN_FOREDITING
SIGDN_DESKTOPABSOLUTEEDITING = 0x8004c000, // SHGDN_FORPARSING | SHGDN_FORADDRESSBAR
SIGDN_FILESYSPATH = 0x80058000, // SHGDN_FORPARSING
SIGDN_URL = 0x80068000, // SHGDN_FORPARSING
SIGDN_PARENTRELATIVEFORADDRESSBAR = 0x8007c001, // SHGDN_INFOLDER | SHGDN_FORPARSING | SHGDN_FORADDRESSBAR
SIGDN_PARENTRELATIVE = 0x80080001 // SHGDN_INFOLDER
}
// Filter entry ("display name" / "*.ext;*.ext2" pattern pair) for
// SetFileTypes.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto, Pack = 4)]
internal struct COMDLG_FILTERSPEC
{
[MarshalAs(UnmanagedType.LPWStr)]
internal string pszName;
[MarshalAs(UnmanagedType.LPWStr)]
internal string pszSpec;
}
// Dialog option flags (native FILEOPENDIALOGOPTIONS) for Set/GetOptions.
[Flags]
internal enum FOS : uint
{
FOS_OVERWRITEPROMPT = 0x00000002,
FOS_STRICTFILETYPES = 0x00000004,
FOS_NOCHANGEDIR = 0x00000008,
FOS_PICKFOLDERS = 0x00000020,
FOS_FORCEFILESYSTEM = 0x00000040, // Ensure that items returned are filesystem items.
FOS_ALLNONSTORAGEITEMS = 0x00000080, // Allow choosing items that have no storage.
FOS_NOVALIDATE = 0x00000100,
FOS_ALLOWMULTISELECT = 0x00000200,
FOS_PATHMUSTEXIST = 0x00000800,
FOS_FILEMUSTEXIST = 0x00001000,
FOS_CREATEPROMPT = 0x00002000,
FOS_SHAREAWARE = 0x00004000,
FOS_NOREADONLYRETURN = 0x00008000,
FOS_NOTESTFILECREATE = 0x00010000,
FOS_HIDEMRUPLACES = 0x00020000,
FOS_HIDEPINNEDPLACES = 0x00040000,
FOS_NODEREFERENCELINKS = 0x00100000,
FOS_DONTADDTORECENT = 0x02000000,
FOS_FORCESHOWHIDDEN = 0x10000000,
FOS_DEFAULTNOMINIMODE = 0x20000000
}
// Response codes for IFileDialogEvents.OnShareViolation.
internal enum FDE_SHAREVIOLATION_RESPONSE
{
FDESVR_DEFAULT = 0x00000000,
FDESVR_ACCEPT = 0x00000001,
FDESVR_REFUSE = 0x00000002
}
// Response codes for IFileDialogEvents.OnOverwrite.
internal enum FDE_OVERWRITE_RESPONSE
{
FDEOR_DEFAULT = 0x00000000,
FDEOR_ACCEPT = 0x00000001,
FDEOR_REFUSE = 0x00000002
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Text;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenSim.Framework;
using OpenSim.Framework.Client;
namespace OpenSim.Client.Sirikata.ClientStack
{
class SirikataClientView : IClientAPI, IClientCore
{
private readonly NetworkStream stream;
// Wraps an accepted TCP connection and performs the initial Sirikata
// session handshake: reads the client's hello, then replies with
// "SSTTCP01" + a freshly generated session UUID.
public SirikataClientView(TcpClient client)
{
stream = client.GetStream();
sessionId = UUID.Random();
// Handshake with client
string con = "SSTTCP01" + sessionId;
byte[] handshake = Util.UTF8.GetBytes(con);
// NOTE(review): buffer is sized 44 (2+6+36) but the read length is
// handshake.Length (8 + UUID string length) — confirm these actually
// agree with the wire format.
byte[] clientHandshake = new byte[2+6+36];
// NOTE(review): Read's return value is ignored; a short read would
// leave the handshake partially consumed.
stream.Read(clientHandshake, 0, handshake.Length);
// NOTE(review): UTF8.GetBytes does not append a null terminator, so
// "Length - 1" actually drops the final character of the session id
// string — verify against the Sirikata handshake specification.
stream.Write(handshake, 0, handshake.Length - 1); // Remove null terminator (hence the -1)
}
#region Implementation of IClientAPI
private Vector3 startPos;
private UUID sessionId;
private UUID secureSessionId;
private UUID activeGroupId;
private string activeGroupName;
private ulong activeGroupPowers;
private string firstName;
private string lastName;
private IScene scene;
private int nextAnimationSequenceNumber;
private string name;
private bool isActive;
private bool sendLogoutPacketWhenClosing;
private uint circuitCode;
private IPEndPoint remoteEndPoint;
public Vector3 StartPos
{
get { return startPos; }
set { startPos = value; }
}
// IClientCore module lookup — not implemented for the Sirikata stack.
public bool TryGet<T>(out T iface)
{
throw new System.NotImplementedException();
}
// IClientCore module accessor — not implemented for the Sirikata stack.
public T Get<T>()
{
throw new System.NotImplementedException();
}
UUID IClientCore.AgentId
{
get { throw new NotImplementedException(); }
}
// Disconnects the client with a reason message — not implemented yet.
public void Disconnect(string reason)
{
throw new System.NotImplementedException();
}
// Disconnects the client without a reason — not implemented yet.
public void Disconnect()
{
throw new System.NotImplementedException();
}
UUID IClientAPI.AgentId
{
get { throw new NotImplementedException(); }
}
public UUID SessionId
{
get { return sessionId; }
}
public UUID SecureSessionId
{
get { return secureSessionId; }
}
public UUID ActiveGroupId
{
get { return activeGroupId; }
}
public string ActiveGroupName
{
get { return activeGroupName; }
}
public ulong ActiveGroupPowers
{
get { return activeGroupPowers; }
}
// Group powers lookup for a specific group — not implemented yet.
public ulong GetGroupPowers(UUID groupID)
{
throw new System.NotImplementedException();
}
// Group membership test — not implemented yet.
public bool IsGroupMember(UUID GroupID)
{
throw new System.NotImplementedException();
}
public string FirstName
{
get { return firstName; }
}
public string LastName
{
get { return lastName; }
}
public IScene Scene
{
get { return scene; }
}
public int NextAnimationSequenceNumber
{
get { return nextAnimationSequenceNumber; }
}
public string Name
{
get { return name; }
}
public bool IsActive
{
get { return isActive; }
set { isActive = value; }
}
public bool IsLoggingOut
{
get { return false; }
set { }
}
public bool SendLogoutPacketWhenClosing
{
set { sendLogoutPacketWhenClosing = value; }
}
public uint CircuitCode
{
get { return circuitCode; }
}
public IPEndPoint RemoteEndPoint
{
get { return remoteEndPoint; }
}
// Event surface required by the implemented client interface. In this test
// double the events are only declared — nothing in the visible code raises
// or subscribes to them (presumably they exist solely to satisfy the
// interface contract; confirm against the rest of the class).
public event GenericMessage OnGenericMessage;
public event ImprovedInstantMessage OnInstantMessage;
public event ChatMessage OnChatFromClient;
public event TextureRequest OnRequestTexture;
public event RezObject OnRezObject;
public event ModifyTerrain OnModifyTerrain;
public event BakeTerrain OnBakeTerrain;
public event EstateChangeInfo OnEstateChangeInfo;
public event SetAppearance OnSetAppearance;
public event AvatarNowWearing OnAvatarNowWearing;
public event RezSingleAttachmentFromInv OnRezSingleAttachmentFromInv;
public event RezMultipleAttachmentsFromInv OnRezMultipleAttachmentsFromInv;
public event UUIDNameRequest OnDetachAttachmentIntoInv;
public event ObjectAttach OnObjectAttach;
// Note: OnObjectDetach deliberately shares the ObjectDeselect delegate type.
public event ObjectDeselect OnObjectDetach;
public event ObjectDrop OnObjectDrop;
public event StartAnim OnStartAnim;
public event StopAnim OnStopAnim;
public event LinkObjects OnLinkObjects;
public event DelinkObjects OnDelinkObjects;
public event RequestMapBlocks OnRequestMapBlocks;
public event RequestMapName OnMapNameRequest;
public event TeleportLocationRequest OnTeleportLocationRequest;
public event DisconnectUser OnDisconnectUser;
public event RequestAvatarProperties OnRequestAvatarProperties;
public event SetAlwaysRun OnSetAlwaysRun;
public event TeleportLandmarkRequest OnTeleportLandmarkRequest;
public event DeRezObject OnDeRezObject;
public event Action<IClientAPI> OnRegionHandShakeReply;
public event GenericCall2 OnRequestWearables;
public event GenericCall1 OnCompleteMovementToRegion;
public event UpdateAgent OnPreAgentUpdate;
public event UpdateAgent OnAgentUpdate;
public event AgentRequestSit OnAgentRequestSit;
public event AgentSit OnAgentSit;
public event AvatarPickerRequest OnAvatarPickerRequest;
public event Action<IClientAPI> OnRequestAvatarsData;
public event AddNewPrim OnAddPrim;
public event FetchInventory OnAgentDataUpdateRequest;
public event TeleportLocationRequest OnSetStartLocationRequest;
public event RequestGodlikePowers OnRequestGodlikePowers;
public event GodKickUser OnGodKickUser;
public event ObjectDuplicate OnObjectDuplicate;
public event ObjectDuplicateOnRay OnObjectDuplicateOnRay;
public event GrabObject OnGrabObject;
public event DeGrabObject OnDeGrabObject;
public event MoveObject OnGrabUpdate;
public event SpinStart OnSpinStart;
public event SpinObject OnSpinUpdate;
public event SpinStop OnSpinStop;
public event UpdateShape OnUpdatePrimShape;
public event ObjectExtraParams OnUpdateExtraParams;
public event ObjectRequest OnObjectRequest;
public event ObjectSelect OnObjectSelect;
public event ObjectDeselect OnObjectDeselect;
public event GenericCall7 OnObjectDescription;
public event GenericCall7 OnObjectName;
public event GenericCall7 OnObjectClickAction;
public event GenericCall7 OnObjectMaterial;
public event RequestObjectPropertiesFamily OnRequestObjectPropertiesFamily;
public event UpdatePrimFlags OnUpdatePrimFlags;
public event UpdatePrimTexture OnUpdatePrimTexture;
public event UpdateVector OnUpdatePrimGroupPosition;
public event UpdateVector OnUpdatePrimSinglePosition;
public event UpdatePrimRotation OnUpdatePrimGroupRotation;
public event UpdatePrimSingleRotation OnUpdatePrimSingleRotation;
public event UpdatePrimSingleRotationPosition OnUpdatePrimSingleRotationPosition;
public event UpdatePrimGroupRotation OnUpdatePrimGroupMouseRotation;
public event UpdateVector OnUpdatePrimScale;
public event UpdateVector OnUpdatePrimGroupScale;
public event StatusChange OnChildAgentStatus;
public event GenericCall2 OnStopMovement;
public event Action<UUID> OnRemoveAvatar;
public event ObjectPermissions OnObjectPermissions;
public event CreateNewInventoryItem OnCreateNewInventoryItem;
public event LinkInventoryItem OnLinkInventoryItem;
public event CreateInventoryFolder OnCreateNewInventoryFolder;
public event UpdateInventoryFolder OnUpdateInventoryFolder;
public event MoveInventoryFolder OnMoveInventoryFolder;
public event FetchInventoryDescendents OnFetchInventoryDescendents;
public event PurgeInventoryDescendents OnPurgeInventoryDescendents;
public event FetchInventory OnFetchInventory;
public event RequestTaskInventory OnRequestTaskInventory;
public event UpdateInventoryItem OnUpdateInventoryItem;
public event CopyInventoryItem OnCopyInventoryItem;
public event MoveInventoryItem OnMoveInventoryItem;
public event RemoveInventoryFolder OnRemoveInventoryFolder;
public event RemoveInventoryItem OnRemoveInventoryItem;
public event UDPAssetUploadRequest OnAssetUploadRequest;
public event XferReceive OnXferReceive;
public event RequestXfer OnRequestXfer;
public event ConfirmXfer OnConfirmXfer;
public event AbortXfer OnAbortXfer;
public event RezScript OnRezScript;
public event UpdateTaskInventory OnUpdateTaskInventory;
public event MoveTaskInventory OnMoveTaskItem;
public event RemoveTaskInventory OnRemoveTaskItem;
public event RequestAsset OnRequestAsset;
public event UUIDNameRequest OnNameFromUUIDRequest;
public event ParcelAccessListRequest OnParcelAccessListRequest;
public event ParcelAccessListUpdateRequest OnParcelAccessListUpdateRequest;
public event ParcelPropertiesRequest OnParcelPropertiesRequest;
public event ParcelDivideRequest OnParcelDivideRequest;
public event ParcelJoinRequest OnParcelJoinRequest;
public event ParcelPropertiesUpdateRequest OnParcelPropertiesUpdateRequest;
public event ParcelSelectObjects OnParcelSelectObjects;
public event ParcelObjectOwnerRequest OnParcelObjectOwnerRequest;
public event ParcelAbandonRequest OnParcelAbandonRequest;
public event ParcelGodForceOwner OnParcelGodForceOwner;
public event ParcelReclaim OnParcelReclaim;
public event ParcelReturnObjectsRequest OnParcelReturnObjectsRequest;
public event ParcelDeedToGroup OnParcelDeedToGroup;
public event RegionInfoRequest OnRegionInfoRequest;
public event EstateCovenantRequest OnEstateCovenantRequest;
public event FriendActionDelegate OnApproveFriendRequest;
public event FriendActionDelegate OnDenyFriendRequest;
public event FriendshipTermination OnTerminateFriendship;
public event MoneyTransferRequest OnMoneyTransferRequest;
public event EconomyDataRequest OnEconomyDataRequest;
public event MoneyBalanceRequest OnMoneyBalanceRequest;
public event UpdateAvatarProperties OnUpdateAvatarProperties;
public event ParcelBuy OnParcelBuy;
public event RequestPayPrice OnRequestPayPrice;
public event ObjectSaleInfo OnObjectSaleInfo;
public event ObjectBuy OnObjectBuy;
public event BuyObjectInventory OnBuyObjectInventory;
public event RequestTerrain OnRequestTerrain;
public event RequestTerrain OnUploadTerrain;
public event ObjectIncludeInSearch OnObjectIncludeInSearch;
public event UUIDNameRequest OnTeleportHomeRequest;
public event ScriptAnswer OnScriptAnswer;
public event AgentSit OnUndo;
public event AgentSit OnRedo;
public event LandUndo OnLandUndo;
public event ForceReleaseControls OnForceReleaseControls;
public event GodLandStatRequest OnLandStatRequest;
public event DetailedEstateDataRequest OnDetailedEstateDataRequest;
public event SetEstateFlagsRequest OnSetEstateFlagsRequest;
public event SetEstateTerrainBaseTexture OnSetEstateTerrainBaseTexture;
public event SetEstateTerrainDetailTexture OnSetEstateTerrainDetailTexture;
public event SetEstateTerrainTextureHeights OnSetEstateTerrainTextureHeights;
public event CommitEstateTerrainTextureRequest OnCommitEstateTerrainTextureRequest;
public event SetRegionTerrainSettings OnSetRegionTerrainSettings;
public event EstateRestartSimRequest OnEstateRestartSimRequest;
public event EstateChangeCovenantRequest OnEstateChangeCovenantRequest;
public event UpdateEstateAccessDeltaRequest OnUpdateEstateAccessDeltaRequest;
public event SimulatorBlueBoxMessageRequest OnSimulatorBlueBoxMessageRequest;
public event EstateBlueBoxMessageRequest OnEstateBlueBoxMessageRequest;
public event EstateDebugRegionRequest OnEstateDebugRegionRequest;
public event EstateTeleportOneUserHomeRequest OnEstateTeleportOneUserHomeRequest;
public event EstateTeleportAllUsersHomeRequest OnEstateTeleportAllUsersHomeRequest;
public event UUIDNameRequest OnUUIDGroupNameRequest;
public event RegionHandleRequest OnRegionHandleRequest;
public event ParcelInfoRequest OnParcelInfoRequest;
public event RequestObjectPropertiesFamily OnObjectGroupRequest;
public event ScriptReset OnScriptReset;
public event GetScriptRunning OnGetScriptRunning;
public event SetScriptRunning OnSetScriptRunning;
public event UpdateVector OnAutoPilotGo;
public event TerrainUnacked OnUnackedTerrain;
public event ActivateGesture OnActivateGesture;
public event DeactivateGesture OnDeactivateGesture;
public event ObjectOwner OnObjectOwner;
public event DirPlacesQuery OnDirPlacesQuery;
public event DirFindQuery OnDirFindQuery;
public event DirLandQuery OnDirLandQuery;
public event DirPopularQuery OnDirPopularQuery;
public event DirClassifiedQuery OnDirClassifiedQuery;
public event EventInfoRequest OnEventInfoRequest;
public event ParcelSetOtherCleanTime OnParcelSetOtherCleanTime;
public event MapItemRequest OnMapItemRequest;
public event OfferCallingCard OnOfferCallingCard;
public event AcceptCallingCard OnAcceptCallingCard;
public event DeclineCallingCard OnDeclineCallingCard;
public event SoundTrigger OnSoundTrigger;
public event StartLure OnStartLure;
public event TeleportLureRequest OnTeleportLureRequest;
public event NetworkStats OnNetworkStatsUpdate;
public event ClassifiedInfoRequest OnClassifiedInfoRequest;
public event ClassifiedInfoUpdate OnClassifiedInfoUpdate;
public event ClassifiedDelete OnClassifiedDelete;
public event ClassifiedDelete OnClassifiedGodDelete;
public event EventNotificationAddRequest OnEventNotificationAddRequest;
public event EventNotificationRemoveRequest OnEventNotificationRemoveRequest;
public event EventGodDelete OnEventGodDelete;
public event ParcelDwellRequest OnParcelDwellRequest;
public event UserInfoRequest OnUserInfoRequest;
public event UpdateUserInfo OnUpdateUserInfo;
public event RetrieveInstantMessages OnRetrieveInstantMessages;
public event PickDelete OnPickDelete;
public event PickGodDelete OnPickGodDelete;
public event PickInfoUpdate OnPickInfoUpdate;
public event AvatarNotesUpdate OnAvatarNotesUpdate;
public event AvatarInterestUpdate OnAvatarInterestUpdate;
public event GrantUserFriendRights OnGrantUserRights;
public event MuteListRequest OnMuteListRequest;
public event PlacesQuery OnPlacesQuery;
public event FindAgentUpdate OnFindAgent;
public event TrackAgentUpdate OnTrackAgent;
public event NewUserReport OnUserReport;
public event SaveStateHandler OnSaveState;
public event GroupAccountSummaryRequest OnGroupAccountSummaryRequest;
public event GroupAccountDetailsRequest OnGroupAccountDetailsRequest;
public event GroupAccountTransactionsRequest OnGroupAccountTransactionsRequest;
public event FreezeUserUpdate OnParcelFreezeUser;
public event EjectUserUpdate OnParcelEjectUser;
public event ParcelBuyPass OnParcelBuyPass;
public event ParcelGodMark OnParcelGodMark;
public event GroupActiveProposalsRequest OnGroupActiveProposalsRequest;
public event GroupVoteHistoryRequest OnGroupVoteHistoryRequest;
public event SimWideDeletesDelegate OnSimWideDeletes;
public event SendPostcard OnSendPostcard;
public event MuteListEntryUpdate OnUpdateMuteListEntry;
public event MuteListEntryRemove OnRemoveMuteListEntry;
// NOTE(review): "onGodlikeMessage" breaks the PascalCase convention used by
// every other event here. Renaming it would break external subscribers (the
// name is part of the public interface), so it is only flagged, not fixed.
public event GodlikeMessage onGodlikeMessage;
public event GodUpdateRegionInfoUpdate OnGodUpdateRegionInfoUpdate;
// ----------------------------------------------------------------------
// Interface method stubs (part 1). Every method below either throws
// NotImplementedException or is an explicitly empty no-op; this class is a
// test double, so callers are expected never to rely on these behaviors
// unless a test overrides/ignores them.
// ----------------------------------------------------------------------
public void SetDebugPacketLevel(int newDebug)
{
throw new System.NotImplementedException();
}
public void InPacket(object NewPack)
{
throw new System.NotImplementedException();
}
public void ProcessInPacket(Packet NewPack)
{
throw new System.NotImplementedException();
}
public void Close()
{
throw new System.NotImplementedException();
}
public void Kick(string message)
{
throw new System.NotImplementedException();
}
public void Start()
{
throw new System.NotImplementedException();
}
public void Stop()
{
throw new System.NotImplementedException();
}
public void SendWearables(AvatarWearable[] wearables, int serial)
{
throw new System.NotImplementedException();
}
public void SendAppearance(UUID agentID, byte[] visualParams, byte[] textureEntry)
{
throw new System.NotImplementedException();
}
public void SendStartPingCheck(byte seq)
{
throw new System.NotImplementedException();
}
public void SendKillObject(ulong regionHandle, uint localID)
{
throw new System.NotImplementedException();
}
public void SendAnimations(UUID[] animID, int[] seqs, UUID sourceAgentId, UUID[] objectIDs)
{
throw new System.NotImplementedException();
}
public void SendRegionHandshake(RegionInfo regionInfo, RegionHandshakeArgs args)
{
throw new System.NotImplementedException();
}
public void SendChatMessage(string message, byte type, Vector3 fromPos, string fromName, UUID fromAgentID, byte source, byte audible)
{
throw new System.NotImplementedException();
}
public void SendInstantMessage(GridInstantMessage im)
{
throw new System.NotImplementedException();
}
// Intentional no-op: unlike the List<byte[]> overload below, this overload
// silently discards the message instead of throwing.
public void SendGenericMessage(string method, List<string> message)
{
}
public void SendGenericMessage(string method, List<byte[]> message)
{
throw new System.NotImplementedException();
}
public void SendLayerData(float[] map)
{
throw new System.NotImplementedException();
}
public void SendLayerData(int px, int py, float[] map)
{
throw new System.NotImplementedException();
}
public void SendWindData(Vector2[] windSpeeds)
{
throw new System.NotImplementedException();
}
public void SendCloudData(float[] cloudCover)
{
throw new System.NotImplementedException();
}
public void MoveAgentIntoRegion(RegionInfo regInfo, Vector3 pos, Vector3 look)
{
throw new System.NotImplementedException();
}
public void InformClientOfNeighbour(ulong neighbourHandle, IPEndPoint neighbourExternalEndPoint)
{
throw new System.NotImplementedException();
}
public AgentCircuitData RequestClientInfo()
{
throw new System.NotImplementedException();
}
public void CrossRegion(ulong newRegionHandle, Vector3 pos, Vector3 lookAt, IPEndPoint newRegionExternalEndPoint, string capsURL)
{
throw new System.NotImplementedException();
}
public void SendMapBlock(List<MapBlockData> mapBlocks, uint flag)
{
throw new System.NotImplementedException();
}
public void SendLocalTeleport(Vector3 position, Vector3 lookAt, uint flags)
{
throw new System.NotImplementedException();
}
public void SendRegionTeleport(ulong regionHandle, byte simAccess, IPEndPoint regionExternalEndPoint, uint locationID, uint flags, string capsURL)
{
throw new System.NotImplementedException();
}
public void SendTeleportFailed(string reason)
{
throw new System.NotImplementedException();
}
public void SendTeleportLocationStart()
{
throw new System.NotImplementedException();
}
public void SendMoneyBalance(UUID transaction, bool success, byte[] description, int balance)
{
throw new System.NotImplementedException();
}
public void SendPayPrice(UUID objectID, int[] payPrice)
{
throw new System.NotImplementedException();
}
public void SendCoarseLocationUpdate(List<UUID> users, List<Vector3> CoarseLocations)
{
throw new System.NotImplementedException();
}
public void AttachObject(uint localID, Quaternion rotation, byte attachPoint, UUID ownerID)
{
throw new System.NotImplementedException();
}
public void SetChildAgentThrottle(byte[] throttle)
{
throw new System.NotImplementedException();
}
public void SendAvatarDataImmediate(ISceneEntity avatar)
{
throw new System.NotImplementedException();
}
public void SendPrimUpdate(ISceneEntity entity, PrimUpdateFlags updateFlags)
{
throw new System.NotImplementedException();
}
public void ReprioritizeUpdates()
{
throw new System.NotImplementedException();
}
public void FlushPrimUpdates()
{
throw new System.NotImplementedException();
}
public void SendInventoryFolderDetails(UUID ownerID, UUID folderID, List<InventoryItemBase> items, List<InventoryFolderBase> folders, int version, bool fetchFolders, bool fetchItems)
{
throw new System.NotImplementedException();
}
public void SendInventoryItemDetails(UUID ownerID, InventoryItemBase item)
{
throw new System.NotImplementedException();
}
public void SendInventoryItemCreateUpdate(InventoryItemBase Item, uint callbackId)
{
throw new System.NotImplementedException();
}
public void SendRemoveInventoryItem(UUID itemID)
{
throw new System.NotImplementedException();
}
public void SendTakeControls(int controls, bool passToAgent, bool TakeControls)
{
throw new System.NotImplementedException();
}
public void SendTaskInventory(UUID taskID, short serial, byte[] fileName)
{
throw new System.NotImplementedException();
}
public void SendBulkUpdateInventory(InventoryNodeBase node)
{
throw new System.NotImplementedException();
}
public void SendXferPacket(ulong xferID, uint packet, byte[] data)
{
throw new System.NotImplementedException();
}
public void SendEconomyData(float EnergyEfficiency, int ObjectCapacity, int ObjectCount, int PriceEnergyUnit, int PriceGroupCreate, int PriceObjectClaim, float PriceObjectRent, float PriceObjectScaleFactor, int PriceParcelClaim, float PriceParcelClaimFactor, int PriceParcelRent, int PricePublicObjectDecay, int PricePublicObjectDelete, int PriceRentLight, int PriceUpload, int TeleportMinPrice, float TeleportPriceExponent)
{
throw new System.NotImplementedException();
}
public void SendAvatarPickerReply(AvatarPickerReplyAgentDataArgs AgentData, List<AvatarPickerReplyDataArgs> Data)
{
throw new System.NotImplementedException();
}
public void SendAgentDataUpdate(UUID agentid, UUID activegroupid, string firstname, string lastname, ulong grouppowers, string groupname, string grouptitle)
{
throw new System.NotImplementedException();
}
public void SendPreLoadSound(UUID objectID, UUID ownerID, UUID soundID)
{
throw new System.NotImplementedException();
}
public void SendPlayAttachedSound(UUID soundID, UUID objectID, UUID ownerID, float gain, byte flags)
{
throw new System.NotImplementedException();
}
public void SendTriggeredSound(UUID soundID, UUID ownerID, UUID objectID, UUID parentID, ulong handle, Vector3 position, float gain)
{
throw new System.NotImplementedException();
}
public void SendAttachedSoundGainChange(UUID objectID, float gain)
{
throw new System.NotImplementedException();
}
public void SendNameReply(UUID profileId, string firstname, string lastname)
{
throw new System.NotImplementedException();
}
public void SendAlertMessage(string message)
{
throw new System.NotImplementedException();
}
public void SendAgentAlertMessage(string message, bool modal)
{
throw new System.NotImplementedException();
}
public void SendLoadURL(string objectname, UUID objectID, UUID ownerID, bool groupOwned, string message, string url)
{
throw new System.NotImplementedException();
}
public void SendDialog(string objectname, UUID objectID, string ownerFirstName, string ownerLastName, string msg, UUID textureID, int ch, string[] buttonlabels)
{
throw new System.NotImplementedException();
}
public bool AddMoney(int debit)
{
throw new System.NotImplementedException();
}
public void SendSunPos(Vector3 sunPos, Vector3 sunVel, ulong CurrentTime, uint SecondsPerSunCycle, uint SecondsPerYear, float OrbitalPosition)
{
throw new System.NotImplementedException();
}
public void SendViewerEffect(ViewerEffectPacket.EffectBlock[] effectBlocks)
{
throw new System.NotImplementedException();
}
public void SendViewerTime(int phase)
{
throw new System.NotImplementedException();
}
public UUID GetDefaultAnimation(string name)
{
throw new System.NotImplementedException();
}
public void SendAvatarProperties(UUID avatarID, string aboutText, string bornOn, byte[] charterMember, string flAbout, uint flags, UUID flImageID, UUID imageID, string profileURL, UUID partnerID)
{
throw new System.NotImplementedException();
}
public void SendScriptQuestion(UUID taskID, string taskName, string ownerName, UUID itemID, int question)
{
throw new System.NotImplementedException();
}
public void SendHealth(float health)
{
throw new System.NotImplementedException();
}
public void SendEstateList(UUID invoice, int code, UUID[] Data, uint estateID)
{
throw new System.NotImplementedException();
}
public void SendBannedUserList(UUID invoice, EstateBan[] banlist, uint estateID)
{
throw new System.NotImplementedException();
}
public void SendRegionInfoToEstateMenu(RegionInfoForEstateMenuArgs args)
{
throw new System.NotImplementedException();
}
public void SendEstateCovenantInformation(UUID covenant)
{
throw new System.NotImplementedException();
}
public void SendDetailedEstateData(UUID invoice, string estateName, uint estateID, uint parentEstate, uint estateFlags, uint sunPosition, UUID covenant, string abuseEmail, UUID estateOwner)
{
throw new System.NotImplementedException();
}
public void SendLandProperties(int sequence_id, bool snap_selection, int request_result, LandData landData, float simObjectBonusFactor, int parcelObjectCapacity, int simObjectCapacity, uint regionFlags)
{
throw new System.NotImplementedException();
}
public void SendLandAccessListData(List<UUID> avatars, uint accessFlag, int localLandID)
{
throw new System.NotImplementedException();
}
public void SendForceClientSelectObjects(List<uint> objectIDs)
{
throw new System.NotImplementedException();
}
public void SendCameraConstraint(Vector4 ConstraintPlane)
{
throw new System.NotImplementedException();
}
public void SendLandObjectOwners(LandData land, List<UUID> groups, Dictionary<UUID, int> ownersAndCount)
{
throw new System.NotImplementedException();
}
public void SendLandParcelOverlay(byte[] data, int sequence_id)
{
throw new System.NotImplementedException();
}
public void SendParcelMediaCommand(uint flags, ParcelMediaCommandEnum command, float time)
{
throw new System.NotImplementedException();
}
public void SendParcelMediaUpdate(string mediaUrl, UUID mediaTextureID, byte autoScale, string mediaType, string mediaDesc, int mediaWidth, int mediaHeight, byte mediaLoop)
{
throw new System.NotImplementedException();
}
public void SendAssetUploadCompleteMessage(sbyte AssetType, bool Success, UUID AssetFullID)
{
throw new System.NotImplementedException();
}
public void SendConfirmXfer(ulong xferID, uint PacketID)
{
throw new System.NotImplementedException();
}
public void SendXferRequest(ulong XferID, short AssetType, UUID vFileID, byte FilePath, byte[] FileName)
{
throw new System.NotImplementedException();
}
public void SendInitiateDownload(string simFileName, string clientFileName)
{
throw new System.NotImplementedException();
}
public void SendImageFirstPart(ushort numParts, UUID ImageUUID, uint ImageSize, byte[] ImageData, byte imageCodec)
{
throw new System.NotImplementedException();
}
public void SendImageNextPart(ushort partNumber, UUID imageUuid, byte[] imageData)
{
throw new System.NotImplementedException();
}
public void SendImageNotFound(UUID imageid)
{
throw new System.NotImplementedException();
}
public void SendShutdownConnectionNotice()
{
throw new System.NotImplementedException();
}
public void SendSimStats(SimStats stats)
{
throw new System.NotImplementedException();
}
public void SendObjectPropertiesFamilyData(uint RequestFlags, UUID ObjectUUID, UUID OwnerID, UUID GroupID, uint BaseMask, uint OwnerMask, uint GroupMask, uint EveryoneMask, uint NextOwnerMask, int OwnershipCost, byte SaleType, int SalePrice, uint Category, UUID LastOwnerID, string ObjectName, string Description)
{
throw new System.NotImplementedException();
}
public void SendObjectPropertiesReply(UUID ItemID, ulong CreationDate, UUID CreatorUUID, UUID FolderUUID, UUID FromTaskUUID, UUID GroupUUID, short InventorySerial, UUID LastOwnerUUID, UUID ObjectUUID, UUID OwnerUUID, string TouchTitle, byte[] TextureID, string SitTitle, string ItemName, string ItemDescription, uint OwnerMask, uint NextOwnerMask, uint GroupMask, uint EveryoneMask, uint BaseMask, byte saleType, int salePrice)
{
throw new System.NotImplementedException();
}
public void SendAgentOffline(UUID[] agentIDs)
{
throw new System.NotImplementedException();
}
public void SendAgentOnline(UUID[] agentIDs)
{
throw new System.NotImplementedException();
}
public void SendSitResponse(UUID TargetID, Vector3 OffsetPos, Quaternion SitOrientation, bool autopilot, Vector3 CameraAtOffset, Vector3 CameraEyeOffset, bool ForceMouseLook)
{
throw new System.NotImplementedException();
}
public void SendAdminResponse(UUID Token, uint AdminLevel)
{
throw new System.NotImplementedException();
}
public void SendGroupMembership(GroupMembershipData[] GroupMembership)
{
throw new System.NotImplementedException();
}
public void SendGroupNameReply(UUID groupLLUID, string GroupName)
{
throw new System.NotImplementedException();
}
public void SendJoinGroupReply(UUID groupID, bool success)
{
throw new System.NotImplementedException();
}
public void SendEjectGroupMemberReply(UUID agentID, UUID groupID, bool success)
{
throw new System.NotImplementedException();
}
public void SendLeaveGroupReply(UUID groupID, bool success)
{
throw new System.NotImplementedException();
}
public void SendCreateGroupReply(UUID groupID, bool success, string message)
{
throw new System.NotImplementedException();
}
public void SendLandStatReply(uint reportType, uint requestFlags, uint resultCount, LandStatReportItem[] lsrpia)
{
throw new System.NotImplementedException();
}
public void SendScriptRunningReply(UUID objectID, UUID itemID, bool running)
{
throw new System.NotImplementedException();
}
public void SendAsset(AssetRequestToClient req)
{
throw new System.NotImplementedException();
}
public void SendTexture(AssetBase TextureAsset)
{
throw new System.NotImplementedException();
}
public byte[] GetThrottlesPacked(float multiplier)
{
throw new System.NotImplementedException();
}
// Additional interface events declared mid-class (kept in place to avoid
// reordering members of a partially-visible class).
public event ViewerEffectEventHandler OnViewerEffect;
public event Action<IClientAPI> OnLogout;
public event Action<IClientAPI> OnConnectionClosed;
// ----------------------------------------------------------------------
// Interface method stubs (part 2). Same pattern as above: throwing stubs,
// plus a trailing group of intentionally empty no-op implementations.
// ----------------------------------------------------------------------
public void SendBlueBoxMessage(UUID FromAvatarID, string FromAvatarName, string Message)
{
throw new System.NotImplementedException();
}
public void SendLogoutPacket()
{
throw new System.NotImplementedException();
}
public EndPoint GetClientEP()
{
throw new System.NotImplementedException();
}
public ClientInfo GetClientInfo()
{
throw new System.NotImplementedException();
}
public void SetClientInfo(ClientInfo info)
{
throw new System.NotImplementedException();
}
public void SetClientOption(string option, string value)
{
throw new System.NotImplementedException();
}
public string GetClientOption(string option)
{
throw new System.NotImplementedException();
}
public void SendSetFollowCamProperties(UUID objectID, SortedDictionary<int, float> parameters)
{
throw new System.NotImplementedException();
}
public void SendClearFollowCamProperties(UUID objectID)
{
throw new System.NotImplementedException();
}
// NOTE(review): parameter name "regoinID" is a typo for "regionID". It is
// part of the public signature (named-argument callers would break), so it
// is flagged here rather than renamed.
public void SendRegionHandle(UUID regoinID, ulong handle)
{
throw new System.NotImplementedException();
}
public void SendParcelInfo(RegionInfo info, LandData land, UUID parcelID, uint x, uint y)
{
throw new System.NotImplementedException();
}
public void SendScriptTeleportRequest(string objName, string simName, Vector3 pos, Vector3 lookAt)
{
throw new System.NotImplementedException();
}
public void SendDirPlacesReply(UUID queryID, DirPlacesReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendDirPeopleReply(UUID queryID, DirPeopleReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendDirEventsReply(UUID queryID, DirEventsReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendDirGroupsReply(UUID queryID, DirGroupsReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendDirClassifiedReply(UUID queryID, DirClassifiedReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendDirLandReply(UUID queryID, DirLandReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendDirPopularReply(UUID queryID, DirPopularReplyData[] data)
{
throw new System.NotImplementedException();
}
public void SendEventInfoReply(EventData info)
{
throw new System.NotImplementedException();
}
public void SendMapItemReply(mapItemReply[] replies, uint mapitemtype, uint flags)
{
throw new System.NotImplementedException();
}
public void SendAvatarGroupsReply(UUID avatarID, GroupMembershipData[] data)
{
throw new System.NotImplementedException();
}
public void SendOfferCallingCard(UUID srcID, UUID transactionID)
{
throw new System.NotImplementedException();
}
public void SendAcceptCallingCard(UUID transactionID)
{
throw new System.NotImplementedException();
}
public void SendDeclineCallingCard(UUID transactionID)
{
throw new System.NotImplementedException();
}
public void SendTerminateFriend(UUID exFriendID)
{
throw new System.NotImplementedException();
}
public void SendAvatarClassifiedReply(UUID targetID, UUID[] classifiedID, string[] name)
{
throw new System.NotImplementedException();
}
public void SendClassifiedInfoReply(UUID classifiedID, UUID creatorID, uint creationDate, uint expirationDate, uint category, string name, string description, UUID parcelID, uint parentEstate, UUID snapshotID, string simName, Vector3 globalPos, string parcelName, byte classifiedFlags, int price)
{
throw new System.NotImplementedException();
}
public void SendAgentDropGroup(UUID groupID)
{
throw new System.NotImplementedException();
}
public void RefreshGroupMembership()
{
throw new System.NotImplementedException();
}
public void SendAvatarNotesReply(UUID targetID, string text)
{
throw new System.NotImplementedException();
}
public void SendAvatarPicksReply(UUID targetID, Dictionary<UUID, string> picks)
{
throw new System.NotImplementedException();
}
public void SendPickInfoReply(UUID pickID, UUID creatorID, bool topPick, UUID parcelID, string name, string desc, UUID snapshotID, string user, string originalName, string simName, Vector3 posGlobal, int sortOrder, bool enabled)
{
throw new System.NotImplementedException();
}
public void SendAvatarClassifiedReply(UUID targetID, Dictionary<UUID, string> classifieds)
{
throw new System.NotImplementedException();
}
public void SendParcelDwellReply(int localID, UUID parcelID, float dwell)
{
throw new System.NotImplementedException();
}
public void SendUserInfoReply(bool imViaEmail, bool visible, string email)
{
throw new System.NotImplementedException();
}
public void SendUseCachedMuteList()
{
throw new System.NotImplementedException();
}
public void SendMuteListUpdate(string filename)
{
throw new System.NotImplementedException();
}
public void KillEndDone()
{
throw new System.NotImplementedException();
}
public bool AddGenericPacketHandler(string MethodName, GenericMessage handler)
{
throw new System.NotImplementedException();
}
public void SendRebakeAvatarTextures(UUID textureID)
{
throw new System.NotImplementedException();
}
public void SendAvatarInterestsReply(UUID avatarID, uint wantMask, string wantText, uint skillsMask, string skillsText, string languages)
{
throw new System.NotImplementedException();
}
// The remaining members are intentional no-ops (empty bodies): calls are
// silently accepted and discarded rather than throwing.
public void SendGroupAccountingDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID, int amt)
{
}
public void SendGroupAccountingSummary(IClientAPI sender,UUID groupID, uint moneyAmt, int totalTier, int usedTier)
{
}
public void SendGroupTransactionsSummaryDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID,int amt)
{
}
public void SendGroupVoteHistory(UUID groupID, UUID transactionID, GroupVoteHistory[] Votes)
{
}
public void SendGroupActiveProposals(UUID groupID, UUID transactionID, GroupActiveProposals[] Proposals)
{
}
public void SendChangeUserRights(UUID agentID, UUID friendID, int rights)
{
}
public void SendTextBoxRequest(string message, int chatChannel, string objectname, string ownerFirstName, string ownerLastName, UUID objectId)
{
}
public void StopFlying(ISceneEntity presence)
{
}
#endregion
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Java.Nio.Channels.Spi.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Java.Nio.Channels.Spi
{
/// <summary>
/// <para><c> AbstractSelectableChannel </c> is the base implementation class for selectable channels. It declares methods for registering, unregistering and closing selectable channels. It is thread-safe. </para>
/// </summary>
/// <java-name>
/// java/nio/channels/spi/AbstractSelectableChannel
/// </java-name>
[Dot42.DexImport("java/nio/channels/spi/AbstractSelectableChannel", AccessFlags = 1057)]
public abstract partial class AbstractSelectableChannel : global::Java.Nio.Channels.SelectableChannel
/* scope: __dot42__ */
{
/// <summary>
/// <para>Constructs a new <c> AbstractSelectableChannel </c> .</para><para></para>
/// </summary>
[Dot42.DexImport("<init>", "(Ljava/nio/channels/spi/SelectorProvider;)V", AccessFlags = 4)]
protected internal AbstractSelectableChannel(global::Java.Nio.Channels.Spi.SelectorProvider selectorProvider) /* MethodBuilder.Create */
{
    // Body intentionally empty: this is a Dot42-generated binding stub; the
    // [DexImport] attribute maps this constructor onto the real Java
    // implementation (presumably resolved by the Dot42 toolchain at build time).
}
/// <summary>
/// <para>Returns the selector provider that has created this channel.</para><para><para>java.nio.channels.SelectableChannel::provider() </para></para>
/// </summary>
/// <returns>
/// <para>this channel's selector provider. </para>
/// </returns>
/// <java-name>
/// provider
/// </java-name>
[Dot42.DexImport("provider", "()Ljava/nio/channels/spi/SelectorProvider;", AccessFlags = 17)]
public override global::Java.Nio.Channels.Spi.SelectorProvider Provider() /* MethodBuilder.Create */
{
    // Placeholder return only; the [DexImport] binding supplies the actual
    // behavior from the underlying Java method.
    return default(global::Java.Nio.Channels.Spi.SelectorProvider);
}
/// <summary>
/// <para>Indicates whether this channel is registered with one or more selectors.</para><para></para>
/// </summary>
/// <returns>
/// <para><c> true </c> if this channel is registered with a selector, <c> false </c> otherwise. </para>
/// </returns>
/// <java-name>
/// isRegistered
/// </java-name>
[Dot42.DexImport("isRegistered", "()Z", AccessFlags = 49)]
public override bool IsRegistered() /* MethodBuilder.Create */
{
    // Placeholder return only; the [DexImport] binding supplies the actual
    // behavior from the underlying Java method.
    return default(bool);
}
/// <summary>
/// <para>Gets this channel's selection key for the specified selector.</para><para></para>
/// </summary>
/// <returns>
/// <para>the selection key for the channel or <c> null </c> if this channel has not been registered with <c> selector </c> . </para>
/// </returns>
/// <java-name>
/// keyFor
/// </java-name>
[Dot42.DexImport("keyFor", "(Ljava/nio/channels/Selector;)Ljava/nio/channels/SelectionKey;", AccessFlags = 49)]
public override global::Java.Nio.Channels.SelectionKey KeyFor(global::Java.Nio.Channels.Selector selector) /* MethodBuilder.Create */
{
return default(global::Java.Nio.Channels.SelectionKey);
}
/// <summary>
/// <para>Registers this channel with the specified selector for the specified interest set. If the channel is already registered with the selector, the interest set is updated to <c> interestSet </c> and the corresponding selection key is returned. If the channel is not yet registered, this method calls the <c> register </c> method of <c> selector </c> and adds the selection key to this channel's key set.</para><para></para>
/// </summary>
/// <returns>
/// <para>the selection key for this registration. </para>
/// </returns>
/// <java-name>
/// register
/// </java-name>
[Dot42.DexImport("register", "(Ljava/nio/channels/Selector;ILjava/lang/Object;)Ljava/nio/channels/SelectionKey;" +
"", AccessFlags = 17)]
public override global::Java.Nio.Channels.SelectionKey Register(global::Java.Nio.Channels.Selector selector, int interestSet, object attachment) /* MethodBuilder.Create */
{
return default(global::Java.Nio.Channels.SelectionKey);
}
/// <summary>
/// <para>Implements the channel closing behavior. Calls <c> implCloseSelectableChannel() </c> first, then loops through the list of selection keys and cancels them, which unregisters this channel from all selectors it is registered with.</para><para></para>
/// </summary>
/// <java-name>
/// implCloseChannel
/// </java-name>
[Dot42.DexImport("implCloseChannel", "()V", AccessFlags = 52)]
protected internal override void ImplCloseChannel() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Implements the closing function of the SelectableChannel. This method is called from <c> implCloseChannel() </c> .</para><para></para>
/// </summary>
/// <java-name>
/// implCloseSelectableChannel
/// </java-name>
[Dot42.DexImport("implCloseSelectableChannel", "()V", AccessFlags = 1028)]
protected internal abstract void ImplCloseSelectableChannel() /* MethodBuilder.Create */ ;
/// <summary>
/// <para>Indicates whether this channel is in blocking mode.</para><para></para>
/// </summary>
/// <returns>
/// <para><c> true </c> if this channel is blocking, <c> false </c> otherwise. </para>
/// </returns>
/// <java-name>
/// isBlocking
/// </java-name>
[Dot42.DexImport("isBlocking", "()Z", AccessFlags = 17)]
public override bool IsBlocking() /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Gets the object used for the synchronization of <c> register </c> and <c> configureBlocking </c> .</para><para></para>
/// </summary>
/// <returns>
/// <para>the synchronization object. </para>
/// </returns>
/// <java-name>
/// blockingLock
/// </java-name>
[Dot42.DexImport("blockingLock", "()Ljava/lang/Object;", AccessFlags = 17)]
public override object BlockingLock() /* MethodBuilder.Create */
{
return default(object);
}
/// <summary>
/// <para>Sets the blocking mode of this channel. A call to this method blocks if other calls to this method or to <c> register </c> are executing. The actual setting of the mode is done by calling <c> implConfigureBlocking(boolean) </c> .</para><para><para>java.nio.channels.SelectableChannel::configureBlocking(boolean) </para></para>
/// </summary>
/// <returns>
/// <para>this channel. </para>
/// </returns>
/// <java-name>
/// configureBlocking
/// </java-name>
[Dot42.DexImport("configureBlocking", "(Z)Ljava/nio/channels/SelectableChannel;", AccessFlags = 17)]
public override global::Java.Nio.Channels.SelectableChannel ConfigureBlocking(bool blockingMode) /* MethodBuilder.Create */
{
return default(global::Java.Nio.Channels.SelectableChannel);
}
/// <summary>
/// <para>Implements the configuration of blocking/non-blocking mode.</para><para></para>
/// </summary>
/// <java-name>
/// implConfigureBlocking
/// </java-name>
[Dot42.DexImport("implConfigureBlocking", "(Z)V", AccessFlags = 1028)]
protected internal abstract void ImplConfigureBlocking(bool blocking) /* MethodBuilder.Create */ ;
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal AbstractSelectableChannel() /* TypeBuilder.AddDefaultConstructor */
{
}
}
        /// <summary>
        /// <para><c> AbstractSelectionKey </c> is the base implementation class for selection keys. It implements validation and cancellation methods. </para>
        /// </summary>
        /// <java-name>
        /// java/nio/channels/spi/AbstractSelectionKey
        /// </java-name>
        [Dot42.DexImport("java/nio/channels/spi/AbstractSelectionKey", AccessFlags = 1057)]
        public abstract partial class AbstractSelectionKey : global::Java.Nio.Channels.SelectionKey
 /* scope: __dot42__ */ 
        {
                // NOTE(review): Dot42-generated binding stub; bodies are placeholders resolved
                // against the imported Dex class — do not hand-edit the code in this class.
                /// <summary>
                /// <para>Constructs a new <c> AbstractSelectionKey </c> . </para>
                /// </summary>
                [Dot42.DexImport("<init>", "()V", AccessFlags = 4)]
                protected internal AbstractSelectionKey() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Indicates whether this key is valid. A key is valid as long as it has not been canceled.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para><c> true </c> if this key has not been canceled, <c> false </c> otherwise. </para>
                /// </returns>
                /// <java-name>
                /// isValid
                /// </java-name>
                [Dot42.DexImport("isValid", "()Z", AccessFlags = 17)]
                public override bool IsValid() /* MethodBuilder.Create */ 
                {
                        return default(bool);
                }
                /// <summary>
                /// <para>Cancels this key. </para><para>A key that has been canceled is no longer valid. Calling this method on an already canceled key does nothing. </para>
                /// </summary>
                /// <java-name>
                /// cancel
                /// </java-name>
                [Dot42.DexImport("cancel", "()V", AccessFlags = 17)]
                public override void Cancel() /* MethodBuilder.Create */ 
                {
                }
        }
        /// <summary>
        /// <para><c> AbstractSelector </c> is the base implementation class for selectors. It realizes the interruption of selection by <c> begin </c> and <c> end </c> . It also holds the cancellation and the deletion of the key set. </para>
        /// </summary>
        /// <java-name>
        /// java/nio/channels/spi/AbstractSelector
        /// </java-name>
        [Dot42.DexImport("java/nio/channels/spi/AbstractSelector", AccessFlags = 1057)]
        public abstract partial class AbstractSelector : global::Java.Nio.Channels.Selector
 /* scope: __dot42__ */ 
        {
                // NOTE(review): Dot42-generated binding stub; bodies are placeholders resolved
                // against the imported Dex class — do not hand-edit the code in this class.
                /// <summary>
                /// <para>Constructs a new <c> AbstractSelector </c> bound to the given provider.</para>
                /// </summary>
                [Dot42.DexImport("<init>", "(Ljava/nio/channels/spi/SelectorProvider;)V", AccessFlags = 4)]
                protected internal AbstractSelector(global::Java.Nio.Channels.Spi.SelectorProvider selectorProvider) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Closes this selector. This method does nothing if this selector is already closed. The actual closing must be implemented by subclasses in <c> implCloseSelector() </c> . </para>
                /// </summary>
                /// <java-name>
                /// close
                /// </java-name>
                [Dot42.DexImport("close", "()V", AccessFlags = 17)]
                public override void Close() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Implements the closing of this channel. </para>
                /// </summary>
                /// <java-name>
                /// implCloseSelector
                /// </java-name>
                [Dot42.DexImport("implCloseSelector", "()V", AccessFlags = 1028)]
                protected internal abstract void ImplCloseSelector() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Returns true if this selector is open. </para>
                /// </summary>
                /// <java-name>
                /// isOpen
                /// </java-name>
                [Dot42.DexImport("isOpen", "()Z", AccessFlags = 17)]
                public override bool IsOpen() /* MethodBuilder.Create */ 
                {
                        return default(bool);
                }
                /// <summary>
                /// <para>Returns this selector's provider. </para>
                /// </summary>
                /// <java-name>
                /// provider
                /// </java-name>
                [Dot42.DexImport("provider", "()Ljava/nio/channels/spi/SelectorProvider;", AccessFlags = 17)]
                public override global::Java.Nio.Channels.Spi.SelectorProvider Provider() /* MethodBuilder.Create */ 
                {
                        return default(global::Java.Nio.Channels.Spi.SelectorProvider);
                }
                /// <summary>
                /// <para>Returns this channel's set of canceled selection keys. </para>
                /// </summary>
                /// <java-name>
                /// cancelledKeys
                /// </java-name>
                [Dot42.DexImport("cancelledKeys", "()Ljava/util/Set;", AccessFlags = 20, Signature = "()Ljava/util/Set<Ljava/nio/channels/SelectionKey;>;")]
                protected internal global::Java.Util.ISet<global::Java.Nio.Channels.SelectionKey> CancelledKeys() /* MethodBuilder.Create */ 
                {
                        return default(global::Java.Util.ISet<global::Java.Nio.Channels.SelectionKey>);
                }
                /// <summary>
                /// <para>Registers <c> channel </c> with this selector.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the key related to the channel and this selector. </para>
                /// </returns>
                /// <java-name>
                /// register
                /// </java-name>
                [Dot42.DexImport("register", "(Ljava/nio/channels/spi/AbstractSelectableChannel;ILjava/lang/Object;)Ljava/nio/c" +
    "hannels/SelectionKey;", AccessFlags = 1028)]
                protected internal abstract global::Java.Nio.Channels.SelectionKey Register(global::Java.Nio.Channels.Spi.AbstractSelectableChannel channel, int operations, object attachment) /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Deletes the key from the channel's key set. </para>
                /// </summary>
                /// <java-name>
                /// deregister
                /// </java-name>
                [Dot42.DexImport("deregister", "(Ljava/nio/channels/spi/AbstractSelectionKey;)V", AccessFlags = 20)]
                protected internal void Deregister(global::Java.Nio.Channels.Spi.AbstractSelectionKey key) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Indicates the beginning of a code section that includes an I/O operation that is potentially blocking. After this operation, the application should invoke the corresponding <c> end(boolean) </c> method. </para>
                /// </summary>
                /// <java-name>
                /// begin
                /// </java-name>
                [Dot42.DexImport("begin", "()V", AccessFlags = 20)]
                protected internal void Begin() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Indicates the end of a code section that has been started with <c> begin() </c> and that includes a potentially blocking I/O operation. </para>
                /// </summary>
                /// <java-name>
                /// end
                /// </java-name>
                [Dot42.DexImport("end", "()V", AccessFlags = 20)]
                protected internal void End() /* MethodBuilder.Create */ 
                {
                }
                // Hidden parameterless constructor added by the generator; not part of the Java API surface.
                [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
                internal AbstractSelector() /* TypeBuilder.AddDefaultConstructor */ 
                {
                }
        }
        /// <summary>
        /// <para><c> AbstractInterruptibleChannel </c> is the root class for interruptible channels. </para><para>The basic usage pattern for an interruptible channel is to invoke <c> begin() </c> before any I/O operation that potentially blocks indefinitely, then <c> end(boolean) </c> after completing the operation. The argument to the <c> end </c> method should indicate if the I/O operation has actually completed so that any change may be visible to the invoker. </para>
        /// </summary>
        /// <java-name>
        /// java/nio/channels/spi/AbstractInterruptibleChannel
        /// </java-name>
        [Dot42.DexImport("java/nio/channels/spi/AbstractInterruptibleChannel", AccessFlags = 1057)]
        public abstract partial class AbstractInterruptibleChannel : global::Java.Nio.Channels.IChannel, global::Java.Nio.Channels.IInterruptibleChannel
 /* scope: __dot42__ */ 
        {
                // NOTE(review): Dot42-generated binding stub; bodies are placeholders resolved
                // against the imported Dex class — do not hand-edit the code in this class.
                /// <summary>
                /// <para>Constructs a new <c> AbstractInterruptibleChannel </c> .</para>
                /// </summary>
                [Dot42.DexImport("<init>", "()V", AccessFlags = 4)]
                protected internal AbstractInterruptibleChannel() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Returns true if this channel is open. </para>
                /// </summary>
                /// <java-name>
                /// isOpen
                /// </java-name>
                [Dot42.DexImport("isOpen", "()Z", AccessFlags = 49)]
                public bool IsOpen() /* MethodBuilder.Create */ 
                {
                        return default(bool);
                }
                /// <summary>
                /// <para>Closes an open channel. If the channel is already closed then this method has no effect, otherwise it closes the receiver via the <c> implCloseChannel </c> method. </para><para>If an attempt is made to perform an operation on a closed channel then a java.nio.channels.ClosedChannelException is thrown. </para><para>If multiple threads attempt to simultaneously close a channel, then only one thread will run the closure code and the others will be blocked until the first one completes.</para><para><para>java.nio.channels.Channel::close() </para></para>
                /// </summary>
                /// <java-name>
                /// close
                /// </java-name>
                [Dot42.DexImport("close", "()V", AccessFlags = 17)]
                public void Close() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Indicates the beginning of a code section that includes an I/O operation that is potentially blocking. After this operation, the application should invoke the corresponding <c> end(boolean) </c> method. </para>
                /// </summary>
                /// <java-name>
                /// begin
                /// </java-name>
                [Dot42.DexImport("begin", "()V", AccessFlags = 20)]
                protected internal void Begin() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Indicates the end of a code section that has been started with <c> begin() </c> and that includes a potentially blocking I/O operation.</para><para></para>
                /// </summary>
                /// <java-name>
                /// end
                /// </java-name>
                [Dot42.DexImport("end", "(Z)V", AccessFlags = 20)]
                protected internal void End(bool success) /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Implements the channel closing behavior. </para><para>Closes the channel with a guarantee that the channel is not currently closed through another invocation of <c> close() </c> and that the method is thread-safe. </para><para>Any outstanding threads blocked on I/O operations on this channel must be released with either a normal return code, or by throwing an <c> AsynchronousCloseException </c> .</para><para></para>
                /// </summary>
                /// <java-name>
                /// implCloseChannel
                /// </java-name>
                [Dot42.DexImport("implCloseChannel", "()V", AccessFlags = 1028)]
                protected internal abstract void ImplCloseChannel() /* MethodBuilder.Create */ ;
        }
        /// <summary>
        /// <para><c> SelectorProvider </c> is an abstract base class that declares methods for providing instances of DatagramChannel, Pipe, java.nio.channels.Selector , ServerSocketChannel, and SocketChannel. All the methods of this class are thread-safe.</para><para>A provider instance can be retrieved through a system property or the configuration file in a jar file; if no provider is available that way then the system default provider is returned. </para>
        /// </summary>
        /// <java-name>
        /// java/nio/channels/spi/SelectorProvider
        /// </java-name>
        [Dot42.DexImport("java/nio/channels/spi/SelectorProvider", AccessFlags = 1057)]
        public abstract partial class SelectorProvider
 /* scope: __dot42__ */ 
        {
                // NOTE(review): Dot42-generated binding stub; bodies are placeholders resolved
                // against the imported Dex class — do not hand-edit the code in this class.
                /// <summary>
                /// <para>Constructs a new <c> SelectorProvider </c> . </para>
                /// </summary>
                [Dot42.DexImport("<init>", "()V", AccessFlags = 4)]
                protected internal SelectorProvider() /* MethodBuilder.Create */ 
                {
                }
                /// <summary>
                /// <para>Gets a provider instance by executing the following steps when called for the first time: <ul><li><para>if the system property "java.nio.channels.spi.SelectorProvider" is set, the value of this property is the class name of the provider returned; </para></li><li><para>if there is a provider-configuration file named "java.nio.channels.spi.SelectorProvider" in META-INF/services of a jar file valid in the system class loader, the first class name is the provider's class name; </para></li><li><para>otherwise, a system default provider will be returned. </para></li></ul></para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the provider. </para>
                /// </returns>
                /// <java-name>
                /// provider
                /// </java-name>
                [Dot42.DexImport("provider", "()Ljava/nio/channels/spi/SelectorProvider;", AccessFlags = 41)]
                public static global::Java.Nio.Channels.Spi.SelectorProvider Provider() /* MethodBuilder.Create */ 
                {
                        return default(global::Java.Nio.Channels.Spi.SelectorProvider);
                }
                /// <summary>
                /// <para>Creates a new open <c> DatagramChannel </c> .</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the new channel. </para>
                /// </returns>
                /// <java-name>
                /// openDatagramChannel
                /// </java-name>
                [Dot42.DexImport("openDatagramChannel", "()Ljava/nio/channels/DatagramChannel;", AccessFlags = 1025)]
                public abstract global::Java.Nio.Channels.DatagramChannel OpenDatagramChannel() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Creates a new <c> Pipe </c> .</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the new pipe. </para>
                /// </returns>
                /// <java-name>
                /// openPipe
                /// </java-name>
                [Dot42.DexImport("openPipe", "()Ljava/nio/channels/Pipe;", AccessFlags = 1025)]
                public abstract global::Java.Nio.Channels.Pipe OpenPipe() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Creates a new selector.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the new selector. </para>
                /// </returns>
                /// <java-name>
                /// openSelector
                /// </java-name>
                [Dot42.DexImport("openSelector", "()Ljava/nio/channels/spi/AbstractSelector;", AccessFlags = 1025)]
                public abstract global::Java.Nio.Channels.Spi.AbstractSelector OpenSelector() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Creates a new open <c> ServerSocketChannel </c> .</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the new channel. </para>
                /// </returns>
                /// <java-name>
                /// openServerSocketChannel
                /// </java-name>
                [Dot42.DexImport("openServerSocketChannel", "()Ljava/nio/channels/ServerSocketChannel;", AccessFlags = 1025)]
                public abstract global::Java.Nio.Channels.ServerSocketChannel OpenServerSocketChannel() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Create a new open <c> SocketChannel </c> .</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the new channel. </para>
                /// </returns>
                /// <java-name>
                /// openSocketChannel
                /// </java-name>
                [Dot42.DexImport("openSocketChannel", "()Ljava/nio/channels/SocketChannel;", AccessFlags = 1025)]
                public abstract global::Java.Nio.Channels.SocketChannel OpenSocketChannel() /* MethodBuilder.Create */ ;
                /// <summary>
                /// <para>Returns the channel inherited from the process that created this VM. On Android, this method always returns null because stdin and stdout are never connected to a socket.</para><para></para>
                /// </summary>
                /// <returns>
                /// <para>the channel. </para>
                /// </returns>
                /// <java-name>
                /// inheritedChannel
                /// </java-name>
                [Dot42.DexImport("inheritedChannel", "()Ljava/nio/channels/Channel;", AccessFlags = 1)]
                public virtual global::Java.Nio.Channels.IChannel InheritedChannel() /* MethodBuilder.Create */ 
                {
                        return default(global::Java.Nio.Channels.IChannel);
                }
        }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.12.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsUrl
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Models;
    // NOTE(review): AutoRest-generated extension methods (see the generated-code header
    // above) — hand edits will be lost if the client is regenerated.
    public static partial class PathItemsExtensions
    {
            /// <summary>
            /// send globalStringPath='globalStringPath',
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery='globalStringQuery',
            /// pathItemStringQuery='pathItemStringQuery',
            /// localStringQuery='localStringQuery'
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain value 'localStringQuery'
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// A string value 'pathItemStringQuery' that appears as a query parameter
            /// </param>
            public static void GetAllWithValues(this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string))
            {
                // NOTE(review): generated sync-over-async wrapper — blocks the calling thread
                // on the async call; prefer GetAllWithValuesAsync in async contexts.
                Task.Factory.StartNew(s => ((IPathItems)s).GetAllWithValuesAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }
            /// <summary>
            /// send globalStringPath='globalStringPath',
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery='globalStringQuery',
            /// pathItemStringQuery='pathItemStringQuery',
            /// localStringQuery='localStringQuery'
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain value 'localStringQuery'
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// A string value 'pathItemStringQuery' that appears as a query parameter
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task GetAllWithValuesAsync( this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string), CancellationToken cancellationToken = default(CancellationToken))
            {
                // Delegates to the HTTP-message overload and discards the response envelope.
                await operations.GetAllWithValuesWithHttpMessagesAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery, null, cancellationToken).ConfigureAwait(false);
            }
            /// <summary>
            /// send globalStringPath='globalStringPath',
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery=null,
            /// pathItemStringQuery='pathItemStringQuery',
            /// localStringQuery='localStringQuery'
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain value 'localStringQuery'
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// A string value 'pathItemStringQuery' that appears as a query parameter
            /// </param>
            public static void GetGlobalQueryNull(this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string))
            {
                // NOTE(review): generated sync-over-async wrapper — blocks the calling thread
                // on the async call; prefer GetGlobalQueryNullAsync in async contexts.
                Task.Factory.StartNew(s => ((IPathItems)s).GetGlobalQueryNullAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }
            /// <summary>
            /// send globalStringPath='globalStringPath',
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery=null,
            /// pathItemStringQuery='pathItemStringQuery',
            /// localStringQuery='localStringQuery'
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain value 'localStringQuery'
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// A string value 'pathItemStringQuery' that appears as a query parameter
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task GetGlobalQueryNullAsync( this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string), CancellationToken cancellationToken = default(CancellationToken))
            {
                // Delegates to the HTTP-message overload and discards the response envelope.
                await operations.GetGlobalQueryNullWithHttpMessagesAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery, null, cancellationToken).ConfigureAwait(false);
            }
            /// <summary>
            /// send globalStringPath=globalStringPath,
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery=null,
            /// pathItemStringQuery='pathItemStringQuery', localStringQuery=null
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain null value
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// A string value 'pathItemStringQuery' that appears as a query parameter
            /// </param>
            public static void GetGlobalAndLocalQueryNull(this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string))
            {
                // NOTE(review): generated sync-over-async wrapper — blocks the calling thread
                // on the async call; prefer GetGlobalAndLocalQueryNullAsync in async contexts.
                Task.Factory.StartNew(s => ((IPathItems)s).GetGlobalAndLocalQueryNullAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }
            /// <summary>
            /// send globalStringPath=globalStringPath,
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery=null,
            /// pathItemStringQuery='pathItemStringQuery', localStringQuery=null
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain null value
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// A string value 'pathItemStringQuery' that appears as a query parameter
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task GetGlobalAndLocalQueryNullAsync( this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string), CancellationToken cancellationToken = default(CancellationToken))
            {
                // Delegates to the HTTP-message overload and discards the response envelope.
                await operations.GetGlobalAndLocalQueryNullWithHttpMessagesAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery, null, cancellationToken).ConfigureAwait(false);
            }
            /// <summary>
            /// send globalStringPath='globalStringPath',
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery='globalStringQuery',
            /// pathItemStringQuery=null, localStringQuery=null
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain value null
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// should contain value null
            /// </param>
            public static void GetLocalPathItemQueryNull(this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string))
            {
                // NOTE(review): generated sync-over-async wrapper — blocks the calling thread
                // on the async call; prefer GetLocalPathItemQueryNullAsync in async contexts.
                Task.Factory.StartNew(s => ((IPathItems)s).GetLocalPathItemQueryNullAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }
            /// <summary>
            /// send globalStringPath='globalStringPath',
            /// pathItemStringPath='pathItemStringPath',
            /// localStringPath='localStringPath', globalStringQuery='globalStringQuery',
            /// pathItemStringQuery=null, localStringQuery=null
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='localStringPath'>
            /// should contain value 'localStringPath'
            /// </param>
            /// <param name='pathItemStringPath'>
            /// A string value 'pathItemStringPath' that appears in the path
            /// </param>
            /// <param name='localStringQuery'>
            /// should contain value null
            /// </param>
            /// <param name='pathItemStringQuery'>
            /// should contain value null
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task GetLocalPathItemQueryNullAsync( this IPathItems operations, string localStringPath, string pathItemStringPath, string localStringQuery = default(string), string pathItemStringQuery = default(string), CancellationToken cancellationToken = default(CancellationToken))
            {
                // Delegates to the HTTP-message overload and discards the response envelope.
                await operations.GetLocalPathItemQueryNullWithHttpMessagesAsync(localStringPath, pathItemStringPath, localStringQuery, pathItemStringQuery, null, cancellationToken).ConfigureAwait(false);
            }
    }
}
| |
#region -- License Terms --
//
// MessagePack for CLI
//
// Copyright (C) 2010-2015 FUJIWARA, Yusuke
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion -- License Terms --
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using MsgPack.Serialization.ReflectionSerializers;
using NUnit.Framework;
namespace MsgPack.Serialization
{
[TestFixture]
public class AotTest
{
		[TestFixtureSetUp]
		public static void SetupFixture()
		{
			// Pre-registers the serializers and comparers exercised by the tests below.
			// NOTE(review): presumably required so the generic instantiations exist ahead
			// of time under AOT compilation (the fixture is named AotTest) — confirm.
			MessagePackSerializer.PrepareType<Timestamp>();
			MessagePackSerializer.PrepareCollectionType<byte>();
			MessagePackSerializer.PrepareCollectionType<char>();
			MessagePackSerializer.PrepareCollectionType<int>();
			MessagePackSerializer.PrepareCollectionType<float>();
			MessagePackSerializer.PrepareCollectionType<double>();
			MessagePackSerializer.PrepareCollectionType<short>();
			MessagePackSerializer.PrepareCollectionType<uint>();
			MessagePackSerializer.PrepareCollectionType<ulong>();
			MessagePackSerializer.PrepareCollectionType<sbyte>();
			MessagePackSerializer.PrepareDictionaryType<string, int>();
			// Touch the ArraySegment comparers so their generic Equals methods are instantiated.
			new ArraySegmentEqualityComparer<byte>().Equals( default( ArraySegment<byte> ), default( ArraySegment<byte> ) );
			new ArraySegmentEqualityComparer<char>().Equals( default( ArraySegment<char> ), default( ArraySegment<char> ) );
			new ArraySegmentEqualityComparer<int>().Equals( default( ArraySegment<int> ), default( ArraySegment<int> ) );
		}
[Test]
public void TestGenericDefaultSerializer_ArraySegmentOfByte()
{
TestGenericDefaultSerializerCore( new ArraySegment<byte>( new byte[] { 1 } ), new ArraySegmentEqualityComparer<byte>().Equals );
}
[Test]
public void TestGenericDefaultSerializer_ArraySegmentOfChar()
{
TestGenericDefaultSerializerCore( new ArraySegment<char>( new[] { 'a' } ), new ArraySegmentEqualityComparer<char>().Equals );
}
[Test]
public void TestGenericDefaultSerializer_ArraySegmentOfInt32()
{
TestGenericDefaultSerializerCore( new ArraySegment<int>( new[] { 1 } ), new ArraySegmentEqualityComparer<int>().Equals );
}
[Test]
public void TestGenericDefaultSerializer_KeyValuePair()
{
TestGenericDefaultSerializerCore( new KeyValuePair<string, int>( "A", 1 ), ( x, y ) => x.Key == y.Key && x.Value == y.Value );
}
[Test]
public void TestGenericDefaultSerializer_Stack()
{
var stack = new Stack<int>(2);
stack.Push( 1 );
stack.Push( 2 );
TestGenericDefaultSerializerCore( stack, Enumerable.SequenceEqual );
}
[Test]
public void TestGenericDefaultSerializer_Queue()
{
var queue = new Queue<int>(2);
queue.Enqueue( 1 );
queue.Enqueue( 2 );
TestGenericDefaultSerializerCore( queue, Enumerable.SequenceEqual );
}
[Test]
public void TestGenericDefaultSerializer_List()
{
TestGenericDefaultSerializerCore( new List<int>( 2 ) { 1, 2 }, Enumerable.SequenceEqual );
}
[Test]
public void TestGenericDefaultSerializer_ListOfMessagePackObject()
{
TestGenericDefaultSerializerCore( new List<MessagePackObject>( 2 ) { 1, 2 }, Enumerable.SequenceEqual );
}
[Test]
public void TestGenericDefaultSerializer_Dictionary()
{
TestGenericDefaultSerializerCore( new Dictionary<string, int>( 2 ) { { "A", 1 }, { "B", 2 } }, Enumerable.SequenceEqual );
}
private static void TestGenericDefaultSerializerCore<T>( T value, Func<T, T, bool> comparer )
{
var context = new SerializationContext( PackerCompatibilityOptions.None );
var serializer = context.GetSerializer<T>();
using ( var buffer = new MemoryStream() )
{
serializer.Pack( buffer, value );
buffer.Position = 0;
var result = serializer.Unpack( buffer );
Assert.That( comparer( value, result ), " Expected: {1}{0} Actual :{2}", Environment.NewLine, value, result );
}
}
[Test]
public void TestTypeMetadataExtraction()
{
VerifyType( typeof( WithMessagePackMember ), new[] { "B", "A" }, new string[ 0 ] );
VerifyType( typeof( ComplexTypeWithDataContractWithOrder ), new[] { "Source", "Data", "TimeStamp", "History" }, new[] { "History" } );
VerifyType( typeof( ComplexTypeWithOneBaseOrder ), new[] { null, "One", "Two" }, new string[ 0 ] );
VerifyType( typeof( DataMemberAttributeNamedPropertyTestTarget ), new[] { "Alias" }, new string[ 0 ] );
}
private static void VerifyType( Type type, string[] expectedMemberNames, string[] readOnlyMembers )
{
var context = new SerializationContext();
var target = SerializationTarget.Prepare( context, type );
Assert.That(
target.Members.Count,
Is.EqualTo( expectedMemberNames.Length ),
"Some members are lacked.{0} Expected:[{1}]{0} Actual :[{2}]",
Environment.NewLine,
String.Join( ", ", expectedMemberNames ),
String.Join( ", ", target.Members.Select( m => String.Format("{{Name: {0}, Contract: {{Name: {1}, Id: {2}, NilImplication: {3}}}, Member: '{4}'}}", m.MemberName, m.Contract.Name, m.Contract.Id, m.Contract.NilImplication, m.Member) ).ToArray() )
);
for ( var i = 0; i < expectedMemberNames.Length; i++ )
{
Assert.That(
target.Members[ i ].MemberName,
Is.EqualTo( expectedMemberNames[ i ] ),
"Member at index {1} is differ.{0} Expected:[{2}]{0} Actual :[{3}]",
Environment.NewLine,
i,
String.Join( ", ", expectedMemberNames ),
String.Join( ", ", target.Members.Select( m => m.MemberName + "@Id=" + m.Contract.Id ).ToArray() )
);
}
Func<object, object>[] getters;
Action<object, object>[] setters;
MemberInfo[] memberInfos;
DataMemberContract[] contracts;
MessagePackSerializer[] serializers;
ReflectionSerializerHelper.GetMetadata( type, target.Members, context, out getters, out setters, out memberInfos, out contracts, out serializers );
Assert.That( getters.Length, Is.EqualTo( target.Members.Count ), "getters.Length" );
Assert.That( setters.Length, Is.EqualTo( target.Members.Count ), "setters.Length" );
Assert.That( memberInfos.Length, Is.EqualTo( target.Members.Count ), "memberInfos.Length" );
Assert.That( contracts.Length, Is.EqualTo( target.Members.Count ), "contracts.Length" );
Assert.That( serializers.Length, Is.EqualTo( target.Members.Count ), "serializers.Length" );
for ( var i = 0; i < expectedMemberNames.Length; i++ )
{
if ( expectedMemberNames[ i ] == null )
{
Assert.That( getters[ i ], Is.Null, "getters[{0}]", i );
Assert.That( setters[ i ], Is.Null, "setters[{0}]", i );
Assert.That( memberInfos[ i ], Is.Null, "memberInfos[{0}]", i );
Assert.That( contracts[ i ].Name, Is.Null, "contracts[{0}]", i );
Assert.That( serializers[ i ], Is.Null, "serializers[{0}]", i );
}
else
{
Assert.That( getters[ i ], Is.Not.Null, "getters[{0}]", i );
if ( readOnlyMembers.Contains( expectedMemberNames[ i ] ) )
{
Assert.That( setters[ i ], Is.Null, "setters[{0}]", i );
}
else
{
Assert.That( setters[ i ], Is.Not.Null, "setters[{0}]", i );
}
Assert.That( memberInfos[ i ], Is.Not.Null, "memberInfos[{0}]", i );
Assert.That( contracts[ i ].Name, Is.Not.Null, "contracts[{0}]", i );
Assert.That( serializers[ i ], Is.Not.Null, "serializers[{0}]", i );
}
}
}
public class WithMessagePackMember
{
[MessagePackMember( 0 )]
public string B { get; set; }
[MessagePackMember( 1 )]
public string A { get; set; }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    /// <summary>
    /// Driver for the Sse41.TestAllOnes&lt;UInt64&gt; scenarios. Runs every scenario on
    /// supported hardware (the Load/LoadAligned variants only when SSE2 is present),
    /// otherwise verifies that PlatformNotSupportedException is thrown.
    /// </summary>
    private static void TestAllOnesUInt64()
    {
        var test = new BooleanComparisonOpTest__TestAllOnesUInt64();

        if (!test.IsSupported)
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }
        else
        {
            bool loadsAvailable = Sse2.IsSupported;

            // Basic functionality, via Unsafe.Read and (when available) Load/LoadAligned
            test.RunBasicScenario_UnsafeRead();
            if (loadsAvailable)
            {
                test.RunBasicScenario_Load();
                test.RunBasicScenario_LoadAligned();
            }

            // Invocation through reflection
            test.RunReflectionScenario_UnsafeRead();
            if (loadsAvailable)
            {
                test.RunReflectionScenario_Load();
                test.RunReflectionScenario_LoadAligned();
            }

            // Passing a static member
            test.RunClsVarScenario();

            // Passing locals
            test.RunLclVarScenario_UnsafeRead();
            if (loadsAvailable)
            {
                test.RunLclVarScenario_Load();
                test.RunLclVarScenario_LoadAligned();
            }

            // Passing the field of a local, then an instance field
            test.RunLclFldScenario();
            test.RunFldScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
// Auto-generated scenario harness for Sse41.TestAllOnes on Vector128<UInt64>.
// Each Run* method feeds the operand through a different code path (unsafe read,
// aligned/unaligned load, reflection, static/instance/local fields) and validates
// the intrinsic result against a scalar recomputation.
public sealed unsafe class BooleanComparisonOpTest__TestAllOnesUInt64
{
    // A Vector128 is 16 bytes, i.e. two UInt64 lanes.
    private const int VectorSize = 16;
    private const int Op1ElementCount = VectorSize / sizeof(UInt64);

    // Scratch source data; refilled with fresh random values by each initializer below.
    private static UInt64[] _data = new UInt64[Op1ElementCount];

    // Operand exposed as a static field (exercised by RunClsVarScenario).
    private static Vector128<UInt64> _clsVar;

    // Operand exposed as an instance field (RunFldScenario / RunLclFldScenario).
    private Vector128<UInt64> _fld;

    // Buffer the pointer-based scenarios read from via inArrayPtr.
    private BooleanUnaryOpTest__DataTable<UInt64> _dataTable;

    static BooleanComparisonOpTest__TestAllOnesUInt64()
    {
        var random = new Random();
        // random.Next(0, int.MaxValue) never yields an all-ones lane, so TestAllOnes
        // is expected to return false for this data in practice.
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (ulong)(random.Next(0, int.MaxValue)); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _clsVar), ref Unsafe.As<UInt64, byte>(ref _data[0]), VectorSize);
    }

    public BooleanComparisonOpTest__TestAllOnesUInt64()
    {
        Succeeded = true;
        var random = new Random();
        // First fill seeds the instance field, second fill seeds the data table,
        // so the two operands hold independent random values.
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (ulong)(random.Next(0, int.MaxValue)); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<UInt64>, byte>(ref _fld), ref Unsafe.As<UInt64, byte>(ref _data[0]), VectorSize);
        for (var i = 0; i < Op1ElementCount; i++) { _data[i] = (ulong)(random.Next(0, int.MaxValue)); }
        _dataTable = new BooleanUnaryOpTest__DataTable<UInt64>(_data, VectorSize);
    }

    // TestAllOnes is an SSE4.1 instruction (PTEST-based).
    public bool IsSupported => Sse41.IsSupported;

    // Set to false by ValidateResult on any mismatch; checked by the driver.
    public bool Succeeded { get; set; }

    // Basic functionality, operand read with Unsafe.Read.
    public void RunBasicScenario_UnsafeRead()
    {
        var result = Sse41.TestAllOnes(
            Unsafe.Read<Vector128<UInt64>>(_dataTable.inArrayPtr)
        );
        ValidateResult(_dataTable.inArrayPtr, result);
    }

    // Basic functionality, operand loaded with an unaligned SSE2 load.
    public void RunBasicScenario_Load()
    {
        var result = Sse41.TestAllOnes(
            Sse2.LoadVector128((UInt64*)(_dataTable.inArrayPtr))
        );
        ValidateResult(_dataTable.inArrayPtr, result);
    }

    // Basic functionality, operand loaded with an aligned SSE2 load
    // (inArrayPtr is presumably 16-byte aligned by the data table — relies on
    // BooleanUnaryOpTest__DataTable, defined elsewhere).
    public void RunBasicScenario_LoadAligned()
    {
        var result = Sse41.TestAllOnes(
            Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArrayPtr))
        );
        ValidateResult(_dataTable.inArrayPtr, result);
    }

    // Same three operand paths as above, but invoking the intrinsic via reflection
    // to exercise the non-intrinsic (JIT fallback) call path.
    public void RunReflectionScenario_UnsafeRead()
    {
        var result = typeof(Sse41).GetMethod(nameof(Sse41.TestAllOnes), new Type[] { typeof(Vector128<UInt64>) })
            .Invoke(null, new object[] {
                Unsafe.Read<Vector128<UInt64>>(_dataTable.inArrayPtr)
            });
        ValidateResult(_dataTable.inArrayPtr, (bool)(result));
    }

    public void RunReflectionScenario_Load()
    {
        var result = typeof(Sse41).GetMethod(nameof(Sse41.TestAllOnes), new Type[] { typeof(Vector128<UInt64>) })
            .Invoke(null, new object[] {
                Sse2.LoadVector128((UInt64*)(_dataTable.inArrayPtr))
            });
        ValidateResult(_dataTable.inArrayPtr, (bool)(result));
    }

    public void RunReflectionScenario_LoadAligned()
    {
        var result = typeof(Sse41).GetMethod(nameof(Sse41.TestAllOnes), new Type[] { typeof(Vector128<UInt64>) })
            .Invoke(null, new object[] {
                Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArrayPtr))
            });
        ValidateResult(_dataTable.inArrayPtr, (bool)(result));
    }

    // Operand passed from a static field.
    public void RunClsVarScenario()
    {
        var result = Sse41.TestAllOnes(
            _clsVar
        );
        ValidateResult(_clsVar, result);
    }

    // Operand passed from a local variable, filled via each of the three read paths.
    public void RunLclVarScenario_UnsafeRead()
    {
        var value = Unsafe.Read<Vector128<UInt64>>(_dataTable.inArrayPtr);
        var result = Sse41.TestAllOnes(value);
        ValidateResult(value, result);
    }

    public void RunLclVarScenario_Load()
    {
        var value = Sse2.LoadVector128((UInt64*)(_dataTable.inArrayPtr));
        var result = Sse41.TestAllOnes(value);
        ValidateResult(value, result);
    }

    public void RunLclVarScenario_LoadAligned()
    {
        var value = Sse2.LoadAlignedVector128((UInt64*)(_dataTable.inArrayPtr));
        var result = Sse41.TestAllOnes(value);
        ValidateResult(value, result);
    }

    // Operand read from the field of a freshly constructed local instance.
    public void RunLclFldScenario()
    {
        var test = new BooleanComparisonOpTest__TestAllOnesUInt64();
        var result = Sse41.TestAllOnes(test._fld);
        ValidateResult(test._fld, result);
    }

    // Operand read from this instance's field.
    public void RunFldScenario()
    {
        var result = Sse41.TestAllOnes(_fld);
        ValidateResult(_fld, result);
    }

    // On unsupported hardware the intrinsic must throw PlatformNotSupportedException;
    // anything else (including no exception) leaves Succeeded false.
    public void RunUnsupportedScenario()
    {
        Succeeded = false;
        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            Succeeded = true;
        }
    }

    // Validation overloads: copy the operand into a managed array, then compare.
    private void ValidateResult(Vector128<UInt64> value, bool result, [CallerMemberName] string method = "")
    {
        UInt64[] inArray = new UInt64[Op1ElementCount];
        Unsafe.Write(Unsafe.AsPointer(ref inArray[0]), value);
        ValidateResult(inArray, result, method);
    }

    private void ValidateResult(void* value, bool result, [CallerMemberName] string method = "")
    {
        UInt64[] inArray = new UInt64[Op1ElementCount];
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<UInt64, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(value), VectorSize);
        ValidateResult(inArray, result, method);
    }

    // Scalar reference model: TestAllOnes is true iff every lane has all bits set,
    // i.e. ~lane == 0 for each lane. Logs a diagnostic and fails on mismatch.
    private void ValidateResult(UInt64[] value, bool result, [CallerMemberName] string method = "")
    {
        var expectedResult = true;
        for (var i = 0; i < Op1ElementCount; i++)
        {
            expectedResult &= ((~value[i] & ulong.MaxValue) == 0);
        }
        if (expectedResult != result)
        {
            Succeeded = false;
            Console.WriteLine($"{nameof(Sse41)}.{nameof(Sse41.TestAllOnes)}<UInt64>(Vector128<UInt64>): {method} failed:");
            Console.WriteLine($"  value: ({string.Join(", ", value)})");
            Console.WriteLine($"  result: ({string.Join(", ", result)})");
            Console.WriteLine();
        }
    }
}
}
| |
/*
* Copyright (c) 2008, openmetaverse.org
* All rights reserved.
*
* - Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Neither the name of the openmetaverse.org nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Runtime.InteropServices;
using System.Globalization;
namespace OpenMetaverse
{
/// <summary>
/// A three-dimensional vector with floating-point values
/// </summary>
[Serializable]
[StructLayout(LayoutKind.Sequential)]
public struct Vector3 : IComparable<Vector3>, IEquatable<Vector3>
{
    /// <summary>X value</summary>
    public float X;
    /// <summary>Y value</summary>
    public float Y;
    /// <summary>Z value</summary>
    public float Z;

    #region Constructors

    /// <summary>Builds a vector from three components</summary>
    public Vector3(float x, float y, float z)
    {
        X = x;
        Y = y;
        Z = z;
    }

    /// <summary>Builds a vector with all three components set to <paramref name="value"/></summary>
    public Vector3(float value)
    {
        X = value;
        Y = value;
        Z = value;
    }

    /// <summary>Builds a vector from a 2D vector plus a Z component</summary>
    public Vector3(Vector2 value, float z)
    {
        X = value.X;
        Y = value.Y;
        Z = z;
    }

    /// <summary>Builds a vector from a double-precision vector, truncating to float</summary>
    public Vector3(Vector3d vector)
    {
        X = (float)vector.X;
        Y = (float)vector.Y;
        Z = (float)vector.Z;
    }

    /// <summary>
    /// Constructor, builds a vector from a byte array
    /// </summary>
    /// <param name="byteArray">Byte array containing three four-byte floats</param>
    /// <param name="pos">Beginning position in the byte array</param>
    public Vector3(byte[] byteArray, int pos)
    {
        X = Y = Z = 0f;
        FromBytes(byteArray, pos);
    }

    /// <summary>Copy constructor</summary>
    public Vector3(Vector3 vector)
    {
        X = vector.X;
        Y = vector.Y;
        Z = vector.Z;
    }

    #endregion Constructors

    #region Public Methods

    /// <summary>Euclidean length (magnitude) of the vector</summary>
    public float Length()
    {
        return (float)Math.Sqrt(DistanceSquared(this, Zero));
    }

    /// <summary>Squared length; cheaper than <see cref="Length"/> (no square root)</summary>
    public float LengthSquared()
    {
        return DistanceSquared(this, Zero);
    }

    /// <summary>Normalizes this vector in place (zeroed if near zero length)</summary>
    public void Normalize()
    {
        this = Normalize(this);
    }

    /// <summary>
    /// Test if this vector is equal to another vector, within a given
    /// tolerance range
    /// </summary>
    /// <param name="vec">Vector to test against</param>
    /// <param name="tolerance">The acceptable magnitude of difference
    /// between the two vectors</param>
    /// <returns>True if the magnitude of difference between the two vectors
    /// is less than the given tolerance, otherwise false</returns>
    public bool ApproxEquals(Vector3 vec, float tolerance)
    {
        Vector3 diff = this - vec;
        // Compare squared magnitudes to avoid the square root.
        return (diff.LengthSquared() <= tolerance * tolerance);
    }

    /// <summary>
    /// IComparable.CompareTo implementation; orders vectors by magnitude only.
    /// </summary>
    public int CompareTo(Vector3 vector)
    {
        return Length().CompareTo(vector.Length());
    }

    /// <summary>
    /// Test if this vector is composed of all finite numbers
    /// </summary>
    public bool IsFinite()
    {
        return (Utils.IsFinite(X) && Utils.IsFinite(Y) && Utils.IsFinite(Z));
    }

    /// <summary>
    /// Builds a vector from a byte array
    /// </summary>
    /// <param name="byteArray">Byte array containing a 12 byte vector
    /// (three little-endian single-precision floats)</param>
    /// <param name="pos">Beginning position in the byte array</param>
    public void FromBytes(byte[] byteArray, int pos)
    {
        if (!BitConverter.IsLittleEndian)
        {
            // Big endian architecture: byte-swap each float before converting.
            byte[] conversionBuffer = new byte[12];
            Buffer.BlockCopy(byteArray, pos, conversionBuffer, 0, 12);
            Array.Reverse(conversionBuffer, 0, 4);
            Array.Reverse(conversionBuffer, 4, 4);
            Array.Reverse(conversionBuffer, 8, 4);
            X = BitConverter.ToSingle(conversionBuffer, 0);
            Y = BitConverter.ToSingle(conversionBuffer, 4);
            Z = BitConverter.ToSingle(conversionBuffer, 8);
        }
        else
        {
            // Little endian architecture: read in place.
            X = BitConverter.ToSingle(byteArray, pos);
            Y = BitConverter.ToSingle(byteArray, pos + 4);
            Z = BitConverter.ToSingle(byteArray, pos + 8);
        }
    }

    /// <summary>
    /// Returns the raw bytes for this vector
    /// </summary>
    /// <returns>A 12 byte array containing X, Y, and Z</returns>
    public byte[] GetBytes()
    {
        byte[] byteArray = new byte[12];
        ToBytes(byteArray, 0);
        return byteArray;
    }

    /// <summary>
    /// Writes the raw bytes for this vector to a byte array
    /// </summary>
    /// <param name="dest">Destination byte array</param>
    /// <param name="pos">Position in the destination array to start
    /// writing. Must be at least 12 bytes before the end of the array</param>
    public void ToBytes(byte[] dest, int pos)
    {
        Buffer.BlockCopy(BitConverter.GetBytes(X), 0, dest, pos + 0, 4);
        Buffer.BlockCopy(BitConverter.GetBytes(Y), 0, dest, pos + 4, 4);
        Buffer.BlockCopy(BitConverter.GetBytes(Z), 0, dest, pos + 8, 4);
        // The wire format is little-endian; swap on big-endian hosts.
        if (!BitConverter.IsLittleEndian)
        {
            Array.Reverse(dest, pos + 0, 4);
            Array.Reverse(dest, pos + 4, 4);
            Array.Reverse(dest, pos + 8, 4);
        }
    }

    #endregion Public Methods

    #region Static Methods

    /// <summary>Component-wise addition</summary>
    public static Vector3 Add(Vector3 value1, Vector3 value2)
    {
        value1.X += value2.X;
        value1.Y += value2.Y;
        value1.Z += value2.Z;
        return value1;
    }

    /// <summary>Component-wise clamp of <paramref name="value1"/> to [min, max]</summary>
    public static Vector3 Clamp(Vector3 value1, Vector3 min, Vector3 max)
    {
        return new Vector3(
            Utils.Clamp(value1.X, min.X, max.X),
            Utils.Clamp(value1.Y, min.Y, max.Y),
            Utils.Clamp(value1.Z, min.Z, max.Z));
    }

    /// <summary>Cross product of two vectors</summary>
    public static Vector3 Cross(Vector3 value1, Vector3 value2)
    {
        return new Vector3(
            value1.Y * value2.Z - value2.Y * value1.Z,
            value1.Z * value2.X - value2.Z * value1.X,
            value1.X * value2.Y - value2.X * value1.Y);
    }

    /// <summary>Euclidean distance between two points</summary>
    public static float Distance(Vector3 value1, Vector3 value2)
    {
        return (float)Math.Sqrt(DistanceSquared(value1, value2));
    }

    /// <summary>Squared Euclidean distance between two points</summary>
    public static float DistanceSquared(Vector3 value1, Vector3 value2)
    {
        return
            (value1.X - value2.X) * (value1.X - value2.X) +
            (value1.Y - value2.Y) * (value1.Y - value2.Y) +
            (value1.Z - value2.Z) * (value1.Z - value2.Z);
    }

    /// <summary>Component-wise division</summary>
    public static Vector3 Divide(Vector3 value1, Vector3 value2)
    {
        value1.X /= value2.X;
        value1.Y /= value2.Y;
        value1.Z /= value2.Z;
        return value1;
    }

    /// <summary>Scalar division</summary>
    public static Vector3 Divide(Vector3 value1, float value2)
    {
        float factor = 1f / value2;
        value1.X *= factor;
        value1.Y *= factor;
        value1.Z *= factor;
        return value1;
    }

    /// <summary>Dot (inner) product of two vectors</summary>
    public static float Dot(Vector3 value1, Vector3 value2)
    {
        return value1.X * value2.X + value1.Y * value2.Y + value1.Z * value2.Z;
    }

    /// <summary>Linear interpolation between two vectors by <paramref name="amount"/></summary>
    public static Vector3 Lerp(Vector3 value1, Vector3 value2, float amount)
    {
        return new Vector3(
            Utils.Lerp(value1.X, value2.X, amount),
            Utils.Lerp(value1.Y, value2.Y, amount),
            Utils.Lerp(value1.Z, value2.Z, amount));
    }

    /// <summary>Magnitude of a vector (same result as <see cref="Length"/>)</summary>
    public static float Mag(Vector3 value)
    {
        return (float)Math.Sqrt((value.X * value.X) + (value.Y * value.Y) + (value.Z * value.Z));
    }

    /// <summary>Component-wise maximum of two vectors</summary>
    public static Vector3 Max(Vector3 value1, Vector3 value2)
    {
        return new Vector3(
            Math.Max(value1.X, value2.X),
            Math.Max(value1.Y, value2.Y),
            Math.Max(value1.Z, value2.Z));
    }

    /// <summary>Component-wise minimum of two vectors</summary>
    public static Vector3 Min(Vector3 value1, Vector3 value2)
    {
        return new Vector3(
            Math.Min(value1.X, value2.X),
            Math.Min(value1.Y, value2.Y),
            Math.Min(value1.Z, value2.Z));
    }

    /// <summary>Component-wise multiplication</summary>
    public static Vector3 Multiply(Vector3 value1, Vector3 value2)
    {
        value1.X *= value2.X;
        value1.Y *= value2.Y;
        value1.Z *= value2.Z;
        return value1;
    }

    /// <summary>Scalar multiplication</summary>
    public static Vector3 Multiply(Vector3 value1, float scaleFactor)
    {
        value1.X *= scaleFactor;
        value1.Y *= scaleFactor;
        value1.Z *= scaleFactor;
        return value1;
    }

    /// <summary>Negation of all three components</summary>
    public static Vector3 Negate(Vector3 value)
    {
        value.X = -value.X;
        value.Y = -value.Y;
        value.Z = -value.Z;
        return value;
    }

    /// <summary>
    /// Returns a unit-length copy of <paramref name="value"/>, or the zero vector
    /// when the input magnitude is below a small threshold (avoids division blow-up).
    /// </summary>
    public static Vector3 Normalize(Vector3 value)
    {
        const float MAG_THRESHOLD = 0.0000001f;
        float factor = Distance(value, Zero);
        if (factor > MAG_THRESHOLD)
        {
            factor = 1f / factor;
            value.X *= factor;
            value.Y *= factor;
            value.Z *= factor;
        }
        else
        {
            value.X = 0f;
            value.Y = 0f;
            value.Z = 0f;
        }
        return value;
    }

    /// <summary>
    /// Parse a vector from a string
    /// </summary>
    /// <param name="val">A string representation of a 3D vector, enclosed
    /// in arrow brackets and separated by commas</param>
    public static Vector3 Parse(string val)
    {
        char[] splitChar = { ',' };
        string[] split = val.Replace("<", String.Empty).Replace(">", String.Empty).Split(splitChar);
        return new Vector3(
            Single.Parse(split[0].Trim(), Utils.EnUsCulture),
            Single.Parse(split[1].Trim(), Utils.EnUsCulture),
            Single.Parse(split[2].Trim(), Utils.EnUsCulture));
    }

    /// <summary>Non-throwing variant of <see cref="Parse"/>; yields Zero on failure</summary>
    public static bool TryParse(string val, out Vector3 result)
    {
        try
        {
            result = Parse(val);
            return true;
        }
        catch (Exception)
        {
            result = Vector3.Zero;
            return false;
        }
    }

    /// <summary>
    /// Calculate the rotation between two vectors
    /// </summary>
    /// <param name="a">Normalized directional vector (such as 1,0,0 for forward facing)</param>
    /// <param name="b">Normalized target vector</param>
    public static Quaternion RotationBetween(Vector3 a, Vector3 b)
    {
        float dotProduct = Dot(a, b);
        Vector3 crossProduct = Cross(a, b);
        float magProduct = a.Length() * b.Length();
        // Clamp the cosine into [-1, 1]: floating-point rounding can push
        // dot/mag slightly outside that range, which would make Math.Acos
        // return NaN. A zero magnitude product (degenerate input) is treated
        // as "no rotation" instead of dividing by zero.
        double cosAngle = magProduct > 0f ? Utils.Clamp(dotProduct / magProduct, -1f, 1f) : 1f;
        double angle = Math.Acos(cosAngle);
        Vector3 axis = Normalize(crossProduct);
        float s = (float)Math.Sin(angle / 2d);
        return new Quaternion(
            axis.X * s,
            axis.Y * s,
            axis.Z * s,
            (float)Math.Cos(angle / 2d));
    }

    /// <summary>
    /// Interpolates between two vectors using a cubic equation
    /// </summary>
    public static Vector3 SmoothStep(Vector3 value1, Vector3 value2, float amount)
    {
        return new Vector3(
            Utils.SmoothStep(value1.X, value2.X, amount),
            Utils.SmoothStep(value1.Y, value2.Y, amount),
            Utils.SmoothStep(value1.Z, value2.Z, amount));
    }

    /// <summary>Component-wise subtraction</summary>
    public static Vector3 Subtract(Vector3 value1, Vector3 value2)
    {
        value1.X -= value2.X;
        value1.Y -= value2.Y;
        value1.Z -= value2.Z;
        return value1;
    }

    /// <summary>Transforms a position by a matrix (rotation/scale plus translation row M4x)</summary>
    public static Vector3 Transform(Vector3 position, Matrix4 matrix)
    {
        return new Vector3(
            (position.X * matrix.M11) + (position.Y * matrix.M21) + (position.Z * matrix.M31) + matrix.M41,
            (position.X * matrix.M12) + (position.Y * matrix.M22) + (position.Z * matrix.M32) + matrix.M42,
            (position.X * matrix.M13) + (position.Y * matrix.M23) + (position.Z * matrix.M33) + matrix.M43);
    }

    /// <summary>Transforms a direction by a matrix, ignoring the translation row</summary>
    public static Vector3 TransformNormal(Vector3 position, Matrix4 matrix)
    {
        return new Vector3(
            (position.X * matrix.M11) + (position.Y * matrix.M21) + (position.Z * matrix.M31),
            (position.X * matrix.M12) + (position.Y * matrix.M22) + (position.Z * matrix.M32),
            (position.X * matrix.M13) + (position.Y * matrix.M23) + (position.Z * matrix.M33));
    }

    #endregion Static Methods

    #region Overrides

    public override bool Equals(object obj)
    {
        return (obj is Vector3) ? this == (Vector3)obj : false;
    }

    public bool Equals(Vector3 other)
    {
        return this == other;
    }

    public override int GetHashCode()
    {
        return X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode();
    }

    /// <summary>
    /// Get a formatted string representation of the vector
    /// </summary>
    /// <returns>A string representation of the vector</returns>
    public override string ToString()
    {
        return String.Format(Utils.EnUsCulture, "<{0}, {1}, {2}>", X, Y, Z);
    }

    /// <summary>
    /// Get a string representation of the vector elements with up to three
    /// decimal digits and separated by spaces only
    /// </summary>
    /// <returns>Raw string representation of the vector</returns>
    public string ToRawString()
    {
        CultureInfo enUs = new CultureInfo("en-us");
        // NOTE: setting NumberFormat.NumberDecimalDigits (as the old code did) only
        // affects the standard "N"/"F" formats, not the general "{0}" format it used,
        // so the promised three-digit limit never applied. Use an explicit custom
        // format to actually emit at most three decimal digits.
        return String.Format(enUs, "{0:0.###} {1:0.###} {2:0.###}", X, Y, Z);
    }

    #endregion Overrides

    #region Operators

    public static bool operator ==(Vector3 value1, Vector3 value2)
    {
        return value1.X == value2.X
            && value1.Y == value2.Y
            && value1.Z == value2.Z;
    }

    public static bool operator !=(Vector3 value1, Vector3 value2)
    {
        return !(value1 == value2);
    }

    public static Vector3 operator +(Vector3 value1, Vector3 value2)
    {
        value1.X += value2.X;
        value1.Y += value2.Y;
        value1.Z += value2.Z;
        return value1;
    }

    public static Vector3 operator -(Vector3 value)
    {
        value.X = -value.X;
        value.Y = -value.Y;
        value.Z = -value.Z;
        return value;
    }

    public static Vector3 operator -(Vector3 value1, Vector3 value2)
    {
        value1.X -= value2.X;
        value1.Y -= value2.Y;
        value1.Z -= value2.Z;
        return value1;
    }

    public static Vector3 operator *(Vector3 value1, Vector3 value2)
    {
        value1.X *= value2.X;
        value1.Y *= value2.Y;
        value1.Z *= value2.Z;
        return value1;
    }

    public static Vector3 operator *(Vector3 value, float scaleFactor)
    {
        value.X *= scaleFactor;
        value.Y *= scaleFactor;
        value.Z *= scaleFactor;
        return value;
    }

    /// <summary>Rotates a vector by a quaternion (computes rot * vec * rot^-1)</summary>
    public static Vector3 operator *(Vector3 vec, Quaternion rot)
    {
        float rw = -rot.X * vec.X - rot.Y * vec.Y - rot.Z * vec.Z;
        float rx = rot.W * vec.X + rot.Y * vec.Z - rot.Z * vec.Y;
        float ry = rot.W * vec.Y + rot.Z * vec.X - rot.X * vec.Z;
        float rz = rot.W * vec.Z + rot.X * vec.Y - rot.Y * vec.X;
        vec.X = -rw * rot.X + rx * rot.W - ry * rot.Z + rz * rot.Y;
        vec.Y = -rw * rot.Y + ry * rot.W - rz * rot.X + rx * rot.Z;
        vec.Z = -rw * rot.Z + rz * rot.W - rx * rot.Y + ry * rot.X;
        return vec;
    }

    public static Vector3 operator *(Vector3 vector, Matrix4 matrix)
    {
        return Transform(vector, matrix);
    }

    public static Vector3 operator /(Vector3 value1, Vector3 value2)
    {
        value1.X /= value2.X;
        value1.Y /= value2.Y;
        value1.Z /= value2.Z;
        return value1;
    }

    public static Vector3 operator /(Vector3 value, float divider)
    {
        float factor = 1f / divider;
        value.X *= factor;
        value.Y *= factor;
        value.Z *= factor;
        return value;
    }

    /// <summary>
    /// Cross product between two vectors
    /// </summary>
    public static Vector3 operator %(Vector3 value1, Vector3 value2)
    {
        return Cross(value1, value2);
    }

    #endregion Operators

    /// <summary>A vector with a value of 0,0,0</summary>
    public readonly static Vector3 Zero = new Vector3();
    /// <summary>A vector with a value of 1,1,1</summary>
    public readonly static Vector3 One = new Vector3(1f, 1f, 1f);
    /// <summary>A unit vector facing forward (X axis), value 1,0,0</summary>
    public readonly static Vector3 UnitX = new Vector3(1f, 0f, 0f);
    /// <summary>A unit vector facing left (Y axis), value 0,1,0</summary>
    public readonly static Vector3 UnitY = new Vector3(0f, 1f, 0f);
    /// <summary>A unit vector facing up (Z axis), value 0,0,1</summary>
    public readonly static Vector3 UnitZ = new Vector3(0f, 0f, 1f);
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;
using System.Threading;
//using Microsoft.Research.Joins.JoinPatterns;
using Microsoft.Research.Joins;
namespace Sanity {
// Verifies that a fixed-size Join rejects channel initialization once full,
// throwing JoinException with a consistent message and leaving its state intact.
class OverflowTests {
    // Registers one channel of the (index % 8)-th flavour on the given join and
    // returns the created channel/array. Returns null only on the (unreachable)
    // default branch.
    static object Initialize(Join j, int i) {
        switch (i % 8) {
            case 0: {
                Asynchronous.Channel c;
                j.Initialize(out c);
                return c;
            }
            case 1: {
                Asynchronous.Channel<string> c;
                j.Initialize(out c);
                return c;
            }
            case 2: {
                Synchronous<string>.Channel<string> c;
                j.Initialize(out c);
                return c;
            }
            case 3: {
                Synchronous<string>.Channel c;
                j.Initialize(out c);
                return c;
            }
            case 4: {
                Synchronous.Channel<string> c;
                j.Initialize(out c);
                return c;
            }
            case 5: {
                Synchronous.Channel c;
                j.Initialize(out c);
                return c;
            }
            case 6: {
                Asynchronous.Channel[] c;
                j.Initialize(out c, 1);
                return c;
            }
            case 7: {
                Asynchronous.Channel<string>[] c;
                j.Initialize(out c, 1);
                return c;
            }
            default:
                Debug.Assert(false);
                return null;
        }
    }

    // Fills a Join of the given size to capacity, then verifies that every further
    // initialization of every channel flavour fails without changing the join.
    static void TestInitialize(int size) {
        Join j = Join.Create(size);
        // fill up j, checking the bookkeeping as we go
        for (int i = 0; i < size; i++) {
            Debug.Assert(j.Size == size);
            Debug.Assert(j.Count == i);
            Debug.Assert(Initialize(j, i) != null);
        }
        // overflow j: two full passes over all eight channel flavours.
        // BUG FIX: msg was previously declared inside this loop, so it was reset
        // to null on every iteration and the "consistent messages" comparison
        // branch could never run. Hoisting it out (matching the pattern used in
        // NullChannels) makes the cross-attempt consistency check effective.
        string msg = null;
        for (int i = 0; i < 8 * 2; i++) {
            object channel = null;
            try {
                channel = Initialize(j, i);
                Debug.Assert(false);
            }
            catch (JoinException e) {
                if (msg == null) { // check we get consistent messages
                    msg = e.Message;
                    Debug.Assert(msg != null);
                } else {
                    Debug.Assert(e.Message == msg);
                }
            }
            // The failed initialization must not have registered anything.
            Debug.Assert(channel == null);
            Debug.Assert(j.Count == size);
            Debug.Assert(j.Size == size);
        }
    }

    // Probes sizes around each power of two up to 8 (0,1,2 / 1,2,3 / 3,4,5).
    public static void Test() {
        for (int i = 1; i < 8; i = 2 * i) {
            TestInitialize(i - 1);
            TestInitialize(i);
            TestInitialize(i + 1);
        }
    }
}
class NullChannels {
// Test When(null) raises JoinException (consistently)
public static void TestWhenNull() {
string msg = null;
Join j = Join.Create();
try {
j.When((Asynchronous.Channel)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
j.When((Asynchronous.Channel<string>)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When((Synchronous<string>.Channel<string>)null).Do(delegate { return ""; });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When((Synchronous<string>.Channel)null).Do(delegate { return ""; });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When((Synchronous.Channel<string>)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When((Synchronous.Channel)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
string msg2 = null;
try {
j.When((Asynchronous.Channel[])null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg2 = e.Message;
Debug.Assert(msg2 != null);
}
try {
j.When((Asynchronous.Channel<string>[])null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg2 == e.Message);
}
try {
j.When((Asynchronous.Channel[])new Asynchronous.Channel[] { null }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When((Asynchronous.Channel<string>[])new Asynchronous.Channel<string>[] { null }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test And(null) raises JoinException (consistently)
public static void TestAndNull() {
string msg = null;
Join j = Join.Create();
Synchronous.Channel chan;
j.Initialize(out chan);
var jp = j.When(chan);
try {
jp.And((Asynchronous.Channel)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
jp.And((Asynchronous.Channel<string>)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
string msg2 = null;
try {
jp.And((Asynchronous.Channel[])null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg2 = e.Message;
Debug.Assert(msg2 != null);
}
try {
jp.And((Asynchronous.Channel<string>[])null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg2 == e.Message);
}
try {
jp.And((Asynchronous.Channel[])new Asynchronous.Channel[] { null }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
jp.And((Asynchronous.Channel<string>[])new Asynchronous.Channel<string>[] { null }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test And(null) raises JoinException (consistently)
// Same checks as TestAndNull, but starting from a two-channel pattern
// (When(chan1).And(chan2)) to exercise the longer pattern-builder chain.
public static void TestAndAndNull() {
string msg = null;
Join j = Join.Create();
Synchronous.Channel<string> chan1;
Asynchronous.Channel<string> chan2;
j.Initialize(out chan1);
j.Initialize(out chan2);
var jp = j.When(chan1).And(chan2);
// First scalar-null failure captures the canonical message.
try {
jp.And((Asynchronous.Channel)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
jp.And((Asynchronous.Channel<string>)null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Null channel arrays use a distinct (but internally consistent) message.
string msg2 = null;
try {
jp.And((Asynchronous.Channel[])null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg2 = e.Message;
Debug.Assert(msg2 != null);
}
try {
jp.And((Asynchronous.Channel<string>[])null).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg2 == e.Message);
}
// An array containing a null element reports the scalar-null message.
try {
jp.And((Asynchronous.Channel[])new Asynchronous.Channel[] { null }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
jp.And((Asynchronous.Channel<string>[])new Asynchronous.Channel<string>[] { null }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test When(foreignChannel) raises JoinException (consistently)
// Channels initialized on one Join instance ('f') must be rejected when
// used in a pattern on a different Join instance ('j'), for every channel
// flavor and for channel arrays, always with the same message.
public static void TestWhenForeign() {
string msg = null;
Join j = Join.Create();
Join f = Join.Create();
Asynchronous.Channel async;
Asynchronous.Channel<string> asyncString;
Synchronous<string>.Channel syncString;
Synchronous<string>.Channel<string> syncStringString;
Synchronous.Channel<string> syncVoidString;
Synchronous.Channel syncVoid;
// All channels below belong to the foreign join 'f', not to 'j'.
f.Initialize(out async);
f.Initialize(out asyncString);
f.Initialize(out syncString);
f.Initialize(out syncStringString);
f.Initialize(out syncVoidString);
f.Initialize(out syncVoid);
// First failure captures the canonical message.
try {
j.When(async).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
j.When(asyncString).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(syncStringString).Do(delegate { return ""; });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(syncString).Do(delegate { return ""; });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(syncVoidString).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(syncVoid).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Arrays containing foreign channels are rejected the same way.
try {
j.When((Asynchronous.Channel[])new Asynchronous.Channel[] { async }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When((Asynchronous.Channel<string>[])new Asynchronous.Channel<string>[] { asyncString }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test And(foreignChannel) raises JoinException (consistently)
// Like TestWhenForeign, but the foreign channel is appended with And()
// to an existing pattern on 'j' instead of starting a new one.
public static void TestAndForeign() {
string msg = null;
Join j = Join.Create();
Synchronous.Channel chan;
j.Initialize(out chan);
var jp = j.When(chan);
// 'f' owns all the channels used in the And() calls below.
Join f = Join.Create();
Asynchronous.Channel async;
Asynchronous.Channel<string> asyncString;
Synchronous<string>.Channel syncString;
Synchronous<string>.Channel<string> syncStringString;
Synchronous.Channel<string> syncVoidString;
Synchronous.Channel syncVoid;
f.Initialize(out async);
f.Initialize(out asyncString);
f.Initialize(out syncString);
f.Initialize(out syncStringString);
f.Initialize(out syncVoidString);
f.Initialize(out syncVoid);
// First failure captures the canonical message.
try {
jp.And(async).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
jp.And(asyncString).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Arrays containing foreign channels are rejected the same way.
try {
jp.And((Asynchronous.Channel[])new Asynchronous.Channel[] { async }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
jp.And((Asynchronous.Channel<string>[])new Asynchronous.Channel<string>[] { asyncString }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test Do(null) raises JoinException (consistently)
// Registering a null continuation must fail for both the void and the
// string-returning synchronous patterns, with a non-null message each time.
public static void TestDoNull() {
string message = null;
Join join = Join.Create();
Synchronous.Channel voidChannel;
Synchronous<string>.Channel stringChannel;
join.Initialize(out voidChannel);
join.Initialize(out stringChannel);
try {
join.When(voidChannel).Do(null);
Debug.Assert(false); // Do(null) must not succeed
}
catch (JoinException e) {
message = e.Message;
Debug.Assert(message != null);
}
try {
join.When(stringChannel).Do(null);
Debug.Assert(false); // Do(null) must not succeed
}
catch (JoinException e) {
message = e.Message;
Debug.Assert(message != null);
}
}
// Test repeated channel raises JoinException (consistently)
// A channel may appear at most once in a pattern; duplicates — whether via
// repeated And(), inside an array, or split between an array and And() —
// must all fail with the same message.
public static void TestRepeatedChannels() {
string msg = null;
Join j = Join.Create();
Asynchronous.Channel async1;
Asynchronous.Channel async2;
Asynchronous.Channel<string> asyncString1;
Asynchronous.Channel<string> asyncString2;
j.Initialize(out async1);
j.Initialize(out async2);
j.Initialize(out asyncString1);
j.Initialize(out asyncString2);
// First duplicate captures the canonical message.
try {
j.When(async1).And(async1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
j.When(asyncString1).And(asyncString1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Duplicate inside an array literal.
try {
j.When(new Asynchronous.Channel[] { async1, async2, async1 }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Duplicate split across an array and a subsequent And().
try {
j.When(new Asynchronous.Channel[] { async1, async2}).And(async1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(new Asynchronous.Channel<string>[] {asyncString1,asyncString2,asyncString1}).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(new Asynchronous.Channel<string>[] { asyncString1, asyncString2}).And(asyncString1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test redundant pattern raises JoinException (consistently)
// Once a pattern is registered, any new pattern that is subsumed by it or
// that subsumes it (shares a channel in a conflicting way) must be rejected
// with a consistent message. Synchronous channels additionally reject any
// second pattern on the same channel.
public static void TestRedundantPatterns() {
string msg = null;
Join j = Join.Create();
Asynchronous.Channel async1;
Asynchronous.Channel async2;
Asynchronous.Channel async3;
Asynchronous.Channel<string> asyncString1;
Asynchronous.Channel<string> asyncString2;
Asynchronous.Channel<string> asyncString3;
Synchronous<String>.Channel<String> syncStringString;
Synchronous<String>.Channel syncString;
Synchronous.Channel<String> syncVoidString;
Synchronous.Channel syncVoid;
j.Initialize(out async1);
j.Initialize(out async2);
j.Initialize(out async3);
j.Initialize(out asyncString1);
j.Initialize(out asyncString2);
j.Initialize(out asyncString3);
j.Initialize(out syncStringString);
j.Initialize(out syncString);
j.Initialize(out syncVoidString);
j.Initialize(out syncVoid);
// Register a baseline pattern on async1, then try conflicting ones.
j.When(async1).Do(delegate { });
try {
j.When(async1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
// A superset pattern containing async1 is also redundant.
try {
j.When(async1).And(async2).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// A subset of a registered multi-channel pattern is redundant too.
j.When(async2).And(async3).Do(delegate { });
try {
j.When(async2).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(new Asynchronous.Channel[] { async2}).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Same checks for the string-carrying asynchronous channels.
j.When(asyncString1).Do(delegate { });
try {
j.When(asyncString1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
msg = e.Message;
Debug.Assert(msg != null);
};
try {
j.When(asyncString1).And(asyncString2).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
j.When(asyncString2).And(asyncString3).Do(delegate { });
try {
j.When(asyncString2).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
try {
j.When(new Asynchronous.Channel<string>[] { asyncString2 }).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
// Synchronous channels: a second pattern on the same channel is redundant
// even when extended with a fresh asynchronous channel.
j.When(syncStringString).Do(delegate { return null; });
try {
j.When(syncStringString).And(async1).Do(delegate { return null; });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
j.When(syncString).Do(delegate { return null; });
try {
j.When(syncString).And(async1).Do(delegate { return null; });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
j.When(syncVoidString).Do(delegate { });
try {
j.When(syncVoidString).And(async1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
j.When(syncVoid).Do(delegate { });
try {
j.When(syncVoid).And(async1).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(msg == e.Message);
}
}
// Test empty pattern raises JoinException (consistently)
// A pattern made solely of zero-length channel arrays is rejected; adding
// one real channel makes the same arrays legal again.
public static void TestEmptyPatterns() {
string message = null;
Join join = Join.Create();
Asynchronous.Channel[] emptyChannels;
Asynchronous.Channel<string>[] emptyStringChannels;
Asynchronous.Channel firstPad;
Asynchronous.Channel secondPad;
join.Initialize(out emptyChannels, 0);
join.Initialize(out emptyStringChannels, 0);
join.Initialize(out firstPad);
join.Initialize(out secondPad);
// An entirely empty pattern must fail; capture the message.
try {
join.When(emptyChannels).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
message = e.Message;
Debug.Assert(message != null);
}
try {
join.When(emptyStringChannels).Do(delegate { });
Debug.Assert(false);
}
catch (JoinException e) {
Debug.Assert(message == e.Message);
}
// these are legal, because the entire pattern is still non-empty
try {
join.When(emptyChannels).And(firstPad).Do(delegate { });
}
catch (JoinException) {
Debug.Assert(false);
}
try {
join.When(emptyStringChannels).And(secondPad).Do(delegate { });
}
catch (JoinException) {
Debug.Assert(false);
}
}
// Entry point running every null-channel / foreign-channel / pattern test.
public static void Test() {
TestWhenNull();
TestAndNull();
TestAndAndNull();
// we could go on, but let's not and say we did.
TestWhenForeign();
TestAndForeign();
TestDoNull();
TestRepeatedChannels();
// NOTE(review): disabled — confirm whether redundant-pattern detection is expected to pass.
//TestRedundantPatterns();
TestEmptyPatterns();
}
}
// Console harness: runs both test suites, then waits for a keypress so the
// window stays open when launched outside a terminal.
class Program {
static void Main() {
OverflowTests.Test();
NullChannels.Test();
Console.WriteLine("Done");
Console.ReadLine();
}
}
}
| |
// <copyright file="NativeClient.cs" company="Google Inc.">
// Copyright (C) 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
#if UNITY_ANDROID
#pragma warning disable 0642 // Possible mistaken empty statement
namespace GooglePlayGames.Android
{
using GooglePlayGames.BasicApi;
using GooglePlayGames.BasicApi.SavedGame;
using GooglePlayGames.OurUtils;
using System;
using System.Collections.Generic;
using GooglePlayGames.BasicApi.Events;
using GooglePlayGames.BasicApi.Video;
using UnityEngine;
using UnityEngine.SocialPlatforms;
public class AndroidClient : IPlayGamesClient
{
// Two-state sign-in state machine guarded by AuthStateLock.
private enum AuthState
{
Unauthenticated,
Authenticated
}
// Guards the lazily-created service clients (saved games / events / video).
private readonly object GameServicesLock = new object();
// Guards mAuthState transitions and reads.
private readonly object AuthStateLock = new object();
private readonly PlayGamesClientConfiguration mConfiguration;
private volatile ISavedGameClient mSavedGameClient;
private volatile IEventsClient mEventsClient;
private volatile IVideoClient mVideoClient;
private volatile AndroidTokenClient mTokenClient;
// Signed-in player; null until authentication completes.
private volatile Player mUser = null;
private volatile AuthState mAuthState = AuthState.Unauthenticated;
// Last successfully loaded friends page (empty until LoadFriends runs).
private IUserProfile[] mFriends = new IUserProfile[0];
private LoadFriendsStatus mLastLoadFriendsStatus = LoadFriendsStatus.Unknown;
AndroidJavaClass mGamesClass = new AndroidJavaClass("com.google.android.gms.games.Games");
private static string TasksClassName = "com.google.android.gms.tasks.Tasks";
// Pending resolution intent from a friends-consent failure, if any.
private AndroidJavaObject mFriendsResolutionException = null;
private readonly int mLeaderboardMaxResults = 25; // can be from 1 to 25
private readonly int mFriendsMaxResults = 200; // the maximum load friends page size
// Raised once after a successful sign-in completes.
public event Action OnAuthenticatedProxy;
// Creates the client and ensures the helper GameObject (used to marshal
// callbacks onto the Unity game thread) exists. Configuration must be non-null.
internal AndroidClient(PlayGamesClientConfiguration configuration)
{
PlayGamesHelperObject.CreateObject();
this.mConfiguration = Misc.CheckNotNull(configuration);
}
///<summary>Signs the user in, fetching tokens and initializing the game
/// service clients; invokes <paramref name="callback"/> on the game thread
/// with the resulting status.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.Authenticate"/>
public void Authenticate(bool silent, Action<SignInStatus> callback)
{
lock (AuthStateLock)
{
// If the user is already authenticated, just fire the callback, we don't need
// any additional work.
if (mAuthState == AuthState.Authenticated)
{
Debug.Log("Already authenticated.");
InvokeCallbackOnGameThread(callback, SignInStatus.Success);
return;
}
}
InitializeTokenClient();
Debug.Log("Starting Auth with token client.");
mTokenClient.FetchTokens(silent, (int result) =>
{
bool succeed = result == 0 /* CommonStatusCodes.SUCCEED */;
InitializeGameServices();
if (succeed)
{
// Gather the post-sign-in setup Tasks and await them all at once.
using (var signInTasks = new AndroidJavaObject("java.util.ArrayList"))
{
AndroidJavaObject taskGetPlayer =
getPlayersClient().Call<AndroidJavaObject>("getCurrentPlayer");
AndroidJavaObject taskGetActivationHint =
getGamesClient().Call<AndroidJavaObject>("getActivationHint");
AndroidJavaObject taskIsCaptureSupported =
getVideosClient().Call<AndroidJavaObject>("isCaptureSupported");
if (!mConfiguration.IsHidingPopups)
{
AndroidJavaObject taskSetViewForPopups;
using (var popupView = AndroidHelperFragment.GetDefaultPopupView())
{
taskSetViewForPopups =
getGamesClient().Call<AndroidJavaObject>("setViewForPopups", popupView);
}
signInTasks.Call<bool>("add", taskSetViewForPopups);
}
signInTasks.Call<bool>("add", taskGetPlayer);
signInTasks.Call<bool>("add", taskGetActivationHint);
signInTasks.Call<bool>("add", taskIsCaptureSupported);
// Tasks.whenAll completes when every queued task has finished.
using (var tasks = new AndroidJavaClass(TasksClassName))
using (var allTask = tasks.CallStatic<AndroidJavaObject>("whenAll", signInTasks))
{
AndroidTaskUtils.AddOnCompleteListener<AndroidJavaObject>(
allTask,
completeTask =>
{
if (completeTask.Call<bool>("isSuccessful"))
{
using (var resultObject = taskGetPlayer.Call<AndroidJavaObject>("getResult"))
{
mUser = AndroidJavaConverter.ToPlayer(resultObject);
}
var account = mTokenClient.GetAccount();
// Publish the per-account service clients atomically.
lock (GameServicesLock)
{
mSavedGameClient = new AndroidSavedGameClient(this, account);
mEventsClient = new AndroidEventsClient(account);
bool isCaptureSupported;
using (var resultObject =
taskIsCaptureSupported.Call<AndroidJavaObject>("getResult"))
{
isCaptureSupported = resultObject.Call<bool>("booleanValue");
}
mVideoClient = new AndroidVideoClient(isCaptureSupported, account);
}
mAuthState = AuthState.Authenticated;
if (OnAuthenticatedProxy != null)
{
GooglePlayGames.OurUtils.Logger.d("OnAuthenticated");
OnAuthenticatedProxy();
}
InvokeCallbackOnGameThread(callback, SignInStatus.Success);
GooglePlayGames.OurUtils.Logger.d("Authentication succeeded");
// Pre-warm the achievements cache; result intentionally ignored.
LoadAchievements(ignore => { });
}
else
{
// Any setup failure rolls the client back to signed-out.
SignOut();
if (completeTask.Call<bool>("isCanceled"))
{
InvokeCallbackOnGameThread(callback, SignInStatus.Canceled);
return;
}
using (var exception = completeTask.Call<AndroidJavaObject>("getException"))
{
GooglePlayGames.OurUtils.Logger.e(
"Authentication failed - " + exception.Call<string>("toString"));
InvokeCallbackOnGameThread(callback, SignInStatus.InternalError);
}
}
}
);
}
}
}
else
{
lock (AuthStateLock)
{
Debug.Log("Returning an error code.");
InvokeCallbackOnGameThread(callback, SignInHelper.ToSignInStatus(result));
}
}
});
}
// Wraps a callback so it always executes on the Unity game thread.
// A null callback becomes a no-op, so callers never need a null check.
private static Action<T> AsOnGameThreadCallback<T>(Action<T> callback)
{
if (callback != null)
{
return result => InvokeCallbackOnGameThread(callback, result);
}
return delegate { };
}
// Schedules a parameterless callback on the Unity game thread; null is ignored.
private static void InvokeCallbackOnGameThread(Action callback)
{
if (callback != null)
{
PlayGamesHelperObject.RunOnGameThread(() => callback());
}
}
// Schedules a one-argument callback on the Unity game thread; null is ignored.
private static void InvokeCallbackOnGameThread<T>(Action<T> callback, T data)
{
if (callback != null)
{
PlayGamesHelperObject.RunOnGameThread(() => callback(data));
}
}
// Wraps a two-argument callback so it runs on the Unity game thread.
// NOTE: unlike the one-argument overload, the null check is deliberately
// deferred until invocation time — preserved as-is.
private static Action<T1, T2> AsOnGameThreadCallback<T1, T2>(
Action<T1, T2> toInvokeOnGameThread)
{
return (first, second) =>
{
if (toInvokeOnGameThread != null)
{
PlayGamesHelperObject.RunOnGameThread(() => toInvokeOnGameThread(first, second));
}
};
}
// Schedules a two-argument callback on the Unity game thread; null is ignored.
private static void InvokeCallbackOnGameThread<T1, T2>(Action<T1, T2> callback, T1 t1, T2 t2)
{
if (callback != null)
{
PlayGamesHelperObject.RunOnGameThread(() => callback(t1, t2));
}
}
// Ensures the token client exists; the mTokenClient null check is the only
// initialization gate, so repeated calls are cheap no-ops.
private void InitializeGameServices()
{
if (mTokenClient != null)
{
return;
}
InitializeTokenClient();
}
/// <summary>
/// Lazily creates and configures the AndroidTokenClient from the client
/// configuration (web client id, auth-code/id-token/email flags, popup
/// visibility, OAuth scopes, account name). Repeated calls are no-ops.
/// </summary>
private void InitializeTokenClient()
{
if (mTokenClient != null)
{
return;
}
mTokenClient = new AndroidTokenClient();
if (!GameInfo.WebClientIdInitialized() &&
(mConfiguration.IsRequestingIdToken || mConfiguration.IsRequestingAuthCode))
{
// Fixed grammar: message previously read "...clientId to configured."
OurUtils.Logger.e("Server Auth Code and ID Token require web clientId to be configured.");
}
string[] scopes = mConfiguration.Scopes;
// Set the auth flags in the token client.
mTokenClient.SetWebClientId(GameInfo.WebClientId);
mTokenClient.SetRequestAuthCode(mConfiguration.IsRequestingAuthCode, mConfiguration.IsForcingRefresh);
mTokenClient.SetRequestEmail(mConfiguration.IsRequestingEmail);
mTokenClient.SetRequestIdToken(mConfiguration.IsRequestingIdToken);
mTokenClient.SetHidePopups(mConfiguration.IsHidingPopups);
// games_lite is always required; drive.appdata only when saved games are enabled.
mTokenClient.AddOauthScopes("https://www.googleapis.com/auth/games_lite");
if (mConfiguration.EnableSavedGames)
{
mTokenClient.AddOauthScopes("https://www.googleapis.com/auth/drive.appdata");
}
mTokenClient.AddOauthScopes(scopes);
mTokenClient.SetAccountName(mConfiguration.AccountName);
}
/// <summary>
/// Gets the user's email.
/// </summary>
/// <remarks>The email address returned is selected by the user from the accounts present
/// on the device. There is no guarantee this uniquely identifies the player.
/// For unique identification use the id property of the local player.
/// The user can also choose to not select any email address, meaning it is not
/// available.</remarks>
/// <returns>The user email or null if not authenticated or the permission is
/// not available.</returns>
public string GetUserEmail()
{
if (this.IsAuthenticated())
{
return mTokenClient.GetEmail();
}
Debug.Log("Cannot get API client - not authenticated");
return null;
}
/// <summary>
/// Returns an id token, which can be verified server side, if the user is
/// logged in.
/// </summary>
/// <returns>The identifier token, or null when not authenticated.</returns>
public string GetIdToken()
{
if (this.IsAuthenticated())
{
return mTokenClient.GetIdToken();
}
Debug.Log("Cannot get API client - not authenticated");
return null;
}
/// <summary>
/// Retrieves the server auth code obtained during sign-in for this client.
/// </summary>
/// <returns>The server auth code, or null when not authenticated.</returns>
public string GetServerAuthCode()
{
if (this.IsAuthenticated())
{
return mTokenClient.GetAuthCode();
}
Debug.Log("Cannot get API client - not authenticated");
return null;
}
// Requests a fresh server auth code (optionally re-authenticating first);
// the result is delivered to the callback on the game thread.
public void GetAnotherServerAuthCode(bool reAuthenticateIfNeeded,
Action<string> callback)
{
mTokenClient.GetAnotherServerAuthCode(reAuthenticateIfNeeded, AsOnGameThreadCallback(callback));
}
///<summary>Whether a user is currently signed in to Play Games services.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.IsAuthenticated"/>
public bool IsAuthenticated()
{
bool authenticated;
lock (AuthStateLock)
{
authenticated = mAuthState == AuthState.Authenticated;
}
return authenticated;
}
// Loads the full friends list (paging internally up to mFriendsMaxResults
// per request); callback receives true on success.
public void LoadFriends(Action<bool> callback)
{
LoadAllFriends(mFriendsMaxResults, /* forceReload= */ false, /* loadMore= */ false, callback);
}
// Recursively pages through the friends list until no more pages remain,
// then reports overall success/failure to the callback on the game thread.
private void LoadAllFriends(int pageSize, bool forceReload, bool loadMore,
Action<bool> callback)
{
LoadFriendsPaginated(pageSize, loadMore, forceReload, result =>
{
mLastLoadFriendsStatus = result;
switch (result)
{
case LoadFriendsStatus.Completed:
InvokeCallbackOnGameThread(callback, true);
break;
case LoadFriendsStatus.LoadMore:
// There are more friends to load.
LoadAllFriends(pageSize, /* forceReload= */ false, /* loadMore= */ true, callback);
break;
case LoadFriendsStatus.ResolutionRequired:
case LoadFriendsStatus.InternalError:
case LoadFriendsStatus.NotAuthorized:
InvokeCallbackOnGameThread(callback, false);
break;
default:
GooglePlayGames.OurUtils.Logger.d("There was an error when loading friends." + result);
InvokeCallbackOnGameThread(callback, false);
break;
}
});
}
// Loads the first page of friends with the given page size; status (including
// LoadMore when more pages exist) is reported via the callback.
public void LoadFriends(int pageSize, bool forceReload,
Action<LoadFriendsStatus> callback)
{
LoadFriendsPaginated(pageSize, /* isLoadMore= */ false, /* forceReload= */ forceReload,
callback);
}
// Loads the next page of friends after a previous LoadFriends returned LoadMore.
public void LoadMoreFriends(int pageSize, Action<LoadFriendsStatus> callback)
{
LoadFriendsPaginated(pageSize, /* isLoadMore= */ true, /* forceReload= */ false,
callback);
}
// Core friends-loading routine: issues loadFriends/loadMoreFriends on the
// PlayersClient, stores results in mFriends / mLastLoadFriendsStatus, and
// maps failures (consent resolution required, network error, other) to
// LoadFriendsStatus values delivered on the game thread.
private void LoadFriendsPaginated(int pageSize, bool isLoadMore, bool forceReload,
Action<LoadFriendsStatus> callback)
{
// Clear any stale consent-resolution intent from a previous attempt.
mFriendsResolutionException = null;
using (var playersClient = getPlayersClient())
using (var task = isLoadMore
? playersClient.Call<AndroidJavaObject>("loadMoreFriends", pageSize)
: playersClient.Call<AndroidJavaObject>("loadFriends", pageSize,
forceReload))
{
AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
task, annotatedData =>
{
using (var playersBuffer = annotatedData.Call<AndroidJavaObject>("get"))
{
// A non-null next_page_token means another page is available.
AndroidJavaObject metadata = playersBuffer.Call<AndroidJavaObject>("getMetadata");
var areMoreFriendsToLoad = metadata != null &&
metadata.Call<AndroidJavaObject>("getString",
"next_page_token") != null;
mFriends = AndroidJavaConverter.playersBufferToArray(playersBuffer);
mLastLoadFriendsStatus = areMoreFriendsToLoad
? LoadFriendsStatus.LoadMore
: LoadFriendsStatus.Completed;
InvokeCallbackOnGameThread(callback, mLastLoadFriendsStatus);
}
});
AndroidTaskUtils.AddOnFailureListener(task, exception =>
{
AndroidHelperFragment.IsResolutionRequired(exception, resolutionRequired =>
{
if (resolutionRequired)
{
// Keep the resolution intent so AskForLoadFriendsResolution can use it.
mFriendsResolutionException =
exception.Call<AndroidJavaObject>("getResolution");
mLastLoadFriendsStatus = LoadFriendsStatus.ResolutionRequired;
mFriends = new IUserProfile[0];
InvokeCallbackOnGameThread(callback, LoadFriendsStatus.ResolutionRequired);
}
else
{
mFriendsResolutionException = null;
if (Misc.IsApiException(exception))
{
var statusCode = exception.Call<int>("getStatusCode");
if (statusCode == /* GamesClientStatusCodes.NETWORK_ERROR_NO_DATA */ 26504)
{
mLastLoadFriendsStatus = LoadFriendsStatus.NetworkError;
InvokeCallbackOnGameThread(callback, LoadFriendsStatus.NetworkError);
return;
}
}
mLastLoadFriendsStatus = LoadFriendsStatus.InternalError;
OurUtils.Logger.e("LoadFriends failed: " +
exception.Call<string>("toString"));
InvokeCallbackOnGameThread(callback, LoadFriendsStatus.InternalError);
}
});
return;
});
}
}
// Returns the status of the most recent friends-load attempt
// (Unknown until LoadFriends has been called at least once).
public LoadFriendsStatus GetLastLoadFriendsStatus()
{
return mLastLoadFriendsStatus;
}
// Shows the consent UI needed to access the friends list. If no resolution
// intent was cached by a prior LoadFriends failure, a minimal loadFriends
// call is issued first to discover whether consent is actually required.
public void AskForLoadFriendsResolution(Action<UIStatus> callback)
{
if (mFriendsResolutionException == null)
{
GooglePlayGames.OurUtils.Logger.d("The developer asked for access to the friends " +
"list but there is no intent to trigger the UI. This may be because the user " +
"has granted access already or the game has not called loadFriends() before.");
// Probe with a 1-item page purely to surface a resolution intent if needed.
using (var playersClient = getPlayersClient())
using (
var task = playersClient.Call<AndroidJavaObject>("loadFriends", /* pageSize= */ 1,
/* forceReload= */ false))
{
AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
task, annotatedData => { InvokeCallbackOnGameThread(callback, UIStatus.Valid); });
AndroidTaskUtils.AddOnFailureListener(task, exception =>
{
AndroidHelperFragment.IsResolutionRequired(exception, resolutionRequired =>
{
if (resolutionRequired)
{
mFriendsResolutionException =
exception.Call<AndroidJavaObject>("getResolution");
AndroidHelperFragment.AskForLoadFriendsResolution(
mFriendsResolutionException, AsOnGameThreadCallback(callback));
}
else
{
var statusCode = exception.Call<int>("getStatusCode");
if (statusCode == /* GamesClientStatusCodes.NETWORK_ERROR_NO_DATA */ 26504)
{
InvokeCallbackOnGameThread(callback, UIStatus.NetworkError);
return;
}
Debug.Log("LoadFriends failed with status code: " + statusCode);
InvokeCallbackOnGameThread(callback, UIStatus.InternalError);
}
});
return;
});
}
}
else
{
// Reuse the resolution intent cached by the last LoadFriends failure.
AndroidHelperFragment.AskForLoadFriendsResolution(mFriendsResolutionException,
AsOnGameThreadCallback(callback));
}
}
// Shows the profile-comparison UI for another player, supplying in-game name
// hints for both players; the UI result is delivered via the sign-out-aware
// game-thread callback.
public void ShowCompareProfileWithAlternativeNameHintsUI(string playerId,
string otherPlayerInGameName,
string currentPlayerInGameName,
Action<UIStatus> callback)
{
AndroidHelperFragment.ShowCompareProfileWithAlternativeNameHintsUI(
playerId, otherPlayerInGameName, currentPlayerInGameName,
GetUiSignOutCallbackOnGameThread(callback));
}
// Queries whether the current player's friends list is visible to the game;
// any failure is reported as NetworkError.
public void GetFriendsListVisibility(bool forceReload,
Action<FriendsListVisibilityStatus> callback)
{
using (var playersClient = getPlayersClient())
using (
var task = playersClient.Call<AndroidJavaObject>("getCurrentPlayer", forceReload))
{
AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(task, annotatedData =>
{
AndroidJavaObject currentPlayerInfo =
annotatedData.Call<AndroidJavaObject>("get").Call<AndroidJavaObject>(
"getCurrentPlayerInfo");
int playerListVisibility =
currentPlayerInfo.Call<int>("getFriendsListVisibilityStatus");
InvokeCallbackOnGameThread(callback,
AndroidJavaConverter.ToFriendsListVisibilityStatus(playerListVisibility));
});
AndroidTaskUtils.AddOnFailureListener(task, exception =>
{
// All failures are mapped to NetworkError for this query.
InvokeCallbackOnGameThread(callback, FriendsListVisibilityStatus.NetworkError);
return;
});
}
}
// Returns the friends loaded by the most recent LoadFriends call
// (empty until a load has completed successfully).
public IUserProfile[] GetFriends()
{
return mFriends;
}
///<summary>Signs the user out without a completion callback.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.SignOut"/>
public void SignOut()
{
SignOut( /* uiCallback= */ null);
}
// Signs the user out, resets the auth state, invokes the optional callback on
// the game thread, and re-enables the sign-in prompt for the next attempt.
public void SignOut(Action uiCallback)
{
// Never authenticated: nothing to tear down, just notify.
if (mTokenClient == null)
{
InvokeCallbackOnGameThread(uiCallback);
return;
}
mTokenClient.Signout();
mAuthState = AuthState.Unauthenticated;
if (uiCallback != null)
{
InvokeCallbackOnGameThread(uiCallback);
}
PlayGamesHelperObject.RunOnGameThread(() => SignInHelper.SetPromptUiSignIn(true));
}
///<summary>The signed-in player's id, or null when signed out.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetUserId"/>
public string GetUserId()
{
// Read the volatile field once so the null check and the access agree.
Player current = mUser;
return current == null ? null : current.id;
}
///<summary>The signed-in player's display name, or null when signed out.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetUserDisplayName"/>
public string GetUserDisplayName()
{
// Read the volatile field once so the null check and the access agree.
Player current = mUser;
return current == null ? null : current.userName;
}
///<summary>The signed-in player's avatar URL, or null when signed out.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetUserImageUrl"/>
public string GetUserImageUrl()
{
// Read the volatile field once so the null check and the access agree.
Player current = mUser;
return current == null ? null : current.AvatarURL;
}
// Sets the screen anchor for Play Games popups. Logs a warning (but still
// proceeds) when called unauthenticated.
public void SetGravityForPopups(Gravity gravity)
{
if (!IsAuthenticated())
{
GooglePlayGames.OurUtils.Logger.d("Cannot call SetGravityForPopups when not authenticated");
}
// The empty statement after the using is intentional: it immediately
// disposes the Task object returned by setGravityForPopups (warning 0642
// is suppressed at the top of this file).
using (var gamesClient = getGamesClient())
using (gamesClient.Call<AndroidJavaObject>("setGravityForPopups",
(int) gravity | (int) Gravity.CENTER_HORIZONTAL))
;
}
///<summary>Loads the player's engagement/spend statistics and reports them,
/// with a status code, on the game thread.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetPlayerStats"/>
public void GetPlayerStats(Action<CommonStatusCodes, PlayerStats> callback)
{
using (var playerStatsClient = getPlayerStatsClient())
using (var task = playerStatsClient.Call<AndroidJavaObject>("loadPlayerStats", /* forceReload= */ false))
{
AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
task,
annotatedData =>
{
using (var playerStatsJava = annotatedData.Call<AndroidJavaObject>("get"))
{
// Copy every field out of the Java object before it is disposed.
int numberOfPurchases = playerStatsJava.Call<int>("getNumberOfPurchases");
float avgSessionLength = playerStatsJava.Call<float>("getAverageSessionLength");
int daysSinceLastPlayed = playerStatsJava.Call<int>("getDaysSinceLastPlayed");
int numberOfSessions = playerStatsJava.Call<int>("getNumberOfSessions");
float sessionPercentile = playerStatsJava.Call<float>("getSessionPercentile");
float spendPercentile = playerStatsJava.Call<float>("getSpendPercentile");
float spendProbability = playerStatsJava.Call<float>("getSpendProbability");
float churnProbability = playerStatsJava.Call<float>("getChurnProbability");
float highSpenderProbability = playerStatsJava.Call<float>("getHighSpenderProbability");
float totalSpendNext28Days = playerStatsJava.Call<float>("getTotalSpendNext28Days");
PlayerStats result = new PlayerStats(
numberOfPurchases,
avgSessionLength,
daysSinceLastPlayed,
numberOfSessions,
sessionPercentile,
spendPercentile,
spendProbability,
churnProbability,
highSpenderProbability,
totalSpendNext28Days);
InvokeCallbackOnGameThread(callback, CommonStatusCodes.Success, result);
}
});
AddOnFailureListenerWithSignOut(
task,
e =>
{
Debug.Log("GetPlayerStats failed: " + e.Call<string>("toString"));
// The failure listener may have signed us out; report accordingly.
var statusCode = IsAuthenticated()
? CommonStatusCodes.InternalError
: CommonStatusCodes.SignInRequired;
InvokeCallbackOnGameThread(callback, statusCode, new PlayerStats());
});
}
}
///<summary>Loads the player profiles for the given user ids and delivers
/// them (in the same order as <paramref name="userIds"/>) on the game
/// thread; unauthenticated calls yield an empty array.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.LoadUsers"/>
public void LoadUsers(string[] userIds, Action<IUserProfile[]> callback)
{
if (!IsAuthenticated())
{
InvokeCallbackOnGameThread(callback, new IUserProfile[0]);
return;
}
using (var playersClient = getPlayersClient())
{
object countLock = new object();
int count = userIds.Length;
int resultCount = 0;
IUserProfile[] users = new IUserProfile[count];
for (int i = 0; i < count; ++i)
{
// Bug fix: copy the loop variable before capturing it. A C# 'for'
// variable is a single variable shared by all iterations, so the
// asynchronous failure lambda below would otherwise observe a
// stale value of 'i' (typically 'count') when it eventually runs.
int index = i;
using (var task = playersClient.Call<AndroidJavaObject>("loadPlayer", userIds[i]))
{
AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
task,
annotatedData =>
{
using (var player = annotatedData.Call<AndroidJavaObject>("get"))
{
// Responses can arrive out of order; place the result by id.
string playerId = player.Call<string>("getPlayerId");
for (int j = 0; j < count; ++j)
{
if (playerId == userIds[j])
{
users[j] = AndroidJavaConverter.ToPlayer(player);
break;
}
}
// Fire the callback exactly once, after the final response.
lock (countLock)
{
++resultCount;
if (resultCount == count)
{
InvokeCallbackOnGameThread(callback, users);
}
}
}
});
AddOnFailureListenerWithSignOut(task, exception =>
{
Debug.Log("LoadUsers failed for index " + index +
" with: " + exception.Call<string>("toString"));
lock (countLock)
{
++resultCount;
if (resultCount == count)
{
InvokeCallbackOnGameThread(callback, users);
}
}
});
}
}
}
}
///<summary>Loads all achievement definitions and progress for the signed-in
/// player and reports them on the game thread (empty array on failure).</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.LoadAchievements"/>
public void LoadAchievements(Action<Achievement[]> callback)
{
using (var achievementsClient = getAchievementsClient())
using (var task = achievementsClient.Call<AndroidJavaObject>("load", /* forceReload= */ false))
{
AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
task,
annotatedData =>
{
using (var achievementBuffer = annotatedData.Call<AndroidJavaObject>("get"))
{
int count = achievementBuffer.Call<int>("getCount");
Achievement[] result = new Achievement[count];
for (int i = 0; i < count; ++i)
{
// Translate each Java achievement into the plugin's model.
Achievement achievement = new Achievement();
using (var javaAchievement = achievementBuffer.Call<AndroidJavaObject>("get", i))
{
achievement.Id = javaAchievement.Call<string>("getAchievementId");
achievement.Description = javaAchievement.Call<string>("getDescription");
achievement.Name = javaAchievement.Call<string>("getName");
achievement.Points = javaAchievement.Call<ulong>("getXpValue");
long timestamp = javaAchievement.Call<long>("getLastUpdatedTimestamp");
achievement.LastModifiedTime = AndroidJavaConverter.ToDateTime(timestamp);
achievement.RevealedImageUrl = javaAchievement.Call<string>("getRevealedImageUrl");
achievement.UnlockedImageUrl = javaAchievement.Call<string>("getUnlockedImageUrl");
achievement.IsIncremental =
javaAchievement.Call<int>("getType") == 1 /* TYPE_INCREMENTAL */;
if (achievement.IsIncremental)
{
achievement.CurrentSteps = javaAchievement.Call<int>("getCurrentSteps");
achievement.TotalSteps = javaAchievement.Call<int>("getTotalSteps");
}
int state = javaAchievement.Call<int>("getState");
achievement.IsUnlocked = state == 0 /* STATE_UNLOCKED */;
achievement.IsRevealed = state == 1 /* STATE_REVEALED */;
}
result[i] = achievement;
}
// Release the underlying data buffer before reporting results.
achievementBuffer.Call("release");
InvokeCallbackOnGameThread(callback, result);
}
});
AddOnFailureListenerWithSignOut(
task,
exception =>
{
Debug.Log("LoadAchievements failed: " + exception.Call<string>("toString"));
InvokeCallbackOnGameThread(callback, new Achievement[0]);
});
}
}
///<summary>Unlocks the achievement with the given id for the signed-in player.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.UnlockAchievement"/>
public void UnlockAchievement(string achId, Action<bool> callback)
{
    // Nothing to unlock without a signed-in player.
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, false);
        return;
    }
    using (var client = getAchievementsClient())
    {
        // Fire-and-forget: the Java client queues the unlock on its own.
        client.Call("unlock", achId);
        InvokeCallbackOnGameThread(callback, true);
    }
}
///<summary>Reveals the hidden achievement with the given id.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.RevealAchievement"/>
public void RevealAchievement(string achId, Action<bool> callback)
{
    // Nothing to reveal without a signed-in player.
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, false);
        return;
    }
    using (var client = getAchievementsClient())
    {
        // Fire-and-forget: the Java client queues the reveal on its own.
        client.Call("reveal", achId);
        InvokeCallbackOnGameThread(callback, true);
    }
}
///<summary>Increments an incremental achievement by the given number of steps.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.IncrementAchievement"/>
public void IncrementAchievement(string achId, int steps, Action<bool> callback)
{
    // Nothing to increment without a signed-in player.
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, false);
        return;
    }
    using (var client = getAchievementsClient())
    {
        // Fire-and-forget: the Java client queues the increment on its own.
        client.Call("increment", achId, steps);
        InvokeCallbackOnGameThread(callback, true);
    }
}
///<summary>Sets an incremental achievement to at least the given number of steps.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.SetStepsAtLeast"/>
public void SetStepsAtLeast(string achId, int steps, Action<bool> callback)
{
    // Requires a signed-in player.
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, false);
        return;
    }
    using (var client = getAchievementsClient())
    {
        // Fire-and-forget: the Java client applies the step floor itself.
        client.Call("setSteps", achId, steps);
        InvokeCallbackOnGameThread(callback, true);
    }
}
///<summary>Shows the built-in achievements UI.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.ShowAchievementsUI"/>
public void ShowAchievementsUI(Action<UIStatus> callback)
{
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, UIStatus.NotAuthorized);
        return;
    }
    // Wrap the callback so a NotAuthorized UI result also signs the user out.
    var uiCallback = GetUiSignOutCallbackOnGameThread(callback);
    AndroidHelperFragment.ShowAchievementsUI(uiCallback);
}
///<summary>Maximum number of score rows returned per leaderboard page.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.LeaderboardMaxResults"/>
public int LeaderboardMaxResults()
{
    return mLeaderboardMaxResults;
}
///<summary>Shows the built-in leaderboard UI for one board, or the picker for all boards.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.ShowLeaderboardUI"/>
public void ShowLeaderboardUI(string leaderboardId, LeaderboardTimeSpan span, Action<UIStatus> callback)
{
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, UIStatus.NotAuthorized);
        return;
    }
    // Wrap the callback so a NotAuthorized UI result also signs the user out.
    var uiCallback = GetUiSignOutCallbackOnGameThread(callback);
    if (leaderboardId == null)
    {
        // No specific board requested: show the all-leaderboards picker.
        AndroidHelperFragment.ShowAllLeaderboardsUI(uiCallback);
        return;
    }
    AndroidHelperFragment.ShowLeaderboardUI(leaderboardId, span, uiCallback);
}
// Attaches a failure listener to a Java Task that first signs the user out
// when the failure indicates an expired/invalid session, then forwards the
// exception to the supplied callback.
private void AddOnFailureListenerWithSignOut(AndroidJavaObject task, Action<AndroidJavaObject> callback)
{
    AndroidTaskUtils.AddOnFailureListener(
        task,
        exception =>
        {
            int statusCode = exception.Call<int>("getStatusCode");
            bool authExpired =
                statusCode == 4 /* CommonStatusCodes.SignInRequired */ ||
                statusCode == 26502 /* GamesClientStatusCodes.CLIENT_RECONNECT_REQUIRED */;
            if (authExpired)
            {
                SignOut();
            }
            callback(exception);
        });
}
// Wraps a UI-status callback so that it runs on the game thread and, when the
// UI reports NotAuthorized, signs the user out before invoking the callback.
private Action<UIStatus> GetUiSignOutCallbackOnGameThread(Action<UIStatus> callback)
{
    Action<UIStatus> uiCallback = status =>
    {
        // NotAuthorized means our session died; sign out first, then report.
        if (status == UIStatus.NotAuthorized)
        {
            SignOut(() =>
            {
                if (callback != null)
                {
                    callback(status);
                }
            });
            return;
        }
        if (callback != null)
        {
            callback(status);
        }
    };
    return AsOnGameThreadCallback(uiCallback);
}
///<summary>Loads a page of scores for the given leaderboard.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.LoadScores"/>
public void LoadScores(string leaderboardId, LeaderboardStart start,
    int rowCount, LeaderboardCollection collection,
    LeaderboardTimeSpan timeSpan,
    Action<LeaderboardScoreData> callback)
{
    using (var client = getLeaderboardsClient())
    {
        // TopScores maps to loadTopScores; anything else centres the page on the player.
        string loadScoresMethod =
            start == LeaderboardStart.TopScores ? "loadTopScores" : "loadPlayerCenteredScores";
        using (var task = client.Call<AndroidJavaObject>(
            loadScoresMethod,
            leaderboardId,
            AndroidJavaConverter.ToLeaderboardVariantTimeSpan(timeSpan),
            AndroidJavaConverter.ToLeaderboardVariantCollection(collection),
            rowCount))
        {
            AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
                task,
                annotatedData =>
                {
                    using (var leaderboardScores = annotatedData.Call<AndroidJavaObject>("get"))
                    {
                        // isStale indicates the SDK served cached (possibly outdated) data.
                        InvokeCallbackOnGameThread(callback, CreateLeaderboardScoreData(
                            leaderboardId,
                            collection,
                            timeSpan,
                            annotatedData.Call<bool>("isStale")
                                ? ResponseStatus.SuccessWithStale
                                : ResponseStatus.Success,
                            leaderboardScores));
                        leaderboardScores.Call("release");
                    }
                });
            AddOnFailureListenerWithSignOut(task, exception =>
            {
                // When the failure needs user resolution, stash the resolution
                // intent so a later call can launch it.
                AndroidHelperFragment.IsResolutionRequired(
                    exception, resolutionRequired =>
                    {
                        if (resolutionRequired)
                        {
                            mFriendsResolutionException = exception.Call<AndroidJavaObject>(
                                "getResolution");
                            InvokeCallbackOnGameThread(
                                callback, new LeaderboardScoreData(leaderboardId,
                                    ResponseStatus.ResolutionRequired));
                        }
                        else
                        {
                            mFriendsResolutionException = null;
                        }
                    });
                Debug.Log("LoadScores failed: " + exception.Call<string>("toString"));
                // NOTE(review): this InternalError callback fires even when the
                // ResolutionRequired callback above already fired — confirm intended.
                InvokeCallbackOnGameThread(
                    callback, new LeaderboardScoreData(leaderboardId,
                        ResponseStatus.InternalError));
            });
        }
    }
}
///<summary>Loads the next/previous page of scores relative to a prior page token.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.LoadMoreScores"/>
public void LoadMoreScores(ScorePageToken token, int rowCount,
    Action<LeaderboardScoreData> callback)
{
    using (var client = getLeaderboardsClient())
    // The token wraps the Java score buffer and the paging direction from a prior load.
    using (var task = client.Call<AndroidJavaObject>("loadMoreScores",
        token.InternalObject, rowCount, AndroidJavaConverter.ToPageDirection(token.Direction)))
    {
        AndroidTaskUtils.AddOnSuccessListener<AndroidJavaObject>(
            task,
            annotatedData =>
            {
                using (var leaderboardScores = annotatedData.Call<AndroidJavaObject>("get"))
                {
                    // isStale indicates the SDK served cached (possibly outdated) data.
                    InvokeCallbackOnGameThread(callback, CreateLeaderboardScoreData(
                        token.LeaderboardId,
                        token.Collection,
                        token.TimeSpan,
                        annotatedData.Call<bool>("isStale")
                            ? ResponseStatus.SuccessWithStale
                            : ResponseStatus.Success,
                        leaderboardScores));
                    leaderboardScores.Call("release");
                }
            });
        AddOnFailureListenerWithSignOut(task, exception =>
        {
            // Stash a resolution intent when user consent is required.
            AndroidHelperFragment.IsResolutionRequired(exception, resolutionRequired =>
            {
                if (resolutionRequired)
                {
                    mFriendsResolutionException =
                        exception.Call<AndroidJavaObject>("getResolution");
                    InvokeCallbackOnGameThread(
                        callback, new LeaderboardScoreData(token.LeaderboardId,
                            ResponseStatus.ResolutionRequired));
                }
                else
                {
                    mFriendsResolutionException = null;
                }
            });
            Debug.Log("LoadMoreScores failed: " + exception.Call<string>("toString"));
            // NOTE(review): this InternalError callback fires even when the
            // ResolutionRequired callback above already fired — confirm intended.
            InvokeCallbackOnGameThread(
                callback, new LeaderboardScoreData(token.LeaderboardId,
                    ResponseStatus.InternalError));
        });
    }
}
// Converts a Java LeaderboardScores result into a managed LeaderboardScoreData:
// copies every score row, builds forward/backward page tokens, and fills in the
// leaderboard title plus the signed-in player's own score when available.
private LeaderboardScoreData CreateLeaderboardScoreData(
    string leaderboardId,
    LeaderboardCollection collection,
    LeaderboardTimeSpan timespan,
    ResponseStatus status,
    AndroidJavaObject leaderboardScoresJava)
{
    LeaderboardScoreData leaderboardScoreData = new LeaderboardScoreData(leaderboardId, status);
    // Deliberately not wrapped in 'using': the buffer is captured by the page
    // tokens below. NOTE(review): confirm the tokens take ownership/release it.
    var scoresBuffer = leaderboardScoresJava.Call<AndroidJavaObject>("getScores");
    int count = scoresBuffer.Call<int>("getCount");
    for (int i = 0; i < count; ++i)
    {
        using (var leaderboardScore = scoresBuffer.Call<AndroidJavaObject>("get", i))
        {
            long timestamp = leaderboardScore.Call<long>("getTimestampMillis");
            System.DateTime date = AndroidJavaConverter.ToDateTime(timestamp);
            ulong rank = (ulong) leaderboardScore.Call<long>("getRank");
            string scoreHolderId = "";
            using (var scoreHolder = leaderboardScore.Call<AndroidJavaObject>("getScoreHolder"))
            {
                scoreHolderId = scoreHolder.Call<string>("getPlayerId");
            }
            ulong score = (ulong) leaderboardScore.Call<long>("getRawScore");
            string metadata = leaderboardScore.Call<string>("getScoreTag");
            leaderboardScoreData.AddScore(new PlayGamesScore(date, leaderboardId,
                rank, scoreHolderId, score, metadata));
        }
    }
    // Both tokens share the same buffer; only the paging direction differs.
    leaderboardScoreData.NextPageToken = new ScorePageToken(scoresBuffer, leaderboardId, collection,
        timespan, ScorePageDirection.Forward);
    leaderboardScoreData.PrevPageToken = new ScorePageToken(scoresBuffer, leaderboardId, collection,
        timespan, ScorePageDirection.Backward);
    using (var leaderboard = leaderboardScoresJava.Call<AndroidJavaObject>("getLeaderboard"))
    using (var variants = leaderboard.Call<AndroidJavaObject>("getVariants"))
    using (var variant = variants.Call<AndroidJavaObject>("get", 0))
    {
        leaderboardScoreData.Title = leaderboard.Call<string>("getDisplayName");
        if (variant.Call<bool>("hasPlayerInfo"))
        {
            // Timestamp 0: the variant does not expose when the player's score was set.
            System.DateTime date = AndroidJavaConverter.ToDateTime(0);
            ulong rank = (ulong) variant.Call<long>("getPlayerRank");
            ulong score = (ulong) variant.Call<long>("getRawPlayerScore");
            string metadata = variant.Call<string>("getPlayerScoreTag");
            leaderboardScoreData.PlayerScore = new PlayGamesScore(date, leaderboardId,
                rank, mUser.id, score, metadata);
        }
        leaderboardScoreData.ApproximateCount = (ulong) variant.Call<long>("getNumScores");
    }
    return leaderboardScoreData;
}
///<summary>Submits a score to the given leaderboard for the signed-in player.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.SubmitScore"/>
public void SubmitScore(string leaderboardId, long score, Action<bool> callback)
{
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, false);
        // Bug fix: previously fell through, invoking the callback a second time
        // with 'true' and calling submitScore while signed out. Every sibling
        // method (UnlockAchievement, RevealAchievement, ...) returns here.
        return;
    }
    using (var client = getLeaderboardsClient())
    {
        // Fire-and-forget: the Java client queues the submission itself.
        client.Call("submitScore", leaderboardId, score);
        InvokeCallbackOnGameThread(callback, true);
    }
}
///<summary>Submits a score with a metadata tag to the given leaderboard.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.SubmitScore"/>
public void SubmitScore(string leaderboardId, long score, string metadata,
    Action<bool> callback)
{
    if (!IsAuthenticated())
    {
        InvokeCallbackOnGameThread(callback, false);
        // Bug fix: previously fell through, invoking the callback a second time
        // with 'true' and calling submitScore while signed out (same defect as
        // the other SubmitScore overload).
        return;
    }
    using (var client = getLeaderboardsClient())
    {
        // Fire-and-forget: the Java client queues the submission itself.
        client.Call("submitScore", leaderboardId, score, metadata);
        InvokeCallbackOnGameThread(callback, true);
    }
}
// Requests additional OAuth scopes, then rebuilds the account-bound clients
// before reporting the sign-in status back on the game thread.
public void RequestPermissions(string[] scopes, Action<SignInStatus> callback)
{
    var gameThreadCallback = AsOnGameThreadCallback(callback);
    mTokenClient.RequestPermissions(scopes, status =>
    {
        // The granted account may have changed; recreate dependent clients.
        UpdateClients();
        gameThreadCallback(status);
    });
}
// Recreates all account-bound client wrappers from the token client's
// current account. Guarded by GameServicesLock against concurrent readers.
private void UpdateClients()
{
    lock (GameServicesLock)
    {
        var currentAccount = mTokenClient.GetAccount();
        mSavedGameClient = new AndroidSavedGameClient(this, currentAccount);
        mEventsClient = new AndroidEventsClient(currentAccount);
        // Carry the existing capture-support flag over to the new video client.
        mVideoClient = new AndroidVideoClient(mVideoClient.IsCaptureSupported(), currentAccount);
    }
}
/// <summary>Returns whether or not user has given permissions for given scopes.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.HasPermissions"/>
public bool HasPermissions(string[] scopes)
{
    // Delegates to the token client, which tracks the granted OAuth scopes.
    return mTokenClient.HasPermissions(scopes);
}
///<summary>Returns the saved-game client bound to the current account.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetSavedGameClient"/>
public ISavedGameClient GetSavedGameClient()
{
    // Lock so we never observe a client mid-rebuild in UpdateClients.
    lock (GameServicesLock)
    {
        return mSavedGameClient;
    }
}
///<summary>Returns the events client bound to the current account.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetEventsClient"/>
public IEventsClient GetEventsClient()
{
    // Lock so we never observe a client mid-rebuild in UpdateClients.
    lock (GameServicesLock)
    {
        return mEventsClient;
    }
}
///<summary>Returns the video client bound to the current account.</summary>
/// <seealso cref="GooglePlayGames.BasicApi.IPlayGamesClient.GetVideoClient"/>
public IVideoClient GetVideoClient()
{
    // Lock so we never observe a client mid-rebuild in UpdateClients.
    lock (GameServicesLock)
    {
        return mVideoClient;
    }
}
// Creates a Java AchievementsClient bound to the current activity and account.
private AndroidJavaObject getAchievementsClient()
{
    return mGamesClass.CallStatic<AndroidJavaObject>("getAchievementsClient",
        AndroidHelperFragment.GetActivity(), mTokenClient.GetAccount());
}
// Creates a Java GamesClient bound to the current activity and account.
private AndroidJavaObject getGamesClient()
{
    return mGamesClass.CallStatic<AndroidJavaObject>("getGamesClient", AndroidHelperFragment.GetActivity(),
        mTokenClient.GetAccount());
}
// Creates a Java PlayersClient bound to the current activity and account.
private AndroidJavaObject getPlayersClient()
{
    return mGamesClass.CallStatic<AndroidJavaObject>("getPlayersClient", AndroidHelperFragment.GetActivity(),
        mTokenClient.GetAccount());
}
// Creates a Java LeaderboardsClient bound to the current activity and account.
private AndroidJavaObject getLeaderboardsClient()
{
    return mGamesClass.CallStatic<AndroidJavaObject>("getLeaderboardsClient",
        AndroidHelperFragment.GetActivity(), mTokenClient.GetAccount());
}
// Creates a Java PlayerStatsClient bound to the current activity and account.
private AndroidJavaObject getPlayerStatsClient()
{
    return mGamesClass.CallStatic<AndroidJavaObject>("getPlayerStatsClient",
        AndroidHelperFragment.GetActivity(), mTokenClient.GetAccount());
}
// Creates a Java VideosClient bound to the current activity and account.
private AndroidJavaObject getVideosClient()
{
    return mGamesClass.CallStatic<AndroidJavaObject>("getVideosClient", AndroidHelperFragment.GetActivity(),
        mTokenClient.GetAccount());
}
}
}
#endif
| |
// Copyright 2006 Alp Toker <alp@atoker.com>
// This software is made available under the MIT License
// See COPYING for details
using System;
using System.Text;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
namespace NDesk.DBus
{
// Marshals CLR values into the D-Bus wire format. All Write overloads append
// to an internal MemoryStream, honouring the writer's endianness and the
// alignment rules of the D-Bus marshalling specification.
class MessageWriter
{
    protected EndianFlag endianness;  // byte order written to the wire
    protected MemoryStream stream;    // accumulates the marshalled bytes
    public Connection connection;
    //a default constructor is a bad idea for now as we want to make sure the header and content-type match
    public MessageWriter () : this (Connection.NativeEndianness) {}
    // Creates a writer that marshals using the given byte order.
    public MessageWriter (EndianFlag endianness)
    {
        this.endianness = endianness;
        stream = new MemoryStream ();
    }
    // Returns a copy of everything written so far.
    public byte[] ToArray ()
    {
        //TODO: mark the writer locked or something here
        return stream.ToArray ();
    }
    // Pads the stream out to an 8-byte boundary to finish the message body.
    public void CloseWrite ()
    {
        WritePad (8);
    }
    public void Write (byte val)
    {
        stream.WriteByte (val);
    }
    // D-Bus BOOLEAN is marshalled as a 32-bit 0/1 value.
    public void Write (bool val)
    {
        Write ((uint) (val ? 1 : 0));
    }
    // Writes 2 bytes from 'data', byte-swapped when the writer's endianness
    // differs from the host's. Aligned to a 2-byte boundary.
    unsafe protected void MarshalUShort (byte *data)
    {
        WritePad (2);
        byte[] dst = new byte[2];
        if (endianness == Connection.NativeEndianness) {
            dst[0] = data[0];
            dst[1] = data[1];
        } else {
            dst[0] = data[1];
            dst[1] = data[0];
        }
        stream.Write (dst, 0, 2);
    }
    unsafe public void Write (short val)
    {
        MarshalUShort ((byte*)&val);
    }
    unsafe public void Write (ushort val)
    {
        MarshalUShort ((byte*)&val);
    }
    // Writes 4 bytes with optional byte swap. Aligned to a 4-byte boundary.
    unsafe protected void MarshalUInt (byte *data)
    {
        WritePad (4);
        byte[] dst = new byte[4];
        if (endianness == Connection.NativeEndianness) {
            dst[0] = data[0];
            dst[1] = data[1];
            dst[2] = data[2];
            dst[3] = data[3];
        } else {
            dst[0] = data[3];
            dst[1] = data[2];
            dst[2] = data[1];
            dst[3] = data[0];
        }
        stream.Write (dst, 0, 4);
    }
    unsafe public void Write (int val)
    {
        MarshalUInt ((byte*)&val);
    }
    unsafe public void Write (uint val)
    {
        MarshalUInt ((byte*)&val);
    }
    // Writes 8 bytes with optional byte swap. Aligned to an 8-byte boundary.
    unsafe protected void MarshalULong (byte *data)
    {
        WritePad (8);
        byte[] dst = new byte[8];
        if (endianness == Connection.NativeEndianness) {
            for (int i = 0; i < 8; ++i)
                dst[i] = data[i];
        } else {
            for (int i = 0; i < 8; ++i)
                dst[i] = data[7 - i];
        }
        stream.Write (dst, 0, 8);
    }
    unsafe public void Write (long val)
    {
        MarshalULong ((byte*)&val);
    }
    unsafe public void Write (ulong val)
    {
        MarshalULong ((byte*)&val);
    }
#if !DISABLE_SINGLE
    // Single is reinterpreted as its 4 raw bytes (same path as uint).
    unsafe public void Write (float val)
    {
        MarshalUInt ((byte*)&val);
    }
#endif
    // Double is reinterpreted as its 8 raw bytes (same path as ulong).
    unsafe public void Write (double val)
    {
        MarshalULong ((byte*)&val);
    }
    // STRING: uint byte length, UTF-8 data, trailing NUL.
    public void Write (string val)
    {
        byte[] utf8_data = Encoding.UTF8.GetBytes (val);
        Write ((uint)utf8_data.Length);
        stream.Write (utf8_data, 0, utf8_data.Length);
        WriteNull ();
    }
    // OBJECT_PATH is marshalled exactly like a string.
    public void Write (ObjectPath val)
    {
        Write (val.Value);
    }
    // SIGNATURE: single length byte, ASCII data, trailing NUL.
    public void Write (Signature val)
    {
        byte[] ascii_data = val.GetBuffer ();
        if (ascii_data.Length > Protocol.MaxSignatureLength)
            throw new Exception ("Signature length " + ascii_data.Length + " exceeds maximum allowed " + Protocol.MaxSignatureLength + " bytes");
        Write ((byte)ascii_data.Length);
        stream.Write (ascii_data, 0, ascii_data.Length);
        WriteNull ();
    }
    // Marshals composite (non-primitive) values: arrays, dictionaries,
    // exported bus objects, and user-defined value types.
    public void WriteComplex (object val, Type type)
    {
        if (type == typeof (void))
            return;
        if (type.IsArray) {
            WriteArray (val, type.GetElementType ());
        } else if (type.IsGenericType && (type.GetGenericTypeDefinition () == typeof (IDictionary<,>) || type.GetGenericTypeDefinition () == typeof (Dictionary<,>))) {
            Type[] genArgs = type.GetGenericArguments ();
            System.Collections.IDictionary idict = (System.Collections.IDictionary)val;
            WriteFromDict (genArgs[0], genArgs[1], idict);
        } else if (Mapper.IsPublic (type)) {
            WriteObject (type, val);
        } else if (!type.IsPrimitive && !type.IsEnum) {
            WriteValueType (val, type);
        /*
        } else if (type.IsGenericType && type.GetGenericTypeDefinition () == typeof (Nullable<>)) {
            //is it possible to support nullable types?
            Type[] genArgs = type.GetGenericArguments ();
            WriteVariant (genArgs[0], val);
        */
        } else {
            throw new Exception ("Can't write");
        }
    }
    // Dispatches a value to the appropriate Write overload based on its
    // declared CLR type. Primitives/enums fall through to the DType path.
    public void Write (Type type, object val)
    {
        if (type == typeof (void))
            return;
        if (type.IsArray) {
            WriteArray (val, type.GetElementType ());
        } else if (type == typeof (ObjectPath)) {
            Write ((ObjectPath)val);
        } else if (type == typeof (Signature)) {
            Write ((Signature)val);
        } else if (type == typeof (object)) {
            // Plain object means a D-Bus VARIANT.
            Write (val);
        } else if (type == typeof (string)) {
            Write ((string)val);
        } else if (type.IsGenericType && (type.GetGenericTypeDefinition () == typeof (IDictionary<,>) || type.GetGenericTypeDefinition () == typeof (Dictionary<,>))) {
            Type[] genArgs = type.GetGenericArguments ();
            System.Collections.IDictionary idict = (System.Collections.IDictionary)val;
            WriteFromDict (genArgs[0], genArgs[1], idict);
        } else if (Mapper.IsPublic (type)) {
            WriteObject (type, val);
        } else if (!type.IsPrimitive && !type.IsEnum) {
            WriteValueType (val, type);
        } else {
            Write (Signature.TypeToDType (type), val);
        }
    }
    //helper method, should not be used as it boxes needlessly
    public void Write (DType dtype, object val)
    {
        switch (dtype)
        {
            case DType.Byte:
            {
                Write ((byte)val);
            }
                break;
            case DType.Boolean:
            {
                Write ((bool)val);
            }
                break;
            case DType.Int16:
            {
                Write ((short)val);
            }
                break;
            case DType.UInt16:
            {
                Write ((ushort)val);
            }
                break;
            case DType.Int32:
            {
                Write ((int)val);
            }
                break;
            case DType.UInt32:
            {
                Write ((uint)val);
            }
                break;
            case DType.Int64:
            {
                Write ((long)val);
            }
                break;
            case DType.UInt64:
            {
                Write ((ulong)val);
            }
                break;
#if !DISABLE_SINGLE
            case DType.Single:
            {
                Write ((float)val);
            }
                break;
#endif
            case DType.Double:
            {
                Write ((double)val);
            }
                break;
            case DType.String:
            {
                Write ((string)val);
            }
                break;
            case DType.ObjectPath:
            {
                Write ((ObjectPath)val);
            }
                break;
            case DType.Signature:
            {
                Write ((Signature)val);
            }
                break;
            case DType.Variant:
            {
                Write ((object)val);
            }
                break;
            default:
                throw new Exception ("Unhandled D-Bus type: " + dtype);
        }
    }
    // Marshals an exported object as its object path reference.
    public void WriteObject (Type type, object val)
    {
        ObjectPath path;
        BusObject bobj = val as BusObject;
        // Remoting proxies hide the BusObject behind their lifetime service.
        if (bobj == null && val is MarshalByRefObject) {
            bobj = ((MarshalByRefObject)val).GetLifetimeService () as BusObject;
        }
        if (bobj == null)
            throw new Exception ("No object reference to write");
        path = bobj.Path;
        Write (path);
    }
    //variant
    public void Write (object val)
    {
        //TODO: maybe support sending null variants
        if (val == null)
            throw new NotSupportedException ("Cannot send null variant");
        Type type = val.GetType ();
        WriteVariant (type, val);
    }
    // VARIANT: the value's signature followed by the value itself.
    public void WriteVariant (Type type, object val)
    {
        Signature sig = Signature.GetSig (type);
        Write (sig);
        Write (type, val);
    }
    //this requires a seekable stream for now
    // ARRAY: uint byte length (written last by seeking back), padding to the
    // element alignment, then the elements.
    public void WriteArray (object obj, Type elemType)
    {
        Array val = (Array)obj;
        //TODO: more fast paths for primitive arrays
        if (elemType == typeof (byte)) {
            if (val.Length > Protocol.MaxArrayLength)
                throw new Exception ("Array length " + val.Length + " exceeds maximum allowed " + Protocol.MaxArrayLength + " bytes");
            Write ((uint)val.Length);
            stream.Write ((byte[])val, 0, val.Length);
            return;
        }
        // Reserve space for the length, then backpatch it once known.
        long origPos = stream.Position;
        Write ((uint)0);
        //advance to the alignment of the element
        WritePad (Protocol.GetAlignment (Signature.TypeToDType (elemType)));
        long startPos = stream.Position;
        foreach (object elem in val)
            Write (elemType, elem);
        long endPos = stream.Position;
        // Length covers element data only, not the padding before it.
        uint ln = (uint)(endPos - startPos);
        stream.Position = origPos;
        if (ln > Protocol.MaxArrayLength)
            throw new Exception ("Array length " + ln + " exceeds maximum allowed " + Protocol.MaxArrayLength + " bytes");
        Write (ln);
        stream.Position = endPos;
    }
    // DICT: marshalled as an array of 8-aligned key/value entries, with the
    // byte length backpatched like WriteArray.
    public void WriteFromDict (Type keyType, Type valType, System.Collections.IDictionary val)
    {
        long origPos = stream.Position;
        Write ((uint)0);
        //advance to the alignment of the element
        //WritePad (Protocol.GetAlignment (Signature.TypeToDType (type)));
        WritePad (8);
        long startPos = stream.Position;
        foreach (System.Collections.DictionaryEntry entry in val)
        {
            // Each dict entry is a struct and therefore 8-aligned.
            WritePad (8);
            Write (keyType, entry.Key);
            Write (valType, entry.Value);
        }
        long endPos = stream.Position;
        uint ln = (uint)(endPos - startPos);
        stream.Position = origPos;
        if (ln > Protocol.MaxArrayLength)
            throw new Exception ("Dict length " + ln + " exceeds maximum allowed " + Protocol.MaxArrayLength + " bytes");
        Write (ln);
        stream.Position = endPos;
    }
    // Delegates struct marshalling to a generated per-type write method.
    public void WriteValueType (object val, Type type)
    {
        MethodInfo mi = TypeImplementer.GetWriteMethod (type);
        mi.Invoke (null, new object[] {this, val});
    }
    /*
    public void WriteValueTypeOld (object val, Type type)
    {
        WritePad (8);
        if (type.IsGenericType && type.GetGenericTypeDefinition () == typeof (KeyValuePair<,>)) {
            System.Reflection.PropertyInfo key_prop = type.GetProperty ("Key");
            Write (key_prop.PropertyType, key_prop.GetValue (val, null));
            System.Reflection.PropertyInfo val_prop = type.GetProperty ("Value");
            Write (val_prop.PropertyType, val_prop.GetValue (val, null));
            return;
        }
        FieldInfo[] fis = type.GetFields (BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
        foreach (System.Reflection.FieldInfo fi in fis) {
            object elem;
            elem = fi.GetValue (val);
            Write (fi.FieldType, elem);
        }
    }
    */
    // Writes a single NUL terminator byte.
    public void WriteNull ()
    {
        stream.WriteByte (0);
    }
    // Writes zero bytes until the stream position is a multiple of 'alignment'.
    public void WritePad (int alignment)
    {
        int needed = Protocol.PadNeeded ((int)stream.Position, alignment);
        for (int i = 0 ; i != needed ; i++)
            stream.WriteByte (0);
    }
}
}
| |
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
namespace Google.Cloud.Tools.Analyzers
{
/// <summary>
/// Warns about deriving from or exposing types from namespaces which are not to be exposed as dependencies.
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public class PublicDependencyForbiddenAnalyzer : DiagnosticAnalyzer
{
    // Namespace prefixes that must never leak into the public API surface.
    private static readonly ImmutableArray<string> ForbiddenNamespacePrefixes = ImmutableArray.Create("Google.Apis.");
    public const string DiagnosticId = "GCP0003";
    private const string Category = "Usage";
    // Fixed the user-visible title's grammar ("Publicly dependency to ...").
    private static readonly LocalizableString Title = "Public dependency on forbidden namespace";
    private static readonly LocalizableString MessageFormat = "The type '{0}' directly or indirectly depends on the forbidden namespace '{1}'";
    private static readonly LocalizableString Description = "Dependencies on certain namespaces should not be exposed publicly.";
    // readonly: the descriptor is created once and never reassigned.
    private static readonly DiagnosticDescriptor Rule =
        new DiagnosticDescriptor(DiagnosticId, Title, MessageFormat, Category, DiagnosticSeverity.Hidden, isEnabledByDefault: true, description: Description);

    public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get { return ImmutableArray.Create(Rule); } }

    public override void Initialize(AnalysisContext context)
    {
        context.EnableConcurrentExecution();
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.Analyze | GeneratedCodeAnalysisFlags.ReportDiagnostics);
        // Inspect every externally visible member kind that can expose a type.
        context.RegisterSymbolAction(AnalyzeEvent, SymbolKind.Event);
        context.RegisterSymbolAction(AnalyzeField, SymbolKind.Field);
        context.RegisterSymbolAction(AnalyzeMethod, SymbolKind.Method);
        context.RegisterSymbolAction(AnalyzeNamedType, SymbolKind.NamedType);
        context.RegisterSymbolAction(AnalyzeProperty, SymbolKind.Property);
    }

    // Checks the type of an event, handling both event-declaration and
    // field-like event syntax.
    private static void AnalyzeEvent(SymbolAnalysisContext context)
    {
        var eventSymbol = (IEventSymbol)context.Symbol;
        var declNode = context.Symbol.DeclaringSyntaxReferences[0].GetSyntax(context.CancellationToken);
        if (declNode.IsKind(SyntaxKind.EventDeclaration))
        {
            CheckType<EventDeclarationSyntax>(
                context,
                eventSymbol.Type,
                eventNode => eventNode.Type.GetLocation());
        }
        else
        {
            // Field-like event: the type lives on the enclosing variable declaration.
            CheckType<VariableDeclaratorSyntax>(
                context,
                eventSymbol.Type,
                variableDeclaratorNode => (variableDeclaratorNode.Parent as VariableDeclarationSyntax)?.Type.GetLocation());
        }
    }

    // Checks the declared type of a field.
    private static void AnalyzeField(SymbolAnalysisContext context)
    {
        var fieldSymbol = (IFieldSymbol)context.Symbol;
        CheckType<VariableDeclaratorSyntax>(
            context,
            fieldSymbol.Type,
            variableDeclaratorNode => (variableDeclaratorNode.Parent as VariableDeclarationSyntax)?.Type.GetLocation());
    }

    // Checks a method's return type and each parameter type. Accessors,
    // destructors and other compiler-surfaced kinds are skipped because their
    // types are reported via their owning member instead.
    private static void AnalyzeMethod(SymbolAnalysisContext context)
    {
        var methodKind = ((IMethodSymbol)context.Symbol).MethodKind;
        switch (methodKind)
        {
            case MethodKind.AnonymousFunction:
            case MethodKind.DelegateInvoke:
            case MethodKind.Destructor:
            case MethodKind.EventAdd:
            case MethodKind.EventRaise:
            case MethodKind.EventRemove:
            case MethodKind.ExplicitInterfaceImplementation:
            case MethodKind.PropertyGet:
            case MethodKind.PropertySet:
            case MethodKind.StaticConstructor:
            case MethodKind.LocalFunction:
                return;
        }
        var methodSymbol = (IMethodSymbol)context.Symbol;
        if (methodKind != MethodKind.Constructor)
        {
            CheckType<MethodDeclarationSyntax>(
                context,
                methodSymbol.ReturnType,
                methodNode => methodNode.ReturnType.GetLocation());
        }
        for (int i = 0; i < methodSymbol.Parameters.Length; ++i)
        {
            if (methodKind == MethodKind.Constructor)
            {
                CheckType<ConstructorDeclarationSyntax>(
                    context,
                    methodSymbol.Parameters[i].Type,
                    constructorNode => constructorNode.ParameterList.Parameters[i].Type.GetLocation());
            }
            else
            {
                CheckType<MethodDeclarationSyntax>(
                    context,
                    methodSymbol.Parameters[i].Type,
                    methodNode => methodNode.ParameterList.Parameters[i].Type.GetLocation());
            }
        }
    }

    // Checks a named type's dependencies (base type, interfaces, type arguments).
    private static void AnalyzeNamedType(SymbolAnalysisContext context)
    {
        var typeSymbol = (INamedTypeSymbol)context.Symbol;
        CheckType<TypeDeclarationSyntax>(
            context,
            typeSymbol,
            // BaseList is null for types with no bases/interfaces; fall back to
            // no location (consistent with the null-propagating handlers above).
            typeNode => typeNode.BaseList?.GetLocation());
    }

    // Checks a property's type and, for indexers, each parameter type.
    private static void AnalyzeProperty(SymbolAnalysisContext context)
    {
        var propertySymbol = (IPropertySymbol)context.Symbol;
        CheckType<PropertyDeclarationSyntax>(
            context,
            propertySymbol.Type,
            propertyNode => propertyNode.Type.GetLocation());
        for (int i = 0; i < propertySymbol.Parameters.Length; ++i)
        {
            CheckType<IndexerDeclarationSyntax>(
                context,
                propertySymbol.Parameters[i].Type,
                propertyNode => propertyNode.ParameterList.Parameters[i].Type.GetLocation());
        }
    }

    // Reports a diagnostic on the symbol's declaration when 'type' depends
    // (directly or transitively) on a forbidden namespace. getErrorLocation may
    // return null, in which case the diagnostic is reported without a location.
    private static void CheckType<T>(SymbolAnalysisContext context, ITypeSymbol type, Func<T, Location> getErrorLocation)
        where T : CSharpSyntaxNode
    {
        if (!context.Symbol.IsExternallyVisible())
        {
            return;
        }
        if (ForbiddenNamespacePrefixes.Any(context.Symbol.ContainingSymbol.ToDisplayString().StartsWith))
        {
            // If a symbol is actually defined in a forbidden namespace, don't add an error to it.
            return;
        }
#pragma warning disable RS1024 // We're providing an appropriate symbol equality comparer, so all comparisons should be fine.
        var checkedTypes = new Dictionary<ITypeSymbol, TypeCheckResult>(SymbolEqualityComparer.Default);
#pragma warning restore RS1024
        var result = CheckType(type, checkedTypes);
        if (!result.IsTypeForbidden)
        {
            return;
        }
        if (context.Symbol.DeclaringSyntaxReferences.IsEmpty)
        {
            return;
        }
        T declaringNode = (T)context.Symbol.DeclaringSyntaxReferences.FirstOrDefault().GetSyntax(context.CancellationToken);
        context.ReportDiagnostic(
            Diagnostic.Create(
                Rule,
                getErrorLocation(declaringNode),
                type.ToDisplayString(),
                result.ForbiddenNamespace));
    }

    // Recursively determines whether 'type' depends on a forbidden namespace,
    // memoising results in 'checkedTypes' (which also breaks cycles).
    private static TypeCheckResult CheckType(ITypeSymbol type, Dictionary<ITypeSymbol, TypeCheckResult> checkedTypes)
    {
        if (!checkedTypes.TryGetValue(type, out var result))
        {
            // Add a placeholder before we do anything else to prevent recursion issues.
            checkedTypes.Add(type, TypeCheckResult.Allowed);
            result = CheckTypeImpl(type);
            checkedTypes[type] = result;
        }
        return result;

        TypeCheckResult CheckTypeImpl(ITypeSymbol currentType)
        {
            if (currentType.TypeKind == TypeKind.Array)
            {
                return CheckType(((IArrayTypeSymbol)currentType).ElementType, checkedTypes);
            }
            if (currentType is INamedTypeSymbol namedType)
            {
                if (namedType.TypeKind == TypeKind.Delegate)
                {
                    // A delegate depends on its Invoke return and parameter types.
                    var delegateResult = CheckType(namedType.DelegateInvokeMethod.ReturnType, checkedTypes);
                    if (!delegateResult.IsTypeForbidden) {
                        delegateResult =
                            namedType.DelegateInvokeMethod.Parameters.
                                Select(p => CheckType(p.Type, checkedTypes)).
                                FirstOrDefault(c => c.IsTypeForbidden);
                    }
                    if (delegateResult?.IsTypeForbidden == true)
                    {
                        return TypeCheckResult.Forbidden(delegateResult.ForbiddenNamespace);
                    }
                }
                else
                {
                    if (ForbiddenNamespacePrefixes.Any(namedType.ContainingNamespace.ToDisplayString().StartsWith))
                    {
                        return TypeCheckResult.Forbidden(namedType.ContainingNamespace.ToDisplayString());
                    }
                    // Otherwise, the type is forbidden if its base type, any
                    // implemented interface, or any type argument is.
                    var dependentTypeResult =
                        namedType.BaseType == null ? null : CheckType(namedType.BaseType, checkedTypes);
                    if (dependentTypeResult?.IsTypeForbidden != true)
                    {
                        dependentTypeResult =
                            namedType.Interfaces.Select(i => CheckType(i, checkedTypes)).FirstOrDefault(c => c.IsTypeForbidden);
                    }
                    if (dependentTypeResult?.IsTypeForbidden != true)
                    {
                        dependentTypeResult =
                            namedType.TypeArguments.Select(p => CheckType(p, checkedTypes)).FirstOrDefault(c => c.IsTypeForbidden);
                    }
                    if (dependentTypeResult?.IsTypeForbidden == true)
                    {
                        return TypeCheckResult.Forbidden(dependentTypeResult.ForbiddenNamespace);
                    }
                }
            }
            return TypeCheckResult.Allowed;
        }
    }

    // Outcome of a dependency check: either allowed, or forbidden with the
    // offending namespace recorded for the diagnostic message.
    private class TypeCheckResult
    {
        public static readonly TypeCheckResult Allowed = new TypeCheckResult();
        public static TypeCheckResult Forbidden(string forbiddenNamespace) =>
            new TypeCheckResult
            {
                IsTypeForbidden = true,
                ForbiddenNamespace = forbiddenNamespace
            };
        private TypeCheckResult() { }
        public bool IsTypeForbidden { get; private set; }
        public string ForbiddenNamespace { get; private set; }
    }
}
}
| |
using System.Linq;
using System.Reflection;
using FluentMigrator.Expressions;
using FluentMigrator.Runner;
using FluentMigrator.VersionTableInfo;
using Moq;
using NUnit.Framework;
using NUnit.Should;
namespace FluentMigrator.Tests.Unit
{
// Stub IMigrationProcessorOptions with fixed, test-friendly values.
public class TestMigrationProcessorOptions : IMigrationProcessorOptions
{
    // Tests never run in preview mode.
    public bool PreviewOnly
    {
        get { return false; }
    }

    // 30-second command timeout.
    public int Timeout
    {
        get { return 30; }
    }

    // No provider-specific switches.
    public string ProviderSwitches
    {
        get { return string.Empty; }
    }
}
[TestFixture]
public class VersionLoaderTests
{
[Test]
public void CanLoadCustomVersionTableMetaData()
{
    // Arrange: a runner whose processor exposes stub options.
    var runnerMock = new Mock<IMigrationRunner>();
    runnerMock.SetupGet(r => r.Processor.Options).Returns(new TestMigrationProcessorOptions());
    var loader = new VersionLoader(runnerMock.Object, Assembly.GetExecutingAssembly(), new MigrationConventions());

    // Act
    var versionTableMetaData = loader.GetVersionTableMetaData();

    // Assert: the custom metadata type declared in this test assembly wins.
    versionTableMetaData.ShouldBeOfType<TestVersionTableMetaData>();
}
[Test]
public void CanLoadDefaultVersionTableMetaData()
{
    // Arrange: scan an assembly (the core library) with no custom metadata type.
    var runnerMock = new Mock<IMigrationRunner>();
    runnerMock.SetupGet(r => r.Processor.Options).Returns(new TestMigrationProcessorOptions());
    var assemblyWithoutCustomMetaData = typeof(string).Assembly;
    var loader = new VersionLoader(runnerMock.Object, assemblyWithoutCustomMetaData, new MigrationConventions());

    // Act
    var versionTableMetaData = loader.GetVersionTableMetaData();

    // Assert: with no custom type present, the default implementation is used.
    versionTableMetaData.ShouldBeOfType<DefaultVersionTableMetaData>();
}
[Test]
public void DeleteVersionShouldExecuteDeleteDataExpression()
{
    // Arrange: a runner backed by a mock processor we can verify against.
    var processorMock = new Mock<IMigrationProcessor>();
    var runnerMock = new Mock<IMigrationRunner>();
    runnerMock.SetupGet(r => r.Processor).Returns(processorMock.Object);
    var loader = new VersionLoader(runnerMock.Object, Assembly.GetExecutingAssembly(), new MigrationConventions());

    // Expect a DeleteDataExpression targeting the version-table row for version 1.
    processorMock.Setup(p => p.Process(It.Is<DeleteDataExpression>(expression =>
            expression.SchemaName == loader.VersionTableMetaData.SchemaName
            && expression.TableName == loader.VersionTableMetaData.TableName
            && expression.Rows.All(
                definition =>
                    definition.All(
                        pair =>
                            pair.Key == loader.VersionTableMetaData.ColumnName && pair.Value.Equals(1L))))))
        .Verifiable();

    // Act
    loader.DeleteVersion(1);

    // Assert
    processorMock.VerifyAll();
}
[Test]
public void RemoveVersionTableShouldBehaveAsExpected()
{
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
var conventions = new MigrationConventions();
var asm = Assembly.GetExecutingAssembly();
var loader = new VersionLoader(runner.Object, asm, conventions);
processor.Setup(p => p.Process(It.Is<DeleteTableExpression>(expression =>
expression.SchemaName == loader.VersionTableMetaData.SchemaName
&& expression.TableName == loader.VersionTableMetaData.TableName)))
.Verifiable();
processor.Setup(p => p.Process(It.Is<DeleteSchemaExpression>(expression =>
expression.SchemaName == loader.VersionTableMetaData.SchemaName)))
.Verifiable();
loader.RemoveVersionTable();
processor.VerifyAll();
}
[Test]
public void RemoveVersionTableShouldNotRemoveSchemaIfItDidNotOwnTheSchema()
{
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
var conventions = new MigrationConventions();
var asm = Assembly.GetExecutingAssembly();
var loader = new VersionLoader(runner.Object, asm, conventions);
((TestVersionTableMetaData) loader.VersionTableMetaData).OwnsSchema = false;
processor.Setup(p => p.Process(It.Is<DeleteTableExpression>(expression =>
expression.SchemaName == loader.VersionTableMetaData.SchemaName
&& expression.TableName == loader.VersionTableMetaData.TableName)))
.Verifiable();
loader.RemoveVersionTable();
processor.Verify(p => p.Process(It.IsAny<DeleteSchemaExpression>()), Times.Never());
}
[Test]
public void UpdateVersionShouldExecuteInsertDataExpression()
{
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
var conventions = new MigrationConventions();
var asm = Assembly.GetExecutingAssembly();
var loader = new VersionLoader(runner.Object, asm, conventions);
processor.Setup(p => p.Process(It.Is<InsertDataExpression>(expression =>
expression.SchemaName == loader.VersionTableMetaData.SchemaName
&& expression.TableName == loader.VersionTableMetaData.TableName
&& expression.Rows.Any(
definition =>
definition.Any(
pair =>
pair.Key == loader.VersionTableMetaData.ColumnName && pair.Value.Equals(1L))))))
.Verifiable();
loader.UpdateVersionInfo(1);
processor.VerifyAll();
}
[Test]
public void VersionSchemaMigrationOnlyRunOnceEvenIfExistenceChecksReturnFalse()
{
var conventions = new MigrationConventions();
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
var asm = Assembly.GetExecutingAssembly();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
processor.Setup(p => p.SchemaExists(It.IsAny<string>())).Returns(false);
var loader = new VersionLoader(runner.Object, asm, conventions);
loader.LoadVersionInfo();
runner.Verify(r => r.Up(loader.VersionSchemaMigration), Times.Once());
}
[Test]
public void VersionMigrationOnlyRunOnceEvenIfExistenceChecksReturnFalse()
{
var conventions = new MigrationConventions();
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
var asm = Assembly.GetExecutingAssembly();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
processor.Setup(p => p.TableExists(new TestVersionTableMetaData().SchemaName, TestVersionTableMetaData.TABLENAME)).Returns(false);
var loader = new VersionLoader(runner.Object, asm, conventions);
loader.LoadVersionInfo();
runner.Verify(r => r.Up(loader.VersionMigration), Times.Once());
}
[Test]
public void VersionUniqueMigrationOnlyRunOnceEvenIfExistenceChecksReturnFalse()
{
var conventions = new MigrationConventions();
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
var asm = Assembly.GetExecutingAssembly();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
processor.Setup(p => p.ColumnExists(new TestVersionTableMetaData().SchemaName, TestVersionTableMetaData.TABLENAME, TestVersionTableMetaData.APPLIEDONCOLUMNNAME)).Returns(false);
var loader = new VersionLoader(runner.Object, asm, conventions);
loader.LoadVersionInfo();
runner.Verify(r => r.Up(loader.VersionUniqueMigration), Times.Once());
}
[Test]
public void VersionDescriptionMigrationOnlyRunOnceEvenIfExistenceChecksReturnFalse()
{
var conventions = new MigrationConventions();
var processor = new Mock<IMigrationProcessor>();
var runner = new Mock<IMigrationRunner>();
var asm = Assembly.GetExecutingAssembly();
runner.SetupGet(r => r.Processor).Returns(processor.Object);
processor.Setup(p => p.ColumnExists(new TestVersionTableMetaData().SchemaName, TestVersionTableMetaData.TABLENAME, TestVersionTableMetaData.APPLIEDONCOLUMNNAME)).Returns(false);
var loader = new VersionLoader(runner.Object, asm, conventions);
loader.LoadVersionInfo();
runner.Verify(r => r.Up(loader.VersionDescriptionMigration), Times.Once());
}
}
}
| |
// Copyright (C) 2015-2021 The Neo Project.
//
// The neo is free software distributed under the MIT software license,
// see the accompanying file LICENSE in the main directory of the
// project or http://www.opensource.org/licenses/mit-license.php
// for more details.
//
// Redistribution and use in source and binary forms with or without
// modifications are permitted.
using Neo.IO.Json;
using Neo.VM;
using Neo.VM.Types;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using System.Text.Json;
using Array = Neo.VM.Types.Array;
using Boolean = Neo.VM.Types.Boolean;
using Buffer = Neo.VM.Types.Buffer;
namespace Neo.SmartContract
{
/// <summary>
/// A JSON serializer for <see cref="StackItem"/>.
/// </summary>
public static class JsonSerializer
{
    /// <summary>
    /// Serializes a <see cref="StackItem"/> to a <see cref="JObject"/>.
    /// </summary>
    /// <param name="item">The <see cref="StackItem"/> to serialize.</param>
    /// <returns>The serialized object.</returns>
    /// <remarks>
    /// Recursive; unlike <see cref="SerializeToByteArray"/> there is no output
    /// size limit here. Throws <see cref="InvalidOperationException"/> for
    /// integers outside the JSON safe-integer range and
    /// <see cref="FormatException"/> for unsupported item types or non-string
    /// map keys.
    /// </remarks>
    public static JObject Serialize(StackItem item)
    {
        switch (item)
        {
            case Array array:
            {
                // Recursively serialize each element (implicit conversion to a JSON array).
                return array.Select(p => Serialize(p)).ToArray();
            }
            case ByteString _:
            case Buffer _:
            {
                // Byte strings and buffers are emitted as JSON strings.
                return item.GetString();
            }
            case Integer num:
            {
                var integer = num.GetInteger();
                // Reject values that cannot round-trip through an IEEE double
                // (JSON numbers); otherwise precision would be silently lost.
                if (integer > JNumber.MAX_SAFE_INTEGER || integer < JNumber.MIN_SAFE_INTEGER)
                    throw new InvalidOperationException();
                return (double)integer;
            }
            case Boolean boolean:
            {
                return boolean.GetBoolean();
            }
            case Map map:
            {
                var ret = new JObject();
                foreach (var entry in map)
                {
                    // JSON object keys must be strings; only ByteString keys are allowed.
                    if (!(entry.Key is ByteString)) throw new FormatException();
                    var key = entry.Key.GetString();
                    var value = Serialize(entry.Value);
                    ret[key] = value;
                }
                return ret;
            }
            case Null _:
            {
                return JObject.Null;
            }
            default: throw new FormatException();
        }
    }
    /// <summary>
    /// Serializes a <see cref="StackItem"/> to JSON.
    /// </summary>
    /// <param name="item">The <see cref="StackItem"/> to convert.</param>
    /// <param name="maxSize">The maximum size of the JSON output.</param>
    /// <returns>A byte array containing the JSON output.</returns>
    /// <remarks>
    /// Iterative counterpart of <see cref="Serialize"/>: a non-generic
    /// <see cref="Stack"/> interleaves pending <see cref="StackItem"/>s with
    /// <see cref="JsonTokenType"/> sentinels that mark where to close an
    /// array/object or to emit a property name. The output size is checked
    /// against <paramref name="maxSize"/> after every token and once more
    /// after the final flush.
    /// </remarks>
    public static byte[] SerializeToByteArray(StackItem item, uint maxSize)
    {
        using MemoryStream ms = new();
        using Utf8JsonWriter writer = new(ms, new JsonWriterOptions
        {
            Indented = false,
            SkipValidation = false
        });
        Stack stack = new();
        stack.Push(item);
        while (stack.Count > 0)
        {
            switch (stack.Pop())
            {
                case Array array:
                    writer.WriteStartArray();
                    // Sentinel first, then elements in reverse so they pop in order.
                    stack.Push(JsonTokenType.EndArray);
                    for (int i = array.Count - 1; i >= 0; i--)
                        stack.Push(array[i]);
                    break;
                case JsonTokenType.EndArray:
                    writer.WriteEndArray();
                    break;
                case StackItem buffer when buffer is ByteString || buffer is Buffer:
                    writer.WriteStringValue(buffer.GetString());
                    break;
                case Integer num:
                {
                    var integer = num.GetInteger();
                    // Same safe-integer guard as in Serialize().
                    if (integer > JNumber.MAX_SAFE_INTEGER || integer < JNumber.MIN_SAFE_INTEGER)
                        throw new InvalidOperationException();
                    writer.WriteNumberValue((double)integer);
                    break;
                }
                case Boolean boolean:
                    writer.WriteBooleanValue(boolean.GetBoolean());
                    break;
                case Map map:
                    writer.WriteStartObject();
                    stack.Push(JsonTokenType.EndObject);
                    // Reverse() so pairs pop in original map order; each pair is
                    // pushed as value, key, then a PropertyName sentinel on top.
                    foreach (var pair in map.Reverse())
                    {
                        if (!(pair.Key is ByteString)) throw new FormatException();
                        stack.Push(pair.Value);
                        stack.Push(pair.Key);
                        stack.Push(JsonTokenType.PropertyName);
                    }
                    break;
                case JsonTokenType.EndObject:
                    writer.WriteEndObject();
                    break;
                case JsonTokenType.PropertyName:
                    // The key pushed directly beneath the sentinel.
                    writer.WritePropertyName(((StackItem)stack.Pop()).GetString());
                    break;
                case Null _:
                    writer.WriteNullValue();
                    break;
                default:
                    throw new InvalidOperationException();
            }
            // Include bytes still buffered in the writer when enforcing the limit.
            if (ms.Position + writer.BytesPending > maxSize) throw new InvalidOperationException();
        }
        writer.Flush();
        if (ms.Position > maxSize) throw new InvalidOperationException();
        return ms.ToArray();
    }
    /// <summary>
    /// Deserializes a <see cref="StackItem"/> from <see cref="JObject"/>.
    /// </summary>
    /// <param name="json">The <see cref="JObject"/> to deserialize.</param>
    /// <param name="limits">The limits for the deserialization.</param>
    /// <param name="referenceCounter">The <see cref="ReferenceCounter"/> used by the <see cref="StackItem"/>.</param>
    /// <returns>The deserialized <see cref="StackItem"/>.</returns>
    public static StackItem Deserialize(JObject json, ExecutionEngineLimits limits, ReferenceCounter referenceCounter = null)
    {
        // Copy the limit into a local so the recursive overload can decrement it by ref.
        uint maxStackSize = limits.MaxStackSize;
        return Deserialize(json, ref maxStackSize, referenceCounter);
    }
    // Recursive worker. maxStackSize is a shared remaining-item budget: it is
    // decremented once per produced item (and once per map entry) and a
    // FormatException is thrown when it is exhausted.
    private static StackItem Deserialize(JObject json, ref uint maxStackSize, ReferenceCounter referenceCounter)
    {
        if (maxStackSize-- == 0) throw new FormatException();
        switch (json)
        {
            case null:
            {
                return StackItem.Null;
            }
            case JArray array:
            {
                List<StackItem> list = new(array.Count);
                foreach (JObject obj in array)
                    list.Add(Deserialize(obj, ref maxStackSize, referenceCounter));
                return new Array(referenceCounter, list);
            }
            case JString str:
            {
                // Implicitly converted to a string-backed stack item.
                return str.Value;
            }
            case JNumber num:
            {
                // Only whole numbers map to Integer items.
                if ((num.Value % 1) != 0) throw new FormatException("Decimal value is not allowed");
                return (BigInteger)num.Value;
            }
            case JBoolean boolean:
            {
                return new Boolean(boolean.Value);
            }
            case JObject obj:
            {
                // Must come after the subclass cases above: JObject is their base type.
                var item = new Map(referenceCounter);
                foreach (var entry in obj.Properties)
                {
                    // Each key/value pair also consumes one unit of the budget.
                    if (maxStackSize-- == 0) throw new FormatException();
                    var key = entry.Key;
                    var value = Deserialize(entry.Value, ref maxStackSize, referenceCounter);
                    item[key] = value;
                }
                return item;
            }
            default: throw new FormatException();
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace UnitTest.SimpleInMem
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Bond.Comm;
using Bond.Comm.Layers;
using Bond.Comm.SimpleInMem;
using NUnit.Framework;
using UnitTest.Comm;
using UnitTest.Interfaces;
using UnitTest.Layers;
[TestFixture]
public class SimpleInMemConnectionTest
{
private const string address = "SimpleInMemTakesAnyRandomConnectionString";
private TransportBuilder<SimpleInMemTransport> transportBuilder;
private SimpleInMemTransport transport;
private SimpleInMemListener listener;
private SimpleInMemConnection[] connections;
[SetUp]
public void Init()
{
transportBuilder = new SimpleInMemTransportBuilder();
}
public async Task DefaultSetup(IService service, int count)
{
transport = transportBuilder.Construct();
listener = (SimpleInMemListener)transport.MakeListener(address);
listener.AddService(service);
await listener.StartAsync();
connections = new SimpleInMemConnection[count];
for (int connectionIndex = 0; connectionIndex < count; connectionIndex++)
{
connections[connectionIndex] = (SimpleInMemConnection)await transport.ConnectToAsync(address, System.Threading.CancellationToken.None);
Assert.IsTrue(connections[connectionIndex].IsConnected);
Assert.IsTrue(connections[connectionIndex].IsPaired);
}
}
[Test]
public async void TestWithServerAndClientConnections()
{
await DefaultSetup(new CalculatorService(), 1);
IEnumerator<Guid> pairIds = listener.GetPairIds().GetEnumerator();
pairIds.MoveNext();
Guid firstPair = pairIds.Current;
SimpleInMemConnection serverConnection = listener.GetConnection(firstPair, ConnectionType.Server);
SimpleInMemConnection clientConnection = listener.GetConnection(firstPair, ConnectionType.Client);
const int first = 91;
const int second = 23;
int addResult = first + second;
int subResult = first - second;
var serverProxy = new CalculatorProxy<SimpleInMemConnection>(serverConnection);
var clientProxy = new CalculatorProxy<SimpleInMemConnection>(clientConnection);
var input = new PairedInput
{
First = first,
Second = second
};
var request = new Message<PairedInput>(input);
IMessage<Output> addResponse = await clientProxy.AddAsync(request, System.Threading.CancellationToken.None);
IMessage<Output> subResponse = await clientProxy.SubtractAsync(request, System.Threading.CancellationToken.None);
Assert.IsFalse(addResponse.IsError);
Assert.IsFalse(subResponse.IsError);
Output addOutput = addResponse.Payload.Deserialize();
Output subOutput = subResponse.Payload.Deserialize();
Assert.AreEqual(addResult, addOutput.Result);
Assert.AreEqual(subResult, subOutput.Result);
addResponse = await serverProxy.AddAsync(request, System.Threading.CancellationToken.None);
subResponse = await serverProxy.SubtractAsync(request, System.Threading.CancellationToken.None);
Assert.IsTrue(addResponse.IsError);
Assert.IsTrue(subResponse.IsError);
Error addError = addResponse.Error.Deserialize();
Error subError = subResponse.Error.Deserialize();
Assert.AreEqual((int)ErrorCode.MethodNotFound, (int)addError.error_code);
Assert.AreEqual("Got request for unknown method [unittest.simpleinmem.Calculator.Add].", addError.message);
Assert.AreEqual((int)ErrorCode.MethodNotFound, (int)subError.error_code);
Assert.AreEqual("Got request for unknown method [unittest.simpleinmem.Calculator.Subtract].", subError.message);
}
[Test]
public async void ConnectionStateCycle()
{
await DefaultSetup(new CalculatorService(), 1);
SimpleInMemConnection localConnection =
(SimpleInMemConnection)await transport.ConnectToAsync(address, System.Threading.CancellationToken.None);
Assert.AreEqual(localConnection.State, CnxState.Connected);
await localConnection.StopAsync();
Assert.AreEqual(localConnection.State, CnxState.Disconnected);
}
[Test]
public async void ConnectionStateCycle_CloseAlreadyClosedConnection()
{
await DefaultSetup(new CalculatorService(), 1);
SimpleInMemConnection localConnection =
(SimpleInMemConnection)await transport.ConnectToAsync(address, System.Threading.CancellationToken.None);
Assert.AreEqual(localConnection.State, CnxState.Connected);
// Ensure that closing an already closed connection is no-op
for (int index = 0; index < 5; index++)
{
await localConnection.StopAsync();
Assert.AreEqual(localConnection.State, CnxState.Disconnected);
}
}
[Test]
public async void ValidSetup()
{
await DefaultSetup(new CalculatorService(), 1);
Assert.AreEqual(connections[0].ConnectionType, ConnectionType.Client);
}
[Test]
public async Task MethodCall()
{
await DefaultSetup(new CalculatorService(), 1);
const int first = 91;
const int second = 23;
int addResult = first + second;
int subResult = first - second;
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(connections[0]);
var input = new PairedInput
{
First = first,
Second = second
};
var request = new Message<PairedInput>(input);
IMessage<Output> addResponse = await calculatorProxy.AddAsync(request, System.Threading.CancellationToken.None);
IMessage<Output> subResponse = await calculatorProxy.SubtractAsync(request, System.Threading.CancellationToken.None);
Output addOutput = addResponse.Payload.Deserialize();
Output subOutput = subResponse.Payload.Deserialize();
Assert.AreEqual(addResult, addOutput.Result);
Assert.AreEqual(subResult, subOutput.Result);
}
[Test]
public async void EventCall()
{
await DefaultSetup(new CalculatorService(), 1);
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(connections[0]);
calculatorProxy.ClearAsync();
bool wasSignaled = CalculatorService.ClearCalledEvent.WaitOne(30000);
Assert.IsTrue(wasSignaled, "Timed out waiting for event");
}
[Test]
public async Task MultipleClientConnectionsEventCalls()
{
await DefaultSetup(new CalculatorService(), 10);
Task[] connectionTasks = new Task[connections.Length];
const int taskCount = 25;
for (int connectionIndex = 0; connectionIndex < connections.Length; connectionIndex++)
{
SimpleInMemConnection conn = connections[connectionIndex];
connectionTasks[connectionIndex] = Task.Run(() =>
{
Task[] tasks = new Task[taskCount];
for (int taskIndex = 0; taskIndex < taskCount; taskIndex++)
{
tasks[taskIndex] = Task.Run(() =>
{
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(conn);
calculatorProxy.IncrementCountAsync();
});
}
Task.WaitAll(tasks);
});
}
Task.WaitAll(connectionTasks);
int totalWaitTime = 0;
const int maxWait = 5000;
// Intentionally avoiding exponential back-off due to simple nature of this test
const int incrementalWait = 500;
while (totalWaitTime < maxWait)
{
await Task.Delay(incrementalWait);
totalWaitTime += incrementalWait;
try
{
Assert.AreEqual(CalculatorService.Count, connections.Length * taskCount);
break;
}
catch (AssertionException)
{
// The implementation of SimpleInMem can guarantee delivery of events just be virtue of staying with in the process boundary.
// SimpleInMem event failing after 5 seconds needs to raise alarm and investigation.
if (totalWaitTime > maxWait)
{
throw;
}
}
}
Console.WriteLine($"{nameof(MultipleClientConnectionsEventCalls)} - Count: {CalculatorService.Count}");
}
[Test]
public async Task MultipleClientConnectionsMethodCalls()
{
Stopwatch sw = Stopwatch.StartNew();
await DefaultSetup(new CalculatorService(), 10);
Task[] connectionTasks = new Task[connections.Length];
for (int connectionIndex = 0; connectionIndex < connections.Length; connectionIndex++)
{
SimpleInMemConnection conn = connections[connectionIndex];
connectionTasks[connectionIndex] = Task.Run(() =>
{
int taskCount = 25;
Task[] tasks = new Task[taskCount];
for (int taskIndex = 0; taskIndex < taskCount; taskIndex++)
{
tasks[taskIndex] = Task.Run(async () =>
{
Random rand = new Random(DateTime.UtcNow.Millisecond);
int first = rand.Next(1, 100);
int second = rand.Next(1, 50);
int expectedAddResult = first + second;
int expectedSubResult = first - second;
var addTraceId = Guid.NewGuid().ToString();
var subTraceId = Guid.NewGuid().ToString();
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(conn);
var addInput = new PairedInput
{
First = first,
Second = second,
TraceId = addTraceId
};
var subInput = new PairedInput
{
First = first,
Second = second,
TraceId = subTraceId
};
Message<PairedInput> addRequest = new Message<PairedInput>(addInput);
Message<PairedInput> subRequest = new Message<PairedInput>(subInput);
IMessage<Output> addResponse = await calculatorProxy.AddAsync(addRequest, System.Threading.CancellationToken.None);
IMessage<Output> subResponse = await calculatorProxy.SubtractAsync(subRequest, System.Threading.CancellationToken.None);
Output addOutput = addResponse.Payload.Deserialize();
Output subOutput = subResponse.Payload.Deserialize();
Assert.AreEqual(expectedAddResult, addOutput.Result);
Assert.AreEqual(addInput.TraceId, addOutput.TraceId);
Assert.AreEqual(expectedSubResult, subOutput.Result);
Assert.AreEqual(subInput.TraceId, subOutput.TraceId);
});
}
Task.WaitAll(tasks);
});
}
Task.WaitAll(connectionTasks);
sw.Stop();
Console.WriteLine($"{nameof(MultipleClientConnectionsMethodCalls)} - test time: {sw.Elapsed.TotalSeconds}");
}
[Test]
public async Task MethodCall_WithServiceError()
{
await DefaultSetup(new CalculatorService(), 1);
const int first = 91;
const int second = 23;
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(connections[0]);
var input = new PairedInput
{
First = first,
Second = second
};
var request = new Message<PairedInput>(input);
IMessage<Output> multiplyResponse = await calculatorProxy.MultiplyAsync(request, System.Threading.CancellationToken.None);
Assert.IsTrue(multiplyResponse.IsError);
InternalServerError error = multiplyResponse.Error.Deserialize<InternalServerError>();
Assert.AreEqual((int)ErrorCode.InternalServerError, error.error_code);
Assert.That(error.message, Is.StringContaining(Errors.InternalErrorMessage));
}
[Test]
public async Task MethodCall_WithMethodNotFound()
{
await DefaultSetup(new CalculatorService(), 1);
const int first = 91;
const int second = 23;
const string methodName = "Divide";
var input = new PairedInput
{
First = first,
Second = second
};
var request = new Message<PairedInput>(input);
IMessage<Output> divideResponse = await connections[0].RequestResponseAsync<PairedInput, Output>(methodName, request, new System.Threading.CancellationToken());
Assert.IsTrue(divideResponse.IsError);
Error error = divideResponse.Error.Deserialize<Error>();
Assert.AreEqual((int)ErrorCode.MethodNotFound, error.error_code);
Assert.That(error.message, Is.StringContaining($"Got request for unknown method [{methodName}]."));
}
[Test]
public async Task MethodCall_WithLayerStack()
{
var testList = new List<string>();
var layer1 = new TestLayer_Append("foo", testList);
var layer2 = new TestLayer_Append("bar", testList);
transportBuilder.SetLayerStackProvider(new LayerStackProvider<Dummy>(LoggerTests.BlackHole, layer1, layer2));
await DefaultSetup(new CalculatorService(), 1);
const int first = 91;
const int second = 23;
int addResult = first + second;
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(connections[0]);
var input = new PairedInput
{
First = first,
Second = second
};
var request = new Message<PairedInput>(input);
IMessage<Output> addResponse = await calculatorProxy.AddAsync(request, System.Threading.CancellationToken.None);
Output addOutput = addResponse.Payload.Deserialize();
Assert.AreEqual(addResult, addOutput.Result);
Assert.AreEqual(8, testList.Count);
Assert.AreEqual(layer1.value, testList[0]);
Assert.AreEqual(testList[0] + layer2.value, testList[1]);
Assert.AreEqual(testList[1] + layer2.value, testList[2]);
Assert.AreEqual(testList[2] + layer1.value, testList[3]);
Assert.AreEqual(layer1.value, testList[4]);
Assert.AreEqual(testList[4] + layer2.value, testList[5]);
Assert.AreEqual(testList[5] + layer2.value, testList[6]);
Assert.AreEqual(testList[6] + layer1.value, testList[7]);
}
[Test]
public async Task MethodCall_ReqRsp_WithStatefulLayers()
{
var layerProvider = new TestLayerProvider_StatefulAppend("Layer");
var layerStackProvider = new LayerStackProvider<Dummy>(LoggerTests.BlackHole, layerProvider);
transportBuilder.SetLayerStackProvider(layerStackProvider);
await DefaultSetup(new CalculatorService(), 1);
layerProvider.Layers.Clear();
var calculatorProxy = new CalculatorProxy<SimpleInMemConnection>(connections[0]);
var request = new Message<PairedInput>(new PairedInput { First = 1, Second = 2 });
IMessage<Output> response = await calculatorProxy.AddAsync(request, CancellationToken.None);
Assert.IsFalse(response.IsError);
Assert.AreEqual(2, layerProvider.Layers.Count);
Assert.AreEqual("Layer0SendLayer0Receive", layerProvider.Layers[0].State);
Assert.AreEqual("Layer1ReceiveLayer1Send", layerProvider.Layers[1].State);
request = new Message<PairedInput>(new PairedInput { First = 1, Second = 2 });
response = await calculatorProxy.AddAsync(request, CancellationToken.None);
Assert.IsFalse(response.IsError);
Assert.AreEqual(4, layerProvider.Layers.Count);
Assert.AreEqual("Layer2SendLayer2Receive", layerProvider.Layers[2].State);
Assert.AreEqual("Layer3ReceiveLayer3Send", layerProvider.Layers[3].State);
}
[Test]
public async Task MethodCall_ReqRsp_WithLayerStackErrors()
{
var errorLayer = new TestLayer_ReturnErrors();
transportBuilder.SetLayerStackProvider(new LayerStackProvider<Dummy>(LoggerTests.BlackHole, errorLayer));
var testService = new DummyTestService();
await DefaultSetup(testService, 1);
var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
var request = new Dummy { int_value = 100 };
errorLayer.SetState(MessageType.Request, errorOnSend: false, errorOnReceive: true);
IMessage<Dummy> response = await proxy.ReqRspMethodAsync(request);
Assert.IsTrue(response.IsError);
Error error = response.Error.Deserialize();
Assert.AreEqual(TestLayer_ReturnErrors.ReceiveError, error.error_code, "Error 1 does not match");
Assert.AreEqual(0, testService.RequestCount);
Assert.AreEqual(Dummy.Empty.int_value, testService.LastRequestReceived.int_value);
errorLayer.SetState(MessageType.Request, errorOnSend: true, errorOnReceive: false);
request.int_value = 101;
response = await proxy.ReqRspMethodAsync(request);
Assert.IsTrue(response.IsError);
error = response.Error.Deserialize();
Assert.AreEqual(TestLayer_ReturnErrors.SendError, error.error_code, "Error 2 does not match");
Assert.AreEqual(0, testService.RequestCount);
Assert.AreEqual(Dummy.Empty.int_value, testService.LastRequestReceived.int_value);
errorLayer.SetState(MessageType.Response, errorOnSend: true, errorOnReceive: false);
request.int_value = 102;
response = await proxy.ReqRspMethodAsync(request);
Assert.IsTrue(response.IsError);
error = response.Error.Deserialize();
Assert.AreEqual(TestLayer_ReturnErrors.SendError, error.error_code, "Error 3 does not match");
Assert.AreEqual(1, testService.RequestCount);
Assert.AreEqual(request.int_value, testService.LastRequestReceived.int_value);
errorLayer.SetState(MessageType.Response, errorOnSend: false, errorOnReceive: true);
request.int_value = 103;
response = await proxy.ReqRspMethodAsync(request);
Assert.IsTrue(response.IsError);
error = response.Error.Deserialize();
Assert.AreEqual(TestLayer_ReturnErrors.ReceiveError, error.error_code, "Error 4 does not match");
Assert.AreEqual(2, testService.RequestCount);
Assert.AreEqual(request.int_value, testService.LastRequestReceived.int_value);
errorLayer.SetState(MessageType.Event, errorOnSend: true, errorOnReceive: true);
request.int_value = 104;
response = await proxy.ReqRspMethodAsync(request);
Assert.IsFalse(response.IsError);
Assert.AreEqual(105, response.Payload.Deserialize().int_value);
Assert.AreEqual(3, testService.RequestCount);
Assert.AreEqual(request.int_value, testService.LastRequestReceived.int_value);
}
[Test]
public async Task MethodCall_ReqRsp_FailingLayerStackProvider_ClientSendReq()
{
// Fail after 2 successful GetLayerStack calls (1 on client, 1 on server)
transportBuilder.SetLayerStackProvider(new TestLayerStackProvider_Fails(2));
var testService = new DummyTestService();
await DefaultSetup(testService, 1);
var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
var request = new Dummy { int_value = 100 };
IMessage<Dummy> response = await proxy.ReqRspMethodAsync(request);
Assert.IsFalse(response.IsError);
Assert.AreEqual(101, response.Payload.Deserialize().int_value);
request.int_value = 101;
response = await proxy.ReqRspMethodAsync(request);
Assert.IsTrue(response.IsError);
Error error = response.Error.Deserialize();
Assert.AreEqual((int)ErrorCode.InternalServerError, error.error_code);
Assert.AreEqual(TestLayerStackProvider_Fails.InternalDetails, error.message);
}
[Test]
public async Task MethodCall_ReqRsp_FailingLayerStackProvider_ServerReceiveReq()
{
// Fail after 3 successful GetLayerStack calls (2 on client, 1 on server)
transportBuilder.SetLayerStackProvider(new TestLayerStackProvider_Fails(3));
var testService = new DummyTestService();
await DefaultSetup(testService, 1);
var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
var request = new Dummy { int_value = 100 };
IMessage<Dummy> response = await proxy.ReqRspMethodAsync(request);
Assert.IsFalse(response.IsError);
Assert.AreEqual(101, response.Payload.Deserialize().int_value);
request.int_value = 101;
response = await proxy.ReqRspMethodAsync(request);
Assert.IsTrue(response.IsError);
Error error = response.Error.Deserialize();
Assert.AreEqual((int)ErrorCode.InternalServerError, error.error_code);
Assert.AreEqual(Errors.InternalErrorMessage, error.message);
}
[Test]
public async Task MethodCall_Event_With_StatefulLayers()
{
var layerProvider = new TestLayerProvider_StatefulAppend("Layer");
var layerStackProvider = new LayerStackProvider<Dummy>(LoggerTests.BlackHole, layerProvider);
transportBuilder.SetLayerStackProvider(layerStackProvider);
var testService = new DummyTestService();
await DefaultSetup(testService, 1);
var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
var theEvent = new Dummy { int_value = 100 };
layerProvider.Layers.Clear();
ManualResetEventSlim waitForEvent = testService.CreateResetEvent();
proxy.EventMethodAsync(theEvent);
bool wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
Assert.IsTrue(wasSignaled, "Timed out waiting for event to fire");
testService.ClearResetEvent();
Assert.AreEqual(1, testService.EventCount);
Assert.AreEqual(theEvent.int_value, testService.LastEventReceived.int_value);
Assert.AreEqual(2, layerProvider.Layers.Count);
Assert.AreEqual("Layer0Send", layerProvider.Layers[0].State);
Assert.AreEqual("Layer1Receive", layerProvider.Layers[1].State);
}
[Test]
public async Task MethodCall_Event_WithLayerStackErrors()
{
var errorLayer = new TestLayer_ReturnErrors();
transportBuilder.SetLayerStackProvider(new LayerStackProvider<Dummy>(LoggerTests.BlackHole, errorLayer));
var testService = new DummyTestService();
await DefaultSetup(testService, 1);
var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
var theEvent = new Dummy { int_value = 100 };
errorLayer.SetState(MessageType.Event, errorOnSend: false, errorOnReceive: true);
ManualResetEventSlim waitForEvent = testService.CreateResetEvent();
proxy.EventMethodAsync(theEvent);
bool wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
Assert.IsFalse(wasSignaled, "Event should not fire 1");
testService.ClearResetEvent();
Assert.AreEqual(0, testService.EventCount);
Assert.AreEqual(Dummy.Empty.int_value, testService.LastEventReceived.int_value);
errorLayer.SetState(MessageType.Event, errorOnSend: true, errorOnReceive: false);
theEvent.int_value = 101;
waitForEvent = testService.CreateResetEvent();
proxy.EventMethodAsync(theEvent);
wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
Assert.IsFalse(wasSignaled, "Event should not fire 2");
testService.ClearResetEvent();
Assert.AreEqual(0, testService.EventCount);
Assert.AreEqual(Dummy.Empty.int_value, testService.LastEventReceived.int_value);
errorLayer.SetState(MessageType.Event, errorOnSend: false, errorOnReceive: false);
theEvent.int_value = 102;
waitForEvent = testService.CreateResetEvent();
proxy.EventMethodAsync(theEvent);
wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
Assert.IsTrue(wasSignaled, "Timed out waiting for event to fire");
Assert.AreEqual(1, testService.EventCount);
Assert.AreEqual(theEvent.int_value, testService.LastEventReceived.int_value);
}
[Test]
public async Task MethodCall_Event_FailingLayerStackProvider_ClientSendEvent()
{
    // Provider fails after 2 successful GetLayerStack calls (1 on client, 1 on
    // server), so the first event round-trips and the second is dropped on the
    // client's send path before it ever reaches the server.
    // Fail after 2 successful GetLayerStack calls (1 on client, 1 on server)
    transportBuilder.SetLayerStackProvider(new TestLayerStackProvider_Fails(2));
    var testService = new DummyTestService();
    await DefaultSetup(testService, 1);

    var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
    var theEvent = new Dummy { int_value = 100 };

    // First event: layer stacks still succeed on both sides, so it is delivered.
    ManualResetEventSlim waitForEvent = testService.CreateResetEvent();
    proxy.EventMethodAsync(theEvent);
    bool wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
    Assert.IsTrue(wasSignaled, "Timed out waiting for event to fire");
    Assert.AreEqual(1, testService.EventCount);
    Assert.AreEqual(theEvent.int_value, testService.LastEventReceived.int_value);

    // Second event: the provider now fails on the client, so the wait must time
    // out and the service-side counters must be unchanged.
    waitForEvent = testService.CreateResetEvent();
    proxy.EventMethodAsync(theEvent);
    wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
    Assert.IsFalse(wasSignaled, "Event should not fire 2");
    Assert.AreEqual(1, testService.EventCount);
}
[Test]
public async Task MethodCall_Event_FailingLayerStackProvider_ServerReceiveEvent()
{
    // Provider fails after 3 successful GetLayerStack calls (2 on client,
    // 1 on server): the first event round-trips, the second is sent by the
    // client but dropped on the server's receive path.
    // Fail after 3 successful GetLayerStack calls (2 on client, 1 on server)
    transportBuilder.SetLayerStackProvider(new TestLayerStackProvider_Fails(3));
    var testService = new DummyTestService();
    await DefaultSetup(testService, 1);

    var proxy = new DummyTestProxy<SimpleInMemConnection>(connections[0]);
    var theEvent = new Dummy { int_value = 100 };

    // First event: delivered normally.
    ManualResetEventSlim waitForEvent = testService.CreateResetEvent();
    proxy.EventMethodAsync(theEvent);
    bool wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
    Assert.IsTrue(wasSignaled, "Timed out waiting for event to fire");
    Assert.AreEqual(1, testService.EventCount);
    Assert.AreEqual(theEvent.int_value, testService.LastEventReceived.int_value);

    // Second event: dropped server-side, so the handler must not run again.
    waitForEvent = testService.CreateResetEvent();
    proxy.EventMethodAsync(theEvent);
    wasSignaled = waitForEvent.Wait(TimeSpan.FromSeconds(1));
    Assert.IsFalse(wasSignaled, "Event should not fire 2");
    Assert.AreEqual(1, testService.EventCount);
}
[TearDown]
public async Task Cleanup()
{
    // Was `async void`: NUnit cannot await an async-void teardown, so the test
    // could finish (and the next one start) before the connections were torn
    // down, and any exception thrown here would be unobservable. Returning
    // Task lets the framework await completion and surface failures.
    if (connections != null)
    {
        for (int connectionIndex = 0; connectionIndex < connections.Length; connectionIndex++)
        {
            await connections[connectionIndex].StopAsync();
            Assert.IsFalse(connections[connectionIndex].IsConnected);
            Assert.IsFalse(connections[connectionIndex].IsPaired);
        }
    }
    connections = null;
    // Guard against a test that failed before the transport was created.
    if (transport != null)
    {
        await transport.StopAsync();
    }
}
}
}
| |
/*
Microsoft Automatic Graph Layout,MSAGL
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
""Software""), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Globalization;
using System.Linq;
using System.Windows.Forms;
using Microsoft.Msagl.Core.DataStructures;
using Microsoft.Msagl.Core.Geometry.Curves;
using Microsoft.Msagl.Core.GraphAlgorithms;
using Microsoft.Msagl.Core.Layout;
using Microsoft.Msagl.DebugHelpers;
using Microsoft.Msagl.Drawing;
using Microsoft.Msagl.Layout.Layered;
using BBox = Microsoft.Msagl.Core.Geometry.Rectangle;
using Color=System.Drawing.Color;
using DrawingGraph = Microsoft.Msagl.Drawing.Graph;
using GeometryEdge = Microsoft.Msagl.Core.Layout.Edge;
using GeometryNode = Microsoft.Msagl.Core.Layout.Node;
using DrawingEdge = Microsoft.Msagl.Drawing.Edge;
using DrawingNode = Microsoft.Msagl.Drawing.Node;
using P2 = Microsoft.Msagl.Core.Geometry.Point;
namespace Microsoft.Msagl.GraphViewerGdi{
/// <summary>
/// exposes some drawing functionality
/// </summary>
public sealed class Draw{
/// <summary>
/// private constructor — Draw is a stateless utility holder and is never instantiated
/// </summary>
Draw(){
}

// Diameter ratio of the inner circle relative to the outer one when a
// DoubleCircle node shape is rendered (see DrawDoubleCircle).
static double doubleCircleOffsetRatio = 0.9;

internal static double DoubleCircleOffsetRatio{
    get { return doubleCircleOffsetRatio; }
}

// Default dash length used when building dash patterns for pens.
internal static float dashSize = 0.05f; //inches
/// <summary>
/// Converts an MSAGL color to a GDI+ color, copying the ARGB channels verbatim.
/// </summary>
/// <param name="gleeColor">the MSAGL color to convert</param>
/// <returns>the equivalent System.Drawing.Color</returns>
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Msagl")]
public static Color MsaglColorToDrawingColor(Drawing.Color gleeColor){
    return Color.FromArgb(gleeColor.A, gleeColor.R, gleeColor.G, gleeColor.B);
}
/// <summary>
/// Renders a previously computed layout object onto any Graphics surface.
/// Anything that is not a DGraph is silently ignored.
/// </summary>
/// <param name="graphics">target GDI+ surface</param>
/// <param name="precalculatedObject">expected to be a DGraph produced earlier</param>
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "precalculated"),
 SuppressMessage("Microsoft.Naming", "CA1720:IdentifiersShouldNotContainTypeNames", MessageId = "object"),
 SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Precalculated")]
public static void DrawPrecalculatedLayoutObject(Graphics graphics, object precalculatedObject){
    var dg = precalculatedObject as DGraph;
    if (dg == null)
        return;
    dg.DrawGraph(graphics);
}
#if TEST_MSAGL
// Renders the graph's debug overlays: first the raw debug ICurves, then the
// DebugCurves, which carry their own color/width/dash/label metadata.
internal static void DrawDebugStuff(Graphics g, DGraph graphToDraw, Pen myPen){
    if(graphToDraw.DrawingGraph.DebugICurves != null){
        foreach (ICurve c in graphToDraw.DrawingGraph.DebugICurves){
            DrawDebugICurve(graphToDraw, c, myPen, g);
        }
    }
    if(graphToDraw.DrawingGraph.DebugCurves != null){
        foreach (DebugCurve shape in graphToDraw.DrawingGraph.DebugCurves)
            DrawDebugCurve(graphToDraw, shape, g);
    }
}
// Draws one DebugCurve with a pen/brush built from its string-encoded colors.
// When the curve declares a dash array the pen gets a custom dash pattern;
// the first dash length also becomes the dash offset.
static void DrawDebugCurve(DGraph graph, DebugCurve debugCurve, Graphics graphics) {
    using (var pen = new Pen(GetColorFromString(debugCurve), (float) debugCurve.Width))
    using (var brush = new SolidBrush(GetFillColorFromString(debugCurve))){
        if (debugCurve.DashArray != null) {
            pen.DashStyle = DashStyle.Dash;
            pen.DashPattern = CreateDashArray(debugCurve.DashArray);
            pen.DashOffset = pen.DashPattern[0];
        }
        DrawDebugCurve(graph, debugCurve.Curve, pen, brush, graphics, debugCurve.Label);
    }
}
// Pen.DashPattern requires floats; convert the double-valued dash array element-wise.
static float[] CreateDashArray(double[] dashArray) {
    var converted = new float[dashArray.Length];
    int k = 0;
    foreach (double dash in dashArray)
        converted[k++] = (float)dash;
    return converted;
}
// Parses a color spec (name, #RRGGBB, #RRGGBBAA or "H S V" triple) into an MSAGL color.
static Drawing.Color StringToMsaglColor(string val) {
    return DrawingColorToGLEEColor(GetDrawingColorFromString(val));
}

// Converts a GDI+ color into MSAGL's drawing color, channel for channel.
internal static Drawing.Color DrawingColorToGLEEColor(Color drawingColor) {
    return new Drawing.Color(drawingColor.A, drawingColor.R, drawingColor.G, drawingColor.B);
}
// Safe indexer over the split color components: out-of-range reads yield "",
// which GetNumber later maps to 0.
static string Get(string[] a, int i) {
    return i >= a.Length ? "" : a[i];
}
// Parses a GraphViz-style color string into a GDI+ color. Three formats:
//   "H S V"       — whitespace/comma separated hue-saturation-value triple in [0,1]
//   "#RRGGBB(AA)" — hex channels, with an optional trailing alpha pair
//   anything else — a named color (unknown names fall back to black)
// Any parse failure falls through to FromNameOrBlack via the catch.
static Color GetDrawingColorFromString(string val) {
    //                object o=FindInColorTable(val);
    //                if(o==null) //could be an RGB color
    try {
        if (val.IndexOf(" ") != -1) {
            // HSV triple; missing components read as "" and become 0.
            string[] nums = Split(val);
            double H = GetNumber(Get(nums, 0)); //hue
            double S = GetNumber(Get(nums, 1)); //saturation
            double V = GetNumber(Get(nums, 2)); //value
            double r, g, b;
            H *= 360.0; // hue arrives in [0,1]; convert to degrees
            if (S == 0) r = g = b = V; // grey: no saturation
            else {
                // standard HSV-to-RGB sector computation
                int Hi = ((int)(H + 0.5)) / 60;
                double f = H / 60.0 - Hi;
                double p = V * (1.0 - S);
                double q = V * (1.0 - (S * f));
                double t = V * (1.0 - (S * (1.0 - f)));
                if (Hi == 0) {
                    r = V;
                    g = t;
                    b = p;
                } else if (Hi == 1) {
                    r = q;
                    g = V;
                    b = p;
                } else if (Hi == 2) {
                    r = p;
                    g = V;
                    b = t;
                } else if (Hi == 3) {
                    r = p;
                    g = q;
                    b = V;
                } else if (Hi == 4) {
                    r = t;
                    g = p;
                    b = V;
                } else if (Hi == 5) {
                    r = V;
                    g = p;
                    b = q;
                // NOTE(review): H == 360 yields Hi == 6 and lands here; the
                // throw is swallowed by the catch below and the input is
                // retried as a color name (usually producing black).
                } else throw new Exception("unexpected value of Hi " + Hi);
            }
            return Color.FromArgb(ToByte(r), ToByte(g), ToByte(b));
        } else if (val[0] == '#') //could be #%2x%2x%2x or #%2x%2x%2x%2x
            if (val.Length == 7) {
                int r = FromX(val.Substring(1, 2));
                int g = FromX(val.Substring(3, 2));
                int b = FromX(val.Substring(5, 2));
                return Color.FromArgb(r, g, b);
            } else if (val.Length == 9) {
                // alpha is the LAST hex pair in this format
                int r = FromX(val.Substring(1, 2));
                int g = FromX(val.Substring(3, 2));
                int b = FromX(val.Substring(5, 2));
                int a = FromX(val.Substring(7, 2));
                return Color.FromArgb(a, r, g, b);
            } else
                throw new Exception("unexpected color " + val);
        else
            return FromNameOrBlack(val);
    } catch {
        // best effort: malformed specs degrade to a named lookup / black
        return FromNameOrBlack(val);
    }
}
// Parses one two-digit hex channel (e.g. "ff") using the fixed US culture
// that MSAGL attribute strings are written with.
static int FromX(string s) {
    return Int32.Parse(s, NumberStyles.AllowHexSpecifier, AttributeBase.USCultureInfo);
}
// Scales a [0,1] color channel to [0,255] with rounding, clamping anything
// outside the valid byte range.
static int ToByte(double c) {
    var scaled = (int)(255.0 * c + 0.5);
    if (scaled < 0)
        return 0;
    return scaled > 255 ? 255 : scaled;
}
// Looks up a named color; Color.FromName returns all-zero ARGB for names it
// does not recognize, in which case we fall back to black.
static Color FromNameOrBlack(string val) {
    Color named = Color.FromName(val);
    bool unrecognized = named.A == 0 && named.R == 0 && named.G == 0 && named.B == 0;
    return unrecognized ? Color.Black : named;
}
// Splits a color spec into its numeric components; any mix of whitespace,
// commas and semicolons separates them, and empty entries are dropped.
static string[] Split(string txt) {
    char[] separators = { ' ', ',', '\n', '\r', ';', '\t' };
    return txt.Split(separators, StringSplitOptions.RemoveEmptyEntries);
}
// Parses one numeric component of a color spec; "" means "missing" and maps to 0.
// Uses the invariant culture so that "0.5" parses identically on every machine —
// the previous culture-sensitive TryParse would fail (and throw) under locales
// that use ',' as the decimal separator, while FromX already pins a US culture.
static double GetNumber(string txt) {
    int i;
    double d;
    if (Int32.TryParse(txt, NumberStyles.Integer, CultureInfo.InvariantCulture, out i)) {
        return i;
    }
    if (Double.TryParse(txt, NumberStyles.Float | NumberStyles.AllowThousands,
                        CultureInfo.InvariantCulture, out d)) {
        return d;
    }
    if (txt == "") return 0; // missing component defaults to zero
    throw new Exception(String.Format("Cannot convert \"{0}\" to a number", txt));
}
// Pen color for a DebugCurve: its string-spec color with the curve's
// transparency applied as the alpha channel.
static Color GetColorFromString(DebugCurve curve){
    Drawing.Color msaglColor = StringToMsaglColor(curve.Color);
    msaglColor.A = curve.Transparency;
    return MsaglColorToDrawingColor(msaglColor);
}

// Fill color for a DebugCurve; a null/empty FillColor means "do not fill".
static Color GetFillColorFromString(DebugCurve curve) {
    if (curve.FillColor == null || curve.FillColor == "") return Color.Transparent;
    Drawing.Color msaglColor = StringToMsaglColor(curve.FillColor);
    msaglColor.A = curve.Transparency;
    return MsaglColorToDrawingColor(msaglColor);
}
// Draws one debug curve: polylines directly (filled when the brush is not
// transparent), "simple" segments through DrawSimpleSeg, and composite
// Curves / RoundedRects segment by segment. If `id` is given it is drawn as a
// label anchored at the curve's start.
static void DrawDebugCurve(DGraph graphToDraw, ICurve c, Pen myPen, SolidBrush solidBrush, Graphics g, object id){
    var p = c as Polyline;
    if (p != null)
    {
        if (solidBrush.Color != Color.Transparent)
            g.FillPolygon(solidBrush, GetPolylinePoints(p));
        if (p.Closed) {
            g.DrawPolygon(myPen, GetPolylinePoints(p));
        }
        else
            g.DrawLines(myPen, GetPolylinePoints(p));
    }
    else {
        if (SimpleSeg(c)) {
            DrawSimpleSeg(c, g, myPen, graphToDraw);
        }
        else {
            var curve = c as Curve;
            if (curve != null)
                foreach (ICurve ss in curve.Segments) {
                    DrawSimpleSeg(ss, g, myPen, graphToDraw);
                }
            else {
                var rect = c as RoundedRect;
                if (rect != null) {
                    foreach (ICurve ss in rect.Curve.Segments)
                        DrawSimpleSeg(ss, g, myPen, graphToDraw);
                }
            }
        }
    }
    if (id != null && c != null)
    {
        var s = id.ToString();
        // Dispose the label brush and font — they were leaked GDI handles before.
        using (var brush = new SolidBrush(myPen.Color))
        using (var font = new Font(FontFamily.GenericSerif, 10))
        {
            var rect = new RectangleF((float)c.Start.X, (float)c.Start.Y,
                                      (float)c.BoundingBox.Width, (float)c.BoundingBox.Height);
            DrawStringInRectCenter(g, brush, font, s, rect);
        }
    }
}
// Draws one "simple" segment (see SimpleSeg): a line, a cubic Bezier
// (optionally with its control polygon when the graph asks for it), or an
// elliptical arc. Any other type is a programming error.
static void DrawSimpleSeg(ICurve c, Graphics g, Pen myPen, DGraph graphToDraw) {
    var lineSeg = c as LineSegment;
    if (lineSeg != null) {
        g.DrawLine(myPen, (float) lineSeg.Start.X, (float) lineSeg.Start.Y, (float) lineSeg.End.X,
                   (float) lineSeg.End.Y);
    }
    else {
        var bs = c as CubicBezierSegment;
        if (bs != null) {
            g.DrawBezier(myPen, (float) bs.B(0).X, (float) bs.B(0).Y,
                         (float) bs.B(1).X, (float) bs.B(1).Y,
                         (float) bs.B(2).X, (float) bs.B(2).Y,
                         (float) bs.B(3).X, (float) bs.B(3).Y);
            if (graphToDraw.DrawingGraph.ShowControlPoints)
                DrawControlPoints(g, bs);
        }
        else {
            var el = c as Ellipse;
            if (el != null) {
                DrawArc(myPen, g, el);
            }
            else {
                // not a simple segment — callers must check SimpleSeg first
                throw new InvalidOperationException();
            }
        }
    }
}
// A "simple" segment is one DrawSimpleSeg can render with a single GDI+ call.
static bool SimpleSeg(ICurve curve){
    if (curve is LineSegment) return true;
    if (curve is Ellipse) return true;
    return curve is CubicBezierSegment;
}
// Draws a raw debug ICurve, recoloring the shared pen per segment type
// (see SetColor) and recursing into composite Curves / RoundedRects.
static void DrawDebugICurve(DGraph graphToDraw, ICurve c, Pen myPen, Graphics g) {
    var p = c as Polyline;
    if (p != null) {
        SetColor(graphToDraw, myPen, p);
        DrawPolyline(p, myPen, g);
    }
    else {
        var lineSeg = c as LineSegment;
        if (lineSeg != null) {
            SetColor(graphToDraw, myPen, lineSeg);
            DrawLine(myPen, g, lineSeg);
        }
        else {
            var bs = c as CubicBezierSegment;
            if (bs != null) {
                SetColor(graphToDraw, myPen, bs);
                DrawBezier(graphToDraw, myPen, g, bs);
            }
            else {
                var el = c as Ellipse;
                if (el != null) {
                    SetColor(graphToDraw, myPen, el);
                    DrawArc(myPen, g, el);
                }
                else {
                    var curve = c as Curve;
                    if (curve != null)
                        foreach (ICurve ss in curve.Segments)
                            DrawDebugICurve(graphToDraw, ss, myPen, g);
                    else {
                        var rect = c as RoundedRect;
                        if (rect != null) {
                            foreach (ICurve ss in rect.Curve.Segments)
                                DrawDebugICurve(graphToDraw, ss, myPen, g);
                        }
                        // any other curve type is silently skipped
                    }
                }
            }
        }
    }
}
// Draws an elliptical arc. Graphics.DrawArc throws for degenerate boxes/arcs,
// so very small arcs are approximated by a 10-segment polyline instead.
static void DrawArc(Pen pen, Graphics g, Ellipse el) {
    double sweepAngle;
    BBox box;
    float startAngle;
    GetGdiArcDimensions(el, out startAngle, out sweepAngle, out box);
    //an exception is thrown for very small arcs
    if (box.Width < 0.01 || box.Height < 0.01 || ((el.ParEnd - el.ParStart) < (Math.PI / 4) && (el.End - el.Start).Length < 0.01)) {
        g.DrawLines(pen, EllipsePoints(10, el));
    } else {
        g.DrawArc(pen,
                  (float)box.Left,
                  (float)box.Bottom,
                  (float)box.Width,
                  (float)box.Height,
                  startAngle,
                  (float)sweepAngle);
    }
}
// Samples n+1 evenly spaced parameter values along the ellipse's arc,
// producing a polyline approximation of it.
static PointF[] EllipsePoints(int n, Ellipse el) {
    var ret = new PointF[n+1];
    var del = (el.ParEnd - el.ParStart)/n;
    for(int i=0;i<=n;i++) {
        ret[i] = PP(el[el.ParStart + i*del]);
    }
    return ret;
}

// Draws a cubic Bezier segment; also shows its control polygon when the
// graph's ShowControlPoints flag is set.
static void DrawBezier(DGraph graphToDraw, Pen myPen, Graphics g, CubicBezierSegment bs){
    g.DrawBezier(myPen, (float) bs.B(0).X, (float) bs.B(0).Y,
                 (float) bs.B(1).X, (float) bs.B(1).Y,
                 (float) bs.B(2).X, (float) bs.B(2).Y,
                 (float) bs.B(3).X, (float) bs.B(3).Y);
    if(graphToDraw.DrawingGraph.ShowControlPoints)
        DrawControlPoints(g, bs);
}

// Draws a straight line segment.
static void DrawLine(Pen myPen, Graphics g, LineSegment lineSeg){
    g.DrawLine(myPen, (float) lineSeg.Start.X, (float) lineSeg.Start.Y, (float) lineSeg.End.X,
               (float) lineSeg.End.Y);
}

// Draws a polyline; closed polylines are drawn as polygons (outline only).
static void DrawPolyline(Polyline p, Pen myPen, Graphics g){
    if(p.Closed)
        g.DrawPolygon(myPen, GetPolylinePoints(p));
    //g.FillPolygon(new SolidBrush(myPen.Color), GetPolylinePoints(p));
    else
        g.DrawLines(myPen, GetPolylinePoints(p));
}
// Converts each vertex of the polyline into a GDI+ PointF.
static PointF[] GetPolylinePoints(Polyline p){
    return p.Select(pnt => new PointF((float)pnt.X, (float)pnt.Y)).ToArray();
}
// Debug palette: Bezier segments green, polylines brown, everything else blue.
static void SetColor(DGraph graphToDraw, Pen myPen, object bs){
    if (bs is CubicBezierSegment) {
        myPen.Color = Color.Green;
        return;
    }
    myPen.Color = bs is Polyline ? Color.Brown : Color.Blue;
}
[SuppressMessage("Microsoft.Globalization", "CA1305:SpecifyIFormatProvider", MessageId = "System.Int32.ToString"
    )]
// Debug rendering of the layered-layout database: numbers each anchor,
// draws the edges' underlying polylines in translucent blue, and each edge
// curve's segments in red (the polylines are overdrawn in a faded olive).
internal static void DrawDataBase(Graphics g, Pen myPen, DrawingGraph dg){
    int i = 0;
    foreach (Anchor p in dg.DataBase.Anchors)
        i = DrawAnchor(g, i, p);
    myPen.Color = Color.Blue;
    // NOTE(review): this Pen is never disposed — GDI handle leak on every call.
    Pen myOtherPen=new Pen(Color.FromArgb(100, 0,0,255), 1);
    foreach (var edges in dg.DataBase.Multiedges.Values)
        foreach (IntEdge e in edges) {
            //                    if (e.LayerEdges != null)
            //                        foreach (LayerEdge le in e.LayerEdges) {
            //                            g.DrawLine(myPen, PointF(dg.DataBase.Anchors[le.Source].Origin),
            //                                       PointF(dg.DataBase.Anchors[le.Target].Origin));
            //                        }
            if (e.Edge.UnderlyingPolyline == null) continue;
            var points = e.Edge.UnderlyingPolyline.ToArray();
            for (int j = 0; j < points.Length - 1; j++)
                g.DrawLine(myOtherPen, PointF(points[j]), PointF(points[j + 1]));
        }
    myPen.Color = Color.Red;
    if(dg.DataBase.nodesToShow == null)
        foreach (var li in dg.DataBase.Multiedges.Values)
            foreach (IntEdge ie in li)
                if(ie.Edge.Curve is Curve){
                    foreach (ICurve s in (ie.Edge.Curve as Curve).Segments){
                        var bs = s as CubicBezierSegment;
                        if(bs != null){
                            g.DrawBezier(myPen, (float) bs.B(0).X, (float) bs.B(0).Y,
                                         (float) bs.B(1).X, (float) bs.B(1).Y,
                                         (float) bs.B(2).X, (float) bs.B(2).Y,
                                         (float) bs.B(3).X, (float) bs.B(3).Y);
                        } else{
                            // NOTE(review): assumes every non-Bezier segment is a
                            // LineSegment — an Ellipse segment would make ls null
                            // and throw NullReferenceException here. Confirm the
                            // layered layout never emits arc segments.
                            var ls = s as LineSegment;
                            g.DrawLine(myPen, (float) ls.Start.X, (float) ls.Start.Y,
                                       (float) ls.End.X, (float) ls.End.Y);
                        }
                    }
                    myPen.Color = Color.FromArgb(50, 100,100,0 );
                    if (ie.Edge.UnderlyingPolyline != null)
                        foreach (LineSegment ls in ie.Edge.UnderlyingPolyline.GetSegments())
                            g.DrawLine(myPen, (float) ls.Start.X, (float) ls.Start.Y,
                                       (float) ls.End.X, (float) ls.End.Y);
                    myPen.Color = Color.Red;
                }
}
// Labels anchor number i (plus its UserData, when present) centered in the
// anchor's box; returns the next anchor index. The Font is now disposed —
// it was a leaked GDI handle per anchor before.
static int DrawAnchor(Graphics g, int i, Anchor p){
    string stringToShow = i + (p.UserData != null ? (" " + p.UserData) : String.Empty);
    using (var font = new Font(FontFamily.GenericSerif, 10))
        DrawStringInRectCenter(g, Brushes.Blue, font, stringToShow,
                               new RectangleF((float) p.Left, (float) p.Bottom,
                                              (float) p.RightAnchor + (float) p.LeftAnchor,
                                              (float) p.TopAnchor + (float) p.BottomAnchor));
    i++;
    return i;
}
// Draws the control polygon of a Bezier segment (B0-B1, B1-B2, B2-B3) with a
// thin dotted green pen; used when ShowControlPoints is enabled.
internal static void DrawControlPoints(Graphics g, CubicBezierSegment bs){
    using (var pen = new Pen(Color.Green, (float) (1.0/1000.0))){
        pen.DashPattern = new[]{1, (float) 1};
        pen.DashStyle = DashStyle.Dot;
        g.DrawLine(pen, PointF(bs.B(0)), PointF(bs.B(1)));
        g.DrawLine(pen, PointF(bs.B(1)), PointF(bs.B(2)));
        g.DrawLine(pen, PointF(bs.B(2)), PointF(bs.B(3)));
    }
}
#endif
// Applies the edge/node Style to the pen. Both Dashed and Dotted use a custom
// dash pattern sized from the object's DashSize(); Dotted emulates dots with
// a {1, f} pattern. The pattern array is cached on the DObject.
internal static void AddStyleForPen(DObject dObj, Pen myPen, Style style){
    if(style == Style.Dashed){
        myPen.DashStyle = DashStyle.Dash;
        if(dObj.DashPatternArray == null){
            float f = dObj.DashSize();
            dObj.DashPatternArray = new[]{f, f};
        }
        myPen.DashPattern = dObj.DashPatternArray;
        myPen.DashOffset = dObj.DashPatternArray[0];
    } else if(style == Style.Dotted){
        myPen.DashStyle = DashStyle.Dash;
        if(dObj.DashPatternArray == null){
            float f = dObj.DashSize();
            dObj.DashPatternArray = new[]{1, f};
        }
        myPen.DashPattern = dObj.DashPatternArray;
    }
}
// Draws the arrowheads at both ends of an edge, when the edge requests them.
internal static void DrawEdgeArrows(Graphics g, DrawingEdge edge, Color edgeColor, Pen myPen){
    ArrowAtTheEnd(g, edge, edgeColor, myPen);
    ArrawAtTheBeginning(g, edge, edgeColor, myPen);
}

// NOTE(review): the name is a typo for "ArrowAtTheBeginning"; kept unchanged
// because it is called from DrawEdgeArrows above.
static void ArrawAtTheBeginning(Graphics g, DrawingEdge edge, Color edgeColor, Pen myPen){
    if(edge.GeometryEdge != null && edge.Attr.ArrowAtSource)
        DrawArrowAtTheBeginningWithControlPoints(g, edge, edgeColor, myPen);
}
// Renders the source-side arrowhead: style None draws only the connecting
// line, any other style fills the arrowhead shape in the edge color.
static void DrawArrowAtTheBeginningWithControlPoints(Graphics g, DrawingEdge edge, Color edgeColor, Pen myPen){
    if(edge.EdgeCurve != null)
        if(edge.Attr.ArrowheadAtSource == ArrowStyle.None)
            DrawLine(g, myPen, edge.EdgeCurve.Start,
                     edge.ArrowAtSourcePosition);
        else
            using (var sb = new SolidBrush(edgeColor))
                DrawArrow(g, sb, edge.EdgeCurve.Start,
                          edge.ArrowAtSourcePosition, edge.Attr.LineWidth, edge.Attr.ArrowheadAtSource);
}
// Draws the target-side arrowhead when the edge requests one.
static void ArrowAtTheEnd(Graphics g, DrawingEdge edge, Color edgeColor, Pen myPen){
    if(edge.GeometryEdge != null && edge.Attr.ArrowAtTarget)
        DrawArrowAtTheEndWithControlPoints(g, edge, edgeColor, myPen);
}

// radians-to-degrees factor
const float toDegrees = 180/(float) Math.PI;

// Renders the target-side arrowhead: style None draws only the connecting
// line, any other style fills the arrowhead shape in the edge color.
static void DrawArrowAtTheEndWithControlPoints(Graphics g, DrawingEdge edge, Color edgeColor, Pen myPen){
    if(edge.EdgeCurve != null)
        if(edge.Attr.ArrowheadAtTarget == ArrowStyle.None)
            DrawLine(g, myPen, edge.EdgeCurve.End,
                     edge.ArrowAtTargetPosition);
        else
            using (var sb = new SolidBrush(edgeColor))
                DrawArrow(g, sb, edge.EdgeCurve.End,
                          edge.ArrowAtTargetPosition, edge.Attr.LineWidth, edge.Attr.ArrowheadAtTarget);
}
/// <summary>
/// Translates an MSAGL curve into a GDI+ GraphicsPath: composite Curves and
/// RoundedRects segment by segment, line/Bezier/ellipse segments directly,
/// polylines as (possibly closed) line sequences.
/// </summary>
/// <param name="iCurve">the curve to translate; may be null</param>
/// <returns>a new GraphicsPath the caller owns, or null when iCurve is null</returns>
public static GraphicsPath CreateGraphicsPath(ICurve iCurve){
    // Check for null before allocating, so we do not create (and abandon)
    // a disposable GraphicsPath when there is nothing to translate.
    if(iCurve == null)
        return null;
    var graphicsPath = new GraphicsPath();
    var c = iCurve as Curve;
    if (c != null)
        HandleCurve(c, graphicsPath);
    else {
        var ls = iCurve as LineSegment;
        if (ls != null)
            graphicsPath.AddLine(PointF(ls.Start), PointF(ls.End));
        else {
            var seg = iCurve as CubicBezierSegment;
            if (seg != null)
                graphicsPath.AddBezier(PointF(seg.B(0)), PointF(seg.B(1)), PointF(seg.B(2)), PointF(seg.B(3)));
            else {
                var ellipse = iCurve as Ellipse;
                if (ellipse != null)
                    AddEllipseSeg(graphicsPath, iCurve as Ellipse);
                else {
                    var poly = iCurve as Polyline;
                    if (poly != null) HandlePolyline(poly, graphicsPath);
                    else {
                        // last supported shape; anything else is a programming error
                        var rr = (RoundedRect) iCurve;
                        HandleCurve(rr.Curve, graphicsPath);
                    }
                }
            }
        }
    }
    return graphicsPath;
}
// Appends the polyline's vertices to the path; closed polylines close the figure.
static void HandlePolyline(Polyline poly, GraphicsPath graphicsPath) {
    PointF[] vertices = poly.Select(PointF).ToArray();
    graphicsPath.AddLines(vertices);
    if (poly.Closed)
        graphicsPath.CloseFigure();
}
// Appends every segment of a composite Curve to the path.
// NOTE(review): the final else assumes any non-Bezier, non-line segment is an
// Ellipse; a different segment type would pass null to AddEllipseSeg and throw.
static void HandleCurve(Curve c, GraphicsPath graphicsPath) {
    foreach (ICurve seg in c.Segments){
        var cubic = seg as CubicBezierSegment;
        if(cubic != null)
            graphicsPath.AddBezier(PointF(cubic.B(0)), PointF(cubic.B(1)), PointF(cubic.B(2)),
                                   PointF(cubic.B(3)));
        else{
            var ls = seg as LineSegment;
            if(ls != null)
                graphicsPath.AddLine(PointF(ls.Start), PointF(ls.End));
            else {
                var el = seg as Ellipse;
                //                    double del = (el.ParEnd - el.ParStart)/11.0;
                //                    graphicsPath.AddLines(Enumerable.Range(1, 10).Select(i => el[el.ParStart + del*i]).
                //                        Select(p => new PointF((float) p.X, (float) p.Y)).ToArray());
                AddEllipseSeg(graphicsPath, el);
            }
        }
    }
}
// Appends an elliptical arc to the path, using the GDI+ start/sweep-angle
// representation computed by GetGdiArcDimensions.
static void AddEllipseSeg(GraphicsPath graphicsPath, Ellipse el) {
    double sweepAngle;
    BBox box;
    float startAngle;
    GetGdiArcDimensions(el, out startAngle, out sweepAngle, out box);
    graphicsPath.AddArc((float) box.Left,
                        (float) box.Bottom,
                        (float) box.Width,
                        (float) box.Height,
                        startAngle,
                        (float)sweepAngle);
}
// Builds the control-point polyline (B0..B3) of a Bezier segment as a path.
// NOTE(review): parameter `t` is unused; kept for signature compatibility.
static GraphicsPath CreateControlPointPolygon(Tuple<double, double> t, CubicBezierSegment cubic){
    var gp = new GraphicsPath();
    gp.AddLines(new[]{PP(cubic.B(0)), PP(cubic.B(1)), PP(cubic.B(2)), PP(cubic.B(3))});
    return gp;
}

// MSAGL point -> GDI+ PointF (duplicates PointF(P2) further down in this class).
static PointF PP(P2 point){
    return new PointF((float) point.X, (float) point.Y);
}
// Marks a curvature point of a Bezier segment with a fixed-radius circle.
// Only t.Item1 (the parameter value) is used; t.Item2 is ignored.
static GraphicsPath CreatePathOnCurvaturePoint(Tuple<double, double> t, CubicBezierSegment cubic){
    var gp = new GraphicsPath();
    P2 center = cubic[t.Item1];
    int radius = 10; // marker radius in graph units
    gp.AddEllipse((float) (center.X - radius), (float) (center.Y - radius),
                  (2*radius), (2*radius));
    return gp;
}
// A shape is filled unless its fill color is fully transparent (alpha == 0).
static bool NeedToFill(Color fillColor){
    return fillColor.A > 0;
}
// Draws a DoubleCircle node: the outer ellipse (filled when requested) plus
// an inner ellipse inset by DoubleCircleOffsetRatio. The fill brush is now
// disposed — it was a leaked GDI handle before.
internal static void DrawDoubleCircle(Graphics g, Pen pen, DNode dNode) {
    var drNode = dNode.DrawingNode;
    double x = drNode.GeometryNode.Center.X - drNode.GeometryNode.Width/2.0f;
    double y = drNode.GeometryNode.Center.Y - drNode.GeometryNode.Height / 2.0f;
    if(NeedToFill(dNode.FillColor)){
        using (var brush = new SolidBrush(dNode.FillColor))
            g.FillEllipse(brush, (float) x, (float) y, (float) drNode.Width,
                          (float) drNode.Height);
    }
    g.DrawEllipse(pen, (float) x, (float) y, (float) drNode.Width, (float) drNode.Height);
    // inner circle: inset both axes by (1 - ratio) of the larger dimension
    var w = (float) drNode.Width;
    var h = (float) drNode.Height;
    float m = Math.Max(w, h);
    float coeff = (float) 1.0 - (float) (DoubleCircleOffsetRatio);
    x += coeff*m/2.0;
    y += coeff*m/2.0;
    g.DrawEllipse(pen, (float) x, (float) y, w - coeff*m, h - coeff*m);
}
// GDI+ fill color of a node, converted from its MSAGL attribute color.
static Color FillColor(NodeAttr nodeAttr){
    return MsaglColorToDrawingColor(nodeAttr.FillColor);
}
///<summary>
/// half opening angle of a standard arrowhead, in degrees
///</summary>
internal const double ArrowAngle = 25.0; //degrees

// Dispatches to the arrowhead renderer matching the requested style;
// NonSpecified is treated as Normal.
internal static void DrawArrow(Graphics g, Brush brush, P2 start, P2 end, double lineWidth,
                               ArrowStyle arrowStyle){
    switch (arrowStyle){
        case ArrowStyle.NonSpecified:
        case ArrowStyle.Normal:
            DrawNormalArrow(g, brush, ref start, ref end, lineWidth);
            break;
        case ArrowStyle.Tee:
            DrawTeeArrow(g, brush, ref start, ref end, lineWidth);
            break;
        case ArrowStyle.Diamond:
            DrawDiamondArrow(g, brush, ref start, ref end, lineWidth);
            break;
        case ArrowStyle.ODiamond:
            DrawODiamondArrow(g, brush, ref start, ref end, lineWidth);
            break;
        case ArrowStyle.Generalization:
            DrawGeneralizationArrow(g, brush, ref start, ref end, lineWidth);
            break;
        default:
            // unknown style — fail loudly rather than draw nothing
            throw new InvalidOperationException();
    }
}
// Fills a triangular arrowhead from `start` (base) to `end` (tip). The base
// half-width is h*tan(ArrowAngle/2), where h is the base-to-tip distance.
// For thick lines the triangle is widened by half the line width and a small
// disc is added near the tip to hide the blunt line ending.
internal static void DrawNormalArrow(Graphics g, Brush brush, ref P2 start, ref P2 end, double lineWidth){
    PointF[] points;
    if(lineWidth == 1){
        P2 dir = end - start;
        P2 h = dir;
        dir /= dir.Length; // unit direction of the arrow
        var s = new P2(-dir.Y, dir.X); // unit normal
        s *= h.Length*((float) Math.Tan(ArrowAngle*0.5f*(Math.PI/180.0)));
        points = new[]{PointF(start + s), PointF(end), PointF(start - s)};
    } else{
        P2 dir = end - start;
        P2 h = dir;
        dir /= dir.Length;
        var s = new P2(-dir.Y, dir.X);
        float w =(float) (0.5*lineWidth);
        P2 s0 = w*s; // extra half-width from the line's thickness
        double al = ArrowAngle*0.5f*(Math.PI/180.0);
        s *= h.Length*((float) Math.Tan(al));
        s += s0;
        // trapezoid: widened base plus a blunt tip of the line's width
        points = new[]{PointF(start + s), PointF(start - s), PointF(end - s0), PointF(end + s0)};
        // disc that rounds off the blunt tip
        P2 center = end - dir*w*(float) Math.Tan(al);
        double rad = w/Math.Cos(al);
        g.FillEllipse(brush,
                      (float) center.X - (float) rad,
                      (float) center.Y - (float) rad,
                      2.0f*(float) rad,
                      2.0f*(float) rad);
    }
    g.FillPolygon(brush, points);
}
// Tee arrowhead: the connecting line plus a perpendicular bar near `start`,
// sized from the arrow length and padded by the line width.
static void DrawTeeArrow(Graphics g, Brush brush, ref P2 start, ref P2 end, double lineWidth){
    double lw = lineWidth == -1 ? 1 : lineWidth; // -1 means "unset" — use 1
    using (var p = new Pen(brush, (float) lw)){
        g.DrawLine(p, PointF(start), PointF(end));
        P2 dir = end - start;
        P2 h = dir;
        dir /= dir.Length;
        var s = new P2(-dir.Y, dir.X); // unit normal: the bar's direction
        s *= 2*h.Length*((float) Math.Tan(ArrowAngle*0.5f*(Math.PI/180.0)));
        s += (1 + lw)*s.Normalize();
        g.DrawLine(p, PointF(start + s), PointF(start - s));
    }
}
// Filled diamond arrowhead: vertices at one unit behind `start`, the two side
// points at the midpoint offset by a third of the length, and `end`.
internal static void DrawDiamondArrow(Graphics g, Brush brush, ref P2 start, ref P2 end, double lineWidth){
    double lw = lineWidth == -1 ? 1 : lineWidth; // -1 means "unset" — use 1
    using (var p = new Pen(brush, (float) lw)){
        P2 dir = end - start;
        P2 h = dir;
        dir /= dir.Length;
        var s = new P2(-dir.Y, dir.X); // unit normal
        var points = new[]{
            PointF(start - dir), PointF(start + (h/2) + s*(h.Length/3)), PointF(end),
            PointF(start + (h/2) - s*(h.Length/3))
        };
        g.FillPolygon(p.Brush, points);
    }
}

// Open (outlined) diamond arrowhead: same geometry as DrawDiamondArrow but
// only the outline is drawn.
internal static void DrawODiamondArrow(Graphics g, Brush brush, ref P2 start, ref P2 end, double lineWidth){
    double lw = lineWidth == -1 ? 1 : lineWidth;
    using (var p = new Pen(brush, (float) lw)){
        P2 dir = end - start;
        P2 h = dir;
        dir /= dir.Length;
        var s = new P2(-dir.Y, dir.X);
        var points = new[]{
            PointF(start - dir), PointF(start + (h/2) + s*(h.Length/3)), PointF(end),
            PointF(start + (h/2) - s*(h.Length/3))
        };
        g.DrawPolygon(p, points);
    }
}
// UML-generalization arrowhead: an outlined triangle whose base spans the
// full arrow width at `start` and whose tip is at `end`.
internal static void DrawGeneralizationArrow(Graphics g, Brush brush, ref P2 start, ref P2 end,
                                             double lineWidth){
    double lw = lineWidth == -1 ? 1 : lineWidth; // -1 means "unset" — use 1
    using (var p = new Pen(brush, (float) lw)){
        P2 dir = end - start;
        P2 h = dir;
        dir /= dir.Length;
        var s = new P2(-dir.Y, dir.X); // unit normal
        var points = new[]{
            PointF(start), PointF(start + s*(h.Length/2)), PointF(end), PointF(start - s*(h.Length/2))
        };
        //                g.FillPolygon(p.Brush, points);
        g.DrawPolygon(p, points);
    }
}
// Draws a straight line between two MSAGL points.
internal static void DrawLine(Graphics g, Pen pen, P2 start, P2 end){
    g.DrawLine(pen, PointF(start), PointF(end));
}
// Draws a box-shaped node: a plain rectangle when either corner radius is 0,
// otherwise a rounded rectangle built by FillTheGraphicsPath. Fill brushes
// are now disposed — they were leaked GDI handles before.
internal static void DrawBox(Graphics g, Pen pen, DNode dNode) {
    var drNode = dNode.DrawingNode;
    NodeAttr nodeAttr = drNode.Attr;
    if(nodeAttr.XRadius == 0 || nodeAttr.YRadius == 0){
        double x = drNode.GeometryNode.Center.X - drNode.Width/2.0f;
        double y = drNode.GeometryNode.Center.Y - drNode.Height / 2.0f;
        if(NeedToFill(dNode.FillColor)){
            Color fc = FillColor(nodeAttr);
            using (var brush = new SolidBrush(fc))
                g.FillRectangle(brush, (float) x, (float) y, (float) drNode.Width,
                                (float) drNode.Height);
        }
        g.DrawRectangle(pen, (float) x, (float) y, (float) drNode.Width, (float) drNode.Height);
    } else{
        var width = (float) drNode.Width;
        var height = (float) drNode.Height;
        var xRadius = (float) nodeAttr.XRadius;
        var yRadius = (float) nodeAttr.YRadius;
        using (var path = new GraphicsPath()){
            FillTheGraphicsPath(drNode, width, height, ref xRadius, ref yRadius, path);
            if(NeedToFill(dNode.FillColor)){
                using (var brush = new SolidBrush(dNode.FillColor))
                    g.FillPath(brush, path);
            }
            g.DrawPath(pen, path);
        }
    }
}
// Builds a rounded-rectangle outline centered on the node: four straight
// edges (omitted when the radius consumes the whole side) joined by four
// quarter-arcs. The radii are clamped in place to at most half the box size.
// NOTE(review): `nodeAttr` is unused here.
static void FillTheGraphicsPath(DrawingNode drNode, float width, float height, ref float xRadius,
                                ref float yRadius, GraphicsPath path) {
    NodeAttr nodeAttr = drNode.Attr;
    float w = (width/2);
    if(xRadius > w)
        xRadius = w;
    float h = (height/2);
    if(yRadius > h)
        yRadius = h;
    var x = (float) drNode.GeometryNode.Center.X;
    var y = (float) drNode.GeometryNode.Center.Y;
    float ox = w - xRadius; // half-length of the straight horizontal edges
    float oy = h - yRadius; // half-length of the straight vertical edges
    float top = y + h;
    float bottom = y - h;
    float left = x - w;
    float right = x + w;
    const float PI = 180; // GDI+ arc angles are in degrees
    if(ox > 0)
        path.AddLine(x - ox, bottom, x + ox, bottom);
    path.AddArc(x + ox - xRadius, y - oy - yRadius, 2*xRadius, 2*yRadius, 1.5f*PI, 0.5f*PI);
    if(oy > 0)
        path.AddLine(right, y - oy, right, y + oy);
    path.AddArc(x + ox - xRadius, y + oy - yRadius, 2*xRadius, 2*yRadius, 0, 0.5f*PI);
    if(ox > 0)
        path.AddLine(x + ox, top, x - ox, top);
    path.AddArc(x - ox - xRadius, y + oy - yRadius, 2*xRadius, 2*yRadius, 0.5f*PI, 0.5f*PI);
    if(oy > 0)
        path.AddLine(left, y + oy, left, y - oy);
    path.AddArc(x - ox - xRadius, y - oy - yRadius, 2*xRadius, 2*yRadius, PI, 0.5f*PI);
}
// Draws a diamond-shaped node from the four axis-extreme points of its
// bounding box. The fill brush is now disposed — it leaked before.
internal static void DrawDiamond(Graphics g, Pen pen, DNode dNode) {
    var drNode = dNode.DrawingNode;
    NodeAttr nodeAttr = drNode.Attr;
    double w2 = drNode.Width/2.0f;
    double h2 = drNode.Height/2.0f;
    double cx = drNode.Pos.X;
    double cy = drNode.Pos.Y;
    var ps = new[]{
        new PointF((float) cx - (float) w2, (float) cy),
        new PointF((float) cx, (float) cy + (float) h2),
        new PointF((float) cx + (float) w2, (float) cy),
        new PointF((float) cx, (float) cy - (float) h2)
    };
    if(NeedToFill(dNode.FillColor)){
        Color fc = FillColor(nodeAttr);
        using (var brush = new SolidBrush(fc))
            g.FillPolygon(brush, ps);
    }
    g.DrawPolygon(pen, ps);
}
// Draws an ellipse-shaped node centered on the node's position.
internal static void DrawEllipse(Graphics g, Pen pen, DNode dNode) {
    var drNode = dNode.DrawingNode;
    NodeAttr nodeAttr = drNode.Attr;
    var width = (float)drNode.Width;
    var height = (float)drNode.Height;
    // convert from center to top-left corner for GDI+
    var x = (float)(drNode.Pos.X - width / 2.0);
    var y = (float) (drNode.Pos.Y - height/2.0);
    DrawEllipseOnPosition(dNode, nodeAttr, g, x, y, width, height, pen);
}
// Fills (when requested) and outlines an ellipse at the given top-left box.
// Point-shaped nodes are additionally filled solid in the pen color.
// Both brushes are now disposed — they were leaked GDI handles before.
static void DrawEllipseOnPosition(DNode dNode, NodeAttr nodeAttr, Graphics g, float x, float y, float width,
                                  float height, Pen pen){
    if(NeedToFill(dNode.FillColor))
        using (var brush = new SolidBrush(dNode.FillColor))
            g.FillEllipse(brush, x, y, width, height);
    if(nodeAttr.Shape == Shape.Point)
        using (var brush = new SolidBrush(pen.Color))
            g.FillEllipse(brush, x, y, width, height);
    g.DrawEllipse(pen, x, y, width, height);
}
//static internal void DrawGraphBBox(Graphics g,DGraph graphToDraw)
//{
// foreach( Style style in graphToDraw.DrawingGraph.GraphAttr.Styles)
// {
// if(style==Style.Filled)
// {
// BBox bb=graphToDraw.DrawingGraph.GraphAttr.BoundingBox;
// g.FillRectangle(
// new SolidBrush(System.Drawing.Color.LightSteelBlue),
// (float)bb.LeftTop.X,(float)bb.LeftTop.Y,(float)bb.RightBottom.X-(float)bb.LeftTop.X,-(float)bb.RightBottom.Y+(float)bb.RightBottom.Y);
// return;
// }
// }
// if(!(graphToDraw.DrawingGraph.GraphAttr.Backgroundcolor.A==0))
// {
// BBox bb=graphToDraw.DrawingGraph.GraphAttr.BoundingBox;
// SolidBrush brush=new SolidBrush((MsaglColorToDrawingColor( graphToDraw.DrawingGraph.GraphAttr.Backgroundcolor)));
// if(!bb.IsEmpty)
// g.FillRectangle(brush,
// (float) bb.LeftTop.X,(float)bb.LeftTop.Y,(float)bb.RightBottom.X-(float)bb.LeftTop.X,-(float)bb.LeftTop.Y+(float)bb.RightBottom.Y);
// }
//}
//don't know what to do about the throw-catch block
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
// Draws an edge/node label, and — when it is marked for dragging — a dotted
// attachment line in the label's font color. The brush and pen are now
// disposed; both were leaked GDI handles before.
internal static void DrawLabel(Graphics g, DLabel label){
    if (label == null || label.DrawingLabel.Width == 0)
        return;
    var rectF = GetLabelRect(label);
    try{
        using (var brush = new SolidBrush(MsaglColorToDrawingColor(label.DrawingLabel.FontColor)))
            DrawStringInRectCenter(g, brush, label.Font, label.DrawingLabel.Text, rectF);
    }
    catch{
        // best-effort: a failed text draw must not abort graph rendering
    }
    if(label.MarkedForDragging){
        using (var pen = new Pen(MsaglColorToDrawingColor(label.DrawingLabel.FontColor))){
            pen.DashStyle = DashStyle.Dot;
            DrawLine(g, pen, label.DrawingLabel.GeometryLabel.AttachmentSegmentStart,
                     label.DrawingLabel.GeometryLabel.AttachmentSegmentEnd);
        }
    }
}
// Rectangle the label text is drawn into. Subgraph (cluster) labels are
// centered horizontally in the cluster and vertically inside its top margin;
// ordinary labels use the label's own geometry box.
private static RectangleF GetLabelRect(DLabel label)
{
    var subgraph = label.DrawingLabel.Owner as Subgraph;
    if (subgraph != null)
    {
        var cluster = (Cluster) subgraph.GeometryNode;
        var rb = cluster.RectangularBoundary;
        var cx = rb.Rect.Left + rb.Rect.Width/2;
        var cy = cluster.BoundingBox.Top - rb.TopMargin/2;
        var size = label.DrawingLabel.Size;
        return new RectangleF(
            (float) (cx - size.Width/2),
            (float) (cy - size.Height/2),
            (float) size.Width,
            (float) size.Height);
    }
    else
    {
        var rectF = new RectangleF((float) label.DrawingLabel.Left, (float) label.DrawingLabel.Bottom,
                                   (float) label.DrawingLabel.Size.Width,
                                   (float) label.DrawingLabel.Size.Height);
        return rectF;
    }
}
// Draws the string inside rectangle r. The graph is rendered with a y-up
// coordinate system, so before drawing, the current transform is composed with a
// vertical mirror about the line y = c (the rectangle's vertical center); this
// flips the text right side up without moving it. The original transform is
// restored afterwards.
static void DrawStringInRectCenter(Graphics g, Brush brush, Font f, string s, RectangleF r
    /*, double rectLineWidth*/){
    if(String.IsNullOrEmpty(s))
        return;
    using (Matrix m = g.Transform){
        using (Matrix saveM = m.Clone()){
            // Mirror the y axis about the rectangle's vertical center
            // (matrix (1,0,0,-1,0,2c) maps y -> 2c - y).
            float c = (r.Bottom + r.Top)/2;
            using (var m2 = new Matrix(1, 0, 0, -1, 0, 2*c)){
                m.Multiply(m2);
            }
            g.Transform = m;
            using (StringFormat stringFormat = StringFormat.GenericTypographic){
                g.DrawString(s, f, brush, r.Left, r.Top, stringFormat);
            }
            // Restore the transform saved before the mirror was applied.
            g.Transform = saveM;
        }
    }
}
/// <summary>Converts an MSAGL point to a GDI+ <see cref="System.Drawing.PointF"/>.</summary>
internal static PointF PointF(P2 p){
    var x = (float) p.X;
    var y = (float) p.Y;
    return new PointF(x, y);
}
// Renders a node's boundary curve, dispatching on its concrete MSAGL type
// (Curve, Ellipse, Polyline, RoundedRect). Fixes over the original: the unused
// local `attr` is removed, and the GraphicsPath/SolidBrush used for polylines
// are disposed instead of leaked.
internal static void DrawFromMsaglCurve(Graphics g, Pen pen, DNode dNode){
    var iCurve = dNode.DrawingNode.GeometryNode.BoundaryCurve;
    var c = iCurve as Curve;
    if (c != null){
        DrawCurve(dNode, c, g, pen);
        return;
    }
    var ellipse = iCurve as Ellipse;
    if (ellipse != null){
        double w = ellipse.AxisA.X;
        double h = ellipse.AxisB.Y;
        DrawEllipseOnPosition(dNode, dNode.DrawingNode.Attr, g, (float) (ellipse.Center.X - w),
                              (float) (ellipse.Center.Y - h),
                              (float) w*2, (float) h*2, pen);
        return;
    }
    var poly = iCurve as Polyline;
    if (poly != null){
        // NOTE(review): points are truncated to int, losing sub-pixel precision;
        // kept as-is to preserve the existing rendering.
        using (var path = new GraphicsPath()){
            path.AddLines(poly.Select(p => new Point((int) p.X, (int) p.Y)).ToArray());
            path.CloseAllFigures();
            if (NeedToFill(dNode.FillColor))
                using (var brush = new SolidBrush(dNode.FillColor))
                    g.FillPath(brush, path);
            g.DrawPath(pen, path);
        }
        return;
    }
    var roundedRect = iCurve as RoundedRect;
    if (roundedRect != null)
        DrawCurve(dNode, roundedRect.Curve, g, pen);
}
// Builds a GraphicsPath from the curve's segments, then fills (when the node has
// a fill color) and strokes it. Fix over the original: the path and the fill
// brush are now disposed instead of leaked. The path cannot be a `using`
// variable because AddSegToPath takes it by ref, so try/finally is used.
static void DrawCurve(DNode dNode, Curve c, Graphics g, Pen pen) {
    var path = new GraphicsPath();
    try {
        foreach (ICurve seg in c.Segments)
            AddSegToPath(seg, ref path);
        if (NeedToFill(dNode.FillColor))
            using (var brush = new SolidBrush(dNode.FillColor))
                g.FillPath(brush, path);
        g.DrawPath(pen, path);
    }
    finally {
        path.Dispose();
    }
}
// Appends one MSAGL curve segment (line, cubic Bezier, or elliptical arc) to the
// GDI+ path. Unknown segment types are silently ignored, as before.
static void AddSegToPath(ICurve seg, ref GraphicsPath path){
    var line = seg as LineSegment;
    if (line != null){
        path.AddLine(PointF(line.Start), PointF(line.End));
        return;
    }
    var bezier = seg as CubicBezierSegment;
    if (bezier != null){
        path.AddBezier(PointF(bezier.B(0)), PointF(bezier.B(1)), PointF(bezier.B(2)), PointF(bezier.B(3)));
        return;
    }
    var ellipse = seg as Ellipse;
    if (ellipse == null)
        return;
    // We assume that ellipses are going counterclockwise.
    const double toDegree = 180/Math.PI;
    double width = ellipse.AxisA.X*2;
    double height = ellipse.AxisB.Y*2;
    double sweep = ellipse.ParEnd - ellipse.ParStart;
    if (sweep < 0)
        sweep += Math.PI*2; // normalize the parameter span to [0, 2*pi)
    path.AddArc((float) (ellipse.Center.X - width/2), (float) (ellipse.Center.Y - height/2),
                (float) width, (float) height,
                (float) (ellipse.ParStart*toDegree), (float) (sweep*toDegree));
}
const double ToDegreesMultiplier = 180 / Math.PI;
/// <summary>
/// Computes the GDI+ arc parameters (start angle, sweep angle, bounding box) for
/// an MSAGL ellipse arc. It is a very tricky function, please change carefully.
/// </summary>
/// <param name="ellipse">source ellipse arc</param>
/// <param name="startAngle">start angle in degrees, in GDI+ terms</param>
/// <param name="sweepAngle">sweep in degrees; negated for clockwise arcs</param>
/// <param name="box">bounding box of the full (untrimmed) ellipse</param>
public static void GetGdiArcDimensions(Ellipse ellipse, out float startAngle, out double sweepAngle, out BBox box) {
    box = ellipse.FullBox();
    startAngle = EllipseStandardAngle(ellipse, ellipse.ParStart);
    bool orientedCcw = ellipse.OrientedCounterclockwise();
    // A parameter span of ~2*pi means the arc is the whole ellipse.
    if ( Math.Abs(( Math.Abs(ellipse.ParEnd - ellipse.ParStart) - Math.PI * 2)) < 0.001)//we have a full ellipse
        sweepAngle = 360;
    else
        // Measure the sweep as the Start-Center-End angle, argument order chosen by orientation.
        sweepAngle = (orientedCcw ? P2.Angle(ellipse.Start, ellipse.Center, ellipse.End) :P2.Angle(ellipse.End, ellipse.Center, ellipse.Start))
            * ToDegreesMultiplier;
    if (!orientedCcw)
        sweepAngle = -sweepAngle; // GDI+ expects a negative sweep for clockwise arcs
}
// Maps an ellipse parameter value to the standard axis-aligned angle, in
// degrees, of the corresponding point as seen from the ellipse center.
static float EllipseStandardAngle(Ellipse ellipse, double angle){
    var direction = Math.Cos(angle)*ellipse.AxisA + Math.Sin(angle)*ellipse.AxisB;
    var degrees = Math.Atan2(direction.Y, direction.X)*ToDegreesMultiplier;
    return (float) degrees;
}
///<summary>Mouse-move handler that echoes the cursor position, converted to
/// source (graph) coordinates, to the first ToolStripStatusLabel found on a
/// StatusStrip of the viewer's parent form.</summary>
///<param name="sender">expected to be the GViewer raising the event</param>
///<param name="e">mouse location in screen coordinates</param>
public static void GviewerMouseMove(object sender, MouseEventArgs e){
    var gviewer = sender as GViewer;
    if (gviewer == null)
        return;
    float viewerX;
    float viewerY;
    gviewer.ScreenToSource(e.Location.X, e.Location.Y, out viewerX, out viewerY);
    // Bug fix: the original wrapped String.Format in a second, redundant
    // String.Format, which would throw if the formatted coordinates ever
    // contained '{' or '}' characters.
    var str = String.Format("{0},{1}", viewerX, viewerY);
    var form = gviewer.ParentForm;
    if (form == null) // viewer not (yet) hosted on a form
        return;
    foreach (var ch in form.Controls){
        var sb = ch as StatusStrip;
        if (sb == null)
            continue;
        foreach (var item in sb.Items){
            var label = item as ToolStripStatusLabel;
            if (label == null) continue;
            label.Text = str;
            return; // only the first status label is updated
        }
    }
}
}
}
| |
// author: guillaume l. <guillaume@geelweb.org>
// license: http://opensource.org/licenses/bsd-license.php BSD License
// copyright: copyright (c) 2007-2009, guillaume luchet
using System;
using Gtk;
namespace Barcode {
/// <summary>Barcode
/// <para>Abstract base class describing a barcode. Holds the rendering
/// attributes shared by all symbologies (bar width, font, text placement,
/// checksum options, ...) and declares <c>getBarsCode</c>, which concrete
/// symbologies implement to produce the bar pattern for a given text.</para>
/// </summary>
public abstract class Barcode {
// consts {{{
/** A type of barcode */
//public const int EAN13 = 1;
/** A type of barcode */
//public const int EAN8 = 2;
/** A type of barcode */
//public const int UPCA = 3;
/** A type of barcode */
//public const int UPCE = 4;
/** A type of barcode */
//public const int SUPP2 = 5;
/** A type of barcode */
//public const int SUPP5 = 6;
/** A type of barcode */
//public const int POSTNET = 7;
/** A type of barcode */
//public const int PLANET = 8;
/** A type of barcode */
//public const int CODE128 = 9;
/** A type of barcode */
//public const int CODE128_UCC = 10;
/** A type of barcode */
//public const int CODE128_RAW = 11;
/** A type of barcode */
//public const int CODABAR = 12;
// }}}
// float x {{{
/// <summary>The minimum bar width.</summary>
protected float x;
/// <summary>Gets or sets the minimum bar width.</summary>
public float X {
get { return x; }
set { this.x = value; }
}
// }}}
// float n {{{
/// <summary>The bar multiplier for wide bars or the distance between
/// bars for Postnet and Planet.</summary>
protected float n;
/// <summary>Gets or sets the bar multiplier for wide bars.</summary>
public float N {
get { return n; }
set { this.n = value; }
}
// }}}
// Pango.FontDescription font {{{
/// <summary>The text font.</summary>
protected Pango.FontDescription font;
/// <summary>Gets or sets the text font. <CODE>null</CODE> if no text.</summary>
public Pango.FontDescription Font {
get { return font; }
set { this.font = value; }
}
// }}}
// float size {{{
/// <summary>The size of the text or the height of the shorter bar
/// in Postnet.</summary>
protected float size;
/// <summary>Gets or sets the size of the text.</summary>
public float Size {
get { return size; }
set { this.size = value; }
}
// }}}
// float baseline {{{
/// <summary>If positive, the text distance under the bars. If zero or negative,
/// the text distance above the bars.</summary>
protected float baseline;
/// <summary>Gets or sets the text baseline.
/// If positive, the text distance under the bars. If zero or negative,
/// the text distance above the bars.</summary>
public float Baseline {
get { return baseline; }
set { this.baseline = value; }
}
// }}}
// float barHeight {{{
/// <summary>The height of the bars.</summary>
protected float barHeight;
/// <summary>Gets or sets the height of the bars.</summary>
public float BarHeight {
get { return barHeight; }
set { this.barHeight = value; }
}
// }}}
// int textAlignment {{{
/// <summary>The text Element. Can be <CODE>Element.ALIGN_LEFT</CODE>,
/// <CODE>Element.ALIGN_CENTER</CODE> or
/// <CODE>Element.ALIGN_RIGHT</CODE>.</summary>
protected int textAlignment;
/// <summary>Gets or sets the text Element. Can be
/// <CODE>Element.ALIGN_LEFT</CODE>,
/// <CODE>Element.ALIGN_CENTER</CODE> or
/// <CODE>Element.ALIGN_RIGHT</CODE>.</summary>
public int TextAlignment{
get { return textAlignment; }
set { this.textAlignment = value; }
}
// }}}
// bool generateChecksum {{{
/// <summary>The optional checksum generation.</summary>
protected bool generateChecksum;
/// <summary>The property for the optional checksum
/// generation.</summary>
public bool GenerateChecksum {
set { this.generateChecksum = value; }
get { return generateChecksum; }
}
// }}}
// bool checksumText {{{
/// <summary>Shows the generated checksum in the text.</summary>
protected bool checksumText;
/// <summary>Sets the property to show the generated checksum in
/// the text.</summary>
public bool ChecksumText {
set { this.checksumText = value; }
get { return checksumText; }
}
// }}}
// bool startStopText {{{
/// <summary>Show the start and stop character '*' in the text for
/// the barcode 39 or 'ABCD' for codabar.</summary>
protected bool startStopText;
/// <summary>Gets or sets the property to show the start and stop character '*'
/// in the text for the barcode 39.</summary>
public bool StartStopText {
set { this.startStopText = value; }
get { return startStopText; }
}
// }}}
// bool extended {{{
/// <summary>Generates extended barcode 39.</summary>
protected bool extended;
/// <summary>Sets the property to generate extended barcode
/// 39.</summary>
public bool Extended {
set { this.extended = value; }
get { return extended; }
}
// }}}
// string code {{{
/// <summary>The code to generate.</summary>
protected string code = "";
/// <summary>Gets or sets the code to generate.</summary>
public virtual string Code {
get { return code; }
set { this.code = value; }
}
// }}}
// bool guardBars {{{
/// <summary>Show the guard bars for barcode EAN.</summary>
protected bool guardBars;
/// <summary>Sets the property to show the guard bars for barcode
/// EAN.</summary>
public bool GuardBars {
set { this.guardBars = value; }
get { return guardBars; }
}
// }}}
// int codeType {{{
/// <summary>The code type.</summary>
protected int codeType;
/// <summary>Gets or sets the code type.</summary>
public int CodeType {
get { return codeType; }
set { this.codeType = value; }
}
// }}}
// float inkSpreading {{{
/// <summary>The ink spreading.</summary>
protected float inkSpreading = 0;
/// <summary>Gets or sets the ink spreading.</summary>
public float InkSpreading {
set { inkSpreading = value; }
get { return inkSpreading; }
}
// }}}
// String altText {{{
/// <summary>The alternate text to be used, if present.</summary>
protected String altText;
// string altText
/// <summary>Gets or sets the alternate text. If present, this text will be
/// used instead of the text derived from the supplied code.</summary>
public String AltText {
set { altText = value; }
get { return altText; }
}
// }}}
// Barcode::getBarsCode() {{{
/// <summary>Returns the bars code for the given text; implemented by each
/// concrete symbology.</summary>
/// <param name="text">barcode text</param>
/// <returns>byte array describing the bars</returns>
public abstract byte[] getBarsCode(string text);
// }}}
}
}
| |
/*
The MIT License (MIT)
Copyright (c) 2016 Maksim Volkau
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
namespace DryIoc
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
/// <summary>Methods to work with immutable arrays, and general array sugar.</summary>
/// <summary>Helpers for treating arrays as immutable values, plus general array sugar.</summary>
public static class ArrayTools
{
    /// <summary>True when the array is null or has no items.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Array to inspect.</param>
    /// <returns>True for a null or zero-length array, false otherwise.</returns>
    public static bool IsNullOrEmpty<T>(this T[] source)
    {
        return source == null || source.Length == 0;
    }

    /// <summary>Substitutes the shared empty array for null; passes any non-null array through.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Array, possibly null.</param>
    /// <returns>The source array, or the empty singleton when source is null.</returns>
    public static T[] EmptyIfNull<T>(this T[] source)
    {
        if (source != null)
            return source;
        return Empty<T>();
    }

    /// <summary>Returns the enumerable itself when it already is an array, otherwise copies it into one.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Source enumerable.</param>
    /// <returns>The same array instance, or a fresh array copy.</returns>
    public static T[] ToArrayOrSelf<T>(this IEnumerable<T> source)
    {
        var asArray = source as T[];
        return asArray ?? source.ToArray();
    }

    /// <summary>Concatenates two arrays into a new one. A null/empty side is
    /// skipped: the other array is returned as-is (no copy).</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Leading items.</param>
    /// <param name="added">Trailing items.</param>
    /// <returns>New combined array, or one of the inputs when the other is null/empty.</returns>
    public static T[] Append<T>(this T[] source, params T[] added)
    {
        if (added.IsNullOrEmpty())
            return source;
        if (source.IsNullOrEmpty())
            return added;
        var result = new T[source.Length + added.Length];
        Array.Copy(source, 0, result, 0, source.Length);
        // Single trailing item is assigned directly to skip an Array.Copy call.
        if (added.Length == 1)
            result[source.Length] = added[0];
        else
            Array.Copy(added, 0, result, source.Length, added.Length);
        return result;
    }

    /// <summary>Returns a new array with <paramref name="value"/> appended, or
    /// written at <paramref name="index"/> when one is given. A null or empty
    /// source always yields a single-item array, whatever the index.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Array to copy from.</param>
    /// <param name="value">Value to append or set.</param>
    /// <param name="index">(optional) Position to overwrite; negative means append.</param>
    /// <returns>New array with the appended or updated value.</returns>
    public static T[] AppendOrUpdate<T>(this T[] source, T value, int index = -1)
    {
        if (source.IsNullOrEmpty())
            return new[] { value };
        var length = source.Length;
        if (index < 0)
            index = length; // append position
        // In-range index keeps the length; appending grows the array by one.
        var result = new T[index < length ? length : length + 1];
        Array.Copy(source, result, length);
        result[index] = value;
        return result;
    }

    /// <summary>Index of the first item satisfying the predicate, or -1 when
    /// none does (or the array is null/empty).</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Array to scan; null yields -1.</param>
    /// <param name="predicate">Evaluated on each item until it returns true.</param>
    /// <returns>Matching index or -1.</returns>
    public static int IndexOf<T>(this T[] source, Func<T, bool> predicate)
    {
        if (source == null)
            return -1;
        for (var i = 0; i < source.Length; ++i)
            if (predicate(source[i]))
                return i;
        return -1;
    }

    /// <summary>Index of the first item equal to <paramref name="value"/>
    /// (reference equality first, then <see cref="object.Equals(object,object)"/>), or -1.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Array to scan; null yields -1.</param>
    /// <param name="value">Value to look for.</param>
    /// <returns>Matching index or -1.</returns>
    public static int IndexOf<T>(this T[] source, T value)
    {
        if (source == null)
            return -1;
        for (var i = 0; i < source.Length; ++i)
        {
            var item = source[i];
            if (ReferenceEquals(item, value) || Equals(item, value))
                return i;
        }
        return -1;
    }

    /// <summary>New array without the item at <paramref name="index"/>. An
    /// out-of-range index (or a null/empty source) returns the source unchanged.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Input array.</param> <param name="index">Position of the item to drop.</param>
    /// <returns>New shorter array, or the input when nothing was removed.</returns>
    public static T[] RemoveAt<T>(this T[] source, int index)
    {
        // A zero-length source also falls through here, since any index is out of range for it.
        if (source == null || index < 0 || index >= source.Length)
            return source;
        if (source.Length == 1)
            return new T[0];
        var result = new T[source.Length - 1];
        if (index > 0)
            Array.Copy(source, 0, result, 0, index);
        if (index < result.Length)
            Array.Copy(source, index + 1, result, index, result.Length - index);
        return result;
    }

    /// <summary>New array with the first occurrence of <paramref name="value"/>
    /// removed, or the source array when the value is absent.</summary>
    /// <typeparam name="T">Item type.</typeparam>
    /// <param name="source">Input array.</param> <param name="value">Value to find and remove.</param>
    /// <returns>New array, or the input when the value was not found.</returns>
    public static T[] Remove<T>(this T[] source, T value)
    {
        return source.RemoveAt(source.IndexOf(value));
    }

    /// <summary>Shared empty array singleton for the given item type.</summary>
    /// <typeparam name="T">Item type.</typeparam> <returns>Empty array.</returns>
    public static T[] Empty<T>()
    {
        return EmptyArray<T>.Value;
    }

    // Per-type cache so Empty<T>() always hands out the same instance.
    private static class EmptyArray<T>
    {
        public static readonly T[] Value = new T[0];
    }
}
/// <summary>Holder of a reference-type value supporting optimistic-concurrency
/// replacement via <see cref="Ref.Swap{T}"/>.</summary>
/// <typeparam name="T">Type of the wrapped object.</typeparam>
public sealed class Ref<T> where T : class
{
    private T _value;

    /// <summary>Currently held value.</summary>
    public T Value { get { return _value; } }

    /// <summary>Creates the holder, optionally seeded with an initial value.</summary>
    /// <param name="initialValue">(optional) Value to start with.</param>
    public Ref(T initialValue = default(T))
    {
        _value = initialValue;
    }

    /// <summary>Atomically replaces the held value with one computed from the
    /// current value, retrying on contention — see <see cref="Ref.Swap{T}"/>.</summary>
    /// <param name="getNewValue">Computes the replacement from the current value.
    /// Important: may run more than once when other threads intervene.</param>
    /// <returns>The value that was replaced, as with <see cref="Interlocked.Exchange(ref int,int)"/>.</returns>
    public T Swap(Func<T, T> getNewValue)
    {
        return Ref.Swap(ref _value, getNewValue);
    }

    /// <summary>Unconditionally stores the new value, ignoring any concurrent changes.</summary>
    /// <param name="newValue">Value to store.</param> <returns>The previous value.</returns>
    public T Swap(T newValue)
    {
        return Interlocked.Exchange(ref _value, newValue);
    }

    /// <summary>Stores <paramref name="newValue"/> only when the held value still
    /// is <paramref name="currentValue"/>.</summary>
    /// <param name="currentValue">Expected current value.</param> <param name="newValue">Replacement.</param>
    /// <returns>True when the value was replaced; false when it had already been
    /// changed by another party.</returns>
    /// <example><c>[!CDATA[
    /// var value = SomeRef.Value;
    /// if (!SomeRef.TrySwapIfStillCurrent(value, Update(value))
    ///     SomeRef.Swap(v => Update(v)); // fallback to normal Swap with delegate allocation
    /// ]]</c></example>
    public bool TrySwapIfStillCurrent(T currentValue, T newValue)
    {
        var witnessed = Interlocked.CompareExchange(ref _value, newValue, currentValue);
        return ReferenceEquals(witnessed, currentValue);
    }
}
/// <summary>Companion helpers implementing the optimistic-concurrency <see cref="Swap{T}"/> loop.</summary>
public static class Ref
{
    // Upper bound on retries before giving up; guards against a getNewValue that
    // always loses the race (e.g. because it mutates shared state).
    private const int RETRY_COUNT_UNTIL_THROW = 50;
    private static readonly string _errorRetryCountExceeded =
        "Ref retried to Update for " + RETRY_COUNT_UNTIL_THROW + " times But there is always someone else intervened.";

    /// <summary>Creates a <see cref="Ref{T}"/>, inferring the value type.</summary>
    /// <typeparam name="T">Type of value to wrap.</typeparam>
    /// <param name="value">Initial value to wrap.</param>
    /// <returns>New ref.</returns>
    public static Ref<T> Of<T>(T value) where T : class
    {
        return new Ref<T>(value);
    }

    /// <summary>Creates a new ref holding the same value as the original ref.</summary>
    /// <typeparam name="T">Ref value type.</typeparam>
    /// <param name="original">Ref to copy the value from.</param> <returns>New ref.</returns>
    public static Ref<T> NewRef<T>(this Ref<T> original) where T : class
    {
        return Of(original.Value);
    }

    /// <summary>Compare-and-swap loop: computes a replacement from a snapshot of
    /// <paramref name="value"/>, then installs it only if the snapshot is still
    /// current; otherwise retries with a fresh snapshot.</summary>
    /// <typeparam name="T">Type of value to swap.</typeparam>
    /// <param name="value">Reference to update.</param>
    /// <param name="getNewValue">Computes the new value from the old one.
    /// Important: may run more than once when other threads intervene.</param>
    /// <returns>The replaced (old) value, as with <see cref="Interlocked.Exchange(ref int,int)"/>.</returns>
    public static T Swap<T>(ref T value, Func<T, T> getNewValue) where T : class
    {
        for (var attempts = 0; ; ++attempts)
        {
            var snapshot = value;
            var replacement = getNewValue(snapshot);
            if (ReferenceEquals(Interlocked.CompareExchange(ref value, replacement, snapshot), snapshot))
                return snapshot;
            if (attempts >= RETRY_COUNT_UNTIL_THROW)
                throw new InvalidOperationException(_errorRetryCountExceeded);
        }
    }
}
/// <summary>Immutable Key-Value pair. It is a reference type (so it can be checked for null),
/// which is different from the System value type <see cref="KeyValuePair{TKey,TValue}"/>.
/// In addition provides <see cref="Equals"/> and <see cref="GetHashCode"/> implementations.</summary>
/// <typeparam name="K">Type of Key.</typeparam><typeparam name="V">Type of Value.</typeparam>
public class KV<K, V>
{
    /// <summary>Key.</summary>
    public readonly K Key;
    /// <summary>Value.</summary>
    public readonly V Value;
    /// <summary>Creates Key-Value object by providing key and value. Does Not check either one for null.</summary>
    /// <param name="key">key.</param><param name="value">value.</param>
    public KV(K key, V value)
    {
        Key = key;
        Value = value;
    }
    /// <summary>Creates a nice string view: "{key,value}".</summary><returns>String representation.</returns>
    public override string ToString()
    {
        // Bug fix: `new StringBuilder('{')` invoked the int-capacity constructor
        // (char implicitly converts to int 123), so the opening brace was never
        // written and the result looked like "key,value}". Append it explicitly.
        var s = new StringBuilder().Append('{');
        if (Key != null)
            s.Append(Key);
        s.Append(',');
        if (Value != null)
            s.Append(Value);
        s.Append('}');
        return s.ToString();
    }
    /// <summary>Returns true if both key and value are equal to corresponding key-value of other object.</summary>
    /// <param name="obj">Object to check equality with.</param> <returns>True if equal.</returns>
    public override bool Equals(object obj)
    {
        var other = obj as KV<K, V>;
        // Reference equality is checked first as a cheap fast path.
        return other != null
            && (ReferenceEquals(other.Key, Key) || Equals(other.Key, Key))
            && (ReferenceEquals(other.Value, Value) || Equals(other.Value, Value));
    }
    /// <summary>Combines key and value hash code. R# generated default implementation.</summary>
    /// <returns>Combined hash code for key-value.</returns>
    public override int GetHashCode()
    {
        unchecked
        {
            return ((object)Key == null ? 0 : Key.GetHashCode() * 397)
                ^ ((object)Value == null ? 0 : Value.GetHashCode());
        }
    }
}
/// <summary>Delegate for changing a value from the old one to a new one, given both.</summary>
/// <typeparam name="V">Type of values.</typeparam>
/// <param name="oldValue">Existing value.</param>
/// <param name="newValue">New value passed to the Update.. method.</param>
/// <returns>Changed value.</returns>
public delegate V Update<V>(V oldValue, V newValue);
// todo: V3: Rename to ImTree
/// <summary>Simple immutable AVL tree with integer keys and object values.
/// Every update returns a new tree sharing unchanged nodes with the old one.</summary>
public sealed class ImTreeMapIntToObj
{
/// <summary>Empty tree to start with.</summary>
public static readonly ImTreeMapIntToObj Empty = new ImTreeMapIntToObj();
/// <summary>Key.</summary>
public readonly int Key;
/// <summary>Value.</summary>
public readonly object Value;
/// <summary>Left sub-tree/branch, or empty.</summary>
public readonly ImTreeMapIntToObj Left;
/// <summary>Right sub-tree/branch, or empty.</summary>
public readonly ImTreeMapIntToObj Right;
/// <summary>Height of longest sub-tree/branch plus 1. It is 0 for empty tree, and 1 for single node tree.</summary>
public readonly int Height;
/// <summary>Returns true is tree is empty.</summary>
public bool IsEmpty { get { return Height == 0; } }
/// <summary>Returns new tree with added or updated value for specified key.</summary>
/// <param name="key"></param> <param name="value"></param>
/// <returns>New tree.</returns>
public ImTreeMapIntToObj AddOrUpdate(int key, object value)
{
return AddOrUpdate(key, value, false, null);
}
/// <summary>Delegate to calculate new value from and old and a new value.</summary>
/// <param name="oldValue">Old</param> <param name="newValue">New</param> <returns>Calculated result.</returns>
public delegate object UpdateValue(object oldValue, object newValue);
/// <summary>Returns new tree with added or updated value for specified key.</summary>
/// <param name="key">Key</param> <param name="value">Value</param>
/// <param name="updateValue">(optional) Delegate to calculate new value from and old and a new value.</param>
/// <returns>New tree.</returns>
public ImTreeMapIntToObj AddOrUpdate(int key, object value, UpdateValue updateValue)
{
return AddOrUpdate(key, value, false, updateValue);
}
/// <summary>Returns new tree with updated value for the key, Or the same tree if key was not found.</summary>
/// <param name="key"></param> <param name="value"></param>
/// <returns>New tree if key is found, or the same tree otherwise.</returns>
public ImTreeMapIntToObj Update(int key, object value)
{
return AddOrUpdate(key, value, true, null);
}
/// <summary>Get value for found key or null otherwise.</summary>
/// <param name="key"></param> <returns>Found value or null.</returns>
public object GetValueOrDefault(int key)
{
// Standard BST walk: Height == 0 marks the empty sentinel, so the loop
// stops either at the matching node or at an empty child.
var tree = this;
while (tree.Height != 0 && tree.Key != key)
tree = key < tree.Key ? tree.Left : tree.Right;
return tree.Height != 0 ? tree.Value : null;
}
/// <summary>Returns all sub-trees enumerated from left to right (in-order,
/// i.e. ascending by key).</summary>
/// <returns>Enumerated sub-trees or empty if tree is empty.</returns>
public IEnumerable<ImTreeMapIntToObj> Enumerate()
{
if (Height == 0)
yield break;
// Iterative in-order traversal; Height bounds the depth, so a fixed-size
// array serves as the explicit stack of pending parents.
var parents = new ImTreeMapIntToObj[Height];
var tree = this;
var parentCount = -1;
while (tree.Height != 0 || parentCount != -1)
{
if (tree.Height != 0)
{
// Descend left, remembering each node to visit later.
parents[++parentCount] = tree;
tree = tree.Left;
}
else
{
// Left side exhausted: visit the parent, then walk its right side.
tree = parents[parentCount--];
yield return tree;
tree = tree.Right;
}
}
}
#region Implementation
private ImTreeMapIntToObj() { }
// Node constructor; Height is 1 + the taller child's height.
private ImTreeMapIntToObj(int key, object value, ImTreeMapIntToObj left, ImTreeMapIntToObj right)
{
Key = key;
Value = value;
Left = left;
Right = right;
Height = 1 + (left.Height > right.Height ? left.Height : right.Height);
}
// Core insert/update. Branches: empty tree -> new leaf (or no-op for updateOnly);
// equal key -> replace value (through `update` when provided); otherwise recurse
// into the proper sub-tree and rebalance on the way back up.
private ImTreeMapIntToObj AddOrUpdate(int key, object value, bool updateOnly, UpdateValue update)
{
return Height == 0 ? // tree is empty
(updateOnly ? this : new ImTreeMapIntToObj(key, value, Empty, Empty))
: (key == Key ? // actual update
new ImTreeMapIntToObj(key, update == null ? value : update(Value, value), Left, Right)
: (key < Key // try update on left or right sub-tree
? With(Left.AddOrUpdate(key, value, updateOnly, update), Right)
: With(Left, Right.AddOrUpdate(key, value, updateOnly, update))).KeepBalanced());
}
// AVL rebalancing: when the height difference between children reaches 2,
// rotate. When the heavy child leans the opposite way, rotate it first
// (the classic double rotation).
private ImTreeMapIntToObj KeepBalanced()
{
var delta = Left.Height - Right.Height;
return delta >= 2 ? With(Left.Right.Height - Left.Left.Height == 1 ? Left.RotateLeft() : Left, Right).RotateRight()
: (delta <= -2 ? With(Left, Right.Left.Height - Right.Right.Height == 1 ? Right.RotateRight() : Right).RotateLeft()
: this);
}
// Promotes the left child to the root of this sub-tree.
private ImTreeMapIntToObj RotateRight()
{
return Left.With(Left.Left, With(Left.Right, Right));
}
// Promotes the right child to the root of this sub-tree.
private ImTreeMapIntToObj RotateLeft()
{
return Right.With(With(Left, Right.Left), Right.Right);
}
// Rebuilds this node with new children, reusing `this` when nothing changed.
private ImTreeMapIntToObj With(ImTreeMapIntToObj left, ImTreeMapIntToObj right)
{
return left == Left && right == Right ? this : new ImTreeMapIntToObj(Key, Value, left, right);
}
#endregion
}
// todo: V3: Rename to ImHashTree
/// <summary>Immutable http://en.wikipedia.org/wiki/AVL_tree where actual node key is hash code of <typeparamref name="K"/>.</summary>
public sealed class ImTreeMap<K, V>
{
/// <summary>Empty tree to start with.</summary>
public static readonly ImTreeMap<K, V> Empty = new ImTreeMap<K, V>();
/// <summary>Key of type K that should support <see cref="object.Equals(object)"/> and <see cref="object.GetHashCode"/>.</summary>
public readonly K Key;
/// <summary>Value of any type V.</summary>
public readonly V Value;
/// <summary>Calculated key hash.</summary>
public readonly int Hash;
/// <summary>In case of <see cref="Hash"/> conflicts for different keys contains conflicted keys with their values.</summary>
public readonly KV<K, V>[] Conflicts;
/// <summary>Left sub-tree/branch, or empty.</summary>
public readonly ImTreeMap<K, V> Left;
/// <summary>Right sub-tree/branch, or empty.</summary>
public readonly ImTreeMap<K, V> Right;
/// <summary>Height of longest sub-tree/branch plus 1. It is 0 for empty tree, and 1 for single node tree.</summary>
public readonly int Height;
/// <summary>Returns true if tree is empty.</summary>
public bool IsEmpty { get { return Height == 0; } }
/// <summary>Returns new tree with added key-value. If a value with the same key exists, then:
/// if <paramref name="update"/> is not specified, the existing value will be replaced by <paramref name="value"/>;
/// if <paramref name="update"/> is specified, the update delegate decides what value to keep.</summary>
/// <param name="key">Key to add.</param><param name="value">Value to add.</param>
/// <param name="update">(optional) Delegate to decide what value to keep: old or new one.</param>
/// <returns>New tree with added or updated key-value.</returns>
public ImTreeMap<K, V> AddOrUpdate(K key, V value, Update<V> update = null)
{
// Nodes are ordered by the key's hash; equal hashes for different keys
// end up in the node's Conflicts array.
return AddOrUpdate(key.GetHashCode(), key, value, update, updateOnly: false);
}
/// <summary>Looks for <paramref name="key"/> and replaces its value with new <paramref name="value"/>, or
/// runs custom update handler (<paramref name="update"/>) with old and new value to get the updated result.</summary>
/// <param name="key">Key to look for.</param>
/// <param name="value">New value to replace key value with.</param>
/// <param name="update">(optional) Delegate for custom update logic, it gets old and new <paramref name="value"/>
/// as inputs and should return updated value as output.</param>
/// <returns>New tree with updated value or the SAME tree if no key found.</returns>
public ImTreeMap<K, V> Update(K key, V value, Update<V> update = null)
{
// Same walk as AddOrUpdate, but updateOnly prevents inserting absent keys.
return AddOrUpdate(key.GetHashCode(), key, value, update, updateOnly: true);
}
/// <summary>Looks for key in a tree and returns the key value if found, or <paramref name="defaultValue"/> otherwise.</summary>
/// <param name="key">Key to look for.</param> <param name="defaultValue">(optional) Value to return if key is not found.</param>
/// <returns>Found value or <paramref name="defaultValue"/>.</returns>
public V GetValueOrDefault(K key, V defaultValue = default(V))
{
// Walk down by hash; Height == 0 marks the empty sentinel node.
var t = this;
var hash = key.GetHashCode();
while (t.Height != 0 && t.Hash != hash)
t = hash < t.Hash ? t.Left : t.Right;
// Hash matched: confirm it is actually our key (cheap reference check first);
// otherwise the key may still live in the node's hash-conflict list.
return t.Height != 0 && (ReferenceEquals(key, t.Key) || key.Equals(t.Key))
? t.Value : t.GetConflictedValueOrDefault(key, defaultValue);
}
/// <summary>Depth-first in-order traversal as described in http://en.wikipedia.org/wiki/Tree_traversal
/// The only difference is using fixed size array instead of stack for speed-up (~20% faster than stack).</summary>
/// <returns>Sequence of enumerated key value pairs.</returns>
public IEnumerable<KV<K, V>> Enumerate()
{
    if (Height == 0)
        yield break;
    // Explicit ancestor stack sized by this (balanced) tree's Height, replacing
    // the usual Stack<T> for speed as noted in the summary above.
    var parents = new ImTreeMap<K, V>[Height];
    var tree = this;
    var parentCount = -1;
    while (tree.Height != 0 || parentCount != -1)
    {
        if (tree.Height != 0)
        {
            // Descend left, remembering each node to visit it on the way back up.
            parents[++parentCount] = tree;
            tree = tree.Left;
        }
        else
        {
            // Left subtree exhausted: visit the node, then any hash-conflicting
            // entries stored on it, then continue with its right subtree.
            tree = parents[parentCount--];
            yield return new KV<K, V>(tree.Key, tree.Value);
            if (tree.Conflicts != null)
                for (var i = 0; i < tree.Conflicts.Length; i++)
                    yield return tree.Conflicts[i];
            tree = tree.Right;
        }
    }
}
#region Implementation
// Parameterless constructor leaves Height == 0; presumably used only to create
// the Empty sentinel referenced by AddOrUpdate — TODO confirm (declaration not in view).
private ImTreeMap() { }
// Builds a node from its parts; Height is derived as one more than the taller child.
// (Parameter renamed from the misspelled "conficts".)
private ImTreeMap(int hash, K key, V value, KV<K, V>[] conflicts, ImTreeMap<K, V> left, ImTreeMap<K, V> right)
{
    Hash = hash;
    Key = key;
    Value = value;
    Conflicts = conflicts;
    Left = left;
    Right = right;
    var tallerChild = left.Height > right.Height ? left.Height : right.Height;
    Height = 1 + tallerChild;
}
// Core recursive insert/update. Routes by hash: an equal hash is resolved at this
// node (including hash-collision handling); otherwise it recurses into the proper
// subtree and re-balances the result.
private ImTreeMap<K, V> AddOrUpdate(int hash, K key, V value, Update<V> update, bool updateOnly)
{
    // Empty node: insert a new leaf — unless only updates of existing keys are allowed.
    return Height == 0 ? (updateOnly ? this : new ImTreeMap<K, V>(hash, key, value, null, Empty, Empty))
        : (hash == Hash ? UpdateValueAndResolveConflicts(key, value, update, updateOnly)
        : (hash < Hash
            ? With(Left.AddOrUpdate(hash, key, value, update, updateOnly), Right)
            : With(Left, Right.AddOrUpdate(hash, key, value, update, updateOnly))).KeepBalanced());
}
// Handles an insert/update whose hash equals this node's hash: either the key is
// this node's own key, or it lives (or must be placed) in the hash-conflict list.
private ImTreeMap<K, V> UpdateValueAndResolveConflicts(K key, V value, Update<V> update, bool updateOnly)
{
    // Case 1: the key is this node's own key — replace (or merge via update) in place.
    if (ReferenceEquals(Key, key) || Key.Equals(key))
        return new ImTreeMap<K, V>(Hash, key, update == null ? value : update(Value, value), Conflicts, Left, Right);

    // Case 2: same hash, different key, no conflict list yet — start one
    // (a no-op when only updates of existing keys are allowed).
    if (Conflicts == null)
        return updateOnly ? this
            : new ImTreeMap<K, V>(Hash, Key, Value, new[] { new KV<K, V>(key, value) }, Left, Right);

    // Case 3: scan the conflict list for the incoming key.
    // BUG FIX: the scan previously compared against this node's Key instead of the
    // incoming key parameter, so an existing conflicted key was never found —
    // AddOrUpdate appended duplicate conflict entries and Update never updated them.
    var found = Conflicts.Length - 1;
    while (found >= 0 && !Equals(Conflicts[found].Key, key)) --found;

    if (found == -1)
    {
        // Key not present among the conflicts: append it (no-op when updateOnly).
        if (updateOnly) return this;
        var newConflicts = new KV<K, V>[Conflicts.Length + 1];
        Array.Copy(Conflicts, 0, newConflicts, 0, Conflicts.Length);
        newConflicts[Conflicts.Length] = new KV<K, V>(key, value);
        return new ImTreeMap<K, V>(Hash, Key, Value, newConflicts, Left, Right);
    }

    // Key found among the conflicts: replace (or merge via update) that entry.
    var conflicts = new KV<K, V>[Conflicts.Length];
    Array.Copy(Conflicts, 0, conflicts, 0, Conflicts.Length);
    conflicts[found] = new KV<K, V>(key, update == null ? value : update(Conflicts[found].Value, value));
    return new ImTreeMap<K, V>(Hash, Key, Value, conflicts, Left, Right);
}
// Linear search of the hash-conflict list; falls back to defaultValue when the
// list is absent or the key is not in it.
private V GetConflictedValueOrDefault(K key, V defaultValue)
{
    var conflicts = Conflicts;
    if (conflicts == null)
        return defaultValue;
    foreach (var entry in conflicts)
        if (Equals(entry.Key, key))
            return entry.Value;
    return defaultValue;
}
// Restores the AVL invariant (child heights differ by at most 1) after an insert.
// delta >= 2 means left-heavy: a left-right case (inner grandchild taller by 1)
// is first rotated left, then the whole subtree rotated right; the right-heavy
// branch is symmetric. Already-balanced trees are returned unchanged.
private ImTreeMap<K, V> KeepBalanced()
{
    var delta = Left.Height - Right.Height;
    return delta >= 2 ? With(Left.Right.Height - Left.Left.Height == 1 ? Left.RotateLeft() : Left, Right).RotateRight()
        : (delta <= -2 ? With(Left, Right.Left.Height - Right.Right.Height == 1 ? Right.RotateRight() : Right).RotateLeft()
        : this);
}
// AVL right rotation: the left child becomes the new subtree root and this node
// (with the old left-right subtree attached) becomes its right child.
private ImTreeMap<K, V> RotateRight()
{
    var newRoot = Left;
    return newRoot.With(newRoot.Left, With(newRoot.Right, Right));
}
// AVL left rotation: the right child becomes the new subtree root and this node
// (with the old right-left subtree attached) becomes its left child.
private ImTreeMap<K, V> RotateLeft()
{
    var newRoot = Right;
    return newRoot.With(With(Left, newRoot.Left), newRoot.Right);
}
// Returns this node unchanged when both children are the same references,
// otherwise clones the node with the new children (structure sharing).
private ImTreeMap<K, V> With(ImTreeMap<K, V> left, ImTreeMap<K, V> right)
{
    if (left == Left && right == Right)
        return this;
    return new ImTreeMap<K, V>(Hash, Key, Value, Conflicts, left, right);
}
#endregion
}
}
| |
namespace AngleSharp.Css.Tests.Rules
{
    using AngleSharp.Css.Dom;
    using NUnit.Framework;
    using static CssConstructionFunctions;

    /// <summary>
    /// Tests for parsing and evaluating CSS @supports rules.
    /// </summary>
    [TestFixture]
    public class CssSupportsTests
    {
        // Parses the source, verifies it yields exactly one @supports rule, and checks
        // both the serialized condition text and the condition's evaluation result.
        private static void AssertSupports(string source, string conditionText, bool isSupported)
        {
            var sheet = ParseStyleSheet(source);
            var device = new DefaultRenderDevice();
            Assert.AreEqual(1, sheet.Rules.Length);
            Assert.IsInstanceOf<CssSupportsRule>(sheet.Rules[0]);
            var supports = sheet.Rules[0] as CssSupportsRule;
            Assert.AreEqual(conditionText, supports.ConditionText);
            Assert.AreEqual(isSupported, supports.Condition.Check(device));
        }

        [Test]
        public void SupportsEmptyRule()
        {
            AssertSupports(@"@supports () { }", "()", true);
        }

        [Test]
        public void SupportsBackgroundColorRedRule()
        {
            AssertSupports(@"@supports (background-color: red) { }",
                "(background-color: red)", true);
        }

        [Test]
        public void SupportsBackgroundColorRedAndColorBlueRule()
        {
            AssertSupports(@"@supports ((background-color: red) and (color: blue)) { }",
                "((background-color: red) and (color: blue))", true);
        }

        [Test]
        public void SupportsNotUnsupportedDeclarationRule()
        {
            AssertSupports(@"@supports (not (background-transparency: half)) { }",
                "(not (background-transparency: half))", true);
        }

        [Test]
        public void SupportsUnsupportedDeclarationRule()
        {
            AssertSupports(@"@supports ((background-transparency: zero)) { }",
                "((background-transparency: zero))", false);
        }

        [Test]
        public void SupportsBackgroundRedWithImportantRule()
        {
            AssertSupports(@"@supports (background: red !important) { }",
                "(background: red !important)", true);
        }

        [Test]
        public void SupportsPaddingTopOrPaddingLeftRule()
        {
            // Serialization normalizes spacing around the colon but keeps property casing.
            AssertSupports(@"@supports ((padding-TOP : 0) or (padding-left : 0)) { }",
                "((padding-TOP: 0) or (padding-left: 0))", true);
        }

        [Test]
        public void SupportsPaddingTopOrPaddingLeftAndPaddingBottomOrPaddingRightRule()
        {
            AssertSupports(@"@supports (((padding-top: 0) or (padding-left: 0)) and ((padding-bottom: 0) or (padding-right: 0))) { }",
                "(((padding-top: 0) or (padding-left: 0)) and ((padding-bottom: 0) or (padding-right: 0)))", true);
        }

        [Test]
        public void SupportsDisplayFlexWithImportantRule()
        {
            AssertSupports(@"@supports (display: flex !important) { }",
                "(display: flex !important)", true);
        }

        [Test]
        public void SupportsBareDisplayFlexRule()
        {
            // A declaration without enclosing parentheses is invalid, so the whole
            // @supports rule is dropped by the parser.
            var sheet = ParseStyleSheet(@"@supports display: flex { }");
            Assert.AreEqual(0, sheet.Rules.Length);
        }

        [Test]
        public void SupportsDisplayFlexMultipleBracketsRule()
        {
            AssertSupports(@"@supports ((display: flex)) { }",
                "((display: flex))", true);
        }

        [Test]
        public void SupportsTransitionOrAnimationNameAndTransformFrontBracketRule()
        {
            AssertSupports(@"@supports ((transition-property: color) or
                (animation-name: foo)) and
                (transform: rotate(10deg)) { }",
                "((transition-property: color) or (animation-name: foo)) and (transform: rotate(10deg))", true);
        }

        [Test]
        public void SupportsTransitionOrAnimationNameAndTransformBackBracketRule()
        {
            AssertSupports(@"@supports (transition-property: color) or
                ((animation-name: foo) and
                (transform: rotate(10deg))) { }",
                "(transition-property: color) or ((animation-name: foo) and (transform: rotate(10deg)))", true);
        }

        [Test]
        public void SupportsShadowVendorPrefixesRule()
        {
            AssertSupports(@"@supports ( box-shadow: 0 0 2px black ) or
                ( -moz-box-shadow: 0 0 2px black ) or
                ( -webkit-box-shadow: 0 0 2px black ) or
                ( -o-box-shadow: 0 0 2px black ) { }",
                "(box-shadow: 0 0 2px black) or (-moz-box-shadow: 0 0 2px black) or (-webkit-box-shadow: 0 0 2px black) or (-o-box-shadow: 0 0 2px black)", true);
        }

        [Test]
        public void SupportsNegatedDisplayFlexRuleWithDeclarations()
        {
            // This case also verifies the nested style rules are retained, so it does
            // not go through the shared helper.
            var source = @"@supports not ( display: flex ) {
                body { width: 100%; height: 100%; background: white; color: black; }
                #navigation { width: 25%; }
                #article { width: 75%; }
            }";
            var sheet = ParseStyleSheet(source);
            var device = new DefaultRenderDevice();
            Assert.AreEqual(1, sheet.Rules.Length);
            Assert.IsInstanceOf<CssSupportsRule>(sheet.Rules[0]);
            var supports = sheet.Rules[0] as CssSupportsRule;
            Assert.AreEqual(3, supports.Rules.Length);
            Assert.AreEqual("not (display: flex)", supports.ConditionText);
            Assert.IsFalse(supports.Condition.Check(device));
        }
    }
}
| |
using System;
using flash.display;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using net.flashpunk;
using net.flashpunk.graphics;
using net.flashpunk.tweens.misc;
using net.flashpunk.utils;
namespace net.flashpunk.splash
{
/**
* This object displays the FlashPunk splash screen.
*/
public class Splash : Entity
{
/*
 * Embedded graphics.
 */
// Embedded asset wrappers; each binds a content name to the Embed base type.
class SPLASH_LINES : Embed { public SPLASH_LINES() : base("splash_background.jpg") { } }
class SPLASH_COG : Embed { public SPLASH_COG() : base("splash_cog.png") { } }
class SPLASH_LEFT : Embed { public SPLASH_LEFT() : base("splash_left.png") { } }
class SPLASH_RIGHT : Embed { public SPLASH_RIGHT() : base("splash_right.png") { } }
/*
 * Image objects.
 */
// Composite graphic holding all images; assigned in the constructor.
public Graphiclist list;
// Background lines image; created in the constructor from SPLASH_LINES.
public Image lines;
public Image cog = new Image(new SPLASH_COG());
public Image leftText = new Image(new SPLASH_LEFT());
public Image rightText = new Image(new SPLASH_RIGHT());
// Full-screen rectangle used as the fade in/out cover.
public Image fade = Image.createRect(FP.width, FP.height, 0);
/*
 * Tween information.
 */
// Drives the cog spin and the text reveal (see update()).
public NumTween tween;
// Drives the fade cover's alpha (see update()).
public NumTween fader;
// Resting x positions of the left/right text, captured in the constructor.
public int leftX;
public int rightX;
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class with all defaults
/// (pink cog/text, dark background, 0.5s fades, 2s spin, 0.5s pause, 720 spin units).
/// </summary>
public Splash()
    : this(0xFF3366, 0x202020, 0.5f, 2, 0.5f, 720)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class.
/// </summary>
/// <param name="color">Tint applied to the cog and the left/right text images.</param>
public Splash(int color)
    : this(color, 0x202020, 0.5f, 2, 0.5f, 720)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class.
/// </summary>
/// <param name="color">Tint applied to the cog and the left/right text images.</param>
/// <param name="bgColor">Screen background color (assigned to FP.screen.color).</param>
public Splash(int color, int bgColor)
    : this(color, bgColor, 0.5f, 2, 0.5f, 720)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class.
/// </summary>
/// <param name="color">Tint applied to the cog and the left/right text images.</param>
/// <param name="bgColor">Screen background color (assigned to FP.screen.color).</param>
/// <param name="fadeTime">Duration of each fade tween (in and out).</param>
public Splash(int color, int bgColor, float fadeTime)
    : this(color, bgColor, fadeTime, 2, 0.5f, 720)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class.
/// </summary>
/// <param name="color">Tint applied to the cog and the left/right text images.</param>
/// <param name="bgColor">Screen background color (assigned to FP.screen.color).</param>
/// <param name="fadeTime">Duration of each fade tween (in and out).</param>
/// <param name="spinTime">Duration of the spin tween.</param>
public Splash(int color, int bgColor, float fadeTime, float spinTime)
    : this(color, bgColor, fadeTime, spinTime, 0.5f, 720)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class.
/// </summary>
/// <param name="color">Tint applied to the cog and the left/right text images.</param>
/// <param name="bgColor">Screen background color (assigned to FP.screen.color).</param>
/// <param name="fadeTime">Duration of each fade tween (in and out).</param>
/// <param name="spinTime">Duration of the spin tween.</param>
/// <param name="spinPause">Pause after the spin before fading out (skipped when negative).</param>
public Splash(int color, int bgColor, float fadeTime, float spinTime, float spinPause)
    : this(color, bgColor, fadeTime, spinTime, spinPause, 720)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Splash"/> class.
/// </summary>
/// <param name="color">The color.</param>
/// <param name="bgColor">Color of the bg.</param>
/// <param name="fadeTime">The fade time.</param>
/// <param name="spinTime">The spin time.</param>
/// <param name="spinPause">The spin pause.</param>
/// <param name="spins">The spins.</param>
public Splash(int color, int bgColor, float fadeTime, float spinTime, float spinPause, float spins)
{
    // Create the lines image.
    // NOTE(review): the commented-out block below appears to be the original
    // Flash gradient-generation code; this port loads a pre-rendered image instead.
    //Texture2D data = new Texture2D(FP.graphicsDeviceManager.GraphicsDevice, FP.width, FP.height); // TODO: Fill with 0x353535
    //Graphics g = FP.sprite.graphics;
    //g.clear();
    //g.beginGradientFill(GradientType.RADIAL, new int[] { 0, 0 }, new float[] { 1, 0 }, new byte[] { 0, 255 });
    //g.drawCircle(0, 0, 100);
    //FP.matrix = Matrix.CreateScale(FP.width / 200, FP.height / 200, 0);
    //FP.matrix.Translation = new Vector3(FP.width / 2, FP.height / 2, 0);
    //data.draw(FP.sprite, FP.matrix);
    //g.clear();
    //g.beginBitmapFill(new SPLASH_LINES().bitmapData);
    //g.drawRect(0, 0, FP.width, FP.height);
    //data.draw(FP.sprite);
    lines = new Image(new SPLASH_LINES());
    // Set the entity information.
    // Center the entity on screen; all child image offsets are relative to this.
    x = FP.width / 2;
    y = FP.height / 2;
    graphic = new Graphiclist(leftText, rightText, cog, lines, fade);
    // Set the screen information.
    FP.screen.color = bgColor;
    // Set the lines properties.
    lines.blend = BlendMode.SUBTRACT;
    lines.smooth = true;
    lines.centerOO();
    // Set the big cog properties.
    cog.visible = true;
    cog.color = color;
    cog.smooth = true;
    cog.originX = cog.width / 2;
    cog.originY = cog.height / 2;
    cog.x -= cog.originX;
    cog.y -= cog.originY;
    // Set the left text properties.
    // Anchored at its right edge so it sits just left of the cog.
    leftText.color = color;
    leftText.smooth = true;
    leftText.originX = leftText.width;
    leftText.originY = leftText.height / 2;
    leftText.x -= leftText.originX + cog.width / 4 + 4;
    leftText.y -= leftText.originY;
    leftX = (int)leftText.x;
    // Set the right text properties.
    rightText.color = color;
    rightText.smooth = true;
    rightText.originY = rightText.height / 2;
    rightText.x += cog.width / 4;
    rightText.y -= rightText.originY;
    rightX = (int)rightText.x;
    // Set the fade cover properties.
    // Cancel the entity's centered position so the cover starts at the screen origin.
    fade.x -= x;
    fade.y -= y;
    // Set the timing properties.
    _fadeTime = fadeTime;
    _spinTime = spinTime;
    _spinPause = spinPause;
    _spins = spins;
    // Add the tweens.
    tween = new NumTween(tweenEnd);
    fader = new NumTween(faderEnd);
    addTween(tween);
    addTween(fader);
    // Make invisible until you start it.
    visible = false;
}
/// <summary>
/// Start the splash screen.
/// </summary>
public void start()
{
    // Reveal the entity and begin the fade-in; the rest of the sequence is chained
    // through the tween completion callbacks (faderEnd / tweenEnd).
    visible = true;
    fadeIn();
}
/// <summary>
/// Start the splash screen.
/// </summary>
/// <param name="onCompleteFunction">The on complete function.</param>
public void start(Action onCompleteFunction)
{
    // Remember the callback to invoke when the splash finishes, then run normally.
    _onCompleteFunction = onCompleteFunction;
    start();
}
/// <summary>
/// Start the splash screen.
/// </summary>
/// <param name="onCompleteWorld">The on complete world.</param>
public void start(World onCompleteWorld)
{
    // Remember the world to switch to when the splash finishes, then run normally.
    _onCompleteWorld = onCompleteWorld;
    start();
}
/// <summary>
/// Update the splash screen.
/// </summary>
override public void update()
{
    // In case the phone has been rotated.
    x = FP.width / 2;
    y = FP.height / 2;
    // Text scaling/positioning.
    // t is the remaining tween distance (presumably tween.scale runs 0 -> 1 as the
    // tween progresses — TODO confirm); the text slides in from the screen edges.
    float t = 1 - tween.scale;
    leftText.x = leftX - t * FP.width / 2;
    rightText.x = rightX + t * FP.width / 2;
    leftText.scaleY = rightText.scaleY = tween.scale;
    leftText.alpha = rightText.alpha = Ease.cubeIn(tween.scale);
    // Cog rotation/positioning.
    cog.angle = tween.scale <= 1 ? tween.value : tween.value * 2;
    cog.scale = 2.5f - tween.scale * 2;
    cog.alpha = tween.scale;
    // Fade in/out alpha control.
    fade.alpha = fader.value;
    // Pause before fade out.
    // _spinWait is decremented per frame when FP.isFixed, otherwise by elapsed time.
    if (_spinWait > 0)
    {
        _spinWait -= FP.isFixed ? 1 : FP.elapsed;
        if (_spinWait <= 0) fadeOut();
    }
}
/// <summary>
/// When the fade tween completes.
/// </summary>
private void faderEnd()
{
    // fadeIn() tweens the cover 1 -> 0, so a final value of 0 means the fade-in just
    // finished: start the spin tween (_spins down to 0). Otherwise the fade-out
    // (0 -> 1) finished and the splash is complete.
    if (fader.value == 0) tween.tween(_spins, 0, _spinTime, Ease.backOut);
    else splashEnd();
}
/// <summary>
/// When the tween completes.
/// </summary>
private void tweenEnd()
{
    // After the spin completes: either arm the pause counter (consumed in update())
    // or fade out immediately when no pause was requested (negative _spinPause).
    if (_spinPause >= 0) _spinWait = _spinPause;
    else fadeOut();
}
/// <summary>
/// When the splash screen has completed.
/// </summary>
// Invokes the completion callback, or switches to the completion world; a splash
// started with neither simply ends with no follow-up action.
private void splashEnd()
{
    // FIX: the original ended with `else throw new Exception(...)`, which was
    // unreachable — the first branch already returned when both were null.
    // Behavior is unchanged; the dead throw is removed.
    if (_onCompleteFunction != null) _onCompleteFunction();
    else if (_onCompleteWorld != null) FP.world = _onCompleteWorld;
}
/// <summary>
/// Fades the splash screen in.
/// </summary>
private void fadeIn()
{
    // Tween the cover alpha from opaque (1) to transparent (0).
    fader.tween(1, 0, _fadeTime, Ease.cubeOut);
}
/// <summary>
/// Fades the splash screen out.
/// </summary>
private void fadeOut()
{
    // Tween the cover alpha from transparent (0) back to opaque (1).
    fader.tween(0, 1, _fadeTime, Ease.cubeIn);
}
/**
 * Fade in/out time and logo spinning time.
 */
// Duration of each fade tween.
private float _fadeTime;
// Duration of the spin tween.
private float _spinTime;
// Starting value of the spin tween (tweened down to 0).
private float _spins;
// Pause after the spin before fading out; negative skips the pause.
private float _spinPause;
// Remaining pause, decremented in update(); frames when FP.isFixed, else seconds.
private float _spinWait = 0;
// At most one of these is acted on when the splash ends (see splashEnd()).
private Action _onCompleteFunction;
private World _onCompleteWorld;
}
}
| |
/***************************************************************************
* PreferencesDialog.cs
*
* Copyright (C) 2008 Novell, Inc.
* Written by Scott Reeves <sreeves@gmail.com>
****************************************************************************/
/* THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using Gtk;
using Mono.Unix;
using Tasque;
using CollectionTransforms;
using System.Diagnostics;
namespace Tasque
{
public class PreferencesDialog : Gtk.Dialog
{
// private CheckButton showCompletedTasksCheck;
Gtk.Notebook notebook;
//
// General Page Widgets
//
Gtk.Widget generalPage;
int generalPageId;
Gtk.ComboBox backendComboBox;
Dictionary<int, Backend> backendComboMap; // track backends
int selectedBackend;
Gtk.CheckButton showCompletedTasksCheckButton;
CollectionView<Category> filteredCategories;
List<string> categoriesToHide;
Gtk.TreeView categoriesTree;
//
// Appearance Page Widgets
//
Gtk.Widget appearancePage;
Gtk.Entry txtTodaysTaskColor;
Gtk.ColorButton btnChangeTodaysTaskColor;
Gtk.Entry txtOverdueTaskColor;
Gtk.ColorButton btnChangeOverdueTaskColor;
//
// Backend Page Widgets
//
Gtk.Widget backendPage;
int backendPageId;
public PreferencesDialog() : base ()
{
    // Load persisted settings first so Init() can reflect them in the widgets.
    LoadPreferences();
    Init();
    ConnectEvents();
    // OnShown is defined elsewhere in this class (not in view here).
    Shown += OnShown;
    this.WidthRequest = 400;
    this.HeightRequest = 350;
}
protected override void OnResponse (ResponseType response_id)
{
    base.OnResponse (response_id);
    // Hide the dialog on any response instead of destroying it.
    Hide ();
}
// Builds the dialog: window chrome, the notebook, and its General / Appearance
// pages, plus an optional page supplied by the active backend.
private void Init()
{
    Debug.WriteLine("Called Preferences Init");
    this.Icon = Utilities.GetIcon ("tasque-16", 16);
    // Update the window title
    this.Title = string.Format (Catalog.GetString ("Tasque Preferences"));
    this.VBox.Spacing = 0;
    this.VBox.BorderWidth = 0;
    this.Resizable = false;
    this.AddButton(Stock.Close, Gtk.ResponseType.Ok);
    this.DefaultResponse = ResponseType.Ok;
    notebook = new Gtk.Notebook ();
    notebook.ShowTabs = true;
    //
    // General Page
    //
    generalPage = MakeGeneralPage ();
    generalPage.Show ();
    generalPageId =
        notebook.AppendPage (generalPage,
            new Label (Catalog.GetString ("General")));
    //
    // Appearance Page
    //
    appearancePage = MakeAppearancePage ();
    appearancePage.Show ();
    notebook.AppendPage (appearancePage,
        new Label (Catalog.GetString ("Appearance")));
    //
    // Backend Page
    //
    // Only shown when the current backend exposes a preferences widget;
    // backendPageId stays -1 otherwise (checked in OnBackendComboBoxChanged).
    backendPage = null;
    backendPageId = -1;
    if (GtkApplication.Instance.Backend != null) {
        backendPage = (Widget)GtkApplication.Instance.Backend.Preferences;
        if (backendPage != null) {
            backendPage.Show ();
            Label l =
                new Label (GLib.Markup.EscapeText (GtkApplication.Instance.Backend.Name));
            l.UseMarkup = false;
            l.UseUnderline = false;
            l.Show ();
            backendPageId =
                notebook.AppendPage (backendPage, l);
        }
    }
    notebook.Show ();
    this.VBox.PackStart (notebook, true, true, 0);
    DeleteEvent += WindowDeleted;
}
// Builds the "Appearance" page: two rows (today / overdue task colors), each with
// a label, a hex text entry, and a color-picker button kept in sync via events.
private Gtk.Widget MakeAppearancePage ()
{
    VBox vbox = new VBox (false, 6);
    vbox.BorderWidth = 10;
    VBox sectionVBox = new VBox (false, 4);
    Label l = new Label ();
    l.Markup = string.Format ("<span size=\"large\" weight=\"bold\">{0}</span>",
        Catalog.GetString ("Color Management"));
    l.UseUnderline = false;
    l.UseMarkup = true;
    l.Wrap = false;
    l.Xalign = 0;
    l.Show ();
    sectionVBox.PackStart (l, false, false, 0);
    // "Today" color row.
    HBox hbox = new HBox (false, 6);
    Label lblTodaysTaskColor = new Label ();
    lblTodaysTaskColor.Text = Catalog.GetString ("Today:");
    lblTodaysTaskColor.Xalign = 0;
    lblTodaysTaskColor.WidthRequest = 75;
    lblTodaysTaskColor.Show ();
    Preferences prefs = GtkApplication.Instance.Preferences;
    txtTodaysTaskColor = new Entry();
    txtTodaysTaskColor.Text = prefs.Get (Preferences.TodayTaskTextColor);
    txtTodaysTaskColor.Changed += OnTxtTodaysTaskColorChanged;
    txtTodaysTaskColor.Show ();
    btnChangeTodaysTaskColor = new ColorButton();
    string todayTasksColor = prefs.Get (Preferences.TodayTaskTextColor);
    Gdk.Color currentColor = new Gdk.Color();
    Gdk.Color.Parse (todayTasksColor, ref currentColor);
    btnChangeTodaysTaskColor.Color = currentColor;
    btnChangeTodaysTaskColor.ColorSet += OnBtnChangeTodaysTaskColorColorSet;
    btnChangeTodaysTaskColor.Show ();
    hbox.PackStart (lblTodaysTaskColor, false, false, 0);
    hbox.PackStart (txtTodaysTaskColor, false, false, 0);
    hbox.PackStart (btnChangeTodaysTaskColor, false, false, 0);
    hbox.Show ();
    // "Overdue" color row (same structure as the row above).
    HBox hbox2 = new HBox (false, 6);
    Label lblOverdueTaskColor = new Label ();
    lblOverdueTaskColor.Text = Catalog.GetString ("Overdue:");
    lblOverdueTaskColor.WidthRequest = 75;
    lblOverdueTaskColor.Xalign = 0;
    lblOverdueTaskColor.Show ();
    txtOverdueTaskColor = new Entry();
    txtOverdueTaskColor.Text = prefs.Get (Preferences.OverdueTaskTextColor);
    txtOverdueTaskColor.Changed += OnTxtOverdueTaskColorChanged;
    txtOverdueTaskColor.Show ();
    btnChangeOverdueTaskColor = new ColorButton();
    string overdueTasksColor = prefs.Get (Preferences.OverdueTaskTextColor);
    Gdk.Color overdueColor = new Gdk.Color();
    Gdk.Color.Parse (overdueTasksColor, ref overdueColor);
    btnChangeOverdueTaskColor.Color = overdueColor;
    btnChangeOverdueTaskColor.ColorSet += OnBtnChangeOverdueTaskColorColorSet;
    btnChangeOverdueTaskColor.Show();
    hbox2.PackStart (lblOverdueTaskColor, false, false, 0);
    hbox2.PackStart (txtOverdueTaskColor, false, false, 0);
    hbox2.PackStart (btnChangeOverdueTaskColor, false, false, 0);
    hbox2.Show ();
    sectionVBox.PackStart (hbox, false, false, 0);
    sectionVBox.PackStart (hbox2, false, false, 0);
    sectionVBox.Show();
    vbox.PackStart (sectionVBox, false, false, 0);
    return vbox;
}
// Builds the "General" page: the backend selection combo box and the task
// filtering section (show-completed toggle plus the category visibility tree).
private Gtk.Widget MakeGeneralPage ()
{
    VBox vbox = new VBox (false, 6);
    vbox.BorderWidth = 10;
    //
    // Task Management System
    //
    VBox sectionVBox = new VBox (false, 4);
    Label l = new Label ();
    l.Markup = string.Format ("<span size=\"large\" weight=\"bold\">{0}</span>",
        Catalog.GetString ("Task Management System"));
    l.UseUnderline = false;
    l.UseMarkup = true;
    l.Wrap = false;
    l.Xalign = 0;
    l.Show ();
    sectionVBox.PackStart (l, false, false, 0);
    backendComboBox = ComboBox.NewText ();
    backendComboMap = new Dictionary<int,Backend> ();
    // Fill out the ComboBox
    // Combo index -> Backend mapping is kept in backendComboMap; the currently
    // active backend (if any) pre-selects its entry.
    int i = 0;
    selectedBackend = -1;
    foreach (Backend backend in GtkApplication.Instance.AvailableBackends) {
        backendComboBox.AppendText (backend.Name);
        backendComboMap [i] = backend;
        if (backend == GtkApplication.Instance.Backend)
            selectedBackend = i;
        i++;
    }
    if (selectedBackend >= 0)
        backendComboBox.Active = selectedBackend;
    backendComboBox.Changed += OnBackendComboBoxChanged;
    backendComboBox.Show ();
    HBox hbox = new HBox (false, 6);
    l = new Label (string.Empty); // spacer
    l.Show ();
    hbox.PackStart (l, false, false, 0);
    hbox.PackStart (backendComboBox, false, false, 0);
    hbox.Show ();
    sectionVBox.PackStart (hbox, false, false, 0);
    sectionVBox.Show ();
    vbox.PackStart (sectionVBox, false, false, 0);
    //
    // Task Filtering
    //
    sectionVBox = new VBox (false, 4);
    l = new Label ();
    l.Markup = string.Format ("<span size=\"large\" weight=\"bold\">{0}</span>",
        Catalog.GetString ("Task Filtering"));
    l.UseUnderline = false;
    l.UseMarkup = true;
    l.Wrap = false;
    l.Xalign = 0;
    l.Show ();
    sectionVBox.PackStart (l, false, false, 0);
    HBox sectionHBox = new HBox (false, 6);
    l = new Label (string.Empty); // spacer
    l.Show ();
    sectionHBox.PackStart (l, false, false, 0);
    VBox innerSectionVBox = new VBox (false, 6);
    hbox = new HBox (false, 6);
    bool showCompletedTasks = GtkApplication.Instance.Preferences.GetBool (
        Preferences.ShowCompletedTasksKey);
    showCompletedTasksCheckButton =
        new CheckButton (Catalog.GetString ("Sh_ow completed tasks"));
    showCompletedTasksCheckButton.UseUnderline = true;
    showCompletedTasksCheckButton.Active = showCompletedTasks;
    showCompletedTasksCheckButton.Show ();
    hbox.PackStart (showCompletedTasksCheckButton, true, true, 0);
    hbox.Show ();
    innerSectionVBox.PackStart (hbox, false, false, 0);
    // Categories TreeView
    // Toggle and text cells are rendered through the data funcs defined below.
    l = new Label (Catalog.GetString ("Only _show these categories when \"All\" is selected:"));
    l.UseUnderline = true;
    l.Xalign = 0;
    l.Show ();
    innerSectionVBox.PackStart (l, false, false, 0);
    ScrolledWindow sw = new ScrolledWindow ();
    sw.HscrollbarPolicy = PolicyType.Automatic;
    sw.VscrollbarPolicy = PolicyType.Automatic;
    sw.ShadowType = ShadowType.EtchedIn;
    categoriesTree = new TreeView ();
    categoriesTree.Selection.Mode = SelectionMode.None;
    categoriesTree.RulesHint = false;
    categoriesTree.HeadersVisible = false;
    l.MnemonicWidget = categoriesTree;
    Gtk.TreeViewColumn column = new Gtk.TreeViewColumn ();
    column.Title = Catalog.GetString ("Category");
    column.Sizing = Gtk.TreeViewColumnSizing.Autosize;
    column.Resizable = false;
    Gtk.CellRendererToggle toggleCr = new CellRendererToggle ();
    toggleCr.Toggled += OnCategoryToggled;
    column.PackStart (toggleCr, false);
    column.SetCellDataFunc (toggleCr,
        new Gtk.TreeCellDataFunc (ToggleCellDataFunc));
    Gtk.CellRendererText textCr = new CellRendererText ();
    column.PackStart (textCr, true);
    column.SetCellDataFunc (textCr,
        new Gtk.TreeCellDataFunc (TextCellDataFunc));
    categoriesTree.AppendColumn (column);
    categoriesTree.Show ();
    sw.Add (categoriesTree);
    sw.Show ();
    innerSectionVBox.PackStart (sw, true, true, 0);
    innerSectionVBox.Show ();
    sectionHBox.PackStart (innerSectionVBox, true, true, 0);
    sectionHBox.Show ();
    sectionVBox.PackStart (sectionHBox, true, true, 0);
    sectionVBox.Show ();
    vbox.PackStart (sectionVBox, true, true, 0);
    return vbox;
}
///<summary>
/// Handler for the dialog's DeleteEvent; currently a stub.
/// NOTE(review): the previous summary mentioned cleaning up a "conversation object
/// with the ConversationManager", which does not exist in this class — it looked
/// like a copy-paste from another dialog.
///</summary>
private void WindowDeleted (object sender, DeleteEventArgs args)
{
    // Save preferences
}
// Loads the list of categories hidden from the "All" view from the preferences store.
private void LoadPreferences()
{
    Debug.WriteLine("Loading preferences");
    categoriesToHide =
        GtkApplication.Instance.Preferences.GetStringList (Preferences.HideInAllCategory);
    //if (categoriesToHide == null || categoriesToHide.Count == 0)
    //	categoriesToHide = BuildNewCategoryList ();
}
// Wires widget events that persist settings immediately when changed.
private void ConnectEvents()
{
    // showCompletedTasksCheckbox delegate
    // Persist the toggle state as soon as the user flips it.
    showCompletedTasksCheckButton.Toggled += delegate {
        GtkApplication.Instance.Preferences.SetBool (
            Preferences.ShowCompletedTasksKey,
            showCompletedTasksCheckButton.Active);
    };
}
// Mirrors the color picked in the button into the hex text entry (which in turn
// persists it via OnTxtTodaysTaskColorChanged).
private void OnBtnChangeTodaysTaskColorColorSet (object sender, EventArgs args)
{
    // FIX: use the invariant culture for the case conversion — the hex string is
    // machine-readable data, not UI text (culture-sensitive ToUpper is CA1304/CA1311).
    txtTodaysTaskColor.Text =
        Utilities.ColorGetHex (btnChangeTodaysTaskColor.Color).ToUpperInvariant ();
}
// Persists the today-task color whenever the text entry changes.
private void OnTxtTodaysTaskColorChanged (object sender, EventArgs args)
{
    // Save the user preference
    GtkApplication.Instance.Preferences.Set (Preferences.TodayTaskTextColor,
        ((Entry) sender).Text);
}
// Mirrors the color picked in the button into the hex text entry (which in turn
// persists it via OnTxtOverdueTaskColorChanged).
private void OnBtnChangeOverdueTaskColorColorSet(object sender, EventArgs args)
{
    // FIX: use the invariant culture for the case conversion — the hex string is
    // machine-readable data, not UI text (culture-sensitive ToUpper is CA1304/CA1311).
    txtOverdueTaskColor.Text =
        Utilities.ColorGetHex (btnChangeOverdueTaskColor.Color).ToUpperInvariant ();
}
// Persists the overdue-task color whenever the text entry changes.
private void OnTxtOverdueTaskColorChanged (object sender, EventArgs args)
{
    // Save the user preference
    GtkApplication.Instance.Preferences.Set (Preferences.OverdueTaskTextColor,
        ((Entry) sender).Text);
}
// Handles a backend selection change: tears down the old backend (and its prefs
// page), activates the newly selected one, adds its prefs page if it has one,
// and persists the choice.
private void OnBackendComboBoxChanged (object sender, EventArgs args)
{
    if (selectedBackend >= 0) {
        // TODO: Prompt the user and make sure they really want to change
        // which backend they are using.

        // Remove the existing backend's preference page
        if (backendPageId >= 0) {
            notebook.RemovePage (backendPageId);
            backendPageId = -1;
            backendPage = null;
        }

        // if yes (replace backend)
        if (backendComboMap.ContainsKey (selectedBackend)) {
            // Cleanup old backend
            Backend oldBackend = backendComboMap [selectedBackend];
            Trace.TraceInformation ("Cleaning up '{0}'...", oldBackend.Name);
            try {
                oldBackend.Dispose ();
            } catch (Exception e) {
                // BUG FIX: the format string used placeholder {2} with only two
                // arguments, which throws FormatException instead of logging.
                Trace.TraceWarning ("Exception cleaning up '{0}': {1}",
                    oldBackend.Name,
                    e.Message);
            }
            selectedBackend = -1;
        }
    }
    Backend newBackend = null;
    if (backendComboMap.ContainsKey (backendComboBox.Active)) {
        newBackend = backendComboMap [backendComboBox.Active];
    }
    // TODO: Set the new backend
    GtkApplication.Instance.Backend = newBackend;
    if (newBackend == null)
        return;
    selectedBackend = backendComboBox.Active;
    // Add a backend prefs page if one exists
    backendPage = (Widget)newBackend.Preferences;
    if (backendPage != null) {
        backendPage.Show ();
        Label l = new Label (GLib.Markup.EscapeText (newBackend.Name));
        l.UseMarkup = false;
        l.UseUnderline = false;
        l.Show ();
        backendPageId =
            notebook.AppendPage (backendPage, l);
        // If the new backend is not configured, automatically switch
        // to the backend's preferences page
        if (!newBackend.Configured)
            notebook.Page = backendPageId;
    }
    // Save the user preference
    GtkApplication.Instance.Preferences.Set (Preferences.CurrentBackend,
        newBackend.GetType ().ToString ());
    //categoriesToHide = BuildNewCategoryList ();
    //GtkApplication.Instance.Preferences.SetStringList (Preferences.HideInAllCategory,
    //		categoriesToHide);
    RebuildCategoryTree ();
}
/// <summary>
/// Cell data func for the "visible" checkbox column: the toggle is checked
/// unless the row's category is explicitly listed in categoriesToHide.
/// Rows without a Category, or an empty/unset hide list, always show checked.
/// </summary>
private void ToggleCellDataFunc (Gtk.TreeViewColumn column,
                                 Gtk.CellRenderer cell,
                                 Gtk.TreeModel model,
                                 Gtk.TreeIter iter)
{
    Gtk.CellRendererToggle toggle = cell as Gtk.CellRendererToggle;
    Category category = model.GetValue (iter, 0) as Category;

    // Hidden only when we have a real category, a non-empty hide list,
    // and that list names this category.
    bool hidden = category != null
        && categoriesToHide != null
        && categoriesToHide.Count > 0
        && categoriesToHide.Contains (category.Name);

    toggle.Active = !hidden;
}
/// <summary>
/// Cell data func for the category name column: renders the category name
/// (markup-escaped, end-ellipsized), or an empty string for non-Category rows.
/// </summary>
private void TextCellDataFunc (Gtk.TreeViewColumn treeColumn,
                               Gtk.CellRenderer renderer, Gtk.TreeModel model,
                               Gtk.TreeIter iter)
{
    Gtk.CellRendererText textCell = renderer as Gtk.CellRendererText;
    textCell.Ellipsize = Pango.EllipsizeMode.End;

    Category category = model.GetValue (iter, 0) as Category;
    textCell.Text = category == null
        ? string.Empty
        : GLib.Markup.EscapeText (category.Name);
}
/// <summary>
/// Toggles whether the clicked category is hidden in the "All" view and
/// persists the updated hide list to preferences.
/// </summary>
void OnCategoryToggled (object sender, Gtk.ToggledArgs args)
{
    Debug.WriteLine ("OnCategoryToggled");
    Gtk.TreeIter iter;
    Gtk.TreePath path = new Gtk.TreePath (args.Path);
    if (!categoriesTree.Model.GetIter (out iter, path))
        return; // Do nothing

    Category category = categoriesTree.Model.GetValue (iter, 0) as Category;
    if (category == null)
        return;

    // FIX: guard against a null hide list — the original dereferenced
    // categoriesToHide unconditionally (the disabled guard above it shows
    // this was a known hole) and crashed when no list had been stored yet.
    if (categoriesToHide == null)
        categoriesToHide = new List<string> ();

    if (categoriesToHide.Contains (category.Name))
        categoriesToHide.Remove (category.Name);
    else
        categoriesToHide.Add (category.Name);

    GtkApplication.Instance.Preferences.SetStringList (Preferences.HideInAllCategory,
        categoriesToHide);
}
/*
/// <summary>
/// Build a new category list setting from all the categories
/// </summary>
/// <param name="?">
/// A <see cref="System.String"/>
/// </param>
List<string> BuildNewCategoryList ()
{
List<string> list = new List<string> ();
TreeModel model;
IBackend backend = GtkApplication.Instance.Backend;
if (backend == null)
return list;
model = backend.Categories;
Gtk.TreeIter iter;
if (model.GetIterFirst (out iter) == false)
return list;
do {
ICategory cat = model.GetValue (iter, 0) as ICategory;
if (cat == null || cat is AllCategory)
continue;
list.Add (cat.Name);
} while (model.IterNext (ref iter) == true);
return list;
}
*/
/// <summary>
/// Rebuilds the category tree model from the currently selected backend's
/// categories. Clears the model when no backend is selected.
/// </summary>
void RebuildCategoryTree ()
{
    // No selected backend: nothing to display.
    if (!backendComboMap.ContainsKey (selectedBackend)) {
        categoriesTree.Model = null;
        return;
    }

    Backend backend = backendComboMap [selectedBackend];
    ListCollectionView<Category> view =
        new ListCollectionView<Category> (backend.Categories);
    view.IsObserving = true;
    // NOTE(review): despite the original "Filter out the AllCategory"
    // comment, this predicate only drops null entries.
    view.Filter = c => c != null;

    filteredCategories = view;
    categoriesTree.Model = new TreeModelListAdapter<Category> (filteredCategories);
}
// Rebuild the category list every time the dialog is shown so it reflects
// the currently selected backend's categories.
void OnShown (object sender, EventArgs args)
{
    RebuildCategoryTree ();
}
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="HtmlShimManager.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Windows.Forms {
using System;
using System.Collections.Generic;
using System.Collections;
using System.Diagnostics;
/// <devdoc>
/// HtmlShimManager - this class manages the shims for HtmlWindows, HtmlDocuments, and HtmlElements.
/// essentially we need a long-lasting object to call back on events from the web browser, and the
/// manager is the one in charge of making sure this list stays around as long as needed.
///
/// When a HtmlWindow unloads we prune our list of corresponding document, window, and element shims.
///
/// </devdoc>
/// <devdoc>
/// HtmlShimManager - this class manages the shims for HtmlWindows, HtmlDocuments, and HtmlElements.
/// essentially we need a long-lasting object to call back on events from the web browser, and the
/// manager is the one in charge of making sure this list stays around as long as needed.
///
/// When a HtmlWindow unloads we prune our list of corresponding document, window, and element shims.
/// </devdoc>
internal sealed class HtmlShimManager : IDisposable {

    // Lazily-created lookup tables from each public wrapper to its shim.
    private Dictionary<HtmlWindow, HtmlWindow.HtmlWindowShim> htmlWindowShims;
    private Dictionary<HtmlElement, HtmlElement.HtmlElementShim> htmlElementShims;
    private Dictionary<HtmlDocument, HtmlDocument.HtmlDocumentShim> htmlDocumentShims;

    internal HtmlShimManager() {
    }

    /// <devdoc> AddDocumentShim - adds a HtmlDocumentShim to list of shims to manage.
    /// Can create a WindowShim as a side effect (via OnShimAdded) so it knows when
    /// to self prune from the list.
    /// </devdoc>
    public void AddDocumentShim(HtmlDocument doc) {
        HtmlDocument.HtmlDocumentShim shim = null;

        if (htmlDocumentShims == null) {
            htmlDocumentShims = new Dictionary<HtmlDocument, HtmlDocument.HtmlDocumentShim>();
        }
        // Only create a shim the first time this document is seen.
        if (!htmlDocumentShims.ContainsKey(doc)) {
            shim = new HtmlDocument.HtmlDocumentShim(doc);
            htmlDocumentShims[doc] = shim;
        }
        if (shim != null) {
            OnShimAdded(shim);
        }
    }

    /// <devdoc> AddWindowShim - adds a HtmlWindowShim to list of shims to manage.
    /// </devdoc>
    public void AddWindowShim(HtmlWindow window) {
        HtmlWindow.HtmlWindowShim shim = null;

        if (htmlWindowShims == null) {
            htmlWindowShims = new Dictionary<HtmlWindow, HtmlWindow.HtmlWindowShim>();
        }
        if (!htmlWindowShims.ContainsKey(window)) {
            shim = new HtmlWindow.HtmlWindowShim(window);
            htmlWindowShims[window] = shim;
        }
        if (shim != null) {
            // strictly not necessary, but here for future use.
            OnShimAdded(shim);
        }
    }

    /// <devdoc> AddElementShim - adds a HtmlElementShim to list of shims to manage.
    /// Can create a WindowShim as a side effect (via OnShimAdded) so it knows when
    /// to self prune from the list.
    /// </devdoc>
    public void AddElementShim(HtmlElement element) {
        HtmlElement.HtmlElementShim shim = null;

        if (htmlElementShims == null) {
            htmlElementShims = new Dictionary<HtmlElement, HtmlElement.HtmlElementShim>();
        }
        if (!htmlElementShims.ContainsKey(element)) {
            shim = new HtmlElement.HtmlElementShim(element);
            htmlElementShims[element] = shim;
        }
        if (shim != null) {
            OnShimAdded(shim);
        }
    }

    /// Returns the shim for the given document, or null if none is registered.
    internal HtmlDocument.HtmlDocumentShim GetDocumentShim(HtmlDocument document) {
        if (htmlDocumentShims == null) {
            return null;
        }
        // Single dictionary lookup (the original did ContainsKey + indexer).
        HtmlDocument.HtmlDocumentShim shim;
        htmlDocumentShims.TryGetValue(document, out shim);
        return shim;
    }

    /// Returns the shim for the given element, or null if none is registered.
    internal HtmlElement.HtmlElementShim GetElementShim(HtmlElement element) {
        if (htmlElementShims == null) {
            return null;
        }
        HtmlElement.HtmlElementShim shim;
        htmlElementShims.TryGetValue(element, out shim);
        return shim;
    }

    /// Returns the shim for the given window, or null if none is registered.
    internal HtmlWindow.HtmlWindowShim GetWindowShim(HtmlWindow window) {
        if (htmlWindowShims == null) {
            return null;
        }
        HtmlWindow.HtmlWindowShim shim;
        htmlWindowShims.TryGetValue(window, out shim);
        return shim;
    }

    private void OnShimAdded(HtmlShim addedShim) {
        Debug.Assert(addedShim != null, "Why are we calling this with a null shim?");
        if (addedShim != null) {
            if (!(addedShim is HtmlWindow.HtmlWindowShim)) {
                // we need to add a window shim here for documents and elements
                // so we can sync Window.Unload. The window shim itself will trap
                // the unload event and call back on us on OnWindowUnloaded. When
                // that happens we know we can free all our ptrs to COM.
                AddWindowShim(new HtmlWindow(this, addedShim.AssociatedWindow));
            }
        }
    }

    /// <devdoc>
    /// HtmlWindowShim calls back on us when it has unloaded the page. At this point we need to
    /// walk through our lists and make sure we've cleaned up
    /// </devdoc>
    internal void OnWindowUnloaded(HtmlWindow unloadedWindow) {
        Debug.Assert(unloadedWindow != null, "Why are we calling this with a null window?");
        if (unloadedWindow != null) {
            //
            // prune documents belonging to the unloaded window.
            // Copy the values first because Remove() mutates the dictionary
            // while we iterate.
            //
            if (htmlDocumentShims != null) {
                HtmlDocument.HtmlDocumentShim[] docShims = new HtmlDocument.HtmlDocumentShim[htmlDocumentShims.Count];
                htmlDocumentShims.Values.CopyTo(docShims, 0);
                foreach (HtmlDocument.HtmlDocumentShim shim in docShims) {
                    if (shim.AssociatedWindow == unloadedWindow.NativeHtmlWindow) {
                        htmlDocumentShims.Remove(shim.Document);
                        shim.Dispose();
                    }
                }
            }
            //
            // prune elements belonging to the unloaded window (same copy pattern).
            //
            if (htmlElementShims != null) {
                HtmlElement.HtmlElementShim[] elemShims = new HtmlElement.HtmlElementShim[htmlElementShims.Count];
                htmlElementShims.Values.CopyTo(elemShims, 0);
                foreach (HtmlElement.HtmlElementShim shim in elemShims) {
                    if (shim.AssociatedWindow == unloadedWindow.NativeHtmlWindow) {
                        htmlElementShims.Remove(shim.Element);
                        shim.Dispose();
                    }
                }
            }
            //
            // prune the particular window from the list.
            //
            if (htmlWindowShims != null) {
                HtmlWindow.HtmlWindowShim windowShim;
                if (htmlWindowShims.TryGetValue(unloadedWindow, out windowShim)) {
                    htmlWindowShims.Remove(unloadedWindow);
                    windowShim.Dispose();
                }
            }
        }
    }

    public void Dispose() {
        Dispose(true);
        // A finalizer exists below; once disposed deterministically it no
        // longer needs to run.
        GC.SuppressFinalize(this);
    }

    private void Dispose(bool disposing) {
        if (disposing) {
            if (htmlElementShims != null) {
                foreach (HtmlElement.HtmlElementShim shim in htmlElementShims.Values) {
                    shim.Dispose();
                }
            }
            if (htmlDocumentShims != null) {
                foreach (HtmlDocument.HtmlDocumentShim shim in htmlDocumentShims.Values) {
                    shim.Dispose();
                }
            }
            if (htmlWindowShims != null) {
                foreach (HtmlWindow.HtmlWindowShim shim in htmlWindowShims.Values) {
                    shim.Dispose();
                }
            }
            htmlWindowShims = null;
            htmlDocumentShims = null;
            // FIX: the original nulled htmlWindowShims twice and never nulled
            // htmlElementShims, keeping the element map alive after Dispose.
            htmlElementShims = null;
        }
    }

    ~HtmlShimManager() {
        Dispose(false);
    }
}
}
| |
namespace DataAccessLayer.Migrations
{
using System;
using System.Data.Entity.Migrations;
/// <summary>
/// Initial Entity Framework code-first migration. Creates the application
/// schema: question/answer tables (Answers, QuestionClaims, Designations),
/// ASP.NET Identity tables (AspNetUsers, AspNetUserClaims, AspNetUserLogins,
/// AspNetUserRoles, AspNetRoles), organizational tables (Levels, Universities,
/// Sessions, Classes, Schools, Clusters, Cities) and many-to-many join tables.
/// NOTE: tool-generated migration — statement order is significant; do not
/// reorder by hand.
/// </summary>
public partial class InitialSchemaMigration : DbMigration
{
    /// <summary>
    /// Applies the migration: creates all tables, primary keys, foreign keys
    /// and indexes for the initial schema.
    /// </summary>
    public override void Up()
    {
        // Answers: an answer value tied to an optional question and user.
        CreateTable(
            "dbo.Answers",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    AnswerValue = c.String(),
                    Question_Id = c.Int(),
                    User_Id = c.String(maxLength: 128),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.QuestionClaims", t => t.Question_Id)
            .ForeignKey("dbo.AspNetUsers", t => t.User_Id)
            .Index(t => t.Question_Id)
            .Index(t => t.User_Id);

        // QuestionClaims: a typed question optionally scoped to a designation.
        CreateTable(
            "dbo.QuestionClaims",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    QuestionType = c.String(),
                    Question = c.String(),
                    Designation_Id = c.Int(),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Designations", t => t.Designation_Id)
            .Index(t => t.Designation_Id);

        CreateTable(
            "dbo.Designations",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    Name = c.String(),
                })
            .PrimaryKey(t => t.Id);

        // AspNetUsers: ASP.NET Identity user table extended with app-specific
        // columns (DateOfBirth, CNIC, Session_Id, ...).
        CreateTable(
            "dbo.AspNetUsers",
            c => new
                {
                    Id = c.String(nullable: false, maxLength: 128),
                    DateOfBirth = c.DateTime(nullable: false),
                    DateOfJoining = c.DateTime(nullable: false),
                    CNIC = c.String(),
                    ParentCNIC = c.String(),
                    EmergencyPhoneNumber = c.String(),
                    Email = c.String(maxLength: 256),
                    EmailConfirmed = c.Boolean(nullable: false),
                    PasswordHash = c.String(),
                    SecurityStamp = c.String(),
                    PhoneNumber = c.String(),
                    PhoneNumberConfirmed = c.Boolean(nullable: false),
                    TwoFactorEnabled = c.Boolean(nullable: false),
                    LockoutEndDateUtc = c.DateTime(),
                    LockoutEnabled = c.Boolean(nullable: false),
                    AccessFailedCount = c.Int(nullable: false),
                    UserName = c.String(nullable: false, maxLength: 256),
                    Session_Id = c.Int(),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Sessions", t => t.Session_Id)
            .Index(t => t.UserName, unique: true, name: "UserNameIndex")
            .Index(t => t.Session_Id);

        CreateTable(
            "dbo.AspNetUserClaims",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    UserId = c.String(nullable: false, maxLength: 128),
                    ClaimType = c.String(),
                    ClaimValue = c.String(),
                    Discriminator = c.String(nullable: false, maxLength: 128),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.AspNetUsers", t => t.UserId, cascadeDelete: true)
            .Index(t => t.UserId);

        CreateTable(
            "dbo.Levels",
            c => new
                {
                    LevelId = c.Int(nullable: false, identity: true),
                    Name = c.String(),
                })
            .PrimaryKey(t => t.LevelId);

        CreateTable(
            "dbo.Universities",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    Name = c.String(),
                    Address = c.String(),
                })
            .PrimaryKey(t => t.Id);

        CreateTable(
            "dbo.AspNetUserLogins",
            c => new
                {
                    LoginProvider = c.String(nullable: false, maxLength: 128),
                    ProviderKey = c.String(nullable: false, maxLength: 128),
                    UserId = c.String(nullable: false, maxLength: 128),
                    Discriminator = c.String(nullable: false, maxLength: 128),
                })
            .PrimaryKey(t => new { t.LoginProvider, t.ProviderKey, t.UserId })
            .ForeignKey("dbo.AspNetUsers", t => t.UserId, cascadeDelete: true)
            .Index(t => t.UserId);

        CreateTable(
            "dbo.AspNetUserRoles",
            c => new
                {
                    UserId = c.String(nullable: false, maxLength: 128),
                    RoleId = c.String(nullable: false, maxLength: 128),
                })
            .PrimaryKey(t => new { t.UserId, t.RoleId })
            .ForeignKey("dbo.AspNetUsers", t => t.UserId, cascadeDelete: true)
            .ForeignKey("dbo.AspNetRoles", t => t.RoleId, cascadeDelete: true)
            .Index(t => t.UserId)
            .Index(t => t.RoleId);

        // Organizational hierarchy: Session -> Class -> School -> Cluster -> City.
        CreateTable(
            "dbo.Sessions",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    YearFrom = c.String(),
                    YearTo = c.String(),
                    Name = c.String(),
                    Class_Id = c.Int(),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Classes", t => t.Class_Id)
            .Index(t => t.Class_Id);

        CreateTable(
            "dbo.Classes",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    GradeName = c.String(),
                    School_Id = c.Int(),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Schools", t => t.School_Id)
            .Index(t => t.School_Id);

        CreateTable(
            "dbo.Schools",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    Name = c.String(),
                    Address = c.String(),
                    Cluster_Id = c.Int(),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Clusters", t => t.Cluster_Id)
            .Index(t => t.Cluster_Id);

        CreateTable(
            "dbo.Clusters",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    Name = c.String(),
                    City_Id = c.Int(),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Cities", t => t.City_Id)
            .Index(t => t.City_Id);

        CreateTable(
            "dbo.Cities",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    Name = c.String(),
                })
            .PrimaryKey(t => t.Id);

        CreateTable(
            "dbo.AspNetRoles",
            c => new
                {
                    Id = c.String(nullable: false, maxLength: 128),
                    Name = c.String(nullable: false, maxLength: 256),
                    Discriminator = c.String(nullable: false, maxLength: 128),
                })
            .PrimaryKey(t => t.Id)
            .Index(t => t.Name, unique: true, name: "RoleNameIndex");

        // Many-to-many join tables (cascade delete on both sides).
        CreateTable(
            "dbo.UserDesignations",
            c => new
                {
                    User_Id = c.String(nullable: false, maxLength: 128),
                    Designation_Id = c.Int(nullable: false),
                })
            .PrimaryKey(t => new { t.User_Id, t.Designation_Id })
            .ForeignKey("dbo.AspNetUsers", t => t.User_Id, cascadeDelete: true)
            .ForeignKey("dbo.Designations", t => t.Designation_Id, cascadeDelete: true)
            .Index(t => t.User_Id)
            .Index(t => t.Designation_Id);

        CreateTable(
            "dbo.LevelQuestionClaims",
            c => new
                {
                    Level_LevelId = c.Int(nullable: false),
                    QuestionClaim_Id = c.Int(nullable: false),
                })
            .PrimaryKey(t => new { t.Level_LevelId, t.QuestionClaim_Id })
            .ForeignKey("dbo.Levels", t => t.Level_LevelId, cascadeDelete: true)
            .ForeignKey("dbo.QuestionClaims", t => t.QuestionClaim_Id, cascadeDelete: true)
            .Index(t => t.Level_LevelId)
            .Index(t => t.QuestionClaim_Id);

        CreateTable(
            "dbo.UniversityLevels",
            c => new
                {
                    University_Id = c.Int(nullable: false),
                    Level_LevelId = c.Int(nullable: false),
                })
            .PrimaryKey(t => new { t.University_Id, t.Level_LevelId })
            .ForeignKey("dbo.Universities", t => t.University_Id, cascadeDelete: true)
            .ForeignKey("dbo.Levels", t => t.Level_LevelId, cascadeDelete: true)
            .Index(t => t.University_Id)
            .Index(t => t.Level_LevelId);

        CreateTable(
            "dbo.LevelUsers",
            c => new
                {
                    Level_LevelId = c.Int(nullable: false),
                    User_Id = c.String(nullable: false, maxLength: 128),
                })
            .PrimaryKey(t => new { t.Level_LevelId, t.User_Id })
            .ForeignKey("dbo.Levels", t => t.Level_LevelId, cascadeDelete: true)
            .ForeignKey("dbo.AspNetUsers", t => t.User_Id, cascadeDelete: true)
            .Index(t => t.Level_LevelId)
            .Index(t => t.User_Id);
    }

    /// <summary>
    /// Reverts the migration: drops foreign keys first, then indexes, then
    /// tables, in reverse dependency order.
    /// </summary>
    public override void Down()
    {
        DropForeignKey("dbo.AspNetUserRoles", "RoleId", "dbo.AspNetRoles");
        DropForeignKey("dbo.AspNetUsers", "Session_Id", "dbo.Sessions");
        DropForeignKey("dbo.Sessions", "Class_Id", "dbo.Classes");
        DropForeignKey("dbo.Schools", "Cluster_Id", "dbo.Clusters");
        DropForeignKey("dbo.Clusters", "City_Id", "dbo.Cities");
        DropForeignKey("dbo.Classes", "School_Id", "dbo.Schools");
        DropForeignKey("dbo.AspNetUserRoles", "UserId", "dbo.AspNetUsers");
        DropForeignKey("dbo.AspNetUserLogins", "UserId", "dbo.AspNetUsers");
        DropForeignKey("dbo.LevelUsers", "User_Id", "dbo.AspNetUsers");
        DropForeignKey("dbo.LevelUsers", "Level_LevelId", "dbo.Levels");
        DropForeignKey("dbo.UniversityLevels", "Level_LevelId", "dbo.Levels");
        DropForeignKey("dbo.UniversityLevels", "University_Id", "dbo.Universities");
        DropForeignKey("dbo.LevelQuestionClaims", "QuestionClaim_Id", "dbo.QuestionClaims");
        DropForeignKey("dbo.LevelQuestionClaims", "Level_LevelId", "dbo.Levels");
        DropForeignKey("dbo.UserDesignations", "Designation_Id", "dbo.Designations");
        DropForeignKey("dbo.UserDesignations", "User_Id", "dbo.AspNetUsers");
        DropForeignKey("dbo.AspNetUserClaims", "UserId", "dbo.AspNetUsers");
        DropForeignKey("dbo.Answers", "User_Id", "dbo.AspNetUsers");
        DropForeignKey("dbo.QuestionClaims", "Designation_Id", "dbo.Designations");
        DropForeignKey("dbo.Answers", "Question_Id", "dbo.QuestionClaims");
        DropIndex("dbo.LevelUsers", new[] { "User_Id" });
        DropIndex("dbo.LevelUsers", new[] { "Level_LevelId" });
        DropIndex("dbo.UniversityLevels", new[] { "Level_LevelId" });
        DropIndex("dbo.UniversityLevels", new[] { "University_Id" });
        DropIndex("dbo.LevelQuestionClaims", new[] { "QuestionClaim_Id" });
        DropIndex("dbo.LevelQuestionClaims", new[] { "Level_LevelId" });
        DropIndex("dbo.UserDesignations", new[] { "Designation_Id" });
        DropIndex("dbo.UserDesignations", new[] { "User_Id" });
        DropIndex("dbo.AspNetRoles", "RoleNameIndex");
        DropIndex("dbo.Clusters", new[] { "City_Id" });
        DropIndex("dbo.Schools", new[] { "Cluster_Id" });
        DropIndex("dbo.Classes", new[] { "School_Id" });
        DropIndex("dbo.Sessions", new[] { "Class_Id" });
        DropIndex("dbo.AspNetUserRoles", new[] { "RoleId" });
        DropIndex("dbo.AspNetUserRoles", new[] { "UserId" });
        DropIndex("dbo.AspNetUserLogins", new[] { "UserId" });
        DropIndex("dbo.AspNetUserClaims", new[] { "UserId" });
        DropIndex("dbo.AspNetUsers", new[] { "Session_Id" });
        DropIndex("dbo.AspNetUsers", "UserNameIndex");
        DropIndex("dbo.QuestionClaims", new[] { "Designation_Id" });
        DropIndex("dbo.Answers", new[] { "User_Id" });
        DropIndex("dbo.Answers", new[] { "Question_Id" });
        DropTable("dbo.LevelUsers");
        DropTable("dbo.UniversityLevels");
        DropTable("dbo.LevelQuestionClaims");
        DropTable("dbo.UserDesignations");
        DropTable("dbo.AspNetRoles");
        DropTable("dbo.Cities");
        DropTable("dbo.Clusters");
        DropTable("dbo.Schools");
        DropTable("dbo.Classes");
        DropTable("dbo.Sessions");
        DropTable("dbo.AspNetUserRoles");
        DropTable("dbo.AspNetUserLogins");
        DropTable("dbo.Universities");
        DropTable("dbo.Levels");
        DropTable("dbo.AspNetUserClaims");
        DropTable("dbo.AspNetUsers");
        DropTable("dbo.Designations");
        DropTable("dbo.QuestionClaims");
        DropTable("dbo.Answers");
    }
}
}
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
namespace _4PosBackOffice.NET
{
// Designer-generated partial class for the MT web registration form.
// Per the designer note below, this file must not be edited by hand;
// comments here are documentation only — all code is left untouched.
[Microsoft.VisualBasic.CompilerServices.DesignerGenerated()]
partial class frmMTWebReg
{
    #region "Windows Form Designer generated code "
    [System.Diagnostics.DebuggerNonUserCode()]
    public frmMTWebReg() : base()
    {
        // Wire the Load handler (defined in the other half of this partial
        // class) before the designer-generated initialization runs.
        Load += frmMTWebReg_Load;
        //This call is required by the Windows Form Designer.
        InitializeComponent();
    }
    //Form overrides dispose to clean up the component list.
    [System.Diagnostics.DebuggerNonUserCode()]
    protected override void Dispose(bool Disposing)
    {
        if (Disposing) {
            if ((components != null)) {
                components.Dispose();
            }
        }
        base.Dispose(Disposing);
    }
    //Required by the Windows Form Designer
    private System.ComponentModel.IContainer components;
    public System.Windows.Forms.ToolTip ToolTip1;

    // VB6-conversion "WithEvents" pattern: the property re-wires the Click
    // handler whenever the backing control instance is replaced.
    private System.Windows.Forms.Button withEventsField_Command1;
    public System.Windows.Forms.Button Command1 {
        get { return withEventsField_Command1; }
        set {
            if (withEventsField_Command1 != null) {
                withEventsField_Command1.Click -= Command1_Click;
            }
            withEventsField_Command1 = value;
            if (withEventsField_Command1 != null) {
                withEventsField_Command1.Click += Command1_Click;
            }
        }
    }
    // Same WithEvents pattern for the timer's Tick handler.
    private System.Windows.Forms.Timer withEventsField_Timer1;
    public System.Windows.Forms.Timer Timer1 {
        get { return withEventsField_Timer1; }
        set {
            if (withEventsField_Timer1 != null) {
                withEventsField_Timer1.Tick -= Timer1_Tick;
            }
            withEventsField_Timer1 = value;
            if (withEventsField_Timer1 != null) {
                withEventsField_Timer1.Tick += Timer1_Tick;
            }
        }
    }
    public System.Windows.Forms.Label lblVerification;
    public System.Windows.Forms.Label lblOperation;
    public System.Windows.Forms.Label lblAccumulator;
    public System.Windows.Forms.Label lblGlobalCounter;
    public System.Windows.Forms.Label Label3;
    public System.Windows.Forms.Label Label2;
    public System.Windows.Forms.Label Label1;
    public System.Windows.Forms.Label label4;
    //NOTE: The following procedure is required by the Windows Form Designer
    //It can be modified using the Windows Form Designer.
    //Do not modify it using the code editor.
    [System.Diagnostics.DebuggerStepThrough()]
    private void InitializeComponent()
    {
        System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(frmMTWebReg));
        this.components = new System.ComponentModel.Container();
        this.ToolTip1 = new System.Windows.Forms.ToolTip(components);
        this.Command1 = new System.Windows.Forms.Button();
        this.Timer1 = new System.Windows.Forms.Timer(components);
        this.lblVerification = new System.Windows.Forms.Label();
        this.lblOperation = new System.Windows.Forms.Label();
        this.lblAccumulator = new System.Windows.Forms.Label();
        this.lblGlobalCounter = new System.Windows.Forms.Label();
        this.Label3 = new System.Windows.Forms.Label();
        this.Label2 = new System.Windows.Forms.Label();
        this.Label1 = new System.Windows.Forms.Label();
        this.label4 = new System.Windows.Forms.Label();
        this.SuspendLayout();
        this.ToolTip1.Active = true;
        this.Text = "Form1";
        this.ClientSize = new System.Drawing.Size(186, 209);
        this.Location = new System.Drawing.Point(4, 23);
        this.ControlBox = false;
        this.ShowInTaskbar = false;
        this.StartPosition = System.Windows.Forms.FormStartPosition.WindowsDefaultLocation;
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        this.BackColor = System.Drawing.SystemColors.Control;
        this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.Sizable;
        this.Enabled = true;
        this.KeyPreview = false;
        this.MaximizeBox = true;
        this.MinimizeBox = true;
        this.Cursor = System.Windows.Forms.Cursors.Default;
        this.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.HelpButton = false;
        this.WindowState = System.Windows.Forms.FormWindowState.Normal;
        this.Name = "frmMTWebReg";
        this.Command1.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
        this.Command1.Text = "Launch New Form";
        this.Command1.Size = new System.Drawing.Size(133, 25);
        this.Command1.Location = new System.Drawing.Point(24, 180);
        this.Command1.TabIndex = 8;
        this.Command1.BackColor = System.Drawing.SystemColors.Control;
        this.Command1.CausesValidation = true;
        this.Command1.Enabled = true;
        this.Command1.ForeColor = System.Drawing.SystemColors.ControlText;
        this.Command1.Cursor = System.Windows.Forms.Cursors.Default;
        this.Command1.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.Command1.TabStop = true;
        this.Command1.Name = "Command1";
        this.Timer1.Enabled = false;
        this.Timer1.Interval = 1;
        this.lblVerification.Size = new System.Drawing.Size(109, 13);
        this.lblVerification.Location = new System.Drawing.Point(36, 156);
        this.lblVerification.TabIndex = 7;
        this.lblVerification.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.lblVerification.BackColor = System.Drawing.SystemColors.Control;
        this.lblVerification.Enabled = true;
        this.lblVerification.ForeColor = System.Drawing.SystemColors.ControlText;
        this.lblVerification.Cursor = System.Windows.Forms.Cursors.Default;
        this.lblVerification.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.lblVerification.UseMnemonic = true;
        this.lblVerification.Visible = true;
        this.lblVerification.AutoSize = false;
        this.lblVerification.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.lblVerification.Name = "lblVerification";
        this.lblOperation.Size = new System.Drawing.Size(145, 13);
        this.lblOperation.Location = new System.Drawing.Point(36, 114);
        this.lblOperation.TabIndex = 6;
        this.lblOperation.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.lblOperation.BackColor = System.Drawing.SystemColors.Control;
        this.lblOperation.Enabled = true;
        this.lblOperation.ForeColor = System.Drawing.SystemColors.ControlText;
        this.lblOperation.Cursor = System.Windows.Forms.Cursors.Default;
        this.lblOperation.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.lblOperation.UseMnemonic = true;
        this.lblOperation.Visible = true;
        this.lblOperation.AutoSize = false;
        this.lblOperation.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.lblOperation.Name = "lblOperation";
        this.lblAccumulator.Size = new System.Drawing.Size(97, 13);
        this.lblAccumulator.Location = new System.Drawing.Point(36, 72);
        this.lblAccumulator.TabIndex = 5;
        this.lblAccumulator.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.lblAccumulator.BackColor = System.Drawing.SystemColors.Control;
        this.lblAccumulator.Enabled = true;
        this.lblAccumulator.ForeColor = System.Drawing.SystemColors.ControlText;
        this.lblAccumulator.Cursor = System.Windows.Forms.Cursors.Default;
        this.lblAccumulator.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.lblAccumulator.UseMnemonic = true;
        this.lblAccumulator.Visible = true;
        this.lblAccumulator.AutoSize = false;
        this.lblAccumulator.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.lblAccumulator.Name = "lblAccumulator";
        this.lblGlobalCounter.Size = new System.Drawing.Size(79, 13);
        this.lblGlobalCounter.Location = new System.Drawing.Point(36, 30);
        this.lblGlobalCounter.TabIndex = 4;
        this.lblGlobalCounter.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.lblGlobalCounter.BackColor = System.Drawing.SystemColors.Control;
        this.lblGlobalCounter.Enabled = true;
        this.lblGlobalCounter.ForeColor = System.Drawing.SystemColors.ControlText;
        this.lblGlobalCounter.Cursor = System.Windows.Forms.Cursors.Default;
        this.lblGlobalCounter.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.lblGlobalCounter.UseMnemonic = true;
        this.lblGlobalCounter.Visible = true;
        this.lblGlobalCounter.AutoSize = false;
        this.lblGlobalCounter.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.lblGlobalCounter.Name = "lblGlobalCounter";
        this.Label3.Text = "TotalIncrements:";
        this.Label3.Size = new System.Drawing.Size(115, 13);
        this.Label3.Location = new System.Drawing.Point(12, 132);
        this.Label3.TabIndex = 3;
        this.Label3.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.Label3.BackColor = System.Drawing.SystemColors.Control;
        this.Label3.Enabled = true;
        this.Label3.ForeColor = System.Drawing.SystemColors.ControlText;
        this.Label3.Cursor = System.Windows.Forms.Cursors.Default;
        this.Label3.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.Label3.UseMnemonic = true;
        this.Label3.Visible = true;
        this.Label3.AutoSize = false;
        this.Label3.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.Label3.Name = "Label3";
        this.Label2.Text = "Operation:";
        this.Label2.Size = new System.Drawing.Size(127, 13);
        this.Label2.Location = new System.Drawing.Point(12, 96);
        this.Label2.TabIndex = 2;
        this.Label2.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.Label2.BackColor = System.Drawing.SystemColors.Control;
        this.Label2.Enabled = true;
        this.Label2.ForeColor = System.Drawing.SystemColors.ControlText;
        this.Label2.Cursor = System.Windows.Forms.Cursors.Default;
        this.Label2.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.Label2.UseMnemonic = true;
        this.Label2.Visible = true;
        this.Label2.AutoSize = false;
        this.Label2.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.Label2.Name = "Label2";
        this.Label1.Text = "Accumulator:";
        this.Label1.Size = new System.Drawing.Size(121, 13);
        this.Label1.Location = new System.Drawing.Point(12, 54);
        this.Label1.TabIndex = 1;
        this.Label1.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.Label1.BackColor = System.Drawing.SystemColors.Control;
        this.Label1.Enabled = true;
        this.Label1.ForeColor = System.Drawing.SystemColors.ControlText;
        this.Label1.Cursor = System.Windows.Forms.Cursors.Default;
        this.Label1.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.Label1.UseMnemonic = true;
        this.Label1.Visible = true;
        this.Label1.AutoSize = false;
        this.Label1.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.Label1.Name = "Label1";
        this.label4.Text = "GenericGlobalCounter:";
        this.label4.Size = new System.Drawing.Size(133, 13);
        this.label4.Location = new System.Drawing.Point(12, 12);
        this.label4.TabIndex = 0;
        this.label4.TextAlign = System.Drawing.ContentAlignment.TopLeft;
        this.label4.BackColor = System.Drawing.SystemColors.Control;
        this.label4.Enabled = true;
        this.label4.ForeColor = System.Drawing.SystemColors.ControlText;
        this.label4.Cursor = System.Windows.Forms.Cursors.Default;
        this.label4.RightToLeft = System.Windows.Forms.RightToLeft.No;
        this.label4.UseMnemonic = true;
        this.label4.Visible = true;
        this.label4.AutoSize = false;
        this.label4.BorderStyle = System.Windows.Forms.BorderStyle.None;
        this.label4.Name = "label4";
        this.Controls.Add(Command1);
        this.Controls.Add(lblVerification);
        this.Controls.Add(lblOperation);
        this.Controls.Add(lblAccumulator);
        this.Controls.Add(lblGlobalCounter);
        this.Controls.Add(Label3);
        this.Controls.Add(Label2);
        this.Controls.Add(Label1);
        this.Controls.Add(label4);
        this.ResumeLayout(false);
        this.PerformLayout();
    }
    #endregion
}
}
| |
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.Rendering;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace UTJ.FrameCapturer
{
[ExecuteInEditMode]
public abstract class RecorderBase : MonoBehaviour
{
// How the capture resolution is specified.
public enum ResolutionUnit
{
    Percent, // scale relative to the source resolution (see m_resolutionPercent)
    Pixels,  // explicit size — presumably driven by m_resolutionWidth; confirm in subclasses
}
// Whether capture follows the engine's variable frame rate or locks to a
// constant target rate (see BeginRecording's delta-time handling).
public enum FrameRateMode
{
    Variable,
    Constant,
}
// How the start/stop of a capture session is controlled.
public enum CaptureControl
{
    Manual,     // caller starts/stops explicitly (e.g. via isRecording)
    FrameRange, // bounded by m_startFrame .. m_endFrame
    TimeRange,  // bounded by m_startTime .. m_endTime
}
// --- Inspector-serialized configuration -------------------------------
// Output directory for captured data.
[SerializeField] protected DataPath m_outputDir = new DataPath(DataPath.Root.Current, "Capture");
// Resolution settings (percent-of-source or explicit width).
[SerializeField] protected ResolutionUnit m_resolution = ResolutionUnit.Percent;
[SerializeField] [Range(1,100)] protected int m_resolutionPercent = 100;
[SerializeField] protected int m_resolutionWidth = 1920;
// Frame-rate settings; see BeginRecording for how these interact with
// Time.maximumDeltaTime / Time.captureFramerate.
[SerializeField] protected FrameRateMode m_framerateMode = FrameRateMode.Constant;
[SerializeField] protected int m_targetFramerate = 30;
[SerializeField] protected bool m_fixDeltaTime = true;
[SerializeField] protected bool m_waitDeltaTime = true;
[SerializeField] [Range(1,10)]protected int m_captureEveryNthFrame = 1;
// Session control: manual, frame-range, or time-range bounded.
[SerializeField] protected CaptureControl m_captureControl = CaptureControl.FrameRange;
[SerializeField] protected int m_startFrame = 0;
[SerializeField] protected int m_endFrame = 100;
[SerializeField] protected float m_startTime = 0.0f;
[SerializeField] protected float m_endTime = 10.0f;
// Begin recording automatically — presumably on Start(); confirm in the
// lifecycle methods outside this chunk.
[SerializeField] bool m_recordOnStart = false;

// --- Runtime session state (not serialized as inspector options) ------
protected bool m_recording = false;
protected bool m_aborted = false;
// Frame/time snapshots taken in BeginRecording, used to measure the session.
protected int m_initialFrame = 0;
protected float m_initialTime = 0.0f;
protected float m_initialRealTime = 0.0f;
protected int m_frame = 0;
protected int m_recordedFrames = 0;
protected int m_recordedSamples = 0;
// --- Public accessors for the serialized configuration above -----------
public DataPath outputDir
{
    get { return m_outputDir; }
    set { m_outputDir = value; }
}
public ResolutionUnit resolutionUnit
{
    get { return m_resolution; }
    set { m_resolution = value; }
}
public int resolutionPercent
{
    get { return m_resolutionPercent; }
    set { m_resolutionPercent = value; }
}
public int resolutionWidth
{
    get { return m_resolutionWidth; }
    set { m_resolutionWidth = value; }
}
public FrameRateMode framerateMode
{
    get { return m_framerateMode; }
    set { m_framerateMode = value; }
}
public int targetFramerate
{
    get { return m_targetFramerate; }
    set { m_targetFramerate = value; }
}
public bool fixDeltaTime
{
    get { return m_fixDeltaTime; }
    set { m_fixDeltaTime = value; }
}
public bool waitDeltaTime
{
    get { return m_waitDeltaTime; }
    set { m_waitDeltaTime = value; }
}
public int captureEveryNthFrame
{
    get { return m_captureEveryNthFrame; }
    set { m_captureEveryNthFrame = value; }
}
public CaptureControl captureControl
{
    get { return m_captureControl; }
    set { m_captureControl = value; }
}
public int startFrame
{
    get { return m_startFrame; }
    set { m_startFrame = value; }
}
public int endFrame
{
    get { return m_endFrame; }
    set { m_endFrame = value; }
}
public float startTime
{
    get { return m_startTime; }
    set { m_startTime = value; }
}
public float endTime
{
    get { return m_endTime; }
    set { m_endTime = value; }
}
// Setting this property starts or stops a capture session (not a plain
// field write): true calls BeginRecording(), false calls EndRecording().
public bool isRecording
{
    get { return m_recording; }
    set {
        if (value) { BeginRecording(); }
        else { EndRecording(); }
    }
}
// Write-only: whether recording should begin automatically on start.
public bool recordOnStart { set { m_recordOnStart = value; } }
// Starts a capture session. Returns false if one is already in progress.
// With Constant framerate + m_fixDeltaTime, assigns Time.maximumDeltaTime to one
// target-frame interval; if m_waitDeltaTime is off it also forces Unity's
// fixed capture stepping via Time.captureFramerate.
public virtual bool BeginRecording()
{
    if(m_recording) { return false; }
    // delta time control
    if (m_framerateMode == FrameRateMode.Constant && m_fixDeltaTime)
    {
        Time.maximumDeltaTime = (1.0f / m_targetFramerate);
        if (!m_waitDeltaTime)
        {
            Time.captureFramerate = m_targetFramerate;
        }
    }
    // snapshot baselines so progress is measured relative to "now"
    m_initialFrame = Time.renderedFrameCount;
    m_initialTime = Time.unscaledTime;
    m_initialRealTime = Time.realtimeSinceStartup;
    m_recordedFrames = 0;
    m_recordedSamples = 0;
    m_recording = true;
    return true;
}
// Stops the current capture session (no-op when not recording).
// Resets Time.captureFramerate if it was forced, and sets m_aborted so the
// FrameRange/TimeRange logic in Update() does not immediately restart.
public virtual void EndRecording()
{
    if (!m_recording) { return; }
    if (m_framerateMode == FrameRateMode.Constant && m_fixDeltaTime)
    {
        if (!m_waitDeltaTime)
        {
            Time.captureFramerate = 0;
        }
    }
    m_recording = false;
    m_aborted = true;
}
// Converts the incoming source size (w, h) into the capture size, either by
// uniform percentage scaling or by pinning the width and preserving aspect.
protected void GetCaptureResolution(ref int w, ref int h)
{
    if (m_resolution != ResolutionUnit.Percent)
    {
        // Fixed-width mode: keep the original aspect ratio.
        float aspect = (float)h / w;
        w = m_resolutionWidth;
        h = (int)(m_resolutionWidth * aspect);
        return;
    }
    // Percentage mode: scale both axes uniformly.
    float ratio = m_resolutionPercent * 0.01f;
    w = (int)(w * ratio);
    h = (int)(h * ratio);
}
// Coroutine run after each frame: busy-waits (Sleep(1) loop) until wall-clock
// time catches up with the ideal constant-framerate schedule, so real-time
// playback matches m_targetFramerate.
protected IEnumerator Wait()
{
    yield return new WaitForEndOfFrame();
    // ideal elapsed real time for the frames rendered since recording began
    float wt = (1.0f / m_targetFramerate) * (Time.renderedFrameCount - m_initialFrame);
    while (Time.realtimeSinceStartup - m_initialRealTime < wt)
    {
        System.Threading.Thread.Sleep(1);
    }
}
#if UNITY_EDITOR
// Keeps inspector values within sane bounds (framerate >= 1, ranges ordered).
protected virtual void OnValidate()
{
    m_targetFramerate = Mathf.Max(1, m_targetFramerate);
    m_startFrame = Mathf.Max(0, m_startFrame);
    m_endFrame = Mathf.Max(m_startFrame, m_endFrame);
    m_startTime = Mathf.Max(0.0f, m_startTime);
    m_endTime = Mathf.Max(m_startTime, m_endTime);
}
#endif // UNITY_EDITOR
protected virtual void Start()
{
    m_initialFrame = Time.renderedFrameCount;
    m_initialTime = Time.unscaledTime;
    m_initialRealTime = Time.realtimeSinceStartup;
    // This component runs in edit mode too ([ExecuteInEditMode]); in the
    // editor only honor recordOnStart while actually playing.
#if UNITY_EDITOR
    if (EditorApplication.isPlaying)
#endif
    {
        if (m_recordOnStart)
        {
            BeginRecording();
        }
    }
    m_recordOnStart = false;
}
protected virtual void OnDisable()
{
    // Make sure a session is closed when the component is disabled/destroyed.
#if UNITY_EDITOR
    if (EditorApplication.isPlaying)
#endif
    {
        EndRecording();
    }
}
// Drives automatic start/stop for FrameRange and TimeRange modes, and
// schedules the real-time pacing coroutine when wait-delta-time pacing
// is enabled. Manual mode is driven entirely through isRecording.
protected virtual void Update()
{
#if UNITY_EDITOR
    if (EditorApplication.isPlaying)
#endif
    {
        if (m_captureControl == CaptureControl.FrameRange)
        {
            // NOTE(review): m_frame is never incremented in this class; this
            // range check relies on a subclass updating it — verify.
            if (!m_aborted && m_frame >= m_startFrame && m_frame <= m_endFrame)
            {
                if (!m_recording) { BeginRecording(); }
            }
            else if (m_recording)
            {
                EndRecording();
            }
        }
        else if (m_captureControl == CaptureControl.TimeRange)
        {
            float time = Time.unscaledTime - m_initialTime;
            if (!m_aborted && time >= m_startTime && time <= m_endTime)
            {
                if (!m_recording) { BeginRecording(); }
            }
            else if (m_recording)
            {
                EndRecording();
            }
        }
        else if (m_captureControl == CaptureControl.Manual)
        {
            // nothing to do: user controls recording explicitly
        }
        if(m_framerateMode == FrameRateMode.Constant && m_fixDeltaTime && m_waitDeltaTime)
        {
            StartCoroutine(Wait());
        }
    }
}
}
}
| |
// Copyright (c) Rotorz Limited. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root.
using System;
using System.ComponentModel;
using UnityEngine;
namespace Rotorz.Games.Collections
{
/// <summary>
/// Arguments which are passed to <see cref="AddMenuClickedEventHandler"/>.
/// </summary>
public sealed class AddMenuClickedEventArgs : EventArgs
{
    /// <summary>
    /// Initializes a new instance of <see cref="AddMenuClickedEventArgs"/>.
    /// </summary>
    /// <param name="adaptor">Reorderable list adaptor.</param>
    /// <param name="buttonPosition">Position of the add menu button.</param>
    public AddMenuClickedEventArgs(IReorderableListAdaptor adaptor, Rect buttonPosition)
    {
        this.Adaptor = adaptor;
        this.ButtonPosition = buttonPosition;
    }
    /// <summary>
    /// Gets adaptor to reorderable list container.
    /// </summary>
    public IReorderableListAdaptor Adaptor { get; private set; }
    /// <summary>
    /// Gets position of the add menu button.
    /// </summary>
    public Rect ButtonPosition { get; internal set; }
}
/// <summary>
/// An event handler which is invoked when the "Add Menu" button is clicked.
/// </summary>
/// <param name="sender">Object which raised event.</param>
/// <param name="args">Event arguments.</param>
public delegate void AddMenuClickedEventHandler(object sender, AddMenuClickedEventArgs args);
/// <summary>
/// Arguments which are passed to <see cref="ItemInsertedEventHandler"/>.
/// </summary>
public sealed class ItemInsertedEventArgs : EventArgs
{
    /// <summary>
    /// Initializes a new instance of <see cref="ItemInsertedEventArgs"/>.
    /// </summary>
    /// <param name="adaptor">Reorderable list adaptor.</param>
    /// <param name="itemIndex">Zero-based index of item.</param>
    /// <param name="wasDuplicated">Indicates if inserted item was duplicated from another item.</param>
    public ItemInsertedEventArgs(IReorderableListAdaptor adaptor, int itemIndex, bool wasDuplicated)
    {
        Adaptor = adaptor;
        ItemIndex = itemIndex;
        WasDuplicated = wasDuplicated;
    }

    /// <summary>
    /// Gets the adaptor of the reorderable list container that holds the element.
    /// </summary>
    public IReorderableListAdaptor Adaptor { get; private set; }

    /// <summary>
    /// Gets the zero-based index at which the item was inserted.
    /// </summary>
    public int ItemIndex { get; private set; }

    /// <summary>
    /// Indicates whether the inserted item was duplicated from another item.
    /// </summary>
    public bool WasDuplicated { get; private set; }
}

/// <summary>
/// An event handler which is invoked after new list item is inserted.
/// </summary>
/// <param name="sender">Object which raised event.</param>
/// <param name="args">Event arguments.</param>
public delegate void ItemInsertedEventHandler(object sender, ItemInsertedEventArgs args);
/// <summary>
/// Arguments which are passed to <see cref="ItemRemovingEventHandler"/>.
/// </summary>
public sealed class ItemRemovingEventArgs : CancelEventArgs
{
    /// <summary>
    /// Initializes a new instance of <see cref="ItemRemovingEventArgs"/>.
    /// </summary>
    /// <param name="adaptor">Reorderable list adaptor.</param>
    /// <param name="itemIndex">Zero-based index of item.</param>
    public ItemRemovingEventArgs(IReorderableListAdaptor adaptor, int itemIndex)
    {
        Adaptor = adaptor;
        ItemIndex = itemIndex;
    }

    /// <summary>
    /// Gets the adaptor of the reorderable list container that holds the element.
    /// </summary>
    public IReorderableListAdaptor Adaptor { get; private set; }

    /// <summary>
    /// Gets the zero-based index of the item that is about to be removed.
    /// </summary>
    public int ItemIndex { get; internal set; }
}

/// <summary>
/// An event handler which is invoked before a list item is removed.
/// </summary>
/// <remarks>
/// <para>Item removal can be cancelled by setting <see cref="CancelEventArgs.Cancel"/>
/// to <c>true</c>.</para>
/// </remarks>
/// <param name="sender">Object which raised event.</param>
/// <param name="args">Event arguments.</param>
public delegate void ItemRemovingEventHandler(object sender, ItemRemovingEventArgs args);
/// <summary>
/// Arguments which are passed to <see cref="ItemMovingEventHandler"/>.
/// </summary>
public sealed class ItemMovingEventArgs : CancelEventArgs
{
    /// <summary>
    /// Initializes a new instance of <see cref="ItemMovingEventArgs"/>.
    /// </summary>
    /// <param name="adaptor">Reorderable list adaptor.</param>
    /// <param name="itemIndex">Zero-based index of item.</param>
    /// <param name="destinationItemIndex">Zero-based index of item destination.</param>
    public ItemMovingEventArgs(IReorderableListAdaptor adaptor, int itemIndex, int destinationItemIndex)
    {
        this.Adaptor = adaptor;
        this.ItemIndex = itemIndex;
        this.DestinationItemIndex = destinationItemIndex;
    }
    /// <summary>
    /// Gets adaptor to reorderable list container which contains element.
    /// </summary>
    public IReorderableListAdaptor Adaptor { get; private set; }
    /// <summary>
    /// Gets current zero-based index of item which is going to be moved.
    /// </summary>
    public int ItemIndex { get; internal set; }
    /// <summary>
    /// Gets the new candidate zero-based index for the item.
    /// </summary>
    /// <seealso cref="NewItemIndex"/>
    public int DestinationItemIndex { get; internal set; }
    /// <summary>
    /// Gets zero-based index of item <strong>after</strong> it has been moved.
    /// </summary>
    /// <seealso cref="DestinationItemIndex"/>
    public int NewItemIndex {
        get {
            int result = this.DestinationItemIndex;
            // Removing the item from its old position shifts later indices down by one.
            if (result > this.ItemIndex) {
                --result;
            }
            return result;
        }
    }
}
/// <summary>
/// An event handler which is invoked before a list item is moved.
/// </summary>
/// <remarks>
/// <para>Moving of item can be cancelled by setting <see cref="CancelEventArgs.Cancel"/>
/// to <c>true</c>.</para>
/// </remarks>
/// <param name="sender">Object which raised event.</param>
/// <param name="args">Event arguments.</param>
public delegate void ItemMovingEventHandler(object sender, ItemMovingEventArgs args);
/// <summary>
/// Arguments which are passed to <see cref="ItemMovedEventHandler"/>.
/// </summary>
public sealed class ItemMovedEventArgs : EventArgs
{
    /// <summary>
    /// Initializes a new instance of <see cref="ItemMovedEventArgs"/>.
    /// </summary>
    /// <param name="adaptor">Reorderable list adaptor.</param>
    /// <param name="oldItemIndex">Old zero-based index of item.</param>
    /// <param name="newItemIndex">New zero-based index of item.</param>
    public ItemMovedEventArgs(IReorderableListAdaptor adaptor, int oldItemIndex, int newItemIndex)
    {
        Adaptor = adaptor;
        OldItemIndex = oldItemIndex;
        NewItemIndex = newItemIndex;
    }

    /// <summary>
    /// Gets the adaptor of the reorderable list container that holds the element.
    /// </summary>
    public IReorderableListAdaptor Adaptor { get; private set; }

    /// <summary>
    /// Gets the zero-based index the item occupied before the move.
    /// </summary>
    public int OldItemIndex { get; internal set; }

    /// <summary>
    /// Gets the zero-based index the item occupies after the move.
    /// </summary>
    public int NewItemIndex { get; internal set; }
}

/// <summary>
/// An event handler which is invoked after a list item is moved.
/// </summary>
/// <param name="sender">Object which raised event.</param>
/// <param name="args">Event arguments.</param>
public delegate void ItemMovedEventHandler(object sender, ItemMovedEventArgs args);
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Globalization {
using System;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
#if !FEATURE_WIN32_REGISTRY
using System.Text;
using Microsoft.Win32;
#endif // FEATURE_WIN32_REGISTRY
////////////////////////////////////////////////////////////////////////////
//
// Rules for the Hijri calendar:
// - The Hijri calendar is a strictly Lunar calendar.
// - Days begin at sunset.
// - Islamic Year 1 (Muharram 1, 1 A.H.) is equivalent to absolute date
// 227015 (Friday, July 16, 622 C.E. - Julian).
// - Leap Years occur in the 2, 5, 7, 10, 13, 16, 18, 21, 24, 26, & 29th
// years of a 30-year cycle. Year = leap iff ((11y+14) mod 30 < 11).
// - There are 12 months which contain alternately 30 and 29 days.
// - The 12th month, Dhu al-Hijjah, contains 30 days instead of 29 days
// in a leap year.
// - Common years have 354 days. Leap years have 355 days.
// - There are 10,631 days in a 30-year cycle.
// - The Islamic months are:
// 1. Muharram (30 days) 7. Rajab (30 days)
// 2. Safar (29 days) 8. Sha'ban (29 days)
// 3. Rabi I (30 days) 9. Ramadan (30 days)
// 4. Rabi II (29 days) 10. Shawwal (29 days)
// 5. Jumada I (30 days) 11. Dhu al-Qada (30 days)
// 6. Jumada II (29 days) 12. Dhu al-Hijjah (29 days) {30}
//
// NOTENOTE
// The calculation of the HijriCalendar is based on the absolute date. And the
// absolute date means the number of days from January 1st, 1 A.D.
// Therefore, we do not support the days before the January 1st, 1 A.D.
//
////////////////////////////////////////////////////////////////////////////
/*
** Calendar support range:
** Calendar Minimum Maximum
** ========== ========== ==========
** Gregorian 0622/07/18 9999/12/31
** Hijri 0001/01/01 9666/04/03
*/
[Serializable]
[System.Runtime.InteropServices.ComVisible(true)]
public class HijriCalendar : Calendar {
// The single era recognized by this calendar (see Eras).
public static readonly int HijriEra = 1;
// Part selectors for GetDatePart().
internal const int DatePartYear = 0;
internal const int DatePartDayOfYear = 1;
internal const int DatePartMonth = 2;
internal const int DatePartDay = 3;
// Allowed range (in days) for the user-configurable HijriAdjustment.
internal const int MinAdvancedHijri = -2;
internal const int MaxAdvancedHijri = 2;
// Cumulative day counts at the start of each Hijri month; the final entry
// (355, the leap-year total) acts as the sentinel for GetDatePart's month scan.
internal static readonly int[] HijriMonthDays = {0,30,59,89,118,148,177,207,236,266,295,325,355};
//internal static Calendar m_defaultInstance;
#if FEATURE_WIN32_REGISTRY
private const String InternationalRegKey = "Control Panel\\International";
private const String HijriAdvanceRegKeyEntry = "AddHijriDate";
#endif
// Cached HijriAdjustment; Int32.MinValue is the "not yet read from registry" sentinel.
private int m_HijriAdvance = Int32.MinValue;
// DateTime.MaxValue = Hijri calendar (year:9666, month: 4, day: 3).
internal const int MaxCalendarYear = 9666;
internal const int MaxCalendarMonth = 4;
internal const int MaxCalendarDay = 3;
// Hijri calendar (year: 1, month: 1, day:1 ) = Gregorian (year: 622, month: 7, day: 18)
// This is the minimal Gregorian date that we support in the HijriCalendar.
internal static readonly DateTime calendarMinValue = new DateTime(622, 7, 18);
internal static readonly DateTime calendarMaxValue = DateTime.MaxValue;
// Earliest supported instant: Gregorian 622-07-18 (Hijri 1/1/1).
[System.Runtime.InteropServices.ComVisible(false)]
public override DateTime MinSupportedDateTime
{
    get { return calendarMinValue; }
}

// Latest supported instant: DateTime.MaxValue (Hijri 9666/4/3).
[System.Runtime.InteropServices.ComVisible(false)]
public override DateTime MaxSupportedDateTime
{
    get { return calendarMaxValue; }
}

// Return the type of the Hijri calendar: strictly lunar.
[System.Runtime.InteropServices.ComVisible(false)]
public override CalendarAlgorithmType AlgorithmType
{
    get { return CalendarAlgorithmType.LunarCalendar; }
}
/*=================================GetDefaultInstance==========================
**Action: Internal method to provide a default instance of HijriCalendar. Used by NLS+ implementation
** and other calendars.
**Returns:
**Arguments:
**Exceptions:
============================================================================*/
/*
internal static Calendar GetDefaultInstance() {
if (m_defaultInstance == null) {
m_defaultInstance = new HijriCalendar();
}
return (m_defaultInstance);
}
*/
// Construct an instance of Hijri calendar.
public HijriCalendar() {
}
// Internal calendar identifier (CAL_HIJRI from the Calendar base class),
// used e.g. to look up the system two-digit-year setting.
internal override int ID {
    get {
        return (CAL_HIJRI);
    }
}
protected override int DaysInYearBeforeMinSupportedYear
{
    get
    {
        // the year before the 1st year of the cycle would have been the 30th year
        // of the previous cycle which is not a leap year. Common years have 354 days.
        return 354;
    }
}
/*=================================GetAbsoluteDateHijri==========================
**Action: Gets the Absolute date for the given Hijri date. The absolute date means
** the number of days from January 1st, 1 A.D.
**Returns:
**Arguments:
**Exceptions:
============================================================================*/
// Days before year y, plus days before month m within the year, plus the
// zero-based day; HijriAdjustment is subtracted here so that the adjustment
// added back in GetDatePart() round-trips.
long GetAbsoluteDateHijri(int y, int m, int d) {
    return (long)(DaysUpToHijriYear(y) + HijriMonthDays[m-1] + d - 1 - HijriAdjustment);
}
/*=================================DaysUpToHijriYear==========================
**Action: Gets the total number of days (absolute date) up to the given Hijri Year.
** The absolute date means the number of days from January 1st, 1 A.D.
**Returns: Gets the total number of days (absolute date) up to the given Hijri Year.
**Arguments: HijriYear year value in Hijri calendar.
**Exceptions: None
**Notes:
============================================================================*/
long DaysUpToHijriYear(int HijriYear) {
    long NumDays; // number of absolute days
    int NumYear30; // number of years up to current 30 year cycle
    int NumYearsLeft; // number of years into 30 year cycle
    //
    // Compute the number of years up to the current 30 year cycle.
    //
    NumYear30 = ((HijriYear - 1) / 30) * 30;
    //
    // Compute the number of years left. This is the number of years
    // into the 30 year cycle for the given year.
    //
    NumYearsLeft = HijriYear - NumYear30 - 1;
    //
    // Compute the number of absolute days up to the given year.
    // 10,631 days per complete 30-year cycle (see notes at top of file);
    // 227013L is the fixed offset to absolute (1 A.D.-based) days.
    //
    NumDays = ((NumYear30 * 10631L) / 30L) + 227013L;
    while (NumYearsLeft > 0) {
        // Common year is 354 days, and leap year is 355 days.
        NumDays += 354 + (IsLeapYear(NumYearsLeft, CurrentEra) ? 1: 0);
        NumYearsLeft--;
    }
    //
    // Return the number of absolute days.
    //
    return (NumDays);
}
// Number of days (MinAdvancedHijri..MaxAdvancedHijri, i.e. -2..+2) added to the
// calculated Hijri date; read lazily from the registry on first use and cached.
public int HijriAdjustment {
    [System.Security.SecuritySafeCritical] // auto-generated
    get {
        if (m_HijriAdvance == Int32.MinValue) {
            // Never been set before. Use the system value from registry.
            m_HijriAdvance = GetAdvanceHijriDate();
        }
        return (m_HijriAdvance);
    }
    set {
        // NOTE: Check the value of Min/MaxAdvancedHijri with Arabic speakers to see if the assumption is good.
        if (value < MinAdvancedHijri || value > MaxAdvancedHijri) {
            throw new ArgumentOutOfRangeException(
                nameof(HijriAdjustment),
                String.Format(
                    CultureInfo.CurrentCulture,
                    Environment.GetResourceString("ArgumentOutOfRange_Bounds_Lower_Upper"),
                    MinAdvancedHijri,
                    MaxAdvancedHijri));
        }
        Contract.EndContractBlock();
        VerifyWritable();
        m_HijriAdvance = value;
    }
}
/*=================================GetAdvanceHijriDate==========================
**Action: Gets the AddHijriDate value from the registry.
**Returns:
**Arguments: None.
**Exceptions:
**Note:
** The HijriCalendar has a user-overridable calculation. That is, the user can set a value from the control
** panel, so that the calculation of the Hijri Calendar can move ahead or backwards from -2 to +2 days.
**
** The valid string values in the registry are:
** "AddHijriDate-2" => Add -2 days to the current calculated Hijri date.
** "AddHijriDate" => Add -1 day to the current calculated Hijri date.
** "" => Add 0 day to the current calculated Hijri date.
** "AddHijriDate+1" => Add +1 days to the current calculated Hijri date.
** "AddHijriDate+2" => Add +2 days to the current calculated Hijri date.
============================================================================*/
[System.Security.SecurityCritical] // auto-generated
// Reads the user's Hijri-date adjustment from the registry (see comment block
// above for the accepted "AddHijriDate..." string formats). Any failure —
// missing key, bad value, access error — yields 0 (no adjustment).
static int GetAdvanceHijriDate() {
#if FEATURE_WIN32_REGISTRY
    int hijriAdvance = 0;
    Microsoft.Win32.RegistryKey key = null;
    try {
        // Open in read-only mode.
        // Use InternalOpenSubKey so that we avoid the security check.
        key = Microsoft.Win32.Registry.CurrentUser.InternalOpenSubKey(InternationalRegKey, false);
    }
    //If this fails for any reason, we'll just return 0.
    catch (ObjectDisposedException) { return 0; }
    catch (ArgumentException) { return 0; }
    if (key != null) {
        try {
            Object value = key.InternalGetValue(HijriAdvanceRegKeyEntry, null, false, false);
            if (value == null) {
                return (0);
            }
            String str = value.ToString();
            // Value must start with "AddHijriDate" (case-insensitive).
            if (String.Compare(str, 0, HijriAdvanceRegKeyEntry, 0, HijriAdvanceRegKeyEntry.Length, StringComparison.OrdinalIgnoreCase) == 0) {
                if (str.Length == HijriAdvanceRegKeyEntry.Length)
                    hijriAdvance = -1;   // bare "AddHijriDate" means -1 day
                else {
                    // Parse the "+N"/"-N" suffix; out-of-range values are ignored.
                    str = str.Substring(HijriAdvanceRegKeyEntry.Length);
                    try {
                        int advance = Int32.Parse(str.ToString(), CultureInfo.InvariantCulture);
                        if ((advance >= MinAdvancedHijri) && (advance <= MaxAdvancedHijri)) {
                            hijriAdvance = advance;
                        }
                    }
                    // If we got garbage from registry just ignore it.
                    // hijriAdvance = 0 because of declaration assignment up above.
                    catch (ArgumentException) { }
                    catch (FormatException) { }
                    catch (OverflowException) { }
                }
            }
        }
        finally {
            key.Close();
        }
    }
    return (hijriAdvance);
#else //FEATURE_WIN32_REGISTRY
    return 0;
#endif // FEATURE_WIN32_REGISTRY
}
// Throws unless ticks lies within [calendarMinValue, calendarMaxValue].
static internal void CheckTicksRange(long ticks) {
    if (ticks < calendarMinValue.Ticks || ticks > calendarMaxValue.Ticks) {
        throw new ArgumentOutOfRangeException(
            "time",   // literal name: callers pass a DateTime parameter named "time"
            String.Format(
                CultureInfo.InvariantCulture,
                Environment.GetResourceString("ArgumentOutOfRange_CalendarRange"),
                calendarMinValue,
                calendarMaxValue));
    }
}
// Only CurrentEra and HijriEra are accepted.
static internal void CheckEraRange(int era) {
    if (era != CurrentEra && era != HijriEra) {
        throw new ArgumentOutOfRangeException(nameof(era), Environment.GetResourceString("ArgumentOutOfRange_InvalidEraValue"));
    }
}
// Year must be 1..MaxCalendarYear (9666).
static internal void CheckYearRange(int year, int era) {
    CheckEraRange(era);
    if (year < 1 || year > MaxCalendarYear) {
        throw new ArgumentOutOfRangeException(
            nameof(year),
            String.Format(
                CultureInfo.CurrentCulture,
                Environment.GetResourceString("ArgumentOutOfRange_Range"),
                1,
                MaxCalendarYear));
    }
}
// Month must be 1..12, and no later than month 4 in the truncated final year 9666.
static internal void CheckYearMonthRange(int year, int month, int era) {
    CheckYearRange(year, era);
    if (year == MaxCalendarYear) {
        if (month > MaxCalendarMonth) {
            throw new ArgumentOutOfRangeException(
                nameof(month),
                String.Format(
                    CultureInfo.CurrentCulture,
                    Environment.GetResourceString("ArgumentOutOfRange_Range"),
                    1,
                    MaxCalendarMonth));
        }
    }
    if (month < 1 || month > 12) {
        throw new ArgumentOutOfRangeException(nameof(month), Environment.GetResourceString("ArgumentOutOfRange_Month"));
    }
}
/*=================================GetDatePart==========================
**Action: Returns a given date part of this <i>DateTime</i>. This method is used
** to compute the year, day-of-year, month, or day part.
**Returns:
**Arguments:
**Exceptions: ArgumentException if part is incorrect.
**Notes:
** First, we get the absolute date (the number of days from January 1st, 1 A.C) for the given ticks.
** Use the formula (((AbsoluteDate - 227013) * 30) / 10631) + 1, we can get a rough value for the Hijri year.
** In order to get the exact Hijri year, we compare the exact absolute date for HijriYear and (HijriYear + 1).
** From here, we can get the correct Hijri year.
============================================================================*/
internal virtual int GetDatePart(long ticks, int part) {
    int HijriYear; // Hijri year
    int HijriMonth; // Hijri month
    int HijriDay; // Hijri day
    long NumDays; // The calculation buffer in number of days.
    CheckTicksRange(ticks);
    //
    // Get the absolute date. The absolute date is the number of days from January 1st, 1 A.D.
    // 1/1/0001 is absolute date 1.
    //
    NumDays = ticks / GregorianCalendar.TicksPerDay + 1;
    //
    // See how much we need to backup or advance
    //
    NumDays += HijriAdjustment;
    //
    // Calculate the approximate Hijri Year from this magic formula.
    //
    HijriYear = (int)(((NumDays - 227013) * 30) / 10631) + 1;
    long daysToHijriYear = DaysUpToHijriYear(HijriYear); // The absolute date for HijriYear
    long daysOfHijriYear = GetDaysInYear(HijriYear, CurrentEra); // The number of days for (HijriYear+1) year.
    // The estimate can be off by one year in either direction; nudge HijriYear
    // until NumDays falls inside (daysToHijriYear, daysToHijriYear + daysOfHijriYear].
    if (NumDays < daysToHijriYear) {
        daysToHijriYear -= daysOfHijriYear;
        HijriYear--;
    } else if (NumDays == daysToHijriYear) {
        HijriYear--;
        daysToHijriYear -= GetDaysInYear(HijriYear, CurrentEra);
    } else {
        if (NumDays > daysToHijriYear + daysOfHijriYear) {
            daysToHijriYear += daysOfHijriYear;
            HijriYear++;
        }
    }
    if (part == DatePartYear) {
        return (HijriYear);
    }
    //
    // Calculate the Hijri Month.
    //
    HijriMonth = 1;
    NumDays -= daysToHijriYear;   // NumDays is now the 1-based day-of-year
    if (part == DatePartDayOfYear) {
        return ((int)NumDays);
    }
    // Scan the cumulative month table; the 13th entry serves as a sentinel.
    while ((HijriMonth <= 12) && (NumDays > HijriMonthDays[HijriMonth - 1])) {
        HijriMonth++;
    }
    HijriMonth--;
    if (part == DatePartMonth) {
        return (HijriMonth);
    }
    //
    // Calculate the Hijri Day.
    //
    HijriDay = (int)(NumDays - HijriMonthDays[HijriMonth - 1]);
    if (part == DatePartDay) {
        return (HijriDay);
    }
    // Incorrect part value.
    throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_DateTimeParsing"));
}
// Returns the DateTime resulting from adding the given number of
// months to the specified DateTime. The result is computed by incrementing
// (or decrementing) the year and month parts of the specified DateTime by
// value months, and, if required, adjusting the day part of the
// resulting date downwards to the last day of the resulting month in the
// resulting year. The time-of-day part of the result is the same as the
// time-of-day part of the specified DateTime.
//
// In more precise terms, considering the specified DateTime to be of the
// form y / m / d + t, where y is the
// year, m is the month, d is the day, and t is the
// time-of-day, the result is y1 / m1 / d1 + t,
// where y1 and m1 are computed by adding value months
// to y and m, and d1 is the largest value less than
// or equal to d that denotes a valid day in month m1 of year
// y1.
//
public override DateTime AddMonths(DateTime time, int months) {
    if (months < -120000 || months > 120000) {
        throw new ArgumentOutOfRangeException(
            nameof(months),
            String.Format(
                CultureInfo.CurrentCulture,
                Environment.GetResourceString("ArgumentOutOfRange_Range"),
                -120000,
                120000));
    }
    Contract.EndContractBlock();
    // Get the date in Hijri calendar.
    int y = GetDatePart(time.Ticks, DatePartYear);
    int m = GetDatePart(time.Ticks, DatePartMonth);
    int d = GetDatePart(time.Ticks, DatePartDay);
    // Shift the month as a zero-based count, carrying into the year in both directions.
    int i = m - 1 + months;
    if (i >= 0) {
        m = i % 12 + 1;
        y = y + i / 12;
    } else {
        m = 12 + (i + 1) % 12;
        y = y + (i - 11) / 12;
    }
    // Clamp the day down to the last day of the resulting month.
    int days = GetDaysInMonth(y, m);
    if (d > days) {
        d = days;
    }
    // Rebuild ticks, preserving the original time-of-day component.
    long ticks = GetAbsoluteDateHijri(y, m, d)* TicksPerDay + (time.Ticks % TicksPerDay);
    Calendar.CheckAddResult(ticks, MinSupportedDateTime, MaxSupportedDateTime);
    return (new DateTime(ticks));
}
// Returns the DateTime resulting from adding the given number of years to the
// specified DateTime; the day is clamped by AddMonths if the target month is
// shorter. The time-of-day part is preserved.
public override DateTime AddYears(DateTime time, int years) {
    // A year shift is exactly twelve month shifts in this calendar.
    return AddMonths(time, years * 12);
}

// Returns the day-of-month part of the specified DateTime (1..30).
public override int GetDayOfMonth(DateTime time) {
    return GetDatePart(time.Ticks, DatePartDay);
}

// Returns the day-of-week part of the specified DateTime, where 0 is Sunday
// through 6 for Saturday.
public override DayOfWeek GetDayOfWeek(DateTime time) {
    // The weekday cycles with period 7 over the absolute day number.
    return (DayOfWeek)((int)(time.Ticks / TicksPerDay + 1) % 7);
}

// Returns the day-of-year part of the specified DateTime (1..355).
public override int GetDayOfYear(DateTime time) {
    return GetDatePart(time.Ticks, DatePartDayOfYear);
}
// Returns the number of days in the month given by the year and
// month arguments.
[Pure]
public override int GetDaysInMonth(int year, int month, int era) {
    CheckYearMonthRange(year, month, era);
    if (month == 12) {
        // The 12th month (Dhu al-Hijjah) gains a 30th day in leap years.
        return IsLeapYear(year, CurrentEra) ? 30 : 29;
    }
    // Otherwise odd-numbered months have 30 days and even-numbered months 29.
    return (month % 2) == 1 ? 30 : 29;
}

// Returns the number of days in the year given by the year argument for the current era.
public override int GetDaysInYear(int year, int era) {
    CheckYearRange(year, era);
    // Common years have 354 days; leap years have one extra.
    return IsLeapYear(year, CurrentEra) ? 355 : 354;
}
// Returns the era for the specified DateTime value (always HijriEra).
public override int GetEra(DateTime time) {
    CheckTicksRange(time.Ticks);
    return HijriEra;
}

public override int[] Eras {
    get {
        // The Hijri calendar has exactly one era.
        return new int[] { HijriEra };
    }
}

// Returns the month part of the specified DateTime (1..12).
public override int GetMonth(DateTime time) {
    return GetDatePart(time.Ticks, DatePartMonth);
}

// Returns the number of months in the specified year and era (always 12).
public override int GetMonthsInYear(int year, int era) {
    CheckYearRange(year, era);
    return 12;
}

// Returns the year part of the specified DateTime (1..MaxCalendarYear).
public override int GetYear(DateTime time) {
    return GetDatePart(time.Ticks, DatePartYear);
}
// Checks whether a given day in the specified era is a leap day. Only the
// 30th day of the 12th month qualifies, and it exists only in leap years.
public override bool IsLeapDay(int year, int month, int day, int era) {
    // The year/month/era value checking is done in GetDaysInMonth().
    int daysInMonth = GetDaysInMonth(year, month, era);
    if (day < 1 || day > daysInMonth) {
        throw new ArgumentOutOfRangeException(
            nameof(day),
            String.Format(
                CultureInfo.CurrentCulture,
                Environment.GetResourceString("ArgumentOutOfRange_Day"),
                daysInMonth,
                month));
    }
    // Inputs were already validated above, so reordering the checks is safe.
    return month == 12 && day == 30 && IsLeapYear(year, era);
}

// Returns the leap month in a calendar year of the specified era; this
// calendar has no leap month, so the result is always 0.
[System.Runtime.InteropServices.ComVisible(false)]
public override int GetLeapMonth(int year, int era)
{
    CheckYearRange(year, era);
    return 0;
}

// Checks whether a given month in the specified era is a leap month;
// never true for this calendar.
public override bool IsLeapMonth(int year, int month, int era) {
    CheckYearMonthRange(year, month, era);
    return false;
}

// Checks whether a given year in the specified era is a leap year:
// leap iff (11*year + 14) mod 30 < 11.
public override bool IsLeapYear(int year, int era) {
    CheckYearRange(year, era);
    return (year * 11 + 14) % 30 < 11;
}
// Returns the date and time converted to a DateTime value. Throws an exception if the n-tuple is invalid.
//
public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era) {
    // The year/month/era checking is done in GetDaysInMonth().
    int daysInMonth = GetDaysInMonth(year, month, era);
    if (day < 1 || day > daysInMonth) {
        BCLDebug.Log("year = " + year + ", month = " + month + ", day = " + day);
        throw new ArgumentOutOfRangeException(
            nameof(day),
            String.Format(
                CultureInfo.CurrentCulture,
                Environment.GetResourceString("ArgumentOutOfRange_Day"),
                daysInMonth,
                month));
    }
    // lDate can only be negative if the computed date precedes 1 A.D.
    long lDate = GetAbsoluteDateHijri(year, month, day);
    if (lDate >= 0) {
        return (new DateTime(lDate * GregorianCalendar.TicksPerDay + TimeToTicks(hour, minute, second, millisecond)));
    } else {
        throw new ArgumentOutOfRangeException(null, Environment.GetResourceString("ArgumentOutOfRange_BadYearMonthDay"));
    }
}
private const int DEFAULT_TWO_DIGIT_YEAR_MAX = 1451;
public override int TwoDigitYearMax {
get {
if (twoDigitYearMax == -1) {
twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DEFAULT_TWO_DIGIT_YEAR_MAX);
}
return (twoDigitYearMax);
}
set {
VerifyWritable();
if (value < 99 || value > MaxCalendarYear)
{
throw new ArgumentOutOfRangeException(
nameof(value),
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("ArgumentOutOfRange_Range"),
99,
MaxCalendarYear));
}
twoDigitYearMax = value;
}
}
// Expands a two-digit year to four digits; years >= 100 are validated and
// passed through unchanged.
public override int ToFourDigitYear(int year) {
    // Negative years are never valid.
    if (year < 0) {
        throw new ArgumentOutOfRangeException(nameof(year),
            Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
    }
    Contract.EndContractBlock();

    // Delegate sub-100 years to the base implementation.
    if (year < 100) {
        return base.ToFourDigitYear(year);
    }

    // Anything else must already lie within this calendar's year range.
    if (year > MaxCalendarYear) {
        throw new ArgumentOutOfRangeException(
            nameof(year),
            String.Format(
                CultureInfo.CurrentCulture,
                Environment.GetResourceString("ArgumentOutOfRange_Range"),
                1,
                MaxCalendarYear));
    }
    return year;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using Bloom;
using NUnit.Framework;
using SIL.Xml;
namespace BloomTests
{
//NB: if c# ever allows us to add static exension methods,
//then all this could be an extension on nunit's Assert class.
public class AssertThatXmlIn
{
	/// <summary>Asserts against an in-memory XmlDocument.</summary>
	public static AssertDom Dom(XmlDocument dom) => new AssertDom(dom);

	/// <summary>Asserts against an XML file loaded from disk.</summary>
	public static AssertFile File(string path) => new AssertFile(path);

	/// <summary>Asserts against an HTML file converted to an XML DOM.</summary>
	public static AssertHtmlFile HtmlFile(string path) => new AssertHtmlFile(path);

	/// <summary>Asserts against an XML string parsed on demand.</summary>
	public static AssertXmlString String(string xmlString) => new AssertXmlString(xmlString);

	/// <summary>Asserts against a single XmlElement.</summary>
	public static AssertElement Element(XmlElement element) => new AssertElement(element);
}
/// <summary>
/// Xpath assertions over an XML string; the string is re-parsed each time
/// NodeOrDom is read.
/// </summary>
public class AssertXmlString : AssertXmlCommands
{
	private readonly string _xmlString;

	public AssertXmlString(string xmlString)
	{
		_xmlString = xmlString;
	}

	protected override XmlNode NodeOrDom
	{
		get
		{
			var document = new XmlDocument();
			document.LoadXml(_xmlString);
			return document;
		}
	}
}
/// <summary>
/// Xpath assertions over an XML file; the file is re-loaded each time
/// NodeOrDom is read.
/// </summary>
public class AssertFile : AssertXmlCommands
{
	private readonly string _path;

	public AssertFile(string path)
	{
		_path = path;
	}

	protected override XmlNode NodeOrDom
	{
		get
		{
			var document = new XmlDocument();
			document.Load(_path);
			return document;
		}
	}
}
/// <summary>
/// Xpath assertions over an HTML file, converted to an XML DOM via
/// XmlHtmlConverter on each read of NodeOrDom.
/// </summary>
public class AssertHtmlFile : AssertXmlCommands
{
	private readonly string _path;

	public AssertHtmlFile(string path)
	{
		_path = path;
	}

	protected override XmlNode NodeOrDom =>
		XmlHtmlConverter.GetXmlDomFromHtmlFile(_path, false);
}
/// <summary>
/// Xpath assertions over an already-constructed XmlDocument.
/// </summary>
public class AssertDom : AssertXmlCommands
{
	private readonly XmlDocument _dom;

	public AssertDom(XmlDocument dom)
	{
		_dom = dom;
	}

	protected override XmlNode NodeOrDom => _dom;
}
/// <summary>
/// Xpath assertions rooted at a single XmlElement.
/// </summary>
public class AssertElement : AssertXmlCommands
{
	private readonly XmlElement _element;

	public AssertElement(XmlElement element)
	{
		_element = element;
	}

	protected override XmlNode NodeOrDom => _element;
}
/// <summary>
/// Shared xpath assertion helpers. Concrete subclasses supply the node or
/// document to query via NodeOrDom. On failure the node is dumped to the
/// console before the NUnit assertion fires, so test output shows what was
/// actually searched.
/// </summary>
public abstract class AssertXmlCommands
{
	protected abstract XmlNode NodeOrDom { get; }

	public XmlNameTable NameTable
	{
		get
		{
			var doc = NodeOrDom as XmlDocument;
			// review: may or may not work; I've only tried cases where it IS a document.
			return doc != null ? doc.NameTable : NodeOrDom.OwnerDocument.NameTable;
		}
	}

	/// <summary>
	/// Asserts that the xpath matches at least one node, resolving prefixes
	/// through the supplied namespace manager.
	/// </summary>
	public void HasAtLeastOneMatchForXpath(string xpath, XmlNamespaceManager nameSpaceManager)
	{
		var match = GetNode(xpath, nameSpaceManager);
		if (match == null)
		{
			Console.WriteLine("Could not match " + xpath);
			PrintNodeToConsole(NodeOrDom);
		}
		Assert.IsNotNull(match, "Not matched: " + xpath);
	}

	/// <summary>
	/// Will honor default namespace
	/// </summary>
	public void HasAtLeastOneMatchForXpath(string xpath)
	{
		var match = GetNode(xpath);
		if (match == null)
		{
			Console.WriteLine("Could not match " + xpath);
			PrintNodeToConsole(NodeOrDom);
		}
		Assert.IsNotNull(match, "Not matched: " + xpath);
	}

	/// <summary>
	/// Will honor default namespace
	/// </summary>
	public void HasSpecifiedNumberOfMatchesForXpath(string xpath, int count)
	{
		var matches = NodeOrDom.SafeSelectNodes(xpath);
		CheckCountOfNodes(matches, xpath, count);
	}

	public void HasSpecifiedNumberOfMatchesForXpath(string xpath, XmlNamespaceManager nameSpaceManager, int count)
	{
		var matches = NodeOrDom.SafeSelectNodes(xpath, nameSpaceManager);
		CheckCountOfNodes(matches, xpath, count);
	}

	// Shared tail of the count assertions; a null node list counts as zero.
	private void CheckCountOfNodes(XmlNodeList nodes, string xpath, int count)
	{
		var actual = nodes == null ? 0 : nodes.Count;
		if (actual != count)
		{
			Console.WriteLine("Expected {0} but got {1} matches for {2}", count, actual, xpath);
			PrintNodeToConsole(NodeOrDom);
			Assert.AreEqual(count, actual, "matches for "+xpath);
		}
	}

	public int CountOfMatchesForXPath(string xpath) => NodeOrDom.SafeSelectNodes(xpath).Count;

	/// <summary>
	/// Writes the node's content to the console as an indented XML fragment.
	/// </summary>
	public static void PrintNodeToConsole(XmlNode node)
	{
		// without this, we may get "DTD is not allowed in XML fragments"
		var doctype = (node as XmlDocument)?.DocumentType;
		if (doctype != null)
			node.RemoveChild(doctype);

		var settings = new XmlWriterSettings
		{
			Indent = true,
			ConformanceLevel = ConformanceLevel.Fragment
		};
		var writer = XmlWriter.Create(Console.Out, settings);
		node.WriteContentTo(writer);
		writer.Flush();
		Console.WriteLine();
	}

	public void HasNoMatchForXpath(string xpath, XmlNamespaceManager nameSpaceManager)
	{
		var match = GetNode(xpath, nameSpaceManager);
		if (match != null)
		{
			Console.WriteLine("Was not supposed to match " + xpath);
			PrintNodeToConsole(NodeOrDom);
		}
		Assert.IsNull(match, "Should not have matched: " + xpath);
	}

	// NOTE(review): unlike HasAtLeastOneMatchForXpath(string), this overload
	// queries with an empty namespace manager rather than honoring the default
	// namespace — presumably intentional, but worth confirming with callers.
	public void HasNoMatchForXpath(string xpath)
	{
		var match = GetNode(xpath, new XmlNamespaceManager(new NameTable()));
		if (match != null)
		{
			Console.WriteLine("Was not supposed to match " + xpath);
			PrintNodeToConsole(NodeOrDom);
		}
		Assert.IsNull(match, "Should not have matched: " + xpath);
	}

	private XmlNode GetNode(string xpath) => NodeOrDom.SelectSingleNodeHonoringDefaultNS(xpath);

	private XmlNode GetNode(string xpath, XmlNamespaceManager nameSpaceManager) =>
		NodeOrDom.SelectSingleNode(xpath, nameSpaceManager);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Cdn
{
using System.Threading.Tasks;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for ProfilesOperations.
/// </summary>
public static partial class ProfilesOperationsExtensions
{
    // NOTE(review): AutoRest-generated client (see file header) — do not
    // hand-edit; changes are lost on regeneration. Each synchronous wrapper
    // below blocks the calling thread on its async counterpart via
    // Task.Factory.StartNew(...).Unwrap().GetAwaiter().GetResult(); prefer
    // the *Async methods from async call sites to avoid sync-over-async.
    /// <summary>
    /// Lists all the CDN profiles within an Azure subscription.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    public static Microsoft.Rest.Azure.IPage<Profile> List(this IProfilesOperations operations)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).ListAsync(), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Lists all the CDN profiles within an Azure subscription.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async Task<Microsoft.Rest.Azure.IPage<Profile>> ListAsync(this IProfilesOperations operations, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.ListWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Lists all the CDN profiles within a resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    public static Microsoft.Rest.Azure.IPage<Profile> ListByResourceGroup(this IProfilesOperations operations, string resourceGroupName)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).ListByResourceGroupAsync(resourceGroupName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Lists all the CDN profiles within a resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async Task<Microsoft.Rest.Azure.IPage<Profile>> ListByResourceGroupAsync(this IProfilesOperations operations, string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.ListByResourceGroupWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Gets a CDN profile with the specified profile name under the specified
    /// subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    public static Profile Get(this IProfilesOperations operations, string resourceGroupName, string profileName)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).GetAsync(resourceGroupName, profileName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Gets a CDN profile with the specified profile name under the specified
    /// subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task<Profile> GetAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, profileName, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Creates a new CDN profile with a profile name under the specified
    /// subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='profile'>
    /// Profile properties needed to create a new profile.
    /// </param>
    public static Profile Create(this IProfilesOperations operations, string resourceGroupName, string profileName, Profile profile)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).CreateAsync(resourceGroupName, profileName, profile), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Creates a new CDN profile with a profile name under the specified
    /// subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='profile'>
    /// Profile properties needed to create a new profile.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task<Profile> CreateAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, Profile profile, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.CreateWithHttpMessagesAsync(resourceGroupName, profileName, profile, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Creates a new CDN profile with a profile name under the specified
    /// subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='profile'>
    /// Profile properties needed to create a new profile.
    /// </param>
    public static Profile BeginCreate(this IProfilesOperations operations, string resourceGroupName, string profileName, Profile profile)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).BeginCreateAsync(resourceGroupName, profileName, profile), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Creates a new CDN profile with a profile name under the specified
    /// subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='profile'>
    /// Profile properties needed to create a new profile.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task<Profile> BeginCreateAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, Profile profile, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.BeginCreateWithHttpMessagesAsync(resourceGroupName, profileName, profile, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Updates an existing CDN profile with the specified profile name under the
    /// specified subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='tags'>
    /// Profile tags
    /// </param>
    public static Profile Update(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Collections.Generic.IDictionary<string, string> tags)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).UpdateAsync(resourceGroupName, profileName, tags), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Updates an existing CDN profile with the specified profile name under the
    /// specified subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='tags'>
    /// Profile tags
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task<Profile> UpdateAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Collections.Generic.IDictionary<string, string> tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.UpdateWithHttpMessagesAsync(resourceGroupName, profileName, tags, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Updates an existing CDN profile with the specified profile name under the
    /// specified subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='tags'>
    /// Profile tags
    /// </param>
    public static Profile BeginUpdate(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Collections.Generic.IDictionary<string, string> tags)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).BeginUpdateAsync(resourceGroupName, profileName, tags), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Updates an existing CDN profile with the specified profile name under the
    /// specified subscription and resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='tags'>
    /// Profile tags
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task<Profile> BeginUpdateAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Collections.Generic.IDictionary<string, string> tags, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.BeginUpdateWithHttpMessagesAsync(resourceGroupName, profileName, tags, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Deletes an existing CDN profile with the specified parameters. Deleting a
    /// profile will result in the deletion of all subresources including
    /// endpoints, origins and custom domains.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    public static void Delete(this IProfilesOperations operations, string resourceGroupName, string profileName)
    {
        System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).DeleteAsync(resourceGroupName, profileName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Deletes an existing CDN profile with the specified parameters. Deleting a
    /// profile will result in the deletion of all subresources including
    /// endpoints, origins and custom domains.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task DeleteAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        await operations.DeleteWithHttpMessagesAsync(resourceGroupName, profileName, null, cancellationToken).ConfigureAwait(false);
    }
    /// <summary>
    /// Deletes an existing CDN profile with the specified parameters. Deleting a
    /// profile will result in the deletion of all subresources including
    /// endpoints, origins and custom domains.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    public static void BeginDelete(this IProfilesOperations operations, string resourceGroupName, string profileName)
    {
        System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).BeginDeleteAsync(resourceGroupName, profileName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Deletes an existing CDN profile with the specified parameters. Deleting a
    /// profile will result in the deletion of all subresources including
    /// endpoints, origins and custom domains.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task BeginDeleteAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, profileName, null, cancellationToken).ConfigureAwait(false);
    }
    /// <summary>
    /// Generates a dynamic SSO URI used to sign in to the CDN supplemental
    /// portal. Supplemnetal portal is used to configure advanced feature
    /// capabilities that are not yet available in the Azure portal, such as core
    /// reports in a standard profile; rules engine, advanced HTTP reports, and
    /// real-time stats and alerts in a premium profile. The SSO URI changes
    /// approximately every 10 minutes.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    public static SsoUri GenerateSsoUri(this IProfilesOperations operations, string resourceGroupName, string profileName)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).GenerateSsoUriAsync(resourceGroupName, profileName), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Generates a dynamic SSO URI used to sign in to the CDN supplemental
    /// portal. Supplemnetal portal is used to configure advanced feature
    /// capabilities that are not yet available in the Azure portal, such as core
    /// reports in a standard profile; rules engine, advanced HTTP reports, and
    /// real-time stats and alerts in a premium profile. The SSO URI changes
    /// approximately every 10 minutes.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='resourceGroupName'>
    /// Name of the Resource group within the Azure subscription.
    /// </param>
    /// <param name='profileName'>
    /// Name of the CDN profile which is unique within the resource group.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async System.Threading.Tasks.Task<SsoUri> GenerateSsoUriAsync(this IProfilesOperations operations, string resourceGroupName, string profileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.GenerateSsoUriWithHttpMessagesAsync(resourceGroupName, profileName, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Lists all the CDN profiles within an Azure subscription.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='nextPageLink'>
    /// The NextLink from the previous successful call to List operation.
    /// </param>
    public static Microsoft.Rest.Azure.IPage<Profile> ListNext(this IProfilesOperations operations, string nextPageLink)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).ListNextAsync(nextPageLink), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Lists all the CDN profiles within an Azure subscription.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='nextPageLink'>
    /// The NextLink from the previous successful call to List operation.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async Task<Microsoft.Rest.Azure.IPage<Profile>> ListNextAsync(this IProfilesOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
    /// <summary>
    /// Lists all the CDN profiles within a resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='nextPageLink'>
    /// The NextLink from the previous successful call to List operation.
    /// </param>
    public static Microsoft.Rest.Azure.IPage<Profile> ListByResourceGroupNext(this IProfilesOperations operations, string nextPageLink)
    {
        return System.Threading.Tasks.Task.Factory.StartNew(s => ((IProfilesOperations)s).ListByResourceGroupNextAsync(nextPageLink), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
    }
    /// <summary>
    /// Lists all the CDN profiles within a resource group.
    /// </summary>
    /// <param name='operations'>
    /// The operations group for this extension method.
    /// </param>
    /// <param name='nextPageLink'>
    /// The NextLink from the previous successful call to List operation.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    public static async Task<Microsoft.Rest.Azure.IPage<Profile>> ListByResourceGroupNextAsync(this IProfilesOperations operations, string nextPageLink, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
    {
        using (var _result = await operations.ListByResourceGroupNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
        {
            return _result.Body;
        }
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.