context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
using System;
using System.IO;
using System.IO.Abstractions;
using System.Linq;
using System.Net;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Text;
using Moq;
using ReactiveUI;
using Shimmer.Client;
using Shimmer.Core;
using Shimmer.Tests.TestHelpers;
using Xunit;
namespace Shimmer.Tests.Client
{
public class DownloadReleasesTests : IEnableLogger
{
[Fact]
public void ChecksumShouldPassOnValidPackages()
{
    // A package whose on-disk bytes match the recorded release entry must
    // pass checksum verification without throwing.
    const string packageName = "Shimmer.Core.1.0.0.0.nupkg";
    var packagePath = IntegrationTestHelper.GetPath("fixtures", packageName);

    var fileSystem = new Mock<IFileSystemFactory>();
    var downloader = new Mock<IUrlDownloader>();

    ReleaseEntry releaseEntry;
    using (var stream = File.OpenRead(packagePath)) {
        releaseEntry = ReleaseEntry.GenerateFromFile(stream, packageName);
    }

    // Back the mocked file info with the real fixture so length and hash agree.
    var packageFile = new Mock<FileInfoBase>();
    packageFile.Setup(x => x.OpenRead()).Returns(File.OpenRead(packagePath));
    packageFile.Setup(x => x.Exists).Returns(true);
    packageFile.Setup(x => x.Length).Returns(new FileInfo(packagePath).Length);
    fileSystem.Setup(x => x.GetFileInfo(Path.Combine(".", "theApp", "packages", packageName))).Returns(packageFile.Object);

    var fixture = ExposedObject.From(
        new UpdateManager("http://lol", "theApp", FrameworkVersion.Net40, ".", fileSystem.Object, downloader.Object));

    // Act + assert: a valid package must verify cleanly (no exception).
    fixture.checksumPackage(releaseEntry);
}
[Fact]
public void ChecksumShouldFailIfFilesAreMissing()
{
    // Arrange: generate a valid release entry from the real fixture package...
    var filename = "Shimmer.Core.1.0.0.0.nupkg";
    var nuGetPkg = IntegrationTestHelper.GetPath("fixtures", filename);
    var fs = new Mock<IFileSystemFactory>();
    var urlDownloader = new Mock<IUrlDownloader>();
    ReleaseEntry entry;
    using (var f = File.OpenRead(nuGetPkg)) {
        entry = ReleaseEntry.GenerateFromFile(f, filename);
    }
    // ...but report the on-disk package as missing (Exists == false).
    var fileInfo = new Mock<FileInfoBase>();
    fileInfo.Setup(x => x.OpenRead()).Returns(File.OpenRead(nuGetPkg));
    fileInfo.Setup(x => x.Exists).Returns(false);
    fs.Setup(x => x.GetFileInfo(Path.Combine(".", "theApp", "packages", filename))).Returns(fileInfo.Object);
    var fixture = ExposedObject.From(
        new UpdateManager("http://lol", "theApp", FrameworkVersion.Net40, ".", fs.Object, urlDownloader.Object));
    // 'shouldDie' flips to false only when the expected exception is observed,
    // so the final assertion verifies that checksumPackage threw.
    bool shouldDie = true;
    try {
        // NB: We can't use Assert.Throws here because the binder
        // will try to pick the wrong method
        fixture.checksumPackage(entry);
    } catch (Exception) {
        shouldDie = false;
    }
    shouldDie.ShouldBeFalse();
}
[Fact]
public void ChecksumShouldFailIfFilesAreBogus()
{
    // Arrange: generate a valid release entry from the real fixture package...
    var filename = "Shimmer.Core.1.0.0.0.nupkg";
    var nuGetPkg = IntegrationTestHelper.GetPath("fixtures", filename);
    var fs = new Mock<IFileSystemFactory>();
    var urlDownloader = new Mock<IUrlDownloader>();
    ReleaseEntry entry;
    using (var f = File.OpenRead(nuGetPkg)) {
        entry = ReleaseEntry.GenerateFromFile(f, filename);
    }
    // ...but serve corrupt bytes when the package is opened, while the
    // reported length still matches the real file.
    var fileInfo = new Mock<FileInfoBase>();
    fileInfo.Setup(x => x.OpenRead()).Returns(new MemoryStream(Encoding.UTF8.GetBytes("Lol broken")));
    fileInfo.Setup(x => x.Exists).Returns(true);
    fileInfo.Setup(x => x.Length).Returns(new FileInfo(nuGetPkg).Length);
    fileInfo.Setup(x => x.Delete()).Verifiable();
    fs.Setup(x => x.GetFileInfo(Path.Combine(".", "theApp", "packages", filename))).Returns(fileInfo.Object);
    var fixture = ExposedObject.From(
        new UpdateManager("http://lol", "theApp", FrameworkVersion.Net40, ".", fs.Object, urlDownloader.Object));
    // 'shouldDie' flips to false only when the expected checksum failure is
    // observed; see the sibling test for why Assert.Throws isn't used here.
    bool shouldDie = true;
    try {
        fixture.checksumPackage(entry);
    } catch (Exception ex) {
        this.Log().InfoException("Checksum failure", ex);
        shouldDie = false;
    }
    shouldDie.ShouldBeFalse();
    // A corrupt package must also be deleted so it can be re-downloaded.
    fileInfo.Verify(x => x.Delete(), Times.Once());
}
[Fact]
public void DownloadReleasesFromHttpServerIntegrationTest()
{
    string tempDir = null;
    var updateDir = new DirectoryInfo(IntegrationTestHelper.GetPath("..", "SampleUpdatingApp", "SampleReleasesFolder"));

    // Serve the sample releases folder over HTTP; bail out (with a hint about
    // registering the URL ACL) when the listener can't bind.
    IDisposable disp;
    try {
        var httpServer = new StaticHttpServer(30405, updateDir.FullName);
        disp = httpServer.Start();
    }
    catch (HttpListenerException) {
        // FIX: message previously had an unterminated quote around the netsh command.
        Assert.False(true, @"Windows sucks, go run 'netsh http add urlacl url=http://+:30405/ user=MYMACHINE\MyUser'");
        return;
    }

    var entriesToDownload = updateDir.GetFiles("*.nupkg")
        .Select(x => ReleaseEntry.GenerateFromFile(x.FullName))
        .ToArray();
    // Length on the materialized array instead of LINQ Count().
    entriesToDownload.Length.ShouldBeGreaterThan(0);

    using (disp)
    using (Utility.WithTempDirectory(out tempDir)) {
        // NB: This is normally done by CheckForUpdates, but since
        // we're skipping that in the test we have to do it ourselves
        Directory.CreateDirectory(Path.Combine(tempDir, "SampleUpdatingApp", "packages"));

        var fixture = new UpdateManager("http://localhost:30405", "SampleUpdatingApp", FrameworkVersion.Net40, tempDir);
        using (fixture) {
            var progress = new ReplaySubject<int>();
            fixture.DownloadReleases(entriesToDownload, progress).First();
            this.Log().Info("Progress: [{0}]", String.Join(",", progress));

            // Progress values must be strictly increasing and finish at 100.
            progress.Buffer(2,1).All(x => x.Count != 2 || x[1] > x[0]).First().ShouldBeTrue();
            progress.Last().ShouldEqual(100);
        }

        // Every downloaded package must exist and match its recorded hash/version.
        entriesToDownload.ForEach(x => {
            this.Log().Info("Looking for {0}", x.Filename);
            var actualFile = Path.Combine(tempDir, "SampleUpdatingApp", "packages", x.Filename);
            File.Exists(actualFile).ShouldBeTrue();
            var actualEntry = ReleaseEntry.GenerateFromFile(actualFile);
            actualEntry.SHA1.ShouldEqual(x.SHA1);
            actualEntry.Version.ShouldEqual(x.Version);
        });
    }
}
[Fact]
public void DownloadReleasesFromFileDirectoryIntegrationTest()
{
    string tempDir = null;
    var updateDir = new DirectoryInfo(IntegrationTestHelper.GetPath("..", "SampleUpdatingApp", "SampleReleasesFolder"));

    var entriesToDownload = updateDir.GetFiles("*.nupkg")
        .Select(x => ReleaseEntry.GenerateFromFile(x.FullName))
        .ToArray();
    // Length on the materialized array instead of LINQ Count(), matching the
    // HTTP variant of this test.
    entriesToDownload.Length.ShouldBeGreaterThan(0);

    using (Utility.WithTempDirectory(out tempDir)) {
        // NB: This is normally done by CheckForUpdates, but since
        // we're skipping that in the test we have to do it ourselves
        Directory.CreateDirectory(Path.Combine(tempDir, "SampleUpdatingApp", "packages"));

        // Point UpdateManager straight at the releases directory (file:// path).
        var fixture = new UpdateManager(updateDir.FullName, "SampleUpdatingApp", FrameworkVersion.Net40, tempDir);
        using (fixture) {
            var progress = new ReplaySubject<int>();
            fixture.DownloadReleases(entriesToDownload, progress).First();
            this.Log().Info("Progress: [{0}]", String.Join(",", progress));

            // Progress values must be strictly increasing and finish at 100.
            progress.Buffer(2,1).All(x => x.Count != 2 || x[1] > x[0]).First().ShouldBeTrue();
            progress.Last().ShouldEqual(100);
        }

        // Every downloaded package must exist and match its recorded hash/version.
        entriesToDownload.ForEach(x => {
            this.Log().Info("Looking for {0}", x.Filename);
            var actualFile = Path.Combine(tempDir, "SampleUpdatingApp", "packages", x.Filename);
            File.Exists(actualFile).ShouldBeTrue();
            var actualEntry = ReleaseEntry.GenerateFromFile(actualFile);
            actualEntry.SHA1.ShouldEqual(x.SHA1);
            actualEntry.Version.ShouldEqual(x.Version);
        });
    }
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using System.Windows.Forms;
using OpenLiveWriter.ApplicationFramework;
using OpenLiveWriter.Controls;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.CoreServices.HTML;
using OpenLiveWriter.HtmlEditor.Controls;
using OpenLiveWriter.HtmlEditor.Linking;
using OpenLiveWriter.HtmlParser.Parser;
using OpenLiveWriter.Interop.Windows;
using OpenLiveWriter.Localization;
using OpenLiveWriter.Mshtml;
//using OpenLiveWriter.SpellChecker;
namespace OpenLiveWriter.HtmlEditor
{
public class HtmlSourceEditorControl : IHtmlEditor, IHtmlEditorCommandSource
{
CommandContextMenuDefinition contextMenu = new CommandContextMenuDefinition();
private CommandManager _commandManager;
public CommandManager CommandManager
{
get
{
return _commandManager;
}
}
//ToDo: OLW Spell Checker
//public HtmlSourceEditorControl(ISpellingChecker spellingChecker, CommandManager commandManager)
// Builds the source-view editor: populates the context menu, creates and
// configures the underlying textbox, and wires up all event handlers.
public HtmlSourceEditorControl(CommandManager commandManager)
{
    _commandManager = commandManager;
    //_spellingChecker = spellingChecker;
    // Context-menu layout: clipboard group, then SelectAll / InsertLink
    // (the boolean pairs control separators around each entry).
    contextMenu.Entries.Add(CommandId.Cut, false, false);
    contextMenu.Entries.Add(CommandId.CopyCommand, false, false);
    contextMenu.Entries.Add(CommandId.Paste, false, false);
    contextMenu.Entries.Add(CommandId.PasteSpecial, false, false);
    contextMenu.Entries.Add(CommandId.SelectAll, true, true);
    contextMenu.Entries.Add(CommandId.InsertLink, true, true);
    // create and initialize the editor
    _textBox = new TextBoxEditorControl();
    _textBox.BorderStyle = BorderStyle.None;
    _textBox.Multiline = true;
    _textBox.HideSelection = false;             // keep selection visible when focus moves away
    _textBox.ScrollBars = ScrollBars.Vertical;
    _textBox.Font = new Font("Courier New", 10); // monospace for HTML source
    _textBox.Dock = DockStyle.Fill;
    _textBox.MaxLength = 0;                      // 0 = no length limit
    _textBox.AcceptsTab = true;
    // Empty ContextMenu suppresses the default textbox menu so the
    // ContextMenuTriggered handler below can show the command-based one.
    _textBox.ContextMenu = new ContextMenu();
    _textBox.TextChanged += new EventHandler(_textBox_TextChanged);
    _textBox.ModifiedChanged += new EventHandler(_textBox_ModifiedChanged);
    _textBox.ContextMenuTriggered += new TextBoxEditorControl.ContextMenuTriggeredEventHandler(_textBox_ContextMenuTriggered);
    _textBox.GotFocus += new EventHandler(_textBox_GotFocus);
    _textBox.LostFocus += new EventHandler(_textBox_LostFocus);
    _textBox.KeyDown += new KeyEventHandler(_textBox_KeyDown);
    _textBox.MouseDown += new MouseEventHandler(_textBox_MouseDown);
    _textBox.MouseUp += new MouseEventHandler(_textBox_MouseUp);
    _textBox.RightToLeft = RightToLeft.No;
}
// Left-button release may have completed a selection drag; refresh command state.
void _textBox_MouseUp(object sender, MouseEventArgs e)
{
    if (e.Button == MouseButtons.Left)
        OnCommandStateChanged();
}
// Left-button press moves the caret / collapses the selection; refresh command state.
void _textBox_MouseDown(object sender, MouseEventArgs e)
{
    if (e.Button == MouseButtons.Left)
        OnCommandStateChanged();
}
// Caret-moving and deleting keys can change which commands are applicable
// (e.g. CanCut/CanCopy depend on SelectionLength), so notify listeners.
void _textBox_KeyDown(object sender, KeyEventArgs e)
{
    if (e.KeyCode == Keys.Left ||
        e.KeyCode == Keys.Right ||
        e.KeyCode == Keys.Up ||
        e.KeyCode == Keys.Down ||
        e.KeyCode == Keys.Delete ||
        e.KeyCode == Keys.Back)
        OnCommandStateChanged();
}
~HtmlSourceEditorControl()
{
Debug.Fail("HtmlSourceEditorControl should be disposed");
Dispose(false);
}
private void _textBox_ContextMenuTriggered(object sender, TextBoxEditorControl.ContextMenuTriggeredEventArgs e)
{
OnCommandStateChanged();
Command command = CommandContextMenu.ShowModal(CommandManager, (Control)sender, e.ContextMenuLocation, contextMenu);
if (command != null)
command.PerformExecute();
}
public bool InBody
{
set
{
_canInsertHyperlink = value;
}
}
#region IHtmlEditor Members
public Control EditorControl
{
get
{
return _textBox;
}
}
public IHtmlEditorCommandSource CommandSource
{
get { return this; }
}
private bool _fullyEditableRegionActive;
public bool FullyEditableRegionActive
{
get { return _fullyEditableRegionActive; }
set { _fullyEditableRegionActive = value; }
}
/// <summary>
/// Loads the given HTML file (read as UTF-8) into the source editor,
/// preserving the caret position across the text swap.
/// </summary>
public void LoadHtmlFile(string fileName)
{
    using (var reader = new StreamReader(fileName, Encoding.UTF8))
    {
        // Remember where the user's caret was so it survives the reload.
        int caretPosition = _textBox.SelectionStart;

        string rawHtml = reader.ReadToEnd();
        _textBox.Text = NEWLINE + CleanupHtml(rawHtml, _textBox.Width - 5);

        _textBox.SelectionStart = caretPosition;
        OnCommandStateChanged();
    }
}
/// <summary>
/// Returns the editor contents, stripping the single leading newline that
/// LoadHtmlFile prepends when the document is loaded.
/// </summary>
public virtual string GetEditedHtml(bool preferWellFormed)
{
    var html = _textBox.Text;
    return html.StartsWith(NEWLINE) ? html.Substring(NEWLINE.Length) : html;
}

/// <summary>
/// Fast path is identical here: the source view has no expensive
/// serialization step, so just delegate to the standard getter.
/// </summary>
public virtual string GetEditedHtmlFast()
{
    return GetEditedHtml(false);
}
public string SelectedText
{
get
{
return _textBox.SelectedText;
}
}
public string SelectedHtml
{
get
{
return _textBox.SelectedText;
}
}
public void EmptySelection()
{
_textBox.Select(0, 0);
}
public void InsertHtml(string content, bool moveSelectionRight)
{
InsertHtml(content, moveSelectionRight ? HtmlInsertionOptions.MoveCursorAfter : HtmlInsertionOptions.Default);
}
public void InsertHtml(string content, HtmlInsertionOptions options)
{
_textBox.Paste(content);
}
/// <summary>
/// Inserts an anchor tag at the current selection. Only the attributes the
/// caller actually supplied (target/title/rel) are emitted; all values are
/// entity-escaped before insertion.
/// </summary>
public void InsertLink(string url, string linkText, string linkTitle, string rel, bool newWindow)
{
    // Build the format string first, then fill in escaped values.
    StringBuilder link = new StringBuilder("<a href=\"{0}\"");
    if (newWindow)
    {
        link.Append(" target=\"_blank\"");
    }
    // string.IsNullOrEmpty replaces the original's manual null/empty double-check.
    if (!string.IsNullOrEmpty(linkTitle))
    {
        link.Append(" title=\"{2}\"");
    }
    if (!string.IsNullOrEmpty(rel))
    {
        link.Append(" rel=\"{3}\"");
    }
    link.Append(">{1}</a>");
    InsertHtml(String.Format(CultureInfo.InvariantCulture, link.ToString(),
               HtmlUtils.EscapeEntities(url),
               HtmlUtils.EscapeEntities(linkText),
               HtmlUtils.EscapeEntities(linkTitle),
               HtmlUtils.EscapeEntities(rel)),
               true);
}
public bool IsDirty
{
get
{
return _isDirty;
}
set
{
_isDirty = value;
if (_isDirty && IsDirtyEvent != null)
{
IsDirtyEvent(this, EventArgs.Empty);
}
}
}
private bool _isDirty;
public event EventHandler IsDirtyEvent;
public bool SuspendAutoSave
{
get { return false; }
}
public void Print()
{ }
public void PrintPreview()
{ }
public bool CanPrint
{
get
{
return false;
}
}
public void Find()
{
using (HtmlSourceEditorFindTextForm findTextForm = new HtmlSourceEditorFindTextForm(_textBox))
{
// determine ownership and location of form
Form parentForm = _textBox.FindForm();
// No need to to manually center when we can just allow the form to center itself on the parent
//findTextForm.StartPosition = FormStartPosition.CenterParent ;
//findTextForm.Top = parentForm.Top + (parentForm.Height/2) - (findTextForm.Height/2) ;
//findTextForm.Left = parentForm.Right - findTextForm.Width - 25 ;
// show the form
findTextForm.ShowDialog(parentForm);
}
}
public bool CheckSpelling(string contextDictionaryPath)
{
//ToDo: OLW Spell Checker
// check spelling
//using (SpellCheckerForm spellCheckerForm = new SpellCheckerForm(SpellingChecker, EditorControl.FindForm(), false))
//{
// center the spell-checking form over the document body
//spellCheckerForm.StartPosition = FormStartPosition.CenterParent;
// create word range
// TODO: smarter word range for html
//TextBoxWordRange wordRange = new TextBoxWordRange(_textBox, _textBox.SelectionLength > 0);
//HtmlTextBoxWordRange wordRange = new HtmlTextBoxWordRange(_textBox);
// check spelling
//spellCheckerForm.CheckSpelling(wordRange, contextDictionaryPath);
// return completed status
return true; // spellCheckerForm.Completed;
//}
}
//ToDo: OLW Spell Checker
/// <summary>
/// Get the spelling-checker (demand-create and cache/re-use)
/// </summary>
//public ISpellingChecker SpellingChecker
//{
// get
// {
// return _spellingChecker;
// }
//}
//private ISpellingChecker _spellingChecker;
#endregion
#region IHtmlEditorCommandSource Members
public void ViewSource()
{
}
void IHtmlEditorCommandSource.ClearFormatting()
{
Debug.Assert(false, "This should be disabled");
}
bool IHtmlEditorCommandSource.CanApplyFormatting(CommandId? commandId)
{
if (commandId == CommandId.ClearFormatting)
return false;
// we always have a selection
return true;
}
public string SelectionFontFamily
{
get { return String.Empty; }
}
void IHtmlEditorCommandSource.ApplyFontForeColor(int color)
{
string selectedText = _textBox.SelectedText;
_textBox.Paste("<font color=\"" + ColorHelper.ColorToString(Color.FromArgb(color)) + "\">" + selectedText + "</font>");
}
public int SelectionBackColor
{
get { return 0; }
}
void IHtmlEditorCommandSource.ApplyFontBackColor(int? color)
{
string selectedText = _textBox.SelectedText;
if (color.HasValue)
{
_textBox.Paste("<font style=\"background-color:" + ColorHelper.ColorToString(Color.FromArgb(color.Value)) + "\">" + selectedText + "</font>");
}
}
void IHtmlEditorCommandSource.ApplyFontFamily(string fontFamily)
{
ApplyFormattingTag("font", "face=\"" + fontFamily + "\"");
}
public float SelectionFontSize
{
get { return 0; }
}
void IHtmlEditorCommandSource.ApplyFontSize(float fontSize)
{
ApplyFormattingTag("font", "size=\"" + HTMLElementHelper.PointFontSizeToHtmlFontSize(fontSize) + "\"");
}
public int SelectionForeColor
{
get { return 0; }
}
string IHtmlEditorCommandSource.SelectionStyleName
{
get
{
return null;
}
}
void IHtmlEditorCommandSource.ApplyHtmlFormattingStyle(IHtmlFormattingStyle style)
{
ApplyFormattingTag(style.ElementName, null);
}
bool IHtmlEditorCommandSource.SelectionBold
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplyBold()
{
ApplyFormattingTag("strong", null);
}
bool IHtmlEditorCommandSource.SelectionItalic
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplyItalic()
{
ApplyFormattingTag("em", null);
}
bool IHtmlEditorCommandSource.SelectionUnderlined
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplyUnderline()
{
ApplyFormattingTag("u", null);
}
bool IHtmlEditorCommandSource.SelectionStrikethrough
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplyStrikethrough()
{
ApplyFormattingTag("strike", null);
}
bool IHtmlEditorCommandSource.SelectionSuperscript
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplySuperscript()
{
ApplyFormattingTag("sup", null);
}
bool IHtmlEditorCommandSource.SelectionSubscript
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplySubscript()
{
ApplyFormattingTag("sub", null);
}
bool IHtmlEditorCommandSource.SelectionIsLTR
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.InsertLTRTextBlock()
{
ApplyFormattingTag("p", "dir=\"ltr\"");
}
bool IHtmlEditorCommandSource.SelectionIsRTL
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.InsertRTLTextBlock()
{
ApplyFormattingTag("p", "dir=\"rtl\"");
}
bool IHtmlEditorCommandSource.CanPasteSpecial
{
get
{
return false;
}
}
bool IHtmlEditorCommandSource.AllowPasteSpecial
{
get
{
return false;
}
}
void IHtmlEditorCommandSource.PasteSpecial()
{
throw new NotSupportedException();
}
public EditorTextAlignment GetSelectionAlignment()
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return EditorTextAlignment.None;
}
void IHtmlEditorCommandSource.ApplyAlignment(EditorTextAlignment alignment)
{
switch (alignment)
{
case EditorTextAlignment.Left:
ApplyAlignment("left");
break;
case EditorTextAlignment.Center:
ApplyAlignment("center");
break;
case EditorTextAlignment.Right:
ApplyAlignment("right");
break;
case EditorTextAlignment.Justify:
ApplyAlignment("justify");
break;
}
}
bool IHtmlEditorCommandSource.SelectionBulleted
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplyBullets()
{
InsertList("ul");
}
bool IHtmlEditorCommandSource.SelectionNumbered
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
void IHtmlEditorCommandSource.ApplyNumbers()
{
InsertList("ol");
}
void IHtmlEditorCommandSource.ApplyBlockquote()
{
string selectedText = _textBox.SelectedText;
_textBox.Paste("<blockquote>" + NEWLINE + selectedText + NEWLINE + "</blockquote>");
}
bool IHtmlEditorCommandSource.SelectionBlockquoted
{
get
{
// we don't analyze the source to determine latched state (too difficult/expensive)
return false;
}
}
bool IHtmlEditorCommandSource.CanIndent
{
get { return CommandSource.CanApplyFormatting(null); }
}
void IHtmlEditorCommandSource.ApplyIndent()
{
((IHtmlEditorCommandSource)this).ApplyBlockquote();
}
bool IHtmlEditorCommandSource.CanOutdent
{
get { return false; }
}
void IHtmlEditorCommandSource.ApplyOutdent()
{
// not supported
}
bool IHtmlEditorCommandSource.CanInsertLink
{
get
{
return CommandSource.CanApplyFormatting(null);
}
}
void IHtmlEditorCommandSource.InsertLink()
{
using (new WaitCursor())
{
if (!_canInsertHyperlink)
{
DisplayMessage.Show(MessageId.TitleNotLinkable);
return;
}
using (HyperlinkForm hyperlinkForm = new HyperlinkForm(CommandManager, ShowAllLinkOptions))
{
hyperlinkForm.LinkText = _textBox.SelectedText;
hyperlinkForm.EditStyle = false;
if (hyperlinkForm.ShowDialog(Owner) == DialogResult.OK)
{
InsertLink(hyperlinkForm.Hyperlink, hyperlinkForm.LinkText, hyperlinkForm.LinkTitle, hyperlinkForm.Rel, hyperlinkForm.NewWindow);
}
}
}
}
protected virtual bool ShowAllLinkOptions { get { return false; } }
bool IHtmlEditorCommandSource.CanRemoveLink
{
get
{
// not suppported
return false;
}
}
void IHtmlEditorCommandSource.RemoveLink()
{
// not supported
}
bool IHtmlEditorCommandSource.CanFind
{
get
{
return true;
}
}
void IHtmlEditorCommandSource.OpenLink()
{
// not supported
}
void IHtmlEditorCommandSource.AddToGlossary()
{
// not supported
}
LinkInfo IHtmlEditorCommandSource.DiscoverCurrentLink()
{
return new LinkInfo(null, null, null, null, false);
}
#endregion
#region ISimpleTextEditorCommandSource Members
public bool HasFocus
{
get
{
return _textBox.ContainsFocus;
}
}
public bool CanUndo
{
get
{
return _textBox.CanUndo;
}
}
// Forwards to the textbox's built-in undo stack (stray duplicate ';' removed).
public void Undo()
{
    _textBox.Undo();
}
public bool CanRedo
{
get
{
//not suported
return false;
}
}
public void Redo()
{
//not impelemented;
}
public bool CanCut
{
get
{
return _textBox.SelectionLength > 0;
}
}
public void Cut()
{
_textBox.Cut();
}
public bool CanCopy
{
get
{
return CanCut;
}
}
public void Copy()
{
_textBox.Copy();
}
public bool CanPaste
{
    get
    {
        // BUG FIX: '&&' binds tighter than '||', so the original expression
        // grouped as (GetDataObject() != null && UnicodeText) || Text, which
        // let the Text branch succeed without the null check. Parenthesize so
        // a data object must exist AND offer either text format.
        return Clipboard.GetDataObject() != null &&
               (Clipboard.ContainsData(DataFormats.UnicodeText) ||
                Clipboard.ContainsData(DataFormats.Text));
    }
}
public void Paste()
{
_textBox.Paste();
}
// Unlike other ISimpleTextCommandSource implementations, "Clear" here
// only refers to invoking the command from the menu or programmatically,
// NOT hitting the delete key. The delete key is handled directly by the
// edit control. This is made possible by the GotFocus and LostFocus event
// handlers telling the command manager to ignore Delete.
public bool CanClear
{
get
{
return HasFocus && _textBox.SelectionLength > 0;
}
}
public void Clear()
{
// This is a no-op if there is no selection.
User32.SendMessage(_textBox.Handle, WM.CLEAR, UIntPtr.Zero, IntPtr.Zero);
}
public void SelectAll()
{
_textBox.SelectAll();
}
public void InsertEuroSymbol()
{
IntPtr euro = Marshal.StringToCoTaskMemUni("€");
try
{
User32.SendMessage(_textBox.Handle, WM.EM_REPLACESEL, new IntPtr(1), euro);
}
finally
{
Marshal.FreeCoTaskMem(euro);
}
}
bool ISimpleTextEditorCommandSource.ReadOnly
{
get { return _textBox.ReadOnly; }
}
public event EventHandler CommandStateChanged;
protected void OnCommandStateChanged()
{
if (CommandStateChanged != null)
CommandStateChanged(this, EventArgs.Empty);
}
public event EventHandler AggressiveCommandStateChanged;
protected void OnAggressiveCommandStateChanged()
{
if (AggressiveCommandStateChanged != null)
AggressiveCommandStateChanged(this, EventArgs.Empty);
}
#endregion
#region IDisposable Members
public void Dispose()
{
    // Run managed cleanup first, then suppress finalization — the
    // conventional ordering (CA1816); the original suppressed before
    // disposing, which leaves the finalizer armed if Dispose(true) throws.
    Dispose(true);
    GC.SuppressFinalize(this);
}
// disposing == true: called from Dispose(); release managed resources.
// disposing == false: called from the finalizer (which Debug.Fails first).
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        if (components != null)
            components.Dispose();
        // Null-guard for symmetry with components (defensive; _textBox is
        // assigned in the constructor).
        if (_textBox != null)
            _textBox.Dispose();
    }
}
#endregion
#region Protected Properties
protected TextBox SourceEditor
{
get { return _textBox; }
}
#endregion
#region Syntax Edit event handlers
private void _textBox_TextChanged(object sender, EventArgs e)
{
IsDirty = true;
OnCommandStateChanged();
}
//seems like this is only called if the modified property is changed programmatically
private void _textBox_ModifiedChanged(object sender, EventArgs e)
{
IsDirty = true;
OnCommandStateChanged();
}
private void _textBox_GotFocus(object sender, EventArgs e)
{
// Let the textbox handle delete itself.
CommandManager.IgnoreShortcut(Shortcut.Del);
_canInsertHyperlink = true;
}
private void _textBox_LostFocus(object sender, EventArgs e)
{
// Back to normal delete-key behavior.
CommandManager.UnignoreShortcut(Shortcut.Del);
}
#endregion
#region HTML Formatting Helpers
protected virtual string CleanupHtml(string html, int wrapTextAt)
{
// Don't do anything for now. Add source formatting someday.
return html;
}
private void ApplyAlignment(string alignment)
{
_textBox.Paste(String.Format(CultureInfo.InvariantCulture, "<p align=\"{0}\">{1}</p>", alignment, _textBox.SelectedText));
}
// Wraps the current selection in <tagName [attributes]> ... </tagName>,
// replacing it via Paste like the other formatting helpers.
private void ApplyFormattingTag(string tagName, string attributes)
{
    string selection = _textBox.SelectedText;
    string openTag = attributes == null
        ? String.Format(CultureInfo.InvariantCulture, "<{0}>", tagName)
        : String.Format(CultureInfo.InvariantCulture, "<{0} {1}>", tagName, attributes);
    string closeTag = String.Format(CultureInfo.InvariantCulture, "</{0}>", tagName);
    _textBox.Paste(openTag + selection + closeTag);
}
private void InsertList(string listTag)
{
string selectedText = _textBox.SelectedText;
_textBox.Paste(String.Format(CultureInfo.InvariantCulture, "<{0}>", listTag) + NEWLINE + "<li>" + selectedText + "</li>" + NEWLINE + String.Format(CultureInfo.InvariantCulture, "</{0}>", listTag));
}
#endregion
#region UI Management Helpers
private IWin32Window Owner
{
get
{
if (_textBox != null)
return _textBox.FindForm();
else
return null;
}
}
#endregion
#region Private Data
private IContainer components = new Container();
private TextBoxEditorControl _textBox;
private const string NEWLINE = "\r\n";
private bool _canInsertHyperlink = false;
#endregion
}
}
| |
#region namespace
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web.Security;
using System.Configuration;
using Umbraco.Core;
using Umbraco.Core.Logging;
using Umbraco.Core.Models;
using Umbraco.Core.Models.Membership;
using Umbraco.Core.Persistence.Querying;
using Umbraco.Core.Security;
using Umbraco.Core.Services;
using System.Security.Cryptography;
using System.Web.Util;
using System.Collections.Specialized;
using System.Configuration.Provider;
using System.Security;
using System.Security.Permissions;
using System.Runtime.CompilerServices;
using Member = umbraco.cms.businesslogic.member.Member;
using MemberType = umbraco.cms.businesslogic.member.MemberType;
using User = umbraco.BusinessLogic.User;
#endregion
namespace umbraco.providers.members
{
/// <summary>
/// Custom Membership Provider for Umbraco Members (User authentication for Frontend applications NOT umbraco CMS)
/// </summary>
[Obsolete("This has been superceded by Umbraco.Web.Security.Providers.MembersMembershipProvider")]
public class UmbracoMembershipProvider : UmbracoMembershipProviderBase, IUmbracoMemberTypeMembershipProvider
{
// Seeds every member-property alias with the Umbraco convention defaults;
// Initialize() can later override each one from provider config attributes.
public UmbracoMembershipProvider()
{
    LockPropertyTypeAlias = Constants.Conventions.Member.IsLockedOut;
    LastLockedOutPropertyTypeAlias = Constants.Conventions.Member.LastLockoutDate;
    FailedPasswordAttemptsPropertyTypeAlias = Constants.Conventions.Member.FailedPasswordAttempts;
    ApprovedPropertyTypeAlias = Constants.Conventions.Member.IsApproved;
    CommentPropertyTypeAlias = Constants.Conventions.Member.Comments;
    LastLoginPropertyTypeAlias = Constants.Conventions.Member.LastLoginDate;
    LastPasswordChangedPropertyTypeAlias = Constants.Conventions.Member.LastPasswordChangeDate;
    PasswordRetrievalQuestionPropertyTypeAlias = Constants.Conventions.Member.PasswordQuestion;
    PasswordRetrievalAnswerPropertyTypeAlias = Constants.Conventions.Member.PasswordAnswer;
}
#region Fields
private string _defaultMemberTypeAlias = "Member";
private string _providerName = Member.UmbracoMemberProviderName;
private volatile bool _hasDefaultMember = false;
private static readonly object Locker = new object();
#endregion
public string LockPropertyTypeAlias { get; protected set; }
public string LastLockedOutPropertyTypeAlias { get; protected set; }
public string FailedPasswordAttemptsPropertyTypeAlias { get; protected set; }
public string ApprovedPropertyTypeAlias { get; protected set; }
public string CommentPropertyTypeAlias { get; protected set; }
public string LastLoginPropertyTypeAlias { get; protected set; }
public string LastPasswordChangedPropertyTypeAlias { get; protected set; }
public string PasswordRetrievalQuestionPropertyTypeAlias { get; protected set; }
public string PasswordRetrievalAnswerPropertyTypeAlias { get; protected set; }
/// <summary>
/// Override to maintain backwards compatibility with 0 required non-alphanumeric chars
/// </summary>
public override int DefaultMinNonAlphanumericChars
{
get { return 0; }
}
/// <summary>
/// Override to maintain backwards compatibility with only 4 required length
/// </summary>
public override int DefaultMinPasswordLength
{
get { return 4; }
}
/// <summary>
/// Override to maintain backwards compatibility
/// </summary>
public override bool DefaultUseLegacyEncoding
{
get { return true; }
}
/// <summary>
/// For backwards compatibility, this provider supports this option
/// </summary>
public override bool AllowManuallyChangingPassword
{
get { return true; }
}
#region Initialization Method
/// <summary>
/// Initializes the provider.
/// </summary>
/// <param name="name">The friendly name of the provider.</param>
/// <param name="config">A collection of the name/value pairs representing the provider-specific attributes specified in the configuration for this provider.</param>
/// <exception cref="T:System.ArgumentNullException">The name of the provider is null.</exception>
/// <exception cref="T:System.InvalidOperationException">An attempt is made to call
/// <see cref="M:System.Configuration.Provider.ProviderBase.Initialize(System.String,System.Collections.Specialized.NameValueCollection)"></see> on a provider after the provider
/// has already been initialized.</exception>
/// <exception cref="T:System.ArgumentException">The name of the provider has a length of zero.</exception>
/// <summary>
/// Initializes the provider from web.config, then overrides the convention
/// property-type aliases with any provider-specific attributes present.
/// </summary>
public override void Initialize(string name, NameValueCollection config)
{
    // Intialize values from web.config
    if (config == null) throw new ArgumentNullException("config");
    if (string.IsNullOrEmpty(name)) name = Constants.Conventions.Member.UmbracoMemberProviderName;
    base.Initialize(name, config);
    _providerName = name;
    // test for membertype (if not specified, choose the first member type available)
    if (config["defaultMemberTypeAlias"] != null)
    {
        _defaultMemberTypeAlias = config["defaultMemberTypeAlias"];
        if (_defaultMemberTypeAlias.IsNullOrWhiteSpace())
        {
            // FIX: the message previously told the operator to add
            // 'defaultUserTypeAlias', which is not the attribute this code reads.
            throw new ProviderException("No default member type alias is specified in the web.config string. Please add a 'defaultMemberTypeAlias' to the add element in the provider declaration in web.config");
        }
        _hasDefaultMember = true;
    }
    // test for approve status
    if (config["umbracoApprovePropertyTypeAlias"] != null)
    {
        ApprovedPropertyTypeAlias = config["umbracoApprovePropertyTypeAlias"];
    }
    // test for lock attempts
    if (config["umbracoLockPropertyTypeAlias"] != null)
    {
        LockPropertyTypeAlias = config["umbracoLockPropertyTypeAlias"];
    }
    if (config["umbracoLastLockedPropertyTypeAlias"] != null)
    {
        LastLockedOutPropertyTypeAlias = config["umbracoLastLockedPropertyTypeAlias"];
    }
    if (config["umbracoLastPasswordChangedPropertyTypeAlias"] != null)
    {
        LastPasswordChangedPropertyTypeAlias = config["umbracoLastPasswordChangedPropertyTypeAlias"];
    }
    if (config["umbracoFailedPasswordAttemptsPropertyTypeAlias"] != null)
    {
        FailedPasswordAttemptsPropertyTypeAlias = config["umbracoFailedPasswordAttemptsPropertyTypeAlias"];
    }
    // comment property
    if (config["umbracoCommentPropertyTypeAlias"] != null)
    {
        CommentPropertyTypeAlias = config["umbracoCommentPropertyTypeAlias"];
    }
    // last login date
    if (config["umbracoLastLoginPropertyTypeAlias"] != null)
    {
        LastLoginPropertyTypeAlias = config["umbracoLastLoginPropertyTypeAlias"];
    }
    // password retrieval
    if (config["umbracoPasswordRetrievalQuestionPropertyTypeAlias"] != null)
    {
        PasswordRetrievalQuestionPropertyTypeAlias = config["umbracoPasswordRetrievalQuestionPropertyTypeAlias"];
    }
    if (config["umbracoPasswordRetrievalAnswerPropertyTypeAlias"] != null)
    {
        PasswordRetrievalAnswerPropertyTypeAlias = config["umbracoPasswordRetrievalAnswerPropertyTypeAlias"];
    }
}
#endregion
#region Methods
/// <summary>
/// Processes a request to update the password for a membership user.
/// </summary>
/// <param name="username">The user to update the password for.</param>
/// <param name="oldPassword">Ignored by this provider (see remarks in the body).</param>
/// <param name="newPassword">The new password for the specified user.</param>
/// <returns>
/// true if the password was updated successfully; otherwise, false.
/// </returns>
protected override bool PerformChangePassword(string username, string oldPassword, string newPassword)
{
    // Backwards compatibility: the old password is deliberately not validated so the
    // password can be set manually (allowed via the overridden AllowManuallyChangingPassword
    // option); this also lets the umbraco core update passwords without the old value.
    var member = Member.GetMemberFromLoginName(username);
    if (member == null)
    {
        return false;
    }
    string salt;
    var hashedPassword = EncryptOrHashNewPassword(newPassword, out salt);
    member.ChangePassword(FormatPasswordForStorage(hashedPassword, salt));
    UpdateMemberProperty(member, LastPasswordChangedPropertyTypeAlias, DateTime.Now);
    member.Save();
    return true;
}
/// <summary>
/// Processes a request to update the password question and answer for a membership user.
/// </summary>
/// <param name="username">The user to change the password question and answer for.</param>
/// <param name="password">The password for the specified user (not validated here).</param>
/// <param name="newPasswordQuestion">The new password question for the specified user.</param>
/// <param name="newPasswordAnswer">The new password answer for the specified user.</param>
/// <returns>
/// true if the password question and answer are updated successfully; otherwise, false.
/// </returns>
protected override bool PerformChangePasswordQuestionAndAnswer(string username, string password, string newPasswordQuestion, string newPasswordAnswer)
{
    // Both property aliases must be configured before question/answer can be stored.
    if (string.IsNullOrEmpty(PasswordRetrievalQuestionPropertyTypeAlias) || string.IsNullOrEmpty(PasswordRetrievalAnswerPropertyTypeAlias))
    {
        throw new NotSupportedException("Updating the password Question and Answer is not valid if the properties aren't set in the config file");
    }
    var member = Member.GetMemberFromLoginName(username);
    if (member == null)
    {
        return false;
    }
    UpdateMemberProperty(member, PasswordRetrievalQuestionPropertyTypeAlias, newPasswordQuestion);
    UpdateMemberProperty(member, PasswordRetrievalAnswerPropertyTypeAlias, newPasswordAnswer);
    member.Save();
    return true;
}
/// <summary>
/// Gets the member type alias used when none is configured, resolving it lazily
/// on first access in a thread-safe way.
/// </summary>
public override string DefaultMemberTypeAlias
{
    get
    {
        // Double-checked locking: the unlocked check avoids taking the lock on
        // every read once the alias has been resolved.
        if (_hasDefaultMember == false)
        {
            lock (Locker)
            {
                if (_hasDefaultMember == false)
                {
                    // No alias was configured: fall back to the only member type
                    // if exactly one exists; otherwise the configuration is ambiguous.
                    var types = MemberType.GetAll;
                    if (types.Length == 1)
                        _defaultMemberTypeAlias = types[0].Alias;
                    else
                        throw new ProviderException("No default MemberType alias is specified in the web.config string. Please add a 'defaultMemberTypeAlias' to the add element in the provider declaration in web.config");
                    _hasDefaultMember = true;
                }
            }
        }
        return _defaultMemberTypeAlias;
    }
}
/// <summary>
/// Adds a new membership user to the data source.
/// </summary>
/// <param name="memberTypeAlias">The member type alias to create the member with.</param>
/// <param name="username">The user name for the new user.</param>
/// <param name="password">The password for the new user.</param>
/// <param name="email">The e-mail address for the new user.</param>
/// <param name="passwordQuestion">The password question for the new user.</param>
/// <param name="passwordAnswer">The password answer for the new user</param>
/// <param name="isApproved">Whether or not the new user is approved to be validated.</param>
/// <param name="providerUserKey">The unique identifier from the membership data source for the user.</param>
/// <param name="status">A <see cref="T:System.Web.Security.MembershipCreateStatus"></see> enumeration value indicating whether the user was created successfully.</param>
/// <returns>
/// A <see cref="T:System.Web.Security.MembershipUser"></see> object populated with the information for the newly created user.
/// </returns>
protected override MembershipUser PerformCreateUser(string memberTypeAlias, string username, string password, string email, string passwordQuestion,
    string passwordAnswer, bool isApproved, object providerUserKey, out MembershipCreateStatus status)
{
    if (Member.GetMemberFromLoginName(username) != null)
    {
        status = MembershipCreateStatus.DuplicateUserName;
        LogHelper.Warn<UmbracoMembershipProvider>("Cannot create member as username already exists: " + username);
        return null;
    }
    // Check the flag before the lookup so the email query only runs when
    // uniqueness is actually enforced (the original queried unconditionally).
    if (RequiresUniqueEmail && Member.GetMemberFromEmail(email) != null)
    {
        status = MembershipCreateStatus.DuplicateEmail;
        LogHelper.Warn<UmbracoMembershipProvider>(
            "Cannot create member as a member with the same email address exists: " + email);
        return null;
    }
    var memberType = MemberType.GetByAlias(memberTypeAlias);
    if (memberType == null)
    {
        throw new InvalidOperationException("Could not find a member type with alias " + memberTypeAlias + ". Ensure your membership provider configuration is up to date and that the default member type exists.");
    }
    var m = Member.MakeNew(username, email, memberType, User.GetUser(0));
    string salt;
    var encodedPassword = EncryptOrHashNewPassword(password, out salt);
    // set the password on the member
    m.ChangePassword(FormatPasswordForStorage(encodedPassword, salt));
    // custom fields - each is only written when its alias is configured
    if (string.IsNullOrEmpty(PasswordRetrievalQuestionPropertyTypeAlias) == false)
    {
        UpdateMemberProperty(m, PasswordRetrievalQuestionPropertyTypeAlias, passwordQuestion);
    }
    if (string.IsNullOrEmpty(PasswordRetrievalAnswerPropertyTypeAlias) == false)
    {
        UpdateMemberProperty(m, PasswordRetrievalAnswerPropertyTypeAlias, passwordAnswer);
    }
    if (string.IsNullOrEmpty(ApprovedPropertyTypeAlias) == false)
    {
        // approval is persisted as 1/0, matching how it is read back elsewhere
        UpdateMemberProperty(m, ApprovedPropertyTypeAlias, isApproved ? 1 : 0);
    }
    if (string.IsNullOrEmpty(LastLoginPropertyTypeAlias) == false)
    {
        UpdateMemberProperty(m, LastLoginPropertyTypeAlias, DateTime.Now);
    }
    if (string.IsNullOrEmpty(LastPasswordChangedPropertyTypeAlias) == false)
    {
        UpdateMemberProperty(m, LastPasswordChangedPropertyTypeAlias, DateTime.Now);
    }
    var mUser = ConvertToMembershipUser(m);
    // save
    m.Save();
    status = MembershipCreateStatus.Success;
    return mUser;
}
/// <summary>
/// Removes a user from the membership data source.
/// </summary>
/// <param name="username">The name of the user to delete.</param>
/// <param name="deleteAllRelatedData">
/// TODO: This setting currently has no effect
/// </param>
/// <returns>
/// true if the user was successfully deleted; otherwise, false.
/// </returns>
public override bool DeleteUser(string username, bool deleteAllRelatedData)
{
    var member = Member.GetMemberFromLoginName(username);
    if (member == null)
    {
        return false;
    }
    member.delete();
    return true;
}
/// <summary>
/// Gets a collection of membership users where the e-mail address contains the specified e-mail address to match.
/// </summary>
/// <param name="emailToMatch">The e-mail address to search for (wildcard match).</param>
/// <param name="pageIndex">The index of the page of results to return. pageIndex is zero-based.</param>
/// <param name="pageSize">The size of the page of results to return.</param>
/// <param name="totalRecords">The total number of matched users.</param>
/// <returns>
/// A <see cref="T:System.Web.Security.MembershipUserCollection"></see> collection that contains a page of pageSize<see cref="T:System.Web.Security.MembershipUser"></see> objects beginning at the page specified by pageIndex.
/// </returns>
public override MembershipUserCollection FindUsersByEmail(string emailToMatch, int pageIndex, int pageSize, out int totalRecords)
{
    var collection = new MembershipUserCollection();
    var matches = ApplicationContext.Current.Services.MemberService.FindByEmail(emailToMatch, pageIndex, pageSize, out totalRecords, StringPropertyMatchType.Wildcard).ToArray();
    foreach (var member in matches)
    {
        collection.Add(ConvertToMembershipUser(member));
    }
    return collection;
}
/// <summary>
/// Gets a collection of membership users where the user name contains the specified user name to match.
/// </summary>
/// <param name="usernameToMatch">The user name to search for (wildcard match).</param>
/// <param name="pageIndex">The index of the page of results to return. pageIndex is zero-based.</param>
/// <param name="pageSize">The size of the page of results to return.</param>
/// <param name="totalRecords">The total number of matched users.</param>
/// <returns>
/// A <see cref="T:System.Web.Security.MembershipUserCollection"></see> collection that contains a page of pageSize<see cref="T:System.Web.Security.MembershipUser"></see> objects beginning at the page specified by pageIndex.
/// </returns>
public override MembershipUserCollection FindUsersByName(string usernameToMatch, int pageIndex, int pageSize, out int totalRecords)
{
    // Local renamed from the misleading 'byEmail' - this searches by username.
    var byUsername = ApplicationContext.Current.Services.MemberService.FindByUsername(usernameToMatch, pageIndex, pageSize, out totalRecords, StringPropertyMatchType.Wildcard).ToArray();
    var collection = new MembershipUserCollection();
    foreach (var m in byUsername)
    {
        collection.Add(ConvertToMembershipUser(m));
    }
    return collection;
}
/// <summary>
/// Gets a collection of all the users in the data source in pages of data.
/// </summary>
/// <param name="pageIndex">The index of the page of results to return. pageIndex is zero-based.</param>
/// <param name="pageSize">The size of the page of results to return.</param>
/// <param name="totalRecords">The total number of matched users.</param>
/// <returns>
/// A <see cref="T:System.Web.Security.MembershipUserCollection"></see> collection that contains a page of pageSize<see cref="T:System.Web.Security.MembershipUser"></see> objects beginning at the page specified by pageIndex.
/// </returns>
public override MembershipUserCollection GetAllUsers(int pageIndex, int pageSize, out int totalRecords)
{
    var result = new MembershipUserCollection();
    foreach (var member in ApplicationContext.Current.Services.MemberService.GetAll(pageIndex, pageSize, out totalRecords))
    {
        result.Add(ConvertToMembershipUser(member));
    }
    return result;
}
/// <summary>
/// Gets the number of users currently accessing the application.
/// </summary>
/// <returns>
/// The number of users currently accessing the application.
/// </returns>
public override int GetNumberOfUsersOnline()
{
    // Delegates entirely to the member service's online-count query.
    return ApplicationContext.Current.Services.MemberService.GetCount(MemberCountType.Online);
}
/// <summary>
/// Gets the password for the specified user name from the data source.
/// </summary>
/// <param name="username">The user to retrieve the password for.</param>
/// <param name="answer">The password answer for the user.</param>
/// <returns>
/// The password for the specified user name.
/// </returns>
protected override string PerformGetPassword(string username, string answer)
{
    var member = Member.GetMemberFromLoginName(username);
    if (member == null)
    {
        throw new MembershipPasswordException("The supplied user is not found");
    }
    // Locked-out members may not retrieve their password (only checked when the
    // lock property alias is configured).
    if (!string.IsNullOrEmpty(LockPropertyTypeAlias))
    {
        bool isLockedOut;
        bool.TryParse(GetMemberProperty(member, LockPropertyTypeAlias, true), out isLockedOut);
        if (isLockedOut)
        {
            throw new MembershipPasswordException("The supplied user is locked out");
        }
    }
    if (RequiresQuestionAndAnswer)
    {
        // A configured answer alias is mandatory when question/answer is required.
        if (string.IsNullOrEmpty(PasswordRetrievalAnswerPropertyTypeAlias))
        {
            throw new ProviderException("Password retrieval answer property alias is not set! To automatically support password question/answers you'll need to add references to the membertype properties in the 'Member' element in web.config by adding their aliases to the 'umbracoPasswordRetrievalQuestionPropertyTypeAlias' and 'umbracoPasswordRetrievalAnswerPropertyTypeAlias' attributes");
        }
        if (GetMemberProperty(member, PasswordRetrievalAnswerPropertyTypeAlias, false) != answer)
        {
            throw new MembershipPasswordException("Incorrect password answer");
        }
    }
    return DecryptPassword(member.GetPassword());
}
/// <summary>
/// Gets information from the data source for a user. Provides an option to update the last-activity date/time stamp for the user.
/// </summary>
/// <param name="username">The name of the user to get information for.</param>
/// <param name="userIsOnline">true to update the last-activity date/time stamp for the user; false to return user information without updating the last-activity date/time stamp for the user.</param>
/// <returns>
/// A <see cref="T:System.Web.Security.MembershipUser"></see> object populated with the specified user's information from the data source.
/// </returns>
public override MembershipUser GetUser(string username, bool userIsOnline)
{
    if (string.IsNullOrEmpty(username))
    {
        return null;
    }
    var member = Member.GetMemberFromLoginName(username);
    if (member == null)
    {
        return null;
    }
    if (userIsOnline && LastLoginPropertyTypeAlias.IsNullOrWhiteSpace() == false)
    {
        UpdateMemberProperty(member, LastLoginPropertyTypeAlias, DateTime.Now);
        // Save without raising events: raising them would execute all distributed
        // cache refreshers and clear caches we want to keep.
        // http://issues.umbraco.org/issue/U4-3451
        member.Save(false);
    }
    return ConvertToMembershipUser(member);
}
/// <summary>
/// Gets information from the data source for a user based on the unique identifier for the membership user. Provides an option to update the last-activity date/time stamp for the user.
/// </summary>
/// <param name="providerUserKey">The unique identifier for the membership user to get information for (GUID or int).</param>
/// <param name="userIsOnline">true to update the last-activity date/time stamp for the user; false to return user information without updating the last-activity date/time stamp for the user.</param>
/// <returns>
/// A <see cref="T:System.Web.Security.MembershipUser"></see> object populated with the specified user's information from the data source.
/// </returns>
public override MembershipUser GetUser(object providerUserKey, bool userIsOnline)
{
    // The Guid and int branches previously duplicated identical bookkeeping;
    // both now share GetUserAndTrackOnlineStatus.
    var asGuid = providerUserKey.TryConvertTo<Guid>();
    if (asGuid.Success)
    {
        return GetUserAndTrackOnlineStatus(new Member(asGuid.Result), userIsOnline);
    }
    var asInt = providerUserKey.TryConvertTo<int>();
    if (asInt.Success)
    {
        return GetUserAndTrackOnlineStatus(new Member(asInt.Result), userIsOnline);
    }
    throw new InvalidOperationException("The " + GetType() + " provider only supports GUID or Int as a providerUserKey");
}

/// <summary>
/// Optionally stamps the member's last-login date, then converts it to a MembershipUser.
/// </summary>
private MembershipUser GetUserAndTrackOnlineStatus(Member m, bool userIsOnline)
{
    if (userIsOnline && LastLoginPropertyTypeAlias.IsNullOrWhiteSpace() == false)
    {
        UpdateMemberProperty(m, LastLoginPropertyTypeAlias, DateTime.Now);
        // don't raise events for this! It just sets the member dates, if we do raise events this will
        // cause all distributed cache to execute - which will clear out some caches we don't want.
        // http://issues.umbraco.org/issue/U4-3451
        m.Save(false);
    }
    return ConvertToMembershipUser(m);
}
/// <summary>
/// Gets the user name associated with the specified e-mail address.
/// </summary>
/// <param name="email">The e-mail address to search for.</param>
/// <returns>
/// The user name associated with the specified e-mail address. If no match is found, return null.
/// </returns>
public override string GetUserNameByEmail(string email)
{
    var member = Member.GetMemberFromEmail(email);
    if (member == null)
    {
        return null;
    }
    return member.LoginName;
}
/// <summary>
/// Resets a user's password to a new, automatically generated password.
/// </summary>
/// <param name="username">The user to reset the password for.</param>
/// <param name="answer">The password answer for the specified user (not used with Umbraco).</param>
/// <param name="generatedPassword">The already-generated password to store.</param>
/// <returns>The new password for the specified user.</returns>
protected override string PerformResetPassword(string username, string answer, string generatedPassword)
{
    //TODO: This should be here - but how do we update failure count in this provider??
    //if (answer == null && RequiresQuestionAndAnswer)
    //{
    //    UpdateFailureCount(username, "passwordAnswer");
    //    throw new ProviderException("Password answer required for password reset.");
    //}
    var member = Member.GetMemberFromLoginName(username);
    if (member == null)
    {
        throw new ProviderException("The supplied user is not found");
    }
    // A locked-out member may not reset their password.
    if (!string.IsNullOrEmpty(LockPropertyTypeAlias))
    {
        bool isLockedOut;
        bool.TryParse(GetMemberProperty(member, LockPropertyTypeAlias, true), out isLockedOut);
        if (isLockedOut)
        {
            throw new ProviderException("The member is locked out.");
        }
    }
    if (RequiresQuestionAndAnswer)
    {
        // A configured answer alias is mandatory when question/answer is required.
        if (string.IsNullOrEmpty(PasswordRetrievalAnswerPropertyTypeAlias))
        {
            throw new ProviderException("Password retrieval answer property alias is not set! To automatically support password question/answers you'll need to add references to the membertype properties in the 'Member' element in web.config by adding their aliases to the 'umbracoPasswordRetrievalQuestionPropertyTypeAlias' and 'umbracoPasswordRetrievalAnswerPropertyTypeAlias' attributes");
        }
        if (GetMemberProperty(member, PasswordRetrievalAnswerPropertyTypeAlias, false) != answer)
        {
            throw new ProviderException("Incorrect password answer");
        }
    }
    string salt;
    var hashedPassword = EncryptOrHashNewPassword(generatedPassword, out salt);
    // set the password on the member
    member.ChangePassword(FormatPasswordForStorage(hashedPassword, salt));
    if (!string.IsNullOrEmpty(LastPasswordChangedPropertyTypeAlias))
    {
        UpdateMemberProperty(member, LastPasswordChangedPropertyTypeAlias, DateTime.Now);
    }
    member.Save();
    return generatedPassword;
}
/// <summary>
/// Clears a lock so that the membership user can be validated.
/// </summary>
/// <param name="userName">The membership user to clear the lock status for.</param>
/// <returns>
/// true if the membership user was successfully unlocked; otherwise, false.
/// </returns>
public override bool UnlockUser(string userName)
{
    // Unlocking requires the lock property alias to be configured.
    if (string.IsNullOrEmpty(LockPropertyTypeAlias))
    {
        throw new ProviderException("To enable lock/unlocking, you need to add a 'bool' property on your membertype and add the alias of the property in the 'umbracoLockPropertyTypeAlias' attribute of the membership element in the web.config.");
    }
    var member = Member.GetMemberFromLoginName(userName);
    if (member == null)
    {
        throw new ProviderException(string.Format("No member with the username '{0}' found", userName));
    }
    UpdateMemberProperty(member, LockPropertyTypeAlias, 0);
    // Also reset the failure counter when it is tracked.
    if (!string.IsNullOrEmpty(FailedPasswordAttemptsPropertyTypeAlias))
    {
        UpdateMemberProperty(member, FailedPasswordAttemptsPropertyTypeAlias, 0);
    }
    member.Save();
    return true;
}
/// <summary>
/// Updates e-mail and potentially approved status, lock status and comment on a user.
/// </summary>
/// <param name="user">A <see cref="T:System.Web.Security.MembershipUser"></see> object that represents the user to update and the updated information for the user.</param>
public override void UpdateUser(MembershipUser user)
{
    var member = Member.GetMemberFromLoginName(user.UserName);
    if (member == null)
    {
        throw new ProviderException(string.Format("No member with the username '{0}' found", user.UserName));
    }
    member.Email = user.Email;
    // Each property write below is a no-op when its alias is not configured
    // (UpdateMemberProperty checks the alias internally).
    UpdateMemberProperty(member, ApprovedPropertyTypeAlias, user.IsApproved ? 1 : 0);
    UpdateMemberProperty(member, LockPropertyTypeAlias, user.IsLockedOut ? 1 : 0);
    if (user.IsLockedOut)
    {
        UpdateMemberProperty(member, LastLockedOutPropertyTypeAlias, DateTime.Now);
    }
    UpdateMemberProperty(member, CommentPropertyTypeAlias, user.Comment);
    member.Save();
}
/// <summary>
/// Verifies that the specified user name and password exist in the data source,
/// enforcing lock-out and approval rules and maintaining the failed-attempt counter.
/// </summary>
/// <param name="username">The name of the user to validate.</param>
/// <param name="password">The password for the specified user.</param>
/// <returns>
/// true if the specified username and password are valid; otherwise, false.
/// </returns>
public override bool ValidateUser(string username, string password)
{
    var m = Member.GetMemberFromLoginName(username);
    if (m == null) return false;
    var authenticated = CheckPassword(password, m.GetPassword());
    if (authenticated)
    {
        // check for lock status. If locked, then set the member property to null
        if (string.IsNullOrEmpty(LockPropertyTypeAlias) == false)
        {
            string lockedStatus = GetMemberProperty(m, LockPropertyTypeAlias, true);
            if (string.IsNullOrEmpty(lockedStatus) == false)
            {
                var isLocked = false;
                if (bool.TryParse(lockedStatus, out isLocked))
                {
                    if (isLocked)
                    {
                        // correct password, but a locked member may not sign in
                        LogHelper.Info<UmbracoMembershipProvider>("Cannot validate member " + username + " because they are currently locked out");
                        return false;
                    }
                }
            }
        }
        //check for approve status. If not approved, then set the member property to null
        if (CheckApproveStatus(m) == false)
        {
            LogHelper.Info<UmbracoMembershipProvider>("Cannot validate member " + username + " because they are not approved");
            return false;
        }
        // maybe update login date (only when the alias is configured)
        if (string.IsNullOrEmpty(LastLoginPropertyTypeAlias) == false)
        {
            UpdateMemberProperty(m, LastLoginPropertyTypeAlias, DateTime.Now);
        }
        // maybe reset password attempts - a successful login clears the counter
        if (string.IsNullOrEmpty(FailedPasswordAttemptsPropertyTypeAlias) == false)
        {
            UpdateMemberProperty(m, FailedPasswordAttemptsPropertyTypeAlias, 0);
        }
        // persist data
        //don't raise events for this! It just sets the member dates, if we do raise events this will
        // cause all distributed cache to execute - which will clear out some caches we don't want.
        // http://issues.umbraco.org/issue/U4-3451
        m.Save(false);
        return true;
    }
    // update fail rate if it's approved
    // NOTE: the counter is only maintained when BOTH the lock alias and the
    // failed-attempts alias are configured.
    if (string.IsNullOrEmpty(LockPropertyTypeAlias) == false
        && string.IsNullOrEmpty(FailedPasswordAttemptsPropertyTypeAlias) == false)
    {
        if (CheckApproveStatus(m))
        {
            var failedAttempts = 0;
            int.TryParse(GetMemberProperty(m, FailedPasswordAttemptsPropertyTypeAlias, false), out failedAttempts);
            failedAttempts = failedAttempts + 1;
            UpdateMemberProperty(m, FailedPasswordAttemptsPropertyTypeAlias, failedAttempts);
            // lock user?
            if (failedAttempts >= MaxInvalidPasswordAttempts)
            {
                UpdateMemberProperty(m, LockPropertyTypeAlias, 1);
                UpdateMemberProperty(m, LastLockedOutPropertyTypeAlias, DateTime.Now);
                LogHelper.Info<UmbracoMembershipProvider>("Member " + username + " is now locked out, max invalid password attempts exceeded");
            }
            //don't raise events for this! It just sets the member dates, if we do raise events this will
            // cause all distributed cache to execute - which will clear out some caches we don't want.
            // http://issues.umbraco.org/issue/U4-3451
            m.Save(false);
        }
    }
    return false;
}
/// <summary>
/// Sets the given property value on the member when the alias is configured and
/// the property exists; otherwise does nothing. (Does not save the member.)
/// </summary>
private static void UpdateMemberProperty(Member m, string propertyTypeAlias, object propertyValue)
{
    if (string.IsNullOrEmpty(propertyTypeAlias))
    {
        return;
    }
    // Look the property up once instead of twice as the original did.
    var property = m.getProperty(propertyTypeAlias);
    if (property != null)
    {
        property.Value = propertyValue;
    }
}
/// <summary>
/// Reads a member property value as a string, or null when the alias is not
/// configured, the property is missing, or its value is null.
/// </summary>
/// <param name="m">The member to read from.</param>
/// <param name="propertyTypeAlias">The property alias to read (may be null/empty).</param>
/// <param name="isBool">When true, maps Umbraco's 1/0 storage to "true"/"false" so bool.TryParse works.</param>
private static string GetMemberProperty(Member m, string propertyTypeAlias, bool isBool)
{
    if (string.IsNullOrEmpty(propertyTypeAlias))
    {
        return null;
    }
    // Look the property up once instead of three times as the original did.
    var property = m.getProperty(propertyTypeAlias);
    if (property == null || property.Value == null)
    {
        return null;
    }
    var value = property.Value.ToString();
    if (isBool)
    {
        // Umbraco stored true as 1, which means it can be bool.tryParse'd
        value = value.Replace("1", "true").Replace("0", "false");
    }
    return value;
}
/// <summary>
/// Reads a member property value as a string from an IMember, or null when the
/// alias is not configured, the property is missing, or its value is null.
/// </summary>
/// <param name="m">The member to read from.</param>
/// <param name="propertyTypeAlias">The property alias to read (may be null/empty).</param>
/// <param name="isBool">When true, maps Umbraco's 1/0 storage to "true"/"false" so bool.TryParse works.</param>
private static string GetMemberProperty(IMember m, string propertyTypeAlias, bool isBool)
{
    if (string.IsNullOrEmpty(propertyTypeAlias))
    {
        return null;
    }
    if (m.Properties.Contains(propertyTypeAlias) == false)
    {
        return null;
    }
    // Index the collection once instead of repeatedly as the original did.
    var property = m.Properties[propertyTypeAlias];
    if (property == null || property.Value == null)
    {
        return null;
    }
    var value = property.Value.ToString();
    if (isBool)
    {
        // Umbraco stored true as 1, which means it can be bool.tryParse'd
        value = value.Replace("1", "true").Replace("0", "false");
    }
    return value;
}
/// <summary>
/// Determines whether a member is approved. When approval is not tracked
/// (no alias configured, or the member lacks the property) everyone passes.
/// </summary>
private bool CheckApproveStatus(Member m)
{
    // If approve statuses are not in use, every member counts as approved.
    if (string.IsNullOrEmpty(ApprovedPropertyTypeAlias))
    {
        return true;
    }
    if (m == null)
    {
        return false;
    }
    var approveStatus = GetMemberProperty(m, ApprovedPropertyTypeAlias, true);
    if (string.IsNullOrEmpty(approveStatus))
    {
        // There is no property value, so the approve status cannot be enforced.
        return true;
    }
    // Try parsing as bool first (just in case) ...
    bool isApproved;
    if (bool.TryParse(approveStatus, out isApproved))
    {
        return isApproved;
    }
    // ... then as int, since it is normally stored as 0 or 1.
    int intStatus;
    if (int.TryParse(approveStatus, out intStatus))
    {
        return intStatus != 0;
    }
    return false;
}
#endregion
#region Helper Methods
/// <summary>
/// Encodes the password using the legacy scheme.
/// </summary>
/// <param name="password">The password.</param>
/// <returns>The encoded password.</returns>
[Obsolete("Do not use this, it is the legacy way to encode a password - use the base class EncryptOrHashExistingPassword instead")]
public string EncodePassword(string password)
{
    // Kept only for backwards compatibility; delegates to the legacy implementation.
    return LegacyEncodePassword(password);
}
/// <summary>
/// Unencode password using the legacy scheme.
/// </summary>
/// <param name="encodedPassword">The encoded password.</param>
/// <returns>The unencoded password.</returns>
[Obsolete("Do not use this, it is the legacy way to decode a password - use the base class DecodePassword instead")]
public string UnEncodePassword(string encodedPassword)
{
    // Kept only for backwards compatibility; delegates to the legacy implementation.
    return LegacyUnEncodePassword(encodedPassword);
}
/// <summary>
/// Converts a legacy Member to a MembershipUser, reading the optional
/// approval/lock/login/comment/question properties when their aliases are configured.
/// </summary>
/// <param name="m">The member to convert; null yields null.</param>
/// <returns>The converted MembershipUser, or null.</returns>
private MembershipUser ConvertToMembershipUser(Member m)
{
    if (m == null)
    {
        return null;
    }
    // Defaults used when the corresponding property alias is not configured.
    var lastLogin = DateTime.Now;
    var lastLocked = DateTime.MinValue;
    var isApproved = true;
    var isLocked = false;
    var comment = "";
    var passwordQuestion = "";
    if (!string.IsNullOrEmpty(LastLoginPropertyTypeAlias))
    {
        DateTime.TryParse(GetMemberProperty(m, LastLoginPropertyTypeAlias, false), out lastLogin);
    }
    if (!string.IsNullOrEmpty(ApprovedPropertyTypeAlias))
    {
        bool.TryParse(GetMemberProperty(m, ApprovedPropertyTypeAlias, true), out isApproved);
    }
    if (!string.IsNullOrEmpty(LockPropertyTypeAlias))
    {
        bool.TryParse(GetMemberProperty(m, LockPropertyTypeAlias, true), out isLocked);
    }
    if (!string.IsNullOrEmpty(LastLockedOutPropertyTypeAlias))
    {
        DateTime.TryParse(GetMemberProperty(m, LastLockedOutPropertyTypeAlias, false), out lastLocked);
    }
    if (!string.IsNullOrEmpty(CommentPropertyTypeAlias))
    {
        comment = GetMemberProperty(m, CommentPropertyTypeAlias, false);
    }
    if (!string.IsNullOrEmpty(PasswordRetrievalQuestionPropertyTypeAlias))
    {
        passwordQuestion = GetMemberProperty(m, PasswordRetrievalQuestionPropertyTypeAlias, false);
    }
    return new MembershipUser(_providerName, m.LoginName, m.Id, m.Email, passwordQuestion, comment, isApproved, isLocked, m.CreateDateTime, lastLogin,
        DateTime.Now, DateTime.Now, lastLocked);
}
/// <summary>
/// Converts an IMember to a MembershipUser, reading the optional
/// approval/lock/login/comment/question properties when their aliases are configured.
/// </summary>
/// <param name="m">The member to convert; null yields null.</param>
/// <returns>The converted MembershipUser, or null.</returns>
private MembershipUser ConvertToMembershipUser(IMember m)
{
    if (m == null)
    {
        return null;
    }
    // Defaults used when the corresponding property alias is not configured.
    var lastLogin = DateTime.Now;
    var lastLocked = DateTime.MinValue;
    var isApproved = true;
    var isLocked = false;
    var comment = "";
    var passwordQuestion = "";
    if (!string.IsNullOrEmpty(LastLoginPropertyTypeAlias))
    {
        DateTime.TryParse(GetMemberProperty(m, LastLoginPropertyTypeAlias, false), out lastLogin);
    }
    if (!string.IsNullOrEmpty(ApprovedPropertyTypeAlias))
    {
        bool.TryParse(GetMemberProperty(m, ApprovedPropertyTypeAlias, true), out isApproved);
    }
    if (!string.IsNullOrEmpty(LockPropertyTypeAlias))
    {
        bool.TryParse(GetMemberProperty(m, LockPropertyTypeAlias, true), out isLocked);
    }
    if (!string.IsNullOrEmpty(LastLockedOutPropertyTypeAlias))
    {
        DateTime.TryParse(GetMemberProperty(m, LastLockedOutPropertyTypeAlias, false), out lastLocked);
    }
    if (!string.IsNullOrEmpty(CommentPropertyTypeAlias))
    {
        comment = GetMemberProperty(m, CommentPropertyTypeAlias, false);
    }
    if (!string.IsNullOrEmpty(PasswordRetrievalQuestionPropertyTypeAlias))
    {
        passwordQuestion = GetMemberProperty(m, PasswordRetrievalQuestionPropertyTypeAlias, false);
    }
    return new MembershipUser(_providerName, m.Username, m.Id, m.Email, passwordQuestion, comment, isApproved, isLocked, m.CreateDate, lastLogin,
        DateTime.Now, DateTime.Now, lastLocked);
}
#endregion
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace TestCases.HSSF.UserModel
{
using System;
using System.Collections;
using System.Configuration;
using System.IO;
using NUnit.Framework;
using NPOI.HSSF.Record.Aggregates;
using NPOI.HSSF.UserModel;
using NPOI.SS.Formula;
using NPOI.SS.Formula.Eval;
using NPOI.SS.Formula.PTG;
using NPOI.SS.UserModel;
using TestCases.HSSF;
using TestCases.SS.Formula;
/**
*
*/
[TestFixture]
public class TestFormulaEvaluatorBugs
{
private static bool OUTPUT_TEST_FILES = false;
private String tmpDirName;
[SetUp]
public void SetUp()
{
    // Force en-US so numeric/date formatting in the assertions is culture-independent.
    System.Threading.Thread.CurrentThread.CurrentCulture = System.Globalization.CultureInfo.CreateSpecificCulture("en-US");
    // Directory for optional debug output files (used when OUTPUT_TEST_FILES is true);
    // may be null when the app setting is not configured - TODO confirm callers tolerate that.
    tmpDirName = ConfigurationManager.AppSettings["java.io.tmpdir"];
}
/**
 * An odd problem with EvaluateFormulaCell giving the
 * right values when file is Opened, but changes
 * to the source data in some versions of excel
 * doesn't cause them to be updated. However, other
 * versions of excel, and gnumeric, work just fine
 * WARNING - tedious bug where you actually have to
 * Open up excel
 */
[Test]
public void Test44636()
{
    // Open the existing file, tweak one value and re-calculate
    HSSFWorkbook wb = HSSFTestDataSamples.OpenSampleWorkbook("44636.xls");
    NPOI.SS.UserModel.ISheet sheet = wb.GetSheetAt(0);
    IRow row = sheet.GetRow(0);
    row.GetCell(0).SetCellValue(4.2);
    row.GetCell(2).SetCellValue(25);
    HSSFFormulaEvaluator.EvaluateAllFormulaCells(wb);
    Assert.AreEqual(4.2 * 25, row.GetCell(3).NumericCellValue, 0.0001);
    if (OUTPUT_TEST_FILES)
    {
        // Save. The using block guarantees the stream is closed even if Write throws
        // (the original leaked the FileStream on failure).
        string existingPath = tmpDirName + "44636-existing.xls";
        using (FileStream existing = File.Open(existingPath, FileMode.Open))
        {
            existing.Seek(0, SeekOrigin.End);
            wb.Write(existing);
        }
        // Report the path; the original printed existing.ToString(), which is just
        // the stream's type name, not the file location.
        Console.Error.WriteLine("Existing file for bug #44636 written to " + existingPath);
    }
    // Now, do a new file from scratch
    wb = new HSSFWorkbook();
    sheet = wb.CreateSheet();
    row = sheet.CreateRow(0);
    row.CreateCell(0).SetCellValue(1.2);
    row.CreateCell(1).SetCellValue(4.2);
    row = sheet.CreateRow(1);
    row.CreateCell(0).CellFormula = ("SUM(A1:B1)");
    HSSFFormulaEvaluator.EvaluateAllFormulaCells(wb);
    Assert.AreEqual(5.4, row.GetCell(0).NumericCellValue, 0.0001);
    if (OUTPUT_TEST_FILES)
    {
        // Save (same resource-safety treatment as above).
        string scratchPath = tmpDirName + "44636-scratch.xls";
        using (FileStream scratch = File.Open(scratchPath, FileMode.Open))
        {
            scratch.Seek(0, SeekOrigin.End);
            wb.Write(scratch);
        }
        Console.Error.WriteLine("New file for bug #44636 written to " + scratchPath);
    }
}
/**
* Bug 44297: 32767+32768 is Evaluated to -1
* Fix: IntPtg must operate with unsigned short. Reading signed short results in incorrect formula calculation
* if a formula has values in the interval [Short.MAX_VALUE, (Short.MAX_VALUE+1)*2]
*
* @author Yegor Kozlov
*/
[Test]
public void Test44297()
{
HSSFWorkbook wb = HSSFTestDataSamples.OpenSampleWorkbook("44297.xls");
IRow row;
ICell cell;
NPOI.SS.UserModel.ISheet sheet = wb.GetSheetAt(0);
HSSFFormulaEvaluator eva = new HSSFFormulaEvaluator(wb);
row = sheet.GetRow(0);
cell = row.GetCell(0);
Assert.AreEqual("31+46", cell.CellFormula);
Assert.AreEqual(77, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(1);
cell = row.GetCell(0);
Assert.AreEqual("30+53", cell.CellFormula);
Assert.AreEqual(83, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(2);
cell = row.GetCell(0);
Assert.AreEqual("SUM(A1:A2)", cell.CellFormula);
Assert.AreEqual(160, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(4);
cell = row.GetCell(0);
Assert.AreEqual("32767+32768", cell.CellFormula);
Assert.AreEqual(65535, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(7);
cell = row.GetCell(0);
Assert.AreEqual("32744+42333", cell.CellFormula);
Assert.AreEqual(75077, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(8);
cell = row.GetCell(0);
Assert.AreEqual("327680/32768", cell.CellFormula);
Assert.AreEqual(10, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(9);
cell = row.GetCell(0);
Assert.AreEqual("32767+32769", cell.CellFormula);
Assert.AreEqual(65536, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(10);
cell = row.GetCell(0);
Assert.AreEqual("35000+36000", cell.CellFormula);
Assert.AreEqual(71000, eva.Evaluate(cell).NumberValue, 0);
row = sheet.GetRow(11);
cell = row.GetCell(0);
Assert.AreEqual("-1000000-3000000", cell.CellFormula);
Assert.AreEqual(-4000000, eva.Evaluate(cell).NumberValue, 0);
}
/**
* Bug 44410: SUM(C:C) is valid in excel, and means a sum
* of all the rows in Column C
*
* @author Nick Burch
*/
[Test]
public void Test44410()
{
HSSFWorkbook wb = HSSFTestDataSamples.OpenSampleWorkbook("SingleLetterRanges.xls");
NPOI.SS.UserModel.ISheet sheet = wb.GetSheetAt(0);
HSSFFormulaEvaluator eva = new HSSFFormulaEvaluator(wb);
// =index(C:C,2,1) -> 2
IRow rowIDX = sheet.GetRow(3);
// =sum(C:C) -> 6
IRow rowSUM = sheet.GetRow(4);
// =sum(C:D) -> 66
IRow rowSUM2D = sheet.GetRow(5);
// Test the sum
ICell cellSUM = rowSUM.GetCell(0);
FormulaRecordAggregate frec = (FormulaRecordAggregate)((HSSFCell)cellSUM).CellValueRecord;
Ptg[] ops = frec.FormulaRecord.ParsedExpression;
Assert.AreEqual(2, ops.Length);
Assert.AreEqual(typeof(AreaPtg), ops[0].GetType());
Assert.AreEqual(typeof(FuncVarPtg), ops[1].GetType());
// Actually stored as C1 to C65536
// (last row is -1 === 65535)
AreaPtg ptg = (AreaPtg)ops[0];
Assert.AreEqual(2, ptg.FirstColumn);
Assert.AreEqual(2, ptg.LastColumn);
Assert.AreEqual(0, ptg.FirstRow);
Assert.AreEqual(65535, ptg.LastRow);
Assert.AreEqual("C:C", ptg.ToFormulaString());
// Will show as C:C, but won't know how many
// rows it covers as we don't have the sheet
// to hand when turning the Ptgs into a string
Assert.AreEqual("SUM(C:C)", cellSUM.CellFormula);
// But the evaluator knows the sheet, so it
// can do it properly
Assert.AreEqual(6, eva.Evaluate(cellSUM).NumberValue, 0);
// Test the index
// Again, the formula string will be right but
// lacking row count, Evaluated will be right
ICell cellIDX = rowIDX.GetCell(0);
Assert.AreEqual("INDEX(C:C,2,1)", cellIDX.CellFormula);
Assert.AreEqual(2, eva.Evaluate(cellIDX).NumberValue, 0);
// Across two colums
ICell cellSUM2D = rowSUM2D.GetCell(0);
Assert.AreEqual("SUM(C:D)", cellSUM2D.CellFormula);
Assert.AreEqual(66, eva.Evaluate(cellSUM2D).NumberValue, 0);
}
/**
* Tests that we can Evaluate boolean cells properly
*/
[Test]
public void TestEvaluateBooleanInCell_bug44508()
{
HSSFWorkbook wb = new HSSFWorkbook();
NPOI.SS.UserModel.ISheet sheet = wb.CreateSheet();
wb.SetSheetName(0, "Sheet1");
IRow row = sheet.CreateRow(0);
ICell cell = row.CreateCell(0);
cell.CellFormula = ("1=1");
HSSFFormulaEvaluator fe = new HSSFFormulaEvaluator(wb);
try
{
fe.EvaluateInCell(cell);
}
catch (FormatException)
{
Assert.Fail("Identified bug 44508");
}
Assert.AreEqual(true, cell.BooleanCellValue);
}
[Test]
public void TestClassCast_bug44861()
{
HSSFWorkbook wb = HSSFTestDataSamples.OpenSampleWorkbook("44861.xls");
// Check direct
HSSFFormulaEvaluator.EvaluateAllFormulaCells(wb);
// And via calls
int numSheets = wb.NumberOfSheets;
for (int i = 0; i < numSheets; i++)
{
NPOI.SS.UserModel.ISheet s = wb.GetSheetAt(i);
HSSFFormulaEvaluator eval = new HSSFFormulaEvaluator(wb);
for (IEnumerator rows = s.GetRowEnumerator(); rows.MoveNext(); )
{
IRow r = (IRow)rows.Current;
for (IEnumerator cells = r.GetEnumerator(); cells.MoveNext(); )
{
ICell c = (ICell)cells.Current;
eval.EvaluateFormulaCell(c);
}
}
}
}
[Test]
public void TestEvaluateInCellWithErrorCode_bug44950()
{
HSSFWorkbook wb = new HSSFWorkbook();
NPOI.SS.UserModel.ISheet sheet = wb.CreateSheet("Sheet1");
IRow row = sheet.CreateRow(1);
ICell cell = row.CreateCell(0);
cell.CellFormula = ("na()"); // this formula Evaluates to an Excel error code '#N/A'
HSSFFormulaEvaluator fe = new HSSFFormulaEvaluator(wb);
try
{
fe.EvaluateInCell(cell);
}
catch (InvalidOperationException e)
{
if (e.Message.StartsWith("Cannot get a error value from"))
{
throw new AssertionException("Identified bug 44950 b");
}
throw;
}
}
private class EvalListener : EvaluationListener
{
private int _countCacheHits;
private int _countCacheMisses;
public EvalListener()
{
_countCacheHits = 0;
_countCacheMisses = 0;
}
public int GetCountCacheHits()
{
return _countCacheHits;
}
public int GetCountCacheMisses()
{
return _countCacheMisses;
}
public override void OnCacheHit(int sheetIndex, int srcRowNum, int srcColNum, ValueEval result)
{
_countCacheHits++;
}
public override void OnStartEvaluate(IEvaluationCell cell, ICacheEntry entry)
{
_countCacheMisses++;
}
}
/**
* The HSSFFormula evaluator performance benefits greatly from caching of intermediate cell values
*/
[Test]
public void TestSlowEvaluate45376()
{
/*
* Note - to observe behaviour without caching, disable the call to
* updateValue() from FormulaCellCacheEntry.updateFormulaResult().
*/
// Firstly set up a sequence of formula cells where each depends on the previous multiple
// times. Without caching, each subsequent cell take about 4 times longer to Evaluate.
HSSFWorkbook wb = new HSSFWorkbook();
NPOI.SS.UserModel.ISheet sheet = wb.CreateSheet("Sheet1");
IRow row = sheet.CreateRow(0);
for (int i = 1; i < 10; i++)
{
ICell cell = row.CreateCell(i);
char prevCol = (char)('A' + i - 1);
String prevCell = prevCol + "1";
// this formula is inspired by the offending formula of the attachment for bug 45376
String formula = "IF(DATE(YEAR(" + prevCell + "),MONTH(" + prevCell + ")+1,1)<=$D$3," +
"DATE(YEAR(" + prevCell + "),MONTH(" + prevCell + ")+1,1),NA())";
cell.CellFormula = (formula);
}
row.CreateCell(0).SetCellValue(new DateTime(2000,1,1,0,0,0));
// Choose cell A9, so that the Assert.Failing Test case doesn't take too long to execute.
ICell cell1 = row.GetCell(8);
EvalListener evalListener = new EvalListener();
WorkbookEvaluator evaluator = WorkbookEvaluatorTestHelper.CreateEvaluator(wb, evalListener);
ValueEval ve = evaluator.Evaluate(HSSFEvaluationTestHelper.WrapCell(cell1));
int evalCount = evalListener.GetCountCacheMisses();
if (evalCount > 10)
{
// Without caching, evaluating cell 'A9' takes 21845 evaluations which consumes
// much time (~3 sec on Core 2 Duo 2.2GHz)
Console.Error.WriteLine("Cell A9 took " + evalCount + " intermediate evaluations");
throw new AssertionException("Identifed bug 45376 - Formula evaluator should cache values");
}
// With caching, the evaluationCount is 8 which is a big improvement
// Note - these expected values may change if the WorkbookEvaluator is
// ever optimised to short circuit 'if' functions.
Assert.AreEqual(8, evalCount);
// The cache hits would be 24 if fully evaluating all arguments of the
// "IF()" functions (Each of the 8 formulas has 4 refs to formula cells
// which result in 1 cache miss and 3 cache hits). However with the
// short-circuit-if optimisation, 2 of the cell refs get skipped
// reducing this metric 8.
Assert.AreEqual(8, evalListener.GetCountCacheHits());
// confirm the evaluation result too
Assert.AreEqual(ErrorEval.NA, ve);
}
[Test]
public void TestDateWithNegativeParts_bug48528()
{
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet = (HSSFSheet)wb.CreateSheet("Sheet1");
HSSFRow row = (HSSFRow)sheet.CreateRow(1);
HSSFCell cell = (HSSFCell)row.CreateCell(0);
HSSFFormulaEvaluator fe = new HSSFFormulaEvaluator(wb);
// 5th Feb 2012 = 40944
// 1st Feb 2012 = 40940
// 5th Jan 2012 = 40913
// 5th Dec 2011 = 40882
// 5th Feb 2011 = 40579
cell.CellFormula=("DATE(2012,2,1)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40940.0, fe.Evaluate(cell).NumberValue);
cell.CellFormula=("DATE(2012,2,1+4)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40944.0, fe.Evaluate(cell).NumberValue);
cell.CellFormula=("DATE(2012,2-1,1+4)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40913.0, fe.Evaluate(cell).NumberValue);
cell.CellFormula=("DATE(2012,2,1-27)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40913.0, fe.Evaluate(cell).NumberValue);
cell.CellFormula=("DATE(2012,2-2,1+4)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40882.0, fe.Evaluate(cell).NumberValue);
cell.CellFormula=("DATE(2012,2,1-58)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40882.0, fe.Evaluate(cell).NumberValue);
cell.CellFormula=("DATE(2012,2-12,1+4)");
fe.NotifyUpdateCell(cell);
Assert.AreEqual(40579.0, fe.Evaluate(cell).NumberValue);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Microsoft.Azure.Devices.Client.Transport.Mqtt
{
using System;
using System.Collections.Generic;
using System.Diagnostics.Contracts;
using System.Globalization;
using System.IO;
using System.Threading.Tasks;
using DotNetty.Buffers;
using DotNetty.Codecs.Mqtt.Packets;
using DotNetty.Transport.Channels;
using Microsoft.Azure.Devices.Client.Common;
using Microsoft.Azure.Devices.Client.Extensions;
static class Util
{
    /// <summary>
    /// Produces client-side MQTT packet identifiers from a randomly seeded,
    /// wrapping 16-bit counter.
    /// </summary>
    static class PacketIdGenerator
    {
        // Random seed so a reconnecting client is unlikely to immediately reuse
        // the ids of packets that were in flight on the previous connection.
        static ushort current = (ushort)new Random((int)DateTime.UtcNow.ToFileTimeUtc()).Next(0, ushort.MaxValue);

        /// <summary>
        /// Returns the next packet identifier. MQTT 3.1.1 requires the packet
        /// identifier of a QoS &gt; 0 PUBLISH to be non-zero ([MQTT-2.3.1-1]),
        /// and ComposePublishPacketAsync clears the top bit for QoS 1 ids, so
        /// values whose low 15 bits are all zero (0x0000 and 0x8000) are
        /// skipped here - otherwise the masked id could collapse to 0.
        /// NOTE(review): the increment is not interlocked; this assumes calls
        /// are serialized (e.g. on the channel event loop) - confirm.
        /// </summary>
        public static int Next()
        {
            unchecked
            {
                ushort id;
                do
                {
                    id = current++;
                }
                while ((id & 0x7FFF) == 0); // skip ids that would become 0 after QoS masking
                return id;
            }
        }
    }

    /// <summary>Topic-string ("wire") names of the IoT Hub system properties.</summary>
    static class IotHubWirePropertyNames
    {
        public const string AbsoluteExpiryTime = "$.exp";
        public const string CorrelationId = "$.cid";
        public const string MessageId = "$.mid";
        public const string To = "$.to";
        public const string UserId = "$.uid";
    }

    /// <summary>Service-side names of the message system properties.</summary>
    static class MessageSystemPropertyNames
    {
        public const string MessageId = "message-id";
        public const string To = "to";
        public const string ExpiryTimeUtc = "absolute-expiry-time";
        public const string CorrelationId = "correlation-id";
        public const string UserId = "user-id";
        public const string Operation = "iothub-operation";
        public const string Ack = "iothub-ack";
    }

    // Inbound mapping: wire name -> system property name.
    // Operation and Ack use the same name on both sides.
    static readonly Dictionary<string, string> ToSystemPropertiesMap = new Dictionary<string, string>
    {
        {IotHubWirePropertyNames.AbsoluteExpiryTime, MessageSystemPropertyNames.ExpiryTimeUtc},
        {IotHubWirePropertyNames.CorrelationId, MessageSystemPropertyNames.CorrelationId},
        {IotHubWirePropertyNames.MessageId, MessageSystemPropertyNames.MessageId},
        {IotHubWirePropertyNames.To, MessageSystemPropertyNames.To},
        {IotHubWirePropertyNames.UserId, MessageSystemPropertyNames.UserId},
        {MessageSystemPropertyNames.Operation, MessageSystemPropertyNames.Operation},
        {MessageSystemPropertyNames.Ack, MessageSystemPropertyNames.Ack}
    };

    // Outbound mapping: system property name -> wire name.
    static readonly Dictionary<string, string> FromSystemPropertiesMap = new Dictionary<string, string>
    {
        {MessageSystemPropertyNames.ExpiryTimeUtc, IotHubWirePropertyNames.AbsoluteExpiryTime},
        {MessageSystemPropertyNames.CorrelationId, IotHubWirePropertyNames.CorrelationId},
        {MessageSystemPropertyNames.MessageId, IotHubWirePropertyNames.MessageId},
        {MessageSystemPropertyNames.To, IotHubWirePropertyNames.To},
        {MessageSystemPropertyNames.UserId, IotHubWirePropertyNames.UserId},
        {MessageSystemPropertyNames.Operation, MessageSystemPropertyNames.Operation},
        {MessageSystemPropertyNames.Ack, MessageSystemPropertyNames.Ack}
    };

    const char SegmentSeparatorChar = '/';
    const char SingleSegmentWildcardChar = '+';
    const char MultiSegmentWildcardChar = '#';
    static readonly char[] WildcardChars = { MultiSegmentWildcardChar, SingleSegmentWildcardChar };
    const string IotHubTrueString = "true";
    const string SegmentSeparator = "/";
    // Maximum outbound PUBLISH payload accepted by this client (0x3ffff = 256KB - 1).
    const int MaxPayloadSize = 0x3ffff;

    /// <summary>
    /// Checks whether <paramref name="topicName"/> matches the MQTT
    /// <paramref name="topicFilter"/>, honoring the '+' (single-segment) and
    /// '#' (multi-segment) wildcards.
    /// </summary>
    public static bool CheckTopicFilterMatch(string topicName, string topicFilter)
    {
        int topicFilterIndex = 0;
        int topicNameIndex = 0;
        while (topicNameIndex < topicName.Length && topicFilterIndex < topicFilter.Length)
        {
            int wildcardIndex = topicFilter.IndexOfAny(WildcardChars, topicFilterIndex);
            if (wildcardIndex == -1)
            {
                // No wildcards left: the remainders must match exactly. Using the
                // larger remaining length makes string.Compare report a mismatch
                // when one remainder is a strict prefix of the other (Compare
                // treats the shorter string as "less" once it runs out).
                int matchLength = Math.Max(topicFilter.Length - topicFilterIndex, topicName.Length - topicNameIndex);
                return string.Compare(topicFilter, topicFilterIndex, topicName, topicNameIndex, matchLength, StringComparison.Ordinal) == 0;
            }
            else
            {
                if (topicFilter[wildcardIndex] == MultiSegmentWildcardChar)
                {
                    if (wildcardIndex == 0) // special case -- any topic name would match
                    {
                        return true;
                    }
                    else
                    {
                        // Compare the literal prefix before "/#" against the topic name.
                        int matchLength = wildcardIndex - topicFilterIndex - 1;
                        if (string.Compare(topicFilter, topicFilterIndex, topicName, topicNameIndex, matchLength, StringComparison.Ordinal) == 0
                            && (topicName.Length == topicNameIndex + matchLength || (topicName.Length > topicNameIndex + matchLength && topicName[topicNameIndex + matchLength] == SegmentSeparatorChar)))
                        {
                            // paths match up till wildcard and either it is parent topic in hierarchy (one level above # specified) or any child topic under a matching parent topic
                            return true;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // single segment wildcard: match the literal part before '+',
                    // then skip one whole topic-name segment
                    int matchLength = wildcardIndex - topicFilterIndex;
                    if (matchLength > 0 && string.Compare(topicFilter, topicFilterIndex, topicName, topicNameIndex, matchLength, StringComparison.Ordinal) != 0)
                    {
                        return false;
                    }
                    topicNameIndex = topicName.IndexOf(SegmentSeparatorChar, topicNameIndex + matchLength);
                    topicFilterIndex = wildcardIndex + 1;
                    if (topicNameIndex == -1)
                    {
                        // there's no more segments following matched one
                        return topicFilterIndex == topicFilter.Length;
                    }
                }
            }
        }
        return topicFilterIndex == topicFilter.Length && topicNameIndex == topicName.Length;
    }

    /// <summary>
    /// Determines the QoS to publish <paramref name="message"/> with. If the message
    /// carries a QoS override property that parses to a valid QualityOfService value,
    /// that value wins; otherwise the configured default is used.
    /// </summary>
    public static QualityOfService DeriveQos(Message message, MqttTransportSettings config)
    {
        QualityOfService qos;
        string qosValue;
        if (message.Properties.TryGetValue(config.QoSPropertyName, out qosValue))
        {
            int qosAsInt;
            if (int.TryParse(qosValue, out qosAsInt))
            {
                qos = (QualityOfService)qosAsInt;
                // Reject out-of-range values. The lower-bound check matters:
                // a negative property value (e.g. "-1") casts to an enum value
                // below AtMostOnce, which the upper-bound check alone would
                // have let through as an invalid QoS.
                if (qos < QualityOfService.AtMostOnce || qos > QualityOfService.ExactlyOnce)
                {
                    qos = config.PublishToServerQoS;
                }
            }
            else
            {
                qos = config.PublishToServerQoS;
            }
        }
        else
        {
            qos = config.PublishToServerQoS;
        }
        return qos;
    }

    /// <summary>
    /// Assigns a fresh MessageId and surfaces the packet's Retain/Duplicate
    /// flags as message properties (using the configured property names).
    /// </summary>
    public static Message CompleteMessageFromPacket(Message message, PublishPacket packet, MqttTransportSettings mqttTransportSettings)
    {
        message.MessageId = Guid.NewGuid().ToString("N");
        if (packet.RetainRequested)
        {
            message.Properties[mqttTransportSettings.RetainPropertyName] = IotHubTrueString;
        }
        if (packet.Duplicate)
        {
            message.Properties[mqttTransportSettings.DupPropertyName] = IotHubTrueString;
        }
        return message;
    }

    /// <summary>
    /// Builds an outbound PUBLISH packet for <paramref name="message"/>: encodes the
    /// message properties into the topic, assigns a packet id for QoS &gt; 0 (encoding
    /// the QoS level in the id's top bit), and copies the body into a DotNetty buffer.
    /// </summary>
    /// <exception cref="InvalidOperationException">if the body exceeds MaxPayloadSize.</exception>
    public static async Task<PublishPacket> ComposePublishPacketAsync(IChannelHandlerContext context, Message message, QualityOfService qos, string topicName)
    {
        var packet = new PublishPacket(qos, false, false);
        packet.TopicName = PopulateMessagePropertiesFromMessage(topicName, message);
        if (qos > QualityOfService.AtMostOnce)
        {
            // PacketIdGenerator.Next() never returns a value whose low 15 bits
            // are zero, so the id stays non-zero after either adjustment below
            // (MQTT 3.1.1 [MQTT-2.3.1-1] forbids packet id 0 for QoS > 0).
            int packetId = GetNextPacketId();
            switch (qos)
            {
                case QualityOfService.AtLeastOnce:
                    packetId &= 0x7FFF; // clear 15th bit
                    break;
                case QualityOfService.ExactlyOnce:
                    packetId |= 0x8000; // set 15th bit
                    break;
                default:
                    throw new ArgumentOutOfRangeException(nameof(qos), qos, null);
            }
            packet.PacketId = packetId;
        }
        using (Stream payloadStream = message.GetBodyStream())
        {
            long streamLength = payloadStream.Length;
            if (streamLength > MaxPayloadSize)
            {
                throw new InvalidOperationException($"Message size ({streamLength} bytes) is too big to process. Maximum allowed payload size is {MaxPayloadSize}");
            }
            int length = (int)streamLength;
            IByteBuffer buffer = context.Channel.Allocator.Buffer(length, length);
            await buffer.WriteBytesAsync(payloadStream, length);
            Contract.Assert(buffer.ReadableBytes == length);
            packet.Payload = buffer;
        }
        return packet;
    }

    /// <summary>
    /// Writes and flushes <paramref name="message"/> to the channel. Exceptions are
    /// passed to <paramref name="exceptionHandler"/>; if it returns false the
    /// exception is rethrown.
    /// </summary>
    public static async Task WriteMessageAsync(IChannelHandlerContext context, object message, Func<IChannelHandlerContext, Exception, bool> exceptionHandler)
    {
        try
        {
            await context.WriteAndFlushAsync(message);
        }
        catch (Exception ex)
        {
            if (!exceptionHandler(context, ex))
            {
                throw;
            }
        }
    }

    /// <summary>
    /// Decodes the url-encoded property bag from the PUBLISH topic (everything after
    /// the 4th '/') into system and application properties on <paramref name="message"/>,
    /// and records the packet id as the lock token for QoS 1 deliveries.
    /// </summary>
    public static void PopulateMessagePropertiesFromPacket(Message message, PublishPacket publish)
    {
        message.LockToken = publish.QualityOfService == QualityOfService.AtLeastOnce ? publish.PacketId.ToString() : null;
        Dictionary<string, string> properties = UrlEncodedDictionarySerializer.Deserialize(publish.TopicName, publish.TopicName.NthIndexOf('/', 0, 4) + 1);
        foreach (KeyValuePair<string, string> property in properties)
        {
            string propertyName;
            if (ToSystemPropertiesMap.TryGetValue(property.Key, out propertyName))
            {
                message.SystemProperties[propertyName] = ConvertToSystemProperty(property);
            }
            else
            {
                message.Properties[property.Key] = property.Value;
            }
        }
    }

    /// <summary>
    /// Appends the message's system (renamed to wire form) and application properties
    /// to the topic name as a url-encoded property bag segment.
    /// </summary>
    static string PopulateMessagePropertiesFromMessage(string topicName, Message message)
    {
        var systemProperties = new Dictionary<string, string>();
        foreach (KeyValuePair<string, object> property in message.SystemProperties)
        {
            string propertyName;
            if (FromSystemPropertiesMap.TryGetValue(property.Key, out propertyName))
            {
                systemProperties[propertyName] = ConvertFromSystemProperties(property.Value);
            }
        }
        string properties = UrlEncodedDictionarySerializer.Serialize(new ReadOnlyMergeDictionary<string, string>(systemProperties, message.Properties));
        return topicName.EndsWith(SegmentSeparator, StringComparison.Ordinal) ? topicName + properties + SegmentSeparator : topicName + SegmentSeparator + properties;
    }

    /// <summary>
    /// Converts a system property value to its wire string form; DateTimes use
    /// the culture-invariant round-trip ("o") format.
    /// </summary>
    static string ConvertFromSystemProperties(object systemProperty)
    {
        if (systemProperty is string)
        {
            return (string)systemProperty;
        }
        if (systemProperty is DateTime)
        {
            return ((DateTime)systemProperty).ToString("o", CultureInfo.InvariantCulture);
        }
        return systemProperty?.ToString();
    }

    /// <summary>
    /// Converts a wire property value to its typed system property form
    /// (round-trip DateTime for expiry, DeliveryAckType for ack; strings otherwise).
    /// </summary>
    static object ConvertToSystemProperty(KeyValuePair<string, string> property)
    {
        if (string.IsNullOrEmpty(property.Value))
        {
            return property.Value;
        }
        if (property.Key == IotHubWirePropertyNames.AbsoluteExpiryTime)
        {
            return DateTime.ParseExact(property.Value, "o", CultureInfo.InvariantCulture);
        }
        if (property.Key == MessageSystemPropertyNames.Ack)
        {
            return Utils.ConvertDeliveryAckTypeFromString(property.Value);
        }
        return property.Value;
    }

    /// <summary>Returns the next non-zero MQTT packet identifier.</summary>
    public static int GetNextPacketId()
    {
        return PacketIdGenerator.Next();
    }
}
}
| |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using JetBrains.Annotations;
#if !SILVERLIGHT
namespace NLog.Targets
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Net;
using System.Net.Mail;
using System.Text;
using System.IO;
using NLog.Common;
using NLog.Config;
using NLog.Internal;
using NLog.Layouts;
// For issue #1351 - These are not available for Android or IOS
#if !__ANDROID__ && !__IOS__
using System.Configuration;
using System.Net.Configuration;
#endif
/// <summary>
/// Sends log messages by email using SMTP protocol.
/// </summary>
/// <seealso href="https://github.com/nlog/nlog/wiki/Mail-target">Documentation on NLog Wiki</seealso>
/// <example>
/// <p>
/// To set up the target in the <a href="config.html">configuration file</a>,
/// use the following syntax:
/// </p>
/// <code lang="XML" source="examples/targets/Configuration File/Mail/Simple/NLog.config" />
/// <p>
/// This assumes just one target and a single rule. More configuration
/// options are described <a href="config.html">here</a>.
/// </p>
/// <p>
/// To set up the log target programmatically use code like this:
/// </p>
/// <code lang="C#" source="examples/targets/Configuration API/Mail/Simple/Example.cs" />
/// <p>
/// Mail target works best when used with BufferingWrapper target
/// which lets you send multiple log messages in single mail
/// </p>
/// <p>
/// To set up the buffered mail target in the <a href="config.html">configuration file</a>,
/// use the following syntax:
/// </p>
/// <code lang="XML" source="examples/targets/Configuration File/Mail/Buffered/NLog.config" />
/// <p>
/// To set up the buffered mail target programmatically use code like this:
/// </p>
/// <code lang="C#" source="examples/targets/Configuration API/Mail/Buffered/Example.cs" />
/// </example>
[Target("Mail")]
public class MailTarget : TargetWithLayoutHeaderAndFooter
{
// Warning template used when a required property's rendered value turns out
// empty; presumably the message is then skipped - see usage further down the class.
private const string RequiredPropertyIsEmptyFormat = "After the processing of the MailTarget's '{0}' property it appears to be empty. The email message will not be sent.";
// Backing field for From; kept separate so the getter can fall back to the
// system.net/mailSettings/smtp config when UseSystemNetMailSettings is enabled.
private Layout _from;
/// <summary>
/// Initializes a new instance of the <see cref="MailTarget" /> class with the
/// default subject, body layout, encoding and SMTP transport settings.
/// </summary>
/// <remarks>
/// The default value of the layout is: <code>${longdate}|${level:uppercase=true}|${logger}|${message}</code>
/// </remarks>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors", Justification = "This one is safe.")]
public MailTarget()
{
    // Transport defaults: plain SMTP on port 25, no authentication, 10s timeout.
    this.SmtpPort = 25;
    this.SmtpAuthentication = SmtpAuthenticationMode.None;
    this.Timeout = 10000;
    // Message defaults: one rendered log line per event, UTF-8 encoded.
    this.Subject = "Message from NLog on ${machinename}";
    this.Body = "${message}${newline}";
    this.Encoding = Encoding.UTF8;
}
#if !__ANDROID__ && !__IOS__
// Cached copy of the system.net/mailSettings/smtp config section.
// NOTE(review): field name looks like a typo of "_currentMailSettings"; left
// as-is in case it is referenced elsewhere in this class.
private SmtpSection _currentailSettings;
/// <summary>
/// Gets the mailSettings/smtp configuration from app.config in cases when we need those configuration.
/// E.g when UseSystemNetMailSettings is enabled and we need to read the From attribute from system.net/mailSettings/smtp
/// </summary>
/// <remarks>Internal for mocking</remarks>
internal SmtpSection SmtpSection
{
    get
    {
        // Lazily load the config section once; subsequent reads hit the cache.
        if (_currentailSettings == null)
        {
            try
            {
                _currentailSettings = System.Configuration.ConfigurationManager.GetSection("system.net/mailSettings/smtp") as SmtpSection;
            }
            catch (Exception ex)
            {
                // Config read failures are non-fatal: log, optionally rethrow
                // (per NLog's exception policy), and fall back to an empty section
                // so the getter never throws again for the same instance.
                InternalLogger.Warn(ex, "reading 'From' from .config failed.");
                if (ex.MustBeRethrown())
                {
                    throw;
                }
                _currentailSettings = new SmtpSection();
            }
        }
        return _currentailSettings;
    }
    set { _currentailSettings = value; }
}
#endif
/// <summary>
/// Gets or sets sender's email address (e.g. joe@domain.com).
/// </summary>
/// <docgen category='Message Options' order='10' />
public Layout From
{
    get
    {
#if !__ANDROID__ && !__IOS__
        // System.Net.Mail.SmtpClient reads most settings from the
        // system.net/mailSettings/smtp section itself, but not the 'From'
        // attribute - so when UseSystemNetMailSettings is on and no explicit
        // From was configured on this target, fall back to the config value.
        if (UseSystemNetMailSettings && _from == null)
        {
            return SmtpSection.From;
        }
#endif
        return _from;
    }
    set { _from = value; }
}
/// <summary>
/// Gets or sets recipients' email addresses separated by semicolons (e.g. john@domain.com;jane@domain.com).
/// </summary>
/// <docgen category='Message Options' order='11' />
[RequiredParameter]
public Layout To { get; set; }
/// <summary>
/// Gets or sets CC email addresses separated by semicolons (e.g. john@domain.com;jane@domain.com).
/// </summary>
/// <docgen category='Message Options' order='12' />
public Layout CC { get; set; }
/// <summary>
/// Gets or sets BCC email addresses separated by semicolons (e.g. john@domain.com;jane@domain.com).
/// </summary>
/// <docgen category='Message Options' order='13' />
public Layout Bcc { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to add new lines between log entries.
/// </summary>
/// <value>A value of <c>true</c> if new lines should be added; otherwise, <c>false</c>.</value>
/// <docgen category='Layout Options' order='99' />
public bool AddNewLines { get; set; }
/// <summary>
/// Gets or sets the mail subject.
/// </summary>
/// <docgen category='Message Options' order='5' />
[DefaultValue("Message from NLog on ${machinename}")]
[RequiredParameter]
public Layout Subject { get; set; }
/// <summary>
/// Gets or sets mail message body (repeated for each log message send in one mail).
/// </summary>
/// <remarks>Alias for the <c>Layout</c> property.</remarks>
/// <docgen category='Message Options' order='6' />
[DefaultValue("${message}${newline}")]
public Layout Body
{
    get { return this.Layout; }
    set { this.Layout = value; }
}
/// <summary>
/// Gets or sets encoding to be used for sending e-mail.
/// </summary>
/// <docgen category='Layout Options' order='20' />
[DefaultValue("UTF8")]
public Encoding Encoding { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to send message as HTML instead of plain text.
/// </summary>
/// <docgen category='Layout Options' order='11' />
[DefaultValue(false)]
public bool Html { get; set; }
/// <summary>
/// Gets or sets SMTP Server to be used for sending.
/// </summary>
/// <docgen category='SMTP Options' order='10' />
public Layout SmtpServer { get; set; }
/// <summary>
/// Gets or sets SMTP Authentication mode.
/// </summary>
/// <docgen category='SMTP Options' order='11' />
[DefaultValue("None")]
public SmtpAuthenticationMode SmtpAuthentication { get; set; }
/// <summary>
/// Gets or sets the username used to connect to SMTP server (used when SmtpAuthentication is set to "basic").
/// </summary>
/// <docgen category='SMTP Options' order='12' />
public Layout SmtpUserName { get; set; }
/// <summary>
/// Gets or sets the password used to authenticate against SMTP server (used when SmtpAuthentication is set to "basic").
/// </summary>
/// <docgen category='SMTP Options' order='13' />
public Layout SmtpPassword { get; set; }
/// <summary>
/// Gets or sets a value indicating whether SSL (secure sockets layer) should be used when communicating with SMTP server.
/// </summary>
/// <docgen category='SMTP Options' order='14' />
[DefaultValue(false)]
public bool EnableSsl { get; set; }
/// <summary>
/// Gets or sets the port number that SMTP Server is listening on.
/// </summary>
/// <docgen category='SMTP Options' order='15' />
[DefaultValue(25)]
public int SmtpPort { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the default Settings from System.Net.MailSettings should be used.
/// </summary>
/// <docgen category='SMTP Options' order='16' />
[DefaultValue(false)]
public bool UseSystemNetMailSettings { get; set; }
/// <summary>
/// Specifies how outgoing email messages will be handled.
/// </summary>
/// <docgen category='SMTP Options' order='18' />
[DefaultValue(SmtpDeliveryMethod.Network)]
public SmtpDeliveryMethod DeliveryMethod { get; set; }
/// <summary>
/// Gets or sets the folder where applications save mail messages to be processed by the local SMTP server.
/// </summary>
/// <docgen category='SMTP Options' order='17' />
[DefaultValue(null)]
public string PickupDirectoryLocation { get; set; }
/// <summary>
/// Gets or sets the priority used for sending mails.
/// </summary>
public Layout Priority { get; set; }
/// <summary>
/// Gets or sets a value indicating whether NewLine characters in the body should be replaced with <br/> tags.
/// </summary>
/// <remarks>Only happens when <see cref="Html"/> is set to true.</remarks>
[DefaultValue(false)]
public bool ReplaceNewlineWithBrTagInHtml { get; set; }
/// <summary>
/// Gets or sets a value indicating the SMTP client timeout.
/// </summary>
/// <remarks>Warning: zero is not infinite waiting</remarks>
[DefaultValue(10000)]
public int Timeout { get; set; }
/// <summary>
/// Creates the SMTP client used to deliver the message.
/// Virtual (and internal) so tests can substitute a mock client.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "This is a factory method.")]
internal virtual ISmtpClient CreateSmtpClient()
{
    return new MySmtpClient();
}
/// <summary>
/// Renders the logging event message and adds it to the internal ArrayList of log messages.
/// </summary>
/// <param name="logEvent">The logging event.</param>
protected override void Write(AsyncLogEventInfo logEvent)
{
    // A single event is just the degenerate batch case.
    var singleEventBatch = new[] { logEvent };
    this.Write(singleEventBatch);
}
/// <summary>
/// Renders an array logging events.
/// </summary>
/// <param name="logEvents">Array of logging events.</param>
protected override void Write(AsyncLogEventInfo[] logEvents)
{
    // Events resolving to the same SMTP settings can share a single mail, so
    // group them by settings key and send one message per group.
    foreach (var bucket in logEvents.BucketSort(evt => this.GetSmtpSettingsKey(evt.LogEvent)))
    {
        this.ProcessSingleMailMessage(bucket.Value);
    }
}
/// <summary>
/// Initializes the target. Can be used by inheriting classes
/// to initialize logging.
/// </summary>
protected override void InitializeTarget()
{
    // Validate configuration before the base class marks the target ready.
    CheckRequiredParameters();
    base.InitializeTarget();
}
/// <summary>
/// Create mail and send with SMTP
/// </summary>
/// <param name="events">event printed in the body of the event</param>
private void ProcessSingleMailMessage([NotNull] List<AsyncLogEventInfo> events)
{
    try
    {
        if (events.Count == 0)
        {
            throw new NLogRuntimeException("We need at least one event.");
        }
        // Header is rendered from the first event, footer (and the message
        // envelope) from the last one.
        LogEventInfo firstEvent = events[0].LogEvent;
        LogEventInfo lastEvent = events[events.Count - 1].LogEvent;
        // unbuffered case, create a local buffer, append header, body and footer
        var bodyBuffer = CreateBodyBuffer(events, firstEvent, lastEvent);
        using (var msg = CreateMailMessage(lastEvent, bodyBuffer.ToString()))
        {
            using (ISmtpClient client = this.CreateSmtpClient())
            {
                // When UseSystemNetMailSettings is on, the client keeps the
                // settings from system.net/mailSettings; otherwise configure
                // it from this target's (layout-rendered) properties.
                if (!UseSystemNetMailSettings)
                {
                    ConfigureMailClient(lastEvent, client);
                }
                InternalLogger.Debug("Sending mail to {0} using {1}:{2} (ssl={3})", msg.To, client.Host, client.Port, client.EnableSsl);
                InternalLogger.Trace("  Subject: '{0}'", msg.Subject);
                InternalLogger.Trace("  From: '{0}'", msg.From.ToString());
                client.Send(msg);
                // Report success to every event's async continuation.
                foreach (var ev in events)
                {
                    ev.Continuation(null);
                }
            }
        }
    }
    catch (Exception exception)
    {
        //always log
        InternalLogger.Error(exception, "Error sending mail.");
        if (exception.MustBeRethrown())
        {
            throw;
        }
        // Report the failure to every event's async continuation.
        foreach (var ev in events)
        {
            ev.Continuation(exception);
        }
    }
}
/// <summary>
/// Builds the mail body: optional header (rendered with the first event),
/// one rendered line per event, and optional footer (rendered with the last event).
/// </summary>
/// <param name="events">All events included in this message.</param>
/// <param name="firstEvent">Event used to render the header layout.</param>
/// <param name="lastEvent">Event used to render the footer layout.</param>
/// <returns>Buffer holding the complete body text.</returns>
private StringBuilder CreateBodyBuffer(IEnumerable<AsyncLogEventInfo> events, LogEventInfo firstEvent, LogEventInfo lastEvent)
{
    var buffer = new StringBuilder();

    if (this.Header != null)
    {
        buffer.Append(this.Header.Render(firstEvent));
        if (this.AddNewLines)
        {
            buffer.Append("\n");
        }
    }

    foreach (AsyncLogEventInfo asyncEvent in events)
    {
        buffer.Append(this.Layout.Render(asyncEvent.LogEvent));
        if (this.AddNewLines)
        {
            buffer.Append("\n");
        }
    }

    if (this.Footer != null)
    {
        buffer.Append(this.Footer.Render(lastEvent));
        if (this.AddNewLines)
        {
            buffer.Append("\n");
        }
    }

    return buffer;
}
/// <summary>
/// Set properties of <paramref name="client"/>
/// </summary>
/// <param name="lastEvent">last event for username/password</param>
/// <param name="client">client to set properties on</param>
/// <remarks>Configure not at <see cref="InitializeTarget"/>, as the properties could have layout renderers.</remarks>
internal void ConfigureMailClient(LogEventInfo lastEvent, ISmtpClient client)
{
    CheckRequiredParameters();

    // At least one of the two delivery destinations must be configured.
    if (this.SmtpServer == null && string.IsNullOrEmpty(this.PickupDirectoryLocation))
    {
        throw new NLogRuntimeException(string.Format(RequiredPropertyIsEmptyFormat, "SmtpServer/PickupDirectoryLocation"));
    }

    // The chosen delivery method dictates which destination is required.
    if (this.DeliveryMethod == SmtpDeliveryMethod.Network && this.SmtpServer == null)
    {
        throw new NLogRuntimeException(string.Format(RequiredPropertyIsEmptyFormat, "SmtpServer"));
    }

    if (this.DeliveryMethod == SmtpDeliveryMethod.SpecifiedPickupDirectory && string.IsNullOrEmpty(this.PickupDirectoryLocation))
    {
        throw new NLogRuntimeException(string.Format(RequiredPropertyIsEmptyFormat, "PickupDirectoryLocation"));
    }

    if (this.SmtpServer != null && this.DeliveryMethod == SmtpDeliveryMethod.Network)
    {
        // SmtpServer is a layout, so it is rendered per message and may still
        // come out empty even though the property itself is set.
        var renderedSmtpServer = this.SmtpServer.Render(lastEvent);
        if (string.IsNullOrEmpty(renderedSmtpServer))
        {
            throw new NLogRuntimeException(string.Format(RequiredPropertyIsEmptyFormat, "SmtpServer"));
        }

        client.Host = renderedSmtpServer;
        client.Port = this.SmtpPort;
        client.EnableSsl = this.EnableSsl;

        if (this.SmtpAuthentication == SmtpAuthenticationMode.Ntlm)
        {
            InternalLogger.Trace(" Using NTLM authentication.");
            client.Credentials = CredentialCache.DefaultNetworkCredentials;
        }
        else if (this.SmtpAuthentication == SmtpAuthenticationMode.Basic)
        {
            string username = this.SmtpUserName.Render(lastEvent);
            string password = this.SmtpPassword.Render(lastEvent);

            // The password is masked with '*' in the trace output.
            InternalLogger.Trace(" Using basic authentication: Username='{0}' Password='{1}'", username, new string('*', password.Length));
            client.Credentials = new NetworkCredential(username, password);
        }
    }

    if (!string.IsNullOrEmpty(this.PickupDirectoryLocation) && this.DeliveryMethod == SmtpDeliveryMethod.SpecifiedPickupDirectory)
    {
        client.PickupDirectoryLocation = ConvertDirectoryLocation(PickupDirectoryLocation);
    }

    // In case DeliveryMethod = PickupDirectoryFromIis we will not require Host nor PickupDirectoryLocation
    client.DeliveryMethod = this.DeliveryMethod;
    client.Timeout = this.Timeout;
}
/// <summary>
/// Resolves <paramref name="pickupDirectoryLocation"/> when it is an ASP.NET-style
/// virtual path ("~/...") by rooting it at the current AppDomain base directory.
/// </summary>
/// <param name="pickupDirectoryLocation">Absolute/relative path or a "~/" virtual path.</param>
/// <returns>The input unchanged, or the virtual path mapped to a physical path.</returns>
internal static string ConvertDirectoryLocation(string pickupDirectoryLocation)
{
    const string virtualPathPrefix = "~/";

    // FIX: use an ordinal comparison — the prefix is a syntactic marker, and the
    // culture-sensitive StartsWith overload can misbehave under some cultures (CA1310).
    if (!pickupDirectoryLocation.StartsWith(virtualPathPrefix, StringComparison.Ordinal))
    {
        return pickupDirectoryLocation;
    }

    // Support for Virtual Paths: strip "~/" and convert URL separators
    // to the platform's directory separator before combining.
    var root = AppDomain.CurrentDomain.BaseDirectory;
    var directory = pickupDirectoryLocation.Substring(virtualPathPrefix.Length).Replace('/', Path.DirectorySeparatorChar);
    return Path.Combine(root, directory);
}
/// <summary>
/// Validates that the properties required by the configured delivery method are set.
/// </summary>
/// <exception cref="NLogConfigurationException">Thrown when a required property is missing.</exception>
private void CheckRequiredParameters()
{
    if (!this.UseSystemNetMailSettings)
    {
        // Without system.net mail settings, the delivery method dictates what must be configured.
        if (this.DeliveryMethod == SmtpDeliveryMethod.Network && this.SmtpServer == null)
        {
            throw new NLogConfigurationException("The MailTarget's '{0}' properties are not set - but needed because useSystemNetMailSettings=false and DeliveryMethod=Network. The email message will not be sent.", "SmtpServer");
        }

        if (this.DeliveryMethod == SmtpDeliveryMethod.SpecifiedPickupDirectory && string.IsNullOrEmpty(this.PickupDirectoryLocation))
        {
            throw new NLogConfigurationException("The MailTarget's '{0}' properties are not set - but needed because useSystemNetMailSettings=false and DeliveryMethod=SpecifiedPickupDirectory. The email message will not be sent.", "PickupDirectoryLocation");
        }
    }

    if (this.From == null)
    {
        throw new NLogConfigurationException(RequiredPropertyIsEmptyFormat, "From");
    }
}
/// <summary>
/// Builds a grouping key from all per-message layouts so events that render
/// identical SMTP settings can be batched into one mail message.
/// </summary>
/// <param name="logEvent">Event used for rendering the layouts.</param>
/// <returns>String to group on.</returns>
private string GetSmtpSettingsKey(LogEventInfo logEvent)
{
    var keyBuilder = new StringBuilder();

    // Order matters only for key stability; every layout contributes one segment.
    var layouts = new Layout[] { this.From, this.To, this.CC, this.Bcc, this.SmtpServer, this.SmtpPassword, this.SmtpUserName };
    foreach (var layout in layouts)
    {
        AppendLayout(keyBuilder, logEvent, layout);
    }

    return keyBuilder.ToString();
}
/// <summary>
/// Appends a '|' separator followed by the rendered layout (nothing more when
/// the layout is null, keeping segment positions stable in the key).
/// </summary>
/// <param name="sb">Builder to append to.</param>
/// <param name="logEvent">Event for rendering <paramref name="layout"/>.</param>
/// <param name="layout">Layout to render; may be null.</param>
private static void AppendLayout(StringBuilder sb, LogEventInfo logEvent, Layout layout)
{
    sb.Append("|");
    if (layout == null)
    {
        return;
    }
    sb.Append(layout.Render(logEvent));
}
/// <summary>
/// Creates the mail message with the addresses, properties and body.
/// </summary>
/// <param name="lastEvent">Event used to render the address, subject and priority layouts.</param>
/// <param name="body">Rendered message body.</param>
/// <returns>A configured <see cref="MailMessage"/>; the caller is responsible for disposing it.</returns>
/// <exception cref="NLogRuntimeException">From is empty, or no To/Cc/Bcc address was produced.</exception>
private MailMessage CreateMailMessage(LogEventInfo lastEvent, string body)
{
    var msg = new MailMessage();

    var renderedFrom = this.From == null ? null : this.From.Render(lastEvent);
    if (string.IsNullOrEmpty(renderedFrom))
    {
        throw new NLogRuntimeException(RequiredPropertyIsEmptyFormat, "From");
    }
    msg.From = new MailAddress(renderedFrom);

    // At least one recipient collection must end up non-empty.
    var addedTo = AddAddresses(msg.To, this.To, lastEvent);
    var addedCc = AddAddresses(msg.CC, this.CC, lastEvent);
    var addedBcc = AddAddresses(msg.Bcc, this.Bcc, lastEvent);
    if (!addedTo && !addedCc && !addedBcc)
    {
        throw new NLogRuntimeException(RequiredPropertyIsEmptyFormat, "To/Cc/Bcc");
    }

    msg.Subject = this.Subject == null ? string.Empty : this.Subject.Render(lastEvent).Trim();
    msg.BodyEncoding = this.Encoding;
    msg.IsBodyHtml = this.Html;

    if (this.Priority != null)
    {
        var renderedPriority = this.Priority.Render(lastEvent);
        try
        {
            msg.Priority = (MailPriority)Enum.Parse(typeof(MailPriority), renderedPriority, true);
        }
        catch
        {
            // FIX: the format string had a {0} placeholder but the rendered value
            // was never passed, so the warning always logged a literal "{0}".
            InternalLogger.Warn("Could not convert '{0}' to MailPriority, valid values are Low, Normal and High. Using normal priority as fallback.", renderedPriority);
            msg.Priority = MailPriority.Normal;
        }
    }

    msg.Body = body;
    if (msg.IsBodyHtml && ReplaceNewlineWithBrTagInHtml && msg.Body != null)
    {
        msg.Body = msg.Body.Replace(EnvironmentHelper.NewLine, "<br/>");
    }

    return msg;
}
/// <summary>
/// Renders <paramref name="layout"/> and adds the resulting ';'-separated
/// addresses to <paramref name="mailAddressCollection"/>.
/// </summary>
/// <param name="mailAddressCollection">Addresses are appended to this collection.</param>
/// <param name="layout">Layout with addresses, ';' separated; may be null.</param>
/// <param name="logEvent">Event for rendering the <paramref name="layout"/>.</param>
/// <returns>True when at least one address was added.</returns>
private static bool AddAddresses(MailAddressCollection mailAddressCollection, Layout layout, LogEventInfo logEvent)
{
    if (layout == null)
    {
        return false;
    }

    var added = false;
    var rendered = layout.Render(logEvent);
    foreach (var address in rendered.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries))
    {
        mailAddressCollection.Add(address);
        added = true;
    }

    return added;
}
}
}
#endif
| |
namespace Nancy.Tests.Unit
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using FakeItEasy;
using Nancy.Helpers;
using Nancy.IO;
using Xunit;
using Xunit.Extensions;
public class RequestFixture
{
[Fact]
public void Should_dispose_request_stream_when_being_disposed()
{
// Given
var stream = A.Fake<RequestStream>(x =>
{
x.Implements(typeof(IDisposable));
x.WithArgumentsForConstructor(() => new RequestStream(0, false));
});
var url = new Url()
{
Scheme = "http",
Path = "localhost"
};
var request = new Request("GET", url, stream);
// When
request.Dispose();
// Then
A.CallTo(() => ((IDisposable)stream).Dispose()).MustHaveHappened();
}
[Fact]
public void Should_be_disposable()
{
// Given, When, Then
typeof(Request).ShouldImplementInterface<IDisposable>();
}
[Fact]
public void Should_override_request_method_on_post()
{
// Given
const string bodyContent = "_method=GET";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>> { { "content-type", new[] { "application/x-www-form-urlencoded" } } };
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
request.Method.ShouldEqual("GET");
}
[Theory]
[InlineData("GET")]
[InlineData("PUT")]
[InlineData("DELETE")]
[InlineData("HEAD")]
public void Should_only_override_method_on_post(string method)
{
// Given
const string bodyContent = "_method=TEST";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>> { { "content-type", new[] { "application/x-www-form-urlencoded" } } };
// When
var request = new Request(method, new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
request.Method.ShouldEqual(method);
}
[Fact]
public void Should_throw_argumentoutofrangeexception_when_initialized_with_null_method()
{
// Given, When
var exception =
Record.Exception(() => new Request(null, "/", "http"));
// Then
exception.ShouldBeOfType<ArgumentOutOfRangeException>();
}
[Fact]
public void Should_throw_argumentoutofrangeexception_when_initialized_with_empty_method()
{
// Given, When
var exception =
Record.Exception(() => new Request(string.Empty, "/", "http"));
// Then
exception.ShouldBeOfType<ArgumentOutOfRangeException>();
}
[Fact]
public void Should_throw_null_exception_when_initialized_with_null_uri()
{
// Given, When
var exception =
Record.Exception(() => new Request("GET", null, "http"));
// Then
exception.ShouldBeOfType<ArgumentNullException>();
}
[Fact]
public void Should_set_method_parameter_value_to_method_property_when_initialized()
{
// Given
const string method = "GET";
// When
var request = new Request(method, "/", "http");
// Then
request.Method.ShouldEqual(method);
}
[Fact]
public void Should_set_uri_parameter_value_to_uri_property_when_initialized()
{
// Given
const string path = "/";
// When
var request = new Request("GET", path, "http");
// Then
request.Path.ShouldEqual(path);
}
[Fact]
public void Should_set_header_parameter_value_to_header_property_when_initialized()
{
// Given
var headers = new Dictionary<string, IEnumerable<string>>()
{
{ "content-type", new[] {"foo/bar"} }
};
// When
var request = new Request("GET", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(), headers);
// Then
request.Headers.ContentType.ShouldNotBeNull();
}
[Fact]
public void Should_set_body_parameter_value_to_body_property_when_initialized()
{
// Given
var body = CreateRequestStream();
// When
var request = new Request("GET", new Url { Path = "/", Scheme = "http" }, body, new Dictionary<string, IEnumerable<string>>());
// Then
request.Body.ShouldBeSameAs(body);
}
[Fact]
public void Should_set_extract_form_data_from_body_when_content_type_is_x_www_form_urlencoded()
{
// Given
const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "application/x-www-form-urlencoded" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
((string)request.Form.name).ShouldEqual("John Doe");
}
[Fact]
public void Should_set_extract_form_data_from_body_when_content_type_is_x_www_form_urlencoded_with_character_set()
{
// Given
const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "application/x-www-form-urlencoded; charset=UTF-8" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
((string)request.Form.name).ShouldEqual("John Doe");
}
[Fact]
public void Should_set_extracted_form_data_from_body_when_content_type_is_multipart_form_data()
{
// Given
var memory =
new MemoryStream(BuildMultipartFormValues(new Dictionary<string, string>
{
{ "name", "John Doe"},
{ "age", "42"}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
((string)request.Form.name).ShouldEqual("John Doe");
((string)request.Form.age).ShouldEqual("42");
}
[Fact]
public void Should_respect_case_insensitivity_when_extracting_form_data_from_body_when_content_type_is_x_www_form_urlencoded()
{
// Given
StaticConfiguration.CaseSensitive = false;
const string bodyContent = "key=value&key=value&KEY=VALUE";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "application/x-www-form-urlencoded" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
((string)request.Form.key).ShouldEqual("value,value,VALUE");
((string)request.Form.KEY).ShouldEqual("value,value,VALUE");
}
[Fact]
public void Should_respect_case_sensitivity_when_extracting_form_data_from_body_when_content_type_is_x_www_form_urlencoded()
{
// Given
StaticConfiguration.CaseSensitive = true;
const string bodyContent = "key=value&key=value&KEY=VALUE";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "application/x-www-form-urlencoded" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
((string)request.Form.key).ShouldEqual("value,value");
((string)request.Form.KEY).ShouldEqual("VALUE");
}
[Fact]
public void Should_respect_case_insensitivity_when_extracting_form_data_from_body_when_content_type_is_multipart_form_data()
{
// Given
StaticConfiguration.CaseSensitive = false;
var memory =
new MemoryStream(BuildMultipartFormValues(new Dictionary<string, string>(StringComparer.Ordinal)
{
{ "key", "value" },
{ "KEY", "VALUE" }
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
((string)request.Form.key).ShouldEqual("value,VALUE");
((string)request.Form.KEY).ShouldEqual("value,VALUE");
}
[Fact]
public void Should_respect_case_sensitivity_when_extracting_form_data_from_body_when_content_type_is_multipart_form_data()
{
// Given
StaticConfiguration.CaseSensitive = true;
var memory =
new MemoryStream(BuildMultipartFormValues(new Dictionary<string, string>(StringComparer.Ordinal)
{
{ "key", "value" },
{ "KEY", "VALUE" }
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
((string)request.Form.key).ShouldEqual("value");
((string)request.Form.KEY).ShouldEqual("VALUE");
}
[Fact]
public void Should_set_extracted_files_to_files_collection_when_body_content_type_is_multipart_form_data()
{
// Given
var memory =
new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
{
{ "test", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
request.Files.ShouldHaveCount(1);
}
[Fact]
public void Should_set_content_type_on_file_extracted_from_multipart_form_data_body()
{
// Given
var memory =
new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
{
{ "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
request.Files.First().ContentType.ShouldEqual("content/type");
}
[Fact]
public void Should_set_name_on_file_extracted_from_multipart_form_data_body()
{
// Given
var memory =
new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
{
{ "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
request.Files.First().Name.ShouldEqual("sample.txt");
}
[Fact]
public void Should_value_on_file_extracted_from_multipart_form_data_body()
{
// Given
var memory =
new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
{
{ "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
GetStringValue(request.Files.First().Value).ShouldEqual("some test content");
}
[Fact]
public void Should_set_key_on_file_extracted_from_multipart_data_body()
{
// Given
var memory =
new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
{
{ "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "fieldname")}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
request.Files.First().Key.ShouldEqual("fieldname");
}
// Reads the remainder of the stream as text. The reader (and thus the stream)
// is intentionally not disposed so callers can keep using the stream.
private static string GetStringValue(Stream stream)
{
    return new StreamReader(stream).ReadToEnd();
}
[Fact]
public void Should_be_able_to_invoke_form_repeatedly()
{
    // Given
    const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
    var memory = new MemoryStream();
    var writer = new StreamWriter(memory);
    writer.Write(bodyContent);
    writer.Flush();
    memory.Position = 0;
    var headers =
        new Dictionary<string, IEnumerable<string>>
        {
            { "content-type", new[] { "application/x-www-form-urlencoded" } }
        };

    // When
    var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);

    // Then
    // FIX: the test's name promises repeated invocation, but Form was only read
    // once — access it twice so re-reading the parsed body is actually verified.
    ((string)request.Form.name).ShouldEqual("John Doe");
    ((string)request.Form.name).ShouldEqual("John Doe");
}
[Fact]
public void Should_throw_argumentoutofrangeexception_when_initialized_with_null_protocol()
{
// Given, When
var exception =
Record.Exception(() => new Request("GET", "/", null));
// Then
exception.ShouldBeOfType<ArgumentOutOfRangeException>();
}
[Fact]
public void Should_throw_argumentoutofrangeexception_when_initialized_with_an_empty_protocol()
{
// Given, When
var exception =
Record.Exception(() => new Request("GET", "/", string.Empty));
// Then
exception.ShouldBeOfType<ArgumentOutOfRangeException>();
}
[Fact]
public void Should_set_protocol_parameter_value_to_protocol_property_when_initialized()
{
// Given
const string protocol = "http";
// When
var request = new Request("GET", "/", protocol);
// Then
request.Url.Scheme.ShouldEqual(protocol);
}
[Fact]
public void Should_split_cookie_in_two_parts_only()
{
// Given, when
var cookieName = "_nc";
var cookieData = "Y+M3rcC/7ssXvHTx9pwCbwQVV4g=sp0hUZVApYgGbKZIU4bvXbBCVl9fhSEssEXSGdrt4jVag6PO1oed8lSd+EJD1nzWx4OTTCTZKjYRWeHE97QVND4jJIl+DuKRgJnSl3hWI5gdgGjcxqCSTvMOMGmW3NHLVyKpajGD8tq1DXhXMyXHjTzrCAYl8TGzwyJJGx/gd7VMJeRbAy9JdHOxEUlCKUnPneWN6q+/ITFryAa5hAdfcjXmh4Fgym75whKOMkWO+yM2icdsciX0ShcvnEQ/bXcTHTya6d7dJVfZl7qQ8AgIQv8ucQHxD3NxIvHNPBwms2ClaPds0HG5N+7pu7eMSFZjUHpDrrCnFvYN+JDiG3GMpf98LuCCvxemvipJo2MUkY4J1LvaDFoWA5tIxAfItZJkSIW2d8JPDwFk8OHJy8zhyn8AjD2JFqWaUZr4y9KZOtgI0V0Qlq0mS3mDSlLn29xapgoPHBvykwQjR6TwF2pBLpStsfZa/tXbEv2mc3VO3CnErIA1lEfKNqn9C/Dw6hqW";
var headers = new Dictionary<string, IEnumerable<string>>();
var cookies = new List<string>();
cookies.Add(string.Format("{0}={1}", cookieName, HttpUtility.UrlEncode(cookieData)));
headers.Add("cookie", cookies);
var newUrl = new Url
{
Path = "/"
};
var request = new Request("GET", newUrl, null, headers);
// Then
request.Cookies[cookieName].ShouldEqual(cookieData);
}
[Fact]
public void Should_split_cookie_in_two_parts_with_secure_attribute()
{
// Given, when
const string cookieName = "path";
const string cookieData = "/";
var headers = new Dictionary<string, IEnumerable<string>>();
var cookies = new List<string> { string.Format("{0}={1}; Secure", cookieName, cookieData)} ;
headers.Add("cookie", cookies);
var newUrl = new Url
{
Path = "/"
};
var request = new Request("GET", newUrl, null, headers);
// Then
request.Cookies[cookieName].ShouldEqual(cookieData);
}
[Fact]
public void Should_split_cookie_in_two_parts_with_httponly_and_secure_attribute()
{
// Given, when
const string cookieName = "path";
const string cookieData = "/";
var headers = new Dictionary<string, IEnumerable<string>>();
var cookies = new List<string> { string.Format("{0}={1}; HttpOnly; Secure", cookieName, cookieData) };
headers.Add("cookie", cookies);
var newUrl = new Url
{
Path = "/"
};
var request = new Request("GET", newUrl, null, headers);
// Then
request.Cookies[cookieName].ShouldEqual(cookieData);
}
[Fact]
public void Should_split_cookie_in_two_parts_with_httponly_and_secure_attribute_ignoring_case()
{
// Given, when
const string cookieName = "path";
const string cookieData = "/";
var headers = new Dictionary<string, IEnumerable<string>>();
var cookies = new List<string> { string.Format("{0}={1}; httponly; secure", cookieName, cookieData) };
headers.Add("cookie", cookies);
var newUrl = new Url
{
Path = "/"
};
var request = new Request("GET", newUrl, null, headers);
// Then
request.Cookies[cookieName].ShouldEqual(cookieData);
}
[Fact]
public void Should_split_cookie_in_two_parts_with_httponly_attribute()
{
// Given, when
const string cookieName = "path";
const string cookieData = "/";
var headers = new Dictionary<string, IEnumerable<string>>();
var cookies = new List<string> { string.Format("{0}={1}; HttpOnly", cookieName, cookieData) };
headers.Add("cookie", cookies);
var newUrl = new Url
{
Path = "/"
};
var request = new Request("GET", newUrl, null, headers);
// Then
request.Cookies[cookieName].ShouldEqual(cookieData);
}
[Fact]
public void Should_add_attribute_in_cookie_as_empty_value()
{
// Given, when
const string cookieName = "path";
const string cookieData = "/";
const string cookieAttribute = "SomeAttribute";
var headers = new Dictionary<string, IEnumerable<string>>();
var cookies = new List<string> { string.Format("{0}={1}; {2}", cookieName, cookieData, cookieAttribute) };
headers.Add("cookie", cookies);
var newUrl = new Url
{
Path = "/"
};
var request = new Request("GET", newUrl, null, headers);
// Then
request.Cookies[cookieName].ShouldEqual(cookieData);
request.Cookies[cookieAttribute].ShouldEqual(string.Empty);
}
[Fact]
public void Should_move_request_body_position_to_zero_after_parsing_url_encoded_data()
{
// Given
const string bodyContent = "name=John+Doe&gender=male&family=5&city=kent&city=miami&other=abc%0D%0Adef&nickname=J%26D";
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(bodyContent);
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "application/x-www-form-urlencoded; charset=UTF-8" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
memory.Position.ShouldEqual(0L);
}
[Fact]
public void Should_move_request_body_position_to_zero_after_parsing_multipart_encoded_data()
{
// Given
var memory =
new MemoryStream(BuildMultipartFileValues(new Dictionary<string, Tuple<string, string, string>>
{
{ "sample.txt", new Tuple<string, string, string>("content/type", "some test content", "whatever")}
}));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
memory.Position.ShouldEqual(0L);
}
[Fact]
public void Should_preserve_all_values_when_multiple_are_posted_using_same_name_after_parsing_multipart_encoded_data()
{
// Given
var memory =
new MemoryStream(BuildMultipartFormValues(
new KeyValuePair<string, string>("age", "32"),
new KeyValuePair<string, string>("age", "42"),
new KeyValuePair<string, string>("age", "52")
));
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "multipart/form-data; boundary=----NancyFormBoundary" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, CreateRequestStream(memory), headers);
// Then
((string)request.Form.age).ShouldEqual("32,42,52");
}
[Fact]
public void Should_limit_the_amount_of_form_fields_parsed()
{
// Given
var sb = new StringBuilder();
for (int i = 0; i < StaticConfiguration.RequestQueryFormMultipartLimit + 10; i++)
{
if (i > 0)
{
sb.Append('&');
}
sb.AppendFormat("Field{0}=Value{0}", i);
}
var memory = CreateRequestStream();
var writer = new StreamWriter(memory);
writer.Write(sb.ToString());
writer.Flush();
memory.Position = 0;
var headers =
new Dictionary<string, IEnumerable<string>>
{
{ "content-type", new[] { "application/x-www-form-urlencoded" } }
};
// When
var request = new Request("POST", new Url { Path = "/", Scheme = "http" }, memory, headers);
// Then
((IEnumerable<string>)request.Form.GetDynamicMemberNames()).Count().ShouldEqual(StaticConfiguration.RequestQueryFormMultipartLimit);
}
[Fact]
public void Should_limit_the_amount_of_querystring_fields_parsed()
{
// Given
var sb = new StringBuilder();
for (int i = 0; i < StaticConfiguration.RequestQueryFormMultipartLimit + 10; i++)
{
if (i > 0)
{
sb.Append('&');
}
sb.AppendFormat("Field{0}=Value{0}", i);
}
var memory = CreateRequestStream();
// When
var request = new Request("GET", new Url { Path = "/", Scheme = "http", Query = sb.ToString() }, memory, new Dictionary<string, IEnumerable<string>>());
// Then
((IEnumerable<string>)request.Query.GetDynamicMemberNames()).Count().ShouldEqual(StaticConfiguration.RequestQueryFormMultipartLimit);
}
[Fact]
public void Should_change_empty_path_to_root()
{
var request = new Request("GET", "", "http");
request.Path.ShouldEqual("/");
}
[Fact]
public void Should_replace_value_of_query_key_without_value_with_true()
{
// Given
var memory = CreateRequestStream();
// When
var request = new Request("GET", new Url { Path = "/", Scheme = "http", Query = "key1" }, memory);
// Then
((bool)request.Query.key1).ShouldBeTrue();
((string)request.Query.key1).ShouldEqual("key1");
}
[Fact]
public void Should_not_replace_equal_key_value_query_with_bool()
{
// Given
var memory = CreateRequestStream();
// When
var request = new Request("GET", new Url { Path = "/", Scheme = "http", Query = "key1=key1" }, memory);
// Then
ShouldAssertExtensions.ShouldBeOfType<string>(request.Query["key1"].Value);
}
// Creates an empty request stream backed by a fresh MemoryStream.
private static RequestStream CreateRequestStream()
{
    return CreateRequestStream(new MemoryStream());
}

// Wraps an arbitrary stream in Nancy's RequestStream for use as a request body.
private static RequestStream CreateRequestStream(Stream stream)
{
    return RequestStream.FromStream(stream);
}
// Builds an ASCII multipart/form-data body containing one form-data part per
// key/value pair, delimited by the fixture's "----NancyFormBoundary" boundary.
private static byte[] BuildMultipartFormValues(params KeyValuePair<string, string>[] values)
{
    const string crlf = "\r\n";
    const string boundary = "----NancyFormBoundary";

    var builder = new StringBuilder();
    foreach (var pair in values)
    {
        builder.Append(crlf);
        builder.Append("--").Append(boundary);
        builder.Append(crlf);
        builder.AppendFormat("Content-Disposition: form-data; name=\"{0}\"", pair.Key);
        builder.Append(crlf).Append(crlf);
        builder.Append(pair.Value);
    }

    // Closing boundary: "--" + boundary + "--".
    builder.Append(crlf);
    builder.Append("--").Append(boundary).Append("--");

    return Encoding.ASCII.GetBytes(builder.ToString());
}
// Dictionary convenience overload: a Dictionary already enumerates as
// KeyValuePairs, so it can be forwarded to the params overload directly.
private static byte[] BuildMultipartFormValues(Dictionary<string, string> formValues)
{
    return BuildMultipartFormValues(formValues.ToArray());
}
// Builds an ASCII multipart/form-data body with one file part per entry.
// Entry key = filename; tuple = (content type, file content, field name).
private static byte[] BuildMultipartFileValues(Dictionary<string, Tuple<string, string, string>> formValues)
{
    const string crlf = "\r\n";
    const string boundary = "----NancyFormBoundary";

    var builder = new StringBuilder();
    foreach (var entry in formValues)
    {
        var fileName = entry.Key;
        var contentType = entry.Value.Item1;
        var content = entry.Value.Item2;
        var fieldName = entry.Value.Item3;

        builder.Append(crlf);
        builder.Append("--").Append(boundary);
        builder.Append(crlf);
        builder.AppendFormat("Content-Disposition: form-data; name=\"{1}\"; filename=\"{0}\"", fileName, fieldName);
        builder.Append(crlf);
        builder.AppendFormat("Content-Type: {0}", contentType);
        builder.Append(crlf).Append(crlf);
        builder.Append(content);
    }

    // Closing boundary: "--" + boundary + "--".
    builder.Append(crlf);
    builder.Append("--").Append(boundary).Append("--");

    return Encoding.ASCII.GetBytes(builder.ToString());
}
}
}
| |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2009, 2010 Oracle and/or its affiliates. All rights reserved.
*
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Xml;
using NUnit.Framework;
using BerkeleyDB;
namespace CsharpAPITest
{
[TestFixture]
public class DatabaseEnvironmentTest
{
private string testFixtureHome;
private string testFixtureName;
private string testName;
private string testHome;
private DatabaseEnvironment testBeginTransactionEnv;
private BTreeDatabase testBeginTransactionDB;
private DatabaseEnvironment testCheckpointEnv;
private BTreeDatabase testCheckpointDB;
private DatabaseEnvironment testDetectDeadlocksEnv;
private BTreeDatabase testDetectDeadlocksDB;
private DatabaseEnvironment testFailCheckEnv;
private EventWaitHandle signal;
[TestFixtureSetUp]
public void SetUp()
{
    // One-time fixture setup: establish the fixture-wide output directory
    // and make sure it starts empty; each test creates its own home
    // directory underneath it.
    testFixtureName = "DatabaseEnvironmentTest";
    testFixtureHome = "./TestOut/" + testFixtureName;
    try
    {
        Configuration.ClearDir(testFixtureHome);
    }
    catch (Exception)
    {
        // Wrap any cleanup failure in a clearer test-level error.
        throw new TestException(
            "Please clean the directory");
    }
}
[Test]
public void TestArchivableDatabaseFiles()
{
    // Verifies that ArchivableDatabaseFiles reports every database file
    // created in the environment, both by name and by absolute path.
    testName = "TestArchivableDatabaseFiles";
    testHome = testFixtureHome + "/" + testName;
    string dbFileName1 = testName + "1.db";
    string dbFileName2 = testName + "2.db";
    Configuration.ClearDir(testHome);
    // Open an environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.AutoCommit = true;
    envConfig.Create = true;
    envConfig.UseMPool = true;
    envConfig.UseLogging = true;
    envConfig.UseTxns = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    // Open two databases.
    BTreeDatabaseConfig dbConfig =
        new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    BTreeDatabase db1 = BTreeDatabase.Open(dbFileName1, dbConfig);
    db1.Close();
    BTreeDatabase db2 = BTreeDatabase.Open(dbFileName2, dbConfig);
    db2.Close();
    /*
     * Get all database file names in the environment.
     * Two database file names should be returned and
     * be the same as the ones used when opening the databases.
     */
    List<string> dbFiles = env.ArchivableDatabaseFiles(false);
    Assert.AreEqual(2, dbFiles.Count);
    Assert.IsTrue(dbFiles.Contains(dbFileName1));
    Assert.IsTrue(dbFiles.Contains(dbFileName2));
    /*
     * Get all database files' absolute paths in the
     * environment. Confirm those files exist.
     */
    List<string> dbFilesPath = env.ArchivableDatabaseFiles(true);
    Assert.IsTrue(File.Exists(dbFilesPath[0]));
    Assert.IsTrue(File.Exists(dbFilesPath[1]));
    env.Close();
}
[Test]
public void TestArchivableLogFiles()
{
    // Verifies ArchivableLogFiles: after enough writes to span several
    // log files, the environment reports the same set of log files by
    // name and by absolute path, and each path exists on disk.
    testName = "TestArchivableLogFiles";
    testHome = testFixtureHome + "/" + testName;
    string dbFileName = testName + ".db";
    Configuration.ClearDir(testHome);
    // Open an environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.AutoCommit = true;
    envConfig.Create = true;
    envConfig.UseMPool = true;
    envConfig.UseLogging = true;
    envConfig.UseTxns = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    // Open a database.
    BTreeDatabaseConfig dbConfig =
        new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    BTreeDatabase db = BTreeDatabase.Open(
        dbFileName, dbConfig);
    /*
     * Put 1000 records into the database to generate
     * more than one log file.
     */
    byte[] byteArr = new byte[1024];
    for (int i = 0; i < 1000; i++)
        db.Put(new DatabaseEntry(
            BitConverter.GetBytes(i)),
            new DatabaseEntry(byteArr));
    db.Close();
    List<string> logFiles = env.ArchivableLogFiles(false);
    List<string> logFilesPath =
        env.ArchivableLogFiles(true);
    /*
     * The name list and the path list describe the same set of log
     * files.  (The original fetched logFiles but never checked it.)
     */
    Assert.AreEqual(logFiles.Count, logFilesPath.Count);
    for (int i = 0; i < logFilesPath.Count; i++)
        Assert.IsTrue(File.Exists(logFilesPath[i]));
    env.Close();
}
[Test]
public void TestBeginCDSGroup()
{
    // Groups a database open and a put under a single BeginCDSGroup()
    // handle in a Concurrent Data Store environment, then commits it.
    testName = "TestBeginCDSGroup";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig envCfg =
        new DatabaseEnvironmentConfig();
    envCfg.Create = true;
    envCfg.UseCDB = true;
    envCfg.UseMPool = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, envCfg);
    Transaction cdsGroup = env.BeginCDSGroup();
    BTreeDatabaseConfig dbCfg = new BTreeDatabaseConfig();
    dbCfg.Creation = CreatePolicy.IF_NEEDED;
    dbCfg.Env = env;
    BTreeDatabase db = BTreeDatabase.Open(
        testName + ".db", dbCfg, cdsGroup);
    db.Put(
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")), cdsGroup);
    db.Close();
    cdsGroup.Commit();
    env.Close();
}
[Test]
public void TestBeginTransaction()
{
    // Drives two worker threads (one with a lock timeout, one with a txn
    // timeout) into lock contention on a shared record, exercising
    // BeginTransaction plus deadlock/timeout resolution.
    testName = "TestBeginTransaction";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open an environment.
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseTxns = true;
    cfg.UseMPool = true;
    cfg.UseLogging = true;
    cfg.UseLocking = true;
    cfg.NoLocking = false;
    cfg.FreeThreaded = true;
    testBeginTransactionEnv = DatabaseEnvironment.Open(testHome, cfg);
    testBeginTransactionEnv.DeadlockResolution = DeadlockPolicy.OLDEST;
    // Open btree database.
    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.AutoCommit = true;
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = testBeginTransactionEnv;
    dbConfig.Duplicates = DuplicatesPolicy.NONE;
    dbConfig.FreeThreaded = true;
    testBeginTransactionDB = BTreeDatabase.Open(
        testName + ".db", dbConfig);
    testBeginTransactionDB.Put(
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
    // Begin two threads to run dead lock detection.
    Thread thread1 = new Thread(new ThreadStart(
        DeadLockThreadWithLockTimeOut));
    Thread thread2 = new Thread(new ThreadStart(
        DeadLockThreadWithTxnTimeout));
    signal = new EventWaitHandle(false, EventResetMode.ManualReset);
    thread1.Start();
    thread2.Start();
    // Give both workers time to reach signal.WaitOne() before releasing
    // them simultaneously to contend for write locks.
    Thread.Sleep(1000);
    signal.Set();
    thread1.Join();
    thread2.Join();
    // Close all.
    testBeginTransactionDB.Close();
    testBeginTransactionEnv.Close();
}
public void DeadLockThreadWithLockTimeOut()
{
    // Worker for TestBeginTransaction: runs a transaction with a 5000
    // microsecond lock timeout so contention with the sibling thread is
    // resolved by timeout/deadlock detection rather than blocking forever.
    // Configure and begin a transaction.
    TransactionConfig txnConfig = new TransactionConfig();
    txnConfig.LockTimeout = 5000;
    txnConfig.Name = "DeadLockThreadWithLockTimeOut";
    Transaction txn =
        testBeginTransactionEnv.BeginTransaction(txnConfig, null);
    try
    {
        // NOTE(review): this first Put passes no txn (auto-commit path);
        // confirm that is intended rather than a missing txn argument.
        testBeginTransactionDB.Put(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
        // Block until the test thread releases both workers at once.
        signal.WaitOne();
        testBeginTransactionDB.Put(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("newkey")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("newdata")),
            txn);
        txn.Commit();
    }
    catch (DeadlockException)
    {
        // Deadlock victim: abort to release locks; a failed abort is a
        // test failure.
        try
        {
            txn.Abort();
        }
        catch (DatabaseException)
        {
            throw new TestException();
        }
    }
    catch (DatabaseException)
    {
        // Other database errors (e.g. lock timeout) also abort.
        try
        {
            txn.Abort();
        }
        catch (DatabaseException)
        {
            throw new TestException();
        }
    }
}
public void DeadLockThreadWithTxnTimeout()
{
    // Worker for TestBeginTransaction: same flow as the lock-timeout
    // worker but bounded by a transaction timeout instead.
    // Configure and begin a transaction.
    TransactionConfig txnConfig = new TransactionConfig();
    txnConfig.TxnTimeout = 5000;
    txnConfig.Name = "DeadLockThreadWithTxnTimeout";
    Transaction txn =
        testBeginTransactionEnv.BeginTransaction(txnConfig, null);
    try
    {
        // NOTE(review): this first Put passes no txn (auto-commit path);
        // confirm that is intended rather than a missing txn argument.
        testBeginTransactionDB.Put(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
        // Block until the test thread releases both workers at once.
        signal.WaitOne();
        testBeginTransactionDB.Put(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("newkey")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("newdata")),
            txn);
        txn.Commit();
    }
    catch (DeadlockException)
    {
        // Deadlock victim: abort to release locks; a failed abort is a
        // test failure.
        try
        {
            txn.Abort();
        }
        catch (DatabaseException)
        {
            throw new TestException();
        }
    }
    catch (DatabaseException)
    {
        // Other database errors (e.g. txn timeout) also abort.
        try
        {
            txn.Abort();
        }
        catch (DatabaseException)
        {
            throw new TestException();
        }
    }
}
[Test]
public void TestCheckpoint()
{
    // Runs a record-writing thread concurrently with a checkpointing
    // thread against a free-threaded environment/database pair.
    testName = "TestCheckpoint";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open an environment.
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseTxns = true;
    cfg.UseMPool = true;
    cfg.UseLogging = true;
    cfg.UseLocking = true;
    cfg.NoLocking = false;
    cfg.FreeThreaded = true;
    testCheckpointEnv = DatabaseEnvironment.Open(testHome, cfg);
    // Open btree database.
    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.AutoCommit = true;
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = testCheckpointEnv;
    dbConfig.FreeThreaded = true;
    testCheckpointDB = BTreeDatabase.Open(testName + ".db", dbConfig);
    // Run a thread to put records into database.
    Thread thread1 = new Thread(new ThreadStart(PutRecordsThread));
    /*
     * Run a thread to do checkpoint periodically and
     * finally do a checkpoint to flush all in memory pool
     * to log files.
     */
    Thread thread2 = new Thread(new ThreadStart(CheckpointThread));
    thread1.Start();
    thread2.Start();
    thread1.Join();
    thread2.Join();
    // Close all.
    testCheckpointDB.Close();
    testCheckpointEnv.Close();
}
public void PutRecordsThread()
{
    // Worker for TestCheckpoint: insert 1000 fixed-size records inside a
    // single transaction so the checkpoint thread has live data to flush.
    Transaction txn = testCheckpointEnv.BeginTransaction();
    byte[] payload = new byte[1024];
    for (int recNo = 0; recNo < 1000; recNo++)
    {
        testCheckpointDB.Put(
            new DatabaseEntry(BitConverter.GetBytes(recNo)),
            new DatabaseEntry(payload), txn);
    }
    txn.Commit();
}
public void CheckpointThread()
{
    // Worker for TestCheckpoint: issue two size/time-constrained
    // checkpoints, then an unconditional one after a short pause to
    // flush whatever is left in the memory pool.
    uint kbytes = 64;
    uint minutes = 1;
    for (uint pass = 1; pass < 3; pass++)
    {
        testCheckpointEnv.Checkpoint(kbytes, minutes);
    }
    Thread.Sleep(500);
    testCheckpointEnv.Checkpoint();
}
[Test]
public void TestClose()
{
    // Smoke test: a default-configured environment can be opened and
    // closed cleanly.
    testName = "TestClose";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig envCfg =
        new DatabaseEnvironmentConfig();
    envCfg.Create = true;
    DatabaseEnvironment environment = DatabaseEnvironment.Open(
        testHome, envCfg);
    environment.Close();
}
[Test]
public void TestConfigAll()
{
    // Opens an environment with every property, field and subsystem
    // configured (via the Config helper), then confirms and prints the
    // resulting state.
    testName = "TestConfigAll";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    XmlElement xmlElem = Configuration.TestSetUp(
        testFixtureName, testName);
    /*
     * Open a new environment with all properties,
     * fields and subsystems configured.
     */
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    Config(xmlElem, ref envConfig, true, true, true,
        true, true, true);
    // Configure replication tunables with methods.  (An unused
    // ReplicationHostAddress local was removed here.)
    envConfig.RepSystemCfg.Clockskew(102, 100);
    envConfig.RepSystemCfg.RetransmissionRequest(10, 100);
    envConfig.RepSystemCfg.TransmitLimit(1, 1024);
    // Open the environment.
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    // Confirm environment status with its configuration.
    Confirm(xmlElem, env, true, true, true, true, true, true);
    // Print statistics of the current environment.
    env.PrintStats(true, true);
    // Print statistics of all subsystems.
    env.PrintSubsystemStats(true, true);
    env.Close();
}
[Test]
public void TestDeadlockPolicy()
{
    // Exercise every deadlock-resolution policy, each in its own
    // environment home so the runs stay independent.
    testName = "TestDeadlockPolicy";
    testHome = testFixtureHome + "/" + testName;
    DetectDeadlockPolicy(testHome + "_DEFAULT", DeadlockPolicy.DEFAULT);
    DetectDeadlockPolicy(testHome + "_EXPIRE", DeadlockPolicy.EXPIRE);
    DetectDeadlockPolicy(testHome + "_MAX_LOCKS", DeadlockPolicy.MAX_LOCKS);
    DetectDeadlockPolicy(testHome + "_MAX_WRITE", DeadlockPolicy.MAX_WRITE);
    DetectDeadlockPolicy(testHome + "_MIN_LOCKS", DeadlockPolicy.MIN_LOCKS);
    DetectDeadlockPolicy(testHome + "_MIN_WRITE", DeadlockPolicy.MIN_WRITE);
    DetectDeadlockPolicy(testHome + "_OLDEST", DeadlockPolicy.OLDEST);
    DetectDeadlockPolicy(testHome + "_RANDOM", DeadlockPolicy.RANDOM);
    DetectDeadlockPolicy(testHome + "_YOUNGEST", DeadlockPolicy.YOUNGEST);
}
public void DetectDeadlockPolicy(
    string home, DeadlockPolicy deadlock)
{
    // Open a transactional environment in the given home, set the
    // requested deadlock policy, confirm it round-trips through the
    // property, and run the detector once with that policy.
    Configuration.ClearDir(home);
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseLocking = true;
    cfg.UseLogging = true;
    cfg.UseMPool = true;
    cfg.UseTxns = true;
    DatabaseEnvironment environment = DatabaseEnvironment.Open(
        home, cfg);
    environment.DeadlockResolution = deadlock;
    Assert.AreEqual(deadlock, environment.DeadlockResolution);
    environment.DetectDeadlocks(deadlock);
    environment.Close();
}
[Test]
public void TestDetectDeadlocks()
{
    // Two worker threads read the same record, then both try to write;
    // the resulting lock cycle must be reported by DetectDeadlocks.
    testName = "TestDetectDeadlocks";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open an environment.
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseTxns = true;
    cfg.UseMPool = true;
    cfg.UseLogging = true;
    cfg.UseLocking = true;
    cfg.FreeThreaded = true;
    testDetectDeadlocksEnv = DatabaseEnvironment.Open(
        testHome, cfg);
    // Open btree database.
    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.AutoCommit = true;
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = testDetectDeadlocksEnv;
    dbConfig.Duplicates = DuplicatesPolicy.NONE;
    dbConfig.FreeThreaded = true;
    testDetectDeadlocksDB = BTreeDatabase.Open(
        testName + ".db", dbConfig);
    // Put one record("key", "data") into database.
    testDetectDeadlocksDB.Put(
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
    // Begin two threads to read and write record.
    Thread thread1 = new Thread(new ThreadStart(ReadAndPutRecordThread));
    Thread thread2 = new Thread(new ThreadStart(ReadAndPutRecordThread));
    signal = new EventWaitHandle(false, EventResetMode.ManualReset);
    thread1.Start();
    thread2.Start();
    // Give enough time for threads to read record.
    Thread.Sleep(1000);
    /*
     * Let the two threads apply for write lock
     * synchronously.
     */
    signal.Set();
    // Confirm that there is deadlock in the environment.
    Thread.Sleep(1000);
    uint deadlockNum = testDetectDeadlocksEnv.DetectDeadlocks(
        DeadlockPolicy.DEFAULT);
    Assert.Less(0, deadlockNum);
    thread1.Join();
    thread2.Join();
    // Close all.
    testDetectDeadlocksDB.Close(false);
    testDetectDeadlocksEnv.Close();
}
public void ReadAndPutRecordThread()
{
    // Worker for TestDetectDeadlocks: take a read lock on the shared
    // record, wait for the release signal, then attempt a write so the
    // two workers' lock requests form a cycle.
    Transaction txn =
        testDetectDeadlocksEnv.BeginTransaction();
    try
    {
        testDetectDeadlocksDB.GetBoth(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")), txn);
        signal.WaitOne();
        testDetectDeadlocksDB.Put(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("newKey")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("newData")),
            txn);
        txn.Commit();
    }
    catch (DeadlockException)
    {
        // Expected for the losing thread; abort to release its locks.
        txn.Abort();
    }
}
[Test]
public void TestFailCheck()
{
    // Configures thread tracking (is-alive and set-thread-id callbacks),
    // runs a worker thread to completion, then calls FailCheck to scan
    // for threads that died inside the environment.
    testName = "TestFailCheck";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseTxns = true;
    cfg.UseMPool = true;
    cfg.UseLogging = true;
    cfg.UseLocking = true;
    cfg.FreeThreaded = true;
    cfg.ThreadIsAlive = new ThreadIsAliveDelegate(ThrdAlive);
    cfg.SetThreadID = new SetThreadIDDelegate(SetThrdID);
    cfg.ThreadCount = 10;
    testFailCheckEnv = DatabaseEnvironment.Open(testHome, cfg);
    Thread thread = new Thread(new ThreadStart(WriteThreadWithoutTxnCommit));
    thread.Start();
    thread.Join();
    testFailCheckEnv.FailCheck();
    testFailCheckEnv.Close();
}
public void WriteThreadWithoutTxnCommit()
{
    // Worker for TestFailCheck: opens a database inside a transaction on
    // a separate thread.  NOTE(review): despite the method name, the
    // transaction IS committed — confirm the name is just historical.
    Transaction txn = testFailCheckEnv.BeginTransaction();
    BTreeDatabaseConfig cfg = new BTreeDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.Env = testFailCheckEnv;
    BTreeDatabase db = BTreeDatabase.Open("TestFailCheck.db", cfg, txn);
    db.Close();
    txn.Commit();
}
public bool ThrdAlive(DbThreadID info, bool procOnly)
{
    // Thread-is-alive callback for the environment: reports whether the
    // process (and, unless procOnly, the specific thread) identified by
    // info is still running.
    Process proc = Process.GetProcessById(info.processID);
    if (proc.HasExited)
        return false;
    if (procOnly)
        return true;
    foreach (ProcessThread procThread in proc.Threads)
    {
        if (procThread.Id != info.threadID)
            continue;
        /*
         * We have to use the fully qualified name, ThreadState
         * defaults to System.Threading.ThreadState.
         */
        return procThread.ThreadState !=
            System.Diagnostics.ThreadState.Terminated;
    }
    // If we can't find the thread, we say it's not alive.
    return false;
}
public DbThreadID SetThrdID()
{
    // Set-thread-id callback: builds the process/thread identity pair the
    // environment uses for thread tracking (FailCheck).
    DbThreadID threadID;
    int pid = Process.GetCurrentProcess().Id;
    // NOTE(review): AppDomain.GetCurrentThreadId is deprecated; it returns
    // an unmanaged thread id, which pairs with ProcessThread.Id as used by
    // ThrdAlive — confirm before replacing it with
    // Thread.CurrentThread.ManagedThreadId (different id space).
    uint tid = (uint)AppDomain.GetCurrentThreadId();
    threadID = new DbThreadID(pid, tid);
    return threadID;
}
[Test]
public void TestFeedback()
{
    // Installs a feedback delegate and then invokes it directly through
    // the Feedback property; the callback asserts the arguments.
    testName = "TestFeedback";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open the environment.
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.AutoCommit = true;
    cfg.UseLocking = true;
    cfg.UseLogging = true;
    cfg.UseMPool = true;
    cfg.UseTxns = true;
    cfg.Create = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, cfg);
    env.Feedback = new EnvironmentFeedbackDelegate(
        EnvRecovery10PercentFeedback);
    // Directly invoke the delegate just assigned; this only checks the
    // wiring, not that BDB itself raises feedback events.
    env.Feedback(EnvironmentFeedbackEvent.RECOVERY, 10);
    env.Close();
}
public void EnvRecovery10PercentFeedback(
    EnvironmentFeedbackEvent opcode, int percent)
{
    // Feedback callback wired up by TestFeedback: verifies the reported
    // event type and completion percentage.  (NUnit's AreEqual takes the
    // expected value first; the original had opcode/expected reversed,
    // which only garbled the failure message.)
    Assert.AreEqual(EnvironmentFeedbackEvent.RECOVERY, opcode);
    Assert.AreEqual(10, percent);
}
[Test]
public void TestMutexSystemStats()
{
    // Configures the mutex subsystem, confirms the reported statistics
    // reflect that configuration, then checks counters move sensibly
    // across a workload and after a stats reset.
    testName = "TestMutexSystemStats";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseLogging = true;
    cfg.UseLocking = true;
    cfg.UseMPool = true;
    cfg.UseTxns = true;
    cfg.MutexSystemCfg = new MutexConfig();
    cfg.MutexSystemCfg.Alignment = 512;
    cfg.MutexSystemCfg.Increment = 128;
    cfg.MutexSystemCfg.MaxMutexes = 150;
    cfg.MutexSystemCfg.NumTestAndSetSpins = 10;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, cfg);
    MutexStats stats = env.MutexSystemStats();
    env.PrintMutexSystemStats(true, true);
    // Configured values must round-trip; derived counters must be
    // internally consistent.
    Assert.AreEqual(512, stats.Alignment);
    Assert.AreEqual(stats.Count, stats.Available + stats.InUse);
    Assert.LessOrEqual(stats.InUse, stats.MaxInUse);
    Assert.AreNotEqual(0, stats.RegionSize);
    Assert.AreEqual(0, stats.RegionWait);
    Assert.AreEqual(10, stats.TASSpins);
    ulong regionNoWait = stats.RegionNoWait;
    BTreeDatabaseConfig dbCfg = new BTreeDatabaseConfig();
    dbCfg.Creation = CreatePolicy.IF_NEEDED;
    dbCfg.Env = env;
    BTreeDatabase db = BTreeDatabase.Open(testName + ".db", dbCfg);
    // Drive work through the cache so the region counters move.
    for (int i = 0; i < 1000; i++)
    {
        db.Put(new DatabaseEntry(BitConverter.GetBytes(i)),
            new DatabaseEntry(BitConverter.GetBytes(i)));
        stats = env.MutexSystemStats();
    }
    Assert.LessOrEqual(regionNoWait, stats.RegionNoWait);
    regionNoWait = stats.RegionNoWait;
    // Fetch-and-clear, then confirm the counter did not exceed the
    // pre-reset value.
    stats = env.MutexSystemStats(true);
    env.PrintMutexSystemStats();
    stats = env.MutexSystemStats();
    Assert.GreaterOrEqual(regionNoWait, stats.RegionNoWait);
    db.Close();
    env.Close();
}
[Test]
public void TestLogFile()
{
    // Exercises the logging subsystem end to end: full LogConfig setup,
    // mapping a transaction's begin-LSN to a log file (LogFile/LogFlush),
    // WriteToLog, LogWrite and removal of unused log files.
    testName = "TestLogFile";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open environment and configure logging subsystem.
    DatabaseEnvironmentConfig cfg =
        new DatabaseEnvironmentConfig();
    cfg.Create = true;
    cfg.UseTxns = true;
    cfg.AutoCommit = true;
    cfg.UseLocking = true;
    cfg.UseMPool = true;
    cfg.UseLogging = true;
    cfg.MPoolSystemCfg = new MPoolConfig();
    cfg.MPoolSystemCfg.CacheSize =
        new CacheInfo(0, 1048576, 1);
    cfg.LogSystemCfg = new LogConfig();
    cfg.LogSystemCfg.AutoRemove = false;
    cfg.LogSystemCfg.BufferSize = 10240;
    cfg.LogSystemCfg.Dir = "./";
    cfg.LogSystemCfg.FileMode = 755;
    cfg.LogSystemCfg.ForceSync = true;
    cfg.LogSystemCfg.InMemory = false;
    cfg.LogSystemCfg.MaxFileSize = 1048576;
    cfg.LogSystemCfg.NoBuffer = false;
    cfg.LogSystemCfg.RegionSize = 204800;
    cfg.LogSystemCfg.ZeroOnCreate = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, cfg);
    // Open database.
    Transaction allTxn = env.BeginTransaction();
    TransactionConfig txnConfig = new TransactionConfig();
    txnConfig.Name = "OpenTransaction";
    Transaction openTxn = env.BeginTransaction(txnConfig, allTxn);
    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    BTreeDatabase db = BTreeDatabase.Open(
        testName + ".db", dbConfig, openTxn);
    // Find the named transaction and map its begin-LSN to a log file.
    List<ActiveTransaction> activeTxns =
        env.TransactionSystemStats().Transactions;
    for (int i = 0; i < activeTxns.Count; i++)
        if (activeTxns[i].Name == "OpenTransaction")
        {
            LSN lsn = new LSN(
                activeTxns[i].Begun.LogFileNumber,
                activeTxns[i].Begun.Offset);
            env.LogFlush(lsn);
            string fileName = env.LogFile(lsn);
        }
    openTxn.Commit();
    // Write "##" to log before putting data into database.
    env.WriteToLog("##");
    // Write 1000 records into database.
    TransactionConfig writeTxnConfig = new TransactionConfig();
    writeTxnConfig.Name = "WriteTxn";
    Transaction writeTxn = env.BeginTransaction(writeTxnConfig, allTxn);
    byte[] byteArr = new byte[1024];
    for (int i = 0; i < 1000; i++)
    {
        db.Put(new DatabaseEntry(BitConverter.GetBytes(i)),
            new DatabaseEntry(byteArr), writeTxn);
        env.LogFlush();
        env.WriteToLog("#" + i.ToString(), writeTxn);
    }
    // Repeat the LSN-to-file lookup for the still-active write txn.
    activeTxns = env.TransactionSystemStats().Transactions;
    for (int i = 0; i < activeTxns.Count; i++)
        if (activeTxns[i].Name == "WriteTxn")
        {
            LSN lsn = new LSN(
                activeTxns[i].Begun.LogFileNumber,
                activeTxns[i].Begun.Offset);
            env.LogFlush(lsn);
            string fileName = env.LogFile(lsn);
        }
    writeTxn.Commit();
    db.Close();
    // Write "##" after data has been put.
    env.WriteToLog("##");
    List<string> logFiles = env.LogFiles(true);
    env.LogWrite(new DatabaseEntry(), true);
    env.RemoveUnusedLogFiles();
    allTxn.Commit();
    env.Close();
}
[Test]
public void TestOpen()
{
    // A default-configured environment opens, reports its home, and can
    // print statistics without error.
    testName = "TestOpen";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig envCfg =
        new DatabaseEnvironmentConfig();
    envCfg.Create = true;
    DatabaseEnvironment environment = DatabaseEnvironment.Open(testHome, envCfg);
    // Confirm that the environment is initialized.
    Assert.IsNotNull(environment);
    // Confirm the environment home directory.
    Assert.AreEqual(testHome, environment.Home);
    // Print statistics of the current environment.
    environment.PrintStats();
    // Print statistics of all subsystems.
    environment.PrintSubsystemStats();
    environment.Close();
}
[Test]
public void TestMPoolSystemStats()
{
    // Checks memory-pool statistics on a fresh environment (mostly-zero
    // counters), then refetches them after a workload large enough to
    // overflow the configured 3x1MB cache.
    testName = "TestMPoolSystemStats";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.AutoCommit = true;
    envConfig.MPoolSystemCfg = new MPoolConfig();
    envConfig.MPoolSystemCfg.CacheSize =
        new CacheInfo(0, 1048576, 3);
    envConfig.Create = true;
    envConfig.UseLocking = true;
    envConfig.UseLogging = true;
    envConfig.UseMPool = true;
    envConfig.UseTxns = true;
    // NOTE(review): UseLogging is assigned twice; harmless duplicate.
    envConfig.UseLogging = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    MPoolStats stats = env.MPoolSystemStats();
    env.PrintMPoolSystemStats();
    // A freshly opened pool: configured sizes round-trip, activity
    // counters all start at zero.
    Assert.AreEqual(0, stats.BlockedOperations);
    Assert.AreEqual(0, stats.BucketsCheckedDuringAlloc);
    Assert.AreEqual(3, stats.CacheRegions);
    Assert.LessOrEqual(1048576, stats.CacheSettings.Bytes);
    Assert.AreEqual(0, stats.CacheSettings.Gigabytes);
    Assert.AreEqual(3, stats.CacheSettings.NCaches);
    Assert.AreEqual(0, stats.CleanPages);
    Assert.AreEqual(0, stats.CleanPagesEvicted);
    Assert.AreEqual(0, stats.DirtyPages);
    Assert.AreEqual(0, stats.DirtyPagesEvicted);
    Assert.IsNotNull(stats.Files);
    Assert.AreEqual(0, stats.FrozenBuffers);
    Assert.AreEqual(0, stats.FrozenBuffersFreed);
    Assert.LessOrEqual(37, stats.HashBuckets);
    Assert.LessOrEqual(0, stats.HashChainSearches);
    Assert.AreEqual(0, stats.HashEntriesSearched);
    Assert.AreEqual(0, stats.HashLockNoWait);
    Assert.AreEqual(0, stats.HashLockWait);
    Assert.AreEqual(0, stats.LongestHashChainSearch);
    Assert.AreEqual(0, stats.MappedPages);
    Assert.AreEqual(0, stats.MaxBucketsCheckedDuringAlloc);
    Assert.AreEqual(0, stats.MaxBufferWrites);
    Assert.AreEqual(0, stats.MaxBufferWritesSleep);
    Assert.AreEqual(0, stats.MaxHashLockNoWait);
    Assert.AreEqual(0, stats.MaxHashLockWait);
    Assert.AreEqual(0, stats.MaxMMapSize);
    Assert.AreEqual(0, stats.MaxOpenFileDescriptors);
    Assert.AreEqual(0, stats.MaxPagesCheckedDuringAlloc);
    Assert.AreEqual(0, stats.PageAllocations);
    Assert.AreEqual(0, stats.Pages);
    Assert.AreEqual(0, stats.PagesCheckedDuringAlloc);
    Assert.LessOrEqual(0, stats.PagesCreatedInCache);
    Assert.AreEqual(0, stats.PagesInCache);
    Assert.AreEqual(0, stats.PagesNotInCache);
    Assert.AreEqual(0, stats.PagesRead);
    Assert.AreEqual(0, stats.PagesTrickled);
    Assert.AreEqual(0, stats.PagesWritten);
    Assert.AreNotEqual(0, stats.RegionLockNoWait);
    Assert.AreEqual(0, stats.RegionLockWait);
    Assert.LessOrEqual(0, stats.RegionSize);
    Assert.AreEqual(0, stats.ThawedBuffers);
    // Push oversized records through the pool so counters move.
    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    dbConfig.PageSize = 4096;
    BTreeDatabase db = BTreeDatabase.Open(
        testName + ".db", dbConfig);
    byte[] largeByte = new byte[1088576];
    for (int i = 0; i < 10; i++)
        db.Put(new DatabaseEntry(BitConverter.GetBytes(i)),
            new DatabaseEntry(largeByte));
    db.Put(new DatabaseEntry(largeByte), new DatabaseEntry(largeByte));
    db.Close();
    // Clean the stats after printing.
    stats = env.MPoolSystemStats(true);
    env.PrintMPoolSystemStats(true, true);
    stats = env.MPoolSystemStats();
    env.PrintMPoolSystemStats(true, true, true);
    env.Close();
}
[Test]
public void TestRemove()
{
    // Open then close an environment, remove it, and verify its primary
    // region file (__db.001) is gone from the home directory.
    testName = "TestRemove";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open new environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.ErrorPrefix = testFixtureName + ":" + testName;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    env.Close();
    // Remove the existing environment.
    DatabaseEnvironment.Remove(testHome);
    /*
     * Confirm that the __db.001 region file is removed.  The original
     * check concatenated "testHome" and "__db.001" without a path
     * separator, so it tested a non-existent sibling path and passed
     * vacuously.
     */
    Assert.IsFalse(File.Exists(Path.Combine(testHome, "__db.001")));
}
[Test]
public void TestRemoveCorruptedEnv()
{
    // Panic an open environment, force-remove it, and verify its primary
    // region file (__db.001) is gone from the home directory.
    testName = "TestRemoveCorruptedEnv";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    // Open new environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.ErrorPrefix = testFixtureName + ":" + testName;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, envConfig);
    // Panic the environment.
    env.Panic();
    // Remove the corrupted environment (force = true).
    DatabaseEnvironment.Remove(testHome, true);
    /*
     * Confirm that the __db.001 region file is removed.  The original
     * check concatenated the path without a separator and so passed
     * vacuously; use Path.Combine to test the real file.
     */
    Assert.IsFalse(File.Exists(Path.Combine(testHome, "__db.001")));
}
[Test, ExpectedException(typeof(ExpectedTestException))]
public void TestRenameDB()
{
    // Rename using the auto-commit form (no explicit transaction); the
    // helper throws ExpectedTestException when the old name is gone.
    testName = "TestRenameDB";
    testHome = testFixtureHome + "/" + testName;
    const bool useTxn = false;
    RenameDB(testHome, testName, useTxn);
}
[Test, ExpectedException(typeof(ExpectedTestException))]
public void TestRenameDBWithTxn()
{
    // Rename inside an explicit transaction; the helper throws
    // ExpectedTestException when the old name is gone.
    testName = "TestRenameDBWithTxn";
    testHome = testFixtureHome + "/" + testName;
    const bool useTxn = true;
    RenameDB(testHome, testName, useTxn);
}
public void RenameDB(string home, string name, bool ifTxn)
{
    // Creates a sub-database, renames it (with or without an explicit
    // transaction, per ifTxn), then tries to open the file by its old
    // configuration; callers expect that to fail and surface as
    // ExpectedTestException.
    string dbFileName = name + ".db";
    string dbName = "db1";
    string dbNewName = "db2";
    Configuration.ClearDir(home);
    DatabaseEnvironmentConfig envConig =
        new DatabaseEnvironmentConfig();
    envConig.Create = true;
    envConig.UseTxns = true;
    envConig.UseLogging = true;
    envConig.UseMPool = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        home, envConig);
    Transaction openTxn = env.BeginTransaction();
    BTreeDatabaseConfig dbConfig =
        new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    BTreeDatabase db = BTreeDatabase.Open(
        dbFileName, dbName, dbConfig, openTxn);
    db.Close();
    openTxn.Commit();
    // Rename the database (auto-commit or explicit txn form).
    if (ifTxn == false)
        env.RenameDB(dbFileName, dbName, dbNewName, true);
    else
    {
        Transaction renameTxn = env.BeginTransaction();
        env.RenameDB(dbFileName, dbName, dbNewName, false, renameTxn);
        renameTxn.Commit();
    }
    // Confirm that the database was renamed.
    Transaction reopenTxn = env.BeginTransaction();
    try
    {
        // NOTE(review): this open uses a fresh DatabaseConfig with no
        // Env set, so dbFileName resolves outside the environment home —
        // confirm the expected failure comes from the rename and not
        // simply from the path.
        Database db1 = Database.Open(
            dbFileName, new DatabaseConfig());
        db1.Close();
    }
    catch (DatabaseException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        reopenTxn.Commit();
        env.Close();
    }
}
[Test]
public void TestResetFileID()
{
    // Copy a database file, reset the copy's internal file id, and make
    // sure the copy can then be opened in the same environment.
    testName = "TestResetFileID";
    testHome = testFixtureHome + "/" + testName;
    string dbFileName = testName + ".db";
    string dbNewFileName = testName + "_new.db";
    Configuration.ClearDir(testHome);
    DatabaseEnvironmentConfig envCfg =
        new DatabaseEnvironmentConfig();
    envCfg.Create = true;
    envCfg.UseMPool = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envCfg);
    // Create a new database file.
    BTreeDatabaseConfig createCfg =
        new BTreeDatabaseConfig();
    createCfg.Creation = CreatePolicy.IF_NEEDED;
    createCfg.Env = env;
    BTreeDatabase db = BTreeDatabase.Open(
        dbFileName, createCfg);
    db.Close();
    // Copy the physical database file.
    File.Copy(testHome + "/" + dbFileName,
        testHome + "/" + dbNewFileName);
    // Reset the file ID on the copy.
    env.ResetFileID(dbNewFileName, false);
    // Open the existing database in the copied database file.
    BTreeDatabaseConfig openCfg = new BTreeDatabaseConfig();
    openCfg.Creation = CreatePolicy.NEVER;
    openCfg.Env = env;
    BTreeDatabase newDB = BTreeDatabase.Open(
        dbNewFileName, openCfg);
    newDB.Close();
    env.Close();
}
[Test, ExpectedException(typeof(ExpectedTestException))]
public void TestRemoveDB()
{
    // Remove a sub-database without auto-commit; the helper throws
    // ExpectedTestException when reopening the removed database fails.
    testName = "TestRemoveDB";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    const bool autoCommit = false;
    RmDBWithoutTxn(testHome, testName, autoCommit);
}
[Test, ExpectedException(typeof(ExpectedTestException))]
public void TestRemoveDBWithAutoCommit()
{
    // Remove a sub-database with auto-commit enabled; the helper throws
    // ExpectedTestException when reopening the removed database fails.
    testName = "TestRemoveDBWithAutoCommit";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);
    const bool autoCommit = true;
    RmDBWithoutTxn(testHome, testName, autoCommit);
}
[Test, ExpectedException(typeof(ExpectedTestException))]
public void TestRemoveDBWithinTxn()
{
    // Removes one of two sub-databases inside a transaction; the
    // surviving one must still open, the removed one must fail, which
    // surfaces as the expected ExpectedTestException.
    testName = "TestRemoveDBWithinTxn";
    testHome = testFixtureHome + "/" + testName;
    string dbFileName = testName + ".db";
    string dbName1 = testName + "1";
    string dbName2 = testName + "2";
    Configuration.ClearDir(testHome);
    // Open environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.UseMPool = true;
    envConfig.UseTxns = true;
    envConfig.UseLogging = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    Transaction txn = env.BeginTransaction();
    // Create two databases in the environment.
    BTreeDatabaseConfig dbConfig =
        new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    BTreeDatabase btreeDB1 = BTreeDatabase.Open(
        dbFileName, dbName1, dbConfig, txn);
    btreeDB1.Close();
    BTreeDatabase btreeDB2 = BTreeDatabase.Open(
        dbFileName, dbName2, dbConfig, txn);
    btreeDB2.Close();
    // Remove one database from the environment.
    env.RemoveDB(dbFileName, dbName2, false, txn);
    // Try to open the existing database.
    DatabaseConfig cfg = new DatabaseConfig();
    cfg.Env = env;
    Database db1 = Database.Open(dbFileName, dbName1, cfg, txn);
    db1.Close();
    /*
     * Attempting to open the removed database should
     * cause an error.
     */
    try
    {
        Database db2 = Database.Open(
            dbFileName, dbName2, cfg, txn);
        db2.Close();
    }
    catch (DatabaseException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        txn.Commit();
        env.Close();
    }
}
public void RmDBWithoutTxn(string home, string dbName,
    bool ifAutoCommit)
{
    // Helper for the RemoveDB tests: creates two sub-databases (with or
    // without auto-commit per ifAutoCommit), removes the second, checks
    // the first still opens, and converts the expected open failure of
    // the removed one into ExpectedTestException.
    string dbFileName = dbName + ".db";
    string dbName1 = dbName + "1";
    string dbName2 = dbName + "2";
    // Open environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.UseMPool = true;
    if (ifAutoCommit == true)
    {
        envConfig.AutoCommit = true;
        envConfig.UseTxns = true;
    }
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        home, envConfig);
    // Create two databases in the environment.
    BTreeDatabaseConfig dbConfig =
        new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    dbConfig.Env = env;
    BTreeDatabase btreeDB1 = BTreeDatabase.Open(
        dbFileName, dbName1, dbConfig);
    btreeDB1.Close();
    BTreeDatabase btreeDB2 = BTreeDatabase.Open(
        dbFileName, dbName2, dbConfig);
    btreeDB2.Close();
    // Remove one database from the environment.
    env.RemoveDB(dbFileName, dbName2, false);
    // Try to open the existing database.
    DatabaseConfig cfg = new DatabaseConfig();
    cfg.Env = env;
    Database db1 = Database.Open(dbFileName, dbName1, cfg);
    db1.Close();
    /*
     * Attempting to open the removed database should
     * cause an error.
     */
    try
    {
        Database db2 = Database.Open(
            dbFileName, dbName2, cfg);
        db2.Close();
    }
    catch (DatabaseException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        env.Close();
    }
}
[Test]
public void TestTransactionSystemStats()
{
    testName = "TestTransactionSystemStats";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    TransactionStats stats;
    BTreeDatabase db;
    Transaction openTxn = null;
    long dateTime;

    // Open a transactional environment with a known transaction cap
    // so MaxTransactions can be asserted below.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.MaxTransactions = 50;
    envConfig.UseLogging = true;
    envConfig.UseLocking = true;
    envConfig.UseMPool = true;
    envConfig.UseTxns = true;
    envConfig.TxnNoSync = false;
    envConfig.TxnNoWait = true;
    envConfig.TxnSnapshot = true;
    envConfig.TxnTimestamp = DateTime.Now;
    envConfig.TxnWriteNoSync = false;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);
    try
    {
        /*
         * Confirm the initial transaction subsystem statistics.
         * (The original wrapped these asserts in a catch that
         * rethrew with "throw e", destroying the stack trace;
         * letting AssertionException propagate is equivalent.)
         */
        stats = env.TransactionSystemStats();
        env.PrintTransactionSystemStats(true, true);
        Assert.AreEqual(0, stats.Aborted);
        Assert.AreEqual(0, stats.Active);
        Assert.AreEqual(0, stats.Begun);
        Assert.AreEqual(0, stats.Committed);
        Assert.AreEqual(0, stats.LastCheckpoint.LogFileNumber);
        Assert.AreEqual(0, stats.LastCheckpoint.Offset);
        Assert.AreEqual(50, stats.MaxTransactions);
        Assert.AreNotEqual(0, stats.RegionSize);
        Assert.AreEqual(0, stats.Transactions.Count);

        try
        {
            // Begin a transaction called openTxn and open a database
            // within it.
            TransactionConfig openTxnCfg = new TransactionConfig();
            openTxnCfg.Name = "openTxn";
            openTxn = env.BeginTransaction(openTxnCfg);
            openTxn.Priority = 50;
            BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
            dbConfig.Creation = CreatePolicy.IF_NEEDED;
            dbConfig.Env = env;
            db = BTreeDatabase.Open(testName + ".db", dbConfig, openTxn);
        }
        catch (DatabaseException)
        {
            if (openTxn != null)
                openTxn.Abort();
            // Rethrow without resetting the stack trace.
            throw;
        }
        try
        {
            // At least one transaction is now alive.
            env.Checkpoint();
            stats = env.TransactionSystemStats();
            env.PrintTransactionSystemStats();
            Assert.AreNotEqual(0, stats.Active);
            Assert.AreNotEqual(0, stats.Transactions.Count);
            Assert.AreNotEqual(0, stats.Transactions.Capacity);
            Assert.AreNotEqual(0, stats.RegionLockNoWait);
            dateTime = stats.LastCheckpointTime;

            // Begin a nested (child) transaction called putTxn.
            TransactionConfig putTxnCfg =
                new TransactionConfig();
            putTxnCfg.Name = "putTxn";
            putTxnCfg.NoWait = false;
            Transaction putTxn = env.BeginTransaction(
                putTxnCfg, openTxn);
            putTxn.Priority = 50;
            try
            {
                // Put some records into the database within putTxn.
                for (int i = 0; i < 50; i++)
                    db.Put(new DatabaseEntry(BitConverter.GetBytes(i)),
                        new DatabaseEntry(BitConverter.GetBytes(i)), putTxn);

                stats = env.TransactionSystemStats();
                Assert.AreNotEqual(0, stats.MaxActive);
                Assert.AreNotEqual(0, stats.MaxTransactions);
                Assert.AreEqual(0, stats.MaxSnapshot);
                Assert.AreEqual(0, stats.Snapshot);
                Assert.AreEqual(stats.Begun,
                    stats.Aborted + stats.Active + stats.Committed);
                Assert.AreEqual(2, stats.Transactions.Count);

                /*
                 * Neither LogFileNumber nor Offset of the active
                 * transactions may all be 0.
                 */
                uint logFileNumbers = 0;
                uint offSets = 0;
                for (int i = 0; i < stats.Transactions.Count; i++)
                {
                    logFileNumbers += stats.Transactions[i].Begun.LogFileNumber;
                    offSets += stats.Transactions[i].Begun.Offset;
                }
                Assert.AreNotEqual(0, logFileNumbers);
                Assert.AreNotEqual(0, offSets);

                // All active transactions are run by the same process and thread.
                Assert.AreEqual(stats.Transactions[0].ThreadID,
                    stats.Transactions[1].ThreadID);
                Assert.AreEqual(stats.Transactions[0].ProcessID,
                    stats.Transactions[1].ProcessID);

                // All transactions are alive.
                Assert.AreEqual(ActiveTransaction.TransactionStatus.RUNNING,
                    stats.Transactions[0].Status);
                Assert.AreEqual(ActiveTransaction.TransactionStatus.RUNNING,
                    stats.Transactions[1].Status);
                Assert.AreEqual(50, stats.Transactions[0].Priority);
                Assert.AreEqual(50, stats.Transactions[1].Priority);

                /*
                 * Find the openTxn in active transactions, which is the
                 * parent transaction of putTxn.
                 */
                int parentPos = 0;
                if (stats.Transactions[0].Name == "putTxn")
                    parentPos = 1;

                // putTxn's parent id should be the openTxn.
                Assert.AreEqual(stats.Transactions[parentPos].ID,
                    stats.Transactions[1 - parentPos].ParentID);

                // Other stats should be non-negative integers.
                for (int i = 0; i < stats.Transactions.Count - 1; i++)
                {
                    Assert.LessOrEqual(0,
                        stats.Transactions[i].BufferCopiesInCache);
                    Assert.LessOrEqual(0,
                        stats.Transactions[i].SnapshotReads.LogFileNumber);
                    Assert.LessOrEqual(0,
                        stats.Transactions[i].SnapshotReads.Offset);
                    Assert.IsNotNull(stats.Transactions[i].GlobalID);
                }

                // Commit putTxn.
                putTxn.Commit();
            }
            catch (DatabaseException)
            {
                putTxn.Abort();
                throw;
            }
            stats = env.TransactionSystemStats();
            Assert.AreNotEqual(0, stats.LastCheckpoint.LogFileNumber);
            Assert.AreNotEqual(0, stats.LastCheckpoint.Offset);
            Assert.AreEqual(dateTime, stats.LastCheckpointTime);
            openTxn.Commit();
        }
        catch (DatabaseException)
        {
            openTxn.Abort();
            throw;
        }
        finally
        {
            db.Close();
        }
    }
    finally
    {
        env.Close();
    }
}
/*
 * Build up an environment configuration from XML. Only settings that
 * can be applied before the environment is opened are handled here;
 * each boolean flag pulls in the config section for one subsystem.
 */
public void Config(XmlElement xmlElem,
    ref DatabaseEnvironmentConfig envConfig, bool compulsory,
    bool logging, bool locking, bool mutex, bool mpool, bool replication)
{
    // Subsystem-independent settings first.
    DatabaseEnvironmentConfigTest.Config(xmlElem, ref envConfig, compulsory);

    // Logging subsystem.
    if (logging)
    {
        XmlElement logElem =
            (XmlElement)XMLReader.GetNode(xmlElem, "LogConfig");
        envConfig.LogSystemCfg = new LogConfig();
        LogConfigTest.Config(logElem,
            ref envConfig.LogSystemCfg, compulsory);
    }

    // Locking subsystem.
    if (locking)
    {
        XmlElement lockElem =
            (XmlElement)XMLReader.GetNode(xmlElem, "LockingConfig");
        envConfig.LockSystemCfg = new LockingConfig();
        LockingConfigTest.Config(lockElem,
            ref envConfig.LockSystemCfg, compulsory);
    }

    // Mutex subsystem.
    if (mutex)
    {
        XmlElement mutexElem =
            (XmlElement)XMLReader.GetNode(xmlElem, "MutexConfig");
        envConfig.MutexSystemCfg = new MutexConfig();
        MutexConfigTest.Config(mutexElem,
            ref envConfig.MutexSystemCfg, compulsory);
    }

    // Memory pool subsystem.
    if (mpool)
    {
        XmlElement mpoolElem =
            (XmlElement)XMLReader.GetNode(xmlElem, "MPoolConfig");
        envConfig.MPoolSystemCfg = new MPoolConfig();
        MPoolConfigTest.Config(mpoolElem,
            ref envConfig.MPoolSystemCfg, compulsory);
    }

    // Replication.
    if (replication)
    {
        XmlElement repElem =
            (XmlElement)XMLReader.GetNode(xmlElem, "ReplicationConfig");
        envConfig.RepSystemCfg = new ReplicationConfig();
        ReplicationConfigTest.Config(repElem,
            ref envConfig.RepSystemCfg, compulsory);
    }
}
/*
 * Confirm the fields/properties in the environment against the
 * expected values recorded in the XML test data. Values that are
 * applied through setter functions are not checked here. Each
 * subsystem flag enables the checks for that subsystem's section
 * of the XML.
 */
public static void Confirm(XmlElement xmlElement,
    DatabaseEnvironment env, bool compulsory,
    bool logging, bool locking, bool mutex, bool mpool,
    bool replication)
{
    XmlElement childElem;
    CacheInfo cacheInfo = new CacheInfo(0, 0, 0);
    // Confirm environment configuration.
    Configuration.ConfirmBool(xmlElement, "AutoCommit",
        env.AutoCommit, compulsory);
    Configuration.ConfirmBool(xmlElement, "CDB_ALLDB",
        env.CDB_ALLDB, compulsory);
    Configuration.ConfirmBool(xmlElement, "Create",
        env.Create, compulsory);
    Configuration.ConfirmStringList(xmlElement, "DataDirs",
        env.DataDirs, compulsory);
    Configuration.ConfirmString(xmlElement, "ErrorPrefix",
        env.ErrorPrefix, compulsory);
    Configuration.ConfirmBool(xmlElement, "ForceFlush",
        env.ForceFlush, compulsory);
    Configuration.ConfirmBool(xmlElement, "FreeThreaded",
        env.FreeThreaded, compulsory);
    Configuration.ConfirmBool(xmlElement, "InitRegions",
        env.InitRegions, compulsory);
    Configuration.ConfirmString(xmlElement, "IntermediateDirMode",
        env.IntermediateDirMode, compulsory);
    Configuration.ConfirmBool(xmlElement, "Lockdown",
        env.Lockdown, compulsory);
    Configuration.ConfirmUint(xmlElement, "LockTimeout",
        env.LockTimeout, compulsory);
    Configuration.ConfirmUint(xmlElement, "MaxTransactions",
        env.MaxTransactions, compulsory);
    Configuration.ConfirmBool(xmlElement, "NoBuffer",
        env.NoBuffer, compulsory);
    Configuration.ConfirmBool(xmlElement, "NoLocking",
        env.NoLocking, compulsory);
    Configuration.ConfirmBool(xmlElement, "NoMMap",
        env.NoMMap, compulsory);
    Configuration.ConfirmBool(xmlElement, "NoPanic",
        env.NoPanic, compulsory);
    Configuration.ConfirmBool(xmlElement, "Overwrite",
        env.Overwrite, compulsory);
    Configuration.ConfirmBool(xmlElement, "Private",
        env.Private, compulsory);
    Configuration.ConfirmBool(xmlElement, "Register",
        env.Register, compulsory);
    Configuration.ConfirmBool(xmlElement, "RunFatalRecovery",
        env.RunFatalRecovery, compulsory);
    Configuration.ConfirmBool(xmlElement, "RunRecovery",
        env.RunRecovery, compulsory);
    Configuration.ConfirmBool(xmlElement, "SystemMemory",
        env.SystemMemory, compulsory);
    Configuration.ConfirmString(xmlElement, "TempDir",
        env.TempDir, compulsory);
    Configuration.ConfirmBool(xmlElement, "TimeNotGranted",
        env.TimeNotGranted, compulsory);
    Configuration.ConfirmBool(xmlElement, "TxnNoSync",
        env.TxnNoSync, compulsory);
    Configuration.ConfirmBool(xmlElement, "TxnNoWait",
        env.TxnNoWait, compulsory);
    Configuration.ConfirmBool(xmlElement, "TxnSnapshot",
        env.TxnSnapshot, compulsory);
    Configuration.ConfirmDateTime(xmlElement, "TxnTimestamp",
        env.TxnTimestamp, compulsory);
    Configuration.ConfirmBool(xmlElement, "TxnWriteNoSync",
        env.TxnWriteNoSync, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseMVCC",
        env.UseMVCC, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseCDB",
        env.UsingCDB, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseLocking",
        env.UsingLocking, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseLogging",
        env.UsingLogging, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseMPool",
        env.UsingMPool, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseReplication",
        env.UsingReplication, compulsory);
    Configuration.ConfirmBool(xmlElement, "UseTxns",
        env.UsingTxns, compulsory);
    // NOTE(review): this assigns a fresh VerboseMessages to the
    // environment before confirming it, so the confirmed value is the
    // default object rather than whatever the env was opened with —
    // verify this reset is intentional.
    env.Verbosity = new VerboseMessages();
    Configuration.ConfirmVerboseMessages(xmlElement,
        "Verbosity", env.Verbosity, compulsory);
    Configuration.ConfirmBool(xmlElement, "YieldCPU",
        env.YieldCPU, compulsory);

    /*
     * If the locking subsystem is set, check the
     * field/properties set in LockingConfig.
     */
    if (locking == true)
    {
        childElem = (XmlElement)XMLReader.GetNode(
            xmlElement, "LockingConfig");
        Configuration.ConfirmByteMatrix(childElem,
            "Conflicts", env.LockConflictMatrix,
            compulsory);
        Configuration.ConfirmDeadlockPolicy(
            childElem, "DeadlockResolution",
            env.DeadlockResolution, compulsory);
        Configuration.ConfirmUint(childElem,
            "Partitions", env.LockPartitions,
            compulsory);
        Configuration.ConfirmUint(childElem,
            "MaxLockers", env.MaxLockers, compulsory);
        Configuration.ConfirmUint(childElem,
            "MaxLocks", env.MaxLocks, compulsory);
        Configuration.ConfirmUint(childElem,
            "MaxObjects", env.MaxObjects, compulsory);
    }

    /*
     * If the logging subsystem is set, check the
     * field/properties set in LogConfig.
     */
    if (logging == true)
    {
        childElem = (XmlElement)XMLReader.GetNode(
            xmlElement, "LogConfig");
        Configuration.ConfirmBool(childElem,
            "AutoRemove", env.LogAutoRemove,
            compulsory);
        Configuration.ConfirmUint(childElem,
            "BufferSize", env.LogBufferSize,
            compulsory);
        Configuration.ConfirmString(childElem,
            "Dir", env.LogDir, compulsory);
        Configuration.ConfirmInt(childElem,
            "FileMode", env.LogFileMode, compulsory);
        Configuration.ConfirmBool(childElem,
            "ForceSync", env.LogForceSync, compulsory);
        Configuration.ConfirmBool(childElem,
            "InMemory", env.LogInMemory, compulsory);
        Configuration.ConfirmBool(childElem,
            "NoBuffer", env.LogNoBuffer, compulsory);
        Configuration.ConfirmUint(childElem,
            "RegionSize", env.LogRegionSize,
            compulsory);
        Configuration.ConfirmBool(childElem,
            "ZeroOnCreate", env.LogZeroOnCreate,
            compulsory);
        Configuration.ConfirmUint(childElem,
            "MaxFileSize", env.MaxLogFileSize,
            compulsory);
    }

    /*
     * If the mutex subsystem is set, check the
     * field/properties set in MutexConfig.
     */
    if (mutex == true)
    {
        childElem = (XmlElement)XMLReader.GetNode(
            xmlElement, "MutexConfig");
        Configuration.ConfirmUint(childElem,
            "Alignment", env.MutexAlignment,
            compulsory);
        Configuration.ConfirmUint(childElem,
            "MaxMutexes", env.MaxMutexes, compulsory);
        // MutexIncrement may legitimately report 0 when the
        // increment was consumed at open time, so fall back to
        // asserting that instead of failing outright.
        try
        {
            Configuration.ConfirmUint(childElem,
                "Increment", env.MutexIncrement,
                compulsory);
        }
        catch (AssertionException)
        {
            Assert.AreEqual(0, env.MutexIncrement);
        }
        Configuration.ConfirmUint(childElem,
            "NumTestAndSetSpins",
            env.NumTestAndSetSpins, compulsory);
    }

    // If the memory pool subsystem is set, check the
    // field/properties set in MPoolConfig.
    if (mpool == true)
    {
        childElem = (XmlElement)XMLReader.GetNode(
            xmlElement, "MPoolConfig");
        Configuration.ConfirmCacheSize(childElem,
            "CacheSize", env.CacheSize, compulsory);
        if (env.UsingMPool == false)
            Configuration.ConfirmCacheSize(childElem,
                "MaxCacheSize", env.MaxCacheSize, compulsory);
        Configuration.ConfirmInt(childElem,
            "MaxOpenFiles", env.MaxOpenFiles, compulsory);
        Configuration.ConfirmMaxSequentialWrites(childElem,
            "MaxSequentialWrites", env.SequentialWritePause,
            env.MaxSequentialWrites, compulsory);
        Configuration.ConfirmUint(childElem,
            "MMapSize", env.MMapSize, compulsory);
    }

    // If replication is set, check the field/properties
    // set in ReplicationConfig.
    if (replication == true)
    {
        childElem = (XmlElement)XMLReader.GetNode(
            xmlElement, "ReplicationConfig");
        Configuration.ConfirmUint(childElem,
            "AckTimeout", env.RepAckTimeout, compulsory);
        Configuration.ConfirmBool(childElem,
            "BulkTransfer", env.RepBulkTransfer, compulsory);
        Configuration.ConfirmUint(childElem,
            "CheckpointDelay", env.RepCheckpointDelay, compulsory);
        Configuration.ConfirmUint(childElem,
            "ConnectionRetry", env.RepConnectionRetry, compulsory);
        Configuration.ConfirmBool(childElem,
            "DelayClientSync", env.RepDelayClientSync, compulsory);
        Configuration.ConfirmUint(childElem,
            "ElectionRetry", env.RepElectionRetry, compulsory);
        Configuration.ConfirmUint(childElem,
            "ElectionTimeout", env.RepElectionTimeout, compulsory);
        Configuration.ConfirmUint(childElem,
            "FullElectionTimeout", env.RepFullElectionTimeout,compulsory);
        Configuration.ConfirmUint(childElem,
            "HeartbeatMonitor", env.RepHeartbeatMonitor, compulsory);
        Configuration.ConfirmUint(childElem,
            "HeartbeatSend", env.RepHeartbeatSend, compulsory);
        Configuration.ConfirmUint(childElem,
            "LeaseTimeout", env.RepLeaseTimeout, compulsory);
        Configuration.ConfirmBool(childElem,
            "AutoInit", env.RepAutoInit, compulsory);
        Configuration.ConfirmBool(childElem,
            "NoBlocking", env.RepNoBlocking, compulsory);
        Configuration.ConfirmUint(childElem,
            "NSites", env.RepNSites, compulsory);
        Configuration.ConfirmUint(childElem,
            "Priority", env.RepPriority, compulsory);
        Configuration.ConfirmAckPolicy(childElem,
            "RepMgrAckPolicy", env.RepMgrAckPolicy, compulsory);
        Configuration.ConfirmBool(childElem,
            "Strict2Site", env.RepStrict2Site, compulsory);
        Configuration.ConfirmBool(childElem,
            "UseMasterLeases", env.RepUseMasterLeases, compulsory);
    }
}
}
}
| |
// Copyright 2008 Adrian Akison
// Distributed under license terms of CPOL http://www.codeproject.com/info/cpol10.aspx
using System;
using System.Collections.Generic;
namespace Facet.Combinatorics {
/// <summary>
/// Variations defines a meta-collection, typically a list of lists, of all possible
/// ordered subsets of a particular size from the set of values.
/// This list is enumerable and allows the scanning of all possible Variations using a simple
/// foreach() loop even though the variations are not all in memory.
/// </summary>
/// <remarks>
/// The MetaCollectionType parameter of the constructor allows for the creation of
/// normal Variations and Variations with Repetition.
///
/// When given an input collection {A B C} and a lower index of 2, the following sets are generated:
/// MetaCollectionType.WithoutRepetition generates 6 sets: =>
///     {A B}, {A C}, {B A}, {B C}, {C A}, {C B}
/// MetaCollectionType.WithRepetition generates 9 sets:
///     {A A}, {A B}, {A C}, {B A}, {B B}, {B C}, {C A}, {C B}, {C C}
///
/// The equality of multiple inputs is not considered when generating variations.
/// </remarks>
/// <typeparam name="T">The type of the values within the list.</typeparam>
public class Variations<T> : IMetaCollection<T> {
#region Constructors
/// <summary>
/// No default constructor, must provide a list of values and size.
/// </summary>
protected Variations() {
;
}
/// <summary>
/// Create a variation set from the indicated list of values.
/// The upper index is calculated as values.Count, the lower index is specified.
/// Collection type defaults to MetaCollectionType.WithoutRepetition
/// </summary>
/// <param name="values">List of values to select Variations from.</param>
/// <param name="lowerIndex">The size of each variation set to return.</param>
public Variations(IList<T> values, int lowerIndex) {
Initialize(values, lowerIndex, GenerateOption.WithoutRepetition);
}
/// <summary>
/// Create a variation set from the indicated list of values.
/// The upper index is calculated as values.Count, the lower index is specified.
/// </summary>
/// <param name="values">List of values to select variations from.</param>
/// <param name="lowerIndex">The size of each variation set to return.</param>
/// <param name="type">Type indicates whether to use repetition in set generation.</param>
public Variations(IList<T> values, int lowerIndex, GenerateOption type) {
Initialize(values, lowerIndex, type);
}
#endregion
#region IEnumerable Interface
/// <summary>
/// Gets an enumerator for the collection of Variations.
/// The enumerator implementation is chosen by the collection's repetition type.
/// </summary>
/// <returns>The enumerator.</returns>
public IEnumerator<IList<T>> GetEnumerator() {
if(Type == GenerateOption.WithRepetition) {
return new EnumeratorWithRepetition(this);
}
else {
return new EnumeratorWithoutRepetition(this);
}
}
/// <summary>
/// Gets an enumerator for the collection of Variations.
/// Non-generic form required by IEnumerable; mirrors the generic overload.
/// </summary>
/// <returns>The enumerator.</returns>
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() {
if(Type == GenerateOption.WithRepetition) {
return new EnumeratorWithRepetition(this);
}
else {
return new EnumeratorWithoutRepetition(this);
}
}
#endregion
#region Enumerator Inner Class
/// <summary>
/// An enumerator for Variations when the type is set to WithRepetition.
/// Treats the index list as a base-UpperIndex counter with LowerIndex digits.
/// </summary>
public class EnumeratorWithRepetition : IEnumerator<IList<T>> {
#region Constructors
/// <summary>
/// Construct an enumerator with the parent object.
/// </summary>
/// <param name="source">The source Variations object.</param>
public EnumeratorWithRepetition(Variations<T> source) {
myParent = source;
Reset();
}
#endregion
#region IEnumerator interface
/// <summary>
/// Resets the Variations enumerator to the first variation.
/// A null index list marks the "before first element" state.
/// </summary>
public void Reset() {
myCurrentList = null;
myListIndexes = null;
}
/// <summary>
/// Advances to the next variation.
/// </summary>
/// <returns>True if successfully moved to next variation, False if no more variations exist.</returns>
/// <remarks>
/// Increments the internal myListIndexes collection by incrementing the last index
/// and overflow/carrying into others just like grade-school arithmetic. If the
/// final carry flag is set, then we would wrap around and are therefore done.
/// </remarks>
public bool MoveNext() {
int carry = 1;
if(myListIndexes == null) {
// First call: build the all-zero index list; carry = 0 so it counts
// as a valid (the first) variation.
myListIndexes = new List<int>();
for(int i = 0; i < myParent.LowerIndex; ++i) {
myListIndexes.Add(0);
}
carry = 0;
}
else {
// Increment from the least-significant (last) position, carrying
// left whenever a digit reaches UpperIndex.
for(int i = myListIndexes.Count - 1; i >= 0 && carry > 0; --i) {
myListIndexes[i] += carry;
carry = 0;
if(myListIndexes[i] >= myParent.UpperIndex) {
myListIndexes[i] = 0;
carry = 1;
}
}
}
// Invalidate the lazily-built current list; a surviving carry means
// the counter wrapped and enumeration is complete.
myCurrentList = null;
return carry != 1;
}
/// <summary>
/// The current variation (built lazily from the index list).
/// </summary>
public IList<T> Current {
get {
ComputeCurrent();
return myCurrentList;
}
}
/// <summary>
/// The current variation.
/// </summary>
object System.Collections.IEnumerator.Current {
get {
ComputeCurrent();
return myCurrentList;
}
}
/// <summary>
/// Cleans up non-managed resources, of which there are none used here.
/// </summary>
public void Dispose() {
;
}
#endregion
#region Heavy Lifting Members
/// <summary>
/// Computes the current list based on the internal list index.
/// Each stored index selects the corresponding source value.
/// </summary>
private void ComputeCurrent() {
if(myCurrentList == null) {
myCurrentList = new List<T>();
foreach(int index in myListIndexes) {
myCurrentList.Add(myParent.myValues[index]);
}
}
}
#endregion
#region Data
/// <summary>
/// Parent object this is an enumerator for.
/// </summary>
private Variations<T> myParent;
/// <summary>
/// The current list of values, this is lazy evaluated by the Current property.
/// </summary>
private List<T> myCurrentList;
/// <summary>
/// Index (into the parent's values) for each position of the current variation.
/// Null until MoveNext is first called.
/// </summary>
private List<int> myListIndexes;
#endregion
}
/// <summary>
/// An enumerator for Variations when the type is set to WithoutRepetition.
/// Delegates ordering to an underlying Permutations&lt;int&gt; enumerator.
/// </summary>
public class EnumeratorWithoutRepetition : IEnumerator<IList<T>> {
#region Constructors
/// <summary>
/// Construct an enumerator with the parent object.
/// </summary>
/// <param name="source">The source Variations object.</param>
public EnumeratorWithoutRepetition(Variations<T> source) {
myParent = source;
myPermutationsEnumerator = (Permutations<int>.Enumerator)myParent.myPermutations.GetEnumerator();
}
#endregion
#region IEnumerator interface
/// <summary>
/// Resets the Variations enumerator to the first variation.
/// </summary>
public void Reset() {
myPermutationsEnumerator.Reset();
}
/// <summary>
/// Advances to the next variation.
/// </summary>
/// <returns>True if successfully moved to next variation, False if no more variations exist.</returns>
public bool MoveNext() {
bool ret = myPermutationsEnumerator.MoveNext();
// Invalidate the lazily-computed current list.
myCurrentList = null;
return ret;
}
/// <summary>
/// The current variation.
/// </summary>
public IList<T> Current {
get {
ComputeCurrent();
return myCurrentList;
}
}
/// <summary>
/// The current variation.
/// </summary>
object System.Collections.IEnumerator.Current {
get {
ComputeCurrent();
return myCurrentList;
}
}
/// <summary>
/// Cleans up non-managed resources, of which there are none used here.
/// </summary>
public void Dispose() {
;
}
#endregion
#region Heavy Lifting Members
/// <summary>
/// Creates a list of original values from the int permutation provided.
/// The exception for accessing current (InvalidOperationException) is generated
/// by the call to .Current on the underlying enumeration.
/// </summary>
/// <remarks>
/// To compute the current list of values, the element to use is determined by
/// a permutation position with a non-MaxValue value. It is placed at the position in the
/// output that the index value indicates.
///
/// E.g. Variations of 6 choose 3 without repetition
/// Input array: {A B C D E F}
/// Permutations: {- 1 - - 3 2} (- is Int32.MaxValue)
/// Generates set: {B F E}
/// </remarks>
private void ComputeCurrent() {
if(myCurrentList == null) {
myCurrentList = new List<T>();
int index = 0;
IList<int> currentPermutation = (IList<int>)myPermutationsEnumerator.Current;
// Pre-fill the output with placeholders so positions can be
// assigned out of order below.
for(int i = 0; i < myParent.LowerIndex; ++i) {
myCurrentList.Add(myParent.myValues[0]);
}
for(int i = 0; i < currentPermutation.Count; ++i) {
int position = currentPermutation[i];
if(position != Int32.MaxValue) {
// This source element is selected; place it at the slot the
// permutation dictates.
myCurrentList[position] = myParent.myValues[index];
if(myParent.Type == GenerateOption.WithoutRepetition) {
++index;
}
}
else {
// Source element not selected; skip past it.
++index;
}
}
}
}
#endregion
#region Data
/// <summary>
/// Parent object this is an enumerator for.
/// </summary>
private Variations<T> myParent;
/// <summary>
/// The current list of values, this is lazy evaluated by the Current property.
/// </summary>
private List<T> myCurrentList;
/// <summary>
/// An enumerator of the parent's list of lexicographic orderings.
/// </summary>
private Permutations<int>.Enumerator myPermutationsEnumerator;
#endregion
}
#endregion
#region IMetaList Interface
/// <summary>
/// The number of unique variations that are defined in this meta-collection.
/// </summary>
/// <remarks>
/// Variations with repetitions does not behave like other meta-collections and its
/// count is equal to N^P, where N is the upper index and P is the lower index.
/// </remarks>
public long Count {
get {
if(Type == GenerateOption.WithoutRepetition) {
return myPermutations.Count;
}
else {
return (long)Math.Pow(UpperIndex, LowerIndex);
}
}
}
/// <summary>
/// The type of Variations set that is generated.
/// </summary>
public GenerateOption Type {
get {
return myMetaCollectionType;
}
}
/// <summary>
/// The upper index of the meta-collection, equal to the number of items in the initial set.
/// </summary>
public int UpperIndex {
get {
return myValues.Count;
}
}
/// <summary>
/// The lower index of the meta-collection, equal to the number of items returned each iteration.
/// </summary>
public int LowerIndex {
get {
return myLowerIndex;
}
}
#endregion
#region Heavy Lifting Members
/// <summary>
/// Initialize the variations for constructors.
/// </summary>
/// <param name="values">List of values to select variations from.</param>
/// <param name="lowerIndex">The size of each variation set to return.</param>
/// <param name="type">The type of variations set to generate.</param>
private void Initialize(IList<T> values, int lowerIndex, GenerateOption type) {
myMetaCollectionType = type;
myLowerIndex = lowerIndex;
myValues = new List<T>();
myValues.AddRange(values);
if(type == GenerateOption.WithoutRepetition) {
// Build the seed permutation: the last LowerIndex slots get the
// output positions 0..LowerIndex-1, the rest are MaxValue markers
// meaning "element not selected".
List<int> myMap = new List<int>();
int index = 0;
for(int i = 0; i < myValues.Count; ++i) {
if(i >= myValues.Count - myLowerIndex) {
myMap.Add(index++);
}
else {
myMap.Add(Int32.MaxValue);
}
}
myPermutations = new Permutations<int>(myMap);
}
else {
; // myPermutations isn't used.
}
}
#endregion
#region Data
/// <summary>
/// Copy of values object is initialized with, required for enumerator reset.
/// </summary>
private List<T> myValues;
/// <summary>
/// Permutations object that handles permutations on int for variation inclusion and ordering.
/// </summary>
private Permutations<int> myPermutations;
/// <summary>
/// The type of the variation collection.
/// </summary>
private GenerateOption myMetaCollectionType;
/// <summary>
/// The lower index defined in the constructor.
/// </summary>
private int myLowerIndex;
#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
using System;
using System.Threading.Tasks;
using Windows.ApplicationModel.Background;
using Windows.Devices.Sensors;
using Windows.Foundation;
using Windows.Storage;
using Windows.UI.Core;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;
namespace SDKTemplate
{
/// <summary>
/// An empty page that can be used on its own or navigated to within a Frame.
/// </summary>
public sealed partial class Scenario1_DeviceUse : Page
{
// A pointer back to the main page. This is needed if you want to call methods in MainPage such
// as NotifyUser()
private MainPage rootPage = MainPage.Current;

// The default accelerometer, set in the constructor; null when the device has none.
// NOTE(review): field shares its type's name — C# resolves static calls through
// the type, but renaming to _accelerometer would be clearer.
private Accelerometer Accelerometer;

// Trigger used to request the device-use background task; created once in the
// constructor because only one instance of the trigger can exist at a time.
private DeviceUseTrigger _deviceUseTrigger;

// Used to register the device use background task
private BackgroundTaskRegistration _deviceUseBackgroundTaskRegistration;

// Used for refreshing the number of samples received when the app is visible
private DispatcherTimer _refreshTimer;
/// <summary>
/// Initializes the scenario page: grabs the default accelerometer and, when one
/// exists, prepares the device-use trigger and the once-per-second UI refresh timer.
/// </summary>
public Scenario1_DeviceUse()
{
    this.InitializeComponent();

    Accelerometer = Accelerometer.GetDefault();
    if (Accelerometer == null)
    {
        rootPage.NotifyUser("No accelerometer found", NotifyType.StatusMessage);
    }
    else
    {
        // Save trigger so that we may start the background task later.
        // Only one instance of the trigger can exist at a time. Since the trigger does not implement
        // IDisposable, it may still be in memory when a new trigger is created.
        _deviceUseTrigger = new DeviceUseTrigger();

        // Setup a timer to periodically refresh results when the app is visible.
        _refreshTimer = new DispatcherTimer();
        _refreshTimer.Interval = new TimeSpan(0, 0, 1); // Refresh once every second
        _refreshTimer.Tick += RefreshTimer_Tick;
    }
}
/// <summary>
/// Invoked when this page is about to be displayed in a Frame.
/// Restores button/timer state from the "IsBackgroundTaskActive" setting
/// (written by the background task) and marks the app as visible for the task.
/// </summary>
/// <param name="e">Event data that describes how this page was reached. The Parameter
/// property is typically used to configure the page.</param>
protected override void OnNavigatedTo(NavigationEventArgs e)
{
    // If the background task is active, start the refresh timer and activate the "Disable" button.
    // The "IsBackgroundTaskActive" state is set by the background task.
    // Single TryGetValue lookup instead of ContainsKey followed by the indexer.
    object taskActiveValue;
    bool isBackgroundTaskActive =
        ApplicationData.Current.LocalSettings.Values.TryGetValue("IsBackgroundTaskActive", out taskActiveValue) &&
        (bool)taskActiveValue;

    ScenarioEnableButton.IsEnabled = !isBackgroundTaskActive;
    ScenarioDisableButton.IsEnabled = isBackgroundTaskActive;

    if (isBackgroundTaskActive)
    {
        _refreshTimer.Start();
    }

    // Store a setting for the background task to read
    ApplicationData.Current.LocalSettings.Values["IsAppVisible"] = true;
}
/// <summary>
/// Invoked immediately before the Page is unloaded and is no longer the current source of a parent Frame.
/// Detaches the visibility handler and tells the background task the app is hidden.
/// </summary>
/// <param name="e">
/// Event data that can be examined by overriding code. The event data is representative
/// of the navigation that will unload the current Page unless canceled. The
/// navigation can potentially be canceled by setting Cancel.
/// </param>
protected override void OnNavigatingFrom(NavigatingCancelEventArgs e)
{
    Window.Current.VisibilityChanged -= new WindowVisibilityChangedEventHandler(VisibilityChanged);

    ApplicationData.Current.LocalSettings.Values["IsAppVisible"] = false;

    // The default behavior here is to let the background task continue to run when
    // this scenario exits. The background task can be canceled by clicking on the "Disable"
    // button the next time the app is navigated to.
    // To cancel the background task on scenario exit, uncomment this code.
    // if (null != _deviceUseBackgroundTaskRegistration)
    // {
    //    _deviceUseBackgroundTaskRegistration.Unregister(true);
    //    _deviceUseBackgroundTaskRegistration = null;
    // }

    base.OnNavigatingFrom(e);
}
/// <summary>
/// This is the event handler for VisibilityChanged events.
/// While the background task is active (the "Disable" button is enabled), it
/// publishes the visibility state for the task and pauses/resumes the UI refresh timer.
/// </summary>
/// <param name="sender"></param>
/// <param name="e">
/// Event data that can be examined for the current visibility state.
/// </param>
private void VisibilityChanged(object sender, VisibilityChangedEventArgs e)
{
    // Nothing to do unless the background task is running.
    if (!ScenarioDisableButton.IsEnabled)
    {
        return;
    }

    ApplicationData.Current.LocalSettings.Values["IsAppVisible"] = e.Visible;

    if (e.Visible)
    {
        _refreshTimer.Start();
    }
    else
    {
        _refreshTimer.Stop();
    }
}
/// <summary>
/// This is the click handler for the 'Enable' button.
/// Requests background-task access and, if granted, starts the sensor
/// background task and flips the button/timer state.
/// (async void is acceptable here because this is a top-level event handler.)
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void ScenarioEnable(object sender, RoutedEventArgs e)
{
    Window.Current.VisibilityChanged += new WindowVisibilityChangedEventHandler(VisibilityChanged);

    if (Accelerometer == null)
    {
        rootPage.NotifyUser("No accelerometer found", NotifyType.StatusMessage);
        return;
    }

    // Make sure this app is allowed to run background tasks.
    // RequestAccessAsync must be called on the UI thread.
    BackgroundAccessStatus accessStatus = await BackgroundExecutionManager.RequestAccessAsync();
    bool accessGranted =
        accessStatus == BackgroundAccessStatus.AllowedWithAlwaysOnRealTimeConnectivity ||
        accessStatus == BackgroundAccessStatus.AllowedMayUseActiveRealTimeConnectivity;

    if (!accessGranted)
    {
        rootPage.NotifyUser("Background tasks may be disabled for this app", NotifyType.ErrorMessage);
        return;
    }

    if (await StartSensorBackgroundTaskAsync(Accelerometer.DeviceId))
    {
        _refreshTimer.Start();
        ScenarioEnableButton.IsEnabled = false;
        ScenarioDisableButton.IsEnabled = true;
    }
}
/// <summary>
/// This is the click handler for the 'Disable' button.
/// Stops the refresh timer, restores button state, and cancels the background
/// task registered by either this app session or a previous one.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void ScenarioDisable(object sender, RoutedEventArgs e)
{
    Window.Current.VisibilityChanged -= new WindowVisibilityChangedEventHandler(VisibilityChanged);

    ScenarioEnableButton.IsEnabled = true;
    ScenarioDisableButton.IsEnabled = false;
    _refreshTimer.Stop();

    if (_deviceUseBackgroundTaskRegistration == null)
    {
        // Cancel and unregister the background task from the previous app session.
        FindAndCancelExistingBackgroundTask();
    }
    else
    {
        // Cancel and unregister the background task from the current app session.
        _deviceUseBackgroundTaskRegistration.Unregister(true);
        _deviceUseBackgroundTaskRegistration = null;
    }

    rootPage.NotifyUser("Background task was canceled", NotifyType.StatusMessage);
}
/// <summary>
/// Starts the sensor background task: registers a task driven by the
/// DeviceUseTrigger and then requests the trigger for the given device.
/// Reports the outcome of the request to the user.
/// </summary>
/// <param name="deviceId">Device Id for the sensor to be used by the task.</param>
/// <returns>True if the task is started successfully.</returns>
private async Task<bool> StartSensorBackgroundTaskAsync(String deviceId)
{
    bool started = false;
    // Make sure only 1 task is running.
    FindAndCancelExistingBackgroundTask();
    // Register the background task.
    var backgroundTaskBuilder = new BackgroundTaskBuilder()
    {
        Name = SampleConstants.Scenario1_DeviceUse_TaskName,
        TaskEntryPoint = SampleConstants.Scenario1_DeviceUse_TaskEntryPoint
    };
    backgroundTaskBuilder.SetTrigger(_deviceUseTrigger);
    _deviceUseBackgroundTaskRegistration = backgroundTaskBuilder.Register();
    // Make sure we're notified when the task completes or if there is an update.
    // (Hooked up before RequestAsync so a fast-completing task is not missed.)
    _deviceUseBackgroundTaskRegistration.Completed += new BackgroundTaskCompletedEventHandler(OnBackgroundTaskCompleted);
    try
    {
        // Request a DeviceUse task to use the accelerometer.
        DeviceTriggerResult deviceTriggerResult = await _deviceUseTrigger.RequestAsync(deviceId);
        switch (deviceTriggerResult)
        {
            case DeviceTriggerResult.Allowed:
                rootPage.NotifyUser("Background task started", NotifyType.StatusMessage);
                started = true;
                break;
            case DeviceTriggerResult.LowBattery:
                rootPage.NotifyUser("Insufficient battery to run the background task", NotifyType.ErrorMessage);
                break;
            case DeviceTriggerResult.DeniedBySystem:
                // The system can deny a task request if the system-wide DeviceUse task limit is reached.
                rootPage.NotifyUser("The system has denied the background task request", NotifyType.ErrorMessage);
                break;
            default:
                rootPage.NotifyUser("Could not start the background task: " + deviceTriggerResult, NotifyType.ErrorMessage);
                break;
        }
    }
    catch (InvalidOperationException)
    {
        // If toggling quickly between 'Disable' and 'Enable', the previous task
        // could still be in the process of cleaning up.
        rootPage.NotifyUser("A previous background task is still running, please wait for it to exit", NotifyType.ErrorMessage);
        FindAndCancelExistingBackgroundTask();
    }
    return started;
}
/// <summary>
/// Finds a previously registered background task for this scenario and cancels it (if present).
/// At most one such registration is expected, so the scan stops at the first match.
/// </summary>
private void FindAndCancelExistingBackgroundTask()
{
    foreach (var task in BackgroundTaskRegistration.AllTasks.Values)
    {
        if (task.Name == SampleConstants.Scenario1_DeviceUse_TaskName)
        {
            ((BackgroundTaskRegistration)task).Unregister(true);
            return;
        }
    }
}
/// <summary>
/// This is the tick handler for the Refresh timer.
/// Shows the sample count published by the background task, if any.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void RefreshTimer_Tick(object sender, object e)
{
    var settings = ApplicationData.Current.LocalSettings.Values;
    ScenarioOutput_SampleCount.Text = settings.ContainsKey("SampleCount")
        ? ((ulong)settings["SampleCount"]).ToString(System.Globalization.CultureInfo.CurrentCulture)
        : "No data";
}
/// <summary>
/// This is the background task completion handler.
/// Marshals status display onto the UI thread, then releases this
/// session's task registration.
/// (async void is acceptable here: this is a top-level event handler.)
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void OnBackgroundTaskCompleted(BackgroundTaskRegistration sender, BackgroundTaskCompletedEventArgs e)
{
    // Dispatch to the UI thread to display the output.
    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        // An exception may be thrown if an error occurs in the background task.
        try
        {
            e.CheckResult();
            if (ApplicationData.Current.LocalSettings.Values.ContainsKey("TaskCancelationReason"))
            {
                string cancelationReason = (string)ApplicationData.Current.LocalSettings.Values["TaskCancelationReason"];
                rootPage.NotifyUser("Background task was stopped, reason: " + cancelationReason, NotifyType.StatusMessage);
            }
        }
        catch (Exception ex)
        {
            rootPage.NotifyUser("Exception in background task: " + ex.Message, NotifyType.ErrorMessage);
        }
        // The task is done, so there is nothing left to refresh.
        _refreshTimer.Stop();
    });
    // Unregister the background task and let the remaining task finish until completion.
    // Note: Unregister(false) — unlike ScenarioDisable, the task is not force-canceled here.
    if (null != _deviceUseBackgroundTaskRegistration)
    {
        _deviceUseBackgroundTaskRegistration.Unregister(false);
        _deviceUseBackgroundTaskRegistration = null;
    }
}
}
}
| |
using System;
using System.Linq;
using System.Threading.Tasks;
using vivego.Collection.EventStore;
using vivego.core;
using vivego.EventStore;
using Xunit;
using Version = vivego.EventStore.Version;
namespace vivego.Collection.Tests.EventStore
{
/// <summary>
/// Reusable contract tests for IEventStore implementations. Concrete test
/// classes supply the store under test via MakeEventStore(). The tests pin
/// down the contract visible here: event numbering starts at 0, versions
/// report an inclusive [Begin, End] span, expected-version mismatches throw
/// WrongExpectedVersionException, and reads of missing or deleted streams
/// return empty rather than throwing.
/// </summary>
public abstract class EventStoreTests : DisposableBase
{
    /// <summary>Factory hook: returns the IEventStore implementation under test.</summary>
    protected abstract IEventStore MakeEventStore();
    // The first append to a fresh stream yields version span [0, 0].
    [Fact]
    public virtual async Task CanAppend()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        Version version = await eventStore.Append(id,
            ExpectedVersion.Any,
            new EventData()).ConfigureAwait(false);
        Assert.NotNull(version);
        Assert.Equal(0, version.Begin);
        Assert.Equal(0, version.End);
    }
    // A second append advances the version span to [1, 1].
    [Fact]
    public virtual async Task CanAppendSequential()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id,
            ExpectedVersion.Any,
            new EventData()).ConfigureAwait(false);
        Version version = await eventStore.Append(id,
            ExpectedVersion.Any,
            new EventData()).ConfigureAwait(false);
        Assert.NotNull(version);
        Assert.Equal(1, version.Begin);
        Assert.Equal(1, version.End);
    }
    // ExpectedVersion.StreamExists must be rejected when the stream is absent.
    [Fact]
    public virtual async Task ThrowExceptionIfStreamNotExist()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await Assert.ThrowsAsync<WrongExpectedVersionException>(() => eventStore.Append(id,
            ExpectedVersion.StreamExists,
            new EventData())).ConfigureAwait(false);
    }
    // ExpectedVersion.StreamExists succeeds once the stream has been created.
    [Fact]
    public virtual async Task CanAppendOnlyIfStreamAlreadyExists()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id,
            ExpectedVersion.Any,
            new EventData()).ConfigureAwait(false);
        Version version = await eventStore.Append(id,
            ExpectedVersion.StreamExists,
            new EventData()).ConfigureAwait(false);
        Assert.NotNull(version);
        Assert.Equal(1, version.Begin);
        Assert.Equal(1, version.End);
    }
    // ExpectedVersion.NoStream is accepted on a brand-new stream.
    [Fact]
    public virtual async Task CanAppendOnlyIfStreamDoesNotExist()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id,
            ExpectedVersion.NoStream,
            new EventData()).ConfigureAwait(false);
    }
    // ExpectedVersion.NoStream must be rejected once the stream exists.
    [Fact]
    public virtual async Task ThrowExceptionNoStreamExist()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id,
            ExpectedVersion.Any,
            new EventData()).ConfigureAwait(false);
        await Assert.ThrowsAsync<WrongExpectedVersionException>(() => eventStore.Append(id,
            ExpectedVersion.NoStream,
            new EventData())).ConfigureAwait(false);
    }
    // Argument validation: a null stream key is rejected.
    [Fact]
    public async Task CannotAppendNullKey()
    {
        IEventStore eventStore = MakeEventStore();
        await Assert.ThrowsAsync<ArgumentException>(() => eventStore.Append(null!,
            ExpectedVersion.Any,
            new EventData())).ConfigureAwait(false);
    }
    // Argument validation: an empty stream key is rejected.
    [Fact]
    public async Task CannotAppendEmptyKey()
    {
        IEventStore eventStore = MakeEventStore();
        await Assert.ThrowsAsync<ArgumentException>(() => eventStore.Append(string.Empty,
            ExpectedVersion.Any,
            new EventData())).ConfigureAwait(false);
    }
    // Argument validation: null event data is rejected.
    [Fact]
    public async Task CannotAppendNullEventData()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await Assert.ThrowsAsync<ArgumentNullException>(() => eventStore.Append(id,
            ExpectedVersion.Any,
            null!)).ConfigureAwait(false);
    }
    // Reading a stream that was never written returns an empty (non-null) result.
    [Fact]
    public virtual async Task CanGetEmpty()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        RecordedEvent[] all = await eventStore.GetAll(id).ToArrayAsync().ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Empty(all);
    }
    // Reading a deleted stream behaves like reading a missing stream: empty, no throw.
    [Fact]
    public virtual async Task CanGetDeleted()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any, new EventData()).ConfigureAwait(false);
        await eventStore.Delete(id).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore.GetAll(id).ToArrayAsync().ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Empty(all);
    }
    // A batch append is read back in order with contiguous event numbers 0..99.
    [Fact]
    public virtual async Task CanGetMany()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any,
            Enumerable.Range(0, 100).Select(_ => new EventData())).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore.GetAll(id).ToArrayAsync().ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Equal(100, all.Length);
        Assert.True(all.Select(recordedEvent => (int) recordedEvent.EventNumber).SequenceEqual(Enumerable.Range(0, 100)));
    }
    // GetFrom(id, 10) is inclusive of event 10: returns events 10..99.
    [Fact]
    public virtual async Task CanGetManyFrom()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any,
            Enumerable.Range(0, 100).Select(_ => new EventData())).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore
            .GetFrom(id, 10)
            .ToArrayAsync()
            .ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Equal(90, all.Length);
        Assert.True(all.Select(recordedEvent => (int) recordedEvent.EventNumber).SequenceEqual(Enumerable.Range(10, 90)));
    }
    // GetTo(id, 89) is inclusive of event 89: returns events 0..89.
    [Fact]
    public virtual async Task CanGetManyFromBehind()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any,
            Enumerable.Range(0, 100).Select(_ => new EventData())).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore
            .GetTo(id, 89)
            .ToArrayAsync()
            .ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Equal(90, all.Length);
        Assert.True(all.Select(recordedEvent => (int)recordedEvent.EventNumber).SequenceEqual(Enumerable.Range(0, 90)));
    }
    // GetAllReverse returns all events in descending event-number order.
    [Fact]
    public virtual async Task CanGetManyReverse()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any,
            Enumerable.Range(0, 100).Select(_ => new EventData())).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore.GetAllReverse(id).ToArrayAsync().ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Equal(100, all.Length);
        Assert.True(all.Select(recordedEvent => (int)recordedEvent.EventNumber).SequenceEqual(Enumerable.Range(0, 100).Reverse()));
    }
    // GetFromReverse(id, 10) returns events 99..10 (inclusive), descending.
    [Fact]
    public virtual async Task CanGetManyFromReverse()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any,
            Enumerable.Range(0, 100).Select(_ => new EventData())).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore
            .GetFromReverse(id, 10)
            .ToArrayAsync()
            .ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Equal(90, all.Length);
        Assert.True(all.Select(recordedEvent => (int)recordedEvent.EventNumber).SequenceEqual(Enumerable.Range(10, 90).Reverse()));
    }
    // GetToReverse(id, 89) returns events 89..0 (inclusive), descending.
    [Fact]
    public virtual async Task CanGetManyFromBehindReverse()
    {
        IEventStore eventStore = MakeEventStore();
        string id = Guid.NewGuid().ToString();
        await eventStore.Append(id, ExpectedVersion.Any,
            Enumerable.Range(0, 100).Select(_ => new EventData())).ConfigureAwait(false);
        RecordedEvent[] all = await eventStore
            .GetToReverse(id, 89)
            .ToArrayAsync()
            .ConfigureAwait(false);
        Assert.NotNull(all);
        Assert.Equal(90, all.Length);
        Assert.True(all.Select(recordedEvent => (int)recordedEvent.EventNumber).SequenceEqual(Enumerable.Range(0, 90).Reverse()));
    }
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Documents.TextPointer.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Documents
{
public partial class TextPointer : ContentPosition, ITextPointer
{
    // NOTE: This is an automatically generated Code Contracts reference stub.
    // Method bodies are placeholders (default(...)) and are never executed;
    // only the Contract.Requires/Ensures annotations carry meaning for the
    // static checker. Do not add real logic here — regenerate instead.
    #region Methods and constructors
    // Contract: position must be non-null; result is constrained to -1, 0 or 1.
    public int CompareTo(TextPointer position)
    {
        Contract.Requires(position != null);
        Contract.Ensures(-1 <= Contract.Result<int>());
        Contract.Ensures(Contract.Result<int>() <= 1);
        return default(int);
    }
    public int DeleteTextInRun(int count)
    {
        return default(int);
    }
    public System.Windows.DependencyObject GetAdjacentElement(LogicalDirection direction)
    {
        return default(System.Windows.DependencyObject);
    }
    public System.Windows.Rect GetCharacterRect(LogicalDirection direction)
    {
        return default(System.Windows.Rect);
    }
    public TextPointer GetInsertionPosition(LogicalDirection direction)
    {
        return default(TextPointer);
    }
    // Contract: the two-argument overload never returns null.
    public TextPointer GetLineStartPosition(int count, out int actualCount)
    {
        Contract.Ensures(Contract.Result<System.Windows.Documents.TextPointer>() != null);
        actualCount = default(int);
        return default(TextPointer);
    }
    public TextPointer GetLineStartPosition(int count)
    {
        return default(TextPointer);
    }
    public TextPointer GetNextContextPosition(LogicalDirection direction)
    {
        return default(TextPointer);
    }
    public TextPointer GetNextInsertionPosition(LogicalDirection direction)
    {
        return default(TextPointer);
    }
    // Contract: position must be non-null.
    public int GetOffsetToPosition(TextPointer position)
    {
        Contract.Requires(position != null);
        return default(int);
    }
    public TextPointerContext GetPointerContext(LogicalDirection direction)
    {
        return default(TextPointerContext);
    }
    public TextPointer GetPositionAtOffset(int offset, LogicalDirection direction)
    {
        return default(TextPointer);
    }
    public TextPointer GetPositionAtOffset(int offset)
    {
        return default(TextPointer);
    }
    public int GetTextInRun(LogicalDirection direction, char[] textBuffer, int startIndex, int count)
    {
        return default(int);
    }
    public string GetTextInRun(LogicalDirection direction)
    {
        return default(string);
    }
    public int GetTextRunLength(LogicalDirection direction)
    {
        return default(int);
    }
    public TextPointer InsertLineBreak()
    {
        return default(TextPointer);
    }
    public TextPointer InsertParagraphBreak()
    {
        return default(TextPointer);
    }
    public void InsertTextInRun(string textData)
    {
    }
    public bool IsInSameDocument(TextPointer textPosition)
    {
        return default(bool);
    }
    // Explicit ITextPointer implementations below carry no contracts.
    int System.Windows.Documents.ITextPointer.CompareTo(ITextPointer position)
    {
        return default(int);
    }
    ITextPointer System.Windows.Documents.ITextPointer.CreatePointer(LogicalDirection gravity)
    {
        return default(ITextPointer);
    }
    ITextPointer System.Windows.Documents.ITextPointer.CreatePointer()
    {
        return default(ITextPointer);
    }
    ITextPointer System.Windows.Documents.ITextPointer.CreatePointer(int offset)
    {
        return default(ITextPointer);
    }
    ITextPointer System.Windows.Documents.ITextPointer.CreatePointer(int offset, LogicalDirection gravity)
    {
        return default(ITextPointer);
    }
    void System.Windows.Documents.ITextPointer.DeleteContentToPosition(ITextPointer limit)
    {
    }
    void System.Windows.Documents.ITextPointer.Freeze()
    {
    }
    Object System.Windows.Documents.ITextPointer.GetAdjacentElement(LogicalDirection direction)
    {
        return default(Object);
    }
    System.Windows.Rect System.Windows.Documents.ITextPointer.GetCharacterRect(LogicalDirection direction)
    {
        return default(System.Windows.Rect);
    }
    Type System.Windows.Documents.ITextPointer.GetElementType(LogicalDirection direction)
    {
        return default(Type);
    }
    ITextPointer System.Windows.Documents.ITextPointer.GetFormatNormalizedPosition(LogicalDirection direction)
    {
        return default(ITextPointer);
    }
    ITextPointer System.Windows.Documents.ITextPointer.GetFrozenPointer(LogicalDirection logicalDirection)
    {
        return default(ITextPointer);
    }
    ITextPointer System.Windows.Documents.ITextPointer.GetInsertionPosition(LogicalDirection direction)
    {
        return default(ITextPointer);
    }
    System.Windows.LocalValueEnumerator System.Windows.Documents.ITextPointer.GetLocalValueEnumerator()
    {
        return default(System.Windows.LocalValueEnumerator);
    }
    ITextPointer System.Windows.Documents.ITextPointer.GetNextContextPosition(LogicalDirection direction)
    {
        return default(ITextPointer);
    }
    ITextPointer System.Windows.Documents.ITextPointer.GetNextInsertionPosition(LogicalDirection direction)
    {
        return default(ITextPointer);
    }
    int System.Windows.Documents.ITextPointer.GetOffsetToPosition(ITextPointer position)
    {
        return default(int);
    }
    TextPointerContext System.Windows.Documents.ITextPointer.GetPointerContext(LogicalDirection direction)
    {
        return default(TextPointerContext);
    }
    int System.Windows.Documents.ITextPointer.GetTextInRun(LogicalDirection direction, char[] textBuffer, int startIndex, int count)
    {
        return default(int);
    }
    string System.Windows.Documents.ITextPointer.GetTextInRun(LogicalDirection direction)
    {
        return default(string);
    }
    int System.Windows.Documents.ITextPointer.GetTextRunLength(LogicalDirection direction)
    {
        return default(int);
    }
    Object System.Windows.Documents.ITextPointer.GetValue(System.Windows.DependencyProperty formattingProperty)
    {
        return default(Object);
    }
    bool System.Windows.Documents.ITextPointer.HasEqualScope(ITextPointer position)
    {
        return default(bool);
    }
    void System.Windows.Documents.ITextPointer.InsertTextInRun(string textData)
    {
    }
    int System.Windows.Documents.ITextPointer.MoveByOffset(int offset)
    {
        return default(int);
    }
    bool System.Windows.Documents.ITextPointer.MoveToInsertionPosition(LogicalDirection direction)
    {
        return default(bool);
    }
    int System.Windows.Documents.ITextPointer.MoveToLineBoundary(int count)
    {
        return default(int);
    }
    bool System.Windows.Documents.ITextPointer.MoveToNextContextPosition(LogicalDirection direction)
    {
        return default(bool);
    }
    bool System.Windows.Documents.ITextPointer.MoveToNextInsertionPosition(LogicalDirection direction)
    {
        return default(bool);
    }
    void System.Windows.Documents.ITextPointer.MoveToPosition(ITextPointer position)
    {
    }
    Object System.Windows.Documents.ITextPointer.ReadLocalValue(System.Windows.DependencyProperty formattingProperty)
    {
        return default(Object);
    }
    void System.Windows.Documents.ITextPointer.SetLogicalDirection(LogicalDirection direction)
    {
    }
    bool System.Windows.Documents.ITextPointer.ValidateLayout()
    {
        return default(bool);
    }
    internal TextPointer()
    {
    }
    #endregion
    // Property stubs: getters only, all returning default values.
    #region Properties and indexers
    public System.Windows.Documents.TextPointer DocumentEnd
    {
        get
        {
            return default(System.Windows.Documents.TextPointer);
        }
    }
    public System.Windows.Documents.TextPointer DocumentStart
    {
        get
        {
            return default(System.Windows.Documents.TextPointer);
        }
    }
    public bool HasValidLayout
    {
        get
        {
            return default(bool);
        }
    }
    public bool IsAtInsertionPosition
    {
        get
        {
            return default(bool);
        }
    }
    public bool IsAtLineStartPosition
    {
        get
        {
            return default(bool);
        }
    }
    public LogicalDirection LogicalDirection
    {
        get
        {
            return default(LogicalDirection);
        }
    }
    public Paragraph Paragraph
    {
        get
        {
            return default(Paragraph);
        }
    }
    public System.Windows.DependencyObject Parent
    {
        get
        {
            return default(System.Windows.DependencyObject);
        }
    }
    int System.Windows.Documents.ITextPointer.CharOffset
    {
        get
        {
            return default(int);
        }
    }
    bool System.Windows.Documents.ITextPointer.HasValidLayout
    {
        get
        {
            return default(bool);
        }
    }
    bool System.Windows.Documents.ITextPointer.IsAtCaretUnitBoundary
    {
        get
        {
            return default(bool);
        }
    }
    bool System.Windows.Documents.ITextPointer.IsAtInsertionPosition
    {
        get
        {
            return default(bool);
        }
    }
    bool System.Windows.Documents.ITextPointer.IsFrozen
    {
        get
        {
            return default(bool);
        }
    }
    System.Windows.Documents.LogicalDirection System.Windows.Documents.ITextPointer.LogicalDirection
    {
        get
        {
            return default(System.Windows.Documents.LogicalDirection);
        }
    }
    int System.Windows.Documents.ITextPointer.Offset
    {
        get
        {
            return default(int);
        }
    }
    Type System.Windows.Documents.ITextPointer.ParentType
    {
        get
        {
            return default(Type);
        }
    }
    ITextContainer System.Windows.Documents.ITextPointer.TextContainer
    {
        get
        {
            return default(ITextContainer);
        }
    }
    #endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Microsoft.Protocols.TestTools.StackSdk.RemoteDesktop.Rdpegfx
{
/// <summary>
/// This class describes the huffman symbols or "tokens".
/// </summary>
internal class Token
{
    /// <summary>
    /// Creates a token from its prefix data and value-encoding parameters.
    /// </summary>
    /// <param name="len">Bit length of the prefix (the BitPrefix field).</param>
    /// <param name="code">Decimal value corresponding to the prefix bits.</param>
    /// <param name="val">Number of additional bits following the prefix.</param>
    /// <param name="type">Token type; currently only values 0 and 1 occur.</param>
    /// <param name="vbase">Base value added when decoding the additional bits.</param>
    /// <param name="prefix">Prefix bit string; uniquely distinguishes the token.</param>
    public Token(int len, int code, int val, int type, int vbase, string prefix)
    {
        Len = len;
        Code = code;
        Type = type;
        BitPrefix = prefix;
        ValueBits = val;
        Vbase = vbase;
    }
    /// <summary>The length of the bit prefix, corresponding to the BitPrefix field.</summary>
    public int Len { get; set; }
    /// <summary>The decimal value corresponding to the BitPrefix.</summary>
    public int Code { get; set; }
    /// <summary>The type of the token; currently only "0" and "1" are used.</summary>
    public int Type { get; set; }
    /// <summary>The prefix part of the token; uniquely distinguishes a token.</summary>
    public string BitPrefix { get; set; }
    /// <summary>The additional bits needed to get the indicated value of the token.</summary>
    public int ValueBits { get; set; }
    /// <summary>The base value for getting the indicated value of the token.</summary>
    public int Vbase { get; set; }
}
/// <summary>
/// This class stores token tables and other configuration data
/// used in the compression procedure.
/// </summary>
internal class Config
{
    /// <summary>
    /// The table of all the huffman symbols, sorted by prefix length.
    /// For match-distance tokens (type == 1) the decoded distance is
    /// Vbase plus the value of the ValueBits extra bits following the prefix.
    /// Each prefix string is the binary form of its Code field at Len bits.
    /// </summary>
    public static Token[] tokenTableDis
        = {
            new Token(1, 0, 8, 0, 0, "0"), // 1 literal
            new Token(5, 17, 5, 1, 0, "10001"), // 2 1-31
            new Token(5, 18, 7, 1, 32, "10010"), // 3 32-159
            new Token(5, 19, 9, 1, 160, "10011"), // 4 160-671
            new Token(5, 20, 10, 1, 672, "10100"), // 5 672-1659
            new Token(5, 21, 12, 1, 1696, "10101"), // 6 1696-5791
            new Token(5, 24, 0, 0, 0x00, "11000"), // 7
            new Token(5, 25, 0, 0, 0x01, "11001"), // 8
            new Token(6, 44, 14, 1, 5792, "101100"), // 9 5792-22175
            new Token(6, 45, 15, 1, 22176, "101101"), // 10 22176-54943
            new Token(6, 52, 0, 0, 0x02, "110100"), // 11
            new Token(6, 53, 0, 0, 0x03, "110101"), // 12
            new Token(6, 54, 0, 0, 0xFF, "110110"), // 13
            new Token(7, 92, 18, 1, 54944, "1011100"), // 14 54944-317087
            new Token(7, 93, 20, 1, 317088, "1011101"), // 15 317088-1365663
            new Token(7, 110, 0, 0, 0x04, "1101110"), // 16
            new Token(7, 111, 0, 0, 0x05, "1101111"), // 17
            new Token(7, 112, 0, 0, 0x06, "1110000"), // 18
            new Token(7, 113, 0, 0, 0x07, "1110001"), // 19
            new Token(7, 114, 0, 0, 0x08, "1110010"), // 20
            new Token(7, 115, 0, 0, 0x09, "1110011"), // 21
            new Token(7, 116, 0, 0, 0x0A, "1110100"), // 22
            new Token(7, 117, 0, 0, 0x0B, "1110101"), // 23
            new Token(7, 118, 0, 0, 0x3A, "1110110"), // 24
            new Token(7, 119, 0, 0, 0x3B, "1110111"), // 25
            new Token(7, 120, 0, 0, 0x3C, "1111000"), // 26
            new Token(7, 121, 0, 0, 0x3D, "1111001"), // 27
            new Token(7, 122, 0, 0, 0x3E, "1111010"), // 28
            new Token(7, 123, 0, 0, 0x3F, "1111011"), // 29
            new Token(7, 124, 0, 0, 0x40, "1111100"), // 30
            new Token(7, 125, 0, 0, 0x80, "1111101"), // 31
            new Token(8, 188, 20, 1, 1365664, "10111100"), // 32 1365664-2414239
            new Token(8, 189, 21, 1, 2414240, "10111101"), // 33 2414240-4511391
            new Token(8, 252, 0, 0, 0x0C, "11111100"), // 34
            new Token(8, 253, 0, 0, 0x38, "11111101"), // 35
            new Token(8, 254, 0, 0, 0x39, "11111110"), // 36
            new Token(8, 255, 0, 0, 0x66, "11111111"), // 37
            new Token(9, 380, 22, 1, 4511392, "101111100"), // 38 4511392-8705695
            new Token(9, 381, 23, 1, 8705696, "101111101"), // 39 8705696-17094303
            // Bug fix: the prefix must be the 9-bit binary form of its own
            // code (382 == 0b101111110); it previously read "101111111",
            // which contradicted the Code/Len fields of this entry.
            new Token(9, 382, 24, 1, 17094304, "101111110"), // 40 17094304-33871519
            new Token(0, 0, 0, 0, 0, "") // 41
        };
    /// <summary>
    /// The table of length tokens; a length token follows a match token.
    /// Entry for exponent e covers lengths [2^e, 2^(e+1)) and encodes
    /// (length - Vbase) in ValueBits extra bits after the all-ones-plus-zero prefix.
    /// </summary>
    private static Token[] tokenTableLen
        = {
            new Token(0, 0, 0, 0, 0, "0"),
            new Token(0, 0, 2, 0, 4, "10"),
            new Token(0, 0, 3, 0, 8,"110"),
            new Token(0, 0, 4, 0, 16,"1110"),
            new Token(0, 0, 5, 0, 32,"11110"),
            new Token(0, 0, 6, 0, 64,"111110"),
            new Token(0, 0, 7, 0, 128,"1111110"),
            new Token(0, 0, 8, 0, 256,"11111110"),
            new Token(0, 0, 9, 0, 512,"111111110"),
            new Token(0, 0, 10, 0, 1024,"1111111110"),
            new Token(0, 0, 11, 0, 2048,"11111111110"),
            new Token(0, 0, 12, 0, 4096,"111111111110"),
            new Token(0, 0, 13, 0, 8192,"1111111111110"),
            new Token(0, 0, 14, 0, 16384,"11111111111110"),
            new Token(0, 0, 15, 0, 32768,"111111111111110")
        };
    /// <summary>
    /// A static value indicates the length of the history buffer
    /// used in the compression procedure.
    /// </summary>
    private static int hitoryLen = 2500000;
    /// <summary>
    /// A static value indicates the maximum length of the repeat length.
    /// </summary>
    private static int maxLen = 10000;
    public static int HitoryLen
    {
        get { return hitoryLen; }
        set { hitoryLen = value; }
    }
    public static int MaxLen
    {
        get { return Config.maxLen; }
        set { Config.maxLen = value; }
    }
    /// <summary>
    /// Get a match token according to the match distance.
    /// </summary>
    /// <param name="distance">The distance here means the needed bytes backward.</param>
    /// <returns>The distance token whose range contains <paramref name="distance"/>.</returns>
    public static Token getTokenDis(int distance)
    {
        if (distance < 32)
            return tokenTableDis[1];
        else if (distance < 160)
            return tokenTableDis[2];
        else if (distance < 672)
            return tokenTableDis[3];
        else if (distance < 1696)
            return tokenTableDis[4];
        else if (distance < 5792)
            return tokenTableDis[5];
        else if (distance < 22176)
            return tokenTableDis[8];
        else if (distance < 54944)
            return tokenTableDis[9];
        else
            // NOTE(review): this fallback token only encodes distances up to
            // 317087, while HitoryLen allows matches up to 2500000 bytes back.
            // Distances >= 317088 would need table entries 15/32/33/38-40 —
            // confirm against the compressor's actual distance bound.
            return tokenTableDis[13];
    }
    /// <summary>
    /// Get a length token according to the match length.
    /// </summary>
    /// <param name="length">The length here means the number of bytes to be copied.</param>
    /// <returns>The length token for the power-of-two bucket containing <paramref name="length"/>.</returns>
    public static Token getTokenLen(int length)
    {
        // exp becomes the smallest exponent with length < 2^exp,
        // so bucket [2^(exp-1), 2^exp) maps to tokenTableLen[exp - 2].
        // NOTE(review): assumes length >= 2; length <= 1 would index at -1.
        int exp = 1;
        while (length >= Math.Pow(2, exp))
            ++exp;
        return tokenTableLen[exp - 2];
    }
}
/// <summary>
/// This class offers the completion for the compression algorithm
/// </summary>
public class CompressFactory
{
#region Variables
/// <summary>
/// Raw data to be compressed.
/// </summary>
private byte[] rawData;
/// <summary>
/// Compressed data to be decompressed.
/// </summary>
private byte[] compressData;
/// <summary>
/// The length of the history buffer.
/// </summary>
private int MaxP;
/// <summary>
/// The maximum value of the repeat length.
/// </summary>
private int MaxL;
/// <summary>
/// The bit buffer used in the compression procedure.
/// </summary>
private List<bool> bitBuffer;
/// <summary>
/// An byte buffer to store the temp result from the bit buffer.
/// </summary>
private ArrayList compressDataList;
/// <summary>
/// Histoty byte buffer to store the MaxP length
/// decompressed result in the decompress procedure.
/// </summary>
private byte[] historyBuffer;
private UInt32 historyIndex;
/// <summary>
/// The result byte buffer used in the decompress procedure.
/// </summary>
private List<byte> outputBuffer;
/// <summary>
/// The result byte array of the decompress algorithm.
/// </summary>
private byte[] recoverData;
/// <summary>
/// The remaining length of bits to be decompress from.s
/// </summary>
UInt32 bitsRemainingLen = 0;
/// <summary>
/// The bits currently useable value in decimal format.
/// </summary>
int bitsCurrent = 0;
/// <summary>
/// The length bits currently useable.
/// </summary>
UInt32 bitsCurrentLen = 0;
UInt32 currentIndex = 0;
#endregion
/// <summary>
/// Constructor, with some initialization: pulls the history-buffer and
/// maximum-match-length limits from <see cref="Config"/> and allocates
/// the working buffers.
/// </summary>
public CompressFactory()
{
    MaxP = Config.HitoryLen;
    MaxL = Config.MaxLen;
    bitBuffer = new List<bool>();
    compressDataList = new ArrayList();
    historyBuffer = new byte[MaxP];
}
#region Private Methods
/// <summary>
/// Convert a BitArray of length 8 to a byte (bit 0 is the least significant).
/// </summary>
/// <param name="bits">The bit array; must contain exactly 8 bits.</param>
/// <returns>The byte whose bits match <paramref name="bits"/>.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="bits"/> does not hold exactly 8 bits.</exception>
private byte ConvertToByte(BitArray bits)
{
    if (bits.Count != 8)
    {
        // Bug fix: the parameter name was previously passed as the message
        // argument, producing the unhelpful exception text "bits".
        throw new ArgumentException("The bit array must contain exactly 8 bits.", "bits");
    }
    byte[] bytes = new byte[1];
    bits.CopyTo(bytes, 0);
    return bytes[0];
}
/// <summary>
/// Reverse the bit sequence in a byte (bit 7 becomes bit 0 and so on).
/// </summary>
private byte littleToBig(byte b)
{
    BitArray source = new BitArray(new byte[] { b });
    BitArray reversed = new BitArray(8);
    for (int i = 0; i < 8; ++i)
    {
        reversed[i] = source[7 - i];
    }
    return ConvertToByte(reversed);
}
/// <summary>
/// Add a byte (a literal) to the result bit buffer:
/// a leading 0 bit flags the literal, then the byte follows MSB-first.
/// </summary>
private void addByte(byte b)
{
    bitBuffer.Add(false);
    // convertByteToBits already yields the byte's bits MSB-first.
    bitBuffer.AddRange(convertByteToBits(b));
}
/// <summary>
/// Get the final byte array from the bit buffer: packs the bits into bytes,
/// appends one byte recording the padding in the last data byte, and finally
/// reverses the bit order of every output byte (littleToBig).
/// </summary>
private void getCompressedResult()
{
    int index = 0;
    BitArray bits;
    // Emit all full bytes except possibly the last (loop stops while at
    // least one bit, and at most eight, remain).
    while (index + 8 < bitBuffer.Count)
    {
        bits = new BitArray(8);
        for (int i = 0; i < 8; ++i)
        {
            bits[i] = bitBuffer[index + i];
        }
        compressDataList.Add(ConvertToByte(bits));
        index += 8;
    }
    // Number of unused (padding) bits in the final data byte, computed
    // before the trailer bits are appended below.
    byte finalByte = (byte)(index + 8 - bitBuffer.Count);
    // Emit the remaining 1..8 bits, zero-padded to a full byte.
    bits = new BitArray(8);
    for (int i = 0; index + i < bitBuffer.Count; ++i)
        bits[i] = bitBuffer[index + i];
    compressDataList.Add(ConvertToByte(bits));
    index = bitBuffer.Count;
    // Append the padding count itself as a trailing byte (MSB-first bits).
    bitBuffer.AddRange(convertByteToBits(finalByte));
    for (int i = 0; index + i < bitBuffer.Count; ++i)
        bits[i] = bitBuffer[index + i];
    compressDataList.Add(ConvertToByte(bits));
    // Bit-reverse every byte to put the stream in output bit order.
    compressData = new byte[compressDataList.Count];
    for (int i = 0; i < compressDataList.Count; ++i)
    {
        compressData[i] = (byte)compressDataList[i];
        compressData[i] = littleToBig(compressData[i]);
    }
}
/// <summary>
/// Add a match to the result bit buffer: the distance token (prefix plus
/// ValueBits extra bits of distance - Vbase) followed by the length token
/// (prefix plus extra bits of length minus its power-of-two bucket base).
/// </summary>
/// <param name="distance">The number of bytes backward to copy from.</param>
/// <param name="length">The total number of bytes to be copied.</param>
private void addCompressBytes(int distance, int length)
{
    Token token = Config.getTokenDis(distance);
    bitBuffer.AddRange(convertStringToBits(token.BitPrefix));
    bitBuffer.AddRange(convertIntToBits(distance - token.Vbase, token.ValueBits));
    // exp is the smallest exponent with length < 2^exp; 2^(exp-1) is the
    // bucket base — the same bucket getTokenLen selects internally.
    // NOTE(review): assumes length >= 2, matching getTokenLen's assumption.
    int exp = 1;
    while (length >= Math.Pow(2, exp))
        ++exp;
    token = Config.getTokenLen(length);
    bitBuffer.AddRange(convertStringToBits(token.BitPrefix));
    bitBuffer.AddRange(convertIntToBits(length - (int)Math.Pow(2, exp - 1), token.ValueBits));
}
/// <summary>
/// Expand a byte into a list of 8 booleans, most significant bit first.
/// </summary>
/// <param name="b">The byte to expand.</param>
private List<bool> convertByteToBits(byte b)
{
    var result = new List<bool>(8);
    // Walk the bit positions from MSB (7) down to LSB (0).
    for (int shift = 7; shift >= 0; --shift)
        result.Add(((b >> shift) & 1) != 0);
    return result;
}
/// <summary>
/// Expand the low <paramref name="len"/> bits of an integer into a bit list,
/// most significant bit first.
/// </summary>
/// <param name="val">The value of the integer.</param>
/// <param name="len">The length of the returned bit list.</param>
private List<bool> convertIntToBits(int val, int len)
{
    // Collect bits LSB-first while shifting the value down...
    var reversed = new List<bool>(len);
    for (int i = 0; i < len; ++i)
    {
        reversed.Add((val & 1) == 1);
        val >>= 1;
    }
    // ...then flip to MSB-first order for the stream.
    reversed.Reverse();
    return reversed;
}
/// <summary>
/// Convert a string of bit characters to a bit list.
/// </summary>
/// <param name="s">The input string; '0' maps to false, any other
/// character maps to true (callers pass only "0"/"1").</param>
private List<bool> convertStringToBits(string s)
{
    var result = new List<bool>(s.Length);
    foreach (char c in s)
        result.Add(c != '0');
    return result;
}
/// <summary>
/// The main procedure to do compress: a greedy LZ77-style scan over
/// <c>rawData</c> that emits literal bytes for unmatched positions and
/// (distance, length) tokens for matches found within the last
/// <c>MaxP</c> bytes of history.
/// </summary>
private void doCompress()
{
    int pos = 0;
    int totalLen = rawData.Length;
    while (pos < totalLen)
    {
        // Current match length.
        int l = 0;
        // Max distance to search backward (clamped at the buffer start).
        int sbl = Math.Min(MaxP, pos);
        // Candidate match start offsets (i means distance i + 1).
        List<int> p = new List<int>();
        // Seed with every position whose byte matches the current one.
        for(int i = 0; i < sbl; ++i)
        {
            if (rawData[pos - (i + 1)] == rawData[pos])
                p.Add(i);
        }
        if (p.Count == 0)
        {
            // No match at all: emit a literal.
            addByte(rawData[pos]);
            // Step forward.
            ++pos;
        }
        else
        {
            // Extend all candidates one byte at a time, keeping only those
            // that still match, until none survive or MaxL is reached.
            l = 1;
            while (pos + l < totalLen && l < MaxL - 1)
            {
                // tempp stores the candidates that extend to length l + 1.
                List<int> tempp = new List<int>();
                foreach (int i in p)
                {
                    // The modulo wraps within the candidate's period (i + 1)
                    // so overlapping matches (distance < length) compare
                    // against the bytes the copy would actually produce.
                    if (rawData[pos - (i+1) + l % (i+1)] == rawData[pos + l])
                        tempp.Add(i);
                }
                if (tempp.Count == 0)
                    break; // No candidate extends to l + 1.
                else
                {
                    // Keep only the candidates of length l + 1.
                    p.Clear();
                    p.AddRange(tempp);
                    // Go on.
                    ++l;
                }
            }
            if (l < 3)
            {
                // Matches shorter than 3 bytes cost more to encode than
                // literals, so emit the bytes directly.
                for (int j = 0; j < l; ++j)
                {
                    addByte(rawData[pos]);
                    ++pos;
                }
            }
            else
            {
                // Encode the closest (smallest-distance) surviving match.
                addCompressBytes(p[0] + 1, l);
                pos += l;
            }
        }
    }
    // End sequence.
}
/// <summary>
/// The main procedure to do decompress: reads prefix-coded tokens from
/// <c>compressData</c> until all payload bits are consumed. The total
/// payload bit count is the stream length minus the padding count stored
/// in the stream's final byte.
/// </summary>
private void doDecompress()
{
    outputBuffer = new List<byte>();
    int decompressLen = compressData.Length;
    // The last byte records how many padding bits to ignore.
    bitsRemainingLen = Convert.ToUInt32(8 * (decompressLen - 1) - compressData[decompressLen - 1]);
    bitsCurrent = 0;
    bitsCurrentLen = 0;
    currentIndex = 0;
    while (bitsRemainingLen > 0)
    {
        int haveBits = 0;
        int inPrefix = 0;
        byte c;
        UInt32 count;
        UInt32 distance;
        // Scan the token table, considering more bits as needed,
        // until the resulting token is found.
        for (int opIndex = 0; opIndex < Config.tokenTableDis.Length; ++opIndex)
        {
            // Pull in more prefix bits if this entry's prefix is longer.
            while (haveBits < Config.tokenTableDis[opIndex].BitPrefix.Length)
            {
                inPrefix = (inPrefix << 1) + Convert.ToInt32(getBits(1));
                haveBits++;
            }
            // A matching token is found.
            if (inPrefix == Config.tokenTableDis[opIndex].Code)
            {
                // A literal token: a single byte to output.
                if (Config.tokenTableDis[opIndex].Type == 0)
                {
                    c = (byte)(Config.tokenTableDis[opIndex].Vbase + getBits(Convert.ToUInt32(Config.tokenTableDis[opIndex].ValueBits)));
                    outputLiteral(c);
                }
                else // A match token.
                {
                    distance = Convert.ToUInt32(Config.tokenTableDis[opIndex].Vbase) + getBits(Convert.ToUInt32(Config.tokenTableDis[opIndex].ValueBits));
                    // The distance back into the history from which to copy.
                    if (distance != 0)
                    {
                        // Length code: a single 0 bit means 3; otherwise each
                        // additional 1 bit doubles the base count, then `extra`
                        // literal bits are added on top.
                        if (getBits(1) == 0)
                        {
                            count = 3;
                        }
                        else
                        {
                            count = 4;
                            int extra = 2;
                            while (getBits(1) == 1)
                            {
                                count *= 2;
                                extra++;
                            }
                            count += getBits(Convert.ToUInt32(extra));
                        }
                        // From the distance backward, copy count bytes to the output.
                        outputMatch(distance, count);
                    }
                    else // match distance == 0 is a special case, an unencoded sequence is found.
                    {
                        count = getBits(15);
                        // Discard remaining buffered bits so the raw bytes
                        // start on a byte boundary.
                        bitsRemainingLen -= bitsCurrentLen;
                        bitsCurrentLen = 0;
                        bitsCurrent = 0;
                        // Copy count unencoded bytes to the output.
                        outputUncoded(count);
                    }
                }
                break; // Token handled; resume the outer bit loop.
            }
        }
    }
}
/// <summary>
/// Emit a single decoded byte: record it in the circular history buffer
/// (wrapping the write cursor at the end) and append it to the output.
/// </summary>
/// <param name="c">The decoded byte.</param>
private void outputLiteral(byte c)
{
    historyBuffer[historyIndex] = c;
    ++historyIndex;
    if (historyIndex == historyBuffer.Length)
    {
        historyIndex = 0;
    }
    outputBuffer.Add(c);
}
/// <summary>
/// Add bytes to the decompression result according to the match distance
/// and length. Bytes are copied one at a time so that overlapping matches
/// (distance &lt; count) correctly re-read bytes written earlier in the
/// same copy.
/// </summary>
/// <param name="distance">The backward length of bytes to copy.</param>
/// <param name="count">The total number of bytes to be copied.</param>
private void outputMatch(UInt32 distance, UInt32 count)
{
    byte c;
    // Read cursor: `distance` bytes behind the write cursor, wrapped into
    // the circular buffer (adding Length first keeps the value non-negative
    // in unsigned arithmetic).
    UInt32 preIndex = historyIndex + Convert.ToUInt32(historyBuffer.Length) - distance;
    preIndex = preIndex % Convert.ToUInt32(historyBuffer.Length);
    while (count > 0)
    {
        c = historyBuffer[preIndex];
        if (++preIndex == historyBuffer.Length)
        {
            preIndex = 0;
        }
        historyBuffer[historyIndex] = c;
        if (++historyIndex == historyBuffer.Length)
        {
            historyIndex = 0;
        }
        outputBuffer.Add(c);
        count--;
    }
}
/// <summary>
/// Add bytes to the decompression result as a result of the uncoded token:
/// copy <paramref name="count"/> bytes verbatim from the input stream to
/// the output, routing each through the history buffer.
/// </summary>
/// <param name="count">The number of bytes to copy directly from
/// the stream input to the output.</param>
private void outputUncoded(UInt32 count)
{
    for (UInt32 i = 0; i < count; ++i)
    {
        byte c = compressData[currentIndex++];
        // Raw bytes still consume 8 bits each of the payload budget.
        bitsRemainingLen -= 8;
        historyBuffer[historyIndex] = c;
        if (++historyIndex == historyBuffer.Length)
            historyIndex = 0;
        outputBuffer.Add(c);
    }
}
/// <summary>
/// Materialize the decompression output buffer into the
/// <c>recoverData</c> array and return it.
/// </summary>
private byte[] getRecoveryResult()
{
    recoverData = new byte[outputBuffer.Count];
    int pos = 0;
    foreach (var item in outputBuffer)
    {
        recoverData[pos++] = (byte)item;
    }
    return recoverData;
}
/// <summary>
/// Return the value of the next "bitCount" bits as unsigned.
/// Bytes are pulled from <c>compressData</c> into the accumulator
/// (<c>bitsCurrent</c>/<c>bitsCurrentLen</c>) on demand;
/// <c>bitsRemainingLen</c> tracks how many payload bits remain overall.
/// </summary>
UInt32 getBits(UInt32 bitCount)
{
    // Refill the accumulator until it holds at least bitCount bits.
    while (bitsCurrentLen < bitCount)
    {
        bitsCurrent <<= 8;
        bitsCurrent += compressData[currentIndex++];
        bitsCurrentLen += 8;
    }
    bitsRemainingLen -= bitCount;
    bitsCurrentLen -= bitCount;
    // Take the top bitCount bits of the accumulator...
    UInt32 result = Convert.ToUInt32(bitsCurrent >> Convert.ToInt32(bitsCurrentLen));
    // ...then strip them off, leaving only the unconsumed low bits.
    bitsCurrent -= Convert.ToInt32(result) << Convert.ToInt32(bitsCurrentLen);
    return result;
}
#endregion
#region Public Methods
/// <summary>
/// The public interface of the compression algorithm.
/// </summary>
/// <param name="rawdata">The source byte array to compress.</param>
/// <returns>The compressed stream, including the trailing
/// padding-count byte appended by <c>getCompressedResult</c>.</returns>
public byte[] Compress(byte[] rawdata)
{
    rawData = rawdata;
    doCompress();          // tokenize input into the bit buffer
    getCompressedResult(); // pack the bit buffer into compressData
    return compressData;
}
/// <summary>
/// The public interface of the decompression algorithm.
/// </summary>
/// <param name="data">The source byte array to decompress.</param>
/// <param name="header">The header of the bulk data.</param>
public byte[] Decompress(byte[] data, byte header)
{
    bool compressed =
        header == (RdpSegmentedPdu.PACKET_COMPR_TYPE_RDP8 | RdpSegmentedPdu.PACKET_COMPRESSED);
    if (!compressed)
    {
        // Uncompressed payload: it still has to flow through the history
        // buffer so later compressed segments can reference it.
        if (data != null)
        {
            foreach (byte b in data)
            {
                historyBuffer[historyIndex] = b;
                if (++historyIndex == historyBuffer.Length)
                    historyIndex = 0;
            }
        }
        return data;
    }
    compressData = data;
    doDecompress();
    getRecoveryResult();
    return recoverData;
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Text.Utf8;
namespace System.Text.JsonLab.Tests
{
/// <summary>
/// A minimal JSON DOM used by the tests. A node wraps exactly one of an
/// <see cref="Object"/>, an <see cref="Array"/>, or a primitive
/// <see cref="Value"/>. Note that Object, Array and Value here are the
/// local test types declared below, not the System.* types of the same
/// names.
/// </summary>
public class TestDom
{
    // Exactly one of these is expected to be non-null for a given node.
    public Object Object { get; set; }
    public Array Array { get; set; }
    public Value Value { get; set; }
    /// <summary>
    /// Looks up a property by name on the wrapped object and returns it
    /// wrapped in a fresh <see cref="TestDom"/> node.
    /// </summary>
    /// <exception cref="NullReferenceException">This node does not wrap an
    /// object, or the object has no pairs.</exception>
    /// <exception cref="KeyNotFoundException">No pair with the given name
    /// exists.</exception>
    public TestDom this[string index]
    {
        get
        {
            if (Object == null) throw new NullReferenceException();
            if (Object.Pairs == null) throw new NullReferenceException();
            var json = new TestDom();
            // Linear scan; the first pair with a matching name wins.
            foreach (var pair in Object.Pairs)
            {
                if (pair.Name == index)
                {
                    // Route the value into the matching slot of the new node.
                    switch (pair.Value.Type)
                    {
                        case Value.ValueType.Object:
                            json.Object = pair.Value.ObjectValue;
                            break;
                        case Value.ValueType.Array:
                            json.Array = pair.Value.ArrayValue;
                            break;
                        case Value.ValueType.String:
                        case Value.ValueType.Number:
                        case Value.ValueType.False:
                        case Value.ValueType.True:
                        case Value.ValueType.Null:
                            json.Value = pair.Value;
                            break;
                        default:
                            break;
                    }
                    return json;
                }
            }
            throw new KeyNotFoundException();
        }
    }
    /// <summary>
    /// Returns the element at the given position of the wrapped array,
    /// wrapped in a fresh <see cref="TestDom"/> node.
    /// </summary>
    /// <exception cref="NullReferenceException">This node does not wrap an
    /// array, or the array has no values.</exception>
    /// <exception cref="IndexOutOfRangeException">The index is out of
    /// bounds.</exception>
    public TestDom this[int index]
    {
        get
        {
            if (Array == null) throw new NullReferenceException();
            if (Array.Values == null) throw new NullReferenceException();
            List<Value> values = Array.Values;
            if (index < 0 || index >= values.Count) throw new IndexOutOfRangeException();
            Value value = values[index];
            var json = new TestDom();
            switch (value.Type)
            {
                case Value.ValueType.Object:
                    json.Object = value.ObjectValue;
                    break;
                case Value.ValueType.Array:
                    json.Array = value.ArrayValue;
                    break;
                case Value.ValueType.String:
                case Value.ValueType.Number:
                case Value.ValueType.False:
                case Value.ValueType.True:
                case Value.ValueType.Null:
                    json.Value = value;
                    break;
                default:
                    break;
            }
            return json;
        }
    }
    /// <summary>
    /// Casts a string or null node to a string ("null" for JSON null).
    /// </summary>
    /// <exception cref="NullReferenceException">The node wraps no primitive value.</exception>
    /// <exception cref="InvalidCastException">The node wraps a non-string, non-null value.</exception>
    public static explicit operator string (TestDom json)
    {
        if (json == null || json.Value == null) throw new NullReferenceException();
        if (json.Value.Type == Value.ValueType.String)
        {
            return json.Value.StringValue;
        }
        else if (json.Value.Type == Value.ValueType.Null)
        {
            return json.Value.NullValue.ToString();
        }
        else
        {
            throw new InvalidCastException();
        }
    }
    /// <summary>
    /// Casts a number node to a double.
    /// </summary>
    /// <exception cref="NullReferenceException">The node wraps no primitive value.</exception>
    /// <exception cref="InvalidCastException">The node wraps a non-number value.</exception>
    public static explicit operator double (TestDom json)
    {
        if (json == null || json.Value == null) throw new NullReferenceException();
        if (json.Value.Type == Value.ValueType.Number)
        {
            return json.Value.NumberValue;
        }
        else
        {
            throw new InvalidCastException();
        }
    }
    /// <summary>
    /// Casts a true/false node to a bool.
    /// </summary>
    /// <exception cref="NullReferenceException">The node wraps no primitive value.</exception>
    /// <exception cref="InvalidCastException">The node wraps a non-boolean value.</exception>
    public static explicit operator bool (TestDom json)
    {
        if (json == null || json.Value == null) throw new NullReferenceException();
        if (json.Value.Type == Value.ValueType.True)
        {
            return json.Value.TrueValue;
        }
        else if (json.Value.Type == Value.ValueType.False)
        {
            return json.Value.FalseValue;
        }
        else
        {
            throw new InvalidCastException();
        }
    }
    /// <summary>
    /// Recursively collects all values whose property name matches
    /// <paramref name="str"/>, starting from this node's object.
    /// </summary>
    public List<Value> GetValueFromPropertyName(string str)
    {
        return GetValueFromPropertyName(new Utf8Span(str), Object);
    }
    /// <summary>UTF-8 overload of <see cref="GetValueFromPropertyName(string)"/>.</summary>
    public List<Value> GetValueFromPropertyName(Utf8Span str)
    {
        return GetValueFromPropertyName(str, Object);
    }
    /// <summary>Searches the given object instead of this node's object.</summary>
    public List<Value> GetValueFromPropertyName(string str, Object obj)
    {
        return GetValueFromPropertyName(new Utf8Span(str), obj);
    }
    /// <summary>
    /// Core recursive search: descends into nested objects and into objects
    /// contained in arrays, accumulating every value whose pair name equals
    /// <paramref name="str"/>. Returns what was gathered so far when a null
    /// link is encountered (best-effort, never throws on nulls).
    /// </summary>
    public List<Value> GetValueFromPropertyName(Utf8Span str, Object obj)
    {
        var values = new List<Value>();
        if (obj == null || obj.Pairs == null) return values;
        foreach (var pair in obj.Pairs)
        {
            if (pair == null || pair.Value == null) return values;
            if (pair.Value.Type == Value.ValueType.Object)
            {
                values.AddRange(GetValueFromPropertyName(str, pair.Value.ObjectValue));
            }
            if (pair.Value.Type == Value.ValueType.Array)
            {
                if (pair.Value.ArrayValue == null || pair.Value.ArrayValue.Values == null) return values;
                foreach (var value in pair.Value.ArrayValue.Values)
                {
                    // Only object elements are descended into; primitive and
                    // array elements inside arrays are not searched.
                    if (value != null && value.Type == Value.ValueType.Object)
                    {
                        values.AddRange(GetValueFromPropertyName(str, value.ObjectValue));
                    }
                }
            }
            if (new Utf8Span(pair.Name) == str)
            {
                values.Add(pair.Value);
            }
        }
        return values;
    }
    /// <summary>
    /// Serializes the wrapped object or array back to compact JSON text;
    /// returns the empty string for a node wrapping neither.
    /// </summary>
    public override string ToString()
    {
        if (Object != null)
        {
            return OutputObject(Object);
        }
        if (Array != null)
        {
            return OutputArray(Array);
        }
        return "";
    }
    // Serializes an object as {"name":value,...}; empty string for null input.
    private string OutputObject(Object obj)
    {
        var strBuilder = new StringBuilder();
        if (obj == null || obj.Pairs == null) return "";
        strBuilder.Append("{");
        for (var i = 0; i < obj.Pairs.Count; i++)
        {
            strBuilder.Append(OutputPair(obj.Pairs[i]));
            if (i < obj.Pairs.Count - 1)
            {
                strBuilder.Append(",");
            }
        }
        strBuilder.Append("}");
        return strBuilder.ToString();
    }
    // Serializes one name/value pair; note the name is NOT escaped.
    private string OutputPair(Pair pair)
    {
        var str = "";
        if (pair == null) return str;
        str += "\"" + pair.Name + "\":";
        str += OutputValue(pair.Value);
        return str;
    }
    // Serializes an array as [value,...]; empty string for null input.
    private string OutputArray(Array array)
    {
        var strBuilder = new StringBuilder();
        if (array == null || array.Values == null) return "";
        strBuilder.Append("[");
        for (var i = 0; i < array.Values.Count; i++)
        {
            strBuilder.Append(OutputValue(array.Values[i]));
            if (i < array.Values.Count - 1)
            {
                strBuilder.Append(",");
            }
        }
        strBuilder.Append("]");
        return strBuilder.ToString();
    }
    // Serializes one value according to its runtime type. String contents
    // are not escaped; numbers use the current culture's ToString.
    private string OutputValue(Value value)
    {
        var str = "";
        if (value == null) return str;
        var type = value.Type;
        switch (type)
        {
            case Value.ValueType.String:
                str += "\"" + value.StringValue + "\"";
                break;
            case Value.ValueType.Number:
                str += value.NumberValue;
                break;
            case Value.ValueType.Object:
                str += OutputObject(value.ObjectValue);
                break;
            case Value.ValueType.Array:
                str += OutputArray(value.ArrayValue);
                break;
            case Value.ValueType.True:
                str += value.TrueValue.ToString().ToLower();
                break;
            case Value.ValueType.False:
                str += value.FalseValue.ToString().ToLower();
                break;
            case Value.ValueType.Null:
                str += value.NullValue.ToString().ToLower();
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
        return str;
    }
}
/// <summary>A JSON object: an ordered list of name/value pairs.</summary>
public class Object
{
    public List<Pair> Pairs { get; set; }
}
/// <summary>A single JSON object member: a property name and its value.</summary>
public class Pair
{
    public string Name { get; set; }
    public Value Value { get; set; }
}
/// <summary>A JSON array: an ordered list of values.</summary>
public class Array
{
    public List<Value> Values { get; set; }
}
/// <summary>
/// A tagged union over the JSON value kinds. <see cref="Type"/> selects
/// which typed accessor is valid; accessing a mismatched getter throws
/// <see cref="TypeAccessException"/>, while a mismatched setter silently
/// ignores the write (by design of this test helper).
/// </summary>
public class Value
{
    // Discriminator: selects which of the typed accessors below is live.
    public ValueType Type { get; set; }
    /// <summary>The seven JSON value kinds.</summary>
    public enum ValueType
    {
        String,
        Number,
        Object,
        Array,
        True,
        False,
        Null
    }
    /// <summary>
    /// Returns the underlying value as an untyped object
    /// (boxed bool for True/False, null for Null).
    /// </summary>
    public object Raw()
    {
        switch (Type)
        {
            case ValueType.String:
                return _string;
            case ValueType.Number:
                return _number;
            case ValueType.Object:
                return _object;
            case ValueType.Array:
                return _array;
            case ValueType.True:
                return True;
            case ValueType.False:
                return False;
            case ValueType.Null:
                return null;
            default:
                throw new ArgumentOutOfRangeException();
        }
    }
    /// <summary>
    /// The string payload. Get throws unless <see cref="Type"/> is String;
    /// set is silently ignored unless Type is String.
    /// </summary>
    public string StringValue
    {
        get
        {
            if (Type == ValueType.String)
            {
                return _string;
            }
            throw new TypeAccessException("Value is not of type 'string'.");
        }
        set
        {
            if (Type == ValueType.String)
            {
                _string = value;
            }
        }
    }
    /// <summary>
    /// The numeric payload. Get throws unless <see cref="Type"/> is Number;
    /// set is silently ignored unless Type is Number.
    /// </summary>
    public double NumberValue
    {
        get
        {
            if (Type == ValueType.Number)
            {
                return _number;
            }
            throw new TypeAccessException("Value is not of type 'number'.");
        }
        set
        {
            if (Type == ValueType.Number)
            {
                _number = value;
            }
        }
    }
    /// <summary>
    /// The object payload. Get throws unless <see cref="Type"/> is Object;
    /// set is silently ignored unless Type is Object.
    /// </summary>
    public Object ObjectValue
    {
        get
        {
            if (Type == ValueType.Object)
            {
                return _object;
            }
            throw new TypeAccessException("Value is not of type 'object'.");
        }
        set
        {
            if (Type == ValueType.Object)
            {
                _object = value;
            }
        }
    }
    /// <summary>
    /// The array payload. Get throws unless <see cref="Type"/> is Array;
    /// set is silently ignored unless Type is Array.
    /// </summary>
    public Array ArrayValue
    {
        get
        {
            if (Type == ValueType.Array)
            {
                return _array;
            }
            throw new TypeAccessException("Value is not of type 'array'.");
        }
        set
        {
            if (Type == ValueType.Array)
            {
                _array = value;
            }
        }
    }
    /// <summary>Always true; only readable when <see cref="Type"/> is True.</summary>
    public bool TrueValue
    {
        get
        {
            if (Type == ValueType.True)
            {
                return True;
            }
            throw new TypeAccessException("Value is not of type 'true'.");
        }
    }
    /// <summary>Always false; only readable when <see cref="Type"/> is False.</summary>
    public bool FalseValue
    {
        get
        {
            if (Type == ValueType.False)
            {
                return False;
            }
            throw new TypeAccessException("Value is not of type 'false'.");
        }
    }
    /// <summary>The literal string "null"; only readable when <see cref="Type"/> is Null.</summary>
    public object NullValue
    {
        get
        {
            if (Type == ValueType.Null)
            {
                return Null;
            }
            throw new TypeAccessException("Value is not of type 'null'.");
        }
    }
    private string _string;
    private double _number;
    private Object _object;
    private Array _array;
    private const bool True = true;
    private const bool False = false;
    private const string Null = "null";
}
}
| |
using JetBrains.Annotations;
using Silphid.DataTypes;
using Silphid.Extensions;
using UnityEngine;
namespace Silphid.Showzup.Virtual.Layout
{
/// <summary>
/// Base class for virtualized layouts: lays out the currently loaded range
/// of items row by row along the Y axis, delegating row height, start
/// position and completion handling to concrete subclasses.
/// </summary>
public abstract class LayoutBase : ILayout
{
    private readonly ILayoutInfo _info;
    // Tracks which item indices are loaded/layouted; shared with subclasses.
    protected readonly RangeCache _ranges;
    protected LayoutBase(ILayoutInfo info, RangeCache ranges)
    {
        _info = info;
        _ranges = ranges;
    }
    #region LayoutInfo shortcuts
    protected Orientation Orientation => _info.Orientation;
    protected Alignment AlignmentAlong => _info.AlignmentAlong;
    protected Alignment AlignmentAcross => _info.AlignmentAcross;
    protected Vector2 MinMargin => _info.MinMargin;
    protected Vector2 MaxMargin => _info.MaxMargin;
    protected float LoadMargin => _info.LoadMargin;
    protected SizingAlong SizingAlong => _info.SizingAlong;
    protected SizingAcross SizingAcross => _info.SizingAcross;
    protected Vector2 Size => _info.Size;
    protected Vector2 Spacing => _info.Spacing;
    protected float Ratio => _info.Ratio;
    protected int CountAcross => _info.CountAcross;
    protected float TypicalSizeAlong => _info.TypicalSizeAlong;
    protected int PreloadCountAlong => _info.PreloadCountAlong;
    [Pure]
    protected int GetFirstRowIndex(int index) => _info.GetFirstRowIndex(index);
    [Pure]
    protected int GetLastIndexAcross(int index) => _info.GetLastIndexAcross(index);
    #endregion
    #region Layouting
    /// <summary>
    /// Performs a layout pass over the loaded items, walking whole rows from
    /// the start index in the given direction and assigning a rect to every
    /// loaded item. Returns the pair computed by the subclass's
    /// <see cref="OnLayoutCompleted"/>.
    /// </summary>
    public (float requiredHeight, float adjustment) Perform(LayoutDirection direction,
        ILayoutCollection collection,
        Rect viewportRect,
        Vector2 availableSize)
    {
        // Nothing loaded: report the available height, no scroll adjustment.
        if (_ranges.LoadedIndices.IsEmpty)
            return (availableSize.y, 0);
        var startIndex = GetStartIndex(direction, collection);
        var startPosY = GetStartPosY(startIndex, direction, viewportRect, availableSize);
        var allowedWidth = GetAllowedWidth(availableSize.x);
        var cellWidth = GetCellWidth(allowedWidth);
        var spacingX = GetSpacingX(allowedWidth);
        var (isTopLoaded, isBottomLoaded) = GetEdgesLoaded(collection, startIndex, direction);
        // Forward lays rows downward (+1); Backward lays them upward (-1).
        var directionSign = direction == LayoutDirection.Forward
            ? 1
            : -1;
        var lastPosY = startPosY;
        var currentPosY = startPosY;
        // Step one whole row (CountAcross items) at a time.
        for (int i = startIndex;
             i >= _ranges.LoadedIndices.Start && i < _ranges.LoadedIndices.End;
             i += CountAcross * directionSign)
        {
            int endRowIndex = (i + CountAcross).AtMost(collection.Count);
            var rowHeight = GetRowHeight(i, endRowIndex, collection, availableSize);
            var posX = MinMargin.x;
            // Any item ready in that row? (null means skip the row entirely,
            // without advancing the Y cursor.)
            if (rowHeight.HasValue)
            {
                var cellSize = new Vector2(cellWidth, rowHeight.Value);
                // Backward rows grow upward, so anchor at the row's top edge.
                var posY = currentPosY - (direction == LayoutDirection.Forward
                              ? 0
                              : cellSize.y);
                for (int j = i; j < endRowIndex; j++)
                {
                    if (collection.IsLoaded(j))
                    {
                        var layoutedRect = GetLayoutedRect(
                            collection.GetPreferredSize(j),
                            new Vector2(posX, posY),
                            cellSize);
                        collection.SetRect(j, layoutedRect);
                        _ranges.OnItemLayouted(j);
                    }
                    posX += cellWidth + spacingX;
                }
                currentPosY += cellSize.y * directionSign;
                // lastPosY excludes the trailing inter-row spacing.
                lastPosY = currentPosY;
                currentPosY += Spacing.y * directionSign;
            }
        }
        return OnLayoutCompleted(
            direction,
            collection,
            isTopLoaded,
            isBottomLoaded,
            startPosY,
            lastPosY,
            availableSize);
    }
    protected abstract (float requiredHeight, float adjustment) OnLayoutCompleted(LayoutDirection direction,
        ILayoutCollection collection,
        bool isTopLoaded,
        bool isBottomLoaded,
        float startPosY,
        float lastPosY,
        Vector2 availableSize);
    // Returns null when no item in [start, end) is ready to be measured.
    protected abstract float? GetRowHeight(int start, int end, ILayoutCollection collection, Vector2 availableSize);
    protected abstract float GetStartPosY(int startIndex,
        LayoutDirection direction,
        Rect viewportRect,
        Vector2 availableSize);
    // First index of the row where the pass starts: the first loaded row for
    // Forward, the last loaded row for Backward.
    private int GetStartIndex(LayoutDirection direction, ILayoutCollection collection)
    {
        int index = direction == LayoutDirection.Forward
            ? _ranges.LoadedIndices.Start
            : _ranges.LoadedIndices.End - 1;
        // TODO: Account for viewport distance from last layouted rect to skip a certain number of items based on TypicalSizeAlong
        return GetFirstRowIndex(index);
    }
    /// <summary>
    /// Determines whether the collection is fully loaded up to its top and
    /// bottom edges relative to the start index and walk direction.
    /// </summary>
    [Pure]
    private (bool, bool) GetEdgesLoaded(ILayoutCollection collection, int startIndex, LayoutDirection direction)
    {
        bool isTopLoaded = true, isBottomLoaded = true;
        if (direction == LayoutDirection.Forward)
        {
            // Bottom edge: everything from the start index onward must be loaded.
            for (int i = startIndex; i < collection.Count; i++)
                if (!collection.IsLoaded(i))
                {
                    isBottomLoaded = false;
                    break;
                }
            isTopLoaded = startIndex == 0;
        }
        else
        {
            // Top edge: everything from the start index back to 0 must be loaded.
            for (int i = startIndex; i >= 0; i--)
                if (!collection.IsLoaded(i))
                {
                    isTopLoaded = false;
                    break;
                }
            isBottomLoaded = startIndex >= collection.Count - CountAcross;
        }
        return (isTopLoaded, isBottomLoaded);
    }
    /// <summary>
    /// Positions an item inside its cell: Stretch fills the cell, otherwise
    /// the preferred size (clamped to the cell) is aligned Min/Center/Max
    /// on each axis. X is "across", Y is "along".
    /// </summary>
    [Pure]
    private Rect GetLayoutedRect(Vector2 preferredSize, Vector2 pos, Vector2 cellSize)
    {
        var preferredSizeAlong = preferredSize.y.AtMost(cellSize.y);
        var preferredSizeAcross = preferredSize.x.AtMost(cellSize.x);
        var sizeAlong = AlignmentAlong == Alignment.Stretch
            ? cellSize.y
            : preferredSizeAlong;
        var sizeAcross = AlignmentAcross == Alignment.Stretch
            ? cellSize.x
            : preferredSizeAcross;
        var minAlong = pos.y + (AlignmentAlong == Alignment.Stretch || AlignmentAlong == Alignment.Min
                           ? 0
                           : AlignmentAlong == Alignment.Max
                               ? cellSize.y - sizeAlong
                               : (cellSize.y - sizeAlong) / 2);
        var minAcross = pos.x + (AlignmentAcross == Alignment.Stretch || AlignmentAcross == Alignment.Min
                            ? 0
                            : AlignmentAcross == Alignment.Max
                                ? cellSize.x - sizeAcross
                                : (cellSize.x - sizeAcross) / 2);
        return new Rect(minAcross, minAlong, sizeAcross, sizeAlong);
    }
    // Width left for cells once both horizontal margins are removed.
    protected float GetAllowedWidth(float availableSizeAcross) =>
        availableSizeAcross - MinMargin.x - MaxMargin.x;
    // Fixed-size layouts use the configured width; variable layouts split the
    // allowed width evenly after reserving inter-cell spacing.
    protected float GetCellWidth(float allowedSizeAcross) =>
        SizingAcross == SizingAcross.FixedSize
            ? Size.x
            : (allowedSizeAcross - (CountAcross - 1) * Spacing.x) / CountAcross;
    // Fixed-size layouts distribute the leftover width as spacing; variable
    // layouts use the configured spacing.
    // NOTE(review): CountAcross == 1 divides by zero here (float division, so
    // the result is ±Infinity rather than an exception) — confirm CountAcross
    // is always > 1 for FixedSize layouts.
    protected float GetSpacingX(float allowedSizeAcross) =>
        SizingAcross == SizingAcross.FixedSize
            ? (allowedSizeAcross - CountAcross * Size.x) / (CountAcross - 1)
            : Spacing.x;
    #endregion
    #region GetActiveRange
    public abstract IntRange GetActiveRange(ILayoutCollection collection, Rect viewportRect, Vector2 availableSize);
    #endregion
    // Expands a range outward so it starts at a row boundary and ends at the
    // last index of its final row.
    [Pure]
    protected IntRange GetRangeRoundedToWholeRows(IntRange range) =>
        new IntRange(GetFirstRowIndex(range.Start), GetLastIndexAcross(range.End - 1) + 1);
}
}
| |
// <copyright file="UnixFileSystem.cs" company="Fubar Development Junker">
// Copyright (c) Fubar Development Junker. All rights reserved.
// </copyright>
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Claims;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using FubarDev.FtpServer.AccountManagement;
using FubarDev.FtpServer.AccountManagement.Compatibility;
using FubarDev.FtpServer.BackgroundTransfer;
using Microsoft.Extensions.Logging;
using Mono.Unix;
using Mono.Unix.Native;
namespace FubarDev.FtpServer.FileSystem.Unix
{
/// <summary>
/// A backend that uses POSIX API calls (via Mono.Unix) to access the file
/// system. All operations run synchronously and are wrapped in completed
/// tasks.
/// </summary>
public class UnixFileSystem : IUnixFileSystem
{
    private readonly ClaimsPrincipal _user;
    private readonly UnixUserInfo? _userInfo;
    // When true, streams are flushed after every write (see CopyToAsync calls).
    private readonly bool _flushStream;
    private readonly ILogger<UnixFileSystem>? _logger;
    /// <summary>
    /// Initializes a new instance of the <see cref="UnixFileSystem"/> class.
    /// </summary>
    /// <param name="root">The root directory.</param>
    /// <param name="user">The current user.</param>
    /// <param name="userInfo">The user information.</param>
    [Obsolete("Use the overload with ClaimsPrincipal.")]
    public UnixFileSystem(
        IUnixDirectoryEntry root,
        IFtpUser user,
        UnixUserInfo? userInfo)
    {
        _user = user.CreateClaimsPrincipal();
        _userInfo = userInfo;
        Root = root;
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="UnixFileSystem"/> class.
    /// </summary>
    /// <param name="root">The root directory.</param>
    /// <param name="user">The current user.</param>
    /// <param name="userInfo">The user information.</param>
    public UnixFileSystem(
        IUnixDirectoryEntry root,
        ClaimsPrincipal user,
        UnixUserInfo? userInfo)
        : this(root, user, userInfo, false)
    {
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="UnixFileSystem"/> class.
    /// </summary>
    /// <param name="root">The root directory.</param>
    /// <param name="user">The current user.</param>
    /// <param name="userInfo">The user information.</param>
    /// <param name="flushStream">Flush the stream after every write operation.</param>
    public UnixFileSystem(
        IUnixDirectoryEntry root,
        ClaimsPrincipal user,
        UnixUserInfo? userInfo,
        bool flushStream)
        : this(root, user, userInfo, flushStream, null)
    {
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="UnixFileSystem"/> class.
    /// </summary>
    /// <param name="root">The root directory.</param>
    /// <param name="user">The current user.</param>
    /// <param name="userInfo">The user information.</param>
    /// <param name="flushStream">Flush the stream after every write operation.</param>
    /// <param name="logger">The logger for this file system implementation.</param>
    public UnixFileSystem(
        IUnixDirectoryEntry root,
        ClaimsPrincipal user,
        UnixUserInfo? userInfo,
        bool flushStream,
        ILogger<UnixFileSystem>? logger)
    {
        _user = user;
        _userInfo = userInfo;
        _flushStream = flushStream;
        _logger = logger;
        Root = root;
    }
    /// <inheritdoc />
    public bool SupportsAppend { get; } = true;
    /// <inheritdoc />
    public bool SupportsNonEmptyDirectoryDelete { get; } = false;
    /// <inheritdoc />
    public StringComparer FileSystemEntryComparer { get; } = StringComparer.Ordinal;
    /// <inheritdoc />
    public IUnixDirectoryEntry Root { get; }
    /// <inheritdoc />
    public Task<IReadOnlyList<IUnixFileSystemEntry>> GetEntriesAsync(IUnixDirectoryEntry directoryEntry, CancellationToken cancellationToken)
    {
        var dirEntry = (UnixDirectoryEntry)directoryEntry;
        var dirInfo = dirEntry.Info;
        var entries = dirInfo.GetFileSystemEntries().Select(x => CreateEntry(dirEntry, x)).ToList();
        return Task.FromResult<IReadOnlyList<IUnixFileSystemEntry>>(entries);
    }
    /// <inheritdoc />
    public Task<IUnixFileSystemEntry?> GetEntryByNameAsync(IUnixDirectoryEntry directoryEntry, string name, CancellationToken cancellationToken)
    {
        var dirEntry = (UnixDirectoryEntry)directoryEntry;
        var dirInfo = dirEntry.Info;
        // GetFileSystemEntries takes a regex: anchor and escape the name so
        // only an exact (case-sensitive) match is returned.
        var entry = dirInfo.GetFileSystemEntries($"^{Regex.Escape(name)}$")
            .Select(x => CreateEntry(dirEntry, x))
            .Cast<IUnixFileSystemEntry?>()
            .SingleOrDefault();
        return Task.FromResult(entry);
    }
    /// <inheritdoc />
    public Task<IUnixFileSystemEntry> MoveAsync(
        IUnixDirectoryEntry parent,
        IUnixFileSystemEntry source,
        IUnixDirectoryEntry target,
        string fileName,
        CancellationToken cancellationToken)
    {
        var sourceInfo = ((UnixFileSystemEntry)source).GenericInfo;
        var targetEntry = (UnixDirectoryEntry)target;
        var targetInfo = targetEntry.Info;
        var sourceEntryName = sourceInfo.FullName;
        var targetEntryName = UnixPath.Combine(targetInfo.FullName, fileName);
        // rename(2) returns -1 on failure; the errno is not surfaced here.
        if (Stdlib.rename(sourceEntryName, targetEntryName) == -1)
        {
            throw new InvalidOperationException("The entry couldn't be moved.");
        }
        var targetEntryInfo = UnixFileSystemInfo.GetFileSystemEntry(targetEntryName);
        return Task.FromResult(CreateEntry(targetEntry, targetEntryInfo));
    }
    /// <inheritdoc />
    public Task UnlinkAsync(IUnixFileSystemEntry entry, CancellationToken cancellationToken)
    {
        var entryInfo = ((UnixFileSystemEntry)entry).GenericInfo;
        entryInfo.Delete();
        return Task.CompletedTask;
    }
    /// <inheritdoc />
    public Task<IUnixDirectoryEntry> CreateDirectoryAsync(
        IUnixDirectoryEntry targetDirectory,
        string directoryName,
        CancellationToken cancellationToken)
    {
        var targetEntry = (UnixDirectoryEntry)targetDirectory;
        var newDirectoryName = UnixPath.Combine(targetEntry.Info.FullName, directoryName);
        var newDirectoryInfo = new UnixDirectoryInfo(newDirectoryName);
        newDirectoryInfo.Create();
        return Task.FromResult((IUnixDirectoryEntry)CreateEntry(targetEntry, newDirectoryInfo));
    }
    /// <inheritdoc />
    public Task<Stream> OpenReadAsync(IUnixFileEntry fileEntry, long startPosition, CancellationToken cancellationToken)
    {
        var fileInfo = ((UnixFileEntry)fileEntry).Info;
        var stream = fileInfo.OpenRead();
        // Support REST (restart) by seeking to the requested offset.
        if (startPosition != 0)
        {
            stream.Seek(startPosition, SeekOrigin.Begin);
        }
        return Task.FromResult<Stream>(stream);
    }
    /// <inheritdoc />
    public async Task<IBackgroundTransfer?> AppendAsync(IUnixFileEntry fileEntry, long? startPosition, Stream data, CancellationToken cancellationToken)
    {
        var fileInfo = ((UnixFileEntry)fileEntry).Info;
        _logger?.LogTrace("Start appending to {fileName}", fileInfo.FullName);
        using (var stream = fileInfo.Open(FileMode.Append))
        {
            if (startPosition != null)
            {
                stream.Seek(startPosition.Value, SeekOrigin.Begin);
            }
            /* Must be ConfigureAwait(true) to stay in the same synchronization context. */
            // 81920 is the framework's default CopyToAsync buffer size.
            await data.CopyToAsync(stream, 81920, _flushStream, cancellationToken)
               .ConfigureAwait(true);
            _logger?.LogTrace("Closing {fileName}", fileInfo.FullName);
        }
        _logger?.LogTrace("Closed {fileName}", fileInfo.FullName);
        // Transfers complete inline; no background transfer is ever returned.
        return null;
    }
    /// <inheritdoc />
    public async Task<IBackgroundTransfer?> CreateAsync(
        IUnixDirectoryEntry targetDirectory,
        string fileName,
        Stream data,
        CancellationToken cancellationToken)
    {
        var targetInfo = ((UnixDirectoryEntry)targetDirectory).Info;
        var fileInfo = new UnixFileInfo(UnixPath.Combine(targetInfo.FullName, fileName));
        _logger?.LogTrace("Start writing to {fileName}", fileInfo.FullName);
        // CreateNew fails if the file already exists; DEFFILEMODE = 0666
        // (subject to the process umask).
        using (var stream = fileInfo.Open(FileMode.CreateNew, FileAccess.Write, FilePermissions.DEFFILEMODE))
        {
            /* Must be ConfigureAwait(true) to stay in the same synchronization context. */
            await data.CopyToAsync(stream, 81920, _flushStream, cancellationToken)
               .ConfigureAwait(true);
            _logger?.LogTrace("Closing {fileName}", fileInfo.FullName);
        }
        _logger?.LogTrace("Closed {fileName}", fileInfo.FullName);
        return null;
    }
    /// <inheritdoc />
    public async Task<IBackgroundTransfer?> ReplaceAsync(IUnixFileEntry fileEntry, Stream data, CancellationToken cancellationToken)
    {
        var fileInfo = ((UnixFileEntry)fileEntry).Info;
        _logger?.LogTrace("Start writing to {fileName} while replacing old content", fileInfo.FullName);
        // FileMode.Create truncates any existing content before writing.
        using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write, FilePermissions.DEFFILEMODE))
        {
            /* Must be ConfigureAwait(true) to stay in the same synchronization context. */
            await data.CopyToAsync(stream, 81920, _flushStream, cancellationToken)
               .ConfigureAwait(true);
            _logger?.LogTrace("Closing {fileName}", fileInfo.FullName);
        }
        _logger?.LogTrace("Closed {fileName}", fileInfo.FullName);
        return null;
    }
    /// <inheritdoc />
    public Task<IUnixFileSystemEntry> SetMacTimeAsync(
        IUnixFileSystemEntry entry,
        DateTimeOffset? modify,
        DateTimeOffset? access,
        DateTimeOffset? create,
        CancellationToken cancellationToken)
    {
        // utimes(2) can only set access and modification times, so the
        // `create` argument is ignored by this implementation.
        if (access == null && modify == null)
        {
            return Task.FromResult(entry);
        }
        var entryInfo = ((UnixFileSystemEntry)entry).GenericInfo;
        // times[0] = atime, times[1] = mtime; unspecified values are
        // re-written with the entry's current timestamps.
        var times = new Timeval[2];
        if (access != null)
        {
            times[0] = ToTimeval(access.Value.UtcDateTime);
        }
        else
        {
            times[0] = ToTimeval(entryInfo.LastAccessTimeUtc);
        }
        if (modify != null)
        {
            times[1] = ToTimeval(modify.Value.UtcDateTime);
        }
        else
        {
            times[1] = ToTimeval(entryInfo.LastWriteTimeUtc);
        }
        Syscall.utimes(entryInfo.FullName, times);
        entryInfo.Refresh();
        return Task.FromResult(entry);
    }
    // Converts a DateTime to a POSIX Timeval: ticks are 100 ns, so
    // 10,000,000 ticks per second and 10 ticks per microsecond.
    private static Timeval ToTimeval(DateTime timestamp)
    {
        var accessTicks = timestamp.ToUniversalTime().Ticks - NativeConvert.UnixEpoch.Ticks;
        var seconds = accessTicks / 10_000_000;
        var microseconds = (accessTicks % 10_000_000) / 10;
        return new Timeval()
        {
            tv_sec = seconds,
            tv_usec = microseconds,
        };
    }
    // Wraps a Mono.Unix info object in the matching entry type.
    private IUnixFileSystemEntry CreateEntry(IUnixDirectoryEntry parent, UnixFileSystemInfo info)
    {
        switch (info)
        {
            case UnixFileInfo fileInfo:
                return new UnixFileEntry(fileInfo);
            case UnixDirectoryInfo dirInfo:
                return new UnixDirectoryEntry(dirInfo, _user, _userInfo, parent);
            default:
                throw new NotSupportedException($"Unsupported file system info type {info}");
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using Xunit;
namespace System.Collections.Tests
{
public class StackBasicTests
{
[Fact]
public static void EmptyStackSizeIsZero()
{
    // A freshly constructed stack must report zero elements.
    var stack = new Stack();

    Assert.Equal(0, stack.Count);
}
[Fact]
public static void DefaultStackIsNotSynchronized()
{
    // The default constructor creates a non-thread-safe stack.
    var stack = new Stack();

    Assert.False(stack.IsSynchronized);
}
[Fact]
public static void NumberOfElementsAddedIsEqualToStackSize()
{
    // Push a known number of elements and verify Count tracks them.
    int iNumElementsAdded = 1975;
    Stack stack = new Stack();
    for (int i = 0; i < iNumElementsAdded; i++)
    {
        stack.Push(new Object());
    }
    // xUnit's Assert.Equal signature is (expected, actual); the arguments
    // were swapped, which produces misleading failure messages.
    Assert.Equal(iNumElementsAdded, stack.Count);
}
[Fact]
public static void ClearResetsNumberOfElementsToZero()
{
    // Populate a small stack, then verify Clear removes everything.
    const int count = 2;
    var stack = new Stack();
    for (int i = 0; i < count; i++)
    {
        stack.Push(new Object());
    }

    stack.Clear();

    Assert.Equal(0, stack.Count);
}
[Fact]
public static void PopDecrementsStackSize()
{
    // Count must shrink by exactly one on every Pop.
    int iNumElementsAdded = 25;
    Stack stack = new Stack();
    for (int i = 0; i < iNumElementsAdded; i++)
    {
        stack.Push(i);
    }
    for (int i = 0; i < iNumElementsAdded; i++)
    {
        // Fixed: Assert.Equal takes (expected, actual); both calls had the
        // arguments swapped, which produces misleading failure messages.
        Assert.Equal(iNumElementsAdded - i, stack.Count);
        Object objTop = stack.Pop();
        Assert.Equal(iNumElementsAdded - i - 1, stack.Count);
    }
}
[Fact]
public static void PeekingEmptyStackThrows()
{
Stack stack = new Stack();
Assert.Throws<InvalidOperationException>(() => { var x = stack.Peek(); });
}
[Fact]
public static void PeekingEmptyStackAfterRemovingElementsThrows()
{
object objRet;
Stack stack = new Stack();
for (int i = 0; i < 1000; i++)
{
stack.Push(i);
}
for (int i = 0; i < 1000; i++)
{
objRet = stack.Pop();
}
Assert.Throws<InvalidOperationException>(() => { objRet = stack.Peek(); });
}
[Fact]
public static void ICollectionCanBeGivenToStack()
{
int iNumElements = 10000;
var objArr = new Object[iNumElements];
for (int i = 0; i < iNumElements; i++)
{
objArr[i] = i;
}
Stack stack = new Stack(objArr);
for (int i = 0; i < iNumElements; i++)
{
var objRet = stack.Pop();
Assert.True(objRet.Equals(iNumElements - i - 1));
}
}
[Fact]
public static void PeekingAtElementTwiceGivesSameResults()
{
Stack stack = new Stack();
stack.Push(1);
Assert.True(stack.Peek().Equals(stack.Peek()));
}
[Fact]
public static void PushAndPopWorkOnNullElements()
{
Stack stack = new Stack();
stack.Push(null);
stack.Push(-1);
stack.Push(null);
Assert.Equal(stack.Pop(), null);
Assert.True((-1).Equals(stack.Pop()));
Assert.Equal(stack.Pop(), null);
}
[Fact]
public static void CopyingToNullArrayThrows()
{
Stack stack = new Stack();
stack.Push("hey");
Assert.Throws<ArgumentNullException>(() => stack.CopyTo(null, 0));
}
[Fact]
public static void CopyingToMultiDimArrayThrows()
{
Stack stack = new Stack();
stack.Push("hey");
Assert.Throws<ArgumentException>(() => stack.CopyTo(new Object[8, 8], 0));
}
[Fact]
public static void CopyingOutOfRangeThrows_1()
{
Stack stack = new Stack();
var objArr = new Object[0];
Assert.Throws<ArgumentException>(() => stack.CopyTo(objArr, 1));
stack = new Stack();
Assert.Throws<ArgumentException>(() => stack.CopyTo(objArr, Int32.MaxValue));
stack = new Stack();
Assert.Throws<ArgumentOutOfRangeException>(() => stack.CopyTo(objArr, Int32.MinValue));
stack = new Stack();
Assert.Throws<ArgumentOutOfRangeException>(() => stack.CopyTo(objArr, -1));
}
[Fact]
public static void CopyingOutOfRangeThrows_2()
{
Stack stack = new Stack();
stack.Push("MyString");
var objArr = new Object[0];
Assert.Throws<ArgumentException>(() => stack.CopyTo(objArr, 0));
}
[Fact]
public static void GettingEnumeratorAndLoopingThroughWorks()
{
Stack stack = new Stack();
stack.Push("hey");
stack.Push("hello");
IEnumerator ienum = stack.GetEnumerator();
int iCounter = 0;
while (ienum.MoveNext())
{
iCounter++;
}
Assert.Equal(iCounter, stack.Count);
}
[Fact]
public static void GetBeforeStartingEnumerator()
{
// NOTE: The docs say this behaviour is undefined so if test fails it might be ok
Stack stack = new Stack();
stack.Push("a");
stack.Push("b");
IEnumerator ienum = stack.GetEnumerator();
Assert.Throws<InvalidOperationException>(() => { Object obj = ienum.Current; });
}
[Fact]
public static void EnumeratingBeyondEndOfListThenGetObject()
{
Stack stack = new Stack();
stack.Push(new Object());
stack.Push(stack);
IEnumerator ienum = stack.GetEnumerator();
Assert.True(ienum.MoveNext());
for (int i = 0; i < 100; i++)
{
Object objTemp1 = ienum.Current;
Assert.True(objTemp1.Equals(stack));
}
Assert.True(ienum.MoveNext());
for (int i = 0; i < 100; i++)
{
Assert.False(ienum.MoveNext());
}
Assert.Throws<InvalidOperationException>(() => { var o = ienum.Current; });
}
[Fact]
public static void PassingNegativeCapacityThrows()
{
Assert.Throws<ArgumentOutOfRangeException>(() => { Stack stack = new Stack(Int32.MinValue); });
}
[Fact]
public static void CreatingStackWithZeroCapacityDoesntThrow()
{
Stack stack = new Stack(0);
}
[Fact]
public static void PassingValidCapacityCreatesZeroElementsStack()
{
Stack stack = new Stack(1);
Assert.Equal(0, stack.Count);
}
[Fact]
public static void SynchronizedStacksIsSynchronizedPropertyReturnsTrue()
{
Stack stack = Stack.Synchronized(new Stack());
Assert.True(stack.IsSynchronized);
}
[Fact]
public static void SynchronizingNullStackThrows()
{
Assert.Throws<ArgumentNullException>(() => { Stack stack = Stack.Synchronized(null); });
}
[Fact]
public static void TestingAllMethodsOfSynchronizedStack()
{
Stack q1 = new Stack();
for (int i = 0; i < 10; i++)
{
q1.Push("String_" + i);
}
Stack q2 = Stack.Synchronized(q1);
Assert.Equal(q2.Count, q1.Count);
q2.Clear();
Assert.Equal(0, q2.Count);
for (int i = 0; i < 10; i++)
{
q2.Push("String_" + i);
}
for (int i = 0, j = 9; i < 10; i++, j--)
{
Assert.True(((String)q2.Peek()).Equals("String_" + j));
Assert.True(((String)q2.Pop()).Equals("String_" + j));
}
Assert.Equal(0, q2.Count);
Assert.True(q2.IsSynchronized);
for (int i = 0; i < 10; i++)
q2.Push("String_" + i);
Stack q3 = Stack.Synchronized(q2);
Assert.True(q3.IsSynchronized);
Assert.Equal(q2.Count, q3.Count);
var strArr = new String[10];
q2.CopyTo(strArr, 0);
for (int i = 0, j = 9; i < 10; i++, j--)
{
Assert.True(strArr[i].Equals("String_" + j));
}
strArr = new String[10 + 10];
q2.CopyTo(strArr, 10);
for (int i = 0, j = 9; i < 10; i++, j--)
{
Assert.True(strArr[i + 10].Equals("String_" + j));
}
Assert.Throws<ArgumentNullException>(() => q2.CopyTo(null, 0));
var oArr = q2.ToArray();
for (int i = 0, j = 9; i < 10; i++, j--)
{
Assert.True(((String)oArr[i]).Equals("String_" + j));
}
var ienm1 = q2.GetEnumerator();
Assert.Throws<InvalidOperationException>(() => { var oValue = ienm1.Current; });
var iCount = 9;
while (ienm1.MoveNext())
{
Assert.True(((String)ienm1.Current).Equals("String_" + iCount));
iCount--;
}
ienm1.Reset();
iCount = 9;
while (ienm1.MoveNext())
{
Assert.True(((String)ienm1.Current).Equals("String_" + iCount));
iCount--;
}
ienm1.Reset();
q2.Pop();
Assert.Throws<InvalidOperationException>(() => { var oValue = ienm1.Current; });
Assert.Throws<InvalidOperationException>(() => ienm1.MoveNext());
Assert.Throws<InvalidOperationException>(() => ienm1.Reset());
}
[Fact]
public static void PassingNullCollectionToConstructorThrows()
{
Assert.Throws<ArgumentNullException>(() => { Stack stack = new Stack(null); });
}
[Fact]
public void DebuggerAttributeTests()
{
DebuggerAttributes.ValidateDebuggerDisplayReferences(new Stack());
var testStack = new Stack();
testStack.Push("a");
testStack.Push(1);
testStack.Push("b");
testStack.Push(2);
DebuggerAttributes.ValidateDebuggerTypeProxyProperties(testStack);
}
}
}
| |
using Lucene.Net.Support;
using System;
using System.Diagnostics;
namespace Lucene.Net.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using DocIdSet = Lucene.Net.Search.DocIdSet;
using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
/// <summary>
/// An "open" BitSet implementation that allows direct access to the array of words
/// storing the bits.
/// <para/>
/// NOTE: This can be used in .NET any place where a <c>java.util.BitSet</c> is used in Java.
/// <para/>
/// Unlike <c>java.util.BitSet</c>, the fact that bits are packed into an array of longs
/// is part of the interface. This allows efficient implementation of other algorithms
/// by someone other than the author. It also allows one to efficiently implement
/// alternate serialization or interchange formats.
/// <para/>
/// <see cref="OpenBitSet"/> is faster than <c>java.util.BitSet</c> in most operations
/// and *much* faster at calculating cardinality of sets and results of set operations.
/// It can also handle sets of larger cardinality (up to 64 * 2**32-1)
/// <para/>
/// The goals of <see cref="OpenBitSet"/> are the fastest implementation possible, and
/// maximum code reuse. Extra safety and encapsulation
/// may always be built on top, but if that's built in, the cost can never be removed (and
/// hence people re-implement their own version in order to get better performance).
/// <para/>
/// <h3>Performance Results</h3>
///
/// Test system: Pentium 4, Sun Java 1.5_06 -server -Xbatch -Xmx64M
/// <para/>BitSet size = 1,000,000
/// <para/>Results are java.util.BitSet time divided by OpenBitSet time.
/// <list type="table">
/// <listheader>
/// <term></term> <term>cardinality</term> <term>IntersectionCount</term> <term>Union</term> <term>NextSetBit</term> <term>Get</term> <term>GetIterator</term>
/// </listheader>
/// <item>
/// <term>50% full</term> <description>3.36</description> <description>3.96</description> <description>1.44</description> <description>1.46</description> <description>1.99</description> <description>1.58</description>
/// </item>
/// <item>
/// <term>1% full</term> <description>3.31</description> <description>3.90</description> <description> </description> <description>1.04</description> <description> </description> <description>0.99</description>
/// </item>
/// </list>
/// <para/>
/// <para/>
/// Test system: AMD Opteron, 64 bit linux, Sun Java 1.5_06 -server -Xbatch -Xmx64M
/// <para/>BitSet size = 1,000,000
/// <para/>Results are java.util.BitSet time divided by OpenBitSet time.
/// <list type="table">
/// <listheader>
/// <term></term> <term>cardinality</term> <term>IntersectionCount</term> <term>Union</term> <term>NextSetBit</term> <term>Get</term> <term>GetIterator</term>
/// </listheader>
/// <item>
/// <term>50% full</term> <description>2.50</description> <description>3.50</description> <description>1.00</description> <description>1.03</description> <description>1.12</description> <description>1.25</description>
/// </item>
/// <item>
/// <term>1% full</term> <description>2.51</description> <description>3.49</description> <description> </description> <description>1.00</description> <description> </description> <description>1.02</description>
/// </item>
/// </list>
/// </summary>
public class OpenBitSet : DocIdSet, IBits
#if FEATURE_CLONEABLE
, System.ICloneable
#endif
{
        // Backing array of 64-bit words; bit i lives in m_bits[i >> 6] at position (i & 0x3f).
        protected internal long[] m_bits;
        protected internal int m_wlen; // number of words (elements) used in the array
        // Used only for assert:
        private long numBits;
        /// <summary>
        /// Constructs an <see cref="OpenBitSet"/> large enough to hold <paramref name="numBits"/>. </summary>
        public OpenBitSet(long numBits)
        {
            this.numBits = numBits;
            m_bits = new long[Bits2words(numBits)];
            m_wlen = m_bits.Length;
        }
        /// <summary>
        /// Constructor: allocates enough space for 64 bits. </summary>
        public OpenBitSet()
            : this(64)
        {
        }
        /// <summary>
        /// Constructs an <see cref="OpenBitSet"/> from an existing <see cref="T:long[]"/>.
        /// <para/>
        /// The first 64 bits are in long[0], with bit index 0 at the least significant
        /// bit, and bit index 63 at the most significant. Given a bit index, the word
        /// containing it is long[index/64], and it is at bit number index%64 within
        /// that word.
        /// <para/>
        /// <paramref name="numWords"/> are the number of elements in the array that contain set bits
        /// (non-zero longs). <paramref name="numWords"/> should be <= bits.Length, and any existing
        /// words in the array at position >= numWords should be zero.
        /// </summary>
        public OpenBitSet(long[] bits, int numWords)
        {
            if (numWords > bits.Length)
            {
                throw new System.ArgumentException("numWords cannot exceed bits.length");
            }
            // NOTE: the array is adopted by reference, not copied — caller and set share storage.
            this.m_bits = bits;
            this.m_wlen = numWords;
            this.numBits = m_wlen * 64;
        }
        /// <summary>
        /// Returns an iterator over the set bits, in increasing document-id order. </summary>
        public override DocIdSetIterator GetIterator()
        {
            return new OpenBitSetIterator(m_bits, m_wlen);
        }
        /// <summary>
        /// Exposes this set directly as an <see cref="IBits"/> view (no copy). </summary>
        public override IBits Bits
        {
            get { return this; }
        }
        /// <summary>
        /// This DocIdSet implementation is cacheable. </summary>
        public override bool IsCacheable
        {
            get
            {
                return true;
            }
        }
        /// <summary>
        /// Returns the current capacity in bits (1 greater than the index of the last bit). </summary>
        public virtual long Capacity
        {
            // NOTE(review): the shift is performed in 32-bit arithmetic before widening to long,
            // so this could overflow for arrays longer than 2^25 words — confirm intended.
            get { return m_bits.Length << 6; }
        }
        // LUCENENET specific - eliminating this extra property, since it is identical to
        // Length anyway, and Length is required by the IBits interface.
        ///// <summary>
        ///// Returns the current capacity of this set. Included for
        ///// compatibility. this is *not* equal to <seealso cref="#cardinality"/>.
        ///// </summary>
        //public virtual long Size
        //{
        //    get { return Capacity; }
        //}
        /// <summary>
        /// Returns the current capacity of this set. This is *not* equal to <see cref="Cardinality()"/>.
        /// <para/>
        /// NOTE: This is equivalent to size() or length() in Lucene.
        /// </summary>
        public virtual int Length
        {
            get { return m_bits.Length << 6; }
        }
        /// <summary>
        /// Returns <c>true</c> if there are no set bits </summary>
        public virtual bool IsEmpty
        {
            get
            {
                // O(n) over the word array — counts rather than short-circuiting.
                return Cardinality() == 0;
            }
        }
        /// <summary>
        /// Expert: returns the <see cref="T:long[]"/> storing the bits. </summary>
        [WritableArray]
        public virtual long[] GetBits()
        {
            return m_bits;
        }
        /// <summary>
        /// Expert: gets the number of <see cref="long"/>s in the array that are in use. </summary>
        public virtual int NumWords
        {
            get
            {
                return m_wlen;
            }
        }
        /// <summary>
        /// Returns <c>true</c> or <c>false</c> for the specified bit <paramref name="index"/>. </summary>
        public virtual bool Get(int index)
        {
            int i = index >> 6; // div 64
            // signed shift will keep a negative index and force an
            // array-index-out-of-bounds-exception, removing the need for an explicit check.
            if (i >= m_bits.Length)
            {
                return false;
            }
            int bit = index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            return (m_bits[i] & bitmask) != 0;
        }
        /// <summary>
        /// Returns <c>true</c> or <c>false</c> for the specified bit <paramref name="index"/>.
        /// The index should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual bool FastGet(int index)
        {
            // "Fast" variants skip the bounds check; out-of-range indexes throw instead of returning false.
            Debug.Assert(index >= 0 && index < numBits);
            int i = index >> 6; // div 64
            // signed shift will keep a negative index and force an
            // array-index-out-of-bounds-exception, removing the need for an explicit check.
            int bit = index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            return (m_bits[i] & bitmask) != 0;
        }
        /// <summary>
        /// Returns <c>true</c> or <c>false</c> for the specified bit <paramref name="index"/>.
        /// </summary>
        public virtual bool Get(long index)
        {
            int i = (int)(index >> 6); // div 64
            if (i >= m_bits.Length)
            {
                return false;
            }
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            return (m_bits[i] & bitmask) != 0;
        }
        /// <summary>
        /// Returns <c>true</c> or <c>false</c> for the specified bit <paramref name="index"/>.
        /// The index should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual bool FastGet(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int i = (int)(index >> 6); // div 64
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            return (m_bits[i] & bitmask) != 0;
        }
        /*
        // alternate implementation of get()
        public boolean get1(int index) {
          int i = index >> 6;                // div 64
          int bit = index & 0x3f;            // mod 64
          return ((bits[i]>>>bit) & 0x01) != 0;
          // this does a long shift and a bittest (on x86) vs
          // a long shift, and a long AND, (the test for zero is prob a no-op)
          // testing on a P4 indicates this is slower than (bits[i] & bitmask) != 0;
        }
        */
        /// <summary>
        /// Returns 1 if the bit is set, 0 if not.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual int GetBit(int index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int i = index >> 6; // div 64
            int bit = index & 0x3f; // mod 64
            // Unsigned right shift (via ulong cast) then mask isolates the single bit as 0 or 1.
            return ((int)((long)((ulong)m_bits[i] >> bit))) & 0x01;
        }
        /*
        public boolean get2(int index) {
          int word = index >> 6;            // div 64
          int bit = index & 0x0000003f;     // mod 64
          return (bits[word] << bit) < 0;   // hmmm, this would work if bit order were reversed
          // we could right shift and check for parity bit, if it was available to us.
        }
        */
        /// <summary>
        /// Sets a bit, expanding the set size if necessary. </summary>
        public virtual void Set(long index)
        {
            int wordNum = ExpandingWordNum(index);
            int bit = (int)index & 0x3f;
            long bitmask = 1L << bit;
            m_bits[wordNum] |= bitmask;
        }
        /// <summary>
        /// Sets the bit at the specified <paramref name="index"/>.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual void FastSet(int index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = index >> 6; // div 64
            int bit = index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] |= bitmask;
        }
        /// <summary>
        /// Sets the bit at the specified <paramref name="index"/>.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual void FastSet(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6);
            int bit = (int)index & 0x3f;
            long bitmask = 1L << bit;
            m_bits[wordNum] |= bitmask;
        }
        /// <summary>
        /// Sets a range of bits, expanding the set size if necessary.
        /// </summary>
        /// <param name="startIndex"> Lower index </param>
        /// <param name="endIndex"> One-past the last bit to set </param>
        public virtual void Set(long startIndex, long endIndex)
        {
            if (endIndex <= startIndex)
            {
                return;
            }
            int startWord = (int)(startIndex >> 6);
            // since endIndex is one past the end, this is index of the last
            // word to be changed.
            int endWord = ExpandingWordNum(endIndex - 1);
            // Shift counts on longs are taken mod 64 by C#, so (int)startIndex behaves as startIndex & 0x3f.
            long startmask = -1L << (int)startIndex;
            long endmask = (long)(0xffffffffffffffffUL >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
            if (startWord == endWord)
            {
                m_bits[startWord] |= (startmask & endmask);
                return;
            }
            m_bits[startWord] |= startmask;
            Arrays.Fill(m_bits, startWord + 1, endWord, -1L);
            m_bits[endWord] |= endmask;
        }
        /// <summary>
        /// Returns the word index holding bit <paramref name="index"/>, growing the
        /// backing storage first when the bit falls beyond the current word count.
        /// </summary>
        protected virtual int ExpandingWordNum(long index)
        {
            int wordNum = (int)(index >> 6);
            if (wordNum >= m_wlen)
            {
                EnsureCapacity(index + 1);
            }
            return wordNum;
        }
        /// <summary>
        /// Clears a bit.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual void FastClear(int index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = index >> 6;
            int bit = index & 0x03f;
            long bitmask = 1L << bit;
            m_bits[wordNum] &= ~bitmask;
            // hmmm, it takes one more instruction to clear than it does to set... any
            // way to work around this?  If there were only 63 bits per word, we could
            // use a right shift of 10111111...111 in binary to position the 0 in the
            // correct place (using sign extension).
            // Could also use Long.rotateRight() or rotateLeft() *if* they were converted
            // by the JVM into a native instruction.
            // bits[word] &= Long.rotateLeft(0xfffffffe,bit);
        }
        /// <summary>
        /// Clears a bit.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual void FastClear(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] &= ~bitmask;
        }
        /// <summary>
        /// Clears a bit, allowing access beyond the current set size without changing the size. </summary>
        public virtual void Clear(long index)
        {
            int wordNum = (int)(index >> 6); // div 64
            if (wordNum >= m_wlen)
            {
                // Bit is already (implicitly) zero past the end — nothing to do.
                return;
            }
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] &= ~bitmask;
        }
        /// <summary>
        /// Clears a range of bits. Clearing past the end does not change the size of the set.
        /// </summary>
        /// <param name="startIndex"> Lower index </param>
        /// <param name="endIndex"> One-past the last bit to clear </param>
        public virtual void Clear(int startIndex, int endIndex)
        {
            if (endIndex <= startIndex)
            {
                return;
            }
            int startWord = (startIndex >> 6);
            if (startWord >= m_wlen)
            {
                return;
            }
            // since endIndex is one past the end, this is index of the last
            // word to be changed.
            int endWord = ((endIndex - 1) >> 6);
            long startmask = (-1L) << startIndex; // -1 << (startIndex mod 64)
            long endmask = (-1L) << endIndex; // -1 << (endIndex mod 64)
            if ((endIndex & 0x3f) == 0)
            {
                // endIndex on a word boundary: the "last word" keeps none of its bits set by the mask.
                endmask = 0;
            }
            startmask = ~startmask;
            if (startWord == endWord)
            {
                m_bits[startWord] &= (startmask | endmask);
                return;
            }
            m_bits[startWord] &= startmask;
            int middle = Math.Min(m_wlen, endWord);
            Arrays.Fill(m_bits, startWord + 1, middle, 0L);
            if (endWord < m_wlen)
            {
                m_bits[endWord] &= endmask;
            }
        }
/// <summary>
/// Clears a range of bits. Clearing past the end does not change the size of the set.
/// </summary>
/// <param name="startIndex"> Lower index </param>
/// <param name="endIndex"> One-past the last bit to clear </param>
public virtual void Clear(long startIndex, long endIndex)
{
if (endIndex <= startIndex)
{
return;
}
int startWord = (int)(startIndex >> 6);
if (startWord >= m_wlen)
{
return;
}
// since endIndex is one past the end, this is index of the last
// word to be changed.
int endWord = (int)((endIndex - 1) >> 6);
long startmask = -1L << (int)startIndex;
long endmask = -(int)((uint)1L >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
// invert masks since we are clearing
startmask = ~startmask;
endmask = ~endmask;
if (startWord == endWord)
{
m_bits[startWord] &= (startmask | endmask);
return;
}
m_bits[startWord] &= startmask;
int middle = Math.Min(m_wlen, endWord);
Arrays.Fill(m_bits, startWord + 1, middle, 0L);
if (endWord < m_wlen)
{
m_bits[endWord] &= endmask;
}
}
        /// <summary>
        /// Sets a bit and returns the previous value.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual bool GetAndSet(int index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = index >> 6; // div 64
            int bit = index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            // Read the old value before OR-ing the bit in.
            bool val = (m_bits[wordNum] & bitmask) != 0;
            m_bits[wordNum] |= bitmask;
            return val;
        }
        /// <summary>
        /// Sets a bit and returns the previous value.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual bool GetAndSet(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            bool val = (m_bits[wordNum] & bitmask) != 0;
            m_bits[wordNum] |= bitmask;
            return val;
        }
        /// <summary>
        /// Flips a bit.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual void FastFlip(int index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = index >> 6; // div 64
            int bit = index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] ^= bitmask;
        }
        /// <summary>
        /// Flips a bit.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual void FastFlip(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] ^= bitmask;
        }
        /// <summary>
        /// Flips a bit, expanding the set size if necessary. </summary>
        public virtual void Flip(long index)
        {
            int wordNum = ExpandingWordNum(index);
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] ^= bitmask;
        }
        /// <summary>
        /// Flips a bit and returns the resulting bit value.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual bool FlipAndGet(int index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = index >> 6; // div 64
            int bit = index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] ^= bitmask;
            // Re-read after the XOR: returns the NEW value of the bit.
            return (m_bits[wordNum] & bitmask) != 0;
        }
        /// <summary>
        /// Flips a bit and returns the resulting bit value.
        /// The <paramref name="index"/> should be less than the <see cref="Length"/>.
        /// </summary>
        public virtual bool FlipAndGet(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)index & 0x3f; // mod 64
            long bitmask = 1L << bit;
            m_bits[wordNum] ^= bitmask;
            return (m_bits[wordNum] & bitmask) != 0;
        }
        /// <summary>
        /// Flips a range of bits, expanding the set size if necessary.
        /// </summary>
        /// <param name="startIndex"> Lower index </param>
        /// <param name="endIndex"> One-past the last bit to flip </param>
        public virtual void Flip(long startIndex, long endIndex)
        {
            if (endIndex <= startIndex)
            {
                return;
            }
            int startWord = (int)(startIndex >> 6);
            // since endIndex is one past the end, this is index of the last
            // word to be changed.
            int endWord = ExpandingWordNum(endIndex - 1);
            //* Grrr, java shifting wraps around so -1L>>>64 == -1
            // for that reason, make sure not to use endmask if the bits to flip will
            // be zero in the last word (redefine endWord to be the last changed...)
            // long startmask = -1L << (startIndex & 0x3f); // example: 11111...111000
            // long endmask = -1L >>> (64-(endIndex & 0x3f)); // example: 00111...111111
            // **
            long startmask = -1L << (int)startIndex;
            long endmask = (long)(0xffffffffffffffffUL >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
            if (startWord == endWord)
            {
                m_bits[startWord] ^= (startmask & endmask);
                return;
            }
            m_bits[startWord] ^= startmask;
            // Whole words strictly between the boundary words are simply inverted.
            for (int i = startWord + 1; i < endWord; i++)
            {
                m_bits[i] = ~m_bits[i];
            }
            m_bits[endWord] ^= endmask;
        }
        /*
        public static int pop(long v0, long v1, long v2, long v3) {
          // derived from pop_array by setting last four elems to 0.
          // exchanges one pop() call for 10 elementary operations
          // saving about 7 instructions... is there a better way?
            long twosA=v0 & v1;
            long ones=v0^v1;
            long u2=ones^v2;
            long twosB =(ones&v2)|(u2&v3);
            ones=u2^v3;
            long fours=(twosA&twosB);
            long twos=twosA^twosB;
            return (pop(fours)<<2)
                   + (pop(twos)<<1)
                   + pop(ones);
        }
        */
        /// <summary>
        /// Get the number of set bits.
        /// </summary>
        /// <returns> The number of set bits. </returns>
        public virtual long Cardinality()
        {
            return BitUtil.Pop_Array(m_bits, 0, m_wlen);
        }
        /// <summary>
        /// Returns the popcount or cardinality of the intersection of the two sets.
        /// Neither set is modified.
        /// </summary>
        public static long IntersectionCount(OpenBitSet a, OpenBitSet b)
        {
            // Intersection beyond the shorter set is necessarily empty, so only the overlap is counted.
            return BitUtil.Pop_Intersect(a.m_bits, b.m_bits, 0, Math.Min(a.m_wlen, b.m_wlen));
        }
        /// <summary>
        /// Returns the popcount or cardinality of the union of the two sets.
        /// Neither set is modified.
        /// </summary>
        public static long UnionCount(OpenBitSet a, OpenBitSet b)
        {
            long tot = BitUtil.Pop_Union(a.m_bits, b.m_bits, 0, Math.Min(a.m_wlen, b.m_wlen));
            // Whichever set is longer contributes its tail bits unchanged.
            if (a.m_wlen < b.m_wlen)
            {
                tot += BitUtil.Pop_Array(b.m_bits, a.m_wlen, b.m_wlen - a.m_wlen);
            }
            else if (a.m_wlen > b.m_wlen)
            {
                tot += BitUtil.Pop_Array(a.m_bits, b.m_wlen, a.m_wlen - b.m_wlen);
            }
            return tot;
        }
        /// <summary>
        /// Returns the popcount or cardinality of "a and not b"
        /// or "intersection(a, not(b))".
        /// Neither set is modified.
        /// </summary>
        public static long AndNotCount(OpenBitSet a, OpenBitSet b)
        {
            long tot = BitUtil.Pop_AndNot(a.m_bits, b.m_bits, 0, Math.Min(a.m_wlen, b.m_wlen));
            if (a.m_wlen > b.m_wlen)
            {
                tot += BitUtil.Pop_Array(a.m_bits, b.m_wlen, a.m_wlen - b.m_wlen);
            }
            return tot;
        }
        /// <summary>
        /// Returns the popcount or cardinality of the exclusive-or of the two sets.
        /// Neither set is modified.
        /// </summary>
        public static long XorCount(OpenBitSet a, OpenBitSet b)
        {
            long tot = BitUtil.Pop_Xor(a.m_bits, b.m_bits, 0, Math.Min(a.m_wlen, b.m_wlen));
            if (a.m_wlen < b.m_wlen)
            {
                tot += BitUtil.Pop_Array(b.m_bits, a.m_wlen, b.m_wlen - a.m_wlen);
            }
            else if (a.m_wlen > b.m_wlen)
            {
                tot += BitUtil.Pop_Array(a.m_bits, b.m_wlen, a.m_wlen - b.m_wlen);
            }
            return tot;
        }
        /// <summary>
        /// Returns the index of the first set bit starting at the <paramref name="index"/> specified.
        /// -1 is returned if there are no more set bits.
        /// </summary>
        public virtual int NextSetBit(int index)
        {
            int i = index >> 6;
            if (i >= m_wlen)
            {
                return -1;
            }
            int subIndex = index & 0x3f; // index within the word
            long word = m_bits[i] >> subIndex; // skip all the bits to the right of index
            if (word != 0)
            {
                return (i << 6) + subIndex + Number.NumberOfTrailingZeros(word);
            }
            // Scan the remaining words for the first non-zero one.
            while (++i < m_wlen)
            {
                word = m_bits[i];
                if (word != 0)
                {
                    return (i << 6) + Number.NumberOfTrailingZeros(word);
                }
            }
            return -1;
        }
        /// <summary>
        /// Returns the index of the first set bit starting at the <paramref name="index"/> specified.
        /// -1 is returned if there are no more set bits.
        /// </summary>
        public virtual long NextSetBit(long index)
        {
            int i = (int)((long)((ulong)index >> 6));
            if (i >= m_wlen)
            {
                return -1;
            }
            int subIndex = (int)index & 0x3f; // index within the word
            long word = (long)((ulong)m_bits[i] >> subIndex); // skip all the bits to the right of index
            if (word != 0)
            {
                return (((long)i) << 6) + (subIndex + Number.NumberOfTrailingZeros(word));
            }
            while (++i < m_wlen)
            {
                word = m_bits[i];
                if (word != 0)
                {
                    return (((long)i) << 6) + Number.NumberOfTrailingZeros(word);
                }
            }
            return -1;
        }
        /// <summary>
        /// Returns the index of the first set bit starting downwards at
        /// the <paramref name="index"/> specified.
        /// -1 is returned if there are no more set bits.
        /// </summary>
        public virtual int PrevSetBit(int index)
        {
            int i = index >> 6;
            int subIndex;
            long word;
            if (i >= m_wlen)
            {
                // Start beyond the last word: begin the search at the final word's top bit.
                i = m_wlen - 1;
                if (i < 0)
                {
                    return -1;
                }
                subIndex = 63; // last possible bit
                word = m_bits[i];
            }
            else
            {
                if (i < 0)
                {
                    return -1;
                }
                subIndex = index & 0x3f; // index within the word
                word = (m_bits[i] << (63 - subIndex)); // skip all the bits to the left of index
            }
            if (word != 0)
            {
                return (i << 6) + subIndex - Number.NumberOfLeadingZeros(word); // See LUCENE-3197
            }
            // Scan earlier words for the last set bit.
            while (--i >= 0)
            {
                word = m_bits[i];
                if (word != 0)
                {
                    return (i << 6) + 63 - Number.NumberOfLeadingZeros(word);
                }
            }
            return -1;
        }
        /// <summary>
        /// Returns the index of the first set bit starting downwards at
        /// the <paramref name="index"/> specified.
        /// -1 is returned if there are no more set bits.
        /// </summary>
        public virtual long PrevSetBit(long index)
        {
            int i = (int)(index >> 6);
            int subIndex;
            long word;
            if (i >= m_wlen)
            {
                i = m_wlen - 1;
                if (i < 0)
                {
                    return -1;
                }
                subIndex = 63; // last possible bit
                word = m_bits[i];
            }
            else
            {
                if (i < 0)
                {
                    return -1;
                }
                subIndex = (int)index & 0x3f; // index within the word
                word = (m_bits[i] << (63 - subIndex)); // skip all the bits to the left of index
            }
            if (word != 0)
            {
                return (((long)i) << 6) + subIndex - Number.NumberOfLeadingZeros(word); // See LUCENE-3197
            }
            while (--i >= 0)
            {
                word = m_bits[i];
                if (word != 0)
                {
                    return (((long)i) << 6) + 63 - Number.NumberOfLeadingZeros(word);
                }
            }
            return -1;
        }
public object Clone()
{
//OpenBitSet obs = (OpenBitSet)base.Clone();
//obs.bits = (long[])obs.bits.Clone(); // hopefully an array clone is as fast(er) than arraycopy
OpenBitSet obs = new OpenBitSet((long[])m_bits.Clone(), m_wlen);
return obs;
}
/// <summary>
/// this = this AND other </summary>
public virtual void Intersect(OpenBitSet other)
{
int newLen = Math.Min(this.m_wlen, other.m_wlen);
long[] thisArr = this.m_bits;
long[] otherArr = other.m_bits;
// testing against zero can be more efficient
int pos = newLen;
while (--pos >= 0)
{
thisArr[pos] &= otherArr[pos];
}
if (this.m_wlen > newLen)
{
// fill zeros from the new shorter length to the old length
Arrays.Fill(m_bits, newLen, this.m_wlen, 0);
}
this.m_wlen = newLen;
}
/// <summary>
/// this = this OR other </summary>
public virtual void Union(OpenBitSet other)
{
int newLen = Math.Max(m_wlen, other.m_wlen);
EnsureCapacityWords(newLen);
Debug.Assert((numBits = Math.Max(other.numBits, numBits)) >= 0);
long[] thisArr = this.m_bits;
long[] otherArr = other.m_bits;
int pos = Math.Min(m_wlen, other.m_wlen);
while (--pos >= 0)
{
thisArr[pos] |= otherArr[pos];
}
if (this.m_wlen < newLen)
{
Array.Copy(otherArr, this.m_wlen, thisArr, this.m_wlen, newLen - this.m_wlen);
}
this.m_wlen = newLen;
}
/// <summary>
/// Clears every bit that is set in <paramref name="other"/>:
/// this = this AND NOT other.
/// </summary>
public virtual void Remove(OpenBitSet other)
{
    long[] mine = this.m_bits;
    long[] theirs = other.m_bits;
    // Only the overlapping words can have bits to remove.
    for (int w = Math.Min(m_wlen, other.m_wlen) - 1; w >= 0; w--)
    {
        mine[w] &= ~theirs[w];
    }
}
/// <summary>
/// this = this XOR other </summary>
public virtual void Xor(OpenBitSet other)
{
int newLen = Math.Max(m_wlen, other.m_wlen);
EnsureCapacityWords(newLen);
// DEBUG-only: intentional assignment inside the assert; widens numBits
// to cover the larger operand (mirrors upstream Lucene).
Debug.Assert((numBits = Math.Max(other.numBits, numBits)) >= 0);
long[] thisArr = this.m_bits;
long[] otherArr = other.m_bits;
// EnsureCapacityWords already raised m_wlen to newLen, so this evaluates
// to other.m_wlen whenever the other set is the shorter of the two.
int pos = Math.Min(m_wlen, other.m_wlen);
while (--pos >= 0)
{
thisArr[pos] ^= otherArr[pos];
}
if (this.m_wlen < newLen)
{
// Words only present in `other` XOR against implicit zeros: plain copy.
Array.Copy(otherArr, this.m_wlen, thisArr, this.m_wlen, newLen - this.m_wlen);
}
this.m_wlen = newLen;
}
// Aliases for java.util.BitSet compatibility.

/// <summary>Alias for <see cref="Intersect(OpenBitSet)"/>.</summary>
public virtual void And(OpenBitSet other)
{
    Intersect(other);
}
/// <summary>Alias for <see cref="Union(OpenBitSet)"/>.</summary>
public virtual void Or(OpenBitSet other)
{
    Union(other);
}
/// <summary>see <see cref="Remove(OpenBitSet)"/></summary>
public virtual void AndNot(OpenBitSet other)
{
    // Fix: the doc comment previously referenced AndNot itself instead of
    // the Remove method this alias delegates to.
    Remove(other);
}
/// <summary>
/// Returns <c>true</c> when the two sets share at least one set bit.
/// </summary>
public virtual bool Intersects(OpenBitSet other)
{
    long[] mine = this.m_bits;
    long[] theirs = other.m_bits;
    // Only the overlapping words can intersect.
    for (int w = Math.Min(this.m_wlen, other.m_wlen) - 1; w >= 0; w--)
    {
        if ((mine[w] & theirs[w]) != 0)
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Expand the <see cref="T:long[]"/> with the size given as a number of words (64 bit longs). </summary>
public virtual void EnsureCapacityWords(int numWords)
{
// Grow the backing array to at least numWords, then record the new word count.
m_bits = ArrayUtil.Grow(m_bits, numWords);
m_wlen = numWords;
// DEBUG-only: intentional assignment inside the assert; rounds numBits
// up to at least numWords * 64.
Debug.Assert((this.numBits = Math.Max(this.numBits, numWords << 6)) >= 0);
}
/// <summary>
/// Ensure that the <see cref="T:long[]"/> is big enough to hold numBits, expanding it if
/// necessary.
/// </summary>
public virtual void EnsureCapacity(long numBits)
{
EnsureCapacityWords(Bits2words(numBits));
// ensureCapacityWords sets numBits to a multiple of 64, but we want to set
// it to exactly what the app asked.
// (DEBUG-only; the assignment inside the assert is intentional.)
Debug.Assert((this.numBits = Math.Max(this.numBits, numBits)) >= 0);
}
/// <summary>
/// Shrinks <c>m_wlen</c> (the number of words in use) so that it excludes
/// any all-zero words at the end of the backing array.
/// </summary>
public virtual void TrimTrailingZeros()
{
    int last = m_wlen - 1;
    while (last >= 0 && m_bits[last] == 0)
    {
        last--;
    }
    m_wlen = last + 1;
}
/// <summary>
/// Returns the number of 64-bit words required to hold <paramref name="numBits"/> bits.
/// </summary>
public static int Bits2words(long numBits)
{
    // Ceiling division by 64, written so that numBits == 0 yields 0.
    long wordCount = ((numBits - 1) >> 6) + 1;
    return (int)wordCount;
}
/// <summary>
/// Two <see cref="OpenBitSet"/> instances are equal when exactly the same
/// bits are set, regardless of the capacity of their backing arrays.
/// </summary>
public override bool Equals(object o)
{
    if (ReferenceEquals(this, o))
    {
        return true;
    }
    if (!(o is OpenBitSet))
    {
        return false;
    }

    // Arrange so that `larger` has at least as many words as `smaller`.
    OpenBitSet larger = this;
    OpenBitSet smaller = (OpenBitSet)o;
    if (smaller.m_wlen > larger.m_wlen)
    {
        OpenBitSet tmp = larger;
        larger = smaller;
        smaller = tmp;
    }

    // Any set bit beyond the smaller set's range makes them unequal.
    for (int w = larger.m_wlen - 1; w >= smaller.m_wlen; w--)
    {
        if (larger.m_bits[w] != 0)
        {
            return false;
        }
    }

    // Compare the overlapping words.
    for (int w = smaller.m_wlen - 1; w >= 0; w--)
    {
        if (larger.m_bits[w] != smaller.m_bits[w])
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Hash code consistent with <see cref="Equals(object)"/>: the mix zeroes
/// out trailing all-zero words, so equal sets whose backing arrays differ
/// only in capacity hash identically.
/// </summary>
public override int GetHashCode()
{
// Start with a zero hash and use a mix that results in zero if the input is zero.
// this effectively truncates trailing zeros without an explicit check.
long h = 0;
for (int i = m_bits.Length; --i >= 0; )
{
h ^= m_bits[i];
h = (h << 1) | ((long)((ulong)h >> 63)); // rotate left
}
// fold leftmost bits into right and add a constant to prevent
// empty sets from returning 0, which is too common.
return (int)((h >> 32) ^ h) + unchecked((int)0x98761234);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.DirectoryServices.ActiveDirectory
{
using System;
using System.Collections;
using System.Diagnostics;
using System.ComponentModel;
using System.Security.Principal;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using System.Globalization;
public class GlobalCatalog : DomainController
{
// private variables
private ActiveDirectorySchema _schema = null;
private bool _disabled = false;
#region constructors
internal GlobalCatalog(DirectoryContext context, string globalCatalogName)
: base(context, globalCatalogName)
{ }
internal GlobalCatalog(DirectoryContext context, string globalCatalogName, DirectoryEntryManager directoryEntryMgr)
: base(context, globalCatalogName, directoryEntryMgr)
{ }
#endregion constructors
#region public methods
/// <summary>
/// Binds to the directory server targeted by <paramref name="context"/>,
/// verifies (via rootDSE) that it is an Active Directory server whose
/// "isGlobalCatalogReady" attribute is true, and returns a
/// <see cref="GlobalCatalog"/> for it.
/// </summary>
public static GlobalCatalog GetGlobalCatalog(DirectoryContext context)
{
string gcDnsName = null;
bool isGlobalCatalog = false;
DirectoryEntryManager directoryEntryMgr = null;
// check that the context argument is not null
if (context == null)
throw new ArgumentNullException("context");
// target should be GC
if (context.ContextType != DirectoryContextType.DirectoryServer)
{
throw new ArgumentException(SR.TargetShouldBeGC, "context");
}
// target should be a server
if (!(context.isServer()))
{
throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFound , context.Name), typeof(GlobalCatalog), context.Name);
}
// work with copy of the context
context = new DirectoryContext(context);
try
{
// Get dns name of the dc
// by binding to root dse and getting the "dnsHostName" attribute
// (also check that the "isGlobalCatalogReady" attribute is true)
directoryEntryMgr = new DirectoryEntryManager(context);
DirectoryEntry rootDSE = DirectoryEntryManager.GetDirectoryEntry(context, WellKnownDN.RootDSE);
if (!Utils.CheckCapability(rootDSE, Capability.ActiveDirectory))
{
throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFound , context.Name), typeof(GlobalCatalog), context.Name);
}
gcDnsName = (string)PropertyManager.GetPropertyValue(context, rootDSE, PropertyManager.DnsHostName);
isGlobalCatalog = (bool)Boolean.Parse((string)PropertyManager.GetPropertyValue(context, rootDSE, PropertyManager.IsGlobalCatalogReady));
if (!isGlobalCatalog)
{
throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFound , context.Name), typeof(GlobalCatalog), context.Name);
}
}
catch (COMException e)
{
int errorCode = e.ErrorCode;
// 0x8007203a appears to be the "server down" HRESULT -- treated here
// as "no GC found" rather than a generic COM failure.
if (errorCode == unchecked((int)0x8007203a))
{
throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFound , context.Name), typeof(GlobalCatalog), context.Name);
}
else
{
throw ExceptionHelper.GetExceptionFromCOMException(context, e);
}
}
return new GlobalCatalog(context, gcDnsName, directoryEntryMgr);
}
/// <summary>
/// Locates one global catalog server in the forest that the given
/// (forest-type) context targets.
/// </summary>
public static new GlobalCatalog FindOne(DirectoryContext context)
{
    if (context == null)
        throw new ArgumentNullException("context");

    if (context.ContextType != DirectoryContextType.Forest)
        throw new ArgumentException(SR.TargetShouldBeForest, "context");

    return FindOneWithCredentialValidation(context, null, 0);
}
/// <summary>
/// Locates one global catalog server in the given site of the forest that
/// the (forest-type) context targets.
/// </summary>
public static new GlobalCatalog FindOne(DirectoryContext context, string siteName)
{
    if (context == null)
        throw new ArgumentNullException("context");

    if (context.ContextType != DirectoryContextType.Forest)
        throw new ArgumentException(SR.TargetShouldBeForest, "context");

    if (siteName == null)
        throw new ArgumentNullException("siteName");

    return FindOneWithCredentialValidation(context, siteName, 0);
}
/// <summary>
/// Locates one global catalog server in the forest, honoring the supplied
/// locator option flags.
/// </summary>
public static new GlobalCatalog FindOne(DirectoryContext context, LocatorOptions flag)
{
    if (context == null)
        throw new ArgumentNullException("context");

    if (context.ContextType != DirectoryContextType.Forest)
        throw new ArgumentException(SR.TargetShouldBeForest, "context");

    return FindOneWithCredentialValidation(context, null, flag);
}
/// <summary>
/// Locates one global catalog server in the given site, honoring the
/// supplied locator option flags.
/// </summary>
public static new GlobalCatalog FindOne(DirectoryContext context, string siteName, LocatorOptions flag)
{
    if (context == null)
        throw new ArgumentNullException("context");

    if (context.ContextType != DirectoryContextType.Forest)
        throw new ArgumentException(SR.TargetShouldBeForest, "context");

    if (siteName == null)
        throw new ArgumentNullException("siteName");

    return FindOneWithCredentialValidation(context, siteName, flag);
}
/// <summary>
/// Returns every global catalog server in the forest that the given
/// (forest-type) context targets.
/// </summary>
public static new GlobalCatalogCollection FindAll(DirectoryContext context)
{
    if (context == null)
        throw new ArgumentNullException("context");

    if (context.ContextType != DirectoryContextType.Forest)
        throw new ArgumentException(SR.TargetShouldBeForest, "context");

    // Clone the context so the caller's instance is never mutated.
    return FindAllInternal(new DirectoryContext(context), null);
}
/// <summary>
/// Returns every global catalog server in the given site of the forest
/// that the (forest-type) context targets.
/// </summary>
public static new GlobalCatalogCollection FindAll(DirectoryContext context, string siteName)
{
    if (context == null)
        throw new ArgumentNullException("context");

    if (context.ContextType != DirectoryContextType.Forest)
        throw new ArgumentException(SR.TargetShouldBeForest, "context");

    if (siteName == null)
        throw new ArgumentNullException("siteName");

    // Clone the context so the caller's instance is never mutated.
    return FindAllInternal(new DirectoryContext(context), siteName);
}
/// <summary>
/// Not valid on a <see cref="GlobalCatalog"/> instance — the server is
/// already a global catalog — so this always throws
/// <see cref="InvalidOperationException"/> (after the disposed check).
/// </summary>
public override GlobalCatalog EnableGlobalCatalog()
{
    CheckIfDisposed();
    throw new InvalidOperationException(SR.CannotPerformOnGCObject);
}
/// <summary>
/// Clears the low "is GC" bit on this server's NTDS Settings object's
/// "options" attribute, marks this wrapper as disabled (subsequent calls
/// throw), and returns a plain <see cref="DomainController"/> for the
/// same server.
/// </summary>
public DomainController DisableGlobalCatalog()
{
CheckIfDisposed();
CheckIfDisabled();
// bind to the server object
DirectoryEntry serverNtdsaEntry = directoryEntryMgr.GetCachedDirectoryEntry(NtdsaObjectName);
// reset the NTDSDSA_OPT_IS_GC flag on the "options" property
int options = 0;
try
{
if (serverNtdsaEntry.Properties[PropertyManager.Options].Value != null)
{
options = (int)serverNtdsaEntry.Properties[PropertyManager.Options].Value;
}
serverNtdsaEntry.Properties[PropertyManager.Options].Value = options & (~1);
serverNtdsaEntry.CommitChanges();
}
catch (COMException e)
{
throw ExceptionHelper.GetExceptionFromCOMException(context, e);
}
// mark as disabled so CheckIfDisabled rejects further GC operations
_disabled = true;
// return a domain controller object
return new DomainController(context, Name);
}
/// <summary>
/// Always <c>true</c> for a live <see cref="GlobalCatalog"/> instance;
/// throws if the instance was disposed or disabled.
/// </summary>
public override bool IsGlobalCatalog()
{
    CheckIfDisposed();
    CheckIfDisabled();
    return true;
}
/// <summary>
/// Returns the schema properties that are replicated to the global catalog
/// (<see cref="PropertyTypes.InGlobalCatalog"/>). The schema object is
/// created lazily and cached in <c>_schema</c>.
/// </summary>
public ReadOnlyActiveDirectorySchemaPropertyCollection FindAllProperties()
{
    CheckIfDisposed();
    CheckIfDisabled();

    // create an ActiveDirectorySchema object
    if (_schema == null)
    {
        string schemaNC = null;
        try
        {
            schemaNC = directoryEntryMgr.ExpandWellKnownDN(WellKnownDN.SchemaNamingContext);
        }
        catch (COMException e)
        {
            throw ExceptionHelper.GetExceptionFromCOMException(context, e);
        }

        // Bind the schema through a context that targets this server
        // directly. BUGFIX: schemaContext was previously built but ignored,
        // and the original (possibly non-server) context was passed instead.
        DirectoryContext schemaContext = Utils.GetNewDirectoryContext(Name, DirectoryContextType.DirectoryServer, context);
        _schema = new ActiveDirectorySchema(schemaContext, schemaNC);
    }

    // return the global catalog replicated properties
    return _schema.FindAllProperties(PropertyTypes.InGlobalCatalog);
}
/// <summary>
/// Returns a <see cref="DirectorySearcher"/> bound through the GC://
/// provider to this server (see <c>InternalGetDirectorySearcher</c>).
/// </summary>
public override DirectorySearcher GetDirectorySearcher()
{
    CheckIfDisposed();
    CheckIfDisabled();
    return InternalGetDirectorySearcher();
}
#endregion public methods
#region private methods
/// <summary>
/// Guards GC-only operations: throws once
/// <see cref="DisableGlobalCatalog"/> has been called on this instance.
/// </summary>
private void CheckIfDisabled()
{
    if (_disabled)
    {
        throw new InvalidOperationException(SR.GCDisabled);
    }
}
/// <summary>
/// Locates a GC and validates the caller's credentials against it before
/// returning it. If the located (possibly cached) server turns out to be
/// down, retries exactly once with ForceRediscovery added to the flags.
/// A GC whose credentials could not be validated is always disposed.
/// </summary>
internal static new GlobalCatalog FindOneWithCredentialValidation(DirectoryContext context, string siteName, LocatorOptions flag)
{
GlobalCatalog gc;
bool retry = false;
bool credsValidated = false;
// work with copy of the context
context = new DirectoryContext(context);
// authenticate against this GC to validate the credentials
gc = FindOneInternal(context, context.Name, siteName, flag);
try
{
ValidateCredential(gc, context);
credsValidated = true;
}
catch (COMException e)
{
// 0x8007203a appears to be the "server down" HRESULT (same code is
// treated as server-down elsewhere in this class).
if (e.ErrorCode == unchecked((int)0x8007203a))
{
// server is down , so try again with force rediscovery if the flags did not already contain force rediscovery
if ((flag & LocatorOptions.ForceRediscovery) == 0)
{
retry = true;
}
else
{
throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFoundInForest , context.Name), typeof(GlobalCatalog), null);
}
}
else
{
throw ExceptionHelper.GetExceptionFromCOMException(context, e);
}
}
finally
{
// Never leak a GC whose credentials could not be validated.
if (!credsValidated)
{
gc.Dispose();
}
}
if (retry)
{
credsValidated = false;
// Second (and last) attempt: force the locator to rediscover a server.
gc = FindOneInternal(context, context.Name, siteName, flag | LocatorOptions.ForceRediscovery);
try
{
ValidateCredential(gc, context);
credsValidated = true;
}
catch (COMException e)
{
if (e.ErrorCode == unchecked((int)0x8007203a))
{
// server is down
throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFoundInForest , context.Name), typeof(GlobalCatalog), null);
}
else
{
throw ExceptionHelper.GetExceptionFromCOMException(context, e);
}
}
finally
{
if (!credsValidated)
{
gc.Dispose();
}
}
}
return gc;
}
/// <summary>
/// Core GC locator: resolves the forest name if one was not supplied,
/// calls the DsGetDcName wrapper with GC-required flags, and wraps the
/// returned server name in a <see cref="GlobalCatalog"/> bound to a
/// server-specific context.
/// </summary>
internal static new GlobalCatalog FindOneInternal(DirectoryContext context, string forestName, string siteName, LocatorOptions flag)
{
    DomainControllerInfo domainControllerInfo;
    int errorCode = 0;

    if (siteName != null && siteName.Length == 0)
    {
        throw new ArgumentException(SR.EmptyStringParameter, "siteName");
    }

    // check that the flags passed have only the valid bits set
    if (((long)flag & (~((long)LocatorOptions.AvoidSelf | (long)LocatorOptions.ForceRediscovery | (long)LocatorOptions.KdcRequired | (long)LocatorOptions.TimeServerRequired | (long)LocatorOptions.WriteableRequired))) != 0)
    {
        throw new ArgumentException(SR.InvalidFlags, "flag");
    }

    if (forestName == null)
    {
        // get the dns name of the logged on forest
        DomainControllerInfo tempDomainControllerInfo;
        int error = Locator.DsGetDcNameWrapper(null, DirectoryContext.GetLoggedOnDomain(), null, (long)PrivateLocatorFlags.DirectoryServicesRequired, out tempDomainControllerInfo);

        if (error == NativeMethods.ERROR_NO_SUCH_DOMAIN)
        {
            // throw not found exception
            throw new ActiveDirectoryObjectNotFoundException(SR.ContextNotAssociatedWithDomain, typeof(GlobalCatalog), null);
        }
        else if (error != 0)
        {
            // BUGFIX: previously threw with `errorCode` (still 0 at this
            // point), discarding the real failure code from this call.
            throw ExceptionHelper.GetExceptionFromErrorCode(error);
        }

        Debug.Assert(tempDomainControllerInfo.DnsForestName != null);
        forestName = tempDomainControllerInfo.DnsForestName;
    }

    // call DsGetDcName
    errorCode = Locator.DsGetDcNameWrapper(null, forestName, siteName, (long)flag | (long)(PrivateLocatorFlags.GCRequired | PrivateLocatorFlags.DirectoryServicesRequired), out domainControllerInfo);

    if (errorCode == NativeMethods.ERROR_NO_SUCH_DOMAIN)
    {
        throw new ActiveDirectoryObjectNotFoundException(String.Format(CultureInfo.CurrentCulture, SR.GCNotFoundInForest , forestName), typeof(GlobalCatalog), null);
    }
    // this can only occur when flag is being explicitly passed (since the flags that we pass internally are valid)
    if (errorCode == NativeMethods.ERROR_INVALID_FLAGS)
    {
        throw new ArgumentException(SR.InvalidFlags, "flag");
    }
    else if (errorCode != 0)
    {
        throw ExceptionHelper.GetExceptionFromErrorCode(errorCode);
    }

    // create a GlobalCatalog object
    // the name is returned in the form "\\servername", so skip the "\\"
    Debug.Assert(domainControllerInfo.DomainControllerName.Length > 2);
    string globalCatalogName = domainControllerInfo.DomainControllerName.Substring(2);

    // create a new context object for the global catalog
    DirectoryContext gcContext = Utils.GetNewDirectoryContext(globalCatalogName, DirectoryContextType.DirectoryServer, context);
    return new GlobalCatalog(gcContext, globalCatalogName);
}
/// <summary>
/// Enumerates every GC replica (optionally restricted to one site) and
/// wraps each in a <see cref="GlobalCatalog"/> bound to a server-specific
/// context.
/// </summary>
internal static GlobalCatalogCollection FindAllInternal(DirectoryContext context, string siteName)
{
    if (siteName != null && siteName.Length == 0)
        throw new ArgumentException(SR.EmptyStringParameter, "siteName");

    ArrayList catalogs = new ArrayList();
    foreach (string gcName in Utils.GetReplicaList(context, null /* not specific to any partition */, siteName, false /* isDefaultNC */, false /* isADAM */, true /* mustBeGC */))
    {
        DirectoryContext gcContext = Utils.GetNewDirectoryContext(gcName, DirectoryContextType.DirectoryServer, context);
        catalogs.Add(new GlobalCatalog(gcContext, gcName));
    }
    return new GlobalCatalogCollection(catalogs);
}
/// <summary>
/// Builds a <see cref="DirectorySearcher"/> over a GC:// directory entry
/// for this server, carrying the context's credentials.
/// </summary>
private DirectorySearcher InternalGetDirectorySearcher()
{
    DirectoryEntry de = new DirectoryEntry("GC://" + Name);

    // Add ServerBind only when the runtime supports it.
    de.AuthenticationType = DirectoryContext.ServerBindSupported
        ? Utils.DefaultAuthType | AuthenticationTypes.ServerBind
        : Utils.DefaultAuthType;

    de.Username = context.UserName;
    de.Password = context.Password;
    return new DirectorySearcher(de);
}
#endregion
}
}
| |
// ===========================================================
// Copyright (C) 2014-2015 Kendar.org
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
// modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following conditions:
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
// OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ===========================================================
using System;
using System.Collections.Generic;
using Node.Cs.Authorization;
using Node.Cs.Lib;
using Node.Cs.Lib.Attributes;
using Node.Cs.Lib.Controllers;
using Node.Cs.MVC.Controllers;
using Node.Cs.Project.Template.Src.Models;
namespace Node.Cs.Project.Template.Src.Controllers
{
public class AccountController : ControllerBase
{
//
// GET: /Account/LogOn
public IEnumerable<IResponse> LogOn()
{
    // Render an empty log-on form.
    yield return View(new LogOnModel());
}
//
// POST: /Account/LogOn
// Handles the log-on form post: on success issues the forms-auth cookie and
// redirects; on failure adds a model error and re-renders the form.
[HttpPost]
public IEnumerable<IResponse> LogOn(LogOnModel model, string returnUrl)
{
if (ModelState.IsValid)
{
var authProvider = GlobalVars.AuthenticationDataProvider;
if (authProvider.IsUserAuthorized(model.UserName, model.Password))
{
// Issue the forms-authentication cookie before redirecting.
Authorize.FormSetAuthCookie(HttpContext, model.UserName, model.RememberMe);
// Open-redirect guard: only follow returnUrl when it is a local,
// single-slash-rooted path (rejects "//host" and "/\host" tricks).
if (Url.IsLocalUrl(returnUrl) && returnUrl.Length > 1 && returnUrl.StartsWith("/")
&& !returnUrl.StartsWith("//") && !returnUrl.StartsWith("/\\"))
{
yield return Redirect(returnUrl);
}
else
{
yield return RedirectToAction("Index", "Home");
}
// NOTE(review): if the caller kept enumerating past the redirect it
// would also receive the final View(model) below; presumably the
// pipeline stops consuming after a redirect response -- confirm.
}
else
{
ModelState.AddModelError("", "The user name or password provided is incorrect.");
}
}
// If we got this far, something failed, redisplay form
yield return View(model);
}
//
// GET: /Account/LogOff
public IEnumerable<IResponse> LogOff()
{
    // Clear the forms-authentication cookie, then go back to the home page.
    Authorize.FormSignOut(HttpContext);
    yield return RedirectToAction("Index", "Home");
}
//
// GET: /Account/Register
public IEnumerable<IResponse> Register()
{
    // Render an empty registration form.
    yield return View(new RegisterModel());
}
//
// POST: /Account/Register
// Handles the registration form post. The provider call runs via
// InvokeAsTaskAndWait, whose IResponse is yielded first -- presumably this
// lets the host pump while the task completes; the lambda writes `user` and
// `createStatus` back through the closure.
[HttpPost]
public IEnumerable<IResponse> Register(RegisterModel model)
{
if (ModelState.IsValid)
{
var authProvider = GlobalVars.AuthenticationDataProvider;
IUser user = null;
// Attempt to register the user
var createStatus = AuthenticationCreateStatus.ProviderError;
yield return InvokeAsTaskAndWait(() =>
{
user = authProvider.CreateUser(model.UserName, model.Password, model.Email,
null, null, true, null, out createStatus);
});
if (createStatus == AuthenticationCreateStatus.Success)
{
// Sign the new user in with a session (non-persistent) cookie.
Authorize.FormSetAuthCookie(HttpContext, user.UserName, false /* createPersistentCookie */);
yield return RedirectToAction("Index", "Home");
}
else
{
// Surface the provider status as a human-readable model error.
ModelState.AddModelError("", ErrorCodeToString(createStatus));
}
}
// If we got this far, something failed, redisplay form
yield return View(model);
}
//
// GET: /Account/ChangePassword
[Authorize]
public IEnumerable<IResponse> ChangePassword()
{
    // Render an empty change-password form (authenticated users only).
    yield return View(new ChangePasswordModel());
}
//
// POST: /Account/ChangePassword
// Handles the change-password form post for the authenticated user.
[Authorize]
[HttpPost]
public IEnumerable<IResponse> ChangePassword(ChangePasswordModel model)
{
if (ModelState.IsValid)
{
// ChangePassword will throw an exception rather
// than yield return false in certain failure scenarios.
bool changePasswordSucceeded;
try
{
var authProvider = GlobalVars.AuthenticationDataProvider;
// Look up the current user by identity name; `true` marks them online.
var currentUser = authProvider.GetUser(User.Identity.Name, true /* userIsOnline */);
changePasswordSucceeded = currentUser.ChangePassword(model.OldPassword, model.NewPassword);
}
catch (Exception)
{
// Deliberate catch-all (see comment above): any provider failure is
// treated as a failed change and surfaced as a model error below.
changePasswordSucceeded = false;
}
if (changePasswordSucceeded)
{
yield return RedirectToAction("ChangePasswordSuccess");
}
else
{
ModelState.AddModelError("", "The current password is incorrect or the new password is invalid.");
}
}
// If we got this far, something failed, redisplay form
yield return View(model);
}
//
// GET: /Account/ChangePasswordSuccess
public IEnumerable<IResponse> ChangePasswordSuccess()
{
    // Render the static confirmation page.
    yield return View();
}
#region Status Codes
// Maps an AuthenticationCreateStatus value to the user-facing error message
// shown on the registration form.
private string ErrorCodeToString(AuthenticationCreateStatus createStatus)
{
// See http://go.microsoft.com/fwlink/?LinkID=177550 for
// a full list of status codes.
switch (createStatus)
{
case AuthenticationCreateStatus.DuplicateUserName:
return "User name already exists. Please enter a different user name.";
case AuthenticationCreateStatus.DuplicateEmail:
return "A user name for that e-mail address already exists. Please enter a different e-mail address.";
case AuthenticationCreateStatus.InvalidPassword:
return "The password provided is invalid. Please enter a valid password value.";
case AuthenticationCreateStatus.InvalidEmail:
return "The e-mail address provided is invalid. Please check the value and try again.";
case AuthenticationCreateStatus.InvalidAnswer:
return "The password retrieval answer provided is invalid. Please check the value and try again.";
case AuthenticationCreateStatus.InvalidQuestion:
return "The password retrieval question provided is invalid. Please check the value and try again.";
case AuthenticationCreateStatus.InvalidUserName:
return "The user name provided is invalid. Please check the value and try again.";
case AuthenticationCreateStatus.ProviderError:
return "The authentication provider returned an error. Please verify your entry and try again. If the problem persists, please contact your system administrator.";
case AuthenticationCreateStatus.UserRejected:
return "The user creation request has been canceled. Please verify your entry and try again. If the problem persists, please contact your system administrator.";
default:
// Any status not listed above gets a generic message.
return "An unknown error occurred. Please verify your entry and try again. If the problem persists, please contact your system administrator.";
}
}
#endregion
}
}
| |
/* =======================================================================
* vCard Library for .NET
* Copyright (c) 2007-2009 David Pinch; http://wwww.thoughtproject.com
* See LICENSE.TXT for licensing information.
* ======================================================================= */
using System;
using System.IO;
using System.Text;
using Thought.vCards;
namespace VcardLibrary
{
/// <summary>
/// Implements the standard vCard 2.1 and 3.0 text formats.
/// </summary>
public class vCardStandardWriter : vCardWriter
{
// Backing field for EmbedInternetImages.
private bool embedInternetImages;
// Backing field for EmbedLocalImages (defaults to true in the constructor).
private bool embedLocalImages;
// Backing field for Options.
private vCardStandardWriterOptions options;
// Backing field for ProductId.
private string productId;
/// <summary>
/// The characters that are escaped per the original
/// vCard specification.
/// </summary>
private readonly char[] standardEscapedCharacters =
new char[] { ',', '\\', ';', '\r', '\n' };
/// <summary>
/// The characters that are escaped by Microsoft Outlook.
/// </summary>
/// <remarks>
/// Microsoft Outlook does not properly decode escaped
/// commas in values.
/// </remarks>
private readonly char[] outlookEscapedCharacters =
new char[] { '\\', ';', '\r', '\n' };
/// <summary>
/// Creates a new standard writer, configured for the highest supported
/// vCard version (currently 3.0). Local images are embedded by default.
/// </summary>
public vCardStandardWriter()
{
    embedLocalImages = true;
}
/// <summary>
/// Controls whether images referenced by Internet URLs are embedded into
/// the output; when <c>false</c>, the URL itself is written instead.
/// </summary>
public bool EmbedInternetImages
{
    get { return embedInternetImages; }
    set { embedInternetImages = value; }
}
/// <summary>
/// Controls whether images referenced by local files are embedded into
/// the output; when <c>false</c>, the file reference is written instead.
/// </summary>
public bool EmbedLocalImages
{
    get { return embedLocalImages; }
    set { embedLocalImages = value; }
}
/// <summary>
/// Extended options for the vCard writer.
/// </summary>
public vCardStandardWriterOptions Options
{
    get { return options; }
    set { options = value; }
}
/// <summary>
/// The product ID to use when writing a vCard.
/// </summary>
public string ProductId
{
    get { return productId; }
    set { productId = value; }
}
// The next set of functions generate raw vCard properties
// from an object in the vCard object model. Every method
// has a collection (into which new properties should be
// placed) and a vCard object (from which the properties
// should be generated).
#region [ BuildProperties ]

/// <summary>
/// Generates the full ordered property list for the given vCard, from the
/// BEGIN:VCARD header through the END:VCARD footer (RFC 2426 section 2.1.1).
/// </summary>
/// <returns>
/// A <see cref="vCardPropertyCollection"/> that contains all
/// properties for the current vCard, including the header
/// and footer properties.
/// </returns>
/// <seealso cref="vCard"/>
/// <seealso cref="vCardProperty"/>
private vCardPropertyCollection BuildProperties(vCard card)
{
    vCardPropertyCollection properties = new vCardPropertyCollection();

    // Every vCard opens with BEGIN:VCARD.
    properties.Add(new vCardProperty("BEGIN", "VCARD"));

    // Each helper appends zero or more properties for one vCard field.
    BuildProperties_NAME(properties, card);
    BuildProperties_SOURCE(properties, card);
    BuildProperties_N(properties, card);
    BuildProperties_FN(properties, card);
    BuildProperties_ADR(properties, card);
    BuildProperties_BDAY(properties, card);
    BuildProperties_CATEGORIES(properties, card);
    BuildProperties_CLASS(properties, card);
    BuildProperties_EMAIL(properties, card);
    BuildProperties_GEO(properties, card);
    BuildProperties_KEY(properties, card);
    BuildProperties_LABEL(properties, card);
    BuildProperties_MAILER(properties, card);
    BuildProperties_NICKNAME(properties, card);
    BuildProperties_NOTE(properties, card);
    BuildProperties_ORG(properties, card);
    BuildProperties_PHOTO(properties, card);
    BuildProperties_PRODID(properties, card);
    BuildProperties_REV(properties, card);
    BuildProperties_ROLE(properties, card);
    BuildProperties_TEL(properties, card);
    BuildProperties_TITLE(properties, card);
    BuildProperties_TZ(properties, card);
    BuildProperties_UID(properties, card);
    BuildProperties_URL(properties, card);
    BuildProperties_X_WAB_GENDER(properties, card);

    // Close the card.
    properties.Add(new vCardProperty("END", "VCARD"));
    return properties;
}

#endregion
#region [ BuildProperties_ADR ]

/// <summary>
/// Builds one ADR property per non-blank delivery address.
/// </summary>
private void BuildProperties_ADR(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardDeliveryAddress address in card.DeliveryAddresses)
    {
        bool hasContent =
            !string.IsNullOrEmpty(address.City) ||
            !string.IsNullOrEmpty(address.Country) ||
            !string.IsNullOrEmpty(address.PostalCode) ||
            !string.IsNullOrEmpty(address.Region) ||
            !string.IsNullOrEmpty(address.Street);

        // An entirely blank address produces no ADR property.
        if (!hasContent)
            continue;

        // ADR subvalues, all required, in this fixed order: PO box,
        // extended address, street, locality (city), region
        // (state/province), postal code, country.
        vCardValueCollection values = new vCardValueCollection(';');
        values.Add(string.Empty);
        values.Add(string.Empty);
        values.Add(address.Street);
        values.Add(address.City);
        values.Add(address.Region);
        values.Add(address.PostalCode);
        values.Add(address.Country);

        vCardProperty property = new vCardProperty("ADR", values);

        if (address.IsDomestic)
            property.Subproperties.Add("DOM");
        if (address.IsInternational)
            property.Subproperties.Add("INTL");
        if (address.IsParcel)
            property.Subproperties.Add("PARCEL");
        if (address.IsPostal)
            property.Subproperties.Add("POSTAL");
        if (address.IsHome)
            property.Subproperties.Add("HOME");
        if (address.IsWork)
            property.Subproperties.Add("WORK");

        properties.Add(property);
    }
}

#endregion
#region [ BuildProperties_BDAY ]

/// <summary>
/// Builds the BDAY (birthdate) property. Per the original author's note,
/// the output follows Microsoft Outlook's date style (YYYYMMDD).
/// </summary>
private void BuildProperties_BDAY(
    vCardPropertyCollection properties,
    vCard card)
{
    // No birthdate on the card, no BDAY property.
    if (!card.BirthDate.HasValue)
        return;

    properties.Add(new vCardProperty("BDAY", card.BirthDate.Value));
}

#endregion
#region [ BuildProperties_CATEGORIES ]

/// <summary>
/// Builds the CATEGORIES property as a comma-separated value list,
/// skipping blank entries; no property is written for an empty list.
/// </summary>
private void BuildProperties_CATEGORIES(
    vCardPropertyCollection properties,
    vCard card)
{
    if (card.Categories.Count == 0)
        return;

    vCardValueCollection values = new vCardValueCollection(',');
    foreach (string category in card.Categories)
    {
        if (!string.IsNullOrEmpty(category))
            values.Add(category);
    }

    properties.Add(new vCardProperty("CATEGORIES", values));
}

#endregion
#region [ BuildProperties_CLASS ]

/// <summary>
/// Builds the CLASS property from the card's access classification.
/// An Unknown classification produces no property at all; an unrecognized
/// value throws <see cref="NotSupportedException"/>.
/// </summary>
private void BuildProperties_CLASS(
    vCardPropertyCollection properties,
    vCard card)
{
    string value;
    switch (card.AccessClassification)
    {
        case vCardAccessClassification.Unknown:
            return; // no CLASS property is written
        case vCardAccessClassification.Confidential:
            value = "CONFIDENTIAL";
            break;
        case vCardAccessClassification.Private:
            value = "PRIVATE";
            break;
        case vCardAccessClassification.Public:
            value = "PUBLIC";
            break;
        default:
            throw new NotSupportedException();
    }

    vCardProperty property = new vCardProperty("CLASS");
    property.Value = value;
    properties.Add(property);
}

#endregion
#region [ BuildProperties_EMAIL ]
/// <summary>
/// Builds EMAIL properties.
/// </summary>
private void BuildProperties_EMAIL(
vCardPropertyCollection properties,
vCard card)
{
// The EMAIL property contains an electronic
// mail address for the purpose. A vCard may contain
// as many email addresses as needed. The format also
// supports various vendors, such as CompuServe addresses
// and Internet SMTP addresses.
//
// EMAIL;INTERNET:support@fairmetric.com
foreach (vCardEmailAddress emailAddress in card.EmailAddresses)
{
vCardProperty property = new vCardProperty();
property.Name = "EMAIL";
property.Value = emailAddress.Address;
// PREF marks the preferred address among several.
if (emailAddress.IsPreferred)
{
property.Subproperties.Add("PREF");
}
// Translate the address type to its vendor subproperty token;
// unrecognized types fall back to INTERNET (default case below).
switch (emailAddress.EmailType)
{
case vCardEmailAddressType.Internet:
property.Subproperties.Add("INTERNET");
break;
case vCardEmailAddressType.AOL:
property.Subproperties.Add("AOL");
break;
case vCardEmailAddressType.AppleLink:
property.Subproperties.Add("AppleLink");
break;
case vCardEmailAddressType.AttMail:
property.Subproperties.Add("ATTMail");
break;
case vCardEmailAddressType.CompuServe:
property.Subproperties.Add("CIS");
break;
case vCardEmailAddressType.eWorld:
property.Subproperties.Add("eWorld");
break;
case vCardEmailAddressType.IBMMail:
property.Subproperties.Add("IBMMail");
break;
case vCardEmailAddressType.MCIMail:
property.Subproperties.Add("MCIMail");
break;
case vCardEmailAddressType.PowerShare:
property.Subproperties.Add("POWERSHARE");
break;
case vCardEmailAddressType.Prodigy:
property.Subproperties.Add("PRODIGY");
break;
case vCardEmailAddressType.Telex:
property.Subproperties.Add("TLX");
break;
case vCardEmailAddressType.X400:
property.Subproperties.Add("X400");
break;
default:
property.Subproperties.Add("INTERNET");
break;
}
properties.Add(property);
}
}
#endregion
#region [ BuildProperties_FN ]
/// <summary>
/// Builds the FN (formatted name) property, e.g. "John Smith".
/// </summary>
private void BuildProperties_FN(
    vCardPropertyCollection properties,
    vCard card)
{
    // FN is written only when the card actually has a formatted name.
    if (string.IsNullOrEmpty(card.FormattedName))
        return;

    properties.Add(new vCardProperty("FN", card.FormattedName));
}
#endregion
#region [ BuildProperties_GEO ]
/// <summary>
/// Builds the GEO property (latitude and longitude of the person
/// or company of the vCard).
/// </summary>
private void BuildProperties_GEO(
    vCardPropertyCollection properties,
    vCard card)
{
    // GEO is written only when both coordinates are available.
    if (card.Latitude.HasValue && card.Longitude.HasValue)
    {
        vCardProperty property = new vCardProperty();
        property.Name = "GEO";

        // BUG FIX: the coordinates were previously formatted with the
        // current culture, which can emit a decimal comma (e.g. "48,85")
        // and corrupt the semicolon-separated GEO value. The vCard text
        // format expects period decimal separators, so format with the
        // invariant culture.
        property.Value = string.Format(
            System.Globalization.CultureInfo.InvariantCulture,
            "{0};{1}",
            card.Latitude.Value,
            card.Longitude.Value);

        properties.Add(property);
    }
}
#endregion
#region [ BuildProperties_KEY ]
/// <summary>
/// Builds KEY properties (embedded security certificates).
/// </summary>
private void BuildProperties_KEY(
    vCardPropertyCollection properties,
    vCard card)
{
    // Each certificate becomes one KEY property whose subproperty
    // names the certificate type.
    foreach (vCardCertificate certificate in card.Certificates)
    {
        vCardProperty keyProperty = new vCardProperty();
        keyProperty.Name = "KEY";
        keyProperty.Value = certificate.Data;
        keyProperty.Subproperties.Add(certificate.KeyType);
        properties.Add(keyProperty);
    }
}
#endregion
#region [ BuildProperties_LABEL ]
/// <summary>
/// Builds LABEL properties (formatted delivery address labels).
/// </summary>
private void BuildProperties_LABEL(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardDeliveryLabel label in card.DeliveryLabels)
    {
        // BUG FIX: guard with IsNullOrEmpty instead of Text.Length so a
        // null label text cannot throw a NullReferenceException. This
        // also matches the null checks used by the other builders.
        if (string.IsNullOrEmpty(label.Text))
            continue;

        vCardProperty property = new vCardProperty("LABEL", label.Text);

        // Address classification flags; more than one may apply.
        if (label.IsDomestic)
            property.Subproperties.Add("DOM");
        if (label.IsInternational)
            property.Subproperties.Add("INTL");
        if (label.IsParcel)
            property.Subproperties.Add("PARCEL");
        if (label.IsPostal)
            property.Subproperties.Add("POSTAL");
        if (label.IsHome)
            property.Subproperties.Add("HOME");
        if (label.IsWork)
            property.Subproperties.Add("WORK");

        // Labels commonly contain line breaks; hint to the encoder
        // to use QUOTED-PRINTABLE so they survive the text format.
        property.Subproperties.Add("ENCODING", "QUOTED-PRINTABLE");

        properties.Add(property);
    }
}
#endregion
#region [ BuildProperties_MAILER ]
/// <summary>
/// Builds the MAILER property: the software that generated the
/// vCard (see section 2.4.3 of the vCard 2.1 specification).
/// Support among consumers is not widespread.
/// </summary>
private void BuildProperties_MAILER(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.Mailer))
        return;

    properties.Add(new vCardProperty("MAILER", card.Mailer));
}
#endregion
#region [ BuildProperties_N ]
/// <summary>
/// Builds the N (structured name) property, required by section
/// 3.1.2 of RFC 2426. It holds the semicolon-separated components
/// Family;Given;Additional;Prefix;Suffix, e.g. N:Pinch;David.
/// </summary>
private void BuildProperties_N(
    vCardPropertyCollection properties,
    vCard card)
{
    // Assemble the five name components as a semicolon-delimited
    // value list, in the order mandated by the specification.
    vCardValueCollection nameParts = new vCardValueCollection(';');
    nameParts.Add(card.FamilyName);
    nameParts.Add(card.GivenName);
    nameParts.Add(card.AdditionalNames);
    nameParts.Add(card.NamePrefix);
    nameParts.Add(card.NameSuffix);

    properties.Add(new vCardProperty("N", nameParts));
}
#endregion
#region [ BuildProperties_NAME ]
/// <summary>
/// Builds the NAME property (the display name of the vCard).
/// </summary>
private void BuildProperties_NAME(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.DisplayName))
        return;

    properties.Add(new vCardProperty("NAME", card.DisplayName));
}
#endregion
#region [ BuildProperties_NICKNAME ]
/// <summary>
/// Builds the NICKNAME property (RFC 2426 section 3.1.3): the
/// familiar name(s) of the person, written as one comma-separated
/// list, e.g. NICKNAME:Jim,Jimmy.
/// </summary>
private void BuildProperties_NICKNAME(
    vCardPropertyCollection properties,
    vCard card)
{
    if (card.Nicknames.Count == 0)
        return;

    // All nicknames share a single property; the value collection
    // joins them with commas when the property is encoded.
    vCardValueCollection nicknameValues = new vCardValueCollection(',');
    nicknameValues.Add(card.Nicknames);

    properties.Add(new vCardProperty("NICKNAME", nicknameValues));
}
#endregion
#region [ BuildProperties_NOTE ]
/// <summary>
/// Builds NOTE properties (free-form commentary notes).
/// </summary>
private void BuildProperties_NOTE(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardNote note in card.Notes)
    {
        // Skip notes with no text.
        if (string.IsNullOrEmpty(note.Text))
            continue;

        vCardProperty noteProperty = new vCardProperty();
        noteProperty.Name = "NOTE";
        noteProperty.Value = note.Text;

        // Record the language of the note when one is specified.
        if (!string.IsNullOrEmpty(note.Language))
        {
            noteProperty.Subproperties.Add("language", note.Language);
        }

        // Notes often contain line breaks; hint to the encoder to
        // write the value as QUOTED-PRINTABLE.
        noteProperty.Subproperties.Add("ENCODING", "QUOTED-PRINTABLE");

        properties.Add(noteProperty);
    }
}
#endregion
#region [ BuildProperties_ORG ]
/// <summary>
/// Builds the ORG property (company or organization name),
/// e.g. ORG:FairMetric LLC.
/// </summary>
private void BuildProperties_ORG(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.Organization))
        return;

    properties.Add(new vCardProperty("ORG", card.Organization));
}
#endregion
#region [ BuildProperties_PHOTO ]
/// <summary>
/// Builds PHOTO properties. A photo is written either as embedded
/// binary data or as a URI link, depending on whether it has a URL
/// and on the writer's embedLocalImages / embedInternetImages settings.
/// </summary>
private void BuildProperties_PHOTO(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardPhoto photo in card.Photos)
    {
        if (photo.Url == null)
        {
            // This photo does not have a URL associated
            // with it. Therefore a property can be
            // generated only if the image data is loaded.
            // Otherwise there is not enough information.
            if (photo.IsLoaded)
            {
                properties.Add(
                    new vCardProperty("PHOTO", photo.GetBytes()));
            }
        }
        else
        {
            // This photo has a URL associated with it. The
            // PHOTO property can either be linked as an image
            // or embedded, if desired. Local file links and
            // internet links are controlled by separate settings.
            bool doEmbedded =
                photo.Url.IsFile ? this.embedLocalImages : this.embedInternetImages;
            if (doEmbedded)
            {
                // According to the settings of the card writer,
                // this linked image should be embedded into the
                // vCard data. Attempt to fetch the data.
                try
                {
                    //photo.Fetch(); // PCL
                    // NOTE(review): the fetch call is commented out
                    // (unavailable in the PCL build), so embedding a
                    // URL photo relies on image data being already
                    // loaded -- confirm photo.GetBytes() below
                    // succeeds without the fetch.
                }
                catch
                {
                    // An error was encountered. The image can
                    // still be written as a link, however.
                    doEmbedded = false;
                }
            }
            // At this point, doEmbedded is true only if (a) the
            // writer was configured to embed the image, and (b)
            // the image was successfully downloaded.
            if (doEmbedded)
            {
                properties.Add(
                    new vCardProperty("PHOTO", photo.GetBytes()));
            }
            else
            {
                vCardProperty uriPhotoProperty =
                    new vCardProperty("PHOTO");
                // Set the VALUE subproperty to indicate that
                // the data for the photo is a URI.
                uriPhotoProperty.Subproperties.Add("VALUE", "URI");
                uriPhotoProperty.Value = photo.Url.ToString();
                properties.Add(uriPhotoProperty);
            }
        }
    }
}
#endregion
#region [ BuildProperties_PRODID ]
/// <summary>
/// Builds the PRODID property (identifier of the product that
/// created the vCard).
/// </summary>
private void BuildProperties_PRODID(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.ProductId))
        return;

    vCardProperty prodIdProperty = new vCardProperty();
    prodIdProperty.Name = "PRODID";
    prodIdProperty.Value = card.ProductId;
    properties.Add(prodIdProperty);
}
#endregion
#region [ BuildProperties_REV ]
/// <summary>
/// Builds the REV property (the date/time the vCard was last revised).
/// </summary>
private void BuildProperties_REV(
    vCardPropertyCollection properties,
    vCard card)
{
    // Only written when a revision date is known.
    if (card.RevisionDate.HasValue)
    {
        // NOTE(review): ToString() here uses the current culture's
        // default date/time format, while vCard consumers generally
        // expect an ISO 8601 timestamp -- confirm what downstream
        // readers accept before changing the format.
        vCardProperty property =
            new vCardProperty("REV", card.RevisionDate.Value.ToString());
        properties.Add(property);
    }
}
#endregion
#region [ BuildProperties_ROLE ]
/// <summary>
/// Builds the ROLE property (the role of the person at his/her
/// organization).
/// </summary>
private void BuildProperties_ROLE(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.Role))
        return;

    properties.Add(new vCardProperty("ROLE", card.Role));
}
#endregion
#region [ BuildProperties_SOURCE ]
/// <summary>
/// Builds SOURCE properties (URIs where an up-to-date copy of the
/// vCard can be obtained).
/// </summary>
private void BuildProperties_SOURCE(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardSource source in card.Sources)
    {
        vCardProperty sourceProperty = new vCardProperty();
        sourceProperty.Name = "SOURCE";
        sourceProperty.Value = source.Uri.ToString();

        // The optional CONTEXT subproperty qualifies the URI scheme.
        if (!string.IsNullOrEmpty(source.Context))
            sourceProperty.Subproperties.Add("CONTEXT", source.Context);

        properties.Add(sourceProperty);
    }
}
#endregion
#region [ BuildProperties_TEL ]
/// <summary>
/// Builds TEL properties: the person's telephone numbers, including
/// non-voice numbers such as fax and BBS lines,
/// e.g. TEL;VOICE;WORK:1-800-929-5805.
/// </summary>
private void BuildProperties_TEL(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardPhone phone in card.Phones)
    {
        // Each phone becomes one TEL property whose subproperties
        // classify the number, e.g.
        //
        //   TEL;HOME:+1-612-555-1212
        //   TEL;FAX;HOME:+1-612-555-1212
        vCardProperty telProperty = new vCardProperty();
        telProperty.Name = "TEL";
        telProperty.Value = phone.FullNumber;

        // Any combination of these classification flags may apply.
        if (phone.IsBBS)
            telProperty.Subproperties.Add("BBS");
        if (phone.IsCar)
            telProperty.Subproperties.Add("CAR");
        if (phone.IsCellular)
            telProperty.Subproperties.Add("CELL");
        if (phone.IsFax)
            telProperty.Subproperties.Add("FAX");
        if (phone.IsHome)
            telProperty.Subproperties.Add("HOME");
        if (phone.IsISDN)
            telProperty.Subproperties.Add("ISDN");
        if (phone.IsMessagingService)
            telProperty.Subproperties.Add("MSG");
        if (phone.IsModem)
            telProperty.Subproperties.Add("MODEM");
        if (phone.IsPager)
            telProperty.Subproperties.Add("PAGER");
        if (phone.IsPreferred)
            telProperty.Subproperties.Add("PREF");
        if (phone.IsVideo)
            telProperty.Subproperties.Add("VIDEO");
        if (phone.IsVoice)
            telProperty.Subproperties.Add("VOICE");
        if (phone.IsWork)
            telProperty.Subproperties.Add("WORK");

        properties.Add(telProperty);
    }
}
#endregion
#region [ BuildProperties_TITLE ]
/// <summary>
/// Builds the TITLE property (the person's job title),
/// e.g. TITLE:Systems Analyst.
/// </summary>
private void BuildProperties_TITLE(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.Title))
        return;

    properties.Add(new vCardProperty("TITLE", card.Title));
}
#endregion
#region [ BuildProperties_TZ ]
/// <summary>
/// Builds the TZ property (the time zone of the vCard).
/// </summary>
private void BuildProperties_TZ(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.TimeZone))
        return;

    properties.Add(new vCardProperty("TZ", card.TimeZone));
}
#endregion
#region [ BuildProperties_UID ]
/// <summary>
/// Builds the UID property (a globally unique identifier for the vCard).
/// </summary>
private void BuildProperties_UID(
    vCardPropertyCollection properties,
    vCard card)
{
    if (string.IsNullOrEmpty(card.UniqueId))
        return;

    vCardProperty uidProperty = new vCardProperty();
    uidProperty.Name = "UID";
    uidProperty.Value = card.UniqueId;
    properties.Add(uidProperty);
}
#endregion
#region [ BuildProperties_URL ]
/// <summary>
/// Builds URL properties (web site addresses associated with the card).
/// </summary>
private void BuildProperties_URL(
    vCardPropertyCollection properties,
    vCard card)
{
    foreach (vCardWebsite webSite in card.Websites)
    {
        if (string.IsNullOrEmpty(webSite.Url))
            continue;

        vCardProperty urlProperty =
            new vCardProperty("URL", webSite.Url.ToString());

        // Mark work-related sites with the WORK subproperty.
        if (webSite.IsWorkSite)
            urlProperty.Subproperties.Add("WORK");

        properties.Add(urlProperty);
    }
}
#endregion
#region [ BuildProperties_X_WAB_GENDER ]
/// <summary>
/// Builds the X-WAB-GENDER property, an extended (custom) property
/// supported by Microsoft Outlook: 1 = female, 2 = male. Other
/// gender values produce no property.
/// </summary>
private void BuildProperties_X_WAB_GENDER(
    vCardPropertyCollection properties,
    vCard card)
{
    if (card.Gender == vCardGender.Female)
    {
        properties.Add(new vCardProperty("X-WAB-GENDER", "1"));
    }
    else if (card.Gender == vCardGender.Male)
    {
        properties.Add(new vCardProperty("X-WAB-GENDER", "2"));
    }
}
#endregion
// The next set of functions translate raw values into
// various string encodings. A vCard file is a text file
// with a defined format; values that break the format (such
// as binary values or strings with ASCII control characters)
// must be encoded.
#region [ EncodeBase64(byte) ]
/// <summary>
/// Converts a single byte to its BASE64 string representation.
/// </summary>
public static string EncodeBase64(byte value)
{
    // Wrap the byte in a one-element array and reuse the framework encoder.
    byte[] single = { value };
    return Convert.ToBase64String(single);
}
#endregion
#region [ EncodeBase64(byte[]) ]
/// <summary>
/// Converts a byte array to a BASE64 string.
/// </summary>
/// <param name="value">The raw bytes to encode; passed straight to
/// <see cref="Convert.ToBase64String(byte[])"/>, so a null array
/// throws an <see cref="ArgumentNullException"/>.</param>
public static string EncodeBase64(byte[] value)
{
    return Convert.ToBase64String(value);
}
#endregion
#region [ EncodeBase64(int) ]
/// <summary>
/// Converts a 32-bit integer to a BASE64 string. The value is
/// serialized least-significant byte first regardless of platform,
/// preserving this writer's historical output.
/// </summary>
public static string EncodeBase64(int value)
{
    // Build the four bytes explicitly so the result does not depend
    // on the machine's native endianness.
    byte[] littleEndian =
    {
        (byte)value,
        (byte)(value >> 8),
        (byte)(value >> 16),
        (byte)(value >> 24)
    };
    return Convert.ToBase64String(littleEndian);
}
#endregion
#region [ EncodeEscaped(string) ]
/// <summary>
/// Encodes a string using simple escape codes, selecting the escape
/// character set from the writer's options.
/// </summary>
public string EncodeEscaped(string value)
{
    // Outlook-compatible mode leaves commas unescaped; otherwise the
    // full standard escape set is applied.
    bool ignoreCommas =
        (this.options & vCardStandardWriterOptions.IgnoreCommas) ==
            vCardStandardWriterOptions.IgnoreCommas;

    return EncodeEscaped(
        value,
        ignoreCommas ? outlookEscapedCharacters : standardEscapedCharacters);
}
#endregion
#region [ EncodeEscaped(string, char[]) ]
/// <summary>
/// Encodes a string using simple backslash escape sequences: each
/// character found in <paramref name="escaped"/> is prefixed with a
/// backslash, with LF and CR rewritten as \n and \r so the output
/// stays on one physical line. The set typically contains the
/// backslash, colon and semicolon.
/// </summary>
/// <param name="value">The string to encode; returned unchanged when
/// null or empty.</param>
/// <param name="escaped">The characters that require escaping.</param>
public static string EncodeEscaped(
    string value,
    char[] escaped)
{
    if (escaped == null)
        throw new ArgumentNullException("escaped");

    if (string.IsNullOrEmpty(value))
        return value;

    StringBuilder buffer = new StringBuilder();
    int startIndex = 0;

    while (startIndex < value.Length)
    {
        // Locate the next character that needs an escape sequence.
        int nextIndex = value.IndexOfAny(escaped, startIndex);

        if (nextIndex == -1)
        {
            // Nothing left to escape; copy the remainder verbatim.
            buffer.Append(value, startIndex, value.Length - startIndex);
            break;
        }

        // CR/LF become their letter forms; everything else is
        // emitted as-is after the backslash.
        char found = value[nextIndex];
        char replacement =
            found == '\n' ? 'n' :
            found == '\r' ? 'r' : found;

        // Copy the untouched run, then the escape sequence.
        buffer.Append(value, startIndex, nextIndex - startIndex);
        buffer.Append('\\');
        buffer.Append(replacement);

        startIndex = nextIndex + 1;
    }

    return buffer.ToString();
}
#endregion
#region [ EncodeQuotedPrintable ]
/// <summary>
/// Converts a string to quoted-printable format.
/// </summary>
/// <param name="value">
/// The value to encode; returned unchanged when null or empty.
/// </param>
/// <returns>
/// The value encoded in Quoted-Printable format. A trailing
/// whitespace character is hex-encoded (e.g. "=20") as the QP
/// rules require.
/// </returns>
public static string EncodeQuotedPrintable(string value)
{
    if (string.IsNullOrEmpty(value))
        return value;

    StringBuilder builder = new StringBuilder();

    foreach (char c in value)
    {
        int code = (int)c;

        // Literal characters: tab, and the printable ASCII range
        // 32..126 except the equal sign (61), which introduces
        // escape sequences and must itself be escaped.
        bool literal =
            code == 9 ||
            (code >= 32 && code != 61 && code <= 126);

        if (literal)
        {
            builder.Append(c);
        }
        else
        {
            // NOTE(review): chars above 0xFF produce more than two
            // hex digits here, which is not strictly valid QP --
            // confirm inputs are ASCII/Latin-1 before relying on this.
            builder.Append('=');
            builder.Append(code.ToString("X2"));
        }
    }

    // A bare trailing space or tab would be stripped by decoders,
    // so re-encode the final character when it is whitespace.
    char lastChar = builder[builder.Length - 1];
    if (char.IsWhiteSpace(lastChar))
    {
        builder.Remove(builder.Length - 1, 1);
        builder.Append('=');
        builder.Append(((int)lastChar).ToString("X2"));
    }

    return builder.ToString();
}
#endregion
/// <summary>
/// Returns the property encoded into a standard vCard
/// NAME;SUBPROP=VALUE;...:VALUE line.
/// </summary>
/// <param name="property">The property to encode; must be non-null and named.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="property"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the property has no name.</exception>
public string EncodeProperty(vCardProperty property)
{
    if (property == null)
        throw new ArgumentNullException("property");

    if (string.IsNullOrEmpty(property.Name))
        throw new ArgumentException();

    StringBuilder builder = new StringBuilder();

    // Write the property name followed by each subproperty as
    // ";NAME" or ";NAME=VALUE".
    builder.Append(property.Name);

    foreach (vCardSubproperty subproperty in property.Subproperties)
    {
        builder.Append(';');
        builder.Append(subproperty.Name);

        if (!string.IsNullOrEmpty(subproperty.Value))
        {
            builder.Append('=');
            builder.Append(subproperty.Value);
        }
    }

    // The property name and all subproperties have been
    // written to the string builder (the colon separator
    // has not been written). The next step is to write
    // the value. Depending on the type of value and any
    // characters in the value, it may be necessary to
    // use a non-default encoding. For example, byte arrays
    // are written encoded in BASE64.
    if (property.Value == null)
    {
        builder.Append(':');
    }
    else
    {
        Type valueType = property.Value.GetType();

        if (valueType == typeof(byte[]))
        {
            // A byte array should be encoded in BASE64 format.
            builder.Append(";ENCODING=BASE64:");
            builder.Append(EncodeBase64((byte[])property.Value));
        }
        else if (valueType == typeof(vCardValueCollection))
        {
            // A value collection is written as escaped values joined
            // by the collection's separator character.
            vCardValueCollection values = (vCardValueCollection)property.Value;

            builder.Append(':');
            for (int index = 0; index < values.Count; index++)
            {
                builder.Append(EncodeEscaped(values[index]));
                if (index < values.Count - 1)
                {
                    builder.Append(values.Separator);
                }
            }
        }
        else
        {
            // The object will be converted to a string (if it is
            // not a string already) and encoded if necessary.
            // The first step is to get the string value.
            string stringValue = null;

            if (valueType == typeof(char[]))
            {
                stringValue = new string(((char[])property.Value));
            }
            else
            {
                stringValue = property.Value.ToString();
            }

            builder.Append(':');

            // Honor an explicit ENCODING=QUOTED-PRINTABLE subproperty;
            // otherwise apply the writer's escape encoding.
            switch (property.Subproperties.GetValue("ENCODING"))
            {
                case "QUOTED-PRINTABLE":
                    builder.Append(EncodeQuotedPrintable(stringValue));
                    break;
                default:
                    builder.Append(EncodeEscaped(stringValue));
                    break;
            }
        }
    }

    return builder.ToString();
}
/// <summary>
/// Writes a vCard to an output text writer.
/// </summary>
/// <param name="card">The card to serialize; must not be null.</param>
/// <param name="output">The destination writer; must not be null.</param>
public override void Write(vCard card, TextWriter output)
{
    if (card == null)
        throw new ArgumentNullException("card");

    if (output == null)
        throw new ArgumentNullException("output");

    // Translate the card into vCard properties, then delegate to the
    // property-collection overload for the actual serialization.
    Write(BuildProperties(card), output);
}
/// <summary>
/// Writes a collection of vCard properties to an output text writer,
/// one encoded NAME:VALUE line per property.
/// </summary>
/// <param name="properties">The properties to write; must not be null.</param>
/// <param name="output">The destination writer; must not be null.</param>
public void Write(vCardPropertyCollection properties, TextWriter output)
{
    if (properties == null)
        throw new ArgumentNullException("properties");

    if (output == null)
        throw new ArgumentNullException("output");

    // Each property is encoded independently and written on its own line.
    foreach (vCardProperty current in properties)
    {
        output.WriteLine(EncodeProperty(current));
    }
}
}
}
| |
/*
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License. See License.txt in the project root for license information.
*/
namespace Adxstudio.Xrm.Cases
{
using System;
using System.Collections.Generic;
using System.Linq;
using Adxstudio.Xrm.Notes;
using Adxstudio.Xrm.Resources;
using Adxstudio.Xrm.Web;
using Microsoft.Crm.Sdk.Messages;
using Microsoft.Xrm.Client;
using Microsoft.Xrm.Client.Messages;
using Microsoft.Xrm.Portal.Core;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Client;
using Microsoft.Xrm.Sdk.Messages;
using Microsoft.Xrm.Sdk.Metadata;
/// <summary>
/// Provides data operations for a single case, as represented by a Case (incident) entity.
/// </summary>
public class CaseDataAdapter : ICaseDataAdapter
{
    /// <summary>
    /// Initializes a new adapter for the given incident.
    /// </summary>
    /// <param name="incident">Reference to an "incident" entity; any other logical name is rejected.</param>
    /// <param name="dependencies">Supplies service contexts, the portal user, URL provider, etc.</param>
    public CaseDataAdapter(EntityReference incident, IDataAdapterDependencies dependencies)
    {
        if (incident == null) throw new ArgumentNullException("incident");
        if (incident.LogicalName != "incident") throw new ArgumentException(string.Format(ResourceManager.GetString("Value_Missing_For_LogicalName"), incident.LogicalName), "incident");
        if (dependencies == null) throw new ArgumentNullException("dependencies");
        Incident = incident;
        Dependencies = dependencies;
    }

    // Dependency bundle used to obtain service contexts and portal user info.
    protected IDataAdapterDependencies Dependencies { get; private set; }

    // Reference to the incident this adapter operates on.
    protected EntityReference Incident { get; set; }

    /// <summary>
    /// Creates a web-originated note (annotation) on the case, optionally with a file attachment.
    /// Errors are logged and rethrown.
    /// </summary>
    public virtual void AddNote(string text, string fileName = null, string contentType = null, byte[] fileContent = null, EntityReference ownerId = null)
    {
        try
        {
            var da = new AnnotationDataAdapter(Dependencies);
            var annotation = new Annotation
            {
                Subject = AnnotationHelper.BuildNoteSubject(Dependencies),
                // The web prefix tags the note as portal-created.
                NoteText = string.Format("{0}{1}", AnnotationHelper.WebAnnotationPrefix, text),
                Regarding = Incident,
                Owner = ownerId
            };
            // Attach a file only when content plus both metadata fields are present.
            if (fileContent != null && fileContent.Length > 0 && !string.IsNullOrEmpty(fileName) && !string.IsNullOrEmpty(contentType))
            {
                annotation.FileAttachment = AnnotationDataAdapter.CreateFileAttachment(EnsureValidFileName(fileName), contentType, fileContent);
            }
            da.CreateAnnotation(annotation);
        }
        catch (Exception e)
        {
            WebEventSource.Log.GenericErrorException(new Exception("Create annotation error", e));
            throw;
        }
    }

    /// <summary>
    /// Cancels the case, first canceling any open related activities
    /// (which would otherwise block the state change).
    /// </summary>
    public virtual void Cancel()
    {
        var serviceContext = Dependencies.GetServiceContextForWrite();
        var incident = serviceContext.GetCase(Incident.Id);
        if (incident == null)
        {
            throw new InvalidOperationException("Unable to retrieve the case with ID {0}.".FormatWith(Incident.Id));
        }
        CancelRelatedActivities(serviceContext, incident);
        // -1 lets the platform pick the default status for the Canceled state.
        serviceContext.SetState((int)IncidentState.Canceled, -1, incident.ToEntityReference());
    }

    /// <summary>
    /// Reopens (reactivates) the case.
    /// </summary>
    public virtual void Reopen()
    {
        var serviceContext = Dependencies.GetServiceContextForWrite();
        serviceContext.SetState((int)IncidentState.Active, -1, Incident);
    }

    /// <summary>
    /// Resolves the case using the platform-default resolved status code.
    /// </summary>
    public virtual void Resolve(int? customerSatisfactionCode, string resolutionSubject, string resolutionDescription)
    {
        Resolve(customerSatisfactionCode, resolutionSubject, resolutionDescription, null);
    }

    /// <summary>
    /// Resolves the case: cancels open related activities, stamps the resolution
    /// onto the incident itself, then closes it with an incidentresolution activity.
    /// </summary>
    /// <param name="statuscode">Resolved status code, or null for the platform default (-1).</param>
    public virtual void Resolve(int? customerSatisfactionCode, string resolutionSubject, string resolutionDescription, int? statuscode)
    {
        if (string.IsNullOrWhiteSpace(resolutionDescription))
        {
            throw new ArgumentException("Value can't be null or whitespace.", "resolutionDescription");
        }
        var serviceContext = Dependencies.GetServiceContextForWrite();
        var incident = serviceContext.GetCase(Incident.Id);
        if (incident == null)
        {
            throw new InvalidOperationException("Unable to retrieve the case with ID {0}.".FormatWith(Incident.Id));
        }
        CancelRelatedActivities(serviceContext, incident);
        // Here, we both create a standard incidentresolution activity, but also set a more permanent
        // resolution text and date on the incident itself. This makes it easier to retrieve and reference
        // a canonical resolution for the incident, without having to use a kbarticle, or risk having
        // resolutions be canceled/deleted in the reopening of cases.
        if (customerSatisfactionCode != null)
            incident["customersatisfactioncode"] = new OptionSetValue((int)customerSatisfactionCode);
        incident["adx_resolution"] = resolutionDescription;
        incident["adx_resolutiondate"] = DateTime.UtcNow;
        serviceContext.UpdateObject(incident);
        serviceContext.SaveChanges();
        var resolution = new Entity("incidentresolution");
        resolution["incidentid"] = incident.ToEntityReference();
        resolution["statuscode"] = new OptionSetValue(-1);
        resolution["subject"] = resolutionSubject;
        resolution["description"] = resolutionDescription;
        var status = statuscode ?? -1;
        serviceContext.Execute(new CloseIncidentRequest
        {
            IncidentResolution = resolution,
            Status = new OptionSetValue(status)
        });
    }

    /// <summary>
    /// Retrieves the case, or null when it no longer exists.
    /// </summary>
    public virtual ICase Select()
    {
        var serviceContext = Dependencies.GetServiceContext();
        var incident = serviceContext.GetCase(Incident.Id);
        if (incident == null)
        {
            return null;
        }
        var responsibleContact = GetResponsibleContact(serviceContext, incident);
        var url = Dependencies.GetUrlProvider().GetUrl(serviceContext, incident);
        return new Case(incident, GetIncidentMetadata(serviceContext), url, responsibleContact);
    }

    /// <summary>
    /// Computes the current portal user's access rights to the case, based
    /// on publish-to-web visibility and the user's permission scopes.
    /// </summary>
    public virtual ICaseAccess SelectAccess()
    {
        var @case = Select();
        if (@case == null)
        {
            return CaseAccess.None;
        }
        // A case is publicly visible only when published to the web AND resolved.
        var @public = @case.PublishToWeb && @case.IsResolved;
        var user = Dependencies.GetPortalUser();
        if (user == null)
        {
            // Anonymous users get at most public (read-only) visibility.
            return new CaseAccess(@public: @public);
        }
        var permissionScopes = Dependencies.GetPermissionScopesProviderForPortalUser().SelectPermissionScopes();
        // If the user *is* the customer on the case, case access is based on any Self-scoped permissions
        // they have.
        if (@case.HasCustomer(user))
        {
            return CaseAccess.FromPermissions(permissionScopes.Self, @public);
        }
        // If the customer on the case is a contact, look up the parent customer account for that case, and
        // base case access on the merger of any Account-scoped permissions the user has for that account.
        EntityReference parentcustomerid;
        if (TryGetContactParentCustomer(@case.Customer, out parentcustomerid))
        {
            return CaseAccess.FromPermissions(
                permissionScopes.Accounts.Where(permissions =>
                    permissions.Account.Equals(parentcustomerid)), @public);
        }
        // Otherwise, their access is based on the merger of any Account-scoped permissions they
        // have for the case customer account.
        //
        // Permissions are merged by ORing individual rights. For example, if the user is granted the Read
        // right on any of their case access permissions, they get Read access to the case.
        return CaseAccess.FromPermissions(
            permissionScopes.Accounts.Where(permissions =>
                @case.HasCustomer(permissions.Account)), @public);
    }

    /// <summary>
    /// Retrieves the portal-created notes on the case, oldest first.
    /// </summary>
    public virtual IEnumerable<IAnnotation> SelectNotes()
    {
        var serviceContext = Dependencies.GetServiceContext();
        IAnnotationDataAdapter annotationDataAdapter = new AnnotationDataAdapter(Dependencies);
        // Only notes carrying the web prefix (portal-created) are returned.
        return serviceContext.CreateQuery("annotation")
            .Where(e => e.GetAttributeValue<EntityReference>("objectid") == Incident
                && e.GetAttributeValue<string>("objecttypecode") == Incident.LogicalName
                && e.GetAttributeValue<string>("notetext").Contains(AnnotationHelper.WebAnnotationPrefix))
            .ToArray()
            .Select(entity => annotationDataAdapter.GetAnnotation(entity))
            .OrderBy(e => e.CreatedOn)
            .ToArray();
    }

    /// <summary>
    /// Returns the case resolution recorded on the incident, or an empty
    /// sequence when the case has no resolution text.
    /// </summary>
    public virtual IEnumerable<ICaseResolution> SelectResolutions()
    {
        var @case = Select();
        if (@case == null || string.IsNullOrEmpty(@case.Resolution))
        {
            return Enumerable.Empty<ICaseResolution>();
        }
        return new[] { new CaseResolution(@case.Resolution, @case.ResolutionDate.GetValueOrDefault()) };
    }

    // Resolves the parent customer (account) of a contact-type case customer.
    // Returns false when the customer is not a contact, cannot be found, or
    // has no parent customer.
    private bool TryGetContactParentCustomer(EntityReference customer, out EntityReference parentcustomerid)
    {
        parentcustomerid = null;
        if (customer == null || customer.LogicalName != "contact")
        {
            return false;
        }
        var serviceContext = Dependencies.GetServiceContext();
        var contact = serviceContext.CreateQuery("contact")
            .FirstOrDefault(e => e.GetAttributeValue<Guid>("contactid") == customer.Id);
        if (contact == null)
        {
            return false;
        }
        parentcustomerid = contact.GetAttributeValue<EntityReference>("parentcustomerid");
        return parentcustomerid != null;
    }

    // Retrieves the attribute metadata for the incident entity.
    internal static EntityMetadata GetIncidentMetadata(OrganizationServiceContext serviceContext)
    {
        var retrieveAttributeRequest = new RetrieveEntityRequest
        {
            LogicalName = "incident", EntityFilters = EntityFilters.Attributes
        };
        var response = (RetrieveEntityResponse)serviceContext.Execute(retrieveAttributeRequest);
        return response.EntityMetadata;
    }

    // Standard activitypointer state codes.
    private enum ActivityPointerState
    {
        Open = 0,
        Completed = 1,
        Canceled = 2,
        Scheduled = 3,
    }

    /// <summary>
    /// Cancel any activity pointers related to the incident, as case resolution will fail if there are any
    /// open activities on the case.
    /// </summary>
    private static void CancelRelatedActivities(OrganizationServiceContext serviceContext, Entity incident)
    {
        if (serviceContext == null) throw new ArgumentNullException("serviceContext");
        if (incident == null) throw new ArgumentNullException("incident");
        var activityPointers = incident.GetRelatedEntities(serviceContext, new Relationship("Incident_ActivityPointers"));
        foreach (var activityPointer in activityPointers)
        {
            var statecode = activityPointer.GetAttributeValue<OptionSetValue>("statecode");
            if (statecode == null)
            {
                continue;
            }
            // Only Open and Scheduled activities block resolution; skip the rest.
            if (!(statecode.Value == (int)ActivityPointerState.Open || statecode.Value == (int)ActivityPointerState.Scheduled))
            {
                continue;
            }
            // The state change must target the concrete activity entity, not the pointer.
            var activityid = activityPointer.GetAttributeValue<Guid>("activityid");
            var activitytypecode = activityPointer.GetAttributeValue<string>("activitytypecode");
            var activityEntity = serviceContext.CreateQuery(activitytypecode).FirstOrDefault(e => e.GetAttributeValue<Guid>("activityid") == activityid);
            if (activityEntity == null)
            {
                continue;
            }
            serviceContext.SetState(activityEntity.ToEntityReference(), new OptionSetValue((int)ActivityPointerState.Canceled), new OptionSetValue(-1));
        }
    }

    // Strips any backslash-delimited path prefix, keeping only the file name.
    private static string EnsureValidFileName(string fileName)
    {
        return fileName.IndexOf("\\", StringComparison.Ordinal) >= 0 ? fileName.Substring(fileName.LastIndexOf("\\", StringComparison.Ordinal) + 1) : fileName;
    }

    // Builds a localized note subject identifying the submitting contact.
    // NOTE(review): this helper appears unused within this class (AddNote
    // uses AnnotationHelper.BuildNoteSubject instead) -- confirm before removing.
    private static string GetNoteSubject(OrganizationServiceContext serviceContext, EntityReference user)
    {
        if (serviceContext == null) throw new ArgumentNullException("serviceContext");
        var now = DateTime.UtcNow;
        if (user == null || user.LogicalName != "contact")
        {
            return string.Format(ResourceManager.GetString("Note_Created_On_Message"), now);
        }
        var contact = serviceContext.CreateQuery("contact").FirstOrDefault(e => e.GetAttributeValue<Guid>("contactid") == user.Id);
        if (contact == null)
        {
            return string.Format(ResourceManager.GetString("Note_Created_On_Message"), now);
        }
        // Tack the contact entity reference onto the end of the note subject, so that if we really wanted to, we
        // could parse this subject and find the portal user that submitted the note.
        return string.Format(ResourceManager.GetString("Note_Created_On_DateTime_By_Message"), now, contact.GetAttributeValue<string>("fullname"), contact.LogicalName, contact.Id);
    }

    // Finds the contact to display as "responsible" for the incident:
    // the responsiblecontactid when set, else a contact-type customerid, else null.
    private static Entity GetResponsibleContact(OrganizationServiceContext serviceContext, Entity incident)
    {
        if (serviceContext == null) throw new ArgumentNullException("serviceContext");
        if (incident == null) throw new ArgumentNullException("incident");
        var responsibleContact = incident.GetAttributeValue<EntityReference>("responsiblecontactid");
        if (responsibleContact != null)
        {
            return serviceContext.CreateQuery("contact").FirstOrDefault(e => e.GetAttributeValue<Guid>("contactid") == responsibleContact.Id);
        }
        var customer = incident.GetAttributeValue<EntityReference>("customerid");
        if (customer != null && customer.LogicalName == "contact")
        {
            return serviceContext.CreateQuery("contact").FirstOrDefault(e => e.GetAttributeValue<Guid>("contactid") == customer.Id);
        }
        return null;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Web.Security;
using Umbraco.Core;
using Umbraco.Core.Configuration;
using Umbraco.Core.Configuration.UmbracoSettings;
using Umbraco.Core.Models;
using umbraco;
using umbraco.cms.businesslogic.web;
using RenderingEngine = Umbraco.Core.RenderingEngine;
namespace Umbraco.Web.Routing
{
/// <summary>
/// Represents a request for one specified Umbraco IPublishedContent to be rendered
/// by one specified template, using one specified Culture and RenderingEngine.
/// </summary>
public class PublishedContentRequest
{
// True once the request has been frozen against further changes
// (set/cleared by the preparation flow).
private bool _readonly;

// Presumably freezes the Uri property specifically -- usage is not
// visible in this part of the file; TODO confirm.
private bool _readonlyUri;

/// <summary>
/// Triggers before the published content request is prepared.
/// </summary>
/// <remarks>When the event triggers, no preparation has been done. It is still possible to
/// modify the request's Uri property, for example to restore its original, public-facing value
/// that might have been modified by in-between equipment such as a load-balancer.</remarks>
public static event EventHandler<EventArgs> Preparing;

/// <summary>
/// Triggers once the published content request has been prepared, but before it is processed.
/// </summary>
/// <remarks>When the event triggers, preparation is done ie domain, culture, document, template,
/// rendering engine, etc. have been setup. It is then possible to change anything, before
/// the request is actually processed and rendered by Umbraco.</remarks>
public static event EventHandler<EventArgs> Prepared;

// The engine that does all the processing; the content request itself
// is kept as a plain data holder to keep concerns separated.
private readonly PublishedContentRequestEngine _engine;

// The cleaned-up Uri: no virtual directory, no trailing slash,
// no .aspx extension, etc.
private Uri _uri;
/// <summary>
/// Initializes a new instance of the <see cref="PublishedContentRequest"/> class with a specific Uri and routing context.
/// </summary>
/// <param name="uri">The request <c>Uri</c>.</param>
/// <param name="routingContext">A routing context.</param>
/// <param name="getRolesForLogin">A callback method to return the roles for the provided login name when required</param>
/// <param name="routingConfig">The web-routing configuration section handed to the request engine.</param>
public PublishedContentRequest(Uri uri, RoutingContext routingContext, IWebRoutingSection routingConfig, Func<string, IEnumerable<string>> getRolesForLogin)
{
    if (uri == null) throw new ArgumentNullException("uri");
    if (routingContext == null) throw new ArgumentNullException("routingContext");
    // NOTE(review): routingConfig and getRolesForLogin are not
    // null-checked here -- confirm all callers supply them before
    // adding guards.
    Uri = uri;
    RoutingContext = routingContext;
    GetRolesForLogin = getRolesForLogin;
    _engine = new PublishedContentRequestEngine(
        routingConfig,
        this);
    // The actual rendering engine is resolved later, during preparation.
    RenderingEngine = RenderingEngine.Unknown;
}
[EditorBrowsable(EditorBrowsableState.Never)]
[Obsolete("Use the constructor specifying all dependencies instead")]
public PublishedContentRequest(Uri uri, RoutingContext routingContext)
: this(uri, routingContext, UmbracoConfig.For.UmbracoSettings().WebRouting, s => Roles.Provider.GetRolesForUser(s))
{
}
/// <summary>
/// Gets the engine associated to the request.
/// </summary>
internal PublishedContentRequestEngine Engine { get { return _engine; } }
/// <summary>
/// Prepares the request.
/// </summary>
public void Prepare()
{
_engine.PrepareRequest();
}
/// <summary>
/// Called to configure the request
/// </summary>
/// <remarks>
/// This public method is legacy, Prepare() has been made public now which should be used and ensures the domains are assigned and
/// if a public content item is already assigned Prepare() now ensures that the finders are not executed.
/// </remarks>
[Obsolete("Use Prepare() instead which configures the request and wires up everything correctly")]
public void ConfigureRequest()
{
_engine.ConfigureRequest();
}
/// <summary>
/// Updates the request when there is no template to render the content.
/// </summary>
internal void UpdateOnMissingTemplate()
{
var __readonly = _readonly;
_readonly = false;
_engine.UpdateRequestOnMissingTemplate();
_readonly = __readonly;
}
/// <summary>
/// Triggers the Preparing event.
/// </summary>
internal void OnPreparing()
{
var handler = Preparing;
if (handler != null) handler(this, EventArgs.Empty);
_readonlyUri = true;
}
/// <summary>
/// Triggers the Prepared event.
/// </summary>
internal void OnPrepared()
{
var handler = Prepared;
if (handler != null) handler(this, EventArgs.Empty);
if (HasPublishedContent == false)
Is404 = true; // safety
_readonly = true;
}
/// <summary>
/// Gets or sets the cleaned up Uri used for routing.
/// </summary>
/// <remarks>The cleaned up Uri has no virtual directory, no trailing slash, no .aspx extension, etc.</remarks>
public Uri Uri {
get
{
return _uri;
}
set
{
if (_readonlyUri)
throw new InvalidOperationException("Cannot modify Uri after Preparing has triggered.");
_uri = value;
}
}
private void EnsureWriteable()
{
if (_readonly)
throw new InvalidOperationException("Cannot modify a PublishedContentRequest once it is read-only.");
}
#region PublishedContent
/// <summary>
/// The requested IPublishedContent, if any, else <c>null</c>.
/// </summary>
private IPublishedContent _publishedContent;
/// <summary>
/// The initial requested IPublishedContent, if any, else <c>null</c>.
/// </summary>
/// <remarks>The initial requested content is the content that was found by the finders,
/// before anything such as 404, redirect... took place.</remarks>
private IPublishedContent _initialPublishedContent;
/// <summary>
/// Gets or sets the requested content.
/// </summary>
/// <remarks>Setting the requested content clears <c>Template</c>.</remarks>
public IPublishedContent PublishedContent
{
get { return _publishedContent; }
set
{
EnsureWriteable();
_publishedContent = value;
IsInternalRedirectPublishedContent = false;
TemplateModel = null;
}
}
/// <summary>
/// Sets the requested content, following an internal redirect.
/// </summary>
/// <param name="content">The requested content.</param>
/// <remarks>Depending on <c>UmbracoSettings.InternalRedirectPreservesTemplate</c>, will
/// preserve or reset the template, if any.</remarks>
public void SetInternalRedirectPublishedContent(IPublishedContent content)
{
if (content == null) throw new ArgumentNullException("content");
EnsureWriteable();
// unless a template has been set already by the finder,
// template should be null at that point.
// IsInternalRedirect if IsInitial, or already IsInternalRedirect
var isInternalRedirect = IsInitialPublishedContent || IsInternalRedirectPublishedContent;
// redirecting to self
if (content.Id == PublishedContent.Id) // neither can be null
{
// no need to set PublishedContent, we're done
IsInternalRedirectPublishedContent = isInternalRedirect;
return;
}
// else
// save
var template = _template;
var renderingEngine = RenderingEngine;
// set published content - this resets the template, and sets IsInternalRedirect to false
PublishedContent = content;
IsInternalRedirectPublishedContent = isInternalRedirect;
// must restore the template if it's an internal redirect & the config option is set
if (isInternalRedirect && UmbracoConfig.For.UmbracoSettings().WebRouting.InternalRedirectPreservesTemplate)
{
// restore
_template = template;
RenderingEngine = renderingEngine;
}
}
/// <summary>
/// Gets the initial requested content.
/// </summary>
/// <remarks>The initial requested content is the content that was found by the finders,
/// before anything such as 404, redirect... took place.</remarks>
public IPublishedContent InitialPublishedContent { get { return _initialPublishedContent; } }
/// <summary>
/// Gets value indicating whether the current published content is the initial one.
/// </summary>
public bool IsInitialPublishedContent
{
get
{
return _initialPublishedContent != null && _initialPublishedContent == _publishedContent;
}
}
/// <summary>
/// Indicates that the current PublishedContent is the initial one.
/// </summary>
public void SetIsInitialPublishedContent()
{
EnsureWriteable();
// note: it can very well be null if the initial content was not found
_initialPublishedContent = _publishedContent;
IsInternalRedirectPublishedContent = false;
}
/// <summary>
/// Gets or sets a value indicating whether the current published content has been obtained
/// from the initial published content following internal redirections exclusively.
/// </summary>
/// <remarks>Used by PublishedContentRequestEngine.FindTemplate() to figure out whether to
/// apply the internal redirect or not, when content is not the initial content.</remarks>
public bool IsInternalRedirectPublishedContent { get; private set; }
/// <summary>
/// Gets a value indicating whether the content request has a content.
/// </summary>
public bool HasPublishedContent
{
get { return PublishedContent != null; }
}
#endregion
#region Template
/// <summary>
/// The template model, if any, else <c>null</c>.
/// </summary>
private ITemplate _template;
/// <summary>
/// Gets or sets the template model to use to display the requested content.
/// </summary>
internal ITemplate TemplateModel
{
get
{
return _template;
}
set
{
_template = value;
RenderingEngine = RenderingEngine.Unknown; // reset
if (_template != null)
RenderingEngine = _engine.FindTemplateRenderingEngine(_template.Alias);
}
}
/// <summary>
/// Gets the alias of the template to use to display the requested content.
/// </summary>
public string TemplateAlias
{
get
{
return _template == null ? null : _template.Alias;
}
}
/// <summary>
/// Tries to set the template to use to display the requested content.
/// </summary>
/// <param name="alias">The alias of the template.</param>
/// <returns>A value indicating whether a valid template with the specified alias was found.</returns>
/// <remarks>
/// <para>Successfully setting the template does refresh <c>RenderingEngine</c>.</para>
/// <para>If setting the template fails, then the previous template (if any) remains in place.</para>
/// </remarks>
public bool TrySetTemplate(string alias)
{
EnsureWriteable();
if (string.IsNullOrWhiteSpace(alias))
{
TemplateModel = null;
return true;
}
// NOTE - can we stil get it with whitespaces in it due to old legacy bugs?
alias = alias.Replace(" ", "");
var model = ApplicationContext.Current.Services.FileService.GetTemplate(alias);
if (model == null)
return false;
TemplateModel = model;
return true;
}
/// <summary>
/// Sets the template to use to display the requested content.
/// </summary>
/// <param name="template">The template.</param>
/// <remarks>Setting the template does refresh <c>RenderingEngine</c>.</remarks>
public void SetTemplate(ITemplate template)
{
EnsureWriteable();
TemplateModel = template;
}
/// <summary>
/// Resets the template.
/// </summary>
/// <remarks>The <c>RenderingEngine</c> becomes unknown.</remarks>
public void ResetTemplate()
{
EnsureWriteable();
TemplateModel = null;
}
/// <summary>
/// Gets a value indicating whether the content request has a template.
/// </summary>
public bool HasTemplate
{
get { return _template != null; }
}
#endregion
#region Domain and Culture
[Obsolete("Do not use this property, use the non-legacy UmbracoDomain property instead")]
public Domain Domain
{
get { return new Domain(UmbracoDomain); }
}
//TODO: Should we publicize the setter now that we are using a non-legacy entity??
/// <summary>
/// Gets or sets the content request's domain.
/// </summary>
public IDomain UmbracoDomain { get; internal set; }
/// <summary>
/// Gets or sets the content request's domain Uri.
/// </summary>
/// <remarks>The <c>Domain</c> may contain "example.com" whereas the <c>Uri</c> will be fully qualified eg "http://example.com/".</remarks>
public Uri DomainUri { get; internal set; }
/// <summary>
/// Gets a value indicating whether the content request has a domain.
/// </summary>
public bool HasDomain
{
get { return UmbracoDomain != null; }
}
private CultureInfo _culture;
/// <summary>
/// Gets or sets the content request's culture.
/// </summary>
public CultureInfo Culture
{
get { return _culture; }
set
{
EnsureWriteable();
_culture = value;
}
}
// note: do we want to have an ordered list of alternate cultures,
// to allow for fallbacks when doing dictionnary lookup and such?
#endregion
#region Rendering
/// <summary>
/// Gets or sets whether the rendering engine is MVC or WebForms.
/// </summary>
public RenderingEngine RenderingEngine { get; internal set; }
#endregion
/// <summary>
/// Gets or sets the current RoutingContext.
/// </summary>
public RoutingContext RoutingContext { get; private set; }
internal Func<string, IEnumerable<string>> GetRolesForLogin { get; private set; }
/// <summary>
/// The "umbraco page" object.
/// </summary>
private page _umbracoPage;
/// <summary>
/// Gets or sets the "umbraco page" object.
/// </summary>
/// <remarks>
/// This value is only used for legacy/webforms code.
/// </remarks>
internal page UmbracoPage
{
get
{
if (_umbracoPage == null)
throw new InvalidOperationException("The UmbracoPage object has not been initialized yet.");
return _umbracoPage;
}
set { _umbracoPage = value; }
}
#region Status
/// <summary>
/// Gets or sets a value indicating whether the requested content could not be found.
/// </summary>
/// <remarks>This is set in the <c>PublishedContentRequestBuilder</c>.</remarks>
public bool Is404 { get; internal set; }
/// <summary>
/// Indicates that the requested content could not be found.
/// </summary>
/// <remarks>This is for public access, in custom content finders or <c>Prepared</c> event handlers,
/// where we want to allow developers to indicate a request is 404 but not to cancel it.</remarks>
public void SetIs404()
{
EnsureWriteable();
Is404 = true;
}
/// <summary>
/// Gets a value indicating whether the content request triggers a redirect (permanent or not).
/// </summary>
public bool IsRedirect { get { return string.IsNullOrWhiteSpace(RedirectUrl) == false; } }
/// <summary>
/// Gets or sets a value indicating whether the redirect is permanent.
/// </summary>
public bool IsRedirectPermanent { get; private set; }
/// <summary>
/// Gets or sets the url to redirect to, when the content request triggers a redirect.
/// </summary>
public string RedirectUrl { get; private set; }
/// <summary>
/// Indicates that the content request should trigger a redirect (302).
/// </summary>
/// <param name="url">The url to redirect to.</param>
/// <remarks>Does not actually perform a redirect, only registers that the response should
/// redirect. Redirect will or will not take place in due time.</remarks>
public void SetRedirect(string url)
{
EnsureWriteable();
RedirectUrl = url;
IsRedirectPermanent = false;
}
/// <summary>
/// Indicates that the content request should trigger a permanent redirect (301).
/// </summary>
/// <param name="url">The url to redirect to.</param>
/// <remarks>Does not actually perform a redirect, only registers that the response should
/// redirect. Redirect will or will not take place in due time.</remarks>
public void SetRedirectPermanent(string url)
{
EnsureWriteable();
RedirectUrl = url;
IsRedirectPermanent = true;
}
/// <summary>
/// Indicates that the content requet should trigger a redirect, with a specified status code.
/// </summary>
/// <param name="url">The url to redirect to.</param>
/// <param name="status">The status code (300-308).</param>
/// <remarks>Does not actually perform a redirect, only registers that the response should
/// redirect. Redirect will or will not take place in due time.</remarks>
public void SetRedirect(string url, int status)
{
EnsureWriteable();
if (status < 300 || status > 308)
throw new ArgumentOutOfRangeException("status", "Valid redirection status codes 300-308.");
RedirectUrl = url;
IsRedirectPermanent = (status == 301 || status == 308);
if (status != 301 && status != 302) // default redirect statuses
ResponseStatusCode = status;
}
/// <summary>
/// Gets or sets the content request http response status code.
/// </summary>
/// <remarks>Does not actually set the http response status code, only registers that the response
/// should use the specified code. The code will or will not be used, in due time.</remarks>
public int ResponseStatusCode { get; private set; }
/// <summary>
/// Gets or sets the content request http response status description.
/// </summary>
/// <remarks>Does not actually set the http response status description, only registers that the response
/// should use the specified description. The description will or will not be used, in due time.</remarks>
public string ResponseStatusDescription { get; private set; }
/// <summary>
/// Sets the http response status code, along with an optional associated description.
/// </summary>
/// <param name="code">The http status code.</param>
/// <param name="description">The description.</param>
/// <remarks>Does not actually set the http response status code and description, only registers that
/// the response should use the specified code and description. The code and description will or will
/// not be used, in due time.</remarks>
public void SetResponseStatus(int code, string description = null)
{
EnsureWriteable();
// .Status is deprecated
// .SubStatusCode is IIS 7+ internal, ignore
ResponseStatusCode = code;
ResponseStatusDescription = description;
}
#endregion
}
}
| |
//Sony Computer Entertainment Confidential
using System;
using System.Collections.Generic;
using System.Text;
using System.Reflection;
using System.IO;
using NUnit.Framework;
using Scea.Editors.Plugins;
using Scea.Collections;
using Scea.Dom;
using Scea.Pipelines.Geometry;
using Scea.Utilities;
using Sce.Atf;
using Sce.Atf.VectorMath;
namespace UnitTestAtgi
{
[TestFixture]
[TestFixture]
public class TestAtgi
{
private string m_appPath;
private DomCollection m_domCollection = null;
/// <summary>
/// Loads the ATGI plugin and the read-only cube.atgi test collection once for the fixture.
/// </summary>
public TestAtgi()
{
PluginDictionary plugins = Singleton<PluginDictionary>.Instance;
m_appPath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
// Setup schema path so the DOM can resolve schema references
string schemaPath = Path.Combine(m_appPath, "schemas");
DomSchemaRegistry.SchemaResolver = new FileStreamResolver(schemaPath);
// load atgi plugin (Path.Combine instead of hand-built "\\" concatenation)
string atgiPath = Path.Combine(Path.Combine(m_appPath, "Plugins"), "Scea.Atgi.pll");
Assembly assembly = Assembly.LoadFrom(atgiPath);
plugins.Add(assembly);
plugins.GetPlugins<PluginBase>();
// 2- create collection, mark it read-only so tests cannot mutate it
DomRepository repository = new DomRepository();
string atgiFile = Path.Combine(m_appPath, "cube.atgi");
m_domCollection = repository.ReadCollection(new Uri(atgiFile));
m_domCollection.IsReadOnly = true;
// add dc to DomRepository
repository.Add(m_domCollection);
}
// Returns the scene of the collection's root world; shared by all tests.
private IScene GetScene()
{
IWorld world = m_domCollection.RootDomObject.CreateInterface<IWorld>();
Assert.IsNotNull(world, "root object must expose IWorld");
return world.Scene;
}
// Finds the single mesh attached to the "pCube1" node, failing the test
// (instead of throwing NullReferenceException) when it is absent.
private IMesh GetCubeMesh()
{
IMesh mesh = null;
foreach (INode node in GetScene().Nodes)
if (node.Name.Contains("pCube1"))
mesh = node.Meshes[0];
Assert.IsNotNull(mesh, "pCube1 node with a mesh not found");
return mesh;
}
[Test(Description = "Can create IWorld on the root dom object")]
public void RootObjectIsWorld()
{
IWorld world = m_domCollection.RootDomObject.CreateInterface<IWorld>();
Assert.IsNotNull(world);
}
[Test(Description = "The World must have a Scene")]
public void WorldMustHaveScene()
{
IWorld world = m_domCollection.RootDomObject.CreateInterface<IWorld>();
IScene scene = world.Scene;
Assert.IsNotNull(scene);
}
[Test(Description = "The Scene must only have five nodes")]
public void NumberOfNodes()
{
Assert.AreEqual(5, GetScene().Nodes.Count, "Invalid node count");
}
[Test(Description = "validate node names")]
public void NodeNames()
{
// concatenate names+transforms, then check all expected names are present
string n = "";
foreach (INode node in GetScene().Nodes)
n += node.Name + " " + node.Transform.ToString() + " ";
bool test = n.Contains("persp") && n.Contains("top") && n.Contains("front") &&
n.Contains("side") && n.Contains("pCube1");
Assert.IsTrue(test);
}
[Test(Description = "All the transformation matrix must be Identity")]
public void NodeTransformMatrix()
{
foreach (INode node in GetScene().Nodes)
Assert.IsTrue(node.Transform.Equals(Matrix4F.Identity));
}
[Test(Description = "Only pCube1 node have one mesh\r\n the rest of the nodes do not have mesh.")]
public void NumberOfMeshes()
{
foreach (INode node in GetScene().Nodes)
{
if (node.Name.Contains("pCube1"))
{
Assert.AreEqual(1, node.Meshes.Count);
break;
}
Assert.AreEqual(0, node.Meshes.Count);
}
}
[Test(Description = "there is no node hierarchy")]
public void ChildNodes()
{
foreach (INode node in GetScene().Nodes)
Assert.AreEqual(0, node.ChildNodes.Count);
}
[Test(Description = "validating mesh for pCube1 node")]
public void DataSet()
{
IMesh mesh = GetCubeMesh();
Assert.AreEqual(2, mesh.DataSets.Count);
Assert.AreEqual(1, mesh.PrimitiveSets.Count);
Assert.IsNull(mesh.BoundingBox, "bounding box must be null");
}
[Test(Description = "validate position data")]
public void PositionData()
{
IList<IDataSet> datasets = GetCubeMesh().DataSets;
// expected cube corner positions (0.01 unit cube centered at origin)
float[] data = new float[] {
-0.005f, -0.005f, 0.005f, 0.005f, -0.005f, 0.005f, -0.005f, 0.005f,
0.005f, 0.005f, 0.005f, 0.005f, -0.005f, 0.005f, -0.005f, 0.005f, 0.005f, -0.005f,
-0.005f, -0.005f, -0.005f, 0.005f, -0.005f, -0.005f};
IDataSet position = datasets[0];
Assert.AreEqual("position", position.Name);
Assert.AreEqual(3, position.ElementSize);
Assert.AreEqual(24, position.Data.Length);
for (int i = 0; i < data.Length; i++)
Assert.AreEqual(data[i], position.Data[i]);
}
[Test(Description = "validate normal data")]
public void NormalData()
{
IList<IDataSet> datasets = GetCubeMesh().DataSets;
// expected per-corner face normals for the six cube faces
float[] data = new float[]{
0f, 0f, 1f, 0f, 0f, 1f, 0f, 0f, 1f, 0f, 0f, 1f, 0f, 1f, 0f, 0f, 1f,
0f, 0f, 1f, 0f, 0f, 1f, 0f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 0f, -1f,
0f, -1f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 1f, 0f, 0f, 1f, 0f, 0f, 1f,
0f, 0f, 1f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 0f, -1f, 0f, 0f};
IDataSet normal = datasets[1];
Assert.AreEqual("normal", normal.Name);
Assert.AreEqual(72, normal.Data.Length);
Assert.AreEqual(3, normal.ElementSize);
for (int i = 0; i < data.Length; i++)
Assert.AreEqual(data[i], normal.Data[i]);
}
[Test(Description = "The mesh for pCube1 node must only have one primset")]
public void NumberOfPrimSets()
{
Assert.AreEqual(1, GetCubeMesh().PrimitiveSets.Count);
}
[Test(Description = "Validating primset")]
public void PrimSet()
{
IPrimitiveSet primSet = GetCubeMesh().PrimitiveSets[0];
Assert.AreEqual(2, primSet.Bindings.Count);
Assert.AreEqual(primSet.Bindings.Count, primSet.BindingCount);
Assert.AreEqual(48, primSet.PrimitiveIndices.Length);
Assert.AreEqual(6, primSet.PrimitiveSizes.Length);
Assert.AreEqual("POLYGONS", primSet.PrimitiveType);
Assert.IsNotNull(primSet.Shader, "Shader must not be null");
}
[Test(Description = "validating bindings")]
public void Bindings()
{
// bindings must reference the position and normal data sets, in that order
IList<IBinding> bindings = GetCubeMesh().PrimitiveSets[0].Bindings;
Assert.AreEqual("position", bindings[0].Source.GetAttribute("name").ToString());
Assert.AreEqual("normal", bindings[1].Source.GetAttribute("name").ToString());
}
[Test(Description = "Validating primset indexes")]
public void PrimSetIndices()
{
int[] indexes = GetCubeMesh().PrimitiveSets[0].PrimitiveIndices;
// interleaved position/normal indices for the six quads
int[] data = new int[] {
0, 0, 1, 1, 3, 2, 2, 3, 2, 4, 3, 5, 5, 6, 4, 7, 4, 8, 5, 9, 7, 10,
6, 11, 6, 12, 7, 13, 1, 14, 0, 15, 1, 16, 7, 17, 5, 18, 3, 19, 6, 20, 0, 21, 2,
22, 4, 23};
for (int i = 0; i < data.Length; i++)
Assert.AreEqual(data[i], indexes[i]);
}
[Test(Description = "Validating primitive sizes")]
public void PrimSetSizes()
{
int[] sizes = GetCubeMesh().PrimitiveSets[0].PrimitiveSizes;
// six quads, four vertices each
int[] data = new int[] { 4, 4, 4, 4, 4, 4 };
for (int i = 0; i < data.Length; i++)
Assert.AreEqual(data[i], sizes[i]);
}
[Test(Description = "Validating shader")]
public void Shader()
{
IShader shader = GetCubeMesh().PrimitiveSets[0].Shader;
Assert.AreEqual(0, shader.Bindings.Count);
Assert.IsNull(shader.CustomAttributes);
}
}
}
| |
namespace java.text
{
[global::MonoJavaBridge.JavaClass(typeof(global::java.text.DateFormat_))]
public abstract partial class DateFormat : java.text.Format
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
// Resolves the Java class handle and all JNI method ids before first use.
static DateFormat()
{
InitJNI();
}
// Wrapper constructor: forwards the JNI environment to the base Format wrapper.
protected DateFormat(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
internal static global::MonoJavaBridge.MethodId _equals15067;
// Invokes java.text.DateFormat.equals over JNI; dispatches virtually unless this
// instance is a CLR-side object (IsClrObject), then calls the Java implementation non-virtually.
public override bool equals(java.lang.Object arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return @__env.CallBooleanMethod(this.JvmHandle, global::java.text.DateFormat._equals15067, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
else
return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._equals15067, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
internal static global::MonoJavaBridge.MethodId _hashCode15068;
// Invokes java.text.DateFormat.hashCode over JNI (virtual vs. non-virtual per IsClrObject).
public override int hashCode()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return @__env.CallIntMethod(this.JvmHandle, global::java.text.DateFormat._hashCode15068);
else
return @__env.CallNonVirtualIntMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._hashCode15068);
}
internal static global::MonoJavaBridge.MethodId _clone15069;
// Invokes java.text.DateFormat.clone over JNI and wraps the returned handle as java.lang.Object.
public override global::java.lang.Object clone()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._clone15069)) as java.lang.Object;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._clone15069)) as java.lang.Object;
}
internal static global::MonoJavaBridge.MethodId _format15070;
// Invokes the sealed format(Object, StringBuffer, FieldPosition) overload over JNI;
// all three arguments are marshaled to JNI values and the result wrapped as StringBuffer.
public sealed override global::java.lang.StringBuffer format(java.lang.Object arg0, java.lang.StringBuffer arg1, java.text.FieldPosition arg2)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._format15070, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as java.lang.StringBuffer;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._format15070, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as java.lang.StringBuffer;
}
internal static global::MonoJavaBridge.MethodId _format15071;
public abstract global::java.lang.StringBuffer format(java.util.Date arg0, java.lang.StringBuffer arg1, java.text.FieldPosition arg2);
internal static global::MonoJavaBridge.MethodId _format15072;
// Invokes format(Date) over JNI and wraps the result as java.lang.String.
public virtual global::java.lang.String format(java.util.Date arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._format15072, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.lang.String;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._format15072, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.lang.String;
}
internal static global::MonoJavaBridge.MethodId _getInstance15073;
// Static JNI call to java.text.DateFormat.getInstance(); result wrapped as DateFormat.
public static global::java.text.DateFormat getInstance()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getInstance15073)) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _parse15074;
// Invokes parse(String) over JNI and wraps the result as java.util.Date.
public virtual global::java.util.Date parse(java.lang.String arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._parse15074, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.util.Date;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._parse15074, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.util.Date;
}
internal static global::MonoJavaBridge.MethodId _parse15075;
public abstract global::java.util.Date parse(java.lang.String arg0, java.text.ParsePosition arg1);
internal static global::MonoJavaBridge.MethodId _getAvailableLocales15076;
// Static JNI call to getAvailableLocales(); result wrapped as a Locale[] array.
public static global::java.util.Locale[] getAvailableLocales()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaArrayObject<java.util.Locale>(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getAvailableLocales15076)) as java.util.Locale[];
}
internal static global::MonoJavaBridge.MethodId _getTimeZone15077;
// Invokes getTimeZone() over JNI and wraps the result as java.util.TimeZone.
public virtual global::java.util.TimeZone getTimeZone()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._getTimeZone15077)) as java.util.TimeZone;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._getTimeZone15077)) as java.util.TimeZone;
}
internal static global::MonoJavaBridge.MethodId _setTimeZone15078;
// Invokes setTimeZone(TimeZone) over JNI (void call, no wrapped result).
public virtual void setTimeZone(java.util.TimeZone arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
@__env.CallVoidMethod(this.JvmHandle, global::java.text.DateFormat._setTimeZone15078, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
else
@__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._setTimeZone15078, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
internal static global::MonoJavaBridge.MethodId _getDateTimeInstance15079;
// Static JNI call to getDateTimeInstance(dateStyle, timeStyle, locale).
public static global::java.text.DateFormat getDateTimeInstance(int arg0, int arg1, java.util.Locale arg2)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getDateTimeInstance15079, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getDateTimeInstance15080;
public static global::java.text.DateFormat getDateTimeInstance(int arg0, int arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getDateTimeInstance15080, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getDateTimeInstance15081;
public static global::java.text.DateFormat getDateTimeInstance()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getDateTimeInstance15081)) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _setLenient15082;
public virtual void setLenient(bool arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
@__env.CallVoidMethod(this.JvmHandle, global::java.text.DateFormat._setLenient15082, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
else
@__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._setLenient15082, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
internal static global::MonoJavaBridge.MethodId _isLenient15083;
public virtual bool isLenient()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return @__env.CallBooleanMethod(this.JvmHandle, global::java.text.DateFormat._isLenient15083);
else
return @__env.CallNonVirtualBooleanMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._isLenient15083);
}
internal static global::MonoJavaBridge.MethodId _parseObject15084;
public override global::java.lang.Object parseObject(java.lang.String arg0, java.text.ParsePosition arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._parseObject15084, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.lang.Object;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._parseObject15084, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.lang.Object;
}
internal static global::MonoJavaBridge.MethodId _getTimeInstance15085;
public static global::java.text.DateFormat getTimeInstance(int arg0, java.util.Locale arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getTimeInstance15085, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getTimeInstance15086;
public static global::java.text.DateFormat getTimeInstance(int arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getTimeInstance15086, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getTimeInstance15087;
public static global::java.text.DateFormat getTimeInstance()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getTimeInstance15087)) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getDateInstance15088;
public static global::java.text.DateFormat getDateInstance()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getDateInstance15088)) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getDateInstance15089;
public static global::java.text.DateFormat getDateInstance(int arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getDateInstance15089, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0))) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _getDateInstance15090;
public static global::java.text.DateFormat getDateInstance(int arg0, java.util.Locale arg1)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallStaticObjectMethod(java.text.DateFormat.staticClass, global::java.text.DateFormat._getDateInstance15090, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.text.DateFormat;
}
internal static global::MonoJavaBridge.MethodId _setCalendar15091;
public virtual void setCalendar(java.util.Calendar arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
@__env.CallVoidMethod(this.JvmHandle, global::java.text.DateFormat._setCalendar15091, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
else
@__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._setCalendar15091, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
internal static global::MonoJavaBridge.MethodId _getCalendar15092;
public virtual global::java.util.Calendar getCalendar()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._getCalendar15092)) as java.util.Calendar;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._getCalendar15092)) as java.util.Calendar;
}
internal static global::MonoJavaBridge.MethodId _setNumberFormat15093;
public virtual void setNumberFormat(java.text.NumberFormat arg0)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
@__env.CallVoidMethod(this.JvmHandle, global::java.text.DateFormat._setNumberFormat15093, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
else
@__env.CallNonVirtualVoidMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._setNumberFormat15093, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
internal static global::MonoJavaBridge.MethodId _getNumberFormat15094;
public virtual global::java.text.NumberFormat getNumberFormat()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (!IsClrObject)
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat._getNumberFormat15094)) as java.text.NumberFormat;
else
return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat.staticClass, global::java.text.DateFormat._getNumberFormat15094)) as java.text.NumberFormat;
}
internal static global::MonoJavaBridge.MethodId _DateFormat15095;
protected DateFormat() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(java.text.DateFormat.staticClass, global::java.text.DateFormat._DateFormat15095);
Init(@__env, handle);
}
public static int ERA_FIELD
{
get
{
return 0;
}
}
public static int YEAR_FIELD
{
get
{
return 1;
}
}
public static int MONTH_FIELD
{
get
{
return 2;
}
}
public static int DATE_FIELD
{
get
{
return 3;
}
}
public static int HOUR_OF_DAY1_FIELD
{
get
{
return 4;
}
}
public static int HOUR_OF_DAY0_FIELD
{
get
{
return 5;
}
}
public static int MINUTE_FIELD
{
get
{
return 6;
}
}
public static int SECOND_FIELD
{
get
{
return 7;
}
}
public static int MILLISECOND_FIELD
{
get
{
return 8;
}
}
public static int DAY_OF_WEEK_FIELD
{
get
{
return 9;
}
}
public static int DAY_OF_YEAR_FIELD
{
get
{
return 10;
}
}
public static int DAY_OF_WEEK_IN_MONTH_FIELD
{
get
{
return 11;
}
}
public static int WEEK_OF_YEAR_FIELD
{
get
{
return 12;
}
}
public static int WEEK_OF_MONTH_FIELD
{
get
{
return 13;
}
}
public static int AM_PM_FIELD
{
get
{
return 14;
}
}
public static int HOUR1_FIELD
{
get
{
return 15;
}
}
public static int HOUR0_FIELD
{
get
{
return 16;
}
}
public static int TIMEZONE_FIELD
{
get
{
return 17;
}
}
public static int FULL
{
get
{
return 0;
}
}
public static int LONG
{
get
{
return 1;
}
}
public static int MEDIUM
{
get
{
return 2;
}
}
public static int SHORT
{
get
{
return 3;
}
}
public static int DEFAULT
{
get
{
return 2;
}
}
        // Resolves the java.text.DateFormat class and all wrapped method ids
        // exactly once (invoked from the static constructor).  The class
        // reference is pinned with a global ref so it survives across JNI
        // local frames.  GetMethodIDNoThrow / GetStaticMethodIDNoThrow leave
        // a MethodId unresolved (rather than throwing) when the running JVM
        // lacks a signature.
        private static void InitJNI()
        {
            global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
            global::java.text.DateFormat.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/text/DateFormat"));
            global::java.text.DateFormat._equals15067 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "equals", "(Ljava/lang/Object;)Z");
            global::java.text.DateFormat._hashCode15068 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "hashCode", "()I");
            global::java.text.DateFormat._clone15069 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "clone", "()Ljava/lang/Object;");
            global::java.text.DateFormat._format15070 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "format", "(Ljava/lang/Object;Ljava/lang/StringBuffer;Ljava/text/FieldPosition;)Ljava/lang/StringBuffer;");
            global::java.text.DateFormat._format15071 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "format", "(Ljava/util/Date;Ljava/lang/StringBuffer;Ljava/text/FieldPosition;)Ljava/lang/StringBuffer;");
            global::java.text.DateFormat._format15072 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "format", "(Ljava/util/Date;)Ljava/lang/String;");
            global::java.text.DateFormat._getInstance15073 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getInstance", "()Ljava/text/DateFormat;");
            global::java.text.DateFormat._parse15074 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "parse", "(Ljava/lang/String;)Ljava/util/Date;");
            global::java.text.DateFormat._parse15075 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "parse", "(Ljava/lang/String;Ljava/text/ParsePosition;)Ljava/util/Date;");
            global::java.text.DateFormat._getAvailableLocales15076 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getAvailableLocales", "()[Ljava/util/Locale;");
            global::java.text.DateFormat._getTimeZone15077 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getTimeZone", "()Ljava/util/TimeZone;");
            global::java.text.DateFormat._setTimeZone15078 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "setTimeZone", "(Ljava/util/TimeZone;)V");
            global::java.text.DateFormat._getDateTimeInstance15079 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getDateTimeInstance", "(IILjava/util/Locale;)Ljava/text/DateFormat;");
            global::java.text.DateFormat._getDateTimeInstance15080 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getDateTimeInstance", "(II)Ljava/text/DateFormat;");
            global::java.text.DateFormat._getDateTimeInstance15081 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getDateTimeInstance", "()Ljava/text/DateFormat;");
            global::java.text.DateFormat._setLenient15082 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "setLenient", "(Z)V");
            global::java.text.DateFormat._isLenient15083 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "isLenient", "()Z");
            global::java.text.DateFormat._parseObject15084 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "parseObject", "(Ljava/lang/String;Ljava/text/ParsePosition;)Ljava/lang/Object;");
            global::java.text.DateFormat._getTimeInstance15085 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getTimeInstance", "(ILjava/util/Locale;)Ljava/text/DateFormat;");
            global::java.text.DateFormat._getTimeInstance15086 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getTimeInstance", "(I)Ljava/text/DateFormat;");
            global::java.text.DateFormat._getTimeInstance15087 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getTimeInstance", "()Ljava/text/DateFormat;");
            global::java.text.DateFormat._getDateInstance15088 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getDateInstance", "()Ljava/text/DateFormat;");
            global::java.text.DateFormat._getDateInstance15089 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getDateInstance", "(I)Ljava/text/DateFormat;");
            global::java.text.DateFormat._getDateInstance15090 = @__env.GetStaticMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getDateInstance", "(ILjava/util/Locale;)Ljava/text/DateFormat;");
            global::java.text.DateFormat._setCalendar15091 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "setCalendar", "(Ljava/util/Calendar;)V");
            global::java.text.DateFormat._getCalendar15092 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getCalendar", "()Ljava/util/Calendar;");
            global::java.text.DateFormat._setNumberFormat15093 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "setNumberFormat", "(Ljava/text/NumberFormat;)V");
            global::java.text.DateFormat._getNumberFormat15094 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "getNumberFormat", "()Ljava/text/NumberFormat;");
            global::java.text.DateFormat._DateFormat15095 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat.staticClass, "<init>", "()V");
        }
}
    // Concrete proxy for the abstract java.text.DateFormat: instances
    // returned from the JVM are materialized as this sealed type.  It only
    // binds the two abstract members (format / parse); everything else is
    // inherited from the DateFormat wrapper above.
    [global::MonoJavaBridge.JavaProxy(typeof(global::java.text.DateFormat))]
    public sealed partial class DateFormat_ : java.text.DateFormat
    {
        internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
        static DateFormat_()
        {
            InitJNI();
        }
        internal DateFormat_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
        {
        }
        // Cached JNI id for format(Ljava/util/Date;Ljava/lang/StringBuffer;Ljava/text/FieldPosition;)
        internal static global::MonoJavaBridge.MethodId _format15096;
        // Wraps DateFormat.format(Date, StringBuffer, FieldPosition).
        public override global::java.lang.StringBuffer format(java.util.Date arg0, java.lang.StringBuffer arg1, java.text.FieldPosition arg2)
        {
            global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
            if (!IsClrObject)
                return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat_._format15096, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as java.lang.StringBuffer;
            else
                return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat_.staticClass, global::java.text.DateFormat_._format15096, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2))) as java.lang.StringBuffer;
        }
        // Cached JNI id for parse(Ljava/lang/String;Ljava/text/ParsePosition;)
        internal static global::MonoJavaBridge.MethodId _parse15097;
        // Wraps DateFormat.parse(String, ParsePosition).
        public override global::java.util.Date parse(java.lang.String arg0, java.text.ParsePosition arg1)
        {
            global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
            if (!IsClrObject)
                return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallObjectMethod(this.JvmHandle, global::java.text.DateFormat_._parse15097, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.util.Date;
            else
                return global::MonoJavaBridge.JavaBridge.WrapJavaObject(@__env.CallNonVirtualObjectMethod(this.JvmHandle, global::java.text.DateFormat_.staticClass, global::java.text.DateFormat_._parse15097, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1))) as java.util.Date;
        }
        // Resolves the proxy's own method ids; called once from the static ctor.
        private static void InitJNI()
        {
            global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
            global::java.text.DateFormat_.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/text/DateFormat"));
            global::java.text.DateFormat_._format15096 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat_.staticClass, "format", "(Ljava/util/Date;Ljava/lang/StringBuffer;Ljava/text/FieldPosition;)Ljava/lang/StringBuffer;");
            global::java.text.DateFormat_._parse15097 = @__env.GetMethodIDNoThrow(global::java.text.DateFormat_.staticClass, "parse", "(Ljava/lang/String;Ljava/text/ParsePosition;)Ljava/util/Date;");
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Pixie.Markup;
namespace Pixie.Terminal.Devices
{
/// <summary>
/// A style manager implementation that uses ANSI control codes to
/// style output.
/// </summary>
public sealed class AnsiStyleManager : StyleManager
{
/// <summary>
/// Creates an ANSI style manager from a text writer.
/// </summary>
/// <param name="writer">A text writer to which control sequences are written.</param>
public AnsiStyleManager(TextWriter writer)
: this(
writer,
ConsoleStyle.ToPixieColor(Console.ForegroundColor, Colors.White),
ConsoleStyle.ToPixieColor(Console.BackgroundColor, Colors.Black))
{ }
/// <summary>
/// Creates an ANSI style manager from a text writer, a default foreground
/// and a default background color.
/// </summary>
/// <param name="writer">A text writer to which control sequences are written.</param>
/// <param name="defaultForegroundColor">The default foreground color.</param>
/// <param name="defaultBackgroundColor">The default background color.</param>
public AnsiStyleManager(
TextWriter writer,
Color defaultForegroundColor,
Color defaultBackgroundColor)
{
this.Writer = writer;
this.defaultForegroundColor = defaultForegroundColor;
this.defaultBackgroundColor = defaultBackgroundColor;
this.styleStack = new Stack<AnsiStyle>();
this.styleStack.Push(
new AnsiStyle(
default(Nullable<Color>),
default(Nullable<Color>),
TextDecoration.None));
}
private Stack<AnsiStyle> styleStack;
private Color defaultForegroundColor;
private Color defaultBackgroundColor;
/// <summary>
/// Gets the writer to which ANSI control codes are written.
/// </summary>
/// <returns>A text writer.</returns>
public TextWriter Writer { get; private set; }
private AnsiStyle CurrentStyle => styleStack.Peek();
/// <inheritdoc/>
public override void PushForegroundColor(Color color)
{
var curStyle = CurrentStyle;
PushStyle(
new AnsiStyle(
Over(color, curStyle.ForegroundColor, defaultForegroundColor),
curStyle.BackgroundColor,
curStyle.Decoration));
}
/// <inheritdoc/>
public override void PushBackgroundColor(Color color)
{
var curStyle = CurrentStyle;
PushStyle(
new AnsiStyle(
curStyle.ForegroundColor,
Over(color, curStyle.BackgroundColor, defaultBackgroundColor),
curStyle.Decoration));
}
/// <inheritdoc/>
public override void PushDecoration(
TextDecoration decoration,
Func<TextDecoration, TextDecoration, TextDecoration> updateDecoration)
{
var curStyle = CurrentStyle;
PushStyle(
new AnsiStyle(
curStyle.ForegroundColor,
curStyle.BackgroundColor,
updateDecoration(curStyle.Decoration, decoration)));
}
private void PushStyle(AnsiStyle style)
{
style.Apply(Writer, CurrentStyle);
styleStack.Push(style);
}
/// <inheritdoc/>
public override void PopStyle()
{
var popped = styleStack.Pop();
CurrentStyle.Apply(Writer, popped);
}
private static Nullable<Color> Over(
Color top,
Nullable<Color> bottom,
Color defaultBottom)
{
if (top.Alpha == 0.0)
{
return bottom;
}
else
{
return top.Over(bottom.GetValueOrDefault(defaultBottom));
}
}
}
    // ANSI SGR ("Select Graphic Rendition") parameter values, as used in
    // escape sequences of the form "\x1b[<params>m" (ECMA-48).  Background
    // color codes are exactly the matching foreground code plus 10.
    internal enum AnsiControlCode : byte
    {
        Reset = 0,
        Bold = 1,
        Faint = 2,
        Italic = 3,
        Underline = 4,
        BlinkSlow = 5,
        BlinkFast = 6,
        Strikethrough = 9,
        ForegroundBlack = 30,
        ForegroundRed = 31,
        ForegroundGreen = 32,
        ForegroundYellow = 33,
        ForegroundBlue = 34,
        ForegroundMagenta = 35,
        ForegroundCyan = 36,
        ForegroundWhite = 37,
        BackgroundBlack = 40,
        BackgroundRed = 41,
        BackgroundGreen = 42,
        BackgroundYellow = 43,
        BackgroundBlue = 44,
        BackgroundMagenta = 45,
        BackgroundCyan = 46,
        BackgroundWhite = 47,
    }
internal sealed class AnsiStyle
{
public AnsiStyle(
Nullable<Color> foregroundColor,
Nullable<Color> backgroundColor,
TextDecoration decoration)
{
this.ForegroundColor = foregroundColor;
this.BackgroundColor = backgroundColor;
this.Decoration = decoration;
}
public Nullable<Color> ForegroundColor { get; private set; }
public Nullable<Color> BackgroundColor { get; private set; }
public TextDecoration Decoration { get; private set; }
private void WriteControlSequence(
TextWriter writer,
IEnumerable<AnsiControlCode> commands)
{
writer.Write("\x1b[");
bool first = false;
foreach (var item in commands)
{
if (first)
{
first = false;
}
else
{
writer.Write(';');
}
writer.Write((int)item);
}
writer.Write('m');
}
private void Apply(TextWriter writer)
{
var commands = new List<AnsiControlCode>();
// Always reset first.
commands.Add(AnsiControlCode.Reset);
// Write the foreground color.
if (ForegroundColor.HasValue)
{
bool isFaint;
commands.Add(ToForegroundColor(
ConsoleStyle.ToConsoleColor(ForegroundColor.Value),
out isFaint));
if (isFaint)
{
commands.Add(AnsiControlCode.Faint);
}
}
// Write the background color.
if (BackgroundColor.HasValue)
{
commands.Add(
ToBackgroundColor(
ConsoleStyle.ToConsoleColor(BackgroundColor.Value)));
}
// Apply decorations
if (HasDecoration(TextDecoration.Bold))
{
commands.Add(AnsiControlCode.Bold);
}
if (HasDecoration(TextDecoration.Italic))
{
commands.Add(AnsiControlCode.Italic);
}
if (HasDecoration(TextDecoration.Underline))
{
commands.Add(AnsiControlCode.Underline);
}
if (HasDecoration(TextDecoration.Strikethrough))
{
commands.Add(AnsiControlCode.Strikethrough);
}
WriteControlSequence(writer, commands);
}
private bool HasDecoration(TextDecoration decor)
{
return (Decoration & decor) == decor;
}
/// <summary>
/// Applies this style, given a previous style.
/// </summary>
public void Apply(TextWriter writer, AnsiStyle style)
{
if (Decoration != style.Decoration
|| !QuantizedColorEquals(ForegroundColor, style.ForegroundColor)
|| !QuantizedColorEquals(BackgroundColor, style.BackgroundColor))
{
Apply(writer);
}
}
private static AnsiControlCode ToForegroundColor(ConsoleColor color, out bool isFaint)
{
switch (color)
{
case ConsoleColor.Black:
isFaint = false;
return AnsiControlCode.ForegroundBlack;
case ConsoleColor.Blue:
isFaint = false;
return AnsiControlCode.ForegroundBlue;
case ConsoleColor.Cyan:
isFaint = false;
return AnsiControlCode.ForegroundCyan;
case ConsoleColor.Green:
isFaint = false;
return AnsiControlCode.ForegroundGreen;
case ConsoleColor.Magenta:
isFaint = false;
return AnsiControlCode.ForegroundMagenta;
case ConsoleColor.Red:
isFaint = false;
return AnsiControlCode.ForegroundRed;
case ConsoleColor.White:
isFaint = false;
return AnsiControlCode.ForegroundWhite;
case ConsoleColor.Yellow:
isFaint = false;
return AnsiControlCode.ForegroundYellow;
case ConsoleColor.Gray:
isFaint = true;
return AnsiControlCode.ForegroundWhite;
case ConsoleColor.DarkBlue:
isFaint = true;
return AnsiControlCode.ForegroundBlue;
case ConsoleColor.DarkCyan:
isFaint = true;
return AnsiControlCode.ForegroundCyan;
case ConsoleColor.DarkGray:
isFaint = true;
return AnsiControlCode.ForegroundBlack;
case ConsoleColor.DarkGreen:
isFaint = true;
return AnsiControlCode.ForegroundGreen;
case ConsoleColor.DarkMagenta:
isFaint = true;
return AnsiControlCode.ForegroundMagenta;
case ConsoleColor.DarkRed:
isFaint = true;
return AnsiControlCode.ForegroundRed;
case ConsoleColor.DarkYellow:
isFaint = true;
return AnsiControlCode.ForegroundYellow;
default:
throw new NotSupportedException("Unsupported color " + color);
}
}
private static AnsiControlCode ToBackgroundColor(ConsoleColor color)
{
bool isFaint;
return ToForegroundColor(color, out isFaint) + 10;
}
private static bool QuantizedColorEquals(Color first, Color second)
{
return ConsoleStyle.ToConsoleColor(first) == ConsoleStyle.ToConsoleColor(second);
}
private static bool QuantizedColorEquals(
Nullable<Color> first,
Nullable<Color> second)
{
if (first.HasValue)
{
if (second.HasValue)
{
return QuantizedColorEquals(first.Value, second.Value);
}
else
{
return false;
}
}
else
{
return !second.HasValue;
}
}
}
}
| |
/// <license>
/// This is a port of the SciMark2a Java Benchmark to C# by
/// Chris Re (cmr28@cornell.edu) and Werner Vogels (vogels@cs.cornell.edu)
///
/// For details on the original authors see http://math.nist.gov/scimark2
///
/// This software is likely to burn your processor, bitflip your memory chips
/// annihilate your screen and corrupt all your disks, so you use it at your
/// own risk.
/// </license>
using System;
namespace SciMark2
{
public class kernel
{
// each measurement returns approx Mflops
public static double measureFFT(int N, double mintime, Random R)
{
// initialize FFT data as complex (N real/img pairs)
double[] x = RandomVector(2 * N, R);
//double oldx[] = NewVectorCopy(x);
long cycles = 1;
Stopwatch Q = new Stopwatch();
while (true)
{
Q.start();
for (int i = 0; i < cycles; i++)
{
FFT.transform(x); // forward transform
FFT.inverse(x); // backward transform
}
Q.stop();
if (Q.read() >= mintime)
break;
cycles *= 2;
}
// approx Mflops
const double EPS = 1.0e-10;
if (FFT.test(x) / N > EPS)
return 0.0;
return FFT.num_flops(N) * cycles / Q.read() * 1.0e-6;
}
public static double measureSOR(int N, double min_time, Random R)
{
double[][] G = RandomMatrix(N, N, R);
Stopwatch Q = new Stopwatch();
int cycles = 1;
while (true)
{
Q.start();
SOR.execute(1.25, G, cycles);
Q.stop();
if (Q.read() >= min_time)
break;
cycles *= 2;
}
// approx Mflops
return SOR.num_flops(N, N, cycles) / Q.read() * 1.0e-6;
}
public static double measureMonteCarlo(double min_time, Random R)
{
Stopwatch Q = new Stopwatch();
int cycles = 1;
while (true)
{
Q.start();
MonteCarlo.integrate(cycles);
Q.stop();
if (Q.read() >= min_time)
break;
cycles *= 2;
}
// approx Mflops
return MonteCarlo.num_flops(cycles) / Q.read() * 1.0e-6;
}
public static double measureSparseMatmult(int N, int nz, double min_time, Random R)
{
// initialize vector multipliers and storage for result
// y = A*y;
double[] x = RandomVector(N, R);
double[] y = new double[N];
// initialize square sparse matrix
//
// for this test, we create a sparse matrix wit M/nz nonzeros
// per row, with spaced-out evenly between the begining of the
// row to the main diagonal. Thus, the resulting pattern looks
// like
// +-----------------+
// +* +
// +*** +
// +* * * +
// +** * * +
// +** * * +
// +* * * * +
// +* * * * +
// +* * * * +
// +-----------------+
//
// (as best reproducible with integer artihmetic)
// Note that the first nr rows will have elements past
// the diagonal.
int nr = nz / N; // average number of nonzeros per row
int anz = nr * N; // _actual_ number of nonzeros
double[] val = RandomVector(anz, R);
int[] col = new int[anz];
int[] row = new int[N + 1];
row[0] = 0;
for (int r = 0; r < N; r++)
{
// initialize elements for row r
int rowr = row[r];
row[r + 1] = rowr + nr;
int step = r / nr;
if (step < 1)
step = 1;
// take at least unit steps
for (int i = 0; i < nr; i++)
col[rowr + i] = i * step;
}
Stopwatch Q = new Stopwatch();
int cycles = 1;
while (true)
{
Q.start();
SparseCompRow.matmult(y, val, row, col, x, cycles);
Q.stop();
if (Q.read() >= min_time)
break;
cycles *= 2;
}
// approx Mflops
return SparseCompRow.num_flops(N, nz, cycles) / Q.read() * 1.0e-6;
}
public static double measureLU(int N, double min_time, Random R)
{
// compute approx Mlfops, or O if LU yields large errors
double[][] A = RandomMatrix(N, N, R);
double[][] lu = new double[N][];
for (int i = 0; i < N; i++)
{
lu[i] = new double[N];
}
int[] pivot = new int[N];
Stopwatch Q = new Stopwatch();
int cycles = 1;
while (true)
{
Q.start();
for (int i = 0; i < cycles; i++)
{
CopyMatrix(lu, A);
LU.factor(lu, pivot);
}
Q.stop();
if (Q.read() >= min_time)
break;
cycles *= 2;
}
// verify that LU is correct
double[] b = RandomVector(N, R);
double[] x = NewVectorCopy(b);
LU.solve(lu, pivot, x);
const double EPS = 1.0e-12;
if (normabs(b, matvec(A, x)) / N > EPS)
return 0.0;
// else return approx Mflops
//
return LU.num_flops(N) * cycles / Q.read() * 1.0e-6;
}
private static double[] NewVectorCopy(double[] x)
{
int N = x.Length;
double[] y = new double[N];
for (int i = 0; i < N; i++)
y[i] = x[i];
return y;
}
private static void CopyVector(double[] B, double[] A)
{
int N = A.Length;
for (int i = 0; i < N; i++)
B[i] = A[i];
}
private static double normabs(double[] x, double[] y)
{
int N = x.Length;
double sum = 0.0;
for (int i = 0; i < N; i++)
sum += System.Math.Abs(x[i] - y[i]);
return sum;
}
private static void CopyMatrix(double[][] B, double[][] A)
{
int M = A.Length;
int N = A[0].Length;
int remainder = N & 3; // N mod 4;
for (int i = 0; i < M; i++)
{
double[] Bi = B[i];
double[] Ai = A[i];
for (int j = 0; j < remainder; j++)
Bi[j] = Ai[j];
for (int j = remainder; j < N; j += 4)
{
Bi[j] = Ai[j];
Bi[j + 1] = Ai[j + 1];
Bi[j + 2] = Ai[j + 2];
Bi[j + 3] = Ai[j + 3];
}
}
}
private static double[][] RandomMatrix(int M, int N, Random R)
{
double[][] A = new double[M][];
for (int i = 0; i < M; i++)
{
A[i] = new double[N];
}
for (int i = 0; i < N; i++)
for (int j = 0; j < N; j++)
A[i][j] = R.nextDouble();
return A;
}
private static double[] RandomVector(int N, Random R)
{
double[] A = new double[N];
for (int i = 0; i < N; i++)
A[i] = R.nextDouble();
return A;
}
private static double[] matvec(double[][] A, double[] x)
{
int N = x.Length;
double[] y = new double[N];
matvec(A, x, y);
return y;
}
private static void matvec(double[][] A, double[] x, double[] y)
{
int M = A.Length;
int N = A[0].Length;
for (int i = 0; i < M; i++)
{
double sum = 0.0;
double[] Ai = A[i];
for (int j = 0; j < N; j++)
sum += Ai[j] * x[j];
y[i] = sum;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace System.Globalization
{
/// <summary>
/// Modern Persian calendar is a solar observation based calendar. Each new year begins on the day when the vernal equinox occurs before noon.
/// The epoch is the date of the vernal equinox prior to the epoch of the Islamic calendar (March 19, 622 Julian or March 22, 622 Gregorian)
/// There is no Persian year 0. Ordinary years have 365 days. Leap years have 366 days with the last month (Esfand) gaining the extra day.
/// </summary>
/// <remarks>
/// Calendar support range:
/// Calendar Minimum Maximum
/// ========== ========== ==========
/// Gregorian 0622/03/22 9999/12/31
/// Persian 0001/01/01 9378/10/13
/// </remarks>
public class PersianCalendar : Calendar
{
        public static readonly int PersianEra = 1;

        // Whole days from DateTime's day zero to the Persian epoch
        // (Gregorian 622-03-22), the vernal equinox preceding the Islamic epoch.
        private static readonly long s_persianEpoch = new DateTime(622, 3, 22).Ticks / GregorianCalendar.TicksPerDay;
        // Offset (in days) used to land safely inside a Persian year when
        // searching for the year's starting equinox.
        private const int ApproximateHalfYear = 180;

        // Selectors for the internal date-part extraction helpers.
        private const int DatePartYear = 0;
        private const int DatePartDayOfYear = 1;
        private const int DatePartMonth = 2;
        private const int DatePartDay = 3;
        private const int MonthsPerYear = 12;

        // Cumulative day counts at the start of each Persian month:
        // months 1-6 have 31 days, months 7-11 have 30, Esfand has 29 (30 in leap years).
        private static readonly int[] s_daysToMonth = { 0, 31, 62, 93, 124, 155, 186, 216, 246, 276, 306, 336, 366 };

        // Persian date corresponding to DateTime.MaxValue (9999-12-31 Gregorian).
        private const int MaxCalendarYear = 9378;
        private const int MaxCalendarMonth = 10;
        private const int MaxCalendarDay = 13;

        // Persian calendar (year: 1, month: 1, day:1 ) = Gregorian (year: 622, month: 3, day: 22)
        // This is the minimal Gregorian date that we support in the PersianCalendar.
        private static readonly DateTime s_minDate = new DateTime(622, 3, 22);
        private static readonly DateTime s_maxDate = DateTime.MaxValue;

        public override DateTime MinSupportedDateTime => s_minDate;

        public override DateTime MaxSupportedDateTime => s_maxDate;

        public override CalendarAlgorithmType AlgorithmType => CalendarAlgorithmType.SolarCalendar;

        public PersianCalendar()
        {
        }

        // Date arithmetic is delegated to the Gregorian calendar internally.
        internal override CalendarId BaseCalendarID => CalendarId.GREGORIAN;

        internal override CalendarId ID => CalendarId.PERSIAN;
private long GetAbsoluteDatePersian(int year, int month, int day)
{
if (year < 1 || year > MaxCalendarYear || month < 1 || month > 12)
{
throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadYearMonthDay);
}
// day is one based, make 0 based since this will be the number of days we add to beginning of year below
int ordinalDay = DaysInPreviousMonths(month) + day - 1;
int approximateDaysFromEpochForYearStart = (int)(CalendricalCalculationsHelper.MeanTropicalYearInDays * (year - 1));
long yearStart = CalendricalCalculationsHelper.PersianNewYearOnOrBefore(s_persianEpoch + approximateDaysFromEpochForYearStart + ApproximateHalfYear);
yearStart += ordinalDay;
return yearStart;
}
internal static void CheckTicksRange(long ticks)
{
if (ticks < s_minDate.Ticks || ticks > s_maxDate.Ticks)
{
throw new ArgumentOutOfRangeException(
"time",
ticks,
SR.Format(SR.ArgumentOutOfRange_CalendarRange, s_minDate, s_maxDate));
}
}
internal static void CheckEraRange(int era)
{
if (era != CurrentEra && era != PersianEra)
{
throw new ArgumentOutOfRangeException(nameof(era), era, SR.ArgumentOutOfRange_InvalidEraValue);
}
}
internal static void CheckYearRange(int year, int era)
{
CheckEraRange(era);
if (year < 1 || year > MaxCalendarYear)
{
throw new ArgumentOutOfRangeException(
nameof(year),
year,
SR.Format(SR.ArgumentOutOfRange_Range, 1, MaxCalendarYear));
}
}
internal static void CheckYearMonthRange(int year, int month, int era)
{
CheckYearRange(year, era);
if (year == MaxCalendarYear)
{
if (month > MaxCalendarMonth)
{
throw new ArgumentOutOfRangeException(
nameof(month),
month,
SR.Format(SR.ArgumentOutOfRange_Range, 1, MaxCalendarMonth));
}
}
if (month < 1 || month > 12)
{
throw new ArgumentOutOfRangeException(nameof(month), month, SR.ArgumentOutOfRange_Month);
}
}
private static int MonthFromOrdinalDay(int ordinalDay)
{
Debug.Assert(ordinalDay <= 366);
int index = 0;
while (ordinalDay > s_daysToMonth[index])
{
index++;
}
return index;
}
private static int DaysInPreviousMonths(int month)
{
Debug.Assert(1 <= month && month <= 12);
// months are one based but for calculations use 0 based
--month;
return s_daysToMonth[month];
}
internal int GetDatePart(long ticks, int part)
{
CheckTicksRange(ticks);
// Get the absolute date. The absolute date is the number of days from January 1st, 1 A.D.
// 1/1/0001 is absolute date 1.
long numDays = ticks / GregorianCalendar.TicksPerDay + 1;
// Calculate the appromixate Persian Year.
long yearStart = CalendricalCalculationsHelper.PersianNewYearOnOrBefore(numDays);
int y = (int)(Math.Floor(((yearStart - s_persianEpoch) / CalendricalCalculationsHelper.MeanTropicalYearInDays) + 0.5)) + 1;
Debug.Assert(y >= 1);
if (part == DatePartYear)
{
return y;
}
// Calculate the Persian Month.
int ordinalDay = (int)(numDays - CalendricalCalculationsHelper.GetNumberOfDays(this.ToDateTime(y, 1, 1, 0, 0, 0, 0, 1)));
if (part == DatePartDayOfYear)
{
return ordinalDay;
}
int m = MonthFromOrdinalDay(ordinalDay);
Debug.Assert(ordinalDay >= 1);
Debug.Assert(m >= 1 && m <= 12);
if (part == DatePartMonth)
{
return m;
}
int d = ordinalDay - DaysInPreviousMonths(m);
Debug.Assert(1 <= d);
Debug.Assert(d <= 31);
// Calculate the Persian Day.
if (part == DatePartDay)
{
return d;
}
// Incorrect part value.
throw new InvalidOperationException(SR.InvalidOperation_DateTimeParsing);
}
public override DateTime AddMonths(DateTime time, int months)
{
if (months < -120000 || months > 120000)
{
throw new ArgumentOutOfRangeException(
nameof(months),
months,
SR.Format(SR.ArgumentOutOfRange_Range, -120000, 120000));
}
// Get the date in Persian calendar.
int y = GetDatePart(time.Ticks, DatePartYear);
int m = GetDatePart(time.Ticks, DatePartMonth);
int d = GetDatePart(time.Ticks, DatePartDay);
int i = m - 1 + months;
if (i >= 0)
{
m = i % 12 + 1;
y = y + i / 12;
}
else
{
m = 12 + (i + 1) % 12;
y = y + (i - 11) / 12;
}
int days = GetDaysInMonth(y, m);
if (d > days)
{
d = days;
}
long ticks = GetAbsoluteDatePersian(y, m, d) * TicksPerDay + time.Ticks % TicksPerDay;
Calendar.CheckAddResult(ticks, MinSupportedDateTime, MaxSupportedDateTime);
return new DateTime(ticks);
}
public override DateTime AddYears(DateTime time, int years)
{
return AddMonths(time, years * 12);
}
public override int GetDayOfMonth(DateTime time)
{
return GetDatePart(time.Ticks, DatePartDay);
}
public override DayOfWeek GetDayOfWeek(DateTime time)
{
return (DayOfWeek)((int)(time.Ticks / TicksPerDay + 1) % 7);
}
public override int GetDayOfYear(DateTime time)
{
return GetDatePart(time.Ticks, DatePartDayOfYear);
}
public override int GetDaysInMonth(int year, int month, int era)
{
CheckYearMonthRange(year, month, era);
if ((month == MaxCalendarMonth) && (year == MaxCalendarYear))
{
return MaxCalendarDay;
}
int daysInMonth = s_daysToMonth[month] - s_daysToMonth[month - 1];
if ((month == MonthsPerYear) && !IsLeapYear(year))
{
Debug.Assert(daysInMonth == 30);
--daysInMonth;
}
return daysInMonth;
}
public override int GetDaysInYear(int year, int era)
{
CheckYearRange(year, era);
if (year == MaxCalendarYear)
{
return s_daysToMonth[MaxCalendarMonth - 1] + MaxCalendarDay;
}
return IsLeapYear(year, CurrentEra) ? 366 : 365;
}
public override int GetEra(DateTime time)
{
CheckTicksRange(time.Ticks);
return PersianEra;
}
public override int[] Eras => new int[] { PersianEra };
public override int GetMonth(DateTime time)
{
return GetDatePart(time.Ticks, DatePartMonth);
}
public override int GetMonthsInYear(int year, int era)
{
CheckYearRange(year, era);
if (year == MaxCalendarYear)
{
return MaxCalendarMonth;
}
return 12;
}
public override int GetYear(DateTime time)
{
return GetDatePart(time.Ticks, DatePartYear);
}
public override bool IsLeapDay(int year, int month, int day, int era)
{
// The year/month/era value checking is done in GetDaysInMonth().
int daysInMonth = GetDaysInMonth(year, month, era);
if (day < 1 || day > daysInMonth)
{
throw new ArgumentOutOfRangeException(
nameof(day),
day,
SR.Format(SR.ArgumentOutOfRange_Day, daysInMonth, month));
}
return IsLeapYear(year, era) && month == 12 && day == 30;
}
public override int GetLeapMonth(int year, int era)
{
CheckYearRange(year, era);
return 0;
}
public override bool IsLeapMonth(int year, int month, int era)
{
CheckYearMonthRange(year, month, era);
return false;
}
public override bool IsLeapYear(int year, int era)
{
CheckYearRange(year, era);
if (year == MaxCalendarYear)
{
return false;
}
return (GetAbsoluteDatePersian(year + 1, 1, 1) - GetAbsoluteDatePersian(year, 1, 1)) == 366;
}
public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
{
// The year/month/era checking is done in GetDaysInMonth().
int daysInMonth = GetDaysInMonth(year, month, era);
if (day < 1 || day > daysInMonth)
{
throw new ArgumentOutOfRangeException(
nameof(day),
day,
SR.Format(SR.ArgumentOutOfRange_Day, daysInMonth, month));
}
long lDate = GetAbsoluteDatePersian(year, month, day);
if (lDate < 0)
{
throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadYearMonthDay);
}
return new DateTime(lDate * GregorianCalendar.TicksPerDay + TimeToTicks(hour, minute, second, millisecond));
}
private const int DefaultTwoDigitYearMax = 1410;
public override int TwoDigitYearMax
{
get
{
if (_twoDigitYearMax == -1)
{
_twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DefaultTwoDigitYearMax);
}
return _twoDigitYearMax;
}
set
{
VerifyWritable();
if (value < 99 || value > MaxCalendarYear)
{
throw new ArgumentOutOfRangeException(
nameof(value),
value,
SR.Format(SR.ArgumentOutOfRange_Range, 99, MaxCalendarYear));
}
_twoDigitYearMax = value;
}
}
public override int ToFourDigitYear(int year)
{
if (year < 0)
{
throw new ArgumentOutOfRangeException(nameof(year), year, SR.ArgumentOutOfRange_NeedNonNegNum);
}
if (year < 100)
{
return base.ToFourDigitYear(year);
}
if (year > MaxCalendarYear)
{
throw new ArgumentOutOfRangeException(
nameof(year),
year,
SR.Format(SR.ArgumentOutOfRange_Range, 1, MaxCalendarYear));
}
return year;
}
}
}
| |
#region License
/*
* All content copyright Marko Lahma, unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#endregion
using System;
using System.Threading;
using System.Threading.Tasks;
using Quartz.Impl;
namespace Quartz.Examples.Example02
{
/// <summary>
/// This example will demonstrate all of the basics of scheduling capabilities
/// of Quartz using Simple Triggers <see cref="ISimpleTrigger"/>.
/// </summary>
/// <author>Bill Kratzer</author>
/// <author>Marko Lahma (.NET)</author>
public class SchedulingCapabilitiesUsingSimpleTriggersExample : IExample
{
    /// <summary>
    /// Schedules eight jobs demonstrating simple-trigger features (one-shot,
    /// repeating, multiple triggers per job, durable jobs, manual triggering
    /// and rescheduling), runs the scheduler, then shuts it down and prints stats.
    /// </summary>
    public virtual async Task Run()
    {
        Console.WriteLine("------- Initializing -------------------");

        // First we must get a reference to a scheduler
        ISchedulerFactory sf = new StdSchedulerFactory();
        IScheduler sched = await sf.GetScheduler();

        Console.WriteLine("------- Initialization Complete --------");
        Console.WriteLine("------- Scheduling Jobs ----------------");

        // jobs can be scheduled before sched.start() has been called

        // get a "nice round" time a few seconds in the future...
        DateTimeOffset startTime = DateBuilder.NextGivenSecondDate(null, 15);

        // job1 will only fire once at date/time "startTime"
        IJobDetail job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job1", "group1")
            .Build();

        ISimpleTrigger trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger1", "group1")
            .StartAt(startTime)
            .Build();

        // schedule it to run!
        DateTimeOffset? ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // job2 will only fire once at date/time "startTime"
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job2", "group1")
            .Build();

        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger2", "group1")
            .StartAt(startTime)
            .Build();

        ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // job3 will run 11 times (run once and repeat 10 more times)
        // job3 will repeat every 10 seconds
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job3", "group1")
            .Build();

        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger3", "group1")
            .StartAt(startTime)
            .WithSimpleSchedule(x => x.WithIntervalInSeconds(10).WithRepeatCount(10))
            .Build();

        ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // the same job (job3) will be scheduled by a another trigger
        // this time will only repeat twice at a 10 second interval
        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger3", "group2")
            .StartAt(startTime)
            .WithSimpleSchedule(x => x.WithIntervalInSeconds(10).WithRepeatCount(2))
            .ForJob(job)
            .Build();

        ft = await sched.ScheduleJob(trigger);
        Console.WriteLine(job.Key +
                          " will [also] run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // job4 will run 6 times (run once and repeat 5 more times)
        // job4 will repeat every 10 seconds
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job4", "group1")
            .Build();

        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger4", "group1")
            .StartAt(startTime)
            .WithSimpleSchedule(x => x.WithIntervalInSeconds(10).WithRepeatCount(5))
            .Build();

        ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // job5 will run once, five minutes in the future
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job5", "group1")
            .Build();

        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger5", "group1")
            .StartAt(DateBuilder.FutureDate(5, IntervalUnit.Minute))
            .Build();

        ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // job6 will run indefinitely, every 40 seconds
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job6", "group1")
            .Build();

        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger6", "group1")
            .StartAt(startTime)
            .WithSimpleSchedule(x => x.WithIntervalInSeconds(40).RepeatForever())
            .Build();

        ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        Console.WriteLine("------- Starting Scheduler ----------------");

        // All of the jobs have been added to the scheduler, but none of the jobs
        // will run until the scheduler has been started
        await sched.Start();

        Console.WriteLine("------- Started Scheduler -----------------");

        // jobs can also be scheduled after start() has been called...
        // job7 will repeat 20 times, repeat every five minutes
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job7", "group1")
            .Build();

        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger7", "group1")
            .StartAt(startTime)
            .WithSimpleSchedule(x => x.WithIntervalInMinutes(5).WithRepeatCount(20))
            .Build();

        ft = await sched.ScheduleJob(job, trigger);
        Console.WriteLine(job.Key +
                          " will run at: " + ft +
                          " and repeat: " + trigger.RepeatCount +
                          " times, every " + trigger.RepeatInterval.TotalSeconds + " seconds");

        // jobs can be fired directly... (rather than waiting for a trigger)
        job = JobBuilder.Create<SimpleJob>()
            .WithIdentity("job8", "group1")
            .StoreDurably()
            .Build();

        // job8 is durable (no trigger), so it must be added explicitly.
        await sched.AddJob(job, true);

        Console.WriteLine("'Manually' triggering job8...");
        await sched.TriggerJob(new JobKey("job8", "group1"));

        Console.WriteLine("------- Waiting 30 seconds... --------------");

        try
        {
            // wait 30 seconds to show jobs
            await Task.Delay(TimeSpan.FromSeconds(30));
            // executing...
        }
        catch (ThreadInterruptedException)
        {
        }

        // jobs can be re-scheduled...
        // trigger7 is replaced with an identically-keyed trigger carrying the
        // same schedule (every five minutes, 20 repeats)
        Console.WriteLine("------- Rescheduling... --------------------");
        trigger = (ISimpleTrigger) TriggerBuilder.Create()
            .WithIdentity("trigger7", "group1")
            .StartAt(startTime)
            .WithSimpleSchedule(x => x.WithIntervalInMinutes(5).WithRepeatCount(20))
            .Build();

        ft = await sched.RescheduleJob(trigger.Key, trigger);
        Console.WriteLine("job7 rescheduled to run at: " + ft);

        Console.WriteLine("------- Waiting five minutes... ------------");

        // wait five minutes to show jobs
        await Task.Delay(TimeSpan.FromMinutes(5));
        // executing...

        Console.WriteLine("------- Shutting Down ---------------------");
        // true => wait for currently executing jobs to complete
        await sched.Shutdown(true);
        Console.WriteLine("------- Shutdown Complete -----------------");

        // display some stats about the schedule that just ran
        SchedulerMetaData metaData = await sched.GetMetaData();
        Console.WriteLine($"Executed {metaData.NumberOfJobsExecuted} jobs.");
    }
}
}
| |
/*
* Anarres C Preprocessor
* Copyright (c) 2007-2008, Shevek
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Text;
using System.Collections.Generic;
using boolean = System.Boolean;
using Debug = System.Diagnostics.Debug;
namespace CppNet {
/* This source should always be active, since we don't expand macros
* in any inactive context. */
// A token source that yields the expansion of a single macro invocation.
// Walks the macro's replacement-list tokens, substituting arguments
// (expanded or unexpanded as required), stringifying (#) and pasting (##).
// Note: this is Java-transliterated code; length()/charAt()/append() etc.
// are project-provided Java-compatibility helpers.
internal class MacroTokenSource : Source {
    private Macro macro;                 // the macro being expanded
    private Iterator<Token> tokens;      /* Pointer into the macro. */
    private List<Argument> args;         /* { unexpanded, expanded } */
    private Iterator<Token> arg;         /* "current expansion" */

    internal MacroTokenSource(Macro m, List<Argument> args) {
        this.macro = m;
        this.tokens = m.getTokens().iterator();
        this.args = args;
        this.arg = null;
    }

    // Reports whether macro m is already being expanded on this source chain;
    // used to suppress recursive self-expansion.
    override internal boolean isExpanding(Macro m) {
        /* When we are expanding an arg, 'this' macro is not
         * being expanded, and thus we may re-expand it. */
        if (/* XXX this.arg == null && */ this.macro == m)
            return true;
        return base.isExpanding(m);
    }

    /* XXX Called from Preprocessor [ugly]. */
    // Appends cs to buf with backslash, quote, newline and carriage-return
    // characters escaped, as required inside a C string literal.
    internal static void escape(StringBuilder buf, string cs) {
        if (cs == null)
        {
            return;
        }
        for (int i = 0; i < cs.length(); i++) {
            char c = cs.charAt(i);
            switch (c) {
                case '\\':
                    buf.append("\\\\");
                    break;
                case '"':
                    buf.append("\\\"");
                    break;
                case '\n':
                    buf.append("\\n");
                    break;
                case '\r':
                    buf.append("\\r");
                    break;
                default:
                    buf.append(c);
                    break;
            }
        }
    }

    // Concatenates the raw (unexpanded) text of an argument's tokens into buf.
    private void concat(StringBuilder buf, Argument arg) {
        Iterator<Token> it = arg.iterator();
        while (it.hasNext()) {
            Token tok = it.next();
            buf.append(tok.getText());
        }
    }

    // Implements the # operator: turns an argument into a STRING token,
    // escaping its text; position is taken from the # token.
    private Token stringify(Token pos, Argument arg) {
        StringBuilder buf = new StringBuilder();
        concat(buf, arg);
        // System.out.println("Concat: " + arg + " -> " + buf);
        StringBuilder str = new StringBuilder("\"");
        escape(str, buf.ToString());
        str.append("\"");
        // System.out.println("Escape: " + buf + " -> " + str);
        return new Token(Token.STRING,
                pos.getLine(), pos.getColumn(),
                str.toString(), buf.toString());
    }

    /* At this point, we have consumed the first M_PASTE.
     * @see Macro#addPaste(Token) */
    // Implements the ## operator: concatenates the surrounding operands'
    // text (chained pastes increase the operand count), then re-lexes the
    // result and installs it as the current sub-iterator.
    private void paste(Token ptok) {
        StringBuilder buf = new StringBuilder();
        Token err = null;
        /* We know here that arg is null or expired,
         * since we cannot paste an expanded arg. */
        int count = 2;
        for (int i = 0; i < count; i++) {
            if (!tokens.hasNext()) {
                /* XXX This one really should throw. */
                error(ptok.getLine(), ptok.getColumn(),
                        "Paste at end of expansion");
                buf.append(' ').append(ptok.getText());
                break;
            }
            Token tok = tokens.next();
            // System.out.println("Paste " + tok);
            switch (tok.getType()) {
                case Token.M_PASTE:
                    /* One extra to paste, plus one because the
                     * paste token didn't count. */
                    count += 2;
                    ptok = tok;
                    break;
                case Token.M_ARG:
                    int idx = (int)tok.getValue();
                    concat(buf, args.get(idx));
                    break;
                /* XXX Test this. */
                case Token.CCOMMENT:
                case Token.CPPCOMMENT:
                    break;
                default:
                    buf.append(tok.getText());
                    break;
            }
        }

        /* Push and re-lex. */
        /*
        StringBuilder	src = new StringBuilder();
        escape(src, buf);
        StringLexerSource	sl = new StringLexerSource(src.toString());
        */
        StringLexerSource sl = new StringLexerSource(buf.toString());

        /* XXX Check that concatenation produces a valid token. */

        arg = new SourceIterator(sl);
    }

    // Produces the next token of the expansion: drains any pending
    // sub-iterator (pasted/expanded-arg tokens) first, then interprets the
    // next replacement-list token (stringify, arg expansion, paste, or
    // plain), returning EOF when the macro body is exhausted.
    override public Token token() {
        for (;;) {
            /* Deal with lexed tokens first. */

            if (arg != null) {
                if (arg.hasNext()) {
                    Token tok2 = arg.next();
                    /* XXX PASTE -> INVALID. */
                    Debug.Assert(tok2.getType() != Token.M_PASTE,
                        "Unexpected paste token");
                    return tok2;
                }
                arg = null;
            }

            if (!tokens.hasNext())
                return new Token(Token.EOF, -1, -1, "");	/* End of macro. */
            Token tok = tokens.next();
            int idx;
            switch (tok.getType()) {
                case Token.M_STRING:
                    /* Use the nonexpanded arg. */
                    idx = (int)tok.getValue();
                    return stringify(tok, args.get(idx));
                case Token.M_ARG:
                    /* Expand the arg. */
                    idx = (int)tok.getValue();
                    // System.out.println("Pushing arg " + args.get(idx));
                    arg = args.get(idx).expansion();
                    break;
                case Token.M_PASTE:
                    paste(tok);
                    break;
                default:
                    return tok;
            }
        } /* for */
    }

    override public String ToString() {
        StringBuilder buf = new StringBuilder();
        buf.Append("expansion of ").Append(macro.getName());
        Source parent = getParent();
        if (parent != null)
            buf.Append(" in ").Append(parent);
        return buf.ToString();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Data.Common;
using System.Diagnostics;
namespace System.Data
{
internal sealed class Select
{
private readonly DataTable _table;              // table being selected from
private readonly IndexField[] _indexFields;     // parsed sort specification
private DataViewRowState _recordStates;         // row states to include
private DataExpression _rowFilter;              // parsed filter expression, null when no filter
private ExpressionNode _expression;             // root node of the filter expression
private Index _index;                           // index chosen/built for evaluation
private int[] _records;                         // matching record ids
private int _recordCount;                       // count of entries in _records
private ExpressionNode _linearExpression;       // portion of the filter that must be evaluated row-by-row
private bool _candidatesForBinarySearch;        // true when at least one column comparison can use an index

// Per-column bookkeeping for filter terms that are candidates for binary search.
private sealed class ColumnInfo
{
    public bool flag = false;               // Misc. Use
    public bool equalsOperator = false;     // True when the associated expr has = Operator defined
    public BinaryNode expr = null;          // Binary Search capable expression associated
}
private ColumnInfo[] _candidateColumns;     // one slot per table column; null = not a filter column
private int _nCandidates;                   // number of non-null entries in _candidateColumns
private int _matchedCandidates;             // leading candidate columns covered by the chosen index
/// <summary>
/// Prepares a row-selection over <paramref name="table"/>: parses the sort
/// string into index fields and, when non-empty, parses the filter
/// expression; record-state filtering is applied during selection.
/// </summary>
public Select(DataTable table, string filterExpression, string sort, DataViewRowState recordStates)
{
    _table = table;
    _indexFields = table.ParseSortString(sort);
    if (filterExpression != null && filterExpression.Length > 0)
    {
        _rowFilter = new DataExpression(_table, filterExpression);
        _expression = _rowFilter.ExpressionNode;
    }
    _recordStates = recordStates;
}
// An operator can drive a binary search when it is one of the relational
// comparisons (the contiguous EqualTo..LessOrEqual range) or IS / IS NOT.
private bool IsSupportedOperator(int op)
{
    if (op == Operators.Is || op == Operators.IsNot)
    {
        return true;
    }
    return op >= Operators.EqualTo && op <= Operators.LessOrEqual;
}
// Gathers all linear expressions in to this.linearExpression and all binary expressions in to their respective candidate columns expressions
// Gathers all linear expressions in to this.linearExpression and all binary expressions in to their respective candidate columns expressions
// Recursive walk over an AND-tree: column-vs-constant comparisons are routed to
// their column's candidate slot (enabling binary search); anything containing OR
// collapses the whole filter into the linear (row-by-row) expression; remaining
// terms are ANDed into _linearExpression.
private void AnalyzeExpression(BinaryNode expr)
{
    // Once the entire filter has been declared linear there is nothing to gather.
    if (_linearExpression == _expression)
        return;
    if (expr._op == Operators.Or)
    {
        // OR defeats index-based narrowing: evaluate the whole filter linearly.
        _linearExpression = _expression;
        return;
    }
    else
    if (expr._op == Operators.And)
    {
        bool isLeft = false, isRight = false;

        // Left operand: recurse into binary nodes, looking through chains of
        // no-op unary wrappers (e.g. parentheses).
        if (expr._left is BinaryNode)
        {
            AnalyzeExpression((BinaryNode)expr._left);
            if (_linearExpression == _expression)
                return;
            isLeft = true;
        }
        else
        {
            UnaryNode unaryNode = expr._left as UnaryNode;
            if (unaryNode != null)
            {
                while (unaryNode._op == Operators.Noop && unaryNode._right is UnaryNode && ((UnaryNode)unaryNode._right)._op == Operators.Noop)
                {
                    unaryNode = (UnaryNode)unaryNode._right;
                }
                if (unaryNode._op == Operators.Noop && unaryNode._right is BinaryNode)
                {
                    AnalyzeExpression((BinaryNode)(unaryNode._right));
                    if (_linearExpression == _expression)
                    {
                        return;
                    }
                    isLeft = true;
                }
            }
        }

        // Right operand: same treatment as the left.
        if (expr._right is BinaryNode)
        {
            AnalyzeExpression((BinaryNode)expr._right);
            if (_linearExpression == _expression)
                return;
            isRight = true;
        }
        else
        {
            UnaryNode unaryNode = expr._right as UnaryNode;
            if (unaryNode != null)
            {
                while (unaryNode._op == Operators.Noop && unaryNode._right is UnaryNode && ((UnaryNode)unaryNode._right)._op == Operators.Noop)
                {
                    unaryNode = (UnaryNode)unaryNode._right;
                }
                if (unaryNode._op == Operators.Noop && unaryNode._right is BinaryNode)
                {
                    AnalyzeExpression((BinaryNode)(unaryNode._right));
                    if (_linearExpression == _expression)
                    {
                        return;
                    }
                    isRight = true;
                }
            }
        }

        // Both sides fully consumed by recursion: nothing left at this node.
        if (isLeft && isRight)
            return;

        // Exactly one side was consumed; AND the unconsumed side into the
        // linear expression.
        ExpressionNode e = isLeft ? expr._right : expr._left;
        _linearExpression = (_linearExpression == null ? e : new BinaryNode(_table, Operators.And, e, _linearExpression));
        return;
    }
    else
    if (IsSupportedOperator(expr._op))
    {
        // column <op> constant: attach to the column's candidate slot.
        if (expr._left is NameNode && expr._right is ConstNode)
        {
            ColumnInfo canColumn = _candidateColumns[((NameNode)(expr._left))._column.Ordinal];
            canColumn.expr = (canColumn.expr == null ? expr : new BinaryNode(_table, Operators.And, expr, canColumn.expr));
            if (expr._op == Operators.EqualTo)
            {
                canColumn.equalsOperator = true;
            }
            _candidatesForBinarySearch = true;
            return;
        }
        else
        // constant <op> column: swap operands (mirroring the comparison
        // operator) so the column is on the left, then attach as above.
        if (expr._right is NameNode && expr._left is ConstNode)
        {
            ExpressionNode temp = expr._left;
            expr._left = expr._right;
            expr._right = temp;
            switch (expr._op)
            {
                case Operators.GreaterThen: expr._op = Operators.LessThen; break;
                case Operators.LessThen: expr._op = Operators.GreaterThen; break;
                case Operators.GreaterOrEqual: expr._op = Operators.LessOrEqual; break;
                case Operators.LessOrEqual: expr._op = Operators.GreaterOrEqual; break;
                default: break;
            }
            ColumnInfo canColumn = _candidateColumns[((NameNode)(expr._left))._column.Ordinal];
            canColumn.expr = (canColumn.expr == null ? expr : new BinaryNode(_table, Operators.And, expr, canColumn.expr));
            if (expr._op == Operators.EqualTo)
            {
                canColumn.equalsOperator = true;
            }
            _candidatesForBinarySearch = true;
            return;
        }
    }

    // Not index-capable: evaluate this term row-by-row.
    _linearExpression = (_linearExpression == null ? expr : new BinaryNode(_table, Operators.And, expr, _linearExpression));
    return;
}
// Returns true when an existing index's field list can satisfy the requested
// sort: all requested sort fields must appear in order, and any interleaved
// extra index fields must be equality-constrained candidate columns (so they
// cannot disturb the requested ordering).
private bool CompareSortIndexDesc(IndexField[] fields)
{
    if (fields.Length < _indexFields.Length)
        return false;
    int j = 0;
    for (int i = 0; i < fields.Length && j < _indexFields.Length; i++)
    {
        if (fields[i] == _indexFields[j])
        {
            j++;
        }
        else
        {
            // A non-matching field is tolerable only if it is pinned to a
            // single value by an '=' filter term.
            ColumnInfo canColumn = _candidateColumns[fields[i].Column.Ordinal];
            if (!(canColumn != null && canColumn.equalsOperator))
                return false;
        }
    }
    return j == _indexFields.Length;
}
// Scans the table's existing sharable indexes (under the indexes lock) for one
// whose record states match and whose field order satisfies the requested
// sort; stores it in _index and returns true on success.
private bool FindSortIndex()
{
    _index = null;
    _table._indexesLock.EnterUpgradeableReadLock();
    try
    {
        int count = _table._indexes.Count;
        int rowsCount = _table.Rows.Count;
        for (int i = 0; i < count; i++)
        {
            Index ndx = _table._indexes[i];
            if (ndx.RecordStates != _recordStates)
                continue;
            if (!ndx.IsSharable)
            {
                continue;
            }
            if (CompareSortIndexDesc(ndx._indexFields))
            {
                _index = ndx;
                return true;
            }
        }
    }
    finally
    {
        _table._indexesLock.ExitUpgradeableReadLock();
    }
    return false;
}
// Returns no. of columns that are matched
// Returns no. of columns that are matched
// Counts how many leading index fields correspond to candidate (filterable)
// columns; a non-equality candidate terminates the run but is still counted,
// since a range comparison can use the index for its own position only.
private int CompareClosestCandidateIndexDesc(IndexField[] fields)
{
    int count = (fields.Length < _nCandidates ? fields.Length : _nCandidates);
    int i = 0;
    for (; i < count; i++)
    {
        ColumnInfo canColumn = _candidateColumns[fields[i].Column.Ordinal];
        if (canColumn == null || canColumn.expr == null)
        {
            // Field is not filter-constrained: the usable prefix ends here.
            break;
        }
        else
        if (!canColumn.equalsOperator)
        {
            // Range operator: include this field, but nothing beyond it.
            return i + 1;
        }
    }
    return i;
}
// Returns whether the found index (if any) is a sort index as well
// Returns whether the found index (if any) is a sort index as well
// Picks the existing sharable index covering the most candidate columns,
// preferring (on ties) one that also satisfies the requested sort. Sets
// _index and _matchedCandidates as a side effect.
private bool FindClosestCandidateIndex()
{
    _index = null;
    _matchedCandidates = 0;
    bool sortPriority = true;
    _table._indexesLock.EnterUpgradeableReadLock();
    try
    {
        int count = _table._indexes.Count;
        int rowsCount = _table.Rows.Count;
        for (int i = 0; i < count; i++)
        {
            Index ndx = _table._indexes[i];
            if (ndx.RecordStates != _recordStates)
                continue;
            if (!ndx.IsSharable)
                continue;
            int match = CompareClosestCandidateIndexDesc(ndx._indexFields);
            // Accept a strictly better match, or an equal match when the
            // current best does not satisfy the sort.
            if (match > _matchedCandidates || (match == _matchedCandidates && !sortPriority))
            {
                _matchedCandidates = match;
                _index = ndx;
                sortPriority = CompareSortIndexDesc(ndx._indexFields);
                // Best possible outcome: every candidate covered and the
                // sort satisfied — stop searching.
                if (_matchedCandidates == _nCandidates && sortPriority)
                {
                    return true;
                }
            }
        }
    }
    finally
    {
        _table._indexesLock.ExitUpgradeableReadLock();
    }
    return (_index != null ? sortPriority : false);
}
// Initialize candidate columns to new columnInfo and leave all non candidate columns to null
// Initialize candidate columns to new columnInfo and leave all non candidate columns to null
// A column is a candidate when the row filter depends on it and it belongs to
// this table; _nCandidates counts them.
private void InitCandidateColumns()
{
    _nCandidates = 0;
    _candidateColumns = new ColumnInfo[_table.Columns.Count];
    if (_rowFilter == null)
        return;
    DataColumn[] depColumns = _rowFilter.GetDependency();
    for (int i = 0; i < depColumns.Length; i++)
    {
        if (depColumns[i].Table == _table)
        {
            _candidateColumns[depColumns[i].Ordinal] = new ColumnInfo();
            _nCandidates++;
        }
    }
}
// Based on the required sorting and candidate columns settings, create a new index; Should be called only when there is no existing index to be reused
// Based on the required sorting and candidate columns settings, create a new index; Should be called only when there is no existing index to be reused
// Layout of the new index's fields:
//  - no candidates: the requested sort fields alone;
//  - all candidates equality-constrained: candidates first (they pin single
//    values), then the sort fields not already covered;
//  - otherwise: the sort fields first, then the remaining candidates, and
//    _matchedCandidates is recomputed as the usable leading prefix.
private void CreateIndex()
{
    if (_index == null)
    {
        if (_nCandidates == 0)
        {
            _index = new Index(_table, _indexFields, _recordStates, null);
            _index.AddRef();
        }
        else
        {
            int i;
            int lenCanColumns = _candidateColumns.Length;
            int lenIndexDesc = _indexFields.Length;

            // Determine whether every candidate column is constrained by '='.
            bool equalsOperator = true;
            for (i = 0; i < lenCanColumns; i++)
            {
                if (_candidateColumns[i] != null)
                {
                    if (!_candidateColumns[i].equalsOperator)
                    {
                        equalsOperator = false;
                        break;
                    }
                }
            }

            // Mark (via flag) the sort fields that are also candidates, and
            // count the overlap in j.
            int j = 0;
            for (i = 0; i < lenIndexDesc; i++)
            {
                ColumnInfo candidateColumn = _candidateColumns[_indexFields[i].Column.Ordinal];
                if (candidateColumn != null)
                {
                    candidateColumn.flag = true;
                    j++;
                }
            }
            int indexNotInCandidates = lenIndexDesc - j;
            int candidatesNotInIndex = _nCandidates - j;
            IndexField[] ndxFields = new IndexField[_nCandidates + indexNotInCandidates];

            if (equalsOperator)
            {
                // Candidates first (ascending), since '=' terms fix them to a
                // single value and cannot disturb the sort.
                j = 0;
                for (i = 0; i < lenCanColumns; i++)
                {
                    if (_candidateColumns[i] != null)
                    {
                        ndxFields[j++] = new IndexField(_table.Columns[i], isDescending: false);
                        _candidateColumns[i].flag = false;// this means it is processed
                    }
                }

                // Then sort fields that were not filter columns (or not yet
                // processed).
                for (i = 0; i < lenIndexDesc; i++)
                {
                    ColumnInfo canColumn = _candidateColumns[_indexFields[i].Column.Ordinal];
                    if (canColumn == null || canColumn.flag)
                    { // if sort column is not a filter col , or not processed
                        ndxFields[j++] = _indexFields[i];
                        if (canColumn != null)
                        {
                            canColumn.flag = false;
                        }
                    }
                }

                // Reset all flags so the array is clean for later passes.
                for (i = 0; i < _candidateColumns.Length; i++)
                {
                    if (_candidateColumns[i] != null)
                    {
                        _candidateColumns[i].flag = false;// same as before, it is false when it returns
                    }
                }

                // Debug.Assert(j == candidatesNotInIndex, "Whole ndxDesc should be filled!");
                _index = new Index(_table, ndxFields, _recordStates, null);
                if (!IsOperatorIn(_expression))
                {
                    // if the expression contains an 'IN' operator, the index will not be shared
                    // therefore we do not need to index.AddRef, also table would track index consuming more memory until first write
                    _index.AddRef();
                }

                _matchedCandidates = _nCandidates;
            }
            else
            {
                // Sort fields first (ordering must win), marking those that
                // are also candidates.
                for (i = 0; i < lenIndexDesc; i++)
                {
                    ndxFields[i] = _indexFields[i];
                    ColumnInfo canColumn = _candidateColumns[_indexFields[i].Column.Ordinal];
                    if (canColumn != null)
                        canColumn.flag = true;
                }

                // Then candidates not already present in the sort.
                j = i;
                for (i = 0; i < lenCanColumns; i++)
                {
                    if (_candidateColumns[i] != null)
                    {
                        if (!_candidateColumns[i].flag)
                        {
                            ndxFields[j++] = new IndexField(_table.Columns[i], isDescending: false);
                        }
                        else
                        {
                            _candidateColumns[i].flag = false;
                        }
                    }
                }
                // Debug.Assert(j == nCandidates+indexNotInCandidates, "Whole ndxDesc should be filled!");
                _index = new Index(_table, ndxFields, _recordStates, null);
                _matchedCandidates = 0;

                // Recompute the usable leading prefix: consecutive fields with
                // filter terms, stopping after the first non-equality term.
                if (_linearExpression != _expression)
                {
                    IndexField[] fields = _index._indexFields;
                    while (_matchedCandidates < j)
                    {
                        ColumnInfo canColumn = _candidateColumns[fields[_matchedCandidates].Column.Ordinal];
                        if (canColumn == null || canColumn.expr == null)
                            break;
                        _matchedCandidates++;
                        if (!canColumn.equalsOperator)
                            break;
                    }
                }

                // Reset all flags before returning.
                for (i = 0; i < _candidateColumns.Length; i++)
                {
                    if (_candidateColumns[i] != null)
                    {
                        _candidateColumns[i].flag = false;// same as before, it is false when it returns
                    }
                }
            }
        }
    }
}
/// <summary>
/// Recursively walks an expression tree and reports whether any binary node
/// in it uses the IN operator.
/// </summary>
private bool IsOperatorIn(ExpressionNode enode)
{
    return enode is BinaryNode bnode
        && (Operators.In == bnode._op
            || IsOperatorIn(bnode._right)
            || IsOperatorIn(bnode._left));
}
// Based on the current index and candidate columns settings, build the linear expression;
// should be called only when there is at least something for binary searching.
// Any candidate not covered by the matched index prefix is AND-ed into _linearExpression
// so it can be applied as a linear filter over the binary-search result range.
private void BuildLinearExpression()
{
    int i;
    IndexField[] fields = _index._indexFields;
    int lenId = fields.Length;
    Debug.Assert(_matchedCandidates > 0 && _matchedCandidates <= lenId, "BuildLinearExpression : Invalid Index");
    // Mark the candidates already satisfied by the index prefix so the loop below skips them.
    for (i = 0; i < _matchedCandidates; i++)
    {
        ColumnInfo canColumn = _candidateColumns[fields[i].Column.Ordinal];
        Debug.Assert(canColumn != null && canColumn.expr != null, "BuildLinearExpression : Must be a matched candidate");
        canColumn.flag = true;
    }
    //this is invalid assert, assumption was that all equals operator exists at the begining of candidateColumns
    // but with QFE 1704, this assumption is not true anymore
    // Debug.Assert(matchedCandidates==1 || candidateColumns[matchedCandidates-1].equalsOperator, "BuildLinearExpression : Invalid matched candidates");
    int lenCanColumns = _candidateColumns.Length;
    for (i = 0; i < lenCanColumns; i++)
    {
        if (_candidateColumns[i] != null)
        {
            if (!_candidateColumns[i].flag)
            {
                // Unmatched candidate: AND its predicate into the linear expression.
                if (_candidateColumns[i].expr != null)
                {
                    _linearExpression = (_linearExpression == null ? _candidateColumns[i].expr : new BinaryNode(_table, Operators.And, _candidateColumns[i].expr, _linearExpression));
                }
            }
            else
            {
                // Reset the marker so flags are always false when this method returns.
                _candidateColumns[i].flag = false;
            }
        }
    }
}
/// <summary>
/// Evaluates the filter expression against the table and returns the matching rows,
/// sorted according to the index fields. Uses an existing index for binary search
/// when the expression allows it, and falls back to a linear scan (plus an explicit
/// sort) otherwise.
/// </summary>
public DataRow[] SelectRows()
{
    bool needSorting = true;
    InitCandidateColumns();
    if (_expression is BinaryNode)
    {
        // Decide whether parts of the expression can be served by binary search.
        AnalyzeExpression((BinaryNode)_expression);
        if (!_candidatesForBinarySearch)
        {
            _linearExpression = _expression;
        }
        if (_linearExpression == _expression)
        {
            // Whole expression is linear: candidate metadata is irrelevant, clear it.
            for (int i = 0; i < _candidateColumns.Length; i++)
            {
                if (_candidateColumns[i] != null)
                {
                    _candidateColumns[i].equalsOperator = false;
                    _candidateColumns[i].expr = null;
                }
            }
        }
        else
        {
            // A matching index removes the need for an explicit sort afterwards.
            needSorting = !FindClosestCandidateIndex();
        }
    }
    else
    {
        _linearExpression = _expression;
    }
    if (_index == null && (_indexFields.Length > 0 || _linearExpression == _expression))
    {
        needSorting = !FindSortIndex();
    }
    if (_index == null)
    {
        // No usable index found; build one (already in the requested order).
        CreateIndex();
        needSorting = false;
    }
    if (_index.RecordCount == 0)
        return _table.NewRowArray(0);
    Range range;
    if (_matchedCandidates == 0)
    {
        // Nothing binary-searchable: linearly filter the full index range.
        range = new Range(0, _index.RecordCount - 1);
        Debug.Assert(!needSorting, "What are we doing here if no real reuse of this index ?");
        _linearExpression = _expression;
        return GetLinearFilteredRows(range);
    }
    else
    {
        // Narrow to the matching range via binary search first.
        range = GetBinaryFilteredRecords();
        if (range.Count == 0)
            return _table.NewRowArray(0);
        if (_matchedCandidates < _nCandidates)
        {
            // Remaining candidates must be applied as a linear filter.
            BuildLinearExpression();
        }
        if (!needSorting)
        {
            return GetLinearFilteredRows(range);
        }
        else
        {
            // Index order differs from the requested sort: filter, then sort explicitly.
            _records = GetLinearFilteredRecords(range);
            _recordCount = _records.Length;
            if (_recordCount == 0)
                return _table.NewRowArray(0);
            Sort(0, _recordCount - 1);
            return GetRows();
        }
    }
}
/// <summary>
/// Materializes the filtered record indexes in _records into their owning
/// DataRow objects, preserving order.
/// </summary>
public DataRow[] GetRows()
{
    DataRow[] rows = _table.NewRowArray(_recordCount);
    for (int index = 0; index < rows.Length; index++)
    {
        rows[index] = _table._recordManager[_records[index]];
    }
    return rows;
}
/// <summary>
/// Evaluates the linear filter expression against a single physical record and
/// returns whether it matches. Records with no owning row are accepted.
/// </summary>
private bool AcceptRecord(int record)
{
    DataRow row = _table._recordManager[record];
    if (row == null)
    {
        return true;
    }
    // Map the physical record slot back to the row version it represents.
    DataRowVersion version =
        row._oldRecord == record ? DataRowVersion.Original :
        row._newRecord == record ? DataRowVersion.Current :
        row._tempRecord == record ? DataRowVersion.Proposed :
        DataRowVersion.Default;
    object val = _linearExpression.Eval(row, version);
    try
    {
        return DataExpression.ToBoolean(val);
    }
    catch (Exception e) when (ADP.IsCatchableExceptionType(e))
    {
        // Non-boolean filter results surface as a filter-conversion error.
        throw ExprException.FilterConvertion(_rowFilter.Expression);
    }
}
/// <summary>
/// Compares a row (at the given version) against a binary predicate node for
/// binary searching. Returns 0 when the row satisfies the predicate, a negative
/// value when the row sorts before the searched value, and a positive value when
/// it sorts after.
/// </summary>
private int Eval(BinaryNode expr, DataRow row, DataRowVersion version)
{
    if (expr._op == Operators.And)
    {
        // AND: the first non-matching side decides the ordering direction.
        int lResult = Eval((BinaryNode)expr._left, row, version);
        if (lResult != 0)
            return lResult;
        int rResult = Eval((BinaryNode)expr._right, row, version);
        if (rResult != 0)
            return rResult;
        return 0;
    }
    long c = 0;
    object vLeft = expr._left.Eval(row, version);
    if (expr._op != Operators.Is && expr._op != Operators.IsNot)
    {
        object vRight = expr._right.Eval(row, version);
        bool isLConst = (expr._left is ConstNode);
        bool isRConst = (expr._right is ConstNode);
        // Nulls sort to a fixed side so the binary search stays consistent.
        if ((vLeft == DBNull.Value) || (expr._left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft)))
            return -1;
        if ((vRight == DBNull.Value) || (expr._right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight)))
            return 1;
        StorageType leftType = DataStorage.GetStorageType(vLeft.GetType());
        if (StorageType.Char == leftType)
        {
            if ((isRConst) || (!expr._right.IsSqlColumn))
                vRight = Convert.ToChar(vRight, _table.FormatProvider);
            else
                vRight = SqlConvert.ChangeType2(vRight, StorageType.Char, typeof(char), _table.FormatProvider);
        }
        StorageType rightType = DataStorage.GetStorageType(vRight.GetType());
        StorageType resultType;
        if (expr._left.IsSqlColumn || expr._right.IsSqlColumn)
        {
            resultType = expr.ResultSqlType(leftType, rightType, isLConst, isRConst, expr._op);
        }
        else
        {
            resultType = expr.ResultType(leftType, rightType, isLConst, isRConst, expr._op);
        }
        if (StorageType.Empty == resultType)
        {
            expr.SetTypeMismatchError(expr._op, vLeft.GetType(), vRight.GetType());
        }
        // if comparing a Guid column value against a string literal
        // use InvariantCulture instead of DataTable.Locale because in the Danish related cultures
        // sorting a Guid as a string has different results than in Invariant and English related cultures.
        // This fix is restricted to DataTable.Select("GuidColumn = 'string literal'") types of queries
        NameNode namedNode = null;
        System.Globalization.CompareInfo comparer =
            ((isLConst && !isRConst && (leftType == StorageType.String) && (rightType == StorageType.Guid) && (null != (namedNode = expr._right as NameNode)) && (namedNode._column.DataType == typeof(Guid))) ||
            (isRConst && !isLConst && (rightType == StorageType.String) && (leftType == StorageType.Guid) && (null != (namedNode = expr._left as NameNode)) && (namedNode._column.DataType == typeof(Guid))))
            ? System.Globalization.CultureInfo.InvariantCulture.CompareInfo : null;
        c = expr.BinaryCompare(vLeft, vRight, resultType, expr._op, comparer);
    }
    switch (expr._op)
    {
        case Operators.EqualTo: c = (c == 0 ? 0 : c < 0 ? -1 : 1); break;
        case Operators.GreaterThen: c = (c > 0 ? 0 : -1); break;
        case Operators.LessThen: c = (c < 0 ? 0 : 1); break;
        case Operators.GreaterOrEqual: c = (c >= 0 ? 0 : -1); break;
        case Operators.LessOrEqual: c = (c <= 0 ? 0 : 1); break;
        case Operators.Is: c = (vLeft == DBNull.Value ? 0 : -1); break;
        case Operators.IsNot: c = (vLeft != DBNull.Value ? 0 : 1); break;
        // BUG FIX: the original asserted Debug.Assert(true, ...), which can never
        // fire; assert false so unsupported operators are flagged in debug builds.
        default: Debug.Assert(false, "Unsupported Binary Search Operator!"); break;
    }
    return (int)c;
}
/// <summary>
/// Evaluates all matched candidate predicates against a record, in index-field order.
/// Returns 0 when the record matches every predicate; otherwise the comparison
/// result of the first failing predicate (sign-flipped for descending fields so it
/// agrees with the index sort order).
/// </summary>
private int Evaluate(int record)
{
    DataRow row = _table._recordManager[record];
    // Records without an owning row are treated as matching.
    if (row == null)
        return 0;
    // Map the physical record slot back to the row version it represents.
    DataRowVersion version = DataRowVersion.Default;
    if (row._oldRecord == record)
    {
        version = DataRowVersion.Original;
    }
    else if (row._newRecord == record)
    {
        version = DataRowVersion.Current;
    }
    else if (row._tempRecord == record)
    {
        version = DataRowVersion.Proposed;
    }
    IndexField[] fields = _index._indexFields;
    for (int i = 0; i < _matchedCandidates; i++)
    {
        int columnOrdinal = fields[i].Column.Ordinal;
        Debug.Assert(_candidateColumns[columnOrdinal] != null, "How come this is not a candidate column");
        Debug.Assert(_candidateColumns[columnOrdinal].expr != null, "How come there is no associated expression");
        int c = Eval(_candidateColumns[columnOrdinal].expr, row, version);
        if (c != 0)
            return fields[i].IsDescending ? -c : c;
    }
    return 0;
}
/// <summary>
/// Binary-searches the index for the FIRST position whose record satisfies all
/// matched candidate predicates. Returns -1 when no record matches.
/// </summary>
private int FindFirstMatchingRecord()
{
    int rec = -1;
    int lo = 0;
    int hi = _index.RecordCount - 1;
    while (lo <= hi)
    {
        // '+' binds tighter than '>>', so this is (lo + hi) / 2.
        int i = lo + hi >> 1;
        int recNo = _index.GetRecord(i);
        int c = Evaluate(recNo);
        if (c == 0) { rec = i; }
        // On a match (c == 0) keep searching the LEFT half for an earlier match.
        if (c < 0) lo = i + 1;
        else hi = i - 1;
    }
    return rec;
}
/// <summary>
/// Binary-searches the index, starting at <paramref name="lo"/>, for the LAST
/// position whose record satisfies all matched candidate predicates.
/// Returns -1 when no record at or after <paramref name="lo"/> matches.
/// </summary>
private int FindLastMatchingRecord(int lo)
{
    int rec = -1;
    int hi = _index.RecordCount - 1;
    while (lo <= hi)
    {
        // '+' binds tighter than '>>', so this is (lo + hi) / 2.
        int i = lo + hi >> 1;
        int recNo = _index.GetRecord(i);
        int c = Evaluate(recNo);
        if (c == 0) { rec = i; }
        // On a match (c == 0) keep searching the RIGHT half for a later match.
        if (c <= 0) lo = i + 1;
        else hi = i - 1;
    }
    return rec;
}
/// <summary>
/// Returns the contiguous index range whose records satisfy the matched candidate
/// predicates; the whole index when there are no matched candidates, and an empty
/// range when nothing matches.
/// </summary>
private Range GetBinaryFilteredRecords()
{
    if (_matchedCandidates == 0)
    {
        return new Range(0, _index.RecordCount - 1);
    }
    Debug.Assert(_matchedCandidates <= _index._indexFields.Length, "GetBinaryFilteredRecords : Invalid Index");
    int first = FindFirstMatchingRecord();
    if (first == -1)
    {
        return new Range();
    }
    int last = FindLastMatchingRecord(first);
    Debug.Assert(first <= last, "GetBinaryFilteredRecords : Invalid Search Results");
    return new Range(first, last);
}
/// <summary>
/// Returns the record indexes in the given index range, applying the linear
/// filter expression when one is present.
/// </summary>
private int[] GetLinearFilteredRecords(Range range)
{
    if (_linearExpression != null)
    {
        // Keep only records accepted by the linear filter.
        List<int> accepted = new List<int>();
        RBTree<int>.RBTreeEnumerator cursor = _index.GetEnumerator(range.Min);
        for (int n = 0; n < range.Count && cursor.MoveNext(); n++)
        {
            if (AcceptRecord(cursor.Current))
            {
                accepted.Add(cursor.Current);
            }
        }
        return accepted.ToArray();
    }
    // No filter: copy the range verbatim.
    int[] records = new int[range.Count];
    RBTree<int>.RBTreeEnumerator iterator = _index.GetEnumerator(range.Min);
    for (int n = 0; n < range.Count && iterator.MoveNext(); n++)
    {
        records[n] = iterator.Current;
    }
    return records;
}
/// <summary>
/// Returns the DataRows in the given index range, applying the linear filter
/// expression when one is present.
/// </summary>
private DataRow[] GetLinearFilteredRows(Range range)
{
    // No filter: let the index materialize the range directly.
    if (_linearExpression == null)
    {
        return _index.GetRows(range);
    }
    List<DataRow> accepted = new List<DataRow>();
    RBTree<int>.RBTreeEnumerator cursor = _index.GetEnumerator(range.Min);
    for (int n = 0; n < range.Count && cursor.MoveNext(); n++)
    {
        if (AcceptRecord(cursor.Current))
        {
            accepted.Add(_table._recordManager[cursor.Current]);
        }
    }
    DataRow[] rows = _table.NewRowArray(accepted.Count);
    accepted.CopyTo(rows);
    return rows;
}
/// <summary>
/// Total ordering over records used by Sort: compares by the index fields first,
/// then breaks ties by row id, and finally by record state so two distinct records
/// of the same row still order deterministically.
/// </summary>
private int CompareRecords(int record1, int record2)
{
    int lenIndexDesc = _indexFields.Length;
    for (int i = 0; i < lenIndexDesc; i++)
    {
        int c = _indexFields[i].Column.Compare(record1, record2);
        if (c != 0)
        {
            if (_indexFields[i].IsDescending) c = -c;
            return c;
        }
    }
    // All sort columns equal: fall back to row id (0 for records with no row).
    long id1 = _table._recordManager[record1] == null ? 0 : _table._recordManager[record1].rowID;
    long id2 = _table._recordManager[record2] == null ? 0 : _table._recordManager[record2].rowID;
    int diff = (id1 < id2) ? -1 : ((id2 < id1) ? 1 : 0);
    // if they're two records in the same row, we need to be able to distinguish them.
    if (diff == 0 && record1 != record2 &&
        _table._recordManager[record1] != null && _table._recordManager[record2] != null)
    {
        id1 = (int)_table._recordManager[record1].GetRecordState(record1);
        id2 = (int)_table._recordManager[record2].GetRecordState(record2);
        diff = (id1 < id2) ? -1 : ((id2 < id1) ? 1 : 0);
    }
    return diff;
}
/// <summary>
/// In-place quicksort of _records[left..right] using CompareRecords.
/// Recurses only on the left partition and loops on the right one
/// (manual tail-call elimination to bound recursion depth).
/// </summary>
private void Sort(int left, int right)
{
    int i, j;
    int record;
    do
    {
        i = left;
        j = right;
        // '+' binds tighter than '>>': pivot is the middle element.
        record = _records[i + j >> 1];
        do
        {
            // Hoare-style partition around the pivot record.
            while (CompareRecords(_records[i], record) < 0) i++;
            while (CompareRecords(_records[j], record) > 0) j--;
            if (i <= j)
            {
                int r = _records[i];
                _records[i] = _records[j];
                _records[j] = r;
                i++;
                j--;
            }
        } while (i <= j);
        if (left < j) Sort(left, j);
        left = i;
    } while (i < right);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using AutoMapper.Configuration;
using AutoMapper.Features;
using AutoMapper.Internal;
using AutoMapper.Internal.Mappers;
using AutoMapper.QueryableExtensions.Impl;
namespace AutoMapper
{
using static Expression;
using static Execution.ExpressionBuilder;
/// <summary>
/// Read-only entry point over a built mapping configuration: validation,
/// mapper creation, and execution-plan inspection/compilation.
/// </summary>
public interface IConfigurationProvider
{
    /// <summary>
    /// Dry run all configured type maps and throw <see cref="AutoMapperConfigurationException"/> for each problem
    /// </summary>
    void AssertConfigurationIsValid();
    /// <summary>
    /// Create a mapper instance based on this configuration. Mapper instances are lightweight and can be created as needed.
    /// </summary>
    /// <returns>The mapper instance</returns>
    IMapper CreateMapper();
    /// <summary>
    /// Create a mapper instance with the specified service constructor to be used for resolvers and type converters.
    /// </summary>
    /// <param name="serviceCtor">Service factory to create services</param>
    /// <returns>The mapper instance</returns>
    IMapper CreateMapper(Func<Type, object> serviceCtor);
    /// <summary>
    /// Builds the execution plan used to map the source to destination.
    /// Useful to understand what exactly is happening during mapping.
    /// See <a href="https://automapper.readthedocs.io/en/latest/Understanding-your-mapping.html">the wiki</a> for details.
    /// </summary>
    /// <param name="sourceType">the runtime type of the source object</param>
    /// <param name="destinationType">the runtime type of the destination object</param>
    /// <returns>the execution plan</returns>
    LambdaExpression BuildExecutionPlan(Type sourceType, Type destinationType);
    /// <summary>
    /// Compile all underlying mapping expressions to cached delegates.
    /// Use if you want AutoMapper to compile all mappings up front instead of deferring expression compilation for each first map.
    /// </summary>
    void CompileMappings();
}
/// <summary>
/// Sealed, immutable runtime representation of an AutoMapper configuration.
/// Built once from a MapperConfigurationExpression; after the constructor finishes,
/// _sealed is true and type-map lookups are served from cached dictionaries,
/// with runtime (closed-generic) maps created lazily.
/// </summary>
public class MapperConfiguration : IGlobalConfiguration
{
    private static readonly MethodInfo MappingError = typeof(MapperConfiguration).GetMethod(nameof(GetMappingError));
    private readonly IObjectMapper[] _mappers;
    // Maps explicitly configured by the user (filled during Seal via profile registration).
    private readonly Dictionary<TypePair, TypeMap> _configuredMaps;
    // Maps resolved at seal/build time, including derived/included type pairs.
    private readonly Dictionary<TypePair, TypeMap> _resolvedMaps;
    // Maps created lazily after sealing (e.g. closed generics).
    private readonly LockingConcurrentDictionary<TypePair, TypeMap> _runtimeMaps;
    private readonly ProjectionBuilder _projectionBuilder;
    // Compiled map delegates, keyed by requested/runtime type pairs.
    private readonly LockingConcurrentDictionary<MapRequest, Delegate> _executionPlans;
    private readonly ConfigurationValidator _validator;
    private readonly Features<IRuntimeFeature> _features = new();
    private readonly int _recursiveQueriesMaxDepth;
    private readonly int _maxExecutionPlanDepth;
    private readonly bool _enableNullPropagationForQueryMapping;
    private readonly Func<Type, object> _serviceCtor;
    private readonly bool _sealed;
    // True when any profile declares open generic maps; gates closed-generic resolution.
    private readonly bool _hasOpenMaps;
    public MapperConfiguration(MapperConfigurationExpression configurationExpression)
    {
        var configuration = (IGlobalConfigurationExpression)configurationExpression;
        if (configuration.MethodMappingEnabled != false)
        {
            configuration.IncludeSourceExtensionMethods(typeof(Enumerable));
        }
        _mappers = configuration.Mappers.ToArray();
        _executionPlans = new(CompileExecutionPlan);
        _validator = new(this, configuration);
        _projectionBuilder = new(this, configuration.ProjectionMappers.ToArray());
        _serviceCtor = configuration.ServiceCtor;
        _enableNullPropagationForQueryMapping = configuration.EnableNullPropagationForQueryMapping ?? false;
        _maxExecutionPlanDepth = configuration.MaxExecutionPlanDepth + 1;
        _recursiveQueriesMaxDepth = configuration.RecursiveQueriesMaxDepth;
        // Profile slot 0 is the root configuration itself; user profiles follow.
        Configuration = new((IProfileConfiguration)configuration);
        int typeMapsCount = Configuration.TypeMapsCount;
        int openTypeMapsCount = Configuration.OpenTypeMapsCount;
        Profiles = new ProfileMap[configuration.Profiles.Count + 1];
        Profiles[0] = Configuration;
        int index = 1;
        foreach (var profile in configuration.Profiles)
        {
            var profileMap = new ProfileMap(profile, configuration);
            Profiles[index++] = profileMap;
            typeMapsCount += profileMap.TypeMapsCount;
            openTypeMapsCount += profileMap.OpenTypeMapsCount;
        }
        // Presize the caches from the counted maps to avoid rehashing during Seal.
        _configuredMaps = new(typeMapsCount);
        _hasOpenMaps = openTypeMapsCount > 0;
        _runtimeMaps = new(GetTypeMap, openTypeMapsCount);
        _resolvedMaps = new(2 * typeMapsCount);
        configuration.Features.Configure(this);
        Seal();
        // Profiles are cleared after sealing; their configuration state is no longer needed.
        foreach (var profile in Profiles)
        {
            profile.Clear();
        }
        _sealed = true;
    }
    public MapperConfiguration(Action<IMapperConfigurationExpression> configure)
        : this(Build(configure))
    {
    }
    public void AssertConfigurationIsValid() => _validator.AssertConfigurationExpressionIsValid(_configuredMaps.Values);
    public IMapper CreateMapper() => new Mapper(this);
    public IMapper CreateMapper(Func<Type, object> serviceCtor) => new Mapper(this, serviceCtor);
    public void CompileMappings()
    {
        // Eagerly compile a plan for every resolved, non-open-generic pair.
        foreach (var request in _resolvedMaps.Keys.Where(t => !t.IsGenericTypeDefinition).Select(types => new MapRequest(types, types)).ToArray())
        {
            GetExecutionPlan(request);
        }
    }
    public LambdaExpression BuildExecutionPlan(Type sourceType, Type destinationType)
    {
        var typePair = new TypePair(sourceType, destinationType);
        return this.Internal().BuildExecutionPlan(new(typePair, typePair));
    }
    LambdaExpression IGlobalConfiguration.BuildExecutionPlan(in MapRequest mapRequest)
    {
        // Prefer a configured/resolved type map; fall back to a matching object mapper.
        var typeMap = ResolveTypeMap(mapRequest.RuntimeTypes) ?? ResolveTypeMap(mapRequest.RequestedTypes);
        if (typeMap != null)
        {
            return GenerateTypeMapExpression(mapRequest.RequestedTypes, typeMap);
        }
        var mapperToUse = FindMapper(mapRequest.RuntimeTypes);
        return GenerateObjectMapperExpression(mapRequest, mapperToUse);
    }
    private static MapperConfigurationExpression Build(Action<IMapperConfigurationExpression> configure)
    {
        var expr = new MapperConfigurationExpression();
        configure(expr);
        return expr;
    }
    IProjectionBuilder IGlobalConfiguration.ProjectionBuilder => _projectionBuilder;
    Func<Type, object> IGlobalConfiguration.ServiceCtor => _serviceCtor;
    bool IGlobalConfiguration.EnableNullPropagationForQueryMapping => _enableNullPropagationForQueryMapping;
    int IGlobalConfiguration.MaxExecutionPlanDepth => _maxExecutionPlanDepth;
    private ProfileMap Configuration { get; }
    ProfileMap[] IGlobalConfiguration.Profiles => Profiles;
    internal ProfileMap[] Profiles { get; }
    int IGlobalConfiguration.RecursiveQueriesMaxDepth => _recursiveQueriesMaxDepth;
    Features<IRuntimeFeature> IGlobalConfiguration.Features => _features;
    Func<TSource, TDestination, ResolutionContext, TDestination> IGlobalConfiguration.GetExecutionPlan<TSource, TDestination>(in MapRequest mapRequest)
        => (Func<TSource, TDestination, ResolutionContext, TDestination>)GetExecutionPlan(mapRequest);
    private Delegate GetExecutionPlan(in MapRequest mapRequest) => _executionPlans.GetOrAdd(mapRequest);
    private Delegate CompileExecutionPlan(MapRequest mapRequest)
    {
        var executionPlan = ((IGlobalConfiguration)this).BuildExecutionPlan(mapRequest);
        return executionPlan.Compile(); // breakpoint here to inspect all execution plans
    }
    TypeMap IGlobalConfiguration.ResolveAssociatedTypeMap(TypePair types)
    {
        var typeMap = ResolveTypeMap(types);
        if (typeMap != null)
        {
            return typeMap;
        }
        // No direct map: ask a matching mapper for the types it maps through.
        if (FindMapper(types) is IObjectMapperInfo objectMapperInfo)
        {
            return ResolveTypeMap(objectMapperInfo.GetAssociatedTypes(types));
        }
        return null;
    }
    private static LambdaExpression GenerateTypeMapExpression(TypePair requestedTypes, TypeMap typeMap)
    {
        typeMap.CheckProjection();
        if (requestedTypes == typeMap.Types)
        {
            return typeMap.MapExpression;
        }
        // Requested types differ from the map's types (e.g. base/derived): wrap the
        // map expression with the necessary conversions.
        var mapDestinationType = typeMap.DestinationType;
        var requestedDestinationType = requestedTypes.DestinationType;
        var source = Parameter(requestedTypes.SourceType, "source");
        var destination = Parameter(requestedDestinationType, "typeMapDestination");
        var checkNullValueTypeDest = CheckNullValueType(destination, mapDestinationType);
        return
            Lambda(
                ToType(
                    Invoke(typeMap.MapExpression, ToType(source, typeMap.SourceType), ToType(checkNullValueTypeDest, mapDestinationType), ContextParameter),
                    requestedDestinationType),
                source, destination, ContextParameter);
    }
    // Coalesce a null reference destination to default(T) when the runtime destination is a value type.
    private static Expression CheckNullValueType(Expression expression, Type runtimeType) =>
        !expression.Type.IsValueType && runtimeType.IsValueType ? Coalesce(expression, Default(runtimeType)) : expression;
    private LambdaExpression GenerateObjectMapperExpression(in MapRequest mapRequest, IObjectMapper mapperToUse)
    {
        var source = Parameter(mapRequest.RequestedTypes.SourceType, "source");
        var destination = Parameter(mapRequest.RequestedTypes.DestinationType, "mapperDestination");
        var runtimeDestinationType = mapRequest.RuntimeTypes.DestinationType;
        Expression fullExpression;
        if (mapperToUse == null)
        {
            // No mapper matches: the plan itself throws when executed.
            var exception = new AutoMapperMappingException("Missing type map configuration or unsupported mapping.", null, mapRequest.RuntimeTypes)
            {
                MemberMap = mapRequest.MemberMap
            };
            fullExpression = Throw(Constant(exception), runtimeDestinationType);
        }
        else
        {
            // Wrap the mapper expression so runtime failures rethrow as AutoMapperMappingException.
            var checkNullValueTypeDest = CheckNullValueType(destination, runtimeDestinationType);
            var map = mapperToUse.MapExpression(this, Configuration, mapRequest.MemberMap,
                ToType(source, mapRequest.RuntimeTypes.SourceType),
                ToType(checkNullValueTypeDest, runtimeDestinationType));
            var newException = Call(MappingError, ExceptionParameter, Constant(mapRequest));
            var throwExpression = Throw(newException, runtimeDestinationType);
            fullExpression = TryCatch(ToType(map, runtimeDestinationType), Catch(ExceptionParameter, throwExpression));
        }
        var profileMap = mapRequest.MemberMap?.Profile ?? Configuration;
        var nullCheckSource = NullCheckSource(profileMap, source, destination, fullExpression, mapRequest.MemberMap);
        return Lambda(nullCheckSource, source, destination, ContextParameter);
    }
    public static AutoMapperMappingException GetMappingError(Exception innerException, in MapRequest mapRequest) =>
        new("Error mapping types.", innerException, mapRequest.RuntimeTypes) { MemberMap = mapRequest.MemberMap };
    IReadOnlyCollection<TypeMap> IGlobalConfiguration.GetAllTypeMaps() => _configuredMaps.Values;
    TypeMap IGlobalConfiguration.FindTypeMapFor(Type sourceType, Type destinationType) => FindTypeMapFor(sourceType, destinationType);
    TypeMap IGlobalConfiguration.FindTypeMapFor<TSource, TDestination>() => FindTypeMapFor(typeof(TSource), typeof(TDestination));
    TypeMap IGlobalConfiguration.FindTypeMapFor(TypePair typePair) => FindTypeMapFor(typePair);
    TypeMap FindTypeMapFor(Type sourceType, Type destinationType) => FindTypeMapFor(new(sourceType, destinationType));
    TypeMap FindTypeMapFor(TypePair typePair) => _configuredMaps.GetValueOrDefault(typePair);
    TypeMap IGlobalConfiguration.ResolveTypeMap(Type sourceType, Type destinationType) => ResolveTypeMap(new(sourceType, destinationType));
    TypeMap IGlobalConfiguration.ResolveTypeMap(TypePair typePair) => ResolveTypeMap(typePair);
    TypeMap ResolveTypeMap(TypePair typePair)
    {
        if (_resolvedMaps.TryGetValue(typePair, out TypeMap typeMap))
        {
            return typeMap;
        }
        if (_sealed)
        {
            typeMap = _runtimeMaps.GetOrAdd(typePair);
            // if it's a dynamically created type map, we need to seal it outside GetTypeMap to handle recursion
            if (typeMap != null && typeMap.MapExpression == null)
            {
                lock (typeMap)
                {
                    typeMap.Seal(this, null);
                }
            }
        }
        else
        {
            // Still building: resolve and cache directly (single-threaded at this point).
            typeMap = GetTypeMap(typePair);
            _resolvedMaps.Add(typePair, typeMap);
        }
        return typeMap;
    }
    private TypeMap GetTypeMap(TypePair initialTypes)
    {
        // Try the exact pair first, then every base-class/interface combination.
        var typeMap = FindClosedGenericTypeMapFor(initialTypes);
        if (typeMap != null)
        {
            return typeMap;
        }
        var allSourceTypes = GetTypeInheritance(initialTypes.SourceType);
        var allDestinationTypes = GetTypeInheritance(initialTypes.DestinationType);
        foreach (var destinationType in allDestinationTypes)
        {
            foreach (var sourceType in allSourceTypes)
            {
                if (sourceType == initialTypes.SourceType && destinationType == initialTypes.DestinationType)
                {
                    continue;
                }
                var types = new TypePair(sourceType, destinationType);
                if (_resolvedMaps.TryGetValue(types, out typeMap))
                {
                    return typeMap;
                }
                typeMap = FindClosedGenericTypeMapFor(types);
                if (typeMap != null)
                {
                    return typeMap;
                }
            }
        }
        return null;
    }
    // Returns the type itself, its base classes, then its interfaces, with interfaces
    // reordered so those introduced deeper in the hierarchy come last.
    static List<Type> GetTypeInheritance(Type type)
    {
        var interfaces = type.GetInterfaces();
        var lastIndex = interfaces.Length - 1;
        var types = new List<Type>(interfaces.Length + 2) { type };
        Type baseType = type;
        while ((baseType = baseType.BaseType) != null)
        {
            types.Add(baseType);
            foreach (var interfaceType in baseType.GetInterfaces())
            {
                // Move interfaces implemented by base types to the end of the array.
                var interfaceIndex = Array.LastIndexOf(interfaces, interfaceType);
                if (interfaceIndex != lastIndex)
                {
                    interfaces[interfaceIndex] = interfaces[lastIndex];
                    interfaces[lastIndex] = interfaceType;
                }
            }
        }
        foreach (var interfaceType in interfaces)
        {
            types.Add(interfaceType);
        }
        return types;
    }
    IEnumerable<IObjectMapper> IGlobalConfiguration.GetMappers() => _mappers;
    private void Seal()
    {
        // Two-phase: register all maps first so Configure can see the complete set.
        foreach (var profile in Profiles)
        {
            profile.Register(this);
        }
        foreach (var profile in Profiles)
        {
            profile.Configure(this);
        }
        IGlobalConfiguration globalConfiguration = this;
        var derivedMaps = new List<TypeMap>();
        foreach (var typeMap in _configuredMaps.Values)
        {
            if (typeMap.DestinationTypeOverride != null)
            {
                var derivedMap = globalConfiguration.GetIncludedTypeMap(typeMap.GetAsPair());
                _resolvedMaps[typeMap.Types] = derivedMap;
            }
            else
            {
                _resolvedMaps[typeMap.Types] = typeMap;
            }
            derivedMaps.Clear();
            GetDerivedTypeMaps(typeMap,derivedMaps);
            // Derived source types resolve to the derived map for the base destination.
            foreach (var derivedMap in derivedMaps)
            {
                var includedPair = new TypePair(derivedMap.SourceType, typeMap.DestinationType);
                _resolvedMaps.TryAdd(includedPair, derivedMap);
            }
        }
        // typeMapsPath detects cycles while sealing nested maps.
        var typeMapsPath = new HashSet<TypeMap>();
        foreach (var typeMap in _configuredMaps.Values)
        {
            typeMap.Seal(this, typeMapsPath);
        }
        _features.Seal(this);
    }
    // Depth-first collection of all maps included (directly or transitively) by typeMap.
    private void GetDerivedTypeMaps(TypeMap typeMap, List<TypeMap> typeMaps)
    {
        foreach (var derivedMap in this.Internal().GetIncludedTypeMaps(typeMap))
        {
            typeMaps.Add(derivedMap);
            GetDerivedTypeMaps(derivedMap, typeMaps);
        }
    }
    TypeMap[] IGlobalConfiguration.GetIncludedTypeMaps(IReadOnlyCollection<TypePair> includedTypes)
    {
        if (includedTypes.Count == 0)
        {
            return Array.Empty<TypeMap>();
        }
        var typeMaps = new TypeMap[includedTypes.Count];
        int index = 0;
        foreach (var pair in includedTypes)
        {
            typeMaps[index] = GetIncludedTypeMap(pair);
            index++;
        }
        return typeMaps;
    }
    TypeMap IGlobalConfiguration.GetIncludedTypeMap(Type sourceType, Type destinationType) => GetIncludedTypeMap(new(sourceType, destinationType));
    TypeMap IGlobalConfiguration.GetIncludedTypeMap(TypePair pair) => GetIncludedTypeMap(pair);
    TypeMap GetIncludedTypeMap(TypePair pair)
    {
        var typeMap = FindTypeMapFor(pair);
        if (typeMap != null)
        {
            return typeMap;
        }
        else
        {
            typeMap = ResolveTypeMap(pair);
            // we want the exact map the user included, but we could instantiate an open generic
            if (typeMap?.Types != pair)
            {
                throw QueryMapperHelper.MissingMapException(pair);
            }
            return typeMap;
        }
    }
    private TypeMap FindClosedGenericTypeMapFor(TypePair typePair)
    {
        // Only relevant when open generic maps exist and the request is a closed generic.
        if (!_hasOpenMaps || !typePair.IsConstructedGenericType)
        {
            return null;
        }
        return FindClosedGenericMap(typePair);
        TypeMap FindClosedGenericMap(TypePair typePair)
        {
            var genericTypePair = typePair.GetTypeDefinitionIfGeneric();
            // A user map can be half-open on either side, or fully open.
            var userMap =
                FindTypeMapFor(genericTypePair.SourceType, typePair.DestinationType) ??
                FindTypeMapFor(typePair.SourceType, genericTypePair.DestinationType) ??
                FindTypeMapFor(genericTypePair);
            ITypeMapConfiguration genericMapConfig;
            ProfileMap profile;
            TypeMap cachedMap;
            TypePair closedTypes;
            if (userMap != null && userMap.DestinationTypeOverride == null)
            {
                genericMapConfig = userMap.Profile.GetGenericMap(userMap.Types);
                profile = userMap.Profile;
                cachedMap = null;
                closedTypes = typePair;
            }
            else
            {
                var foundGenericMap = _resolvedMaps.TryGetValue(genericTypePair, out cachedMap) && cachedMap.Types.ContainsGenericParameters;
                if (!foundGenericMap)
                {
                    return cachedMap;
                }
                genericMapConfig = cachedMap.Profile.GetGenericMap(cachedMap.Types);
                profile = cachedMap.Profile;
                closedTypes = cachedMap.Types.CloseGenericTypes(typePair);
            }
            if (genericMapConfig == null)
            {
                return null;
            }
            // Instantiate the open generic map with the requested closed types.
            var typeMap = profile.CreateClosedGenericTypeMap(genericMapConfig, closedTypes, this);
            cachedMap?.CopyInheritedMapsTo(typeMap);
            return typeMap;
        }
    }
    IObjectMapper IGlobalConfiguration.FindMapper(TypePair types) => FindMapper(types);
    // First registered mapper that matches wins (registration order matters).
    IObjectMapper FindMapper(TypePair types)
    {
        foreach (var mapper in _mappers)
        {
            if (mapper.IsMatch(types))
            {
                return mapper;
            }
        }
        return null;
    }
    void IGlobalConfiguration.RegisterTypeMap(TypeMap typeMap) => _configuredMaps[typeMap.Types] = typeMap;
    void IGlobalConfiguration.AssertConfigurationIsValid(TypeMap typeMap) => _validator.AssertConfigurationIsValid(new[] { typeMap });
    void IGlobalConfiguration.AssertConfigurationIsValid(string profileName)
    {
        if (Profiles.All(x => x.Name != profileName))
        {
            throw new ArgumentOutOfRangeException(nameof(profileName), $"Cannot find any profiles with the name '{profileName}'.");
        }
        _validator.AssertConfigurationIsValid(_configuredMaps.Values.Where(typeMap => typeMap.Profile.Name == profileName));
    }
    void IGlobalConfiguration.AssertConfigurationIsValid<TProfile>() => this.Internal().AssertConfigurationIsValid(typeof(TProfile).FullName);
}
}
| |
using Android.Annotation;
using Android.Content;
using Android.Content.Res;
using Android.Graphics.Drawables;
using Android.OS;
using Android.Util;
using Android.Views;
using Android.Widget;
using System;
using System.Collections.Generic;
using Xamarin.Android.ExpandableSelector.Animation;
namespace Xamarin.Android.ExpandableSelector
{
/// <summary>
/// FrameLayout extension used to show a list of ExpandableItems instances represented with Button
/// or ImageButton widgets which can be collapsed and expanded using an animation. The configurable
/// elements of the class are:
/// - List of items to show represented with ExpandableItem instances.
/// - Time used to perform the collapse/expand animations. Expressed in milliseconds.
/// - Show or hide the view background when the List of ExpandaleItems are collapsed.
/// - Configure a ExpandableSelectorListeners to be notified when the view is going to be
/// collapsed/expanded or has been collapsed/expanded.
/// - Configure a OnExpandableItemClickListener to be notified when an item is clicked.
/// </summary>
public class ExpandableSelector : FrameLayout
{
private static readonly int DEFAULT_ANIMATION_DURATION = 300;
private List<View> _buttons = new List<View>();
private ExpandableSelectorAnimator _expandableSelectorAnimator;
private Drawable _expandedBackground;
private bool _hideBackgroundIfCollapsed;
/// <summary>
/// Creates the selector programmatically; delegates to the attribute-based
/// constructor with no attributes.
/// </summary>
public ExpandableSelector(Context context)
    : this(context, null)
{
}
/// <summary>
/// Creates the selector from XML inflation; delegates to the styled constructor
/// with a default style attribute of 0.
/// </summary>
public ExpandableSelector(Context context, IAttributeSet attrs)
    : this(context, attrs, 0)
{
}
/// <summary>
/// Creates the selector from XML inflation with a default style attribute and
/// reads the custom attributes via InitializeView.
/// </summary>
public ExpandableSelector(Context context, IAttributeSet attrs, int defStyleAttr)
    : base(context, attrs, defStyleAttr)
{
    this.InitializeView(attrs);
}
/// <summary>
/// Lollipop-and-above constructor that additionally accepts a default style
/// resource; reads the custom attributes via InitializeView.
/// </summary>
[TargetApi(Value = (int)BuildVersionCodes.Lollipop)]
public ExpandableSelector(Context context, IAttributeSet attrs, int defStyleAttr, int defStyleRes)
    : base(context, attrs, defStyleAttr, defStyleRes)
{
    this.InitializeView(attrs);
}
// The list of ExpandableItem instances this selector currently renders
// (set by ShowExpandableItems and mutated by UpdateExpandableItem).
public List<ExpandableItem> ExpandableItems { get; set; }
/// <summary>
/// Returns true if the view is collapsed and false if the view is expanded.
/// Delegates the state query to the backing animator.
/// </summary>
public bool IsCollapsed => this._expandableSelectorAnimator.IsCollapsed;
/// <summary>
/// Returns true if the view is expanded and false if the view is collapsed.
/// Delegates the state query to the backing animator.
/// </summary>
public bool IsExpanded => this._expandableSelectorAnimator.IsExpanded;
/// <summary>
/// Raised when one of the rendered expandable items is clicked.
/// </summary>
public event EventHandler<ExpandableItemClickEventArgs> ItemClick;

/// <summary>
/// Raises <see cref="ItemClick"/> for the item at the given position.
/// </summary>
protected virtual void OnItemClick(int index ,ExpandableItem item)
{
    // Null-conditional invoke reads the delegate field once, so no race with unsubscription.
    this.ItemClick?.Invoke(this, new ExpandableItemClickEventArgs(index, item));
}
/// <summary>
/// Raised after the expand animation finishes.
/// </summary>
public event EventHandler Expanded;

/// <summary>
/// Raises <see cref="Expanded"/>.
/// </summary>
protected virtual void OnExpanded()
{
    this.Expanded?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Raised after the collapse animation finishes.
/// </summary>
public event EventHandler Collapsed;

/// <summary>
/// Raises <see cref="Collapsed"/>.
/// </summary>
protected virtual void OnCollapsed()
{
    this.Collapsed?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Raised just before the expand animation starts.
/// </summary>
public event EventHandler StartExpand;

/// <summary>
/// Raises <see cref="StartExpand"/>.
/// </summary>
protected virtual void OnStartExpand()
{
    this.StartExpand?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Raised just before the collapse animation starts.
/// </summary>
public event EventHandler StartCollapse;

/// <summary>
/// Raises <see cref="StartCollapse"/>.
/// </summary>
protected virtual void OnStartCollapse()
{
    this.StartCollapse?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Configures a List ExpandableItem to be shown. By default, the list of ExpandableItems is
/// going to be shown collapsed. Please take into account that this method creates
/// ImageButton/Button widgets based on the size of the list passed as parameter. Don't use this
/// library as a RecyclerView and take into account the number of elements to show.
/// </summary>
/// <param name="expandableItems">Items to render; validated before any state is touched.</param>
public void ShowExpandableItems(List<ExpandableItem> expandableItems)
{
    // Validate first so an invalid list leaves the current state untouched,
    // then rebuild the widget set from scratch.
    this.ValidateExpandableItems(expandableItems);
    this.Reset();
    this.ExpandableItems = expandableItems;
    this.RenderExpandableItems();
    this.HookEvents();
    this.BringChildsToFront(expandableItems);
}
/// <summary>
/// Performs different animations to show the previously configured ExpandableItems transformed
/// into Button widgets. Notifies the ExpandableSelectorListener instance there was previously
/// configured.
/// </summary>
public void Expand()
{
    // NOTE(review): a fresh anonymous handler is attached to AnimationFinished on
    // every call and never detached, so repeated Expand()/Collapse() cycles
    // accumulate handlers and OnExpanded/OnCollapsed will fire multiple times per
    // animation — confirm and fix by detaching the handler after it runs.
    this._expandableSelectorAnimator.AnimationFinished += delegate
    {
        this.UpdateBackground();
        this.OnExpanded();
    };
    this.OnStartExpand();
    this._expandableSelectorAnimator.Expand();
}
/// <summary>
/// Performs different animations to hide the previously configured ExpandableItems transformed
/// into Button widgets. Notifies the ExpandableSelectorListener instance there was previously
/// configured.
/// </summary>
public void Collapse()
{
    // NOTE(review): same handler-accumulation issue as Expand() — a new anonymous
    // AnimationFinished subscriber is added on every call and never removed.
    this._expandableSelectorAnimator.AnimationFinished += delegate
    {
        this.UpdateBackground();
        this.OnCollapsed();
    };
    this.OnStartCollapse();
    this._expandableSelectorAnimator.Collapse();
}
/// <summary>
/// Given a position passed as parameter returns the ExpandableItem associated.
/// Throws ArgumentOutOfRangeException for an invalid position (plain list indexing).
/// </summary>
/// <param name="expandableItemPosition">Zero-based item position.</param>
/// <returns>The item stored at that position.</returns>
public ExpandableItem GetExpandableItem(int expandableItemPosition)
{
    return this.ExpandableItems[expandableItemPosition];
}
/// <summary>
/// Changes the ExpandableItem associated to a given position and updates the Button widget to
/// show the new ExpandableItem information.
/// </summary>
/// <param name="expandableItemPosition">Zero-based position of the item to replace.</param>
/// <param name="expandableItem">Replacement item; must not be null.</param>
public void UpdateExpandableItem(int expandableItemPosition, ExpandableItem expandableItem)
{
    this.ValidateExpandableItem(expandableItem);
    // Replace in place instead of RemoveAt + Insert: identical result, single list operation.
    this.ExpandableItems[expandableItemPosition] = expandableItem;
    // Buttons are rendered in reverse item order, so map the item position to its button.
    int buttonPosition = this._buttons.Count - 1 - expandableItemPosition;
    this.ConfigureButtonContent(this._buttons[buttonPosition], expandableItem);
}
// Reads the styled XML attributes once, configures the animator and background
// behaviour from them, and recycles the TypedArray as required by Android.
private void InitializeView(IAttributeSet attrs)
{
    TypedArray attributes =
        this.Context.ObtainStyledAttributes(attrs, Resource.Styleable.expandable_selector);
    this.InitializeAnimationDuration(attributes);
    this.InitializeHideBackgroundIfCollapsed(attributes);
    this.InitializeHideFirstItemOnCollapse(attributes);
    attributes.Recycle();
}
// Captures the hide_background_if_collapsed flag (default false), remembers the
// current background so it can be restored when expanded, and applies the
// initial background state.
private void InitializeHideBackgroundIfCollapsed(TypedArray attributes)
{
    this._hideBackgroundIfCollapsed =
        attributes.GetBoolean(Resource.Styleable.expandable_selector_hide_background_if_collapsed, false);
    this._expandedBackground = this.Background;
    this.UpdateBackground();
}
// Reads the animation duration and the three interpolator resource ids
// (expand / collapse / container) from the styled attributes, falling back to
// the library defaults, and builds the animator with them.
private void InitializeAnimationDuration(TypedArray attributes)
{
    int animationDuration =
        attributes.GetInteger(Resource.Styleable.expandable_selector_animation_duration,
            DEFAULT_ANIMATION_DURATION);
    int expandInterpolatorId =
        attributes.GetResourceId(Resource.Styleable.expandable_selector_expand_interpolator,
            global::Android.Resource.Animation.AccelerateInterpolator);
    int collapseInterpolatorId =
        attributes.GetResourceId(Resource.Styleable.expandable_selector_collapse_interpolator,
            global::Android.Resource.Animation.DecelerateInterpolator);
    int containerInterpolatorId =
        attributes.GetResourceId(Resource.Styleable.expandable_selector_container_interpolator,
            global::Android.Resource.Animation.DecelerateInterpolator);
    this._expandableSelectorAnimator = new ExpandableSelectorAnimator(this, animationDuration,
        expandInterpolatorId,
        collapseInterpolatorId, containerInterpolatorId);
}
// Forwards the hide_first_item_on_collapse flag (default false) to the animator.
private void InitializeHideFirstItemOnCollapse(TypedArray attributes)
{
    this._expandableSelectorAnimator.HideFirstItemOnCollapse =
        attributes.GetBoolean(Resource.Styleable.expandable_selector_hide_first_item_on_collapse, false);
}
// Swaps the view background between the remembered expanded background and a
// transparent one, but only when hide_background_if_collapsed is enabled.
private void UpdateBackground()
{
    if (this._hideBackgroundIfCollapsed)
    {
        if (this.IsExpanded)
        {
            this.Background = this._expandedBackground;
        }
        else
        {
            this.SetBackgroundResource(global::Android.Resource.Color.Transparent);
        }
    }
}
// Removes every rendered button from the view hierarchy, clears the item and
// button collections, and restores the animator to its initial state.
private void Reset()
{
    this.ExpandableItems = new List<ExpandableItem>();
    for (int i = 0; i < this._buttons.Count; i++)
    {
        this.RemoveView(this._buttons[i]);
    }
    this._buttons = new List<View>();
    this._expandableSelectorAnimator.Reset();
}
// Inflates one button per item, iterating in reverse so the item at position 0
// is added last (top-most child). Consequently _buttons[k] corresponds to
// ExpandableItems[count - 1 - k]; HookEvents and UpdateExpandableItem rely on
// this reversed mapping.
private void RenderExpandableItems()
{
    int numberOfItems = this.ExpandableItems.Count;
    for (int i = numberOfItems - 1; i >= 0; i--)
    {
        View button = this.InitializeButton(i);
        this.AddView(button);
        this._buttons.Add(button);
        this._expandableSelectorAnimator.InitializeButton(button);
        this.ConfigureButtonContent(button, this.ExpandableItems[i]);
    }
    this._expandableSelectorAnimator.Buttons = this._buttons;
}
// Wires the click handlers. The last button in _buttons is the item at position 0
// (buttons are rendered in reverse); clicking it while collapsed expands the
// selector, otherwise it raises ItemClick like any other button.
private void HookEvents()
{
    int numberOfButtons = this._buttons.Count;
    bool thereIsMoreThanOneButton = numberOfButtons > 1;
    if (thereIsMoreThanOneButton)
    {
        this._buttons[numberOfButtons - 1].Click += (s, e) =>
        {
            if (this.IsCollapsed)
            {
                this.Expand();
            }
            else
            {
                // Index 0 must be paired with ExpandableItems[0], matching the
                // buttonIndex <-> item mapping used in the loop below. The previous
                // code passed ExpandableItems[numberOfButtons - 1] here, which was
                // the wrong item for index 0.
                this.OnItemClick(0, this.ExpandableItems[0]);
            }
        };
    }
    for (int i = 0; i < numberOfButtons - 1; i++)
    {
        // Capture the loop variable per iteration so each closure sees its own value.
        int buttonPosition = i;
        this._buttons[i].Click += (s, e) =>
        {
            int buttonIndex = numberOfButtons - 1 - buttonPosition;
            this.OnItemClick(buttonIndex, this.ExpandableItems[buttonIndex]);
        };
    }
}
// Inflates the widget for the item at the given position: a Button layout when
// the item has a title, otherwise an ImageButton layout. Only the first item
// (position 0) starts visible; the rest are revealed by the expand animation.
// The previous try/catch only copied ex.Message into an unused local and
// rethrew, so it has been removed (the exception propagates identically).
private View InitializeButton(int expandableItemPosition)
{
    ExpandableItem expandableItem = this.ExpandableItems[expandableItemPosition];
    LayoutInflater layoutInflater = LayoutInflater.From(this.Context);
    View button = layoutInflater.Inflate(
        expandableItem.HasTitle()
            ? Resource.Layout.expandable_item_button
            : Resource.Layout.expandable_item_image_button,
        this, false);
    button.Visibility = expandableItemPosition == 0
        ? ViewStates.Visible
        : ViewStates.Invisible;
    return button;
}
// Applies an item's background, title and image to an inflated widget.
// Note the early returns: if the item has a title but the view is not a Button
// (or has a resource id but the view is not an ImageButton), the remaining
// configuration steps are skipped for that view.
public void ConfigureButtonContent(View view, ExpandableItem expandableItem)
{
    if (expandableItem.HasBackgroundId())
    {
        int backgroundId = expandableItem.BackgroundId;
        view.SetBackgroundResource(backgroundId);
    }
    if (expandableItem.HasTitle())
    {
        var button = view as Button;
        if (button==null)
        {
            return;
        }
        string text = expandableItem.Title;
        button.Text = text;
    }
    if (expandableItem.HasResourceId())
    {
        var imageButton = view as ImageButton;
        if (imageButton== null)
        {
            return;
        }
        int resourceId = expandableItem.ResourceId;
        imageButton.SetImageResource(resourceId);
    }
}
// Guards against a null item argument.
private void ValidateExpandableItem(ExpandableItem expandableItem)
{
    if (expandableItem != null)
    {
        return;
    }
    throw new ArgumentNullException("expandableItem",
        "You can't use a null instance of ExpandableItem as parameter.");
}
// Guards against a null item-list argument.
private void ValidateExpandableItems(List<ExpandableItem> expandableItems)
{
    if (expandableItems != null)
    {
        return;
    }
    throw new ArgumentNullException("expandableItems",
        "The List<ExpandableItem> passed as argument can't be null");
}
// Brings any children that are not item buttons (i.e. children added before
// ShowExpandableItems) to the front of the z-order.
private void BringChildsToFront(List<ExpandableItem> expandableItems)
{
    int extraChildren = this.ChildCount - expandableItems.Count;
    for (int i = 0; i < extraChildren; i++)
    {
        this.GetChildAt(i).BringToFront();
    }
}
}
// Event payload for ExpandableSelector.ItemClick: the clicked item's position
// and the item itself. Immutable after construction.
public class ExpandableItemClickEventArgs : EventArgs
{
    // Zero-based position of the clicked item.
    public int Index { get; private set; }
    // The clicked item.
    public ExpandableItem Item { get; private set; }
    public ExpandableItemClickEventArgs(int index, ExpandableItem item)
    {
        this.Index = index;
        this.Item = item;
    }
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Orleans;
using Orleans.Concurrency;
using Orleans.Configuration;
using Orleans.GrainDirectory;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.Runtime.GrainDirectory;
using Orleans.Serialization;
using Orleans.ServiceBus.Providers;
using Orleans.Streams;
using TestExtensions;
using TestGrainInterfaces;
using UnitTests.GrainInterfaces;
using UnitTests.Grains;
using Xunit;
using Xunit.Abstractions;
// ReSharper disable NotAccessedVariable
namespace UnitTests.Serialization
{
// Shared fixture that caches one SerializationTestEnvironment per
// fallback-serializer configuration, so each configuration's environment is
// reused across tests (populated lazily via GetOrAdd in InitializeSerializer).
public class BuiltInSerializerCollectionFixture
{
    public ConcurrentDictionary<BuiltInSerializerTests.SerializerToUse, SerializationTestEnvironment> Environments { get; } =
        new ConcurrentDictionary<BuiltInSerializerTests.SerializerToUse, SerializationTestEnvironment>();
}
/// <summary>
/// Test the built-in serializers
/// </summary>
[Collection(TestEnvironmentFixture.DefaultCollection), TestCategory("Serialization")]
public class BuiltInSerializerTests
{
private readonly ITestOutputHelper output;
private readonly TestEnvironmentFixture defaultFixture;
private readonly BuiltInSerializerCollectionFixture serializerFixture;

// Selects which fallback serialization provider a test should run against.
public enum SerializerToUse
{
    Default,
    BinaryFormatterFallbackSerializer,
    IlBasedFallbackSerializer,
    NoFallback
}

// MemberData source: every configuration that provides a usable fallback serializer
// (NoFallback is deliberately excluded).
public static IEnumerable<object[]> FallbackSerializers = new[]
{
    new object[] { SerializerToUse.Default },
    new object[] { SerializerToUse.BinaryFormatterFallbackSerializer },
    new object[] { SerializerToUse.IlBasedFallbackSerializer }
};
// Resolves the serialization environment for the requested configuration.
// The default configuration reuses the test-class fixture; any other
// configuration is created once and cached in the collection fixture.
private SerializationTestEnvironment InitializeSerializer(SerializerToUse serializerToUse)
{
    if (serializerToUse == SerializerToUse.Default)
    {
        return this.defaultFixture;
    }

    return this.serializerFixture.Environments.GetOrAdd(
        serializerToUse,
        key =>
        {
            Type fallbackProvider;
            switch (serializerToUse)
            {
                case SerializerToUse.IlBasedFallbackSerializer:
#pragma warning disable CS0618 // Type or member is obsolete
                    fallbackProvider = typeof(ILBasedSerializer);
#pragma warning restore CS0618 // Type or member is obsolete
                    break;
                case SerializerToUse.BinaryFormatterFallbackSerializer:
                    fallbackProvider = typeof(BinaryFormatterSerializer);
                    break;
                case SerializerToUse.NoFallback:
                    fallbackProvider = typeof(SupportsNothingSerializer);
                    break;
                default:
                    throw new InvalidOperationException("Invalid Serializer was selected");
            }

            return SerializationTestEnvironment.InitializeWithDefaults(
                builder => builder.Configure<SerializationProviderOptions>(
                    options => options.FallbackSerializationProvider = fallbackProvider));
        });
}
// xUnit injects the output helper plus the class- and collection-level fixtures.
public BuiltInSerializerTests(ITestOutputHelper output, TestEnvironmentFixture fixture, BuiltInSerializerCollectionFixture serializerFixture)
{
    this.output = output;
    this.defaultFixture = fixture;
    this.serializerFixture = serializerFixture;
}
// With no fallback serializer available, every internal/runtime type listed here
// must have a generated (non-fallback) serializer registered.
[Fact, TestCategory("BVT"), TestCategory("CodeGen")]
public void InternalSerializableTypesHaveSerializers()
{
    var environment = InitializeSerializer(SerializerToUse.NoFallback);
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(int)),
        $"Should be able to serialize internal type {nameof(Int32)}.");
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(List<int>)),
        $"Should be able to serialize internal type {nameof(List<int>)}.");
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(AddressesAndTag)),
        $"Should be able to serialize internal type {nameof(AddressesAndTag)}.");
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(ActivationInfo)),
        $"Should be able to serialize internal type {nameof(ActivationInfo)}.");
    // This generated grain-reference type is internal, so it is looked up by name.
    var grainReferenceType = typeof(RuntimeVersion).Assembly.GetType(
        "Orleans.OrleansCodeGenRemindableReference",
        throwOnError: true);
    Assert.True(
        environment.SerializationManager.HasSerializer(grainReferenceType),
        $"Should be able to serialize grain reference type {grainReferenceType}.");
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(PubSubGrainState)),
        $"Should be able to serialize internal type {nameof(PubSubGrainState)}.");
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(EventHubBatchContainer)),
        $"Should be able to serialize internal type {nameof(EventHubBatchContainer)}.");
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(EventHubSequenceTokenV2)),
        $"Should be able to serialize internal type {nameof(EventHubSequenceTokenV2)}.");
}
// Skipped pending the linked issue: ValueTuple types should also get generated
// serializers when no fallback is available.
[Fact(Skip = "See https://github.com/dotnet/orleans/issues/3531"), TestCategory("BVT"), TestCategory("CodeGen")]
public void ValueTupleTypesHasSerializer()
{
    var environment = InitializeSerializer(SerializerToUse.NoFallback);
    Assert.True(
        environment.SerializationManager.HasSerializer(typeof(ValueTuple<int, AddressAndTag>)),
        $"Should be able to serialize internal type {nameof(ValueTuple<int, AddressAndTag>)}.");
}
/// <summary>
/// Round-trips an instance of a non-public concrete class (obtained via
/// OuterClass.GetPrivateClassInstance) that forms a self-referencing object
/// graph, through each fallback serializer; serialized fields must survive
/// while NonSerializedInt resets to its default.
/// </summary>
[Theory, TestCategory("BVT")]
[MemberData(nameof(FallbackSerializers))]
public void Serialize_ComplexClass(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var expected = OuterClass.GetPrivateClassInstance();
    expected.Int = 89;
    expected.String = Guid.NewGuid().ToString();
    expected.NonSerializedInt = 39;
    // Self-referencing graph: the instance appears inside its own Classes array
    // and inside the nested object's Interfaces list.
    expected.Classes = new SomeAbstractClass[]
    {
        expected,
        new AnotherConcreteClass
        {
            AnotherString = "hi",
            Interfaces = new List<ISomeInterface> { expected }
        }
    };
    expected.Enum = SomeAbstractClass.SomeEnum.Something;
    expected.SetObsoleteInt(38);
    var actual = (SomeAbstractClass)OrleansSerializationLoop(environment.SerializationManager, expected);
    Assert.Equal(expected.Int, actual.Int);
    Assert.Equal(expected.Enum, actual.Enum);
    Assert.Equal(expected.String, ((OuterClass.SomeConcreteClass)actual).String);
    Assert.Equal(expected.Classes.Length, actual.Classes.Length);
    Assert.Equal(expected.String, ((OuterClass.SomeConcreteClass)actual.Classes[0]).String);
    Assert.Equal(expected.Classes[1].Interfaces[0].Int, actual.Classes[1].Interfaces[0].Int);
    // Non-serialized state must come back as its default value.
    Assert.Equal(0, actual.NonSerializedInt);
    Assert.Equal(expected.GetObsoleteInt(), actual.GetObsoleteInt());
    Assert.Null(actual.SomeGrainReference);
}
/// <summary>
/// Tests that the default (non-fallback) serializer can handle complex classes:
/// a publicly accessible class with a self-referencing graph, shared object
/// identity, enums, and non-serialized / obsolete members.
/// </summary>
/// <param name="serializerToUse"></param>
[Theory, TestCategory("BVT")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ComplexAccessibleClass(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var expected = new AnotherConcreteClass
    {
        Int = 89,
        AnotherString = Guid.NewGuid().ToString(),
        NonSerializedInt = 39,
        Enum = SomeAbstractClass.SomeEnum.Something,
    };
    // The instance appears in its own Classes array, and Interfaces[0] aliases
    // Classes[1] — identity must be preserved through the round trip.
    expected.Classes = new SomeAbstractClass[]
    {
        expected,
        new AnotherConcreteClass
        {
            AnotherString = "hi",
            Interfaces = new List<ISomeInterface> { expected }
        }
    };
    expected.Interfaces = new List<ISomeInterface>
    {
        expected.Classes[1]
    };
    expected.SetObsoleteInt(38);
    var actual = (AnotherConcreteClass)OrleansSerializationLoop(environment.SerializationManager, expected);
    Assert.Equal(expected.Int, actual.Int);
    Assert.Equal(expected.Enum, actual.Enum);
    Assert.Equal(expected.AnotherString, actual.AnotherString);
    Assert.Equal(expected.Classes.Length, actual.Classes.Length);
    Assert.Equal(expected.Classes[1].Interfaces[0].Int, actual.Classes[1].Interfaces[0].Int);
    Assert.Equal(expected.Interfaces[0].Int, actual.Interfaces[0].Int);
    // Shared reference identity survives: both paths lead to the same object.
    Assert.Equal(actual.Interfaces[0], actual.Classes[1]);
    Assert.NotEqual(expected.NonSerializedInt, actual.NonSerializedInt);
    Assert.Equal(0, actual.NonSerializedInt);
    Assert.Equal(expected.GetObsoleteInt(), actual.GetObsoleteInt());
    Assert.Null(actual.SomeGrainReference);
}
[Theory, TestCategory("BVT")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_Type(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);

    // A plain Type instance must survive the round trip.
    var typeOfInt = typeof(int);
    var roundTrippedType = (Type)OrleansSerializationLoop(environment.SerializationManager, typeOfInt);
    Assert.Equal(typeOfInt.AssemblyQualifiedName, roundTrippedType.AssemblyQualifiedName);

    // GetType() on a value yields the runtime Type implementation, which must also survive.
    var runtimeType = 8.GetType();
    var roundTrippedRuntimeType = (Type)OrleansSerializationLoop(environment.SerializationManager, runtimeType);
    Assert.Equal(runtimeType.AssemblyQualifiedName, roundTrippedRuntimeType.AssemblyQualifiedName);
}
// Round-trips a struct whose state includes a readonly field plus members with
// private getters/setters, verifying each survives (or resets, for the grain
// reference) after serialization.
[Theory, TestCategory("BVT")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ComplexStruct(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    // Test struct serialization.
    var expected = new SomeStruct(10) { Id = Guid.NewGuid(), PublicValue = 6, ValueWithPrivateGetter = 7 };
    expected.SetValueWithPrivateSetter(8);
    expected.SetPrivateValue(9);
    var actual = (SomeStruct)OrleansSerializationLoop(environment.SerializationManager, expected);
    Assert.Equal(expected.Id, actual.Id);
    Assert.Equal(expected.ReadonlyField, actual.ReadonlyField);
    Assert.Equal(expected.PublicValue, actual.PublicValue);
    Assert.Equal(expected.ValueWithPrivateSetter, actual.ValueWithPrivateSetter);
    Assert.Null(actual.SomeGrainReference);
    Assert.Equal(expected.GetPrivateValue(), actual.GetPrivateValue());
    Assert.Equal(expected.GetValueWithPrivateGetter(), actual.GetValueWithPrivateGetter());
}
// Round-trips an ActivationAddress with null Activation/Silo through both the
// copy-only pass and the full serialization loop. The trailing comments on the
// asserts are failure descriptions, not statements of expected behaviour.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ActivationAddress(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var grain = GrainId.NewId();
    var addr = ActivationAddress.GetAddress(null, grain, null);
    // Copy-only pass (third argument false).
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, addr, false);
    Assert.IsAssignableFrom<ActivationAddress>(deserialized);
    Assert.Null(((ActivationAddress)deserialized).Activation); //Activation no longer null after copy
    Assert.Null(((ActivationAddress)deserialized).Silo); //Silo no longer null after copy
    Assert.Equal(grain, ((ActivationAddress)deserialized).Grain); //Grain different after copy
    deserialized = OrleansSerializationLoop(environment.SerializationManager, addr);
    Assert.IsAssignableFrom<ActivationAddress>(deserialized); //ActivationAddress full serialization loop as wrong type
    Assert.Null(((ActivationAddress)deserialized).Activation); //Activation no longer null after full serialization loop
    Assert.Null(((ActivationAddress)deserialized).Silo); //Silo no longer null after full serialization loop
    Assert.Equal(grain, ((ActivationAddress)deserialized).Grain); //Grain different after copy
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_EmptyList(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var list = new List<int>();

    // Copy-only pass: Assert.IsAssignableFrom returns the value already cast.
    var copied = OrleansSerializationLoop(environment.SerializationManager, list, false);
    ValidateList(list, Assert.IsAssignableFrom<List<int>>(copied), "int (empty, copy)");

    // Full serialize/deserialize round trip.
    var roundTripped = OrleansSerializationLoop(environment.SerializationManager, list);
    ValidateList(list, Assert.IsAssignableFrom<List<int>>(roundTripped), "int (empty)");
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_BasicDictionaries(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);

    var stringDictionary = new Dictionary<string, string>
    {
        ["Hello"] = "Yes",
        ["Goodbye"] = "No"
    };
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, stringDictionary);
    ValidateDictionary<string, string>(stringDictionary, deserialized, "string/string");

    var dateDictionary = new Dictionary<int, DateTime>
    {
        [3] = DateTime.Now,
        [27] = DateTime.Now.AddHours(2)
    };
    deserialized = OrleansSerializationLoop(environment.SerializationManager, dateDictionary);
    ValidateDictionary<int, DateTime>(dateDictionary, deserialized, "int/date");
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ReadOnlyDictionary(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);

    var stringBacking = new Dictionary<string, string>
    {
        ["Hello"] = "Yes",
        ["Goodbye"] = "No"
    };
    var readOnlySource1 = new ReadOnlyDictionary<string, string>(stringBacking);
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, readOnlySource1);
    ValidateReadOnlyDictionary(readOnlySource1, deserialized, "string/string");

    var dateBacking = new Dictionary<int, DateTime>
    {
        [3] = DateTime.Now,
        [27] = DateTime.Now.AddHours(2)
    };
    var readOnlySource2 = new ReadOnlyDictionary<int, DateTime>(dateBacking);
    deserialized = OrleansSerializationLoop(environment.SerializationManager, readOnlySource2);
    ValidateReadOnlyDictionary(readOnlySource2, deserialized, "int/date");
}
// Verifies that custom equality comparers attached to dictionaries survive the
// round trip: lookups through the comparer (case-insensitive key, mod-5 key)
// must still succeed on the deserialized instance.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_DictionaryWithComparer(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    Dictionary<string, string> source1 = new Dictionary<string, string>(new CaseInsensitiveStringEquality());
    source1["Hello"] = "Yes";
    source1["Goodbye"] = "No";
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    ValidateDictionary<string, string>(source1, deserialized, "case-insensitive string/string");
    Dictionary<string, string> result1 = deserialized as Dictionary<string, string>;
    // "hElLo" only matches if the case-insensitive comparer survived.
    Assert.Equal(source1["Hello"], result1["hElLo"]); //Round trip for case insensitive string/string dictionary lost the custom comparer
    Dictionary<int, DateTime> source2 = new Dictionary<int, DateTime>(new Mod5IntegerComparer());
    source2[3] = DateTime.Now;
    source2[27] = DateTime.Now.AddHours(2);
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source2);
    ValidateDictionary<int, DateTime>(source2, deserialized, "int/date");
    Dictionary<int, DateTime> result2 = (Dictionary<int, DateTime>)deserialized;
    // Key 13 only matches key 3 if the mod-5 comparer survived.
    Assert.Equal<DateTime>(source2[3], result2[13]); //Round trip for case insensitive int/DateTime dictionary lost the custom comparer"
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_SortedDictionaryWithComparer(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new SortedDictionary<string, string>(new CaseInsensitiveStringComparer())
    {
        ["Hello"] = "Yes",
        ["Goodbye"] = "No"
    };
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    ValidateSortedDictionary<string, string>(source1, deserialized, "string/string");
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_SortedListWithComparer(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new SortedList<string, string>(new CaseInsensitiveStringComparer())
    {
        ["Hello"] = "Yes",
        ["Goodbye"] = "No"
    };
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    ValidateSortedList<string, string>(source1, deserialized, "string/string");
}
// Verifies a HashSet round-trips with its custom equality comparer: membership
// checks must succeed for the original keys AND for "One", which only matches
// "one" through the case-insensitive comparer.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_HashSetWithComparer(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new HashSet<string>(new CaseInsensitiveStringEquality());
    source1.Add("one");
    source1.Add("two");
    source1.Add("three");
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    Assert.IsAssignableFrom(source1.GetType(), deserialized); //Type is wrong after round-trip of string hash set with comparer
    var result = deserialized as HashSet<string>;
    Assert.Equal(source1.Count, result.Count); //Count is wrong after round-trip of string hash set with comparer
#pragma warning disable xUnit2017 // Do not use Contains() to check if a value exists in a collection
    foreach (var key in source1)
    {
        Assert.True(result.Contains(key)); //key is missing after round-trip of string hash set with comparer
    }
    Assert.True(result.Contains("One")); //Comparer is wrong after round-trip of string hash set with comparer
#pragma warning restore xUnit2017 // Do not use Contains() to check if a value exists in a collection
}
// Round-trips a Stack<string> and walks both enumerators in lockstep so that
// element order (LIFO: three, two, one) is verified, not just membership.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_Stack(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new Stack<string>();
    source1.Push("one");
    source1.Push("two");
    source1.Push("three");
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    Assert.IsAssignableFrom(source1.GetType(), deserialized); //Type is wrong after round-trip of string stack
    var result = deserialized as Stack<string>;
    Assert.Equal(source1.Count, result.Count); //Count is wrong after round-trip of string stack
    var srcIter = source1.GetEnumerator();
    var resIter = result.GetEnumerator();
    while (srcIter.MoveNext() && resIter.MoveNext())
    {
        Assert.Equal(srcIter.Current, resIter.Current); //Data is wrong after round-trip of string stack
    }
}
/// <summary>
/// Tests that the <see cref="IOnDeserialized"/> callback is invoked after deserialization:
/// the deserialized copy must have a populated Context (set by the hook) while the
/// original instance's Context stays null.
/// </summary>
/// <param name="serializerToUse"></param>
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_TypeWithOnDeserializedHook(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var input = new TypeWithOnDeserializedHook
    {
        Int = 5
    };
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, input);
    var result = Assert.IsType<TypeWithOnDeserializedHook>(deserialized);
    Assert.Equal(input.Int, result.Int);
    // Only the deserialized copy should have had the hook run.
    Assert.Null(input.Context);
    Assert.NotNull(result.Context);
    Assert.Equal(environment.SerializationManager, result.Context.GetSerializationManager());
}
// SortedSet counterpart of Serialize_HashSetWithComparer: membership must hold
// for the original keys and for "One", which only matches via the
// case-insensitive comparer.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_SortedSetWithComparer(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new SortedSet<string>(new CaseInsensitiveStringComparer());
    source1.Add("one");
    source1.Add("two");
    source1.Add("three");
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    Assert.IsAssignableFrom(source1.GetType(), deserialized); //Type is wrong after round-trip of string sorted set with comparer
    var result = (SortedSet<string>)deserialized;
    Assert.Equal(source1.Count, result.Count); //Count is wrong after round-trip of string sorted set with comparer
#pragma warning disable xUnit2017 // Do not use Contains() to check if a value exists in a collection
    foreach (var key in source1)
    {
        Assert.True(result.Contains(key)); //key is missing after round-trip of string sorted set with comparer
    }
    Assert.True(result.Contains("One")); //Comparer is wrong after round-trip of string sorted set with comparer
#pragma warning restore xUnit2017 // Do not use Contains() to check if a value exists in a collection
}
// Round-trips arrays of several element types, including one sized past the
// serializer's large-object threshold to exercise the large-array path.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_Array(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new int[] { 1, 3, 5 };
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    ValidateArray<int>(source1, deserialized, "int");
    var source2 = new string[] { "hello", "goodbye", "yes", "no", "", "I don't know" };
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source2);
    ValidateArray<string>(source2, deserialized, "string");
    var source3 = new sbyte[] { 1, 3, 5 };
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source3);
    ValidateArray<sbyte>(source3, deserialized, "sbyte");
    var source4 = new byte[] { 1, 3, 5 };
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source4);
    ValidateArray<byte>(source4, deserialized, "byte");
    // One element past the large-object threshold (in ints).
    var source5 = Enumerable.Repeat(3, (environment.SerializationManager.LargeObjectSizeThreshold / sizeof(int)) + 1).ToArray();
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source5);
    ValidateArray<int>(source5, deserialized, "int");
}
// Round-trips jagged arrays of ints, strings, hash sets (including a null inner
// element) and grain references (including a 32x64 batch), verifying shapes,
// counts and null placement survive.
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ArrayOfArrays(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);
    var source1 = new[] { new[] { 1, 3, 5 }, new[] { 10, 20, 30 }, new[] { 17, 13, 11, 7, 5, 3, 2 } };
    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source1);
    ValidateArrayOfArrays(source1, deserialized, "int");
    var source2 = new[] { new[] { "hello", "goodbye", "yes", "no", "", "I don't know" }, new[] { "yes" } };
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source2);
    ValidateArrayOfArrays(source2, deserialized, "string");
    // Jagged array of hash sets with one deliberately-null inner element (1,1).
    var source3 = new HashSet<string>[3][];
    source3[0] = new HashSet<string>[2];
    source3[1] = new HashSet<string>[3];
    source3[2] = new HashSet<string>[1];
    source3[0][0] = new HashSet<string>();
    source3[0][1] = new HashSet<string>();
    source3[1][0] = new HashSet<string>();
    source3[1][1] = null;
    source3[1][2] = new HashSet<string>();
    source3[2][0] = new HashSet<string>();
    source3[0][0].Add("this");
    source3[0][0].Add("that");
    source3[1][0].Add("the other");
    source3[1][2].Add("and another");
    source3[2][0].Add("but not yet another");
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source3);
    var result = Assert.IsAssignableFrom<HashSet<string>[][]>(deserialized); //Array of arrays of hash sets type is wrong on deserialization
    Assert.Equal(3, result.Length); //Outer array size wrong on array of array of sets
    Assert.Equal(2, result[0][0].Count); //Inner set size wrong on array of array of sets, element 0,0
    Assert.Empty(result[0][1]); //Inner set size wrong on array of array of sets, element 0,1
    Assert.Single(result[1][0]); //Inner set size wrong on array of array of sets, element 1,0
    Assert.Null(result[1][1]); //Inner set not null on array of array of sets, element 1, 1
    Assert.Single(result[1][2]); //Inner set size wrong on array of array of sets, element 1,2
    Assert.Single(result[2][0]); //Inner set size wrong on array of array of sets, element 2,0
    // Jagged array of grain references.
    var source4 = new GrainReference[3][];
    source4[0] = new GrainReference[2];
    source4[1] = new GrainReference[3];
    source4[2] = new GrainReference[1];
    source4[0][0] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
    source4[0][1] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
    source4[1][0] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
    source4[1][1] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
    source4[1][2] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
    source4[2][0] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source4);
    ValidateArrayOfArrays(source4, deserialized, "grain reference");
    // Larger batch of grain references (32 x 64).
    var source5 = new GrainReference[32][];
    for (int i = 0; i < source5.Length; i++)
    {
        source5[i] = new GrainReference[64];
        for (int j = 0; j < source5[i].Length; j++)
        {
            source5[i][j] = environment.InternalGrainFactory.GetGrain(GrainId.NewId());
        }
    }
    deserialized = OrleansSerializationLoop(environment.SerializationManager, source5);
    ValidateArrayOfArrays(source5, deserialized, "grain reference (large)");
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ArrayOfArrayOfArrays(SerializerToUse serializerToUse)
{
    var environment = InitializeSerializer(serializerToUse);

    // Three jagged int[][] of differing shapes, nested into an int[][][].
    var first = new[] { new[] { 1, 3, 5 }, new[] { 10, 20, 30 }, new[] { 17, 13, 11, 7, 5, 3, 2 } };
    var second = new[] { new[] { 1, 3 }, new[] { 10, 20 }, new[] { 17, 13, 11, 7, 5 } };
    var third = new[] { new[] { 1, 3, 5 }, new[] { 10, 20, 30 } };
    var source = new[] { first, second, third };

    object deserialized = OrleansSerializationLoop(environment.SerializationManager, source);
    ValidateArrayOfArrayOfArrays(source, deserialized, "int");
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ReadOnlyCollection(SerializerToUse serializerToUse)
{
    // A ReadOnlyCollection<string> must survive the serialization loop with
    // type and contents intact.
    var environment = InitializeSerializer(serializerToUse);
    var backingList = new List<string> { "Yes", "No" };
    var readOnly = new ReadOnlyCollection<string>(backingList);
    var roundTripped = OrleansSerializationLoop(environment.SerializationManager, readOnly);
    ValidateReadOnlyCollectionList(readOnly, roundTripped, "string/string");
}
[Fact, TestCategory("Functional")]
public void Serialize_UnserializableException()
{
    // With every IKeyedSerializer removed, UnserializableException cannot round-trip
    // as-is; the serializer is expected to substitute a plain Exception that carries
    // a descriptive message naming the original type.
    var environment = SerializationTestEnvironment.InitializeWithDefaults(
        builder => builder.ConfigureServices(
            services => services.RemoveAll(typeof(IKeyedSerializer))));
    const string message = "This is a test message";
    // Throw for real so the stack trace is populated before serialization.
    var source = (Exception)Assert.Throws<UnserializableException>((Action)(() => { throw new UnserializableException(message); }));
    var roundTripped = OrleansSerializationLoop(environment.SerializationManager, source);
    var result = Assert.IsAssignableFrom<Exception>(roundTripped);
    var expectedMessage = "Non-serializable exception of type " +
        typeof(UnserializableException).OrleansTypeName() + ": " + message;
    Assert.Contains(expectedMessage, result.Message);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.IlBasedFallbackSerializer)]
public void Serialize_UnserializableException_IlFallback(SerializerToUse serializerToUse)
{
    // Unlike the no-fallback case, the IL-based fallback serializer should
    // round-trip the exception with its original type preserved.
    var environment = InitializeSerializer(serializerToUse);
    const string Message = "This is a test message";
    // Throw for real so the stack trace is populated before serialization.
    var thrown = Assert.Throws<UnserializableException>((Action)(() => { throw new UnserializableException(Message); }));
    var roundTripped = OrleansSerializationLoop(environment.SerializationManager, thrown);
    var result = Assert.IsAssignableFrom<UnserializableException>(roundTripped);
    Assert.Contains(Message, result.Message);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ObjectIdentity(SerializerToUse serializerToUse)
{
    // Two dictionary entries referencing the SAME list must still share one instance
    // after the round trip, while a structurally-equal but distinct list must stay distinct.
    var environment = InitializeSerializer(serializerToUse);
    var shared = new List<string> { "first", "second" };
    var distinct = new List<string> { "first", "second" };
    var source = new Dictionary<string, List<string>>
    {
        ["one"] = shared,
        ["two"] = shared,
        ["three"] = distinct
    };
    Assert.Same(source["one"], source["two"]); // sanity: identity holds before the round trip
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, source);
    var result = Assert.IsAssignableFrom<Dictionary<string, List<string>>>(deserialized);
    Assert.Equal(source.Count, result.Count);
    Assert.True(result.TryGetValue("one", out var list1));
    Assert.True(result.TryGetValue("two", out var list2));
    Assert.True(result.TryGetValue("three", out var list3));
    ValidateList<string>(shared, list1, "string");
    ValidateList<string>(shared, list2, "string");
    ValidateList<string>(distinct, list3, "string");
    Assert.Same(list1, list2);    // identity preserved across serialization
    Assert.NotSame(list2, list3); // no spurious identity gained
    Assert.NotSame(list1, list3);
}
[Theory, TestCategory("Functional")]
[MemberData(nameof(FallbackSerializers))]
public void Serialize_Unrecognized(SerializerToUse serializerToUse)
{
    // A type with no registered serializer must still deep-copy (and, for the array
    // case, wire-serialize) correctly via the fallback serializer.
    var environment = InitializeSerializer(serializerToUse);
    var single = new Unrecognized { A = 3, B = 27 };
    var copied = OrleansSerializationLoop(environment.SerializationManager, single, false);
    var singleResult = Assert.IsAssignableFrom<Unrecognized>(copied);
    Assert.Equal(3, singleResult.A);
    Assert.Equal(27, singleResult.B);
    var array = new Unrecognized[3];
    for (var i = 0; i < array.Length; i++)
    {
        array[i] = new Unrecognized { A = i, B = 2 * i };
    }
    var roundTripped = OrleansSerializationLoop(environment.SerializationManager, array);
    var arrayResult = Assert.IsAssignableFrom<Unrecognized[]>(roundTripped);
    Assert.Equal(3, arrayResult.Length);
    for (var j = 0; j < 3; j++)
    {
        Assert.Equal(j, arrayResult[j].A);
        Assert.Equal(2 * j, arrayResult[j].B);
    }
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_Immutable(SerializerToUse serializerToUse)
{
    // DeepCopy of [Immutable]/Immutable<> data must return the same reference,
    // not a duplicate: immutability makes pointer-copying safe and cheap.
    var environment = InitializeSerializer(serializerToUse);

    var immutableInstance = new ImmutableType(3, 27);
    var copy = environment.SerializationManager.DeepCopy(immutableInstance);
    Assert.IsAssignableFrom<ImmutableType>(copy);
    Assert.Same(immutableInstance, copy);

    var payload = new List<int>();
    for (var i = 0; i < 3; i++)
    {
        payload.Add(i);
    }
    var wrapped = new Immutable<List<int>>(payload);
    copy = environment.SerializationManager.DeepCopy(wrapped);
    Assert.IsAssignableFrom<Immutable<List<int>>>(copy);
    Assert.Same(wrapped.Value, ((Immutable<List<int>>)copy).Value);

    var embedded = new EmbeddedImmutable("test", 1, 2, 3, 4);
    copy = environment.SerializationManager.DeepCopy(embedded);
    Assert.IsAssignableFrom<EmbeddedImmutable>(copy);
    Assert.Same(embedded.B.Value, ((EmbeddedImmutable)copy).B.Value);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_Uri_Multithreaded(SerializerToUse serializerToUse)
{
    // Uri is immutable, so DeepCopy must return the same instance even under
    // concurrent use; the wire round trip must preserve value equality.
    var environment = InitializeSerializer(serializerToUse);
    Parallel.For(0, 50, i =>
    {
        var uri = new Uri("http://www.microsoft.com/" + i);
        var copy = environment.SerializationManager.DeepCopy(uri);
        Assert.IsAssignableFrom<Uri>(copy);
        Assert.Same(uri, copy);
        var roundTripped = OrleansSerializationLoop(environment.SerializationManager, uri);
        var result = Assert.IsAssignableFrom<Uri>(roundTripped);
        Assert.Equal(uri, result);
    });
}
////[Fact, TestCategory("Functional")]
//public void Serialize_RequestInvocationHistory()
//{
// //Message inMsg = new Message();
// //inMsg.TargetGrain = GrainId.NewId();
// //inMsg.TargetActivation = ActivationId.NewId();
// //inMsg.InterfaceId = 12;
// //inMsg.MethodId = 13;
// //RequestInvocationHistory src = new RequestInvocationHistory(inMsg);
// //inMsg.AddToCallChainHeader(src);
// ////object deserialized = OrleansSerializationLoop(environment.SerializationManager, inMsg);
// ////Message outMsg = (Message)deserialized;
// ////IEnumerable<RequestInvocationHistory> dstArray = outMsg.CallChainHeader;
// ////RequestInvocationHistory dst = dstArray.FirstOrDefault();
// ////object deserialized = OrleansSerializationLoop(environment.SerializationManager, src);
// ////RequestInvocationHistory dst = (RequestInvocationHistory)deserialized;
// //Dictionary<string, object> deserialized = SerializeMessage(inMsg);
// //IEnumerable<RequestInvocationHistory> dstArray = ((IEnumerable)deserialized[Message.Header.CallChainHeader]).Cast<RequestInvocationHistory>();
// //RequestInvocationHistory dst = dstArray.FirstOrDefault();
// //Assert.Equal(src.GrainId, dst.GrainId);
// //Assert.Equal(src.ActivationId, dst.ActivationId);
// //Assert.Equal(src.InterfaceId, dst.InterfaceId);
// //Assert.Equal(src.MethodId, dst.MethodId);
//}
//private Dictionary<string, object> SerializeMessage(Message msg)
//{
// var outStream = new BinaryTokenStreamWriter();
// SerializationManager.SerializeMessageHeaders(msg.headers, outStream);
// var inStream = new BinaryTokenStreamReader(outStream.ToByteArray());
// var copy = SerializationManager.DeserializeMessageHeaders(inStream);
// return copy;
//}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_GrainReference(SerializerToUse serializerToUse)
{
    // A grain reference must round-trip with its GrainId (and primary key) intact
    // and compare equal to the original.
    var environment = InitializeSerializer(serializerToUse);
    var grainId = GrainId.NewId();
    GrainReference input = environment.InternalGrainFactory.GetGrain(grainId);
    var deserialized = OrleansSerializationLoop(environment.SerializationManager, input);
    var grainRef = Assert.IsAssignableFrom<GrainReference>(deserialized);
    Assert.Equal(grainId, grainRef.GrainId);
    Assert.Equal(grainId.GetPrimaryKey(), grainRef.GrainId.GetPrimaryKey());
    Assert.Equal(input, grainRef);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_GrainReference_ViaStandardSerializer(SerializerToUse serializerToUse)
{
    // The BinaryFormatter round trip must preserve the reference's bound state,
    // its id/primary key, and equality with the original.
    var environment = InitializeSerializer(serializerToUse);
    var grainId = GrainId.NewId();
    GrainReference input = environment.InternalGrainFactory.GetGrain(grainId);
    Assert.True(input.IsBound);
    var deserialized = DotNetSerializationLoop(input, environment.SerializationManager);
    var grainRef = Assert.IsAssignableFrom<GrainReference>(deserialized);
    Assert.True(grainRef.IsBound);
    Assert.Equal(grainId, grainRef.GrainId);
    Assert.Equal(grainId.GetPrimaryKey(), grainRef.GrainId.GetPrimaryKey());
    Assert.Equal(input, grainRef);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_GrainBase_ViaStandardSerializer(SerializerToUse serializerToUse)
{
    // Grain classes are not marked [Serializable], so BinaryFormatter must reject
    // them with a SerializationException naming the offending type.
    var environment = InitializeSerializer(serializerToUse);
    Grain input = new EchoTaskGrain(null, null);
    var exc = Assert.Throws<SerializationException>(
        () => DotNetSerializationLoop(input, environment.SerializationManager));
    Assert.Contains("is not marked as serializable", exc.Message);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_ValidateBuildSegmentListWithLengthLimit(SerializerToUse serializerToUse)
{
    // Slices a 10-byte logical buffer (1 + 2 + 3 + 4 bytes across four arrays) into
    // 2-byte windows and checks each produced segment's array/offset/count.
    var environment = InitializeSerializer(serializerToUse);
    byte[] array1 = { 1 };
    byte[] array2 = { 2, 3 };
    byte[] array3 = { 4, 5, 6 };
    byte[] array4 = { 7, 8, 9, 10 };
    var underTest = new List<ArraySegment<byte>>
    {
        new ArraySegment<byte>(array1),
        new ArraySegment<byte>(array2),
        new ArraySegment<byte>(array3),
        new ArraySegment<byte>(array4),
    };
    // Asserts that `segment` views `array` starting at `offset` for `count` bytes.
    void AssertSegment(ArraySegment<byte> segment, byte[] array, int offset, int count)
    {
        Assert.Equal(offset, segment.Offset);
        Assert.Equal(count, segment.Count);
        Assert.Equal(array, segment.Array);
    }
    var actual1 = ByteArrayBuilder.BuildSegmentListWithLengthLimit(underTest, 0, 2);
    var actual2 = ByteArrayBuilder.BuildSegmentListWithLengthLimit(underTest, 2, 2);
    var actual3 = ByteArrayBuilder.BuildSegmentListWithLengthLimit(underTest, 4, 2);
    var actual4 = ByteArrayBuilder.BuildSegmentListWithLengthLimit(underTest, 6, 2);
    var actual5 = ByteArrayBuilder.BuildSegmentListWithLengthLimit(underTest, 8, 2);
    // 1: {[1}, {2], 3}
    AssertSegment(actual1[0], array1, 0, 1);
    AssertSegment(actual1[1], array2, 0, 1);
    // 2: {2, [3}, {4], 5, 6}
    AssertSegment(actual2[0], array2, 1, 1);
    AssertSegment(actual2[1], array3, 0, 1);
    // 3: {4, [5, 6]}
    AssertSegment(actual3[0], array3, 1, 2);
    // 4: {[7, 8], 9, 10}
    AssertSegment(actual4[0], array4, 0, 2);
    // 5: {7, 8, [9, 10]}
    AssertSegment(actual5[0], array4, 2, 2);
}
// Returns true when the two byte arrays have identical length and contents.
// NOTE(review): assumes both arguments are non-null; a null input throws NRE.
internal bool AreByteArraysAreEqual(byte[] array1, byte[] array2)
{
    if (array1.Length != array2.Length)
    {
        return false;
    }
    var index = 0;
    while (index < array1.Length)
    {
        if (array1[index] != array2[index])
        {
            return false;
        }
        index++;
    }
    return true;
}
// Types holding reference-typed or otherwise deep state must NOT be classified
// as shallow-copyable (a shallow copy would share mutable state).
[Theory, TestCategory("Functional")]
[InlineData(typeof(Dictionary<string, object>))]
[InlineData(typeof(Dictionary<string, int>))]
[InlineData(typeof(NonShallowCopyableValueType))]
[InlineData(typeof(NonShallowCopyableValueType?))]
[InlineData(typeof(Tuple<string, NonShallowCopyableValueType>))]
public void SerializationTests_IsNotOrleansShallowCopyable(Type type)
{
    Assert.False(type.IsOrleansShallowCopyable());
}
// Primitives, immutables, and compositions of shallow-copyable members (including
// nullables and tuples of them) must be classified as shallow-copyable.
[Theory, TestCategory("Functional")]
[InlineData(typeof(int))]
[InlineData(typeof(DateTime))]
[InlineData(typeof(Immutable<Dictionary<string, object>>))]
[InlineData(typeof(ShallowCopyableValueType))]
[InlineData(typeof(ArgumentNullException))]
[InlineData(typeof(int?))]
[InlineData(typeof(Tuple<string, int>))]
[InlineData(typeof(Tuple<Guid?, Tuple<string, ShallowCopyableValueType?, DateTimeOffset>>))]
public void SerializationTests_IsOrleansShallowCopyable(Type type)
{
    Assert.True(type.IsOrleansShallowCopyable());
}
// Value type whose only field is itself a value type; used as a positive probe
// for IsOrleansShallowCopyable.
public struct ShallowCopyableValueType
{
    public int AnotherValueType;
}
// Value type carrying an object reference, which makes a shallow copy unsafe;
// used as a negative probe for IsOrleansShallowCopyable.
public struct NonShallowCopyableValueType
{
    // Reference-typed auto-property: would be shared state after a shallow copy.
    public object AutoProp { get; }
    public NonShallowCopyableValueType(object o) => AutoProp = o;
}
/// <summary>
/// Deep-copies <paramref name="input"/> and, unless <paramref name="includeWire"/>
/// is false, also pushes the copy through a full serialize/deserialize cycle.
/// </summary>
internal static object OrleansSerializationLoop(SerializationManager serializationManager, object input, bool includeWire = true)
{
    var copy = serializationManager.DeepCopy(input);
    return includeWire
        ? serializationManager.RoundTripSerializationForTesting(copy)
        : copy;
}
/// <summary>
/// Round-trips <paramref name="input"/> through BinaryFormatter, carrying the Orleans
/// SerializationContext in the streaming context so Orleans surrogates can participate.
/// (BinaryFormatter is acceptable here only because this is test code.)
/// </summary>
internal static object DotNetSerializationLoop(object input, SerializationManager serializationManager)
{
    var formatter = new BinaryFormatter
    {
        Context = new StreamingContext(StreamingContextStates.All, new SerializationContext(serializationManager))
    };
    byte[] payload;
    using (var outStream = new MemoryStream())
    {
        formatter.Serialize(outStream, input);
        outStream.Flush();
        payload = outStream.ToArray();
    }
    using (var inStream = new MemoryStream(payload))
    {
        return formatter.Deserialize(inStream);
    }
}
// Asserts `deserialized` is a Dictionary<K, V> and that its contents match `source`.
private void ValidateDictionary<K, V>(Dictionary<K, V> source, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<Dictionary<K, V>>(deserialized); //Type is wrong after round-trip of dict
    ValidateDictionaryContent(source, result, type);
}
// Asserts equal counts and that every source key maps to an equal value in the result.
private void ValidateDictionaryContent<K, V>(IDictionary<K, V> source, IDictionary<K, V> result, string type)
{
    Assert.Equal(source.Count, result.Count);
    foreach (var kvp in source)
    {
        Assert.True(result.ContainsKey(kvp.Key), "Key " + kvp.Key.ToString() + " is missing after round-trip of " + type + " dict");
        Assert.Equal<V>(kvp.Value, result[kvp.Key]);
    }
}
// Asserts `deserialized` is a ReadOnlyDictionary<K, V> with the same contents as `source`.
private void ValidateReadOnlyDictionary<K, V>(ReadOnlyDictionary<K, V> source, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<ReadOnlyDictionary<K, V>>(deserialized); //Type is wrong after round-trip
    ValidateDictionaryContent(source, result, type);
}
// Verifies type, contents, and — because the collection is sorted — key
// enumeration order of a round-tripped SortedDictionary.
private void ValidateSortedDictionary<K, V>(SortedDictionary<K, V> source, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<SortedDictionary<K, V>>(deserialized);
    Assert.Equal(source.Count, result.Count);
    foreach (var kvp in source)
    {
        Assert.True(result.ContainsKey(kvp.Key));
        Assert.Equal<V>(kvp.Value, result[kvp.Key]);
    }
    using (var expectedKeys = source.Keys.GetEnumerator())
    using (var actualKeys = result.Keys.GetEnumerator())
    {
        while (expectedKeys.MoveNext() && actualKeys.MoveNext())
        {
            Assert.Equal<K>(expectedKeys.Current, actualKeys.Current);
        }
    }
}
// Verifies type, contents, and — because the collection is sorted — key
// enumeration order of a round-tripped SortedList.
private void ValidateSortedList<K, V>(SortedList<K, V> source, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<SortedList<K, V>>(deserialized);
    Assert.Equal(source.Count, result.Count);
    foreach (var kvp in source)
    {
        Assert.True(result.ContainsKey(kvp.Key));
        Assert.Equal<V>(kvp.Value, result[kvp.Key]);
    }
    using (var expectedKeys = source.Keys.GetEnumerator())
    using (var actualKeys = result.Keys.GetEnumerator())
    {
        while (expectedKeys.MoveNext() && actualKeys.MoveNext())
        {
            Assert.Equal<K>(expectedKeys.Current, actualKeys.Current);
        }
    }
}
// Asserts `deserialized` is a ReadOnlyCollection<T> and delegates element checks to ValidateList.
private void ValidateReadOnlyCollectionList<T>(ReadOnlyCollection<T> expected, object deserialized, string type)
{
    Assert.IsAssignableFrom<ReadOnlyCollection<T>>(deserialized); //Type is wrong after round-trip of " + type + " array
    ValidateList(expected, deserialized as IList<T>, type);
}
// Asserts the two lists are element-wise equal (same count, equal items in order).
private void ValidateList<T>(IList<T> expected, IList<T> result, string type)
{
    Assert.Equal(expected.Count, result.Count);
    var index = 0;
    while (index < expected.Count)
    {
        Assert.Equal<T>(expected[index], result[index]);
        index++;
    }
}
// Asserts `deserialized` is a T[] with the same length and element-wise-equal contents.
private void ValidateArray<T>(T[] expected, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<T[]>(deserialized);
    Assert.Equal(expected.Length, result.Length);
    for (var index = 0; index < expected.Length; index++)
    {
        Assert.Equal<T>(expected[index], result[index]);
    }
}
// Asserts `deserialized` is a T[][] of the same outer length, validating each row.
private void ValidateArrayOfArrays<T>(T[][] expected, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<T[][]>(deserialized);
    Assert.Equal(expected.Length, result.Length);
    for (var outer = 0; outer < expected.Length; outer++)
    {
        ValidateArray<T>(expected[outer], result[outer], "Array of " + type + "[" + outer + "] ");
    }
}
// Asserts `deserialized` is a T[][][] of the same outer length, validating each plane.
private void ValidateArrayOfArrayOfArrays<T>(T[][][] expected, object deserialized, string type)
{
    var result = Assert.IsAssignableFrom<T[][][]>(deserialized);
    Assert.Equal(expected.Length, result.Length);
    for (var outer = 0; outer < expected.Length; outer++)
    {
        ValidateArrayOfArrays<T>(expected[outer], result[outer], "Array of " + type + "[" + outer + "][]");
    }
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_CircularReference(SerializerToUse serializerToUse)
{
    // A two-object reference cycle must round-trip without infinite recursion, and
    // the cycle must be reconstructed: the deserialized root is reachable from its own child.
    var environment = InitializeSerializer(serializerToUse);
    var node = new CircularTest1();
    var holder = new CircularTest2();
    holder.CircularTest1List.Add(node);
    node.CircularTest2 = holder;

    var deserialized = (CircularTest1)OrleansSerializationLoop(environment.SerializationManager, node);
    Assert.Equal(node.CircularTest2.CircularTest1List.Count, deserialized.CircularTest2.CircularTest1List.Count);
    Assert.Same(deserialized, deserialized.CircularTest2.CircularTest1List[0]);

    // Repeat with the wire round trip explicitly requested.
    deserialized = (CircularTest1)OrleansSerializationLoop(environment.SerializationManager, node, true);
    Assert.Equal(node.CircularTest2.CircularTest1List.Count, deserialized.CircularTest2.CircularTest1List.Count);
    Assert.Same(deserialized, deserialized.CircularTest2.CircularTest1List[0]);
}
[Theory, TestCategory("Functional")]
[InlineData(SerializerToUse.NoFallback)]
public void Serialize_Enums(SerializerToUse serializerToUse)
{
    // Enums with different underlying types, standalone and embedded in a class,
    // must keep their exact values across the round trip.
    var environment = InitializeSerializer(serializerToUse);

    var intResult = OrleansSerializationLoop(environment.SerializationManager, IntEnum.Value2);
    Assert.Equal(IntEnum.Value2, Assert.IsType<IntEnum>(intResult));

    var ushortResult = OrleansSerializationLoop(environment.SerializationManager, UShortEnum.Value3);
    Assert.Equal(UShortEnum.Value3, Assert.IsType<UShortEnum>(ushortResult));

    var holder = new ClassWithEnumTestData { EnumValue = TestEnum.Third, Enemy = CampaignEnemyTestType.Enemy3 };
    var holderResult = Assert.IsType<ClassWithEnumTestData>(
        OrleansSerializationLoop(environment.SerializationManager, holder));
    Assert.Equal(TestEnum.Third, holderResult.EnumValue);
    Assert.Equal(CampaignEnemyTestType.Enemy3, holderResult.Enemy);

    var enemyResult = OrleansSerializationLoop(environment.SerializationManager, CampaignEnemyType.Enemy3);
    Assert.Equal(CampaignEnemyType.Enemy3, Assert.IsType<CampaignEnemyType>(enemyResult));
}
/// <summary>
/// An <c>IExternalSerializer</c> stub that claims support for no types and throws
/// on every operation; used to verify fallback behavior.
/// </summary>
public class SupportsNothingSerializer : IExternalSerializer
{
    public bool IsSupportedType(Type itemType) => false;

    public object DeepCopy(object source, ICopyContext context) => throw new NotSupportedException();

    public void Serialize(object item, ISerializationContext context, Type expectedType) => throw new NotSupportedException();

    public object Deserialize(Type expectedType, IDeserializationContext context) => throw new NotSupportedException();
}
/// <summary>
/// Tests that ISerializable classes have their callbacks called in the correct order.
/// </summary>
[Fact, TestCategory("BVT")]
public void ISerializable_CallbackOrder_Class()
{
    var environment = InitializeSerializer(SerializerToUse.Default);
    var input = new SimpleISerializableObject
    {
        Payload = "pyjamas"
    };
    // Verify that our behavior conforms to our expected behavior.
    var result = (SimpleISerializableObject)OrleansSerializationLoop(environment.SerializationManager, input);
    // Sender side: construction, then OnSerializing/OnSerialized exactly once each.
    Assert.Equal(
        new[]
        {
            "default_ctor",
            "serializing",
            "serialized"
        },
        input.History);
    // Three contexts are recorded on the sender; presumably one for the copy pass and
    // two for the serialize pass — TODO confirm against SerializationManager internals.
    Assert.Equal(3, input.Contexts.Count);
    Assert.All(input.Contexts, ctx => Assert.True(ctx.Context is ICopyContext || ctx.Context is ISerializationContext));
    // Receiver side: OnDeserializing fires before the serialization constructor,
    // then OnDeserialized, then IDeserializationCallback.OnDeserialization.
    Assert.Equal(
        new[]
        {
            "deserializing",
            "serialization_ctor",
            "deserialized",
            "deserialization"
        },
        result.History);
    Assert.Equal(input.Payload, result.Payload, StringComparer.Ordinal);
    Assert.Equal(3, result.Contexts.Count);
    Assert.All(result.Contexts, ctx => Assert.True(ctx.Context is IDeserializationContext));
    // Verify that our behavior conforms to the behavior of BinaryFormatter.
    var input2 = new SimpleISerializableObject
    {
        Payload = "pyjamas"
    };
    var result2 = (SimpleISerializableObject)DotNetSerializationLoop(
        input2,
        environment.SerializationManager);
    // The callback sequences must match BinaryFormatter's sequences exactly.
    Assert.Equal(input2.History, input.History);
    Assert.Equal(result2.History, result.History);
}
/// <summary>
/// Tests that ISerializable structs have their callbacks called in the correct order.
/// </summary>
[Fact, TestCategory("BVT")]
public void ISerializable_CallbackOrder_Struct()
{
    var environment = InitializeSerializer(SerializerToUse.Default);
    var input = new SimpleISerializableStruct
    {
        Payload = "pyjamas"
    };
    // Verify that our behavior conforms to our expected behavior.
    var result = (SimpleISerializableStruct)OrleansSerializationLoop(environment.SerializationManager, input);
    // Receiver side for a struct: note there is no "deserializing" entry here (unlike
    // the class case) — the expected sequence starts at the serialization constructor.
    Assert.Equal(
        new[]
        {
            "serialization_ctor",
            "deserialized",
            "deserialization"
        },
        result.History);
    Assert.Equal(input.Payload, result.Payload, StringComparer.Ordinal);
    Assert.Equal(2, result.Contexts.Count);
    Assert.All(result.Contexts, ctx => Assert.True(ctx.Context is IDeserializationContext));
    // Verify that our behavior conforms to the behavior of BinaryFormatter.
    var input2 = new SimpleISerializableStruct
    {
        Payload = "pyjamas"
    };
    var result2 = (SimpleISerializableStruct)DotNetSerializationLoop(
        input2,
        environment.SerializationManager);
    Assert.Equal(input2.History, input.History);
    Assert.Equal(result2.History, result.History);
}
/// <summary>
/// Instrumented ISerializable class: records every serialization lifecycle callback
/// (and the StreamingContext it received) so tests can assert ordering.
/// </summary>
[Serializable]
private class SimpleISerializableObject : ISerializable, IDeserializationCallback
{
    private List<string> history;
    private List<StreamingContext> contexts;
    public SimpleISerializableObject()
    {
        this.History.Add("default_ctor");
    }
    // Deserialization constructor invoked by the formatter.
    public SimpleISerializableObject(SerializationInfo info, StreamingContext context)
    {
        this.History.Add("serialization_ctor");
        this.Contexts.Add(context);
        this.Payload = info.GetString(nameof(this.Payload));
    }
    // Lazily created so callbacks that may run before field initialization
    // (e.g. OnDeserializing on a formatter-created instance) never see null.
    public List<string> History => this.history ?? (this.history = new List<string>());
    public List<StreamingContext> Contexts => this.contexts ?? (this.contexts = new List<StreamingContext>());
    public string Payload { get; set; }
    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        this.Contexts.Add(context);
        info.AddValue(nameof(this.Payload), this.Payload);
    }
    [OnSerializing]
    internal void OnSerializingMethod(StreamingContext context)
    {
        this.History.Add("serializing");
        this.Contexts.Add(context);
    }
    [OnSerialized]
    internal void OnSerializedMethod(StreamingContext context)
    {
        this.History.Add("serialized");
        this.Contexts.Add(context);
    }
    [OnDeserializing]
    internal void OnDeserializingMethod(StreamingContext context)
    {
        this.History.Add("deserializing");
        this.Contexts.Add(context);
    }
    [OnDeserialized]
    internal void OnDeserializedMethod(StreamingContext context)
    {
        this.History.Add("deserialized");
        this.Contexts.Add(context);
    }
    // Runs after the whole object graph has been deserialized.
    void IDeserializationCallback.OnDeserialization(object sender)
    {
        this.History.Add("deserialization");
    }
}
/// <summary>
/// Instrumented ISerializable struct: records serialization lifecycle callbacks
/// (and their StreamingContexts) so tests can assert ordering for value types.
/// </summary>
[Serializable]
private struct SimpleISerializableStruct : ISerializable, IDeserializationCallback
{
    private List<string> history;
    private List<StreamingContext> contexts;
    // Deserialization constructor; a struct ctor must assign all fields first.
    public SimpleISerializableStruct(SerializationInfo info, StreamingContext context)
    {
        this.history = null;
        this.contexts = null;
        this.Payload = info.GetString(nameof(this.Payload));
        this.History.Add("serialization_ctor");
        this.Contexts.Add(context);
    }
    // Lazily created so callbacks running on a default-initialized struct never see null.
    public List<string> History => this.history ?? (this.history = new List<string>());
    public List<StreamingContext> Contexts => this.contexts ?? (this.contexts = new List<StreamingContext>());
    public string Payload { get; set; }
    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        this.Contexts.Add(context);
        info.AddValue(nameof(this.Payload), this.Payload);
    }
    [OnSerializing]
    internal void OnSerializingMethod(StreamingContext context)
    {
        this.History.Add("serializing");
        this.Contexts.Add(context);
    }
    [OnSerialized]
    internal void OnSerializedMethod(StreamingContext context)
    {
        this.History.Add("serialized");
        this.Contexts.Add(context);
    }
    [OnDeserializing]
    internal void OnDeserializingMethod(StreamingContext context)
    {
        this.History.Add("deserializing");
        this.Contexts.Add(context);
    }
    [OnDeserialized]
    internal void OnDeserializedMethod(StreamingContext context)
    {
        this.History.Add("deserialized");
        this.Contexts.Add(context);
    }
    // Runs after the whole object graph has been deserialized.
    void IDeserializationCallback.OnDeserialization(object sender)
    {
        this.History.Add("deserialization");
    }
}
}
}
// ReSharper restore NotAccessedVariable
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Threading;
namespace System.Collections.Generic.Internal
{
/// <summary>
/// Simplified version of Dictionary<K,V>
/// 1. Deriving from DictionaryBase to share common code
/// 2. Hashing/bucket moved to DictionaryBase
/// 3. Separate TKey,TValue array. This will get rid of array with both TKey and TValue
/// 4. No interface implementation. You pay for the methods called
/// 5. Support FindFirstKey/FindNextKey, hash code based search (returning key to caller for key comparison)
/// 6. If comparer is provided, it has to be non null. This allows reducing dependency on EqualityComparer<TKey>.Default
/// </summary>
/// <typeparam name="TKey"></typeparam>
/// <typeparam name="TValue"></typeparam>
internal class Dictionary<TKey, TValue> : DictionaryBase
{
private TKey[] keyArray;                  // keys, parallel to valueArray (slot i of both belongs to entries[i])
private TValue[] valueArray;              // values, parallel to keyArray
private IEqualityComparer<TKey> comparer; // never null; every constructor supplies one
private Lock m_lock;                      // created only when constructed with sync == true; otherwise null
// Creates an empty, unsynchronized dictionary using the default key comparer.
public Dictionary()
    : this(0, EqualityComparer<TKey>.Default, false)
{
}
// Creates an empty dictionary with the default key comparer; `sync` enables the internal lock.
public Dictionary(bool sync) : this(0, EqualityComparer<TKey>.Default, sync)
{
}
// Creates an unsynchronized dictionary pre-sized for `capacity` entries.
public Dictionary(int capacity)
    : this(capacity, EqualityComparer<TKey>.Default, false)
{
}
// Creates a dictionary pre-sized for `capacity` entries; `sync` enables the internal lock.
public Dictionary(int capacity, bool sync) : this(capacity, EqualityComparer<TKey>.Default, sync)
{
}
// Creates an empty, unsynchronized dictionary with a caller-supplied (non-null) comparer.
public Dictionary(IEqualityComparer<TKey> comparer)
    : this(0, comparer, false)
{
}
// Creates an empty dictionary with a caller-supplied comparer; `sync` enables the internal lock.
public Dictionary(IEqualityComparer<TKey> comparer, bool sync) : this(0, comparer, sync)
{
}
// Creates an unsynchronized dictionary pre-sized for `capacity` with a caller-supplied comparer.
public Dictionary(int capacity, IEqualityComparer<TKey> comparer) : this(capacity, comparer, false)
{
}
/// <summary>
/// Core constructor: optionally pre-sizes the table, records the required comparer,
/// and creates the internal lock when <paramref name="sync"/> is requested.
/// </summary>
public Dictionary(int capacity, IEqualityComparer<TKey> comparer, bool sync)
{
    // Every chained constructor supplies a comparer explicitly; this avoids paying
    // for EqualityComparer<TKey>.Default lookups inside the hashing code.
    Debug.Assert(comparer != null);
    if (capacity > 0)
    {
        Initialize(capacity);
    }
    this.comparer = comparer;
    m_lock = sync ? new Lock() : null;
}
// Copy constructor using the default key comparer.
public Dictionary(IDictionary<TKey, TValue> dictionary) : this(dictionary, EqualityComparer<TKey>.Default)
{
}
// Copy constructor: pre-sizes to the source's count (0 for null to let the chained
// call succeed), then rejects null and copies every pair.
public Dictionary(IDictionary<TKey, TValue> dictionary, IEqualityComparer<TKey> comparer) :
    this(dictionary != null ? dictionary.Count : 0, comparer)
{
    if (dictionary == null)
    {
        throw new ArgumentNullException(nameof(dictionary));
    }
    foreach (KeyValuePair<TKey, TValue> pair in dictionary)
    {
        Add(pair.Key, pair.Value);
    }
}
// Acquires the internal lock; only valid when constructed with sync == true.
public void LockAcquire()
{
    Debug.Assert(m_lock != null);
    m_lock.Acquire();
}
// Releases the internal lock; only valid when constructed with sync == true.
public void LockRelease()
{
    Debug.Assert(m_lock != null);
    m_lock.Release();
}
/// <summary>The key comparer supplied at construction (never null).</summary>
public IEqualityComparer<TKey> Comparer => comparer;
/// <summary>A fresh view over the dictionary's keys.</summary>
public KeyCollection Keys => new KeyCollection(this);
/// <summary>A fresh view over the dictionary's values.</summary>
public ValueCollection Values => new ValueCollection(this);
/// <summary>Enumerates entries as key/value pairs (no interface boxing — struct enumerator).</summary>
public Enumerator GetEnumerator() => new Enumerator(this, Enumerator.KeyValuePair);
/// <summary>
/// Gets or sets the value for <paramref name="key"/>. The getter throws
/// <see cref="KeyNotFoundException"/> for a missing key; the setter adds or overwrites.
/// </summary>
public TValue this[TKey key]
{
    get
    {
        int entry = FindEntry(key);
        if (entry < 0)
        {
            throw new KeyNotFoundException();
        }
        return valueArray[entry];
    }
    set
    {
        // add == false: overwriting an existing key is allowed.
        Insert(key, value, false);
    }
}
// Adds a new entry; throws ArgumentException when the key already exists.
public void Add(TKey key, TValue value)
{
    if (!Insert(key, value, true))
    {
        throw new ArgumentException(SR.Argument_AddingDuplicate);
    }
}
// Adds a new entry using a caller-precomputed hash code (avoids rehashing the key);
// throws ArgumentException when the key already exists.
public void Add(TKey key, TValue value, int hashCode)
{
    if (!Insert(key, value, true, hashCode))
    {
        throw new ArgumentException(SR.Argument_AddingDuplicate);
    }
}
// Removes all entries. ClearBase resets the base-class bookkeeping (entries/free list);
// the key and value arrays are then cleared so stale references do not keep objects alive.
// NOTE(review): this relies on the local reads of `count` in the Array.Clear calls seeing
// the pre-clear entry high-water mark -- confirm ClearBase's handling of `count`.
public void Clear()
{
    if (count > 0)
    {
        ClearBase();
        Array.Clear(keyArray, 0, count);
        Array.Clear(valueArray, 0, count);
    }
}
// True when the key is present (FindEntry returns a non-negative slot index).
public bool ContainsKey(TKey key) => FindEntry(key) >= 0;
// Linear scan over all occupied slots (hashCode >= 0) for a matching value.
public bool ContainsValue(TValue value)
{
    if (value == null)
    {
        // Search for a null reference slot; value types never compare equal to null here.
        for (int slot = 0; slot < count; slot++)
        {
            if (entries[slot].hashCode >= 0 && valueArray[slot] == null)
            {
                return true;
            }
        }
        return false;
    }

    var valueComparer = EqualityComparer<TValue>.Default;
    for (int slot = 0; slot < count; slot++)
    {
        if (entries[slot].hashCode >= 0 && valueComparer.Equals(valueArray[slot], value))
        {
            return true;
        }
    }
    return false;
}
// Copies all live entries (hashCode >= 0) into the target array starting at index.
// Free-list slots are skipped, so exactly Count elements are written.
private void CopyTo(KeyValuePair<TKey, TValue>[] array, int index)
{
    if (array == null)
    {
        throw new ArgumentNullException(nameof(array));
    }
    if (index < 0 || index > array.Length)
    {
        throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (array.Length - index < Count)
    {
        throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
    }
    // Local copies avoid repeated field loads in the loop.
    int count = this.count;
    Entry[] entries = this.entries;
    for (int i = 0; i < count; i++)
    {
        if (entries[i].hashCode >= 0)
        {
            array[index++] = new KeyValuePair<TKey, TValue>(keyArray[i], valueArray[i]);
        }
    }
}
/// <summary>
/// Get total count of items, including free cells
/// </summary>
/// <returns>Upper bound (exclusive) for indices accepted by GetKey/GetValue.</returns>
/// <remarks>
/// WARNING: This function will be called under a GC callback. Please read the comments in
/// GCCallbackAttribute to understand all the implications before you make any changes
/// </remarks>
[System.Runtime.InteropServices.GCCallback]
public int GetMaxCount()
{
    return this.count;
}
/// <summary>
/// Get Key[i], return true if not free
/// </summary>
public bool GetKey(int index, ref TKey key)
{
    Debug.Assert((index >= 0) && (index < this.count));

    // Negative hash codes mark free-list slots; leave `key` untouched for those.
    if (entries[index].hashCode < 0)
    {
        return false;
    }

    key = keyArray[index];
    return true;
}
/// <summary>
/// Get Value[i], return true if not free
/// </summary>
/// <remarks>
/// WARNING: This function will be called under a GC callback. Please read the comments in
/// GCCallbackAttribute to understand all the implications before you make any changes
/// </remarks>
[System.Runtime.InteropServices.GCCallback]
public bool GetValue(int index, ref TValue value)
{
    Debug.Assert((index >= 0) && (index < this.count));
    // hashCode >= 0 means the slot is occupied; negative marks a free-list slot.
    if (entries[index].hashCode >= 0)
    {
        value = valueArray[index];
        return true;
    }
    return false;
}
// Returns the slot index of `key`, or -1 if absent (or storage not yet allocated).
private int FindEntry(TKey key)
{
    if (entries != null)
    {
        // Mask off the sign bit: negative hash codes are reserved to mark free slots.
        int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
        // Walk the collision chain anchored at the key's bucket.
        for (int i = entries[ModLength(hashCode)].bucket; i >= 0; i = entries[i].next)
        {
            if (entries[i].hashCode == hashCode && comparer.Equals(keyArray[i], key))
            {
                return i;
            }
        }
    }
    return -1;
}
// Same as FindEntry(key) but uses a caller-supplied hash code, saving a GetHashCode call.
private int FindEntry(TKey key, int hashCode)
{
    if (entries != null)
    {
        // Mask off the sign bit to match the stored (always non-negative) hash codes.
        hashCode = hashCode & 0x7FFFFFFF;
        for (int i = entries[ModLength(hashCode)].bucket; i >= 0; i = entries[i].next)
        {
            if (entries[i].hashCode == hashCode && comparer.Equals(keyArray[i], key))
            {
                return i;
            }
        }
    }
    return -1;
}
/// <summary>
/// Find first matching entry, returning its index; on success `key` is replaced with
/// the stored key instance (useful for interning-style lookups).
/// </summary>
/// <param name="key">Key to search for; updated to the stored key when found.</param>
/// <returns>Slot index of the first match, or a negative value when not found.</returns>
public int FindFirstKey(ref TKey key)
{
    int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
    int entry = FindFirstEntry(hashCode);
    if (entry >= 0)
    {
        key = keyArray[entry];
    }
    return entry;
}
/// <summary>
/// Find next matching entry after <paramref name="entry"/>, returning its index;
/// on success `key` is replaced with the stored key instance.
/// </summary>
/// <param name="key">Updated to the stored key when a next match is found.</param>
/// <param name="entry">Index returned by a prior FindFirstKey/FindNextKey call.</param>
/// <returns>Slot index of the next match, or a negative value when exhausted.</returns>
public int FindNextKey(ref TKey key, int entry)
{
    entry = FindNextEntry(entry);
    if (entry >= 0)
    {
        key = keyArray[entry];
    }
    return entry;
}
// Allocates storage for at least `capacity` entries. InitializeBase picks the actual
// (prime) size and sets up the entry table; the key/value arrays must match that size.
// NoInlining keeps this rare path out of the hot Insert code.
[MethodImpl(MethodImplOptions.NoInlining)]
private void Initialize(int capacity)
{
    int size = InitializeBase(capacity);
    keyArray = new TKey[size];
    valueArray = new TValue[size];
}
// Convenience overload that computes the hash code with the configured comparer.
private bool Insert(TKey key, TValue value, bool add) =>
    Insert(key, value, add, comparer.GetHashCode(key));
// Core insert/update. Returns false only when add == true and the key already exists;
// otherwise inserts (or overwrites when add == false) and returns true.
private bool Insert(TKey key, TValue value, bool add, int hashCode)
{
    // Lazily allocate storage on first insert.
    if (entries == null)
    {
        Initialize(0);
    }
    // Mask off the sign bit: negative stored hash codes mark free slots.
    hashCode = hashCode & 0x7FFFFFFF;
    int targetBucket = ModLength(hashCode);
    // Scan the bucket's collision chain for an existing key.
    for (int i = entries[targetBucket].bucket; i >= 0; i = entries[i].next)
    {
        if (entries[i].hashCode == hashCode && comparer.Equals(keyArray[i], key))
        {
            if (add)
            {
                return false;
            }
            valueArray[i] = value;
            version++; // invalidate outstanding enumerators
            return true;
        }
    }
    int index;
    if (freeCount > 0)
    {
        // Reuse a slot from the free list (entries[index].next links free slots).
        index = freeList;
        freeList = entries[index].next;
        freeCount--;
    }
    else
    {
        if (count == entries.Length)
        {
            Resize();
            // Resizing changes the table length, so recompute the bucket.
            targetBucket = ModLength(hashCode);
        }
        index = count;
        count++;
    }
    // Link the new entry at the head of its bucket's chain.
    entries[index].hashCode = hashCode;
    entries[index].next = entries[targetBucket].bucket;
    keyArray[index] = key;
    valueArray[index] = value;
    entries[targetBucket].bucket = index;
    version++;
    return true;
}
// Grows the table to the next prime at least twice the current entry count.
private void Resize() => Resize(HashHelpers.ExpandPrime(count));
// Grows all three parallel arrays to newSize. ResizeBase1 allocates the new entry table;
// ResizeBase2 rehashes/relinks buckets and publishes it. The key/value arrays are swapped
// in only after ResizeBase2 so the entry table and data arrays stay consistent.
private void Resize(int newSize)
{
#if !RHTESTCL
    Debug.Assert(newSize >= entries.Length);
#endif
    Entry[] newEntries = ResizeBase1(newSize);
    TKey[] newKeys = new TKey[newSize];
    Array.Copy(keyArray, 0, newKeys, 0, count);
    TValue[] newValues = new TValue[newSize];
    Array.Copy(valueArray, 0, newValues, 0, count);
    ResizeBase2(newEntries, newSize);
    keyArray = newKeys;
    valueArray = newValues;
}
// Removes the entry for `key`, returning true when found. The freed slot is pushed onto
// the free list for reuse and its key/value are cleared so the GC can reclaim them.
public bool Remove(TKey key)
{
    if (entries != null)
    {
        int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
        int bucket = ModLength(hashCode);
        int last = -1; // previous node in the chain; -1 while at the head
        for (int i = entries[bucket].bucket; i >= 0; last = i, i = entries[i].next)
        {
            if (entries[i].hashCode == hashCode && comparer.Equals(keyArray[i], key))
            {
                // Unlink from the collision chain.
                if (last < 0)
                {
                    entries[bucket].bucket = entries[i].next;
                }
                else
                {
                    entries[last].next = entries[i].next;
                }
                // Mark the slot free and push it onto the free list.
                entries[i].hashCode = -1;
                entries[i].next = freeList;
                keyArray[i] = default(TKey);
                valueArray[i] = default(TValue);
                freeList = i;
                freeCount++;
                version++; // invalidate outstanding enumerators
                return true;
            }
        }
    }
    return false;
}
// Non-throwing lookup; `value` is default(TValue) when the key is absent.
public bool TryGetValue(TKey key, out TValue value)
{
    int entry = FindEntry(key);
    if (entry < 0)
    {
        value = default(TValue);
        return false;
    }
    value = valueArray[entry];
    return true;
}
// Non-throwing lookup with a caller-supplied hash code (skips re-hashing the key).
public bool TryGetValue(TKey key, int hashCode, out TValue value)
{
    int entry = FindEntry(key, hashCode);
    if (entry < 0)
    {
        value = default(TValue);
        return false;
    }
    value = valueArray[entry];
    return true;
}
/// <summary>
/// Return matching key: replaces `key` with the stored key instance when present
/// (useful for interning-style lookups).
/// </summary>
public bool TryGetKey(ref TKey key)
{
    int entry = FindEntry(key);
    if (entry < 0)
    {
        return false;
    }
    key = keyArray[entry];
    return true;
}
// Allocation-free struct enumerator over key/value pairs. Also implements
// IDictionaryEnumerator so the same type can back non-generic enumeration,
// returning either DictionaryEntry or KeyValuePair per getEnumeratorRetType.
public struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>,
    IDictionaryEnumerator
{
    private Dictionary<TKey, TValue> dictionary;
    private int version;   // snapshot of dictionary.version; mismatch => collection modified
    private int index;     // next slot to probe; dictionary.count + 1 marks "finished"
    private KeyValuePair<TKey, TValue> current;
    private int getEnumeratorRetType; // What should Enumerator.Current return?
    internal const int DictEntry = 1;
    internal const int KeyValuePair = 2;
    internal Enumerator(Dictionary<TKey, TValue> dictionary, int getEnumeratorRetType)
    {
        this.dictionary = dictionary;
        version = dictionary.version;
        index = 0;
        this.getEnumeratorRetType = getEnumeratorRetType;
        current = new KeyValuePair<TKey, TValue>();
    }
    public bool MoveNext()
    {
        if (version != dictionary.version)
        {
            throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
        }
        // Use unsigned comparison since we set index to dictionary.count+1 when the enumeration ends.
        // dictionary.count+1 could be negative if dictionary.count is Int32.MaxValue
        while ((uint)index < (uint)dictionary.count)
        {
            // Skip free-list slots (negative hashCode).
            if (dictionary.entries[index].hashCode >= 0)
            {
                current = new KeyValuePair<TKey, TValue>(dictionary.keyArray[index], dictionary.valueArray[index]);
                index++;
                return true;
            }
            index++;
        }
        index = dictionary.count + 1;
        current = new KeyValuePair<TKey, TValue>();
        return false;
    }
    public KeyValuePair<TKey, TValue> Current
    {
        get { return current; }
    }
    public void Dispose()
    {
    }
    object IEnumerator.Current
    {
        get
        {
            // index == 0: MoveNext never called; index == count + 1: enumeration finished.
            if (index == 0 || (index == dictionary.count + 1))
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            }
            if (getEnumeratorRetType == DictEntry)
            {
                return new System.Collections.DictionaryEntry(current.Key, current.Value);
            }
            else
            {
                return new KeyValuePair<TKey, TValue>(current.Key, current.Value);
            }
        }
    }
    void IEnumerator.Reset()
    {
        if (version != dictionary.version)
        {
            throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
        }
        index = 0;
        current = new KeyValuePair<TKey, TValue>();
    }
    DictionaryEntry IDictionaryEnumerator.Entry
    {
        get
        {
            if (index == 0 || (index == dictionary.count + 1))
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            }
            return new DictionaryEntry(current.Key, current.Value);
        }
    }
    object IDictionaryEnumerator.Key
    {
        get
        {
            if (index == 0 || (index == dictionary.count + 1))
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            }
            return current.Key;
        }
    }
    object IDictionaryEnumerator.Value
    {
        get
        {
            if (index == 0 || (index == dictionary.count + 1))
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
            }
            return current.Value;
        }
    }
}
// Read-only live view over the dictionary's keys. Mutating members throw
// NotSupportedException; enumeration and copying reflect the current contents.
public sealed class KeyCollection : ICollection<TKey>, ICollection
{
    private Dictionary<TKey, TValue> dictionary;
    public KeyCollection(Dictionary<TKey, TValue> dictionary)
    {
        if (dictionary == null)
        {
            throw new ArgumentNullException(nameof(dictionary));
        }
        this.dictionary = dictionary;
    }
    public Enumerator GetEnumerator()
    {
        return new Enumerator(dictionary);
    }
    // Copies all live keys (free slots skipped) into `array` starting at `index`.
    public void CopyTo(TKey[] array, int index)
    {
        if (array == null)
        {
            throw new ArgumentNullException(nameof(array));
        }
        if (index < 0 || index > array.Length)
        {
            throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (array.Length - index < dictionary.Count)
        {
            throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
        }
        int count = dictionary.count;
        TKey[] keys = dictionary.keyArray;
        Entry[] entries = dictionary.entries;
        for (int i = 0; i < count; i++)
        {
            if (entries[i].hashCode >= 0)
            {
                array[index++] = keys[i];
            }
        }
    }
    public int Count
    {
        get { return dictionary.Count; }
    }
    bool ICollection<TKey>.IsReadOnly
    {
        get { return true; }
    }
    void ICollection<TKey>.Add(TKey item)
    {
        throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
    }
    void ICollection<TKey>.Clear()
    {
        throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
    }
    bool ICollection<TKey>.Contains(TKey item)
    {
        return dictionary.ContainsKey(item);
    }
    bool ICollection<TKey>.Remove(TKey item)
    {
        throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
    }
    IEnumerator<TKey> IEnumerable<TKey>.GetEnumerator()
    {
        return new Enumerator(dictionary);
    }
    IEnumerator IEnumerable.GetEnumerator()
    {
        return new Enumerator(dictionary);
    }
    // Non-generic copy: falls back to an object[] element-by-element copy when the
    // target is not a TKey[].
    void ICollection.CopyTo(Array array, int index)
    {
        if (array == null)
        {
            throw new ArgumentNullException(nameof(array));
        }
        if (array.Rank != 1)
        {
            throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
        }
        if (array.GetLowerBound(0) != 0)
        {
            throw new ArgumentException(SR.Arg_NonZeroLowerBound);
        }
        if (index < 0 || index > array.Length)
        {
            throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (array.Length - index < dictionary.Count)
        {
            throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
        }
        TKey[] keys = array as TKey[];
        if (keys != null)
        {
            CopyTo(keys, index);
        }
        else
        {
            object[] objects = array as object[];
            if (objects == null)
            {
                throw new ArgumentException(SR.Argument_InvalidArrayType);
            }
            int count = dictionary.count;
            Entry[] entries = dictionary.entries;
            TKey[] ks = dictionary.keyArray;
            try
            {
                for (int i = 0; i < count; i++)
                {
                    if (entries[i].hashCode >= 0)
                    {
                        objects[index++] = ks[i];
                    }
                }
            }
            catch (ArrayTypeMismatchException)
            {
                // Element type of the destination array is incompatible with TKey.
                throw new ArgumentException(SR.Argument_InvalidArrayType);
            }
        }
    }
    bool ICollection.IsSynchronized
    {
        get { return false; }
    }
    Object ICollection.SyncRoot
    {
        get { return ((ICollection)dictionary).SyncRoot; }
    }
    // Allocation-free struct enumerator over keys; invalidated by dictionary mutation.
    public struct Enumerator : IEnumerator<TKey>, System.Collections.IEnumerator
    {
        private Dictionary<TKey, TValue> dictionary;
        private int index;   // next slot to probe; dictionary.count + 1 marks "finished"
        private int version; // snapshot of dictionary.version for invalidation checks
        private TKey currentKey;
        internal Enumerator(Dictionary<TKey, TValue> dictionary)
        {
            this.dictionary = dictionary;
            version = dictionary.version;
            index = 0;
            currentKey = default(TKey);
        }
        public void Dispose()
        {
        }
        public bool MoveNext()
        {
            if (version != dictionary.version)
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            }
            while ((uint)index < (uint)dictionary.count)
            {
                // Skip free-list slots (negative hashCode).
                if (dictionary.entries[index].hashCode >= 0)
                {
                    currentKey = dictionary.keyArray[index];
                    index++;
                    return true;
                }
                index++;
            }
            index = dictionary.count + 1;
            currentKey = default(TKey);
            return false;
        }
        public TKey Current
        {
            get
            {
                return currentKey;
            }
        }
        Object System.Collections.IEnumerator.Current
        {
            get
            {
                // index == 0: MoveNext never called; index == count + 1: finished.
                if (index == 0 || (index == dictionary.count + 1))
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                }
                return currentKey;
            }
        }
        void System.Collections.IEnumerator.Reset()
        {
            if (version != dictionary.version)
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            }
            index = 0;
            currentKey = default(TKey);
        }
    }
}
// Read-only live view over the dictionary's values. Mutating members throw
// NotSupportedException; enumeration and copying reflect the current contents.
public sealed class ValueCollection : ICollection<TValue>, ICollection
{
    private Dictionary<TKey, TValue> dictionary;
    public ValueCollection(Dictionary<TKey, TValue> dictionary)
    {
        if (dictionary == null)
        {
            throw new ArgumentNullException(nameof(dictionary));
        }
        this.dictionary = dictionary;
    }
    public Enumerator GetEnumerator()
    {
        return new Enumerator(dictionary);
    }
    // Copies all live values (free slots skipped) into `array` starting at `index`.
    public void CopyTo(TValue[] array, int index)
    {
        if (array == null)
        {
            throw new ArgumentNullException(nameof(array));
        }
        if (index < 0 || index > array.Length)
        {
            throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (array.Length - index < dictionary.Count)
        {
            throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
        }
        int count = dictionary.count;
        Entry[] entries = dictionary.entries;
        TValue[] values = dictionary.valueArray;
        for (int i = 0; i < count; i++)
        {
            if (entries[i].hashCode >= 0)
            {
                array[index++] = values[i];
            }
        }
    }
    public int Count
    {
        get { return dictionary.Count; }
    }
    bool ICollection<TValue>.IsReadOnly
    {
        get { return true; }
    }
    void ICollection<TValue>.Add(TValue item)
    {
        throw new NotSupportedException(SR.NotSupported_ValueCollectionSet);
    }
    bool ICollection<TValue>.Remove(TValue item)
    {
        throw new NotSupportedException(SR.NotSupported_ValueCollectionSet);
    }
    void ICollection<TValue>.Clear()
    {
        throw new NotSupportedException(SR.NotSupported_ValueCollectionSet);
    }
    bool ICollection<TValue>.Contains(TValue item)
    {
        return dictionary.ContainsValue(item);
    }
    IEnumerator<TValue> IEnumerable<TValue>.GetEnumerator()
    {
        return new Enumerator(dictionary);
    }
    IEnumerator IEnumerable.GetEnumerator()
    {
        return new Enumerator(dictionary);
    }
    // Non-generic copy: falls back to an object[] element-by-element copy when the
    // target is not a TValue[].
    void ICollection.CopyTo(Array array, int index)
    {
        if (array == null)
        {
            throw new ArgumentNullException(nameof(array));
        }
        if (array.Rank != 1)
        {
            throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
        }
        if (array.GetLowerBound(0) != 0)
        {
            throw new ArgumentException(SR.Arg_NonZeroLowerBound);
        }
        if (index < 0 || index > array.Length)
        {
            throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        // Braced for consistency with every other guard clause in this file
        // (the KeyCollection counterpart already braces this check).
        if (array.Length - index < dictionary.Count)
        {
            throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
        }
        TValue[] values = array as TValue[];
        if (values != null)
        {
            CopyTo(values, index);
        }
        else
        {
            object[] objects = array as object[];
            if (objects == null)
            {
                throw new ArgumentException(SR.Argument_InvalidArrayType);
            }
            int count = dictionary.count;
            Entry[] entries = dictionary.entries;
            TValue[] vs = dictionary.valueArray;
            try
            {
                for (int i = 0; i < count; i++)
                {
                    if (entries[i].hashCode >= 0)
                    {
                        objects[index++] = vs[i];
                    }
                }
            }
            catch (ArrayTypeMismatchException)
            {
                // Element type of the destination array is incompatible with TValue.
                throw new ArgumentException(SR.Argument_InvalidArrayType);
            }
        }
    }
    bool ICollection.IsSynchronized
    {
        get { return false; }
    }
    Object ICollection.SyncRoot
    {
        get { return ((ICollection)dictionary).SyncRoot; }
    }
    // Allocation-free struct enumerator over values; invalidated by dictionary mutation.
    public struct Enumerator : IEnumerator<TValue>, System.Collections.IEnumerator
    {
        private Dictionary<TKey, TValue> dictionary;
        private int index;   // next slot to probe; dictionary.count + 1 marks "finished"
        private int version; // snapshot of dictionary.version for invalidation checks
        private TValue currentValue;
        internal Enumerator(Dictionary<TKey, TValue> dictionary)
        {
            this.dictionary = dictionary;
            version = dictionary.version;
            index = 0;
            currentValue = default(TValue);
        }
        public void Dispose()
        {
        }
        public bool MoveNext()
        {
            if (version != dictionary.version)
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            }
            while ((uint)index < (uint)dictionary.count)
            {
                // Skip free-list slots (negative hashCode).
                if (dictionary.entries[index].hashCode >= 0)
                {
                    currentValue = dictionary.valueArray[index];
                    index++;
                    return true;
                }
                index++;
            }
            index = dictionary.count + 1;
            currentValue = default(TValue);
            return false;
        }
        public TValue Current
        {
            get
            {
                return currentValue;
            }
        }
        Object System.Collections.IEnumerator.Current
        {
            get
            {
                // index == 0: MoveNext never called; index == count + 1: finished.
                if (index == 0 || (index == dictionary.count + 1))
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                }
                return currentValue;
            }
        }
        void System.Collections.IEnumerator.Reset()
        {
            if (version != dictionary.version)
            {
                throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
            }
            index = 0;
            currentValue = default(TValue);
        }
    }
}
}
}
| |
/// This code was generated by
/// \ / _ _ _| _ _
/// | (_)\/(_)(_|\/| |(/_ v1.0.0
/// / /
/// <summary>
/// SecondaryAuthTokenResource
/// </summary>
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Clients;
using Twilio.Converters;
using Twilio.Exceptions;
using Twilio.Http;
namespace Twilio.Rest.Accounts.V1
{
public class SecondaryAuthTokenResource : Resource
{
    // Builds the POST request against the Accounts subdomain; no query/header params.
    private static Request BuildCreateRequest(CreateSecondaryAuthTokenOptions options, ITwilioRestClient client)
    {
        return new Request(
            HttpMethod.Post,
            Rest.Domain.Accounts,
            "/v1/AuthTokens/Secondary",
            postParams: options.GetParams(),
            headerParams: null
        );
    }
    /// <summary>
    /// Create a new secondary Auth Token
    /// </summary>
    /// <param name="options"> Create SecondaryAuthToken parameters </param>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> A single instance of SecondaryAuthToken </returns>
    public static SecondaryAuthTokenResource Create(CreateSecondaryAuthTokenOptions options,
                                                    ITwilioRestClient client = null)
    {
        client = client ?? TwilioClient.GetRestClient();
        var response = client.Request(BuildCreateRequest(options, client));
        return FromJson(response.Content);
    }
#if !NET35
    /// <summary>
    /// Create a new secondary Auth Token
    /// </summary>
    /// <param name="options"> Create SecondaryAuthToken parameters </param>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> Task that resolves to A single instance of SecondaryAuthToken </returns>
    public static async System.Threading.Tasks.Task<SecondaryAuthTokenResource> CreateAsync(CreateSecondaryAuthTokenOptions options,
                                                                                            ITwilioRestClient client = null)
    {
        client = client ?? TwilioClient.GetRestClient();
        var response = await client.RequestAsync(BuildCreateRequest(options, client));
        return FromJson(response.Content);
    }
#endif
    /// <summary>
    /// Create a new secondary Auth Token
    /// </summary>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> A single instance of SecondaryAuthToken </returns>
    public static SecondaryAuthTokenResource Create(ITwilioRestClient client = null)
    {
        var options = new CreateSecondaryAuthTokenOptions();
        return Create(options, client);
    }
#if !NET35
    /// <summary>
    /// Create a new secondary Auth Token
    /// </summary>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> Task that resolves to A single instance of SecondaryAuthToken </returns>
    public static async System.Threading.Tasks.Task<SecondaryAuthTokenResource> CreateAsync(ITwilioRestClient client = null)
    {
        var options = new CreateSecondaryAuthTokenOptions();
        return await CreateAsync(options, client);
    }
#endif
    // Builds the DELETE request against the Accounts subdomain; params go in the query string.
    private static Request BuildDeleteRequest(DeleteSecondaryAuthTokenOptions options, ITwilioRestClient client)
    {
        return new Request(
            HttpMethod.Delete,
            Rest.Domain.Accounts,
            "/v1/AuthTokens/Secondary",
            queryParams: options.GetParams(),
            headerParams: null
        );
    }
    /// <summary>
    /// Delete the secondary Auth Token from your account
    /// </summary>
    /// <param name="options"> Delete SecondaryAuthToken parameters </param>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> True when the API confirms the deletion (HTTP 204 No Content), false otherwise </returns>
    public static bool Delete(DeleteSecondaryAuthTokenOptions options, ITwilioRestClient client = null)
    {
        client = client ?? TwilioClient.GetRestClient();
        var response = client.Request(BuildDeleteRequest(options, client));
        return response.StatusCode == System.Net.HttpStatusCode.NoContent;
    }
#if !NET35
    /// <summary>
    /// Delete the secondary Auth Token from your account
    /// </summary>
    /// <param name="options"> Delete SecondaryAuthToken parameters </param>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> Task that resolves to true when the API confirms the deletion (HTTP 204 No Content), false otherwise </returns>
    public static async System.Threading.Tasks.Task<bool> DeleteAsync(DeleteSecondaryAuthTokenOptions options,
                                                                      ITwilioRestClient client = null)
    {
        client = client ?? TwilioClient.GetRestClient();
        var response = await client.RequestAsync(BuildDeleteRequest(options, client));
        return response.StatusCode == System.Net.HttpStatusCode.NoContent;
    }
#endif
    /// <summary>
    /// Delete the secondary Auth Token from your account
    /// </summary>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> True when the API confirms the deletion (HTTP 204 No Content), false otherwise </returns>
    public static bool Delete(ITwilioRestClient client = null)
    {
        var options = new DeleteSecondaryAuthTokenOptions();
        return Delete(options, client);
    }
#if !NET35
    /// <summary>
    /// Delete the secondary Auth Token from your account
    /// </summary>
    /// <param name="client"> Client to make requests to Twilio; the default rest client is used when null </param>
    /// <returns> Task that resolves to true when the API confirms the deletion (HTTP 204 No Content), false otherwise </returns>
    public static async System.Threading.Tasks.Task<bool> DeleteAsync(ITwilioRestClient client = null)
    {
        var options = new DeleteSecondaryAuthTokenOptions();
        return await DeleteAsync(options, client);
    }
#endif
    /// <summary>
    /// Converts a JSON string into a SecondaryAuthTokenResource object
    /// </summary>
    /// <param name="json"> Raw JSON string </param>
    /// <returns> SecondaryAuthTokenResource object represented by the provided JSON </returns>
    /// <exception cref="ApiException"> Thrown when the JSON cannot be deserialized </exception>
    public static SecondaryAuthTokenResource FromJson(string json)
    {
        // Convert all checked exceptions to Runtime
        try
        {
            return JsonConvert.DeserializeObject<SecondaryAuthTokenResource>(json);
        }
        catch (JsonException e)
        {
            throw new ApiException(e.Message, e);
        }
    }
    /// <summary>
    /// The SID of the Account that the secondary Auth Token was created for
    /// </summary>
    [JsonProperty("account_sid")]
    public string AccountSid { get; private set; }
    /// <summary>
    /// The ISO 8601 formatted date and time in UTC when the resource was created
    /// </summary>
    [JsonProperty("date_created")]
    public DateTime? DateCreated { get; private set; }
    /// <summary>
    /// The ISO 8601 formatted date and time in UTC when the resource was last updated
    /// </summary>
    [JsonProperty("date_updated")]
    public DateTime? DateUpdated { get; private set; }
    /// <summary>
    /// The generated secondary Auth Token
    /// </summary>
    [JsonProperty("secondary_auth_token")]
    public string SecondaryAuthToken { get; private set; }
    /// <summary>
    /// The URI for this resource, relative to `https://accounts.twilio.com`
    /// </summary>
    [JsonProperty("url")]
    public Uri Url { get; private set; }
    // Instances are produced only via FromJson (JSON deserialization).
    private SecondaryAuthTokenResource()
    {
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;
namespace System.IO
{
/* This class is used by for reading from the stdin.
* It is designed to read stdin in raw mode for interpreting
* key press events and maintain its own buffer for the same.
* which is then used for all the Read operations
*/
internal sealed class StdInReader : TextReader
{
private static string s_moveLeftString; // string written to move the cursor to the left
private readonly StringBuilder _readLineSB; // SB that holds readLine output. This is a field simply to enable reuse; it's only used in ReadLine.
private readonly Stack<ConsoleKeyInfo> _tmpKeys = new Stack<ConsoleKeyInfo>(); // temporary working stack; should be empty outside of ReadLine
private readonly Stack<ConsoleKeyInfo> _availableKeys = new Stack<ConsoleKeyInfo>(); // a queue of already processed key infos available for reading
private readonly Encoding _encoding;
private char[] _unprocessedBufferToBeRead; // Buffer that might have already been read from stdin but not yet processed.
private const int BytesToBeRead = 1024; // No. of bytes to be read from the stream at a time.
private int _startIndex; // First unprocessed index in the buffer;
private int _endIndex; // Index after last unprocessed index in the buffer;
// Sets up the reader with a char buffer sized for the worst-case decoding of one
// BytesToBeRead-sized stdin read. (bufferSize is currently unused.)
internal StdInReader(Encoding encoding, int bufferSize)
{
    _encoding = encoding;
    _readLineSB = new StringBuilder();
    _startIndex = 0;
    _endIndex = 0;
    _unprocessedBufferToBeRead = new char[encoding.GetMaxCharCount(BytesToBeRead)];
}
/// <summary> Checks whether the unprocessed buffer is empty (everything has been processed). </summary>
internal bool IsUnprocessedBufferEmpty() => _startIndex >= _endIndex;
// Decodes `bufferLength` bytes already read from stdin and appends the resulting chars
// to the unprocessed buffer for later key processing.
internal unsafe void AppendExtraBuffer(byte* buffer, int bufferLength)
{
    // Then convert the bytes to chars
    int charLen = _encoding.GetMaxCharCount(bufferLength);
    char* charPtr = stackalloc char[charLen];
    charLen = _encoding.GetChars(buffer, bufferLength, charPtr, charLen);

    // Ensure our buffer is large enough to hold all of the data
    if (IsUnprocessedBufferEmpty())
    {
        _startIndex = _endIndex = 0;
    }
    else
    {
        Debug.Assert(_endIndex > 0);
        int spaceRemaining = _unprocessedBufferToBeRead.Length - _endIndex;
        if (spaceRemaining < charLen)
        {
            // Fix: a single doubling could still leave less than charLen free space,
            // letting Marshal.Copy below write past the end of the array. Grow to at
            // least _endIndex + charLen.
            Array.Resize(ref _unprocessedBufferToBeRead,
                Math.Max(_unprocessedBufferToBeRead.Length * 2, _endIndex + charLen));
        }
    }

    // Copy the data into our buffer
    Marshal.Copy((IntPtr)charPtr, _unprocessedBufferToBeRead, _endIndex, charLen);
    _endIndex += charLen;
}
// Reads raw bytes from stdin via the native shim; CheckIo converts native error
// results into exceptions, so `result` is always a valid byte count.
internal unsafe int ReadStdin(byte* buffer, int bufferSize)
{
    int result = Interop.CheckIo(Interop.Sys.ReadStdin(buffer, bufferSize));
    Debug.Assert(result >= 0 && result <= bufferSize); // may be 0 if hits EOL
    return result;
}
// Public ReadLine consumes the keys it reads (they are not replayed to Read/Peek).
public override string ReadLine() => ReadLine(consumeKeys: true);
// Reads one line key-by-key, echoing and handling Enter/EOL/Backspace/Tab/Clear
// specially. When consumeKeys is false, the keys read are preserved (via _tmpKeys ->
// _availableKeys in the finally block) so a later Read()/Peek() can replay them.
// Returns null when the line ends without content (e.g. EOL on an empty line).
private string ReadLine(bool consumeKeys)
{
    Debug.Assert(_tmpKeys.Count == 0);
    string readLineStr = null;
    // Disable echo and buffering. These will be disabled for the duration of the line read.
    Interop.Sys.InitializeConsoleBeforeRead();
    try
    {
        // Read key-by-key until we've read a line.
        while (true)
        {
            // Read the next key. This may come from previously read keys, from previously read but
            // unprocessed data, or from an actual stdin read.
            bool previouslyProcessed;
            ConsoleKeyInfo keyInfo = ReadKey(out previouslyProcessed);
            if (!consumeKeys && keyInfo.Key != ConsoleKey.Backspace) // backspace is the only character not written out in the below if/elses.
            {
                _tmpKeys.Push(keyInfo);
            }
            // Handle the next key. Since for other functions we may have ended up reading some of the user's
            // input, we need to be able to handle manually processing that input, and so we do that processing
            // for all input. As such, we need to special-case a few characters, e.g. recognizing when Enter is
            // pressed to end a line. We also need to handle Backspace specially, to fix up both our buffer of
            // characters and the position of the cursor. More advanced processing would be possible, but we
            // try to keep this very simple, at least for now.
            if (keyInfo.Key == ConsoleKey.Enter)
            {
                readLineStr = _readLineSB.ToString();
                _readLineSB.Clear();
                if (!previouslyProcessed)
                {
                    Console.WriteLine();
                }
                break;
            }
            else if (IsEol(keyInfo.KeyChar))
            {
                // Terminal EOL char: end the line; a zero-length line yields null.
                string line = _readLineSB.ToString();
                _readLineSB.Clear();
                if (line.Length > 0)
                {
                    readLineStr = line;
                }
                break;
            }
            else if (keyInfo.Key == ConsoleKey.Backspace)
            {
                int len = _readLineSB.Length;
                if (len > 0)
                {
                    _readLineSB.Length = len - 1;
                    if (!previouslyProcessed)
                    {
                        // Erase the echoed char: move left, overwrite with space, move left again.
                        if (s_moveLeftString == null)
                        {
                            string moveLeft = ConsolePal.TerminalFormatStrings.Instance.CursorLeft;
                            s_moveLeftString = !string.IsNullOrEmpty(moveLeft) ? moveLeft + " " + moveLeft : string.Empty;
                        }
                        Console.Write(s_moveLeftString);
                    }
                }
            }
            else if (keyInfo.Key == ConsoleKey.Tab)
            {
                _readLineSB.Append(keyInfo.KeyChar);
                if (!previouslyProcessed)
                {
                    Console.Write(' ');
                }
            }
            else if (keyInfo.Key == ConsoleKey.Clear)
            {
                _readLineSB.Clear();
                if (!previouslyProcessed)
                {
                    Console.Clear();
                }
            }
            else if (keyInfo.KeyChar != '\0')
            {
                _readLineSB.Append(keyInfo.KeyChar);
                if (!previouslyProcessed)
                {
                    Console.Write(keyInfo.KeyChar);
                }
            }
        }
    }
    finally
    {
        Interop.Sys.UninitializeConsoleAfterRead();
        // If we're not consuming the read input, make the keys available for a future read
        while (_tmpKeys.Count > 0)
        {
            _availableKeys.Push(_tmpKeys.Pop());
        }
    }
    return readLineStr;
}
// Reads (and consumes) the next available char, or -1 at EOL.
public override int Read()
{
    return ReadOrPeek(peek: false);
}
// Returns the next available char without consuming it, or -1 at EOL.
public override int Peek()
{
    return ReadOrPeek(peek: true);
}
// Shared implementation for Read/Peek: refills _availableKeys via a non-consuming
// ReadLine when empty, then returns the next key's char, or -1 for EOL.
private int ReadOrPeek(bool peek)
{
    // If there aren't any keys in our processed keys stack, read a line to populate it.
    if (_availableKeys.Count == 0)
    {
        ReadLine(consumeKeys: false);
    }

    if (_availableKeys.Count == 0)
    {
        return -1; // nothing became available: EOL
    }

    ConsoleKeyInfo keyInfo = peek ? _availableKeys.Peek() : _availableKeys.Pop();
    return IsEol(keyInfo.KeyChar) ? -1 : keyInfo.KeyChar;
}
// True when `c` is one of the terminal's configured EOL/EOF characters.
// The posix "disabled" sentinel never counts, even if an EOL setting equals it.
private static bool IsEol(char c)
{
    if (c == ConsolePal.s_posixDisableValue)
    {
        return false;
    }
    return c == ConsolePal.s_veolCharacter
        || c == ConsolePal.s_veol2Character
        || c == ConsolePal.s_veofCharacter;
}
// Maps a single char to a ConsoleKey, reporting Shift (uppercase letters) and Ctrl
// (control chars 0x01-0x1A) as out params. Returns default(ConsoleKey) for chars with
// no mapping (caller treats that as "unrecognized").
internal ConsoleKey GetKeyFromCharValue(char x, out bool isShift, out bool isCtrl)
{
    isShift = false;
    isCtrl = false;
    switch (x)
    {
        case '\b':
            return ConsoleKey.Backspace;
        case '\t':
            return ConsoleKey.Tab;
        case '\n':
            return ConsoleKey.Enter;
        case (char)(0x1B):
            return ConsoleKey.Escape;
        case '*':
            return ConsoleKey.Multiply;
        case '+':
            return ConsoleKey.Add;
        case '-':
            return ConsoleKey.Subtract;
        case '/':
            return ConsoleKey.Divide;
        case (char)(0x7F):
            return ConsoleKey.Delete;
        case ' ':
            return ConsoleKey.Spacebar;
        default:
            // 1. Ctrl A to Ctrl Z.
            if (x >= 1 && x <= 26)
            {
                isCtrl = true;
                return ConsoleKey.A + x - 1;
            }
            // 2. Numbers from 0 to 9.
            if (x >= '0' && x <= '9')
            {
                return ConsoleKey.D0 + x - '0';
            }
            //3. A to Z
            if (x >= 'A' && x <= 'Z')
            {
                isShift = true;
                return ConsoleKey.A + (x - 'A');
            }
            // 4. a to z.
            if (x >= 'a' && x <= 'z')
            {
                return ConsoleKey.A + (x - 'a');
            }
            break;
    }
    return default(ConsoleKey);
}
// Consumes chars from the unprocessed buffer and maps them to a ConsoleKey plus
// modifiers. Tries, in order: terminfo special-key sequences, an Esc-prefixed
// (Alt) combination via one level of recursion, then a single-char mapping.
// Advances _startIndex past whatever it consumed; returns false for an unmapped char.
internal bool MapBufferToConsoleKey(out ConsoleKey key, out char ch, out bool isShift, out bool isAlt, out bool isCtrl)
{
    Debug.Assert(!IsUnprocessedBufferEmpty());
    // Try to get the special key match from the TermInfo static information.
    ConsoleKeyInfo keyInfo;
    int keyLength;
    if (ConsolePal.TryGetSpecialConsoleKey(_unprocessedBufferToBeRead, _startIndex, _endIndex, out keyInfo, out keyLength))
    {
        key = keyInfo.Key;
        isShift = (keyInfo.Modifiers & ConsoleModifiers.Shift) != 0;
        isAlt = (keyInfo.Modifiers & ConsoleModifiers.Alt) != 0;
        isCtrl = (keyInfo.Modifiers & ConsoleModifiers.Control) != 0;
        ch = ((keyLength == 1) ? _unprocessedBufferToBeRead[_startIndex] : '\0'); // ignore keyInfo.KeyChar
        _startIndex += keyLength;
        return true;
    }
    // Check if we can match Esc + combination and guess if alt was pressed.
    isAlt = isCtrl = isShift = false;
    if (_unprocessedBufferToBeRead[_startIndex] == (char)0x1B && // Alt is send as an escape character
        _endIndex - _startIndex >= 2) // We have at least two characters to read
    {
        _startIndex++;
        if (MapBufferToConsoleKey(out key, out ch, out isShift, out isAlt, out isCtrl))
        {
            isAlt = true;
            return true;
        }
        else
        {
            // We could not find a matching key here so, Alt+ combination assumption is in-correct.
            // The current key needs to be marked as Esc key.
            // Also, we do not increment _startIndex as we already did it.
            key = ConsoleKey.Escape;
            ch = (char)0x1B;
            isAlt = false;
            return true;
        }
    }
    // Try reading the first char in the buffer and interpret it as a key.
    ch = _unprocessedBufferToBeRead[_startIndex++];
    key = GetKeyFromCharValue(ch, out isShift, out isCtrl);
    return key != default(ConsoleKey);
}
/// <summary>
/// Try to intercept the key pressed.
///
/// Unlike Windows, Unix has no concept of virtual key codes.
/// Hence, in case we do not recognize a key, we can't really
/// get the ConsoleKey key code associated with it.
/// As a result, we try to recognize the key, and if that does
/// not work, we simply return the char associated with that
/// key with ConsoleKey set to default value.
/// </summary>
/// <param name="previouslyProcessed">
/// Set to true when the returned key came from _availableKeys (input already read from
/// stdin and decoded on an earlier call); false when produced from fresh stdin input.
/// </param>
public unsafe ConsoleKeyInfo ReadKey(out bool previouslyProcessed)
{
    // Order of reading:
    // 1. A read should first consult _availableKeys, as this contains input that has already been both read from stdin and processed into ConsoleKeyInfos.
    // 2. If _availableKeys is empty, then _unprocessedBufferToRead should be consulted. This is input from stdin that was read in bulk but has yet to be processed.
    // 3. Finally if _unprocessedBufferToRead is empty, input must be obtained from ReadStdinUnbuffered.
    if (_availableKeys.Count > 0)
    {
        previouslyProcessed = true;
        return _availableKeys.Pop();
    }

    previouslyProcessed = false;

    // Prepare the terminal for a raw read; always undone in the finally block below.
    Interop.Sys.InitializeConsoleBeforeRead();
    try
    {
        ConsoleKey key;
        char ch;
        bool isAlt, isCtrl, isShift;

        if (IsUnprocessedBufferEmpty())
        {
            // Read in bytes
            byte* bufPtr = stackalloc byte[BytesToBeRead];
            int result = ReadStdin(bufPtr, BytesToBeRead);
            if (result > 0)
            {
                // Append them
                AppendExtraBuffer(bufPtr, result);
            }
            else
            {
                // Could be empty if EOL entered on its own. Pick one of the EOL characters we have,
                // or just use 0 if none are available.
                return new ConsoleKeyInfo((char)
                    (ConsolePal.s_veolCharacter != ConsolePal.s_posixDisableValue ? ConsolePal.s_veolCharacter :
                     ConsolePal.s_veol2Character != ConsolePal.s_posixDisableValue ? ConsolePal.s_veol2Character :
                     ConsolePal.s_veofCharacter != ConsolePal.s_posixDisableValue ? ConsolePal.s_veofCharacter :
                     0),
                    default(ConsoleKey), false, false, false);
            }
        }

        // Decode one key press out of the (now non-empty) unprocessed buffer.
        MapBufferToConsoleKey(out key, out ch, out isShift, out isAlt, out isCtrl);
        return new ConsoleKeyInfo(ch, key, isShift, isAlt, isCtrl);
    }
    finally
    {
        Interop.Sys.UninitializeConsoleAfterRead();
    }
}
/// <summary>Gets whether there's input waiting on stdin.</summary>
internal bool StdinReady => Interop.Sys.StdinReady();
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using HttpServerLib=HttpServer;
using log4net;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using System.Net;
using System.Reflection;
using System.Text;
using System.Web;
namespace OpenSim.Framework.Servers.HttpServer
{
/// <summary>
/// Adapts an HttpServer-library request and its client context to the
/// <see cref="IOSHttpRequest"/> interface consumed by OpenSim HTTP handlers.
/// </summary>
public class OSHttpRequest : IOSHttpRequest
{
    private static readonly ILog _log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    protected HttpServerLib.HttpRequest _request = null;
    protected HttpServerLib.HttpClientContext _context = null;

    public string[] AcceptTypes
    {
        get { return _request.AcceptTypes; }
    }

    /// <summary>Encoding parsed from the "content-encoding" header; null when absent or unrecognized.</summary>
    public Encoding ContentEncoding
    {
        get { return _contentEncoding; }
    }
    private Encoding _contentEncoding;

    public long ContentLength
    {
        get { return _request.ContentLength; }
    }

    /// <summary>Alias for <see cref="ContentLength"/>, kept for HttpListener-style callers.</summary>
    public long ContentLength64
    {
        get { return ContentLength; }
    }

    public string ContentType
    {
        get { return _contentType; }
    }
    private string _contentType;

    /// <summary>Request cookies converted to a System.Web collection; rebuilt on every access.</summary>
    public HttpCookieCollection Cookies
    {
        get
        {
            HttpServerLib.RequestCookies cookies = _request.Cookies;
            HttpCookieCollection httpCookies = new HttpCookieCollection();
            foreach (HttpServerLib.RequestCookie cookie in cookies)
                httpCookies.Add(new HttpCookie(cookie.Name, cookie.Value));
            return httpCookies;
        }
    }

    public bool HasEntityBody
    {
        get { return _request.ContentLength != 0; }
    }

    public NameValueCollection Headers
    {
        get { return _request.Headers; }
    }

    public string HttpMethod
    {
        get { return _request.Method; }
    }

    public Stream InputStream
    {
        get { return _request.Body; }
    }

    public bool IsSecured
    {
        get { return _context.IsSecured; }
    }

    public bool KeepAlive
    {
        get { return HttpServerLib.ConnectionType.KeepAlive == _request.Connection; }
    }

    public NameValueCollection QueryString
    {
        get { return _queryString; }
    }
    private NameValueCollection _queryString;

    public Hashtable Query
    {
        get { return _query; }
    }
    private Hashtable _query;

    public string RawUrl
    {
        get { return _request.Uri.AbsolutePath; }
    }

    /// <summary>
    /// Remote endpoint parsed from the "remote_addr"/"remote_port" headers;
    /// null when those headers are absent or unparsable.
    /// </summary>
    public IPEndPoint RemoteIPEndPoint
    {
        get { return _remoteIPEndPoint; }
    }
    private IPEndPoint _remoteIPEndPoint;

    public Uri Url
    {
        get { return _request.Uri; }
    }

    public string UserAgent
    {
        get { return _userAgent; }
    }
    private string _userAgent;

    internal HttpServerLib.HttpRequest HttpRequest
    {
        get { return _request; }
    }

    internal HttpServerLib.HttpClientContext HttpClientContext
    {
        get { return _context; }
    }

    /// <summary>
    /// Internal whiteboard for handlers to store temporary stuff into.
    /// </summary>
    internal Dictionary<string, object> Whiteboard
    {
        get { return _whiteboard; }
    }
    private Dictionary<string, object> _whiteboard = new Dictionary<string, object>();

    public OSHttpRequest() {}

    public OSHttpRequest(HttpServerLib.HttpClientContext context, HttpServerLib.HttpRequest req)
    {
        _request = req;
        _context = context;

        if (null != req.Headers["content-encoding"])
        {
            try
            {
                _contentEncoding = Encoding.GetEncoding(_request.Headers["content-encoding"]);
            }
            catch (Exception)
            {
                // Unknown encoding name: leave _contentEncoding null rather than failing the request.
            }
        }

        if (null != req.Headers["content-type"])
            _contentType = _request.Headers["content-type"];
        if (null != req.Headers["user-agent"])
            _userAgent = req.Headers["user-agent"];

        if (null != req.Headers["remote_addr"])
        {
            try
            {
                IPAddress addr = IPAddress.Parse(req.Headers["remote_addr"]);
                // sometimes req.Headers["remote_port"] returns a comma separated list, so use
                // the first one in the list and log it
                string[] strPorts = req.Headers["remote_port"].Split(new char[] { ',' });
                if (strPorts.Length > 1)
                {
                    // BUGFIX: the old message ("format exception ... ignoring") was copy-pasted from the
                    // catch block below and was false on both counts — no format exception occurred and
                    // the port is not ignored; we deliberately fall back to the first entry.
                    _log.WarnFormat("[OSHttpRequest]: remote_port header contains multiple values ({0}), using the first",
                                    req.Headers["remote_port"]);
                }
                int port = Int32.Parse(strPorts[0]);
                _remoteIPEndPoint = new IPEndPoint(addr, port);
            }
            catch (FormatException)
            {
                _log.ErrorFormat("[OSHttpRequest]: format exception on addr/port {0}:{1}, ignoring",
                                 req.Headers["remote_addr"], req.Headers["remote_port"]);
            }
        }

        _queryString = new NameValueCollection();
        _query = new Hashtable();
        try
        {
            foreach (HttpServerLib.HttpInputItem item in req.QueryString)
            {
                try
                {
                    _queryString.Add(item.Name, item.Value);
                    _query[item.Name] = item.Value;
                }
                catch (InvalidCastException)
                {
                    // Skip items whose value cannot be represented as a string; keep processing the rest.
                    _log.DebugFormat("[OSHttpRequest]: error parsing {0} query item, skipping it", item.Name);
                }
            }
        }
        catch (Exception)
        {
            // Best effort: a malformed querystring leaves _queryString/_query partially populated.
            _log.ErrorFormat("[OSHttpRequest]: Error parsing querystring");
        }
    }

    /// <summary>Human-readable dump of method, path, headers and remote endpoint, for logging.</summary>
    public override string ToString()
    {
        StringBuilder me = new StringBuilder();
        me.Append(String.Format("OSHttpRequest: {0} {1}\n", HttpMethod, RawUrl));
        foreach (string k in Headers.AllKeys)
        {
            me.Append(String.Format(" {0}: {1}\n", k, Headers[k]));
        }
        if (null != RemoteIPEndPoint)
        {
            me.Append(String.Format(" IP: {0}\n", RemoteIPEndPoint));
        }
        return me.ToString();
    }
}
}
| |
using System;
using System.Text;
/// <summary>
/// StringBuilder.Chars Property
/// Gets or sets the character at the specified character position in this instance.
/// </summary>
/// <summary>
/// StringBuilder.Chars Property
/// Gets or sets the character at the specified character position in this instance.
/// Positive tests verify get/set round-trips; negative tests verify that the getter
/// throws IndexOutOfRangeException and the setter throws ArgumentOutOfRangeException
/// for out-of-range indices.
/// </summary>
public class StringBuilderChars
{
    private const int c_MIN_STR_LEN = 1;
    private const int c_MAX_STR_LEN = 260;
    private const int c_MAX_CAPACITY = Int16.MaxValue;

    /// <summary>Test-harness entry point: returns 100 on success, 0 on failure.</summary>
    public static int Main()
    {
        StringBuilderChars testObj = new StringBuilderChars();

        TestLibrary.TestFramework.BeginTestCase("for property: StringBuilder.Chars");
        if (testObj.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    /// <summary>Runs every scenario; true only when all of them pass.</summary>
    public bool RunTests()
    {
        bool retVal = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;

        TestLibrary.TestFramework.LogInformation("[Negative]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;
        retVal = NegTest4() && retVal;

        return retVal;
    }

    #region Positive tests
    /// <summary>Getter returns the same character as the source string at a random in-range index.</summary>
    public bool PosTest1()
    {
        bool retVal = true;
        const string c_TEST_ID = "P001";
        const string c_TEST_DESC = "PosTest1: Get the Chars property";
        string errorDesc;

        TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
        try
        {
            string str = TestLibrary.Generator.GetString(-55, false, c_MIN_STR_LEN, c_MAX_STR_LEN);
            StringBuilder sb = new StringBuilder(str);
            int index = TestLibrary.Generator.GetInt32(-55) % str.Length;
            char expectedChar = str[index];

            char actualChar = sb[index];
            if (actualChar != expectedChar)
            {
                // BUGFIX: diagnostic message previously misspelled "specified" as "sepcifed".
                errorDesc = "Character of current StringBuilder " + sb + " at specified index " + index
                    + " is not the value ";
                errorDesc += string.Format("{0} as expected: actual({1})", expectedChar, actualChar);
                TestLibrary.TestFramework.LogError("001" + " TestId-" + c_TEST_ID, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            errorDesc = "Unexpected exception: " + e;
            TestLibrary.TestFramework.LogError("002" + " TestId-" + c_TEST_ID, errorDesc);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>Setter stores a random character at a random in-range index and the getter reads it back.</summary>
    public bool PosTest2()
    {
        bool retVal = true;
        const string c_TEST_ID = "P002";
        const string c_TEST_DESC = "PosTest2: Set the Chars property";
        string errorDesc;

        TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
        try
        {
            string str = TestLibrary.Generator.GetString(-55, false, c_MIN_STR_LEN, c_MAX_STR_LEN);
            StringBuilder sb = new StringBuilder(str);
            int index = TestLibrary.Generator.GetInt32(-55) % str.Length;
            char expectedChar = TestLibrary.Generator.GetChar(-55);

            sb[index] = expectedChar;
            char actualChar = sb[index];
            if (actualChar != expectedChar)
            {
                // BUGFIX: diagnostic message previously misspelled "specified" as "sepcifed".
                errorDesc = "Character of current StringBuilder " + sb + " at specified index " + index
                    + " is not the value ";
                errorDesc += string.Format("{0} as expected: actual({1})", expectedChar, actualChar);
                TestLibrary.TestFramework.LogError("003" + " TestId-" + c_TEST_ID, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            errorDesc = "Unexpected exception: " + e;
            TestLibrary.TestFramework.LogError("004" + " TestId-" + c_TEST_ID, errorDesc);
            retVal = false;
        }
        return retVal;
    }
    #endregion

    #region Negative tests
    // The four negative scenarios differ only in accessor (get/set), index sign, and error numbers,
    // so they share one private driver. Expected exceptions: getter -> IndexOutOfRangeException,
    // setter -> ArgumentOutOfRangeException.

    public bool NegTest1()
    {
        return RunNegativeTest("N001",
            "NegTest1: index is greater than or equal the current length of this instance while getting a character.",
            isSetter: false, useNegativeIndex: false, errorNumNotThrown: "005", errorNumUnexpected: "006");
    }

    public bool NegTest2()
    {
        return RunNegativeTest("N002",
            "NegTest2: index is less than zero while getting a character.",
            isSetter: false, useNegativeIndex: true, errorNumNotThrown: "007", errorNumUnexpected: "008");
    }

    public bool NegTest3()
    {
        return RunNegativeTest("N003",
            "NegTest3: index is greater than or equal the current length of this instance while setting a character.",
            isSetter: true, useNegativeIndex: false, errorNumNotThrown: "009", errorNumUnexpected: "010");
    }

    public bool NegTest4()
    {
        return RunNegativeTest("N004",
            "NegTest4: index is less than zero while setting a character.",
            isSetter: true, useNegativeIndex: true, errorNumNotThrown: "011", errorNumUnexpected: "012");
    }

    /// <summary>
    /// Shared driver for the negative scenarios: accesses the indexer with an out-of-range
    /// index and fails unless the expected exception type is thrown.
    /// </summary>
    /// <param name="isSetter">True to exercise the setter; false for the getter.</param>
    /// <param name="useNegativeIndex">True for a negative index; false for index >= Length.</param>
    private bool RunNegativeTest(string testId, string testDesc, bool isSetter, bool useNegativeIndex,
                                 string errorNumNotThrown, string errorNumUnexpected)
    {
        bool retVal = true;

        string str = TestLibrary.Generator.GetString(-55, false, c_MIN_STR_LEN, c_MAX_STR_LEN);
        StringBuilder sb = new StringBuilder(str);
        int currentInstanceLength = str.Length;
        int index = useNegativeIndex
            ? -1 * TestLibrary.Generator.GetInt32(-55) - 1
            : currentInstanceLength + TestLibrary.Generator.GetInt32(-55) % (int.MaxValue - currentInstanceLength);
        string expectedException = isSetter ? "ArgumentOutOfRangeException" : "IndexOutOfRangeException";

        TestLibrary.TestFramework.BeginScenario(testDesc);
        try
        {
            if (isSetter)
            {
                sb[index] = TestLibrary.Generator.GetChar(-55);
            }
            else
            {
                char unused = sb[index];
            }

            string errorDesc = expectedException + " is not thrown as expected."
                + DescribeState(str, currentInstanceLength, index);
            TestLibrary.TestFramework.LogError(errorNumNotThrown + " TestId-" + testId, errorDesc);
            retVal = false;
        }
        catch (Exception e)
        {
            bool isExpected = isSetter ? e is ArgumentOutOfRangeException : e is IndexOutOfRangeException;
            if (!isExpected)
            {
                string errorDesc = "Unexpected exception: " + e
                    + DescribeState(str, currentInstanceLength, index);
                TestLibrary.TestFramework.LogError(errorNumUnexpected + " TestId-" + testId, errorDesc);
                retVal = false;
            }
        }
        return retVal;
    }

    /// <summary>Formats the shared diagnostic context appended to every negative-test error.</summary>
    private static string DescribeState(string str, int currentInstanceLength, int index)
    {
        return string.Format("\nString value of StringBuilder is {0}", str)
             + string.Format("\nCurrent length of instance is {0}, index specified is {1}",
                             currentInstanceLength, index);
    }
    #endregion
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V9.Services
{
/// <summary>Settings for <see cref="GenderViewServiceClient"/> instances.</summary>
/// <summary>Settings for <see cref="GenderViewServiceClient"/> instances.</summary>
// NOTE(review): generated file ("Generated code. DO NOT EDIT!" header) — comments only added here;
// behavioral changes belong in the code generator, not in this file.
public sealed partial class GenderViewServiceSettings : gaxgrpc::ServiceSettingsBase
{
    /// <summary>Get a new instance of the default <see cref="GenderViewServiceSettings"/>.</summary>
    /// <returns>A new instance of the default <see cref="GenderViewServiceSettings"/>.</returns>
    public static GenderViewServiceSettings GetDefault() => new GenderViewServiceSettings();

    /// <summary>Constructs a new <see cref="GenderViewServiceSettings"/> object with default settings.</summary>
    public GenderViewServiceSettings()
    {
    }

    // Copy constructor used by Clone(); OnCopy lets other partial-class files copy any state they add.
    private GenderViewServiceSettings(GenderViewServiceSettings existing) : base(existing)
    {
        gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
        GetGenderViewSettings = existing.GetGenderViewSettings;
        OnCopy(existing);
    }

    partial void OnCopy(GenderViewServiceSettings existing);

    /// <summary>
    /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
    /// <c>GenderViewServiceClient.GetGenderView</c> and <c>GenderViewServiceClient.GetGenderViewAsync</c>.
    /// </summary>
    /// <remarks>
    /// <list type="bullet">
    /// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
    /// <item><description>Retry delay multiplier: 1.3</description></item>
    /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
    /// <item><description>Maximum attempts: Unlimited</description></item>
    /// <item>
    /// <description>
    /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
    /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
    /// </description>
    /// </item>
    /// <item><description>Timeout: 3600 seconds.</description></item>
    /// </list>
    /// </remarks>
    public gaxgrpc::CallSettings GetGenderViewSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));

    /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
    /// <returns>A deep clone of this <see cref="GenderViewServiceSettings"/> object.</returns>
    public GenderViewServiceSettings Clone() => new GenderViewServiceSettings(this);
}
/// <summary>
/// Builder class for <see cref="GenderViewServiceClient"/> to provide simple configuration of credentials, endpoint
/// etc.
/// </summary>
// NOTE(review): generated file ("Generated code. DO NOT EDIT!" header) — comments only added here;
// behavioral changes belong in the code generator, not in this file.
internal sealed partial class GenderViewServiceClientBuilder : gaxgrpc::ClientBuilderBase<GenderViewServiceClient>
{
    /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
    public GenderViewServiceSettings Settings { get; set; }

    /// <summary>Creates a new builder with default settings.</summary>
    public GenderViewServiceClientBuilder()
    {
        UseJwtAccessWithScopes = GenderViewServiceClient.UseJwtAccessWithScopes;
    }

    // Partial hooks let other partial-class files replace or wrap the built client.
    partial void InterceptBuild(ref GenderViewServiceClient client);

    partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<GenderViewServiceClient> task);

    /// <summary>Builds the resulting client.</summary>
    public override GenderViewServiceClient Build()
    {
        GenderViewServiceClient client = null;
        InterceptBuild(ref client);
        // Fall back to the standard build path unless an interceptor supplied a client.
        return client ?? BuildImpl();
    }

    /// <summary>Builds the resulting client asynchronously.</summary>
    public override stt::Task<GenderViewServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
    {
        stt::Task<GenderViewServiceClient> task = null;
        InterceptBuildAsync(cancellationToken, ref task);
        // Fall back to the standard build path unless an interceptor supplied a task.
        return task ?? BuildAsyncImpl(cancellationToken);
    }

    private GenderViewServiceClient BuildImpl()
    {
        Validate();
        grpccore::CallInvoker callInvoker = CreateCallInvoker();
        return GenderViewServiceClient.Create(callInvoker, Settings);
    }

    private async stt::Task<GenderViewServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
    {
        Validate();
        grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
        return GenderViewServiceClient.Create(callInvoker, Settings);
    }

    /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
    protected override string GetDefaultEndpoint() => GenderViewServiceClient.DefaultEndpoint;

    /// <summary>
    /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
    /// </summary>
    protected override scg::IReadOnlyList<string> GetDefaultScopes() => GenderViewServiceClient.DefaultScopes;

    /// <summary>Returns the channel pool to use when no other options are specified.</summary>
    protected override gaxgrpc::ChannelPool GetChannelPool() => GenderViewServiceClient.ChannelPool;

    /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
    protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>GenderViewService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to manage gender views.
/// </remarks>
public abstract partial class GenderViewServiceClient
{
/// <summary>
/// The default endpoint for the GenderViewService service, which is a host of "googleads.googleapis.com" and a
/// port of 443.
/// </summary>
public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
/// <summary>The default GenderViewService scopes.</summary>
/// <remarks>
/// The default GenderViewService scopes are:
/// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
/// </remarks>
public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
{
"https://www.googleapis.com/auth/adwords",
});
internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
internal static bool UseJwtAccessWithScopes
{
get
{
bool useJwtAccessWithScopes = true;
MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
return useJwtAccessWithScopes;
}
}
static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
/// <summary>
/// Asynchronously creates a <see cref="GenderViewServiceClient"/> using the default credentials, endpoint and
/// settings. To specify custom credentials or other settings, use <see cref="GenderViewServiceClientBuilder"/>.
/// </summary>
/// <param name="cancellationToken">
/// The <see cref="st::CancellationToken"/> to use while creating the client.
/// </param>
/// <returns>The task representing the created <see cref="GenderViewServiceClient"/>.</returns>
public static stt::Task<GenderViewServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
new GenderViewServiceClientBuilder().BuildAsync(cancellationToken);
/// <summary>
/// Synchronously creates a <see cref="GenderViewServiceClient"/> using the default credentials, endpoint and
/// settings. To specify custom credentials or other settings, use <see cref="GenderViewServiceClientBuilder"/>.
/// </summary>
/// <returns>The created <see cref="GenderViewServiceClient"/>.</returns>
public static GenderViewServiceClient Create() => new GenderViewServiceClientBuilder().Build();
/// <summary>
/// Creates a <see cref="GenderViewServiceClient"/> which uses the specified call invoker for remote operations.
/// </summary>
/// <param name="callInvoker">
/// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
/// </param>
/// <param name="settings">Optional <see cref="GenderViewServiceSettings"/>.</param>
/// <returns>The created <see cref="GenderViewServiceClient"/>.</returns>
internal static GenderViewServiceClient Create(grpccore::CallInvoker callInvoker, GenderViewServiceSettings settings = null)
{
gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
grpcinter::Interceptor interceptor = settings?.Interceptor;
if (interceptor != null)
{
callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
}
GenderViewService.GenderViewServiceClient grpcClient = new GenderViewService.GenderViewServiceClient(callInvoker);
return new GenderViewServiceClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
/// affected.
/// </summary>
/// <remarks>
/// After calling this method, further calls to <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
/// by another call to this method.
/// </remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
/// <summary>The underlying gRPC GenderViewService client</summary>
public virtual GenderViewService.GenderViewServiceClient GrpcClient => throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::GenderView GetGenderView(GetGenderViewRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::GenderView> GetGenderViewAsync(GetGenderViewRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::GenderView> GetGenderViewAsync(GetGenderViewRequest request, st::CancellationToken cancellationToken) =>
GetGenderViewAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the gender view to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::GenderView GetGenderView(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetGenderView(new GetGenderViewRequest
{
ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the gender view to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::GenderView> GetGenderViewAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetGenderViewAsync(new GetGenderViewRequest
{
ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the gender view to fetch.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::GenderView> GetGenderViewAsync(string resourceName, st::CancellationToken cancellationToken)
{
    // Convert the token into per-call settings and delegate to the CallSettings overload.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return GetGenderViewAsync(resourceName, settings);
}
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the gender view to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::GenderView GetGenderView(gagvr::GenderViewName resourceName, gaxgrpc::CallSettings callSettings = null)
{
    // Validate the typed resource name, wrap it in a request, and delegate to the request overload.
    GetGenderViewRequest request = new GetGenderViewRequest
    {
        ResourceNameAsGenderViewName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
    };
    return GetGenderView(request, callSettings);
}
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the gender view to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::GenderView> GetGenderViewAsync(gagvr::GenderViewName resourceName, gaxgrpc::CallSettings callSettings = null)
{
    // Validate the typed resource name, wrap it in a request, and delegate to the request overload.
    GetGenderViewRequest request = new GetGenderViewRequest
    {
        ResourceNameAsGenderViewName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
    };
    return GetGenderViewAsync(request, callSettings);
}
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the gender view to fetch.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::GenderView> GetGenderViewAsync(gagvr::GenderViewName resourceName, st::CancellationToken cancellationToken)
{
    // Convert the token into per-call settings and delegate to the CallSettings overload.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return GetGenderViewAsync(resourceName, settings);
}
}
/// <summary>GenderViewService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to manage gender views.
/// </remarks>
public sealed partial class GenderViewServiceClientImpl : GenderViewServiceClient
{
// Typed API-call wrapper for the GetGenderView RPC; built once in the constructor and
// shared by the sync and async overrides below.
private readonly gaxgrpc::ApiCall<GetGenderViewRequest, gagvr::GenderView> _callGetGenderView;
/// <summary>
/// Constructs a client wrapper for the GenderViewService service, with the specified gRPC client and settings.
/// </summary>
/// <param name="grpcClient">The underlying gRPC client.</param>
/// <param name="settings">The base <see cref="GenderViewServiceSettings"/> used within this client.</param>
public GenderViewServiceClientImpl(GenderViewService.GenderViewServiceClient grpcClient, GenderViewServiceSettings settings)
{
GrpcClient = grpcClient;
// Fall back to the default settings when the caller passed null.
GenderViewServiceSettings effectiveSettings = settings ?? GenderViewServiceSettings.GetDefault();
gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
// The routing header "resource_name" is populated from each request's ResourceName.
_callGetGenderView = clientHelper.BuildApiCall<GetGenderViewRequest, gagvr::GenderView>(grpcClient.GetGenderViewAsync, grpcClient.GetGenderView, effectiveSettings.GetGenderViewSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName);
// Partial-method hooks run in a fixed order: generic modifier, per-RPC modifier, then
// the general construction hook. User code may implement any of them in another file.
Modify_ApiCall(ref _callGetGenderView);
Modify_GetGenderViewApiCall(ref _callGetGenderView);
OnConstruction(grpcClient, effectiveSettings, clientHelper);
}
// Extension points; no-ops unless implemented in a hand-written partial class file.
partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
partial void Modify_GetGenderViewApiCall(ref gaxgrpc::ApiCall<GetGenderViewRequest, gagvr::GenderView> call);
partial void OnConstruction(GenderViewService.GenderViewServiceClient grpcClient, GenderViewServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
/// <summary>The underlying gRPC GenderViewService client</summary>
public override GenderViewService.GenderViewServiceClient GrpcClient { get; }
// Per-call hook allowing a partial class to rewrite the request/settings before dispatch.
partial void Modify_GetGenderViewRequest(ref GetGenderViewRequest request, ref gaxgrpc::CallSettings settings);
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override gagvr::GenderView GetGenderView(GetGenderViewRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_GetGenderViewRequest(ref request, ref callSettings);
return _callGetGenderView.Sync(request, callSettings);
}
/// <summary>
/// Returns the requested gender view in full detail.
///
/// List of thrown errors:
/// [AuthenticationError]()
/// [AuthorizationError]()
/// [HeaderError]()
/// [InternalError]()
/// [QuotaError]()
/// [RequestError]()
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<gagvr::GenderView> GetGenderViewAsync(GetGenderViewRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_GetGenderViewRequest(ref request, ref callSettings);
return _callGetGenderView.Async(request, callSettings);
}
}
}
| |
using System;
using NUnit.Framework;
using Whois.Parsers;
namespace Whois.Parsing.Whois.Rnids.Rs.Rs
{
// Parsing tests for responses from the whois.rnids.rs server (.rs registry).
// Each test reads a canned sample file and checks the parsed fields exactly.
[TestFixture]
public class RsParsingTests : ParsingTests
{
private WhoisParser parser;
[SetUp]
public void SetUp()
{
// Fresh parser per test; Serilog is initialised once via the config helper.
SerilogConfig.Init();
parser = new WhoisParser();
}
// A registered domain: registrar, dates, nameservers and "Active" status are parsed.
[Test]
public void Test_found()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "found.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.Found, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/Found", response.TemplateName);
Assert.AreEqual("eg.rs", response.DomainName.ToString());
// Registrar Details
Assert.AreEqual("GAMA Electronics d.o.o.", response.Registrar.Name);
Assert.AreEqual(new DateTime(2011, 08, 08, 11, 13, 00, 000, DateTimeKind.Utc), response.Updated);
Assert.AreEqual(new DateTime(2010, 10, 22, 10, 20, 31, 000, DateTimeKind.Utc), response.Registered);
Assert.AreEqual(new DateTime(2012, 10, 22, 10, 20, 31, 000, DateTimeKind.Utc), response.Expiration);
// Nameservers
Assert.AreEqual(2, response.NameServers.Count);
Assert.AreEqual("bits-hq.bitsyu.net", response.NameServers[0]);
Assert.AreEqual("largo.bitsyu.net", response.NameServers[1]);
// Domain Status
Assert.AreEqual(1, response.DomainStatus.Count);
Assert.AreEqual("Active", response.DomainStatus[0]);
Assert.AreEqual(9, response.FieldsParsed);
}
// Same domain as Test_found, but the sample lists nameservers in hyphenated form;
// parsing results must be identical.
[Test]
public void Test_found_nameservers_hyphenated()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "found_nameservers_hyphenated.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.Found, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/Found", response.TemplateName);
Assert.AreEqual("eg.rs", response.DomainName.ToString());
// Registrar Details
Assert.AreEqual("GAMA Electronics d.o.o.", response.Registrar.Name);
Assert.AreEqual(new DateTime(2011, 08, 08, 11, 13, 00, 000, DateTimeKind.Utc), response.Updated);
Assert.AreEqual(new DateTime(2010, 10, 22, 10, 20, 31, 000, DateTimeKind.Utc), response.Registered);
Assert.AreEqual(new DateTime(2012, 10, 22, 10, 20, 31, 000, DateTimeKind.Utc), response.Expiration);
// Nameservers
Assert.AreEqual(2, response.NameServers.Count);
Assert.AreEqual("bits-hq.bitsyu.net", response.NameServers[0]);
Assert.AreEqual("largo.bitsyu.net", response.NameServers[1]);
// Domain Status
Assert.AreEqual(1, response.DomainStatus.Count);
Assert.AreEqual("Active", response.DomainStatus[0]);
Assert.AreEqual(9, response.FieldsParsed);
}
// An expired domain still uses the Found template but maps to WhoisStatus.Expired.
[Test]
public void Test_expired()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "expired.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.Expired, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/Found", response.TemplateName);
Assert.AreEqual("saj.rs", response.DomainName.ToString());
// Registrar Details
Assert.AreEqual("BGSVETIONIK.S.A.", response.Registrar.Name);
Assert.AreEqual(new DateTime(2012, 06, 18, 02, 00, 02, 000, DateTimeKind.Utc), response.Updated);
Assert.AreEqual(new DateTime(2010, 06, 17, 14, 40, 59, 000, DateTimeKind.Utc), response.Registered);
Assert.AreEqual(new DateTime(2012, 06, 17, 14, 40, 59, 000, DateTimeKind.Utc), response.Expiration);
// Registrant Details
Assert.AreEqual("Ana Rakovic", response.Registrant.Name);
// Nameservers
Assert.AreEqual(2, response.NameServers.Count);
Assert.AreEqual("ns9.sajthosting.com", response.NameServers[0]);
Assert.AreEqual("ns10.sajthosting.com", response.NameServers[1]);
// Domain Status
Assert.AreEqual(1, response.DomainStatus.Count);
Assert.AreEqual("Expired", response.DomainStatus[0]);
Assert.AreEqual(10, response.FieldsParsed);
}
// An "In Transfer" status maps to WhoisStatus.Other.
[Test]
public void Test_other_status_in_transfer()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "other_status_in_transfer.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.Other, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/Found", response.TemplateName);
Assert.AreEqual("saj.rs", response.DomainName.ToString());
// Registrar Details
Assert.AreEqual("NINET Company d.o.o.", response.Registrar.Name);
Assert.AreEqual(new DateTime(2012, 07, 06, 16, 24, 55, 000, DateTimeKind.Utc), response.Updated);
Assert.AreEqual(new DateTime(2010, 06, 17, 14, 40, 59, 000, DateTimeKind.Utc), response.Registered);
Assert.AreEqual(new DateTime(2013, 06, 17, 14, 40, 59, 000, DateTimeKind.Utc), response.Expiration);
// Registrant Details
Assert.AreEqual("Ana Rakovic", response.Registrant.Name);
// Nameservers
Assert.AreEqual(2, response.NameServers.Count);
Assert.AreEqual("ns1.bgsvetionik.com", response.NameServers[0]);
Assert.AreEqual("ns2.bgsvetionik.com", response.NameServers[1]);
// Domain Status
Assert.AreEqual(1, response.DomainStatus.Count);
Assert.AreEqual("In Transfer", response.DomainStatus[0]);
Assert.AreEqual(10, response.FieldsParsed);
}
// A locked domain: status "Locked" and four nameservers are parsed.
[Test]
public void Test_locked()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "locked.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.Locked, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/Found", response.TemplateName);
Assert.AreEqual("kondor.rs", response.DomainName.ToString());
// Registrar Details
Assert.AreEqual("BGSVETIONIK.S.A.", response.Registrar.Name);
Assert.AreEqual(new DateTime(2011, 11, 18, 16, 03, 46, 000, DateTimeKind.Utc), response.Updated);
Assert.AreEqual(new DateTime(2009, 09, 30, 16, 19, 08, 000, DateTimeKind.Utc), response.Registered);
Assert.AreEqual(new DateTime(2012, 09, 30, 16, 19, 08, 000, DateTimeKind.Utc), response.Expiration);
// Registrant Details
Assert.AreEqual("Slavisa Janjusevic", response.Registrant.Name);
// Nameservers
Assert.AreEqual(4, response.NameServers.Count);
Assert.AreEqual("dns1.orion.rs", response.NameServers[0]);
Assert.AreEqual("dns2.orion.rs", response.NameServers[1]);
Assert.AreEqual("dns3.orion.rs", response.NameServers[2]);
Assert.AreEqual("dns4.orion.rs", response.NameServers[3]);
// Domain Status
Assert.AreEqual(1, response.DomainStatus.Count);
Assert.AreEqual("Locked", response.DomainStatus[0]);
Assert.AreEqual(12, response.FieldsParsed);
}
// An unregistered domain uses the NotFound template and parses a single field.
[Test]
public void Test_not_found()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "not_found.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.NotFound, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/NotFound", response.TemplateName);
Assert.AreEqual(1, response.FieldsParsed);
}
// A registered sample (google.rs) whose status line reads "Locked"; includes a
// registrant address, which the other samples lack.
[Test]
public void Test_found_status_registered()
{
var sample = SampleReader.Read("whois.rnids.rs", "rs", "found_status_registered.txt");
var response = parser.Parse("whois.rnids.rs", sample);
Assert.Greater(sample.Length, 0);
Assert.AreEqual(WhoisStatus.Locked, response.Status);
Assert.AreEqual(0, response.ParsingErrors);
Assert.AreEqual("whois.rnids.rs/rs/Found", response.TemplateName);
Assert.AreEqual("google.rs", response.DomainName.ToString());
// Registrar Details
Assert.AreEqual("NINET Company d.o.o.", response.Registrar.Name);
Assert.AreEqual(new DateTime(2014, 02, 11, 19, 49, 38, 000, DateTimeKind.Utc), response.Updated);
Assert.AreEqual(new DateTime(2008, 03, 10, 12, 31, 19, 000, DateTimeKind.Utc), response.Registered);
Assert.AreEqual(new DateTime(2015, 03, 10, 12, 31, 19, 000, DateTimeKind.Utc), response.Expiration);
// Registrant Details
Assert.AreEqual("Google Inc.", response.Registrant.Name);
// Registrant Address
Assert.AreEqual(1, response.Registrant.Address.Count);
Assert.AreEqual("1600 Amphitheatre Parkway, Mountain View, United States of America", response.Registrant.Address[0]);
// Nameservers
Assert.AreEqual(4, response.NameServers.Count);
Assert.AreEqual("ns1.google.com", response.NameServers[0]);
Assert.AreEqual("ns2.google.com", response.NameServers[1]);
Assert.AreEqual("ns3.google.com", response.NameServers[2]);
Assert.AreEqual("ns4.google.com", response.NameServers[3]);
// Domain Status
Assert.AreEqual(1, response.DomainStatus.Count);
Assert.AreEqual("Locked", response.DomainStatus[0]);
Assert.AreEqual(13, response.FieldsParsed);
}
}
}
| |
#if DNX451
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using OmniSharp.Models;
using OmniSharp.Options;
using Xunit;
namespace OmniSharp.Tests
{
// End-to-end tests for the OmniSharp FixUsings endpoint (compiled only under DNX451,
// see the surrounding #if). Each test feeds a source buffer through the controller and
// compares the rewritten buffer, or the reported ambiguous references, exactly.
public class FixUsingsFacts
{
// All buffers are registered in the test workspace under this name.
string fileName = "test.cs";
// A single missing using for a type declared elsewhere in the buffer is added.
[Fact]
public async Task FixUsings_AddsUsingSingle()
{
const string fileContents = @"namespace nsA
{
public class classX{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
}
}
}";
string expectedFileContents = @"using nsA;
namespace nsA
{
public class classX{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// A missing using for a framework method call (Console.WriteLine) is added.
[Fact]
public async Task FixUsings_AddsUsingSingleForFrameworkMethod()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""abc"");
}
}
}";
string expectedFileContents = @"using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""abc"");
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// A missing using for a framework type construction (StringBuilder) is added.
[Fact]
public async Task FixUsings_AddsUsingSingleForFrameworkClass()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()()
{
var s = new StringBuilder();
}
}
}";
string expectedFileContents = @"using System.Text;
namespace OmniSharp
{
public class class1
{
public void method1()()
{
var s = new StringBuilder();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// Two distinct missing usings for buffer-local namespaces are both added.
[Fact]
public async Task FixUsings_AddsUsingMultiple()
{
const string fileContents = @"namespace nsA
{
public class classX{}
}
namespace nsB
{
public class classY{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
var c2 = new classY();
}
}
}";
string expectedFileContents = @"using nsA;
using nsB;
namespace nsA
{
public class classX{}
}
namespace nsB
{
public class classY{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new classX();
var c2 = new classY();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// Two distinct missing framework usings (System, System.Text) are both added.
[Fact]
public async Task FixUsings_AddsUsingMultipleForFramework()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""abc"");
var sb = new StringBuilder();
}
}
}";
string expectedFileContents = @"using System;
using System.Text;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""abc"");
var sb = new StringBuilder();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// When the same type name exists in two namespaces, the endpoint reports it as
// ambiguous at the marker position instead of guessing a using.
[Fact]
public async Task FixUsings_ReturnsAmbiguousResult()
{
const string fileContents = @"
namespace nsA
{
public class classX{}
}
namespace nsB
{
public class classX{}
}
namespace OmniSharp
{
public class class1
{
public method1()
{
var c1 = new $classX();
}
}
}";
// The $ marker encodes the expected line/column of the ambiguous reference.
// NOTE(review): RemovePercentMarker looks like a no-op here (no % marker in the
// fixture) — presumably kept for symmetry with other marker helpers; confirm.
var classLineColumn = TestHelpers.GetLineAndColumnFromDollar(TestHelpers.RemovePercentMarker(fileContents));
var fileContentNoDollarMarker = TestHelpers.RemoveDollarMarker(fileContents);
var expectedUnresolved = new List<QuickFix>();
expectedUnresolved.Add(new QuickFix()
{
Line = classLineColumn.Line,
Column = classLineColumn.Column,
FileName = fileName,
Text = "`classX` is ambiguous"
});
await AssertUnresolvedReferences(fileContentNoDollarMarker, expectedUnresolved);
}
// An ambiguous reference must leave the buffer unchanged (no using is added).
[Fact]
public async Task FixUsings_ReturnsNoUsingsForAmbiguousResult()
{
const string fileContents = @"namespace nsA {
public class classX{}
}
namespace nsB {
public class classX{}
}
namespace OmniSharp {
public class class1
{
public method1()
{
var c1 = new classX();
}
}
}";
await AssertBufferContents(fileContents, fileContents);
}
// A using is added when only an extension-method call requires it.
[Fact]
public async Task FixUsings_AddsUsingForExtension()
{
const string fileContents = @"namespace nsA {
public static class StringExtension {
public static void Whatever(this string astring) {}
}
}
namespace OmniSharp {
public class class1
{
public method1()
{
""string"".Whatever();
}
}
}";
string expectedFileContents = @"using nsA;
namespace nsA {
public static class StringExtension {
public static void Whatever(this string astring) {}
}
}
namespace OmniSharp {
public class class1
{
public method1()
{
""string"".Whatever();
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// LINQ method syntax pulls in both the collection and System.Linq usings.
[Fact]
public async Task FixUsings_AddsUsingLinqMethodSyntax()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
List<string> first = new List<string>();
var testing = first.Where(s => s == ""abc"");
}
}
}";
string expectedFileContents = @"using System.Collections.Generic;
using System.Linq;
namespace OmniSharp
{
public class class1
{
public void method1()
{
List<string> first = new List<string>();
var testing = first.Where(s => s == ""abc"");
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// LINQ query syntax alone is enough to pull in System.Linq.
[Fact]
public async Task FixUsings_AddsUsingLinqQuerySyntax()
{
const string fileContents = @"namespace OmniSharp
{
public class class1
{
public void method1()
{
int[] numbers = { 5, 4, 1, 3, 9, 8, 6, 7, 2, 0 };
var lowNums =
from n in numbers
where n < 5
select n;
}
}
}";
string expectedFileContents = @"using System.Linq;
namespace OmniSharp
{
public class class1
{
public void method1()
{
int[] numbers = { 5, 4, 1, 3, 9, 8, 6, 7, 2, 0 };
var lowNums =
from n in numbers
where n < 5
select n;
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// A duplicated using directive is collapsed to one.
[Fact]
public async Task FixUsings_RemoveDuplicateUsing()
{
const string fileContents = @"using System;
using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""test"");
}
}
}";
const string expectedFileContents = @"using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""test"");
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// A using that nothing in the buffer needs is removed.
[Fact]
public async Task FixUsings_RemoveUnusedUsing()
{
const string fileContents = @"using System;
using System.Linq;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""test"");
}
}
}";
const string expectedFileContents = @"using System;
namespace OmniSharp
{
public class class1
{
public void method1()
{
Console.WriteLine(""test"");
}
}
}";
await AssertBufferContents(fileContents, expectedFileContents);
}
// Runs FixUsings and asserts the rewritten buffer matches exactly.
private async Task AssertBufferContents(string fileContents, string expectedFileContents)
{
var response = await RunFixUsings(fileContents);
Assert.Equal(expectedFileContents, response.Buffer);
}
// Runs FixUsings and asserts the ambiguous-reference list matches field by field.
private async Task AssertUnresolvedReferences(string fileContents, List<QuickFix> expectedUnresolved)
{
var response = await RunFixUsings(fileContents);
var qfList = response.AmbiguousResults.ToList();
Assert.Equal(qfList.Count(), expectedUnresolved.Count());
var i = 0;
foreach (var expectedQuickFix in expectedUnresolved)
{
Assert.Equal(qfList[i].Line, expectedQuickFix.Line);
Assert.Equal(qfList[i].Column, expectedQuickFix.Column);
Assert.Equal(qfList[i].FileName, expectedQuickFix.FileName);
Assert.Equal(qfList[i].Text, expectedQuickFix.Text);
i++;
}
}
// Builds a one-file workspace around the buffer and invokes the FixUsings endpoint
// with "\n" line endings so expected strings compare platform-independently.
private async Task<FixUsingsResponse> RunFixUsings(string fileContents)
{
var workspace = TestHelpers.CreateSimpleWorkspace(fileContents, fileName);
var fakeOptions = new FakeOmniSharpOptions();
fakeOptions.Options = new OmniSharpOptions();
fakeOptions.Options.FormattingOptions = new FormattingOptions() {NewLine = "\n"};
var controller = new OmnisharpController(workspace, fakeOptions);
var request = new FixUsingsRequest
{
FileName = fileName,
Buffer = fileContents
};
return await controller.FixUsings(request);
}
}
}
#endif
| |
/*! Achordeon - MIT License
Copyright (c) 2017 tiamatix / Wolf Robben
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
!*/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Linq;
using System.Runtime.CompilerServices;
using Achordeon.Annotations;
using Achordeon.Common.Extensions;
using Achordeon.Common.Helpers;
namespace Achordeon.Shell.Wpf.Helpers.RecentFileList
{
/// <summary>
/// Ordered list of recently used files with pin support. Pinned files are kept at the
/// front of the list; unpinned files follow, most recently used first. The list is
/// capped at <see cref="MaxFilesNumber"/> entries and raises <see cref="PropertyChanged"/>
/// for <see cref="IsEmpty"/> whenever its contents change.
/// </summary>
public sealed class RecentFileList : INotifyPropertyChanged
{
private int m_MaxFilesNumber = 8;
private readonly ObservableCollection<RecentFile> m_RecentFiles;
// Read-only view over the backing collection, handed out to consumers.
public ReadOnlyObservableCollection<RecentFile> RecentFiles { get; }
/// <summary>
/// Maximum number of entries kept in the list. Shrinking it trims the excess
/// entries from the end; must be at least 1.
/// </summary>
public int MaxFilesNumber
{
get { return m_MaxFilesNumber; }
set
{
if (m_MaxFilesNumber == value)
return;
if (value <= 0)
throw new ArgumentException("The value must be equal or larger than 1.");
m_MaxFilesNumber = value;
// Nothing to trim when the list already fits the new limit.
if (m_RecentFiles.Count - m_MaxFilesNumber < 1)
return;
RemoveRange(m_MaxFilesNumber, m_RecentFiles.Count - m_MaxFilesNumber);
}
}
// Number of pinned entries; pinned files always occupy the front of the list.
private int PinCount
{
get { return m_RecentFiles.Count(r => r.IsPinned); }
}
public bool IsEmpty => m_RecentFiles.Count < 1;
public RecentFileList()
{
m_RecentFiles = new ObservableCollection<RecentFile>();
RecentFiles = new ReadOnlyObservableCollection<RecentFile>(m_RecentFiles);
}
/// <summary>Replaces the list contents, keeping at most MaxFilesNumber entries.</summary>
public void Load(IEnumerable<RecentFile> ARecentFiles)
{
if (ARecentFiles == null)
throw new ArgumentNullException(nameof(ARecentFiles));
Clear();
AddRange(ARecentFiles.Take(m_MaxFilesNumber));
}
/// <summary>
/// Records a file use: an existing entry is moved to the front of its section
/// (index 0 when pinned, directly after the pinned block otherwise); a new entry
/// is inserted after the pinned block, evicting the last entry when full.
/// </summary>
public void AddFile(string AFileName)
{
if (string.IsNullOrEmpty(AFileName))
throw new ArgumentException("The argument fileName must not be null or empty.");
var File = m_RecentFiles.FirstOrDefault(r => r.Path == AFileName);
if (File != null)
{
var OldIndex = m_RecentFiles.IndexOf(File);
var NewIndex = File.IsPinned ? 0 : PinCount;
if (OldIndex == NewIndex)
return;
m_RecentFiles.Move(OldIndex, NewIndex);
}
else
{
// If every slot is taken by pinned files, nothing can be evicted.
if (PinCount >= m_MaxFilesNumber)
return;
if (m_RecentFiles.Count >= m_MaxFilesNumber)
RemoveAt(m_RecentFiles.Count - 1);
Insert(PinCount, new RecentFile(AFileName));
}
}
/// <summary>Removes the entry with the given path; does nothing when absent.</summary>
public void Remove(string AFileName)
{
var Existing = RecentFiles.FirstOrDefault(a => a.Path == AFileName);
if (Existing == null)
return;
Remove(Existing);
}
/// <summary>Removes the given entry; throws when it is not in the list.</summary>
public void Remove(RecentFile ARecentFile)
{
if (ARecentFile == null)
throw new ArgumentNullException(nameof(ARecentFile));
if (!m_RecentFiles.Remove(ARecentFile))
throw new ArgumentException("The passed recentFile was not found in the recent files list.");
// Detach the pin-change handler so the removed entry cannot reorder the list.
ARecentFile.PropertyChanged -= RecentFilePropertyChanged;
OnPropertyChanged(nameof(IsEmpty));
}
/// <summary>Serialises all entries under a "RecentFiles" node of the given XML file.</summary>
public void SaveToXml(XmlFile AXml)
{
AXml.ThrowIfNullEx(nameof(AXml));
var FilesNode = AXml.Add("RecentFiles");
foreach (var RecentFile in m_RecentFiles)
RecentFile.SaveToXml(FilesNode.Add("RecentFile"));
}
/// <summary>Replaces the list contents with entries read from the given XML file.</summary>
public void LoadFromXml(XmlFile AXml)
{
AXml.ThrowIfNullEx(nameof(AXml));
Clear();
var FilesNode = AXml.SelectSingle("RecentFiles");
if (FilesNode == null)
return;
foreach (var FileNode in FilesNode.SelectAll("RecentFile"))
{
var RecentFile = new RecentFile(FileNode);
// Skip corrupt entries that were persisted without a path.
if (!string.IsNullOrWhiteSpace(RecentFile.Path))
Add(RecentFile);
}
OnPropertyChanged(nameof(IsEmpty));
}
// Inserts an entry and subscribes to its pin-state changes.
private void Insert(int AIndex, RecentFile ARecentFile)
{
ARecentFile.ThrowIfNullEx(nameof(ARecentFile));
ARecentFile.PropertyChanged += RecentFilePropertyChanged;
m_RecentFiles.Insert(AIndex, ARecentFile);
OnPropertyChanged(nameof(IsEmpty));
}
// Appends an entry and subscribes to its pin-state changes.
private void Add(RecentFile ARecentFile)
{
ARecentFile.ThrowIfNullEx(nameof(ARecentFile));
ARecentFile.PropertyChanged += RecentFilePropertyChanged;
m_RecentFiles.Add(ARecentFile);
OnPropertyChanged(nameof(IsEmpty));
}
private void AddRange(IEnumerable<RecentFile> ARecentFilesToAdd)
{
foreach (var RecentFile in ARecentFilesToAdd)
Add(RecentFile);
}
// Removes the entry at AIndex, detaching its pin-change handler first.
private void RemoveAt(int AIndex)
{
m_RecentFiles[AIndex].PropertyChanged -= RecentFilePropertyChanged;
m_RecentFiles.RemoveAt(AIndex);
OnPropertyChanged(nameof(IsEmpty));
}
// Removes ACount entries starting at AIndex.
private void RemoveRange(int AIndex, int ACount)
{
for (var i = 0; i < ACount; ++i)
RemoveAt(AIndex);
OnPropertyChanged(nameof(IsEmpty));
}
// Empties the list, detaching all pin-change handlers.
private void Clear()
{
foreach (var File in m_RecentFiles)
File.PropertyChanged -= RecentFilePropertyChanged;
m_RecentFiles.Clear();
OnPropertyChanged(nameof(IsEmpty));
}
// Reacts to an entry's IsPinned change by moving it into the correct section:
// newly pinned entries go to the front, newly unpinned ones to the end of the
// pinned block.
private void RecentFilePropertyChanged(object ASender, PropertyChangedEventArgs AArgs)
{
var TargetProperty = nameof(RecentFile.IsPinned);
if (AArgs.PropertyName != TargetProperty)
return;
var File = (RecentFile) ASender;
var OldIndex = m_RecentFiles.IndexOf(File);
if (File.IsPinned)
{
m_RecentFiles.Move(OldIndex, 0);
}
else
{
// PinCount no longer includes this file, so it marks the first unpinned slot.
var Count = PinCount;
if (OldIndex == Count)
return;
m_RecentFiles.Move(OldIndex, Count);
}
}
public event PropertyChangedEventHandler PropertyChanged;
[NotifyPropertyChangedInvocator]
private void OnPropertyChanged([CallerMemberName] string APropertyName = null)
{
PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(APropertyName));
}
}
}
| |
// -----
// GNU General Public License
// The Forex Professional Analyzer is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
// The Forex Professional Analyzer is distributed in the hope that it will be useful, but without any warranty; without even the implied warranty of merchantability or fitness for a particular purpose.
// See the GNU Lesser General Public License for more details.
// -----
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.Drawing.Drawing2D;
using System.Drawing;
namespace fxpa
{
public static class MathHelper
{
// Direction of change between two consecutive values in a series.
public enum DirectionEnum
{
// No change (also the default for the first element, which has no predecessor).
Equal = 0,
// Value increased relative to the previous one.
Up,
// Value decreased relative to the previous one.
Down
}
/// <summary>
/// Will round the value to a given count of resulting symbols (significant digits).
/// </summary>
/// <param name="inputValue">The value to round. NOTE(review): a non-positive value makes
/// Math.Log10 return NaN/-Infinity; such inputs are effectively treated as having zero
/// integer digits — confirm callers only pass positive values.</param>
/// <param name="requiredSymbolsCount">Total count of digits desired in the result.</param>
/// <returns>The value rounded to the requested number of symbols, where possible.</returns>
public static double RoundToSymbolsCount(double inputValue, int requiredSymbolsCount)
{
    // Count of digits in front of the decimal point.
    int symbols = (int)Math.Ceiling(Math.Log10(inputValue));
    // Values below 0 mean spaces already existing after the zero point - discard them.
    symbols = Math.Max(0, symbols);
    // Clamp the decimal-digit count into the [0, 15] range accepted by Math.Round.
    // Previously a value with more integer digits than requiredSymbolsCount produced a
    // negative digit count and Math.Round threw ArgumentOutOfRangeException; such values
    // are now returned with no fractional rounding applied.
    int digits = Math.Min(15, Math.Max(0, requiredSymbolsCount - symbols));
    return Math.Round(inputValue, digits);
}
/// <summary>
/// Computes the distance between two points in absolute (device) coordinates by
/// applying the X and Y scale factors of the given transformation matrix.
/// </summary>
public static float GetAbsoluteDistance(Matrix transformationMatrix, PointF point1, PointF point2)
{
    // Elements[0] and Elements[3] are the matrix's X and Y scale components.
    float scaledDx = Math.Abs(point1.X - point2.X) * transformationMatrix.Elements[0];
    float scaledDy = Math.Abs(point1.Y - point2.Y) * transformationMatrix.Elements[3];
    return (float)Math.Sqrt(scaledDx * scaledDx + scaledDy * scaledDy);
}
/// <summary>
/// Will show where line 1 crosses line 2; 1 for cross up, 2 for cross down, 0 for no crossing.
/// Both arrays must have the same length; index 0 is never a crossing point.
/// </summary>
static public double[] CreateLineCrossings(double[] line1, double[] line2)
{
    System.Diagnostics.Debug.Assert(line1.Length == line2.Length);
    double[] crossings = new double[line2.Length];
    // Start at 1: the first sample has no predecessor to compare against.
    for (int index = 1; index < line1.Length; index++)
    {
        bool wasAtOrAbove = line1[index - 1] >= line2[index - 1];
        bool nowAtOrBelow = line1[index] <= line2[index];
        if (wasAtOrAbove && nowAtOrBelow)
        {
            crossings[index] = 2;
        }
        else if (line1[index - 1] <= line2[index - 1] && line1[index] >= line2[index])
        {
            crossings[index] = 1;
        }
    }
    return crossings;
}
/// <summary>
/// Creates a constant line: an array of the given length with every element set to value.
/// </summary>
static public double[] CreateFixedLineResultLength(double value, int count)
{
    double[] fixedLine = new double[count];
    int index = 0;
    while (index < count)
    {
        fixedLine[index++] = value;
    }
    return fixedLine;
}
/// <summary>
/// Calculates a "connection value" line between the 2 values (like a vector connecting 2 points):
/// a linear interpolation from initialValue to secondaryValue over connectionValuesCount samples.
/// </summary>
/// <param name="initialValue">Value of the first sample.</param>
/// <param name="secondaryValue">Value of the last sample.</param>
/// <param name="connectionValuesCount">Number of samples; must be at least 1.</param>
/// <returns>The interpolated line; for a count of 1, a single-element array holding secondaryValue.</returns>
static public double[] CreateConnectionValues(double initialValue, double secondaryValue, int connectionValuesCount)
{
    // Guard: the original indexed results[connectionValuesCount - 1] unconditionally and
    // crashed with an IndexOutOfRangeException for counts below 1; fail fast instead.
    if (connectionValuesCount < 1)
    {
        throw new ArgumentOutOfRangeException("connectionValuesCount", "At least one connection value is required.");
    }
    double[] results = new double[connectionValuesCount];
    results[0] = initialValue;
    results[connectionValuesCount - 1] = secondaryValue;
    // A single-element line is just the end point (matches the previous behavior, where
    // the secondary value overwrote the initial one at index 0); also avoids a
    // division by zero when computing the step below.
    if (connectionValuesCount == 1)
    {
        return results;
    }
    double stepValue = (secondaryValue - initialValue) / (connectionValuesCount - 1);
    for (int i = 1; i < connectionValuesCount - 1; i++)
    {
        results[i] = results[i - 1] + stepValue;
    }
    return results;
}
/// <summary>
/// For each value, reports whether the series moved Up or Down relative to the previous value.
/// Index 0 and positions where consecutive values are equal keep the enum's default value.
/// </summary>
static public DirectionEnum[] EstablishDirection(double[] values)
{
    DirectionEnum[] directions = new DirectionEnum[values.Length];
    for (int index = 1; index < values.Length; index++)
    {
        double previous = values[index - 1];
        double current = values[index];
        if (previous < current)
        {
            directions[index] = DirectionEnum.Up;
        }
        else if (previous > current)
        {
            directions[index] = DirectionEnum.Down;
        }
        // Equal neighbours fall through and leave the default value in place.
    }
    return directions;
}
/// <summary>
/// This is a quick approximation of the Moving average.
/// The formula used here is as follows :
/// An estimate of the moving average if the bin size for moving average is n may be obtained by:
/// NewAverage = (((n-1) * OldAverage) + newValue)/n
/// This works once the bin is full (sample number >= n). The bin partially full is often dealt with by using a seed value for the initial moving average (OldAverage) and then using this calculation.
/// This assumes normal distribution of values etc.
/// </summary>
/// <param name="values">Input series.</param>
/// <param name="period">Moving-average bin size n; must be at least 1.</param>
/// <returns>Array of running approximate averages, one per input value.</returns>
static public double[] CalculateQuickMA(double[] values, int period)
{
    if (period < 1)
    {
        // The original silently divided by zero for period == 0 (yielding Infinity/NaN
        // results) and produced meaningless output for negative periods; fail fast instead.
        throw new ArgumentOutOfRangeException("period", "period must be at least 1.");
    }
    double[] results = new double[values.Length];
    double average = 0;
    for (int i = 0; i < values.Length; i++)
    {
        // NewAverage = (((n-1) * OldAverage) + newValue) / n, seeded with 0.
        average = (((period - 1) * average) + values[i]) / period;
        results[i] = average;
    }
    return results;
}
/// <summary>
/// Provides a way to see the distribution of the values in periodsCount separate periods.
/// Returns a map from each bucket's lower bound to the number of values that fell into
/// that bucket (values at or below lowerBound + periodSize, scanned in sorted order).
/// The smallest element is always captured by the first bucket.
/// </summary>
/// <param name="values">Sampled values; must contain at least one element (list[0] is read unguarded).</param>
/// <param name="periodsCount">Number of equal-width buckets between min and max.</param>
/// <returns>Bucket lower bound -> count of values assigned to that bucket.</returns>
static public Dictionary<double, double> CalculateValueDistribution(IEnumerable<double> values, int periodsCount)
{
List<double> list = new List<double>(values);
list.Sort();
double min = list[0];
double max = list[list.Count - 1];
double periodSize = (max - min) / periodsCount;
Dictionary<double, double> results = new Dictionary<double,double>();
// j is shared across buckets: the sorted list is scanned exactly once, each value
// being counted into the first bucket whose upper bound reaches it.
int j=0;
// NOTE(review): the loop variable accumulates floating-point steps (i += periodSize),
// so the number of buckets can differ from periodsCount by one due to rounding, and
// values exactly equal to max may be left uncounted when the final accumulated bound
// lands just below max — confirm callers tolerate this before changing it.
for (double i=min; i<max; i+=periodSize)
{
int count = 0;
for (; j < list.Count; j++)
{
if (list[j] <= i + periodSize)
{
count++;
}
else
{
// Remaining values belong to later buckets; resume from j next iteration.
break;
}
}
results.Add(i, count);
}
return results;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
namespace System.Reflection.Emit
{
// A lightweight, garbage-collectible method that is generated at run time via ILGenerator or
// DynamicILInfo, compiled on first use, and invoked through a delegate or reflection Invoke.
// The public reflection surface is forwarded to a nested RTDynamicMethod facade so that the
// DynamicMethod object itself (and hence invoke rights) is never leaked via stack traces.
public sealed class DynamicMethod : MethodInfo
{
private RuntimeType[] m_parameterTypes = null!;
internal IRuntimeMethodInfo? m_methodHandle;
private RuntimeType m_returnType = null!;
private DynamicILGenerator? m_ilGenerator;
private DynamicILInfo? m_DynamicILInfo;
private bool m_fInitLocals;
private RuntimeModule m_module = null!;
internal bool m_skipVisibility;
internal RuntimeType? m_typeOwner; // can be null
// We want the creator of the DynamicMethod to control who has access to the
// DynamicMethod (just like we do for delegates). However, a user can get to
// the corresponding RTDynamicMethod using Exception.TargetSite, StackFrame.GetMethod, etc.
// If we allowed use of RTDynamicMethod, the creator of the DynamicMethod would
// not be able to bound access to the DynamicMethod. Hence, we need to ensure that
// we do not allow direct use of RTDynamicMethod.
private RTDynamicMethod m_dynMethod = null!;
// needed to keep the object alive during jitting
// assigned by the DynamicResolver ctor
internal DynamicResolver? m_resolver;
internal bool m_restrictedSkipVisibility;
// The context when the method was created. We use this to do the RestrictedMemberAccess checks.
// These checks are done when the method is compiled. This can happen at an arbitrary time,
// when CreateDelegate or Invoke is called, or when another DynamicMethod executes OpCodes.Call.
// We capture the creation context so that we can do the checks against the same context,
// irrespective of when the method gets compiled. Note that the DynamicMethod does not know when
// it is ready for use since there is no API which indicates that IL generation has completed.
private static volatile InternalModuleBuilder? s_anonymouslyHostedDynamicMethodsModule;
private static readonly object s_anonymouslyHostedDynamicMethodsModuleLock = new object();
//
// class initialization (ctor and init)
//
public DynamicMethod(string name,
Type? returnType,
Type[]? parameterTypes)
{
Init(name,
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
returnType,
parameterTypes,
null, // owner
null, // m
false, // skipVisibility
true);
}
public DynamicMethod(string name,
Type? returnType,
Type[]? parameterTypes,
bool restrictedSkipVisibility)
{
Init(name,
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
returnType,
parameterTypes,
null, // owner
null, // m
restrictedSkipVisibility,
true);
}
public DynamicMethod(string name,
Type? returnType,
Type[]? parameterTypes,
Module m)
{
if (m == null)
throw new ArgumentNullException(nameof(m));
Init(name,
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
returnType,
parameterTypes,
null, // owner
m, // m
false, // skipVisibility
false);
}
public DynamicMethod(string name,
Type? returnType,
Type[]? parameterTypes,
Module m,
bool skipVisibility)
{
if (m == null)
throw new ArgumentNullException(nameof(m));
Init(name,
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
returnType,
parameterTypes,
null, // owner
m, // m
skipVisibility,
false);
}
public DynamicMethod(string name,
MethodAttributes attributes,
CallingConventions callingConvention,
Type? returnType,
Type[]? parameterTypes,
Module m,
bool skipVisibility)
{
if (m == null)
throw new ArgumentNullException(nameof(m));
Init(name,
attributes,
callingConvention,
returnType,
parameterTypes,
null, // owner
m, // m
skipVisibility,
false);
}
public DynamicMethod(string name,
Type? returnType,
Type[]? parameterTypes,
Type owner)
{
if (owner == null)
throw new ArgumentNullException(nameof(owner));
Init(name,
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
returnType,
parameterTypes,
owner, // owner
null, // m
false, // skipVisibility
false);
}
public DynamicMethod(string name,
Type? returnType,
Type[]? parameterTypes,
Type owner,
bool skipVisibility)
{
if (owner == null)
throw new ArgumentNullException(nameof(owner));
Init(name,
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
returnType,
parameterTypes,
owner, // owner
null, // m
skipVisibility,
false);
}
public DynamicMethod(string name,
MethodAttributes attributes,
CallingConventions callingConvention,
Type? returnType,
Type[]? parameterTypes,
Type owner,
bool skipVisibility)
{
if (owner == null)
throw new ArgumentNullException(nameof(owner));
Init(name,
attributes,
callingConvention,
returnType,
parameterTypes,
owner, // owner
null, // m
skipVisibility,
false);
}
// helpers for initialization
private static void CheckConsistency(MethodAttributes attributes, CallingConventions callingConvention)
{
// only public static for method attributes
if ((attributes & ~MethodAttributes.MemberAccessMask) != MethodAttributes.Static)
throw new NotSupportedException(SR.NotSupported_DynamicMethodFlags);
if ((attributes & MethodAttributes.MemberAccessMask) != MethodAttributes.Public)
throw new NotSupportedException(SR.NotSupported_DynamicMethodFlags);
// only standard or varargs supported
if (callingConvention != CallingConventions.Standard && callingConvention != CallingConventions.VarArgs)
throw new NotSupportedException(SR.NotSupported_DynamicMethodFlags);
// vararg is not supported at the moment
if (callingConvention == CallingConventions.VarArgs)
throw new NotSupportedException(SR.NotSupported_DynamicMethodFlags);
}
// We create a transparent assembly to host DynamicMethods. Since the assembly does not have any
// non-public fields (or any fields at all), it is a safe anonymous assembly to host DynamicMethods
[System.Security.DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod
private static RuntimeModule GetDynamicMethodsModule()
{
// Double-checked initialization of the shared anonymous-host module: the unlocked
// read is safe because the field is volatile and assigned only once.
if (s_anonymouslyHostedDynamicMethodsModule != null)
return s_anonymouslyHostedDynamicMethodsModule;
lock (s_anonymouslyHostedDynamicMethodsModuleLock)
{
if (s_anonymouslyHostedDynamicMethodsModule != null)
return s_anonymouslyHostedDynamicMethodsModule;
AssemblyName assemblyName = new AssemblyName("Anonymously Hosted DynamicMethods Assembly");
StackCrawlMark stackMark = StackCrawlMark.LookForMe;
AssemblyBuilder assembly = AssemblyBuilder.InternalDefineDynamicAssembly(
assemblyName,
AssemblyBuilderAccess.Run,
ref stackMark,
null);
// this always gets the internal module.
s_anonymouslyHostedDynamicMethodsModule = (InternalModuleBuilder)assembly.ManifestModule!;
}
return s_anonymouslyHostedDynamicMethodsModule;
}
// Shared initializer for all constructors: validates attributes/calling convention,
// captures the signature and return type as RuntimeTypes, resolves the hosting module
// (anonymous host, explicit module, or owner type's module), and creates the
// RTDynamicMethod facade that backs the MethodInfo surface.
private void Init(string name,
MethodAttributes attributes,
CallingConventions callingConvention,
Type? returnType,
Type[]? signature,
Type? owner,
Module? m,
bool skipVisibility,
bool transparentMethod)
{
DynamicMethod.CheckConsistency(attributes, callingConvention);
// check and store the signature
if (signature != null)
{
m_parameterTypes = new RuntimeType[signature.Length];
for (int i = 0; i < signature.Length; i++)
{
if (signature[i] == null)
throw new ArgumentException(SR.Arg_InvalidTypeInSignature);
m_parameterTypes[i] = (signature[i].UnderlyingSystemType as RuntimeType)!;
if (m_parameterTypes[i] == null || m_parameterTypes[i] == typeof(void))
throw new ArgumentException(SR.Arg_InvalidTypeInSignature);
}
}
else
{
m_parameterTypes = Array.Empty<RuntimeType>();
}
// check and store the return value
m_returnType = (returnType == null) ? (RuntimeType)typeof(void) : (returnType.UnderlyingSystemType as RuntimeType)!;
if (m_returnType == null)
throw new NotSupportedException(SR.Arg_InvalidTypeInRetType);
if (transparentMethod)
{
Debug.Assert(owner == null && m == null, "owner and m cannot be set for transparent methods");
m_module = GetDynamicMethodsModule();
if (skipVisibility)
{
m_restrictedSkipVisibility = true;
}
}
else
{
Debug.Assert(m != null || owner != null, "Constructor should ensure that either m or owner is set");
Debug.Assert(m == null || !m.Equals(s_anonymouslyHostedDynamicMethodsModule), "The user cannot explicitly use this assembly");
Debug.Assert(m == null || owner == null, "m and owner cannot both be set");
if (m != null)
m_module = m.ModuleHandle.GetRuntimeModule(); // this returns the underlying module for all RuntimeModule and ModuleBuilder objects.
else
{
RuntimeType? rtOwner = null;
if (owner != null)
rtOwner = owner.UnderlyingSystemType as RuntimeType;
if (rtOwner != null)
{
if (rtOwner.HasElementType || rtOwner.ContainsGenericParameters
|| rtOwner.IsGenericParameter || rtOwner.IsInterface)
throw new ArgumentException(SR.Argument_InvalidTypeForDynamicMethod);
m_typeOwner = rtOwner;
m_module = rtOwner.GetRuntimeModule();
}
}
m_skipVisibility = skipVisibility;
}
// initialize remaining fields
m_ilGenerator = null;
m_fInitLocals = true;
m_methodHandle = null;
if (name == null)
throw new ArgumentNullException(nameof(name));
m_dynMethod = new RTDynamicMethod(this, name, attributes, callingConvention);
}
//
// Delegate and method creation
//
public sealed override Delegate CreateDelegate(Type delegateType)
{
if (m_restrictedSkipVisibility)
{
// Compile the method since accessibility checks are done as part of compilation.
GetMethodDescriptor();
IRuntimeMethodInfo? methodHandle = m_methodHandle;
System.Runtime.CompilerServices.RuntimeHelpers._CompileMethod(methodHandle != null ? methodHandle.Value : RuntimeMethodHandleInternal.EmptyHandle);
GC.KeepAlive(methodHandle);
}
MulticastDelegate d = (MulticastDelegate)Delegate.CreateDelegateNoSecurityCheck(delegateType, null, GetMethodDescriptor());
// stash this MethodInfo by brute force.
d.StoreDynamicMethod(GetMethodInfo());
return d;
}
public sealed override Delegate CreateDelegate(Type delegateType, object? target)
{
if (m_restrictedSkipVisibility)
{
// Compile the method since accessibility checks are done as part of compilation
GetMethodDescriptor();
IRuntimeMethodInfo? methodHandle = m_methodHandle;
System.Runtime.CompilerServices.RuntimeHelpers._CompileMethod(methodHandle != null ? methodHandle.Value : RuntimeMethodHandleInternal.EmptyHandle);
GC.KeepAlive(methodHandle);
}
MulticastDelegate d = (MulticastDelegate)Delegate.CreateDelegateNoSecurityCheck(delegateType, target, GetMethodDescriptor());
// stash this MethodInfo by brute force.
d.StoreDynamicMethod(GetMethodInfo());
return d;
}
// This is guaranteed to return a valid handle
internal RuntimeMethodHandle GetMethodDescriptor()
{
// Lazily bakes the method body exactly once; check-lock-check on m_methodHandle.
// NOTE(review): locks on 'this' — acceptable only because this is runtime-internal code.
if (m_methodHandle == null)
{
lock (this)
{
if (m_methodHandle == null)
{
if (m_DynamicILInfo != null)
m_DynamicILInfo.GetCallableMethod(m_module, this);
else
{
// An empty IL stream means the user never emitted a body; fail clearly.
if (m_ilGenerator == null || m_ilGenerator.ILOffset == 0)
throw new InvalidOperationException(SR.Format(SR.InvalidOperation_BadEmptyMethodBody, Name));
m_ilGenerator.GetCallableMethod(m_module, this);
}
}
}
}
return new RuntimeMethodHandle(m_methodHandle!);
}
//
// MethodInfo api. They mostly forward to RTDynamicMethod
//
public override string ToString() { return m_dynMethod.ToString(); }
public override string Name => m_dynMethod.Name;
public override Type? DeclaringType => m_dynMethod.DeclaringType;
public override Type? ReflectedType => m_dynMethod.ReflectedType;
public override Module Module => m_dynMethod.Module;
// we cannot return a MethodHandle because we cannot track it via GC so this method is off limits
public override RuntimeMethodHandle MethodHandle => throw new InvalidOperationException(SR.InvalidOperation_NotAllowedInDynamicMethod);
public override MethodAttributes Attributes => m_dynMethod.Attributes;
public override CallingConventions CallingConvention => m_dynMethod.CallingConvention;
public override MethodInfo GetBaseDefinition() { return this; }
public override ParameterInfo[] GetParameters() { return m_dynMethod.GetParameters(); }
public override MethodImplAttributes GetMethodImplementationFlags() { return m_dynMethod.GetMethodImplementationFlags(); }
public override bool IsSecurityCritical => true;
public override bool IsSecuritySafeCritical => false;
public override bool IsSecurityTransparent => false;
public override object? Invoke(object? obj, BindingFlags invokeAttr, Binder? binder, object?[]? parameters, CultureInfo? culture)
{
if ((CallingConvention & CallingConventions.VarArgs) == CallingConventions.VarArgs)
throw new NotSupportedException(SR.NotSupported_CallToVarArg);
//
// We do not demand any permission here because the caller already has access
// to the current DynamicMethod object, and it could just as easily emit another
// Transparent DynamicMethod to call the current DynamicMethod.
//
_ = GetMethodDescriptor();
// ignore obj since it's a static method
// create a signature object
Signature sig = new Signature(
this.m_methodHandle!, m_parameterTypes, m_returnType, CallingConvention);
// verify arguments
int formalCount = sig.Arguments.Length;
int actualCount = (parameters != null) ? parameters.Length : 0;
if (formalCount != actualCount)
throw new TargetParameterCountException(SR.Arg_ParmCnt);
// if we are here we passed all the previous checks. Time to look at the arguments
bool wrapExceptions = (invokeAttr & BindingFlags.DoNotWrapExceptions) == 0;
object retValue;
if (actualCount > 0)
{
object[] arguments = CheckArguments(parameters!, binder, invokeAttr, culture, sig);
retValue = RuntimeMethodHandle.InvokeMethod(null, arguments, sig, false, wrapExceptions);
// copy out. This should be made only if ByRef are present.
for (int index = 0; index < arguments.Length; index++)
parameters![index] = arguments[index];
}
else
{
retValue = RuntimeMethodHandle.InvokeMethod(null, null, sig, false, wrapExceptions);
}
GC.KeepAlive(this);
return retValue;
}
public override object[] GetCustomAttributes(Type attributeType, bool inherit)
{
return m_dynMethod.GetCustomAttributes(attributeType, inherit);
}
public override object[] GetCustomAttributes(bool inherit) { return m_dynMethod.GetCustomAttributes(inherit); }
public override bool IsDefined(Type attributeType, bool inherit) { return m_dynMethod.IsDefined(attributeType, inherit); }
public override Type ReturnType => m_dynMethod.ReturnType;
public override ParameterInfo ReturnParameter => m_dynMethod.ReturnParameter;
public override ICustomAttributeProvider ReturnTypeCustomAttributes => m_dynMethod.ReturnTypeCustomAttributes;
//
// DynamicMethod specific methods
//
public ParameterBuilder? DefineParameter(int position, ParameterAttributes attributes, string? parameterName)
{
if (position < 0 || position > m_parameterTypes.Length)
throw new ArgumentOutOfRangeException(SR.ArgumentOutOfRange_ParamSequence);
position--; // it's 1 based. 0 is the return value
if (position >= 0)
{
RuntimeParameterInfo[] parameters = m_dynMethod.LoadParameters();
parameters[position].SetName(parameterName);
parameters[position].SetAttributes(attributes);
}
return null;
}
public DynamicILInfo GetDynamicILInfo()
{
if (m_DynamicILInfo == null)
{
byte[] methodSignature = SignatureHelper.GetMethodSigHelper(
null, CallingConvention, ReturnType, null, null, m_parameterTypes, null, null).GetSignature(true);
m_DynamicILInfo = new DynamicILInfo(this, methodSignature);
}
return m_DynamicILInfo;
}
public ILGenerator GetILGenerator()
{
return GetILGenerator(64);
}
public ILGenerator GetILGenerator(int streamSize)
{
if (m_ilGenerator == null)
{
byte[] methodSignature = SignatureHelper.GetMethodSigHelper(
null, CallingConvention, ReturnType, null, null, m_parameterTypes, null, null).GetSignature(true);
m_ilGenerator = new DynamicILGenerator(this, methodSignature, streamSize);
}
return m_ilGenerator;
}
public bool InitLocals
{
get => m_fInitLocals;
set => m_fInitLocals = value;
}
//
// Internal API
//
internal MethodInfo GetMethodInfo()
{
return m_dynMethod;
}
//////////////////////////////////////////////////////////////////////////////////////////////
// RTDynamicMethod
//
// this is actually the real runtime instance of a method info that gets used for invocation
// We need this so we never leak the DynamicMethod out via an exception.
// This way the DynamicMethod creator is the only one responsible for DynamicMethod access,
// and can control exactly who gets access to it.
//
internal sealed class RTDynamicMethod : MethodInfo
{
internal DynamicMethod m_owner;
private RuntimeParameterInfo[]? m_parameters;
private string m_name;
private MethodAttributes m_attributes;
private CallingConventions m_callingConvention;
internal RTDynamicMethod(DynamicMethod owner, string name, MethodAttributes attributes, CallingConventions callingConvention)
{
m_owner = owner;
m_name = name;
m_attributes = attributes;
m_callingConvention = callingConvention;
}
//
// MethodInfo api
//
public override string ToString()
{
// "ReturnType Name(ParamType1, ParamType2, ...)"
var sbName = new ValueStringBuilder(MethodNameBufferSize);
sbName.Append(ReturnType.FormatTypeName());
sbName.Append(' ');
sbName.Append(Name);
sbName.Append('(');
AppendParameters(ref sbName, GetParameterTypes(), CallingConvention);
sbName.Append(')');
return sbName.ToString();
}
public override string Name => m_name;
public override Type? DeclaringType => null;
public override Type? ReflectedType => null;
public override Module Module => m_owner.m_module;
public override RuntimeMethodHandle MethodHandle => throw new InvalidOperationException(SR.InvalidOperation_NotAllowedInDynamicMethod);
public override MethodAttributes Attributes => m_attributes;
public override CallingConventions CallingConvention => m_callingConvention;
public override MethodInfo GetBaseDefinition()
{
return this;
}
public override ParameterInfo[] GetParameters()
{
// Return a copy so callers cannot mutate the cached parameter array.
ParameterInfo[] privateParameters = LoadParameters();
ParameterInfo[] parameters = new ParameterInfo[privateParameters.Length];
Array.Copy(privateParameters, parameters, privateParameters.Length);
return parameters;
}
public override MethodImplAttributes GetMethodImplementationFlags()
{
return MethodImplAttributes.IL | MethodImplAttributes.NoInlining;
}
public override object Invoke(object? obj, BindingFlags invokeAttr, Binder? binder, object?[]? parameters, CultureInfo? culture)
{
// We want the creator of the DynamicMethod to control who has access to the
// DynamicMethod (just like we do for delegates). However, a user can get to
// the corresponding RTDynamicMethod using Exception.TargetSite, StackFrame.GetMethod, etc.
// If we allowed use of RTDynamicMethod, the creator of the DynamicMethod would
// not be able to bound access to the DynamicMethod. Hence, we do not allow
// direct use of RTDynamicMethod.
throw new ArgumentException(SR.Argument_MustBeRuntimeMethodInfo, "this");
}
public override object[] GetCustomAttributes(Type attributeType, bool inherit)
{
if (attributeType == null)
throw new ArgumentNullException(nameof(attributeType));
if (attributeType.IsAssignableFrom(typeof(MethodImplAttribute)))
return new object[] { new MethodImplAttribute((MethodImplOptions)GetMethodImplementationFlags()) };
else
return Array.Empty<object>();
}
public override object[] GetCustomAttributes(bool inherit)
{
// support for MethodImplAttribute PCA
return new object[] { new MethodImplAttribute((MethodImplOptions)GetMethodImplementationFlags()) };
}
public override bool IsDefined(Type attributeType, bool inherit)
{
if (attributeType == null)
throw new ArgumentNullException(nameof(attributeType));
if (attributeType.IsAssignableFrom(typeof(MethodImplAttribute)))
return true;
else
return false;
}
public override bool IsSecurityCritical => m_owner.IsSecurityCritical;
public override bool IsSecuritySafeCritical => m_owner.IsSecuritySafeCritical;
public override bool IsSecurityTransparent => m_owner.IsSecurityTransparent;
public override Type ReturnType => m_owner.m_returnType;
public override ParameterInfo ReturnParameter => new RuntimeParameterInfo(this, null, m_owner.m_returnType, -1);
public override ICustomAttributeProvider ReturnTypeCustomAttributes => new EmptyCAHolder();
internal RuntimeParameterInfo[] LoadParameters()
{
// Lazily materializes one RuntimeParameterInfo per parameter type.
// Benign race: concurrent callers may build duplicate arrays, but the
// contents are identical, so whichever publish wins is equivalent.
if (m_parameters == null)
{
Type[] parameterTypes = m_owner.m_parameterTypes;
RuntimeParameterInfo[] parameters = new RuntimeParameterInfo[parameterTypes.Length];
for (int i = 0; i < parameterTypes.Length; i++)
parameters[i] = new RuntimeParameterInfo(this, null, parameterTypes[i], i);
if (m_parameters == null)
// should we interlockexchange?
m_parameters = parameters;
}
return m_parameters;
}
}
}
}
| |
/**
* LookupService.cs
*
* Copyright (C) 2008 MaxMind Inc. All Rights Reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
using System;
using System.IO;
using System.Net;
public class LookupService{
private FileStream file = null;
private DatabaseInfo databaseInfo = null;
byte databaseType = Convert.ToByte(DatabaseInfo.COUNTRY_EDITION);
int[] databaseSegments;
int recordLength;
int dboptions;
byte[] dbbuffer;
String licenseKey;
int dnsService = 0;
private static Country UNKNOWN_COUNTRY = new Country("--", "N/A");
private static int COUNTRY_BEGIN = 16776960;
private static int STATE_BEGIN = 16700000;
private static int STRUCTURE_INFO_MAX_SIZE = 20;
private static int DATABASE_INFO_MAX_SIZE = 100;
private static int FULL_RECORD_LENGTH = 100;//???
private static int SEGMENT_RECORD_LENGTH = 3;
private static int STANDARD_RECORD_LENGTH = 3;
private static int ORG_RECORD_LENGTH = 4;
private static int MAX_RECORD_LENGTH = 4;
private static int MAX_ORG_RECORD_LENGTH = 1000;//???
private static int FIPS_RANGE = 360;
private static int STATE_BEGIN_REV0 = 16700000;
private static int STATE_BEGIN_REV1 = 16000000;
private static int US_OFFSET = 1;
private static int CANADA_OFFSET = 677;
private static int WORLD_OFFSET = 1353;
public static int GEOIP_STANDARD = 0;
public static int GEOIP_MEMORY_CACHE = 1;
public static int GEOIP_UNKNOWN_SPEED = 0;
public static int GEOIP_DIALUP_SPEED = 1;
public static int GEOIP_CABLEDSL_SPEED = 2;
public static int GEOIP_CORPORATE_SPEED = 3;
private static String[] countryCode = {
"--","AP","EU","AD","AE","AF","AG","AI","AL","AM","AN","AO","AQ","AR",
"AS","AT","AU","AW","AZ","BA","BB","BD","BE","BF","BG","BH","BI","BJ",
"BM","BN","BO","BR","BS","BT","BV","BW","BY","BZ","CA","CC","CD","CF",
"CG","CH","CI","CK","CL","CM","CN","CO","CR","CU","CV","CX","CY","CZ",
"DE","DJ","DK","DM","DO","DZ","EC","EE","EG","EH","ER","ES","ET","FI",
"FJ","FK","FM","FO","FR","FX","GA","GB","GD","GE","GF","GH","GI","GL",
"GM","GN","GP","GQ","GR","GS","GT","GU","GW","GY","HK","HM","HN","HR",
"HT","HU","ID","IE","IL","IN","IO","IQ","IR","IS","IT","JM","JO","JP",
"KE","KG","KH","KI","KM","KN","KP","KR","KW","KY","KZ","LA","LB","LC",
"LI","LK","LR","LS","LT","LU","LV","LY","MA","MC","MD","MG","MH","MK",
"ML","MM","MN","MO","MP","MQ","MR","MS","MT","MU","MV","MW","MX","MY",
"MZ","NA","NC","NE","NF","NG","NI","NL","NO","NP","NR","NU","NZ","OM",
"PA","PE","PF","PG","PH","PK","PL","PM","PN","PR","PS","PT","PW","PY",
"QA","RE","RO","RU","RW","SA","SB","SC","SD","SE","SG","SH","SI","SJ",
"SK","SL","SM","SN","SO","SR","ST","SV","SY","SZ","TC","TD","TF","TG",
"TH","TJ","TK","TM","TN","TO","TL","TR","TT","TV","TW","TZ","UA","UG",
"UM","US","UY","UZ","VA","VC","VE","VG","VI","VN","VU","WF","WS","YE",
"YT","RS","ZA","ZM","ME","ZW","A1","A2","O1","AX","GG","IM","JE","BL",
"MF"};
private static String[] countryName = {
"N/A","Asia/Pacific Region","Europe","Andorra","United Arab Emirates",
"Afghanistan","Antigua and Barbuda","Anguilla","Albania","Armenia",
"Netherlands Antilles","Angola","Antarctica","Argentina","American Samoa",
"Austria","Australia","Aruba","Azerbaijan","Bosnia and Herzegovina",
"Barbados","Bangladesh","Belgium","Burkina Faso","Bulgaria","Bahrain",
"Burundi","Benin","Bermuda","Brunei Darussalam","Bolivia","Brazil","Bahamas",
"Bhutan","Bouvet Island","Botswana","Belarus","Belize","Canada",
"Cocos (Keeling) Islands","Congo, The Democratic Republic of the",
"Central African Republic","Congo","Switzerland","Cote D'Ivoire",
"Cook Islands","Chile","Cameroon","China","Colombia","Costa Rica","Cuba",
"Cape Verde","Christmas Island","Cyprus","Czech Republic","Germany",
"Djibouti","Denmark","Dominica","Dominican Republic","Algeria","Ecuador",
"Estonia","Egypt","Western Sahara","Eritrea","Spain","Ethiopia","Finland",
"Fiji","Falkland Islands (Malvinas)","Micronesia, Federated States of",
"Faroe Islands","France","France, Metropolitan","Gabon","United Kingdom",
"Grenada","Georgia","French Guiana","Ghana","Gibraltar","Greenland","Gambia",
"Guinea","Guadeloupe","Equatorial Guinea","Greece",
"South Georgia and the South Sandwich Islands","Guatemala","Guam",
"Guinea-Bissau","Guyana","Hong Kong","Heard Island and McDonald Islands",
"Honduras","Croatia","Haiti","Hungary","Indonesia","Ireland","Israel","India",
"British Indian Ocean Territory","Iraq","Iran, Islamic Republic of",
"Iceland","Italy","Jamaica","Jordan","Japan","Kenya","Kyrgyzstan","Cambodia",
"Kiribati","Comoros","Saint Kitts and Nevis",
"Korea, Democratic People's Republic of","Korea, Republic of","Kuwait",
"Cayman Islands","Kazakstan","Lao People's Democratic Republic","Lebanon",
"Saint Lucia","Liechtenstein","Sri Lanka","Liberia","Lesotho","Lithuania",
"Luxembourg","Latvia","Libyan Arab Jamahiriya","Morocco","Monaco",
"Moldova, Republic of","Madagascar","Marshall Islands",
"Macedonia, the Former Yugoslav Republic of","Mali","Myanmar","Mongolia",
"Macau","Northern Mariana Islands","Martinique","Mauritania","Montserrat",
"Malta","Mauritius","Maldives","Malawi","Mexico","Malaysia","Mozambique",
"Namibia","New Caledonia","Niger","Norfolk Island","Nigeria","Nicaragua",
"Netherlands","Norway","Nepal","Nauru","Niue","New Zealand","Oman","Panama",
"Peru","French Polynesia","Papua New Guinea","Philippines","Pakistan",
"Poland","Saint Pierre and Miquelon","Pitcairn","Puerto Rico","" +
"Palestinian Territory, Occupied","Portugal","Palau","Paraguay","Qatar",
"Reunion","Romania","Russian Federation","Rwanda","Saudi Arabia",
"Solomon Islands","Seychelles","Sudan","Sweden","Singapore","Saint Helena",
"Slovenia","Svalbard and Jan Mayen","Slovakia","Sierra Leone","San Marino",
"Senegal","Somalia","Suriname","Sao Tome and Principe","El Salvador",
"Syrian Arab Republic","Swaziland","Turks and Caicos Islands","Chad",
"French Southern Territories","Togo","Thailand","Tajikistan","Tokelau",
"Turkmenistan","Tunisia","Tonga","Timor-Leste","Turkey","Trinidad and Tobago",
"Tuvalu","Taiwan","Tanzania, United Republic of","Ukraine","Uganda",
"United States Minor Outlying Islands","United States","Uruguay","Uzbekistan",
"Holy See (Vatican City State)","Saint Vincent and the Grenadines",
"Venezuela","Virgin Islands, British","Virgin Islands, U.S.","Vietnam",
"Vanuatu","Wallis and Futuna","Samoa","Yemen","Mayotte","Serbia",
"South Africa","Zambia","Montenegro","Zimbabwe","Anonymous Proxy",
"Satellite Provider","Other",
"Aland Islands","Guernsey","Isle of Man","Jersey","Saint Barthelemy",
"Saint Martin"};
// Opens the GeoIP database file read-only and parses its structure trailer (init()).
// options is a bitmask of GEOIP_* flags (e.g. GEOIP_MEMORY_CACHE loads the whole file into memory).
// NOTE(review): on failure this only logs to the console and leaves 'file' null, so every
// subsequent lookup will throw "Database has been closed." — deliberate best-effort behavior,
// preserved as-is.
public LookupService(String databaseFile, int options){
try {
this.file = new FileStream(databaseFile, FileMode.Open, FileAccess.Read);
dboptions = options;
init();
} catch(System.SystemException e) {
Console.Write("cannot open file " + databaseFile + "\n");
}
}
// Convenience overload: open the database with the default (non-cached) GEOIP_STANDARD mode.
public LookupService(String databaseFile):this(databaseFile, GEOIP_STANDARD){
}
// Reads the structure-information trailer stored at the end of the legacy GeoIP .dat file
// to determine the database edition, the segment offset(s) and the per-node record length.
// Falls back to COUNTRY_EDITION defaults when no trailer is found. If GEOIP_MEMORY_CACHE
// is set, the entire file is then read into dbbuffer for in-memory lookups.
private void init(){
int i, j;
byte [] delim = new byte[3];
byte [] buf = new byte[SEGMENT_RECORD_LENGTH];
// Defaults used when no structure record is found in the trailer scan below.
databaseType = (byte)DatabaseInfo.COUNTRY_EDITION;
recordLength = STANDARD_RECORD_LENGTH;
//file.Seek(file.Length() - 3,SeekOrigin.Begin);
// Start 3 bytes from the end and scan backwards for the 0xFF 0xFF 0xFF delimiter.
file.Seek(-3,SeekOrigin.End);
for (i = 0; i < STRUCTURE_INFO_MAX_SIZE; i++) {
file.Read(delim,0,3);
if (delim[0] == 255 && delim[1] == 255 && delim[2] == 255){
// The byte right after the delimiter encodes the database edition.
databaseType = Convert.ToByte(file.ReadByte());
if (databaseType >= 106) {
// Backward compatibility with databases from April 2003 and earlier
databaseType -= 105;
}
// Determine the database type.
if (databaseType == DatabaseInfo.REGION_EDITION_REV0) {
databaseSegments = new int[1];
databaseSegments[0] = STATE_BEGIN_REV0;
recordLength = STANDARD_RECORD_LENGTH;
} else if (databaseType == DatabaseInfo.REGION_EDITION_REV1) {
databaseSegments = new int[1];
databaseSegments[0] = STATE_BEGIN_REV1;
recordLength = STANDARD_RECORD_LENGTH;
} else if (databaseType == DatabaseInfo.CITY_EDITION_REV0 ||
databaseType == DatabaseInfo.CITY_EDITION_REV1 ||
databaseType == DatabaseInfo.ORG_EDITION ||
databaseType == DatabaseInfo.ISP_EDITION ||
databaseType == DatabaseInfo.ASNUM_EDITION)
{
databaseSegments = new int[1];
databaseSegments[0] = 0;
if (databaseType == DatabaseInfo.CITY_EDITION_REV0 ||
databaseType == DatabaseInfo.CITY_EDITION_REV1) {
recordLength = STANDARD_RECORD_LENGTH;
}
else {
recordLength = ORG_RECORD_LENGTH;
}
// These editions store the segment offset as a little-endian
// SEGMENT_RECORD_LENGTH-byte integer right after the type byte.
file.Read(buf,0,SEGMENT_RECORD_LENGTH);
for (j = 0; j < SEGMENT_RECORD_LENGTH; j++) {
databaseSegments[0] += (unsignedByteToInt(buf[j]) << (j * 8));
}
}
break;
}
else {
//file.Seek(file.getFilePointer() - 4);
// Not the delimiter: step back 4 bytes (the 3 just read plus 1) and retry.
file.Seek(-4,SeekOrigin.Current);
//file.Seek(file.position-4,SeekOrigin.Begin);
}
}
if ((databaseType == DatabaseInfo.COUNTRY_EDITION) |
(databaseType == DatabaseInfo.PROXY_EDITION) |
(databaseType == DatabaseInfo.NETSPEED_EDITION)) {
databaseSegments = new int[1];
databaseSegments[0] = COUNTRY_BEGIN;
recordLength = STANDARD_RECORD_LENGTH;
}
// Optionally cache the whole database in memory for faster lookups.
// NOTE(review): a single Read is assumed to fill the buffer completely — confirm for
// large files, since FileStream.Read may return fewer bytes than requested.
if ((dboptions & GEOIP_MEMORY_CACHE) == 1) {
int l = (int) file.Length;
dbbuffer = new byte[l];
file.Seek(0,SeekOrigin.Begin);
file.Read(dbbuffer,0,l);
}
}
// Closes the underlying database file. Best-effort: any failure while
// closing is swallowed (matching the original MaxMind API), but the handle
// is always cleared so later lookups fail fast with
// "Database has been closed." instead of touching a dead stream.
public void close(){
    try {
        if (file != null) {
            file.Close();
        }
    }
    catch (Exception) {
        // Deliberately ignored: close is best-effort.
    }
    finally {
        file = null;
    }
}
// Looks up the country record for the given IPAddress by converting it
// to its 32-bit numeric form and delegating to getCountry(long).
public Country getCountry(IPAddress ipAddress) {
return getCountry(bytestoLong(ipAddress.GetAddressBytes()));
}
// Looks up the country record for an IP address given in string form
// (e.g. "127.0.0.1"). Unparsable input resolves to UNKNOWN_COUNTRY,
// preserving the original contract, but TryParse is used so expected
// bad input does not pay the cost of an exception (and no longer spams
// the console with parse errors).
public Country getCountry(String ipAddress){
    IPAddress addr;
    if (!IPAddress.TryParse(ipAddress, out addr)) {
        return UNKNOWN_COUNTRY;
    }
    return getCountry(bytestoLong(addr.GetAddressBytes()));
}
// Resolves a 32-bit numeric IPv4 address to a Country record.
// City editions store country data inside the location record; all other
// editions index directly into the country code/name tables.
public Country getCountry(long ipAddress){
    if (file == null) {
        throw new Exception("Database has been closed.");
    }
    bool cityEdition = (databaseType == DatabaseInfo.CITY_EDITION_REV1)
        | (databaseType == DatabaseInfo.CITY_EDITION_REV0);
    if (cityEdition) {
        Location loc = getLocation(ipAddress);
        if (loc == null) {
            return UNKNOWN_COUNTRY;
        }
        return new Country(loc.countryCode, loc.countryName);
    }
    int index = SeekCountry(ipAddress) - COUNTRY_BEGIN;
    if (index == 0) {
        // Index 0 is the "not found" sentinel.
        return UNKNOWN_COUNTRY;
    }
    return new Country(countryCode[index], countryName[index]);
}
// Looks up the database record ID for an IP address given in string form.
// Unparsable input returns 0 (the original contract), using TryParse
// instead of catching a parse exception.
public int getID(String ipAddress){
    IPAddress addr;
    if (!IPAddress.TryParse(ipAddress, out addr)) {
        return 0;
    }
    return getID(bytestoLong(addr.GetAddressBytes()));
}
// Looks up the database record ID for the given IPAddress by converting
// it to its 32-bit numeric form and delegating to getID(long).
public int getID(IPAddress ipAddress) {
return getID(bytestoLong(ipAddress.GetAddressBytes()));
}
// Returns the raw record index for a numeric IPv4 address, relative to
// the first database segment. Throws if close() has been called.
public int getID(long ipAddress){
    if (file == null) {
        throw new Exception("Database has been closed.");
    }
    return SeekCountry(ipAddress) - databaseSegments[0];
}
// Returns metadata about the open database (the info string stored near
// the end of the .dat file), caching the result after the first read.
// On any failure an empty DatabaseInfo is returned rather than throwing.
public DatabaseInfo getDatabaseInfo(){
if (databaseInfo != null) {
return databaseInfo;
}
try {
// Synchronize since we're accessing the database file.
//synchronized (this) {
bool hasStructureInfo = false;
byte [] delim = new byte[3];
// Advance to part of file where database info is stored.
//file.Seek(file.length() - 3);
file.Seek(-3,SeekOrigin.End);
// The structure section is terminated by three 0xFF bytes; probe up to
// STRUCTURE_INFO_MAX_SIZE positions for that marker.
for (int i=0; i<STRUCTURE_INFO_MAX_SIZE; i++) {
file.Read(delim,0,3);
if (delim[0] == 255 && delim[1] == 255 && delim[2] == 255) {
hasStructureInfo = true;
break;
}
}
if (hasStructureInfo) {
//file.Seek(file.getFilePointer() - 3);
file.Seek(-3,SeekOrigin.Current);
}
else {
// No structure info, must be pre Sep 2002 database, go back to end.
//file.Seek(file.length() - 3);
file.Seek(-3,SeekOrigin.End);
}
// Find the database info string: scan backwards (read 3, rewind 4)
// until three zero bytes mark its start; i is then its length.
for (int i=0; i<DATABASE_INFO_MAX_SIZE; i++) {
file.Read(delim,0,3);
if (delim[0]==0 && delim[1]==0 && delim[2]==0) {
byte[] dbInfo = new byte[i];
char[] dbInfo2 = new char[i];
file.Read(dbInfo,0,i);
// Byte-to-char copy (the info string is plain ASCII).
for (int a0 = 0;a0 < i;a0++){
dbInfo2[a0] = Convert.ToChar(dbInfo[a0]);
}
// Create the database info object using the string.
this.databaseInfo = new DatabaseInfo(new String(dbInfo2));
return databaseInfo;
}
//file.Seek(file.getFilePointer() -4);
file.Seek(-4,SeekOrigin.Current);
}
//}
}
catch (Exception e) {
// Fall through to the empty-info return below; the error is only logged.
Console.Write(e.Message);
//e.printStackTrace();
}
return new DatabaseInfo("");
}
// Looks up the region record for the given IPAddress by converting it
// to its 32-bit numeric form and delegating to getRegion(long).
public Region getRegion(IPAddress ipAddress) {
return getRegion(bytestoLong(ipAddress.GetAddressBytes()));
}
// Looks up the region record for an IP address given in string form.
// Unparsable input returns null (the original contract), using TryParse
// instead of catching a parse exception.
public Region getRegion(String str){
    IPAddress addr;
    if (!IPAddress.TryParse(str, out addr)) {
        return null;
    }
    return getRegion(bytestoLong(addr.GetAddressBytes()));
}
// Resolves a numeric IPv4 address to a Region record for the two
// region-edition database layouts. Region codes are packed as offsets
// past per-country bases; US/CA regions decode to two uppercase letters.
// Non-region editions return an empty Region.
public Region getRegion(long ipnum){
Region record = new Region();
int seek_region = 0;
if (databaseType == DatabaseInfo.REGION_EDITION_REV0) {
seek_region = SeekCountry(ipnum) - STATE_BEGIN_REV0;
char [] ch = new char[2];
if (seek_region >= 1000){
// Values >= 1000 encode a US state as (code - 1000) in base 26.
record.countryCode = "US";
record.countryName = "United States";
ch[0] = (char)(((seek_region - 1000)/26) + 65);
ch[1] = (char)(((seek_region - 1000)%26) + 65);
record.region = new String(ch);
} else {
record.countryCode = countryCode[seek_region];
record.countryName = countryName[seek_region];
record.region = "";
}
} else if (databaseType == DatabaseInfo.REGION_EDITION_REV1) {
seek_region = SeekCountry(ipnum) - STATE_BEGIN_REV1;
char [] ch = new char[2];
if (seek_region < US_OFFSET) {
// Below US_OFFSET: no region information available.
record.countryCode = "";
record.countryName = "";
record.region = "";
} else if (seek_region < CANADA_OFFSET) {
// [US_OFFSET, CANADA_OFFSET): US state, base-26 letter pair.
record.countryCode = "US";
record.countryName = "United States";
ch[0] = (char)(((seek_region - US_OFFSET)/26) + 65);
ch[1] = (char)(((seek_region - US_OFFSET)%26) + 65);
record.region = new String(ch);
} else if (seek_region < WORLD_OFFSET) {
// [CANADA_OFFSET, WORLD_OFFSET): Canadian province, same encoding.
record.countryCode = "CA";
record.countryName = "Canada";
ch[0] = (char)(((seek_region - CANADA_OFFSET)/26) + 65);
ch[1] = (char)(((seek_region - CANADA_OFFSET)%26) + 65);
record.region = new String(ch);
} else {
// >= WORLD_OFFSET: country index packed in FIPS_RANGE-sized bands.
record.countryCode = countryCode[(seek_region - WORLD_OFFSET) / FIPS_RANGE];
record.countryName = countryName[(seek_region - WORLD_OFFSET) / FIPS_RANGE];
record.region = "";
}
}
return record;
}
// Looks up the location record for the given IPAddress by converting it
// to its 32-bit numeric form and delegating to getLocation(long).
public Location getLocation(IPAddress addr){
return getLocation(bytestoLong(addr.GetAddressBytes()));
}
// Looks up the location record for an IP address given in string form.
// Unparsable input returns null (the original contract), using TryParse
// instead of catching a parse exception.
public Location getLocation(String str){
    IPAddress addr;
    if (!IPAddress.TryParse(str, out addr)) {
        return null;
    }
    return getLocation(bytestoLong(addr.GetAddressBytes()));
}
// Resolves a numeric IPv4 address to a full Location record (country,
// region, city, postal code, lat/long, and — for CITY_EDITION_REV1 US
// records — metro/DMA and area codes). Returns null when the address is
// not in the database, or a partially-filled record on I/O failure.
// The city record layout is: country index byte, then three NUL-terminated
// strings (region, city, postal code), then 3-byte packed latitude and
// longitude, then (REV1 only) a 3-byte metro/area combo.
public Location getLocation(long ipnum){
int record_pointer;
byte[] record_buf = new byte[FULL_RECORD_LENGTH];
char[] record_buf2 = new char[FULL_RECORD_LENGTH];
int record_buf_offset = 0;
Location record = new Location();
int str_length = 0;
int j, Seek_country;
double latitude = 0, longitude = 0;
try {
Seek_country = SeekCountry(ipnum);
if (Seek_country == databaseSegments[0]) {
// Sentinel: address not present in the database.
return null;
}
record_pointer = Seek_country + ((2 * recordLength - 1) * databaseSegments[0]);
if ((dboptions & GEOIP_MEMORY_CACHE) == 1){
// Memory-cached database: copy the record out of the in-memory buffer.
for (int i = 0;i < FULL_RECORD_LENGTH;i++){
record_buf[i] = dbbuffer[i+record_pointer];
}
} else {
file.Seek(record_pointer,SeekOrigin.Begin);
file.Read(record_buf,0,FULL_RECORD_LENGTH);
}
// Char copy of the raw bytes, used to build the substrings below.
for (int a0 = 0;a0 < FULL_RECORD_LENGTH;a0++){
record_buf2[a0] = Convert.ToChar(record_buf[a0]);
}
// get country
record.countryCode = countryCode[unsignedByteToInt(record_buf[0])];
record.countryName = countryName[unsignedByteToInt(record_buf[0])];
record_buf_offset++;
// get region (NUL-terminated string)
while (record_buf[record_buf_offset + str_length] != '\0')
str_length++;
if (str_length > 0) {
record.region = new String(record_buf2, record_buf_offset, str_length);
}
record_buf_offset += str_length + 1;
str_length = 0;
// get region_name
record.regionName = RegionName.getRegionName( record.countryCode, record.region );
// get city (NUL-terminated string)
while (record_buf[record_buf_offset + str_length] != '\0')
str_length++;
if (str_length > 0) {
record.city = new String(record_buf2, record_buf_offset, str_length);
}
record_buf_offset += (str_length + 1);
str_length = 0;
// get postal code (NUL-terminated string)
while (record_buf[record_buf_offset + str_length] != '\0')
str_length++;
if (str_length > 0) {
record.postalCode = new String(record_buf2, record_buf_offset, str_length);
}
record_buf_offset += (str_length + 1);
// get latitude: 3 little-endian bytes, scaled by 10000 and biased by 180.
for (j = 0; j < 3; j++)
latitude += (unsignedByteToInt(record_buf[record_buf_offset + j]) << (j * 8));
record.latitude = (float) latitude/10000 - 180;
record_buf_offset += 3;
// get longitude: same 3-byte encoding as latitude.
for (j = 0; j < 3; j++)
longitude += (unsignedByteToInt(record_buf[record_buf_offset + j]) << (j * 8));
record.longitude = (float) longitude/10000 - 180;
record.metro_code = record.dma_code = 0;
record.area_code = 0;
if (databaseType == DatabaseInfo.CITY_EDITION_REV1) {
// get metro_code: combo value packs dma_code*1000 + area_code,
// populated only for US records.
int metroarea_combo = 0;
if (record.countryCode == "US"){
record_buf_offset += 3;
for (j = 0; j < 3; j++)
metroarea_combo += (unsignedByteToInt(record_buf[record_buf_offset + j]) << (j * 8));
record.metro_code = record.dma_code = metroarea_combo/1000;
record.area_code = metroarea_combo % 1000;
}
}
}
catch (IOException) {
// Error-message typo fixed ("seting" -> "setting").
Console.Write("IO Exception while setting up segments");
}
return record;
}
// Looks up the organization/ISP name for the given IPAddress by converting
// it to its 32-bit numeric form and delegating to getOrg(long).
public String getOrg(IPAddress addr) {
return getOrg(bytestoLong(addr.GetAddressBytes()));
}
// Looks up the organization/ISP name for an IP address given in string
// form. Unparsable input returns null (the original contract), using
// TryParse instead of catching a parse exception.
public String getOrg(String str){
    IPAddress addr;
    if (!IPAddress.TryParse(str, out addr)) {
        return null;
    }
    return getOrg(bytestoLong(addr.GetAddressBytes()));
}
// Resolves a numeric IPv4 address to the NUL-terminated organization/ISP
// string stored in ORG/ISP/ASNUM editions. Returns null when the address
// is not in the database or on I/O failure.
public String getOrg(long ipnum){
    int Seek_org;
    int record_pointer;
    int str_length = 0;
    byte [] buf = new byte[MAX_ORG_RECORD_LENGTH];
    char [] buf2 = new char[MAX_ORG_RECORD_LENGTH];
    String org_buf;
    try {
        Seek_org = SeekCountry(ipnum);
        if (Seek_org == databaseSegments[0]) {
            // Sentinel: address not present in the database.
            return null;
        }
        record_pointer = Seek_org + (2 * recordLength - 1) * databaseSegments[0];
        if ((dboptions & GEOIP_MEMORY_CACHE) == 1) {
            // Memory-cached database: copy the record out of the buffer.
            for (int i = 0;i < MAX_ORG_RECORD_LENGTH;i++) {
                buf[i] = dbbuffer[i+record_pointer];
            }
        } else {
            file.Seek(record_pointer,SeekOrigin.Begin);
            file.Read(buf,0,MAX_ORG_RECORD_LENGTH);
        }
        // Bound the scan by the buffer length: previously a record with no
        // NUL inside MAX_ORG_RECORD_LENGTH bytes would read past the end
        // of buf, and the trailing buf2[str_length] = '\0' write could
        // throw IndexOutOfRangeException when the buffer was full. The
        // terminator write was also redundant (the String constructor
        // takes an explicit length) and has been removed.
        while (str_length < MAX_ORG_RECORD_LENGTH && buf[str_length] != 0) {
            buf2[str_length] = Convert.ToChar(buf[str_length]);
            str_length++;
        }
        org_buf = new String(buf2,0,str_length);
        return org_buf;
    }
    catch (IOException) {
        Console.Write("IO Exception");
        return null;
    }
}
// Walks the binary trie stored in the database: at each depth (from the
// most-significant bit of the IPv4 address down) a node holds two
// recordLength-byte little-endian child pointers; a pointer value at or
// beyond databaseSegments[0] is a leaf and is returned as the record index.
private int SeekCountry(long ipAddress){
byte [] buf = new byte[2 * MAX_RECORD_LENGTH];
int [] x = new int[2];
int offset = 0;
for (int depth = 31; depth >= 0; depth--) {
try {
if ((dboptions & GEOIP_MEMORY_CACHE) == 1) {
// Memory-cached database: copy the node out of the in-memory buffer.
for (int i = 0;i < (2 * MAX_RECORD_LENGTH);i++) {
buf[i] = dbbuffer[i+(2 * recordLength * offset)];
}
} else {
file.Seek(2 * recordLength * offset,SeekOrigin.Begin);
file.Read(buf,0,2 * MAX_RECORD_LENGTH);
}
}
catch (IOException e) {
Console.Write("IO Exception");
}
// Decode the two little-endian child pointers.
for (int i = 0; i<2; i++) {
x[i] = 0;
for (int j = 0; j<recordLength; j++) {
int y = buf[(i*recordLength)+j];
// NOTE(review): C# bytes are unsigned, so y is never negative; this
// fixup is a remnant of the Java port and never fires.
if (y < 0) {
y+= 256;
}
x[i] += (y << (j * 8));
}
}
// Branch right (x[1]) when the current address bit is set, else left.
if ((ipAddress & (1 << depth)) > 0) {
if (x[1] >= databaseSegments[0]) {
return x[1];
}
offset = x[1];
}
else {
if (x[0] >= databaseSegments[0]) {
return x[0];
}
offset = x[0];
}
}
// shouldn't reach here
Console.Write("Error Seeking country while Seeking " + ipAddress);
return 0;
}
// Reverses the byte order of a 32-bit IPv4 address held in a long
// (network <-> host order). Bits above 32 in the input are discarded.
// Applying the function twice returns the original value.
private static long swapbytes(long ipAddress){
    long b0 = ipAddress & 255;
    long b1 = (ipAddress >> 8) & 255;
    long b2 = (ipAddress >> 16) & 255;
    long b3 = (ipAddress >> 24) & 255;
    return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3;
}
// Converts the first four octets of an address (big-endian order) into a
// 32-bit numeric value held in a long. The Java-style negative-byte fixup
// in the original was dead code — C# 'byte' is unsigned, so the local was
// never negative — and has been removed.
private static long bytestoLong(byte [] address){
    long ipnum = 0;
    for (int i = 0; i < 4; ++i) {
        ipnum |= (long) address[i] << ((3 - i) * 8);
    }
    return ipnum;
}
// Widens a byte to an int, keeping only the low 8 bits. (C# bytes are
// already unsigned, so the mask is defensive; values are 0..255.)
private static int unsignedByteToInt(byte b) {
    return b & 0xFF;
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="WmlPageAdapter.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
#if WMLSUPPORT
namespace System.Web.UI.Adapters {
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
using System.Web.UI.HtmlControls;
using System.Web.UI.WebControls;
using System.Web.Util;
using System.Collections;
public class WmlPageAdapter : PageAdapter {
private static String _cacheExpiry = "<head>\r\n"
+ "<meta http-equiv=\"Cache-Control\" content=\"max-age=0\" forua=\"true\"/>\r\n"
+ "</head>\r\n";
private static String _headerBegin = "<?xml version='1.0'";
private static String _headerEncoding = " encoding ='{0}'";
private static String _headerEnd = "?>\r\n"
+ "<!DOCTYPE wml PUBLIC '-//WAPFORUM//DTD WML 1.1//EN' 'http://www.wapforum.org/DTD/wml_1.1.xml'>";
private const String _postBackEventArgumentVarName = "mcsva";
private const String _postBackEventTargetVarName = "mcsvt";
private const String _postUrlVarName = "mcsvp";
// Mobile Internet Toolkit 5093
private static readonly char[] _specialEncodingChars = new char[64] {
'\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0',
'\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0',
'\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '/', '-', '\0', '+', '=', '*',
'\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', '.', '\0', '\0',
};
private static readonly Encoding _utf8Encoding = Encoding.GetEncoding("UTF-8");
private IDictionary _dynamicPostFields = new ListDictionary();
private bool _haveRequiresNoSoftkeyLabels = false;
private bool _haveRequiresUTF8ContentEncoding = false;
private int _numberOfPostBacks;
private bool _requiresUTF8ContentEncoding = false;
// '+' <-> '-'
// '=' <-> '.'
// '/' <-> '*'
private IDictionary _formVariables = null; // Variables set in an onenterforward setvar at the top of the card.
private string _queryString;
private bool _requiresNoSoftkeyLabels = false;
private IDictionary _staticPostFields = new ListDictionary();
private bool _usePostBackCards = false;
private bool _writtenFormVariables = false;
private bool _writtenPostBack = false;
// Lazily caches the page's client query string on first access.
// Ampersands are encoded later by WriteEncodedText (see RenderFormQueryString).
private string QueryString {
    get {
        if (_queryString != null) {
            return _queryString;
        }
        _queryString = Page.ClientQueryString;
        return _queryString;
    }
}
// UNDONE: Internal because needed by WmlTextWriter. Consider removing this prop somehow.
// Returns true if form variables have been written.
// True once the form variables have been written for the current form.
// Internal because WmlTextWriter needs it (see the UNDONE note above the
// original declaration).
internal bool WrittenFormVariables {
get {
return _writtenFormVariables;
}
}
// Adds a form variable.
// Records a WML form variable during the analyze pass, keyed by the short
// name mapped from clientID; RenderSetFormVariables emits them on the
// render pass. No-op outside analyze mode.
public void AddFormVariable(WmlTextWriter writer, String clientID, String value, bool generateRandomID) {
// On first (analyze) pass, form variables are added to
// an array. On second pass, they are rendered. This ensures
// that only visible controls generate variables.
if (!writer.AnalyzeMode) {
return;
}
if (_formVariables == null) {
_formVariables = new ListDictionary();
}
// Map the client ID to a short name. See
// MapClientIDToShortName for details.
_formVariables[writer.MapClientIDToShortName(clientID, generateRandomID)] = value;
}
// Renders the form in two passes: an analyze pass that collects form
// variables/post fields (producing no output), then the real render pass.
// The OnFormRender flag must be reset between passes or the second
// RenderForm call would be rejected.
private void AnalyzeAndRenderHtmlForm(WmlTextWriter writer, HtmlForm form) {
if (form == null) {
return;
}
writer.SetAnalyzeMode(true);
RenderForm(writer, form);
Page.ResetOnFormRenderCalled();
writer.SetAnalyzeMode(false);
RenderForm(writer, form);
writer.WriteLine();
}
// Counts postbacks seen during the analyze pass; PostAnalyzeForm uses the
// total to decide whether a shared postback card is worthwhile.
protected virtual void AnalyzePostBack(WmlPostFieldType postBackType) {
_numberOfPostBacks++;
}
// Extracted into separate method for intelligibility.
// Starts a form pass: resets the written-variables flag and, on the render
// pass only, emits the card opening and variable setup.
// Extracted into separate method for intelligibility.
private void BeginForm(WmlTextWriter writer) {
_writtenFormVariables = false;
if (!writer.AnalyzeMode) {
RenderBeginForm(writer);
}
}
// Returns the postback collection, undoing the special view-state character
// encoding (see EncodeSpecialViewState) for browsers that require it.
public override NameValueCollection DeterminePostBackMode() {
    NameValueCollection collection = base.DeterminePostBackMode();
    if (collection != null
        && StringUtil.EqualsIgnoreCase((string)Browser["requiresSpecialViewStateEncoding"], "true")) {
        collection = ReplaceSpeciallyEncodedState(collection);
    }
    return collection;
}
// UNDONE: For M1, we only have Wml browsers which do not support accesskey. For later milestones, make this
// dependent on a capability or replace with a capability.
// Hard-coded to false for this milestone: no targeted WML browser supports
// accesskey yet (see the UNDONE note above — should become a capability).
private bool DoesBrowserSupportAccessKey() {
return false;
}
// Swaps characters that certain WML browsers mangle in posted view state
// ('+'<->'-', '='<->'.', '/'<->'*', per _specialEncodingChars). The mapping
// trades characters pairwise, so applying it twice is the identity
// transformation (Mobile Internet Toolkit 5093).
internal String EncodeSpecialViewState(String pageState) {
    char[] chars = pageState.ToCharArray();
    for (int i = 0; i < chars.Length; i++) {
        char c = chars[i];
        if (c >= _specialEncodingChars.Length) {
            continue; // outside the mapped ASCII range
        }
        char replacement = _specialEncodingChars[c];
        if (replacement != '\0') {
            chars[i] = replacement;
        }
    }
    return new String(chars);
}
// Ends a form pass: in analyze mode, finalizes the analysis (postback-card
// decision); in render mode, closes the card markup.
private void EndForm(WmlTextWriter writer) {
if (writer.AnalyzeMode) {
// Analyze form when done.
((WmlPageAdapter)PageAdapter).PostAnalyzeForm();
}
else {
RenderEndForm(writer);
}
}
// Return a session page state persister to reduce view state size on the client.
// Return a session page state persister to reduce view state size on the client.
public override PageStatePersister GetStatePersister() {
return new SessionPageStatePersister(Page);
}
/// <internalonly/>
// VSWhidbey 80467: Need to adapt id separator.
/// <internalonly/>
// VSWhidbey 80467: Need to adapt id separator.
// WML cannot use the default '$' (variable syntax), so ':' is used instead.
public override char IdSeparator {
get {
return ':';
}
}
// Initialization of writer state should go here.
// Initialization of writer state should go here.
// Currently just points the writer at the page's server form.
private void InitializeWriter(WmlTextWriter writer) {
writer.CurrentForm = Page.Form;
}
// Called after the analyze pass: when more than one postback was found,
// switch to a shared postback card to avoid duplicating the post markup.
public virtual void PostAnalyzeForm() {
    _usePostBackCards |= _numberOfPostBacks > 1;
}
// Convenience overload: registers a control as a dynamic (variable-valued)
// post field keyed by its UniqueID.
public void RegisterPostField(WmlTextWriter writer, Control control) {
RegisterPostField(writer, control.UniqueID, control.ClientID, true, false);
}
// Registers a postfield collected during the analyze pass. Dynamic fields
// post the value of a WML variable (mapped to a short name); static fields
// post a literal value. No-op outside analyze mode.
public void RegisterPostField(WmlTextWriter writer, string fieldName, string clientValue, bool isDynamic, bool random) {
if (!writer.AnalyzeMode) {
return;
}
if (isDynamic) {
// Dynamic value.
// Map the client ID to a short name. See
// MapClientIDToShortName for details.
_dynamicPostFields[fieldName] = writer.MapClientIDToShortName(clientValue, random);
}
else {
_staticPostFields[fieldName] = clientValue;
}
}
// Renders the whole WML deck: XML header, <wml> element, cache-expiry
// markup, the (two-pass) form card, and the shared postback card if one
// is needed. Requires a server-side form and no render delegates.
protected internal override void Render(HtmlTextWriter writer) {
WmlTextWriter wmlWriter = (WmlTextWriter) writer;
if (Page.Form == null) {
throw new HttpException(SR.GetString(SR.PageAdapter_MustHaveFormRunatServer));
}
if (Page.HasRenderDelegate()) {
throw new HttpException(SR.GetString(SR.PageAdapter_RenderDelegateMustBeInServerForm));
}
if (RequiresUTF8ContentEncoding()) {
Page.Response.ContentEncoding = _utf8Encoding;
}
InitializeWriter(wmlWriter);
RenderXmlHeader(wmlWriter);
wmlWriter.WriteFullBeginTag("wml");
RenderCacheExpiry(wmlWriter);
HtmlForm form = Page.Form;
AnalyzeAndRenderHtmlForm(wmlWriter, form);
RenderPostBackCard(wmlWriter);
wmlWriter.WriteEndTag("wml");
}
// Renders the beginning of the form.
// UNDONE: Remove internal modifier when method is completely removed from writer.
// Renders the beginning of the form.
// Opens the card, then emits onenterforward/onenterbackward refresh events
// that (re)initialize all collected form variables plus the cross-page
// post-URL variable, so variables survive forward and backward navigation.
// UNDONE: Remove internal modifier when method is completely removed from writer.
protected internal virtual void RenderBeginForm(WmlTextWriter writer) {
RenderBeginCardTag(writer);
// Write form variables.
// UNDONE: Move writer._provideBackButton to this adapter.
// Review: In V1 we had a writer.ProvideBackButton property, is there any need for this with (more advanced)
// whidbey devices?
_writtenFormVariables = true;
if (_formVariables == null) {
_formVariables = new ListDictionary();
}
_formVariables[_postBackEventTargetVarName] = String.Empty; // Whidbey 18260
_formVariables[_postBackEventArgumentVarName] = String.Empty;
writer.Write("<onevent type=\"onenterforward\"><refresh>");
RenderSetFormVariables(writer);
RenderPostUrlFormVariable(writer);
writer.WriteLine("</refresh></onevent>");
writer.Write("<onevent type=\"onenterbackward\"><refresh>");
RenderSetFormVariables(writer);
RenderPostUrlFormVariable(writer);
writer.WriteLine("</refresh></onevent>");
// UNDONE: formAdapter.RenderExtraCardElements(this);
writer.BeginFormOrPanel();
}
// Emits a setvar initializing the post-URL variable to this page's own
// postback URL (path + query string). Only needed when the page contains
// a cross-page post, since the post target then varies at runtime.
private void RenderPostUrlFormVariable(WmlTextWriter writer) {
if (Page.ContainsCrossPagePost) {
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", _postUrlVarName);
writer.Write(" value=\"");
RenderPostBackUrl(writer, Page.RelativeFilePath);
RenderFormQueryString(writer, QueryString);
writer.Write("\" />");
}
}
// Opens a WML <a> tag for the target URL. In encode mode, '$' is doubled
// (WML variable escape) and the URL is attribute-encoded; otherwise only
// ampersands are escaped. Softkey label and access key are emitted only
// when the device supports them. No output during the analyze pass.
public override void RenderBeginHyperlink(HtmlTextWriter writer, string targetUrl, bool encodeUrl, string softkeyLabel, string accessKey) {
WmlTextWriter wmlWriter = (WmlTextWriter)writer;
if (wmlWriter.AnalyzeMode) {
return;
}
// Valid values are null, String.Empty, and single character strings
if ((accessKey != null) && (accessKey.Length > 1)) {
throw new ArgumentOutOfRangeException("accessKey");
}
// If the softkey label is too long, let the device choose a default softkey label.
softkeyLabel = ResolveSoftkeyLabel(softkeyLabel);
wmlWriter.WriteBeginTag("a");
wmlWriter.Write(" href=\"");
if (encodeUrl) {
targetUrl = targetUrl.Replace("$", "$$");
targetUrl = HttpUtility.HtmlAttributeEncode(targetUrl); // Leaves "$" alone.
wmlWriter.Write(targetUrl);
}
else {
wmlWriter.Write(wmlWriter.EscapeAmpersand(targetUrl));
}
wmlWriter.Write("\"");
if (softkeyLabel != null && softkeyLabel.Length > 0 && !RequiresNoSoftkeyLabels)
wmlWriter.WriteAttribute("title", softkeyLabel, false /* encode */);
if (accessKey != null && accessKey.Length > 0 && DoesBrowserSupportAccessKey())
wmlWriter.WriteAttribute("accessKey", accessKey, false /* encode */);
wmlWriter.Write(">");
}
// Opens a WML <anchor> tag for a postback link; RenderEndPostBack supplies
// the matching go action and closing tag. No output during the analyze pass.
public virtual void RenderBeginPostBack(WmlTextWriter writer, string softkeyLabel, string accessKey) {
if (writer.AnalyzeMode) {
return;
}
// If the softkey label is too long, let the device choose a default softkey label.
softkeyLabel = ResolveSoftkeyLabel(softkeyLabel);
writer.WriteBeginTag("anchor");
if (softkeyLabel != null && softkeyLabel.Length > 0 && !RequiresNoSoftkeyLabels)
writer.WriteAttribute("title", softkeyLabel, false /* encode Whidbey 17925 */);
if (accessKey != null && accessKey.Length > 0 && DoesBrowserSupportAccessKey())
writer.WriteAttribute("accessKey", accessKey);
writer.Write(">");
}
// Renders the cache expiry as a header or meta element.
// Emits the max-age=0 cache expiry, preferring the Cache-Control meta tag
// and falling back to an HTTP response header for browsers that declare
// the meta tag unsupported.
private void RenderCacheExpiry(WmlTextWriter writer) {
    bool metaTagUnsupported = StringUtil.EqualsIgnoreCase(Browser["SupportsCacheControlMetaTag"], "false");
    if (metaTagUnsupported) {
        Page.Response.AppendHeader("Cache-Control", "max-age=0");
    }
    else {
        writer.Write(_cacheExpiry);
    }
}
// Renders a card tag.
// Renders a card tag and increases the indent for the card's contents;
// RenderEndForm undoes both.
protected virtual void RenderBeginCardTag(WmlTextWriter writer) {
writer.WriteLine("<card>");
writer.Indent++;
}
// Renders the end of the form.
// Renders the end of the form: closes any open paragraph, restores the
// indent from RenderBeginCardTag, and closes the card element.
protected internal virtual void RenderEndForm(WmlTextWriter writer) {
writer.CloseParagraph();
writer.Indent--;
writer.WriteEndTag("card");
writer.WriteLine();
}
// Closes the <a> element opened by RenderBeginHyperlink.
// Produces no output during the analyze pass.
public override void RenderEndHyperlink(HtmlTextWriter writer) {
    WmlTextWriter wmlWriter = (WmlTextWriter)writer;
    if (!wmlWriter.AnalyzeMode) {
        wmlWriter.WriteEndTag("a");
    }
}
// Completes a postback anchor: in analyze mode it just counts the postback
// (for the postback-card decision); in render mode it emits the go action
// and closes the <anchor> element opened by RenderBeginPostBack.
public virtual void RenderEndPostBack(WmlTextWriter writer, String target, String argument, String postUrl) {
if (writer.AnalyzeMode) {
// Analyze postbacks to see if postback cards should
// be rendered.
AnalyzePostBack(WmlPostFieldType.Submit);
}
else {
RenderGoAction(writer, target, argument, postUrl);
writer.WriteEndTag("anchor");
}
}
// Renders a single pass (analyze or output, per the writer's mode) over
// the form: begin markup, child controls, end markup, bracketed by the
// page's form-render notifications.
protected virtual void RenderForm(WmlTextWriter writer, HtmlForm form) {
Page.OnFormRender();
BeginForm(writer);
form.RenderChildren(writer);
EndForm(writer);
Page.OnFormPostRender();
}
// Render the method attribute of a go action.
// Render the method attribute of a go action.
// Method defaults to get in WML, so the attribute is written only for post.
private void RenderFormMethodAttribute(WmlTextWriter writer, string method) {
// Method defaults to get in WML, so write it if it's not.
if (StringUtil.EqualsIgnoreCase(method, "post")) {
writer.WriteAttribute("method", "post");
}
}
// Render a complete form post in a go action. This is used when rendering a postback card, or when
// rendering a go action that posts back directly rather than redirecting to a postback card.
// Render a complete form post in a go action. This is used when rendering a postback card, or when
// rendering a go action that posts back directly rather than redirecting to a postback card.
// The href is, in order of preference: this page's own URL (no cross-page
// post), the explicit postUrl, or the runtime post-URL WML variable.
// The body then carries view state (chunked), the referrer field for
// cross-page posts, the event target/argument, and all registered
// dynamic and static post fields.
private void RenderFormPostInGoAction(WmlTextWriter writer, string target, string argument, WmlPostFieldType postFieldType, String postUrl) {
writer.WriteBeginTag("go");
writer.Write(" href=\"");
if (!Page.ContainsCrossPagePost) {
RenderPostBackUrl(writer, Page.RelativeFilePath);
RenderFormQueryString(writer, QueryString);
}
else if (!String.IsNullOrEmpty(postUrl)) {
RenderPostBackUrl(writer, postUrl);
}
else {
// Target unknown until runtime: reference the post-URL variable.
writer.Write("$(");
writer.Write(_postUrlVarName);
if (!StringUtil.EqualsIgnoreCase((string)Browser["requiresNoescapedPostUrl"], "false")) {
writer.Write(":noescape");
}
writer.Write(")");
}
writer.Write("\"");
string method = Page.Form.Method;
RenderFormMethodAttribute(writer, method);
writer.Write(">");
string clientState = ClientState;
if (clientState != null) {
ICollection stateChunks = Page.DecomposeViewStateIntoChunks();
int numChunks = stateChunks.Count;
if (numChunks > 1) {
// A count field tells the server how many chunks to reassemble.
RenderStatePostField(writer, Page.ViewStateFieldCountID, stateChunks.Count.ToString(CultureInfo.CurrentCulture));
}
int count = 0;
foreach (String state in stateChunks) {
// First chunk uses the bare prefix; later chunks append their index.
string key = Page.ViewStateFieldPrefixID;
if (count > 0 ) {
key += count.ToString(CultureInfo.CurrentCulture);
}
RenderStatePostField(writer, key, state);
++count;
}
}
RenderReferrerPagePostField(writer);
RenderTargetAndArgumentPostFields(writer, target, argument, postFieldType);
RenderPostFieldVariableDictionary(writer, _dynamicPostFields);
RenderPostFieldDictionary(writer, _staticPostFields);
// UNDONE: Add postbacks for variables which are not on the current page.
writer.WriteEndTag("go");
}
// Renders the Form query string.
// Renders the Form query string, prefixed with '?'. Where supported, any
// forms-auth cookie value in the query string is replaced with a WML
// variable reference before the string is written encoded.
private void RenderFormQueryString(WmlTextWriter writer, string queryString) {
if (String.IsNullOrEmpty(queryString)) {
return;
}
writer.Write("?");
// UNDONE: MMIT IPageAdapter.PersistCookielessData NYI
// if(Page.Adapter.PersistCookielessData && Browser["canRenderOneventAndPrevElementsTogether"] != "false")
if (!StringUtil.EqualsIgnoreCase((string)Browser["canRenderOneventAndPrevElementsTogether"], "false")) {
queryString = writer.ReplaceFormsCookieWithVariable(queryString);
}
writer.WriteEncodedText(queryString);
}
// Emits a go action for a postback: either a jump to the shared postback
// card (when the analyze pass decided one is worthwhile) or a full inline
// form post.
public virtual void RenderGoAction(WmlTextWriter writer, String target, String argument, String postUrl) {
    if (!UsePostBackCard()) {
        RenderFormPostInGoAction(writer, target, argument, WmlPostFieldType.Normal, postUrl);
        return;
    }
    RenderGoActionToPostbackCard(writer, target, argument, postUrl);
}
// If using postback cards, render a go action to the given
// postback card, along with setvars setting the target and
// argument (and post URL when supplied). Also flags that the shared
// postback card must be emitted at the end of the deck.
private void RenderGoActionToPostbackCard(WmlTextWriter writer, String target, String argument, String postUrl) {
writer.WriteBeginTag("go");
writer.Write(" href=\"");
_writtenPostBack = true;
writer.Write("#");
writer.Write(WmlTextWriter.PostBackWithVarsCardID);
writer.Write("\">");
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", _postBackEventTargetVarName);
writer.WriteAttribute("value", target);
writer.Write("/>");
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", _postBackEventArgumentVarName);
writer.Write(" value=\"");
if (argument != null) {
writer.WriteEncodedText(argument);
}
writer.Write("\"/>");
if (!String.IsNullOrEmpty(postUrl)) {
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", _postUrlVarName);
writer.Write(" value=\"");
writer.WriteEncodedUrl(postUrl);
writer.Write("\"/>");
}
writer.WriteEndTag("go");
}
// Renders postback cards.
// Renders the shared postback card, if any go action targeted it: entering
// the card forward performs the form post (reading target/argument from
// WML variables); entering backward simply navigates back.
private void RenderPostBackCard(WmlTextWriter writer) {
if (!_writtenPostBack) {
return;
}
writer.WriteBeginTag("card");
writer.WriteAttribute("id", WmlTextWriter.PostBackWithVarsCardID);
writer.WriteLine(">");
writer.Write("<onevent type=\"onenterforward\">");
RenderFormPostInGoAction(writer, null, _postBackEventArgumentVarName, WmlPostFieldType.Variable, String.Empty);
// REVIEW: Should we always include page hidden variables.
writer.WriteLine("</onevent>");
writer.WriteLine("<onevent type=\"onenterbackward\"><prev /></onevent>");
writer.WriteLine("</card>");
}
// Renders a complete postback link (begin anchor, link text, end anchor).
// Non-WML writers fall back to the base (HTML) rendering. A non-empty
// postUrl marks the page as containing a cross-page post, which changes
// how the post target is rendered elsewhere.
// Cleanup: the original re-cast `writer` on each call even though
// `wmlWriter` was already the checked cast — the redundant casts are gone.
public override void RenderPostBackEvent(HtmlTextWriter writer, string target, string argument, string softkeyLabel, string text, string postUrl, string accessKey) {
    WmlTextWriter wmlWriter = writer as WmlTextWriter;
    if (wmlWriter == null) {
        base.RenderPostBackEvent(writer, target, argument, softkeyLabel, text, postUrl, accessKey);
        return;
    }
    if (String.IsNullOrEmpty(softkeyLabel)) {
        // Default the softkey label to the link text.
        softkeyLabel = text;
    }
    if (!String.IsNullOrEmpty(postUrl)) {
        Page.ContainsCrossPagePost = true;
    }
    RenderBeginPostBack(wmlWriter, softkeyLabel, accessKey);
    wmlWriter.Write(text);
    RenderEndPostBack(wmlWriter, target, argument, postUrl);
}
// Writes the postback URL, converting it to an absolute path (via
// ApplyAppPathModifier) for browsers that require absolute postback URLs.
private void RenderPostBackUrl(WmlTextWriter writer, string path) {
if ((String)Browser["requiresAbsolutePostbackUrl"] == "true" && Page != null && Page.Request != null && Page.Response != null) {
// ApplyAppPathModifier makes the path absolute
writer.WriteEncodedUrl(Page.Response.ApplyAppPathModifier(path));
}
else {
writer.WriteEncodedUrl(path);
}
}
// Render a postfield dictionary with non-variable values.
// Render a postfield dictionary with non-variable (literal) values.
private void RenderPostFieldDictionary(WmlTextWriter writer, IDictionary postFieldDictionary) {
foreach (DictionaryEntry entry in postFieldDictionary) {
writer.WritePostField((string)entry.Key, (string)entry.Value);
}
}
// Render a postfield dictionary with variable values.
// Render a postfield dictionary whose values are WML variable names
// (resolved by the device at post time).
private void RenderPostFieldVariableDictionary(WmlTextWriter writer, IDictionary postFieldDictionary) {
foreach (DictionaryEntry entry in postFieldDictionary) {
writer.WritePostFieldVariable((string)entry.Key, (string)entry.Value);
}
}
// If the form action corresponds to a cross page post, render the referrer page in a post field.
// If the form action corresponds to a cross page post, render the referrer page in a post field
// (encrypted current-execution file path, so the target page can identify
// the poster).
private void RenderReferrerPagePostField(WmlTextWriter writer) {
if (Page.ContainsCrossPagePost) {
writer.WritePostField(Page.previousPageID, Page.EncryptString(Page.Request.CurrentExecutionFilePath));
}
}
// Render a select option.
// Render a select option with text only (no value attribute).
// Produces no output during the analyze pass.
public virtual void RenderSelectOption(WmlTextWriter writer, string text) {
if (writer.AnalyzeMode) {
return;
}
writer.WriteFullBeginTag("option");
writer.WriteEncodedText(text);
writer.WriteEndTag("option");
}
// Render a select option with an explicit (encoded) value attribute.
// Produces no output during the analyze pass.
public virtual void RenderSelectOption(WmlTextWriter writer, String text, String value) {
if (writer.AnalyzeMode) {
return;
}
writer.WriteBeginTag("option");
writer.WriteAttribute("value", value, true);
writer.Write(">");
writer.WriteEncodedText(text);
writer.WriteEndTag("option");
}
// Render a select option that navigates to the given URL when picked
// (via the onpick attribute). Produces no output during the analyze pass.
public virtual void RenderSelectOptionWithNavigateUrl(WmlTextWriter writer, String text, string navigateUrl) {
if (writer.AnalyzeMode) {
return;
}
writer.WriteBeginTag("option");
writer.WriteAttribute("onpick", navigateUrl);
writer.Write(">");
writer.WriteEncodedText(text);
writer.WriteEndTag("option");
}
// Convenience overload: renders a postback option with no explicit
// target or argument.
public virtual void RenderSelectOptionAsPostBack(WmlTextWriter writer, string text) {
RenderSelectOptionAsPostBack(writer, text, null, null);
}
// Renders a select option that posts back when picked: an onpick onevent
// jumps to the shared postback card, setting the target/argument variables
// when provided. Forces the shared postback card to be emitted.
// Produces no output during the analyze pass.
public virtual void RenderSelectOptionAsPostBack(WmlTextWriter writer, string text, String target, String argument) {
if (writer.AnalyzeMode) {
return;
}
writer.WriteFullBeginTag("option");
writer.WriteBeginTag("onevent");
writer.WriteAttribute("type", "onpick");
writer.Write(">");
writer.WriteBeginTag("go");
writer.WriteAttribute("href", "#" + WmlTextWriter.PostBackWithVarsCardID);
writer.Write(">");
if (!String.IsNullOrEmpty(target)) {
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", _postBackEventTargetVarName);
writer.WriteAttribute("value", target);
writer.Write(" />");
}
if (!String.IsNullOrEmpty(argument)) {
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", _postBackEventArgumentVarName);
writer.WriteAttribute("value", argument);
writer.Write(" />");
}
writer.WriteEndTag("go");
writer.WriteEndTag("onevent");
writer.WriteEncodedText(text);
writer.WriteEndTag("option");
_writtenPostBack = true;
_usePostBackCards = true;
}
// Renders an auto-postback select option for a grouped control (e.g. radio
// group): picking it sets the group's short-named variable to the given
// value and jumps to the shared postback card. Forces the postback card.
// Produces no output during the analyze pass.
public void RenderSelectOptionAsAutoPostBack(WmlTextWriter writer, string text, string groupName, string value) {
if (writer.AnalyzeMode) {
return;
}
writer.WriteFullBeginTag("option");
writer.WriteBeginTag("onevent");
writer.WriteAttribute("type", "onpick");
writer.Write(">");
writer.WriteBeginTag("go");
writer.WriteAttribute("href", "#" + WmlTextWriter.PostBackWithVarsCardID);
writer.Write(">");
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", writer.MapClientIDToShortName(groupName, false));
writer.WriteAttribute("value", value);
writer.Write(" />");
writer.WriteEndTag("go");
writer.WriteEndTag("onevent");
writer.WriteEncodedText(text);
writer.WriteEndTag("option");
_writtenPostBack = true;
_usePostBackCards = true;
}
// Renders an auto-postback select option using the compact onpick
// attribute form: picking it jumps straight to the shared postback card.
// Forces the postback card. Produces no output during the analyze pass.
public void RenderSelectOptionAsAutoPostBack(WmlTextWriter writer, string text, string value) {
if (writer.AnalyzeMode) {
return;
}
writer.WriteBeginTag("option");
if (!String.IsNullOrEmpty(value)) {
writer.WriteAttribute("value", value, true);
}
writer.WriteAttribute("onpick", "#" + WmlTextWriter.PostBackWithVarsCardID);
writer.Write(">");
writer.WriteEncodedText(text);
writer.WriteEndTag("option");
// force use of postback cards with variables.
_writtenPostBack = true;
_usePostBackCards = true;
}
// Emits a <setvar> for every form variable collected during the analyze
// pass (values attribute-encoded). Called from the card's onenterforward
// and onenterbackward refresh events.
private void RenderSetFormVariables(WmlTextWriter writer) {
foreach (DictionaryEntry entry in _formVariables) {
writer.WriteBeginTag("setvar");
writer.WriteAttribute("name", (String)entry.Key);
writer.WriteAttribute("value", (String)entry.Value, true);
writer.Write(" />");
}
}
// Render a postfield for view state or control state.
// Nothing is written when the state value is null; when the device
// requires it, the value is passed through the page adapter's special
// view state encoding first.
private void RenderStatePostField(WmlTextWriter writer, string stateName, string stateValue) {
    if (stateValue == null) {
        return;
    }
    if (Browser["requiresSpecialViewStateEncoding"] == "true") {
        stateValue = ((WmlPageAdapter) Page.Adapter).EncodeSpecialViewState(stateValue);
    }
    writer.WritePostField(stateName, stateValue);
}
// Render postfields for the event target and the event argument.
// A null target means the postback card itself is being rendered, in which
// case the client-side variable holding the original target is posted
// instead of a literal value.
private void RenderTargetAndArgumentPostFields(WmlTextWriter writer, string target, string argument, WmlPostFieldType postFieldType) {
    // Write the event target.
    if (target != null) {
        writer.WritePostField(Page.postEventSourceID, target);
    }
    else {
        // Target is null when the action is generated from a postback
        // card itself. In this case, set the event target to whatever
        // the original event target was.
        writer.WritePostFieldVariable(Page.postEventSourceID, _postBackEventTargetVarName);
    }
    // Write the event argument, if valid. A Variable post field references
    // a client-side variable by name; otherwise the literal value is posted.
    if (argument != null) {
        if (postFieldType == WmlPostFieldType.Variable) {
            writer.WritePostFieldVariable(Page.postEventArgumentID, argument);
        }
        else {
            writer.WritePostField(Page.postEventArgumentID, argument);
        }
    }
}
// Transforms text for the target device. The default transformation is the identity transformation,
// which does not change the text.
// Rewrites literal HTML-ish text into WML: <p> boundaries become <br/>,
// surrounding whitespace becomes line breaks, and '$' is escaped. The
// regex replacements below are strictly order-dependent.
internal void RenderTransformedText(WmlTextWriter writer, string text) {
    bool leadingSpace = false;
    bool setPendingP = false;
    bool trailingSpace = false;
    // p's replaced by brs as in MMIT V1 for valid containment.
    text = LiteralControlAdapterUtility.PreprocessLiteralText(text);
    // NOTE(review): isEmpty is true only for a non-null empty string. A null
    // return from PreprocessLiteralText would throw in the Regex calls below;
    // presumably the helper never returns null - confirm.
    bool isEmpty = (text != null && text.Length == 0);
    if (isEmpty) {
        return;
    }
    // At the top of a form, strip leading <p>/</p> tags (keeping any
    // captured whitespace) - the form emits its own leading paragraph.
    if (writer.TopOfForm) {
        while (Regex.IsMatch(text, "^(?'space'\\s*)(?:<p|</p)\\s*>")) {
            text = Regex.Replace(text, "^(?'space'\\s*)(?:<p|</p)\\s*>", "${space}", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
        }
    }
    // Assignment-in-condition (intentional): remember a trailing </p> so a
    // pending paragraph can be set after rendering, and remove it here.
    if (setPendingP = Regex.IsMatch(text, "</p\\s*>\\s*$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)) {
        text = Regex.Replace(text, "</p\\s*>(?'space'\\s*)$", "${space}", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    }
    // Normalize <br> variants, then collapse paragraph boundaries to <br/>.
    text = Regex.Replace(text, "<br\\s*/?>", "<br/>", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    text = Regex.Replace(text, "</p\\s*>(?'space'\\s*)<p\\s*>", "<br/>${space}", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    text = Regex.Replace(text, "(?:<p|</p)\\s*>", "<br/>", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    // Strip surrounding whitespace but remember it (assignment-in-condition);
    // it is re-emitted as explicit line breaks around the text below.
    if (trailingSpace = Regex.IsMatch(text, "\\s+$")) {
        text = Regex.Replace(text, "\\s+$", String.Empty);
    }
    if (leadingSpace = Regex.IsMatch(text, "^\\s+")) {
        text = Regex.Replace(text, "^\\s+", String.Empty);
    }
    // Escape '$', which introduces variable references in WML.
    text = text.Replace("$", "$$");
    // Render text.
    if (text.Trim().Length > 0) {
        if (leadingSpace) {
            writer.WriteLine();
        }
        Style emptyStyle = new Style();
        writer.BeginRender(); // write pending tags.
        writer.EnterStyle(emptyStyle); // VSWhidbey 114083
        writer.Write(text);
        writer.ExitStyle(emptyStyle);
        writer.EndRender();
        if (trailingSpace) {
            writer.WriteLine();
        }
    }
    // Whidbey 19653 transform space as newline. If we are at the top of the form (before the leading p),
    // don't need literal text -it won't be rendered. Similarly, if we are setting a pending p, no need to writeline.
    else if (!setPendingP && !writer.TopOfForm) {
        Debug.Assert(!isEmpty, "Empty text. Should have returned before this point.");
        writer.WriteLine();
    }
    if (setPendingP) {
        writer.SetPendingP();
    }
}
// Writes the XML declaration for the WML document. The encoding attribute
// is included only when the response charset is set and is not UTF-8
// (UTF-8 being the implied default).
private void RenderXmlHeader(WmlTextWriter writer) {
    writer.Write(_headerBegin);
    String charset = Page.Response.Charset;
    if (charset != null && charset.Length > 0 &&
        !StringUtil.EqualsIgnoreCase(charset, "utf-8")) {
        writer.Write(String.Format(_headerEncoding, charset));
    }
    writer.Write(_headerEnd);
}
// Reverse the special character replacement done when
// writing out the viewstate value.
// Returns a new (writable) collection in which every view state field has
// been decoded; all other entries are copied through unchanged.
private NameValueCollection ReplaceSpeciallyEncodedState(NameValueCollection baseCollection) {
    // For each viewstate field
    // NOTE(review): CurrentCulture is used for the field count and the key
    // suffixes; this must stay symmetric with whatever culture the writer
    // side used when the fields were emitted - confirm before changing.
    int numViewStateFields = Convert.ToInt32(baseCollection[Page.ViewStateFieldCountID], CultureInfo.CurrentCulture);
    Hashtable newEntries = new Hashtable();
    for (int i=0; i<numViewStateFields; ++i) {
        // Field keys are "<prefix>", "<prefix>1", "<prefix>2", ...
        string key = Page.ViewStateFieldPrefixID;
        if (i > 0) {
            key += i.ToString(CultureInfo.CurrentCulture);
        }
        // Applying EncodeSpecialViewState twice returns a string to its
        // original form.
        string speciallyEncodedState = baseCollection[key];
        if (speciallyEncodedState != null) {
            speciallyEncodedState = EncodeSpecialViewState(speciallyEncodedState);
        }
        newEntries.Add(key, speciallyEncodedState);
    }
    // We need to regenerate the collection since the
    // original baseCollection is readonly.
    NameValueCollection collection = new NameValueCollection();
    for (int i = 0; i < baseCollection.Count; i++) {
        String name = baseCollection.GetKey(i);
        string value = newEntries[name] as string;
        if (value != null) {
            collection.Add(name, value);
        }
        else {
            collection.Add(name, baseCollection.Get(i));
        }
    }
    return collection;
}
// Cached lookup of the "requiresNoSoftkeyLabels" browser capability.
// A missing capability is treated as false.
internal bool RequiresNoSoftkeyLabels {
    get {
        if (!_haveRequiresNoSoftkeyLabels) {
            String capability = Browser["requiresNoSoftkeyLabels"];
            _requiresNoSoftkeyLabels = (capability != null)
                && Convert.ToBoolean(capability, CultureInfo.InvariantCulture);
            _haveRequiresNoSoftkeyLabels = true;
        }
        return _requiresNoSoftkeyLabels;
    }
}
// Cached lookup of the "requiresUTF8ContentEncoding" browser capability.
// A missing capability is treated as false.
private bool RequiresUTF8ContentEncoding() {
    if (!_haveRequiresUTF8ContentEncoding) {
        String capability = Browser["requiresUTF8ContentEncoding"];
        _requiresUTF8ContentEncoding = (capability != null)
            && Convert.ToBoolean(capability, CultureInfo.InvariantCulture);
        _haveRequiresUTF8ContentEncoding = true;
    }
    return _requiresUTF8ContentEncoding;
}
// Chooses between a developer specified softkey label and null (letting the device choose the softkey label).
private string ResolveSoftkeyLabel(string softkeyLabel) {
    // Measure the decoded text, since that is what the device displays.
    string decoded = HttpUtility.HtmlDecode(softkeyLabel);
    int maximum = Convert.ToInt32(Browser["maximumSoftkeyLabelLength"], CultureInfo.InvariantCulture);
    if (decoded == null || decoded.Length > maximum) {
        return null; // Let device choose the default softkey label.
    }
    return softkeyLabel;
}
// Transforms literal text for WML output by delegating to the shared
// literal-control helper.
public override string TransformText(string text) {
    string transformed = LiteralControlAdapterUtility.ProcessWmlLiteralText(text);
    return transformed;
}
// A postback card is used only when one was requested during rendering and
// the device does not explicitly disallow it.
protected virtual bool UsePostBackCard() {
    if (!_usePostBackCards) {
        return false;
    }
    return Browser["canRenderPostBackCard"] != "false";
}
}
}
#endif
| |
// $Id: Digest.java,v 1.6 2004/07/05 05:49:41 belaban Exp $
using System;
using Alachisoft.NGroups;
using Alachisoft.NCache.Runtime.Serialization.IO;
using Alachisoft.NCache.Runtime.Serialization;
using Alachisoft.NCache.Common.Net;
namespace Alachisoft.NGroups.Protocols.pbcast
{
/// <summary> A message digest, which is used e.g. by the PBCAST layer for gossiping (also used by NAKACK for
/// keeping track of current seqnos for all members). It contains pairs of senders and a range of seqnos
/// (low and high), where each sender is associated with its highest and lowest seqnos seen so far. That
/// is, the lowest seqno which was not yet garbage-collected and the highest that was seen so far and is
/// deliverable (or was already delivered) to the application. A range of [0 - 0] means no messages have
/// been received yet. <p> April 3 2001 (bela): Added high_seqnos_seen member. It is used to disseminate
/// information about the last (highest) message M received from a sender P. Since we might be using a
/// negative acknowledgment message numbering scheme, we would never know if the last message was
/// lost. Therefore we periodically gossip and include the last message seqno. Members who haven't seen
/// it (e.g. because msg was dropped) will request a retransmission. See DESIGN for details.
/// </summary>
/// <author> Bela Ban
/// </author>
[Serializable]
internal class Digest : ICompactSerializable
{
    // Parallel arrays: slot i describes the seqno range seen for senders[i].
    internal Address[] senders = null;
    internal long[] low_seqnos = null; // lowest seqnos seen
    internal long[] high_seqnos = null; // highest seqnos seen so far *that are deliverable*, initially 0
    internal long[] high_seqnos_seen = null; // highest seqnos seen so far (not necessarily deliverable), initially -1
    internal int index = 0; // current index of where next member is added

    public Digest()
    {
    } // used for externalization

    /// <summary>Creates a digest with capacity for <paramref name="size"/> members.</summary>
    public Digest(int size)
    {
        reset(size);
    }

    /// <summary>
    /// Appends a sender entry; the highest-seen seqno defaults to -1 (unknown).
    /// Silently ignored when the digest is full or the sender is null.
    /// </summary>
    public void add(Address sender, long low_seqno, long high_seqno)
    {
        if (index >= senders.Length)
        {
            return ;
        }
        if (sender == null)
        {
            return ;
        }
        senders[index] = sender;
        low_seqnos[index] = low_seqno;
        high_seqnos[index] = high_seqno;
        high_seqnos_seen[index] = - 1;
        index++;
    }

    /// <summary>
    /// Appends a sender entry including its highest-seen seqno.
    /// Silently ignored when the digest is full or the sender is null.
    /// </summary>
    public void add(Address sender, long low_seqno, long high_seqno, long high_seqno_seen)
    {
        if (index >= senders.Length)
        {
            return ;
        }
        if (sender == null)
        {
            return ;
        }
        senders[index] = sender;
        low_seqnos[index] = low_seqno;
        high_seqnos[index] = high_seqno;
        high_seqnos_seen[index] = high_seqno_seen;
        index++;
    }

    /// <summary>Appends every entry of <paramref name="d"/> to this digest.</summary>
    public void add(Digest d)
    {
        Address sender;
        long low_seqno, high_seqno, high_seqno_seen;
        if (d != null)
        {
            for (int i = 0; i < d.size(); i++)
            {
                sender = d.senderAt(i);
                low_seqno = d.lowSeqnoAt(i);
                high_seqno = d.highSeqnoAt(i);
                high_seqno_seen = d.highSeqnoSeenAt(i);
                add(sender, low_seqno, high_seqno, high_seqno_seen);
            }
        }
    }

    /// <summary> Adds a digest to this digest. This digest must have enough space to add the other digest; otherwise an error
    /// message will be written. For each sender in the other digest, the merge() method will be called.
    /// </summary>
    public void merge(Digest d)
    {
        Address sender;
        long low_seqno, high_seqno, high_seqno_seen;
        if (d == null)
        {
            return ;
        }
        for (int i = 0; i < d.size(); i++)
        {
            sender = d.senderAt(i);
            low_seqno = d.lowSeqnoAt(i);
            high_seqno = d.highSeqnoAt(i);
            high_seqno_seen = d.highSeqnoSeenAt(i);
            merge(sender, low_seqno, high_seqno, high_seqno_seen);
        }
    }

    /// <summary> Similar to add(), but if the sender already exists, its seqnos will be modified (no new entry) as follows:
    /// <ol>
    /// <li>this.low_seqno=min(this.low_seqno, low_seqno)
    /// <li>this.high_seqno=max(this.high_seqno, high_seqno)
    /// <li>this.high_seqno_seen=max(this.high_seqno_seen, high_seqno_seen)
    /// </ol>
    /// If the sender doesn not exist, a new entry will be added (provided there is enough space)
    /// </summary>
    public void merge(Address sender, long low_seqno, long high_seqno, long high_seqno_seen)
    {
        int index;
        long my_low_seqno, my_high_seqno, my_high_seqno_seen;
        if (sender == null)
        {
            return ;
        }
        index = getIndex(sender);
        if (index == - 1)
        {
            // Unknown sender: fall back to a plain add.
            add(sender, low_seqno, high_seqno, high_seqno_seen);
            return ;
        }
        // Known sender: widen the recorded range as documented above.
        my_low_seqno = lowSeqnoAt(index);
        my_high_seqno = highSeqnoAt(index);
        my_high_seqno_seen = highSeqnoSeenAt(index);
        if (low_seqno < my_low_seqno)
            setLowSeqnoAt(index, low_seqno);
        if (high_seqno > my_high_seqno)
            setHighSeqnoAt(index, high_seqno);
        if (high_seqno_seen > my_high_seqno_seen)
            setHighSeqnoSeenAt(index, high_seqno_seen);
    }

    /// <summary>Linear search for the slot of <paramref name="sender"/>; -1 when absent.</summary>
    public int getIndex(Address sender)
    {
        int ret = - 1;
        if (sender == null)
            return ret;
        for (int i = 0; i < senders.Length; i++)
            if (sender.Equals(senders[i]))
                return i;
        return ret;
    }

    /// <summary>True when <paramref name="sender"/> has a slot in this digest.</summary>
    public bool contains(Address sender)
    {
        return getIndex(sender) != - 1;
    }

    /// <summary> Compares two digests and returns true if the senders are the same, otherwise false</summary>
    /// <param name="other">digest to compare against (position-wise)</param>
    /// <returns>true when both digests list equal senders in the same slots</returns>
    public bool sameSenders(Digest other)
    {
        Address a1, a2;
        if (other == null)
            return false;
        if (this.senders == null || other.senders == null)
            return false;
        if (this.senders.Length != other.senders.Length)
            return false;
        for (int i = 0; i < this.senders.Length; i++)
        {
            a1 = this.senders[i];
            a2 = other.senders[i];
            // Two nulls in the same slot count as equal.
            if (a1 == null && a2 == null)
                continue;
            if (a1 != null && a2 != null && a1.Equals(a2))
                continue;
            else
                return false;
        }
        return true;
    }

    /// <summary>Increment the sender's high_seqno by 1 </summary>
    public void incrementHighSeqno(Address sender)
    {
        if (sender == null)
            return ;
        for (int i = 0; i < senders.Length; i++)
        {
            if (senders[i] != null && senders[i].Equals(sender))
            {
                high_seqnos[i] = high_seqnos[i] + 1;
                break;
            }
        }
    }

    /// <summary>Capacity of this digest (number of slots, filled or not).</summary>
    public int size()
    {
        return senders.Length;
    }

    /// <summary>Sender in the given slot, or null when the index is out of range.</summary>
    public Address senderAt(int index)
    {
        if (index < size())
            return senders[index];
        else
        {
            return null;
        }
    }

    /// <summary> Resets the seqnos for the sender at 'index' to 0. This happens when a member has left the group,
    /// but it is still in the digest. Resetting its seqnos ensures that no-one will request a message
    /// retransmission from the dead member.
    /// </summary>
    public void resetAt(int index)
    {
        if (index < size())
        {
            low_seqnos[index] = 0;
            high_seqnos[index] = 0;
            high_seqnos_seen[index] = - 1;
        }
    }

    /// <summary>Reallocates all arrays for <paramref name="size"/> members and clears the fill index.</summary>
    public void reset(int size)
    {
        senders = new Address[size];
        low_seqnos = new long[size];
        high_seqnos = new long[size];
        high_seqnos_seen = new long[size];
        // -1 marks "no seqno seen yet" for every slot.
        for (int i = 0; i < size; i++)
            high_seqnos_seen[i] = - 1;
        index = 0;
    }

    /// <summary>Lowest seqno at a slot; 0 when the index is out of range.</summary>
    public long lowSeqnoAt(int index)
    {
        if (index < size())
            return low_seqnos[index];
        else
        {
            return 0;
        }
    }

    /// <summary>Highest deliverable seqno at a slot; 0 when the index is out of range.</summary>
    public long highSeqnoAt(int index)
    {
        if (index < size())
            return high_seqnos[index];
        else
        {
            return 0;
        }
    }

    /// <summary>Highest seen seqno at a slot; 0 when the index is out of range.</summary>
    public long highSeqnoSeenAt(int index)
    {
        if (index < size())
            return high_seqnos_seen[index];
        else
        {
            return 0;
        }
    }

    /// <summary>Highest deliverable seqno for a sender; -1 when the sender is unknown.</summary>
    public long highSeqnoAt(Address sender)
    {
        long ret = - 1;
        int index;
        if (sender == null)
            return ret;
        index = getIndex(sender);
        if (index == - 1)
            return ret;
        else
            return high_seqnos[index];
    }

    /// <summary>Highest seen seqno for a sender; -1 when the sender is unknown.</summary>
    public long highSeqnoSeenAt(Address sender)
    {
        long ret = - 1;
        int index;
        if (sender == null)
            return ret;
        index = getIndex(sender);
        if (index == - 1)
            return ret;
        else
            return high_seqnos_seen[index];
    }

    /// <summary>Sets the lowest seqno at a slot; ignored when the index is out of range.</summary>
    public void setLowSeqnoAt(int index, long low_seqno)
    {
        if (index < size())
        {
            low_seqnos[index] = low_seqno;
        }
    }

    /// <summary>Sets the highest deliverable seqno at a slot; ignored when out of range.</summary>
    public void setHighSeqnoAt(int index, long high_seqno)
    {
        if (index < size())
        {
            high_seqnos[index] = high_seqno;
        }
    }

    /// <summary>Sets the highest seen seqno at a slot; ignored when out of range.</summary>
    public void setHighSeqnoSeenAt(int index, long high_seqno_seen)
    {
        if (index < size())
        {
            high_seqnos_seen[index] = high_seqno_seen;
        }
    }

    /// <summary>Sets the highest deliverable seqno for a sender; ignored when the sender is unknown.</summary>
    public void setHighSeqnoAt(Address sender, long high_seqno)
    {
        int index = getIndex(sender);
        if (index < 0)
            return ;
        else
            setHighSeqnoAt(index, high_seqno);
    }

    /// <summary>Sets the highest seen seqno for a sender; ignored when the sender is unknown.</summary>
    public void setHighSeqnoSeenAt(Address sender, long high_seqno_seen)
    {
        int index = getIndex(sender);
        if (index < 0)
            return ;
        else
            setHighSeqnoSeenAt(index, high_seqno_seen);
    }

    /// <summary>Deep copy of this digest (sender references are shared; arrays are cloned).</summary>
    public Digest copy()
    {
        // FIX: the original dereferenced 'senders' (for senders.Length) before
        // its own null check, making the guard unreachable; check first.
        if (senders == null)
            return new Digest(0);
        Digest ret = new Digest(senders.Length);
        Array.Copy(senders, 0, ret.senders, 0, senders.Length);
        ret.low_seqnos = new long[low_seqnos.Length];
        low_seqnos.CopyTo(ret.low_seqnos, 0);
        ret.high_seqnos = new long[high_seqnos.Length];
        high_seqnos.CopyTo(ret.high_seqnos, 0);
        ret.high_seqnos_seen = new long[high_seqnos_seen.Length];
        high_seqnos_seen.CopyTo(ret.high_seqnos_seen, 0);
        // FIX: carry the fill index over so add() on the copy appends after
        // the copied entries instead of overwriting them from slot 0.
        ret.index = index;
        return ret;
    }

    public override string ToString()
    {
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        bool first = true;
        if (senders == null)
            return "[]";
        for (int i = 0; i < senders.Length; i++)
        {
            if (!first)
            {
                sb.Append(", ");
            }
            else
            {
                sb.Append('[');
                first = false;
            }
            sb.Append(senders[i]).Append(": ").Append('[').Append(low_seqnos[i]).Append(" : ");
            sb.Append(high_seqnos[i]);
            if (high_seqnos_seen[i] >= 0)
                sb.Append(" (").Append(high_seqnos_seen[i]).Append(')');
            // FIX: always close the per-entry bracket; the original closed it
            // only when a highest-seen seqno was printed, leaving entries like
            // "addr: [0 : 5" unbalanced.
            sb.Append(']');
        }
        sb.Append(']');
        return sb.ToString();
    }

    /// <summary>Formats "sender#high_seqno" pairs for logging.</summary>
    public string printHighSeqnos()
    {
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        bool first = true;
        // FIX: guard against null senders like ToString() does instead of
        // throwing a NullReferenceException.
        if (senders == null)
            return "[]";
        for (int i = 0; i < senders.Length; i++)
        {
            if (!first)
            {
                sb.Append(", ");
            }
            else
            {
                sb.Append('[');
                first = false;
            }
            sb.Append(senders[i]);
            sb.Append('#');
            sb.Append(high_seqnos[i]);
        }
        sb.Append(']');
        return sb.ToString();
    }

    /// <summary>Formats "sender#high_seqno_seen" pairs for logging.</summary>
    public string printHighSeqnosSeen()
    {
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        bool first = true;
        // FIX: guard against null senders like ToString() does instead of
        // throwing a NullReferenceException.
        if (senders == null)
            return "[]";
        for (int i = 0; i < senders.Length; i++)
        {
            if (!first)
            {
                sb.Append(", ");
            }
            else
            {
                sb.Append('[');
                first = false;
            }
            sb.Append(senders[i]);
            sb.Append('#');
            sb.Append(high_seqnos_seen[i]);
        }
        sb.Append(']');
        return sb.ToString();
    }

    #region ICompactSerializable Members

    // Reads the fields in the exact order Serialize() writes them.
    void ICompactSerializable.Deserialize(CompactReader reader)
    {
        senders = (Address[])reader.ReadObject();
        low_seqnos = (long[])reader.ReadObject();
        high_seqnos = (long[])reader.ReadObject();
        high_seqnos_seen = (long[])reader.ReadObject();
        index = reader.ReadInt32();
    }

    // Writes the fields in the exact order Deserialize() reads them.
    void ICompactSerializable.Serialize(CompactWriter writer)
    {
        writer.WriteObject(senders);
        writer.WriteObject(low_seqnos);
        writer.WriteObject(high_seqnos);
        writer.WriteObject(high_seqnos_seen);
        writer.Write(index);
    }
    #endregion
}
}
| |
/*(c) Copyright 2012, VersionOne, Inc. All rights reserved. (c)*/
using System.Globalization;
using System.IO;
using System.Linq;
using VersionOne.Profile;
// TODO at least move classes to separate files
namespace VersionOne.ServiceHost.Core {
public delegate void ProcessFileDelegate(string file);
public delegate void ProcessFileBatchDelegate(string[] files);
public delegate void ProcessFolderDelegate(string folder);
public delegate void ProcessFolderBatchDelegate(string[] folders);
/// <summary>
/// Base class implementing the "process each path exactly once" pattern:
/// subclasses enumerate candidate paths and call ProcessPath(), which
/// records per-path state in the supplied profile.
/// </summary>
public abstract class FileSystemMonitor {
    private readonly IProfile profile;
    private IProfile processedPathsProfile;

    // Lazily resolved profile node that stores per-path processing state.
    private IProfile ProcessedPaths {
        get {
            return processedPathsProfile ?? (processedPathsProfile = profile["ProcessedFiles"]);
            // Retaining name "ProcessedFiles" for backward-compatibility
        }
    }

    protected string FilterPattern { get; set; }
    protected string WatchFolder { get; set; }

    /// <summary>
    /// Get the processed state of the given file from the profile.
    /// </summary>
    /// <param name="file">File to look up.</param>
    /// <returns>True if processed. False if not processed. Null if not in profile.</returns>
    protected bool? GetState(string file) {
        var value = ProcessedPaths[file].Value;
        if(value == null) {
            return null;
        }
        // NOTE(review): throws FormatException on a corrupted value - assumes
        // the profile is only ever written through SaveState.
        return bool.Parse(value);
    }

    /// <summary>
    /// Save the processing state for the given file to the profile.
    /// </summary>
    /// <param name="file">File in question.</param>
    /// <param name="done">True if processed; null clears the entry's value.</param>
    protected void SaveState(string file, bool? done) {
        ProcessedPaths[file].Value = done == null ? null : done.ToString();
    }

    // FIX: abstract types should not have public constructors (CA1012);
    // protected is sufficient for subclasses and remains source-compatible
    // with their base(...) calls.
    protected FileSystemMonitor(IProfile profile, string watchFolder, string filterPattern) {
        this.profile = profile;
        WatchFolder = watchFolder;
        FilterPattern = filterPattern;
        if(string.IsNullOrEmpty(FilterPattern)) {
            FilterPattern = "*.*";
        }
        // Ensure the watch folder exists so the first scan cannot fail.
        var path = Path.GetFullPath(WatchFolder);
        if(!Directory.Exists(path)) {
            Directory.CreateDirectory(path);
        }
    }

    /// <summary>
    /// Perform the basic processing pattern.
    /// </summary>
    /// <param name="path">A file or directory name, depending on the subclass implementation.</param>
    protected void ProcessPath(string path) {
        // Only never-seen paths are processed; state goes to "false"
        // (in progress) first so a crash mid-processing is detectable.
        if (GetState(path) == null) {
            SaveState(path, false);
            InvokeProcessor(path);
            SaveState(path, true);
        }
    }

    /// <summary>Subclass hook performing the actual work for one path.</summary>
    protected abstract void InvokeProcessor(string path);
}
/// <summary>
/// Monitors a folder and runs each matching file through the base class's
/// process-once pipeline, delegating the work to a file callback.
/// </summary>
public class FileMonitor : FileSystemMonitor {
    private readonly ProcessFileDelegate processor;

    public FileMonitor(IProfile profile, string watchfolder, string filterpattern, ProcessFileDelegate processor) : base(profile, watchfolder, filterpattern) {
        this.processor = processor;
    }

    // Scans the watch folder (non-recursive) for files matching the filter
    // and processes each one at most once.
    public void ProcessFolder(object pubobj) {
        var watchPath = Path.GetFullPath(WatchFolder);
        foreach(var candidate in Directory.GetFiles(watchPath, FilterPattern)) {
            ProcessPath(candidate);
        }
    }

    protected override void InvokeProcessor(string path) {
        processor(path);
    }
}
/// <summary>
/// Monitors a folder and runs each matching sub-folder through the base
/// class's process-once pipeline, delegating the work to a folder callback.
/// </summary>
public class FolderMonitor : FileSystemMonitor {
    private readonly ProcessFolderDelegate processor;

    public FolderMonitor(IProfile profile, string watchFolder, string filterPattern, ProcessFolderDelegate processor) : base(profile, watchFolder, filterPattern) {
        this.processor = processor;
    }

    // Scans the watch folder (non-recursive) for sub-folders matching the
    // filter and processes each one at most once.
    public void ProcessFolder(object pubobj) {
        var watchPath = Path.GetFullPath(WatchFolder);
        foreach(var candidate in Directory.GetDirectories(watchPath, FilterPattern)) {
            ProcessPath(candidate);
        }
    }

    protected override void InvokeProcessor(string path) {
        processor(path);
    }
}
/// <summary>
/// Monitors the full watch tree and hands all previously unseen sub-folders
/// to a batch callback in a single call, tracking state itself rather than
/// going through the base class's one-at-a-time pipeline.
/// </summary>
public class BatchFolderMonitor : FileSystemMonitor {
    private readonly ProcessFolderBatchDelegate processor;

    public BatchFolderMonitor(IProfile profile, string watchFolder, string filterPattern, ProcessFolderBatchDelegate processor) : base(profile, watchFolder, filterPattern) {
        this.processor = processor;
    }

    public void ProcessFolder(object pubobj) {
        var root = Path.GetFullPath(WatchFolder);
        // Recursively collect matching folders that have never been seen.
        var unseen = Directory
            .GetDirectories(root, FilterPattern, SearchOption.AllDirectories)
            .Where(folder => GetState(folder) == null)
            .ToList();
        if(unseen.Count == 0) {
            return;
        }
        // Mark "in progress" before the batch call and "done" after it.
        foreach(var folder in unseen) {
            SaveState(folder, false);
        }
        processor(unseen.ToArray());
        foreach(var folder in unseen) {
            SaveState(folder, true);
        }
    }

    protected override void InvokeProcessor(string path) {
        // TODO: Fix this smell
    }
}
/// <summary>
/// More thoroughly determines if a file has been processed.
/// Compares file modified stamps for paths that have been logged.
/// </summary>
public class RecyclingFileMonitor {
    private readonly IProfile profile;
    private IProfile processedPathsProfile;
    private readonly ProcessFileBatchDelegate processor;

    // Lazily resolved profile node storing per-file state and timestamps.
    private IProfile ProcessedPaths {
        get {
            return processedPathsProfile ?? (processedPathsProfile = profile["ProcessedFiles"]);
            // Retaining name "ProcessedFiles" for backward-compatibility
        }
    }

    protected string FilterPattern { get; set; }
    protected string WatchFolder { get; set; }

    /// <summary>
    /// Get the processed state of the given file from the profile.
    /// </summary>
    /// <param name="file">File to look up.</param>
    /// <returns>True if processed. False if not processed. Null if not in
    /// profile, or if the file has been modified since it was processed.</returns>
    protected bool? GetState(string file) {
        var value = ProcessedPaths[file].Value;
        if(value == null) {
            return null;
        }
        var haveProcessed = bool.Parse(value);
        if (haveProcessed) {
            // we've seen this path before, so look at the last write timestamp
            var stampValue = ProcessedPaths[file]["LastWrite"].Value;
            long storedLastWrite;
            // FIX: parse the stamp culture-invariantly; it is machine-readable
            // data and must round-trip with the invariant formatting in SaveState.
            if (long.TryParse(stampValue, NumberStyles.Integer, CultureInfo.InvariantCulture, out storedLastWrite)) {
                var actualLastWrite = File.GetLastWriteTimeUtc(file).ToBinary();
                if(actualLastWrite > storedLastWrite) {
                    // Modified since last processed: report "unseen" to force reprocessing.
                    return null;
                }
            }
            return true;
        }
        return false;
    }

    /// <summary>
    /// Save the processing state for the given file to the profile.
    /// </summary>
    /// <param name="file">File in question.</param>
    /// <param name="done">True if processed.</param>
    protected void SaveState(string file, bool? done) {
        ProcessedPaths[file].Value = done == null ? null : done.ToString();
        if (done.HasValue && done.Value) {
            var lastWrite = File.GetLastWriteTimeUtc(file).ToBinary();
            // FIX: write the stamp culture-invariantly so stored profiles are
            // stable across machines with different regional settings.
            ProcessedPaths[file]["LastWrite"].Value = lastWrite.ToString(CultureInfo.InvariantCulture);
        }
    }

    public RecyclingFileMonitor(IProfile profile, string watchFolder, string filterPattern, ProcessFileBatchDelegate processor) {
        this.processor = processor;
        this.profile = profile;
        WatchFolder = watchFolder;
        FilterPattern = filterPattern;
        if(string.IsNullOrEmpty(FilterPattern)) {
            FilterPattern = "*.*";
        }
        // Ensure the watch folder exists so the first scan cannot fail.
        var path = Path.GetFullPath(WatchFolder);
        if(!Directory.Exists(path)) {
            Directory.CreateDirectory(path);
        }
    }

    protected void InvokeProcessor(string[] files) {
        processor(files);
    }

    // Scans the watch tree and hands every new-or-modified file to the batch
    // processor, recording state before and after the call.
    public void ProcessFolder(object pubobj) {
        var path = Path.GetFullPath(WatchFolder);
        var files = Directory.GetFiles(path, FilterPattern, SearchOption.AllDirectories);
        var toProcess = files.Where(file => GetState(file) == null).ToList();
        foreach(var file in toProcess) {
            SaveState(file, false);
        }
        InvokeProcessor(toProcess.ToArray());
        foreach(var file in toProcess) {
            SaveState(file, true);
        }
    }
}
}
| |
#region MigraDoc - Creating Documents on the Fly
//
// Authors:
// Klaus Potzesny (mailto:Klaus.Potzesny@pdfsharp.com)
//
// Copyright (c) 2001-2009 empira Software GmbH, Cologne (Germany)
//
// http://www.pdfsharp.com
// http://www.migradoc.com
// http://sourceforge.net/projects/pdfsharp
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections;
using PdfSharp.Drawing;
using MigraDoc.DocumentObjectModel;
using MigraDoc.DocumentObjectModel.Visitors;
using MigraDoc.DocumentObjectModel.Tables;
using MigraDoc.DocumentObjectModel.IO;
namespace MigraDoc.Rendering
{
/// <summary>
/// Renders a table to an XGraphics object.
/// </summary>
internal class TableRenderer : Renderer
{
/// <summary>Initializes a renderer for formatting (measuring) the given table.</summary>
internal TableRenderer(XGraphics gfx, Table documentObject, FieldInfos fieldInfos)
    :
  base(gfx, documentObject, fieldInfos)
{
    this.table = (Table)documentObject;
}
/// <summary>Initializes a renderer for rendering a table that was already formatted
/// (the table is recovered from the render info produced by the format pass).</summary>
internal TableRenderer(XGraphics gfx, RenderInfo renderInfo, FieldInfos fieldInfos)
    :
  base(gfx, renderInfo, fieldInfos)
{
    this.table = (Table)this.renderInfo.DocumentObject;
}
internal override LayoutInfo InitialLayoutInfo
{
    get
    {
        // A table contributes no margins of its own and never keeps with the
        // next element; only KeepTogether comes from the document model.
        LayoutInfo info = new LayoutInfo();
        info.KeepTogether = this.table.KeepTogether;
        info.KeepWithNext = false;
        info.MarginTop = 0;
        info.MarginRight = 0;
        info.MarginBottom = 0;
        info.MarginLeft = 0;
        return info;
    }
}
// Copies the results of the format pass from the render info's
// TableFormatInfo into fields used by the render pass.
void InitRendering()
{
    TableFormatInfo formatInfo = (TableFormatInfo)this.renderInfo.FormatInfo;
    // bottomBorderMap appears to map row index -> cumulative vertical
    // position of that row's bottom border (see GetInnerRect) - TODO confirm.
    this.bottomBorderMap = formatInfo.bottomBorderMap;
    this.connectedRowsMap = formatInfo.connectedRowsMap;
    this.formattedCells = formatInfo.formattedCells;
    this.currRow = formatInfo.startRow;
    this.startRow = formatInfo.startRow;
    this.endRow = formatInfo.endRow;
    this.mergedCells = formatInfo.mergedCells;
    this.lastHeaderRow = formatInfo.lastHeaderRow;
    // Top-left corner of the content area the table is rendered into.
    this.startX = this.renderInfo.LayoutInfo.ContentArea.X;
    this.startY = this.renderInfo.LayoutInfo.ContentArea.Y;
}
/// <summary>
/// Renders the table's repeated header rows (rows with index up to
/// lastHeaderRow). Does nothing when the table has no header rows
/// (lastHeaderRow is negative).
/// </summary>
void RenderHeaderRows()
{
    if (this.lastHeaderRow < 0)
        return;
    foreach (Cell cell in this.mergedCells)
    {
        if (cell.Row.Index <= this.lastHeaderRow)
            RenderCell(cell);
    }
}
// Renders one cell: background shading first, then its content,
// then the borders drawn on top.
void RenderCell(Cell cell)
{
    Rectangle innerRect = GetInnerRect(CalcStartingHeight(), cell);
    RenderShading(cell, innerRect);
    RenderContent(cell, innerRect);
    RenderBorders(cell, innerRect);
}
// Fills the cell's inner rectangle with the cell's shading (background).
void RenderShading(Cell cell, Rectangle innerRect)
{
    ShadingRenderer shadeRenderer = new ShadingRenderer(this.gfx, cell.Shading);
    shadeRenderer.Render(innerRect.X, innerRect.Y, innerRect.Width, innerRect.Height);
}
// Draws the four edge borders around the cell's inner rectangle, extending
// the horizontal borders past the corners so they overlap the vertical
// border widths, then draws any diagonal borders.
void RenderBorders(Cell cell, Rectangle innerRect)
{
    XUnit leftPos = innerRect.X;
    XUnit rightPos = leftPos + innerRect.Width;
    XUnit topPos = innerRect.Y;
    XUnit bottomPos = innerRect.Y + innerRect.Height;
    // Effective borders account for cell merging (see MergedCellList).
    Borders mergedBorders = this.mergedCells.GetEffectiveBorders(cell);
    BordersRenderer bordersRenderer = new BordersRenderer(mergedBorders, this.gfx);
    XUnit bottomWidth = bordersRenderer.GetWidth(BorderType.Bottom);
    XUnit leftWidth = bordersRenderer.GetWidth(BorderType.Left);
    XUnit topWidth = bordersRenderer.GetWidth(BorderType.Top);
    XUnit rightWidth = bordersRenderer.GetWidth(BorderType.Right);
    // Vertical borders run from the top edge down past the bottom border.
    bordersRenderer.RenderVertically(BorderType.Right, rightPos, topPos, bottomPos + bottomWidth - topPos);
    bordersRenderer.RenderVertically(BorderType.Left, leftPos - leftWidth, topPos, bottomPos + bottomWidth - topPos);
    // Horizontal borders span the full width including both vertical borders.
    bordersRenderer.RenderHorizontally(BorderType.Bottom, leftPos - leftWidth, bottomPos, rightPos + rightWidth + leftWidth - leftPos);
    bordersRenderer.RenderHorizontally(BorderType.Top, leftPos - leftWidth, topPos - topWidth, rightPos + rightWidth + leftWidth - leftPos);
    RenderDiagonalBorders(mergedBorders, innerRect);
}
// Draws the down- and up-diagonal borders across the cell's inner rectangle.
void RenderDiagonalBorders(Borders mergedBorders, Rectangle innerRect)
{
    BordersRenderer bordersRenderer = new BordersRenderer(mergedBorders, this.gfx);
    bordersRenderer.RenderDiagonally(BorderType.DiagonalDown, innerRect.X, innerRect.Y, innerRect.Width, innerRect.Height);
    bordersRenderer.RenderDiagonally(BorderType.DiagonalUp, innerRect.X, innerRect.Y, innerRect.Width, innerRect.Height);
}
// Renders the cell's formatted content inside the inner rectangle, honoring
// the cell's vertical alignment and the row/column padding.
void RenderContent(Cell cell, Rectangle innerRect)
{
    FormattedCell formattedCell = (FormattedCell)this.formattedCells[cell];
    RenderInfo[] renderInfos = formattedCell.GetRenderInfos();
    if (renderInfos == null)
        return; // nothing was formatted for this cell
    VerticalAlignment verticalAlignment = cell.VerticalAlignment;
    XUnit contentHeight = formattedCell.ContentHeight;
    XUnit innerHeight = innerRect.Height;
    XUnit targetX = innerRect.X + cell.Column.LeftPadding;
    XUnit targetY;
    if (verticalAlignment == VerticalAlignment.Bottom)
    {
        // Anchor the content to the bottom edge minus the bottom padding.
        targetY = innerRect.Y + innerRect.Height;
        targetY -= cell.Row.BottomPadding;
        targetY -= contentHeight;
    }
    else if (verticalAlignment == VerticalAlignment.Center)
    {
        // Center within the padded area:
        // ((top + topPadding) + (bottom - bottomPadding) - contentHeight) / 2.
        targetY = innerRect.Y + cell.Row.TopPadding;
        targetY += innerRect.Y + innerRect.Height - cell.Row.BottomPadding;
        targetY -= contentHeight;
        targetY /= 2;
    }
    else
        targetY = innerRect.Y + cell.Row.TopPadding; // Top alignment (default)
    RenderByInfos(targetX, targetY, renderInfos);
}
// Computes a cell's inner rectangle on the current page. The vertical
// extent comes from the bottom-border map (cumulative row positions), the
// horizontal extent from the widths of all columns preceding the cell.
Rectangle GetInnerRect(XUnit startingHeight, Cell cell)
{
    BordersRenderer bordersRenderer = new BordersRenderer(this.mergedCells.GetEffectiveBorders(cell), this.gfx);
    FormattedCell formattedCell = (FormattedCell)this.formattedCells[cell];
    XUnit width = formattedCell.InnerWidth;
    XUnit y = this.startY;
    if (cell.Row.Index > this.lastHeaderRow)
        y += startingHeight; // body rows start below the repeated header block
    else
        y += CalcMaxTopBorderWidth(0); // header rows start just below the top border
    XUnit upperBorderPos = (XUnit)this.bottomBorderMap[cell.Row.Index];
    y += upperBorderPos;
    if (cell.Row.Index > this.lastHeaderRow)
        y -= (XUnit)this.bottomBorderMap[this.startRow]; // rebase to the first row on this page
    // Height spans all merged rows (MergeDown) minus the bottom border width.
    XUnit lowerBorderPos = (XUnit)this.bottomBorderMap[cell.Row.Index + cell.MergeDown + 1];
    XUnit height = lowerBorderPos - upperBorderPos;
    height -= bordersRenderer.GetWidth(BorderType.Bottom);
    XUnit x = this.startX;
    for (int clmIdx = 0; clmIdx < cell.Column.Index; ++clmIdx)
    {
        x += this.table.Columns[clmIdx].Width;
    }
    x += LeftBorderOffset;
    return new Rectangle(x, y, width, height);
}
/// <summary>
/// Renders the part of the table fitted to this area by the format pass:
/// the repeated header rows first, then every merged cell belonging to
/// rows startRow through endRow.
/// </summary>
internal override void Render()
{
    InitRendering();
    RenderHeaderRows();
    if (startRow < this.table.Rows.Count)
    {
        Cell cell = this.table[startRow, 0]; // NOTE(review): overwritten before use inside the loop
        // Locate the first merged cell of the start row, then walk forward
        // until a cell belongs to a row beyond endRow.
        int cellIdx = this.mergedCells.BinarySearch(this.table[startRow, 0], new CellComparer());
        while (cellIdx < this.mergedCells.Count)
        {
            cell = (Cell)this.mergedCells[cellIdx];
            if (cell.Row.Index > this.endRow)
                break;
            RenderCell(cell);
            ++cellIdx;
        }
    }
}
// Prepares formatting state for this pass. On a continuation (a previous
// format info exists) the maps built by the first pass are reused and
// formatting resumes at the row after the previous end row; otherwise the
// maps are built from scratch and formatting starts after the header rows.
void InitFormat(Area area, FormatInfo previousFormatInfo)
{
    TableFormatInfo prevTableFormatInfo = (TableFormatInfo)previousFormatInfo;
    TableRenderInfo tblRenderInfo = new TableRenderInfo();
    tblRenderInfo.table = this.table;
    this.renderInfo = tblRenderInfo;
    if (prevTableFormatInfo != null)
    {
        // Continuation page: reuse everything the first pass computed.
        this.mergedCells = prevTableFormatInfo.mergedCells;
        this.formattedCells = prevTableFormatInfo.formattedCells;
        this.bottomBorderMap = prevTableFormatInfo.bottomBorderMap;
        this.lastHeaderRow = prevTableFormatInfo.lastHeaderRow;
        this.connectedRowsMap = prevTableFormatInfo.connectedRowsMap;
        this.startRow = prevTableFormatInfo.endRow + 1;
    }
    else
    {
        // First page: build all lookup structures.
        this.mergedCells = new MergedCellList(this.table);
        FormatCells();
        CalcLastHeaderRow();
        CreateConnectedRows();
        CreateBottomBorderMap();
        if (this.doHorizontalBreak)
        {
            CalcLastHeaderColumn();
            CreateConnectedColumns();
        }
        this.startRow = this.lastHeaderRow + 1;
    }
    // Publish the (possibly reused) structures on this pass's format info.
    ((TableFormatInfo)tblRenderInfo.FormatInfo).mergedCells = this.mergedCells;
    ((TableFormatInfo)tblRenderInfo.FormatInfo).formattedCells = this.formattedCells;
    ((TableFormatInfo)tblRenderInfo.FormatInfo).bottomBorderMap = this.bottomBorderMap;
    ((TableFormatInfo)tblRenderInfo.FormatInfo).connectedRowsMap = this.connectedRowsMap;
    ((TableFormatInfo)tblRenderInfo.FormatInfo).lastHeaderRow = this.lastHeaderRow;
}
/// <summary>
/// Measures every merged cell once and caches the result, keyed by cell.
/// </summary>
void FormatCells()
{
    this.formattedCells = new SortedList(new CellComparer());
    foreach (Cell cell in this.mergedCells)
    {
        Borders effectiveBorders = this.mergedCells.GetEffectiveBorders(cell);
        FormattedCell formatted = new FormattedCell(cell, this.documentRenderer, effectiveBorders, this.fieldInfos, 0, 0);
        formatted.Format(this.gfx);
        this.formattedCells.Add(cell, formatted);
    }
}
/// <summary>
/// Formats (measures) the table.
/// </summary>
/// <param name="area">The area on which to fit the table.</param>
/// <param name="previousFormatInfo">Format info of the preceding table part, or null on the first call.</param>
internal override void Format(Area area, FormatInfo previousFormatInfo)
{
    // Horizontal page breaking is only honored when the table sits directly in a section.
    DocumentElements elements = DocumentRelations.GetParent(this.table) as DocumentElements;
    if (elements != null)
    {
        Section section = DocumentRelations.GetParent(elements) as Section;
        if (section != null)
            this.doHorizontalBreak = section.PageSetup.HorizontalPageBreak;
    }
    // NOTE(review): this instance is replaced again inside InitFormat() — looks redundant; confirm.
    this.renderInfo = new TableRenderInfo();
    InitFormat(area, previousFormatInfo);
    // Don't take any rows higher than MaxElementHeight
    XUnit topHeight = this.CalcStartingHeight();
    XUnit probeHeight = topHeight;
    // Offset between the bottom-border-map coordinate space and the current area:
    // for a continued table it is the previous part's bottom border position,
    // otherwise the (negative) widest top border of the first row.
    XUnit offset = 0;
    if (this.startRow > this.lastHeaderRow + 1 &&
        this.startRow < this.table.Rows.Count)
        offset = (XUnit)this.bottomBorderMap[this.startRow] - topHeight;
    else
        offset = -CalcMaxTopBorderWidth(0);
    int probeRow = this.startRow;
    XUnit currentHeight = 0;
    XUnit startingHeight = 0;
    bool isEmpty = false;
    // Probe row "plateaus" (rows connected via merges/KeepWith) until the area overflows.
    while (probeRow < this.table.Rows.Count)
    {
        bool firstProbe = probeRow == this.startRow;
        // Jump to the last row connected with the probed row; connected rows must stay together.
        probeRow = (int)this.connectedRowsMap[probeRow];
        // Don't take any rows higher than MaxElementHeight
        probeHeight = (XUnit)this.bottomBorderMap[probeRow + 1] - offset;
        if (firstProbe && probeHeight > MaxElementHeight - Tolerance)
            probeHeight = MaxElementHeight - Tolerance;
        //The height for the first new row(s) + headerrows:
        if (startingHeight == 0)
        {
            if (probeHeight > area.Height)
            {
                // Not even the first plateau fits: nothing of the table goes into this area.
                isEmpty = true;
                break;
            }
            startingHeight = probeHeight;
        }
        if (probeHeight > area.Height)
            break;
        else
        {
            // This plateau fits; commit it and try the next row.
            this.currRow = probeRow;
            currentHeight = probeHeight;
            ++probeRow;
        }
    }
    if (!isEmpty)
    {
        TableFormatInfo formatInfo = (TableFormatInfo)this.renderInfo.FormatInfo;
        formatInfo.startRow = this.startRow;
        formatInfo.isEnding = currRow >= this.table.Rows.Count - 1;
        formatInfo.endRow = this.currRow;
    }
    FinishLayoutInfo(area, currentHeight, startingHeight);
}
/// <summary>
/// Transfers the measured heights, the content width and the horizontal
/// alignment of the table to the layout info of the current render info.
/// </summary>
void FinishLayoutInfo(Area area, XUnit currentHeight, XUnit startingHeight)
{
    LayoutInfo layoutInfo = this.renderInfo.LayoutInfo;
    layoutInfo.StartingHeight = startingHeight;
    //REM: Trailing height would have to be calculated in case tables had a keep with next property.
    layoutInfo.TrailingHeight = 0;
    if (this.currRow >= 0)
    {
        // Content width = left border offset + sum of all column widths.
        layoutInfo.ContentArea = new Rectangle(area.X, area.Y, 0, currentHeight);
        XUnit width = LeftBorderOffset;
        foreach (Column clm in this.table.Columns)
        {
            width += clm.Width;
        }
        layoutInfo.ContentArea.Width = width;
    }
    layoutInfo.MinWidth = layoutInfo.ContentArea.Width;
    // An explicit left indent wins; otherwise a left-aligned table is shifted
    // left by border + padding so its text lines up with the body text.
    if (!this.table.Rows.IsNull("LeftIndent"))
        layoutInfo.Left = this.table.Rows.LeftIndent.Point;
    else if (this.table.Rows.Alignment == RowAlignment.Left)
    {
        if (table.Columns.Count > 0) // Errors in Wiki syntax can lead to tables w/o columns ...
        {
            XUnit leftOffset = LeftBorderOffset;
            leftOffset += table.Columns[0].LeftPadding;
            layoutInfo.Left = -leftOffset;
        }
#if DEBUG
        else
            table.GetType(); // No-op; a place to set a breakpoint for column-less tables.
#endif
    }
    switch (this.table.Rows.Alignment)
    {
        case RowAlignment.Left:
            layoutInfo.HorizontalAlignment = ElementAlignment.Near;
            break;
        case RowAlignment.Right:
            layoutInfo.HorizontalAlignment = ElementAlignment.Far;
            break;
        case RowAlignment.Center:
            layoutInfo.HorizontalAlignment = ElementAlignment.Center;
            break;
    }
}
/// <summary>
/// Width of the left border of cell (0, 0), computed lazily.
/// A negative backing value means "not calculated yet".
/// </summary>
XUnit LeftBorderOffset
{
    get
    {
        if (this.leftBorderOffset >= 0)
            return this.leftBorderOffset;

        if (table.Rows.Count > 0 && table.Columns.Count > 0)
        {
            Borders borders = this.mergedCells.GetEffectiveBorders(table[0, 0]);
            this.leftBorderOffset = new BordersRenderer(borders, this.gfx).GetWidth(BorderType.Left);
        }
        else
        {
            // Degenerate table without rows or columns: no border to measure.
            this.leftBorderOffset = 0;
        }
        return this.leftBorderOffset;
    }
}
// Cached value for LeftBorderOffset; -1 marks it as not yet computed.
private XUnit leftBorderOffset = -1;
/// <summary>
/// Calculates either the height of the header rows or the height of the
/// uppermost top border, whichever applies.
/// </summary>
/// <returns>The starting height for this table part.</returns>
XUnit CalcStartingHeight()
{
    if (this.lastHeaderRow >= 0)
    {
        // Header rows present: their total height plus the widest top border.
        XUnit height = (XUnit)this.bottomBorderMap[this.lastHeaderRow + 1];
        height += CalcMaxTopBorderWidth(0);
        return height;
    }

    // No header rows: only the widest top border of the start row counts.
    if (this.table.Rows.Count > this.startRow)
        return CalcMaxTopBorderWidth(this.startRow);

    return 0;
}
/// <summary>
/// Determines the last leading column flagged as HeadingFormat, extended
/// over connected (merged / kept-with) columns. -1 means "no header columns".
/// </summary>
void CalcLastHeaderColumn()
{
    this.lastHeaderColumn = -1;
    foreach (Column clm in this.table.Columns)
    {
        if (clm.HeadingFormat)
            this.lastHeaderColumn = clm.Index;
        else break;
    }
    // BUGFIX: the original assigned the connected-COLUMN result to
    // this.lastHeaderRow and then compared it against Rows.Count (copy-paste
    // from CalcLastHeaderRow), silently corrupting the row bookkeeping.
    // The column calculation must stay entirely in column terms.
    if (this.lastHeaderColumn >= 0)
        this.lastHeaderColumn = CalcLastConnectedColumn(this.lastHeaderColumn);
    //Ignore heading format if all the table is heading:
    if (this.lastHeaderColumn == this.table.Columns.Count - 1)
        this.lastHeaderColumn = -1;
}
/// <summary>
/// Determines the last leading row flagged as HeadingFormat, extended over
/// connected (merged / kept-with) rows. -1 means "no header rows".
/// </summary>
void CalcLastHeaderRow()
{
    int header = -1;
    foreach (Row row in this.table.Rows)
    {
        if (!row.HeadingFormat)
            break;
        header = row.Index;
    }
    if (header >= 0)
        header = CalcLastConnectedRow(header);
    // Ignore heading format if the whole table would be header.
    this.lastHeaderRow = header == this.table.Rows.Count - 1 ? -1 : header;
}
/// <summary>
/// Builds the map "starting row index -> last row connected with it"
/// (connections arise from vertical merges and KeepWith).
/// </summary>
void CreateConnectedRows()
{
    this.connectedRowsMap = new SortedList();
    foreach (Cell cell in this.mergedCells)
    {
        int rowIdx = cell.Row.Index;
        if (this.connectedRowsMap.ContainsKey(rowIdx))
            continue; // Row already resolved via an earlier cell.
        this.connectedRowsMap[rowIdx] = CalcLastConnectedRow(rowIdx);
    }
}
/// <summary>
/// Builds the map "starting column index -> last column connected with it"
/// (connections arise from horizontal merges and KeepWith).
/// </summary>
void CreateConnectedColumns()
{
    this.connectedColumnsMap = new SortedList();
    foreach (Cell cell in this.mergedCells)
    {
        int clmIdx = cell.Column.Index;
        if (this.connectedColumnsMap.ContainsKey(clmIdx))
            continue; // Column already resolved via an earlier cell.
        this.connectedColumnsMap[clmIdx] = CalcLastConnectedColumn(clmIdx);
    }
}
/// <summary>
/// Builds the map "row index -> vertical position of the border below the
/// previous row", seeded with the table top at position 0. Entries are
/// appended until the border below the last row (key == Rows.Count) exists.
/// </summary>
void CreateBottomBorderMap()
{
    this.bottomBorderMap = new SortedList();
    this.bottomBorderMap.Add(0, XUnit.FromPoint(0));
    while (!this.bottomBorderMap.ContainsKey(this.table.Rows.Count))
        CreateNextBottomBorderPosition();
}
/// <summary>
/// Calculates the maximum top border width among the cells of the given row.
/// </summary>
/// <param name="row">The row index.</param>
/// <returns>The widest top border found, or 0 if none is explicitly set.</returns>
XUnit CalcMaxTopBorderWidth(int row)
{
    XUnit maxWidth = 0;
    if (this.table.Rows.Count > row)
    {
        int cellIdx = this.mergedCells.BinarySearch(this.table[row, 0], new CellComparer());
        // FIX: the original dereferenced this.mergedCells[cellIdx] BEFORE the
        // cellIdx < Count check, which can throw when the binary search runs
        // past the end of the list; the value was immediately overwritten in
        // the loop anyway, so the access was both unsafe and redundant.
        while (cellIdx < this.mergedCells.Count)
        {
            Cell rowCell = this.mergedCells[cellIdx];
            if (rowCell.Row.Index > row)
                break;
            if (!rowCell.IsNull("Borders"))
            {
                // Only cells with explicitly set borders can contribute.
                BordersRenderer bordersRenderer = new BordersRenderer(rowCell.Borders, this.gfx);
                XUnit width = bordersRenderer.GetWidth(BorderType.Top);
                if (width > maxWidth)
                    maxWidth = width;
            }
            ++cellIdx;
        }
    }
    return maxWidth;
}
/// <summary>
/// Creates the next bottom border position: takes the last mapped row,
/// finds its minimally merged cell, and records the lowest bottom border
/// position of all cells ending in that same row.
/// </summary>
void CreateNextBottomBorderPosition()
{
    // Row index and position of the last border calculated so far.
    int lastIdx = bottomBorderMap.Count - 1;
    int lastBorderRow = (int)bottomBorderMap.GetKey(lastIdx);
    XUnit lastPos = (XUnit)bottomBorderMap.GetByIndex(lastIdx);
    // Start with the cell that spans the fewest rows from here.
    Cell minMergedCell = GetMinMergedCell(lastBorderRow);
    FormattedCell minMergedFormattedCell = (FormattedCell)this.formattedCells[minMergedCell];
    XUnit maxBottomBorderPosition = lastPos + minMergedFormattedCell.InnerHeight;
    maxBottomBorderPosition += CalcBottomBorderWidth(minMergedCell);
    // Every cell whose merge ends in the same row competes; the lowest
    // bottom border position wins.
    foreach (Cell cell in this.mergedCells)
    {
        if (cell.Row.Index > minMergedCell.Row.Index + minMergedCell.MergeDown)
            break; // List is sorted by row; nothing further can end here.
        if (cell.Row.Index + cell.MergeDown == minMergedCell.Row.Index + minMergedCell.MergeDown)
        {
            FormattedCell formattedCell = (FormattedCell)this.formattedCells[cell];
            XUnit topBorderPos = (XUnit)this.bottomBorderMap[cell.Row.Index];
            XUnit bottomBorderPos = topBorderPos + formattedCell.InnerHeight;
            bottomBorderPos += CalcBottomBorderWidth(cell);
            if (bottomBorderPos > maxBottomBorderPosition)
                maxBottomBorderPosition = bottomBorderPos;
        }
    }
    this.bottomBorderMap.Add(minMergedCell.Row.Index + minMergedCell.MergeDown + 1, maxBottomBorderPosition);
}
/// <summary>
/// Calculates the bottom border width of a cell.
/// </summary>
/// <param name="cell">The cell whose effective bottom border is probed.</param>
/// <returns>The border width, or 0 if the cell has no effective borders.</returns>
XUnit CalcBottomBorderWidth(Cell cell)
{
    Borders borders = this.mergedCells.GetEffectiveBorders(cell);
    if (borders == null)
        return 0;

    return new BordersRenderer(borders, this.gfx).GetWidth(BorderType.Bottom);
}
/// <summary>
/// Gets the first cell in the given row that is merged down minimally.
/// </summary>
/// <param name="row">The row to probe.</param>
/// <returns>The first cell with minimal vertical merge, or null if the row has no cells.</returns>
Cell GetMinMergedCell(int row)
{
    Cell bestCell = null;
    int bestMerge = this.table.Rows.Count;
    foreach (Cell cell in this.mergedCells)
    {
        // The list is sorted by row, so stop once we pass the requested row.
        if (cell.Row.Index > row)
            break;
        if (cell.Row.Index != row)
            continue;

        if (cell.MergeDown == 0)
            return cell; // Cannot get smaller than an unmerged cell.

        if (cell.MergeDown < bestMerge)
        {
            bestMerge = cell.MergeDown;
            bestCell = cell;
        }
    }
    return bestCell;
}
/// <summary>
/// Calculates the last row that is connected with the given row.
/// </summary>
/// <param name="row">The row that is probed for downward connection.</param>
/// <returns>The last row that is connected with the given row.</returns>
int CalcLastConnectedRow(int row)
{
    int last = row;
    foreach (Cell cell in this.mergedCells)
    {
        // Only cells starting inside the connected range can extend it.
        if (cell.Row.Index > last)
            continue;
        // A row is pulled along either by KeepWith or by a vertical merge.
        int reach = cell.Row.Index + Math.Max(cell.Row.KeepWith, cell.MergeDown);
        if (reach > last)
            last = reach;
    }
    return last;
}
/// <summary>
/// Calculates the last column that is connected with the specified column.
/// </summary>
/// <param name="column">The column that is probed for rightward connection.</param>
/// <returns>The last column that is connected with the given column.</returns>
int CalcLastConnectedColumn(int column)
{
    int last = column;
    foreach (Cell cell in this.mergedCells)
    {
        // Only cells starting inside the connected range can extend it.
        if (cell.Column.Index > last)
            continue;
        // A column is pulled along either by KeepWith or by a horizontal merge.
        int reach = cell.Column.Index + Math.Max(cell.Column.KeepWith, cell.MergeRight);
        if (reach > last)
            last = reach;
    }
    return last;
}
Table table;                    // The table being formatted/rendered (presumably set in the constructor — not visible here).
MergedCellList mergedCells;     // All cells with merges collapsed, sorted by row/column.
SortedList formattedCells;      // Cell -> FormattedCell (measured cell content).
SortedList bottomBorderMap;     // Row index -> y-position of the border below the previous row.
SortedList connectedRowsMap;    // Row index -> last row connected via MergeDown/KeepWith.
SortedList connectedColumnsMap; // Column index -> last column connected via MergeRight/KeepWith.
int lastHeaderRow;              // Index of the last heading row; -1 if none.
int lastHeaderColumn;           // Index of the last heading column; -1 if none.
int startRow;                   // First row to format/render for this table part.
int currRow;                    // Last row that fit during formatting.
int endRow = -1;                // Last row to render, taken from the format info.
bool doHorizontalBreak = false; // True when the enclosing section enables horizontal page breaks.
XUnit startX;                   // Render origin x (assigned outside this view, presumably in InitRendering — confirm).
XUnit startY;                   // Render origin y (assigned outside this view, presumably in InitRendering — confirm).
}
}
| |
using System;
using System.Linq;
using NUnit.Framework;
using Umbraco.Core;
using Umbraco.Core.Models;
using Umbraco.Core.Models.Rdbms;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.TestHelpers.Entities;
namespace Umbraco.Tests.Services
{
/// <summary>
/// Integration tests for the member type service: member can-edit/can-view
/// property flags, property type removal, the member XML rebuild triggered
/// by alias/property changes, and basic save validation.
/// Each test runs against a fresh database file and schema.
/// </summary>
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture, RequiresSTA]
public class MemberTypeServiceTests : BaseServiceTest
{
    [SetUp]
    public override void Initialize()
    {
        base.Initialize();
    }

    [TearDown]
    public override void TearDown()
    {
        base.TearDown();
    }

    [Test]
    public void Member_Cannot_Edit_Property()
    {
        // By default no property of a freshly saved member type is member-editable.
        IMemberType memberType = MockedContentTypes.CreateSimpleMemberType();
        ServiceContext.MemberTypeService.Save(memberType);

        // Re-get so we assert against the persisted state.
        memberType = ServiceContext.MemberTypeService.Get(memberType.Id);
        foreach (var p in memberType.PropertyTypes)
        {
            Assert.IsFalse(memberType.MemberCanEditProperty(p.Alias));
        }
    }

    [Test]
    public void Member_Can_Edit_Property()
    {
        // Marking a single property as member-editable must persist and must
        // not affect any other property of the type.
        IMemberType memberType = MockedContentTypes.CreateSimpleMemberType();
        ServiceContext.MemberTypeService.Save(memberType);
        var prop = memberType.PropertyTypes.First().Alias;
        memberType.SetMemberCanEditProperty(prop, true);
        ServiceContext.MemberTypeService.Save(memberType);

        // Re-get so we assert against the persisted state.
        memberType = ServiceContext.MemberTypeService.Get(memberType.Id);
        foreach (var p in memberType.PropertyTypes.Where(x => x.Alias != prop))
        {
            Assert.IsFalse(memberType.MemberCanEditProperty(p.Alias));
        }
        Assert.IsTrue(memberType.MemberCanEditProperty(prop));
    }

    [Test]
    public void Member_Cannot_View_Property()
    {
        // By default no property of a freshly saved member type is member-visible.
        IMemberType memberType = MockedContentTypes.CreateSimpleMemberType();
        ServiceContext.MemberTypeService.Save(memberType);

        // Re-get so we assert against the persisted state.
        memberType = ServiceContext.MemberTypeService.Get(memberType.Id);
        foreach (var p in memberType.PropertyTypes)
        {
            Assert.IsFalse(memberType.MemberCanViewProperty(p.Alias));
        }
    }

    [Test]
    public void Member_Can_View_Property()
    {
        // Marking a single property as member-visible must persist and must
        // not affect any other property of the type.
        IMemberType memberType = MockedContentTypes.CreateSimpleMemberType();
        ServiceContext.MemberTypeService.Save(memberType);
        var prop = memberType.PropertyTypes.First().Alias;
        memberType.SetMemberCanViewProperty(prop, true);
        ServiceContext.MemberTypeService.Save(memberType);

        // Re-get so we assert against the persisted state.
        memberType = ServiceContext.MemberTypeService.Get(memberType.Id);
        foreach (var p in memberType.PropertyTypes.Where(x => x.Alias != prop))
        {
            Assert.IsFalse(memberType.MemberCanViewProperty(p.Alias));
        }
        Assert.IsTrue(memberType.MemberCanViewProperty(prop));
    }

    [Test]
    public void Deleting_PropertyType_Removes_The_Property_From_Member()
    {
        IMemberType memberType = MockedContentTypes.CreateSimpleMemberType();
        ServiceContext.MemberTypeService.Save(memberType);
        IMember member = MockedMember.CreateSimpleMember(memberType, "test", "test@test.com", "pass", "test");
        ServiceContext.MemberService.Save(member);
        var initProps = member.Properties.Count;
        var initPropTypes = member.PropertyTypes.Count();

        // Remove a property type (NOT one of the conventional defaults).
        var standardProps = Constants.Conventions.Member.GetStandardPropertyTypeStubs();
        memberType.RemovePropertyType(memberType.PropertyTypes.First(x => standardProps.ContainsKey(x.Alias) == false).Alias);
        ServiceContext.MemberTypeService.Save(memberType);

        // Re-load the member from the db: both the property type and its value must be gone.
        member = ServiceContext.MemberService.GetById(member.Id);
        Assert.AreEqual(initPropTypes - 1, member.PropertyTypes.Count());
        Assert.AreEqual(initProps - 1, member.Properties.Count);
    }

    [Test]
    public void Rebuild_Member_Xml_On_Alias_Change()
    {
        var contentType1 = MockedContentTypes.CreateSimpleMemberType("test1", "Test1");
        var contentType2 = MockedContentTypes.CreateSimpleMemberType("test2", "Test2");
        ServiceContext.MemberTypeService.Save(contentType1);
        ServiceContext.MemberTypeService.Save(contentType2);
        var contentItems1 = MockedMember.CreateSimpleMember(contentType1, 10).ToArray();
        contentItems1.ForEach(x => ServiceContext.MemberService.Save(x));
        var contentItems2 = MockedMember.CreateSimpleMember(contentType2, 5).ToArray();
        contentItems2.ForEach(x => ServiceContext.MemberService.Save(x));

        // Only update the contentType1 alias, which forces an xml rebuild for
        // all members of that type — and only of that type.
        contentType1.Alias = "newAlias";
        ServiceContext.MemberTypeService.Save(contentType1);

        foreach (var c in contentItems1)
        {
            var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
            Assert.IsNotNull(xml);
            Assert.IsTrue(xml.Xml.StartsWith("<newAlias"));
        }
        foreach (var c in contentItems2)
        {
            var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
            Assert.IsNotNull(xml);
            Assert.IsTrue(xml.Xml.StartsWith("<test2")); // should remain the same
        }
    }

    [Test]
    public void Rebuild_Member_Xml_On_Property_Removal()
    {
        var standardProps = Constants.Conventions.Member.GetStandardPropertyTypeStubs();
        var contentType1 = MockedContentTypes.CreateSimpleMemberType("test1", "Test1");
        ServiceContext.MemberTypeService.Save(contentType1);
        var contentItems1 = MockedMember.CreateSimpleMember(contentType1, 10).ToArray();
        contentItems1.ForEach(x => ServiceContext.MemberService.Save(x));
        var alias = contentType1.PropertyTypes.First(x => standardProps.ContainsKey(x.Alias) == false).Alias;
        var elementToMatch = "<" + alias + ">";

        // Verify the property element is present before we remove the property type.
        foreach (var c in contentItems1)
        {
            var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
            Assert.IsNotNull(xml);
            Assert.IsTrue(xml.Xml.Contains(elementToMatch));
        }

        // Remove a property type (NOT one of the conventional defaults).
        contentType1.RemovePropertyType(alias);
        ServiceContext.MemberTypeService.Save(contentType1);

        // Verify the element is no longer present in the rebuilt xml.
        // (Two unused debug re-fetches of the type/member were removed here.)
        foreach (var c in contentItems1)
        {
            var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
            Assert.IsNotNull(xml);
            Assert.IsFalse(xml.Xml.Contains(elementToMatch));
        }
    }

    [Test]
    public void Cannot_Save_MemberType_With_Empty_Name()
    {
        // Arrange
        IMemberType memberType = MockedContentTypes.CreateSimpleMemberType("memberTypeAlias", string.Empty);

        // Act & Assert
        Assert.Throws<ArgumentException>(() => ServiceContext.MemberTypeService.Save(memberType));
    }

    [Test]
    public void Empty_Description_Is_Always_Null_After_Saving_Member_Type()
    {
        // Both null and empty descriptions must normalize to null on save.
        var service = ServiceContext.MemberTypeService;
        var memberType = MockedContentTypes.CreateSimpleMemberType();
        memberType.Description = null;
        service.Save(memberType);

        var memberType2 = MockedContentTypes.CreateSimpleMemberType("memberType2", "Member Type 2");
        memberType2.Description = string.Empty;
        service.Save(memberType2);

        Assert.IsNull(memberType.Description);
        Assert.IsNull(memberType2.Description);
    }
}
}
| |
#region Header
//
// CmdExportSolidToSat.cs - Create a solid in memory and export it to a SAT file
//
// Copyright (C) 2013-2021 by Jeremy Tammik, Autodesk Inc. All rights reserved.
//
// Keywords: The Building Coder Revit API C# .NET add-in.
//
#endregion // Header
#region Namespaces
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Autodesk.Revit.Attributes;
using Autodesk.Revit.DB;
using Autodesk.Revit.UI;
#endregion // Namespaces
namespace BuildingCoder
{
/// <summary>
/// External command: builds the boolean intersection of the two floors in the
/// model, places it as a free form element in a temporary family document,
/// and exports that family's 3D view to a SAT file in the user's temp folder.
/// </summary>
[Transaction(TransactionMode.Manual)]
internal class CmdExportSolidToSat : IExternalCommand
{
    public Result Execute(
        ExternalCommandData commandData,
        ref string message,
        ElementSet elements)
    {
        var uiapp = commandData.Application;
        var uidoc = uiapp.ActiveUIDocument;
        var app = uiapp.Application;
        var doc = uidoc.Document;
        var sel = uidoc.Selection; // NOTE(review): unused in this command.
        // Retrieve all floors from the model
        var floors
            = new FilteredElementCollector(doc)
                .OfClass(typeof(Floor))
                .ToElements()
                .Cast<Floor>()
                .ToList();
        if (2 != floors.Count)
        {
            message = "Please create two intersected floors";
            return Result.Failed;
        }
        // Retrieve the floor solids
        // NOTE(review): assumes each floor's first geometry object is a Solid;
        // a null here would make the boolean operation below throw — confirm.
        var opt = new Options();
        var geometry1 = floors[0].get_Geometry(opt);
        var geometry2 = floors[1].get_Geometry(opt);
        var solid1 = geometry1.FirstOrDefault() as Solid;
        var solid2 = geometry2.FirstOrDefault() as Solid;
        // Calculate the intersection solid
        var intersectedSolid = BooleanOperationsUtils
            .ExecuteBooleanOperation(solid1, solid2,
                BooleanOperationsType.Intersect);
        // Search for the metric mass family template file
        var template_path = DirSearch(
            app.FamilyTemplatePath,
            "Metric Mass.rft");
        // Create a new temporary family
        var family_doc = app.NewFamilyDocument(
            template_path);
        // Create a free form element
        // from the intersection solid
        using (var t = new Transaction(family_doc))
        {
            t.Start("Add Free Form Element");
            var freeFormElement = FreeFormElement.Create(
                family_doc, intersectedSolid);
            t.Commit();
        }
        // Save the family so it can be exported.
        var dir = Path.GetTempPath();
        var filepath = Path.Combine(dir,
            "floor_intersection_family.rfa");
        var sao = new SaveAsOptions
        {
            OverwriteExistingFile = true
        };
        family_doc.SaveAs(filepath, sao);
        // Create 3D View
        var viewFamilyType
            = new FilteredElementCollector(family_doc)
                .OfClass(typeof(ViewFamilyType))
                .OfType<ViewFamilyType>()
                .FirstOrDefault(x =>
                    x.ViewFamily == ViewFamily.ThreeDimensional);
        View3D threeDView;
        using (var t = new Transaction(family_doc))
        {
            t.Start("Create 3D View");
            threeDView = View3D.CreateIsometric(
                family_doc, viewFamilyType.Id);
            t.Commit();
        }
        // Export to SAT
        // NOTE(review): the export result is ignored and the temporary family
        // document is never closed — confirm whether that is intentional.
        var viewSet = new List<ElementId>
        {
            threeDView.Id
        };
        var exportOptions
            = new SATExportOptions();
        var res = family_doc.Export(dir,
            "SolidFile.sat", viewSet, exportOptions);
        return Result.Succeeded;
    }

    #region Intersect solid with another solid from a linked file
    /// <summary>
    /// Returns the surface area of the intersection between a host element
    /// solid and a solid from a linked document, after transforming the
    /// linked solid into the host coordinate space.
    /// </summary>
    public static double GetIntersectedSolidArea(
        Document host,
        Solid hostElement,
        RevitLinkInstance rins,
        Solid linkedElement)
    {
        // Step 1 "Determine the transformation T from the linked document Q coordinates to P's."
        var transForm = rins.GetTransform();
        // Step 2 "Open the linked project Q and retrieve the solid Sb of B."
        // linkedElement is Solid of linked Link
        // Step 3 "Transform it to P's coordinate space: T * Sb."
        var tmp = SolidUtils.CreateTransformed(linkedElement, transForm);
        // Step 4 "Retrieve the solid Sa of A"
        // hostElement is hostElementSolid
        var result = BooleanOperationsUtils.ExecuteBooleanOperation(
            hostElement, tmp, BooleanOperationsType.Intersect);
        return result.SurfaceArea;
    }
    #endregion // Intersect solid with another solid from a linked file

    #region Clone Solid Workaround before new Revit 2016 Solid.Clone API
    /// <summary>
    /// Clones a solid by unioning it with itself (pre-Revit-2016 workaround).
    /// </summary>
    public static Solid Clone( /*this*/ Solid solid)
    {
        if (solid == null) return null;
        // Better than unioning the solid with itself:
        // use a small cube contained within the original
        // solid instead, e.g. a 1x1x1 cube at the origin
        // or something.
        return BooleanOperationsUtils
            .ExecuteBooleanOperation(solid, solid,
                BooleanOperationsType.Union);
    }
    #endregion // Clone Solid Workaround

    /// <summary>
    /// Return the full path of the first file
    /// found matching the given filename pattern
    /// in a recursive search through all
    /// subdirectories of the given starting folder.
    /// Note: files directly inside start_dir itself are never checked —
    /// only files in its subdirectories.
    /// </summary>
    private string DirSearch(
        string start_dir,
        string filename_pattern)
    {
        // Depth-first: each subdirectory's files are checked before recursing.
        // NOTE(review): inaccessible directories will throw, not be skipped.
        foreach (var d in Directory.GetDirectories(
            start_dir))
        {
            foreach (var f in Directory.GetFiles(
                d, filename_pattern))
                return f;
            var f2 = DirSearch(d, filename_pattern);
            if (null != f2) return f2;
        }
        return null;
    }
}
}
| |
#if (UNITY_STANDALONE_LINUX || UNITY_EDITOR_LINUX)
/*
* Copyright (c) 2010 Nicolas George
* Copyright (c) 2011 Stefano Sabatini
* Copyright (c) 2014 Andrey Utkin
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/* This file was translated from C to make use of the FFmpeg.AutoGen library.
* Original: https://ffmpeg.org/doxygen/4.1/transcode_aac_8c-example.html */
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
using static FFmpeg.AutoGen.ffmpeg;
// Transcodes an audio file to Vorbis using the FFmpeg.AutoGen bindings.
// Translated from FFmpeg's transcode_aac.c example (see header above).
unsafe class FFmpegTranscoding {
    // NOTE(review): ofmt_ctx, FilteringContext, filter_ctx, StreamContext and
    // stream_ctx appear unreferenced by the visible methods in this class —
    // presumably leftovers from FFmpeg's transcoding/filtering C examples.
    // Confirm before removing.
    AVFormatContext *ofmt_ctx;
    // Filter-graph endpoints for one stream (from the C filtering example).
    unsafe struct FilteringContext {
        public AVFilterContext *buffersink_ctx;
        public AVFilterContext *buffersrc_ctx;
        public AVFilterGraph *filter_graph;
    };
    FilteringContext *filter_ctx;
    // Decoder/encoder context pair for one stream.
    unsafe struct StreamContext {
        public AVCodecContext *dec_ctx;
        public AVCodecContext *enc_ctx;
    };
    StreamContext *stream_ctx;
/**
 * Open an input file and the required decoder.
 * The file must contain exactly one (audio) stream.
 * @param filename File to be opened
 * @param[out] input_format_context Format context of opened file
 * @param[out] input_codec_context Codec context of opened file
 * @return Error code (0 if successful)
 */
int open_input_file(string filename,
                    AVFormatContext **input_format_context,
                    AVCodecContext **input_codec_context)
{
    AVCodecContext *avctx;
    AVCodec *input_codec;
    int error;

    /* Open the input file to read from it. */
    if ((error = avformat_open_input(input_format_context, filename, null,
                                     null)) < 0) {
        /* BUGFIX: report the actual file name — the translation had lost the
         * filename placeholder and always printed the literal '(unknown)'. */
        Console.WriteLine($"error: Could not open input file '{filename}' (error '{LibAVErrorToString(error)}')");
        *input_format_context = null;
        return error;
    }

    /* Get information on the input file (number of streams etc.). */
    if ((error = avformat_find_stream_info(*input_format_context, null)) < 0) {
        Console.WriteLine($"error: Could not open find stream info (error '{LibAVErrorToString(error)}')");
        avformat_close_input(input_format_context);
        return error;
    }

    /* Make sure that there is only one stream in the input file. */
    if ((*input_format_context)->nb_streams != 1) {
        Console.WriteLine($"error: Expected one audio input stream, but found {(*input_format_context)->nb_streams}");
        avformat_close_input(input_format_context);
        return AVERROR_EXIT;
    }

    /* Find a decoder for the audio stream. */
    if ((input_codec = avcodec_find_decoder((*input_format_context)->streams[0]->codecpar->codec_id)) == null) {
        Console.WriteLine("error: Could not find input codec");
        avformat_close_input(input_format_context);
        return AVERROR_EXIT;
    }

    /* Allocate a new decoding context. */
    avctx = avcodec_alloc_context3(input_codec);
    if (avctx == null) {
        Console.WriteLine("error: Could not allocate a decoding context");
        avformat_close_input(input_format_context);
        return AVERROR(ENOMEM);
    }

    /* Initialize the stream parameters with demuxer information. */
    error = avcodec_parameters_to_context(avctx, (*input_format_context)->streams[0]->codecpar);
    if (error < 0) {
        Console.WriteLine($"error: Could not avcodec_parameters_to_context (error '{LibAVErrorToString(error)}')");
        avformat_close_input(input_format_context);
        avcodec_free_context(&avctx);
        return error;
    }

    /* Open the decoder for the audio stream to use it later. */
    if ((error = avcodec_open2(avctx, input_codec, null)) < 0) {
        Console.WriteLine($"error: Could not open input codec (error '{LibAVErrorToString(error)}')");
        avcodec_free_context(&avctx);
        avformat_close_input(input_format_context);
        return error;
    }

    /* Save the decoder context for easier access later. */
    *input_codec_context = avctx;
    return 0;
}
/**
 * Open an output file and the required encoder (hard-coded to Vorbis).
 * Also set some basic encoder parameters.
 * Some of these parameters are based on the input file's parameters.
 * @param filename File to be opened
 * @param input_codec_context Codec context of input file
 * @param[out] output_format_context Format context of output file
 * @param[out] output_codec_context Codec context of output file
 * @return Error code (0 if successful)
 */
int open_output_file(string filename, AVCodecContext *input_codec_context,
                     AVFormatContext **output_format_context, AVCodecContext **output_codec_context)
{
    AVCodecContext *avctx = null;
    AVIOContext *output_io_context = null;
    AVStream *stream = null;
    AVCodec *output_codec = null;
    int error;

    /* Open the output file to write to it. */
    if ((error = avio_open(&output_io_context, filename,
                           AVIO_FLAG_WRITE)) < 0) {
        /* BUGFIX: report the actual file name — the translation had lost the
         * filename placeholder and always printed the literal '(unknown)'. */
        Console.WriteLine($"error: Could not open output file '{filename}' (error '{LibAVErrorToString(error)}')");
        return error;
    }

    /* Create a new format context for the output container format. */
    if ((*output_format_context = avformat_alloc_context()) == null) {
        Console.WriteLine("error: Could not allocate output format context");
        return AVERROR(ENOMEM);
    }

    /* Associate the output file (pointer) with the container format context. */
    (*output_format_context)->pb = output_io_context;

    /* Guess the desired container format based on the file extension.
     * Note: `error` is >= 0 here, so the cleanup label returns AVERROR_EXIT
     * for the paths below that do not set it explicitly. */
    if (((*output_format_context)->oformat = av_guess_format(null, filename, null)) == null) {
        Console.WriteLine("error: Could not find output file format");
        goto cleanup;
    }

    if (((*output_format_context)->url = av_strdup(filename)) == null) {
        Console.WriteLine("error: Could not allocate url.");
        error = AVERROR(ENOMEM);
        goto cleanup;
    }

    /* Find the encoder to be used by its name. */
    if ((output_codec = avcodec_find_encoder(AVCodecID.AV_CODEC_ID_VORBIS)) == null) {
        Console.WriteLine("error: Could not find a vorbis encoder.");
        goto cleanup;
    }

    /* Create a new audio stream in the output file container. */
    if ((stream = avformat_new_stream(*output_format_context, null)) == null) {
        Console.WriteLine("error: Could not create new stream");
        error = AVERROR(ENOMEM);
        goto cleanup;
    }

    avctx = avcodec_alloc_context3(output_codec);
    if (avctx == null) {
        Console.WriteLine("error: Could not allocate an encoding context");
        error = AVERROR(ENOMEM);
        goto cleanup;
    }

    /* Set the basic encoder parameters.
     * The input file's sample rate is used to avoid a sample rate conversion. */
    /* NOTE: These parameters are tailored for vorbis.
     * See https://ffmpeg.org/ffmpeg-codecs.html#libvorbis
     * Other codecs may need different parameters */
    avctx->channels = 2;
    avctx->channel_layout = (ulong)av_get_default_channel_layout((int)avctx->channels);
    avctx->sample_rate = input_codec_context->sample_rate;
    avctx->sample_fmt = output_codec->sample_fmts[0];
    avctx->global_quality = 7;

    /* Set the sample rate for the container. */
    avctx->time_base.den = input_codec_context->sample_rate;
    avctx->time_base.num = 1;

    /* Some container formats (like MP4) require global headers to be present.
     * Mark the encoder so that it behaves accordingly. */
    if (((*output_format_context)->oformat->flags & AVFMT_GLOBALHEADER) != 0)
        avctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    /* Open the encoder for the audio stream to use it later. */
    if ((error = avcodec_open2(avctx, output_codec, null)) < 0) {
        Console.WriteLine($"error: Could not open output codec (error '{LibAVErrorToString(error)}')");
        goto cleanup;
    }

    error = avcodec_parameters_from_context(stream->codecpar, avctx);
    if (error < 0) {
        Console.WriteLine("error: Could not initialize stream parameters");
        goto cleanup;
    }

    /* Save the encoder context for easier access later. */
    *output_codec_context = avctx;
    return 0;

cleanup:
    avcodec_free_context(&avctx);
    avio_closep(&(*output_format_context)->pb);
    avformat_free_context(*output_format_context);
    *output_format_context = null;
    return error < 0 ? error : AVERROR_EXIT;
}
/**
 * Initialize one data packet for reading or writing.
 * NOTE(review): av_init_packet is deprecated in newer FFmpeg releases
 * (av_packet_alloc is preferred) — confirm against the bound FFmpeg version.
 * @param packet Packet to be initialized
 */
void init_packet(AVPacket *packet)
{
    av_init_packet(packet);
    /* Set the packet data and size so that it is recognized as being empty. */
    packet->data = null;
    packet->size = 0;
}
/**
 * Initialize one audio frame for reading from the input file.
 * @param[out] frame Frame to be initialized
 * @return Error code (0 if successful, AVERROR(ENOMEM) on allocation failure)
 */
int init_input_frame(AVFrame **frame)
{
    *frame = av_frame_alloc();
    if (*frame != null)
        return 0;

    Console.WriteLine("error: Could not allocate input frame");
    return AVERROR(ENOMEM);
}
/**
 * Initialize the audio resampler based on the input and output codec settings.
 * If the input and output sample formats differ, a conversion is required
 * libswresample takes care of this, but requires initialization.
 * @param input_codec_context Codec context of the input file
 * @param output_codec_context Codec context of the output file
 * @param[out] resample_context Resample context for the required conversion
 * @return Error code (0 if successful)
 */
int init_resampler(AVCodecContext *input_codec_context,
                   AVCodecContext *output_codec_context,
                   SwrContext **resample_context)
{
    int error;

    /*
     * Create a resampler context for the conversion.
     * Set the conversion parameters.
     * Default channel layouts based on the number of channels
     * are assumed for simplicity (they are sometimes not detected
     * properly by the demuxer and/or decoder).
     * Argument order is: output layout/format/rate first, then input.
     */
    *resample_context = swr_alloc_set_opts(null,
                                           av_get_default_channel_layout(output_codec_context->channels),
                                           output_codec_context->sample_fmt,
                                           output_codec_context->sample_rate,
                                           av_get_default_channel_layout(input_codec_context->channels),
                                           input_codec_context->sample_fmt,
                                           input_codec_context->sample_rate,
                                           0, null);
    if (*resample_context == null) {
        Console.WriteLine("error: Could not allocate resample context");
        return AVERROR(ENOMEM);
    }

    /*
     * Perform a sanity check so that the number of converted samples is
     * not greater than the number of samples to be converted.
     * If the sample rates differ, this case has to be handled differently
     * (Debug.Assert is a no-op in release builds — this is a dev-time check only.)
     */
    Debug.Assert(output_codec_context->sample_rate == input_codec_context->sample_rate);

    /* Open the resampler with the specified parameters. */
    if ((error = swr_init(*resample_context)) < 0) {
        Console.WriteLine("error: Could not open resample context");
        swr_free(resample_context);
        return error;
    }
    return 0;
}
/**
 * Initialize a FIFO buffer for the audio samples to be encoded.
 * The initial capacity is a single sample; add_samples_to_fifo grows it
 * on demand via av_audio_fifo_realloc.
 * @param[out] fifo Sample buffer
 * @param output_codec_context Codec context of the output file
 * @return Error code (0 if successful)
 */
int init_fifo(AVAudioFifo **fifo, AVCodecContext *output_codec_context)
{
    /* Create the FIFO buffer based on the specified output sample format. */
    *fifo = av_audio_fifo_alloc(output_codec_context->sample_fmt,
                                output_codec_context->channels, 1);
    if (*fifo != null)
        return 0;

    Console.WriteLine("error: Could not allocate FIFO");
    return AVERROR(ENOMEM);
}
/**
 * Write the header of the output file container.
 * @param output_format_context Format context of the output file
 * @return Error code (0 if successful)
 */
int write_output_file_header(AVFormatContext *output_format_context)
{
    int error = avformat_write_header(output_format_context, null);
    if (error >= 0)
        return 0;

    Console.WriteLine($"Could not write output file header (error '{LibAVErrorToString(error)}')");
    return error;
}
/**
 * Decode one audio frame from the input file.
 * @param frame Audio frame to be decoded
 * @param input_format_context Format context of the input file
 * @param input_codec_context Codec context of the input file
 * @param[out] data_present Indicates whether data has been decoded
 * @param[out] finished Indicates whether the end of file has
 *                      been reached and all data has been
 *                      decoded. If this flag is false, there
 *                      is more data to be decoded, i.e., this
 *                      function has to be called again.
 * @return Error code (0 if successful)
 */
int decode_audio_frame(AVFrame *frame,
                       AVFormatContext *input_format_context,
                       AVCodecContext *input_codec_context,
                       int *data_present, int *finished)
{
    /* Packet used for temporary storage. */
    AVPacket input_packet;
    int error;
    init_packet(&input_packet);

    /* Read one audio frame from the input file into a temporary packet. */
    if ((error = av_read_frame(input_format_context, &input_packet)) < 0) {
        /* If we are at the end of the file, flush the decoder below. */
        if (error == AVERROR_EOF)
            *finished = 1;
        else {
            Console.WriteLine($"error: Could not read frame (error '{LibAVErrorToString(error)}')");
            /* BUGFIX: was `return error;`, which skipped av_packet_unref and
             * leaked the packet; the upstream C example goes to cleanup here. */
            goto cleanup;
        }
    }

    /* Send the audio frame stored in the temporary packet to the decoder.
     * The input audio stream decoder is used to do this. */
    if ((error = avcodec_send_packet(input_codec_context, &input_packet)) < 0) {
        Console.WriteLine($"error: Could not send packet for decoding (error '{LibAVErrorToString(error)}')");
        /* BUGFIX: same leak as above — release the packet via cleanup. */
        goto cleanup;
    }

    /* Receive one frame from the decoder. */
    error = avcodec_receive_frame(input_codec_context, frame);
    /* If the decoder asks for more data to be able to decode a frame,
     * return indicating that no data is present. */
    if (error == AVERROR(EAGAIN)) {
        error = 0;
        goto cleanup;
    /* If the end of the input file is reached, stop decoding. */
    } else if (error == AVERROR_EOF) {
        *finished = 1;
        error = 0;
        goto cleanup;
    } else if (error < 0) {
        Console.WriteLine($"error: Could not decode frame (error '{LibAVErrorToString(error)}')");
        goto cleanup;
    /* Default case: Return decoded data. */
    } else {
        *data_present = 1;
        goto cleanup;
    }

cleanup:
    av_packet_unref(&input_packet);
    return error;
}
/**
 * Initialize a temporary storage for the specified number of audio samples.
 * The conversion requires temporary storage due to the different format.
 * The number of audio samples to be allocated is specified in frame_size.
 * @param[out] converted_input_samples Array of converted samples. The
 *                                     dimensions are reference, channel
 *                                     (for multi-channel audio), sample.
 * @param output_codec_context Codec context of the output file
 * @param frame_size Number of samples to be converted in
 *                   each round
 * @return Error code (0 if successful)
 */
int init_converted_samples(byte ***converted_input_samples,
                           AVCodecContext *output_codec_context,
                           int frame_size)
{
    int error;

    /* Allocate as many pointers as there are audio channels.
     * Each pointer will later point to the audio samples of the corresponding
     * channels (although it may be NULL for interleaved formats).
     * (AllocHGlobal throws on failure rather than returning null, so the
     * check below is defensive only.)
     */
    if ((*converted_input_samples = (byte**)Marshal.AllocHGlobal(output_codec_context->channels * sizeof(IntPtr))) == null) {
        Console.WriteLine($"error: Could not allocate converted input sample pointers");
        return AVERROR(ENOMEM);
    }

    /* BUGFIX: AllocHGlobal does not zero the memory (unlike the calloc used by
     * the C original). Clear the pointer array so the error path below does
     * not av_freep a garbage pointer and unused channel slots stay null. */
    for (int i = 0; i < output_codec_context->channels; i++)
        (*converted_input_samples)[i] = null;

    /* Allocate memory for the samples of all channels in one consecutive
     * block for convenience. */
    if ((error = av_samples_alloc(*converted_input_samples, null,
                                  output_codec_context->channels,
                                  frame_size,
                                  output_codec_context->sample_fmt, 0)) < 0) {
        Console.WriteLine($"error: Could not allocate converted input samples (error '{LibAVErrorToString(error)}')");
        av_freep(&(*converted_input_samples)[0]);
        Marshal.FreeHGlobal((IntPtr)(*converted_input_samples));
        return error;
    }
    return 0;
}
/**
 * Convert the input audio samples into the output sample format.
 * The conversion happens on a per-frame basis, the size of which is
 * specified by frame_size.
 * @param input_data Samples to be decoded. The dimensions are
 *                   channel (for multi-channel audio), sample.
 * @param[out] converted_data Converted samples. The dimensions are channel
 *                            (for multi-channel audio), sample.
 * @param frame_size Number of samples to be converted
 * @param resample_context Resample context for the conversion
 * @return Error code (0 if successful)
 */
int convert_samples(byte **input_data,
                    byte **converted_data, int frame_size,
                    SwrContext *resample_context)
{
    /* Convert the samples using the resampler (output buffer first,
     * then input, per the swr_convert signature). */
    int error = swr_convert(resample_context,
                            converted_data, frame_size,
                            input_data, frame_size);
    if (error >= 0)
        return 0;

    Console.WriteLine($"error: Could not convert input samples (error '{LibAVErrorToString(error)}')");
    return error;
}
/**
 * Add converted input audio samples to the FIFO buffer for later processing.
 * @param fifo Buffer to add the samples to
 * @param converted_input_samples Samples to be added. The dimensions are channel
 *                                (for multi-channel audio), sample.
 * @param frame_size Number of samples to be converted
 * @return Error code (0 if successful)
 */
int add_samples_to_fifo(AVAudioFifo *fifo,
                        byte **converted_input_samples,
                        int frame_size)
{
    int error;

    /* Make the FIFO as large as it needs to be to hold both,
     * the old and the new samples. */
    if ((error = av_audio_fifo_realloc(fifo, av_audio_fifo_size(fifo) + frame_size)) < 0) {
        Console.WriteLine("error: Could not reallocate FIFO");
        return error;
    }

    /* Store the new samples in the FIFO buffer.
     * A partial write (fewer samples stored than requested) is treated
     * as a hard failure. */
    if (av_audio_fifo_write(fifo, (void **)converted_input_samples,
                            frame_size) < frame_size) {
        Console.WriteLine("error: Could not write data to FIFO");
        return AVERROR_EXIT;
    }
    return 0;
}
/**
 * Read one audio frame from the input file, decode, convert and store
 * it in the FIFO buffer.
 * @param fifo Buffer used for temporary storage
 * @param input_format_context Format context of the input file
 * @param input_codec_context Codec context of the input file
 * @param output_codec_context Codec context of the output file
 * @param resampler_context Resample context for the conversion
 * @param[out] finished Indicates whether the end of file has
 *                      been reached and all data has been
 *                      decoded. If this flag is false,
 *                      there is more data to be decoded,
 *                      i.e., this function has to be called
 *                      again.
 * @return Error code (0 if successful)
 */
int read_decode_convert_and_store(AVAudioFifo *fifo,
                                  AVFormatContext *input_format_context,
                                  AVCodecContext *input_codec_context,
                                  AVCodecContext *output_codec_context,
                                  SwrContext *resampler_context,
                                  int *finished)
{
    /* Temporary storage of the input samples of the frame read from the file. */
    AVFrame *input_frame = null;
    /* Temporary storage for the converted input samples. */
    byte **converted_input_samples = null;
    int data_present = 0;
    int ret = AVERROR_EXIT;

    /* Initialize temporary storage for one input frame. */
    if (init_input_frame(&input_frame) < 0)
        goto cleanup;

    /* Decode one frame worth of audio samples. */
    if (decode_audio_frame(input_frame, input_format_context,
                           input_codec_context, &data_present, finished) < 0)
        goto cleanup;

    /* If we are at the end of the file and there are no more samples
     * in the decoder which are delayed, we are actually finished.
     * This must not be treated as an error. */
    if (*finished != 0) {
        ret = 0;
        goto cleanup;
    }

    /* If there is decoded data, convert and store it. */
    if (data_present != 0) {
        /* Initialize the temporary storage for the converted input samples. */
        if (init_converted_samples(&converted_input_samples, output_codec_context,
                                   input_frame->nb_samples) < 0)
            goto cleanup;

        /* Convert the input samples to the desired output sample format.
         * This requires a temporary storage provided by converted_input_samples. */
        if (convert_samples(input_frame->extended_data, converted_input_samples,
                            input_frame->nb_samples, resampler_context) < 0)
            goto cleanup;

        /* Add the converted input samples to the FIFO buffer for later processing. */
        if (add_samples_to_fifo(fifo, converted_input_samples,
                                input_frame->nb_samples) < 0)
            goto cleanup;
    }
    /* CLEANUP: the original also assigned `ret = 0;` inside the branch above,
     * immediately followed by this identical assignment — the duplicate was
     * redundant and has been removed. */
    ret = 0;

cleanup:
    if (converted_input_samples != null) {
        av_freep(&converted_input_samples[0]);
        Marshal.FreeHGlobal((IntPtr)converted_input_samples);
    }
    av_frame_free(&input_frame);
    return ret;
}
/**
 * Initialize one input frame for writing to the output file.
 * The frame will be exactly frame_size samples large.
 * @param[out] frame Frame to be initialized
 * @param output_codec_context Codec context of the output file
 * @param frame_size Size of the frame
 * @return Error code (0 if successful)
 */
int init_output_frame(AVFrame **frame,
                      AVCodecContext *output_codec_context,
                      int frame_size)
{
    int error;

    /* Create a new frame to store the audio samples. */
    if ((*frame = av_frame_alloc()) == null) {
        Console.WriteLine("error: Could not allocate output frame");
        /* NOTE(review): returns AVERROR_EXIT rather than AVERROR(ENOMEM);
         * this mirrors the upstream C example. */
        return AVERROR_EXIT;
    }

    /* Set the frame's parameters, especially its size and format.
     * av_frame_get_buffer needs this to allocate memory for the
     * audio samples of the frame.
     * Default channel layouts based on the number of channels
     * are assumed for simplicity. */
    (*frame)->nb_samples = frame_size;
    (*frame)->channel_layout = output_codec_context->channel_layout;
    (*frame)->format = (int)output_codec_context->sample_fmt;
    (*frame)->sample_rate = output_codec_context->sample_rate;

    /* Allocate the samples of the created frame. This call will make
     * sure that the audio frame can hold as many samples as specified. */
    if ((error = av_frame_get_buffer(*frame, 0)) < 0) {
        Console.WriteLine($"error: Could not allocate output frame samples (error '{LibAVErrorToString(error)}')");
        av_frame_free(frame);
        return error;
    }

    return 0;
}
/* Global timestamp for the audio frames. Advanced by nb_samples per
 * encoded frame, so pts is expressed in samples (time_base is 1/sample_rate). */
Int64 pts = 0;

/**
 * Encode one frame worth of audio to the output file.
 * @param frame Samples to be encoded (null flushes the encoder)
 * @param output_format_context Format context of the output file
 * @param output_codec_context Codec context of the output file
 * @param[out] data_present Indicates whether data has been
 *                          encoded
 * @return Error code (0 if successful)
 */
int encode_audio_frame(AVFrame *frame,
                       AVFormatContext *output_format_context,
                       AVCodecContext *output_codec_context,
                       int *data_present)
{
    /* Packet used for temporary storage. */
    AVPacket output_packet;
    int error;
    init_packet(&output_packet);

    /* Set a timestamp based on the sample rate for the container. */
    if (frame != null) {
        frame->pts = pts;
        pts += frame->nb_samples;
    }

    /* Send the audio frame stored in the temporary packet to the encoder.
     * The output audio stream encoder is used to do this. */
    error = avcodec_send_frame(output_codec_context, frame);
    /* The encoder signals that it has nothing more to encode. */
    if (error == AVERROR_EOF) {
        error = 0;
        goto cleanup;
    } else if (error < 0) {
        Console.WriteLine($"error: Could not send packet for encoding (error '{LibAVErrorToString(error)}')");
        /* BUGFIX: was `return error;`, which skipped av_packet_unref and
         * leaked the packet; the upstream C example goes to cleanup here. */
        goto cleanup;
    }

    /* Receive one encoded frame from the encoder. */
    error = avcodec_receive_packet(output_codec_context, &output_packet);
    /* If the encoder asks for more data to be able to provide an
     * encoded frame, return indicating that no data is present. */
    if (error == AVERROR(EAGAIN)) {
        error = 0;
        goto cleanup;
    /* If the last frame has been encoded, stop encoding. */
    } else if (error == AVERROR_EOF) {
        error = 0;
        goto cleanup;
    } else if (error < 0) {
        Console.WriteLine($"error: Could not encode frame (error '{LibAVErrorToString(error)}')");
        goto cleanup;
    /* Default case: Return encoded data. */
    } else {
        *data_present = 1;
    }

    /* Write one audio frame from the temporary packet to the output file. */
    if (*data_present != 0 &&
        (error = av_write_frame(output_format_context, &output_packet)) < 0) {
        Console.WriteLine($"error: Could not write frame (error '{LibAVErrorToString(error)}')");
        goto cleanup;
    }

cleanup:
    av_packet_unref(&output_packet);
    return error;
}
/**
 * Load one audio frame from the FIFO buffer, encode and write it to the
 * output file.
 * @param fifo Buffer used for temporary storage
 * @param output_format_context Format context of the output file
 * @param output_codec_context Codec context of the output file
 * @return Error code (0 if successful)
 */
int load_encode_and_write(AVAudioFifo *fifo,
                          AVFormatContext *output_format_context,
                          AVCodecContext *output_codec_context)
{
    /* Temporary storage of the output samples of the frame written to the file. */
    AVFrame *output_frame;
    /* Use the maximum number of possible samples per frame.
     * If there is less than the maximum possible frame size in the FIFO
     * buffer use this number. Otherwise, use the maximum possible frame size. */
    int frame_size = Math.Min(av_audio_fifo_size(fifo),
                              output_codec_context->frame_size);
    /* BUGFIX/robustness: initialize explicitly (the flush loop in main() does
     * the same) instead of relying on implicitly zeroed stack storage —
     * encode_audio_frame only assigns this on its success path. */
    int data_written = 0;

    /* Initialize temporary storage for one output frame. */
    if (init_output_frame(&output_frame, output_codec_context, frame_size) < 0)
        return AVERROR_EXIT;

    /* Read as many samples from the FIFO buffer as required to fill the frame.
     * The samples are stored in the frame temporarily.
     * The data-plane pointer array is copied to a managed array and pinned so
     * its address can be handed to the native call. */
    byte*[] temp = output_frame->data;
    fixed (byte** temp2 = temp) {
        if (av_audio_fifo_read(fifo, (void **)temp2, frame_size) < frame_size) {
            Console.WriteLine("error: Could not read data from FIFO");
            av_frame_free(&output_frame);
            return AVERROR_EXIT;
        }
    }

    /* Encode one frame worth of audio samples. */
    if (encode_audio_frame(output_frame, output_format_context,
                           output_codec_context, &data_written) < 0) {
        av_frame_free(&output_frame);
        return AVERROR_EXIT;
    }
    av_frame_free(&output_frame);
    return 0;
}
/**
 * Write the trailer of the output file container.
 * @param output_format_context Format context of the output file
 * @return Error code (0 if successful)
 */
int write_output_file_trailer(AVFormatContext *output_format_context)
{
    int error;
    if ((error = av_write_trailer(output_format_context)) < 0) {
        /* BUGFIX: the string was missing the '$' interpolation prefix, so the
         * message printed the literal text "{LibAVErrorToString(error)}"
         * instead of the decoded error. */
        Console.WriteLine($"error: Could not write output file trailer (error '{LibAVErrorToString(error)}')");
        return error;
    }
    return 0;
}
/// Transcode the audio in inputFile to Vorbis in outputFile.
/// Returns true on success, false on any failure (all native resources
/// are released via the cleanup label in either case).
public bool main(string inputFile, string outputFile)
{
    AVFormatContext *input_format_context = null;
    AVFormatContext *output_format_context = null;
    AVCodecContext *input_codec_context = null;
    AVCodecContext *output_codec_context = null;
    SwrContext *resample_context = null;
    AVAudioFifo *fifo = null;
    bool ret = false;

    /* Open the input file for reading. */
    if (open_input_file(inputFile, &input_format_context,
                        &input_codec_context) < 0)
        goto cleanup;
    /* Open the output file for writing. */
    if (open_output_file(outputFile, input_codec_context,
                         &output_format_context, &output_codec_context) < 0)
        goto cleanup;
    /* Initialize the resampler to be able to convert audio sample formats. */
    if (init_resampler(input_codec_context, output_codec_context,
                       &resample_context) < 0)
        goto cleanup;
    /* Initialize the FIFO buffer to store audio samples to be encoded. */
    if (init_fifo(&fifo, output_codec_context) < 0)
        goto cleanup;
    /* Write the header of the output file container. */
    if (write_output_file_header(output_format_context) < 0)
        goto cleanup;

    /* Loop as long as we have input samples to read or output samples
     * to write; abort as soon as we have neither. */
    while (true) {
        /* Use the encoder's desired frame size for processing. */
        int output_frame_size = output_codec_context->frame_size;
        int finished = 0;

        /* Make sure that there is one frame worth of samples in the FIFO
         * buffer so that the encoder can do its work.
         * Since the decoder's and the encoder's frame size may differ, we
         * need the FIFO buffer to store as many frames worth of input samples
         * that they make up at least one frame worth of output samples. */
        while (av_audio_fifo_size(fifo) < output_frame_size) {
            /* Decode one frame worth of audio samples, convert it to the
             * output sample format and put it into the FIFO buffer. */
            if (read_decode_convert_and_store(fifo, input_format_context,
                                              input_codec_context,
                                              output_codec_context,
                                              resample_context, &finished) < 0)
                goto cleanup;
            /* If we are at the end of the input file, we continue
             * encoding the remaining audio samples to the output file. */
            if (finished != 0)
                break;
        }

        /* If we have enough samples for the encoder, we encode them.
         * At the end of the file, we pass the remaining samples to
         * the encoder.
         * (The loop body is the single if-statement below — no braces.) */
        while (av_audio_fifo_size(fifo) >= output_frame_size ||
               (finished != 0 && av_audio_fifo_size(fifo) > 0))
            /* Take one frame worth of audio samples from the FIFO buffer,
             * encode it and write it to the output file. */
            if (load_encode_and_write(fifo, output_format_context,
                                      output_codec_context) < 0)
                goto cleanup;

        /* If we are at the end of the input file and have encoded
         * all remaining samples, we can exit this loop and finish. */
        if (finished != 0) {
            int data_written;
            /* Flush the encoder as it may have delayed frames. */
            do {
                data_written = 0;
                if (encode_audio_frame(null, output_format_context,
                                       output_codec_context, &data_written) < 0)
                    goto cleanup;
            } while (data_written != 0);
            break;
        }
    }

    /* Write the trailer of the output file container. */
    if (write_output_file_trailer(output_format_context) < 0)
        goto cleanup;
    ret = true;

cleanup:
    /* Teardown in reverse order of construction; each call is guarded or
     * safe on null per the FFmpeg API. */
    if (fifo != null)
        av_audio_fifo_free(fifo);
    swr_free(&resample_context);
    if (output_codec_context != null)
        avcodec_free_context(&output_codec_context);
    if (output_format_context != null) {
        avio_closep(&output_format_context->pb);
        avformat_free_context(output_format_context);
    }
    if (input_codec_context != null)
        avcodec_free_context(&input_codec_context);
    if (input_format_context != null)
        avformat_close_input(&input_format_context);
    return ret;
}
/// Translate a libav error code into a human-readable message
/// via av_strerror, decoding the NUL-terminated buffer as ANSI text.
string LibAVErrorToString(int error) {
    const int bufferSize = 1024;
    byte* buffer = stackalloc byte[bufferSize];
    ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
    return Marshal.PtrToStringAnsi((IntPtr)buffer);
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using global::System;
using global::System.IO;
using global::System.Reflection;
using global::System.Diagnostics;
using global::System.Collections.Generic;
using global::System.Threading;
using global::System.Reflection.Runtime.General;
using global::Internal.Reflection.Core;
using global::Internal.Runtime.Augments;
using global::Internal.Runtime.TypeLoader;
using global::Internal.Metadata.NativeFormat;
namespace Internal.Reflection.Execution
{
//=============================================================================================================================
// The assembly resolution policy for Project N's emulation of "classic reflection."
//
// The policy is very simple: the only assemblies that can be "loaded" are those that are statically linked into the running
// native process. There is no support for probing for assemblies in directories, user-supplied files, GACs, NICs or any
// other repository.
//=============================================================================================================================
public sealed partial class AssemblyBinderImplementation : AssemblyBinder
{
/// <summary>
/// Dictionary key pairing an assembly's display-name string with its parsed
/// AssemblyName. Equality and hashing are based solely on the display-name
/// string; the parsed name rides along for convenient retrieval.
/// </summary>
private sealed class AssemblyNameKey : IEquatable<AssemblyNameKey>
{
    private readonly string _assemblyNameAsString;
    private readonly AssemblyName _assemblyName;

    public AssemblyNameKey(string assemblyNameString, AssemblyName assemblyName)
    {
        _assemblyNameAsString = assemblyNameString;
        _assemblyName = assemblyName;
    }

    /// <summary>The parsed assembly name this key was built from.</summary>
    public AssemblyName AssemblyName
    {
        get { return _assemblyName; }
    }

    public override bool Equals(object other)
    {
        AssemblyNameKey otherKey = other as AssemblyNameKey;
        return otherKey != null && Equals(otherKey);
    }

    public bool Equals(AssemblyNameKey other)
    {
        // String comparison only — the AssemblyName member does not participate.
        return _assemblyNameAsString.Equals(other._assemblyNameAsString);
    }

    public override int GetHashCode()
    {
        return _assemblyNameAsString.GetHashCode();
    }
}
// Start with an empty scope-group table; RegisterModule repopulates it as
// native modules are registered with the runtime.
private AssemblyBinderImplementation()
{
    _scopeGroups = new KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup>[0];
    ModuleList.AddModuleRegistrationCallback(RegisterModule);
}

/// <summary>Singleton instance (created eagerly at type initialization).</summary>
public static AssemblyBinderImplementation Instance { get; } = new AssemblyBinderImplementation();

// Implemented only when the ECMA metadata binder is linked into the app;
// otherwise these partial methods compile away and their ref outputs stay unset.
partial void BindEcmaByteArray(byte[] rawAssembly, byte[] rawSymbolStore, ref AssemblyBindResult bindResult, ref Exception exception, ref bool? result);
partial void BindEcmaAssemblyName(AssemblyName refName, ref AssemblyBindResult result, ref Exception exception, ref bool resultBoolean);
/// <summary>
/// Bind a raw ECMA assembly image. Delegates to the optional ECMA binder;
/// if that binder is not linked in (result stays unset), binding byte arrays
/// is unsupported on this platform.
/// </summary>
public sealed override bool Bind(byte[] rawAssembly, byte[] rawSymbolStore, out AssemblyBindResult bindResult, out Exception exception)
{
    exception = null;
    bindResult = default(AssemblyBindResult);

    bool? result = null;
    BindEcmaByteArray(rawAssembly, rawSymbolStore, ref bindResult, ref exception, ref result);

    if (result.HasValue)
        return result.Value;

    // If the Ecma assembly binder isn't linked in, simply throw PlatformNotSupportedException
    throw new PlatformNotSupportedException();
}
/// <summary>
/// Bind by assembly name: scan the scope groups contributed by registered
/// native modules for a matching definition, then give the optional ECMA
/// binder a chance to set or override the result. Fails with
/// AmbiguousMatchException when more than one scope group matches, and with
/// a file-not-found IOException when none does.
/// </summary>
public sealed override bool Bind(AssemblyName refName, out AssemblyBindResult result, out Exception exception)
{
    bool foundMatch = false;
    result = default(AssemblyBindResult);
    exception = null;

    // At least one real-world app calls Type.GetType() for "char" using the assembly name "mscorlib". To accommodate this,
    // we will adopt the desktop CLR rule that anything named "mscorlib" automatically binds to the core assembly.
    bool useMscorlibNameCompareFunc = false;
    AssemblyName compareRefName = refName;
    if (refName.Name == "mscorlib")
    {
        useMscorlibNameCompareFunc = true;
        compareRefName = new AssemblyName(AssemblyBinder.DefaultAssemblyNameForGetType);
    }

    foreach (KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup> group in ScopeGroups)
    {
        bool nameMatches;
        if (useMscorlibNameCompareFunc)
        {
            nameMatches = MscorlibAssemblyNameMatches(compareRefName, group.Key.AssemblyName);
        }
        else
        {
            nameMatches = AssemblyNameMatches(refName, group.Key.AssemblyName);
        }

        if (nameMatches)
        {
            // A second matching group makes the bind ambiguous — fail rather than pick one.
            if (foundMatch)
            {
                exception = new AmbiguousMatchException();
                return false;
            }

            foundMatch = true;
            ScopeDefinitionGroup scopeDefinitionGroup = group.Value;

            result.Reader = scopeDefinitionGroup.CanonicalScope.Reader;
            result.ScopeDefinitionHandle = scopeDefinitionGroup.CanonicalScope.Handle;
            result.OverflowScopes = scopeDefinitionGroup.OverflowScopes;
        }
    }

    // Let the ECMA binder (when linked in) inspect or extend the result.
    BindEcmaAssemblyName(refName, ref result, ref exception, ref foundMatch);
    if (exception != null)
        return false;

    if (!foundMatch)
    {
        exception = new IOException(SR.Format(SR.FileNotFound_AssemblyNotFound, refName.FullName));
        return false;
    }

    return true;
}
//
// Name match routine for mscorlib references: only the simple name and the
// public key token participate; version and culture are deliberately ignored
// so that any "mscorlib" reference binds to the core assembly.
//
private bool MscorlibAssemblyNameMatches(AssemblyName coreAssemblyName, AssemblyName defName)
{
    //
    // The defName came from trusted metadata so it should be fully specified.
    //
    Debug.Assert(defName.Version != null);
    Debug.Assert(defName.CultureName != null);
    if (coreAssemblyName.Name != defName.Name)
        return false;
    byte[] defToken = defName.GetPublicKeyToken();
    return defToken != null && ArePktsEqual(defToken, coreAssemblyName.GetPublicKeyToken());
}
//
// Encapsulates the assembly ref->def matching policy.
//
// Policy: case-insensitive simple-name match; a ref version, when present, must
// not exceed the def version; a ref culture, when present, must match exactly;
// flags (other than PublicKey) must match; and a ref public key token, when
// present, must equal the def token byte-for-byte.
//
private bool AssemblyNameMatches(AssemblyName refName, AssemblyName defName)
{
    //
    // The defName came from trusted metadata so it should be fully specified.
    //
    Debug.Assert(defName.Version != null);
    Debug.Assert(defName.CultureName != null);
    if (!(refName.Name.Equals(defName.Name, StringComparison.OrdinalIgnoreCase)))
        return false;
    // A reference may bind to an equal or newer definition, never an older one.
    if (refName.Version != null)
    {
        int compareResult = refName.Version.CompareTo(defName.Version);
        if (compareResult > 0)
            return false;
    }
    if (refName.CultureName != null)
    {
        if (!(refName.CultureName.Equals(defName.CultureName)))
            return false;
    }
    // Bartok cannot handle const enums for now.
    /*const*/
    AssemblyNameFlags ignorableFlags = AssemblyNameFlags.PublicKey;
    if ((refName.Flags & ~ignorableFlags) != (defName.Flags & ~ignorableFlags))
    {
        return false;
    }
    // A ref with no public key token matches any def; a ref with one requires
    // the def to carry an identical token.
    byte[] refPublicKeyToken = refName.GetPublicKeyToken();
    if (refPublicKeyToken != null)
    {
        byte[] defPublicKeyToken = defName.GetPublicKeyToken();
        if (defPublicKeyToken == null)
            return false;
        if (!ArePktsEqual(refPublicKeyToken, defPublicKeyToken))
            return false;
    }
    return true;
}
// Re-exposes the protected base helper to other binder components in this assembly.
internal new AssemblyName CreateAssemblyNameFromMetadata(MetadataReader reader, ScopeDefinitionHandle scopeDefinitionHandle)
    => base.CreateAssemblyNameFromMetadata(reader, scopeDefinitionHandle);
/// <summary>
/// This callback gets called whenever a module gets registered. It adds the metadata reader
/// for the new module to the available scopes. The lock in ExecutionEnvironmentImplementation ensures
/// that this function may never be called concurrently so that we can assume that two threads
/// never update the reader and scope list at the same time.
/// </summary>
/// <param name="moduleInfo">Module to register</param>
private void RegisterModule(ModuleInfo moduleInfo)
{
    // Only native-format modules carry a metadata reader we can index; ignore others.
    NativeFormatModuleInfo nativeFormatModuleInfo = moduleInfo as NativeFormatModuleInfo;
    if (nativeFormatModuleInfo == null)
    {
        return;
    }
    // Copy-on-write: rebuild the whole table from the current snapshot plus the new
    // module's scopes, then publish the finished array with a single store to the
    // volatile _scopeGroups field so concurrent readers never see a partial update.
    LowLevelDictionaryWithIEnumerable<AssemblyNameKey, ScopeDefinitionGroup> scopeGroups = new LowLevelDictionaryWithIEnumerable<AssemblyNameKey, ScopeDefinitionGroup>();
    foreach (KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup> oldGroup in _scopeGroups)
    {
        scopeGroups.Add(oldGroup.Key, oldGroup.Value);
    }
    AddScopesFromReaderToGroups(scopeGroups, nativeFormatModuleInfo.MetadataReader);
    // Update reader and scope list
    KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup>[] scopeGroupsArray = new KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup>[scopeGroups.Count];
    int i = 0;
    foreach (KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup> data in scopeGroups)
    {
        scopeGroupsArray[i] = data;
        i++;
    }
    _scopeGroups = scopeGroupsArray;
}
// Snapshot accessor for the immutable scope-group table (single volatile read).
private KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup>[] ScopeGroups => _scopeGroups;
// Folds every scope definition in 'reader' into 'groups', keyed by assembly full name.
// The first scope seen for a given name becomes the group's canonical scope; any
// later duplicates are recorded as overflow scopes on the existing group.
private void AddScopesFromReaderToGroups(LowLevelDictionaryWithIEnumerable<AssemblyNameKey, ScopeDefinitionGroup> groups, MetadataReader reader)
{
    foreach (ScopeDefinitionHandle scopeHandle in reader.ScopeDefinitions)
    {
        AssemblyName definedName = this.CreateAssemblyNameFromMetadata(reader, scopeHandle);
        AssemblyNameKey key = new AssemblyNameKey(definedName.FullName, definedName);
        QScopeDefinition scope = new QScopeDefinition(reader, scopeHandle);
        ScopeDefinitionGroup existingGroup;
        if (!groups.TryGetValue(key, out existingGroup))
        {
            groups.Add(key, new ScopeDefinitionGroup(scope));
        }
        else
        {
            existingGroup.AddOverflowScope(scope);
        }
    }
}
// Element-wise comparison of two public key tokens; equal only when both
// arrays have the same length and identical bytes at every index.
private static bool ArePktsEqual(byte[] pkt1, byte[] pkt2)
{
    if (pkt1.Length != pkt2.Length)
        return false;
    int index = 0;
    while (index < pkt1.Length)
    {
        if (pkt1[index] != pkt2[index])
            return false;
        index++;
    }
    return true;
}
private volatile KeyValuePair<AssemblyNameKey, ScopeDefinitionGroup>[] _scopeGroups;
// Groups all metadata scopes that share one assembly identity. The first scope
// registered is the canonical one; later duplicates are kept as overflow scopes.
private class ScopeDefinitionGroup
{
    private readonly QScopeDefinition _canonicalScope;
    private ArrayBuilder<QScopeDefinition> _overflowScopes;

    public ScopeDefinitionGroup(QScopeDefinition canonicalScope)
    {
        _canonicalScope = canonicalScope;
    }

    // The scope that answers ordinary binds for this assembly name.
    public QScopeDefinition CanonicalScope => _canonicalScope;

    // Fresh array copy of the duplicate scopes so callers cannot mutate the builder.
    public IEnumerable<QScopeDefinition> OverflowScopes => _overflowScopes.ToArray();

    public void AddOverflowScope(QScopeDefinition overflowScope)
    {
        _overflowScopes.Add(overflowScope);
    }
}
}
}
| |
//------------------------------------------------------------------------------
// Microsoft Windows Client Platform
// Copyright (c) Microsoft Corporation, 2005
//
// File: BitmapMetadataEnumerator.cs
//------------------------------------------------------------------------------
// Allow suppression of certain presharp messages
#pragma warning disable 1634, 1691
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Security;
using System.Security.Permissions;
using System.Diagnostics;
using MS.Internal;
using MS.Internal.PresentationCore; // SecurityHelper
using MS.Win32.PresentationCore;
using SR=MS.Internal.PresentationCore.SR;
using SRID=MS.Internal.PresentationCore.SRID;
namespace System.Windows.Media.Imaging
{
/// <summary>
/// An enumerator that iterates over the query strings exposed by a bitmap
/// metadata query reader/writer (via the native WIC IEnumString enumerator).
/// </summary>
internal struct BitmapMetadataEnumerator : IEnumerator<String>, IEnumerator
{
    #region External interface
    #region IEnumerator interface
    #region Properties
    /// <summary>
    /// Gets the current element in the collection.
    /// </summary>
    /// <value>
    /// The current element in the collection.
    /// </value>
    /// <SecurityNote>
    /// Critical - Accesses unmanaged code
    /// TreatAsSafe - inputs are verified or safe
    /// </SecurityNote>
    object IEnumerator.Current
    {
        [SecurityCritical, SecurityTreatAsSafe]
        get
        {
            // Delegates to the strongly-typed property, which throws when the
            // enumerator has not started or has already passed the end.
            return Current;
        }
    }
    #endregion // Properties
    #region Methods
    /// <summary>
    /// Advances the enumerator to the next element of the collection.
    /// </summary>
    /// <returns>
    /// True if the enumerator was successfully advanced to the next element;
    /// false if the enumerator has passed the end of the collection.
    /// </returns>
    /// <SecurityNote>
    /// Critical - Accesses unmanaged code
    /// TreatAsSafe - inputs are verified or safe
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    public bool MoveNext()
    {
        // Started with a null current string means we already hit the end;
        // stay there instead of calling into the native enumerator again.
        if (_fStarted && _current == null)
        {
            return false;
        }
        _fStarted = true;
        IntPtr ppStr = IntPtr.Zero;
        Int32 celtFetched = 0;
        try
        {
            // Fetch exactly one string from the native IEnumString.
            int hr = UnsafeNativeMethods.EnumString.Next(
                _enumeratorHandle,
                1,
                ref ppStr,
                ref celtFetched);
            // A codec error is treated as end-of-enumeration rather than a failure.
            if (HRESULT.IsWindowsCodecError(hr))
            {
                _current = null;
                return false;
            }
            HRESULT.Check(hr);
            // celtFetched == 0 means the native enumerator is exhausted.
            if (celtFetched == 0)
            {
                _current = null;
                return false;
            }
            else
            {
                _current = Marshal.PtrToStringUni(ppStr);
            }
        }
        finally
        {
            // The native side allocated the string with CoTaskMemAlloc; always free it.
            if (ppStr != IntPtr.Zero)
            {
                Marshal.FreeCoTaskMem(ppStr);
                ppStr = IntPtr.Zero;
            }
        }
        return true;
    }
    /// <summary>
    /// Sets the enumerator to its initial position, which is before the first element
    /// in the collection.
    /// </summary>
    /// <SecurityNote>
    /// Critical - Accesses unmanaged code
    /// TreatAsSafe - inputs are verified or safe
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    public void Reset()
    {
        HRESULT.Check(UnsafeNativeMethods.EnumString.Reset(_enumeratorHandle));
        _current = null;
        _fStarted = false;
    }
    #endregion // Methods
    #endregion // IEnumerator interface
    #region Properties
    /// <summary>
    /// The current query string referenced by this enumerator.
    /// Throws if enumeration has not started or has already reached the end.
    /// </summary>
    public String Current
    {
        get
        {
            if (_current == null)
            {
                if (!_fStarted)
                {
                    #pragma warning suppress 56503 // Suppress presharp warning: Follows a pattern similar to Nullable.
                    throw new InvalidOperationException(SR.Get(SRID.Enumerator_NotStarted));
                }
                else
                {
                    #pragma warning suppress 56503 // Suppress presharp warning: Follows a pattern similar to Nullable.
                    throw new InvalidOperationException(SR.Get(SRID.Enumerator_ReachedEnd));
                }
            }
            return _current;
        }
    }
    /// <summary>
    /// No-op dispose; there is no per-enumerator managed state to release here.
    /// </summary>
    void IDisposable.Dispose()
    {
        // Do nothing - Required by the IEnumerable contract.
    }
    #endregion // Properties
    #endregion // External interface
    #region Internal implementation
    #region Construction
    /// <summary>
    /// Creates an enumerator iterates over the children of the specified container.
    /// </summary>
    /// <param name="metadataHandle">
    /// Handle to a metadata query reader/writer
    /// </param>
    /// <SecurityNote>
    /// Critical - Accesses unmanaged code
    /// TreatAsSafe - inputs are verified or safe
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    internal BitmapMetadataEnumerator(SafeMILHandle metadataHandle)
    {
        Debug.Assert(metadataHandle != null && !metadataHandle.IsInvalid);
        HRESULT.Check(UnsafeNativeMethods.WICMetadataQueryReader.GetEnumerator(
            metadataHandle,
            out _enumeratorHandle));
        _current = null;
        _fStarted = false;
    }
    #endregion // Construction
    #region Methods
    #endregion // Methods
    #region Data
    /// <SecurityNote>
    /// Critical - pointer to an unmanaged object that methods are called on.
    /// </SecurityNote>
    [SecurityCritical]
    private SafeMILHandle _enumeratorHandle;
    // Most recently fetched string; null when unstarted or past the end.
    private String _current;
    // True once MoveNext has been called at least since construction/Reset.
    private bool _fStarted;
    #endregion // Data
    #endregion // Internal implementation
}
}
| |
/*
* Copyright (c) 2013 Mario Freitas (imkira@gmail.com)
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
namespace URLClient
{
/// <summary>
/// Base class for handlers that configure an HTTP connection before it starts and
/// translate native connection state into <c>HTTPResponse</c> objects. Concrete
/// subclasses implement <see cref="UpdateContentData"/> to consume body data.
/// </summary>
public abstract class HTTPResponseHandler : IDisposable
{
    /// <summary>Cache policy for response</summary>
    public CachePolicy cachePolicy;
    /// <summary>Acceptable status code ranges.</summary>
    public List<HTTPStatusCodeRange> acceptableStatusCodeRanges;
    /// <summary>Whether redirects should be followed.</summary>
    public bool allowFollowRedirects;
    /// <summary>Max redirect counts to be followed.</summary>
    public int maxRedirectCount;
    /// <summary>Whether invalid SSL certificates are allowed.</summary>
    public bool allowInvalidSSLCertificates;
    protected Error _error;
    /// <summary>Error (null if none).</summary>
    public Error Error
    {
        get
        {
            return _error;
        }
    }
    /// <summary>
    /// Creates a handler with the protocol cache policy, invalid SSL certificates
    /// disallowed, and redirects followed without an explicit limit.
    /// </summary>
    public HTTPResponseHandler()
    {
        cachePolicy = CachePolicy.UseProtocolCachePolicy;
        allowInvalidSSLCertificates = false;
        allowFollowRedirects = true;
        // Negative means "no explicit limit" (see SetMaxRedirectCount).
        maxRedirectCount = -1;
        acceptableStatusCodeRanges = new List<HTTPStatusCodeRange>();
        _error = null;
    }
    #region Acceptable HTTP Status Codes Control
    /// <summary>Marks a single status code as acceptable. Returns this handler for chaining.</summary>
    public HTTPResponseHandler AddAcceptableStatusCode(long singleCode)
    {
        HTTPStatusCodeRange range = new HTTPStatusCodeRange(singleCode);
        return AddAcceptableStatusCodeRange(range);
    }
    /// <summary>Marks a range of status codes as acceptable. Returns this handler for chaining.</summary>
    public HTTPResponseHandler AddAcceptableStatusCodeRange(long fromCode,
        long toCode)
    {
        HTTPStatusCodeRange range = new HTTPStatusCodeRange(fromCode, toCode);
        return AddAcceptableStatusCodeRange(range);
    }
    /// <summary>Adds an acceptable status code range, creating the list if it is null.</summary>
    public HTTPResponseHandler AddAcceptableStatusCodeRange(
        HTTPStatusCodeRange range)
    {
        if (acceptableStatusCodeRanges == null)
        {
            acceptableStatusCodeRanges = new List<HTTPStatusCodeRange>();
        }
        acceptableStatusCodeRanges.Add(range);
        return this;
    }
    #endregion
    #region Cache Policy
    /// <summary>Sets the cache policy. Returns this handler for chaining.</summary>
    public HTTPResponseHandler SetCachePolicy(CachePolicy policy)
    {
        cachePolicy = policy;
        return this;
    }
    #endregion
    #region SSL Validity Control
    /// <summary>Allows or forbids invalid SSL certificates. Returns this handler for chaining.</summary>
    public HTTPResponseHandler SetAllowInvalidSSLCertificates(bool allow)
    {
        allowInvalidSSLCertificates = allow;
        return this;
    }
    #endregion
    #region Redirect Control
    /// <summary>Enables or disables following redirects. Returns this handler for chaining.</summary>
    public HTTPResponseHandler SetAllowFollowRedirects(bool allow)
    {
        allowFollowRedirects = allow;
        return this;
    }
    /// <summary>
    /// Sets the maximum number of redirects to follow; a non-negative value also
    /// turns redirect-following on. Returns this handler for chaining.
    /// </summary>
    public HTTPResponseHandler SetMaxRedirectCount(int maxCount)
    {
        maxRedirectCount = maxCount;
        if (maxRedirectCount >= 0)
        {
            allowFollowRedirects = true;
        }
        return this;
    }
    #endregion
    /// <summary>Called before the connection starts; return false to abort. Default allows the start.</summary>
    public virtual bool OnWillStart(HTTPClient client)
    {
        return true;
    }
    /// <summary>Pushes this handler's configuration down to the native connection before the request is sent.</summary>
    public virtual void OnWillSendRequest(HTTPClient client)
    {
        SetAllowFollowRedirects(client);
        SetAcceptableStatusCodes(client);
        SetAllowInvalidSSLCertificates(client);
    }
    /// <summary>Applies the redirect settings to the native connection.</summary>
    protected void SetAllowFollowRedirects(HTTPClient client)
    {
        uint connectionID = client.ConnectionID;
        Bindings._URLClientSetAllowFollowRedirects(
            connectionID, allowFollowRedirects, maxRedirectCount);
    }
    /// <summary>Registers every acceptable status code range with the native connection.</summary>
    protected void SetAcceptableStatusCodes(HTTPClient client)
    {
        uint connectionID = client.ConnectionID;
        if (acceptableStatusCodeRanges != null)
        {
            foreach (HTTPStatusCodeRange range in acceptableStatusCodeRanges)
            {
                Bindings._URLClientAddAcceptableResponseStatusCodeRange(
                    connectionID, range.fromCode, range.toCode);
            }
        }
    }
    /// <summary>Applies the SSL certificate validation setting to the native connection.</summary>
    protected void SetAllowInvalidSSLCertificates(HTTPClient client)
    {
        uint connectionID = client.ConnectionID;
        Bindings._URLClientSetAllowInvalidSSLCertificate(
            connectionID, allowInvalidSSLCertificates);
    }
    /// <summary>
    /// Called as the connection progresses. Once data is being received, builds the
    /// response (on first call) and pumps content data, rebuilding the response
    /// whenever the native side reports its metadata became dirty.
    /// </summary>
    public virtual HTTPResponse OnDidUpdate(HTTPClient client,
        HTTPResponse response)
    {
        if (client.State < ConnectionState.ReceivingData)
        {
            return response;
        }
        if (response == null)
        {
            response = CreateResponse(client);
        }
        while (true)
        {
            UpdateContentData(client, response);
            if (CheckNeedsResponseReset(client) == false)
            {
                break;
            }
            response = ResetResponse(client, response);
        }
        return response;
    }
    /// <summary>Builds an <c>HTTPResponse</c> snapshot from the native connection state.</summary>
    protected virtual HTTPResponse CreateResponse(HTTPClient client)
    {
        uint connectionID = client.ConnectionID;
        HTTPResponse response = new HTTPResponse();
        response.statusCode =
            Bindings._URLClientGetResponseStatusCode(connectionID);
        response.resumedContentLength =
            Bindings._URLClientGetResponseContentLengthResumed(connectionID);
        response.expectedReceiveContentLength =
            Bindings._URLClientGetResponseContentExpectedLength(connectionID);
        response.redirectCount =
            Bindings._URLClientGetResponseRedirectCount(connectionID);
        response.headers = CreateHeaders(client);
        return response;
    }
    /// <summary>
    /// Reads all response headers from the native connection into a header list,
    /// stopping at the first index that yields no name or no value.
    /// </summary>
    protected HTTPHeaderList CreateHeaders(HTTPClient client)
    {
        uint connectionID = client.ConnectionID;
        HTTPHeaderList headers = new HTTPHeaderList();
        string name, value;
        // read headers
        for (uint i = 0; i < (uint)int.MaxValue; ++i)
        {
            name = Bindings._URLClientGetResponseHeaderName(connectionID, i);
            if (name == null)
            {
                break;
            }
            value = Bindings._URLClientGetResponseHeaderValue(connectionID, i);
            if (value == null)
            {
                break;
            }
            headers[name] = value;
        }
        return headers;
    }
    /// <summary>Checks (and clears) the native dirty flag signalling that the response metadata changed.</summary>
    protected virtual bool CheckNeedsResponseReset(HTTPClient client)
    {
        uint connectionID = client.ConnectionID;
        bool isDirty =
            Bindings._URLClientCheckAndResetResponseDirtyFlag(connectionID);
        return isDirty;
    }
    /// <summary>Discards the stale response and rebuilds it from the current native state.</summary>
    protected virtual HTTPResponse ResetResponse(HTTPClient client,
        HTTPResponse response)
    {
        response = CreateResponse(client);
        return response;
    }
    /// <summary>Consumes newly received content data; implemented by concrete handlers.</summary>
    protected abstract void UpdateContentData(HTTPClient client,
        HTTPResponse response);
    #region IDisposable
    // Guards against double cleanup in Dispose(bool).
    private bool _disposed = false;
    ~HTTPResponseHandler()
    {
        Dispose(false);
    }
    /// <summary>Releases resources and (via Dispose(true)) suppresses finalization.</summary>
    public void Dispose()
    {
        Dispose(true);
    }
    /// <summary>Override to release resources; 'disposing' is false when called from the finalizer.</summary>
    protected virtual void ReleaseResources(bool disposing)
    {
    }
    /// <summary>Standard dispose pattern: runs cleanup exactly once.</summary>
    protected virtual void Dispose(bool disposing)
    {
        if (_disposed == false)
        {
            _disposed = true;
            ReleaseResources(disposing);
            if (disposing)
            {
                GC.SuppressFinalize(this);
            }
        }
    }
    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using GraphQL.Conversion;
using GraphQL.Introspection;
namespace GraphQL.Types
{
/// <summary>
/// Thread-safe registry of all <see cref="IGraphType"/> instances known to a schema,
/// keyed by trimmed GraphQL type name. Seeds itself with the built-in scalar and
/// introspection types on construction.
/// </summary>
public class GraphTypesLookup
{
    private readonly Dictionary<string, IGraphType> _types = new Dictionary<string, IGraphType>();
    // Guards _types: lookups may run concurrently with schema initialization.
    private readonly object _lock = new object();
    public GraphTypesLookup()
    {
        // Built-in scalars.
        AddType<StringGraphType>();
        AddType<BooleanGraphType>();
        AddType<FloatGraphType>();
        AddType<IntGraphType>();
        AddType<IdGraphType>();
        AddType<DateGraphType>();
        AddType<DecimalGraphType>();
        // Introspection types.
        AddType<__Schema>();
        AddType<__Type>();
        AddType<__Directive>();
        AddType<__Field>();
        AddType<__EnumValue>();
        AddType<__TypeKind>();
        AddType<__InputValue>();
        AddType<__TypeKind>();
    }
    /// <summary>
    /// Builds a lookup from the supplied root types and directives, resolving and
    /// registering every reachable type (including directive argument types and the
    /// introspection meta-fields).
    /// </summary>
    public static GraphTypesLookup Create(
        IEnumerable<IGraphType> types,
        IEnumerable<DirectiveGraphType> directives,
        Func<Type, IGraphType> resolveType,
        IFieldNameConverter fieldNameConverter)
    {
        var lookup = new GraphTypesLookup();
        lookup.FieldNameConverter = fieldNameConverter ?? new CamelCaseFieldNameConverter();
        var ctx = new TypeCollectionContext(resolveType, (name, graphType, context) =>
        {
            if (lookup[name] == null)
            {
                lookup.AddType(graphType, context);
            }
        });
        types.Apply(type =>
        {
            lookup.AddType(type, ctx);
        });
        var introspectionType = typeof(SchemaIntrospection);
        lookup.HandleField(introspectionType, SchemaIntrospection.SchemaMeta, ctx);
        lookup.HandleField(introspectionType, SchemaIntrospection.TypeMeta, ctx);
        lookup.HandleField(introspectionType, SchemaIntrospection.TypeNameMeta, ctx);
        directives.Apply(directive =>
        {
            directive.Arguments?.Apply(arg =>
            {
                if (arg.ResolvedType != null)
                {
                    return;
                }
                arg.ResolvedType = lookup.BuildNamedType(arg.Type, ctx.ResolveType);
            });
        });
        return lookup;
    }
    /// <summary>Converter applied to field names as they are registered.</summary>
    public IFieldNameConverter FieldNameConverter { get; set; } = new CamelCaseFieldNameConverter();
    /// <summary>Removes all registered types.</summary>
    public void Clear()
    {
        lock (_lock)
        {
            _types.Clear();
        }
    }
    /// <summary>Returns a snapshot of all registered types.</summary>
    public IEnumerable<IGraphType> All()
    {
        lock (_lock)
        {
            return _types.Values.ToList();
        }
    }
    /// <summary>
    /// Gets or sets a type by GraphQL name (trimmed of non-null/list decorations).
    /// The getter returns null when the name is unknown.
    /// </summary>
    public IGraphType this[string typeName]
    {
        get
        {
            if (string.IsNullOrWhiteSpace(typeName))
            {
                throw new ArgumentOutOfRangeException(nameof(typeName), "A type name is required to lookup.");
            }
            IGraphType type;
            var name = typeName.TrimGraphQLTypes();
            lock (_lock)
            {
                _types.TryGetValue(name, out type);
            }
            return type;
        }
        set
        {
            lock (_lock)
            {
                _types[typeName.TrimGraphQLTypes()] = value;
            }
        }
    }
    /// <summary>Finds the registered instance whose CLR type matches exactly, or null.</summary>
    public IGraphType this[Type type]
    {
        get
        {
            lock (_lock)
            {
                var result = _types.FirstOrDefault(x => x.Value.GetType() == type);
                return result.Value;
            }
        }
    }
    /// <summary>Registers a type by instantiating it (and everything it references) via Activator.</summary>
    public void AddType<TType>()
        where TType : IGraphType, new()
    {
        var context = new TypeCollectionContext(
            type =>
            {
                return BuildNamedType(type, t => (IGraphType) Activator.CreateInstance(t));
            },
            (name, type, _) =>
            {
                var trimmed = name.TrimGraphQLTypes();
                lock (_lock)
                {
                    _types[trimmed] = type;
                }
                _?.AddType(trimmed, type, null);
            });
        AddType<TType>(context);
    }
    // Resolves a (possibly wrapped) type, reusing an already-registered instance
    // of the named type when one exists.
    private IGraphType BuildNamedType(Type type, Func<Type, IGraphType> resolver)
    {
        return type.BuildNamedType(t =>
        {
            var exists = this[t];
            if (exists != null)
            {
                return exists;
            }
            return resolver(t);
        });
    }
    /// <summary>Registers the named type behind <typeparamref name="TType"/> using the given context.</summary>
    public void AddType<TType>(TypeCollectionContext context)
        where TType : IGraphType
    {
        var type = typeof(TType).GetNamedType();
        var instance = context.ResolveType(type);
        AddType(instance, context);
    }
    /// <summary>
    /// Registers a root type instance, then walks its fields, interfaces and union
    /// members so every reachable type is registered too. Wrapper types
    /// (non-null/list) are rejected: only root (named) types may be added.
    /// </summary>
    public void AddType(IGraphType type, TypeCollectionContext context)
    {
        if (type == null)
        {
            return;
        }
        if (type is NonNullGraphType || type is ListGraphType)
        {
            throw new ExecutionError("Only add root types.");
        }
        var name = type.CollectTypes(context).TrimGraphQLTypes();
        lock (_lock)
        {
            _types[name] = type;
        }
        if (type is IComplexGraphType)
        {
            var complexType = type as IComplexGraphType;
            complexType.Fields.Apply(field =>
            {
                HandleField(type.GetType(), field, context);
            });
        }
        if (type is IObjectGraphType)
        {
            var obj = (IObjectGraphType) type;
            obj.Interfaces.Apply(objectInterface =>
            {
                AddTypeIfNotRegistered(objectInterface, context);
                var interfaceInstance = this[objectInterface] as IInterfaceGraphType;
                if (interfaceInstance != null)
                {
                    obj.AddResolvedInterface(interfaceInstance);
                    interfaceInstance.AddPossibleType(obj);
                    // Without either hook there is no way to pick the concrete type at runtime.
                    if (interfaceInstance.ResolveType == null && obj.IsTypeOf == null)
                    {
                        throw new ExecutionError((
                            "Interface type {0} does not provide a \"resolveType\" function " +
                            "and possible Type \"{1}\" does not provide a \"isTypeOf\" function. " +
                            "There is no way to resolve this possible type during execution.")
                            .ToFormat(interfaceInstance, obj));
                    }
                }
            });
        }
        if (type is UnionGraphType)
        {
            var union = (UnionGraphType) type;
            if (!union.Types.Any() && !union.PossibleTypes.Any())
            {
                throw new ExecutionError("Must provide types for Union {0}.".ToFormat(union));
            }
            union.PossibleTypes.Apply(unionedType =>
            {
                AddTypeIfNotRegistered(unionedType, context);
                if (union.ResolveType == null && unionedType.IsTypeOf == null)
                {
                    // Fixed: the first fragment previously ended "...function" with no
                    // trailing space, producing "functionand possible" in the message.
                    throw new ExecutionError((
                        "Union type {0} does not provide a \"resolveType\" function " +
                        "and possible Type \"{1}\" does not provide a \"isTypeOf\" function. " +
                        "There is no way to resolve this possible type during execution.")
                        .ToFormat(union, unionedType));
                }
            });
            union.Types.Apply(unionedType =>
            {
                AddTypeIfNotRegistered(unionedType, context);
                var objType = this[unionedType] as IObjectGraphType;
                if (union.ResolveType == null && objType != null && objType.IsTypeOf == null)
                {
                    // Fixed: same missing-space defect as above.
                    throw new ExecutionError((
                        "Union type {0} does not provide a \"resolveType\" function " +
                        "and possible Type \"{1}\" does not provide a \"isTypeOf\" function. " +
                        "There is no way to resolve this possible type during execution.")
                        .ToFormat(union, objType));
                }
                // NOTE(review): objType can be null here when the registered type is not
                // an object type; presumably AddPossibleType tolerates/validates that — confirm.
                union.AddPossibleType(objType);
            });
        }
    }
    // Normalizes the field name, then resolves and registers the field's type and
    // every argument type that is not already registered.
    private void HandleField(Type parentType, FieldType field, TypeCollectionContext context)
    {
        field.Name = FieldNameConverter.NameFor(field.Name, parentType);
        if (field.ResolvedType == null)
        {
            AddTypeIfNotRegistered(field.Type, context);
            field.ResolvedType = BuildNamedType(field.Type, context.ResolveType);
        }
        else
        {
            AddTypeIfNotRegistered(field.ResolvedType, context);
        }
        field.Arguments?.Apply(arg =>
        {
            if (arg.ResolvedType != null)
            {
                AddTypeIfNotRegistered(arg.ResolvedType, context);
                return;
            }
            AddTypeIfNotRegistered(arg.Type, context);
            arg.ResolvedType = BuildNamedType(arg.Type, context.ResolveType);
        });
    }
    // Registers the named CLR type via the context resolver unless already present.
    private void AddTypeIfNotRegistered(Type type, TypeCollectionContext context)
    {
        var namedType = type.GetNamedType();
        var foundType = this[namedType];
        if (foundType == null)
        {
            AddType(context.ResolveType(namedType), context);
        }
    }
    // Registers the named graph type instance unless one with its name is already present.
    private void AddTypeIfNotRegistered(IGraphType type, TypeCollectionContext context)
    {
        var namedType = type.GetNamedType();
        var foundType = this[namedType.Name];
        if(foundType == null)
        {
            AddType(namedType, context);
        }
    }
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
namespace HtcSharp.HttpModule.Shared.PropertyHelper {
// SourceTools-Start
// Remote-File C:\ASP\src\Shared\PropertyHelper\PropertyHelper.cs
// Start-At-Remote-Line 12
// SourceTools-End
internal class PropertyHelper {
// Delegate type for a by-ref property getter
private delegate TValue ByRefFunc<TDeclaringType, TValue>(ref TDeclaringType arg);
private static readonly MethodInfo CallPropertyGetterOpenGenericMethod =
typeof(PropertyHelper).GetTypeInfo().GetDeclaredMethod(nameof(CallPropertyGetter));
private static readonly MethodInfo CallPropertyGetterByReferenceOpenGenericMethod =
typeof(PropertyHelper).GetTypeInfo().GetDeclaredMethod(nameof(CallPropertyGetterByReference));
private static readonly MethodInfo CallNullSafePropertyGetterOpenGenericMethod =
typeof(PropertyHelper).GetTypeInfo().GetDeclaredMethod(nameof(CallNullSafePropertyGetter));
private static readonly MethodInfo CallNullSafePropertyGetterByReferenceOpenGenericMethod =
typeof(PropertyHelper).GetTypeInfo().GetDeclaredMethod(nameof(CallNullSafePropertyGetterByReference));
private static readonly MethodInfo CallPropertySetterOpenGenericMethod =
typeof(PropertyHelper).GetTypeInfo().GetDeclaredMethod(nameof(CallPropertySetter));
// Using an array rather than IEnumerable, as target will be called on the hot path numerous times.
private static readonly ConcurrentDictionary<Type, PropertyHelper[]> PropertiesCache =
new ConcurrentDictionary<Type, PropertyHelper[]>();
private static readonly ConcurrentDictionary<Type, PropertyHelper[]> VisiblePropertiesCache =
new ConcurrentDictionary<Type, PropertyHelper[]>();
// We need to be able to check if a type is a 'ref struct' - but we need to be able to compile
// for platforms where the attribute is not defined, like net46. So we can fetch the attribute
// by late binding. If the attribute isn't defined, then we assume we won't encounter any
// 'ref struct' types.
private static readonly Type IsByRefLikeAttribute = Type.GetType("System.Runtime.CompilerServices.IsByRefLikeAttribute", throwOnError: false);
private Action<object, object> _valueSetter;
private Func<object, object> _valueGetter;
/// <summary>
/// Initializes a fast <see cref="PropertyHelper"/>.
/// This constructor does not cache the helper. For caching, use <see cref="GetProperties(Type)"/>.
/// </summary>
/// <param name="property">The property to wrap; must not be null.</param>
public PropertyHelper(PropertyInfo property) {
    if (property == null) {
        throw new ArgumentNullException(nameof(property));
    }
    Property = property;
    Name = property.Name;
}
/// <summary>
/// Gets the backing <see cref="PropertyInfo"/>.
/// </summary>
public PropertyInfo Property { get; }
/// <summary>
/// Gets (or sets in derived types) the property name.
/// </summary>
/// <remarks>Initialized to <see cref="PropertyInfo.Name"/> by the constructor; derived helpers may overwrite it.</remarks>
public virtual string Name { get; protected set; }
/// <summary>
/// Gets the property value getter, compiling and caching it on first access.
/// </summary>
public Func<object, object> ValueGetter {
    get { return _valueGetter ?? (_valueGetter = MakeFastPropertyGetter(Property)); }
}
/// <summary>
/// Gets the property value setter, compiling and caching it on first access.
/// </summary>
public Action<object, object> ValueSetter {
    get { return _valueSetter ?? (_valueSetter = MakeFastPropertySetter(Property)); }
}
/// <summary>
/// Returns the property value for the specified <paramref name="instance"/>.
/// </summary>
/// <param name="instance">The object whose property value will be returned.</param>
/// <returns>The property value.</returns>
public object GetValue(object instance) => ValueGetter(instance);
/// <summary>
/// Sets the property value for the specified <paramref name="instance" />.
/// </summary>
/// <param name="instance">The object whose property value will be set.</param>
/// <param name="value">The property value.</param>
public void SetValue(object instance, object value) => ValueSetter(instance, value);
/// <summary>
/// Creates and caches fast property helpers that expose getters for every public get
/// property on the underlying type.
/// </summary>
/// <param name="typeInfo">The type info to extract property accessors for.</param>
/// <returns>A cached array of all public properties of the specified type.</returns>
public static PropertyHelper[] GetProperties(TypeInfo typeInfo) => GetProperties(typeInfo.AsType());
/// <summary>
/// Creates and caches fast property helpers that expose getters for every public get
/// property on the specified type.
/// </summary>
/// <param name="type">The type to extract property accessors for.</param>
/// <returns>A cached array of all public properties of the specified type.</returns>
public static PropertyHelper[] GetProperties(Type type) =>
    GetProperties(type, p => CreateInstance(p), PropertiesCache);
/// <summary>
/// <para>
/// Creates and caches fast property helpers that expose getters for every non-hidden
/// get property on the specified type.
/// </para>
/// <para>
/// <see cref="M:GetVisibleProperties"/> excludes properties defined on base types that
/// have been hidden by definitions using the <c>new</c> keyword.
/// </para>
/// </summary>
/// <param name="typeInfo">The type info to extract property accessors for.</param>
/// <returns>A cached array of all public properties of the specified type.</returns>
public static PropertyHelper[] GetVisibleProperties(TypeInfo typeInfo) =>
    GetVisibleProperties(typeInfo.AsType(), p => CreateInstance(p), PropertiesCache, VisiblePropertiesCache);
/// <summary>
/// <para>
/// Creates and caches fast property helpers that expose getters for every non-hidden
/// get property on the specified type.
/// </para>
/// <para>
/// <see cref="M:GetVisibleProperties"/> excludes properties defined on base types that
/// have been hidden by definitions using the <c>new</c> keyword.
/// </para>
/// </summary>
/// <param name="type">The type to extract property accessors for.</param>
/// <returns>A cached array of all public properties of the specified type.</returns>
public static PropertyHelper[] GetVisibleProperties(Type type) =>
    GetVisibleProperties(type, p => CreateInstance(p), PropertiesCache, VisiblePropertiesCache);
/// <summary>
/// Creates a single fast property getter. The result is not cached.
/// </summary>
/// <param name="propertyInfo">propertyInfo to extract the getter for.</param>
/// <returns>a fast getter.</returns>
/// <remarks>
/// This method is more memory efficient than a dynamically compiled lambda, and about the
/// same speed. Unlike <see cref="MakeNullSafeFastPropertyGetter"/>, the returned delegate
/// does not guard against a null target instance.
/// </remarks>
public static Func<object, object> MakeFastPropertyGetter(PropertyInfo propertyInfo) {
    Debug.Assert(propertyInfo != null);
    return MakeFastPropertyGetter(
        propertyInfo,
        CallPropertyGetterOpenGenericMethod,
        CallPropertyGetterByReferenceOpenGenericMethod);
}
/// <summary>
/// Creates a single fast property getter which is safe for a null input object. The result is not cached.
/// </summary>
/// <param name="propertyInfo">propertyInfo to extract the getter for.</param>
/// <returns>a fast getter.</returns>
/// <remarks>
/// This method is more memory efficient than a dynamically compiled lambda, and about the
/// same speed. Differs from <see cref="MakeFastPropertyGetter(PropertyInfo)"/> only in
/// routing through the null-safe wrapper methods.
/// </remarks>
public static Func<object, object> MakeNullSafeFastPropertyGetter(PropertyInfo propertyInfo) {
    Debug.Assert(propertyInfo != null);
    return MakeFastPropertyGetter(
        propertyInfo,
        CallNullSafePropertyGetterOpenGenericMethod,
        CallNullSafePropertyGetterByReferenceOpenGenericMethod);
}
// Core getter factory: validates the wrapper methods, then dispatches on whether the
// declaring type is a value type (which needs a by-ref delegate) or a reference type.
private static Func<object, object> MakeFastPropertyGetter(
    PropertyInfo propertyInfo,
    MethodInfo propertyGetterWrapperMethod,
    MethodInfo propertyGetterByRefWrapperMethod) {
    Debug.Assert(propertyInfo != null);
    // Must be a generic method with a Func<,> parameter
    Debug.Assert(propertyGetterWrapperMethod != null);
    Debug.Assert(propertyGetterWrapperMethod.IsGenericMethodDefinition);
    Debug.Assert(propertyGetterWrapperMethod.GetParameters().Length == 2);
    // Must be a generic method with a ByRefFunc<,> parameter
    Debug.Assert(propertyGetterByRefWrapperMethod != null);
    Debug.Assert(propertyGetterByRefWrapperMethod.IsGenericMethodDefinition);
    Debug.Assert(propertyGetterByRefWrapperMethod.GetParameters().Length == 2);
    // Only non-static, parameterless get accessors are supported.
    var getMethod = propertyInfo.GetMethod;
    Debug.Assert(getMethod != null);
    Debug.Assert(!getMethod.IsStatic);
    Debug.Assert(getMethod.GetParameters().Length == 0);
    // Instance methods in the CLR can be turned into static methods where the first parameter
    // is open over "target". This parameter is always passed by reference, so we have a code
    // path for value types and a code path for reference types.
    if (getMethod.DeclaringType.GetTypeInfo().IsValueType) {
        // Create a delegate (ref TDeclaringType) -> TValue
        return MakeFastPropertyGetter(
            typeof(ByRefFunc<,>),
            getMethod,
            propertyGetterByRefWrapperMethod);
    } else {
        // Create a delegate TDeclaringType -> TValue
        return MakeFastPropertyGetter(
            typeof(Func<,>),
            getMethod,
            propertyGetterWrapperMethod);
    }
}
// Closes the open delegate/wrapper pair over the getter's declaring and return
// types, producing the final Func<object, object> accessor.
private static Func<object, object> MakeFastPropertyGetter(
    Type openGenericDelegateType,
    MethodInfo propertyGetMethod,
    MethodInfo openGenericWrapperMethod) {
    var declaringType = propertyGetMethod.DeclaringType;
    var valueType = propertyGetMethod.ReturnType;

    // Open-instance delegate directly over the property getter.
    var getterDelegate = propertyGetMethod.CreateDelegate(
        openGenericDelegateType.MakeGenericType(declaringType, valueType));

    // Bind the wrapper to that delegate so callers only deal in object.
    var wrapper = openGenericWrapperMethod.MakeGenericMethod(declaringType, valueType);
    return (Func<object, object>)wrapper.CreateDelegate(typeof(Func<object, object>), getterDelegate);
}
/// <summary>
/// Builds an uncached fast setter delegate for <paramref name="propertyInfo"/>.
/// Only supported for properties declared on reference types.
/// </summary>
/// <param name="propertyInfo">The property whose setter should be wrapped.</param>
/// <returns>A delegate taking the target object and the new property value.</returns>
/// <remarks>
/// Built on open-instance delegates rather than compiled lambdas: comparable
/// speed with a lower memory footprint.
/// </remarks>
public static Action<object, object> MakeFastPropertySetter(PropertyInfo propertyInfo) {
    Debug.Assert(propertyInfo != null);
    Debug.Assert(!propertyInfo.DeclaringType.GetTypeInfo().IsValueType);

    var setMethod = propertyInfo.SetMethod;
    Debug.Assert(setMethod != null);
    Debug.Assert(!setMethod.IsStatic);
    Debug.Assert(setMethod.ReturnType == typeof(void));
    var parameters = setMethod.GetParameters();
    Debug.Assert(parameters.Length == 1);

    var declaringType = setMethod.DeclaringType;
    var valueType = parameters[0].ParameterType;

    // Open-instance delegate: (TDeclaringType target, TValue value) => target.Property = value.
    // No by-ref path is needed here because value-type declaring types are excluded above.
    var setterDelegate = setMethod.CreateDelegate(
        typeof(Action<,>).MakeGenericType(declaringType, valueType));

    // Bind the generic wrapper to the setter delegate so callers only deal in object.
    var wrapper = CallPropertySetterOpenGenericMethod.MakeGenericMethod(declaringType, valueType);
    return (Action<object, object>)wrapper.CreateDelegate(typeof(Action<object, object>), setterDelegate);
}
/// <summary>
/// Converts an object into a dictionary keyed by property name (case-insensitive),
/// using each public instance getter's value. If the object already implements
/// <see cref="IDictionary{String, Object}"/>, a shallow copy is returned instead.
/// </summary>
/// <remarks>
/// Property accessors are cached per type, so repeated calls with the same type
/// are cheap.
/// </remarks>
public static IDictionary<string, object> ObjectToDictionary(object value) {
    // Already a dictionary: just copy it with a case-insensitive comparer.
    if (value is IDictionary<string, object> existing) {
        return new Dictionary<string, object>(existing, StringComparer.OrdinalIgnoreCase);
    }

    var result = new Dictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    if (value != null) {
        foreach (var property in GetProperties(value.GetType())) {
            result[property.Name] = property.GetValue(value);
        }
    }

    return result;
}
// Default PropertyHelper factory; passed as a delegate when populating the caches.
private static PropertyHelper CreateInstance(PropertyInfo property) =>
    new PropertyHelper(property);
// Wrapper invoked via reflection: casts the boxed target and calls the
// open-instance getter delegate.
private static object CallPropertyGetter<TDeclaringType, TValue>(
    Func<TDeclaringType, TValue> getter,
    object target) =>
    getter((TDeclaringType)target);
// Wrapper invoked via reflection: unboxes the value-type target into a local so
// it can be passed by reference to the open-instance getter delegate.
private static object CallPropertyGetterByReference<TDeclaringType, TValue>(
    ByRefFunc<TDeclaringType, TValue> getter,
    object target) {
    var receiver = (TDeclaringType)target;
    return getter(ref receiver);
}
// Wrapper invoked via reflection: like CallPropertyGetter, but a null target
// yields null instead of an InvalidCastException/NullReferenceException.
private static object CallNullSafePropertyGetter<TDeclaringType, TValue>(
    Func<TDeclaringType, TValue> getter,
    object target) =>
    target == null ? null : (object)getter((TDeclaringType)target);
// Wrapper invoked via reflection: null-safe by-ref variant for properties
// declared on value types.
private static object CallNullSafePropertyGetterByReference<TDeclaringType, TValue>(
    ByRefFunc<TDeclaringType, TValue> getter,
    object target) {
    if (target == null) {
        return null;
    }

    var receiver = (TDeclaringType)target;
    return getter(ref receiver);
}
// Wrapper bound by MakeFastPropertySetter: casts the boxed target and value and
// invokes the open-instance setter delegate.
private static void CallPropertySetter<TDeclaringType, TValue>(
    Action<TDeclaringType, TValue> setter,
    object target,
    object value) =>
    setter((TDeclaringType)target, (TValue)value);
// Returns the properties of 'type' that are visible to callers, i.e. with base
// properties hidden by a 'new' redeclaration filtered out. Results are cached.
protected static PropertyHelper[] GetVisibleProperties(
    Type type,
    Func<PropertyInfo, PropertyHelper> createPropertyHelper,
    ConcurrentDictionary<Type, PropertyHelper[]> allPropertiesCache,
    ConcurrentDictionary<Type, PropertyHelper[]> visiblePropertiesCache) {
    if (visiblePropertiesCache.TryGetValue(type, out var cached)) {
        return cached;
    }

    var allProperties = GetProperties(type, createPropertyHelper, allPropertiesCache);

    // Fast path: a plain POCO declares every property itself, so nothing can be
    // hidden and no extra allocation is needed.
    var hasInheritedProperties = false;
    foreach (var helper in allProperties) {
        if (helper.Property.DeclaringType != type) {
            hasInheritedProperties = true;
            break;
        }
    }

    PropertyHelper[] visible;
    if (!hasInheritedProperties) {
        visible = allProperties;
    } else {
        // Some properties are inherited; drop any that a more-derived type
        // redeclares (hides via 'new').
        var filtered = new List<PropertyHelper>(allProperties.Length);
        foreach (var helper in allProperties) {
            var declaringType = helper.Property.DeclaringType;
            if (declaringType == type) {
                filtered.Add(helper);
                continue;
            }

            // Walk from 'type' toward the declaring type; any declaration of
            // the same name found on the way is a more proximal definition.
            var hidden = false;
            var current = type.GetTypeInfo();
            var declaring = declaringType.GetTypeInfo();
            while (current != null && current != declaring) {
                if (current.GetDeclaredProperty(helper.Name) != null) {
                    hidden = true;
                    break;
                }

                current = current.BaseType?.GetTypeInfo();
            }

            if (!hidden) {
                filtered.Add(helper);
            }
        }

        visible = filtered.ToArray();
    }

    visiblePropertiesCache.TryAdd(type, visible);
    return visible;
}
// Returns (and caches) a PropertyHelper for every interesting property of
// 'type'. Nullable<T> is unwrapped first so Value/HasValue never appear.
protected static PropertyHelper[] GetProperties(
    Type type,
    Func<PropertyInfo, PropertyHelper> createPropertyHelper,
    ConcurrentDictionary<Type, PropertyHelper[]> cache) {
    // Unwrap Nullable<T>; callers see the underlying type's properties.
    type = Nullable.GetUnderlyingType(type) ?? type;

    if (cache.TryGetValue(type, out var cachedHelpers)) {
        return cachedHelpers;
    }

    // The Where filter also keeps indexed properties out without calling
    // GetIndexParameters up front.
    IEnumerable<PropertyInfo> candidates = type.GetRuntimeProperties().Where(IsInterestingProperty);

    var typeInfo = type.GetTypeInfo();
    if (typeInfo.IsInterface) {
        // Reflection on an interface omits members inherited from base
        // interfaces, so pull those in explicitly.
        candidates = candidates.Concat(
            typeInfo.ImplementedInterfaces.SelectMany(
                interfaceType => interfaceType.GetRuntimeProperties().Where(IsInterestingProperty)));
    }

    var helpers = candidates.Select(createPropertyHelper).ToArray();
    cache.TryAdd(type, helpers);
    return helpers;
}
// A property is interesting when it has a public, non-static, parameterless
// getter and is not a ref struct. The checks are ordered to short-circuit as
// early as possible; in particular GetIndexParameters() is avoided because it
// copies the parameter array (startup-time cost).
private static bool IsInterestingProperty(PropertyInfo property) {
    var getMethod = property.GetMethod;
    return getMethod != null &&
        getMethod.IsPublic &&
        !getMethod.IsStatic &&
        // PropertyHelper can't work with ref structs.
        !IsRefStructProperty(property) &&
        // A getter with parameters is an indexer, which is not usable here.
        getMethod.GetParameters().Length == 0;
}
// Ref-struct properties can't be boxed and can't be used as generic type
// arguments, so PropertyHelper simply ignores them.
//
// see: https://github.com/aspnet/Mvc/issues/8545
private static bool IsRefStructProperty(PropertyInfo property) {
    var propertyType = property.PropertyType;
    return IsByRefLikeAttribute != null &&
        propertyType.IsValueType &&
        propertyType.IsDefined(IsByRefLikeAttribute);
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.ServiceAuth;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenMetaverse;
namespace OpenSim.Services.Connectors
{
public class UserAccountServicesConnector : BaseServiceConnector, IUserAccountService
{
    private static readonly ILog m_log =
        LogManager.GetLogger(
            MethodBase.GetCurrentMethod().DeclaringType);

    // Base URI of the remote user account service, without a trailing slash.
    private string m_ServerURI = String.Empty;

    public UserAccountServicesConnector()
    {
    }

    public UserAccountServicesConnector(string serverURI)
    {
        m_ServerURI = serverURI.TrimEnd('/');
    }

    public UserAccountServicesConnector(IConfigSource source)
    {
        Initialise(source);
    }

    /// <summary>
    /// Reads the [UserAccountService] configuration section and records the
    /// remote service URI.
    /// </summary>
    /// <param name="source">Configuration to read from.</param>
    /// <exception cref="Exception">
    /// Thrown when the section or the UserAccountServerURI setting is missing.
    /// </exception>
    public virtual void Initialise(IConfigSource source)
    {
        IConfig assetConfig = source.Configs["UserAccountService"];
        if (assetConfig == null)
        {
            m_log.Error("[ACCOUNT CONNECTOR]: UserAccountService missing from OpenSim.ini");
            throw new Exception("User account connector init error");
        }

        string serviceURI = assetConfig.GetString("UserAccountServerURI",
                String.Empty);

        if (serviceURI == String.Empty)
        {
            m_log.Error("[ACCOUNT CONNECTOR]: No Server URI named in section UserAccountService");
            throw new Exception("User account connector init error");
        }

        m_ServerURI = serviceURI;

        base.Initialise(source, "UserAccountService");
    }

    /// <summary>
    /// Looks up a user account by first and last name.
    /// </summary>
    /// <returns>The account, or null if not found or the request failed.</returns>
    public virtual UserAccount GetUserAccount(UUID scopeID, string firstName, string lastName)
    {
        Dictionary<string, object> sendData = new Dictionary<string, object>();
        sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
        sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
        sendData["METHOD"] = "getaccount";

        sendData["ScopeID"] = scopeID.ToString();
        sendData["FirstName"] = firstName;
        sendData["LastName"] = lastName;

        return SendAndGetReply(sendData);
    }

    /// <summary>
    /// Looks up a user account by email address.
    /// </summary>
    /// <returns>The account, or null if not found or the request failed.</returns>
    public virtual UserAccount GetUserAccount(UUID scopeID, string email)
    {
        Dictionary<string, object> sendData = new Dictionary<string, object>();
        sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
        sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
        sendData["METHOD"] = "getaccount";

        sendData["ScopeID"] = scopeID.ToString();
        sendData["Email"] = email;

        return SendAndGetReply(sendData);
    }

    /// <summary>
    /// Looks up a user account by agent ID.
    /// </summary>
    /// <returns>The account, or null if not found or the request failed.</returns>
    public virtual UserAccount GetUserAccount(UUID scopeID, UUID userID)
    {
        //m_log.DebugFormat("[ACCOUNTS CONNECTOR]: GetUserAccount {0}", userID);
        Dictionary<string, object> sendData = new Dictionary<string, object>();
        sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
        sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
        sendData["METHOD"] = "getaccount";

        sendData["ScopeID"] = scopeID.ToString();
        sendData["UserID"] = userID.ToString();

        return SendAndGetReply(sendData);
    }

    /// <summary>
    /// Searches for user accounts matching a query string.
    /// </summary>
    /// <returns>
    /// A (possibly empty) list of matching accounts, or null when the server
    /// returned no reply at all.
    /// </returns>
    public List<UserAccount> GetUserAccounts(UUID scopeID, string query)
    {
        Dictionary<string, object> sendData = new Dictionary<string, object>();
        sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
        sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
        sendData["METHOD"] = "getaccounts";

        sendData["ScopeID"] = scopeID.ToString();
        sendData["query"] = query;

        string reply = string.Empty;
        string reqString = ServerUtils.BuildQueryString(sendData);
        string uri = m_ServerURI + "/accounts";

        try
        {
            reply = SynchronousRestFormsRequester.MakeRequest("POST",
                    uri,
                    reqString,
                    m_Auth);

            if (string.IsNullOrEmpty(reply))
            {
                m_log.DebugFormat("[ACCOUNT CONNECTOR]: GetUserAccounts received null or empty reply");
                return null;
            }
        }
        catch (Exception e)
        {
            m_log.DebugFormat("[ACCOUNT CONNECTOR]: Exception when contacting user accounts server at {0}: {1}", uri, e.Message);
        }

        List<UserAccount> accounts = new List<UserAccount>();

        Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);

        if (replyData != null)
        {
            // An explicit "null" result means the search matched nothing.
            if (replyData.ContainsKey("result") && replyData["result"].ToString() == "null")
            {
                return accounts;
            }

            foreach (object acc in replyData.Values)
            {
                if (acc is Dictionary<string, object>)
                {
                    accounts.Add(new UserAccount((Dictionary<string, object>)acc));
                }
                else
                    m_log.DebugFormat("[ACCOUNT CONNECTOR]: GetUserAccounts received invalid response type {0}",
                        acc.GetType());
            }
        }
        else
            m_log.DebugFormat("[ACCOUNTS CONNECTOR]: GetUserAccounts received null response");

        return accounts;
    }

    // This connector keeps no local cache, so there is nothing to invalidate.
    public void InvalidateCache(UUID userID)
    {
    }

    /// <summary>
    /// Stores (creates or updates) a user account on the remote service.
    /// </summary>
    /// <returns>true if the server acknowledged the store; false otherwise.</returns>
    public virtual bool StoreUserAccount(UserAccount data)
    {
        Dictionary<string, object> sendData = new Dictionary<string, object>();
        sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
        sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
        sendData["METHOD"] = "setaccount";

        Dictionary<string, object> structData = data.ToKeyValuePairs();

        foreach (KeyValuePair<string, object> kvp in structData)
        {
            if (kvp.Value == null)
            {
                m_log.DebugFormat("[ACCOUNTS CONNECTOR]: Null value for {0}", kvp.Key);
                continue;
            }
            sendData[kvp.Key] = kvp.Value.ToString();
        }

        return SendAndGetReply(sendData) != null;
    }

    /// <summary>
    /// Create user remotely. Note that this is not part of the IUserAccountsService
    /// </summary>
    /// <param name="first"></param>
    /// <param name="last"></param>
    /// <param name="password"></param>
    /// <param name="email"></param>
    /// <param name="scopeID"></param>
    /// <returns>The created account, or null on failure.</returns>
    public virtual UserAccount CreateUser(string first, string last, string password, string email, UUID scopeID)
    {
        Dictionary<string, object> sendData = new Dictionary<string, object>();
        sendData["VERSIONMIN"] = ProtocolVersions.ClientProtocolVersionMin.ToString();
        sendData["VERSIONMAX"] = ProtocolVersions.ClientProtocolVersionMax.ToString();
        sendData["METHOD"] = "createuser";

        sendData["FirstName"] = first;
        sendData["LastName"] = last;
        sendData["Password"] = password;
        if (!string.IsNullOrEmpty(email))
            // BUGFIX: previously sent 'first' here, so the created account got
            // the first name stored as its email address.
            sendData["Email"] = email;
        sendData["ScopeID"] = scopeID.ToString();

        return SendAndGetReply(sendData);
    }

    // POSTs the request and parses the reply into a single UserAccount.
    // Returns null on transport failure, empty reply, or a malformed response.
    private UserAccount SendAndGetReply(Dictionary<string, object> sendData)
    {
        string reply = string.Empty;
        string reqString = ServerUtils.BuildQueryString(sendData);
        string uri = m_ServerURI + "/accounts";

        try
        {
            reply = SynchronousRestFormsRequester.MakeRequest("POST",
                    uri,
                    reqString,
                    m_Auth);

            if (string.IsNullOrEmpty(reply))
            {
                m_log.DebugFormat("[ACCOUNT CONNECTOR]: GetUserAccount received null or empty reply");
                return null;
            }
        }
        catch (Exception e)
        {
            m_log.DebugFormat("[ACCOUNT CONNECTOR]: Exception when contacting user accounts server at {0}: {1}", uri, e.Message);
        }

        Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);
        UserAccount account = null;

        if ((replyData != null) && replyData.ContainsKey("result") && (replyData["result"] != null))
        {
            if (replyData["result"] is Dictionary<string, object>)
            {
                account = new UserAccount((Dictionary<string, object>)replyData["result"]);
            }
        }

        return account;
    }

    // POSTs the request and interprets the reply's "result" field as a boolean
    // success flag. Any transport error, empty reply or missing field is false.
    private bool SendAndGetBoolReply(Dictionary<string, object> sendData)
    {
        string reqString = ServerUtils.BuildQueryString(sendData);
        string uri = m_ServerURI + "/accounts";

        try
        {
            string reply = SynchronousRestFormsRequester.MakeRequest("POST",
                    uri,
                    reqString,
                    m_Auth);

            // Guard against null as well as empty; the original only compared
            // against string.Empty and would pass null on to the XML parser.
            if (!string.IsNullOrEmpty(reply))
            {
                Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(reply);

                if (replyData.ContainsKey("result"))
                {
                    return replyData["result"].ToString().ToLower() == "success";
                }
                else
                    m_log.DebugFormat("[ACCOUNTS CONNECTOR]: Set or Create UserAccount reply data does not contain result field");
            }
            else
                m_log.DebugFormat("[ACCOUNTS CONNECTOR]: Set or Create UserAccount received empty reply");
        }
        catch (Exception e)
        {
            m_log.DebugFormat("[ACCOUNT CONNECTOR]: Exception when contacting user accounts server at {0}: {1}", uri, e.Message);
        }

        return false;
    }
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ec2-2015-10-01.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.EC2.Model
{
/// <summary>
/// Container for the parameters to the DescribeInstances operation.
/// Describes one or more of your instances.
///
///
/// <para>
/// If you specify one or more instance IDs, Amazon EC2 returns information for those
/// instances. If you do not specify instance IDs, Amazon EC2 returns information for
/// all relevant instances. If you specify an instance ID that is not valid, an error
/// is returned. If you specify an instance that you do not own, it is not included in
/// the returned results.
/// </para>
///
/// <para>
/// Recently terminated instances might appear in the returned results. This interval
/// is usually less than one hour.
/// </para>
/// </summary>
public partial class DescribeInstancesRequest : AmazonEC2Request
{
private List<Filter> _filters = new List<Filter>();
private List<string> _instanceIds = new List<string>();
private int? _maxResults;
private string _nextToken;
/// <summary>
/// Gets and sets the property Filters.
/// <para>
/// One or more filters.
/// </para>
/// <ul> <li>
/// <para>
/// <code>architecture</code> - The instance architecture (<code>i386</code> | <code>x86_64</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>availability-zone</code> - The Availability Zone of the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>block-device-mapping.attach-time</code> - The attach time for an EBS volume
/// mapped to the instance, for example, <code>2010-09-15T17:15:20.000Z</code>.
/// </para>
/// </li> <li>
/// <para>
/// <code>block-device-mapping.delete-on-termination</code> - A Boolean that indicates
/// whether the EBS volume is deleted on instance termination.
/// </para>
/// </li> <li>
/// <para>
/// <code>block-device-mapping.device-name</code> - The device name for the EBS volume
/// (for example, <code>/dev/sdh</code> or <code>xvdh</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>block-device-mapping.status</code> - The status for the EBS volume (<code>attaching</code>
/// | <code>attached</code> | <code>detaching</code> | <code>detached</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>block-device-mapping.volume-id</code> - The volume ID of the EBS volume.
/// </para>
/// </li> <li>
/// <para>
/// <code>client-token</code> - The idempotency token you provided when you launched the
/// instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>dns-name</code> - The public DNS name of the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>group-id</code> - The ID of the security group for the instance. EC2-Classic
/// only.
/// </para>
/// </li> <li>
/// <para>
/// <code>group-name</code> - The name of the security group for the instance. EC2-Classic
/// only.
/// </para>
/// </li> <li>
/// <para>
/// <code>hypervisor</code> - The hypervisor type of the instance (<code>ovm</code> |
/// <code>xen</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>iam-instance-profile.arn</code> - The instance profile associated with the instance.
/// Specified as an ARN.
/// </para>
/// </li> <li>
/// <para>
/// <code>image-id</code> - The ID of the image used to launch the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>instance-id</code> - The ID of the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>instance-lifecycle</code> - Indicates whether this is a Spot Instance (<code>spot</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>instance-state-code</code> - The state of the instance, as a 16-bit unsigned
/// integer. The high byte is an opaque internal value and should be ignored. The low
/// byte is set based on the state represented. The valid values are: 0 (pending), 16
/// (running), 32 (shutting-down), 48 (terminated), 64 (stopping), and 80 (stopped).
/// </para>
/// </li> <li>
/// <para>
/// <code>instance-state-name</code> - The state of the instance (<code>pending</code>
/// | <code>running</code> | <code>shutting-down</code> | <code>terminated</code> | <code>stopping</code>
/// | <code>stopped</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>instance-type</code> - The type of instance (for example, <code>t2.micro</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>instance.group-id</code> - The ID of the security group for the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>instance.group-name</code> - The name of the security group for the instance.
///
/// </para>
/// </li> <li>
/// <para>
/// <code>ip-address</code> - The public IP address of the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>kernel-id</code> - The kernel ID.
/// </para>
/// </li> <li>
/// <para>
/// <code>key-name</code> - The name of the key pair used when the instance was launched.
/// </para>
/// </li> <li>
/// <para>
/// <code>launch-index</code> - When launching multiple instances, this is the index for
/// the instance in the launch group (for example, 0, 1, 2, and so on).
/// </para>
/// </li> <li>
/// <para>
/// <code>launch-time</code> - The time when the instance was launched.
/// </para>
/// </li> <li>
/// <para>
/// <code>monitoring-state</code> - Indicates whether monitoring is enabled for the instance
/// (<code>disabled</code> | <code>enabled</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>owner-id</code> - The AWS account ID of the instance owner.
/// </para>
/// </li> <li>
/// <para>
/// <code>placement-group-name</code> - The name of the placement group for the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>platform</code> - The platform. Use <code>windows</code> if you have Windows
/// instances; otherwise, leave blank.
/// </para>
/// </li> <li>
/// <para>
/// <code>private-dns-name</code> - The private DNS name of the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>private-ip-address</code> - The private IP address of the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>product-code</code> - The product code associated with the AMI used to launch
/// the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>product-code.type</code> - The type of product code (<code>devpay</code> | <code>marketplace</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>ramdisk-id</code> - The RAM disk ID.
/// </para>
/// </li> <li>
/// <para>
/// <code>reason</code> - The reason for the current state of the instance (for example,
/// shows "User Initiated [date]" when you stop or terminate the instance). Similar to
/// the state-reason-code filter.
/// </para>
/// </li> <li>
/// <para>
/// <code>requester-id</code> - The ID of the entity that launched the instance on your
/// behalf (for example, AWS Management Console, Auto Scaling, and so on).
/// </para>
/// </li> <li>
/// <para>
/// <code>reservation-id</code> - The ID of the instance's reservation. A reservation
/// ID is created any time you launch an instance. A reservation ID has a one-to-one relationship
/// with an instance launch request, but can be associated with more than one instance
/// if you launch multiple instances using the same launch request. For example, if you
/// launch one instance, you'll get one reservation ID. If you launch ten instances using
/// the same launch request, you'll also get one reservation ID.
/// </para>
/// </li> <li>
/// <para>
/// <code>root-device-name</code> - The name of the root device for the instance (for
/// example, <code>/dev/sda1</code> or <code>/dev/xvda</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>root-device-type</code> - The type of root device that the instance uses (<code>ebs</code>
/// | <code>instance-store</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>source-dest-check</code> - Indicates whether the instance performs source/destination
/// checking. A value of <code>true</code> means that checking is enabled, and <code>false</code>
/// means checking is disabled. The value must be <code>false</code> for the instance
/// to perform network address translation (NAT) in your VPC.
/// </para>
/// </li> <li>
/// <para>
/// <code>spot-instance-request-id</code> - The ID of the Spot Instance request.
/// </para>
/// </li> <li>
/// <para>
/// <code>state-reason-code</code> - The reason code for the state change.
/// </para>
/// </li> <li>
/// <para>
/// <code>state-reason-message</code> - A message that describes the state change.
/// </para>
/// </li> <li>
/// <para>
/// <code>subnet-id</code> - The ID of the subnet for the instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>tag</code>:<i>key</i>=<i>value</i> - The key/value combination of a tag assigned
/// to the resource, where <code>tag</code>:<i>key</i> is the tag's key.
/// </para>
/// </li> <li>
/// <para>
/// <code>tag-key</code> - The key of a tag assigned to the resource. This filter is independent
/// of the <code>tag-value</code> filter. For example, if you use both the filter "tag-key=Purpose"
/// and the filter "tag-value=X", you get any resources assigned both the tag key Purpose
/// (regardless of what the tag's value is), and the tag value X (regardless of what the
/// tag's key is). If you want to list only resources where Purpose is X, see the <code>tag</code>:<i>key</i>=<i>value</i>
/// filter.
/// </para>
/// </li> <li>
/// <para>
/// <code>tag-value</code> - The value of a tag assigned to the resource. This filter
/// is independent of the <code>tag-key</code> filter.
/// </para>
/// </li> <li>
/// <para>
/// <code>tenancy</code> - The tenancy of an instance (<code>dedicated</code> | <code>default</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>virtualization-type</code> - The virtualization type of the instance (<code>paravirtual</code>
/// | <code>hvm</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>vpc-id</code> - The ID of the VPC that the instance is running in.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.description</code> - The description of the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.subnet-id</code> - The ID of the subnet for the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.vpc-id</code> - The ID of the VPC for the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.network-interface-id</code> - The ID of the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.owner-id</code> - The ID of the owner of the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.availability-zone</code> - The Availability Zone for the network
/// interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.requester-id</code> - The requester ID for the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.requester-managed</code> - Indicates whether the network interface
/// is being managed by AWS.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.status</code> - The status of the network interface (<code>available</code>
/// | <code>in-use</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.mac-address</code> - The MAC address of the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface-private-dns-name</code> - The private DNS name of the network
/// interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.source-dest-check</code> - Whether the network interface performs
/// source/destination checking. A value of <code>true</code> means checking is enabled,
/// and <code>false</code> means checking is disabled. The value must be <code>false</code>
/// for the network interface to perform network address translation (NAT) in your VPC.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.group-id</code> - The ID of a security group associated with
/// the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.group-name</code> - The name of a security group associated
/// with the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.attachment-id</code> - The ID of the interface
/// attachment.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.instance-id</code> - The ID of the instance to
/// which the network interface is attached.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.instance-owner-id</code> - The owner ID of the
/// instance to which the network interface is attached.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.addresses.private-ip-address</code> - The private IP address
/// associated with the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.device-index</code> - The device index to which
/// the network interface is attached.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.status</code> - The status of the attachment (<code>attaching</code>
/// | <code>attached</code> | <code>detaching</code> | <code>detached</code>).
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.attach-time</code> - The time that the network
/// interface was attached to an instance.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.attachment.delete-on-termination</code> - Specifies whether
/// the attachment is deleted when an instance is terminated.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.addresses.primary</code> - Specifies whether the IP address
/// of the network interface is the primary private IP address.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.addresses.association.public-ip</code> - The ID of the association
/// of an Elastic IP address with a network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>network-interface.addresses.association.ip-owner-id</code> - The owner ID of
/// the private IP address associated with the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>association.public-ip</code> - The address of the Elastic IP address bound to
/// the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>association.ip-owner-id</code> - The owner of the Elastic IP address associated
/// with the network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>association.allocation-id</code> - The allocation ID returned when you allocated
/// the Elastic IP address for your network interface.
/// </para>
/// </li> <li>
/// <para>
/// <code>association.association-id</code> - The association ID returned when the network
/// interface was associated with an IP address.
/// </para>
/// </li> </ul>
/// </summary>
public List<Filter> Filters
{
    get
    {
        return _filters;
    }
    set
    {
        _filters = value;
    }
}
// Check to see if Filters property is set (non-null and non-empty).
internal bool IsSetFilters()
{
    var filters = _filters;
    return filters != null && filters.Count > 0;
}
/// <summary>
/// Gets and sets the property InstanceIds.
/// <para>
/// One or more instance IDs.
/// </para>
///
/// <para>
/// Default: Describes all your instances.
/// </para>
/// </summary>
public List<string> InstanceIds
{
    get
    {
        return _instanceIds;
    }
    set
    {
        _instanceIds = value;
    }
}
// Check to see if InstanceIds property is set (non-null and non-empty).
internal bool IsSetInstanceIds()
{
    var ids = _instanceIds;
    return ids != null && ids.Count > 0;
}
/// <summary>
/// Gets and sets the property MaxResults.
/// <para>
/// The maximum number of results to return for the request in a single page. The remaining
/// results of the initial request can be seen by sending another request with the returned
/// <code>NextToken</code> value. This value can be between 5 and 1000; if <code>MaxResults</code>
/// is given a value larger than 1000, only 1000 results are returned. You cannot specify
/// this parameter and the instance IDs parameter in the same request.
/// </para>
/// </summary>
public int MaxResults
{
    get
    {
        // Backing field is nullable so "unset" can be distinguished from zero;
        // unset reads back as default(int).
        return _maxResults.GetValueOrDefault();
    }
    set
    {
        _maxResults = value;
    }
}
// Check to see if MaxResults property was ever assigned.
internal bool IsSetMaxResults()
{
    return _maxResults.HasValue;
}
/// <summary>
/// Gets and sets the property NextToken.
/// <para>
/// The token to request the next page of results.
/// </para>
/// </summary>
public string NextToken
{
    get
    {
        return _nextToken;
    }
    set
    {
        _nextToken = value;
    }
}
// Check to see if NextToken property is set (non-null).
internal bool IsSetNextToken()
{
    return _nextToken != null;
}
}
}
| |
// This file is part of SNMP#NET.
//
// SNMP#NET is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// SNMP#NET is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with SNMP#NET. If not, see <http://www.gnu.org/licenses/>.
//
using System;
using System.Text;
namespace SnmpSharpNet
{
/// <summary>SNMP SMI version 1, version 2c and version 3 constants.
/// </summary>
public sealed class SnmpConstants
{
    #region Snmp V1 errors

    /// <summary>No error</summary>
    public const int ErrNoError = 0;
    /// <summary>Request too big</summary>
    public const int ErrTooBig = 1;
    /// <summary>Object identifier does not exist</summary>
    public const int ErrNoSuchName = 2;
    /// <summary>Invalid value</summary>
    public const int ErrBadValue = 3;
    /// <summary>Requested invalid operation on a read only table</summary>
    public const int ErrReadOnly = 4;
    /// <summary>Generic error</summary>
    public const int ErrGenError = 5;
    /// <summary>Enterprise specific error</summary>
    // NOTE(review): name breaks the Err* PascalCase convention of its siblings, but
    // renaming a public const would break existing callers, so it is kept as-is.
    public const int enterpriseSpecific = 6;

    #endregion SnmpV1errors

    #region SnmpV2errors

    /// <summary>Access denied</summary>
    public const int ErrNoAccess = 6;
    /// <summary>Incorrect type</summary>
    public const int ErrWrongType = 7;
    /// <summary>Incorrect length</summary>
    public const int ErrWrongLength = 8;
    /// <summary>Invalid encoding</summary>
    public const int ErrWrongEncoding = 9;
    /// <summary>Object does not have correct value</summary>
    public const int ErrWrongValue = 10;
    /// <summary>Insufficient rights to perform create operation</summary>
    public const int ErrNoCreation = 11;
    /// <summary>Inconsistent value</summary>
    public const int ErrInconsistentValue = 12;
    /// <summary>Requested resource is not available</summary>
    public const int ErrResourceUnavailable = 13;
    /// <summary>Unable to commit values</summary>
    public const int ErrCommitFailed = 14;
    /// <summary>Undo request failed</summary>
    public const int ErrUndoFailed = 15;
    /// <summary>Authorization failed</summary>
    public const int ErrAuthorizationError = 16;
    /// <summary>Instance not writable</summary>
    public const int ErrNotWritable = 17;
    /// <summary>Inconsistent object identifier</summary>
    public const int ErrInconsistentName = 18;

    #endregion SnmpV2errors

    #region SNMP version 1 trap generic error codes

    /// <summary>Cold start trap</summary>
    public const int ColdStart = 0;
    /// <summary>Warm start trap</summary>
    public const int WarmStart = 1;
    /// <summary>Link down trap</summary>
    public const int LinkDown = 2;
    /// <summary>Link up trap</summary>
    public const int LinkUp = 3;
    /// <summary>Authentication-failure trap</summary>
    public const int AuthenticationFailure = 4;
    /// <summary>EGP Neighbor Loss trap</summary>
    public const int EgpNeighborLoss = 5;

    #endregion SNMP version 1 trap generic error codes

    #region SMI Type codes and type names

    /// <summary>Signed 32-bit integer ASN.1 data type. For implementation, see <see cref="Integer32"/></summary>
    public static readonly byte SMI_INTEGER = (byte)(AsnType.UNIVERSAL | AsnType.INTEGER);
    /// <summary>String representation of the AsnType.INTEGER type.</summary>
    public static readonly string SMI_INTEGER_STR = "Integer32";
    /// <summary>Data type representing a sequence of zero or more 8-bit byte values. For implementation, see <see cref="OctetString"/></summary>
    public static readonly byte SMI_STRING = (byte)(AsnType.UNIVERSAL | AsnType.OCTETSTRING);
    /// <summary>String representation of the AsnType.OCTETSTRING type.</summary>
    public static readonly string SMI_STRING_STR = "OctetString";
    /// <summary>Object id ASN.1 type. For implementation, see <see cref="Oid"/></summary>
    public static readonly byte SMI_OBJECTID = (byte)(AsnType.UNIVERSAL | AsnType.OBJECTID);
    /// <summary>String representation of the SMI_OBJECTID type.</summary>
    public static readonly string SMI_OBJECTID_STR = "ObjectId";
    /// <summary>Null ASN.1 value type. For implementation, see <see cref="Null"/>.</summary>
    public static readonly byte SMI_NULL = (byte)(AsnType.UNIVERSAL | AsnType.NULL);
    /// <summary>String representation of the SMI_NULL type.</summary>
    public static readonly string SMI_NULL_STR = "NULL";
    /// <summary> An application string is a sequence of octets
    /// defined at the application level. Although the SMI
    /// does not define an Application String, it does define
    /// an IP Address which is an Application String of length
    /// four.
    /// </summary>
    public static readonly byte SMI_APPSTRING = (byte)(AsnType.APPLICATION | 0x00);
    /// <summary>String representation of the SMI_APPSTRING type.</summary>
    public static readonly string SMI_APPSTRING_STR = "AppString";
    /// <summary> An IP Address is an application string of length four
    /// and is indistinguishable from the SMI_APPSTRING value.
    /// The address is a 32-bit quantity stored in network byte order.
    /// </summary>
    // NOTE: same encoded value as SMI_APPSTRING; decoders cannot tell them apart.
    public static readonly byte SMI_IPADDRESS = (byte)(AsnType.APPLICATION | 0x00);
    /// <summary>String representation of the SMI_IPADDRESS type.</summary>
    public static readonly string SMI_IPADDRESS_STR = "IPAddress";
    /// <summary> A non-negative integer that may be incremented, but not
    /// decremented. The value is a 32-bit unsigned quantity representing
    /// the range of zero to 2^32-1 (4,294,967,295). When the counter
    /// reaches its maximum value it wraps back to zero and starts again.
    /// </summary>
    public static readonly byte SMI_COUNTER32 = (byte)(AsnType.APPLICATION | 0x01);
    /// <summary>String representation of the SMI_COUNTER32 type.</summary>
    public static readonly string SMI_COUNTER32_STR = "Counter32";
    /// <summary> Represents a non-negative integer that may increase or
    /// decrease with a maximum value of 2^32-1. If the maximum
    /// value is reached the gauge stays latched until reset.
    /// </summary>
    public static readonly byte SMI_GAUGE32 = (byte)(AsnType.APPLICATION | 0x02);
    /// <summary>String representation of the SMI_GAUGE32 type.</summary>
    public static readonly string SMI_GAUGE32_STR = "Gauge32";
    /// <summary> Used to represent the integers in the range of 0 to 2^32-1.
    /// This type is identical to the SMI_COUNTER32 and are
    /// indistinguishable in ASN.1
    /// </summary>
    public static readonly byte SMI_UNSIGNED32 = (byte)(AsnType.APPLICATION | 0x02); // same as gauge
    /// <summary>String representation of the SMI_UNSIGNED32 type.</summary>
    public static readonly string SMI_UNSIGNED32_STR = "Unsigned32";
    /// <summary> This represents a non-negative integer that counts time, modulo 2^32.
    /// The time is represented in hundredths (1/100th) of a second.
    /// </summary>
    public static readonly byte SMI_TIMETICKS = (byte)(AsnType.APPLICATION | 0x03);
    /// <summary>String representation of the SMI_TIMETICKS type.</summary>
    public static readonly string SMI_TIMETICKS_STR = "TimeTicks";
    /// <summary> Used to support the transport of arbitrary data. The
    /// data itself is encoded as an octet string, but may be in
    /// any format defined by ASN.1 or another standard.
    /// </summary>
    public static readonly byte SMI_OPAQUE = (byte)(AsnType.APPLICATION | 0x04);
    /// <summary>String representation of the SMI_OPAQUE type.</summary>
    public static readonly string SMI_OPAQUE_STR = "Opaque";
    /// <summary> Defines a 64-bit unsigned counter. A counter is an integer that
    /// can be incremented, but cannot be decremented. A maximum value
    /// of 2^64 - 1 (18,446,744,073,709,551,615) can be represented.
    /// When the counter reaches it's maximum it wraps back to zero and
    /// starts again.
    /// </summary>
    public static readonly byte SMI_COUNTER64 = (byte)(AsnType.APPLICATION | 0x06); // SMIv2 only
    /// <summary>String representation of the SMI_COUNTER64 type.</summary>
    public static readonly string SMI_COUNTER64_STR = "Counter64";
    /// <summary>String representation of the unknown SMI data type.</summary>
    public static readonly string SMI_UNKNOWN_STR = "Unknown";
    /// <summary> The SNMPv2 error representing that there is No-Such-Object
    /// for a particular object identifier. This error is the result
    /// of a requested object identifier that does not exist in the
    /// agent's tables
    /// </summary>
    public static readonly byte SMI_NOSUCHOBJECT = (byte)(AsnType.CONTEXT | AsnType.PRIMITIVE | 0x00);
    /// <summary> The SNMPv2 error representing that there is No-Such-Instance
    /// for a particular object identifier. This error is the result
    /// of a requested object identifier instance does not exist in the
    /// agent's tables.
    /// </summary>
    public static readonly byte SMI_NOSUCHINSTANCE = (byte)(AsnType.CONTEXT | AsnType.PRIMITIVE | 0x01);
    /// <summary> The SNMPv2 error representing the End-Of-Mib-View.
    /// This error variable will be returned by a SNMPv2 agent
    /// if the requested object identifier has reached the
    /// end of the agent's mib table and there is no lexicographic
    /// successor.
    /// </summary>
    public static readonly byte SMI_ENDOFMIBVIEW = (byte)(AsnType.CONTEXT | AsnType.PRIMITIVE | 0x02);
    /// <summary>
    /// SEQUENCE Variable Binding code. Hex value: 0x30
    /// </summary>
    public static readonly byte SMI_SEQUENCE = (byte)(AsnType.SEQUENCE | AsnType.CONSTRUCTOR);
    /// <summary> Defines an SNMPv2 Party Clock. The Party Clock is currently
    /// Obsolete, but included for backwards compatibility. Obsoleted in RFC 1902.
    /// </summary>
    public static readonly byte SMI_PARTY_CLOCK = (byte)(AsnType.APPLICATION | 0x07);

    #endregion

    #region SNMP version 2 TRAP OIDs

    /// <summary>
    /// sysUpTime.0 OID is the first value in the VarBind array of SNMP version 2 TRAP packets
    /// </summary>
    public static Oid SysUpTime = new Oid(new UInt32[] { 1, 3, 6, 1, 2, 1, 1, 3, 0 });
    /// <summary>
    /// trapObjectID.0 OID is the second value in the VarBind array of SNMP version 2 TRAP packets
    /// </summary>
    public static Oid TrapObjectId = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 1, 1, 4, 1, 0 });

    #endregion

    #region SNMP version 3 error OID values

    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid usmStatsUnsupportedSecLevels = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 15, 1, 1, 1, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid usmStatsNotInTimeWindows = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 15, 1, 1, 2, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid usmStatsUnknownSecurityNames = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 15, 1, 1, 3, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid usmStatsUnknownEngineIDs = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 15, 1, 1, 4, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid usmStatsWrongDigests = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 15, 1, 1, 5, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid usmStatsDecryptionErrors = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 15, 1, 1, 6, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid snmpUnknownSecurityModels = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 11, 2, 1, 1, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid snmpInvalidMsgs = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 11, 2, 1, 2, 0 });
    /// <summary>
    /// SNMP version 3, USM error
    /// </summary>
    public static Oid snmpUnknownPDUHandlers = new Oid(new UInt32[] { 1, 3, 6, 1, 6, 3, 11, 2, 1, 3, 0 });
    /// <summary>
    /// Array of all SNMP version 3 REPORT packet error OIDs
    /// </summary>
    // FIX: snmpInvalidMsgs was declared above but missing from this "all error OIDs"
    // array, so REPORT packets carrying that OID were not recognized as v3 errors.
    public static Oid[] v3ErrorOids = new Oid[] { usmStatsUnsupportedSecLevels, usmStatsNotInTimeWindows, usmStatsUnknownSecurityNames,
        usmStatsUnknownEngineIDs, usmStatsWrongDigests, usmStatsDecryptionErrors, snmpUnknownSecurityModels, snmpInvalidMsgs, snmpUnknownPDUHandlers };

    #endregion SNMP version 3 error OID values

    #region Helper methods

    /// <summary>Used to create correct variable type object for the specified encoded type</summary>
    /// <param name="asnType">ASN.1 type code</param>
    /// <returns>A new object matching type supplied or null if type was not recognized.</returns>
    public static AsnType GetSyntaxObject(byte asnType)
    {
        AsnType obj = null;
        if (asnType == SnmpConstants.SMI_INTEGER)
            obj = new Integer32();
        else if (asnType == SnmpConstants.SMI_COUNTER32)
            obj = new Counter32();
        else if (asnType == SnmpConstants.SMI_GAUGE32)
            obj = new Gauge32();
        else if (asnType == SnmpConstants.SMI_COUNTER64)
            obj = new Counter64();
        else if (asnType == SnmpConstants.SMI_TIMETICKS)
            obj = new TimeTicks();
        else if (asnType == SnmpConstants.SMI_STRING)
            obj = new OctetString();
        else if (asnType == SnmpConstants.SMI_OPAQUE)
            obj = new Opaque();
        else if (asnType == SnmpConstants.SMI_IPADDRESS)
            obj = new IpAddress();
        else if (asnType == SnmpConstants.SMI_OBJECTID)
            obj = new Oid();
        else if (asnType == SnmpConstants.SMI_PARTY_CLOCK)
            obj = new V2PartyClock();
        else if (asnType == SnmpConstants.SMI_NOSUCHINSTANCE)
            obj = new NoSuchInstance();
        else if (asnType == SnmpConstants.SMI_NOSUCHOBJECT)
            obj = new NoSuchObject();
        else if (asnType == SnmpConstants.SMI_ENDOFMIBVIEW)
            obj = new EndOfMibView();
        else if (asnType == SnmpConstants.SMI_NULL)
        {
            obj = new Null();
        }
        return obj;
    }

    /// <summary>
    /// Return SNMP type object of the type specified by name. Supported variable types are:
    /// * <see cref="Integer32"/>
    /// * <see cref="Counter32"/>
    /// * <see cref="Gauge32"/>
    /// * <see cref="Counter64"/>
    /// * <see cref="TimeTicks"/>
    /// * <see cref="OctetString"/>
    /// * <see cref="IpAddress"/>
    /// * <see cref="Oid"/>
    /// * <see cref="Null"/>
    /// </summary>
    /// <param name="name">Name of the object type</param>
    /// <returns>New <see cref="AsnType"/> object.</returns>
    /// <exception cref="ArgumentException">Thrown when the name does not match a supported type.</exception>
    public static AsnType GetSyntaxObject(string name)
    {
        AsnType obj = null;
        if (name == "Integer32")
            obj = new Integer32();
        else if (name == "Counter32")
            obj = new Counter32();
        else if (name == "Gauge32")
            obj = new Gauge32();
        else if (name == "Counter64")
            obj = new Counter64();
        else if (name == "TimeTicks")
            obj = new TimeTicks();
        else if (name == "OctetString")
            obj = new OctetString();
        else if (name == "IpAddress")
            obj = new IpAddress();
        else if (name == "Oid")
            obj = new Oid();
        else if (name == "Null")
            obj = new Null();
        else
            throw new ArgumentException("Invalid value type name");
        return obj;
    }

    /// <summary>
    /// Return string representation of the SMI value type.
    /// </summary>
    /// <param name="type">AsnType class Type member function value.</param>
    /// <returns>String formatted name of the SMI type.</returns>
    public static string GetTypeName(byte type)
    {
        // NOTE: SMI_APPSTRING shares its byte value with SMI_IPADDRESS, and
        // SMI_UNSIGNED32 shares its value with SMI_GAUGE32, so the APPSTRING and
        // UNSIGNED32 branches below can never be reached; the first match wins.
        if (type == SMI_IPADDRESS)
            return SMI_IPADDRESS_STR;
        else if (type == SMI_APPSTRING)
            return SMI_APPSTRING_STR;
        else if (type == SMI_COUNTER32)
            return SMI_COUNTER32_STR;
        else if (type == SMI_COUNTER64)
            return SMI_COUNTER64_STR;
        else if (type == SMI_GAUGE32)
            return SMI_GAUGE32_STR;
        else if (type == SMI_INTEGER)
            return SMI_INTEGER_STR;
        else if (type == SMI_NULL)
            return SMI_NULL_STR;
        else if (type == SMI_OBJECTID)
            return SMI_OBJECTID_STR;
        else if (type == SMI_OPAQUE)
            return SMI_OPAQUE_STR;
        else if (type == SMI_STRING)
            return SMI_STRING_STR;
        else if (type == SMI_TIMETICKS)
            return SMI_TIMETICKS_STR;
        else if (type == SMI_UNSIGNED32)
            return SMI_UNSIGNED32_STR;
        else
            return SMI_UNKNOWN_STR;
    }

    /// <summary>
    /// Debugging function used to dump on the console supplied byte array in a format suitable for console output.
    /// </summary>
    /// <param name="data">Byte array data</param>
    public static void DumpHex(byte[] data)
    {
        int val = 0; // column counter within the current 16-byte row
        for (int i = 0; i < data.Length; i++)
        {
            if (val == 0)
            {
                // start of a new row: print the byte offset
                Console.Write("{0:d04} ", i);
            }
            Console.Write("{0:x2}", data[i]);
            val += 1;
            if (val == 16)
            {
                val = 0;
                Console.Write("\n");
            }
            else
            {
                Console.Write(" ");
            }
        }
        // terminate a partial final row
        if (val != 0)
            Console.WriteLine("\n");
    }

    /// <summary>
    /// Check if SNMP version value is correct
    /// </summary>
    /// <param name="version">SNMP version value</param>
    /// <returns>true if valid SNMP version, otherwise false</returns>
    public static bool IsValidVersion(int version)
    {
        if (version == (int)SnmpVersion.Ver1 || version == (int)SnmpVersion.Ver2 || version == (int)SnmpVersion.Ver3)
            return true;
        return false;
    }

    #endregion

    /// <summary>
    /// Private constructor to prevent the class with all static members from being instantiated.
    /// </summary>
    private SnmpConstants()
    {
        // nothing
    }
}
}
| |
using Plang.Compiler.TypeChecker.AST;
using Plang.Compiler.TypeChecker.AST.Declarations;
using Plang.Compiler.TypeChecker.AST.ModuleExprs;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Plang.Compiler.TypeChecker
{
public class ModuleSystemTypeChecker
{
private readonly Scope globalScope;
private readonly ITranslationErrorHandler handler;
/// <summary>
/// Creates a module-system type checker.
/// </summary>
/// <param name="handler">Error handler used to report translation errors.</param>
/// <param name="globalScope">Global scope used to resolve the universal event set.</param>
public ModuleSystemTypeChecker(ITranslationErrorHandler handler, Scope globalScope)
{
    this.globalScope = globalScope;
    this.handler = handler;
}
/// <summary>
/// Resolves an event permission list; a null list means "unrestricted",
/// which maps to the universal event set of the global scope.
/// </summary>
private IEnumerable<PEvent> GetPermissions(IEnumerable<PEvent> allowed)
{
    return allowed ?? globalScope.UniversalEventSet.Events;
}
/// <summary>
/// Dispatches well-formedness checking to the overload matching the concrete
/// module-expression kind; each overload validates the expression and
/// populates its ModuleInfo.
/// </summary>
/// <param name="moduleExpr">Module expression to check.</param>
public void CheckWellFormedness(IPModuleExpr moduleExpr)
{
    switch (moduleExpr)
    {
        case AssertModuleExpr assertModule:
            CheckWellFormedness(assertModule);
            break;

        case BindModuleExpr bindModule:
            CheckWellFormedness(bindModule);
            break;

        case RenameModuleExpr renameModule:
            CheckWellFormedness(renameModule);
            break;

        case UnionOrComposeModuleExpr unionOrComposeModule:
            CheckWellFormedness(unionOrComposeModule);
            break;

        case HideEventModuleExpr hideEventModule:
            CheckWellFormedness(hideEventModule);
            break;

        case HideInterfaceModuleExpr hideInterfaceModule:
            CheckWellFormedness(hideInterfaceModule);
            break;

        default:
            // Unknown module-expression kind: report as an internal compiler error.
            throw handler.InternalError(moduleExpr.SourceLocation,
                new ArgumentOutOfRangeException(nameof(moduleExpr)));
    }
}
/// <summary>
/// Checks well-formedness of an "assert" module expression (attaching spec
/// monitors to a component module) and populates its ModuleInfo by copying the
/// component's attributes and extending the monitor map.
/// </summary>
private void CheckWellFormedness(AssertModuleExpr assertExpr)
{
    // Memoized: a non-null ModuleInfo means this expression was already checked.
    if (assertExpr.ModuleInfo != null)
    {
        return;
    }
    //check if the current module is wellformed
    CheckWellFormedness(assertExpr.ComponentModule);
    ModuleInfo componentModuleInfo = assertExpr.ComponentModule.ModuleInfo;
    // check that the observed events of the monitor is a subset of the sends set.
    foreach (Machine monitor in assertExpr.SpecMonitors)
    {
        if (!monitor.Observes.IsSubsetEqOf(componentModuleInfo.Sends))
        {
            // Report the first observed event that the module never sends.
            PEvent @event = monitor.Observes.Events.First(e => !componentModuleInfo.Sends.Contains(e));
            throw handler.InvalidAssertExpr(assertExpr.SourceLocation, monitor, @event);
        }
    }
    // check if the same monitor has already been attached
    foreach (Machine conflictMonitor in componentModuleInfo.MonitorMap.Keys.Where(
        x => assertExpr.SpecMonitors.Contains(x)))
    {
        throw handler.InvalidAssertExpr(assertExpr.SourceLocation, conflictMonitor);
    }
    assertExpr.ModuleInfo = new ModuleInfo();
    ModuleInfo currentModule = assertExpr.ModuleInfo;
    //populate the attributes of the module
    // initialize the monitor map: start from the component's existing monitors
    foreach (KeyValuePair<Machine, IEnumerable<Interface>> mMapItem in componentModuleInfo.MonitorMap)
    {
        currentModule.MonitorMap.Add(mMapItem.Key, mMapItem.Value.ToList());
    }
    // each newly attached monitor observes every interface the component implements
    foreach (Machine monitor in assertExpr.SpecMonitors)
    {
        currentModule.MonitorMap.Add(monitor, componentModuleInfo.InterfaceDef.Select(id => id.Key).ToList());
    }
    // rest of the attributes remain same
    currentModule.PrivateEvents.AddEvents(componentModuleInfo.PrivateEvents.Events);
    currentModule.PrivateInterfaces.AddInterfaces(componentModuleInfo.PrivateInterfaces.Interfaces);
    currentModule.Sends.AddEvents(componentModuleInfo.Sends.Events);
    currentModule.Receives.AddEvents(componentModuleInfo.Receives.Events);
    currentModule.Creates.AddInterfaces(componentModuleInfo.Creates.Interfaces);
    // deep-copy the link map so later mutations do not alias the component's map
    foreach (KeyValuePair<Interface, IDictionary<Interface, Interface>> linkMapItem in componentModuleInfo
        .LinkMap)
    {
        currentModule.LinkMap[linkMapItem.Key] = new Dictionary<Interface, Interface>();
        foreach (KeyValuePair<Interface, Interface> localLinkMap in linkMapItem.Value)
        {
            currentModule.LinkMap[linkMapItem.Key].Add(localLinkMap.Key, localLinkMap.Value);
        }
    }
    // copy the interface-definition map
    foreach (KeyValuePair<Interface, Machine> ipItem in componentModuleInfo.InterfaceDef)
    {
        currentModule.InterfaceDef.Add(ipItem.Key, ipItem.Value);
    }
}
/// <summary>
/// Checks that both sides of a refinement test are closed with respect to
/// created interfaces and contain the designated main machine. The LHS/RHS
/// checks were previously duplicated verbatim; they are factored into
/// <c>CheckTestModuleSide</c>. The refinement relation itself is not yet
/// implemented, so this always throws <see cref="NotImplementedException"/>
/// after the structural checks pass.
/// </summary>
/// <param name="test">Refinement test declaration to validate.</param>
internal void CheckRefinementTest(RefinementTest test)
{
    CheckTestModuleSide(test, true);
    CheckTestModuleSide(test, false);
    //todo: Implement the checks with respect to refinement relation
    throw new NotImplementedException();
}

// Validates one side (LHS when isLeft, otherwise RHS) of a refinement test:
// the module must be closed with respect to created interfaces and must
// implement the test's main machine. Error messages match the original
// duplicated code byte-for-byte.
private void CheckTestModuleSide(RefinementTest test, bool isLeft)
{
    var modExpr = isLeft ? test.LeftModExpr : test.RightModExpr;
    string side = isLeft ? "LHS" : "RHS";
    //check that the test module is closed with respect to creates
    IEnumerable<Interface> notImplementedInterface =
        modExpr.ModuleInfo.Creates.Interfaces.Where(i =>
            !modExpr.ModuleInfo.InterfaceDef.Keys.Contains(i));
    Interface[] @interface = notImplementedInterface as Interface[] ?? notImplementedInterface.ToArray();
    if (@interface.Any())
    {
        throw handler.NotClosed(test.SourceLocation,
            $"{side} test module is not closed with respect to created interfaces; interface {@interface.First().Name} is created but not implemented inside the module");
    }
    //check that the test module main machine exists
    bool hasMainMachine =
        modExpr.ModuleInfo.InterfaceDef.Values.Any(m => m.Name == test.Main && !m.IsSpec);
    if (!hasMainMachine)
    {
        throw handler.NoMain(test.SourceLocation,
            $"machine {test.Main} does not exist in the {side} test module");
    }
}
/// <summary>
/// Checks that a safety test module is closed with respect to created
/// interfaces and implements the test's main machine.
/// </summary>
/// <param name="test">Safety test declaration to validate.</param>
internal void CheckSafetyTest(SafetyTest test)
{
    var moduleInfo = test.ModExpr.ModuleInfo;

    // Every interface the module can create must also be implemented inside it.
    var unimplemented = moduleInfo.Creates.Interfaces
        .Where(i => !moduleInfo.InterfaceDef.Keys.Contains(i))
        .ToArray();
    if (unimplemented.Any())
    {
        throw handler.NotClosed(test.SourceLocation,
            $"test module is not closed with respect to created interfaces; interface {unimplemented.First().Name} is created but not implemented inside the module");
    }

    // The designated main machine must exist and must not be a spec machine.
    if (!moduleInfo.InterfaceDef.Values.Any(machine => machine.Name == test.Main && !machine.IsSpec))
    {
        throw handler.NoMain(test.SourceLocation,
            $"machine {test.Main} does not exist in the test module");
    }
}
/// <summary>
/// Checks that an implementation declaration's module is closed with respect
/// to created interfaces: every interface it can create is implemented inside it.
/// </summary>
/// <param name="impl">Implementation declaration to validate.</param>
internal void CheckImplementationDecl(Implementation impl)
{
    var moduleInfo = impl.ModExpr.ModuleInfo;
    // First created-but-unimplemented interface, if any.
    var missing = moduleInfo.Creates.Interfaces
        .FirstOrDefault(i => !moduleInfo.InterfaceDef.Keys.Contains(i));
    if (missing != null)
    {
        throw handler.NotClosed(missing.SourceLocation,
            $"implementation module is not closed with respect to created interfaces; interface {missing.Name} is created but not implemented inside the module");
    }
}
/// <summary>
/// Checks well-formedness of a "bind" module expression (binding interfaces to
/// the machines that implement them) and populates its ModuleInfo.
/// </summary>
private void CheckWellFormedness(BindModuleExpr bindExpr)
{
    // Memoized: a non-null ModuleInfo means this expression was already checked.
    if (bindExpr.ModuleInfo != null)
    {
        return;
    }
    // checked already that the bindings is a function
    // check that receive set of interface is a subset of the receive set of machine
    foreach (Tuple<Interface, Machine> binding in bindExpr.Bindings)
    {
        if (!binding.Item1.ReceivableEvents.IsSubsetEqOf(binding.Item2.Receives))
        {
            throw handler.InvalidBindExpr(bindExpr.SourceLocation,
                $"receive set of {binding.Item1.Name} is not a subset of receive set of {binding.Item2.Name}");
        }
        // the machine's constructor payload must accept the interface's payload
        if (!binding.Item2.PayloadType.IsAssignableFrom(binding.Item1.PayloadType))
        {
            throw handler.InvalidBindExpr(bindExpr.SourceLocation,
                $"payload type of {binding.Item1.Name} is not a subtype of payload type of {binding.Item2.Name}");
        }
    }
    //populate the attributes of the module
    bindExpr.ModuleInfo = new ModuleInfo();
    ModuleInfo currentModuleInfo = bindExpr.ModuleInfo;
    // 1) Private events and private interfaces are empty
    // 2) Initialize Ip (interface-definition map: interface -> implementing machine)
    foreach (Tuple<Interface, Machine> binding in bindExpr.Bindings)
    {
        currentModuleInfo.InterfaceDef.Add(binding.Item1, binding.Item2);
    }
    // 3) Initialize Lp (link map: each created interface initially maps to itself)
    foreach (Tuple<Interface, Machine> binding in bindExpr.Bindings)
    {
        currentModuleInfo.LinkMap[binding.Item1] = new Dictionary<Interface, Interface>();
        foreach (Interface interfaceCreated in binding.Item2.Creates.Interfaces)
        {
            currentModuleInfo.LinkMap[binding.Item1][interfaceCreated] = interfaceCreated;
        }
    }
    List<Machine> boundMachines = bindExpr.Bindings.Select(b => b.Item2).ToList();
    // 4) compute the sends
    currentModuleInfo.Sends.AddEvents(boundMachines.SelectMany(m => m.Sends.Events));
    // 5) compute the receives
    currentModuleInfo.Receives.AddEvents(boundMachines.SelectMany(m => m.Receives.Events));
    // 6) compute the creates (resolved through the link map built in step 3)
    foreach (Tuple<Interface, Machine> binding in bindExpr.Bindings)
    {
        foreach (Interface createdInterface in binding.Item2.Creates.Interfaces)
        {
            currentModuleInfo.Creates.AddInterface(currentModuleInfo.LinkMap[binding.Item1][createdInterface]);
        }
    }
}
/// <summary>
/// Checks well-formedness of a "rename" module expression (replacing every
/// occurrence of OldInterface with NewInterface) and populates its ModuleInfo
/// by rewriting the component module's maps.
/// </summary>
private void CheckWellFormedness(RenameModuleExpr renameExpr)
{
    // Memoized: a non-null ModuleInfo means this expression was already checked.
    if (renameExpr.ModuleInfo != null)
    {
        return;
    }
    //check that component module is wellformed
    CheckWellFormedness(renameExpr.ComponentModule);
    //check that the module is wellformed
    ModuleInfo componentModuleInfo = renameExpr.ComponentModule.ModuleInfo;
    // 1) receives set of both new and old interface must be same
    if (!renameExpr.NewInterface.ReceivableEvents.IsSame(renameExpr.OldInterface.ReceivableEvents))
    {
        throw handler.InvalidRenameExpr(renameExpr.SourceLocation,
            $"{renameExpr.NewInterface.Name} and {renameExpr.OldInterface.Name} must have the same receive set");
    }
    // 2) oldInterface must belong to implemented or created interface
    if (!componentModuleInfo.Creates.Interfaces.Union(componentModuleInfo.InterfaceDef.Keys)
        .Contains(renameExpr.OldInterface))
    {
        throw handler.InvalidRenameExpr(renameExpr.SourceLocation,
            $"{renameExpr.OldInterface.Name} must belong to either created interfaces or bounded interfaces of the module");
    }
    // 3) newInterface must not belong to created and implemented interfaces.
    if (componentModuleInfo.Creates.Interfaces.Union(componentModuleInfo.InterfaceDef.Keys)
        .Contains(renameExpr.NewInterface))
    {
        throw handler.InvalidRenameExpr(renameExpr.SourceLocation,
            $"{renameExpr.NewInterface.Name} must not belong to created interfaces or bounded interfaces of the module");
    }
    //populate the attributes of the module
    renameExpr.ModuleInfo = new ModuleInfo();
    ModuleInfo currentModuleInfo = renameExpr.ModuleInfo;
    // compute the new monitor map: substitute old -> new in each observed-interface list
    foreach (KeyValuePair<Machine, IEnumerable<Interface>> monMap in componentModuleInfo.MonitorMap)
    {
        List<Interface> interfaceList = monMap.Value.Select(@interface => @interface.Equals(renameExpr.OldInterface)
            ? renameExpr.NewInterface
            : @interface).ToList();
        currentModuleInfo.MonitorMap[monMap.Key] = interfaceList;
    }
    // compute the new private interfaces (same substitution)
    foreach (Interface @interface in componentModuleInfo.PrivateInterfaces.Interfaces)
    {
        currentModuleInfo.PrivateInterfaces.AddInterface(
            @interface.Equals(renameExpr.OldInterface)
            ? renameExpr.NewInterface
            : @interface);
    }
    // compute the new interface definition map (rename only the key; machine unchanged)
    foreach (KeyValuePair<Interface, Machine> interfaceDefItem in componentModuleInfo.InterfaceDef)
    {
        currentModuleInfo.InterfaceDef.Add(
            interfaceDefItem.Key.Equals(renameExpr.OldInterface)
            ? renameExpr.NewInterface
            : interfaceDefItem.Key, interfaceDefItem.Value);
    }
    // compute the new link map: substitute in both the outer key and the mapped-to
    // interfaces (the inner keys are left untouched)
    foreach (KeyValuePair<Interface, IDictionary<Interface, Interface>> linkMapItem in componentModuleInfo
        .LinkMap)
    {
        Interface keyInterface = linkMapItem.Key.Equals(renameExpr.OldInterface)
            ? renameExpr.NewInterface
            : linkMapItem.Key;
        currentModuleInfo.LinkMap[keyInterface] = new Dictionary<Interface, Interface>();
        foreach (KeyValuePair<Interface, Interface> localLinkMap in linkMapItem.Value)
        {
            currentModuleInfo.LinkMap[keyInterface].Add(localLinkMap.Key,
                localLinkMap.Value.Equals(renameExpr.OldInterface)
                ? renameExpr.NewInterface
                : localLinkMap.Value);
        }
    }
    // compute the sends
    currentModuleInfo.Sends.AddEvents(componentModuleInfo.Sends.Events);
    // compute the receives
    currentModuleInfo.Receives.AddEvents(componentModuleInfo.Receives.Events);
    // compute the creates (resolved through the rewritten link map)
    foreach (KeyValuePair<Interface, Machine> binding in currentModuleInfo.InterfaceDef)
    {
        foreach (Interface createdInterface in binding.Value.Creates.Interfaces)
        {
            currentModuleInfo.Creates.AddInterface(currentModuleInfo.LinkMap[binding.Key][createdInterface]);
        }
    }
}
/// <summary>
/// Checks well-formedness of a union or composition over a set of component modules
/// and populates composeExpr.ModuleInfo with the combined attributes. For a
/// composition (composeExpr.IsComposition) the components' output actions (sends and
/// creates) must additionally be pairwise disjoint.
/// </summary>
private void CheckWellFormedness(UnionOrComposeModuleExpr composeExpr)
{
    // ModuleInfo doubles as an "already checked" memoization flag.
    if (composeExpr.ModuleInfo != null)
    {
        return;
    }
    //check that all component modules are wellformed
    foreach (IPModuleExpr module in composeExpr.ComponentModules)
    {
        CheckWellFormedness(module);
    }
    //check if the current module is wellformed
    // All checks below are pairwise over distinct (ordered) pairs of components.
    // TODO: Woah, this is O(n^2). Can we get this down to O(n log n) at most?
    foreach (IPModuleExpr module1 in composeExpr.ComponentModules)
    {
        foreach (IPModuleExpr module2 in composeExpr.ComponentModules)
        {
            if (module1 == module2)
            {
                continue;
            }
            ModuleInfo module1Info = module1.ModuleInfo;
            ModuleInfo module2Info = module2.ModuleInfo;
            List<PEvent> allPrivateEvents = module1Info
                .PrivateEvents.Events
                .Union(module2Info.PrivateEvents.Events).ToList();
            List<PEvent> allSendAndReceiveEvents =
                module1Info.Sends.Events.Union(
                    module1Info.Receives.Events.Union(
                        module2Info.Receives.Events.Union(
                            module2Info.Sends.Events))).ToList();
            // 1) domain of interface def map is disjoint
            foreach (Interface @interface in module1Info.InterfaceDef.Keys.Intersect(
                module2Info.InterfaceDef.Keys))
            {
                throw handler.InvalidCompositionExpr(module1.SourceLocation,
                    "bound interfaces after composition are not disjoint, e.g., " +
                    $"interface {@interface.Name} is bound in both the modules being composed");
            }
            // 2) no private events in the sends or receives events
            foreach (PEvent @event in allSendAndReceiveEvents.Intersect(allPrivateEvents))
            {
                throw handler.InvalidCompositionExpr(module1.SourceLocation,
                    "private events after composition are not disjoint from send and receives set, e.g., " +
                    $"after composition private event {@event.Name} belongs to both private and public (sends or receives) events");
            }
            // 3) no private events in the sends or receives permissions
            foreach (PEvent @event in allSendAndReceiveEvents)
            {
                IEnumerable<PEvent> permissionsEmbedded = GetPermissions(@event.PayloadType.AllowedPermissions?.Value);
                foreach (PEvent privatePermission in allPrivateEvents.Where(
                    ev => permissionsEmbedded.Contains(ev)))
                {
                    throw handler.InvalidCompositionExpr(module1.SourceLocation,
                        "private events after composition are not disjoint from permissions in events sent or received, e.g., " +
                        $"after composition private event {privatePermission.Name} is in the permissions set of {@event.Name}");
                }
            }
            // 4) private events must not be receivable by interfaces that are created
            // but not bound (or bound but not created) in either component.
            // NOTE(review): the *ImplAndNotCreated / *CreatedAndNotImpl names appear
            // swapped relative to their contents; harmless here since only the union
            // of all four sets is used below — but worth confirming and renaming.
            IEnumerable<Interface> interfaceImplAndNotCreated1 =
                module1Info.Creates.Interfaces.Except(module1Info.InterfaceDef.Keys);
            IEnumerable<Interface> interfaceCreatedAndNotImpl1 =
                module1Info.InterfaceDef.Keys.Except(module1Info.Creates.Interfaces);
            IEnumerable<Interface> interfaceImplAndNotCreated2 =
                module2Info.Creates.Interfaces.Except(module2Info.InterfaceDef.Keys);
            IEnumerable<Interface> interfaceCreatedAndNotImpl2 =
                module2Info.InterfaceDef.Keys.Except(module2Info.Creates.Interfaces);
            foreach (Interface @interface in interfaceImplAndNotCreated1.Union(
                interfaceCreatedAndNotImpl1.Union(
                    interfaceImplAndNotCreated2.Union(interfaceCreatedAndNotImpl2))))
            {
                foreach (PEvent @event in allPrivateEvents.Where(
                    ev => @interface.ReceivableEvents.Contains(ev)))
                {
                    throw handler.InvalidCompositionExpr(module1.SourceLocation,
                        $"After composition, private event {@event.Name} is in the received events of interface {@interface.Name} which is created or bound in the module");
                }
            }
            // ensure also that the monitor maps are disjoint
            foreach (Machine monitor in module1Info.MonitorMap.Keys.Intersect(module2Info.MonitorMap.Keys))
            {
                throw handler.InvalidCompositionExpr(module1.SourceLocation,
                    $"monitor {monitor.Name} is attached in more than one modules being composed");
            }
            // if composition then output actions must be disjoint
            if (composeExpr.IsComposition)
            {
                foreach (PEvent @event in module1Info.Sends.Events.Intersect(module2Info.Sends.Events))
                {
                    throw handler.InvalidCompositionExpr(module1.SourceLocation,
                        $"output sends are not disjoint, {@event.Name} belongs to the sends of multiple composed module");
                }
                foreach (Interface @interface in module1Info.Creates.Interfaces.Intersect(
                    module2Info.Creates.Interfaces))
                {
                    throw handler.InvalidCompositionExpr(module1.SourceLocation,
                        $"output creates are not disjoint, {@interface.Name} belongs to the creates of multiple composed module");
                }
            }
            // no private event of one component may appear in the constructor-type
            // permissions of an interface the other component binds or creates
            foreach (Interface exportedOrCreatedInterface in module1.ModuleInfo.InterfaceDef.Keys.Union(module1.ModuleInfo
                .Creates.Interfaces))
            {
                foreach (PEvent priEvent in module2.ModuleInfo.PrivateEvents.Events.Where(ev =>
                    GetPermissions(exportedOrCreatedInterface.PayloadType.AllowedPermissions?.Value).Contains(ev)))
                {
                    throw handler.InvalidHideEventExpr(module2.SourceLocation,
                        $"private event {priEvent.Name} belongs to the permissions of the constructor type of public interface {exportedOrCreatedInterface.Name}");
                }
            }
        }
    }
    composeExpr.ModuleInfo = new ModuleInfo();
    ModuleInfo currentModuleInfo = composeExpr.ModuleInfo;
    //populate the attributes of the module
    // Per-component attributes are merged; InterfaceDef.Add cannot collide because
    // check 1) above guaranteed the bound-interface sets are pairwise disjoint.
    foreach (ModuleInfo module in composeExpr.ComponentModules.Select(cm => cm.ModuleInfo))
    {
        currentModuleInfo.PrivateEvents.AddEvents(module.PrivateEvents.Events);
        currentModuleInfo.PrivateInterfaces.AddInterfaces(module.PrivateInterfaces.Interfaces);
        foreach (KeyValuePair<Machine, IEnumerable<Interface>> monMap in module.MonitorMap)
        {
            currentModuleInfo.MonitorMap[monMap.Key] = monMap.Value.ToList();
        }
        foreach (KeyValuePair<Interface, IDictionary<Interface, Interface>> linkMapItem in module.LinkMap)
        {
            currentModuleInfo.LinkMap[linkMapItem.Key] = new Dictionary<Interface, Interface>();
            foreach (KeyValuePair<Interface, Interface> localLinkMap in linkMapItem.Value)
            {
                currentModuleInfo.LinkMap[linkMapItem.Key].Add(localLinkMap.Key, localLinkMap.Value);
            }
        }
        foreach (KeyValuePair<Interface, Machine> ipItem in module.InterfaceDef)
        {
            currentModuleInfo.InterfaceDef.Add(ipItem.Key, ipItem.Value);
        }
    }
    // compute all the derived attributes
    currentModuleInfo.Sends.AddEvents(composeExpr.ComponentModules.SelectMany(m => m.ModuleInfo.Sends.Events));
    currentModuleInfo.Receives.AddEvents(
        composeExpr.ComponentModules.SelectMany(m => m.ModuleInfo.Receives.Events));
    currentModuleInfo.Creates.AddInterfaces(
        composeExpr.ComponentModules.SelectMany(m => m.ModuleInfo.Creates.Interfaces));
}
/// <summary>
/// Checks well-formedness of a hide-event expression and populates
/// hideEExpr.ModuleInfo with the derived attributes. Hidden events become private:
/// they are removed from the module's sends and receives sets.
/// </summary>
private void CheckWellFormedness(HideEventModuleExpr hideEExpr)
{
    // ModuleInfo doubles as an "already checked" memoization flag.
    if (hideEExpr.ModuleInfo != null)
    {
        return;
    }
    //check that component module is wellformed
    CheckWellFormedness(hideEExpr.ComponentModule);
    //check if the current module is wellformed
    ModuleInfo componentModuleInfo = hideEExpr.ComponentModule.ModuleInfo;
    // 1) e \subseteq ER \intersect ES : an event may only be hidden if the module
    // both sends and receives it
    List<PEvent> receiveAndsends = componentModuleInfo
        .Sends.Events
        .Where(ev => componentModuleInfo.Receives.Contains(ev))
        .ToList();
    if (!hideEExpr.HideEvents.IsSubsetEqOf(receiveAndsends))
    {
        PEvent @event = hideEExpr.HideEvents.Events.First(h => !receiveAndsends.Contains(h));
        throw handler.InvalidHideEventExpr(hideEExpr.SourceLocation,
            $"event {@event.Name} cannot be made private, it must belong to both receive and send set of the module");
    }
    // 2) only events in interfaces that are both created and implemented by the module can be hidden
    // FIX: the two locals below were previously named with their meanings swapped
    // (interfaceImplAndNotCreated held Creates \ InterfaceDef and vice versa);
    // behavior was unaffected since only their union is used, but the names now
    // match their contents.
    IEnumerable<Interface> interfaceCreatedAndNotBound =
        componentModuleInfo.Creates.Interfaces.Except(componentModuleInfo.InterfaceDef.Keys);
    IEnumerable<Interface> interfaceBoundAndNotCreated =
        componentModuleInfo.InterfaceDef.Keys.Except(componentModuleInfo.Creates.Interfaces);
    foreach (Interface @interface in interfaceBoundAndNotCreated
        .Union(interfaceCreatedAndNotBound)
        .Where(i => hideEExpr.HideEvents.Intersects(i.ReceivableEvents.Events)))
    {
        PEvent @event = hideEExpr.HideEvents.Events.First(ev => @interface.ReceivableEvents.Contains(ev));
        throw handler.InvalidHideEventExpr(hideEExpr.SourceLocation,
            $"event {@event.Name} cannot be made private as interface {@interface.Name} contains this event. " +
            "Only events in interfaces that are both created and bound in the module can be hidden");
    }
    // 3) events received and sent by the module must not include private permissions
    IEnumerable<PEvent> eventsReceivedAndSent =
        componentModuleInfo.Sends.Events.Union(componentModuleInfo.Receives.Events);
    foreach (PEvent @event in eventsReceivedAndSent.Except(hideEExpr.HideEvents.Events))
    {
        // FIX: AllowedPermissions can be null; every other call site in this file
        // uses the null-conditional operator, and omitting it here would throw
        // NullReferenceException for events with no permission annotation.
        IEnumerable<PEvent> permissionsEmbedded = GetPermissions(@event.PayloadType.AllowedPermissions?.Value);
        foreach (PEvent privatePermission in hideEExpr.HideEvents.Events.Where(
            ev => permissionsEmbedded.Contains(ev)))
        {
            throw handler.InvalidHideEventExpr(hideEExpr.SourceLocation,
                $"event {privatePermission} cannot be made private as it belongs to allowed permission of {@event.Name} which is received or sent by the module");
        }
    }
    // 4) hidden events must not appear in the constructor-type permissions of
    // interfaces that the module binds or creates.
    // FIX: this loop previously read hideEExpr.ModuleInfo, which is guaranteed to be
    // null at this point (the guard at the top returned if it were set, and it is
    // assigned only below) and therefore always threw NullReferenceException; the
    // interfaces to inspect are those of the component module.
    foreach (Interface exportedOrCreatedInterface in componentModuleInfo.InterfaceDef.Keys.Union(
        componentModuleInfo.Creates.Interfaces))
    {
        foreach (PEvent priEvent in hideEExpr.HideEvents.Events.Where(ev =>
            GetPermissions(exportedOrCreatedInterface.PayloadType.AllowedPermissions?.Value).Contains(ev)))
        {
            throw handler.InvalidHideEventExpr(hideEExpr.SourceLocation,
                $"event {priEvent.Name} cannot be made private as it belongs to the permissions of the constructor type of interface {exportedOrCreatedInterface.Name}");
        }
    }
    hideEExpr.ModuleInfo = new ModuleInfo();
    ModuleInfo currentModuleInfo = hideEExpr.ModuleInfo;
    //populate the attributes of the module
    // hidden events join the private set and leave the sends/receives sets;
    // everything else carries over from the component module unchanged
    currentModuleInfo.PrivateEvents.AddEvents(
        componentModuleInfo.PrivateEvents.Events.Union(hideEExpr.HideEvents.Events));
    currentModuleInfo.PrivateInterfaces.AddInterfaces(componentModuleInfo.PrivateInterfaces.Interfaces);
    currentModuleInfo.Sends.AddEvents(componentModuleInfo.Sends.Events.Except(hideEExpr.HideEvents.Events));
    currentModuleInfo.Receives.AddEvents(
        componentModuleInfo.Receives.Events.Except(hideEExpr.HideEvents.Events));
    currentModuleInfo.Creates.AddInterfaces(componentModuleInfo.Creates.Interfaces);
    foreach (KeyValuePair<Machine, IEnumerable<Interface>> monMap in componentModuleInfo.MonitorMap)
    {
        currentModuleInfo.MonitorMap[monMap.Key] = monMap.Value.ToList();
    }
    foreach (KeyValuePair<Interface, IDictionary<Interface, Interface>> linkMapItem in componentModuleInfo
        .LinkMap)
    {
        currentModuleInfo.LinkMap[linkMapItem.Key] = new Dictionary<Interface, Interface>();
        foreach (KeyValuePair<Interface, Interface> localLinkMap in linkMapItem.Value)
        {
            currentModuleInfo.LinkMap[linkMapItem.Key].Add(localLinkMap.Key, localLinkMap.Value);
        }
    }
    foreach (KeyValuePair<Interface, Machine> ipItem in componentModuleInfo.InterfaceDef)
    {
        currentModuleInfo.InterfaceDef.Add(ipItem.Key, ipItem.Value);
    }
}
/// <summary>
/// Checks well-formedness of a hide-interface expression and populates
/// hideIExpr.ModuleInfo. Hidden interfaces join the private-interface set; all
/// other attributes carry over from the component module unchanged.
/// </summary>
private void CheckWellFormedness(HideInterfaceModuleExpr hideIExpr)
{
    // Already analyzed — ModuleInfo serves as the memoization flag.
    if (hideIExpr.ModuleInfo != null)
    {
        return;
    }
    // The component must be well-formed before its attributes can be consumed.
    CheckWellFormedness(hideIExpr.ComponentModule);
    ModuleInfo innerInfo = hideIExpr.ComponentModule.ModuleInfo;
    // 1) every interface being hidden must be both created and bound by the component
    IEnumerable<Interface> hideable =
        innerInfo.Creates.Interfaces.Intersect(innerInfo.InterfaceDef.Keys);
    Interface badInterface =
        hideIExpr.HideInterfaces.FirstOrDefault(it => !hideable.Contains(it));
    if (badInterface != null)
    {
        throw handler.InvalidHideInterfaceExpr(hideIExpr.SourceLocation,
            $"interface {badInterface.Name} cannot be made private. Interface {badInterface.Name} must be both created and bounded in the module");
    }
    hideIExpr.ModuleInfo = new ModuleInfo();
    ModuleInfo result = hideIExpr.ModuleInfo;
    // Copy over the component's attributes, extending only the private interfaces.
    result.PrivateEvents.AddEvents(innerInfo.PrivateEvents.Events);
    result.PrivateInterfaces.AddInterfaces(
        innerInfo.PrivateInterfaces.Interfaces.Union(hideIExpr.HideInterfaces));
    result.Sends.AddEvents(innerInfo.Sends.Events);
    result.Receives.AddEvents(innerInfo.Receives.Events);
    result.Creates.AddInterfaces(innerInfo.Creates.Interfaces);
    foreach (Machine monitor in innerInfo.MonitorMap.Keys)
    {
        result.MonitorMap[monitor] = innerInfo.MonitorMap[monitor].ToList();
    }
    foreach (Interface boundInterface in innerInfo.LinkMap.Keys)
    {
        var copiedLinks = new Dictionary<Interface, Interface>();
        foreach (KeyValuePair<Interface, Interface> link in innerInfo.LinkMap[boundInterface])
        {
            copiedLinks.Add(link.Key, link.Value);
        }
        result.LinkMap[boundInterface] = copiedLinks;
    }
    foreach (KeyValuePair<Interface, Machine> definition in innerInfo.InterfaceDef)
    {
        result.InterfaceDef.Add(definition.Key, definition.Value);
    }
}
}
}
| |
// ***********************************************************************
// Copyright (c) 2008-2018 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using NUnit.Compatibility;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal;
using NUnit.Framework.Internal.Builders;
namespace NUnit.Framework
{
/// <summary>
/// TestCaseSourceAttribute indicates the source to be used to
/// provide test cases for a test method. The source is a static
/// field, property or method (or an IEnumerable Type) resolved
/// via reflection at test-discovery time.
/// </summary>
[AttributeUsage(AttributeTargets.Method, AllowMultiple = true, Inherited = false)]
public class TestCaseSourceAttribute : NUnitAttribute, ITestBuilder, IImplyFixture
{
    // Shared builder that turns each parameter set into a runnable TestMethod.
    private readonly NUnitTestCaseBuilder _builder = new NUnitTestCaseBuilder();

    #region Constructors

    /// <summary>
    /// Construct with the name of the method, property or field that will provide data
    /// </summary>
    /// <param name="sourceName">The name of a static method, property or field that will provide data.</param>
    public TestCaseSourceAttribute(string sourceName)
    {
        this.SourceName = sourceName;
    }

    /// <summary>
    /// Construct with a Type and name
    /// </summary>
    /// <param name="sourceType">The Type that will provide data</param>
    /// <param name="sourceName">The name of a static method, property or field that will provide data.</param>
    /// <param name="methodParams">A set of parameters passed to the method, works only if the Source Name is a method.
    /// If the source name is a field or property has no effect.</param>
    public TestCaseSourceAttribute(Type sourceType, string sourceName, object[] methodParams)
    {
        this.MethodParams = methodParams;
        this.SourceType = sourceType;
        this.SourceName = sourceName;
    }

    /// <summary>
    /// Construct with a Type and name
    /// </summary>
    /// <param name="sourceType">The Type that will provide data</param>
    /// <param name="sourceName">The name of a static method, property or field that will provide data.</param>
    public TestCaseSourceAttribute(Type sourceType, string sourceName)
    {
        this.SourceType = sourceType;
        this.SourceName = sourceName;
    }

    /// <summary>
    /// Construct with a name
    /// </summary>
    /// <param name="sourceName">The name of a static method, property or field that will provide data.</param>
    /// <param name="methodParams">A set of parameters passed to the method, works only if the Source Name is a method.
    /// If the source name is a field or property has no effect.</param>
    public TestCaseSourceAttribute(string sourceName, object[] methodParams)
    {
        this.MethodParams = methodParams;
        this.SourceName = sourceName;
    }

    /// <summary>
    /// Construct with a Type
    /// </summary>
    /// <param name="sourceType">The type that will provide data</param>
    public TestCaseSourceAttribute(Type sourceType)
    {
        this.SourceType = sourceType;
    }

    #endregion

    #region Properties

    /// <summary>
    /// A set of parameters passed to the method, works only if the Source Name is a method.
    /// If the source name is a field or property has no effect.
    /// </summary>
    public object[] MethodParams { get; }

    /// <summary>
    /// The name of the method, property or field to be used as a source
    /// </summary>
    public string SourceName { get; }

    /// <summary>
    /// A Type to be used as a source
    /// </summary>
    public Type SourceType { get; }

    /// <summary>
    /// Gets or sets the category associated with every fixture created from
    /// this attribute. May be a single category or a comma-separated list.
    /// </summary>
    public string Category { get; set; }

    #endregion

    #region ITestBuilder Members

    /// <summary>
    /// Builds any number of tests from the specified method and context.
    /// </summary>
    /// <param name="method">The method to be used as a test.</param>
    /// <param name="suite">The parent to which the test will be added.</param>
    /// <returns>One TestMethod per test case produced by the source; a single
    /// non-runnable placeholder when the source yields no cases for a
    /// parameterless method.</returns>
    public IEnumerable<TestMethod> BuildFrom(FixtureMethod method, Test suite)
    {
        int count = 0;

        foreach (TestCaseParameters parms in GetTestCasesFor(method))
        {
            count++;
            yield return _builder.BuildTestMethod(method, suite, parms);
        }

        // If count > 0, error messages will be shown for each case
        // but if it's 0, we need to add an extra "test" to show the message.
        if (count == 0 && method.Method.GetParameters().Length == 0)
        {
            var parms = new TestCaseParameters();
            parms.RunState = RunState.NotRunnable;
            parms.Properties.Set(PropertyNames.SkipReason, "TestCaseSourceAttribute may not be used on a method without parameters");

            yield return _builder.BuildTestMethod(method, suite, parms);
        }
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Materializes the test cases produced by the configured source for the
    /// given method. Any exception thrown while reading the source is converted
    /// into a single failing test case rather than propagated to the caller.
    /// </summary>
    /// <param name="method">The test method the cases are being built for.</param>
    /// <returns>The list of test cases (possibly a single error case).</returns>
    private IEnumerable<ITestCaseData> GetTestCasesFor(FixtureMethod method)
    {
        List<ITestCaseData> data = new List<ITestCaseData>();

        try
        {
            IEnumerable source = GetTestCaseSource(method.FixtureType);

            if (source != null)
            {
                foreach (object item in source)
                {
                    // First handle two easy cases:
                    // 1. Source is null. This is really an error but if we
                    //    throw an exception we simply get an invalid fixture
                    //    without good info as to what caused it. Passing a
                    //    single null argument will cause an error to be
                    //    reported at the test level, in most cases.
                    // 2. User provided an ITestCaseData and we just use it.
                    ITestCaseData parms = item == null
                        ? new TestCaseParameters(new object[] { null })
                        : item as ITestCaseData;

                    if (parms == null)
                    {
                        object[] args = null;

                        // 3. An array was passed, it may be an object[]
                        //    or possibly some other kind of array, which
                        //    TestCaseSource can accept.
                        var array = item as Array;
                        if (array != null)
                        {
                            // If array has the same number of elements as parameters
                            // and it does not fit exactly into single existing parameter
                            // we believe that this array contains arguments, not is a bare
                            // argument itself.
                            var parameters = method.Method.GetParameters();
                            var argsNeeded = parameters.Length;
                            if (argsNeeded > 0 && argsNeeded == array.Length && parameters[0].ParameterType != array.GetType())
                            {
                                args = new object[array.Length];
                                for (var i = 0; i < array.Length; i++)
                                    args[i] = array.GetValue(i);
                            }
                        }

                        // 4. Anything else is a single bare argument.
                        if (args == null)
                        {
                            args = new object[] { item };
                        }

                        parms = new TestCaseParameters(args);
                    }

                    // Apply any categories declared on the attribute to every case.
                    if (this.Category != null)
                        foreach (string cat in this.Category.Split(new char[] { ',' }))
                            parms.Properties.Add(PropertyNames.Category, cat);

                    data.Add(parms);
                }
            }
            else
            {
                data.Clear();
                data.Add(new TestCaseParameters(new Exception("The test case source could not be found.")));
            }
        }
        catch (Exception ex)
        {
            // Surface the failure as a single error case instead of breaking discovery.
            data.Clear();
            data.Add(new TestCaseParameters(ex));
        }

        return data;
    }

    /// <summary>
    /// Resolves the configured source member to an IEnumerable of test cases.
    /// Returns null when the member cannot be found or is ambiguous; returns a
    /// single non-runnable case (via ReturnErrorAsParameter) when the member
    /// exists but is misused (non-static, or parameters given to a field/property).
    /// </summary>
    /// <param name="type">The fixture type, used when no SourceType was given.</param>
    private IEnumerable GetTestCaseSource(Type type)
    {
        Type sourceType = SourceType ?? type;

        // Handle Type implementing IEnumerable separately
        if (SourceName == null)
            return Reflect.Construct(sourceType, null) as IEnumerable;

        MemberInfo[] members = sourceType.GetMember(SourceName,
            BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static | BindingFlags.Instance | BindingFlags.FlattenHierarchy);

        // Exactly one matching member is required; zero or several fall through
        // to the null return, reported as "source could not be found".
        if (members.Length == 1)
        {
            MemberInfo member = members[0];

            var field = member as FieldInfo;
            if (field != null)
                return field.IsStatic
                    ? (MethodParams == null ? (IEnumerable)field.GetValue(null)
                        : ReturnErrorAsParameter(ParamGivenToField))
                    : ReturnErrorAsParameter(SourceMustBeStatic);

            var property = member as PropertyInfo;
            if (property != null)
                return property.GetGetMethod(true).IsStatic
                    ? (MethodParams == null ? (IEnumerable)property.GetValue(null, null)
                        : ReturnErrorAsParameter(ParamGivenToProperty))
                    : ReturnErrorAsParameter(SourceMustBeStatic);

            var m = member as MethodInfo;
            if (m != null)
                return m.IsStatic
                    ? (MethodParams == null || m.GetParameters().Length == MethodParams.Length ? (IEnumerable)m.Invoke(null, MethodParams)
                        : ReturnErrorAsParameter(NumberOfArgsDoesNotMatch))
                    : ReturnErrorAsParameter(SourceMustBeStatic);
        }

        return null;
    }

    /// <summary>
    /// Wraps an error message in a single non-runnable test case so the problem
    /// surfaces as a test result rather than a silent discovery failure.
    /// </summary>
    private static IEnumerable ReturnErrorAsParameter(string errorMessage)
    {
        var parms = new TestCaseParameters();
        parms.RunState = RunState.NotRunnable;
        parms.Properties.Set(PropertyNames.SkipReason, errorMessage);
        return new TestCaseParameters[] { parms };
    }

    // Error messages surfaced to the user via ReturnErrorAsParameter.
    private const string SourceMustBeStatic =
        "The sourceName specified on a TestCaseSourceAttribute must refer to a static field, property or method.";
    private const string ParamGivenToField = "You have specified a data source field but also given a set of parameters. Fields cannot take parameters, " +
                                             "please revise the 3rd parameter passed to the TestCaseSourceAttribute and either remove " +
                                             "it or specify a method.";
    private const string ParamGivenToProperty = "You have specified a data source property but also given a set of parameters. " +
                                                "Properties cannot take parameters, please revise the 3rd parameter passed to the " +
                                                "TestCaseSource attribute and either remove it or specify a method.";
    private const string NumberOfArgsDoesNotMatch = "You have given the wrong number of arguments to the method in the TestCaseSourceAttribute" +
                                                    ", please check the number of parameters passed in the object is correct in the 3rd parameter for the " +
                                                    "TestCaseSourceAttribute and this matches the number of parameters in the target method and try again.";

    #endregion
}
}
| |
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True
using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;
namespace NetCoreForce.Models
{
///<summary>
/// Site
///<para>SObject Name: Site</para>
///<para>Custom Object: False</para>
///</summary>
public class SfSite : SObject
{
[JsonIgnore]
public static string SObjectTypeName
{
get { return "Site"; }
}
///<summary>
/// Site ID
/// <para>Name: Id</para>
/// <para>SF Type: id</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "id")]
[Updateable(false), Createable(false)]
public string Id { get; set; }
///<summary>
/// Site Name
/// <para>Name: Name</para>
/// <para>SF Type: string</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "name")]
[Updateable(false), Createable(false)]
public string Name { get; set; }
///<summary>
/// Site Subdomain Prefix
/// <para>Name: Subdomain</para>
/// <para>SF Type: string</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "subdomain")]
[Updateable(false), Createable(false)]
public string Subdomain { get; set; }
///<summary>
/// Default Web Address
/// <para>Name: UrlPathPrefix</para>
/// <para>SF Type: string</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "urlPathPrefix")]
[Updateable(false), Createable(false)]
public string UrlPathPrefix { get; set; }
///<summary>
/// User ID
/// <para>Name: GuestUserId</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "guestUserId")]
[Updateable(false), Createable(false)]
public string GuestUserId { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: GuestUser</para>
///</summary>
[JsonProperty(PropertyName = "guestUser")]
[Updateable(false), Createable(false)]
public SfUser GuestUser { get; set; }
///<summary>
/// Site Status
/// <para>Name: Status</para>
/// <para>SF Type: picklist</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "status")]
[Updateable(false), Createable(false)]
public string Status { get; set; }
///<summary>
/// User ID
/// <para>Name: AdminId</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "adminId")]
[Updateable(false), Createable(false)]
public string AdminId { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: Admin</para>
///</summary>
[JsonProperty(PropertyName = "admin")]
[Updateable(false), Createable(false)]
public SfUser Admin { get; set; }
///<summary>
/// Enable Feeds
/// <para>Name: OptionsEnableFeeds</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsEnableFeeds")]
[Updateable(false), Createable(false)]
public bool? OptionsEnableFeeds { get; set; }
///<summary>
/// Require Secure Connections (HTTPS)
/// <para>Name: OptionsRequireHttps</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsRequireHttps")]
[Updateable(false), Createable(false)]
public bool? OptionsRequireHttps { get; set; }
///<summary>
/// Guest Access to the Payments API
/// <para>Name: OptionsAllowGuestPaymentsApi</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowGuestPaymentsApi")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowGuestPaymentsApi { get; set; }
///<summary>
/// HasStoredPathPrefix
/// <para>Name: OptionsHasStoredPathPrefix</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsHasStoredPathPrefix")]
[Updateable(false), Createable(false)]
public bool? OptionsHasStoredPathPrefix { get; set; }
///<summary>
/// Enable Standard Home Page
/// <para>Name: OptionsAllowHomePage</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowHomePage")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowHomePage { get; set; }
///<summary>
/// Enable Standard Ideas Pages
/// <para>Name: OptionsAllowStandardIdeasPages</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowStandardIdeasPages")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowStandardIdeasPages { get; set; }
///<summary>
  /// Enable Standard Search Pages
/// <para>Name: OptionsAllowStandardSearch</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowStandardSearch")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowStandardSearch { get; set; }
///<summary>
/// Enable Standard Search Pages
/// <para>Name: OptionsAllowStandardLookups</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowStandardLookups")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowStandardLookups { get; set; }
///<summary>
/// Enable Standard Answers Pages
/// <para>Name: OptionsAllowStandardAnswersPages</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowStandardAnswersPages")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowStandardAnswersPages { get; set; }
// NOTE(review): this appears to be a generated Salesforce "Site" SObject model. Every field below is
// marked [Updateable(false), Createable(false)], i.e. server-managed and read-only from the client side.
///<summary>
/// Guest Access to the Support API
/// <para>Name: OptionsAllowGuestSupportApi</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowGuestSupportApi")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowGuestSupportApi { get; set; }
///<summary>
/// Allow Access to Standard Salesforce Pages
/// <para>Name: OptionsAllowStandardPortalPages</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsAllowStandardPortalPages")]
[Updateable(false), Createable(false)]
public bool? OptionsAllowStandardPortalPages { get; set; }
///<summary>
/// Upgrade all requests to HTTPS
/// <para>Name: OptionsCspUpgradeInsecureRequests</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsCspUpgradeInsecureRequests")]
[Updateable(false), Createable(false)]
public bool? OptionsCspUpgradeInsecureRequests { get; set; }
///<summary>
/// Enable Content Sniffing Protection
/// <para>Name: OptionsContentSniffingProtection</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsContentSniffingProtection")]
[Updateable(false), Createable(false)]
public bool? OptionsContentSniffingProtection { get; set; }
///<summary>
/// Enable Browser Cross Site Scripting Protection
/// <para>Name: OptionsBrowserXssProtection</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsBrowserXssProtection")]
[Updateable(false), Createable(false)]
public bool? OptionsBrowserXssProtection { get; set; }
///<summary>
/// Referrer URL Protection
/// <para>Name: OptionsReferrerPolicyOriginWhenCrossOrigin</para>
/// <para>SF Type: boolean</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "optionsReferrerPolicyOriginWhenCrossOrigin")]
[Updateable(false), Createable(false)]
public bool? OptionsReferrerPolicyOriginWhenCrossOrigin { get; set; }
///<summary>
/// Site Description
/// <para>Name: Description</para>
/// <para>SF Type: textarea</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "description")]
[Updateable(false), Createable(false)]
public string Description { get; set; }
///<summary>
/// Site Label
/// <para>Name: MasterLabel</para>
/// <para>SF Type: string</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "masterLabel")]
[Updateable(false), Createable(false)]
public string MasterLabel { get; set; }
///<summary>
/// Analytics Tracking Code
/// <para>Name: AnalyticsTrackingCode</para>
/// <para>SF Type: string</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "analyticsTrackingCode")]
[Updateable(false), Createable(false)]
public string AnalyticsTrackingCode { get; set; }
///<summary>
/// Site Type
/// <para>Name: SiteType</para>
/// <para>SF Type: picklist</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "siteType")]
[Updateable(false), Createable(false)]
public string SiteType { get; set; }
///<summary>
/// Clickjack Protection Level
/// <para>Name: ClickjackProtectionLevel</para>
/// <para>SF Type: picklist</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "clickjackProtectionLevel")]
[Updateable(false), Createable(false)]
public string ClickjackProtectionLevel { get; set; }
///<summary>
/// Daily Bandwidth Limit (MB)
/// <para>Name: DailyBandwidthLimit</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "dailyBandwidthLimit")]
[Updateable(false), Createable(false)]
public int? DailyBandwidthLimit { get; set; }
///<summary>
/// Daily Bandwidth Used
/// <para>Name: DailyBandwidthUsed</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "dailyBandwidthUsed")]
[Updateable(false), Createable(false)]
public int? DailyBandwidthUsed { get; set; }
///<summary>
/// Daily Request Time Limit (min)
/// <para>Name: DailyRequestTimeLimit</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "dailyRequestTimeLimit")]
[Updateable(false), Createable(false)]
public int? DailyRequestTimeLimit { get; set; }
///<summary>
/// Daily Request Time Used
/// <para>Name: DailyRequestTimeUsed</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "dailyRequestTimeUsed")]
[Updateable(false), Createable(false)]
public int? DailyRequestTimeUsed { get; set; }
///<summary>
/// Monthly Page Views Allowed
/// <para>Name: MonthlyPageViewsEntitlement</para>
/// <para>SF Type: int</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "monthlyPageViewsEntitlement")]
[Updateable(false), Createable(false)]
public int? MonthlyPageViewsEntitlement { get; set; }
///<summary>
/// Created Date
/// <para>Name: CreatedDate</para>
/// <para>SF Type: datetime</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "createdDate")]
[Updateable(false), Createable(false)]
public DateTimeOffset? CreatedDate { get; set; }
///<summary>
/// Created By ID
/// <para>Name: CreatedById</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "createdById")]
[Updateable(false), Createable(false)]
public string CreatedById { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: CreatedBy</para>
///</summary>
[JsonProperty(PropertyName = "createdBy")]
[Updateable(false), Createable(false)]
public SfUser CreatedBy { get; set; }
///<summary>
/// Last Modified Date
/// <para>Name: LastModifiedDate</para>
/// <para>SF Type: datetime</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "lastModifiedDate")]
[Updateable(false), Createable(false)]
public DateTimeOffset? LastModifiedDate { get; set; }
///<summary>
/// Last Modified By ID
/// <para>Name: LastModifiedById</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "lastModifiedById")]
[Updateable(false), Createable(false)]
public string LastModifiedById { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: LastModifiedBy</para>
///</summary>
[JsonProperty(PropertyName = "lastModifiedBy")]
[Updateable(false), Createable(false)]
public SfUser LastModifiedBy { get; set; }
///<summary>
/// System Modstamp
/// <para>Name: SystemModstamp</para>
/// <para>SF Type: datetime</para>
/// <para>Nillable: False</para>
///</summary>
[JsonProperty(PropertyName = "systemModstamp")]
[Updateable(false), Createable(false)]
public DateTimeOffset? SystemModstamp { get; set; }
///<summary>
/// User ID
/// <para>Name: GuestRecordDefaultOwnerId</para>
/// <para>SF Type: reference</para>
/// <para>Nillable: True</para>
///</summary>
[JsonProperty(PropertyName = "guestRecordDefaultOwnerId")]
[Updateable(false), Createable(false)]
public string GuestRecordDefaultOwnerId { get; set; }
///<summary>
/// ReferenceTo: User
/// <para>RelationshipName: GuestRecordDefaultOwner</para>
///</summary>
[JsonProperty(PropertyName = "guestRecordDefaultOwner")]
[Updateable(false), Createable(false)]
public SfUser GuestRecordDefaultOwner { get; set; }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
namespace Microsoft.Tools.ServiceModel.Svcutil
{
/// <summary>
/// This class knows how to parse objects into the types supported by the Options OM,
/// and how to represent these types as strings that can be serialized into JSON.
/// </summary>
internal class OptionValueParser
{
    /// <summary>
    /// Converts <paramref name="value"/> into a <typeparamref name="TValue"/>.
    /// A value that is not already of the target type must be a string, which is then
    /// parsed according to the target type.
    /// </summary>
    /// <param name="value">The raw value (typically read from JSON).</param>
    /// <param name="option">The option the value belongs to; used for error reporting.</param>
    /// <exception cref="ArgumentException">The value is null or cannot be converted.</exception>
    public static TValue ParseValue<TValue>(object value, OptionBase option)
    {
        ThrowInvalidValueIf(value == null, value, option);

        var valueType = typeof(TValue);

        if (value.GetType() != typeof(TValue))
        {
            // Parsing is needed; the passed-in value must be a string.
            var stringValue = value as string;
            ThrowInvalidValueIf(stringValue == null, value, option);

            if (valueType == typeof(bool))
            {
                // Special-case boolean values as it is common to specify them as strings in the json file.
                // { "myFlag" : "True" } will be resolved to { "MyFlag" : true }
                ThrowInvalidValueIf(!bool.TryParse(stringValue, out var boolValue), stringValue, option);
                value = boolValue;
            }
            else if (valueType.GetTypeInfo().IsEnum)
            {
                value = ParseEnum<TValue>(stringValue, option);
            }
            else if (valueType == typeof(CultureInfo))
            {
                value = CreateValue<CultureInfo>(() => new CultureInfo(stringValue), option, stringValue);
            }
            else if (valueType == typeof(Uri))
            {
                value = CreateValue<Uri>(() => new Uri(stringValue, UriKind.RelativeOrAbsolute), option, stringValue);
            }
            else if (valueType == typeof(DirectoryInfo))
            {
                value = CreateValue<DirectoryInfo>(() => new DirectoryInfo(stringValue), option, stringValue);
            }
            else if (valueType == typeof(FileInfo))
            {
                value = CreateValue<FileInfo>(() => new FileInfo(stringValue), option, stringValue);
            }
            else if (valueType == typeof(MSBuildProj))
            {
                value = CreateValue<MSBuildProj>(() => MSBuildProj.FromPathAsync(stringValue, null, System.Threading.CancellationToken.None).Result, option, stringValue);
            }
            else if (valueType == typeof(FrameworkInfo))
            {
                value = CreateValue<FrameworkInfo>(() => TargetFrameworkHelper.GetValidFrameworkInfo(stringValue), option, stringValue);
            }
            else if (valueType == typeof(ProjectDependency))
            {
                value = CreateValue<ProjectDependency>(() => ProjectDependency.Parse(stringValue), option, stringValue);
            }
            else if (valueType == typeof(KeyValuePair<string, string>))
            {
                value = ParseKeyValuePair(stringValue, option);
            }
            else
            {
                // Unsupported target type: report the value as invalid for this option.
                ThrowInvalidValueIf(true, stringValue, option);
            }
        }

        return (TValue)value;
    }

    /// <summary>
    /// Converts <paramref name="value"/> into a JSON-friendly representation:
    /// strings/bools pass through, richer option types become strings, and
    /// collections become sorted lists of serialized items.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="value"/> is null.</exception>
    public static object GetSerializationValue(object value)
    {
        if (value == null)
        {
            throw new ArgumentNullException(nameof(value));
        }

        var valueType = value.GetType();

        if (value is string || value is bool)
        {
            // no formatting needed, optimize if-else block for these two types.
        }
        else if (valueType.GetTypeInfo().IsEnum)
        {
            value = value.ToString();
        }
        else if (value is CultureInfo ci)
        {
            value = ci.Name;
        }
        else if (value is Uri uri)
        {
            // File URIs serialize as local paths; all paths use forward slashes for portability.
            value = (uri.IsAbsoluteUri && uri.IsFile ? uri.LocalPath : uri.OriginalString).Replace("\\", "/");
        }
        else if (value is DirectoryInfo di)
        {
            value = di.OriginalPath().Replace("\\", "/");
        }
        else if (value is FileInfo fi)
        {
            value = fi.OriginalPath().Replace("\\", "/");
        }
        else if (value is MSBuildProj proj)
        {
            value = proj.FullPath.Replace("\\", "/");
        }
        else if (value is FrameworkInfo fx)
        {
            value = fx.FullName;
        }
        else if (value is ProjectDependency pd)
        {
            value = pd.ReferenceIdentity;
        }
        else if (valueType == typeof(KeyValuePair<string, string>))
        {
            var pair = (KeyValuePair<string, string>)value;
            value = $"{pair.Key}, {pair.Value}";
        }
        else if (value is ICollection collection)
        {
            // Serialize each item recursively, then sort for a stable on-disk representation.
            // NOTE(review): Sort assumes the serialized items are mutually comparable (they are
            // strings for all option types above) — confirm if new item types are added.
            var list = new List<object>();
            foreach (var item in collection)
            {
                var serializationValue = GetSerializationValue(item);
                list.Add(serializationValue);
            }
            list.Sort();
            value = list;
        }

        return value;
    }

    // Options whose values are safe to log verbatim (they are not arbitrary user text).
    private static readonly string[] s_nonTelemetrySensitiveOptionIds = new string[]
    {
        ApplicationOptions.ProviderIdKey, ApplicationOptions.VersionKey,
        UpdateOptions.CollectionTypesKey, UpdateOptions.ExcludeTypesKey, UpdateOptions.ReferencesKey, UpdateOptions.RuntimeIdentifierKey
    };

    /// <summary>
    /// Produces a telemetry-safe representation of the option's value.
    /// Values of options not known to be safe are reduced to a category
    /// (type name, enum/bool text, or a collection count) to avoid logging
    /// arbitrary strings input by the user.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Globalization", "CA1305:SpecifyIFormatProvider", MessageId = "System.String.Format(System.String,System.Object)")]
    public static object GetTelemetryValue(OptionBase option)
    {
        // Avoid logging arbitrary strings input by the user!
        var value = option.Value;

        if (value == null)
        {
            value = "<null>";
        }
        else if (s_nonTelemetrySensitiveOptionIds.Any(id => option.HasSameId(id)))
        {
            var newValue = GetSerializationValue(value);
            if (newValue is List<object> list)
            {
                // string.Join handles an empty list gracefully (Aggregate would throw
                // InvalidOperationException) and avoids a redundant ToString() on a string.
                value = string.Join(", ", list.Select(item => $"'{item}'"));
            }
        }
        else
        {
            var valueType = value.GetType();

            if (value is bool)
            {
                value = value.ToString();
            }
            else if (valueType.GetTypeInfo().IsEnum)
            {
                value = value.ToString();
            }
            else if (value is CultureInfo ci)
            {
                value = ci.Name;
            }
            else if (value is FrameworkInfo fx)
            {
                value = fx.FullName;
            }
            else if (value is ICollection collection)
            {
                value = $"Count:{collection.Count}";
            }
            else
            {
                value = $"<{valueType}>";
            }
        }

        return value;
    }

    /// <summary>
    /// Parses "key, value" into a KeyValuePair, trimming whitespace around both parts.
    /// </summary>
    /// <exception cref="ArgumentException">The string does not contain exactly one comma.</exception>
    private static KeyValuePair<string, string> ParseKeyValuePair(string stringValue, OptionBase option)
    {
        // format namespace as a mapping key/value pair:
        // "Namespace": "MyServiceReference1"
        var parts = stringValue.Split(',');
        ThrowInvalidValueIf(parts.Length != 2, stringValue, option);

        var value = new KeyValuePair<string, string>(parts[0].Trim(), parts[1].Trim());
        return value;
    }

    /// <summary>
    /// Case-insensitively parses <paramref name="value"/> into the enum type <typeparamref name="TValue"/>.
    /// </summary>
    /// <exception cref="ArgumentException">The value is not a member of the enum; the message lists the supported values.</exception>
    public static object ParseEnum<TValue>(string value, OptionBase option)
    {
        // Enum.TryParse is not available in all supported platforms, need to implement own parsing of enums.
        Type thisType = typeof(TValue);
        object enumValue = null;

        foreach (var entry in thisType.GetTypeInfo().GetEnumValues())
        {
            if (StringComparer.OrdinalIgnoreCase.Compare(entry.ToString(), value) == 0)
            {
                enumValue = entry;
                break;
            }
        }

        if (enumValue == null || enumValue.GetType() != thisType)
        {
            var invalidValueError = string.Format(CultureInfo.CurrentCulture, Shared.Resources.ErrorInvalidOptionValueFormat, value, option.Name);
            var supportedValues = string.Format(CultureInfo.CurrentCulture, Shared.Resources.ErrorOnInvalidEnumSupportedValuesFormat, string.Join(", ", Enum.GetNames(typeof(TValue))));
            throw new ArgumentException(string.Concat(invalidValueError, " ", supportedValues));
        }

        return enumValue;
    }

    /// <summary>
    /// Invokes <paramref name="GetValueFunc"/> and validates that the produced value has the
    /// expected type; non-fatal construction failures are reported as invalid option values.
    /// </summary>
    public static object CreateValue<TValue>(Func<object> GetValueFunc, OptionBase option, object originalValue)
    {
        object value = null;
        try
        {
            value = GetValueFunc();
        }
        catch (Exception ex)
        {
            if (Utils.IsFatalOrUnexpected(ex)) throw;
            // ThrowInvalidValue always throws, so 'value' cannot be null past this point.
            ThrowInvalidValue(originalValue, option, ex);
        }

        ThrowInvalidValueIf(value.GetType() != typeof(TValue), value, option);
        return value;
    }

    /// <summary>
    /// Throws an ArgumentException describing the invalid value when <paramref name="condition"/> is true.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="option"/> is null.</exception>
    public static void ThrowInvalidValueIf(bool condition, object value, OptionBase option)
    {
        if (option == null)
        {
            throw new ArgumentNullException(nameof(option));
        }

        if (condition)
        {
            throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Shared.Resources.ErrorInvalidOptionValueFormat, value, option.Name));
        }
    }

    /// <summary>
    /// Unconditionally throws an ArgumentException describing the invalid value,
    /// preserving the original failure as the inner exception.
    /// </summary>
    public static void ThrowInvalidValue(object value, OptionBase option, Exception innerException)
    {
        throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Shared.Resources.ErrorInvalidOptionValueFormat, value, option.Name), innerException);
    }
}
}
| |
using System.Net.Sockets;
namespace Tmds.DBus.Protocol;
using SizeT = System.UIntPtr;
using SSizeT = System.IntPtr;
/// <summary>
/// Socket helpers for sending and receiving data together with unix file descriptors
/// (SCM_RIGHTS ancillary data) via the libc sendmsg/recvmsg syscalls.
/// </summary>
static class SocketExtensions
{
    // Capacity of the fixed-size control-message fd buffer (cmsg_fd.fds).
    private const int MaxFdCount = 64;

    /// <summary>
    /// Receives data; when <paramref name="fdCollection"/> is non-null, also collects any
    /// file descriptors carried in ancillary data into it.
    /// </summary>
    public static ValueTask<int> ReceiveAsync(this Socket socket, Memory<byte> memory, UnixFdCollection? fdCollection)
    {
        if (fdCollection is null)
        {
            return socket.ReceiveAsync(memory, SocketFlags.None);
        }
        else
        {
            return socket.ReceiveWithHandlesAsync(memory, fdCollection);
        }
    }

    private async static ValueTask<int> ReceiveWithHandlesAsync(this Socket socket, Memory<byte> memory, UnixFdCollection fdCollection)
    {
        while (true)
        {
            // Zero-byte receive: wait until the socket is readable without consuming data,
            // then read the payload (and any fds) with recvmsg.
            await socket.ReceiveAsync(new Memory<byte>(), SocketFlags.None).ConfigureAwait(false);

            int rv = recvmsg(socket, memory, fdCollection);
            if (rv >= 0)
            {
                return rv;
            }
            else
            {
                int errno = Marshal.GetLastWin32Error();
                if (errno == EAGAIN || errno == EINTR)
                {
                    // Not actually readable (or interrupted by a signal): wait again.
                    continue;
                }
                throw new SocketException(errno);
            }
        }
    }

    /// <summary>
    /// Sends <paramref name="buffer"/>; when <paramref name="handles"/> is non-empty the
    /// handles are transferred as SCM_RIGHTS ancillary data along with the first chunk.
    /// </summary>
    public static ValueTask SendAsync(this Socket socket, ReadOnlyMemory<byte> buffer, IReadOnlyList<SafeHandle>? handles)
    {
        if (handles is null || handles.Count == 0)
        {
            return socket.SendAsync(buffer);
        }
        else
        {
            return socket.SendAsyncWithHandlesAsync(buffer, handles);
        }
    }

    // Sends the whole buffer, looping over partial sends.
    private static async ValueTask SendAsync(this Socket socket, ReadOnlyMemory<byte> buffer)
    {
        while (buffer.Length > 0)
        {
            int sent = await socket.SendAsync(buffer, SocketFlags.None);
            buffer = buffer.Slice(sent);
        }
    }

    private static ValueTask SendAsyncWithHandlesAsync(this Socket socket, ReadOnlyMemory<byte> buffer, IReadOnlyList<SafeHandle> handles)
    {
        socket.Blocking = false;
        do
        {
            int rv = sendmsg(socket, buffer, handles);
            if (rv > 0)
            {
                if (buffer.Length == rv)
                {
                    return default;
                }
                // The fds went out with the first chunk; send the remaining bytes without them.
                return socket.SendAsync(buffer.Slice(rv));
            }
            else
            {
                int errno = Marshal.GetLastWin32Error();
                if (errno == EINTR)
                {
                    continue;
                }
                // TODO: handle EAGAIN.
                return new ValueTask(Task.FromException(new SocketException(errno)));
            }
        } while (true);
    }

    private static unsafe int sendmsg(Socket socket, ReadOnlyMemory<byte> buffer, IReadOnlyList<SafeHandle> handles)
    {
        // Guard the fixed-size ancillary buffer: the original code wrote handles.Count fds into
        // cmsg_fd.fds without a bounds check, overrunning the buffer for more than MaxFdCount handles.
        if (handles.Count > MaxFdCount)
        {
            throw new ArgumentException($"Cannot send more than {MaxFdCount} handles in a single message.", nameof(handles));
        }

        fixed (byte* ptr = buffer.Span)
        {
            IOVector* iovs = stackalloc IOVector[1];
            iovs[0].Base = ptr;
            iovs[0].Length = (SizeT)buffer.Length;

            msghdr msg = new msghdr();
            msg.msg_iov = iovs;
            msg.msg_iovlen = (SizeT)1;

            // Ancillary data: cmsg header followed by one 4-byte fd per handle.
            var fdm = new cmsg_fd();
            int size = sizeof(cmsghdr) + 4 * handles.Count;
            msg.msg_control = &fdm;
            msg.msg_controllen = (SizeT)size;
            fdm.hdr.cmsg_len = (SizeT)size;
            fdm.hdr.cmsg_level = SOL_SOCKET;
            fdm.hdr.cmsg_type = SCM_RIGHTS;

            SafeHandle handle = socket.GetSafeHandle();
            int handleRefsAdded = 0;
            bool refAdded = false;
            try
            {
                // Pin every handle (AddRef) for the duration of the syscall so the raw fds stay valid.
                handle.DangerousAddRef(ref refAdded);
                for (int i = 0, j = 0; i < handles.Count; i++)
                {
                    bool added = false;
                    SafeHandle h = handles[i];
                    h.DangerousAddRef(ref added);
                    handleRefsAdded++;
                    fdm.fds[j++] = h.DangerousGetHandle().ToInt32();
                }

                return (int)sendmsg(handle.DangerousGetHandle().ToInt32(), new IntPtr(&msg), 0);
            }
            finally
            {
                for (int i = 0; i < handleRefsAdded; i++)
                {
                    SafeHandle h = handles[i];
                    h.DangerousRelease();
                }
                if (refAdded)
                    handle.DangerousRelease();
            }
        }
    }

    private static unsafe int recvmsg(Socket socket, Memory<byte> buffer, UnixFdCollection handles)
    {
        fixed (byte* buf = buffer.Span)
        {
            IOVector iov = new IOVector();
            iov.Base = buf;
            iov.Length = (SizeT)buffer.Length;

            msghdr msg = new msghdr();
            msg.msg_iov = &iov;
            msg.msg_iovlen = (SizeT)1;

            cmsg_fd cm = new cmsg_fd();
            msg.msg_control = &cm;
            msg.msg_controllen = (SizeT)sizeof(cmsg_fd);

            var handle = socket.GetSafeHandle();
            bool refAdded = false;
            try
            {
                handle.DangerousAddRef(ref refAdded);

                int rv = (int)recvmsg(handle.DangerousGetHandle().ToInt32(), new IntPtr(&msg), 0);
                if (rv >= 0)
                {
                    // Harvest any fds the kernel delivered as SCM_RIGHTS ancillary data.
                    if (cm.hdr.cmsg_level == SOL_SOCKET && cm.hdr.cmsg_type == SCM_RIGHTS)
                    {
                        int msgFdCount = ((int)cm.hdr.cmsg_len - sizeof(cmsghdr)) / sizeof(int);
                        for (int i = 0; i < msgFdCount; i++)
                        {
                            handles.Add((new IntPtr(cm.fds[i]), true));
                        }
                    }
                }
                return rv;
            }
            finally
            {
                if (refAdded)
                    handle.DangerousRelease();
            }
        }
    }

    // Linux errno / socket-level constants; EAGAIN differs on macOS.
    const int SOL_SOCKET = 1;
    const int EINTR = 4;
    const int EBADF = 9;
    static readonly int EAGAIN = RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? 35 : 11;
    const int SCM_RIGHTS = 1;

    private unsafe struct msghdr
    {
        public IntPtr msg_name; //optional address
        public uint msg_namelen; //size of address
        public IOVector* msg_iov; //scatter/gather array
        public SizeT msg_iovlen; //# elements in msg_iov
        public void* msg_control; //ancillary data, see below
        public SizeT msg_controllen; //ancillary data buffer len
        public int msg_flags; //flags on received message
    }

    private unsafe struct IOVector
    {
        public void* Base;
        public SizeT Length;
    }

    private struct cmsghdr
    {
        public SizeT cmsg_len; //data byte count, including header
        public int cmsg_level; //originating protocol
        public int cmsg_type; //protocol-specific type
    }

    private unsafe struct cmsg_fd
    {
        public cmsghdr hdr;
        public fixed int fds[MaxFdCount];
    }

    [DllImport("libc", SetLastError = true)]
    public static extern SSizeT sendmsg(int sockfd, IntPtr msg, int flags);

    [DllImport("libc", SetLastError = true)]
    public static extern SSizeT recvmsg(int sockfd, IntPtr msg, int flags);
}
| |
/* Copyright (C) 2014 Newcastle University
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
using System;
using Android.OS;
using Android.Views;
using Bootleg.API;
using Android.Support.V7.Widget;
using Android.Support.V4.Widget;
using Android.Widget;
using Android.Content;
using Square.Picasso;
using System.Threading;
using Bootleg.Droid.UI;
using AndroidHUD;
using System.Threading.Tasks;
using static Android.Support.V7.Widget.GridLayoutManager;
using Android.App;
using Bootleg.API.Model;
namespace Bootleg.Droid
{
/// <summary>
/// Fragment that lists the current user's edits and lets them share, preview, open or delete each one.
/// Refreshes the list on resume and subscribes to server-side edit-progress updates while visible.
/// </summary>
public class MyEditsFragment : Android.Support.V4.App.Fragment, IImagePausable
{
public MyEditsFragment()
{
}
//Shoot CurrentEvent = null;
//public MyEditsFragment()
//{
// //this.CurrentEvent = currentevent;
//}
//private bool editing;
public override void OnCreate(Bundle savedInstanceState)
{
base.OnCreate(savedInstanceState);
}
// Root view inflated in OnCreateView; widely used for FindViewById lookups and Post() calls.
public View theview;
//private Review review;
//bool loaded = false;
public override void OnDestroy()
{
base.OnDestroy();
// Cancel any in-flight Picasso image requests tagged with this context.
Picasso.With(Context).CancelTag(Context);
}
// True after the first resume; the initial load is delayed, later resumes refresh immediately.
bool alreadyloaded = false;
public override void OnResume()
{
base.OnResume();
if (!alreadyloaded)
{
alreadyloaded = true;
theview.Post(() =>
{
theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Visibility = ViewStates.Visible;
});
// First load is deferred by 1s — presumably to let the view settle before the network call; TODO confirm.
theview.PostDelayed(() => { Refresh(true); }, 1000);
}
else
{
Refresh(false);
theview.Post(() =>
{
theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Visibility = ViewStates.Gone;
});
}
Reattach();
}
// Re-subscribes to edit-update events (the matching unsubscribe is in OnPause).
public void Reattach()
{
Bootlegger.BootleggerClient.OnEditUpdated += Comms_OnEditUpdated;
}
private void Comms_OnEditUpdated(Edit obj)
{
//find the edit in the list, and update...
_adapter.UpdateEdit(obj);
// NOTE(review): progress > 97 looks like a "nearly finished" threshold that triggers a full
// data reload — confirm the server reports progress on a 0-100 scale.
if (obj.progress > 97)
{
theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Post(() =>
{
var b = Bootlegger.BootleggerClient.MyEdits;
_adapter.UpdateData(Bootlegger.BootleggerClient.MyEdits, true);
});
}
}
EditAdapter _adapter;
// IImagePausable: pause Picasso image loading while the list is scrolling fast.
public void Pause()
{
Picasso picasso = Picasso.With(Context);
picasso.PauseTag(_adapter);
}
// IImagePausable: resume Picasso image loading.
public void Resume()
{
Picasso picasso = Picasso.With(Context);
picasso.ResumeTag(_adapter);
}
// Makes non-content rows (headers etc.) span the full grid width; content rows take one column.
private class MySpanSizeLookup : SpanSizeLookup
{
EditAdapter adapter;
private Activity Activity;
int collumns = 1;
public MySpanSizeLookup(EditAdapter adapter, int collumns)
{
this.adapter = adapter;
this.collumns = collumns;
}
public override int GetSpanSize(int position)
{
if (adapter.GetItemViewType(position) == (int)EditAdapter.EditTileType.VIEW_TYPE_CONTENT)
return 1;
else
return collumns;
}
}
public override View OnCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
{
var view = inflater.Inflate(Resource.Layout.edits_list, container, false);
//create adapter for edits:
_adapter = new EditAdapter(Activity, Bootlegger.BootleggerClient.MyEdits,Bootlegger.BootleggerClient.CurrentEvent);
_adapter.HasStableIds = true;
_adapter.OnShare += _adapter_OnShare;
_adapter.OnEdit += _adapter_OnEdit;
_adapter.OnPreview += _adapter_OnPreview;
_adapter.OnDelete += _adapter_OnDelete;
//_adapter.OnRestart += _adapter_OnRestart;
_adapter.OnRefresh += _adapter_OnRefresh;
view.FindViewById<SwipeRefreshLayout>(Resource.Id.swiperefresh).Refresh += MyEditsFragment_Refresh;
var listView = view.FindViewById<RecyclerView>(Resource.Id.alledits);
// Two columns in landscape, one in portrait.
int cols = Activity.Resources.Configuration.Orientation == Android.Content.Res.Orientation.Landscape ? 2 : 1;
var mLayoutManager = new GridLayoutManager(Activity, cols);
mLayoutManager.SetSpanSizeLookup(new MySpanSizeLookup(_adapter, cols));
//var mLayoutManager = new GridLayoutManager(container.Context,2);
listView.SetLayoutManager(mLayoutManager);
listView.SetAdapter(_adapter);
theview = view;
//listView.ScrollChange += ListView_ScrollChange;
//RecyclerView.ItemAnimator animator = listView.GetItemAnimator();
//if (animator is SimpleItemAnimator)
//{
// ((SimpleItemAnimator)animator).SupportsChangeAnimations = false;
//}
// Item animations disabled; frequent progress updates would otherwise flicker.
listView.SetItemAnimator(null);
listView.AddOnScrollListener(new PausableScrollListener(Context,_adapter));
return view;
}
private void _adapter_OnRefresh()
{
Refresh();
}
//private void ListView_ScrollChange(object sender, View.ScrollChangeEventArgs e)
//{
// OnScrollChange?.Invoke(e);
//}
//public event Action<View.ScrollChangeEventArgs> OnScrollChange;
public override void OnViewCreated(View view, Bundle savedInstanceState)
{
base.OnViewCreated(view, savedInstanceState);
theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Gone;
// NOTE(review): ItemCount == 1 is treated as "empty" — presumably the adapter always contains
// one non-content row (header/footer); confirm against EditAdapter.
if (_adapter.ItemCount == 1)
{
theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Visible;
}
//if (Bootlegger.BootleggerClient.CurrentEvent != null && _adapter.ItemCount == 0)
//{
// theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Visible;
//}
}
public override void OnPause()
{
// Abort any in-flight refresh/login and stop receiving edit updates while backgrounded.
cancel.Cancel();
Bootlegger.BootleggerClient.OnEditUpdated -= Comms_OnEditUpdated;
base.OnPause();
}
// Currently unsubscribed from the adapter (see OnCreateView); kept for the disabled OnRestart flow.
private async void _adapter_OnRestart(Edit obj)
{
AndHUD.Shared.Show(Activity, Resources.GetString(Resource.String.connecting), -1, MaskType.Black);
try
{
cancel = new CancellationTokenSource();
await LoginFuncs.TryLogin(Activity, cancel.Token);
//await Bootlegger.BootleggerClient.RestartEdit(obj);
_adapter.UpdateData(Bootlegger.BootleggerClient.MyEdits,true);
_adapter.NotifyDataSetChanged();
}
catch (TaskCanceledException)
{
//do nothing
}
catch (Exception e)
{
LoginFuncs.ShowError(Context, e);
}
finally
{
AndHUD.Shared.Dismiss();
}
}
private void _adapter_OnDelete(Edit obj)
{
//show delete dialog:
new Android.Support.V7.App.AlertDialog.Builder(Activity).SetMessage(Resource.String.deleteedit)
.SetPositiveButton(Android.Resource.String.Ok, new EventHandler<DialogClickEventArgs>(async (oe, eo) =>
{
await Bootlegger.BootleggerClient.DeleteEdit(obj);
_adapter.UpdateData(Bootlegger.BootleggerClient.MyEdits,false);
if (_adapter.ItemCount == 1)
{
theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Visible;
}
}))
.SetNegativeButton(Android.Resource.String.Cancel, new EventHandler<DialogClickEventArgs>((oe, eo) =>
{
}))
.SetTitle(Resource.String.removeedit)
.Show();
}
private void _adapter_OnPreview(Edit obj,View v)
{
//open preview: only finished, non-failed edits with a code can be previewed.
if (!string.IsNullOrEmpty(obj.code) && obj.progress>97 && !obj.failed)
{
OnPreview?.Invoke(obj, v);
}
else
{
// Otherwise fall back to the edit/open flow.
_adapter_OnEdit(obj);
}
}
public event Action<Edit, View> OnPreview;
public event Action<Edit> OnOpenEdit;
private void _adapter_OnEdit(Edit obj)
{
// Only edits that have not been rendered yet (no path, no progress) can be re-opened for editing.
if (string.IsNullOrEmpty(obj.path) && obj.progress==null)
{
OnOpenEdit?.Invoke(obj);
}
else
{
LoginFuncs.ShowMessage(Activity, Resource.String.editready);
}
}
private void _adapter_OnShare(Edit obj)
{
//do share...
// NOTE(review): the chooser launch below is commented out, so this builds the intent but
// never shows it — confirm whether sharing is intentionally disabled.
Intent sharingIntent = new Intent(Intent.ActionSend);
sharingIntent.SetType("text/plain");
sharingIntent.PutExtra(Intent.ExtraSubject, obj.title);
sharingIntent.PutExtra(Intent.ExtraText, Bootlegger.BootleggerClient.server + "/v/" + obj.shortlink);
//StartActivity(Intent.CreateChooser(sharingIntent, Resources.GetString(Resource.String.sharevia)));
}
CancellationTokenSource cancel = new CancellationTokenSource();
// Core refresh: optionally re-logs-in and re-fetches edits from the server (manually == true),
// then rebinds the adapter and toggles the empty-state text.
private async void Refresh(bool manually)
{
cancel = new CancellationTokenSource();
try
{
if (manually)
{
if (!Bootlegger.BootleggerClient.Connected && (Context.ApplicationContext as BootleggerApp).IsReallyConnected)
{
//AndHUD.Shared.Show(Activity, Resources.GetString(Resource.String.connecting), -1, MaskType.Black);
try
{
await LoginFuncs.TryLogin(Activity, cancel.Token);
}
catch (TaskCanceledException)
{
//do nothing
}
catch (Exception)
{
//LoginFuncs.ShowError(Context, Resource.String.cantconnect);
// Re-thrown so the outer catch shows the generic error UI.
throw new Exception();
//return;
}
}
if ((Context.ApplicationContext as BootleggerApp).IsReallyConnected)
{
await Bootlegger.BootleggerClient.GetMyEdits(cancel.Token, false);
//if we are in the review screen
if (Bootlegger.BootleggerClient.CurrentEvent != null)
{
try
{
Bootlegger.BootleggerClient.RegisterForEditUpdates();
}
catch
{
//not online, so dont register for updates
}
}
}
}
_adapter.UpdateData(Bootlegger.BootleggerClient.MyEdits, true);
theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Gone;
// See OnViewCreated: ItemCount == 1 is the empty state.
if (_adapter.ItemCount == 1)
{
theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Visible;
}
//if (Bootlegger.BootleggerClient.CurrentEvent != null && _adapter.ItemCount == 0)
//{
// theview.FindViewById<View>(Resource.Id.emptytext).Visibility = ViewStates.Visible;
//}
}
catch(TaskCanceledException)
{
//user cancelled
}
catch (Exception e)
{
if (Activity != null)
LoginFuncs.ShowError(Activity,e);
}
finally
{
theview.Post(() => { theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Visibility = ViewStates.Gone; });
AndHUD.Shared.Dismiss();
}
//}
//else
//{
// theview.Post(() => { theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Visibility = ViewStates.Gone; });
//}
}
private void MyEditsFragment_Refresh(object sender, EventArgs e)
{
theview.FindViewById<SwipeRefreshLayout>(Resource.Id.swiperefresh).Post(() =>
{
theview.FindViewById<SwipeRefreshLayout>(Resource.Id.swiperefresh).Refreshing =true;
});
// NOTE(review): Refresh(true) is async void, so the Refreshing=false post below runs
// immediately — the spinner is dismissed before the refresh completes. Likely a bug.
Refresh(true);
theview.FindViewById<SwipeRefreshLayout>(Resource.Id.swiperefresh).Post(() =>
{
theview.FindViewById<SwipeRefreshLayout>(Resource.Id.swiperefresh).Refreshing = false;
});
}
// Programmatic refresh entry point used by the adapter's OnRefresh callback.
internal void Refresh()
{
try {
theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Post(() =>
{
theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Visibility = ViewStates.Visible;
});
Refresh(true);
theview.FindViewById<SwipeRefreshLayout>(Resource.Id.swiperefresh).Post(() =>
{
theview.FindViewById<ProgressBar>(Resource.Id.progressBar).Visibility = ViewStates.Gone;
});
}
catch
{
//view not started...
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Web.Routing;
using Nop.Core.Domain.Orders;
using Nop.Core.Domain.Payments;
using Nop.Core.Plugins;
using Nop.Plugin.Payments.Manual.Controllers;
using Nop.Services.Configuration;
using Nop.Services.Localization;
using Nop.Services.Orders;
using Nop.Services.Payments;
namespace Nop.Plugin.Payments.Manual
{
/// <summary>
/// Manual payment processor
/// </summary>
public class ManualPaymentProcessor : BasePlugin, IPaymentMethod
{
#region Fields
// Plugin configuration: transaction mode and additional-fee settings.
private readonly ManualPaymentSettings _manualPaymentSettings;
// Settings service — not used by the methods visible here; presumably used by
// install/uninstall elsewhere in this class (TODO confirm).
private readonly ISettingService _settingService;
// Used to calculate the configured additional handling fee (see GetAdditionalHandlingFee).
private readonly IOrderTotalCalculationService _orderTotalCalculationService;
#endregion
#region Ctor
/// <summary>
/// Creates the manual payment processor with its settings and dependent services.
/// </summary>
public ManualPaymentProcessor(ManualPaymentSettings manualPaymentSettings,
ISettingService settingService, IOrderTotalCalculationService orderTotalCalculationService)
{
this._manualPaymentSettings = manualPaymentSettings;
this._settingService = settingService;
this._orderTotalCalculationService = orderTotalCalculationService;
}
#endregion
#region Methods
/// <summary>
/// Process a payment: maps the configured transact mode onto the new payment status.
/// </summary>
/// <param name="processPaymentRequest">Payment info required for an order processing</param>
/// <returns>Process payment result</returns>
public ProcessPaymentResult ProcessPayment(ProcessPaymentRequest processPaymentRequest)
{
    var result = new ProcessPaymentResult { AllowStoringCreditCardNumber = true };

    var transactMode = _manualPaymentSettings.TransactMode;
    if (transactMode == TransactMode.Pending)
    {
        result.NewPaymentStatus = PaymentStatus.Pending;
    }
    else if (transactMode == TransactMode.Authorize)
    {
        result.NewPaymentStatus = PaymentStatus.Authorized;
    }
    else if (transactMode == TransactMode.AuthorizeAndCapture)
    {
        result.NewPaymentStatus = PaymentStatus.Paid;
    }
    else
    {
        // Unknown transact mode configured.
        result.AddError("Not supported transaction type");
    }

    return result;
}
/// <summary>
/// Post process payment (used by payment gateways that require redirecting to a third-party URL)
/// </summary>
/// <param name="postProcessPaymentRequest">Payment info required for an order processing</param>
public void PostProcessPayment(PostProcessPaymentRequest postProcessPaymentRequest)
{
//nothing to do: manual processing does not redirect to a third-party gateway
}
/// <summary>
/// Returns a value indicating whether payment method should be hidden during checkout
/// </summary>
/// <param name="cart">Shopping cart</param>
/// <returns>true - hide; false - display.</returns>
public bool HidePaymentMethod(IList<ShoppingCartItem> cart)
{
//you can put any logic here
//for example, hide this payment method if all products in the cart are downloadable
//or hide this payment method if current customer is from certain country
// Manual payment is always available.
return false;
}
/// <summary>
/// Gets additional handling fee, based on the fixed fee and percentage configured
/// in the plugin settings.
/// </summary>
/// <param name="cart">Shopping cart</param>
/// <returns>Additional handling fee</returns>
public decimal GetAdditionalHandlingFee(IList<ShoppingCartItem> cart)
{
    // Delegate to the shared fee-calculation helper; no intermediate local needed.
    return this.CalculateAdditionalFee(_orderTotalCalculationService, cart,
        _manualPaymentSettings.AdditionalFee, _manualPaymentSettings.AdditionalFeePercentage);
}
/// <summary>
/// Captures payment
/// </summary>
/// <param name="capturePaymentRequest">Capture payment request</param>
/// <returns>Capture payment result</returns>
public CapturePaymentResult Capture(CapturePaymentRequest capturePaymentRequest)
{
// Manual processing has no gateway to capture against; always report unsupported.
var result = new CapturePaymentResult();
result.AddError("Capture method not supported");
return result;
}
/// <summary>
/// Refunds a payment
/// </summary>
/// <param name="refundPaymentRequest">Request</param>
/// <returns>Result</returns>
public RefundPaymentResult Refund(RefundPaymentRequest refundPaymentRequest)
{
// Manual processing has no gateway to refund through; always report unsupported.
var result = new RefundPaymentResult();
result.AddError("Refund method not supported");
return result;
}
/// <summary>
/// Voids a payment
/// </summary>
/// <param name="voidPaymentRequest">Request</param>
/// <returns>Result</returns>
public VoidPaymentResult Void(VoidPaymentRequest voidPaymentRequest)
{
// Manual processing has no gateway authorization to void; always report unsupported.
var result = new VoidPaymentResult();
result.AddError("Void method not supported");
return result;
}
/// <summary>
/// Process recurring payment
/// </summary>
/// <param name="processPaymentRequest">Payment info required for an order processing</param>
/// <returns>Process payment result</returns>
/// <summary>
/// Processes a recurring payment. The resulting payment status is driven by the
/// configured transaction mode (Pending / Authorize / AuthorizeAndCapture).
/// </summary>
/// <param name="processPaymentRequest">Payment info required for an order processing</param>
/// <returns>Process payment result</returns>
public ProcessPaymentResult ProcessRecurringPayment(ProcessPaymentRequest processPaymentRequest)
{
    var result = new ProcessPaymentResult();
    // The card data is stored so the merchant can charge it manually later.
    result.AllowStoringCreditCardNumber = true;
    var mode = _manualPaymentSettings.TransactMode;
    if (mode == TransactMode.Pending)
    {
        result.NewPaymentStatus = PaymentStatus.Pending;
    }
    else if (mode == TransactMode.Authorize)
    {
        result.NewPaymentStatus = PaymentStatus.Authorized;
    }
    else if (mode == TransactMode.AuthorizeAndCapture)
    {
        result.NewPaymentStatus = PaymentStatus.Paid;
    }
    else
    {
        // Unknown mode: surface an error instead of guessing a status.
        result.AddError("Not supported transaction type");
    }
    return result;
}
/// <summary>
/// Cancels a recurring payment
/// </summary>
/// <param name="cancelPaymentRequest">Request</param>
/// <returns>Result</returns>
public CancelRecurringPaymentResult CancelRecurringPayment(CancelRecurringPaymentRequest cancelPaymentRequest)
{
    // Nothing to undo with the gateway, so cancellation always succeeds
    // (an empty result carries no errors).
    return new CancelRecurringPaymentResult();
}
/// <summary>
/// Gets a value indicating whether customers can complete a payment after order is placed but not completed (for redirection payment methods)
/// </summary>
/// <param name="order">Order</param>
/// <returns>Result</returns>
public bool CanRePostProcessPayment(Order order)
{
    if (order == null)
        throw new ArgumentNullException("order");
    // Manual payment is not a redirection-based method, so re-posting the
    // payment after order placement is never possible.
    return false;
}
/// <summary>
/// Gets a route for provider configuration
/// </summary>
/// <param name="actionName">Action name</param>
/// <param name="controllerName">Controller name</param>
/// <param name="routeValues">Route values</param>
/// <summary>
/// Gets the MVC route used to render this provider's configuration page.
/// </summary>
/// <param name="actionName">Action name</param>
/// <param name="controllerName">Controller name</param>
/// <param name="routeValues">Route values</param>
public void GetConfigurationRoute(out string actionName, out string controllerName, out RouteValueDictionary routeValues)
{
    actionName = "Configure";
    controllerName = "PaymentManual";
    // "area" must be null so the route resolves outside any MVC area.
    var values = new RouteValueDictionary();
    values.Add("Namespaces", "Nop.Plugin.Payments.Manual.Controllers");
    values.Add("area", null);
    routeValues = values;
}
/// <summary>
/// Gets a route for payment info
/// </summary>
/// <param name="actionName">Action name</param>
/// <param name="controllerName">Controller name</param>
/// <param name="routeValues">Route values</param>
/// <summary>
/// Gets the MVC route used to render the payment-info step during checkout.
/// </summary>
/// <param name="actionName">Action name</param>
/// <param name="controllerName">Controller name</param>
/// <param name="routeValues">Route values</param>
public void GetPaymentInfoRoute(out string actionName, out string controllerName, out RouteValueDictionary routeValues)
{
    actionName = "PaymentInfo";
    controllerName = "PaymentManual";
    // "area" must be null so the route resolves outside any MVC area.
    var values = new RouteValueDictionary();
    values.Add("Namespaces", "Nop.Plugin.Payments.Manual.Controllers");
    values.Add("area", null);
    routeValues = values;
}
/// <summary>
/// Gets the controller type that handles this plugin's MVC actions.
/// </summary>
public Type GetControllerType()
{
    return typeof(PaymentManualController);
}
/// <summary>
/// Installs the plugin: persists default settings and registers the
/// localizable resource strings used by the configuration UI.
/// </summary>
public override void Install()
{
    // Default settings: newly processed payments start out as "Pending".
    var defaultSettings = new ManualPaymentSettings();
    defaultSettings.TransactMode = TransactMode.Pending;
    _settingService.SaveSetting(defaultSettings);
    // Locale resources (mirrored by Uninstall).
    this.AddOrUpdatePluginLocaleResource("Plugins.Payments.Manual.Fields.AdditionalFee", "Additional fee");
    this.AddOrUpdatePluginLocaleResource("Plugins.Payments.Manual.Fields.AdditionalFee.Hint", "Enter additional fee to charge your customers.");
    this.AddOrUpdatePluginLocaleResource("Plugins.Payments.Manual.Fields.AdditionalFeePercentage", "Additional fee. Use percentage");
    this.AddOrUpdatePluginLocaleResource("Plugins.Payments.Manual.Fields.AdditionalFeePercentage.Hint", "Determines whether to apply a percentage additional fee to the order total. If not enabled, a fixed value is used.");
    this.AddOrUpdatePluginLocaleResource("Plugins.Payments.Manual.Fields.TransactMode", "After checkout mark payment as");
    this.AddOrUpdatePluginLocaleResource("Plugins.Payments.Manual.Fields.TransactMode.Hint", "Specify transaction mode.");
    base.Install();
}
/// <summary>
/// Uninstalls the plugin: deletes persisted settings and every locale
/// resource registered by Install.
/// </summary>
public override void Uninstall()
{
    // Remove persisted settings.
    _settingService.DeleteSetting<ManualPaymentSettings>();
    // Remove locale resources in the same order Install added them.
    var resourceNames = new[]
    {
        "Plugins.Payments.Manual.Fields.AdditionalFee",
        "Plugins.Payments.Manual.Fields.AdditionalFee.Hint",
        "Plugins.Payments.Manual.Fields.AdditionalFeePercentage",
        "Plugins.Payments.Manual.Fields.AdditionalFeePercentage.Hint",
        "Plugins.Payments.Manual.Fields.TransactMode",
        "Plugins.Payments.Manual.Fields.TransactMode.Hint"
    };
    foreach (var resourceName in resourceNames)
    {
        this.DeletePluginLocaleResource(resourceName);
    }
    base.Uninstall();
}
#endregion
#region Properties
/// <summary>
/// Gets a value indicating whether capture is supported
/// </summary>
public bool SupportCapture
{
    get
    {
        // Consistent with Capture(), which always reports "not supported".
        return false;
    }
}
/// <summary>
/// Gets a value indicating whether partial refund is supported
/// </summary>
public bool SupportPartiallyRefund
{
    get
    {
        return false;
    }
}
/// <summary>
/// Gets a value indicating whether refund is supported
/// </summary>
public bool SupportRefund
{
    get
    {
        // Consistent with Refund(), which always reports "not supported".
        return false;
    }
}
/// <summary>
/// Gets a value indicating whether void is supported
/// </summary>
public bool SupportVoid
{
    get
    {
        // Consistent with Void(), which always reports "not supported".
        return false;
    }
}
/// <summary>
/// Gets a recurring payment type of payment method
/// </summary>
public RecurringPaymentType RecurringPaymentType
{
    get
    {
        // Recurring charges must be triggered manually by the store owner.
        return RecurringPaymentType.Manual;
    }
}
/// <summary>
/// Gets a payment method type
/// </summary>
public PaymentMethodType PaymentMethodType
{
    get
    {
        // Standard (non-redirecting) checkout flow.
        return PaymentMethodType.Standard;
    }
}
/// <summary>
/// Gets a value indicating whether we should display a payment information page for this plugin
/// </summary>
public bool SkipPaymentInfo
{
    get
    {
        // The payment-info page collects card data, so it is never skipped.
        return false;
    }
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Globalization;
using System.Text;
using Microsoft.CSharp.RuntimeBinder.Semantics;
using Microsoft.CSharp.RuntimeBinder.Syntax;
namespace Microsoft.CSharp.RuntimeBinder.Errors
{
internal sealed class UserStringBuilder
{
private bool m_buildingInProgress;
private GlobalSymbolContext m_globalSymbols;
private StringBuilder m_strBuilder;
/// <summary>
/// Creates a builder that renders symbols and types as user-readable
/// strings for error messages.
/// </summary>
/// <param name="globalSymbols">Global symbol context; must not be null (provides the TypeManager).</param>
public UserStringBuilder(
    GlobalSymbolContext globalSymbols)
{
    Debug.Assert(globalSymbols != null);
    m_buildingInProgress = false;
    m_globalSymbols = globalSymbols;
}
// Starts accumulating a new user string. Only one string may be under
// construction at a time (guarded by m_buildingInProgress).
private void BeginString()
{
    Debug.Assert(!m_buildingInProgress);
    m_buildingInProgress = true;
    m_strBuilder = new StringBuilder();
}
// Finishes the in-progress string, returning the accumulated text and
// releasing the builder so a new BeginString can follow.
private void EndString(out string s)
{
    Debug.Assert(m_buildingInProgress);
    m_buildingInProgress = false;
    s = m_strBuilder.ToString();
    m_strBuilder = null;
}
// Maps a symbol kind to its user-facing word (e.g. "method", "class")
// by looking up the corresponding MessageID.
private void ErrSK(out string psz, SYMKIND sk)
{
    MessageID id;
    if (sk == SYMKIND.SK_MethodSymbol)
        id = MessageID.SK_METHOD;
    else if (sk == SYMKIND.SK_AggregateSymbol)
        id = MessageID.SK_CLASS;
    else if (sk == SYMKIND.SK_NamespaceSymbol)
        id = MessageID.SK_NAMESPACE;
    else if (sk == SYMKIND.SK_FieldSymbol)
        id = MessageID.SK_FIELD;
    else if (sk == SYMKIND.SK_LocalVariableSymbol)
        id = MessageID.SK_VARIABLE;
    else if (sk == SYMKIND.SK_PropertySymbol)
        id = MessageID.SK_PROPERTY;
    else if (sk == SYMKIND.SK_EventSymbol)
        id = MessageID.SK_EVENT;
    else if (sk == SYMKIND.SK_TypeParameterSymbol)
        id = MessageID.SK_TYVAR;
    else
    {
        Debug.Assert(false, "impossible sk");
        id = MessageID.SK_UNKNOWN;
    }
    ErrId(out psz, id);
}
/*
* Create a fill-in string describing a parameter list.
* Does NOT include ()
*/
// Appends a comma-separated parameter-type list (no surrounding parentheses).
// The final parameter gets a "params " prefix when isParamArray is set.
private void ErrAppendParamList(TypeArray @params, bool isParamArray)
{
    if (@params == null)
        return;
    int count = @params.Count;
    for (int index = 0; index < count; index++)
    {
        if (index != 0)
        {
            ErrAppendString(", ");
        }
        // Only the trailing parameter of a params method is a params array.
        if (isParamArray && index == count - 1)
        {
            ErrAppendString("params ");
        }
        ErrAppendType(@params[index], null);
    }
}
// Appends raw text to the string under construction.
private void ErrAppendString(string str)
{
    m_strBuilder.Append(str);
}
// Appends a single character to the string under construction.
private void ErrAppendChar(char ch)
{
    m_strBuilder.Append(ch);
}
// Appends formatted text; invariant culture keeps numeric formatting
// stable regardless of the user's locale.
private void ErrAppendPrintf(string format, params object[] args)
{
    ErrAppendString(string.Format(CultureInfo.InvariantCulture, format, args));
}
// Appends a symbol name; the compiler-internal indexer name is
// displayed as "this".
private void ErrAppendName(Name name)
{
    bool isInternalIndexer = name == NameManager.GetPredefinedName(PredefinedName.PN_INDEXERINTERNAL);
    ErrAppendString(isInternalIndexer ? "this" : name.Text);
}
// Appends the qualified name of a symbol's parent (followed by '.').
private void ErrAppendParentSym(Symbol sym, SubstContext pctx)
{
    ErrAppendParentCore(sym.parent, pctx);
}
// Appends "Parent." for a symbol, or nothing for the root namespace.
// When a substitution context applies to a generic parent, the parent is
// rendered as a substituted type so its type arguments display correctly.
private void ErrAppendParentCore(Symbol parent, SubstContext pctx)
{
    if (parent == null || parent == NamespaceSymbol.Root)
    {
        return;
    }
    if (pctx != null && !pctx.FNop() && parent is AggregateSymbol agg && 0 != agg.GetTypeVarsAll().Count)
    {
        // Generic parent under substitution: render the substituted `this` type.
        CType pType = GetTypeManager().SubstType(agg.getThisType(), pctx);
        ErrAppendType(pType, null);
    }
    else
    {
        ErrAppendSym(parent, null);
    }
    ErrAppendChar('.');
}
// Appends "<T1,T2,...>" for a non-empty type-parameter list; appends
// nothing when the list is null or empty.
private void ErrAppendTypeParameters(TypeArray @params, SubstContext pctx, bool forClass)
{
    if (@params == null || @params.Count == 0)
    {
        return;
    }
    ErrAppendChar('<');
    for (int index = 0; index < @params.Count; index++)
    {
        if (index != 0)
        {
            ErrAppendString(",");
        }
        ErrAppendType(@params[index], pctx);
    }
    ErrAppendChar('>');
}
// Appends the display form of a method. Special cases (explicit interface
// implementations, property/event accessors, constructors, destructors,
// conversion and user-defined operators) each get their conventional
// rendering; fArgs controls whether the parameter list is appended.
private void ErrAppendMethod(MethodSymbol meth, SubstContext pctx, bool fArgs)
{
    if (meth.IsExpImpl() && meth.swtSlot)
    {
        ErrAppendParentSym(meth, pctx);
        // Get the type args from the explicit impl type and substitute using pctx (if there is one).
        SubstContext ctx = new SubstContext(GetTypeManager().SubstType(meth.swtSlot.GetType(), pctx));
        ErrAppendSym(meth.swtSlot.Sym, ctx, fArgs);
        // args already added
        return;
    }
    if (meth.isPropertyAccessor())
    {
        PropertySymbol prop = meth.getProperty();
        // this includes the parent class
        ErrAppendSym(prop, pctx);
        // add accessor name
        if (prop.GetterMethod == meth)
        {
            ErrAppendString(".get");
        }
        else
        {
            Debug.Assert(meth == prop.SetterMethod);
            ErrAppendString(".set");
        }
        // args already added
        return;
    }
    if (meth.isEventAccessor())
    {
        EventSymbol @event = meth.getEvent();
        // this includes the parent class
        ErrAppendSym(@event, pctx);
        // add accessor name
        if (@event.methAdd == meth)
        {
            ErrAppendString(".add");
        }
        else
        {
            Debug.Assert(meth == @event.methRemove);
            ErrAppendString(".remove");
        }
        // args already added
        return;
    }
    ErrAppendParentSym(meth, pctx);
    if (meth.IsConstructor())
    {
        // Use the name of the parent class instead of the name "<ctor>".
        ErrAppendName(meth.getClass().name);
    }
    else if (meth.IsDestructor())
    {
        // Use the name of the parent class instead of the name "Finalize".
        ErrAppendChar('~');
        ErrAppendName(meth.getClass().name);
    }
    else if (meth.isConversionOperator())
    {
        // implicit/explicit
        ErrAppendString(meth.isImplicit() ? "implicit" : "explicit");
        ErrAppendString(" operator ");
        // destination type name
        ErrAppendType(meth.RetType, pctx);
    }
    else if (meth.isOperator)
    {
        // handle user defined operators
        // map from CLS predefined names to "operator <X>"
        ErrAppendString("operator ");
        ErrAppendString(Operators.OperatorOfMethodName(meth.name));
    }
    else if (!meth.IsExpImpl())
    {
        // regular method
        ErrAppendName(meth.name);
    }
    ErrAppendTypeParameters(meth.typeVars, pctx, false);
    if (fArgs)
    {
        // append argument types
        ErrAppendChar('(');
        ErrAppendParamList(GetTypeManager().SubstTypeArray(meth.Params, pctx), meth.isParamArray);
        ErrAppendChar(')');
    }
}
// Appends an indexer as "this[<param types>]".
private void ErrAppendIndexer(IndexerSymbol indexer, SubstContext pctx)
{
    ErrAppendString("this[");
    ErrAppendParamList(GetTypeManager().SubstTypeArray(indexer.Params, pctx), indexer.isParamArray);
    ErrAppendChar(']');
}
// Appends the display form of a property: the explicitly-implemented
// interface member when present, the "this[...]" form for indexers,
// otherwise the plain property name (all prefixed with the parent).
private void ErrAppendProperty(PropertySymbol prop, SubstContext pctx)
{
    ErrAppendParentSym(prop, pctx);
    if (prop.IsExpImpl())
    {
        if (prop.swtSlot.Sym != null)
        {
            // Render the interface member under the substituted interface type.
            SubstContext ctx = new SubstContext(GetTypeManager().SubstType(prop.swtSlot.GetType(), pctx));
            ErrAppendSym(prop.swtSlot.Sym, ctx);
        }
        else if (prop is IndexerSymbol indexer)
        {
            ErrAppendChar('.');
            ErrAppendIndexer(indexer, pctx);
        }
    }
    else if (prop is IndexerSymbol indexer)
    {
        ErrAppendIndexer(indexer, pctx);
    }
    else
    {
        ErrAppendName(prop.name);
    }
}
// Intentionally appends nothing for event symbols.
// NOTE(review): the empty body looks deliberate (ErrAppendSym still
// dispatches here for SK_EventSymbol) — confirm events never need a
// display string on this path.
private void ErrAppendEvent(EventSymbol @event, SubstContext pctx)
{
}
// Looks up the message text for the given id and appends it.
private void ErrAppendId(MessageID id)
{
    ErrId(out string messageText, id);
    ErrAppendString(messageText);
}
/*
* Create a fill-in string describing a symbol.
*/
// Convenience overload: appends a symbol including its argument list.
private void ErrAppendSym(Symbol sym, SubstContext pctx)
{
    ErrAppendSym(sym, pctx, true);
}
// Appends the display form of any symbol, dispatching on its kind.
// fArgs is forwarded to the method renderer to control parameter lists.
private void ErrAppendSym(Symbol sym, SubstContext pctx, bool fArgs)
{
    switch (sym.getKind())
    {
        case SYMKIND.SK_AggregateDeclaration:
            // A declaration is rendered as its aggregate.
            ErrAppendSym(((AggregateDeclaration)sym).Agg(), pctx);
            break;
        case SYMKIND.SK_AggregateSymbol:
            {
                // Check for a predefined class with a special "nice" name for
                // error reported.
                string text = PredefinedTypes.GetNiceName(sym as AggregateSymbol);
                if (text != null)
                {
                    // Found a nice name.
                    ErrAppendString(text);
                }
                else
                {
                    ErrAppendParentSym(sym, pctx);
                    ErrAppendName(sym.name);
                    ErrAppendTypeParameters(((AggregateSymbol)sym).GetTypeVars(), pctx, true);
                }
                break;
            }
        case SYMKIND.SK_MethodSymbol:
            ErrAppendMethod((MethodSymbol)sym, pctx, fArgs);
            break;
        case SYMKIND.SK_PropertySymbol:
            ErrAppendProperty((PropertySymbol)sym, pctx);
            break;
        case SYMKIND.SK_EventSymbol:
            ErrAppendEvent((EventSymbol)sym, pctx);
            break;
        case SYMKIND.SK_NamespaceSymbol:
            if (sym == NamespaceSymbol.Root)
            {
                ErrAppendId(MessageID.GlobalNamespace);
            }
            else
            {
                ErrAppendParentSym(sym, null);
                ErrAppendName(sym.name);
            }
            break;
        case SYMKIND.SK_FieldSymbol:
            ErrAppendParentSym(sym, pctx);
            ErrAppendName(sym.name);
            break;
        case SYMKIND.SK_TypeParameterSymbol:
            if (null == sym.name)
            {
                var parSym = (TypeParameterSymbol)sym;
                // It's a standard type variable.
                // Method type parameters print as "!!n", class ones as "!n".
                if (parSym.IsMethodTypeParameter())
                    ErrAppendChar('!');
                ErrAppendChar('!');
                ErrAppendPrintf("{0}", parSym.GetIndexInTotalParameters());
            }
            else
                ErrAppendName(sym.name);
            break;
        case SYMKIND.SK_LocalVariableSymbol:
            // Generate symbol name.
            ErrAppendName(sym.name);
            break;
        default:
            // Shouldn't happen.
            Debug.Assert(false, $"Bad symbol kind: {sym.getKind()}");
            break;
    }
}
// Convenience overload: appends a type with argument rendering enabled.
private void ErrAppendType(CType pType, SubstContext pCtx)
{
    ErrAppendType(pType, pCtx, true);
}
// Appends the display form of a type. Any substitution context is applied
// up front (and then cleared so nested renders don't substitute twice);
// the switch then handles each TypeKind's conventional rendering.
private void ErrAppendType(CType pType, SubstContext pctx, bool fArgs)
{
    if (pctx != null)
    {
        if (!pctx.FNop())
        {
            pType = GetTypeManager().SubstType(pType, pctx);
        }
        // We shouldn't use the SubstContext again so set it to NULL.
        pctx = null;
    }
    switch (pType.TypeKind)
    {
        case TypeKind.TK_AggregateType:
            {
                AggregateType pAggType = (AggregateType)pType;
                // Check for a predefined class with a special "nice" name for
                // error reported.
                string text = PredefinedTypes.GetNiceName(pAggType.OwningAggregate);
                if (text != null)
                {
                    // Found a nice name.
                    ErrAppendString(text);
                }
                else
                {
                    if (pAggType.OuterType != null)
                    {
                        // Nested type: render the outer type, then '.'.
                        ErrAppendType(pAggType.OuterType, pctx);
                        ErrAppendChar('.');
                    }
                    else
                    {
                        // In a namespace.
                        ErrAppendParentSym(pAggType.OwningAggregate, pctx);
                    }
                    ErrAppendName(pAggType.OwningAggregate.name);
                }
                ErrAppendTypeParameters(pAggType.TypeArgsThis, pctx, true);
                break;
            }
        case TypeKind.TK_TypeParameterType:
            TypeParameterType tpType = (TypeParameterType)pType;
            if (null == tpType.Name)
            {
                // It's a standard type variable.
                // Method type parameters print as "!!n", class ones as "!n".
                if (tpType.IsMethodTypeParameter)
                {
                    ErrAppendChar('!');
                }
                ErrAppendChar('!');
                ErrAppendPrintf("{0}", tpType.IndexInTotalParameters);
            }
            else
            {
                ErrAppendName(tpType.Name);
            }
            break;
        case TypeKind.TK_NullType:
            // Load the string "<null>".
            ErrAppendId(MessageID.NULL);
            break;
        case TypeKind.TK_MethodGroupType:
            ErrAppendId(MessageID.MethodGroup);
            break;
        case TypeKind.TK_ArgumentListType:
            ErrAppendString(TokenFacts.GetText(TokenKind.ArgList));
            break;
        case TypeKind.TK_ArrayType:
            {
                // Arrays print element-type-first ("int[][,]"), so render the
                // innermost element type, then each rank's brackets outward-in.
                CType elementType = ((ArrayType)pType).BaseElementType;
                if (null == elementType)
                {
                    Debug.Assert(false, "No element type");
                    break;
                }
                ErrAppendType(elementType, pctx);
                for (elementType = pType;
                        elementType is ArrayType arrType;
                        elementType = arrType.ElementType)
                {
                    int rank = arrType.Rank;
                    // Add [] with (rank-1) commas inside
                    ErrAppendChar('[');
                    // known rank.
                    if (rank == 1)
                    {
                        if (!arrType.IsSZArray)
                        {
                            // Rank-1 but not zero-lower-bound: print "[*]".
                            ErrAppendChar('*');
                        }
                    }
                    else
                    {
                        for (int i = rank; i > 1; --i)
                        {
                            ErrAppendChar(',');
                        }
                    }
                    ErrAppendChar(']');
                }
                break;
            }
        case TypeKind.TK_VoidType:
            ErrAppendName(NameManager.GetPredefinedName(PredefinedName.PN_VOID));
            break;
        case TypeKind.TK_ParameterModifierType:
            ParameterModifierType mod = (ParameterModifierType)pType;
            // add ref or out
            ErrAppendString(mod.IsOut ? "out " : "ref ");
            // add base type name
            ErrAppendType(mod.ParameterType, pctx);
            break;
        case TypeKind.TK_PointerType:
            // Generate the base type.
            ErrAppendType(((PointerType)pType).ReferentType, pctx);
            {
                // add the trailing *
                ErrAppendChar('*');
            }
            break;
        case TypeKind.TK_NullableType:
            ErrAppendType(((NullableType)pType).UnderlyingType, pctx);
            ErrAppendChar('?');
            break;
        default:
            // Shouldn't happen.
            Debug.Assert(false, "Bad type kind");
            break;
    }
}
// Returns true if the argument could be converted to a string.
// Converts one error argument to its display string.
// Returns true when the argument kind is convertible; fUserStrings is set
// when the text was built by this class (symbol/type rendering) rather
// than taken verbatim.
public bool ErrArgToString(out string psz, ErrArg parg, out bool fUserStrings)
{
    fUserStrings = false;
    psz = null;
    bool result = true;
    switch (parg.eak)
    {
        case ErrArgKind.SymKind:
            ErrSK(out psz, parg.sk);
            break;
        case ErrArgKind.Type:
            BeginString();
            ErrAppendType(parg.pType, null);
            EndString(out psz);
            fUserStrings = true;
            break;
        case ErrArgKind.Sym:
            BeginString();
            ErrAppendSym(parg.sym, null);
            EndString(out psz);
            fUserStrings = true;
            break;
        case ErrArgKind.Name:
            // The internal indexer name displays as "this".
            if (parg.name == NameManager.GetPredefinedName(PredefinedName.PN_INDEXERINTERNAL))
            {
                psz = "this";
            }
            else
            {
                psz = parg.name.Text;
            }
            break;
        case ErrArgKind.Str:
            psz = parg.psz;
            break;
        case ErrArgKind.SymWithType:
            {
                // Render the symbol under its containing type's substitution.
                SubstContext ctx = new SubstContext(parg.swtMemo.ats, null);
                BeginString();
                ErrAppendSym(parg.swtMemo.sym, ctx, true);
                EndString(out psz);
                fUserStrings = true;
                break;
            }
        case ErrArgKind.MethWithInst:
            {
                // Render the method with both class and method type arguments.
                SubstContext ctx = new SubstContext(parg.mpwiMemo.ats, parg.mpwiMemo.typeArgs);
                BeginString();
                ErrAppendSym(parg.mpwiMemo.sym, ctx, true);
                EndString(out psz);
                fUserStrings = true;
                break;
            }
        default:
            result = false;
            break;
    }
    return result;
}
// Shorthand accessor for the TypeManager held by the global symbol context.
private TypeManager GetTypeManager()
{
    return m_globalSymbols.GetTypes();
}
// Resolves a MessageID to its message text.
private void ErrId(out string s, MessageID id)
{
    s = ErrorFacts.GetMessage(id);
}
}
}
| |
// Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.OLE.Interop;
using OleConstants = Microsoft.VisualStudio.OLE.Interop.Constants;
using IOleDataObject = Microsoft.VisualStudio.OLE.Interop.IDataObject;
using System.Security.Permissions;
using System.Globalization;
namespace Microsoft.VisualStudio.FSharp.ProjectSystem
{
/// <summary>
/// Manages the CopyPaste and Drag and Drop scenarios for a Project.
/// </summary>
/// <remarks>This is a partial class.</remarks>
public partial class ProjectNode : IVsUIHierWinClipboardHelperEvents
{
// NOTE(review): presumably the advise cookie for IVsUIHierWinClipboardHelperEvents —
// confirm where it is set/used (not visible in this portion of the partial class).
private uint copyPasteCookie;
// Clipboard format of the data currently being dragged over this hierarchy;
// set in DragEnter, cleared in DragLeave.
private DropDataType dropDataType;
/// <summary>
/// Called as soon as the mouse drags an item over a new hierarchy or hierarchy window
/// </summary>
/// <param name="pDataObject">reference to interface IDataObject of the item being dragged</param>
/// <param name="grfKeyState">Current state of the keyboard and the mouse modifier keys. See docs for a list of possible values</param>
/// <param name="itemid">Item identifier for the item currently being dragged</param>
/// <param name="pdwEffect">On entry, a pointer to the current DropEffect. On return, must contain the new valid DropEffect</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
/// <summary>
/// Called when the mouse first drags an item over this hierarchy window.
/// Detects and caches the clipboard format of the dragged data, then
/// reports the drop effect the hierarchy would apply.
/// </summary>
/// <param name="pDataObject">Data object of the item being dragged</param>
/// <param name="grfKeyState">Keyboard/mouse modifier state</param>
/// <param name="itemid">Item currently being dragged over</param>
/// <param name="pdwEffect">Receives the resulting DropEffect</param>
/// <returns>S_OK on success; otherwise an error code.</returns>
public override int DragEnter(IOleDataObject pDataObject, uint grfKeyState, uint itemid, ref uint pdwEffect)
{
    // Cache the format so DragOver/Drop don't need to re-query the data object.
    this.dropDataType = QueryDropDataType(pDataObject);
    pdwEffect = this.dropDataType == DropDataType.None
        ? (uint)DropEffect.None
        : (uint)this.QueryDropEffect(this.dropDataType, grfKeyState);
    return VSConstants.S_OK;
}
/// <summary>
/// Called when one or more items are dragged out of the hierarchy or hierarchy window, or when the drag-and-drop operation is cancelled or completed.
/// </summary>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public override int DragLeave()
{
    // Forget the cached clipboard format; a subsequent DragEnter re-detects it.
    this.dropDataType = DropDataType.None;
    return VSConstants.S_OK;
}
/// <summary>
/// Called when one or more items are dragged over the target hierarchy or hierarchy window.
/// </summary>
/// <param name="grfKeyState">Current state of the keyboard keys and the mouse modifier buttons. See <seealso cref="IVsHierarchyDropDataTarget"/></param>
/// <param name="itemid">Item identifier of the drop data target over which the item is being dragged</param>
/// <param name="pdwEffect"> On entry, reference to the value of the pdwEffect parameter of the IVsHierarchy object, identifying all effects that the hierarchy supports.
/// On return, the pdwEffect parameter must contain one of the effect flags that indicate the result of the drop operation. For a list of pwdEffects values, see <seealso cref="DragEnter"/></param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public override int DragOver(uint grfKeyState, uint itemid, ref uint pdwEffect)
{
    // Default to "no drop" until every guard below has passed.
    pdwEffect = (uint)DropEffect.None;
    // Dragging items to a project that is being debugged is not supported
    // (see VSWhidbey 144785)
    DBGMODE dbgMode = VsShellUtilities.GetDebugMode(this.Site) & ~DBGMODE.DBGMODE_EncMask;
    if (dbgMode == DBGMODE.DBGMODE_Run || dbgMode == DBGMODE.DBGMODE_Break)
    {
        // S_OK with DropEffect.None: the drag continues but no drop is offered.
        return VSConstants.S_OK;
    }
    if (this.isClosed || this.site == null)
    {
        return VSConstants.E_UNEXPECTED;
    }
    // We should also analyze if the node being dragged over can accept the drop.
    if (!this.CanTargetNodeAcceptDrop(itemid))
    {
        return VSConstants.E_NOTIMPL;
    }
    // dropDataType was cached by DragEnter; None means the format is unusable.
    if (this.dropDataType != DropDataType.None)
    {
        pdwEffect = (uint)this.QueryDropEffect(this.dropDataType, grfKeyState);
    }
    return VSConstants.S_OK;
}
/// <summary>
/// Called when one or more items are dropped into the target hierarchy or hierarchy window when the mouse button is released.
/// </summary>
/// <param name="pDataObject">Reference to the IDataObject interface on the item being dragged. This data object contains the data being transferred in the drag-and-drop operation.
/// If the drop occurs, then this data object (item) is incorporated into the target hierarchy or hierarchy window.</param>
/// <param name="grfKeyState">Current state of the keyboard and the mouse modifier keys. See <seealso cref="IVsHierarchyDropDataTarget"/></param>
/// <param name="itemid">Item identifier of the drop data target over which the item is being dragged</param>
/// <param name="pdwEffect">Visual effects associated with the drag-and drop-operation, such as a cursor, bitmap, and so on.
/// The value of dwEffects passed to the source object via the OnDropNotify method is the value of pdwEffects returned by the Drop method</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
public override int Drop(IOleDataObject pDataObject, uint grfKeyState, uint itemid, ref uint pdwEffect)
{
    if (pDataObject == null)
    {
        return VSConstants.E_INVALIDARG;
    }
    pdwEffect = (uint)DropEffect.None;
    // Get the node that is being dragged over and ask it which node should handle this call
    HierarchyNode targetNode = NodeFromItemId(itemid);
    if (targetNode != null)
    {
        targetNode = targetNode.GetDragTargetHandlerNode();
    }
    else
    {
        // There is no target node. The drop can not be completed.
        return VSConstants.S_FALSE;
    }
    int returnValue;
    try
    {
        DropDataType dropDataType = DropDataType.None;
        dropDataType = ProcessSelectionDataObject(pDataObject, targetNode);
        pdwEffect = (uint)this.QueryDropEffect(dropDataType, grfKeyState);
        // If it is a drop from windows and we get any kind of error we return S_FALSE and dropeffect none. This
        // prevents bogus messages from the shell from being displayed
        returnValue = (dropDataType != DropDataType.Shell) ? VSConstants.E_FAIL : VSConstants.S_OK;
    }
    catch (System.IO.FileNotFoundException e)
    {
        Trace.WriteLine("Exception : " + e.Message);
        // Show the failure to the user unless a test/automation host is driving VS.
        if (!Utilities.IsInAutomationFunction(this.Site))
        {
            string message = e.Message;
            string title = string.Empty;
            OLEMSGICON icon = OLEMSGICON.OLEMSGICON_CRITICAL;
            OLEMSGBUTTON buttons = OLEMSGBUTTON.OLEMSGBUTTON_OK;
            OLEMSGDEFBUTTON defaultButton = OLEMSGDEFBUTTON.OLEMSGDEFBUTTON_FIRST;
            VsShellUtilities.ShowMessageBox(this.Site, title, message, icon, buttons, defaultButton);
        }
        returnValue = VSConstants.E_FAIL;
    }
    return returnValue;
}
/// <summary>
/// Returns information about one or more of the items being dragged
/// </summary>
/// <param name="pdwOKEffects">Pointer to a DWORD value describing the effects displayed while the item is being dragged,
/// such as cursor icons that change during the drag-and-drop operation.
/// For example, if the item is dragged over an invalid target point
/// (such as the item's original location), the cursor icon changes to a circle with a line through it.
/// Similarly, if the item is dragged over a valid target point, the cursor icon changes to a file or folder.</param>
/// <param name="ppDataObject">Pointer to the IDataObject interface on the item being dragged.
/// This data object contains the data being transferred in the drag-and-drop operation.
/// If the drop occurs, then this data object (item) is incorporated into the target hierarchy or hierarchy window.</param>
/// <param name="ppDropSource">Pointer to the IDropSource interface of the item being dragged.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public override int GetDropInfo(out uint pdwOKEffects, out IOleDataObject ppDataObject, out IDropSource ppDropSource)
{
    //init out params
    pdwOKEffects = (uint)DropEffect.None;
    ppDataObject = null;
    ppDropSource = null;
    // Package the current selection; false = do not cut-highlight the items.
    IOleDataObject dataObject = PackageSelectionDataObject(false);
    if (dataObject == null)
    {
        return VSConstants.E_NOTIMPL;
    }
    // Mark that this hierarchy is the drag source so OnDropNotify/OnPaste react.
    this.SourceDraggedOrCutOrCopied = true;
    pdwOKEffects = (uint)(DropEffect.Move | DropEffect.Copy);
    ppDataObject = dataObject;
    return VSConstants.S_OK;
}
/// <summary>
/// Notifies clients that the dragged item was dropped.
/// </summary>
/// <param name="fDropped">If true, then the dragged item was dropped on the target. If false, then the drop did not occur.</param>
/// <param name="dwEffects">Visual effects associated with the drag-and-drop operation, such as cursors, bitmaps, and so on.
/// The value of dwEffects passed to the source object via OnDropNotify method is the value of pdwEffects returned by Drop method.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
public override int OnDropNotify(int fDropped, uint dwEffects)
{
    // Ignore notifications for drags that did not originate from this hierarchy.
    if (!this.SourceDraggedOrCutOrCopied)
    {
        return VSConstants.S_FALSE;
    }
    // fDropped != 0 means the drop actually occurred; a Move effect removes
    // the source items.
    this.CleanupSelectionDataObject(fDropped != 0, false, dwEffects == (uint)DropEffect.Move);
    this.SourceDraggedOrCutOrCopied = false;
    return VSConstants.S_OK;
}
/// <summary>
/// Allows the drag source to prompt to save unsaved items being dropped.
/// Notifies the source hierarchy that information dragged from it is about to be dropped on a target.
/// This method is called immediately after the mouse button is released on a drop.
/// </summary>
/// <param name="o">Reference to the IDataObject interface on the item being dragged.
/// This data object contains the data being transferred in the drag-and-drop operation.
/// If the drop occurs, then this data object (item) is incorporated into the hierarchy window of the new hierarchy.</param>
/// <param name="dwEffect">Current state of the keyboard and the mouse modifier keys.</param>
/// <param name="fCancelDrop">If true, then the drop is cancelled by the source hierarchy. If false, then the drop can continue.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
public override int OnBeforeDropNotify(IOleDataObject o, uint dwEffect, out int fCancelDrop)
{
    // If there is nothing to be dropped just return that drop should be cancelled.
    if (this.ItemsDraggedOrCutOrCopied == null)
    {
        fCancelDrop = 1;
        return VSConstants.S_OK;
    }
    fCancelDrop = 0;
    bool dirty = false;
    // Scan the dragged items for any document we opened that has unsaved edits.
    foreach (HierarchyNode node in this.ItemsDraggedOrCutOrCopied)
    {
        bool isDirty, isOpen, isOpenedByUs;
        uint docCookie;
        IVsPersistDocData ppIVsPersistDocData;
        DocumentManager manager = node.GetDocumentManager();
        if (manager != null)
        {
            manager.GetDocInfo(out isOpen, out isDirty, out isOpenedByUs, out docCookie, out ppIVsPersistDocData);
            if (isDirty && isOpenedByUs)
            {
                dirty = true;
                break;
            }
        }
    }
    // if there are no dirty docs we are ok to proceed
    if (!dirty)
    {
        return VSConstants.S_OK;
    }
    // Prompt to save if there are dirty docs
    string message = SR.GetString(SR.SaveModifiedDocuments, CultureInfo.CurrentUICulture);
    string title = string.Empty;
    OLEMSGICON icon = OLEMSGICON.OLEMSGICON_WARNING;
    OLEMSGBUTTON buttons = OLEMSGBUTTON.OLEMSGBUTTON_YESNOCANCEL;
    OLEMSGDEFBUTTON defaultButton = OLEMSGDEFBUTTON.OLEMSGDEFBUTTON_FIRST;
    int result = VsShellUtilities.ShowMessageBox(Site, title, message, icon, buttons, defaultButton);
    switch (result)
    {
        case NativeMethods.IDYES:
            // Yes: fall through to the save loop below.
            break;
        case NativeMethods.IDNO:
            // No: proceed with the drop without saving.
            return VSConstants.S_OK;
        // Cancel (and any unexpected result) cancels the drop.
        case NativeMethods.IDCANCEL: goto default;
        default:
            fCancelDrop = 1;
            return VSConstants.S_OK;
    }
    // Save all dirty documents
    foreach (HierarchyNode node in this.ItemsDraggedOrCutOrCopied)
    {
        DocumentManager manager = node.GetDocumentManager();
        if (manager != null)
        {
            manager.Save(true);
        }
    }
    return VSConstants.S_OK;
}
/// <summary>
/// Called after your cut/copied items has been pasted
/// </summary>
///<param name="wasCut">If true, then the IDataObject has been successfully pasted into a target hierarchy.
/// If false, then the cut or copy operation was cancelled.</param>
/// <param name="dropEffect">Visual effects associated with the drag and drop operation, such as cursors, bitmaps, and so on.
/// These should be the same visual effects used in OnDropNotify</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
public virtual int OnPaste(int wasCut, uint dropEffect)
{
    // Ignore notifications for clipboard operations not initiated by this hierarchy.
    if (!this.SourceDraggedOrCutOrCopied)
    {
        return VSConstants.S_FALSE;
    }
    if (dropEffect == (uint)DropEffect.None)
    {
        // No effect means the paste amounted to a cancellation.
        return OnClear(wasCut);
    }
    // A Move effect removes the cut items from the source hierarchy.
    this.CleanupSelectionDataObject(false, wasCut != 0, dropEffect == (uint)DropEffect.Move);
    this.SourceDraggedOrCutOrCopied = false;
    return VSConstants.S_OK;
}
/// <summary>
/// Called when your cut/copied operation is canceled
/// </summary>
/// <param name="wasCut">This flag informs the source that the Cut method was called (true),
/// rather than Copy (false), so the source knows whether to "un-cut-highlight" the items that were cut.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
public virtual int OnClear(int wasCut)
{
    // Ignore notifications for clipboard operations not initiated by this hierarchy.
    if (!this.SourceDraggedOrCutOrCopied)
    {
        return VSConstants.S_FALSE;
    }
    // Final true = clear the cut-highlight from the items that were cut.
    this.CleanupSelectionDataObject(false, wasCut != 0, false, true);
    this.SourceDraggedOrCutOrCopied = false;
    return VSConstants.S_OK;
}
/// <summary>
/// Determines if a node can accept drop opertaion.
/// </summary>
/// <param name="itemId">The id of the node.</param>
/// <returns>true if the node acceots drag operation.</returns>
/// <summary>
/// Determines whether the node identified by <paramref name="itemId"/> can
/// accept a drop operation.
/// </summary>
/// <param name="itemId">The id of the node.</param>
/// <returns>true if the node accepts the drop operation.</returns>
public virtual bool CanTargetNodeAcceptDrop(uint itemId)
{
    // References (and the references container) are never valid drop targets;
    // everything else, including an unresolved id, is accepted.
    HierarchyNode node = NodeFromItemId(itemId);
    return !(node is ReferenceContainerNode || node is ReferenceNode);
}
/// <summary>
/// Returns a dataobject from selected nodes
/// </summary>
/// <param name="cutHighlightItems">boolean that defines if the selected items must be cut</param>
/// <returns>data object for selected items</returns>
internal virtual DataObject PackageSelectionDataObject(bool cutHighlightItems)
{
    // Reset any previous cut/copy state before packaging a new selection.
    this.CleanupSelectionDataObject(false, false, false);
    StringBuilder sb = new StringBuilder();
    DataObject dataObject = null;
    try
    {
        IList<HierarchyNode> selectedNodes = this.GetSelectedNodes();
        if (selectedNodes != null)
        {
            this.InstantiateItemsDraggedOrCutOrCopiedList();
            StringBuilder selectionContent = null;
            // If there is a selection package the data
            if (selectedNodes.Count > 1)
            {
                foreach (HierarchyNode node in selectedNodes)
                {
                    selectionContent = node.PrepareSelectedNodesForClipBoard();
                    if (selectionContent != null)
                    {
                        sb.Append(selectionContent);
                    }
                }
            }
            else if (selectedNodes.Count == 1)
            {
                HierarchyNode selectedNode = selectedNodes[0];
                selectionContent = selectedNode.PrepareSelectedNodesForClipBoard();
                if (selectionContent != null)
                {
                    sb.Append(selectionContent);
                }
            }
        }
        // Add the project items first.
        IntPtr ptrToItems = this.PackageSelectionData(sb, false);
        if (ptrToItems == IntPtr.Zero)
        {
            return null;
        }
        FORMATETC fmt = DragDropHelper.CreateFormatEtc(DragDropHelper.CF_VSSTGPROJECTITEMS);
        dataObject = new DataObject();
        dataObject.SetData(fmt, ptrToItems);
        // Now add the project path that sourced data. We just write the project file path.
        IntPtr ptrToProjectPath = this.PackageSelectionData(new StringBuilder(this.GetMkDocument()), true);
        if (ptrToProjectPath != IntPtr.Zero)
        {
            dataObject.SetData(DragDropHelper.CreateFormatEtc(DragDropHelper.CF_VSPROJECTCLIPDESCRIPTOR), ptrToProjectPath);
        }
        if (cutHighlightItems)
        {
            // First item uses CutHighlight; subsequent items add to the highlight set.
            bool first = true;
            IVsUIHierarchyWindow w = UIHierarchyUtilities.GetUIHierarchyWindow(this.site, HierarchyNode.SolutionExplorer);
            foreach (HierarchyNode node in this.ItemsDraggedOrCutOrCopied)
            {
                ErrorHandler.ThrowOnFailure(w.ExpandItem((IVsUIHierarchy)this, node.ID, first ? EXPANDFLAGS.EXPF_CutHighlightItem : EXPANDFLAGS.EXPF_AddCutHighlightItem));
                first = false;
            }
        }
    }
    catch (COMException e)
    {
        // COM failures while packaging are non-fatal: log and return no data object.
        Trace.WriteLine("Exception : " + e.Message);
        dataObject = null;
    }
    return dataObject;
}
/// <summary>
/// This is used to recursively add a folder from an other project.
/// Note that while we copy the folder content completely, we only
/// add to the project items which are part of the source project.
/// </summary>
/// <param name="folderToAdd">Project reference (from data object) using the format: {Guid}|project|folderPath</param>
/// <param name="targetNode">Node to add the new folder to</param>
private void AddFolderFromOtherProject(string folderToAdd, HierarchyNode targetNode)
{
    if (String.IsNullOrEmpty(folderToAdd))
        throw new ArgumentNullException("folderToAdd");
    if (targetNode == null)
        throw new ArgumentNullException("targetNode");

    // Split the reference in its 3 parts.
    // index1 is the length of a brace-formatted guid ("{...}").
    int index1 = Guid.Empty.ToString("B").Length;
    if (index1 + 1 >= folderToAdd.Length)
        throw new ArgumentException("folderToAdd");

    // Get the Guid (strip the surrounding braces).
    string guidString = folderToAdd.Substring(1, index1 - 2);
    Guid projectInstanceGuid = new Guid(guidString);

    // Get the project path: only used here to locate the second '|' separator.
    int index2 = folderToAdd.IndexOf('|', index1 + 1);
    if (index2 < 0 || index2 + 1 >= folderToAdd.Length)
        throw new ArgumentException("folderToAdd");

    // Finally get the source path
    string folder = folderToAdd.Substring(index2 + 1);

    // Get the target path. With a trailing separator on `folder`, GetDirectoryName
    // yields the folder itself, so folderName is the dropped folder's own name.
    string folderName = Path.GetFileName(Path.GetDirectoryName(folder));
    string targetPath = Path.Combine(GetBaseDirectoryForAddingFiles(targetNode), folderName);

    // Recursively copy the directory to the new location
    Utilities.RecursivelyCopyDirectory(folder, targetPath);

    // Retrieve the project from which the items are being copied
    IVsHierarchy sourceHierarchy;
    IVsSolution solution = (IVsSolution)GetService(typeof(SVsSolution));
    ErrorHandler.ThrowOnFailure(solution.GetProjectOfGuid(ref projectInstanceGuid, out sourceHierarchy));

    // Then retrieve the item ID of the item to copy
    uint itemID = VSConstants.VSITEMID_ROOT;
    ErrorHandler.ThrowOnFailure(sourceHierarchy.ParseCanonicalName(folder, out itemID));

    // Ensure we don't end up in an endless recursion: when the source is this same
    // hierarchy, refuse to drop a folder onto itself or onto one of its ancestors.
    if (Utilities.IsSameComObject(this, sourceHierarchy))
    {
        HierarchyNode cursorNode = targetNode;
        while (cursorNode != null)
        {
            if (String.Compare(folder, cursorNode.GetMkDocument(), StringComparison.OrdinalIgnoreCase) == 0)
                throw new ApplicationException();
            cursorNode = cursorNode.Parent;
        }
    }

    // Now walk the source project hierarchy to see which node needs to be added.
    WalkSourceProjectAndAdd(sourceHierarchy, itemID, targetNode, false);
}
/// <summary>
/// Recursive method that walks a hierarchy and adds the items it finds to our project.
/// Note that this is meant as a helper to the Copy/Paste and Drag/Drop functionality.
/// </summary>
/// <param name="sourceHierarchy">Hierarchy to walk</param>
/// <param name="itemId">Item ID where to start walking the hierarchy</param>
/// <param name="targetNode">Node to start adding to</param>
/// <param name="addSiblings">Typically false on first call and true after that</param>
public virtual void WalkSourceProjectAndAdd(IVsHierarchy sourceHierarchy, uint itemId, HierarchyNode targetNode, bool addSiblings)
{
    // Before we start the walk, add the current node
    object variant = null;
    HierarchyNode newNode = targetNode;
    if (itemId != VSConstants.VSITEMID_NIL)
    {
        // Calculate the corresponding path in our project
        string source;
        ErrorHandler.ThrowOnFailure(((IVsProject)sourceHierarchy).GetMkDocument(itemId, out source));
        string name = Path.GetFileName(source.TrimEnd(new char[] { '/', '\\' }));
        string targetPath = Path.Combine(GetBaseDirectoryForAddingFiles(targetNode), name);

        // See if this is a linked item (file can be linked, not folders).
        // E_NOTIMPL is tolerated as an acceptable failure code here.
        ErrorHandler.ThrowOnFailure(sourceHierarchy.GetProperty(itemId, (int)__VSHPROPID.VSHPROPID_BrowseObject, out variant), VSConstants.E_NOTIMPL);
        VSLangProj.FileProperties fileProperties = variant as VSLangProj.FileProperties;
        if (fileProperties != null && fileProperties.IsLink)
        {
            // Since we don't support linked item, we make a copy of the file into our storage where it would have been linked
            File.Copy(source, targetPath, true);
        }
        newNode = AddNodeIfTargetExistInStorage(targetNode, name, targetPath);

        // Start with child nodes (depth first)
        variant = null;
        ErrorHandler.ThrowOnFailure(sourceHierarchy.GetProperty(itemId, (int)__VSHPROPID.VSHPROPID_FirstVisibleChild, out variant));
        uint currentItemID = (uint)(int)variant;
        // Children are parented under the node we just added (newNode).
        WalkSourceProjectAndAdd(sourceHierarchy, currentItemID, newNode, true);

        if (addSiblings)
        {
            // Then look at siblings; they are parented under the original targetNode.
            currentItemID = itemId;
            while (currentItemID != VSConstants.VSITEMID_NIL)
            {
                variant = null;
                ErrorHandler.ThrowOnFailure(sourceHierarchy.GetProperty(currentItemID, (int)__VSHPROPID.VSHPROPID_NextVisibleSibling, out variant));
                currentItemID = (uint)(int)variant;
                WalkSourceProjectAndAdd(sourceHierarchy, currentItemID, targetNode, true);
            }
        }
    }
}
/// <summary>
/// Add an existing item (file/folder) to the project if it already exists in our storage.
/// </summary>
/// <param name="parentNode">Node to add this item to</param>
/// <param name="name">Name of the item being added</param>
/// <param name="targetPath">Path of the item being added</param>
/// <returns>Node that was added, or <paramref name="parentNode"/> unchanged when
/// <paramref name="targetPath"/> exists neither as a file nor as a directory.</returns>
public virtual HierarchyNode AddNodeIfTargetExistInStorage(HierarchyNode parentNode, string name, string targetPath)
{
    HierarchyNode newNode = parentNode;
    // If the file/directory exist, add a node for it
    if (File.Exists(targetPath))
    {
        VSADDRESULT[] result = new VSADDRESULT[1];
        ErrorHandler.ThrowOnFailure(this.AddItem(parentNode.ID, VSADDITEMOPERATION.VSADDITEMOP_OPENFILE, name, 1, new string[] { targetPath }, IntPtr.Zero, result));
        if (result[0] != VSADDRESULT.ADDRESULT_Success)
            throw new ApplicationException();
        // The freshly added item must be findable under this hierarchy.
        newNode = this.FindChild(targetPath);
        if (newNode == null)
            throw new ApplicationException();
    }
    else if (Directory.Exists(targetPath))
    {
        newNode = this.CreateFolderNodes(targetPath);
    }
    return newNode;
}
/// <summary>
/// Handles the Cut operation to the clipboard.
/// </summary>
/// <returns>An HRESULT: S_OK on success, OLECMDERR_E_NOTSUPPORTED when nothing was cut,
/// E_FAIL when the clipboard helper service is unavailable.</returns>
public override int CutToClipboard()
{
    int hr = (int)OleConstants.OLECMDERR_E_NOTSUPPORTED;
    try
    {
        this.RegisterClipboardNotifications(true);

        // Package the selection and cut-highlight the affected item(s).
        IOleDataObject dataObject = this.PackageSelectionDataObject(true);
        if (dataObject == null)
        {
            return hr;
        }

        this.SourceDraggedOrCutOrCopied = true;

        // Place the cut item(s) on the OLE clipboard.
        ErrorHandler.ThrowOnFailure(UnsafeNativeMethods.OleSetClipboard(dataObject));

        // Inform VS (UiHierarchyWindow) of the cut.
        IVsUIHierWinClipboardHelper clipboardHelper = (IVsUIHierWinClipboardHelper)GetService(typeof(SVsUIHierWinClipboardHelper));
        if (clipboardHelper == null)
        {
            return VSConstants.E_FAIL;
        }
        hr = ErrorHandler.ThrowOnFailure(clipboardHelper.Cut(dataObject));
    }
    catch (COMException e)
    {
        Trace.WriteLine("Exception : " + e.Message);
        hr = e.ErrorCode;
    }
    return hr;
}
/// <summary>
/// Handles the Copy operation to the clipboard.
/// </summary>
/// <returns>An HRESULT: S_OK on success, OLECMDERR_E_NOTSUPPORTED when nothing was copied,
/// E_FAIL when the clipboard helper service is unavailable.</returns>
public override int CopyToClipboard()
{
    int hr = (int)OleConstants.OLECMDERR_E_NOTSUPPORTED;
    try
    {
        this.RegisterClipboardNotifications(true);

        // Package the selection; a copy does not cut-highlight the items.
        IOleDataObject dataObject = this.PackageSelectionDataObject(false);
        if (dataObject == null)
        {
            return hr;
        }

        this.SourceDraggedOrCutOrCopied = true;

        // Place the copied item(s) on the OLE clipboard.
        ErrorHandler.ThrowOnFailure(UnsafeNativeMethods.OleSetClipboard(dataObject));

        // Inform VS (UiHierarchyWindow) of the copy.
        IVsUIHierWinClipboardHelper clipboardHelper = (IVsUIHierWinClipboardHelper)GetService(typeof(SVsUIHierWinClipboardHelper));
        if (clipboardHelper == null)
        {
            return VSConstants.E_FAIL;
        }
        hr = ErrorHandler.ThrowOnFailure(clipboardHelper.Copy(dataObject));
    }
    catch (COMException e)
    {
        Trace.WriteLine("Exception : " + e.Message);
        hr = e.ErrorCode;
    }
    catch (ArgumentException e)
    {
        Trace.WriteLine("Exception : " + e.Message);
        hr = Marshal.GetHRForException(e);
    }
    return hr;
}
/// <summary>
/// Handle the Paste operation to a targetNode
/// </summary>
/// <param name="targetNode">Node that receives the pasted items.</param>
/// <returns>An HRESULT; OLECMDERR_E_NOTSUPPORTED when the clipboard content could not
/// be processed, E_FAIL when the clipboard helper service is unavailable.</returns>
public override int PasteFromClipboard(HierarchyNode targetNode)
{
    int returnValue = (int)OleConstants.OLECMDERR_E_NOTSUPPORTED;

    //Get the clipboardhelper service and use it after processing dataobject
    IVsUIHierWinClipboardHelper clipboardHelper = (IVsUIHierWinClipboardHelper)GetService(typeof(SVsUIHierWinClipboardHelper));
    if (clipboardHelper == null)
    {
        return VSConstants.E_FAIL;
    }
    try
    {
        //Get dataobject from clipboard
        IOleDataObject dataObject = null;
        ErrorHandler.ThrowOnFailure(UnsafeNativeMethods.OleGetClipboard(out dataObject));
        if (dataObject == null)
        {
            return VSConstants.E_UNEXPECTED;
        }

        DropEffect dropEffect = DropEffect.None;
        DropDataType dropDataType = DropDataType.None;
        try
        {
            // Add the items; grfKeyState 0 means no modifier keys were pressed.
            dropDataType = this.ProcessSelectionDataObject(dataObject, targetNode.GetDragTargetHandlerNode());
            dropEffect = this.QueryDropEffect(dropDataType, 0);
        }
        catch (ExternalException e)
        {
            Trace.WriteLine("Exception : " + e.Message);

            // If it is a drop from windows and we get any kind of error ignore it. This
            // prevents bogus messages from the shell from being displayed
            if (dropDataType != DropDataType.Shell)
            {
                throw;
            }
        }
        finally
        {
            // Inform VS (UiHierarchyWindow) of the paste, even when processing failed,
            // so the clipboard-source hierarchy can complete its cut/copy (see OnPaste).
            returnValue = clipboardHelper.Paste(dataObject, (uint)dropEffect);
        }
    }
    catch (COMException e)
    {
        Trace.WriteLine("Exception : " + e.Message);
        returnValue = e.ErrorCode;
    }
    return returnValue;
}
/// <summary>
/// Determines if the paste command should be allowed.
/// </summary>
/// <returns>true when the clipboard holds a format this project can paste.</returns>
public override bool AllowPasteCommand()
{
    IOleDataObject dataObject = null;
    try
    {
        ErrorHandler.ThrowOnFailure(UnsafeNativeMethods.OleGetClipboard(out dataObject));
        if (dataObject == null)
        {
            return false;
        }

        // Probe the clipboard for the formats we understand, in order of preference:
        // storage-based project items, reference-based project items, shell file drops.
        ushort[] supportedFormats = new ushort[]
        {
            (ushort)DragDropHelper.CF_VSSTGPROJECTITEMS,
            (ushort)DragDropHelper.CF_VSREFPROJECTITEMS,
            (ushort)NativeMethods.CF_HDROP
        };

        foreach (ushort clipFormat in supportedFormats)
        {
            FORMATETC format = DragDropHelper.CreateFormatEtc(clipFormat);
            if (dataObject.QueryGetData(new FORMATETC[] { format }) == VSConstants.S_OK)
            {
                return true;
            }
        }
        return false;
    }
    // We catch External exceptions since it might be that it is not our data on the clipboard.
    catch (ExternalException e)
    {
        Trace.WriteLine("Exception :" + e.Message);
        return false;
    }
}
/// <summary>
/// Register/Unregister for Clipboard events for the UiHierarchyWindow (solution explorer).
/// Registration is tracked through copyPasteCookie, so repeated calls are harmless.
/// </summary>
/// <param name="register">true for register, false for unregister</param>
public override void RegisterClipboardNotifications(bool register)
{
    // Get the UiHierarchy window clipboard helper service.
    IVsUIHierWinClipboardHelper clipboardHelper = (IVsUIHierWinClipboardHelper)GetService(typeof(SVsUIHierWinClipboardHelper));
    if (clipboardHelper == null)
    {
        return;
    }

    bool alreadyRegistered = this.copyPasteCookie != 0;
    if (register)
    {
        if (!alreadyRegistered)
        {
            ErrorHandler.ThrowOnFailure(clipboardHelper.AdviseClipboardHelperEvents(this, out this.copyPasteCookie));
            Debug.Assert(this.copyPasteCookie != 0, "AdviseClipboardHelperEvents returned an invalid cookie");
        }
    }
    else if (alreadyRegistered)
    {
        ErrorHandler.ThrowOnFailure(clipboardHelper.UnadviseClipboardHelperEvents(this.copyPasteCookie));
        this.copyPasteCookie = 0;
    }
}
/// <summary>
/// Process dataobject from Drag/Drop/Cut/Copy/Paste operation
/// </summary>
/// <param name="dataObject">The data object holding the dropped/pasted items.</param>
/// <param name="targetNode">Set if the method is called from a drop operation, otherwise it is null.</param>
/// <returns>The format the items were supplied in, or DropDataType.None when nothing was added.</returns>
internal DropDataType ProcessSelectionDataObject(IOleDataObject dataObject, HierarchyNode targetNode)
{
    DropDataType dropDataType = DropDataType.None;
    bool isWindowsFormat = false;

    // Try to get it as a directory based project.
    List<string> filesDropped = DragDropHelper.GetDroppedFiles(DragDropHelper.CF_VSSTGPROJECTITEMS, dataObject, out dropDataType);
    if (filesDropped.Count == 0)
    {
        filesDropped = DragDropHelper.GetDroppedFiles(DragDropHelper.CF_VSREFPROJECTITEMS, dataObject, out dropDataType);
    }
    if (filesDropped.Count == 0)
    {
        // Fall back to the Windows Explorer file-drop format.
        filesDropped = DragDropHelper.GetDroppedFiles(NativeMethods.CF_HDROP, dataObject, out dropDataType);
        isWindowsFormat = (filesDropped.Count > 0);
    }

    if (dropDataType != DropDataType.None && filesDropped.Count > 0)
    {
        string[] filesDroppedAsArray = filesDropped.ToArray();

        // Without an explicit target (paste) the items go to the project root.
        HierarchyNode node = (targetNode == null) ? this : targetNode;

        // For directory based projects the content of the clipboard is a double-NULL terminated list of Projref strings.
        if (isWindowsFormat)
        {
            // This is the code path when source is windows explorer
            VSADDRESULT[] vsaddresults = new VSADDRESULT[1];
            vsaddresults[0] = VSADDRESULT.ADDRESULT_Failure;
            int addResult = AddItem(node.ID, VSADDITEMOPERATION.VSADDITEMOP_OPENFILE, null, (uint)filesDropped.Count, filesDroppedAsArray, IntPtr.Zero, vsaddresults);
            // Cancellation and S_FALSE are tolerated; everything else must succeed.
            if (addResult != VSConstants.S_OK && addResult != VSConstants.S_FALSE && addResult != (int)OleConstants.OLECMDERR_E_CANCELED
                && vsaddresults[0] != VSADDRESULT.ADDRESULT_Success)
            {
                ErrorHandler.ThrowOnFailure(addResult);
            }
            return dropDataType;
        }
        else
        {
            if (AddFilesFromProjectReferences(node, filesDroppedAsArray))
            {
                return dropDataType;
            }
        }
    }

    // If we reached this point then the drop data must be set to None.
    // Otherwise the OnPaste will be called with a valid DropData and that would actually delete the item.
    return DropDataType.None;
}
/// <summary>
/// Get the dropdatatype from the dataobject.
/// </summary>
/// <param name="pDataObject">The dataobject to be analysed for its format</param>
/// <returns>dropdatatype or none if dataobject does not contain known format</returns>
internal static DropDataType QueryDropDataType(IOleDataObject pDataObject)
{
    if (pDataObject == null)
    {
        return DropDataType.None;
    }

    // Known formats include File Drops (as from Windows Explorer),
    // VSProject Reference Items and VSProject Storage Items; probe in that order.
    FORMATETC probe = DragDropHelper.CreateFormatEtc(NativeMethods.CF_HDROP);
    if (DragDropHelper.QueryGetData(pDataObject, ref probe) == VSConstants.S_OK)
    {
        return DropDataType.Shell;
    }

    probe.cfFormat = DragDropHelper.CF_VSREFPROJECTITEMS;
    if (DragDropHelper.QueryGetData(pDataObject, ref probe) == VSConstants.S_OK)
    {
        // Data is from a Ref-based project.
        return DropDataType.VsRef;
    }

    probe.cfFormat = DragDropHelper.CF_VSSTGPROJECTITEMS;
    if (DragDropHelper.QueryGetData(pDataObject, ref probe) == VSConstants.S_OK)
    {
        return DropDataType.VsStg;
    }

    return DropDataType.None;
}
/// <summary>
/// Returns the drop effect.
/// </summary>
/// <remarks>
/// A directory based project should perform as follows:
/// NO MODIFIER - COPY if not from current hierarchy, MOVE if from current hierarchy
/// SHIFT DRAG - MOVE
/// CTRL DRAG - COPY
/// CTRL-SHIFT DRAG - NO DROP (used for reference based projects only)
/// </remarks>
internal DropEffect QueryDropEffect(DropDataType dropDataType, uint grfKeyState)
{
    // Validate the dropdatatype: unknown formats cannot be dropped at all.
    bool isKnownType = dropDataType == DropDataType.Shell
        || dropDataType == DropDataType.VsRef
        || dropDataType == DropDataType.VsStg;
    if (!isKnownType)
    {
        return DropEffect.None;
    }

    bool ctrlDown = (grfKeyState & NativeMethods.MK_CONTROL) != 0;
    bool shiftDown = (grfKeyState & NativeMethods.MK_SHIFT) != 0;

    if (ctrlDown && shiftDown)
    {
        // CTRL-SHIFT: because we are not reference based, we don't support link.
        return DropEffect.None;
    }
    if (ctrlDown)
    {
        return DropEffect.Copy;
    }
    if (shiftDown)
    {
        return DropEffect.Move;
    }

    // No modifier: move within our own hierarchy, copy from anywhere else.
    return this.SourceDraggedOrCutOrCopied ? DropEffect.Move : DropEffect.Copy;
}
/// <summary>
/// Convenience overload of <see cref="CleanupSelectionDataObject(bool, bool, bool, bool)"/>
/// with justCleanup set to false.
/// </summary>
public void CleanupSelectionDataObject(bool dropped, bool cut, bool moved)
{
    this.CleanupSelectionDataObject(dropped, cut, moved, false);
}
/// <summary>
/// After a drop or paste, will use the dwEffects
/// to determine whether we need to clean up the source nodes or not. If
/// justCleanup is set, it only does the cleanup work.
/// </summary>
/// <param name="dropped">True when the data was dropped (drag/drop) rather than pasted.</param>
/// <param name="cut">True when the originating operation was a Cut.</param>
/// <param name="moved">True when the effect was a move, i.e. the source items must be removed.</param>
/// <param name="justCleanup">True to only release clipboard state without removing any source item.</param>
public void CleanupSelectionDataObject(bool dropped, bool cut, bool moved, bool justCleanup)
{
    if (this.ItemsDraggedOrCutOrCopied == null || this.ItemsDraggedOrCutOrCopied.Count == 0)
    {
        return;
    }
    try
    {
        IVsUIHierarchyWindow w = UIHierarchyUtilities.GetUIHierarchyWindow(this.site, HierarchyNode.SolutionExplorer);
        foreach (HierarchyNode node in this.ItemsDraggedOrCutOrCopied)
        {
            if ((moved && (cut || dropped) && !justCleanup))
            {
                // The item was moved away: remove it from this project, but
                // do not close it if the doc is dirty or we do not own it
                bool isDirty, isOpen, isOpenedByUs;
                uint docCookie;
                IVsPersistDocData ppIVsPersistDocData;
                DocumentManager manager = node.GetDocumentManager();
                if (manager != null)
                {
                    manager.GetDocInfo(out isOpen, out isDirty, out isOpenedByUs, out docCookie, out ppIVsPersistDocData);
                    if (isDirty || (isOpen && !isOpenedByUs))
                    {
                        continue;
                    }
                    // close it if opened
                    if (isOpen)
                    {
                        manager.Close(__FRAMECLOSE.FRAMECLOSE_NoSave);
                    }
                }
                node.Remove(removeFromStorage: true, promptSave: false);
            }
            else if (w != null)
            {
                // Operation was a copy or was canceled: just drop the cut-highlight.
                ErrorHandler.ThrowOnFailure(w.ExpandItem((IVsUIHierarchy)this, node.ID, EXPANDFLAGS.EXPF_UnCutHighlightItem));
            }
        }
    }
    finally
    {
        try
        {
            // Now delete the memory allocated by the packaging of datasources.
            // If we just did a cut, or we are told to cleanup, then we need to free the data object. Otherwise, we leave it
            // alone so that you can continue to paste the data in new locations.
            if (moved || cut || justCleanup)
            {
                this.ItemsDraggedOrCutOrCopied.Clear();
                this.CleanAndFlushClipboard();
            }
        }
        finally
        {
            // Always reset the recorded drop data type, even if cleanup threw.
            this.dropDataType = DropDataType.None;
        }
    }
}
/// <summary>
/// Moves files from one part of our project to another.
/// </summary>
/// <param name="targetNode">the targetHandler node</param>
/// <param name="projectReferences">List of projectref string</param>
/// <returns>true if every reference was added successfully</returns>
private bool AddFilesFromProjectReferences(HierarchyNode targetNode, string[] projectReferences)
{
    // Validate input.
    if (projectReferences == null)
    {
        throw new ArgumentException(SR.GetString(SR.InvalidParameter, CultureInfo.CurrentUICulture), "projectReferences");
    }
    if (targetNode == null)
    {
        throw new InvalidOperationException();
    }

    // Iteratively add files from projectref.
    foreach (string projectReference in projectReferences)
    {
        if (projectReference == null)
        {
            // Bad projectref, bail out.
            return false;
        }

        // A trailing path separator identifies the reference as a folder.
        bool referencesFolder = projectReference.EndsWith("/", StringComparison.Ordinal)
            || projectReference.EndsWith("\\", StringComparison.Ordinal);

        if (referencesFolder)
        {
            AddFolderFromOtherProject(projectReference, targetNode);
        }
        else if (!AddFileToNodeFromProjectReference(projectReference, targetNode))
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Adds an item from a project reference to target node.
/// </summary>
/// <param name="projectRef">Projref string identifying the source item.</param>
/// <param name="targetNode">Node that receives the item.</param>
/// <returns>True when the item was added successfully.</returns>
private bool AddFileToNodeFromProjectReference(string projectRef, HierarchyNode targetNode)
{
    if (String.IsNullOrEmpty(projectRef))
    {
        throw new ArgumentException(SR.GetString(SR.ParameterCannotBeNullOrEmpty, CultureInfo.CurrentUICulture), "projectRef");
    }
    if (targetNode == null)
    {
        throw new ArgumentNullException("targetNode");
    }
    IVsSolution solution = this.GetService(typeof(IVsSolution)) as IVsSolution;
    if (solution == null)
    {
        throw new InvalidOperationException();
    }

    // Resolve the projref to a source hierarchy / item id pair.
    uint itemidLoc;
    IVsHierarchy hierarchy;
    string str;
    VSUPDATEPROJREFREASON[] reason = new VSUPDATEPROJREFREASON[1];
    ErrorHandler.ThrowOnFailure(solution.GetItemOfProjref(projectRef, out hierarchy, out itemidLoc, out str, reason));
    if (hierarchy == null)
    {
        throw new InvalidOperationException();
    }

    // This will throw invalid cast exception if the hierarchy is not a project.
    IVsProject project = (IVsProject)hierarchy;

    string moniker;
    ErrorHandler.ThrowOnFailure(project.GetMkDocument(itemidLoc, out moniker));
    string[] files = new String[1] { moniker };
    VSADDRESULT[] vsaddresult = new VSADDRESULT[1];
    vsaddresult[0] = VSADDRESULT.ADDRESULT_Failure;
    int addResult = targetNode.ProjectMgr.DoAddItem(targetNode.ID, VSADDITEMOPERATION.VSADDITEMOP_OPENFILE, null, 0, files, IntPtr.Zero, vsaddresult, AddItemContext.Paste);
    if (addResult != VSConstants.S_OK && addResult != VSConstants.S_FALSE && addResult != (int)OleConstants.OLECMDERR_E_CANCELED)
    {
        // NOTE(review): ThrowOnFailure is expected to throw for failing HRESULTs,
        // which would make the `return false` below unreachable — confirm.
        ErrorHandler.ThrowOnFailure(addResult);
        return false;
    }
    return (vsaddresult[0] == VSADDRESULT.ADDRESULT_Success);
}
/// <summary>
/// Empties all the data structures added to the clipboard and flushes the clipboard.
/// Only acts when the clipboard content originated from this very project.
/// </summary>
private void CleanAndFlushClipboard()
{
    IOleDataObject oleDataObject = null;
    ErrorHandler.ThrowOnFailure(UnsafeNativeMethods.OleGetClipboard(out oleDataObject));
    if (oleDataObject == null)
    {
        return;
    }

    string sourceProjectPath = DragDropHelper.GetSourceProjectPath(oleDataObject);
    if (!String.IsNullOrEmpty(sourceProjectPath) && NativeMethods.IsSamePath(sourceProjectPath, this.GetMkDocument()))
    {
        // The clipboard data came from us: flush it, then empty the clipboard.
        UnsafeNativeMethods.OleFlushClipboard();
        int clipboardOpened = 0;
        try
        {
            clipboardOpened = UnsafeNativeMethods.OpenClipboard(IntPtr.Zero);
            UnsafeNativeMethods.EmptyClipboard();
        }
        finally
        {
            // Only close the clipboard if OpenClipboard reported success (1).
            if (clipboardOpened == 1)
            {
                UnsafeNativeMethods.CloseClipboard();
            }
        }
    }
}
/// <summary>
/// Packages the selection description in <paramref name="sb"/> into a newly
/// allocated HGLOBAL laid out as a DROPFILES header followed by the
/// wide-character string data.
/// </summary>
/// <param name="sb">The selection content; must be non-empty.</param>
/// <param name="addEndFormatDelimiter">When true, guarantees the data ends with an
/// extra null terminator (double-null terminated list).</param>
/// <returns>A pointer to the allocated memory (owned by the caller), or
/// IntPtr.Zero when there is nothing to package.</returns>
private IntPtr PackageSelectionData(StringBuilder sb, bool addEndFormatDelimiter)
{
    // Use sb.Length / the indexer directly instead of allocating sb.ToString()
    // just to test emptiness or inspect the last character.
    if (sb == null || sb.Length == 0 || this.ItemsDraggedOrCutOrCopied.Count == 0)
    {
        return IntPtr.Zero;
    }

    // Double null at end.
    if (addEndFormatDelimiter && sb[sb.Length - 1] != '\0')
    {
        sb.Append('\0');
    }

    // We request unmanaged permission to execute the below.
    new SecurityPermission(SecurityPermissionFlag.UnmanagedCode).Demand();

    _DROPFILES df = new _DROPFILES();
    int dwSize = Marshal.SizeOf(df);
    Int16 wideChar = 0;
    int dwChar = Marshal.SizeOf(wideChar);

    // Header plus the string data and its terminating null character.
    int structSize = dwSize + ((sb.Length + 1) * dwChar);
    IntPtr ptr = Marshal.AllocHGlobal(structSize);
    df.pFiles = dwSize; // offset of the string data from the start of the structure
    df.fWide = 1;       // the string data is Unicode

    try
    {
        IntPtr data = IntPtr.Zero;
        try
        {
            data = UnsafeNativeMethods.GlobalLock(ptr);
            Marshal.StructureToPtr(df, data, false);
            IntPtr strData = new IntPtr((long)data + dwSize);
            DragDropHelper.CopyStringToHGlobal(sb.ToString(), strData, structSize);
        }
        finally
        {
            if (data != IntPtr.Zero)
                UnsafeNativeMethods.GlobalUnLock(data);
        }
    }
    catch
    {
        // Do not leak the allocation if packaging fails part-way through.
        Marshal.FreeHGlobal(ptr);
        throw;
    }

    return ptr;
}
}
}
| |
namespace Microsoft.Azure.Management.StorSimple8000Series
{
    using Azure;
    using Management;
    using Rest;
    using Rest.Azure;
    using Rest.Azure.OData;
    using Models;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// JobsOperations operations.
    /// </summary>
    public partial interface IJobsOperations
    {
        /// <summary>
        /// Gets all the jobs for specified device. With optional OData query
        /// parameters, a filtered set of jobs is returned.
        /// </summary>
        /// <param name='deviceName'>
        /// The device name
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name
        /// </param>
        /// <param name='managerName'>
        /// The manager name
        /// </param>
        /// <param name='odataQuery'>
        /// OData parameters to apply to the operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<Job>>> ListByDeviceWithHttpMessagesAsync(string deviceName, string resourceGroupName, string managerName, ODataQuery<JobFilter> odataQuery = default(ODataQuery<JobFilter>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Gets the details of the specified job name.
        /// </summary>
        /// <param name='deviceName'>
        /// The device name
        /// </param>
        /// <param name='jobName'>
        /// The job name.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name
        /// </param>
        /// <param name='managerName'>
        /// The manager name
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<Job>> GetWithHttpMessagesAsync(string deviceName, string jobName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Cancels a job on the device.
        /// </summary>
        /// <param name='deviceName'>
        /// The device name
        /// </param>
        /// <param name='jobName'>
        /// The job name.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name
        /// </param>
        /// <param name='managerName'>
        /// The manager name
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse> CancelWithHttpMessagesAsync(string deviceName, string jobName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Gets all the jobs for the specified manager. With optional OData
        /// query parameters, a filtered set of jobs is returned.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The resource group name
        /// </param>
        /// <param name='managerName'>
        /// The manager name
        /// </param>
        /// <param name='odataQuery'>
        /// OData parameters to apply to the operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<Job>>> ListByManagerWithHttpMessagesAsync(string resourceGroupName, string managerName, ODataQuery<JobFilter> odataQuery = default(ODataQuery<JobFilter>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Cancels a job on the device.
        /// </summary>
        /// <remarks>
        /// NOTE(review): presumably the non-polling start of the long-running
        /// cancel operation (AutoRest Begin* convention) — confirm against the
        /// generated implementation.
        /// </remarks>
        /// <param name='deviceName'>
        /// The device name
        /// </param>
        /// <param name='jobName'>
        /// The job name.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name
        /// </param>
        /// <param name='managerName'>
        /// The manager name
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse> BeginCancelWithHttpMessagesAsync(string deviceName, string jobName, string resourceGroupName, string managerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Gets all the jobs for specified device. With optional OData query
        /// parameters, a filtered set of jobs is returned.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<Job>>> ListByDeviceNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
        /// <summary>
        /// Gets all the jobs for the specified manager. With optional OData
        /// query parameters, a filtered set of jobs is returned.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<Job>>> ListByManagerNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
| |
using UnityEngine;
using System.Collections;
namespace UniverseEngine
{
// Bit flags controlling how a UniverseObject tracks its parent TilemapCircle
// (see UniverseObject.SetParent, which decodes these into booleans).
[System.Flags]
public enum FollowParentParameters
{
// No parent-following behavior.
None = 0,
// Follow rotation and scale, and collide with the parent's tiles.
// (Forward references to members declared below are legal inside a C# enum.)
Default = FollowRotation | FollowScale | CheckCollisions,
// NOTE(review): bit 0 (1 << 0) is unused — flags start at bit 1; confirm intentional.
FollowRotation = 1 << 1,
FollowScale = 1 << 2,
CheckCollisions = 1 << 3
}
// Base class for objects living in the universe simulation. An object may be
// attached to a parent TilemapCircle (a circular tile world); while attached it
// is tracked in the parent's polar coordinates (distance + angle), optionally
// inherits the parent's rotation/scale, and can collide with the parent's tiles.
public class UniverseObject
{
// Circle-shaped tilemap this object is attached to; null when free-floating.
public TilemapCircle parent;
// When true and the parent is a Planet, gravity pulls velocity.y down each update.
protected bool useGravity = true;
protected Vector2 position;
protected float scale = 1.0f;
protected float rotation = 0.0f; //radians
protected Vector2 size = new Vector2(1, 1);
protected bool visible = true;
// Velocity expressed in the parent's local frame (x along tangent, y along normal)
// when a parent is set; world units otherwise.
protected Vector2 velocity;
protected float rotationVelocity;
// Sides hit during the most recent UpdatePosition call.
protected TileHitFlags hitFlags;
// Polar coordinates of this object relative to the parent circle's center;
// kept in sync with 'position' while a parent is set.
protected float distanceInTilemapCircle;
protected float angleInTilemapCirclePosition;
protected IUniverseObjectListener listener;
// Flags decoded from FollowParentParameters in SetParent().
protected bool parentFollowScale;
protected bool parentFollowRotation;
protected bool parentCheckCollisions;
// World-space position; updated by SetParent and UpdatePosition.
public Vector2 Position
{
get { return position; }
}
public float Scale
{
get { return scale; }
}
// Rotation in radians.
public float Rotation
{
get { return rotation; }
}
public Vector2 Size
{
get { return size; }
}
public Vector2 Velocity
{
get { return velocity; }
set { this.velocity = value; }
}
public TileHitFlags HitFlags
{
get { return hitFlags; }
}
public IUniverseObjectListener Listener
{
get { return listener; }
set { this.listener = value; }
}
public bool Visible
{
get { return visible; }
set
{
this.visible = value;
// Notify with deltaTime 0 so the listener can refresh visuals immediately.
if (listener != null)
listener.OnUniverseObjectUpdated(0.0f);
}
}
// Sets the object's size and attaches it to a parent in one call.
public void Init(Vector2 size, TilemapCircle parent, FollowParentParameters followParameters, Vector2 position, float rotation)
{
this.size = size;
SetParent(parent, followParameters, position, rotation);
}
// Attaches the object to a (possibly null) parent circle: decodes the follow
// flags, optionally derives scale/rotation from the parent at this position,
// caches the position in the parent's polar space, and notifies the listener.
public void SetParent(TilemapCircle parent, FollowParentParameters followParameters, Vector2 position, float rotation)
{
this.parent = parent;
this.position = position;
this.rotation = rotation;
this.parentFollowScale = (followParameters & FollowParentParameters.FollowScale) != 0;
this.parentFollowRotation = (followParameters & FollowParentParameters.FollowRotation) != 0;
this.parentCheckCollisions = (followParameters & FollowParentParameters.CheckCollisions) != 0;
if (parent != null)
{
if (parentFollowScale)
this.scale = parent.GetScaleFromPosition(position);
if (parentFollowRotation)
this.rotation = parent.GetAngleFromPosition(position);
distanceInTilemapCircle = parent.GetDistanceFromPosition(position);
angleInTilemapCirclePosition = parent.GetAngleFromPosition(position);
}
if (listener != null)
listener.OnParentChanged(parent);
}
// Per-frame entry point: subclass logic first, then physics, then listener callback.
public void Update(float deltaTime)
{
OnUpdate(deltaTime);
UpdatePosition(deltaTime);
if (listener != null)
listener.OnUniverseObjectUpdated(deltaTime);
}
// Hook for subclasses; called before physics each Update.
protected virtual void OnUpdate(float deltaTime)
{
}
// Integrates velocity/rotation for one frame and resolves tile collisions
// against the parent circle: vertical axis (normal) first, then horizontal
// (tangent), updating hitFlags and zeroing the blocked velocity component.
protected void UpdatePosition(float deltaTime)
{
Vector2 normal;
Vector2 tangent;
Vector2 deltaPosition;
float deltaRotation;
if (parent != null)
{
// Re-derive world position from cached polar coordinates so the object
// stays glued to the (possibly moving/scaling) parent surface.
position = parent.GetPositionFromDistanceAndAngle(distanceInTilemapCircle, angleInTilemapCirclePosition);
if (parentFollowRotation)
rotation = parent.GetAngleFromPosition(position);
if (parentFollowScale)
scale = parent.GetScaleFromPosition(position);
if (parent is Planet && useGravity)
velocity.y -= ((Planet) parent).Gravity * deltaTime;
normal = parent.GetNormalFromPosition(position); //doesn't change with vertical position
tangent = parent.GetTangentFromPosition(position); //doesn't change with vertical position
// Motion is scaled by the object's current scale while parented.
deltaPosition = velocity * deltaTime * scale;
if (parentFollowRotation)
deltaRotation = 0.0f;
else
deltaRotation = rotationVelocity * deltaTime;
}
else
{
// Free-floating: fixed world basis, no gravity or scaling applied.
normal = Vector2.up;
tangent = Vector2.right;
deltaPosition = velocity * deltaTime;
deltaRotation = rotationVelocity * deltaTime;
}
hitFlags = TileHitFlags.None;
if (parent != null && parentCheckCollisions)
{
TileHitInfo hitInfo;
if (deltaPosition.y > 0)
{
//Check against ceiling
// Ray starts at the body center (half height above 'position').
if (parent.RaycastSquare(
position + normal * (size.y * 0.5f * scale),
size.x * scale,
TileDirection.Up,
deltaPosition.y + (size.y * 0.5f * scale),
out hitInfo))
{
deltaPosition.y = -(hitInfo.hitDistance - (size.y * 0.5f * scale));
velocity.y = 0.0f;
hitFlags |= TileHitFlags.Up;
}
}
else if (deltaPosition.y < 0)
{
//Check against floor
if (parent.RaycastSquare(
position + normal * (size.y * 0.5f * scale),
size.x * scale,
TileDirection.Down,
-deltaPosition.y + (size.y * 0.5f * scale),
out hitInfo))
{
deltaPosition.y = -(hitInfo.hitDistance - (size.y * 0.5f * scale));
velocity.y = 0.0f;
hitFlags |= TileHitFlags.Down;
}
}
}
if (deltaPosition.y != 0)
{
// Apply (possibly clamped) vertical motion along the surface normal.
position += normal * deltaPosition.y;
if (parent != null && parentFollowScale)
scale = parent.GetScaleFromPosition(position);
}
if (parent != null && parentCheckCollisions)
{
TileHitInfo hitInfo;
if (deltaPosition.x > 0)
{
//Check against right wall
// NOTE(review): the square size uses size.y (object height) for the
// horizontal sweeps below — presumably intentional; confirm.
if (parent.RaycastSquare(
position + normal * (size.y * 0.5f * scale),
size.y * scale,
TileDirection.Right,
deltaPosition.x + (size.x * 0.5f * scale),
out hitInfo))
{
deltaPosition.x = (hitInfo.hitDistance - (size.x * 0.5f * scale));
velocity.x = 0.0f;
hitFlags |= TileHitFlags.Right;
}
}
else if (deltaPosition.x < 0)
{
//Check against left wall
if (parent.RaycastSquare(
position + normal * (size.y * 0.5f * scale),
size.y * scale,
TileDirection.Left,
-deltaPosition.x + (size.x * 0.5f * scale),
out hitInfo))
{
deltaPosition.x = -(hitInfo.hitDistance - (size.x * 0.5f * scale));
velocity.x = 0.0f;
hitFlags |= TileHitFlags.Left;
}
}
}
if (deltaPosition.x != 0)
{
// Apply horizontal motion along the tangent; the normal changes as the
// object moves around the circle, so refresh it afterwards.
position += tangent * deltaPosition.x;
if (parent != null)
normal = parent.GetNormalFromPosition(position);
}
if (parent != null)
{
if (parentFollowRotation)
rotation = parent.GetAngleFromPosition(position);
else
rotation += deltaRotation;
// Re-cache polar coordinates for the next frame's re-derivation.
distanceInTilemapCircle = parent.GetDistanceFromPosition(position);
angleInTilemapCirclePosition = parent.GetAngleFromPosition(position);
}
else
{
rotation += deltaRotation;
}
}
/*
public bool MoveTo(Vector2 position)
{
if (CanMoveTo(position))
{
this.position = position;
return true;
}
return false;
}
public bool CanMoveTo(Vector2 position)
{
float scale = tilemapCircle.GetScaleFromPosition(position);
int tileX, tileY;
Vector2 right = transform.right;
Vector2 up = transform.up;
position += up * 0.05f;
for (int x = -1; x <= 1; x++)
{
for (int y = 0; y <= 2; y++)
{
Vector2 pos = position +
right * (size.x * 0.9f * x * 0.5f * scale) +
up * ((size.y * 0.9f / 2) * y * scale);
if (tilemapCircle.GetTileCoordinatesFromPosition(pos, out tileX, out tileY))
if (tilemapRicle.GetTile(tileX, tileY) != 0)
return false;
}
}
return true;
}
*/
}
}
| |
using System;
using System.Collections.Generic;
using System.Reactive.Linq;
using System.Reactive.Threading.Tasks;
using MS.Core;
namespace System.Net.Http
{
/// <summary>
/// Observable-sequence wrappers for <see cref="System.Net.Http.HttpClient"/>.
/// Each wrapper zips the client sequence pairwise with its argument sequences,
/// invokes the corresponding Task-based HttpClient member, converts the task to
/// an observable, and flattens the nested sequence into a single result stream.
/// NOTE(review): this looks machine-generated wrapper code — presumably kept in
/// sync with HttpClient's surface by a tool; confirm before hand-editing.
/// </summary>
public static class __HttpClient
{
// --- GET helpers returning the response body directly ---
public static IObservable<System.String> GetStringAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetStringAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.String> GetStringAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetStringAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.Byte[]> GetByteArrayAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetByteArrayAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.Byte[]> GetByteArrayAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetByteArrayAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.IO.Stream> GetStreamAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetStreamAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.IO.Stream> GetStreamAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetStreamAsync(uri).ToObservable())
.Flatten();
}
// --- GET helpers returning the full HttpResponseMessage ---
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.GetAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Net.Http.HttpCompletionOption> completionOption)
{
return HttpClientValue
.Zip(requestUri, completionOption, (client, uri, option) => client.GetAsync(uri, option).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Net.Http.HttpCompletionOption> completionOption)
{
return HttpClientValue
.Zip(requestUri, completionOption, (client, uri, option) => client.GetAsync(uri, option).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, cancellationToken, (client, uri, token) => client.GetAsync(uri, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, cancellationToken, (client, uri, token) => client.GetAsync(uri, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Net.Http.HttpCompletionOption> completionOption, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, completionOption, cancellationToken, (client, uri, option, token) => client.GetAsync(uri, option, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> GetAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Net.Http.HttpCompletionOption> completionOption, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, completionOption, cancellationToken, (client, uri, option, token) => client.GetAsync(uri, option, token).ToObservable())
.Flatten();
}
// --- POST ---
public static IObservable<System.Net.Http.HttpResponseMessage> PostAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Net.Http.HttpContent> content)
{
return HttpClientValue
.Zip(requestUri, content, (client, uri, body) => client.PostAsync(uri, body).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> PostAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Net.Http.HttpContent> content)
{
return HttpClientValue
.Zip(requestUri, content, (client, uri, body) => client.PostAsync(uri, body).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> PostAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Net.Http.HttpContent> content, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, content, cancellationToken, (client, uri, body, token) => client.PostAsync(uri, body, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> PostAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Net.Http.HttpContent> content, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, content, cancellationToken, (client, uri, body, token) => client.PostAsync(uri, body, token).ToObservable())
.Flatten();
}
// --- PUT ---
public static IObservable<System.Net.Http.HttpResponseMessage> PutAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Net.Http.HttpContent> content)
{
return HttpClientValue
.Zip(requestUri, content, (client, uri, body) => client.PutAsync(uri, body).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> PutAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Net.Http.HttpContent> content)
{
return HttpClientValue
.Zip(requestUri, content, (client, uri, body) => client.PutAsync(uri, body).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> PutAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Net.Http.HttpContent> content, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, content, cancellationToken, (client, uri, body, token) => client.PutAsync(uri, body, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> PutAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Net.Http.HttpContent> content, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, content, cancellationToken, (client, uri, body, token) => client.PutAsync(uri, body, token).ToObservable())
.Flatten();
}
// --- DELETE ---
public static IObservable<System.Net.Http.HttpResponseMessage> DeleteAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.DeleteAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> DeleteAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri)
{
return HttpClientValue
.Zip(requestUri, (client, uri) => client.DeleteAsync(uri).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> DeleteAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.String> requestUri, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, cancellationToken, (client, uri, token) => client.DeleteAsync(uri, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> DeleteAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> requestUri, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(requestUri, cancellationToken, (client, uri, token) => client.DeleteAsync(uri, token).ToObservable())
.Flatten();
}
// --- SEND (raw request messages) ---
public static IObservable<System.Net.Http.HttpResponseMessage> SendAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Net.Http.HttpRequestMessage> request)
{
return HttpClientValue
.Zip(request, (client, message) => client.SendAsync(message).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> SendAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Net.Http.HttpRequestMessage> request, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(request, cancellationToken, (client, message, token) => client.SendAsync(message, token).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> SendAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Net.Http.HttpRequestMessage> request, IObservable<System.Net.Http.HttpCompletionOption> completionOption)
{
return HttpClientValue
.Zip(request, completionOption, (client, message, option) => client.SendAsync(message, option).ToObservable())
.Flatten();
}
public static IObservable<System.Net.Http.HttpResponseMessage> SendAsync(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Net.Http.HttpRequestMessage> request, IObservable<System.Net.Http.HttpCompletionOption> completionOption, IObservable<System.Threading.CancellationToken> cancellationToken)
{
return HttpClientValue
.Zip(request, completionOption, cancellationToken, (client, message, option, token) => client.SendAsync(message, option, token).ToObservable())
.Flatten();
}
// Performs the side effect on each client and yields Unit.
public static IObservable<System.Reactive.Unit> CancelPendingRequests(this IObservable<System.Net.Http.HttpClient> HttpClientValue)
{
return HttpClientValue
.Do(client => client.CancelPendingRequests())
.ToUnit();
}
// --- property projections (observable getter/setter shims) ---
public static IObservable<System.Net.Http.Headers.HttpRequestHeaders> get_DefaultRequestHeaders(this IObservable<System.Net.Http.HttpClient> HttpClientValue)
{
return HttpClientValue.Select(client => client.DefaultRequestHeaders);
}
public static IObservable<System.Uri> get_BaseAddress(this IObservable<System.Net.Http.HttpClient> HttpClientValue)
{
return HttpClientValue.Select(client => client.BaseAddress);
}
public static IObservable<System.TimeSpan> get_Timeout(this IObservable<System.Net.Http.HttpClient> HttpClientValue)
{
return HttpClientValue.Select(client => client.Timeout);
}
public static IObservable<System.Int64> get_MaxResponseContentBufferSize(this IObservable<System.Net.Http.HttpClient> HttpClientValue)
{
return HttpClientValue.Select(client => client.MaxResponseContentBufferSize);
}
public static IObservable<System.Reactive.Unit> set_BaseAddress(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Uri> value)
{
return ObservableExt.ZipExecute(HttpClientValue, value, (client, newValue) => client.BaseAddress = newValue);
}
public static IObservable<System.Reactive.Unit> set_Timeout(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.TimeSpan> value)
{
return ObservableExt.ZipExecute(HttpClientValue, value, (client, newValue) => client.Timeout = newValue);
}
public static IObservable<System.Reactive.Unit> set_MaxResponseContentBufferSize(this IObservable<System.Net.Http.HttpClient> HttpClientValue, IObservable<System.Int64> value)
{
return ObservableExt.ZipExecute(HttpClientValue, value, (client, newValue) => client.MaxResponseContentBufferSize = newValue);
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CodeGeneration
{
internal abstract partial class AbstractCodeGenerationService : ICodeGenerationService
{
// Service used to locate the syntax declarations for a given symbol.
private readonly ISymbolDeclarationService _symbolDeclarationService;
// Workspace this code-generation service operates within; exposed to subclasses.
protected readonly Workspace Workspace;
// Wires up the declaration-lookup service and owning workspace.
protected AbstractCodeGenerationService(
ISymbolDeclarationService symbolDeclarationService,
Workspace workspace)
{
_symbolDeclarationService = symbolDeclarationService;
Workspace = workspace;
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddEvent<TDeclarationNode>(TDeclarationNode destination, IEventSymbol @event, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddEvent(destination, @event, effectiveOptions, insertionIndices);
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddField<TDeclarationNode>(TDeclarationNode destination, IFieldSymbol field, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddField(destination, field, effectiveOptions, insertionIndices);
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddMethod<TDeclarationNode>(TDeclarationNode destination, IMethodSymbol method, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddMethod(destination, method, effectiveOptions, insertionIndices);
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddProperty<TDeclarationNode>(TDeclarationNode destination, IPropertySymbol property, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddProperty(destination, property, effectiveOptions, insertionIndices);
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddNamedType<TDeclarationNode>(TDeclarationNode destination, INamedTypeSymbol namedType, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddNamedType(destination, namedType, effectiveOptions, insertionIndices, cancellationToken);
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddNamespace<TDeclarationNode>(TDeclarationNode destination, INamespaceSymbol @namespace, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddNamespace(destination, @namespace, effectiveOptions, insertionIndices, cancellationToken);
}
// Public entry point: normalizes null options to the defaults, then delegates
// to the language-specific overload with precomputed insertion indices.
public TDeclarationNode AddMembers<TDeclarationNode>(TDeclarationNode destination, IEnumerable<ISymbol> members, CodeGenerationOptions options, CancellationToken cancellationToken)
    where TDeclarationNode : SyntaxNode
{
    var effectiveOptions = options ?? CodeGenerationOptions.Default;
    var insertionIndices = GetAvailableInsertionIndices(destination, cancellationToken);
    return AddMembers(destination, members, insertionIndices, effectiveOptions, cancellationToken);
}
// Language-specific single-member insertion primitives; 'availableIndices'
// marks which insertion slots in the destination are usable.
protected abstract TDeclarationNode AddEvent<TDeclarationNode>(TDeclarationNode destination, IEventSymbol @event, CodeGenerationOptions options, IList<bool> availableIndices) where TDeclarationNode : SyntaxNode;
protected abstract TDeclarationNode AddField<TDeclarationNode>(TDeclarationNode destination, IFieldSymbol field, CodeGenerationOptions options, IList<bool> availableIndices) where TDeclarationNode : SyntaxNode;
protected abstract TDeclarationNode AddMethod<TDeclarationNode>(TDeclarationNode destination, IMethodSymbol method, CodeGenerationOptions options, IList<bool> availableIndices) where TDeclarationNode : SyntaxNode;
protected abstract TDeclarationNode AddProperty<TDeclarationNode>(TDeclarationNode destination, IPropertySymbol property, CodeGenerationOptions options, IList<bool> availableIndices) where TDeclarationNode : SyntaxNode;
protected abstract TDeclarationNode AddNamedType<TDeclarationNode>(TDeclarationNode destination, INamedTypeSymbol namedType, CodeGenerationOptions options, IList<bool> availableIndices, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
protected abstract TDeclarationNode AddNamespace<TDeclarationNode>(TDeclarationNode destination, INamespaceSymbol @namespace, CodeGenerationOptions options, IList<bool> availableIndices, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
protected abstract TDeclarationNode AddMembers<TDeclarationNode>(TDeclarationNode destination, IEnumerable<SyntaxNode> members) where TDeclarationNode : SyntaxNode;
// Mutation operations on existing declarations (parameters, attributes, statements).
public abstract TDeclarationNode AddParameters<TDeclarationNode>(TDeclarationNode destinationMember, IEnumerable<IParameterSymbol> parameters, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode AddAttributes<TDeclarationNode>(TDeclarationNode destination, IEnumerable<AttributeData> attributes, SyntaxToken? target, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode RemoveAttribute<TDeclarationNode>(TDeclarationNode destination, SyntaxNode attributeToRemove, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode RemoveAttribute<TDeclarationNode>(TDeclarationNode destination, AttributeData attributeToRemove, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode AddStatements<TDeclarationNode>(TDeclarationNode destinationMember, IEnumerable<SyntaxNode> statements, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
// Declaration-update operations (modifiers, accessibility, type, members).
public abstract TDeclarationNode UpdateDeclarationModifiers<TDeclarationNode>(TDeclarationNode declaration, IEnumerable<SyntaxToken> newModifiers, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode UpdateDeclarationAccessibility<TDeclarationNode>(TDeclarationNode declaration, Accessibility newAccessibility, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode UpdateDeclarationType<TDeclarationNode>(TDeclarationNode declaration, ITypeSymbol newType, CodeGenerationOptions options, CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode;
public abstract TDeclarationNode UpdateDeclarationMembers<TDeclarationNode>(TDeclarationNode declaration, IList<ISymbol> newMembers, CodeGenerationOptions options = null, CancellationToken cancellationToken = default(CancellationToken)) where TDeclarationNode : SyntaxNode;
public abstract CodeGenerationDestination GetDestination(SyntaxNode node);
// Factories that build standalone declaration nodes from symbols.
public abstract SyntaxNode CreateEventDeclaration(IEventSymbol @event, CodeGenerationDestination destination, CodeGenerationOptions options);
public abstract SyntaxNode CreateFieldDeclaration(IFieldSymbol field, CodeGenerationDestination destination, CodeGenerationOptions options);
public abstract SyntaxNode CreateMethodDeclaration(IMethodSymbol method, CodeGenerationDestination destination, CodeGenerationOptions options);
public abstract SyntaxNode CreatePropertyDeclaration(IPropertySymbol property, CodeGenerationDestination destination, CodeGenerationOptions options);
public abstract SyntaxNode CreateNamedTypeDeclaration(INamedTypeSymbol namedType, CodeGenerationDestination destination, CodeGenerationOptions options, CancellationToken cancellationToken);
public abstract SyntaxNode CreateNamespaceDeclaration(INamespaceSymbol @namespace, CodeGenerationDestination destination, CodeGenerationOptions options, CancellationToken cancellationToken);
// Language-specific helper that adds missing imports/usings to a document.
protected abstract AbstractImportsAdder CreateImportsAdder(Document document);
// Explicit-cast helper; throws InvalidCastException when 'value' is not a T.
protected static T Cast<T>(object value)
    => (T)value;
// Throws ArgumentNullException when 'destination' is null, or ArgumentException
// when it is not a TDeclarationNode; returns normally otherwise.
protected static void CheckDeclarationNode<TDeclarationNode>(SyntaxNode destination) where TDeclarationNode : SyntaxNode
{
    if (destination == null)
    {
        throw new ArgumentNullException(nameof(destination));
    }
    if (destination is TDeclarationNode)
    {
        return;
    }
    var message = string.Format(
        WorkspacesResources.Destination_type_must_be_a_0_but_given_one_is_1,
        typeof(TDeclarationNode).Name,
        destination.GetType().Name);
    throw new ArgumentException(message, nameof(destination));
}
// Throws ArgumentNullException when 'destination' is null, or ArgumentException
// when it is neither of the two expected declaration node types.
protected static void CheckDeclarationNode<TDeclarationNode1, TDeclarationNode2>(SyntaxNode destination)
    where TDeclarationNode1 : SyntaxNode
    where TDeclarationNode2 : SyntaxNode
{
    if (destination == null)
    {
        throw new ArgumentNullException(nameof(destination));
    }
    var matches = destination is TDeclarationNode1 || destination is TDeclarationNode2;
    if (!matches)
    {
        var message = string.Format(
            WorkspacesResources.Destination_type_must_be_a_0_or_a_1_but_given_one_is_2,
            typeof(TDeclarationNode1).Name, typeof(TDeclarationNode2).Name, destination.GetType().Name);
        throw new ArgumentException(message, nameof(destination));
    }
}
/// <summary>
/// Validates that <paramref name="destination"/> is non-null and is one of the three
/// allowed declaration kinds; throws otherwise.
/// </summary>
protected static void CheckDeclarationNode<TDeclarationNode1, TDeclarationNode2, TDeclarationNode3>(SyntaxNode destination)
    where TDeclarationNode1 : SyntaxNode
    where TDeclarationNode2 : SyntaxNode
    where TDeclarationNode3 : SyntaxNode
{
    if (destination == null)
    {
        throw new ArgumentNullException(nameof(destination));
    }

    if (!(destination is TDeclarationNode1) &&
        !(destination is TDeclarationNode2) &&
        !(destination is TDeclarationNode3))
    {
        throw new ArgumentException(
            string.Format(WorkspacesResources.Destination_type_must_be_a_0_1_or_2_but_given_one_is_3,
                typeof(TDeclarationNode1).Name, typeof(TDeclarationNode2).Name, typeof(TDeclarationNode3).Name, destination.GetType().Name),
            nameof(destination));
    }
}
/// <summary>
/// Validates that <paramref name="destination"/> is non-null and is one of the four
/// allowed declaration kinds; throws otherwise.
/// </summary>
protected static void CheckDeclarationNode<TDeclarationNode1, TDeclarationNode2, TDeclarationNode3, TDeclarationNode4>(SyntaxNode destination)
    where TDeclarationNode1 : SyntaxNode
    where TDeclarationNode2 : SyntaxNode
    where TDeclarationNode3 : SyntaxNode
    where TDeclarationNode4 : SyntaxNode
{
    // Guard against null, consistent with the other CheckDeclarationNode overloads
    // (this overload previously skipped the check and would NRE on GetType below).
    if (destination == null)
    {
        throw new ArgumentNullException(nameof(destination));
    }

    if (!(destination is TDeclarationNode1) &&
        !(destination is TDeclarationNode2) &&
        !(destination is TDeclarationNode3) &&
        !(destination is TDeclarationNode4))
    {
        // The resource string only has room for three expected type names plus the
        // actual given type, so the last format argument must be the destination's
        // real type (it was previously typeof(TDeclarationNode4).Name, which meant
        // the message never reported what was actually passed in).
        throw new ArgumentException(
            string.Format(WorkspacesResources.Destination_type_must_be_a_0_1_or_2_but_given_one_is_3,
                typeof(TDeclarationNode1).Name, typeof(TDeclarationNode2).Name, typeof(TDeclarationNode3).Name, destination.GetType().Name),
            nameof(destination));
    }
}
/// <summary>
/// Core helper shared by all the AddXxxAsync entry points: finds the most relevant
/// source declaration of <paramref name="destination"/>, applies
/// <paramref name="declarationTransform"/> to produce the updated declaration, and
/// returns a new document with the updated syntax root (optionally running the
/// imports adder for the generated <paramref name="members"/>).
/// </summary>
/// <exception cref="ArgumentException">If no declaration to generate into can be found.</exception>
private async Task<Document> GetEditAsync(
    Solution solution,
    INamespaceOrTypeSymbol destination,
    Func<SyntaxNode, CodeGenerationOptions, IList<bool>, CancellationToken, SyntaxNode> declarationTransform,
    CodeGenerationOptions options,
    IEnumerable<ISymbol> members,
    CancellationToken cancellationToken)
{
    options = options ?? CodeGenerationOptions.Default;

    // Item1 is the declaration to edit; Item2 is the list of insertion slots
    // still available inside it.
    var result = await this.FindMostRelevantDeclarationAsync(solution, destination, options, cancellationToken).ConfigureAwait(false);
    SyntaxNode destinationDeclaration = result.Item1;
    IList<bool> availableIndices = result.Item2;

    if (destinationDeclaration == null)
    {
        throw new ArgumentException(WorkspacesResources.Could_not_find_location_to_generation_symbol_into);
    }

    var transformedDeclaration = declarationTransform(destinationDeclaration, options, availableIndices, cancellationToken);

    // Splice the transformed declaration back into its tree and produce the new document.
    var destinationTree = destinationDeclaration.SyntaxTree;
    var root = await destinationTree.GetRootAsync(cancellationToken).ConfigureAwait(false);
    var currentRoot = root.ReplaceNode(destinationDeclaration, transformedDeclaration);

    var oldDocument = solution.GetDocument(destinationTree);
    var newDocument = oldDocument.WithSyntaxRoot(currentRoot);

    if (options.AddImports)
    {
        var adder = this.CreateImportsAdder(newDocument);
        newDocument = await adder.AddAsync(members, options.PlaceSystemNamespaceFirst, options, cancellationToken).ConfigureAwait(false);
    }

    return newDocument;
}
/// <summary>
/// Adds <paramref name="members"/> to <paramref name="destination"/>, either at the
/// most appropriate insertion points or appended at the end, depending on
/// <c>options.AutoInsertionLocation</c>. Implicitly declared members are skipped.
/// </summary>
protected TDeclarationNode AddMembers<TDeclarationNode>(
    TDeclarationNode destination,
    IEnumerable<ISymbol> members,
    IList<bool> availableIndices,
    CodeGenerationOptions options,
    CancellationToken cancellationToken)
    where TDeclarationNode : SyntaxNode
{
    var membersList = members.ToList();
    if (membersList.Count > 1)
    {
        // Positional options (before/after-this-location) are dropped when inserting
        // multiple members; see CreateOptionsForMultipleMembers for the rationale.
        options = CreateOptionsForMultipleMembers(options);
    }

    // Filter out the members that are implicitly declared. They're implicit, hence we do
    // not want an explicit declaration.
    var filteredMembers = membersList.Where(m => !m.IsImplicitlyDeclared);

    return options.AutoInsertionLocation
        ? AddMembersToAppropiateLocationInDestination(destination, filteredMembers, availableIndices, options, cancellationToken)
        : AddMembersToEndOfDestination(destination, filteredMembers, availableIndices, options, cancellationToken);
}
/// <summary>
/// Generates a brand-new declaration node for each member and appends them all to the
/// end of <paramref name="destination"/>, optionally sorting them first
/// (enum members are never sorted, to preserve their declared order/values).
/// </summary>
private TDeclarationSyntax AddMembersToEndOfDestination<TDeclarationSyntax>(
    TDeclarationSyntax destination,
    IEnumerable<ISymbol> members,
    IList<bool> availableIndices,
    CodeGenerationOptions options,
    CancellationToken cancellationToken)
    where TDeclarationSyntax : SyntaxNode
{
    var newMembers = new List<SyntaxNode>();
    var codeGenerationDestination = GetDestination(destination);

    foreach (var member in members)
    {
        cancellationToken.ThrowIfCancellationRequested();

        // GetNewMember returns null for unsupported symbol kinds; those are skipped.
        var newMember = GetNewMember(options, codeGenerationDestination, member, cancellationToken);
        if (newMember != null)
        {
            newMembers.Add(newMember);
        }
    }

    // Metadata as source generates complete declarations and doesn't modify
    // existing ones. We can take the members to generate, sort them once,
    // and then add them in that order to the end of the destination.
    if (!GeneratingEnum(members) && options.SortMembers)
    {
        newMembers.Sort(GetMemberComparer());
    }

    return this.AddMembers(destination, newMembers);
}
/// <summary>
/// Inserts each member at its most appropriate location, threading the destination
/// through <see cref="UpdateDestination{TDeclarationNode}"/> so that every insertion
/// operates on the result of the previous one.
/// (The "Appropiate" typo in the name is preserved because callers reference it.)
/// </summary>
private TDeclarationSyntax AddMembersToAppropiateLocationInDestination<TDeclarationSyntax>(
    TDeclarationSyntax destination,
    IEnumerable<ISymbol> members,
    IList<bool> availableIndices,
    CodeGenerationOptions options,
    CancellationToken cancellationToken)
    where TDeclarationSyntax : SyntaxNode
{
    return members.Aggregate(
        destination,
        (current, member) =>
        {
            cancellationToken.ThrowIfCancellationRequested();
            return UpdateDestination(availableIndices, options, current, member, cancellationToken);
        });
}
/// <summary>
/// Builds a fresh declaration node for <paramref name="member"/> appropriate for the
/// given destination, or null when the symbol kind is not supported.
/// </summary>
private SyntaxNode GetNewMember(
    CodeGenerationOptions options, CodeGenerationDestination codeGenerationDestination,
    ISymbol member, CancellationToken cancellationToken)
{
    // Same dispatch order as before: events, fields, properties, methods, types, namespaces.
    if (member is IEventSymbol @event)
        return this.CreateEventDeclaration(@event, codeGenerationDestination, options);
    if (member is IFieldSymbol field)
        return this.CreateFieldDeclaration(field, codeGenerationDestination, options);
    if (member is IPropertySymbol property)
        return this.CreatePropertyDeclaration(property, codeGenerationDestination, options);
    if (member is IMethodSymbol method)
        return this.CreateMethodDeclaration(method, codeGenerationDestination, options);
    if (member is INamedTypeSymbol namedType)
        return this.CreateNamedTypeDeclaration(namedType, codeGenerationDestination, options, cancellationToken);
    if (member is INamespaceSymbol @namespace)
        return this.CreateNamespaceDeclaration(@namespace, codeGenerationDestination, options, cancellationToken);

    return null;
}
/// <summary>
/// Inserts a single member into <paramref name="currentDestination"/> by dispatching
/// to the appropriate AddXxx method for the symbol's kind. Unsupported kinds return
/// the destination unchanged.
/// </summary>
private TDeclarationNode UpdateDestination<TDeclarationNode>(
    IList<bool> availableIndices,
    CodeGenerationOptions options,
    TDeclarationNode currentDestination,
    ISymbol member,
    CancellationToken cancellationToken) where TDeclarationNode : SyntaxNode
{
    switch (member)
    {
        case IEventSymbol @event: return this.AddEvent(currentDestination, @event, options, availableIndices);
        case IFieldSymbol field: return this.AddField(currentDestination, field, options, availableIndices);
        case IPropertySymbol property: return this.AddProperty(currentDestination, property, options, availableIndices);
        case IMethodSymbol method: return this.AddMethod(currentDestination, method, options, availableIndices);
        case INamedTypeSymbol namedType: return this.AddNamedType(currentDestination, namedType, options, availableIndices, cancellationToken);
        case INamespaceSymbol @namespace: return this.AddNamespace(currentDestination, @namespace, options, availableIndices, cancellationToken);
    }

    return currentDestination;
}
/// <summary>
/// True when the members being generated belong to an enum (detected via the
/// containing type of the first field symbol, if any).
/// </summary>
private bool GeneratingEnum(IEnumerable<ISymbol> members)
{
    var firstField = members.OfType<IFieldSymbol>().FirstOrDefault();
    return firstField?.ContainingType.IsEnumType() == true;
}
// Comparer used to sort newly generated member declarations before appending them;
// see AddMembersToEndOfDestination.
protected abstract IComparer<SyntaxNode> GetMemberComparer();
/// <summary>
/// Clones <paramref name="options"/> while deliberately dropping the
/// afterThisLocation/beforeThisLocation positions, which are not meaningful when
/// inserting more than one member.
/// </summary>
protected static CodeGenerationOptions CreateOptionsForMultipleMembers(CodeGenerationOptions options)
{
    // For now we ignore the afterThisLocation/beforeThisLocation if we're adding
    // multiple members. In the future it would be nice to appropriately handle this.
    // The difficulty lies with ensuring that we properly understand the position we're
    // inserting into, even as we change the type by adding multiple members. Not
    // impossible to figure out, but out of scope right now.
    options = new CodeGenerationOptions(
        options.ContextLocation,
        addImports: options.AddImports,
        placeSystemNamespaceFirst: options.PlaceSystemNamespaceFirst,
        additionalImports: options.AdditionalImports,
        generateMembers: options.GenerateMembers,
        mergeNestedNamespaces: options.MergeNestedNamespaces,
        mergeAttributes: options.MergeAttributes,
        generateDefaultAccessibility: options.GenerateDefaultAccessibility,
        generateMethodBodies: options.GenerateMethodBodies,
        generateDocumentationComments: options.GenerateDocumentationComments,
        autoInsertionLocation: options.AutoInsertionLocation,
        reuseSyntax: options.ReuseSyntax,
        sortMembers: options.SortMembers);
    return options;
}
/// <summary>Adds <paramref name="event"/> to <paramref name="destination"/> and returns the updated document.</summary>
public virtual Task<Document> AddEventAsync(
    Solution solution, INamedTypeSymbol destination, IEventSymbol @event,
    CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddEvent(declaration, @event, opts, indices),
        options, new[] { @event }, cancellationToken);
/// <summary>Adds <paramref name="field"/> to <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddFieldAsync(Solution solution, INamedTypeSymbol destination, IFieldSymbol field, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddField(declaration, field, opts, indices),
        options, new[] { field }, cancellationToken);
/// <summary>Adds <paramref name="property"/> to <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddPropertyAsync(Solution solution, INamedTypeSymbol destination, IPropertySymbol property, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddProperty(declaration, property, opts, indices),
        options, new[] { property }, cancellationToken);
/// <summary>Adds <paramref name="namedType"/> to the named-type <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddNamedTypeAsync(Solution solution, INamedTypeSymbol destination, INamedTypeSymbol namedType, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddNamedType(declaration, namedType, opts, indices, ct),
        options, new[] { namedType }, cancellationToken);
/// <summary>Adds <paramref name="namedType"/> to the namespace <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddNamedTypeAsync(Solution solution, INamespaceSymbol destination, INamedTypeSymbol namedType, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddNamedType(declaration, namedType, opts, indices, ct),
        options, new[] { namedType }, cancellationToken);
/// <summary>Adds <paramref name="namespace"/> to <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddNamespaceAsync(Solution solution, INamespaceSymbol destination, INamespaceSymbol @namespace, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddNamespace(declaration, @namespace, opts, indices, ct),
        options, new[] { @namespace }, cancellationToken);
/// <summary>Adds <paramref name="method"/> to <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddMethodAsync(Solution solution, INamedTypeSymbol destination, IMethodSymbol method, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddMethod(declaration, method, opts, indices),
        options, new[] { method }, cancellationToken);
/// <summary>Adds all of <paramref name="members"/> to <paramref name="destination"/> and returns the updated document.</summary>
public Task<Document> AddMembersAsync(Solution solution, INamedTypeSymbol destination, IEnumerable<ISymbol> members, CodeGenerationOptions options, CancellationToken cancellationToken)
    => GetEditAsync(
        solution, destination,
        (declaration, opts, indices, ct) => AddMembers(declaration, members, indices, opts, ct),
        options, members, cancellationToken);
/// <summary>
/// Adds either a namespace or a named type to <paramref name="destination"/>,
/// dispatching on the runtime kind of <paramref name="namespaceOrType"/>.
/// </summary>
public Task<Document> AddNamespaceOrTypeAsync(Solution solution, INamespaceSymbol destination, INamespaceOrTypeSymbol namespaceOrType, CodeGenerationOptions options, CancellationToken cancellationToken)
{
    if (namespaceOrType == null)
    {
        throw new ArgumentNullException(nameof(namespaceOrType));
    }

    // Anything that is not a namespace is treated as a named type, exactly as before
    // (the cast intentionally throws for other INamespaceOrTypeSymbol implementations).
    return namespaceOrType is INamespaceSymbol @namespace
        ? AddNamespaceAsync(solution, destination, @namespace, options, cancellationToken)
        : AddNamedTypeAsync(solution, destination, (INamedTypeSymbol)namespaceOrType, options, cancellationToken);
}
/// <summary>
/// Validates that <paramref name="location"/> is a usable insertion target: non-null,
/// in source (not metadata), and inside the same syntax tree as
/// <paramref name="destinationMember"/>.
/// </summary>
/// <exception cref="ArgumentException">If any of the three conditions fails.</exception>
protected static void CheckLocation<TDeclarationNode>(
    TDeclarationNode destinationMember, Location location) where TDeclarationNode : SyntaxNode
{
    if (location == null)
    {
        throw new ArgumentException(WorkspacesResources.No_location_provided_to_add_statements_to);
    }

    if (!location.IsInSource)
    {
        throw new ArgumentException(WorkspacesResources.Destination_location_was_not_in_source);
    }

    if (location.SourceTree != destinationMember.SyntaxTree)
    {
        throw new ArgumentException(WorkspacesResources.Destination_location_was_from_a_different_tree);
    }
}
/// <summary>
/// Computes the position of a removed attribute list and the trivia that should be
/// preserved in its place. A trailing end-of-line is dropped (it would leave a blank
/// line once the whole attribute list is gone).
/// </summary>
protected static void ComputePositionAndTriviaForRemoveAttributeList(
    SyntaxNode attributeList,
    Func<SyntaxTrivia, bool> isEndOfLineTrivia,
    out int positionOfRemovedNode,
    out IEnumerable<SyntaxTrivia> triviaOfRemovedNode)
{
    positionOfRemovedNode = attributeList.FullSpan.Start;

    var leading = attributeList.GetLeadingTrivia();
    var trailing = attributeList.GetTrailingTrivia();

    // Remove redundant trailing trivia as we are removing the entire attribute list.
    var endsWithNewLine = trailing.Count >= 1 && isEndOfLineTrivia(trailing.Last());
    triviaOfRemovedNode = endsWithNewLine
        ? (IEnumerable<SyntaxTrivia>)leading
        : leading.Concat(trailing);
}
/// <summary>
/// Computes the position and preserved trivia when a single attribute is removed from
/// a multi-attribute list. If the attribute is delimited by commas, the trivia of the
/// neighboring comma tokens is used so the separators' formatting survives.
/// </summary>
protected static void ComputePositionAndTriviaForRemoveAttributeFromAttributeList(
    SyntaxNode attributeToRemove,
    Func<SyntaxToken, bool> isComma,
    out int positionOfRemovedNode,
    out IEnumerable<SyntaxTrivia> triviaOfRemovedNode)
{
    positionOfRemovedNode = attributeToRemove.FullSpan.Start;
    var root = attributeToRemove.SyntaxTree.GetRoot();

    // Token immediately before the attribute; if it's a comma, keep its leading trivia.
    var previousToken = root.FindToken(attributeToRemove.FullSpan.Start - 1);
    var leading = isComma(previousToken) ? previousToken.LeadingTrivia : attributeToRemove.GetLeadingTrivia();

    // NOTE(review): FullSpan.End already addresses the first position after this node,
    // so the extra +1 looks like it could skip a single-character token — confirm intended.
    var nextToken = root.FindToken(attributeToRemove.FullSpan.End + 1);
    var trailing = isComma(nextToken) ? nextToken.TrailingTrivia : attributeToRemove.GetTrailingTrivia();

    triviaOfRemovedNode = leading.Concat(trailing);
}
/// <summary>
/// Prepends <paramref name="trivia"/> to the leading trivia of the token found at
/// <paramref name="position"/>; returns the node unchanged when there is no trivia.
/// </summary>
protected static T AppendTriviaAtPosition<T>(T node, int position, SyntaxTriviaList trivia)
    where T : SyntaxNode
{
    // Nothing to insert — return the node as-is.
    if (!trivia.Any())
    {
        return node;
    }

    var token = node.FindToken(position);
    var updatedToken = token.WithLeadingTrivia(trivia.Concat(token.LeadingTrivia));
    return node.ReplaceToken(token, updatedToken);
}
/// <summary>
/// Merges a new set of accessibility modifier tokens into an existing modifier list:
/// each old accessibility modifier is replaced (trivia-preserving) by the next new
/// token; surplus old accessibility modifiers are dropped; surplus new tokens are
/// inserted after the last replaced one, or prepended when the original list had no
/// accessibility modifier at all.
/// NOTE: this method consumes (mutates) <paramref name="newModifierTokens"/>.
/// </summary>
protected static IList<SyntaxToken> GetUpdatedDeclarationAccessibilityModifiers(IList<SyntaxToken> newModifierTokens, SyntaxTokenList modifiersList, Func<SyntaxToken, bool> isAccessibilityModifier)
{
    var updatedModifiersList = new List<SyntaxToken>();
    var anyAccessModifierSeen = false;
    foreach (var modifier in modifiersList)
    {
        SyntaxToken newModifier;
        if (isAccessibilityModifier(modifier))
        {
            if (newModifierTokens.Count == 0)
            {
                // No replacement tokens left: drop this old accessibility modifier.
                continue;
            }

            // Replace the old accessibility modifier with the next new token, keeping
            // the old token's surrounding trivia so formatting is preserved.
            newModifier = newModifierTokens[0]
                .WithLeadingTrivia(modifier.LeadingTrivia)
                .WithTrailingTrivia(modifier.TrailingTrivia);
            newModifierTokens.RemoveAt(0);
            anyAccessModifierSeen = true;
        }
        else
        {
            if (anyAccessModifierSeen && newModifierTokens.Any())
            {
                // Flush the remaining new accessibility tokens before the first
                // non-accessibility modifier that follows the replaced ones.
                updatedModifiersList.AddRange(newModifierTokens);
                newModifierTokens.Clear();
            }

            newModifier = modifier;
        }

        updatedModifiersList.Add(newModifier);
    }

    if (!anyAccessModifierSeen)
    {
        // The original list had no accessibility modifier: new tokens go first.
        updatedModifiersList.InsertRange(0, newModifierTokens);
    }
    else
    {
        updatedModifiersList.AddRange(newModifierTokens);
    }

    return updatedModifiersList;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
using System.Reflection.PortableExecutable;
namespace ILCompiler.PEWriter
{
/// <summary>
/// Ready-to-run PE builder combines copying the input MSIL PE executable with managed
/// metadata and IL and adding new code and data representing the R2R JITted code and
/// additional runtime structures (R2R header and tables).
/// </summary>
public class R2RPEBuilder : PEBuilder
{
    /// <summary>
    /// This structure describes how a particular section moved between the original MSIL
    /// and the output PE file. It holds beginning and end RVA of the input (MSIL) section
    /// and a delta between the input and output starting RVA of the section.
    /// </summary>
    struct SectionRVADelta
    {
        /// <summary>
        /// Starting RVA of the section in the input MSIL PE.
        /// </summary>
        public readonly int StartRVA;

        /// <summary>
        /// End RVA (one plus the last RVA in the section) of the section in the input MSIL PE.
        /// </summary>
        public readonly int EndRVA;

        /// <summary>
        /// Starting RVA of the section in the output PE minus its starting RVA in the input MSIL.
        /// </summary>
        public readonly int DeltaRVA;

        /// <summary>
        /// Initialize the section RVA delta information.
        /// </summary>
        /// <param name="startRVA">Starting RVA of the section in the input MSIL</param>
        /// <param name="endRVA">End RVA of the section in the input MSIL</param>
        /// <param name="deltaRVA">Output RVA of the section minus input RVA of the section</param>
        public SectionRVADelta(int startRVA, int endRVA, int deltaRVA)
        {
            StartRVA = startRVA;
            EndRVA = endRVA;
            DeltaRVA = deltaRVA;
        }
    }

    /// <summary>
    /// Name of the text section.
    /// </summary>
    public const string TextSectionName = ".text";

    /// <summary>
    /// Name of the initialized data section.
    /// </summary>
    public const string SDataSectionName = ".sdata";

    /// <summary>
    /// Name of the resource section.
    /// </summary>
    public const string RsrcSectionName = ".rsrc";

    /// <summary>
    /// Name of the relocation section.
    /// </summary>
    public const string RelocSectionName = ".reloc";

    /// <summary>
    /// PE reader representing the input MSIL PE file we're copying to the output composite PE file.
    /// </summary>
    private PEReader _peReader;

    /// <summary>
    /// Custom sections explicitly injected by the caller.
    /// </summary>
    private HashSet<string> _customSections;

    /// <summary>
    /// Complete list of section names includes the sections present in the input MSIL file
    /// (.text, optionally .rsrc and .reloc) and extra sections injected during the R2R PE
    /// creation.
    /// </summary>
    private ImmutableArray<Section> _sections;

    /// <summary>
    /// Callback which is called to emit the data for each section.
    /// </summary>
    private Func<string, SectionLocation, int, BlobBuilder> _sectionSerializer;

    /// <summary>
    /// Optional callback can be used to adjust the default directory table obtained by relocating
    /// the directory table from input MSIL PE file.
    /// </summary>
    private Action<PEDirectoriesBuilder> _directoriesUpdater;

    /// <summary>
    /// For each copied section, we store its initial and end RVA in the source PE file
    /// and the RVA difference between the old and new file. We use this table to relocate
    /// directory entries in the PE file header.
    /// </summary>
    private List<SectionRVADelta> _sectionRvaDeltas;

    /// <summary>
    /// COR header builder is populated from the input MSIL and possibly updated during final
    /// relocation of the output file.
    /// </summary>
    private CorHeaderBuilder _corHeaderBuilder;

    /// <summary>
    /// File offset of the COR header in the output file.
    /// </summary>
    private int _corHeaderFileOffset;

    /// <summary>
    /// COR header decoded from the input MSIL file.
    /// </summary>
    public CorHeaderBuilder CorHeader => _corHeaderBuilder;

    /// <summary>
    /// File offset of the COR header in the output file.
    /// </summary>
    public int CorHeaderFileOffset => _corHeaderFileOffset;

    /// <summary>
    /// Constructor initializes the various control structures and combines the section list.
    /// </summary>
    /// <param name="machine">Target machine architecture</param>
    /// <param name="peReader">Input MSIL PE file reader</param>
    /// <param name="sectionNames">Custom section names to add to the output PE</param>
    /// <param name="sectionSerializer">Callback for emission of data for the individual sections</param>
    /// <param name="directoriesUpdater">Optional callback to adjust the relocated directory table</param>
    public R2RPEBuilder(
        Machine machine,
        PEReader peReader,
        IEnumerable<SectionInfo> sectionNames = null,
        Func<string, SectionLocation, int, BlobBuilder> sectionSerializer = null,
        Action<PEDirectoriesBuilder> directoriesUpdater = null)
        : base(PEHeaderCopier.Copy(peReader.PEHeaders, machine), deterministicIdProvider: null)
    {
        _peReader = peReader;
        _sectionSerializer = sectionSerializer;
        _directoriesUpdater = directoriesUpdater;

        // 'sectionNames' legitimately defaults to null; normalize it once so the
        // Select / Any queries below cannot throw NullReferenceException (the original
        // code guarded only one of the four dereferences).
        sectionNames = sectionNames ?? Enumerable.Empty<SectionInfo>();

        _customSections = new HashSet<string>(sectionNames.Select((sn) => sn.SectionName));
        _sectionRvaDeltas = new List<SectionRVADelta>();

        ImmutableArray<Section>.Builder sectionListBuilder = ImmutableArray.CreateBuilder<Section>();

        // Locate the well-known sections in the input PE so they can be copied in
        // canonical order (.text, custom, .sdata, .rsrc, .reloc last).
        int textSectionIndex = -1;
        int sdataSectionIndex = -1;
        int rsrcSectionIndex = -1;
        int relocSectionIndex = -1;

        for (int sectionIndex = 0; sectionIndex < peReader.PEHeaders.SectionHeaders.Length; sectionIndex++)
        {
            switch (peReader.PEHeaders.SectionHeaders[sectionIndex].Name)
            {
                case TextSectionName:
                    textSectionIndex = sectionIndex;
                    break;

                case SDataSectionName:
                    sdataSectionIndex = sectionIndex;
                    break;

                case RsrcSectionName:
                    rsrcSectionIndex = sectionIndex;
                    break;

                case RelocSectionName:
                    relocSectionIndex = sectionIndex;
                    break;
            }
        }

        if (textSectionIndex >= 0 && !sectionNames.Any((sc) => sc.SectionName == TextSectionName))
        {
            SectionHeader sectionHeader = peReader.PEHeaders.SectionHeaders[textSectionIndex];
            sectionListBuilder.Add(new Section(sectionHeader.Name, sectionHeader.SectionCharacteristics));
        }

        foreach (SectionInfo sectionInfo in sectionNames)
        {
            sectionListBuilder.Add(new Section(sectionInfo.SectionName, sectionInfo.Characteristics));
        }

        if (sdataSectionIndex >= 0 && !sectionNames.Any((sc) => sc.SectionName == SDataSectionName))
        {
            SectionHeader sectionHeader = peReader.PEHeaders.SectionHeaders[sdataSectionIndex];
            sectionListBuilder.Add(new Section(sectionHeader.Name, sectionHeader.SectionCharacteristics));
        }

        if (rsrcSectionIndex >= 0 && !sectionNames.Any((sc) => sc.SectionName == RsrcSectionName))
        {
            SectionHeader sectionHeader = peReader.PEHeaders.SectionHeaders[rsrcSectionIndex];
            sectionListBuilder.Add(new Section(sectionHeader.Name, sectionHeader.SectionCharacteristics));
        }

        if (relocSectionIndex >= 0)
        {
            SectionHeader sectionHeader = peReader.PEHeaders.SectionHeaders[relocSectionIndex];
            sectionListBuilder.Add(new Section(sectionHeader.Name, sectionHeader.SectionCharacteristics));
        }
        else
        {
            // Always inject the relocation section to the end of section list
            sectionListBuilder.Add(new Section(RelocSectionName,
                SectionCharacteristics.ContainsInitializedData |
                SectionCharacteristics.MemRead |
                SectionCharacteristics.MemDiscardable));
        }

        _sections = sectionListBuilder.ToImmutable();
    }

    /// <summary>
    /// Copy all directory entries and the address of entry point, relocating them along the way.
    /// </summary>
    protected override PEDirectoriesBuilder GetDirectories()
    {
        PEDirectoriesBuilder builder = new PEDirectoriesBuilder();
        builder.CorHeaderTable = RelocateDirectoryEntry(_peReader.PEHeaders.PEHeader.CorHeaderTableDirectory);

        if (_directoriesUpdater != null)
        {
            _directoriesUpdater(builder);
        }

        return builder;
    }

    /// <summary>
    /// Relocate a single directory entry.
    /// </summary>
    /// <param name="entry">Directory entry to relocate</param>
    /// <returns>Relocated directory entry</returns>
    public DirectoryEntry RelocateDirectoryEntry(DirectoryEntry entry)
    {
        return new DirectoryEntry(RelocateRVA(entry.RelativeVirtualAddress), entry.Size);
    }

    /// <summary>
    /// Relocate a given RVA using the section offset table produced during section serialization.
    /// </summary>
    /// <param name="rva">RVA to relocate</param>
    /// <returns>Relocated RVA</returns>
    private int RelocateRVA(int rva)
    {
        if (rva == 0)
        {
            // Zero RVA is normally used as NULL
            return rva;
        }

        foreach (SectionRVADelta sectionRvaDelta in _sectionRvaDeltas)
        {
            if (rva >= sectionRvaDelta.StartRVA && rva < sectionRvaDelta.EndRVA)
            {
                // We found the input section holding the RVA, apply its specific delta (output RVA - input RVA).
                return rva + sectionRvaDelta.DeltaRVA;
            }
        }

        Debug.Assert(false, "RVA is not within any of the input sections - output PE may be inconsistent");
        return rva;
    }

    /// <summary>
    /// Provide an array of sections for the PEBuilder to use.
    /// </summary>
    protected override ImmutableArray<Section> CreateSections()
    {
        return _sections;
    }

    /// <summary>
    /// Output the section with a given name. For sections existent in the source MSIL PE file
    /// (.text, optionally .rsrc and .reloc), we first copy the content of the input MSIL PE file
    /// and then call the section serialization callback to emit the extra content after the input
    /// section content.
    /// </summary>
    /// <param name="name">Section name</param>
    /// <param name="location">RVA and file location where the section will be put</param>
    /// <returns>Blob builder representing the section data</returns>
    protected override BlobBuilder SerializeSection(string name, SectionLocation location)
    {
        BlobBuilder sectionDataBuilder = null;
        bool haveCustomSection = _customSections.Contains(name);
        int sectionIndex = _peReader.PEHeaders.SectionHeaders.Length - 1;
        int sectionStartRva = location.RelativeVirtualAddress;

        // Find the input section with the same name, if any.
        while (sectionIndex >= 0 && _peReader.PEHeaders.SectionHeaders[sectionIndex].Name != name)
        {
            sectionIndex--;
        }

        if (sectionIndex >= 0)
        {
            SectionHeader sectionHeader = _peReader.PEHeaders.SectionHeaders[sectionIndex];
            int sectionOffset = (_peReader.IsLoadedImage ? sectionHeader.VirtualAddress : sectionHeader.PointerToRawData);
            int rvaDelta = location.RelativeVirtualAddress - sectionHeader.VirtualAddress;

            _sectionRvaDeltas.Add(new SectionRVADelta(
                startRVA: sectionHeader.VirtualAddress,
                endRVA: sectionHeader.VirtualAddress + Math.Max(sectionHeader.VirtualSize, sectionHeader.SizeOfRawData),
                deltaRVA: rvaDelta));

            unsafe
            {
                int bytesToRead = Math.Min(sectionHeader.SizeOfRawData, sectionHeader.VirtualSize);
                BlobReader inputSectionReader = _peReader.GetEntireImage().GetReader(sectionOffset, bytesToRead);

                if (name == ".rsrc")
                {
                    // There seems to be a bug in BlobBuilder - when we LinkSuffix to an empty blob builder,
                    // the blob data goes out of sync and WriteContentTo outputs garbage.
                    sectionDataBuilder = PEResourceHelper.Relocate(inputSectionReader, rvaDelta);
                }
                else
                {
                    sectionDataBuilder = new BlobBuilder();
                    sectionDataBuilder.WriteBytes(inputSectionReader.CurrentPointer, inputSectionReader.RemainingBytes);

                    int corHeaderRvaDelta = _peReader.PEHeaders.PEHeader.CorHeaderTableDirectory.RelativeVirtualAddress - sectionHeader.VirtualAddress;
                    if (corHeaderRvaDelta >= 0 && corHeaderRvaDelta < bytesToRead)
                    {
                        // Assume COR header resides in this section, deserialize it and store its location
                        _corHeaderFileOffset = location.PointerToRawData + corHeaderRvaDelta;
                        inputSectionReader.Offset = corHeaderRvaDelta;
                        _corHeaderBuilder = new CorHeaderBuilder(ref inputSectionReader);
                    }
                }

                int alignedSize = sectionHeader.VirtualSize;

                // When custom section data is present, align the section size to 4K to prevent
                // pre-generated MSIL relocations from tampering with native relocations.
                if (haveCustomSection)
                {
                    alignedSize = (alignedSize + 0xFFF) & ~0xFFF;
                }

                if (alignedSize > bytesToRead)
                {
                    // If the number of bytes read from the source PE file is less than the virtual size,
                    // zero pad to the end of virtual size before emitting extra section data
                    sectionDataBuilder.WriteBytes(0, alignedSize - bytesToRead);
                }

                location = new SectionLocation(
                    location.RelativeVirtualAddress + sectionDataBuilder.Count,
                    location.PointerToRawData + sectionDataBuilder.Count);
            }
        }

        if (_sectionSerializer != null)
        {
            BlobBuilder extraData = _sectionSerializer(name, location, sectionStartRva);
            if (extraData != null)
            {
                if (sectionDataBuilder == null)
                {
                    // See above - there's a bug due to which LinkSuffix to an empty BlobBuilder screws up the blob content.
                    sectionDataBuilder = extraData;
                }
                else
                {
                    sectionDataBuilder.LinkSuffix(extraData);
                }
            }
        }

        // Make sure the section has at least 1 byte, otherwise the PE emitter goes mad,
        // messes up the section map and corrupts the output executable.
        if (sectionDataBuilder == null)
        {
            sectionDataBuilder = new BlobBuilder();
        }

        if (sectionDataBuilder.Count == 0)
        {
            sectionDataBuilder.WriteByte(0);
        }

        return sectionDataBuilder;
    }
}
/// <summary>
/// When copying PE contents we may need to move the resource section, however its internal
/// ResourceDataEntry records hold RVA's so they need to be relocated. Thankfully the resource
/// data model is very simple so that we just traverse the structure using offset constants.
/// </summary>
unsafe sealed class PEResourceHelper
{
/// <summary>
/// Field offsets in the resource directory table.
/// </summary>
private static class DirectoryTable
{
public const int Characteristics = 0x0;
public const int TimeDateStamp = 0x04;
public const int MajorVersion = 0x08;
public const int MinorVersion = 0x0A;
public const int NumberOfNameEntries = 0x0C;
public const int NumberOfIDEntries = 0x0E;
public const int Size = 0x10;
}
/// <summary>
/// Field offsets in the resource directory entry.
/// </summary>
private static class DirectoryEntry
{
public const int NameOffsetOrID = 0x0;
public const int DataOrSubdirectoryOffset = 0x4;
public const int Size = 0x8;
}
/// <summary>
/// When the 4-byte value at the offset DirectoryEntry.DataOrSubdirectoryOffset
/// has 31-st bit set, it's a subdirectory table entry; when it's clear, it's a
/// resource data entry.
/// </summary>
private const int EntryOffsetIsSubdirectory = unchecked((int)0x80000000u);
/// <summary>
/// Field offsets in the resource data entry.
/// </summary>
private static class DataEntry
{
public const int RVA = 0x0;
public const int Size = 0x4;
public const int Codepage = 0x8;
public const int Reserved = 0xC;
}
/// <summary>
/// Blob reader representing the input resource section.
/// </summary>
private BlobReader _reader;
/// <summary>
/// This BlobBuilder holds the relocated resource section after the ctor finishes.
/// </summary>
private BlobBuilder _builder;
/// <summary>
/// Relocation delta (the difference between input and output RVA of the resource section).
/// </summary>
private int _delta;
/// <summary>
/// Offsets within the resource section representing RVA's in the resource data entries
/// that need relocating.
/// </summary>
private List<int> _offsetsOfRvasToRelocate;
/// <summary>
/// Public API receives the input resource section reader and the relocation delta
/// and returns a blob builder representing the relocated resource section.
/// </summary>
/// <param name="reader">Blob reader representing the input resource section</param>
/// <param name="delta">Relocation delta to apply (value to add to RVA's)</param>
public static BlobBuilder Relocate(BlobReader reader, int delta)
{
return new PEResourceHelper(reader, delta)._builder;
}
/// <summary>
/// Private constructor first traverses the internal graph of resource tables
/// and collects offsets to RVA's that need relocation; after that we sort the list of
/// offsets and do a linear copying pass patching the RVA cells with the updated values.
/// </summary>
/// <param name="reader">Blob reader representing the input resource section</param>
/// <param name="delta">Relocation delta to apply (value to add to RVA's)</param>
private PEResourceHelper(BlobReader reader, int delta)
{
_reader = reader;
_builder = new BlobBuilder();
_delta = delta;
_offsetsOfRvasToRelocate = new List<int>();
TraverseDirectoryTable(tableOffset: 0);
_offsetsOfRvasToRelocate.Sort();
int currentOffset = 0;
_reader.Reset();
foreach (int offsetOfRvaToRelocate in _offsetsOfRvasToRelocate)
{
int bytesToCopy = offsetOfRvaToRelocate - currentOffset;
Debug.Assert(bytesToCopy >= 0);
if (bytesToCopy > 0)
{
_builder.WriteBytes(_reader.CurrentPointer, bytesToCopy);
_reader.Offset += bytesToCopy;
currentOffset += bytesToCopy;
}
int rva = _reader.ReadInt32();
_builder.WriteInt32(rva + delta);
currentOffset += sizeof(int);
}
if (_reader.RemainingBytes > 0)
{
_builder.WriteBytes(_reader.CurrentPointer, _reader.RemainingBytes);
}
}
/// <summary>
/// Traverse a single directory table at a given offset within the resource section.
/// May recurse indirectly via TraverseDirectoryEntry -> TraverseDirectoryTable;
/// recursion depth equals the depth of the table graph (resources use 3 today).
/// </summary>
/// <param name="tableOffset">Offset of the resource directory table within the resource section</param>
private void TraverseDirectoryTable(int tableOffset)
{
    // Position the reader at the entry-count fields of the directory table header.
    _reader.Offset = tableOffset + DirectoryTable.NumberOfNameEntries;
    int nameEntryCount = _reader.ReadInt16();
    int idEntryCount = _reader.ReadInt16();
    int entryCount = nameEntryCount + idEntryCount;

    // Name entries and ID entries are laid out contiguously after the header.
    for (int index = 0; index < entryCount; index++)
    {
        TraverseDirectoryEntry(tableOffset + DirectoryTable.Size + index * DirectoryEntry.Size);
    }
}
/// <summary>
/// Traverse a single directory entry. Name- and ID-based entries are handled
/// identically because only the data / subdirectory pointer is of interest here,
/// not the entry identifier.
/// </summary>
/// <param name="entryOffset">Offset of the resource directory entry within the resource section</param>
private void TraverseDirectoryEntry(int entryOffset)
{
    _reader.Offset = entryOffset + DirectoryEntry.DataOrSubdirectoryOffset;
    int pointer = _reader.ReadInt32();

    // The high bit distinguishes a subdirectory pointer from a data entry pointer.
    bool pointsToSubdirectory = (pointer & EntryOffsetIsSubdirectory) != 0;
    if (pointsToSubdirectory)
    {
        // Recurse into the nested table (mask off the subdirectory flag bit).
        TraverseDirectoryTable(pointer & ~EntryOffsetIsSubdirectory);
    }
    else
    {
        // Data entry: remember where its RVA cell lives so it can be patched later.
        _offsetsOfRvasToRelocate.Add(pointer + DataEntry.RVA);
    }
}
}
/// <summary>
/// Simple helper for copying the various global values in the PE header.
/// </summary>
static class PEHeaderCopier
{
    /// <summary>
    /// Copy PE headers into a PEHeaderBuilder used by PEBuilder, overriding the
    /// target machine and fixing up the image characteristics for 64-bit targets.
    /// </summary>
    /// <param name="peHeaders">Headers to copy</param>
    /// <param name="targetMachineOverride">Target architecture to set in the header</param>
    /// <returns>A header builder pre-populated from <paramref name="peHeaders"/></returns>
    public static PEHeaderBuilder Copy(PEHeaders peHeaders, Machine targetMachineOverride)
    {
        // 64-bit targets must not carry the 32-bit flag and can address the full
        // large address space. Arm64 included here, resolving the previous TODO.
        bool is64BitTarget = (targetMachineOverride == Machine.Amd64 ||
            targetMachineOverride == Machine.IA64 ||
            targetMachineOverride == Machine.Arm64);

        Characteristics imageCharacteristics = peHeaders.CoffHeader.Characteristics;
        if (is64BitTarget)
        {
            imageCharacteristics &= ~Characteristics.Bit32Machine;
            imageCharacteristics |= Characteristics.LargeAddressAware;
        }

        // Everything else is copied through from the source headers unchanged.
        return new PEHeaderBuilder(
            machine: targetMachineOverride,
            sectionAlignment: peHeaders.PEHeader.SectionAlignment,
            fileAlignment: peHeaders.PEHeader.FileAlignment,
            imageBase: peHeaders.PEHeader.ImageBase,
            majorLinkerVersion: peHeaders.PEHeader.MajorLinkerVersion,
            minorLinkerVersion: peHeaders.PEHeader.MinorLinkerVersion,
            majorOperatingSystemVersion: peHeaders.PEHeader.MajorOperatingSystemVersion,
            minorOperatingSystemVersion: peHeaders.PEHeader.MinorOperatingSystemVersion,
            majorImageVersion: peHeaders.PEHeader.MajorImageVersion,
            minorImageVersion: peHeaders.PEHeader.MinorImageVersion,
            majorSubsystemVersion: peHeaders.PEHeader.MajorSubsystemVersion,
            minorSubsystemVersion: peHeaders.PEHeader.MinorSubsystemVersion,
            subsystem: peHeaders.PEHeader.Subsystem,
            dllCharacteristics: peHeaders.PEHeader.DllCharacteristics,
            imageCharacteristics: imageCharacteristics,
            sizeOfStackReserve: peHeaders.PEHeader.SizeOfStackReserve,
            sizeOfStackCommit: peHeaders.PEHeader.SizeOfStackCommit,
            sizeOfHeapReserve: peHeaders.PEHeader.SizeOfHeapReserve,
            sizeOfHeapCommit: peHeaders.PEHeader.SizeOfHeapCommit);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using OLEDB.Test.ModuleCore;
using System.IO;
using System.Linq;
using System.Text;
using XmlCoreTest.Common;
using Xunit;
namespace System.Xml.Tests
{
public partial class TCErrorConditionWriter// : XmlWriterTestCaseBase
{
public static string file = "writerErr.out";

// Every WriterHelper.Create overload must reject a null output target with
// ArgumentNullException (with and without explicit settings).
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
public void var_01(XmlWriterUtils utils, int param)
{
    XmlWriterSettings settings = new XmlWriterSettings();
    try
    {
        switch (param)
        {
            case 1: WriterHelper.Create((Stream)null, overrideAsync: true, async: utils.Async); break;
            case 2: WriterHelper.Create((TextWriter)null, overrideAsync: true, async: utils.Async); break;
            case 3: WriterHelper.Create((StringBuilder)null, overrideAsync: true, async: utils.Async); break;
            case 4: WriterHelper.Create((XmlWriter)null, overrideAsync: true, async: utils.Async); break;
            case 5: WriterHelper.Create((Stream)null, settings, overrideAsync: true, async: utils.Async); break;
            case 6: WriterHelper.Create((TextWriter)null, settings, overrideAsync: true, async: utils.Async); break;
            case 7: WriterHelper.Create((StringBuilder)null, settings, overrideAsync: true, async: utils.Async); break;
            case 8: WriterHelper.Create((XmlWriter)null, settings, overrideAsync: true, async: utils.Async); break;
        }
    }
    catch (ArgumentNullException)
    {
        return;
    }
    // No exception means the overload accepted a null target: fail.
    Assert.True(false);
}
// WriteAttributes(null, ...) must throw ArgumentNullException on the first call
// and again on an immediate retry (writer stays usable enough to throw twice).
[Theory]
[XmlWriterInlineData(true)]
[XmlWriterInlineData(false)]
public void var_02(XmlWriterUtils utils, bool param)
{
    bool threwBothTimes = false;
    XmlWriter writer = utils.CreateWriter();
    try
    {
        writer.WriteAttributes(null, param);
    }
    catch (ArgumentNullException)
    {
        try
        {
            writer.WriteAttributes(null, param);
        }
        catch (ArgumentNullException)
        {
            threwBothTimes = true;
        }
    }
    finally
    {
        writer.Dispose();
    }
    Assert.True(threwBothTimes);
}
// WriteNode((XmlReader)null, ...) must throw ArgumentNullException on the first
// call and again on an immediate retry.
[Theory]
[XmlWriterInlineData(true)]
[XmlWriterInlineData(false)]
public void var_03(XmlWriterUtils utils, bool param)
{
    bool threwBothTimes = false;
    XmlWriter writer = utils.CreateWriter();
    try
    {
        writer.WriteNode((XmlReader)null, param);
    }
    catch (ArgumentNullException)
    {
        try
        {
            writer.WriteNode((XmlReader)null, param);
        }
        catch (ArgumentNullException)
        {
            threwBothTimes = true;
        }
    }
    finally
    {
        writer.Dispose();
    }
    Assert.True(threwBothTimes);
}
// Passing null names/values to the listed write methods must throw
// ArgumentException, consistently on an immediate retry. Some writers surface
// NullReferenceException instead for a subset of members (see second catch).
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
[XmlWriterInlineData(11)]
[XmlWriterInlineData(12)]
[XmlWriterInlineData(13)]
[XmlWriterInlineData(14)]
[XmlWriterInlineData(15)]
[XmlWriterInlineData(16)]
[XmlWriterInlineData(17)]
[XmlWriterInlineData(18)]
[XmlWriterInlineData(19)]
[XmlWriterInlineData(20)]
public void var_04(XmlWriterUtils utils, int param)
{
    bool result = false;
    XmlWriter w = utils.CreateWriter();
    try
    {
        switch (param)
        {
            case 1: w.WriteAttributeString(null, null); break;
            case 2: w.WriteAttributeString(null, null, null); break;
            case 3: w.WriteAttributeString("a", null, null, null); break;
            case 4: w.WriteAttributeString(null, null, "a", null); break;
            case 5: w.WriteDocType(null, null, null, null); break;
            case 6: w.WriteElementString(null, null); break;
            case 7: w.WriteElementString(null, null, null); break;
            case 8: w.WriteElementString("a", null, null, null); break;
            case 9: w.WriteElementString("a", null, "a", null); break;
            case 10: w.WriteEntityRef(null); break;
            case 11: w.WriteName(null); break;
            case 12: w.WriteNmToken(null); break;
            case 13: w.WriteProcessingInstruction(null, null); break;
            case 14: w.WriteQualifiedName(null, null); break;
            case 15: w.WriteStartAttribute(null); break;
            case 16: w.WriteStartAttribute(null, null); break;
            case 17: w.WriteStartAttribute("a", null, null); break;
            case 18: w.WriteStartElement(null); break;
            case 19: w.WriteStartElement(null, null); break;
            case 20: w.WriteStartElement("a", null, null); break;
        }
    }
    catch (ArgumentException)
    {
        // Retry the identical call: the writer must throw the same way again.
        try
        {
            switch (param)
            {
                case 1: w.WriteAttributeString(null, null); break;
                case 2: w.WriteAttributeString(null, null, null); break;
                case 3: w.WriteAttributeString("a", null, null, null); break;
                case 4: w.WriteAttributeString(null, null, "a", null); break;
                case 5: w.WriteDocType(null, null, null, null); break;
                case 6: w.WriteElementString(null, null); break;
                case 7: w.WriteElementString(null, null, null); break;
                case 8: w.WriteElementString("a", null, null, null); break;
                case 9: w.WriteElementString("a", null, "a", null); break;
                case 10: w.WriteEntityRef(null); break;
                case 11: w.WriteName(null); break;
                case 12: w.WriteNmToken(null); break;
                case 13: w.WriteProcessingInstruction(null, null); break;
                case 14: w.WriteQualifiedName(null, null); break;
                case 15: w.WriteStartAttribute(null); break;
                case 16: w.WriteStartAttribute(null, null); break;
                case 17: w.WriteStartAttribute("a", null, null); break;
                case 18: w.WriteStartElement(null); break;
                case 19: w.WriteStartElement(null, null); break;
                case 20: w.WriteStartElement("a", null, null); break;
            }
        }
        catch (ArgumentException) { result = true; }
    }
    catch (NullReferenceException)
    {
        // Only accepted for the CharCheckingWriter, and only for these members.
        try
        {
            switch (param)
            {
                case 5: w.WriteDocType(null, null, null, null); break;
                case 10: w.WriteEntityRef(null); break;
                case 13: w.WriteProcessingInstruction(null, null); break;
                case 14: w.WriteQualifiedName(null, null); break;
            }
        }
        catch (NullReferenceException)
        {
            result = (utils.WriterType == WriterType.CharCheckingWriter);
        }
    }
    finally
    {
        w.Dispose();
    }
    // CustomWriter is exempt for WriteQualifiedName (param 14).
    Assert.True((result || param == 14 && utils.WriterType == WriterType.CustomWriter));
}
// Buffer-taking write methods (and LookupPrefix) must throw ArgumentNullException
// for a null buffer, and keep throwing on retry.
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
public void var_05(XmlWriterUtils utils, int param)
{
    bool result = false;
    XmlWriter w = utils.CreateWriter();
    w.WriteStartElement("Root");
    try
    {
        switch (param)
        {
            case 1: w.WriteBinHex(null, 0, 0); break;
            case 2: w.WriteBase64(null, 0, 0); break;
            case 3: w.WriteChars(null, 0, 0); break;
            case 4: w.LookupPrefix(null); break;
            case 5: w.WriteRaw(null, 0, 0); break;
            case 6: w.WriteValue((object)null); break;
        }
    }
    catch (ArgumentNullException)
    {
        // Retry: either the same exception or InvalidOperationException
        // (writer may have entered an error state) is acceptable.
        try
        {
            switch (param)
            {
                case 1: w.WriteBinHex(null, 0, 0); break;
                case 2: w.WriteBase64(null, 0, 0); break;
                case 3: w.WriteChars(null, 0, 0); break;
                case 4: w.LookupPrefix(null); break;
                case 5: w.WriteRaw(null, 0, 0); break;
                case 6: w.WriteValue((object)null); break;
            }
        }
        catch (ArgumentNullException) { result = true; }
        catch (InvalidOperationException) { result = true; }
    }
    finally
    {
        w.Dispose();
    }
    Assert.True((result));
}
// Writing unpaired surrogate characters ("\ud800\ud800") through every write
// method must fail: ArgumentException for most members, XmlException for
// WriteDocType. The failure must be repeatable on an immediate retry.
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
[XmlWriterInlineData(11)]
[XmlWriterInlineData(12)]
[XmlWriterInlineData(13)]
[XmlWriterInlineData(14)]
[XmlWriterInlineData(15)]
[XmlWriterInlineData(16)]
[XmlWriterInlineData(17)]
[XmlWriterInlineData(18)]
[XmlWriterInlineData(19)]
[XmlWriterInlineData(20)]
[XmlWriterInlineData(21)]
[XmlWriterInlineData(22)]
[XmlWriterInlineData(23)]
[XmlWriterInlineData(24)]
[XmlWriterInlineData(25)]
[XmlWriterInlineData(26)]
[XmlWriterInlineData(27)]
[XmlWriterInlineData(28)]
[XmlWriterInlineData(29)]
[XmlWriterInlineData(30)]
[XmlWriterInlineData(31)]
[XmlWriterInlineData(32)]
[XmlWriterInlineData(33)]
[XmlWriterInlineData(34)]
[XmlWriterInlineData(35)]
[XmlWriterInlineData(36)]
public void var_07(XmlWriterUtils utils, int param)
{
    bool result = false;
    // Params exempt from the final assertion when using the CharCheckingWriter.
    int[] skipParams = new int[] { 14, 20, 21 };
    XmlWriter w = utils.CreateWriter();
    // DocType cases (30-32) must run before any element is started.
    if (param != 30 && param != 31 && param != 32)
        w.WriteStartElement("Root");
    // Null text is a no-op for these members; exercised first, outside the try.
    switch (param)
    {
        case 1: w.WriteComment(null); break;
        case 3: w.WriteCData(null); break;
        case 5: w.WriteRaw(null); break;
        case 6: w.WriteString(null); break;
        case 8: w.WriteValue((string)null); break;
        case 9: w.WriteWhitespace(null); break;
    }
    try
    {
        switch (param)
        {
            case 1: w.WriteComment("\ud800\ud800"); break;
            case 2: w.WriteCharEntity('\ud800'); break;
            case 3: w.WriteCData("\ud800\ud800"); break;
            case 4: w.WriteEntityRef("\ud800\ud800"); break;
            case 5: w.WriteRaw("\ud800\ud800"); break;
            case 6: w.WriteString("\ud800\ud800"); break;
            case 7: w.WriteSurrogateCharEntity('\ud800', '\ud800'); break;
            case 8: w.WriteValue("\ud800\ud800"); break;
            case 9: w.WriteWhitespace("\ud800\ud800"); break;
            case 10: w.WriteAttributeString("\ud800\ud800", "\ud800\ud800"); break;
            case 11: w.WriteAttributeString("a0", "\ud800\ud800", "\ud800\ud800"); break;
            case 12: w.WriteAttributeString("a1", "b1", "\ud800\ud800", "\ud800\ud800"); break;
            case 13: w.WriteAttributeString("a2", "b2", "c2", "\ud800\ud800"); break;
            case 14: w.WriteDocType("\ud800\ud800", "\ud800\ud800", "\ud800\ud800", "\ud800\ud800"); break;
            case 15: w.WriteElementString("\ud800\ud800", "\ud800\ud800"); break;
            case 16: w.WriteElementString("a", "\ud800\ud800", "\ud800\ud800"); break;
            case 17: w.WriteElementString("a", "a", "\ud800\ud800", "\ud800\ud800"); break;
            case 18: w.WriteElementString("a", "a", "a", "\ud800\ud800"); break;
            case 19: w.WriteEntityRef("\ud800\ud800"); break;
            case 20: w.WriteName("\ud800\ud800"); break;
            case 21: w.WriteNmToken("\ud800\ud800"); break;
            case 22: w.WriteProcessingInstruction("\ud800\ud800", "\ud800\ud800"); break;
            case 23: w.WriteQualifiedName("\ud800\ud800", "\ud800\ud800"); break;
            case 24: w.WriteStartAttribute("\ud800\ud800"); break;
            case 25: w.WriteStartAttribute("\ud800\ud800", "\ud800\ud800"); break;
            case 26: w.WriteStartAttribute("a3", "\ud800\ud800", "\ud800\ud800"); break;
            case 27: w.WriteStartElement("\ud800\ud800"); break;
            case 28: w.WriteStartElement("\ud800\ud800", "\ud800\ud800"); break;
            case 29: w.WriteStartElement("a", "\ud800\ud800", "\ud800\ud800"); break;
            case 30: w.WriteDocType("a", "\ud800\ud800", "\ud800\ud800", "\ud800\ud800"); break;
            case 31: w.WriteDocType("a", "b", "\ud800\ud800", "\ud800\ud800"); break;
            case 32: w.WriteDocType("a", "b", "c", "\ud800\ud800"); break;
            case 33: w.WriteAttributeString("a4", "\ud800\ud800"); break;
            case 34: w.WriteElementString("a", "\ud800\ud800"); break;
            case 35: w.WriteProcessingInstruction("a", "\ud800\ud800"); break;
            case 36: w.WriteQualifiedName("a", "\ud800\ud800"); break;
        }
    }
    catch (ArgumentException)
    {
        // Retry (note: fresh names like "a"/"b" are used where the first attempt
        // may have partially written an attribute/element). Either the same
        // ArgumentException or InvalidOperationException ends the test as a pass.
        try
        {
            switch (param)
            {
                case 1: w.WriteComment("\ud800\ud800"); break;
                case 2: w.WriteCharEntity('\ud800'); break;
                case 3: w.WriteCData("\ud800\ud800"); break;
                case 4: w.WriteEntityRef("\ud800\ud800"); break;
                case 5: w.WriteRaw("\ud800\ud800"); break;
                case 6: w.WriteString("\ud800\ud800"); break;
                case 7: w.WriteSurrogateCharEntity('\ud800', '\ud800'); break;
                case 8: w.WriteValue("\ud800\ud800"); break;
                case 9: w.WriteWhitespace("\ud800\ud800"); break;
                case 10: w.WriteAttributeString("\ud800\ud800", "\ud800\ud800"); break;
                case 11: w.WriteAttributeString("a", "\ud800\ud800", "\ud800\ud800"); break;
                case 12: w.WriteAttributeString("a", "b", "\ud800\ud800", "\ud800\ud800"); break;
                case 13: w.WriteAttributeString("a", "b", "c", "\ud800\ud800"); break;
                case 15: w.WriteElementString("\ud800\ud800", "\ud800\ud800"); break;
                case 16: w.WriteElementString("a", "\ud800\ud800", "\ud800\ud800"); break;
                case 17: w.WriteElementString("a", "a", "\ud800\ud800", "\ud800\ud800"); break;
                case 18: w.WriteElementString("a", "a", "a", "\ud800\ud800"); break;
                case 19: w.WriteEntityRef("\ud800\ud800"); break;
                case 20: w.WriteName("\ud800\ud800"); break;
                case 21: w.WriteNmToken("\ud800\ud800"); break;
                case 22: w.WriteProcessingInstruction("\ud800\ud800", "\ud800\ud800"); break;
                case 23: w.WriteQualifiedName("\ud800\ud800", "\ud800\ud800"); break;
                case 24: w.WriteStartAttribute("\ud800\ud800"); break;
                case 25: w.WriteStartAttribute("a", "\ud800\ud800"); break;
                case 26: w.WriteStartAttribute("a", "b", "\ud800\ud800"); break;
                case 27: w.WriteStartElement("\ud800\ud800"); break;
                case 28: w.WriteStartElement("\ud800\ud800", "\ud800\ud800"); break;
                case 29: w.WriteStartElement("a", "\ud800\ud800", "\ud800\ud800"); break;
                case 30: w.WriteDocType("a", "\ud800\ud800", "\ud800\ud800", "\ud800\ud800"); break;
                case 31: w.WriteDocType("a", "b", "\ud800\ud800", "\ud800\ud800"); break;
                case 32: w.WriteDocType("a", "b", "c", "\ud800\ud800"); break;
                case 33: w.WriteAttributeString("a", "\ud800\ud800"); break;
                case 34: w.WriteElementString("a", "\ud800\ud800"); break;
                case 35: w.WriteProcessingInstruction("a", "\ud800\ud800"); break;
                case 36: w.WriteQualifiedName("a", "\ud800\ud800"); break;
            }
        }
        catch (InvalidOperationException) { return; }
        catch (ArgumentException) { return; }
    }
    catch (XmlException)
    {
        // WriteDocType reports invalid surrogates as XmlException.
        try
        {
            switch (param)
            {
                case 14: w.WriteDocType("\ud800\ud800", "\ud800\ud800", "\ud800\ud800", "\ud800\ud800"); break;
                case 30: w.WriteDocType("a", "\ud800\ud800", "\ud800\ud800", "\ud800\ud800"); break;
                case 31: w.WriteDocType("a", "b", "\ud800\ud800", "\ud800\ud800"); break;
                case 32: w.WriteDocType("a", "b", "c", "\ud800\ud800"); break;
            }
        }
        catch (XmlException)
        {
            Assert.True((param == 14), "exception expected only for doctype");
            return;
        }
        catch (InvalidOperationException) { Assert.True(false, "InvalidOperationException not expected here"); }
    }
    finally
    {
        // Disposing may flush buffered invalid content and throw; that also counts.
        try
        {
            w.Dispose();
        }
        catch (ArgumentException)
        {
            result = true;
        }
    }
    Assert.True(result || (utils.WriterType == WriterType.CharCheckingWriter && skipParams.Contains(param)));
}
// Buffer write methods must throw ArgumentOutOfRangeException when
// index + count exceeds the buffer length (count 6 against a 5-element buffer),
// repeatably on retry.
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
public void var_10(XmlWriterUtils utils, int param)
{
    int iBufferSize = 5;
    int iIndex = 0;
    int iCount = 6;
    byte[] byteBuffer = new byte[iBufferSize];
    for (int i = 0; i < iBufferSize; i++)
        byteBuffer[i] = (byte)(i + '0');
    char[] charBuffer = new char[iBufferSize];
    for (int i = 0; i < iBufferSize; i++)
        charBuffer[i] = (char)(i + '0');
    XmlWriterSettings ws = new XmlWriterSettings();
    ws.ConformanceLevel = ConformanceLevel.Auto;
    using (XmlWriter w = utils.CreateWriter(ws))
    {
        try
        {
            switch (param)
            {
                case 1: w.WriteChars(charBuffer, iIndex, iCount); break;
                case 2: w.WriteRaw(charBuffer, iIndex, iCount); break;
                case 3: w.WriteStartElement("a"); w.WriteBinHex(byteBuffer, iIndex, iCount); break;
                case 4: w.WriteBase64(byteBuffer, iIndex, iCount); break;
            }
        }
        catch (ArgumentOutOfRangeException)
        {
            // Retry: same exception, or InvalidOperationException if the writer
            // entered an error state, both count as a pass.
            try
            {
                switch (param)
                {
                    case 1: w.WriteChars(charBuffer, iIndex, iCount); break;
                    case 2: w.WriteRaw(charBuffer, iIndex, iCount); break;
                    case 3: w.WriteBinHex(byteBuffer, iIndex, iCount); break;
                    case 4: w.WriteBase64(byteBuffer, iIndex, iCount); break;
                }
            }
            catch (ArgumentOutOfRangeException) { return; }
            catch (InvalidOperationException) { return; }
        }
        catch (IndexOutOfRangeException)
        {
            // CharCheckingWriter throws IndexOutOfRangeException for WriteChars.
            // NOTE(review): this path does not return, so control continues to the
            // final Assert.True(false) below — presumably never hit in practice; verify.
            try
            {
                switch (param)
                {
                    case 1: w.WriteChars(charBuffer, iIndex, iCount); break;
                }
            }
            catch (IndexOutOfRangeException) { Assert.True((utils.WriterType == WriterType.CharCheckingWriter)); }
        }
    }
    Assert.True(false);
}
// Assigning out-of-range enum values to XmlWriterSettings must throw
// ArgumentOutOfRangeException, and keep doing so on subsequent assignments.
[Theory]
[InlineData(1)]
[InlineData(2)]
[InlineData(3)]
[InlineData(4)]
public void var_11(int param)
{
    XmlWriterSettings settings = new XmlWriterSettings();
    try
    {
        switch (param)
        {
            case 1: settings.ConformanceLevel = (ConformanceLevel)777; break;
            case 2: settings.NewLineHandling = (NewLineHandling)777; break;
            case 3: settings.ConformanceLevel = (ConformanceLevel)(-1); break;
            case 4: settings.NewLineHandling = (NewLineHandling)(-1); break;
        }
    }
    catch (ArgumentOutOfRangeException)
    {
        // Second attempt uses a different (still invalid) positive value.
        try
        {
            switch (param)
            {
                case 1: settings.ConformanceLevel = (ConformanceLevel)555; break;
                case 2: settings.NewLineHandling = (NewLineHandling)555; break;
                case 3: settings.ConformanceLevel = (ConformanceLevel)(-1); break;
                case 4: settings.NewLineHandling = (NewLineHandling)(-1); break;
            }
        }
        catch (ArgumentOutOfRangeException)
        {
            return;
        }
    }
    // Reaching here means an invalid value was accepted: fail.
    Assert.True(false);
}
// Every WriterHelper.Create overload must succeed (no exception) when given a
// valid, non-null output target, with and without explicit settings.
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
public void var_12(XmlWriterUtils utils, int param)
{
    XmlWriterSettings settings = new XmlWriterSettings();
    TextWriter output = new StringWriter();
    switch (param)
    {
        case 1: WriterHelper.Create(output, overrideAsync: true, async: utils.Async); break;
        case 2: WriterHelper.Create(output, overrideAsync: true, async: utils.Async); break;
        case 3: WriterHelper.Create(new StringBuilder(), overrideAsync: true, async: utils.Async); break;
        case 4: WriterHelper.Create(WriterHelper.Create(output, overrideAsync: true, async: utils.Async), overrideAsync: true, async: utils.Async); break;
        case 5: WriterHelper.Create(output, settings, overrideAsync: true, async: utils.Async); break;
        case 6: WriterHelper.Create(output, settings, overrideAsync: true, async: utils.Async); break;
        case 7: WriterHelper.Create(new StringBuilder(), settings, overrideAsync: true, async: utils.Async); break;
        case 8: WriterHelper.Create(WriterHelper.Create(output, overrideAsync: true, async: utils.Async), settings, overrideAsync: true, async: utils.Async); break;
    }
    return;
}
// Empty-string text is accepted by content writers (comment/CData/raw/string/
// value/whitespace), but empty names must throw ArgumentException, repeatably.
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
[XmlWriterInlineData(11)]
[XmlWriterInlineData(12)]
[XmlWriterInlineData(13)]
[XmlWriterInlineData(14)]
[XmlWriterInlineData(15)]
[XmlWriterInlineData(16)]
[XmlWriterInlineData(17)]
[XmlWriterInlineData(18)]
[XmlWriterInlineData(19)]
[XmlWriterInlineData(20)]
[XmlWriterInlineData(21)]
[XmlWriterInlineData(22)]
[XmlWriterInlineData(23)]
[XmlWriterInlineData(24)]
[XmlWriterInlineData(25)]
[XmlWriterInlineData(26)]
[XmlWriterInlineData(27)]
public void var_13(XmlWriterUtils utils, int param)
{
    XmlWriterSettings ws = new XmlWriterSettings();
    ws.ConformanceLevel = ConformanceLevel.Document;
    XmlWriter w = utils.CreateWriter(ws);
    bool result = false;
    // NOTE(review): params only go up to 27, so this guard is always true here;
    // it appears copied from the surrogate test (var_07) — confirm.
    if (param != 30 && param != 31 && param != 32)
        w.WriteStartElement("Root");
    // These members accept empty text: no exception expected.
    switch (param)
    {
        case 1: w.WriteComment(string.Empty); result = true; break;
        case 2: w.WriteCData(string.Empty); result = true; break;
        case 4: w.WriteRaw(string.Empty); result = true; break;
        case 5: w.WriteString(string.Empty); result = true; break;
        case 6: w.WriteValue(string.Empty); result = true; break;
        case 7: w.WriteWhitespace(string.Empty); result = true; break;
    }
    try
    {
        switch (param)
        {
            case 3: w.WriteEntityRef(string.Empty); break;
            case 8: w.WriteAttributeString(string.Empty, string.Empty); break;
            case 9: w.WriteAttributeString(string.Empty, string.Empty, string.Empty); break;
            case 10: w.WriteAttributeString(string.Empty, string.Empty, string.Empty, string.Empty); break;
            case 11: w.WriteDocType(string.Empty, string.Empty, string.Empty, string.Empty); break;
            case 12: w.WriteElementString(string.Empty, string.Empty); break;
            case 13: w.WriteElementString(string.Empty, string.Empty, string.Empty); break;
            case 14: w.WriteElementString(string.Empty, string.Empty, string.Empty, string.Empty); break;
            case 15: w.WriteEntityRef(string.Empty); break;
            case 16: w.WriteName(string.Empty); break;
            case 17: w.WriteNmToken(string.Empty); break;
            case 18: w.WriteProcessingInstruction(string.Empty, string.Empty); break;
            case 19: w.WriteQualifiedName(string.Empty, string.Empty); break;
            case 20: w.WriteStartAttribute(string.Empty); break;
            case 21: w.WriteStartAttribute(string.Empty, string.Empty); break;
            case 22: w.WriteStartAttribute(string.Empty, string.Empty, string.Empty); break;
            case 23: w.WriteStartElement(string.Empty); break;
            case 24: w.WriteStartElement(string.Empty, string.Empty); break;
            case 25: w.WriteStartElement(string.Empty, string.Empty, string.Empty); break;
            case 26: w.WriteDocType(string.Empty, string.Empty, string.Empty, string.Empty); break;
            case 27: w.WriteProcessingInstruction(string.Empty, string.Empty); break;
        }
    }
    catch (ArgumentException)
    {
        // Retry the identical call: the writer must throw the same way again.
        try
        {
            switch (param)
            {
                case 3: w.WriteEntityRef(string.Empty); break;
                case 8: w.WriteAttributeString(string.Empty, string.Empty); break;
                case 9: w.WriteAttributeString(string.Empty, string.Empty, string.Empty); break;
                case 10: w.WriteAttributeString(string.Empty, string.Empty, string.Empty, string.Empty); break;
                case 11: w.WriteDocType(string.Empty, string.Empty, string.Empty, string.Empty); break;
                case 12: w.WriteElementString(string.Empty, string.Empty); break;
                case 13: w.WriteElementString(string.Empty, string.Empty, string.Empty); break;
                case 14: w.WriteElementString(string.Empty, string.Empty, string.Empty, string.Empty); break;
                case 15: w.WriteEntityRef(string.Empty); break;
                case 16: w.WriteName(string.Empty); break;
                case 17: w.WriteNmToken(string.Empty); break;
                case 18: w.WriteProcessingInstruction(string.Empty, string.Empty); break;
                case 19: w.WriteQualifiedName(string.Empty, string.Empty); break;
                case 20: w.WriteStartAttribute(string.Empty); break;
                case 21: w.WriteStartAttribute(string.Empty, string.Empty); break;
                case 22: w.WriteStartAttribute(string.Empty, string.Empty, string.Empty); break;
                case 23: w.WriteStartElement(string.Empty); break;
                case 24: w.WriteStartElement(string.Empty, string.Empty); break;
                case 25: w.WriteStartElement(string.Empty, string.Empty, string.Empty); break;
                case 26: w.WriteDocType(string.Empty, string.Empty, string.Empty, string.Empty); break;
                case 27: w.WriteProcessingInstruction(string.Empty, string.Empty); break;
            }
        }
        catch (ArgumentException) { result = true; }
    }
    finally
    {
        w.Dispose();
    }
    // CustomWriter is exempt for WriteQualifiedName (param 19).
    Assert.True((result || param == 19 && utils.WriterType == WriterType.CustomWriter));
}
// Assigning null to IndentChars / NewLineChars must throw ArgumentNullException,
// repeatably on retry.
[Theory]
[InlineData(1)]
[InlineData(2)]
public void var_14(int param)
{
    XmlWriterSettings settings = new XmlWriterSettings();
    try
    {
        switch (param)
        {
            case 1: settings.IndentChars = null; break;
            case 2: settings.NewLineChars = null; break;
        }
    }
    catch (ArgumentNullException)
    {
        try
        {
            switch (param)
            {
                case 1: settings.IndentChars = null; break;
                case 2: settings.NewLineChars = null; break;
            }
        }
        catch (ArgumentNullException)
        {
            return;
        }
    }
    // Reaching here means null was accepted: fail.
    Assert.True(false);
}
// Dispose must be idempotent; after closing, state properties remain queryable
// and Settings still reports the construction-time configuration.
[Theory]
[XmlWriterInlineData]
public void var_15(XmlWriterUtils utils)
{
    XmlWriter writer = utils.CreateWriter();
    bool expectUnicode = utils.WriterType == WriterType.UnicodeWriter || utils.WriterType == WriterType.UnicodeWriterIndent;
    bool expectIndent = utils.WriterType == WriterType.UTF8WriterIndent || utils.WriterType == WriterType.UnicodeWriterIndent;
    writer.WriteElementString("a", "b");
    // Triple dispose: must not throw.
    ((IDisposable)writer).Dispose();
    ((IDisposable)writer).Dispose();
    ((IDisposable)writer).Dispose();
    CError.Compare(writer.LookupPrefix(""), string.Empty, "LookupPrefix");
    CError.Compare(writer.WriteState, WriteState.Closed, "WriteState");
    CError.Compare(writer.XmlLang, null, "XmlLang");
    CError.Compare(writer.XmlSpace, XmlSpace.None, "XmlSpace");
    if (utils.WriterType != WriterType.CustomWriter)
    {
        CError.Compare(writer.Settings.CheckCharacters, true, "CheckCharacters");
        CError.Compare(writer.Settings.CloseOutput, false, "CloseOutput");
        CError.Compare(writer.Settings.ConformanceLevel, ConformanceLevel.Document, "ConformanceLevel");
        CError.Compare(writer.Settings.Indent, expectIndent, "Indent");
        CError.Compare(writer.Settings.IndentChars, "  ", "IndentChars");
        CError.Compare(writer.Settings.NewLineChars, Environment.NewLine, "NewLineChars");
        CError.Compare(writer.Settings.NewLineHandling, NewLineHandling.Replace, "NewLineHandling");
        CError.Compare(writer.Settings.NewLineOnAttributes, false, "NewLineOnAttributes");
        CError.Compare(writer.Settings.OmitXmlDeclaration, true, "OmitXmlDeclaration");
        CError.Compare(writer.Settings.Encoding.WebName, expectUnicode ? "utf-16" : "utf-8", "Encoding");
    }
    return;
}
// After an invalid operation (doctype after content) the writer enters the
// Error state but its inspection properties and Settings remain queryable.
[Theory]
[XmlWriterInlineData]
public void var_16(XmlWriterUtils utils)
{
    XmlWriter writer = utils.CreateWriter();
    bool expectUnicode = utils.WriterType == WriterType.UnicodeWriter || utils.WriterType == WriterType.UnicodeWriterIndent;
    bool expectIndent = utils.WriterType == WriterType.UTF8WriterIndent || utils.WriterType == WriterType.UnicodeWriterIndent;
    writer.WriteElementString("a", "b");
    try
    {
        // DocType after document content is invalid.
        writer.WriteDocType("a", "b", "c", "d");
    }
    catch (InvalidOperationException)
    {
        CError.Compare(writer.LookupPrefix(""), string.Empty, "LookupPrefix");
        CError.Compare(writer.WriteState, WriteState.Error, "WriteState");
        CError.Compare(writer.XmlLang, null, "XmlLang");
        CError.Compare(writer.XmlSpace, XmlSpace.None, "XmlSpace");
        if (utils.WriterType != WriterType.CustomWriter)
        {
            CError.Compare(writer.Settings.CheckCharacters, true, "CheckCharacters");
            CError.Compare(writer.Settings.CloseOutput, false, "CloseOutput");
            CError.Compare(writer.Settings.ConformanceLevel, ConformanceLevel.Document, "ConformanceLevel");
            CError.Compare(writer.Settings.Indent, expectIndent, "Indent");
            CError.Compare(writer.Settings.IndentChars, "  ", "IndentChars");
            CError.Compare(writer.Settings.NewLineChars, Environment.NewLine, "NewLineChars");
            CError.Compare(writer.Settings.NewLineHandling, NewLineHandling.Replace, "NewLineHandling");
            CError.Compare(writer.Settings.NewLineOnAttributes, false, "NewLineOnAttributes");
            CError.Compare(writer.Settings.OmitXmlDeclaration, true, "OmitXmlDeclaration");
            CError.Compare(writer.Settings.Encoding.WebName, expectUnicode ? "utf-16" : "utf-8", "Encoding");
        }
        return;
    }
    // No exception means the invalid doctype was accepted: fail.
    Assert.True(false);
}
// Regression test: writing an xmlns:p declaration via WriteStartAttribute plus
// WriteString must produce the expected output (compared below) rather than
// misattributing the namespace.
[Theory]
[XmlWriterInlineData(WriterType.All & ~WriterType.Async)]
public void bug601305(XmlWriterUtils utils)
{
    CError.WriteLine("expected:");
    CError.WriteLine("<p:root xmlns:p='uri' />");
    CError.WriteLine("actual:");
    XmlWriterSettings settings = new XmlWriterSettings();
    settings.OmitXmlDeclaration = true;
    StringWriter output = new StringWriter();
    using (XmlWriter writer = WriterHelper.Create(output, settings, overrideAsync: true, async: utils.Async))
    {
        writer.WriteStartElement("root", "uri");
        writer.WriteStartAttribute("xmlns", "p", "http://www.w3.org/2000/xmlns/");
        writer.WriteString("uri");
    }
    CError.Compare(output.ToString(), "<root xmlns:p=\"uri\" xmlns=\"uri\" />", "writer output");
    return;
}
// Mutating a live writer's Settings must throw XmlException, repeatably.
// CustomWriter is excluded because its Settings behave differently (skipped).
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
public void var17(XmlWriterUtils utils, int param)
{
    if (utils.WriterType == WriterType.CustomWriter) return;
    XmlWriter writer = utils.CreateWriter();
    try
    {
        switch (param)
        {
            case 1: writer.Settings.CheckCharacters = false; break;
            case 2: writer.Settings.CloseOutput = false; break;
            case 3: writer.Settings.ConformanceLevel = ConformanceLevel.Fragment; break;
            case 4: writer.Settings.Encoding = Encoding.UTF8; break;
            case 5: writer.Settings.Indent = false; break;
            case 6: writer.Settings.IndentChars = "#"; break;
            case 7: writer.Settings.NewLineChars = "%"; break;
            case 8: writer.Settings.NewLineHandling = NewLineHandling.None; break;
            case 9: writer.Settings.NewLineOnAttributes = false; break;
            case 10: writer.Settings.OmitXmlDeclaration = true; break;
        }
    }
    catch (XmlException)
    {
        // Retry the identical assignment: must throw again.
        try
        {
            switch (param)
            {
                case 1: writer.Settings.CheckCharacters = false; break;
                case 2: writer.Settings.CloseOutput = false; break;
                case 3: writer.Settings.ConformanceLevel = ConformanceLevel.Fragment; break;
                case 4: writer.Settings.Encoding = Encoding.UTF8; break;
                case 5: writer.Settings.Indent = false; break;
                case 6: writer.Settings.IndentChars = "#"; break;
                case 7: writer.Settings.NewLineChars = "%"; break;
                case 8: writer.Settings.NewLineHandling = NewLineHandling.None; break;
                case 9: writer.Settings.NewLineOnAttributes = false; break;
                case 10: writer.Settings.OmitXmlDeclaration = true; break;
            }
        }
        catch (XmlException) { return; }
    }
    // Reaching here means the mutation was accepted: fail.
    Assert.True(false);
}
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
[XmlWriterInlineData(11)]
[XmlWriterInlineData(12)]
[XmlWriterInlineData(13)]
[XmlWriterInlineData(14)]
[XmlWriterInlineData(15)]
[XmlWriterInlineData(16)]
[XmlWriterInlineData(17)]
[XmlWriterInlineData(18)]
[XmlWriterInlineData(19)]
[XmlWriterInlineData(20)]
[XmlWriterInlineData(21)]
[XmlWriterInlineData(22)]
[XmlWriterInlineData(23)]
[XmlWriterInlineData(24)]
[XmlWriterInlineData(25)]
[XmlWriterInlineData(26)]
[XmlWriterInlineData(27)]
[XmlWriterInlineData(28)]
[XmlWriterInlineData(29)]
public void var_18(XmlWriterUtils utils, int param)
{
    // Disposes the writer, then calls one XmlWriter API (selected by param).
    // The call must throw; a second call of the same API must throw the same
    // exception type again. Reaching the end means nothing threw -> fail.
    XmlReader r = ReaderHelper.Create(new StringReader("<xmlns/>"));
    byte[] buffer = new byte[10];
    char[] chbuffer = new char[10];
    XmlWriter w = utils.CreateWriter();
    w.WriteElementString("a", "b");
    w.Dispose();
    // BUGFIX: the failure message claimed "Error" while the expected value is
    // WriteState.Closed — the message now matches the assertion.
    CError.Compare(w.WriteState, WriteState.Closed, "WriteState should be Closed");
    try
    {
        // First call on the disposed writer: must throw.
        switch (param)
        {
            case 1: w.WriteQualifiedName("foo", ""); break;
            case 2: w.WriteAttributes(r, true); break;
            case 3: w.WriteAttributeString("a", "b", "c", "d"); break;
            case 4: w.WriteBase64(buffer, 0, 3); break;
            case 5: w.WriteBinHex(buffer, 0, 3); break;
            case 6: w.WriteCData("a"); break;
            case 7: w.WriteCharEntity(char.MaxValue); break;
            case 8: w.WriteChars(chbuffer, 1, 3); break;
            case 9: w.WriteComment("a"); break;
            case 10: w.WriteDocType("a", "b", "c", "d"); break;
            case 11: w.WriteElementString("a", "b", "c", "d"); break;
            case 12: w.WriteEndAttribute(); break;
            case 13: w.WriteEndDocument(); break;
            case 14: w.WriteEndElement(); break;
            case 15: w.WriteEntityRef("a"); break;
            case 16: w.WriteFullEndElement(); break;
            case 17: w.WriteName("b"); break;
            case 18: w.WriteNmToken("b"); break;
            case 19: w.WriteNode(r, true); break;
            case 20: w.WriteProcessingInstruction("a", "b"); break;
            case 21: w.WriteRaw("a"); break;
            case 22: w.WriteRaw(chbuffer, 1, 3); break;
            case 23: w.WriteStartAttribute("a", "b", "c"); break;
            case 24: w.WriteStartDocument(true); break;
            case 25: w.WriteStartElement("a", "b", "c"); break;
            case 26: w.WriteString("a"); break;
            case 27: w.WriteSurrogateCharEntity('\uD812', '\uDD12'); break;
            case 28: w.WriteValue(true); break;
            case 29: w.WriteWhitespace(""); break;
        }
    }
    catch (InvalidOperationException)
    {
        try
        {
            // Second call: must throw InvalidOperationException again.
            // Cases 2 and 27 are absent here: they fail with XmlException /
            // ArgumentException and are retried in the catches below.
            switch (param)
            {
                case 1: w.WriteQualifiedName("foo", ""); break;
                case 3: w.WriteAttributeString("a", "b", "c", "d"); break;
                case 4: w.WriteBase64(buffer, 0, 3); break;
                case 5: w.WriteBinHex(buffer, 0, 3); break;
                case 6: w.WriteCData("a"); break;
                case 7: w.WriteCharEntity(char.MaxValue); break;
                case 8: w.WriteChars(chbuffer, 1, 3); break;
                case 9: w.WriteComment("a"); break;
                case 10: w.WriteDocType("a", "b", "c", "d"); break;
                case 11: w.WriteElementString("a", "b", "c", "d"); break;
                case 12: w.WriteEndAttribute(); break;
                case 13: w.WriteEndDocument(); break;
                case 14: w.WriteEndElement(); break;
                case 15: w.WriteEntityRef("a"); break;
                case 16: w.WriteFullEndElement(); break;
                case 17: w.WriteName("b"); break;
                case 18: w.WriteNmToken("b"); break;
                case 19: w.WriteNode(r, true); break;
                case 20: w.WriteProcessingInstruction("a", "b"); break;
                case 21: w.WriteRaw("a"); break;
                case 22: w.WriteRaw(chbuffer, 1, 3); break;
                case 23: w.WriteStartAttribute("a", "b", "c"); break;
                case 24: w.WriteStartDocument(true); break;
                case 25: w.WriteStartElement("a", "b", "c"); break;
                case 26: w.WriteString("a"); break;
                case 28: w.WriteValue(true); break;
                case 29: w.WriteWhitespace(""); break;
            }
        }
        catch (InvalidOperationException) { return; }
    }
    catch (ArgumentException)
    {
        try
        {
            switch (param)
            {
                case 8: w.WriteChars(chbuffer, 1, 3); break;
                case 27: w.WriteSurrogateCharEntity('\uD812', '\uDD12'); break;
            }
        }
        catch (ArgumentException) { return; }
    }
    catch (XmlException)
    {
        try
        {
            switch (param)
            {
                case 2: w.WriteAttributes(r, true); break;
            }
        }
        catch (XmlException) { return; }
    }
    Assert.True(false);
}
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
[XmlWriterInlineData(11)]
[XmlWriterInlineData(12)]
[XmlWriterInlineData(13)]
[XmlWriterInlineData(14)]
[XmlWriterInlineData(15)]
[XmlWriterInlineData(16)]
[XmlWriterInlineData(17)]
[XmlWriterInlineData(18)]
[XmlWriterInlineData(19)]
[XmlWriterInlineData(20)]
[XmlWriterInlineData(21)]
[XmlWriterInlineData(22)]
[XmlWriterInlineData(23)]
[XmlWriterInlineData(24)]
[XmlWriterInlineData(25)]
[XmlWriterInlineData(26)]
[XmlWriterInlineData(27)]
[XmlWriterInlineData(28)]
[XmlWriterInlineData(29)]
public void var_19(XmlWriterUtils utils, int param)
{
    // Drives the writer into WriteState.Error (WriteEntityRef right after
    // WriteStartDocument throws InvalidOperationException), then verifies that
    // the API selected by param still throws from the Error state, and throws
    // again on a second call. Falling through to the end fails the test.
    XmlReader r = ReaderHelper.Create(new StringReader("<xmlns/>"));
    byte[] buffer = new byte[10];
    char[] chbuffer = new char[10];
    XmlWriter w = utils.CreateWriter();
    try
    {
        w.WriteStartDocument();
        w.WriteEntityRef("ent");
    }
    catch (InvalidOperationException)
    {
        CError.Compare(w.WriteState, WriteState.Error, "WriteState should be Error");
        try
        {
            // First call on the errored writer: must throw.
            switch (param)
            {
                case 1: w.WriteQualifiedName("foo", ""); break;
                case 2: w.WriteAttributes(r, true); break;
                case 3: w.WriteAttributeString("a", "b", "c", "d"); break;
                case 4: w.WriteBase64(buffer, 0, 3); break;
                case 5: w.WriteBinHex(buffer, 0, 3); break;
                case 6: w.WriteCData("a"); break;
                case 7: w.WriteCharEntity(char.MaxValue); break;
                case 8: w.WriteChars(chbuffer, 1, 3); break;
                case 9: w.WriteComment("a"); break;
                case 10: w.WriteDocType("a", "b", "c", "d"); break;
                case 11: w.WriteElementString("a", "b", "c", "d"); break;
                case 12: w.WriteEndAttribute(); break;
                case 13: w.WriteEndDocument(); break;
                case 14: w.WriteEndElement(); break;
                case 15: w.WriteEntityRef("a"); break;
                case 16: w.WriteFullEndElement(); break;
                case 17: w.WriteName("b"); break;
                case 18: w.WriteNmToken("b"); break;
                case 19: w.WriteNode(r, true); break;
                case 20: w.WriteProcessingInstruction("a", "b"); break;
                case 21: w.WriteRaw("a"); break;
                case 22: w.WriteRaw(chbuffer, 1, 3); break;
                case 23: w.WriteStartAttribute("a", "b", "c"); break;
                case 24: w.WriteStartDocument(true); break;
                case 25: w.WriteStartElement("a", "b", "c"); break;
                case 26: w.WriteString("a"); break;
                case 27: w.WriteSurrogateCharEntity('\uD812', '\uDD12'); break;
                case 28: w.WriteValue(true); break;
                case 29: w.WriteWhitespace(""); break;
            }
        }
        catch (InvalidOperationException)
        {
            try
            {
                // Second call: must throw InvalidOperationException again.
                // Cases 2 and 27 are absent here: they are retried in the
                // XmlException / ArgumentException catches below instead.
                switch (param)
                {
                    case 1: w.WriteQualifiedName("foo", ""); break;
                    case 3: w.WriteAttributeString("a", "b", "c", "d"); break;
                    case 4: w.WriteBase64(buffer, 0, 3); break;
                    case 5: w.WriteBinHex(buffer, 0, 3); break;
                    case 6: w.WriteCData("a"); break;
                    case 7: w.WriteCharEntity(char.MaxValue); break;
                    case 8: w.WriteChars(chbuffer, 1, 3); break;
                    case 9: w.WriteComment("a"); break;
                    case 10: w.WriteDocType("a", "b", "c", "d"); break;
                    case 11: w.WriteElementString("a", "b", "c", "d"); break;
                    case 12: w.WriteEndAttribute(); break;
                    case 13: w.WriteEndDocument(); break;
                    case 14: w.WriteEndElement(); break;
                    case 15: w.WriteEntityRef("a"); break;
                    case 16: w.WriteFullEndElement(); break;
                    case 17: w.WriteName("b"); break;
                    case 18: w.WriteNmToken("b"); break;
                    case 19: w.WriteNode(r, true); break;
                    case 20: w.WriteProcessingInstruction("a", "b"); break;
                    case 21: w.WriteRaw("a"); break;
                    case 22: w.WriteRaw(chbuffer, 1, 3); break;
                    case 23: w.WriteStartAttribute("a", "b", "c"); break;
                    case 24: w.WriteStartDocument(true); break;
                    case 25: w.WriteStartElement("a", "b", "c"); break;
                    case 26: w.WriteString("a"); break;
                    case 28: w.WriteValue(true); break;
                    case 29: w.WriteWhitespace(""); break;
                }
            }
            catch (InvalidOperationException) { return; }
        }
        catch (ArgumentException)
        {
            try
            {
                switch (param)
                {
                    case 8: w.WriteChars(chbuffer, 1, 3); break;
                    case 27: w.WriteSurrogateCharEntity('\uD812', '\uDD12'); break;
                }
            }
            catch (ArgumentException) { return; }
        }
        catch (XmlException)
        {
            try
            {
                switch (param)
                {
                    case 2: w.WriteAttributes(r, true); break;
                }
            }
            catch (XmlException) { return; }
        }
    }
    Assert.True(false);
}
[Theory]
[XmlWriterInlineData]
public void var_20(XmlWriterUtils utils)
{
    // An attribute value consisting of two high surrogates is invalid and must
    // throw ArgumentException; a retry on the same writer must throw again
    // (either ArgumentException or InvalidOperationException). Falling through
    // to the end fails the test.
    XmlWriter writer = utils.CreateWriter();
    writer.WriteStartElement("root");
    try
    {
        writer.WriteAttributeString("attr1", "\uD812\uD812");
        writer.WriteEndElement();
    }
    catch (ArgumentException firstError)
    {
        CError.WriteLine(firstError);
        try
        {
            writer.WriteAttributeString("attr2", "\uD812\uD812");
            writer.WriteEndElement();
        }
        catch (InvalidOperationException retryError)
        {
            CError.WriteLine(retryError);
            return;
        }
        catch (ArgumentException retryError)
        {
            CError.WriteLine(retryError);
            return;
        }
    }
    finally
    {
        writer.Dispose();
    }
    Assert.True(false);
}
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
[XmlWriterInlineData(5)]
[XmlWriterInlineData(6)]
[XmlWriterInlineData(7)]
[XmlWriterInlineData(8)]
[XmlWriterInlineData(9)]
[XmlWriterInlineData(10)]
[XmlWriterInlineData(11)]
[XmlWriterInlineData(12)]
[XmlWriterInlineData(13)]
[XmlWriterInlineData(14)]
[XmlWriterInlineData(15)]
[XmlWriterInlineData(16)]
[XmlWriterInlineData(17)]
[XmlWriterInlineData(18)]
[XmlWriterInlineData(19)]
[XmlWriterInlineData(20)]
[XmlWriterInlineData(21)]
[XmlWriterInlineData(22)]
[XmlWriterInlineData(23)]
[XmlWriterInlineData(24)]
[XmlWriterInlineData(25)]
[XmlWriterInlineData(26)]
[XmlWriterInlineData(27)]
[XmlWriterInlineData(28)]
[XmlWriterInlineData(29)]
[XmlWriterInlineData(30)]
[XmlWriterInlineData(31)]
[XmlWriterInlineData(32)]
[XmlWriterInlineData(33)]
[XmlWriterInlineData(34)]
public void var_21(XmlWriterUtils utils, int param)
{
    // Feeds an out-of-order surrogate pair (low "\uDE34" before high "\uD9A2")
    // to the writer API selected by param. The call must fail, the retry must
    // fail the same way, and `result` records whether the failure semantics
    // held; an ArgumentException thrown from Dispose also counts as success.
    bool result = false;
    string val = "\uDE34\uD9A2";
    XmlWriter w = utils.CreateWriter();
    // The three WriteDocType cases (13-15) must run before any element is open.
    if (param != 13 && param != 14 && param != 15) w.WriteStartElement("a", "b");
    try
    {
        // First call with the invalid pair: must throw.
        switch (param)
        {
            case 1: w.WriteStartAttribute("c"); w.WriteValue(val); break;
            case 2: w.WriteStartAttribute("c"); w.WriteComment(val); break;
            case 3: w.WriteStartAttribute("c"); w.WriteCData(val); break;
            case 4: w.WriteStartAttribute("c"); w.WriteProcessingInstruction("a", val); break;
            case 5: w.WriteStartAttribute("c"); w.WriteRaw(val); break;
            case 6: w.WriteValue(val); break;
            case 7: w.WriteComment(val); break;
            case 8: w.WriteCData(val); break;
            case 9: w.WriteProcessingInstruction("a", val); break;
            case 10: w.WriteRaw(val); break;
            case 11: w.WriteAttributeString("a", val); break;
            case 12: w.WriteCharEntity('\uDE34'); break;
            case 13: w.WriteDocType("a", val, val, val); break;
            case 14: w.WriteDocType("a", "b", val, val); break;
            case 15: w.WriteDocType("a", "b", "c", val); break;
            case 16: w.WriteElementString(val, val, val, val); break;
            case 17: w.WriteElementString("a", val, val, val); break;
            case 18: w.WriteElementString("a", "b", val, val); break;
            case 19: w.WriteElementString("a", "b", "c", val); break;
            case 20: w.WriteEntityRef(val); break;
            case 21: w.WriteName(val); break;
            case 22: w.WriteNmToken(val); break;
            case 23: w.WriteQualifiedName(val, val); break;
            case 24: w.WriteQualifiedName("a", val); break;
            case 25: w.WriteStartAttribute(val); break;
            case 26: w.WriteStartAttribute("a", val); break;
            case 27: w.WriteStartAttribute("a", val, val); break;
            case 28: w.WriteStartElement(val); break;
            case 29: w.WriteStartElement("a", val); break;
            case 30: w.WriteStartElement("a", val, val); break;
            case 31: w.WriteString(val); break;
            case 32: w.WriteWhitespace(val); break;
            case 33: w.WriteStartAttribute("c"); w.WriteString(val); break;
            case 34: w.WriteSurrogateCharEntity('\uD9A2', '\uDE34'); break;
        }
    }
    catch (ArgumentException e)
    {
        CError.WriteLine(e.Message);
        try
        {
            // Retry (attribute names shifted to avoid duplicate-attribute
            // errors): must fail again with ArgumentException or
            // InvalidOperationException for result to become true.
            switch (param)
            {
                case 1: w.WriteStartAttribute("b"); w.WriteValue(val); break;
                case 2: w.WriteStartAttribute("b"); w.WriteComment(val); break;
                case 3: w.WriteStartAttribute("b"); w.WriteCData(val); break;
                case 4: w.WriteStartAttribute("b"); w.WriteProcessingInstruction("a", val); break;
                case 5: w.WriteStartAttribute("b"); w.WriteRaw(val); break;
                case 6: w.WriteValue(val); break;
                case 7: w.WriteComment(val); break;
                case 8: w.WriteCData(val); break;
                case 9: w.WriteProcessingInstruction("a", val); break;
                case 10: w.WriteRaw(val); break;
                case 11: w.WriteAttributeString("a2", val); break;
                case 12: w.WriteCharEntity('\uDE34'); break;
                case 13: w.WriteDocType("a", val, val, val); break;
                case 14: w.WriteDocType("a", "b", val, val); break;
                case 15: w.WriteDocType("a", "b", "c", val); break;
                case 16: w.WriteElementString(val, val, val, val); break;
                case 17: w.WriteElementString("a", val, val, val); break;
                case 18: w.WriteElementString("a", "b", val, val); break;
                case 19: w.WriteElementString("a", "b", "c", val); break;
                case 20: w.WriteEntityRef(val); break;
                case 21: w.WriteName(val); break;
                case 22: w.WriteNmToken(val); break;
                case 23: w.WriteQualifiedName(val, val); break;
                case 24: w.WriteQualifiedName("a", val); break;
                case 25: w.WriteStartAttribute(val); break;
                case 26: w.WriteStartAttribute("a", val); break;
                case 27: w.WriteStartAttribute("a", val, val); break;
                case 28: w.WriteStartElement(val); break;
                case 29: w.WriteStartElement("a", val); break;
                case 30: w.WriteStartElement("a", val, val); break;
                case 31: w.WriteString(val); break;
                case 32: w.WriteWhitespace(val); break;
                case 33: w.WriteStartAttribute("b"); w.WriteString(val); break;
                case 34: w.WriteSurrogateCharEntity('\uD9A2', '\uDE34'); break;
            }
        }
        catch (InvalidOperationException) { CError.WriteLine(e.Message); result = true; }
        catch (ArgumentException) { CError.WriteLine(e.Message); result = true; }
    }
    catch (XmlException e)
    {
        // DTD and name/token cases may surface as XmlException instead.
        CError.WriteLine(e.Message);
        try
        {
            switch (param)
            {
                case 13: w.WriteDocType("a", val, val, val); break;
                case 14: w.WriteDocType("a", "b", val, val); break;
                case 15: w.WriteDocType("a", "b", "c", val); break;
                case 21: w.WriteName(val); break;
                case 22: w.WriteNmToken(val); break;
            }
        }
        catch (XmlException)
        {
            // Only the CharCheckingWriter is expected to throw XmlException
            // twice for WriteName/WriteNmToken.
            result = (utils.WriterType == WriterType.CharCheckingWriter && (param == 21 || param == 22));
        }
        catch (InvalidOperationException) { result = false; }
    }
    finally
    {
        try
        {
            w.Dispose();
        }
        // Dispose flushing the invalid content may itself throw; that is a pass.
        catch (ArgumentException) { result = true; }
    }
    Assert.True(result);
}
[Theory]
[XmlWriterInlineData(1)]
[XmlWriterInlineData(2)]
[XmlWriterInlineData(3)]
[XmlWriterInlineData(4)]
public void bug600541(XmlWriterUtils utils, int param)
{
    // WriteAttributes must copy from the reader's current attribute through the
    // last one; param selects which attribute the reader is positioned on.
    string xml;
    switch (param)
    {
        case 2: xml = "<root b=\"b\" c=\"c\" d=\"d\" />"; break;
        case 3: xml = "<root c=\"c\" d=\"d\" />"; break;
        case 4: xml = "<root d=\"d\" />"; break;
        default: xml = "<root a=\"a\" b=\"b\" c=\"c\" d=\"d\" />"; break;
    }
    using (XmlReader reader = ReaderHelper.Create(new StringReader(xml)))
    {
        reader.Read();
        CError.Compare(reader.NodeType, XmlNodeType.Element, "XNT");
        CError.Compare(reader.MoveToFirstAttribute(), true, "MFA");
        using (XmlWriter writer = utils.CreateWriter())
        {
            writer.WriteStartElement("root");
            // Position the reader on the first attribute we expect to copy.
            switch (param)
            {
                case 2: reader.MoveToAttribute("b"); break;
                case 3: reader.MoveToAttribute("c"); break;
                case 4: reader.MoveToAttribute("d"); break;
            }
            writer.WriteAttributes(reader, true);
            writer.Dispose();
            Assert.True(utils.CompareString(xml));
        }
    }
}
[Theory]
[XmlWriterInlineData]
public void bug630890(XmlWriterUtils utils)
{
    // Builds a 100000-deep chain of nested object[] around an int. WriteValue
    // must reject it with InvalidCastException, and reject it again when the
    // call is retried. Reaching the end of the method fails the test.
    object nested = (object)1;
    int depth = 0;
    while (depth < 100000)
    {
        nested = new object[1] { nested };
        depth++;
    }
    using (XmlWriter writer = utils.CreateWriter())
    {
        writer.WriteStartElement("Root");
        try
        {
            writer.WriteValue(nested);
            CError.Compare(false, "Failed1");
        }
        catch (InvalidCastException firstError)
        {
            CError.WriteLine(firstError);
            try
            {
                writer.WriteValue(nested);
                CError.Compare(false, "Failed1");
            }
            catch (InvalidOperationException)
            {
                CError.WriteLine(firstError.Message);
                return;
            }
            catch (InvalidCastException)
            {
                CError.WriteLine(firstError.Message);
                return;
            }
        }
    }
    Assert.True(false);
}
[Theory]
[XmlWriterInlineData]
public void PassingArrayWithNullOrEmptyItemsCausesWriteValueToFail(XmlWriterUtils utils)
{
    // Array handed to WriteValue contains nulls (indices 2 and 3) and an empty
    // string; the expected serialized form is pinned by `expected` below.
    string[] values = { "a", "a1", null, null, "" };
    string expected = "<b>a a1 </b>";
    using (XmlWriter writer = utils.CreateWriter())
    {
        writer.WriteStartElement("b");
        writer.WriteValue(values);
    }
    Assert.True(utils.CompareString(expected));
}
}
}
| |
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Reflection;
using System.Windows.Forms;
using System.IO;
using WeifenLuo.WinFormsUI.Docking;
using ATABBI.TexE.Customization;
using System.Collections.Generic;
using ICSharpCode.AvalonEdit.Search;
using System.Windows.Threading;
namespace ATABBI.TexE
{
public partial class MainForm : Form
{
// When true, the dock layout is persisted to DockPanel.config on close.
private bool m_bSaveLayout = true;
// Callback used to reconstruct dock contents when loading a saved layout.
private DeserializeDockContent m_deserializeDockContent;
// Singleton tool windows hosted in the dock panel.
private DocumentExplorer m_solutionExplorer;
private PropertyWindow m_propertyWindow;
private FindReplaceWindow m_findreplaceWindow;
private Toolbox m_toolbox;
private OutputWindow m_outputWindow;
private BibliographyList m_BibliographyList;
private GoToWindow m_gotoline;
// AvalonEdit search panel; the reference is dropped on document switch
// (see dockPanel_ActiveDocumentChanged).
private SearchPanel sp = new SearchPanel();
// The dock panel's active document, or null when none is open or the active
// content is not a TextDocument.
public TextDocument ActiveDocument
{
    get
    {
        return dockPanel.ActiveDocument as TextDocument;
    }
}
// Recently used document paths, oldest first (persisted via Settings,
// capped at 15 entries in UpdateRecent).
Queue<string> recentDocuments = new Queue<string>();
// Builds the main window: designer components, the standard tool windows,
// right-to-left state propagated from the form, and the hooks used for dock
// layout deserialization and active-document tracking.
public MainForm()
{
    InitializeComponent();
    CreateStandardControls();
    showRightToLeft.Checked = (RightToLeft == RightToLeft.Yes);
    RightToLeftLayout = showRightToLeft.Checked;
    m_solutionExplorer.RightToLeftLayout = RightToLeftLayout;
    m_deserializeDockContent = new DeserializeDockContent(GetContentFromPersistString);
    this.dockPanel.ActiveDocumentChanged += new EventHandler(dockPanel_ActiveDocumentChanged);
}
// Pushes the newly active document into every tool window so they track it.
void dockPanel_ActiveDocumentChanged(object sender, EventArgs e)
{
    this.m_solutionExplorer.UpdateContent(this.dockPanel.ActiveDocument);
    this.m_propertyWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.m_findreplaceWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.m_BibliographyList.UpdateContent(this.dockPanel.ActiveDocument);
    this.m_toolbox.UpdateContent(this.dockPanel.ActiveDocument);
    this.m_gotoline.UpdateContent(this.dockPanel.ActiveDocument);
    // NOTE(review): the SearchPanel reference is dropped without being closed
    // (the Close() call below is commented out) — confirm the old panel does
    // not leak or remain attached to the previous editor.
    sp = null;
    //sp.Dispatcher.BeginInvoke(DispatcherPriority.Inactive, (Action)sp.Reactivate);
    //if (this.sp != null && !this.sp.IsClosed)
    //    sp.Close();
}
#region Methods
// Ensures the output window is visible and docked at the bottom, then saves
// the active document and compiles it with "<cmd>.exe" (pdflatex by default).
private void Run(string cmd = "pdflatex")
{
    if (!this.m_outputWindow.Visible)
    {
        this.m_outputWindow.Visible = true;
        this.m_outputWindow.VisibleState = DockState.DockBottom;
        this.m_outputWindow.DockState = DockState.DockBottom;
    }
    if (this.m_outputWindow.Visible)
    {
        this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
        this.SaveFile();
        this.m_outputWindow.Compile(cmd + ".exe");
    }
}
// Shared save dialog (with an encoding picker) reused by Save and Save As.
SaveFileDialogWithEncoding saveFileDialog1 = new SaveFileDialogWithEncoding();
// Saves the currently active document.
private void SaveFile()
{
    SaveFile(ActiveDocument);
}
// Prompts "Save As" for the currently active document.
private void SaveFileAs()
{
    SaveFileAs(ActiveDocument);
}
/// <summary>
/// Saves the given document. When it has no file name yet, prompts with the
/// save dialog (which also selects the text encoding); the save is aborted if
/// the user cancels or picks an unsupported extension.
/// </summary>
private void SaveFile(TextDocument ad)
{
    if (ad == null)
        return;
    if (string.IsNullOrEmpty(ad.FileName))
    {
        // Available file extensions
        saveFileDialog1.Filter = "tex files (*.tex)|*.tex|bibtex files (*.bib)|*.bib|txt files (*.txt)|*.txt|All files (*.*)|*.*";
        // BUGFIX: a cancelled dialog previously fell through and still assigned
        // an empty file name and saved the document. Abort on anything but OK.
        if (saveFileDialog1.ShowDialog() != DialogResult.OK)
            return;
        // Only .txt/.tex/.bib are supported.
        switch (Path.GetExtension(saveFileDialog1.FileName).ToLower())
        {
            case ".txt":
            case ".tex":
            case ".bib":
                break;
            default:
                MessageBox.Show(this, "Unsupported file format was specified", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
        }
        // Apply the encoding chosen in the dialog (ANSI/system default otherwise).
        ad.txtEditor.Encoding = System.Text.Encoding.Default;
        if (saveFileDialog1.EncodingType == EncodingType.Unicode)
            ad.txtEditor.Encoding = System.Text.Encoding.Unicode;
        if (saveFileDialog1.EncodingType == EncodingType.UTF8)
            ad.txtEditor.Encoding = new System.Text.UTF8Encoding(false);
        ad.FileName = saveFileDialog1.FileName;
    }
    this.UpdateRecent(ad.FileName);
    // BUGFIX: save the document that was passed in, not the active one —
    // SaveAll routes non-active documents through this method.
    ad.SaveFile();
}
/// <summary>
/// Prompts for a new file name and encoding, then saves the document there.
/// Does nothing when the user cancels the dialog.
/// </summary>
private void SaveFileAs(TextDocument ad)
{
    if (ad == null)
        return;
    // Available file extensions
    saveFileDialog1.Filter = "tex files (*.tex)|*.tex|bibtex files (*.bib)|*.bib|txt files (*.txt)|*.txt|All files (*.*)|*.*";
    // BUGFIX: a cancelled dialog previously fell through and re-saved the
    // document under its old name; "Save As" must do nothing on cancel.
    if (saveFileDialog1.ShowDialog() != DialogResult.OK)
        return;
    // Only .txt/.tex/.bib are supported.
    switch (Path.GetExtension(saveFileDialog1.FileName).ToLower())
    {
        case ".txt":
        case ".tex":
        case ".bib":
            break;
        default:
            // BUGFIX: the message previously said "Unsupported image format"
            // (copy-paste from image-handling code).
            MessageBox.Show(this, "Unsupported file format was specified", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            return;
    }
    ad.FileName = saveFileDialog1.FileName;
    // Apply the encoding chosen in the dialog (ANSI/system default otherwise).
    ad.txtEditor.Encoding = System.Text.Encoding.Default;
    if (saveFileDialog1.EncodingType == EncodingType.Unicode)
        ad.txtEditor.Encoding = System.Text.Encoding.Unicode;
    if (saveFileDialog1.EncodingType == EncodingType.UTF8)
        ad.txtEditor.Encoding = new System.Text.UTF8Encoding(false);
    this.UpdateRecent(ad.FileName);
    ad.SaveFile();
}
// Saves every open document; documents without a file name yet go through
// the Save As prompt.
private void SaveAll()
{
    foreach (IDockContent content in dockPanel.Documents)
    {
        TextDocument doc = content as TextDocument;
        if (string.IsNullOrEmpty(doc.FileName))
            this.SaveFileAs(doc);
        else
            this.SaveFile(doc);
    }
}
/// <summary>
/// Opens a document, either from the supplied path or via an Open dialog.
/// If the document is already open, the dialog path shows a message while the
/// programmatic path returns silently.
/// </summary>
public void Open(string defaultFileName = "")
{
    string fullName = "";
    string fileName = "";
    if (!string.IsNullOrEmpty(defaultFileName))
    {
        fullName = defaultFileName;
        fileName = Path.GetFileName(fullName);
        if (FindDocument(fileName) != null)
        {
            // Already open: the programmatic path stays silent.
            return;
        }
    }
    else
    {
        OpenFileDialog openFile = new OpenFileDialog();
        openFile.InitialDirectory = Application.ExecutablePath;
        openFile.Filter = "tex files (*.tex)|*.tex|bibtex files (*.bib)|*.bib|txt files (*.txt)|*.txt|All files (*.*)|*.*";
        openFile.FilterIndex = 1;
        openFile.RestoreDirectory = true;
        if (openFile.ShowDialog() == DialogResult.OK)
        {
            fullName = openFile.FileName;
            fileName = Path.GetFileName(fullName);
            if (FindDocument(fileName) != null)
            {
                MessageBox.Show("The document: " + fileName + " has already opened!");
                return;
            }
        }
    }
    if (string.IsNullOrEmpty(fullName))
        return;
    TextDocument dummyDoc = new TextDocument();
    dummyDoc.Text = fileName;
    try
    {
        dummyDoc.FileName = fullName;
        dummyDoc.LoadFile();
        this.UpdateRecent(fullName);
    }
    catch (Exception exception)
    {
        dummyDoc.Close();
        MessageBox.Show(exception.Message);
        // BUGFIX: previously execution fell through and tried to Show() the
        // closed (disposed) document, which throws ObjectDisposedException.
        return;
    }
    if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
    {
        dummyDoc.MdiParent = this;
        dummyDoc.Show();
    }
    else
    {
        dummyDoc.Show(dockPanel);
    }
}
// Rebuilds the recent-documents menu from the persisted settings and, when a
// file name is supplied, appends it (capped at 15 entries, "|,|"-separated).
private void UpdateRecent(string fileName = "")
{
    recentDocuments.Clear();
    recentDocumentsToolStripMenuItem.DropDownItems.Clear();
    string[] stored = ATABBI.TexE.Properties.Settings.Default.RecentDocuments.ToString().Split(new string[] { "|,|" }, StringSplitOptions.RemoveEmptyEntries);
    foreach (string path in stored)
    {
        recentDocumentsToolStripMenuItem.DropDownItems.Add(path, null, recent_Item_Click);
        if (!recentDocuments.Contains(path))
            recentDocuments.Enqueue(path);
    }
    if (string.IsNullOrEmpty(fileName))
        return;
    if (!recentDocuments.Contains(fileName))
        recentDocuments.Enqueue(fileName);
    if (recentDocuments.Count > 15)
        recentDocuments.Dequeue();
    string joined = string.Join("|,|", recentDocuments.ToArray());
    ATABBI.TexE.Properties.Settings.Default.RecentDocuments = joined;
    ATABBI.TexE.Properties.Settings.Default.Save();
}
// Locates an open document by its caption/tab text; returns null when no
// match exists.
private IDockContent FindDocument(string text)
{
    if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
    {
        // System MDI: documents are plain MDI child forms.
        foreach (Form child in MdiChildren)
        {
            if (child.Text == text)
                return child as IDockContent;
        }
    }
    else
    {
        foreach (IDockContent content in dockPanel.Documents)
        {
            if (content.DockHandler.TabText == text)
                return content;
        }
    }
    return null;
}
// Creates a new untitled document captioned "DocumentN", with N chosen so
// the caption does not clash with any open document.
private TextDocument CreateNewDocument()
{
    int index = 1;
    string title = "Document" + index.ToString();
    while (FindDocument(title) != null)
    {
        index++;
        title = "Document" + index.ToString();
    }
    TextDocument doc = new TextDocument();
    doc.Text = title;
    return doc;
}
// Creates a new document with an explicit caption.
private TextDocument CreateNewDocument(string text)
{
    return new TextDocument { Text = text };
}
// Closes every open document (with per-document save prompts in dock mode).
private void CloseAllDocuments()
{
    if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
    {
        // System MDI: children are plain forms, close them directly.
        foreach (Form child in MdiChildren)
            child.Close();
    }
    else
    {
        // DocumentsToArray() snapshots the list, so closing while iterating
        // is safe.
        foreach (IDockContent document in dockPanel.DocumentsToArray())
            this.CloseDocument(document);
    }
}
// Closes one document; when it has unsaved edits, asks the user first.
// Cancel leaves the document open.
private void CloseDocument(IDockContent document)
{
    TextDocument doc = document as TextDocument;
    if (!doc.txtEditor.IsModified)
    {
        document.DockHandler.Close();
        return;
    }
    DialogResult choice = MessageBox.Show("Save changes to file: " + doc.FileName, "Confirm", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Question);
    if (choice == DialogResult.Yes)
    {
        this.SaveFile(doc);
        document.DockHandler.Close();
    }
    else if (choice == DialogResult.No)
    {
        document.DockHandler.Close();
    }
}
// Maps a persisted layout string back to the corresponding singleton tool
// window, or reconstructs a TextDocument from its "type,fileName,caption"
// persist string. Returns null for unrecognized strings.
private IDockContent GetContentFromPersistString(string persistString)
{
    if (persistString == typeof(DocumentExplorer).ToString())
        return m_solutionExplorer;
    else if (persistString == typeof(PropertyWindow).ToString())
        return m_propertyWindow;
    else if (persistString == typeof(Toolbox).ToString())
        return m_toolbox;
    else if (persistString == typeof(OutputWindow).ToString())
        return m_outputWindow;
    else if (persistString == typeof(BibliographyList).ToString())
        return m_BibliographyList;
    else if (persistString == typeof(FindReplaceWindow).ToString())
        return m_findreplaceWindow;
    else if (persistString == typeof(GoToWindow).ToString())
        return m_gotoline;
    else
    {
        // TextDoc overrides GetPersistString to add extra information into persistString.
        // Any DockContent may override this value to add any needed information for deserialization.
        string[] parsedStrings = persistString.Split(new char[] { ',' });
        if (parsedStrings.Length != 3)
            return null;
        if (parsedStrings[0] != typeof(TextDocument).ToString())
            return null;
        TextDocument dummyDoc = new TextDocument();
        // Second field: file path (reload content when present).
        if (parsedStrings[1] != string.Empty)
        {
            dummyDoc.FileName = parsedStrings[1];
            dummyDoc.LoadFile();
        }
        // Third field: window caption.
        if (parsedStrings[2] != string.Empty)
            dummyDoc.Text = parsedStrings[2];
        return dummyDoc;
    }
}
// Detaches every tool window from the dock panel (set DockPanel to null
// rather than closing, so reshowing them does not create new instances),
// then closes all document windows.
private void CloseAllContents()
{
    // we don't want to create another instance of tool window, set DockPanel to null
    m_solutionExplorer.DockPanel = null;
    m_propertyWindow.DockPanel = null;
    m_toolbox.DockPanel = null;
    m_outputWindow.DockPanel = null;
    m_BibliographyList.DockPanel = null;
    // BUGFIX: the find/replace window was the only tool window not detached
    // here (every other window listed in GetContentFromPersistString was).
    m_findreplaceWindow.DockPanel = null;
    m_gotoline.DockPanel = null;
    // Close all other document windows
    CloseAllDocuments();
}
// Menu handler: switches the dock panel's visual schema (VS2005/VS2003).
// All contents are closed before the schema is applied; the menu check marks
// mirror the selection.
private void SetSchema(object sender, System.EventArgs e)
{
    CloseAllContents();
    if (sender == menuItemSchemaVS2005)
        Extender.SetSchema(dockPanel, Extender.Schema.VS2005);
    else if (sender == menuItemSchemaVS2003)
        Extender.SetSchema(dockPanel, Extender.Schema.VS2003);
    menuItemSchemaVS2005.Checked = (sender == menuItemSchemaVS2005);
    menuItemSchemaVS2003.Checked = (sender == menuItemSchemaVS2003);
}
// Menu handler: switches the document hosting style (DockingMdi,
// DockingWindow, DockingSdi, SystemMdi). Moving into or out of SystemMdi
// requires closing all documents first; menu state is updated to match.
private void SetDocumentStyle(object sender, System.EventArgs e)
{
    DocumentStyle oldStyle = dockPanel.DocumentStyle;
    DocumentStyle newStyle;
    if (sender == menuItemDockingMdi)
        newStyle = DocumentStyle.DockingMdi;
    else if (sender == menuItemDockingWindow)
        newStyle = DocumentStyle.DockingWindow;
    else if (sender == menuItemDockingSdi)
        newStyle = DocumentStyle.DockingSdi;
    else
        newStyle = DocumentStyle.SystemMdi;
    if (oldStyle == newStyle)
        return;
    // Documents cannot survive a transition into or out of System MDI.
    if (oldStyle == DocumentStyle.SystemMdi || newStyle == DocumentStyle.SystemMdi)
        CloseAllDocuments();
    dockPanel.DocumentStyle = newStyle;
    menuItemDockingMdi.Checked = (newStyle == DocumentStyle.DockingMdi);
    menuItemDockingWindow.Checked = (newStyle == DocumentStyle.DockingWindow);
    menuItemDockingSdi.Checked = (newStyle == DocumentStyle.DockingSdi);
    menuItemSystemMdi.Checked = (newStyle == DocumentStyle.SystemMdi);
    // Layout save/load is unavailable under System MDI.
    menuItemLayoutByCode.Enabled = (newStyle != DocumentStyle.SystemMdi);
    menuItemLayoutByXml.Enabled = (newStyle != DocumentStyle.SystemMdi);
    toolBarButtonLayoutByCode.Enabled = (newStyle != DocumentStyle.SystemMdi);
    toolBarButtonLayoutByXml.Enabled = (newStyle != DocumentStyle.SystemMdi);
}
// Applies a demo custom skin to the dock panel when isChecked is true, or
// resets to the default skin otherwise, then reloads the layout from XML so
// the skin takes effect.
private void SetDockPanelSkinOptions(bool isChecked)
{
    if (isChecked)
    {
        // All of these options may be set in the designer.
        // This is not a complete list of possible options available in the skin.
        AutoHideStripSkin autoHideSkin = new AutoHideStripSkin();
        autoHideSkin.DockStripGradient.StartColor = Color.AliceBlue;
        autoHideSkin.DockStripGradient.EndColor = Color.Blue;
        autoHideSkin.DockStripGradient.LinearGradientMode = System.Drawing.Drawing2D.LinearGradientMode.ForwardDiagonal;
        autoHideSkin.TabGradient.StartColor = SystemColors.Control;
        autoHideSkin.TabGradient.EndColor = SystemColors.ControlDark;
        autoHideSkin.TabGradient.TextColor = SystemColors.ControlText;
        autoHideSkin.TextFont = new Font("Showcard Gothic", 10);
        dockPanel.Skin.AutoHideStripSkin = autoHideSkin;
        DockPaneStripSkin dockPaneSkin = new DockPaneStripSkin();
        dockPaneSkin.DocumentGradient.DockStripGradient.StartColor = Color.Red;
        dockPaneSkin.DocumentGradient.DockStripGradient.EndColor = Color.Pink;
        dockPaneSkin.DocumentGradient.ActiveTabGradient.StartColor = Color.Green;
        dockPaneSkin.DocumentGradient.ActiveTabGradient.EndColor = Color.Green;
        dockPaneSkin.DocumentGradient.ActiveTabGradient.TextColor = Color.White;
        dockPaneSkin.DocumentGradient.InactiveTabGradient.StartColor = Color.Gray;
        dockPaneSkin.DocumentGradient.InactiveTabGradient.EndColor = Color.Gray;
        dockPaneSkin.DocumentGradient.InactiveTabGradient.TextColor = Color.Black;
        dockPaneSkin.TextFont = new Font("SketchFlow Print", 10);
        dockPanel.Skin.DockPaneStripSkin = dockPaneSkin;
    }
    else
    {
        dockPanel.Skin = new DockPanelSkin();
    }
    // Reload the layout so the new skin is applied to existing windows.
    menuItemLayoutByXml_Click(menuItemLayoutByXml, EventArgs.Empty);
}
#endregion
#region Event Handlers
// Restores the persisted dock layout (when present) and fills the
// recent-files menu.
private void MainForm_Load(object sender, System.EventArgs e)
{
    string layoutPath = Path.Combine(Path.GetDirectoryName(Application.ExecutablePath), "DockPanel.config");
    if (File.Exists(layoutPath))
        dockPanel.LoadFromXml(layoutPath, m_deserializeDockContent);
    UpdateRecent();
}
// A recent-documents menu item was clicked; its text is the full file path.
void recent_Item_Click(object sender, EventArgs e)
{
    Open(((ToolStripDropDownItem)sender).Text);
}
// Persists the dock layout on exit, or removes a stale layout file when
// layout saving is disabled.
private void MainForm_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    string layoutPath = Path.Combine(Path.GetDirectoryName(Application.ExecutablePath), "DockPanel.config");
    if (m_bSaveLayout)
        dockPanel.SaveAsXml(layoutPath);
    else if (File.Exists(layoutPath))
        File.Delete(layoutPath);
}
// File > Exit: close the main window.
private void menuItemExit_Click(object sender, System.EventArgs e)
{
    Close();
}
// View menu handlers: show the corresponding tool window in the dock panel.
private void menuItemSolutionExplorer_Click(object sender, System.EventArgs e)
{
    m_solutionExplorer.Show(dockPanel);
}
private void menuItemPropertyWindow_Click(object sender, System.EventArgs e)
{
    m_propertyWindow.Show(dockPanel);
}
private void menuItemToolbox_Click(object sender, System.EventArgs e)
{
    m_toolbox.Show(dockPanel);
}
private void menuItemOutputWindow_Click(object sender, System.EventArgs e)
{
    m_outputWindow.Show(dockPanel);
}
private void menuItemBibliographyList_Click(object sender, System.EventArgs e)
{
    m_BibliographyList.Show(dockPanel);
}
// Help > About: show the modal about dialog.
private void menuItemAbout_Click(object sender, System.EventArgs e)
{
    AboutDialog aboutDialog = new AboutDialog();
    aboutDialog.ShowDialog(this);
}
// File > New: create and show a fresh untitled document.
private void menuItemNew_Click(object sender, System.EventArgs e)
{
    TextDocument dummyDoc = CreateNewDocument();
    // BUGFIX: the MDI-parent path belongs to SystemMdi — as in Open(),
    // FindDocument() and CloseAllDocuments() — not to DockingWindow; the old
    // check showed MDI children in the wrong document style.
    if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
    {
        dummyDoc.MdiParent = this;
        dummyDoc.Show();
    }
    else
        dummyDoc.Show(dockPanel);
}
// File > Open: open a document through the Open dialog.
private void menuItemOpen_Click(object sender, System.EventArgs e)
{
    this.Open();
}
// File menu opening: enable/disable the Close items depending on whether any
// document is currently open.
private void menuItemFile_Popup(object sender, System.EventArgs e)
{
    if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
    {
        menuItemClose.Enabled = menuItemCloseAll.Enabled = menuItemCloseAllButThisOne.Enabled = (ActiveMdiChild != null);
    }
    else
    {
        menuItemClose.Enabled = (dockPanel.ActiveDocument != null);
        menuItemCloseAll.Enabled = menuItemCloseAllButThisOne.Enabled = (dockPanel.DocumentsCount > 0);
    }
}
// File > Close: close the active document (CloseDocument prompts when it has
// unsaved edits).
private void menuItemClose_Click(object sender, System.EventArgs e)
{
    if (dockPanel.ActiveDocument == null)
        return;
    CloseDocument(ActiveDocument);
}
private void menuItemCloseAll_Click(object sender, System.EventArgs e)
{
CloseAllDocuments();
}
private void menuItemToolBar_Click(object sender, System.EventArgs e)
{
toolBar.Visible = menuItemToolBar.Checked = !menuItemToolBar.Checked;
}
// View > Status Bar: toggles both the menu check mark and the status-bar visibility.
private void menuItemStatusBar_Click(object sender, System.EventArgs e)
{
    statusBar.Visible = menuItemStatusBar.Checked = !menuItemStatusBar.Checked;
}
// Central toolbar dispatcher: maps each clicked toolbar button to the matching
// menu handler (handlers tolerate null sender/args) or performs the action inline.
private void toolBar_ButtonClick(object sender, System.Windows.Forms.ToolStripItemClickedEventArgs e)
{
    if (e.ClickedItem == toolBarButtonNew)
        menuItemNew_Click(null, null);
    else if (e.ClickedItem == toolBarButtonOpen)
        menuItemOpen_Click(null, null);
    else if (e.ClickedItem == toolBarButtonSolutionExplorer)
        menuItemSolutionExplorer_Click(null, null);
    else if (e.ClickedItem == toolBarButtonPropertyWindow)
        menuItemPropertyWindow_Click(null, null);
    else if (e.ClickedItem == toolBarButtonToolbox)
        menuItemToolbox_Click(null, null);
    else if (e.ClickedItem == toolBarButtonOutputWindow)
        menuItemOutputWindow_Click(null, null);
    else if (e.ClickedItem == toolBarButtonBibliographyList)
        menuItemBibliographyList_Click(null, null);
    else if (e.ClickedItem == toolBarButtonLayoutByCode)
        // menuItemLayoutByCode_Click(null, null);
        MessageBox.Show("No Implementation");       // layout-by-code is disabled
    else if (e.ClickedItem == toolBarButtonLayoutByXml)
        MessageBox.Show("No Implementation");       // layout-by-XML is disabled
    // menuItemLayoutByXml_Click(null, null);
    else if (e.ClickedItem == toolBarButtonSave)
        saveToolStripMenuItem_Click(null, null);
    else if (e.ClickedItem == toolStripButtonSaveAll)
        saveAllToolStripMenuItem_Click(null, null);
    else if (e.ClickedItem == toolStripButtonQuickFind)
        this.quickSearchToolStripMenuItem_Click(null, null);
    else if (e.ClickedItem == toolStripButtonClose)
        this.menuItemClose_Click(null, null);
    //else if (e.ClickedItem == toolStripButtonRun)
    //{
    //    if (!this.m_outputWindow.Visible)
    //    {
    //        this.m_outputWindow.DockState = DockState.DockBottom;
    //        this.m_outputWindow.Visible = true;
    //    }
    //    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    //    this.SaveFile();
    //    this.m_outputWindow.Compile();
    //}
    else if (e.ClickedItem == toolStripButtonViewPDF)
    {
        // Sync the output window with the active document before viewing the PDF.
        this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
        this.m_outputWindow.ViewPDF();
    }
    else if (e.ClickedItem == toolBarButtonSpellCheck)
    {
        this.ActiveDocument.SpellCheck();
    }
}
// Window > New Window: opens a second, independent main window with a marker title.
private void menuItemNewWindow_Click(object sender, System.EventArgs e)
{
    MainForm newWindow = new MainForm();
    newWindow.Text = newWindow.Text + " - New";
    newWindow.Show();
}
// Syncs the "Lock Layout" check mark with the dock panel state when the Tools menu opens.
private void menuItemTools_Popup(object sender, System.EventArgs e)
{
    menuItemLockLayout.Checked = !this.dockPanel.AllowEndUserDocking;
}
// Tools > Lock Layout: toggles whether the user can drag dock windows around.
private void menuItemLockLayout_Click(object sender, System.EventArgs e)
{
    dockPanel.AllowEndUserDocking = !dockPanel.AllowEndUserDocking;
}
// Intentionally disabled: returns immediately. The region below preserves the
// original sample code that rebuilt the dock layout programmatically.
private void menuItemLayoutByCode_Click(object sender, System.EventArgs e)
{
    return;
    #region MyRegion
    //dockPanel.SuspendLayout(true);
    //CloseAllDocuments();
    //CreateStandardControls();
    //m_solutionExplorer.Show(dockPanel, DockState.DockRight);
    //m_propertyWindow.Show(m_solutionExplorer.Pane, m_solutionExplorer);
    //m_toolbox.Show(dockPanel, new Rectangle(98, 133, 200, 383));
    //m_outputWindow.Show(m_solutionExplorer.Pane, DockAlignment.Bottom, 0.35);
    //m_BibliographyList.Show(m_toolbox.Pane, DockAlignment.Left, 0.4);
    //TextDocument doc1 = CreateNewDocument("Document1");
    //TextDocument doc2 = CreateNewDocument("Document2");
    //TextDocument doc3 = CreateNewDocument("Document3");
    //TextDocument doc4 = CreateNewDocument("Document4");
    //doc1.Show(dockPanel, DockState.Document);
    //doc2.Show(doc1.Pane, null);
    //doc3.Show(doc1.Pane, DockAlignment.Bottom, 0.5);
    //doc4.Show(doc3.Pane, DockAlignment.Right, 0.5);
    //dockPanel.ResumeLayout(true, true);
    #endregion
}
// Instantiates fresh copies of every standard tool window. Called at startup
// and whenever the layout is rebuilt (all contents closed first).
private void CreateStandardControls()
{
    m_solutionExplorer = new DocumentExplorer();
    m_propertyWindow = new PropertyWindow();
    m_toolbox = new Toolbox();
    m_outputWindow = new OutputWindow();
    m_BibliographyList = new BibliographyList();
    m_findreplaceWindow = new FindReplaceWindow();
    m_gotoline = new GoToWindow();
}
// Intentionally disabled: returns immediately. The region below preserves the
// original sample code that restored the dock layout from an embedded XML resource.
// NOTE(review): everything after 'return' is unreachable (compiler warning).
private void menuItemLayoutByXml_Click(object sender, System.EventArgs e)
{
    return;
    #region MyRegion
    dockPanel.SuspendLayout(true);
    // In order to load layout from XML, we need to close all the DockContents
    CloseAllContents();
    CreateStandardControls();
    Assembly assembly = Assembly.GetAssembly(typeof(MainForm));
    Stream xmlStream = assembly.GetManifestResourceStream("ATABBI.TexE.Resources.DockPanel.xml");
    dockPanel.LoadFromXml(xmlStream, m_deserializeDockContent);
    xmlStream.Close();
    dockPanel.ResumeLayout(true, true);
    #endregion
}
// File > Close All But This: closes every document except the active one.
// In System MDI mode that means every MDI child other than the active child;
// in docking mode, every dock document that is not currently activated.
private void menuItemCloseAllButThisOne_Click(object sender, System.EventArgs e)
{
    if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
    {
        Form current = ActiveMdiChild;
        foreach (Form child in MdiChildren)
        {
            if (child == current)
                continue;
            child.Close();
        }
    }
    else
    {
        // DocumentsToArray() snapshots the list, so closing while iterating is safe.
        foreach (IDockContent content in dockPanel.DocumentsToArray())
        {
            if (content.DockHandler.IsActivated)
                continue;
            this.CloseDocument(content);
        }
    }
}
// View > Show Document Icon: toggles the check mark and the dock panel's tab icons.
private void menuItemShowDocumentIcon_Click(object sender, System.EventArgs e)
{
    dockPanel.ShowDocumentIcon = menuItemShowDocumentIcon.Checked = !menuItemShowDocumentIcon.Checked;
}
// Toggles the form between left-to-right and right-to-left layout. All dock
// contents must be closed before the flip.
// NOTE(review): the branch reads inverted — when the item is already checked
// it switches to LTR, otherwise to RTL — and only then is the check mark
// toggled. Looks deliberate (check reflects the PREVIOUS state at entry),
// but confirm against the menu's initial Checked value.
private void showRightToLeft_Click(object sender, EventArgs e)
{
    CloseAllContents();
    if (showRightToLeft.Checked)
    {
        this.RightToLeft = RightToLeft.No;
        this.RightToLeftLayout = false;
    }
    else
    {
        this.RightToLeft = RightToLeft.Yes;
        this.RightToLeftLayout = true;
    }
    m_solutionExplorer.RightToLeftLayout = this.RightToLeftLayout;
    showRightToLeft.Checked = !showRightToLeft.Checked;
}
// Exits while suppressing the dock-layout save that normally happens on close.
// The flag is restored afterwards in case Close() was cancelled.
private void exitWithoutSavingLayout_Click(object sender, EventArgs e)
{
    m_bSaveLayout = false;
    Close();
    m_bSaveLayout = true;
}
// File > Save: syncs the output window with the active document, then saves it.
private void saveToolStripMenuItem_Click(object sender, EventArgs e)
{
    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.SaveFile();
}
// Edit > Find and Replace: docks/shows the find-replace tool window.
private void findAndReplaceToolStripMenuItem_Click(object sender, EventArgs e)
{
    m_findreplaceWindow.Show(dockPanel);
}
// Build > Compile: syncs the output window, saves the active document, then compiles it.
private void compileToolStripMenuItem_Click(object sender, EventArgs e)
{
    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.SaveFile();
    this.m_outputWindow.Compile();
}
#endregion
// File > Save As: syncs the output window, then saves the active document under a new name.
private void saveAsToolStripMenuItem_Click(object sender, EventArgs e)
{
    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.SaveFileAs();
}
// File > Save All: syncs the output window, then saves every open document.
private void saveAllToolStripMenuItem_Click(object sender, EventArgs e)
{
    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.SaveAll();
}
// Edit > Go To Line: docks/shows the go-to-line tool window.
private void goToLineToolStripMenuItem_Click(object sender, EventArgs e)
{
    // m_gotoline.DockState = DockState.Float;
    m_gotoline.Show(dockPanel);
}
// Edit > Quick Find: attaches a fresh (non-regex) search panel to the active
// editor's text area and focuses it. Reactivate is dispatched so focus lands
// after the WPF input queue settles. No-op when no document is active.
private void quickSearchToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (this.ActiveDocument != null)
    {
        sp = new SearchPanel();
        sp.UseRegex = false;
        sp.Attach(this.ActiveDocument.txtEditor.TextArea);
        sp.Open();
        sp.Dispatcher.BeginInvoke(DispatcherPriority.Input, (Action)sp.Reactivate);
    }
}
// Intentionally empty FormClosed handler (wired up by the designer).
private void MainForm_FormClosed(object sender, FormClosedEventArgs e)
{
}
// Run drop-down: runs the selected target, reveals the output window if hidden,
// saves the active document and compiles using the item's text as the exe name.
private void toolStripButtonRun_DropDownItemClicked(object sender, ToolStripItemClickedEventArgs e)
{
    Run(e.ClickedItem.Text);
    if (!this.m_outputWindow.Visible)
    {
        this.m_outputWindow.DockState = DockState.DockBottom;
        this.m_outputWindow.Visible = true;
    }
    this.m_outputWindow.UpdateContent(this.dockPanel.ActiveDocument);
    this.SaveFile();
    this.m_outputWindow.Compile(e.ClickedItem.Text+ ".exe");
}
// Run button (no drop-down selection): runs with the default target.
private void toolStripButtonRun_ButtonClick(object sender, EventArgs e)
{
    Run();
}
// MiKTeX split button: resolves the clicked entry to its external MiKTeX tool
// and launches it. Unknown items are ignored.
private void toolStripSplitButtonMikTexOption_DropDownItemClicked(object sender, ToolStripItemClickedEventArgs e)
{
    string toolExe = null;
    if (e.ClickedItem == this.packageManagerToolStripMenuItem)
        toolExe = "mpm_mfc_admin.exe";       // MiKTeX package manager (admin)
    else if (e.ClickedItem == this.optionsMikToolStripMenuItem)
        toolExe = "mo.exe";                  // MiKTeX options
    else if (e.ClickedItem == this.updateWizardToolStripMenuItem)
        toolExe = "miktex-update_admin.exe"; // MiKTeX update wizard (admin)

    if (toolExe != null)
        System.Diagnostics.Process.Start(toolExe);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Media;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Content;
using System.IO;
using NodeDefinition;
using Pathfinding;
using Particles2DPipelineSample;
namespace HackPrototype
{
// Central media bag for the hacking game board: loads and owns every texture,
// font, particle system and sound the board rendering/audio code needs, and
// wraps the looping sound-effect instances behind start/stop/pause helpers.
public class HackNodeGameBoardMedia
{
    // --- Board tile and bridge textures ---
    public Texture2D NodeBoxtexture;
    public Texture2D NodeEmptytexture;
    public Texture2D BridgeNStexture;
    public Texture2D BridgeEWtexture;
    public Texture2D NodeBox_Pathedtexture;
    public Texture2D BridgeNS_Pathedtexture;
    public Texture2D BridgeEW_Pathedtexture;
    // --- Loot and weapon pickup textures ---
    public Texture2D Loot_Blue_Texture;
    public Texture2D Loot_Yellow_Texture;
    public Texture2D Loot_Black_Texture;
    public Texture2D Loot_2x_Score_Texture;
    public Texture2D Loot_4x_Score_Texture;
    public Texture2D Weapon_Multimissile_texture;
    public Texture2D Weapon_Heatseeker_texture;
    public Texture2D Weapon_Decoy_texture;
    public Texture2D Weapon_Mortar_texture;
    // --- Timing ring progress frames (empty -> complete) ---
    public Texture2D TimingRingEmpty;
    public Texture2D TimingRing1_4;
    public Texture2D TimingRing2_4;
    public Texture2D TimingRing3_4;
    public Texture2D TimingRingComplete;
    // --- Actors and effects ---
    public Texture2D ExitTexture;
    public Texture2D PlayerTexture;
    public Texture2D AITexture;
    public Texture2D ProjectileTexture;
    public Texture2D PingTexture;
    public Texture2D WeaponPingTexture;
    public Texture2D CollapserPingTexture;
    // --- AI mood indicator segments ---
    public Texture2D AIQuestionMark_Left;
    public Texture2D AIQuestionMark_Center;
    public Texture2D AIQuestionMark_Right;
    public Texture2D AIExclamationMark_Left;
    public Texture2D AIExclamationMark_Center;
    public Texture2D AIExclamationMark_Right;
    // --- Lower UI shell and target progress slices ---
    public Texture2D LowerUI_Shell;
    public Texture2D TargetSlice_0_Percent;
    public Texture2D TargetSlice_25_Percent;
    public Texture2D TargetSlice_50_Percent;
    public Texture2D TargetSlice_75_Percent;
    public Texture2D TargetSlice_100_Percent;
    public Texture2D CollapserTexture;
    public Texture2D WhiteOneByOne;
    public Texture2D GradientLeft;
    // --- Fonts ---
    public SpriteFont debugSpriteFont;
    public SpriteFont LowerUI_Score_Font;
    public SpriteFont LowerUI_Bonus_Font;
    public SpriteFont Overlay_Font;
    public SpriteFont Ticker_Font;
    public SpriteFont LootAmount_Font;
    public SpriteFont BG_Font;
    public SpriteFont Collapse_GiantNumbers_Font;
    public SpriteFont Collapse_Warning_Font;
    public SpriteFont Collapse_Node_Font;
    // --- Particle systems (registered as game components) ---
    public ParticleSystem explosion;
    public ParticleSystem binaryOneFountain;
    public ParticleEmitter binaryOneFountain_emitter;
    public ParticleSystem binaryZeroFountain;
    public ParticleEmitter binaryZeroFountain_emitter;
    public ParticleSystem playerDeathParticles;
    public ParticleSystem AIDeathParticles;
    // --- One-shot sound effects ---
    public SoundEffect ExplosionSound;
    public SoundEffect GameOverSound;
    public SoundEffect PlayerPingSound;
    public SoundEffect PlayerLockLocationSound;
    public SoundEffect HackSuccessfulSound;
    public SoundEffect AlertUpSound;
    public SoundEffect ThumpSound;
    public SoundEffect NodeRevealSound;
    public SoundEffect WhooshSound;
    public SoundEffect MessageSound;
    public SoundEffect StartExitSound;
    public SoundEffect MissileLaunchSound;
    public SoundEffect ProximityAlertSound;
    public SoundEffect MortarFallSound;
    public SoundEffect TimerTickSound;
    public SoundEffect TimerWarningSound;
    // --- Looping instances, managed by the Start*/Stop*/Pause* helpers below ---
    private SoundEffectInstance MoneyLoopSoundInstance;
    private SoundEffectInstance HackProgressSoundInstance;
    private SoundEffectInstance WarningLoopSoundInstance;
    private SoundEffectInstance MessageSoundInstance;
    private SoundEffect MoneyLoopSound;
    private SoundEffect HackProgressSound;
    // Pitch range the hack-progress loop sweeps through as hacking completes (t: 0..1).
    private const float HackProgressMinPitch = -0.5f;
    private const float HackProgressMaxPitch = 0.5f;
    // Loads every asset through the content manager and registers the particle
    // systems with the game's component list.
    public HackNodeGameBoardMedia(Game game, ContentManager content)
    {
        NodeBoxtexture = content.Load<Texture2D>("Sprites\\nodebox");
        NodeEmptytexture = content.Load<Texture2D>("Sprites\\nodeempty");
        BridgeNStexture = content.Load<Texture2D>("Sprites\\north_south");
        BridgeEWtexture = content.Load<Texture2D>("Sprites\\east_west");
        NodeBox_Pathedtexture = content.Load<Texture2D>("Sprites\\nodebox_pathed");
        BridgeNS_Pathedtexture = content.Load<Texture2D>("Sprites\\north_south_pathed");
        BridgeEW_Pathedtexture = content.Load<Texture2D>("Sprites\\east_west_pathed");
        Loot_Blue_Texture = content.Load<Texture2D>("Sprites\\Loot\\blue_loot");
        Loot_Yellow_Texture = content.Load<Texture2D>("Sprites\\Loot\\yellow_loot");
        Loot_Black_Texture = content.Load<Texture2D>("Sprites\\Loot\\black_loot");
        Loot_2x_Score_Texture = content.Load<Texture2D>("Sprites\\Loot\\2x_score");
        Loot_4x_Score_Texture = content.Load<Texture2D>("Sprites\\Loot\\4x_score");
        Weapon_Multimissile_texture = content.Load<Texture2D>("Sprites\\weapon_multimissile");
        Weapon_Heatseeker_texture = content.Load<Texture2D>("Sprites\\weapon_heatseeker");
        Weapon_Decoy_texture = content.Load<Texture2D>("Sprites\\weapon_decoy");
        Weapon_Mortar_texture = content.Load<Texture2D>("Sprites\\weapon_mortar");
        PlayerTexture = content.Load<Texture2D>("Sprites\\player");
        AITexture = content.Load<Texture2D>("Sprites\\ai");
        ProjectileTexture = content.Load<Texture2D>("Sprites\\projectile");
        TimingRingEmpty = content.Load<Texture2D>("Sprites\\timing_ring_0");
        TimingRing1_4 = content.Load<Texture2D>("Sprites\\timing_ring_1");
        TimingRing2_4 = content.Load<Texture2D>("Sprites\\timing_ring_2");
        TimingRing3_4 = content.Load<Texture2D>("Sprites\\timing_ring_3");
        TimingRingComplete = content.Load<Texture2D>("Sprites\\timing_ring_4");
        ExitTexture = content.Load<Texture2D>("Sprites\\exit");
        PingTexture = content.Load<Texture2D>("Sprites\\ping_effect");
        WeaponPingTexture = content.Load<Texture2D>("Sprites\\weapon_ping_effect");
        CollapserPingTexture = content.Load<Texture2D>("Sprites\\collapser_ping_effect");
        AIQuestionMark_Left = content.Load<Texture2D>("Sprites\\ai_question_left");
        AIQuestionMark_Center = content.Load<Texture2D>("Sprites\\ai_question_center");
        AIQuestionMark_Right = content.Load<Texture2D>("Sprites\\ai_question_right");
        AIExclamationMark_Left = content.Load<Texture2D>("Sprites\\ai_exclaim_left");
        AIExclamationMark_Center = content.Load<Texture2D>("Sprites\\ai_exclaim_center");
        AIExclamationMark_Right = content.Load<Texture2D>("Sprites\\ai_exclaim_right");
        LowerUI_Shell = content.Load<Texture2D>("Sprites\\UI\\ui_lower_section_portrait");
        //LowerUI_BonusShell = content.Load<Texture2D>("Sprites\\UI\\ui_lower_section_bonus");
        //LowerUI_AlertShell = content.Load<Texture2D>("Sprites\\UI\\alert_level_text");
        //LowerUI_Alert_Light_Off = content.Load<Texture2D>("Sprites\\UI\\alert_level_light_off");
        //LowerUI_Alert_Light_On = content.Load<Texture2D>("Sprites\\UI\\alert_level_light_on");
        TargetSlice_0_Percent = content.Load<Texture2D>("Sprites\\UI\\target_slice_0_percent");
        TargetSlice_25_Percent = content.Load<Texture2D>("Sprites\\UI\\target_slice_25_percent");
        TargetSlice_50_Percent = content.Load<Texture2D>("Sprites\\UI\\target_slice_50_percent");
        TargetSlice_75_Percent = content.Load<Texture2D>("Sprites\\UI\\target_slice_75_percent");
        TargetSlice_100_Percent = content.Load<Texture2D>("Sprites\\UI\\target_slice_100_percent");
        CollapserTexture = content.Load<Texture2D>("Sprites\\collapser");
        WhiteOneByOne = content.Load<Texture2D>("Sprites\\white_1_1");
        GradientLeft = content.Load<Texture2D>("Sprites\\gradient_left");
        debugSpriteFont = content.Load<SpriteFont>("Fonts\\DebugUI");
        LowerUI_Bonus_Font = content.Load<SpriteFont>("Fonts\\LowerUI_Bonus");
        LowerUI_Score_Font = content.Load<SpriteFont>("Fonts\\LowerUI_Score");
        Overlay_Font = content.Load<SpriteFont>("Fonts\\OverlayFont");
        Ticker_Font = content.Load<SpriteFont>("Fonts\\TickerFont");
        LootAmount_Font = content.Load<SpriteFont>("Fonts\\LootAmountFont");
        BG_Font = content.Load<SpriteFont>("Fonts\\bgfontsheet");
        Collapse_GiantNumbers_Font = content.Load<SpriteFont>("Fonts\\GiantNumbersOnly");
        Collapse_Warning_Font = content.Load<SpriteFont>("Fonts\\WarningMoire");
        Collapse_Node_Font = content.Load<SpriteFont>("Fonts\\CollapseNodeCountFont");
        // create the particle systems and add them to the components list.
        explosion = new ParticleSystem(game, "ExplosionSettings") { DrawOrder = ParticleSystem.AdditiveDrawOrder };
        game.Components.Add(explosion);
        binaryOneFountain = new ParticleSystem(game, "BinaryOneEmitterSettings") { DrawOrder = ParticleSystem.AlphaBlendDrawOrder };
        binaryOneFountain_emitter = new ParticleEmitter(binaryOneFountain, 10.0f, new Vector2(200.0f, 200.0f));
        game.Components.Add(binaryOneFountain);
        binaryZeroFountain = new ParticleSystem(game, "BinaryZeroEmitterSettings") { DrawOrder = ParticleSystem.AlphaBlendDrawOrder };
        binaryZeroFountain_emitter = new ParticleEmitter(binaryZeroFountain, 5.0f, new Vector2(200.0f, 200.0f));
        game.Components.Add(binaryZeroFountain);
        playerDeathParticles = new ParticleSystem(game, "PlayerDeathParticleSettings") { DrawOrder = ParticleSystem.AlphaBlendDrawOrder };
        game.Components.Add(playerDeathParticles);
        AIDeathParticles = new ParticleSystem(game, "AIDeathParticleSettings") { DrawOrder = ParticleSystem.AlphaBlendDrawOrder };
        game.Components.Add(AIDeathParticles);
        //now load sounds
        ExplosionSound = content.Load<SoundEffect>("Sounds\\Explosion");
        AlertUpSound = content.Load<SoundEffect>("Sounds\\Alert_Up");
        GameOverSound = content.Load<SoundEffect>("Sounds\\Game_Over");
        PlayerPingSound = content.Load<SoundEffect>("Sounds\\Player_Ping");
        PlayerLockLocationSound = content.Load<SoundEffect>("Sounds\\Player_Move_Start");
        HackSuccessfulSound = content.Load<SoundEffect>("Sounds\\Hack_Successful");
        MoneyLoopSound = content.Load<SoundEffect>("Sounds\\Money_Loop_Short");
        HackProgressSound = content.Load<SoundEffect>("Sounds\\Hacking_Loop");
        // Looping instances are created once here and reused for the whole game.
        MoneyLoopSoundInstance = MoneyLoopSound.CreateInstance();
        MoneyLoopSoundInstance.IsLooped = true;
        HackProgressSoundInstance = HackProgressSound.CreateInstance();
        HackProgressSoundInstance.IsLooped = true;
        ThumpSound = content.Load<SoundEffect>("Sounds\\Thump");
        WhooshSound = content.Load<SoundEffect>("Sounds\\Whoosh");
        NodeRevealSound = content.Load<SoundEffect>("Sounds\\NodeReveal");
        MessageSound = content.Load<SoundEffect>("Sounds\\MorseCode");
        StartExitSound = content.Load<SoundEffect>("Sounds\\StartExit");
        MissileLaunchSound = content.Load<SoundEffect>("Sounds\\MissileLaunch");
        ProximityAlertSound = content.Load<SoundEffect>("Sounds\\HeatSeeker_2");
        MortarFallSound = content.Load<SoundEffect>("Sounds\\MortarFall");
        TimerTickSound = content.Load<SoundEffect>("Sounds\\TimerTick");
        TimerWarningSound = content.Load<SoundEffect>("Sounds\\TimerWarning");
        WarningLoopSoundInstance = TimerWarningSound.CreateInstance();
        WarningLoopSoundInstance.IsLooped = true;
        MessageSoundInstance = MessageSound.CreateInstance();
        MessageSoundInstance.IsLooped = false;
    }
    // Forwards camera pan/zoom deltas to every particle system so world-space
    // particles stay aligned with the board.
    public void UpdatePanAndZoomDeltas(Vector2 panDelta, float zoomDelta)
    {
        explosion.ApplyCameraOffsetDelta(panDelta, zoomDelta);
        binaryOneFountain.ApplyCameraOffsetDelta(panDelta, zoomDelta);
        binaryZeroFountain.ApplyCameraOffsetDelta(panDelta, zoomDelta);
        playerDeathParticles.ApplyCameraOffsetDelta(panDelta, zoomDelta);
        AIDeathParticles.ApplyCameraOffsetDelta(panDelta, zoomDelta);
    }
    public void StartWarningLoopSound()
    {
        WarningLoopSoundInstance.Play();
    }
    // Stops the warning loop only if it is actually playing or paused.
    public void StopWarningLoopSound()
    {
        if (WarningLoopSoundInstance.State == SoundState.Playing || WarningLoopSoundInstance.State == SoundState.Paused)
        {
            WarningLoopSoundInstance.Stop();
        }
    }
    // Plays the (non-looping) message sound unless it is already playing.
    public void StartMessageSound()
    {
        if (MessageSoundInstance.State != SoundState.Playing)
        {
            MessageSoundInstance.Play();
        }
    }
    public void StopMessageSound()
    {
        if (MessageSoundInstance.State == SoundState.Playing || MessageSoundInstance.State == SoundState.Paused)
        {
            MessageSoundInstance.Stop();
        }
    }
    // Starts the hack-progress loop at 0% (lowest pitch).
    public void StartHackLoopSound()
    {
        HackProgressSoundInstance.Play();
        SetHackLoopSoundAmountComplete(0);
    }
    public void StopHackLoopSound()
    {
        if (HackProgressSoundInstance.State == SoundState.Playing || HackProgressSoundInstance.State == SoundState.Paused)
        {
            HackProgressSoundInstance.Stop();
        }
    }
    // T in [0,1]: sweeps the loop's pitch from HackProgressMinPitch to HackProgressMaxPitch.
    public void SetHackLoopSoundAmountComplete(float T)
    {
        HackProgressSoundInstance.Pitch = MathHelper.Lerp(HackProgressMinPitch, HackProgressMaxPitch, T);
    }
    public void StartMoneyLoopSound()
    {
        MoneyLoopSoundInstance.Play();
    }
    public void StopMoneyLoopSound()
    {
        MoneyLoopSoundInstance.Stop();
    }
    // Pauses every looping instance that is currently playing (used on game pause).
    public void PauseAllSounds()
    {
        if (MoneyLoopSoundInstance != null && MoneyLoopSoundInstance.State == SoundState.Playing)
        {
            MoneyLoopSoundInstance.Pause();
        }
        if (HackProgressSoundInstance != null && HackProgressSoundInstance.State == SoundState.Playing)
        {
            HackProgressSoundInstance.Pause();
        }
        if (WarningLoopSoundInstance != null && WarningLoopSoundInstance.State == SoundState.Playing)
        {
            WarningLoopSoundInstance.Pause();
        }
        if (MessageSoundInstance != null && MessageSoundInstance.State == SoundState.Playing)
        {
            MessageSoundInstance.Pause();
        }
    }
    // Resumes every looping instance that PauseAllSounds left paused.
    public void UnpauseAllSounds()
    {
        if (MoneyLoopSoundInstance != null && MoneyLoopSoundInstance.State == SoundState.Paused)
        {
            MoneyLoopSoundInstance.Resume();
        }
        if (HackProgressSoundInstance != null && HackProgressSoundInstance.State == SoundState.Paused)
        {
            HackProgressSoundInstance.Resume();
        }
        if (WarningLoopSoundInstance != null && WarningLoopSoundInstance.State == SoundState.Paused)
        {
            WarningLoopSoundInstance.Resume();
        }
        if (MessageSoundInstance != null && MessageSoundInstance.State == SoundState.Paused)
        {
            MessageSoundInstance.Resume();
        }
    }
}
// A UI element that blinks on/off on a fixed period, or can be forced
// permanently on/off, with optional deferred mode changes.
public class FlashingElement
{
    public enum FlashingElement_OperationType
    {
        FlashingElement_OperationType_Normal,
        FlashingElement_OperationType_StayOn,
        FlashingElement_OperationType_StayOff
    };

    FlashingElement_OperationType flashType = FlashingElement_OperationType.FlashingElement_OperationType_Normal; // active mode
    bool toggleOn;        // current on/off phase while flashing normally
    float currentTimer;   // seconds accumulated in the current phase
    float maxTimer;       // seconds per phase before toggling
    float afterTimer = 0; // countdown until a deferred mode change takes effect
    FlashingElement_OperationType afterType = FlashingElement_OperationType.FlashingElement_OperationType_Normal; // deferred mode

    public FlashingElement(float flashTime, bool startOn, FlashingElement_OperationType type)
    {
        Reset(flashTime, startOn, type);
    }

    // Restarts the element with a new flash period, initial phase, and mode.
    public void Reset(float flashTime, bool startOn, FlashingElement_OperationType type)
    {
        toggleOn = startOn;
        maxTimer = flashTime;
        currentTimer = 0;
        flashType = type;
    }

    // Advances the deferred-mode countdown and, in normal mode, the blink phase.
    public void Update(GameTime t)
    {
        float elapsed = (float)t.ElapsedGameTime.TotalSeconds;

        // Apply a pending deferred mode change once its delay runs out.
        if (afterTimer > 0)
        {
            afterTimer -= elapsed;
            if (afterTimer <= 0)
            {
                afterTimer = 0;
                flashType = afterType;
            }
        }

        // Only the normal mode actually blinks.
        if (flashType != FlashingElement_OperationType.FlashingElement_OperationType_Normal)
            return;

        currentTimer += elapsed;
        if (currentTimer >= maxTimer)
        {
            currentTimer = 0;
            toggleOn = !toggleOn;
        }
    }

    // Schedules a mode change after afterTime seconds; applies it immediately
    // when afterTime is exactly zero.
    public void ChangeToModeAfter(float afterTime, FlashingElement_OperationType typeToChangeTo)
    {
        afterTimer = afterTime;
        if (afterTimer == 0)
            flashType = typeToChangeTo;
        else
            afterType = typeToChangeTo;
    }

    // Current visibility: forced modes win, otherwise the blink phase decides.
    public bool IsOn()
    {
        switch (flashType)
        {
            case FlashingElement_OperationType.FlashingElement_OperationType_StayOn:
                return true;
            case FlashingElement_OperationType.FlashingElement_OperationType_StayOff:
                return false;
            default:
                return toggleOn;
        }
    }
}
// Simple one-shot countdown timer driven by GameTime; "alive" while the
// remaining lifetime is positive.
class HackGameTimer
{
    // Remaining lifetime in seconds.
    float lifeTimeLeft;

    public HackGameTimer(float lifeTimeSeconds)
    {
        lifeTimeLeft = lifeTimeSeconds;
    }

    // Restarts the countdown from a new lifetime.
    public void Reset(float lifeTimeSeconds)
    {
        lifeTimeLeft = lifeTimeSeconds;
    }

    // Counts down by the frame's elapsed seconds; stops changing once expired.
    public void Update(GameTime t)
    {
        if (lifeTimeLeft <= 0)
            return;
        lifeTimeLeft -= (float)t.ElapsedGameTime.TotalSeconds;
    }

    public float GetLifeTimeLeft()
    {
        return lifeTimeLeft;
    }

    public bool IsAlive()
    {
        return lifeTimeLeft > 0;
    }
}
// Base helper that linearly interpolates a scale, a color and a position, each
// with its own 0..1 parameter advanced by the derived classes. Update() folds
// the current T values into the current color/scale/position.
abstract class HackGameLerpDrawHelper
{
    //scale
    protected float initialScale;
    protected float targetScale;
    protected float currentScaleT;      // 0..1 interpolation parameter
    protected float scaleTPerSecond;    // T advance rate (1 / seconds)
    protected float currentScale;
    //color
    protected Color initialColor;
    protected Color targetColor;
    protected float currentColorT;
    protected float colorTPerSecond;
    protected Color currentColor;
    //position
    protected Vector2 initialPosition;
    protected Vector2 targetPosition;
    protected float currentPositionT;
    protected float positionTPerSecond;
    protected Vector2 currentPosition;

    // Each *Seconds argument is the time for its parameter to go from 0 to 1.
    // Delegates to Reset: the extra T zeroing there matches the field defaults,
    // so construction behavior is unchanged.
    public HackGameLerpDrawHelper(float startScale, float endScale, float scaleSeconds, Color startColor, Color endColor, float colorSeconds, Vector2 startPosition, Vector2 endPosition, float positionSeconds)
    {
        Reset(startScale, endScale, scaleSeconds, startColor, endColor, colorSeconds, startPosition, endPosition, positionSeconds);
    }

    // Re-arms all three lerps and rewinds their parameters to 0.
    public void Reset(float startScale, float endScale, float scaleSeconds, Color startColor, Color endColor, float colorSeconds, Vector2 startPosition, Vector2 endPosition, float positionSeconds)
    {
        scaleTPerSecond = 1.0f / scaleSeconds;
        colorTPerSecond = 1.0f / colorSeconds;
        positionTPerSecond = 1.0f / positionSeconds;
        initialScale = startScale;
        targetScale = endScale;
        currentScale = initialScale;
        initialColor = startColor;
        targetColor = endColor;
        currentColor = initialColor;
        initialPosition = startPosition;
        targetPosition = endPosition;
        currentPosition = startPosition;
        currentPositionT = 0;
        currentScaleT = 0;
        currentColorT = 0;
    }

    public Color CurrentColor()
    {
        return currentColor;
    }

    public float CurrentScale()
    {
        return currentScale;
    }

    public Vector2 CurrentPosition()
    {
        return currentPosition;
    }

    // Recomputes current color/position/scale from the T parameters.
    public void Update(GameTime t)
    {
        // Color channels are bytes, so the (byte) cast is correct here.
        currentColor.R = (byte)(initialColor.R + ((float)(targetColor.R - initialColor.R) * currentColorT));
        currentColor.G = (byte)(initialColor.G + ((float)(targetColor.G - initialColor.G) * currentColorT));
        currentColor.B = (byte)(initialColor.B + ((float)(targetColor.B - initialColor.B) * currentColorT));
        currentColor.A = (byte)(initialColor.A + ((float)(targetColor.A - initialColor.A) * currentColorT));
        // BUG FIX: positions were previously truncated through a (byte) cast,
        // wrapping any coordinate outside 0..255. Vector2 components are floats;
        // WorldSpaceUIElement performs the identical lerp with plain floats.
        currentPosition.X = initialPosition.X + ((targetPosition.X - initialPosition.X) * currentPositionT);
        currentPosition.Y = initialPosition.Y + ((targetPosition.Y - initialPosition.Y) * currentPositionT);
        currentScale = MathHelper.Lerp(initialScale, targetScale, currentScaleT);
    }
}
// One-way lerp helper: advances all three parameters from 0 toward 1 while a
// lifetime countdown is running, then freezes.
class HackGameForwardLerpDrawHelper : HackGameLerpDrawHelper
{
    // Remaining lifetime in seconds; parameters stop advancing once expired.
    float lifeTimeLeft;

    public HackGameForwardLerpDrawHelper(float lifeTimeSeconds, float startScale, float endScale, float scaleSeconds, Color startColor, Color endColor, float colorSeconds, Vector2 startPosition, Vector2 endPosition, float positionSeconds) :
        base(startScale, endScale, scaleSeconds, startColor, endColor, colorSeconds, startPosition, endPosition, positionSeconds)
    {
        lifeTimeLeft = lifeTimeSeconds;
    }

    // Restarts with a fresh lifetime and lerp parameters.
    public void Reset(float lifeTimeSeconds, float startScale, float endScale, float scaleSeconds, Color startColor, Color endColor, float colorSeconds, Vector2 startPosition, Vector2 endPosition, float positionSeconds)
    {
        lifeTimeLeft = lifeTimeSeconds;
        base.Reset(startScale, endScale, scaleSeconds, startColor, endColor, colorSeconds, startPosition, endPosition, positionSeconds);
    }

    public float GetLifeTimeLeft()
    {
        return lifeTimeLeft;
    }

    // Advances the three T parameters while alive, then lets the base class
    // refresh the current color/scale/position.
    // Fix: the base Update is non-virtual, so this member hides it; 'new' makes
    // the pre-existing hiding explicit and silences compiler warning CS0108
    // without changing dispatch.
    public new void Update(GameTime t)
    {
        float elapsed = (float)t.ElapsedGameTime.TotalSeconds;
        if (lifeTimeLeft > 0)
        {
            currentScaleT = AdvanceClamped(currentScaleT, scaleTPerSecond, elapsed);
            currentColorT = AdvanceClamped(currentColorT, colorTPerSecond, elapsed);
            currentPositionT = AdvanceClamped(currentPositionT, positionTPerSecond, elapsed);
            lifeTimeLeft -= elapsed;
        }
        base.Update(t);
    }

    // Advances a 0..1 interpolation parameter by rate*dt, clamped at 1.
    private static float AdvanceClamped(float current, float ratePerSecond, float dt)
    {
        if (current < 1.0f)
        {
            current += ratePerSecond * dt;
            if (current > 1.0f)
                current = 1.0f;
        }
        return current;
    }

    public bool IsAlive()
    {
        return (lifeTimeLeft > 0);
    }
}
// Ping-pong lerp helper: runs all three parameters 0 -> 1, optionally waits
// 'secondsDelay', then runs them 1 -> 0, and repeats. The same secondsT drives
// scale, color and position so they stay in lockstep.
// NOTE(review): Update hides the non-virtual base Update (CS0108); callers
// appear to invoke it through the concrete type — confirm before changing.
class HackGameReversibleLerpDrawHelper : HackGameLerpDrawHelper
{
    float timebetweenswitches = 0; // pause at each end of the ping-pong, seconds
    float delayCount = 0;          // remaining pause; > 0 means we are waiting
    bool forward = true;           // current direction (true: T rising)
    float currentT = 0;            // direction-switch clock, in raw seconds
    public HackGameReversibleLerpDrawHelper(float secondsT, float secondsDelay, float startScale, float endScale, Color startColor, Color endColor, Vector2 startPosition, Vector2 endPosition) :
        base(startScale, endScale, secondsT, startColor, endColor, secondsT, startPosition, endPosition, secondsT)
    {
        timebetweenswitches = secondsDelay;
    }
    // Re-arms the ping-pong, starting in the forward direction.
    public void Reset(float secondsT, float secondsDelay, float startScale, float endScale, Color startColor, Color endColor, Vector2 startPosition, Vector2 endPosition)
    {
        timebetweenswitches = secondsDelay;
        forward = true;
        base.Reset(startScale, endScale, secondsT, startColor, endColor, secondsT, startPosition, endPosition, secondsT);
    }
    public void Update(GameTime t)
    {
        float floatt = (float)t.ElapsedGameTime.TotalSeconds;
        //check if we need to wait a sec
        if (delayCount > 0)
        {
            // Waiting at an endpoint; flip direction once the pause expires.
            delayCount -= floatt;
            if (delayCount <= 0)
            {
                FinishSwitch();
            }
        }
        else if (forward)
        {
            //we're adding to t
            if (currentScaleT < 1.0f)
            {
                currentScaleT += scaleTPerSecond * floatt;
                if (currentScaleT > 1.0f)
                    currentScaleT = 1.0f;
            }
            if (currentColorT < 1.0f)
            {
                currentColorT += colorTPerSecond * floatt;
                if (currentColorT > 1.0f)
                    currentColorT = 1.0f;
            }
            if (currentPositionT < 1.0f)
            {
                currentPositionT += positionTPerSecond * floatt;
                if (currentPositionT > 1.0f)
                    currentPositionT = 1.0f;
            }
            // NOTE(review): currentT accumulates raw seconds and switches at 1.0
            // regardless of secondsT — direction flips after 1 second even when
            // secondsT != 1. Possibly intentional; confirm against callers.
            currentT += floatt;
            if (currentT >= 1.0f)
            {
                StartSwitch();
            }
        }
        else
        {
            //we're subtracting from t
            if (currentScaleT > 0.0f)
            {
                currentScaleT -= scaleTPerSecond * floatt;
                if (currentScaleT < 0.0f)
                    currentScaleT = 0.0f;
            }
            if (currentColorT > 0.0f)
            {
                currentColorT -= colorTPerSecond * floatt;
                if (currentColorT < 0.0f)
                    currentColorT = 0.0f;
            }
            if (currentPositionT > 0.0f)
            {
                currentPositionT -= positionTPerSecond * floatt;
                if (currentPositionT < 0.0f)
                    currentPositionT = 0.0f;
            }
            currentT -= floatt;
            if (currentT <= 0.0f)
            {
                StartSwitch();
            }
        }
        base.Update(t);
    }
    // Begins the endpoint pause; with no pause configured, flips immediately.
    private void StartSwitch()
    {
        delayCount = timebetweenswitches;
        if (timebetweenswitches <= 0)
        {
            FinishSwitch();
        }
    }
    // Reverses direction and snaps the switch clock to the matching endpoint.
    private void FinishSwitch()
    {
        delayCount = 0;
        if (forward)
        {
            forward = false;
            currentT = 1.0f;
        }
        else
        {
            forward = true;
            currentT = 0;
        }
    }
}
class WorldSpaceUIElement
{
Texture2D texture = null;
float timeBeforeStart;
float timeToLiveMax;
float timeToLiveCurrent;
float startScale = 1.0f;
float endScale = 1.0f;
float currentScale = 1.0f;
Vector2 startOffsetFromParent;
Vector2 endOffsetFromParent;
Vector2 currentOffsetFromParent;
Color startColor = Color.White;
Color endColor = Color.White;
Color currentColor = Color.White;
public WorldSpaceUIElement(Texture2D tex, float lifetimeSeconds, Vector2 offsetFromParent, float delay)
{
startOffsetFromParent = offsetFromParent;
endOffsetFromParent = offsetFromParent;
currentOffsetFromParent = offsetFromParent;
timeToLiveMax = lifetimeSeconds;
timeToLiveCurrent = lifetimeSeconds;
timeBeforeStart = delay;
texture = tex;
}
public WorldSpaceUIElement(Texture2D tex, float lifetimeSeconds, Vector2 offsetFromParent_Start, Vector2 offsetFromParent_End, Color color_Start, Color color_End, float scale_Start, float scale_End, float delay)
{
timeToLiveMax = lifetimeSeconds;
timeToLiveCurrent = lifetimeSeconds;
texture = tex;
startOffsetFromParent = offsetFromParent_Start;
endOffsetFromParent = offsetFromParent_End;
currentOffsetFromParent = offsetFromParent_Start;
startColor = color_Start;
endColor = color_End;
currentColor = color_Start;
startScale = scale_Start;
endScale = scale_End;
currentScale = scale_Start;
timeBeforeStart = delay;
}
public bool Alive()
{
if (timeToLiveCurrent > 0.0f || timeBeforeStart > 0.0f)
{
return true;
}
return false;
}
public void Kill()
{
timeToLiveCurrent = 0.0f;
}
public void UpdateState(GameTime time, HackGameBoard board)
{
if (timeBeforeStart > 0.0f)
timeBeforeStart -= (float)time.ElapsedGameTime.TotalSeconds;
if (timeBeforeStart <= 0.0f)
{
timeToLiveCurrent -= (float)time.ElapsedGameTime.TotalSeconds;
float t = timeToLiveMax != 0.0f ? 1.0f - (timeToLiveCurrent / timeToLiveMax) : 0.0f;
currentScale = startScale + ((endScale - startScale) * t);
currentColor.R = (byte)(startColor.R + ((float)(endColor.R - startColor.R) * t));
currentColor.G = (byte)(startColor.G + ((float)(endColor.G - startColor.G) * t));
currentColor.B = (byte)(startColor.B + ((float)(endColor.B - startColor.B) * t));
currentColor.A = (byte)(startColor.A + ((float)(endColor.A - startColor.A) * t));
currentOffsetFromParent.X = startOffsetFromParent.X + ((endOffsetFromParent.X - startOffsetFromParent.X) * t);
currentOffsetFromParent.Y = startOffsetFromParent.Y + ((endOffsetFromParent.Y - startOffsetFromParent.Y) * t);
}
}
/// <summary>
/// Draws the element at its parent's position plus its zoom-scaled offset,
/// centered on the texture and scaled by the current animation scale.
/// </summary>
public void DrawSelf(SpriteBatch sb, Vector2 drawPos, float zoom)
{
    // Nothing is rendered until the start delay has elapsed.
    if (timeBeforeStart > 0.0f)
        return;

    Vector2 scaledOffset = new Vector2(currentOffsetFromParent.X * zoom, currentOffsetFromParent.Y * zoom);
    // Origin at the texture's center so scaling happens about the midpoint.
    Vector2 center = new Vector2(texture.Bounds.Width / 2.0f, texture.Bounds.Height / 2.0f);
    sb.Draw(texture, drawPos + scaledOffset, null, currentColor, 0f, center, currentScale * zoom, SpriteEffects.None, 0);
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Chat.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace chat.messages {
/// <summary>Holder for reflection information generated from Chat.proto</summary>
/// <summary>Holder for reflection information generated from Chat.proto</summary>
// NOTE(review): protoc-generated code — regenerate from Chat.proto rather than
// editing by hand; comments here only describe the generated structure.
public static partial class ChatReflection {
  #region Descriptor
  /// <summary>File descriptor for Chat.proto</summary>
  public static pbr::FileDescriptor Descriptor {
    get { return descriptor; }
  }
  private static pbr::FileDescriptor descriptor;
  static ChatReflection() {
    // Base64-encoded serialized FileDescriptorProto for Chat.proto, emitted by protoc.
    byte[] descriptorData = global::System.Convert.FromBase64String(
        string.Concat(
          "CgpDaGF0LnByb3RvEghtZXNzYWdlcxoYUHJvdG8uQWN0b3IvUHJvdG9zLnBy",
          "b3RvIiUKB0Nvbm5lY3QSGgoGU2VuZGVyGAEgASgLMgouYWN0b3IuUElEIhwK",
          "CUNvbm5lY3RlZBIPCgdNZXNzYWdlGAEgASgJIi8KClNheVJlcXVlc3QSEAoI",
          "VXNlck5hbWUYASABKAkSDwoHTWVzc2FnZRgCIAEoCSIwCgtTYXlSZXNwb25z",
          "ZRIQCghVc2VyTmFtZRgBIAEoCRIPCgdNZXNzYWdlGAIgASgJIjcKC05pY2tS",
          "ZXF1ZXN0EhMKC09sZFVzZXJOYW1lGAEgASgJEhMKC05ld1VzZXJOYW1lGAIg",
          "ASgJIjgKDE5pY2tSZXNwb25zZRITCgtPbGRVc2VyTmFtZRgBIAEoCRITCgtO",
          "ZXdVc2VyTmFtZRgCIAEoCUIQqgINY2hhdC5tZXNzYWdlc2IGcHJvdG8z"));
    // Registers the descriptor with its single dependency (Protos.proto, which
    // declares actor.PID) and maps each message type to its CLR class, parser
    // and property names.
    descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
        new pbr::FileDescriptor[] { global::Proto.ProtosReflection.Descriptor, },
        new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
          new pbr::GeneratedClrTypeInfo(typeof(global::chat.messages.Connect), global::chat.messages.Connect.Parser, new[]{ "Sender" }, null, null, null),
          new pbr::GeneratedClrTypeInfo(typeof(global::chat.messages.Connected), global::chat.messages.Connected.Parser, new[]{ "Message" }, null, null, null),
          new pbr::GeneratedClrTypeInfo(typeof(global::chat.messages.SayRequest), global::chat.messages.SayRequest.Parser, new[]{ "UserName", "Message" }, null, null, null),
          new pbr::GeneratedClrTypeInfo(typeof(global::chat.messages.SayResponse), global::chat.messages.SayResponse.Parser, new[]{ "UserName", "Message" }, null, null, null),
          new pbr::GeneratedClrTypeInfo(typeof(global::chat.messages.NickRequest), global::chat.messages.NickRequest.Parser, new[]{ "OldUserName", "NewUserName" }, null, null, null),
          new pbr::GeneratedClrTypeInfo(typeof(global::chat.messages.NickResponse), global::chat.messages.NickResponse.Parser, new[]{ "OldUserName", "NewUserName" }, null, null, null)
        }));
  }
  #endregion
}
#region Messages
/// <summary>
/// Generated protobuf message with a single optional message field
/// <c>Sender</c> (field 1, an <c>actor.PID</c>). Do not edit by hand;
/// regenerate from Chat.proto.
/// </summary>
public sealed partial class Connect : pb::IMessage<Connect> {
  private static readonly pb::MessageParser<Connect> _parser = new pb::MessageParser<Connect>(() => new Connect());
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<Connect> Parser { get { return _parser; } }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::chat.messages.ChatReflection.Descriptor.MessageTypes[0]; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public Connect() {
    OnConstruction();
  }
  // Extension point for hand-written partial classes.
  partial void OnConstruction();
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public Connect(Connect other) : this() {
    // Deep copy: the nested Sender message is cloned, not aliased.
    Sender = other.sender_ != null ? other.Sender.Clone() : null;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public Connect Clone() {
    return new Connect(this);
  }
  /// <summary>Field number for the "Sender" field.</summary>
  public const int SenderFieldNumber = 1;
  private global::Proto.PID sender_;
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public global::Proto.PID Sender {
    get { return sender_; }
    set {
      sender_ = value;
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as Connect);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(Connect other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (!object.Equals(Sender, other.Sender)) return false;
    return true;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (sender_ != null) hash ^= Sender.GetHashCode();
    return hash;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    // Wire tag 10 = field 1, length-delimited; unset (null) fields are omitted.
    if (sender_ != null) {
      output.WriteRawTag(10);
      output.WriteMessage(Sender);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (sender_ != null) {
      size += 1 + pb::CodedOutputStream.ComputeMessageSize(Sender);
    }
    return size;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(Connect other) {
    if (other == null) {
      return;
    }
    // Nested messages merge field-by-field rather than being replaced wholesale.
    if (other.sender_ != null) {
      if (sender_ == null) {
        sender_ = new global::Proto.PID();
      }
      Sender.MergeFrom(other.Sender);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    // Unknown fields are skipped for forward compatibility.
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          if (sender_ == null) {
            sender_ = new global::Proto.PID();
          }
          input.ReadMessage(sender_);
          break;
        }
      }
    }
  }
}
/// <summary>
/// Generated protobuf message with a single string field <c>Message</c>
/// (field 1). Do not edit by hand; regenerate from Chat.proto.
/// </summary>
public sealed partial class Connected : pb::IMessage<Connected> {
  private static readonly pb::MessageParser<Connected> _parser = new pb::MessageParser<Connected>(() => new Connected());
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<Connected> Parser { get { return _parser; } }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::chat.messages.ChatReflection.Descriptor.MessageTypes[1]; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public Connected() {
    OnConstruction();
  }
  // Extension point for hand-written partial classes.
  partial void OnConstruction();
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public Connected(Connected other) : this() {
    message_ = other.message_;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public Connected Clone() {
    return new Connected(this);
  }
  /// <summary>Field number for the "Message" field.</summary>
  public const int MessageFieldNumber = 1;
  private string message_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string Message {
    get { return message_; }
    set {
      // proto3 strings are never null; "" is the default/unset value.
      message_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as Connected);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(Connected other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (Message != other.Message) return false;
    return true;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (Message.Length != 0) hash ^= Message.GetHashCode();
    return hash;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    // Wire tag 10 = field 1, length-delimited; default ("") values are omitted.
    if (Message.Length != 0) {
      output.WriteRawTag(10);
      output.WriteString(Message);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (Message.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(Message);
    }
    return size;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(Connected other) {
    if (other == null) {
      return;
    }
    if (other.Message.Length != 0) {
      Message = other.Message;
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    // Unknown fields are skipped for forward compatibility.
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          Message = input.ReadString();
          break;
        }
      }
    }
  }
}
/// <summary>
/// Generated protobuf message with string fields <c>UserName</c> (field 1) and
/// <c>Message</c> (field 2). Do not edit by hand; regenerate from Chat.proto.
/// </summary>
public sealed partial class SayRequest : pb::IMessage<SayRequest> {
  private static readonly pb::MessageParser<SayRequest> _parser = new pb::MessageParser<SayRequest>(() => new SayRequest());
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<SayRequest> Parser { get { return _parser; } }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::chat.messages.ChatReflection.Descriptor.MessageTypes[2]; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SayRequest() {
    OnConstruction();
  }
  // Extension point for hand-written partial classes.
  partial void OnConstruction();
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SayRequest(SayRequest other) : this() {
    userName_ = other.userName_;
    message_ = other.message_;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SayRequest Clone() {
    return new SayRequest(this);
  }
  /// <summary>Field number for the "UserName" field.</summary>
  public const int UserNameFieldNumber = 1;
  private string userName_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string UserName {
    get { return userName_; }
    set {
      // proto3 strings are never null; "" is the default/unset value.
      userName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  /// <summary>Field number for the "Message" field.</summary>
  public const int MessageFieldNumber = 2;
  private string message_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string Message {
    get { return message_; }
    set {
      message_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as SayRequest);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(SayRequest other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (UserName != other.UserName) return false;
    if (Message != other.Message) return false;
    return true;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (UserName.Length != 0) hash ^= UserName.GetHashCode();
    if (Message.Length != 0) hash ^= Message.GetHashCode();
    return hash;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    // Wire tags 10/18 = fields 1/2, length-delimited; defaults ("") are omitted.
    if (UserName.Length != 0) {
      output.WriteRawTag(10);
      output.WriteString(UserName);
    }
    if (Message.Length != 0) {
      output.WriteRawTag(18);
      output.WriteString(Message);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (UserName.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(UserName);
    }
    if (Message.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(Message);
    }
    return size;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(SayRequest other) {
    if (other == null) {
      return;
    }
    if (other.UserName.Length != 0) {
      UserName = other.UserName;
    }
    if (other.Message.Length != 0) {
      Message = other.Message;
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    // Unknown fields are skipped for forward compatibility.
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          UserName = input.ReadString();
          break;
        }
        case 18: {
          Message = input.ReadString();
          break;
        }
      }
    }
  }
}
/// <summary>
/// Generated protobuf message with string fields <c>UserName</c> (field 1) and
/// <c>Message</c> (field 2). Do not edit by hand; regenerate from Chat.proto.
/// </summary>
public sealed partial class SayResponse : pb::IMessage<SayResponse> {
  private static readonly pb::MessageParser<SayResponse> _parser = new pb::MessageParser<SayResponse>(() => new SayResponse());
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<SayResponse> Parser { get { return _parser; } }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::chat.messages.ChatReflection.Descriptor.MessageTypes[3]; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SayResponse() {
    OnConstruction();
  }
  // Extension point for hand-written partial classes.
  partial void OnConstruction();
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SayResponse(SayResponse other) : this() {
    userName_ = other.userName_;
    message_ = other.message_;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public SayResponse Clone() {
    return new SayResponse(this);
  }
  /// <summary>Field number for the "UserName" field.</summary>
  public const int UserNameFieldNumber = 1;
  private string userName_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string UserName {
    get { return userName_; }
    set {
      // proto3 strings are never null; "" is the default/unset value.
      userName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  /// <summary>Field number for the "Message" field.</summary>
  public const int MessageFieldNumber = 2;
  private string message_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string Message {
    get { return message_; }
    set {
      message_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as SayResponse);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(SayResponse other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (UserName != other.UserName) return false;
    if (Message != other.Message) return false;
    return true;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (UserName.Length != 0) hash ^= UserName.GetHashCode();
    if (Message.Length != 0) hash ^= Message.GetHashCode();
    return hash;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    // Wire tags 10/18 = fields 1/2, length-delimited; defaults ("") are omitted.
    if (UserName.Length != 0) {
      output.WriteRawTag(10);
      output.WriteString(UserName);
    }
    if (Message.Length != 0) {
      output.WriteRawTag(18);
      output.WriteString(Message);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (UserName.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(UserName);
    }
    if (Message.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(Message);
    }
    return size;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(SayResponse other) {
    if (other == null) {
      return;
    }
    if (other.UserName.Length != 0) {
      UserName = other.UserName;
    }
    if (other.Message.Length != 0) {
      Message = other.Message;
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    // Unknown fields are skipped for forward compatibility.
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          UserName = input.ReadString();
          break;
        }
        case 18: {
          Message = input.ReadString();
          break;
        }
      }
    }
  }
}
/// <summary>
/// Generated protobuf message with string fields <c>OldUserName</c> (field 1)
/// and <c>NewUserName</c> (field 2). Do not edit by hand; regenerate from Chat.proto.
/// </summary>
public sealed partial class NickRequest : pb::IMessage<NickRequest> {
  private static readonly pb::MessageParser<NickRequest> _parser = new pb::MessageParser<NickRequest>(() => new NickRequest());
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<NickRequest> Parser { get { return _parser; } }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::chat.messages.ChatReflection.Descriptor.MessageTypes[4]; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public NickRequest() {
    OnConstruction();
  }
  // Extension point for hand-written partial classes.
  partial void OnConstruction();
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public NickRequest(NickRequest other) : this() {
    oldUserName_ = other.oldUserName_;
    newUserName_ = other.newUserName_;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public NickRequest Clone() {
    return new NickRequest(this);
  }
  /// <summary>Field number for the "OldUserName" field.</summary>
  public const int OldUserNameFieldNumber = 1;
  private string oldUserName_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string OldUserName {
    get { return oldUserName_; }
    set {
      // proto3 strings are never null; "" is the default/unset value.
      oldUserName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  /// <summary>Field number for the "NewUserName" field.</summary>
  public const int NewUserNameFieldNumber = 2;
  private string newUserName_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string NewUserName {
    get { return newUserName_; }
    set {
      newUserName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as NickRequest);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(NickRequest other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (OldUserName != other.OldUserName) return false;
    if (NewUserName != other.NewUserName) return false;
    return true;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (OldUserName.Length != 0) hash ^= OldUserName.GetHashCode();
    if (NewUserName.Length != 0) hash ^= NewUserName.GetHashCode();
    return hash;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    // Wire tags 10/18 = fields 1/2, length-delimited; defaults ("") are omitted.
    if (OldUserName.Length != 0) {
      output.WriteRawTag(10);
      output.WriteString(OldUserName);
    }
    if (NewUserName.Length != 0) {
      output.WriteRawTag(18);
      output.WriteString(NewUserName);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (OldUserName.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(OldUserName);
    }
    if (NewUserName.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(NewUserName);
    }
    return size;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(NickRequest other) {
    if (other == null) {
      return;
    }
    if (other.OldUserName.Length != 0) {
      OldUserName = other.OldUserName;
    }
    if (other.NewUserName.Length != 0) {
      NewUserName = other.NewUserName;
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    // Unknown fields are skipped for forward compatibility.
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          OldUserName = input.ReadString();
          break;
        }
        case 18: {
          NewUserName = input.ReadString();
          break;
        }
      }
    }
  }
}
/// <summary>
/// Generated protobuf message with string fields <c>OldUserName</c> (field 1)
/// and <c>NewUserName</c> (field 2). Do not edit by hand; regenerate from Chat.proto.
/// </summary>
public sealed partial class NickResponse : pb::IMessage<NickResponse> {
  private static readonly pb::MessageParser<NickResponse> _parser = new pb::MessageParser<NickResponse>(() => new NickResponse());
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pb::MessageParser<NickResponse> Parser { get { return _parser; } }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public static pbr::MessageDescriptor Descriptor {
    get { return global::chat.messages.ChatReflection.Descriptor.MessageTypes[5]; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public NickResponse() {
    OnConstruction();
  }
  // Extension point for hand-written partial classes.
  partial void OnConstruction();
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public NickResponse(NickResponse other) : this() {
    oldUserName_ = other.oldUserName_;
    newUserName_ = other.newUserName_;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public NickResponse Clone() {
    return new NickResponse(this);
  }
  /// <summary>Field number for the "OldUserName" field.</summary>
  public const int OldUserNameFieldNumber = 1;
  private string oldUserName_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string OldUserName {
    get { return oldUserName_; }
    set {
      // proto3 strings are never null; "" is the default/unset value.
      oldUserName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  /// <summary>Field number for the "NewUserName" field.</summary>
  public const int NewUserNameFieldNumber = 2;
  private string newUserName_ = "";
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public string NewUserName {
    get { return newUserName_; }
    set {
      newUserName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override bool Equals(object other) {
    return Equals(other as NickResponse);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public bool Equals(NickResponse other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (OldUserName != other.OldUserName) return false;
    if (NewUserName != other.NewUserName) return false;
    return true;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override int GetHashCode() {
    int hash = 1;
    if (OldUserName.Length != 0) hash ^= OldUserName.GetHashCode();
    if (NewUserName.Length != 0) hash ^= NewUserName.GetHashCode();
    return hash;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public override string ToString() {
    return pb::JsonFormatter.ToDiagnosticString(this);
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void WriteTo(pb::CodedOutputStream output) {
    // Wire tags 10/18 = fields 1/2, length-delimited; defaults ("") are omitted.
    if (OldUserName.Length != 0) {
      output.WriteRawTag(10);
      output.WriteString(OldUserName);
    }
    if (NewUserName.Length != 0) {
      output.WriteRawTag(18);
      output.WriteString(NewUserName);
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public int CalculateSize() {
    int size = 0;
    if (OldUserName.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(OldUserName);
    }
    if (NewUserName.Length != 0) {
      size += 1 + pb::CodedOutputStream.ComputeStringSize(NewUserName);
    }
    return size;
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(NickResponse other) {
    if (other == null) {
      return;
    }
    if (other.OldUserName.Length != 0) {
      OldUserName = other.OldUserName;
    }
    if (other.NewUserName.Length != 0) {
      NewUserName = other.NewUserName;
    }
  }
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    // Unknown fields are skipped for forward compatibility.
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 10: {
          OldUserName = input.ReadString();
          break;
        }
        case 18: {
          NewUserName = input.ReadString();
          break;
        }
      }
    }
  }
}
#endregion
}
#endregion Designer generated code
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using Xunit;
namespace System.Collections.Tests
{
public static class BitArray_GetSetTests
{
private const int BitsPerByte = 8;
private const int BitsPerInt32 = 32;
/// <summary>
/// Test data: (initial default value, bit pattern to write) pairs across sizes
/// that cover byte and int32 word boundaries.
/// </summary>
public static IEnumerable<object[]> Get_Set_Data()
{
    int[] sizes = { 0, 1, BitsPerByte, BitsPerByte * 2, BitsPerInt32, BitsPerInt32 * 2 };
    foreach (int size in sizes)
    {
        foreach (bool initialValue in new[] { true, false })
        {
            // All-set, all-clear, and alternating patterns for each size.
            yield return new object[] { initialValue, Enumerable.Repeat(true, size).ToArray() };
            yield return new object[] { initialValue, Enumerable.Repeat(false, size).ToArray() };
            yield return new object[] { initialValue, Enumerable.Range(0, size).Select(i => i % 2 == 1).ToArray() };
        }
    }
}
[Theory]
[MemberData(nameof(Get_Set_Data))]
public static void Get_Set(bool def, bool[] newValues)
{
    var bits = new BitArray(newValues.Length, def);
    for (int index = 0; index < newValues.Length; index++)
    {
        bits.Set(index, newValues[index]);
        // The indexer and Get must both reflect the value just written.
        Assert.Equal(newValues[index], bits[index]);
        Assert.Equal(newValues[index], bits.Get(index));
    }
}
[Fact]
public static void Get_InvalidIndex_ThrowsArgumentOutOfRangeException()
{
    var bits = new BitArray(4);
    int length = bits.Length;

    // Both the Get method and the indexer validate the index argument.
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits.Get(-1));
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits.Get(length));
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits[-1]);
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits[length]);
}
[Fact]
public static void Set_InvalidIndex_ThrowsArgumentOutOfRangeException()
{
    var bits = new BitArray(4);
    int length = bits.Length;

    // Both the Set method and the indexer setter validate the index argument.
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits.Set(-1, true));
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits.Set(length, true));
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits[-1] = true);
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits[length] = true);
}
[Theory]
[InlineData(0, true)]
[InlineData(0, false)]
[InlineData(1, true)]
[InlineData(1, false)]
[InlineData(BitsPerByte, true)]
[InlineData(BitsPerByte, false)]
[InlineData(BitsPerByte + 1, true)]
[InlineData(BitsPerByte + 1, false)]
[InlineData(BitsPerInt32, true)]
[InlineData(BitsPerInt32, false)]
[InlineData(BitsPerInt32 + 1, true)]
[InlineData(BitsPerInt32 + 1, false)]
public static void SetAll(int size, bool defaultValue)
{
    var bits = new BitArray(size, defaultValue);

    // Verifies every bit via both the indexer and Get.
    void AssertAllEqual(bool expected)
    {
        for (int i = 0; i < bits.Length; i++)
        {
            Assert.Equal(expected, bits[i]);
            Assert.Equal(expected, bits.Get(i));
        }
    }

    // Flip every bit away from the initial value, then back again.
    bits.SetAll(!defaultValue);
    AssertAllEqual(!defaultValue);
    bits.SetAll(defaultValue);
    AssertAllEqual(defaultValue);
}
/// <summary>
/// Test data: alternating bit patterns (both phases, chosen by <c>lead</c>)
/// across sizes that straddle byte and int32 boundaries.
/// </summary>
public static IEnumerable<object[]> GetEnumerator_Data()
{
    foreach (int size in new[] { 0, 1, BitsPerByte, BitsPerByte + 1, BitsPerInt32, BitsPerInt32 + 1 })
    {
        foreach (bool lead in new[] { true, false })
        {
            bool[] pattern = new bool[size];
            for (int i = 0; i < size; i++)
            {
                // Same value the original LINQ expression produced.
                pattern[i] = lead ^ (i % 2 == 0);
            }
            yield return new object[] { pattern };
        }
    }
}
[Theory]
[MemberData(nameof(GetEnumerator_Data))]
public static void GetEnumerator(bool[] values)
{
    var bits = new BitArray(values);

    // Each call must hand out a fresh, independent enumerator.
    Assert.NotSame(bits.GetEnumerator(), bits.GetEnumerator());

    IEnumerator enumerator = bits.GetEnumerator();
    // Two passes: the second verifies Reset() rewinds the enumerator correctly.
    for (int pass = 0; pass < 2; pass++)
    {
        int position = 0;
        while (enumerator.MoveNext())
        {
            Assert.Equal(bits[position], enumerator.Current);
            position++;
        }
        Assert.Equal(bits.Length, position);
        enumerator.Reset();
    }
}
[Theory]
[InlineData(0)]
[InlineData(1)]
[InlineData(BitsPerByte)]
[InlineData(BitsPerByte + 1)]
[InlineData(BitsPerInt32)]
[InlineData(BitsPerInt32 + 1)]
public static void GetEnumerator_Invalid(int size)
{
    BitArray bitArray = new BitArray(size, true);
    IEnumerator enumerator = bitArray.GetEnumerator();
    // Current is undefined before the first MoveNext().
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    // Current is undefined again once enumeration has run off the end.
    while (enumerator.MoveNext()) ;
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    // Reset() returns to the "before first element" state, where Current throws.
    enumerator.Reset();
    Assert.Throws<InvalidOperationException>(() => enumerator.Current);
    // Mutating the collection mid-enumeration invalidates the enumerator:
    // Current still returns the previously fetched value, but MoveNext() and
    // Reset() must throw.
    if (size > 0)
    {
        enumerator.MoveNext();
        bitArray[0] = false;
        Assert.True((bool)enumerator.Current);
        Assert.Throws<InvalidOperationException>(() => enumerator.MoveNext());
        Assert.Throws<InvalidOperationException>(() => enumerator.Reset());
    }
}
/// <summary>
/// Test data: (original size, new size) pairs; original sizes additionally
/// include one large array (16384 bits), new sizes do not.
/// </summary>
public static IEnumerable<object[]> Length_Set_Data()
{
    int[] sizes = { 1, BitsPerByte, BitsPerByte + 1, BitsPerInt32, BitsPerInt32 + 1 };
    foreach (int originalSize in sizes.Concat(new[] { 16384 }))
    {
        foreach (int newSize in sizes)
        {
            yield return new object[] { originalSize, newSize };
        }
    }
}
[Theory]
[MemberData(nameof(Length_Set_Data))]
public static void Length_Set(int originalSize, int newSize)
{
    var bits = new BitArray(originalSize, true);

    bits.Length = newSize;
    Assert.Equal(newSize, bits.Length);

    // Bits carried over from the original array keep their value (true)...
    int preserved = Math.Min(originalSize, bits.Length);
    for (int i = 0; i < preserved; i++)
    {
        Assert.True(bits[i]);
        Assert.True(bits.Get(i));
    }
    // ...while any newly added bits are initialized to false.
    for (int i = originalSize; i < newSize; i++)
    {
        Assert.False(bits[i]);
        Assert.False(bits.Get(i));
    }
    // Accessing at or past the new length is out of range.
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits[newSize]);
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bits.Get(newSize));

    // Shrinking to zero and growing again yields an all-false array.
    bits.Length = 0;
    Assert.Equal(0, bits.Length);
    bits.Length = newSize;
    Assert.Equal(newSize, bits.Length);
    Assert.False(bits.Get(0));
    Assert.False(bits.Get(newSize - 1));
}
[Fact]
public static void Length_Set_InvalidLength_ThrowsArgumentOutOfRangeException()
{
    var bits = new BitArray(1);
    // Negative lengths are rejected.
    Assert.Throws<ArgumentOutOfRangeException>(() => bits.Length = -1);
}
// Each case is { source BitArray, destination array length, start index,
// expected copied window, default element value for untouched slots }.
// Destinations cover bool[] (1 bit/element), byte[] (8 bits/element) and
// int[] (32 bits/element).
public static IEnumerable<object[]> CopyTo_Array_TestData()
{
    yield return new object[] { new BitArray(0), 0, 0, new bool[0], default(bool) };
    yield return new object[] { new BitArray(0), 0, 0, new byte[0], default(byte) };
    yield return new object[] { new BitArray(0), 0, 0, new int[0], default(int) };
    foreach (int bitArraySize in new[] { 0, 1, BitsPerByte, BitsPerByte * 2, BitsPerInt32, BitsPerInt32 * 2 })
    {
        BitArray allTrue = new BitArray(Enumerable.Repeat(true, bitArraySize).ToArray());
        BitArray allFalse = new BitArray(Enumerable.Repeat(false, bitArraySize).ToArray());
        BitArray alternating = new BitArray(Enumerable.Range(0, bitArraySize).Select(i => i % 2 == 1).ToArray());
        // (destination size, start index): exact fit, oversized destination at
        // offset 0, and oversized destinations at two non-zero offsets.
        foreach (var d in new[] { Tuple.Create(bitArraySize, 0),
            Tuple.Create(bitArraySize * 2 + 1, 0),
            Tuple.Create(bitArraySize * 2 + 1, bitArraySize + 1),
            Tuple.Create(bitArraySize * 2 + 1, bitArraySize / 2 + 1) })
        {
            int arraySize = d.Item1;
            int index = d.Item2;
            yield return new object[] { allTrue, arraySize, index, Enumerable.Repeat(true, bitArraySize).ToArray(), default(bool) };
            yield return new object[] { allFalse, arraySize, index, Enumerable.Repeat(false, bitArraySize).ToArray(), default(bool) };
            yield return new object[] { alternating, arraySize, index, Enumerable.Range(0, bitArraySize).Select(i => i % 2 == 1).ToArray(), default(bool) };
            if (bitArraySize >= BitsPerByte)
            {
                // 0xaa = 10101010b: odd bit positions set, matching the alternating pattern.
                yield return new object[] { allTrue, arraySize / BitsPerByte, index / BitsPerByte, Enumerable.Repeat((byte)0xff, bitArraySize / BitsPerByte).ToArray(), default(byte) };
                yield return new object[] { allFalse, arraySize / BitsPerByte, index / BitsPerByte, Enumerable.Repeat((byte)0x00, bitArraySize / BitsPerByte).ToArray(), default(byte) };
                yield return new object[] { alternating, arraySize / BitsPerByte, index / BitsPerByte, Enumerable.Repeat((byte)0xaa, bitArraySize / BitsPerByte).ToArray(), default(byte) };
            }
            if (bitArraySize >= BitsPerInt32)
            {
                yield return new object[] { allTrue, arraySize / BitsPerInt32, index / BitsPerInt32, Enumerable.Repeat(unchecked((int)0xffffffff), bitArraySize / BitsPerInt32).ToArray(), default(int) };
                yield return new object[] { allFalse, arraySize / BitsPerInt32, index / BitsPerInt32, Enumerable.Repeat(0x00000000, bitArraySize / BitsPerInt32).ToArray(), default(int) };
                yield return new object[] { alternating, arraySize / BitsPerInt32, index / BitsPerInt32, Enumerable.Repeat(unchecked((int)0xaaaaaaaa), bitArraySize / BitsPerInt32).ToArray(), default(int) };
            }
        }
    }
}
[Theory]
[MemberData(nameof(CopyTo_Array_TestData))]
public static void CopyTo<T>(BitArray bitArray, int length, int index, T[] expected, T def)
{
    T[] destination = (T[])Array.CreateInstance(typeof(T), length);
    ICollection collection = bitArray;

    collection.CopyTo(destination, index);

    // Elements before the copy window are untouched...
    for (int i = 0; i < index; i++)
    {
        Assert.Equal(def, destination[i]);
    }
    // ...the window itself holds the expected values...
    for (int i = 0; i < expected.Length; i++)
    {
        Assert.Equal(expected[i], destination[index + i]);
    }
    // ...and elements after the window are untouched as well.
    for (int i = index + expected.Length; i < destination.Length; i++)
    {
        Assert.Equal(def, destination[i]);
    }
}
[Fact]
public static void CopyTo_Type_Invalid()
{
    // ICollection.CopyTo on BitArray rejects: a null destination, a destination
    // whose element type is not bool/byte/int, and a multi-dimensional array.
    ICollection collection = new BitArray(10);

    Assert.Throws<ArgumentNullException>("array", () => collection.CopyTo(null, 0));
    Assert.Throws<ArgumentException>("array", () => collection.CopyTo(new long[10], 0));
    Assert.Throws<ArgumentException>("array", () => collection.CopyTo(new int[10, 10], 0));
}
// Verifies CopyTo argument validation: a negative index always throws
// ArgumentOutOfRangeException, and a destination too small for the remaining
// bits throws ArgumentException. Each InlineData row supplies the element type
// (via the default literal), the bit count, the destination size, and the offset.
[Theory]
[InlineData(default(bool), 1, 0, 0)]
[InlineData(default(bool), 1, 1, 1)]
[InlineData(default(bool), BitsPerByte, BitsPerByte - 1, 0)]
[InlineData(default(bool), BitsPerByte, BitsPerByte, 1)]
[InlineData(default(bool), BitsPerInt32, BitsPerInt32 - 1, 0)]
[InlineData(default(bool), BitsPerInt32, BitsPerInt32, 1)]
[InlineData(default(byte), BitsPerByte, 0, 0)]
[InlineData(default(byte), BitsPerByte, 1, 1)]
[InlineData(default(byte), BitsPerByte * 4, 4 - 1, 0)]
[InlineData(default(byte), BitsPerByte * 4, 4, 1)]
[InlineData(default(int), BitsPerInt32, 0, 0)]
[InlineData(default(int), BitsPerInt32, 1, 1)]
[InlineData(default(int), BitsPerInt32 * 4, 4 - 1, 0)]
[InlineData(default(int), BitsPerInt32 * 4, 4, 1)]
public static void CopyTo_Size_Invalid<T>(T def, int bits, int arraySize, int index)
{
    ICollection bitArray = new BitArray(bits);
    T[] array = (T[])Array.CreateInstance(typeof(T), arraySize);
    Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.CopyTo(array, -1));
    // NOTE(review): for int destinations the thrown ArgumentException apparently
    // carries an empty ParamName while bool/byte carry none — presumably an
    // implementation quirk of BitArray.CopyTo being pinned here; confirm against
    // the product code before changing.
    Assert.Throws<ArgumentException>(def is int ? string.Empty : null, () => bitArray.CopyTo(array, index));
}
[Fact]
public static void SyncRoot()
{
    // SyncRoot must be stable for a given instance but distinct across instances.
    ICollection collection = new BitArray(10);

    Assert.Same(collection.SyncRoot, collection.SyncRoot);
    Assert.NotSame(collection.SyncRoot, ((ICollection)new BitArray(10)).SyncRoot);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Management.Automation;
using System.Management.Automation.Runspaces;
using System.Runtime.InteropServices;
using System.Threading;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Debugger.Interop;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using PowerShellTools.Common.ServiceManagement.DebuggingContract;
using PowerShellTools.DebugEngine.Definitions;
using Task = System.Threading.Tasks.Task;
using Thread = System.Threading.Thread;
using PowerShellTools.Common;
using PowerShellTools.Common.Logging;
namespace PowerShellTools.DebugEngine
{
/// <summary>
/// The core debug engine for PowerShell.
/// </summary>
/// <remarks>
/// This class is repsonsible for all interactions with the Visual Studio debugger.
/// </remarks>
[ComVisible(true)]
[Guid("C7F9F131-53AB-4FD0-8517-E54D124EA393")]
public class Engine : IDebugEngine2, IDebugEngineLaunch2
{
#region Fields
/// <summary>
/// This is the engine GUID of the sample engine. It needs to be changed here and in the registration
/// when creating a new engine.
/// </summary>
public const string Id = "{43ACAB74-8226-4920-B489-BFCF05372437}";

// A unique identifier for the program being debugged.
//private Guid m_ad7ProgramId;

// Signalled once PendingBreakpoints has been assigned; Execute() blocks on it.
private ManualResetEvent _runspaceSet;

// Adapter used to raise debug events back to Visual Studio's session debug manager.
private EngineEvents _events;

// Describes the script (or attached process) currently being debugged.
private ScriptProgramNode _node;

// Breakpoints created so far via IDebugEngine2.CreatePendingBreakpoint.
private List<ScriptBreakpoint> bps = new List<ScriptBreakpoint>();

private static readonly ILog Log = LogManager.GetLogger(typeof(Engine));
#endregion

#region Properties

// The shared PowerShell debugger hosted by the package.
public ScriptDebugger Debugger { get { return PowerShellToolsPackage.Debugger; } }

private IEnumerable<PendingBreakpoint> _pendingBreakpoints;
private uint _uiContextCookie;

// Breakpoints known before the session starts. Assigning this property also
// releases Execute(), which waits on _runspaceSet.
public IEnumerable<PendingBreakpoint> PendingBreakpoints
{
    get { return _pendingBreakpoints; }
    set
    {
        _pendingBreakpoints = value;
        _runspaceSet.Set();
    }
}
#endregion

/// <summary>
/// Initializes the engine and registers the UI context used to show/hide
/// debugging-related UI while a PowerShell debug session is active.
/// </summary>
public Engine()
{
    _runspaceSet = new ManualResetEvent(false);
    _uiContextCookie = UiContextUtilities.CreateUiContext(PowerShellTools.Common.Constants.PowerShellDebuggingUiContextGuid);
}
/// <summary>
/// Initiates the execute of the debug engine.
/// </summary>
/// <remarks>
/// The debug engine works in two different ways. The first is by executing a script file. The second
/// is by executing a string of text. Runs on a worker thread (queued from
/// <see cref="Attach"/>); wires the PowerShell debugger events to the Visual
/// Studio event sink and then starts the script if the runspace is available.
/// </remarks>
public void Execute()
{
    if (!PowerShellToolsPackage.PowerShellHostInitialized)
    {
        // TODO: UI Work required to give user indication that it is waiting for debugger to get alive.
        PowerShellToolsPackage.DebuggerReadyEvent.WaitOne();
    }

    if (!_node.IsAttachedProgram)
    {
        // Wait until Visual Studio hands over the pending breakpoints
        // (the PendingBreakpoints setter signals _runspaceSet).
        if (!_runspaceSet.WaitOne())
        {
            throw new Exception("Runspace not set!");
        }

        // NOTE(review): busy-waits (1s polls) until every pending breakpoint has
        // been bound into bps; an event-based handshake would be cleaner — confirm
        // before changing, as CreatePendingBreakpoint runs on another thread.
        while (PendingBreakpoints.Count() > bps.Count)
        {
            Thread.Sleep(1000);
        }
    }

    // Route PowerShell debugger callbacks to the Visual Studio event adapter.
    Debugger.HostUi.OutputString = _events.OutputString;
    Debugger.BreakpointManager.BreakpointHit += Debugger_BreakpointHit;
    Debugger.DebuggingBegin += Debugger_DebuggingBegin;
    Debugger.DebuggingFinished += Debugger_DebuggingFinished;
    Debugger.BreakpointManager.BreakpointUpdated += Debugger_BreakpointUpdated;
    Debugger.DebuggerPaused += Debugger_DebuggerPaused;
    Debugger.TerminatingException += Debugger_TerminatingException;
    _node.Debugger = Debugger;

    if (Debugger.DebuggingService.GetRunspaceAvailability() == RunspaceAvailability.Available)
    {
        Debugger.DebuggerBegin();
        Debugger.DebuggingService.SetRunspace(Debugger.OverrideExecutionPolicy);
        Debugger.Execute(_node);
    }
    else
    {
        // Runspace is busy; report the session as finished rather than queuing work.
        Debugger.DebuggerFinished();
    }
}
/// <summary>
/// This event occurs when a terminating exception is thrown from the
/// PowerShell debugger.
/// </summary>
/// <remarks>
/// This event triggers a Visual Studio Exception event so that
/// Visual Studio displays the exception window accordingly.
/// The previous implementation also constructed a ScriptLocation from the
/// error's invocation info but never used it (the corresponding Break call
/// was commented out); that dead code has been removed.
/// </remarks>
/// <param name="sender">The event source.</param>
/// <param name="e">Carries the terminating exception raised by the PowerShell run.</param>
void Debugger_TerminatingException(object sender, EventArgs<PowerShellRunTerminatingException> e)
{
    _events.Exception(_node, new Exception(e.Value.Message));
}
/// <summary>
/// This event triggers a Visual Studio OutputString event so that the string
/// provided is written to the Output Pane.
/// </summary>
/// <remarks>
/// NOTE(review): Execute() assigns Debugger.HostUi.OutputString directly to
/// _events.OutputString and does not subscribe this handler; it may be
/// vestigial — confirm against other callers before removing.
/// </remarks>
/// <param name="sender">The event source.</param>
/// <param name="e">The text to write to the Output Pane.</param>
void Debugger_OutputString(object sender, EventArgs<string> e)
{
    _events.OutputString(e.Value);
}
/// <summary>
/// This event triggers a Visual Studio Break event, causing the debugger to
/// break.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="e">The script location at which the debugger paused (currently unused here).</param>
void Debugger_DebuggerPaused(object sender, EventArgs<ScriptLocation> e)
{
    _events.Break(_node);
}
/// <summary>
/// Placeholder for future support on debugging command in REPL window.
/// This event handler adds or removes breakpoints monitored by Visual Studio.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="e">Details of the breakpoint change (currently ignored).</param>
void Debugger_BreakpointUpdated(object sender, DebuggerBreakpointUpdatedEventArgs e)
{
    // TODO: implementation for future support on debugging command in REPL window
}
/// <summary>
/// This event handler reports to Visual Studio that the debugger has begun,
/// activating the PowerShell-debugging UI context registered in the constructor.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="e">Unused.</param>
private void Debugger_DebuggingBegin(object sender, EventArgs e)
{
    UiContextUtilities.ActivateUiContext(_uiContextCookie);
}
/// <summary>
/// This event handler reports to Visual Studio that the debugger has finished and destroys the program.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="e">Unused.</param>
void Debugger_DebuggingFinished(object sender, EventArgs e)
{
    // Discard bound breakpoints so the next session starts clean.
    bps.Clear();
    _events.ProgramDestroyed(_node);
    UiContextUtilities.DeactivateUiContext(_uiContextCookie);
}
/// <summary>
/// This event handler notifies Visual Studio that a breakpoint has been hit.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="e">The breakpoint that was hit.</param>
void Debugger_BreakpointHit(object sender, EventArgs<ScriptBreakpoint> e)
{
    _events.BreakpointHit(e.Value, _node);
}
#region Implementation of IDebugEngine2
/// <summary>
/// Attaches to the specified program nodes. This is the main entry point to debugging.
/// </summary>
/// <remarks>
/// This method is responsible for firing the correct Visual Studio events to begin debugging
/// and then to start the actual PowerShell execution.
/// </remarks>
/// <param name="rgpPrograms">The programs.</param>
/// <param name="rgpProgramNodes">The program nodes.</param>
/// <param name="celtPrograms">The celt programs.</param>
/// <param name="pCallback">The callback.</param>
/// <param name="dwReason">The reason.</param>
/// <returns>S_OK on success.</returns>
public int Attach(IDebugProgram2[] rgpPrograms, IDebugProgramNode2[] rgpProgramNodes, uint celtPrograms,
    IDebugEventCallback2 pCallback, enum_ATTACH_REASON dwReason)
{
    Log.Debug("Attaching the debug engine.");

    Guid id;
    rgpPrograms[0].GetProgramId(out id);

    if (_node == null)
    {
        _node = rgpProgramNodes[0] as ScriptProgramNode;

        // during remote attach, the program node is put in the programs array
        if (_node == null)
        {
            _node = rgpPrograms[0] as ScriptProgramNode;
            _node.IsRemoteProgram = true;
        }

        // ATTACH_REASON_USER means we attached to an existing process rather than
        // launching the script ourselves.
        _node.IsAttachedProgram = dwReason == enum_ATTACH_REASON.ATTACH_REASON_USER;
    }

    _node.Id = id;

    var publisher = (IDebugProgramPublisher2)new DebugProgramPublisher();
    publisher.PublishProgramNode(_node);

    // Fire the canonical engine start-up event sequence expected by the SDM.
    _events = new EngineEvents(this, pCallback);
    _events.RunspaceRequest();
    _events.EngineCreated();
    _events.ProgramCreated(_node);
    _events.EngineLoaded();
    _events.DebugEntryPoint();

    // Run the script on a worker thread so Attach returns to the SDM immediately.
    Task.Factory.StartNew(Execute);
    return VSConstants.S_OK;
}
// Requests that all programs being debugged by this DE stop execution the next time one of their threads attempts to run.
// This is normally called in response to the user clicking on the pause button in the debugger.
// When the break is complete, an AsyncBreakComplete event will be sent back to the debugger.
int IDebugEngine2.CauseBreak()
{
    // NOTE(review): this cast assumes the concrete engine object also implements
    // IDebugProgram2 (e.g. via a derived or partial type not visible here);
    // otherwise it throws InvalidCastException — confirm.
    return ((IDebugProgram2)this).CauseBreak();
}
// Called by the SDM to indicate that a synchronous debug event, previously sent by the DE to the SDM,
// was received and processed. The only event the sample engine sends in this fashion is Program Destroy.
// It responds to that event by shutting down the engine.
int IDebugEngine2.ContinueFromSynchronousEvent(IDebugEvent2 eventObject)
{
    // No per-event bookkeeping is needed; simply acknowledge.
    return VSConstants.S_OK;
}
// Creates a pending breakpoint in the engine. A pending breakpoint contains all the information needed to bind a breakpoint to
// a location in the debuggee. Only file/line locations are supported here.
int IDebugEngine2.CreatePendingBreakpoint(IDebugBreakpointRequest2 pBPRequest,
    out IDebugPendingBreakpoint2 ppPendingBP)
{
    Log.Debug("Engine: CreatePendingBreakPoint");
    ppPendingBP = null;

    var info = new BP_REQUEST_INFO[1];
    info[0].bpLocation.bpLocationType = (uint)enum_BP_LOCATION_TYPE.BPLT_FILE_LINE;
    if (pBPRequest.GetRequestInfo(enum_BPREQI_FIELDS.BPREQI_BPLOCATION, info) == VSConstants.S_OK)
    {
        // Resolve the document position (file name + text range) the request refers to.
        var position = (IDebugDocumentPosition2)Marshal.GetObjectForIUnknown(info[0].bpLocation.unionmember2);
        var start = new TEXT_POSITION[1];
        var end = new TEXT_POSITION[1];
        string fileName;
        position.GetRange(start, end);
        position.GetFileName(out fileName);

        //VS has a 0 based line\column value. PowerShell starts at 1
        var breakpoint = new ScriptBreakpoint(
            _node,
            fileName,
            (int)start[0].dwLine + 1,
            (int)start[0].dwColumn,
            _events);
        ppPendingBP = breakpoint;

        // Track the breakpoint so Execute() knows when all pending ones are bound.
        _events.BreakpointAdded(breakpoint);
        bps.Add(breakpoint);
    }
    return VSConstants.S_OK;
}
// Informs a DE that the program specified has been atypically terminated and that the DE should
// clean up all references to the program and send a program destroy event.
int IDebugEngine2.DestroyProgram(IDebugProgram2 pProgram)
{
    // Tell the SDM that the engine knows that the program is exiting, and that the
    // engine will send a program destroy. We do this because the Win32 debug api will always
    // tell us that the process exited, and otherwise we have a race condition.
    return (HRESULT.E_PROGRAM_DESTROY_PENDING);
}
// Gets the GUID of the DE. Returns the engine GUID declared in the Id constant.
int IDebugEngine2.GetEngineId(out Guid guidEngine)
{
    guidEngine = new Guid(Id);
    return VSConstants.S_OK;
}
// Removes the list of exceptions the IDE has set for a particular run-time architecture or language.
// The sample engine does not support exceptions in the debuggee so this method is not actually implemented.
int IDebugEngine2.RemoveAllSetExceptions(ref Guid guidType)
{
    return VSConstants.S_OK;
}
// Removes the specified exception so it is no longer handled by the debug engine.
// The sample engine does not support exceptions in the debuggee so this method is not actually implemented.
int IDebugEngine2.RemoveSetException(EXCEPTION_INFO[] pException)
{
    // The sample engine will always stop on all exceptions.
    return VSConstants.S_OK;
}
// Specifies how the DE should handle a given exception.
// The sample engine does not support exceptions in the debuggee so this method is not actually implemented.
int IDebugEngine2.SetException(EXCEPTION_INFO[] pException)
{
    return VSConstants.S_OK;
}
// Sets the locale of the DE.
// This method is called by the session debug manager (SDM) to propagate the locale settings of the IDE so that
// strings returned by the DE are properly localized. The sample engine is not localized so this is not implemented.
int IDebugEngine2.SetLocale(ushort wLangID)
{
    return VSConstants.S_OK;
}
// A metric is a registry value used to change a debug engine's behavior or to advertise supported functionality.
// This method can forward the call to the appropriate form of the Debugging SDK Helpers function, SetMetric.
int IDebugEngine2.SetMetric(string pszMetric, object varValue)
{
    // The sample engine does not need to understand any metric settings.
    return VSConstants.S_OK;
}
// Sets the registry root currently in use by the DE. Different installations of Visual Studio can change where their registry information is stored
// This allows the debugger to tell the engine where that location is.
int IDebugEngine2.SetRegistryRoot(string pszRegistryRoot)
{
    // The sample engine does not read settings from the registry.
    return VSConstants.S_OK;
}
#endregion
#region Implementation of IDebugEngineLaunch2
/// <summary>
/// Creates, in a suspended state, the process/program node used for a
/// PowerShell debug session. The node records whether a file or a text
/// selection is being executed.
/// </summary>
/// <param name="pszExe">The script path, or the literal "Selection" when debugging selected text.</param>
/// <param name="pszOptions">When debugging a selection, carries the script text itself.</param>
/// <param name="ppProcess">Receives the newly created debug process.</param>
/// <returns>S_OK on success.</returns>
public int LaunchSuspended(string pszServer, IDebugPort2 pPort, string pszExe, string pszArgs, string pszDir,
    string bstrEnv, string pszOptions, enum_LAUNCH_FLAGS dwLaunchFlags, uint hStdInput,
    uint hStdOutput, uint hStdError, IDebugEventCallback2 pCallback,
    out IDebugProcess2 ppProcess)
{
    _runspaceSet.Reset();

    // The previous implementation had identical if/else branches on
    // LAUNCH_DEBUG, both creating the same process type; collapsed here.
    ppProcess = new ScriptDebugProcess(pPort);
    _node = ((ScriptDebugProcess)ppProcess).Node;

    if (pszExe == "Selection")
    {
        // Debugging a selection: the script text travels in pszOptions.
        _node.IsFile = false;
        _node.FileName = pszOptions;
    }
    else
    {
        _node.IsFile = true;
        _node.FileName = pszExe;
        _node.Arguments = pszArgs;
    }

    _events = new EngineEvents(this, pCallback);
    return VSConstants.S_OK;
}
// Determines if a process can be terminated. Always reports success;
// termination of the PowerShell session is handled in TerminateProcess.
int IDebugEngineLaunch2.CanTerminateProcess(IDebugProcess2 process)
{
    Log.Debug("Engine: CanTerminateProcess");
    return VSConstants.S_OK;
}
// Resume a process launched by IDebugEngineLaunch2.LaunchSuspended
int IDebugEngineLaunch2.ResumeProcess(IDebugProcess2 process)
{
    if (process is ScriptDebugProcess)
    {
        // Hand the program node to the port; this causes the SDM to call Attach.
        IDebugPort2 port;
        process.GetPort(out port);
        var defaultPort = (IDebugDefaultPort2)port;
        IDebugPortNotify2 notify;
        defaultPort.GetPortNotify(out notify);
        notify.AddProgramNode((process as ScriptDebugProcess).Node);
        return VSConstants.S_OK;
    }
    // Not a process this engine launched.
    return VSConstants.E_UNEXPECTED;
}
// This function is used to terminate a process that the SampleEngine launched
// The debugger will call IDebugEngineLaunch2::CanTerminateProcess before calling this method.
int IDebugEngineLaunch2.TerminateProcess(IDebugProcess2 process)
{
    Log.Debug("Engine: TerminateProcess");

    // Withdraw the program node from the port, then stop the PowerShell debugger.
    IDebugPort2 port;
    process.GetPort(out port);
    var defaultPort = (IDebugDefaultPort2)port;
    IDebugPortNotify2 notify;
    defaultPort.GetPortNotify(out notify);
    notify.RemoveProgramNode(_node);
    Debugger.Stop();
    return VSConstants.S_OK;
}
#endregion
#region Deprecated interface methods
// Deprecated: the debugger never calls this member on the engine.
int IDebugEngine2.EnumPrograms(out IEnumDebugPrograms2 programs)
{
    Debug.Fail("This function is not called by the debugger");
    programs = null;
    return VSConstants.E_NOTIMPL;
}
#endregion
}
}
| |
using Facebook.Yoga;
namespace ReactNative.UIManager
{
class EdgeSpacing
{
    /// <summary>
    /// Spacing type that represents the left direction. E.g. <code>marginLeft</code>.
    /// </summary>
    public const int Left = 0;

    /// <summary>
    /// Spacing type that represents the top direction. E.g. <code>marginTop</code>.
    /// </summary>
    public const int Top = 1;

    /// <summary>
    /// Spacing type that represents the right direction. E.g. <code>marginRight</code>.
    /// </summary>
    public const int Right = 2;

    /// <summary>
    /// Spacing type that represents the bottom direction. E.g. <code>marginBottom</code>.
    /// </summary>
    public const int Bottom = 3;

    /// <summary>
    /// Spacing type that represents start direction e.g. left in left-to-right, right in right-to-left.
    /// </summary>
    public const int Start = 4;

    /// <summary>
    /// Spacing type that represents end direction e.g. right in left-to-right, left in right-to-left.
    /// </summary>
    public const int End = 5;

    /// <summary>
    /// Spacing type that represents horizontal direction (left and right). E.g.
    /// <code>marginHorizontal</code>.
    /// </summary>
    public const int Horizontal = 6;

    /// <summary>
    /// Spacing type that represents vertical direction (top and bottom). E.g. <code>marginVertical</code>.
    /// </summary>
    public const int Vertical = 7;

    /// <summary>
    /// Spacing type that represents all directions (left, top, right, bottom). E.g. <code>margin</code>.
    /// </summary>
    public const int All = 8;

    // One slot per spacing type; slots start out undefined.
    private readonly YogaValue[] _values = CreateUndefinedArray();

    // Value returned by Get for physical edges that were never set.
    private readonly YogaValue _default;

    // Bit i is set exactly when spacing type i currently holds a defined value.
    private int _definedFlags;

    // True when any of the alias types (All/Vertical/Horizontal) is defined,
    // letting Get skip the alias lookup entirely in the common case.
    private bool _anyAliasDefined;

    /// <summary>
    /// Instantiates a <see cref="EdgeSpacing"/> whose default spacing is zero points.
    /// </summary>
    public EdgeSpacing()
        : this(YogaValue.Point(0))
    {
    }

    /// <summary>
    /// Instantiates a <see cref="EdgeSpacing"/>.
    /// </summary>
    /// <param name="defaultValue">The default spacing value.</param>
    public EdgeSpacing(YogaValue defaultValue)
    {
        _default = defaultValue;
    }

    /// <summary>
    /// Set a spacing value.
    /// </summary>
    /// <param name="spacingType">
    /// One of <see cref="Left" />, <see cref="Top" />, <see cref="Right" />, <see cref="Bottom" />,
    /// <see cref="Vertical" />, <see cref="Horizontal" />, <see cref="All" />.
    /// </param>
    /// <param name="value">the value for this direction.</param>
    /// <returns>
    /// <code>true</code> if the spacing has changed, or <code>false</code>
    /// if the same value was already set.
    /// </returns>
    public bool Set(int spacingType, YogaValue value)
    {
        if (EpsilonEqualityComparer.Instance.Equals(_values[spacingType], value))
        {
            return false;
        }

        _values[spacingType] = value;

        var flag = 1 << spacingType;
        if (YogaConstants.IsUndefined(value))
        {
            _definedFlags &= ~flag;
        }
        else
        {
            _definedFlags |= flag;
        }

        _anyAliasDefined =
            (_definedFlags & ((1 << All) | (1 << Vertical) | (1 << Horizontal))) != 0;
        return true;
    }

    /// <summary>
    /// Get the spacing for a direction. This takes into account any default values that have been Set.
    /// </summary>
    /// <param name="spacingType">
    /// One of <see cref="Left"/>, <see cref="Top" />, <see cref="Right" />, <see cref="Bottom" />.
    /// </param>
    /// <returns>The spacing value.</returns>
    public YogaValue Get(int spacingType)
    {
        // Start/End have no meaningful default; they fall back to undefined.
        var fallback = spacingType == Start || spacingType == End
            ? YogaConstants.Undefined
            : _default;

        if (_definedFlags == 0)
        {
            return fallback;
        }

        if ((_definedFlags & (1 << spacingType)) != 0)
        {
            return _values[spacingType];
        }

        if (_anyAliasDefined)
        {
            // Prefer the axis alias (Vertical/Horizontal), then the All alias.
            var axisAlias = spacingType == Top || spacingType == Bottom ? Vertical : Horizontal;
            if ((_definedFlags & (1 << axisAlias)) != 0)
            {
                return _values[axisAlias];
            }

            if ((_definedFlags & (1 << All)) != 0)
            {
                return _values[All];
            }
        }

        return fallback;
    }

    /// <summary>
    /// Get the raw value (that was Set using <see cref="Set" />), without taking into account
    /// any default values.
    /// </summary>
    /// <param name="spacingType">
    /// One of <see cref="Left" />, <see cref="Top" />, <see cref="Right" />,
    /// <see cref="Bottom" />, <see cref="Vertical" />, <see cref="Horizontal" />,
    /// <see cref="All" />.
    /// </param>
    /// <returns>The raw spacing value.</returns>
    public YogaValue GetRaw(int spacingType)
    {
        return _values[spacingType];
    }

    /// <summary>
    /// Resets the spacing instance to its default state. This method is meant to be used when
    /// recycling <see cref="EdgeSpacing" /> instances.
    /// </summary>
    public void Reset()
    {
        for (var i = 0; i < _values.Length; ++i)
        {
            _values[i] = YogaConstants.Undefined;
        }

        _anyAliasDefined = false;
        _definedFlags = 0;
    }

    /// <summary>
    /// Try to get start value and fallback to given type if not defined. This is used privately
    /// by the layout engine as a more efficient way to fetch direction-aware values by
    /// avoid extra method invocations.
    /// </summary>
    /// <param name="spacingType">The preferred spacing type.</param>
    /// <param name="fallbackType">The fallback spacing type.</param>
    /// <returns>The spacing value.</returns>
    internal YogaValue GetWithFallback(int spacingType, int fallbackType)
    {
        if ((_definedFlags & (1 << spacingType)) != 0)
        {
            return _values[spacingType];
        }

        return Get(fallbackType);
    }

    // Builds the backing array with every slot set to the undefined value.
    private static YogaValue[] CreateUndefinedArray()
    {
        var undefined = YogaValue.Undefined();
        var array = new YogaValue[All + 1];
        for (var i = 0; i < array.Length; ++i)
        {
            array[i] = undefined;
        }

        return array;
    }
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
namespace DiscUtils
{
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// Provides a thread-safe wrapping around a sparse stream.
/// </summary>
/// <remarks>
/// <para>Streams are inherently not thread-safe (because read/write is not atomic w.r.t. Position).
/// This method enables multiple 'views' of a stream to be created (each with their own Position), and ensures
/// only a single operation is executing on the wrapped stream at any time.</para>
/// <para>This example shows the pattern of use:</para>
/// <example>
/// <code>
/// SparseStream baseStream = ...;
/// ThreadSafeStream tss = new ThreadSafeStream(baseStream);
/// for(int i = 0; i &lt; 10; ++i)
/// {
///   SparseStream streamForThread = tss.OpenView();
/// }
/// </code>
/// </example>
/// <para>This results in 11 streams that can be used in different threads - <c>tss</c> and ten 'views' created from <c>tss</c>.</para>
/// <para>Note, the stream length cannot be changed.</para>
/// </remarks>
public class ThreadSafeStream : SparseStream
{
    // State shared by all views of the same wrapped stream; also serves as the lock object.
    private CommonState _common;

    // True only on the instance that created _common; views never own it.
    private bool _ownsCommon;

    // Per-view position; the wrapped stream's Position is set under the lock before each I/O.
    private long _position;

    /// <summary>
    /// Initializes a new instance of the ThreadSafeStream class.
    /// </summary>
    /// <param name="toWrap">The stream to wrap.</param>
    /// <remarks>Do not directly modify <c>toWrap</c> after wrapping it, unless the thread-safe views
    /// will no longer be used.</remarks>
    public ThreadSafeStream(SparseStream toWrap)
        : this(toWrap, Ownership.None)
    {
    }

    /// <summary>
    /// Initializes a new instance of the ThreadSafeStream class.
    /// </summary>
    /// <param name="toWrap">The stream to wrap.</param>
    /// <param name="ownership">Whether to transfer ownership of <c>toWrap</c> to the new instance.</param>
    /// <remarks>Do not directly modify <c>toWrap</c> after wrapping it, unless the thread-safe views
    /// will no longer be used.</remarks>
    public ThreadSafeStream(SparseStream toWrap, Ownership ownership)
    {
        if (!toWrap.CanSeek)
        {
            throw new ArgumentException("Wrapped stream must support seeking", "toWrap");
        }

        _common = new CommonState
        {
            WrappedStream = toWrap,
            WrappedStreamOwnership = ownership
        };
        _ownsCommon = true;
    }

    // View constructor: shares the clone's common state but does not own it.
    private ThreadSafeStream(ThreadSafeStream toClone)
    {
        _common = toClone._common;
        if (_common == null)
        {
            throw new ObjectDisposedException("toClone");
        }
    }

    /// <summary>
    /// Gets the parts of the stream that are stored.
    /// </summary>
    /// <remarks>This may be an empty enumeration if all bytes are zero.</remarks>
    public override IEnumerable<StreamExtent> Extents
    {
        get
        {
            lock (_common)
            {
                return Wrapped.Extents;
            }
        }
    }

    /// <summary>
    /// Gets a value indicating if this stream supports reads.
    /// </summary>
    public override bool CanRead
    {
        get
        {
            lock (_common)
            {
                return Wrapped.CanRead;
            }
        }
    }

    /// <summary>
    /// Gets a value indicating if this stream supports seeking (always true).
    /// </summary>
    public override bool CanSeek
    {
        get { return true; }
    }

    /// <summary>
    /// Gets a value indicating if this stream supports writes (delegates to the wrapped stream).
    /// </summary>
    public override bool CanWrite
    {
        get
        {
            lock (_common)
            {
                return Wrapped.CanWrite;
            }
        }
    }

    /// <summary>
    /// Gets the length of the stream.
    /// </summary>
    public override long Length
    {
        get
        {
            lock (_common)
            {
                return Wrapped.Length;
            }
        }
    }

    /// <summary>
    /// Gets the current stream position - each 'view' has its own Position.
    /// </summary>
    public override long Position
    {
        get
        {
            return _position;
        }

        set
        {
            _position = value;
        }
    }

    // Resolves the shared wrapped stream, failing if any view disposed the common state.
    private SparseStream Wrapped
    {
        get
        {
            SparseStream wrapped = _common.WrappedStream;
            if (wrapped == null)
            {
                throw new ObjectDisposedException("ThreadSafeStream");
            }

            return wrapped;
        }
    }

    /// <summary>
    /// Opens a new thread-safe view on the stream.
    /// </summary>
    /// <returns>The new view.</returns>
    public SparseStream OpenView()
    {
        return new ThreadSafeStream(this);
    }

    /// <summary>
    /// Gets the parts of a stream that are stored, within a specified range.
    /// </summary>
    /// <param name="start">The offset of the first byte of interest.</param>
    /// <param name="count">The number of bytes of interest.</param>
    /// <returns>An enumeration of stream extents, indicating stored bytes.</returns>
    public override IEnumerable<StreamExtent> GetExtentsInRange(long start, long count)
    {
        lock (_common)
        {
            return Wrapped.GetExtentsInRange(start, count);
        }
    }

    /// <summary>
    /// Causes the stream to flush all changes.
    /// </summary>
    public override void Flush()
    {
        lock (_common)
        {
            Wrapped.Flush();
        }
    }

    /// <summary>
    /// Reads data from the stream.
    /// </summary>
    /// <param name="buffer">The buffer to fill.</param>
    /// <param name="offset">The first byte in buffer to fill.</param>
    /// <param name="count">The requested number of bytes to read.</param>
    /// <returns>The actual number of bytes read.</returns>
    public override int Read(byte[] buffer, int offset, int count)
    {
        lock (_common)
        {
            // Re-apply this view's position under the lock before the read.
            SparseStream wrapped = Wrapped;
            wrapped.Position = _position;
            int numRead = wrapped.Read(buffer, offset, count);
            _position += numRead;
            return numRead;
        }
    }

    /// <summary>
    /// Changes the current stream position (each view has its own Position).
    /// </summary>
    /// <param name="offset">The relative location to move to.</param>
    /// <param name="origin">The origin of the location.</param>
    /// <returns>The new location as an absolute position.</returns>
    public override long Seek(long offset, SeekOrigin origin)
    {
        long effectiveOffset = offset;
        if (origin == SeekOrigin.Current)
        {
            effectiveOffset += _position;
        }
        else if (origin == SeekOrigin.End)
        {
            effectiveOffset += Length;
        }

        if (effectiveOffset < 0)
        {
            throw new IOException("Attempt to move before beginning of disk");
        }
        else
        {
            _position = effectiveOffset;
            return _position;
        }
    }

    /// <summary>
    /// Sets the length of the stream (not supported).
    /// </summary>
    /// <param name="value">The new length.</param>
    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    /// <summary>
    /// Writes data to the stream (the stream may not be extended).
    /// </summary>
    /// <param name="buffer">The data to write.</param>
    /// <param name="offset">The first byte to write.</param>
    /// <param name="count">The number of bytes to write.</param>
    public override void Write(byte[] buffer, int offset, int count)
    {
        lock (_common)
        {
            SparseStream wrapped = Wrapped;

            // The wrapped stream's length is fixed; refuse writes past the end.
            if (_position + count > wrapped.Length)
            {
                throw new IOException("Attempt to extend stream");
            }

            wrapped.Position = _position;
            wrapped.Write(buffer, offset, count);
            _position += count;
        }
    }

    /// <summary>
    /// Disposes of this instance, invalidating any remaining views.
    /// </summary>
    /// <param name="disposing"><c>true</c> if disposing, else <c>false</c>.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            // Only the owner tears down the shared state (and, if owned, the wrapped stream).
            if (_ownsCommon && _common != null)
            {
                lock (_common)
                {
                    if (_common.WrappedStreamOwnership == Ownership.Dispose)
                    {
                        _common.WrappedStream.Dispose();
                    }

                    _common.Dispose();
                }
            }
        }

        // Always drop the reference so this instance (owner or view) becomes unusable.
        _common = null;
    }

    // Holds the single wrapped stream and its ownership, shared by all views.
    private sealed class CommonState : IDisposable
    {
        public SparseStream WrappedStream;
        public Ownership WrappedStreamOwnership;

        #region IDisposable Members

        public void Dispose()
        {
            WrappedStream = null;
        }

        #endregion
    }
}
}
| |
using System;
using System.IO;
using System.Text;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// Holds data pertinent to a data descriptor.
/// </summary>
public class DescriptorData
{
    /// <summary>
    /// Get /set the compressed size of data.
    /// </summary>
    public long CompressedSize {
        get { return compressedSize_; }
        set { compressedSize_ = value; }
    }

    /// <summary>
    /// Get / set the uncompressed size of data
    /// </summary>
    public long Size {
        get { return size_; }
        set { size_ = value; }
    }

    /// <summary>
    /// Get /set the crc value; only the low 32 bits are retained.
    /// </summary>
    public long Crc {
        get { return crc_; }
        set { crc_ = (value & 0xffffffff); }
    }

    #region Instance Fields
    long compressedSize_;
    long size_;
    long crc_;
    #endregion
}
// Records where in the output stream an entry's size and crc fields live,
// so they can be patched after the data has been written.
class EntryPatchData
{
    public long SizePatchOffset {
        get { return sizePatch_; }
        set { sizePatch_ = value; }
    }

    public long CrcPatchOffset {
        get { return crcPatch_; }
        set { crcPatch_ = value; }
    }

    #region Instance Fields
    long sizePatch_;
    long crcPatch_;
    #endregion
}
/// <summary>
/// This class assists with writing/reading from Zip files.
/// </summary>
internal class ZipHelperStream : Stream
{
#region Constructors
/// <summary>
/// Initialise an instance of this class.
/// </summary>
/// <param name="name">The name of the file to open.</param>
/// <remarks>The file is opened read/write and the resulting stream is owned
/// (it is closed when this instance is closed).</remarks>
public ZipHelperStream(string name)
{
    stream_ = new FileStream(name, FileMode.Open, FileAccess.ReadWrite);
    isOwner_ = true;
}
/// <summary>
/// Initialise a new instance of <see cref="ZipHelperStream"/>.
/// </summary>
/// <param name="stream">The stream to use.</param>
/// <remarks>The stream is not owned by default; see <see cref="IsStreamOwner"/>.</remarks>
public ZipHelperStream(Stream stream)
{
    stream_ = stream;
}
#endregion
/// <summary>
/// Get / set a value indicating whether the underlying stream is owned or not.
/// </summary>
/// <remarks>If the stream is owned it is closed when this instance is closed.</remarks>
public bool IsStreamOwner {
    get { return isOwner_; }
    set { isOwner_ = value; }
}
#region Base Stream Methods
public override bool CanRead {
get { return stream_.CanRead; }
}
public override bool CanSeek {
get { return stream_.CanSeek; }
}
public override bool CanTimeout {
get { return stream_.CanTimeout; }
}
public override long Length {
get { return stream_.Length; }
}
public override long Position {
get { return stream_.Position; }
set { stream_.Position = value; }
}
public override bool CanWrite {
get { return stream_.CanWrite; }
}
public override void Flush()
{
stream_.Flush();
}
public override long Seek(long offset, SeekOrigin origin)
{
return stream_.Seek(offset, origin);
}
public override void SetLength(long value)
{
stream_.SetLength(value);
}
public override int Read(byte[] buffer, int offset, int count)
{
return stream_.Read(buffer, offset, count);
}
public override void Write(byte[] buffer, int offset, int count)
{
stream_.Write(buffer, offset, count);
}
/// <summary>
/// Close the stream.
/// </summary>
/// <remarks>
/// The underlying stream is closed only if <see cref="IsStreamOwner"/> is true.
/// </remarks>
override public void Close()
{
Stream toClose = stream_;
stream_ = null;
if (isOwner_ && (toClose != null)) {
isOwner_ = false;
toClose.Close();
}
}
#endregion
// Write the local file header for an entry directly to the underlying stream.
// TODO: ZipHelperStream.WriteLocalHeader is not yet used and needs checking for ZipFile and ZipOutputStream usage
void WriteLocalHeader(ZipEntry entry, EntryPatchData patchData)
{
CompressionMethod method = entry.CompressionMethod;
bool headerInfoAvailable = true; // How to get this?
bool patchEntryHeader = false;
// Fixed-size header fields: signature, version needed, flags,
// compression method and DOS date/time.
WriteLEInt(ZipConstants.LocalHeaderSignature);
WriteLEShort(entry.Version);
WriteLEShort(entry.Flags);
WriteLEShort((byte)method);
WriteLEInt((int)entry.DosTime);
if (headerInfoAvailable == true) {
WriteLEInt((int)entry.Crc);
if (entry.LocalHeaderRequiresZip64) {
// Sizes are written as -1 (0xffffffff) markers; the real 64-bit
// values go in the Zip64 extended information extra field below.
WriteLEInt(-1);
WriteLEInt(-1);
} else {
// An encrypted entry's stored compressed size includes the crypto header.
WriteLEInt(entry.IsCrypted ? (int)entry.CompressedSize + ZipConstants.CryptoHeaderSize : (int)entry.CompressedSize);
WriteLEInt((int)entry.Size);
}
} else {
// CRC/sizes are not yet known; remember where they belong so they
// can be patched in after the entry data has been written.
if (patchData != null) {
patchData.CrcPatchOffset = stream_.Position;
}
WriteLEInt(0); // Crc
if (patchData != null) {
patchData.SizePatchOffset = stream_.Position;
}
// For local header both sizes appear in Zip64 Extended Information
if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) {
WriteLEInt(-1);
WriteLEInt(-1);
} else {
WriteLEInt(0); // Compressed size
WriteLEInt(0); // Uncompressed size
}
}
byte[] name = ZipConstants.ConvertToArray(entry.Flags, entry.Name);
// The name length field in the header is only 16 bits wide.
if (name.Length > 0xFFFF) {
throw new ZipException("Entry name too long.");
}
var ed = new ZipExtraData(entry.ExtraData);
if (entry.LocalHeaderRequiresZip64 && (headerInfoAvailable || patchEntryHeader)) {
// Build the Zip64 extended information extra field (header id 1)
// holding the 64-bit original and compressed sizes.
ed.StartNewEntry();
if (headerInfoAvailable) {
ed.AddLeLong(entry.Size);
ed.AddLeLong(entry.CompressedSize);
} else {
ed.AddLeLong(-1);
ed.AddLeLong(-1);
}
ed.AddNewEntry(1);
if (!ed.Find(1)) {
throw new ZipException("Internal error cant find extra data");
}
if (patchData != null) {
patchData.SizePatchOffset = ed.CurrentReadIndex;
}
} else {
// No Zip64 information required; remove any stale extra-data entry.
ed.Delete(1);
}
byte[] extra = ed.GetEntryData();
WriteLEShort(name.Length);
WriteLEShort(extra.Length);
if (name.Length > 0) {
stream_.Write(name, 0, name.Length);
}
// Convert the extra-data-relative patch index into an absolute stream position.
if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) {
patchData.SizePatchOffset += stream_.Position;
}
if (extra.Length > 0) {
stream_.Write(extra, 0, extra.Length);
}
}
/// <summary>
/// Locates a block with the desired <paramref name="signature"/> by scanning
/// backwards from <paramref name="endLocation"/>.
/// </summary>
/// <param name="signature">The signature to find.</param>
/// <param name="endLocation">Location, marking the end of block.</param>
/// <param name="minimumBlockSize">Minimum size of the block.</param>
/// <param name="maximumVariableData">The maximum amount of variable data preceding the block.</param>
/// <returns>Returns the offset of the first byte after the signature; -1 if not found</returns>
public long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData)
{
// Latest position at which the signature could start.
long pos = endLocation - minimumBlockSize;
if (pos < 0) {
return -1;
}
// Do not scan back further than the variable data allows.
long giveUpMarker = Math.Max(pos - maximumVariableData, 0);
// TODO: This loop could be optimised for speed.
do {
if (pos < giveUpMarker) {
return -1;
}
Seek(pos--, SeekOrigin.Begin);
} while (ReadLEInt() != signature);
return Position;
}
/// <summary>
/// Write Zip64 end of central directory records (File header and locator).
/// </summary>
/// <param name="noOfEntries">The number of entries in the central directory.</param>
/// <param name="sizeEntries">The size of entries in the central directory.</param>
/// <param name="centralDirOffset">The offset of the central directory.</param>
public void WriteZip64EndOfCentralDirectory(long noOfEntries, long sizeEntries, long centralDirOffset)
{
// Remember where this record starts; the locator written below points back to it.
long centralSignatureOffset = stream_.Position;
WriteLEInt(ZipConstants.Zip64CentralFileHeaderSignature);
WriteLELong(44); // Size of this record (total size of remaining fields in header or full size - 12)
WriteLEShort(ZipConstants.VersionMadeBy); // Version made by
WriteLEShort(ZipConstants.VersionZip64); // Version to extract
WriteLEInt(0); // Number of this disk
WriteLEInt(0); // number of the disk with the start of the central directory
WriteLELong(noOfEntries); // No of entries on this disk
WriteLELong(noOfEntries); // Total No of entries in central directory
WriteLELong(sizeEntries); // Size of the central directory
WriteLELong(centralDirOffset); // offset of start of central directory
// zip64 extensible data sector not catered for here (variable size)
// Write the Zip64 end of central directory locator
WriteLEInt(ZipConstants.Zip64CentralDirLocatorSignature);
// no of the disk with the start of the zip64 end of central directory
WriteLEInt(0);
// relative offset of the zip64 end of central directory record
WriteLELong(centralSignatureOffset);
// total number of disks
WriteLEInt(1);
}
/// <summary>
/// Write the required records to end the central directory.
/// </summary>
/// <param name="noOfEntries">The number of entries in the directory.</param>
/// <param name="sizeEntries">The size of the entries in the directory.</param>
/// <param name="startOfCentralDirectory">The start of the central directory.</param>
/// <param name="comment">The archive comment. (This can be null).</param>
public void WriteEndOfCentralDirectory(long noOfEntries, long sizeEntries,
long startOfCentralDirectory, byte[] comment)
{
// If any value exceeds the classic 16/32-bit field limits, a Zip64 end of
// central directory record (plus locator) must precede the normal record.
if ((noOfEntries >= 0xffff) ||
(startOfCentralDirectory >= 0xffffffff) ||
(sizeEntries >= 0xffffffff)) {
WriteZip64EndOfCentralDirectory(noOfEntries, sizeEntries, startOfCentralDirectory);
}
WriteLEInt(ZipConstants.EndOfCentralDirectorySignature);
// TODO: ZipFile Multi disk handling not done
WriteLEShort(0); // number of this disk
WriteLEShort(0); // no of disk with start of central dir
// Number of entries
if (noOfEntries >= 0xffff) {
WriteLEUshort(0xffff); // Zip64 marker
WriteLEUshort(0xffff);
} else {
WriteLEShort((short)noOfEntries); // entries in central dir for this disk
WriteLEShort((short)noOfEntries); // total entries in central directory
}
// Size of the central directory
if (sizeEntries >= 0xffffffff) {
WriteLEUint(0xffffffff); // Zip64 marker
} else {
WriteLEInt((int)sizeEntries);
}
// offset of start of central directory
if (startOfCentralDirectory >= 0xffffffff) {
WriteLEUint(0xffffffff); // Zip64 marker
} else {
WriteLEInt((int)startOfCentralDirectory);
}
// The comment length field is 16 bits wide.
int commentLength = (comment != null) ? comment.Length : 0;
if (commentLength > 0xffff) {
throw new ZipException(string.Format("Comment length({0}) is too long can only be 64K", commentLength));
}
WriteLEShort(commentLength);
if (commentLength > 0) {
Write(comment, 0, comment.Length);
}
}
#region LE value reading/writing
/// <summary>
/// Read an unsigned short in little endian byte order.
/// </summary>
/// <returns>Returns the value read.</returns>
/// <exception cref="IOException">
/// An i/o error occurs.
/// </exception>
/// <exception cref="EndOfStreamException">
/// The file ends prematurely
/// </exception>
public int ReadLEShort()
{
    // Low-order byte first; each read is checked individually so the
    // stream is not advanced past a premature end of file.
    int lowByte = stream_.ReadByte();
    if (lowByte < 0) {
        throw new EndOfStreamException();
    }
    int highByte = stream_.ReadByte();
    if (highByte < 0) {
        throw new EndOfStreamException();
    }
    return lowByte | (highByte << 8);
}
/// <summary>
/// Read an int in little endian byte order.
/// </summary>
/// <returns>Returns the value read.</returns>
/// <exception cref="IOException">
/// An i/o error occurs.
/// </exception>
/// <exception cref="System.IO.EndOfStreamException">
/// The file ends prematurely
/// </exception>
public int ReadLEInt()
{
return ReadLEShort() | (ReadLEShort() << 16);
}
/// <summary>
/// Read a long in little endian byte order.
/// </summary>
/// <returns>The value read.</returns>
public long ReadLELong()
{
return (uint)ReadLEInt() | ((long)ReadLEInt() << 32);
}
/// <summary>
/// Write an unsigned short in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEShort(int value)
{
stream_.WriteByte((byte)(value & 0xff));
stream_.WriteByte((byte)((value >> 8) & 0xff));
}
/// <summary>
/// Write a ushort in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEUshort(ushort value)
{
stream_.WriteByte((byte)(value & 0xff));
stream_.WriteByte((byte)(value >> 8));
}
/// <summary>
/// Write an int in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEInt(int value)
{
WriteLEShort(value);
WriteLEShort(value >> 16);
}
/// <summary>
/// Write a uint in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEUint(uint value)
{
WriteLEUshort((ushort)(value & 0xffff));
WriteLEUshort((ushort)(value >> 16));
}
/// <summary>
/// Write a long in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLELong(long value)
{
WriteLEInt((int)value);
WriteLEInt((int)(value >> 32));
}
/// <summary>
/// Write a ulong in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEUlong(ulong value)
{
WriteLEUint((uint)(value & 0xffffffff));
WriteLEUint((uint)(value >> 32));
}
#endregion
/// <summary>
/// Write a data descriptor.
/// </summary>
/// <param name="entry">The entry to write a descriptor for.</param>
/// <returns>Returns the number of descriptor bytes written.</returns>
public int WriteDataDescriptor(ZipEntry entry)
{
if (entry == null) {
throw new ArgumentNullException(nameof(entry));
}
int result = 0;
// Add data descriptor if flagged as required
if ((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0) {
// The signature is not PKZIP originally but is now described as optional
// in the PKZIP Appnote documenting the format.
WriteLEInt(ZipConstants.DataDescriptorSignature);
WriteLEInt(unchecked((int)(entry.Crc)));
result += 8;
// Zip64 entries record 64-bit sizes; classic entries record 32-bit sizes.
if (entry.LocalHeaderRequiresZip64) {
WriteLELong(entry.CompressedSize);
WriteLELong(entry.Size);
result += 16;
} else {
WriteLEInt((int)entry.CompressedSize);
WriteLEInt((int)entry.Size);
result += 8;
}
}
return result;
}
/// <summary>
/// Read data descriptor at the end of compressed data.
/// </summary>
/// <param name="zip64">if set to <c>true</c> the sizes are read as 64-bit (Zip64) values.</param>
/// <param name="data">The data to fill in.</param>
/// <exception cref="ZipException">Thrown when the descriptor signature is not found.</exception>
public void ReadDataDescriptor(bool zip64, DescriptorData data)
{
int intValue = ReadLEInt();
// In theory this may not be a descriptor according to PKZIP appnote.
// In practise its always there.
if (intValue != ZipConstants.DataDescriptorSignature) {
throw new ZipException("Data descriptor signature not found");
}
data.Crc = ReadLEInt();
if (zip64) {
data.CompressedSize = ReadLELong();
data.Size = ReadLELong();
} else {
data.CompressedSize = ReadLEInt();
data.Size = ReadLEInt();
}
}
#region Instance Fields
bool isOwner_;
Stream stream_;
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Xml;
using System.Collections;
namespace System.Runtime.Serialization
{
// NOTE: XmlReader methods that are not needed have been left un-implemented
internal class ExtensionDataReader : XmlReader
{
private enum ExtensionDataNodeType
{
None,
Element,
EndElement,
Text,
Xml,
ReferencedElement,
NullElement,
}
private ElementData[] _elements;
private ElementData _element;
private ElementData _nextElement;
private ReadState _readState = ReadState.Initial;
private readonly ExtensionDataNodeType _internalNodeType;
private XmlNodeType _nodeType;
private int _depth;
private string _localName;
private string _ns;
private string _prefix;
private string _value;
private int _attributeCount;
private int _attributeIndex;
private static readonly object s_prefixLock = new object();
#pragma warning disable 0649
private readonly XmlNodeReader _xmlNodeReader;
#pragma warning restore 0649
private readonly XmlObjectSerializerReadContext _context;
private static readonly Hashtable s_nsToPrefixTable = new Hashtable();
private static readonly Hashtable s_prefixToNsTable = new Hashtable();
static ExtensionDataReader()
{
AddPrefix(Globals.XsiPrefix, Globals.SchemaInstanceNamespace);
AddPrefix(Globals.SerPrefix, Globals.SerializationNamespace);
AddPrefix(string.Empty, string.Empty);
}
internal ExtensionDataReader(XmlObjectSerializerReadContext context)
{
_attributeIndex = -1;
_context = context;
}
internal IDataNode GetCurrentNode()
{
IDataNode retVal = _element.dataNode;
Skip();
return retVal;
}
internal void Reset()
{
_localName = null;
_ns = null;
_prefix = null;
_value = null;
_attributeCount = 0;
_attributeIndex = -1;
_depth = 0;
_element = null;
_nextElement = null;
_elements = null;
}
private bool IsXmlDataNode { get { return (_internalNodeType == ExtensionDataNodeType.Xml); } }
public override XmlNodeType NodeType { get { return IsXmlDataNode ? _xmlNodeReader.NodeType : _nodeType; } }
public override string LocalName { get { return IsXmlDataNode ? _xmlNodeReader.LocalName : _localName; } }
public override string NamespaceURI { get { return IsXmlDataNode ? _xmlNodeReader.NamespaceURI : _ns; } }
public override string Prefix { get { return IsXmlDataNode ? _xmlNodeReader.Prefix : _prefix; } }
public override string Value { get { return IsXmlDataNode ? _xmlNodeReader.Value : _value; } }
public override int Depth { get { return IsXmlDataNode ? _xmlNodeReader.Depth : _depth; } }
public override int AttributeCount { get { return IsXmlDataNode ? _xmlNodeReader.AttributeCount : _attributeCount; } }
public override bool EOF { get { return IsXmlDataNode ? _xmlNodeReader.EOF : (_readState == ReadState.EndOfFile); } }
public override ReadState ReadState { get { return IsXmlDataNode ? _xmlNodeReader.ReadState : _readState; } }
public override bool IsEmptyElement { get { return IsXmlDataNode ? _xmlNodeReader.IsEmptyElement : false; } }
public override bool IsDefault { get { return IsXmlDataNode ? _xmlNodeReader.IsDefault : base.IsDefault; } }
//public override char QuoteChar { get { return IsXmlDataNode ? xmlNodeReader.QuoteChar : base.QuoteChar; } }
public override XmlSpace XmlSpace { get { return IsXmlDataNode ? _xmlNodeReader.XmlSpace : base.XmlSpace; } }
public override string XmlLang { get { return IsXmlDataNode ? _xmlNodeReader.XmlLang : base.XmlLang; } }
public override string this[int i] { get { return IsXmlDataNode ? _xmlNodeReader[i] : GetAttribute(i); } }
public override string this[string name] { get { return IsXmlDataNode ? _xmlNodeReader[name] : GetAttribute(name); } }
public override string this[string name, string namespaceURI] { get { return IsXmlDataNode ? _xmlNodeReader[name, namespaceURI] : GetAttribute(name, namespaceURI); } }
public override bool MoveToFirstAttribute()
{
if (IsXmlDataNode)
return _xmlNodeReader.MoveToFirstAttribute();
if (_attributeCount == 0)
return false;
MoveToAttribute(0);
return true;
}
public override bool MoveToNextAttribute()
{
if (IsXmlDataNode)
return _xmlNodeReader.MoveToNextAttribute();
if (_attributeIndex + 1 >= _attributeCount)
return false;
MoveToAttribute(_attributeIndex + 1);
return true;
}
public override void MoveToAttribute(int index)
{
if (IsXmlDataNode)
_xmlNodeReader.MoveToAttribute(index);
else
{
if (index < 0 || index >= _attributeCount)
throw new XmlException(SR.InvalidXmlDeserializingExtensionData);
_nodeType = XmlNodeType.Attribute;
AttributeData attribute = _element.attributes[index];
_localName = attribute.localName;
_ns = attribute.ns;
_prefix = attribute.prefix;
_value = attribute.value;
_attributeIndex = index;
}
}
public override string GetAttribute(string name, string namespaceURI)
{
if (IsXmlDataNode)
return _xmlNodeReader.GetAttribute(name, namespaceURI);
for (int i = 0; i < _element.attributeCount; i++)
{
AttributeData attribute = _element.attributes[i];
if (attribute.localName == name && attribute.ns == namespaceURI)
return attribute.value;
}
return null;
}
/// <summary>
/// Moves to the attribute with the specified local name and namespace URI.
/// </summary>
/// <param name="name">The local name of the attribute.</param>
/// <param name="namespaceURI">The namespace URI of the attribute.</param>
/// <returns>true if the attribute was found; otherwise false.</returns>
public override bool MoveToAttribute(string name, string namespaceURI)
{
    // Bug fix: forward the caller-supplied namespaceURI instead of the
    // reader's current namespace (_ns), which made lookups in any other
    // namespace fail or match the wrong attribute when delegating to the
    // wrapped XmlNodeReader.
    if (IsXmlDataNode)
        return _xmlNodeReader.MoveToAttribute(name, namespaceURI);
    // Linear scan of the buffered attributes on the current element.
    for (int i = 0; i < _element.attributeCount; i++)
    {
        AttributeData attribute = _element.attributes[i];
        if (attribute.localName == name && attribute.ns == namespaceURI)
        {
            MoveToAttribute(i);
            return true;
        }
    }
    return false;
}
public override bool MoveToElement()
{
if (IsXmlDataNode)
return _xmlNodeReader.MoveToElement();
if (_nodeType != XmlNodeType.Attribute)
return false;
SetElement();
return true;
}
private void SetElement()
{
_nodeType = XmlNodeType.Element;
_localName = _element.localName;
_ns = _element.ns;
_prefix = _element.prefix;
_value = string.Empty;
_attributeCount = _element.attributeCount;
_attributeIndex = -1;
}
public override string LookupNamespace(string prefix)
{
if (IsXmlDataNode)
return _xmlNodeReader.LookupNamespace(prefix);
return (string)s_prefixToNsTable[prefix];
}
public override void Skip()
{
if (IsXmlDataNode)
_xmlNodeReader.Skip();
else
{
if (ReadState != ReadState.Interactive)
return;
MoveToElement();
if (IsElementNode(_internalNodeType))
{
int depth = 1;
while (depth != 0)
{
if (!Read())
throw new XmlException(SR.InvalidXmlDeserializingExtensionData);
if (IsElementNode(_internalNodeType))
depth++;
else if (_internalNodeType == ExtensionDataNodeType.EndElement)
{
ReadEndElement();
depth--;
}
}
}
else
Read();
}
}
private bool IsElementNode(ExtensionDataNodeType nodeType)
{
return (nodeType == ExtensionDataNodeType.Element ||
nodeType == ExtensionDataNodeType.ReferencedElement ||
nodeType == ExtensionDataNodeType.NullElement);
}
protected override void Dispose(bool disposing)
{
if (IsXmlDataNode)
_xmlNodeReader.Dispose();
else
{
Reset();
_readState = ReadState.Closed;
}
base.Dispose(disposing);
}
public override bool Read()
{
if (_nodeType == XmlNodeType.Attribute && MoveToNextAttribute())
return true;
MoveNext(_element.dataNode);
switch (_internalNodeType)
{
case ExtensionDataNodeType.Element:
case ExtensionDataNodeType.ReferencedElement:
case ExtensionDataNodeType.NullElement:
PushElement();
SetElement();
break;
case ExtensionDataNodeType.Text:
_nodeType = XmlNodeType.Text;
_prefix = string.Empty;
_ns = string.Empty;
_localName = string.Empty;
_attributeCount = 0;
_attributeIndex = -1;
break;
case ExtensionDataNodeType.EndElement:
_nodeType = XmlNodeType.EndElement;
_prefix = string.Empty;
_ns = string.Empty;
_localName = string.Empty;
_value = string.Empty;
_attributeCount = 0;
_attributeIndex = -1;
PopElement();
break;
case ExtensionDataNodeType.None:
if (_depth != 0)
throw new XmlException(SR.InvalidXmlDeserializingExtensionData);
_nodeType = XmlNodeType.None;
_prefix = string.Empty;
_ns = string.Empty;
_localName = string.Empty;
_value = string.Empty;
_attributeCount = 0;
_readState = ReadState.EndOfFile;
return false;
case ExtensionDataNodeType.Xml:
// do nothing
break;
default:
Fx.Assert("ExtensionDataReader in invalid state");
throw new SerializationException(SR.InvalidStateInExtensionDataReader);
}
_readState = ReadState.Interactive;
return true;
}
public override string Name
{
get
{
if (IsXmlDataNode)
{
return _xmlNodeReader.Name;
}
Fx.Assert("ExtensionDataReader Name property should only be called for IXmlSerializable");
return string.Empty;
}
}
public override bool HasValue
{
get
{
if (IsXmlDataNode)
{
return _xmlNodeReader.HasValue;
}
Fx.Assert("ExtensionDataReader HasValue property should only be called for IXmlSerializable");
return false;
}
}
public override string BaseURI
{
get
{
if (IsXmlDataNode)
{
return _xmlNodeReader.BaseURI;
}
Fx.Assert("ExtensionDataReader BaseURI property should only be called for IXmlSerializable");
return string.Empty;
}
}
public override XmlNameTable NameTable
{
get
{
if (IsXmlDataNode)
{
return _xmlNodeReader.NameTable;
}
Fx.Assert("ExtensionDataReader NameTable property should only be called for IXmlSerializable");
return null;
}
}
public override string GetAttribute(string name)
{
if (IsXmlDataNode)
{
return _xmlNodeReader.GetAttribute(name);
}
Fx.Assert("ExtensionDataReader GetAttribute method should only be called for IXmlSerializable");
return null;
}
public override string GetAttribute(int i)
{
if (IsXmlDataNode)
{
return _xmlNodeReader.GetAttribute(i);
}
Fx.Assert("ExtensionDataReader GetAttribute method should only be called for IXmlSerializable");
return null;
}
public override bool MoveToAttribute(string name)
{
if (IsXmlDataNode)
{
return _xmlNodeReader.MoveToAttribute(name);
}
Fx.Assert("ExtensionDataReader MoveToAttribute method should only be called for IXmlSerializable");
return false;
}
public override void ResolveEntity()
{
if (IsXmlDataNode)
{
_xmlNodeReader.ResolveEntity();
}
else
{
Fx.Assert("ExtensionDataReader ResolveEntity method should only be called for IXmlSerializable");
}
}
public override bool ReadAttributeValue()
{
if (IsXmlDataNode)
{
return _xmlNodeReader.ReadAttributeValue();
}
Fx.Assert("ExtensionDataReader ReadAttributeValue method should only be called for IXmlSerializable");
return false;
}
private void MoveNext(IDataNode dataNode)
{
throw NotImplemented.ByDesign;
}
private void PushElement()
{
GrowElementsIfNeeded();
_elements[_depth++] = _element;
if (_nextElement == null)
_element = GetNextElement();
else
{
_element = _nextElement;
_nextElement = null;
}
}
private void PopElement()
{
_prefix = _element.prefix;
_localName = _element.localName;
_ns = _element.ns;
if (_depth == 0)
return;
_depth--;
if (_elements != null)
{
_element = _elements[_depth];
}
}
private void GrowElementsIfNeeded()
{
if (_elements == null)
_elements = new ElementData[8];
else if (_elements.Length == _depth)
{
ElementData[] newElements = new ElementData[_elements.Length * 2];
Array.Copy(_elements, 0, newElements, 0, _elements.Length);
_elements = newElements;
}
}
private ElementData GetNextElement()
{
int nextDepth = _depth + 1;
return (_elements == null || _elements.Length <= nextDepth || _elements[nextDepth] == null)
? new ElementData() : _elements[nextDepth];
}
/// <summary>
/// Returns the prefix registered for the given namespace, generating and
/// caching a new one ("p0", "p1", ...) on first use.
/// </summary>
/// <param name="ns">The namespace to look up; null is treated as empty.</param>
internal static string GetPrefix(string ns)
{
    // Normalize so the tables never see a null key.
    ns = ns ?? string.Empty;
    string prefix = (string)s_nsToPrefixTable[ns];
    if (prefix == null)
    {
        // Double-checked locking: the prefix tables are shared statics.
        lock (s_prefixLock)
        {
            prefix = (string)s_nsToPrefixTable[ns];
            if (prefix == null)
            {
                // Dead null-check removed: ns was coalesced to string.Empty above,
                // so only the empty-length case needs the empty prefix.
                prefix = ns.Length == 0 ? string.Empty : "p" + s_nsToPrefixTable.Count;
                AddPrefix(prefix, ns);
            }
        }
    }
    return prefix;
}
private static void AddPrefix(string prefix, string ns)
{
s_nsToPrefixTable.Add(ns, prefix);
s_prefixToNsTable.Add(prefix, ns);
}
}
#if USE_REFEMIT
public class AttributeData
#else
internal class AttributeData
#endif
{
// Buffered prefix, namespace, local name and textual value of one attribute.
public string prefix;
public string ns;
public string localName;
public string value;
}
#if USE_REFEMIT
public class ElementData
#else
internal class ElementData
#endif
{
// Buffered name, namespace and prefix of the element.
public string localName;
public string ns;
public string prefix;
// Number of valid entries in the attributes array.
public int attributeCount;
public AttributeData[] attributes;
// Data node this element was produced from.
public IDataNode dataNode;
public int childElementIndex;
// Appends an attribute, reusing a previously allocated AttributeData slot
// when one is available.
public void AddAttribute(string prefix, string ns, string name, string value)
{
GrowAttributesIfNeeded();
AttributeData attribute = attributes[attributeCount];
if (attribute == null)
attributes[attributeCount] = attribute = new AttributeData();
attribute.prefix = prefix;
attribute.ns = ns;
attribute.localName = name;
attribute.value = value;
attributeCount++;
}
// Lazily allocates the attribute array (initial capacity 4) and doubles
// its size when full; existing AttributeData instances are preserved.
private void GrowAttributesIfNeeded()
{
if (attributes == null)
attributes = new AttributeData[4];
else if (attributes.Length == attributeCount)
{
AttributeData[] newAttributes = new AttributeData[attributes.Length * 2];
Array.Copy(attributes, 0, newAttributes, 0, attributes.Length);
attributes = newAttributes;
}
}
}
}
| |
// Portions of this source file are adapted from the Windows Presentation Foundation project.
// (https://github.com/dotnet/wpf/)
//
// Licensed to The Avalonia Project under MIT License, courtesy of The .NET Foundation.
using System;
using Avalonia.Controls.Metadata;
using Avalonia.Data;
using Avalonia.Input;
using Avalonia.Layout;
using Avalonia.Metadata;
using Avalonia.Utilities;
namespace Avalonia.Controls.Primitives
{
[PseudoClasses(":vertical", ":horizontal")]
public class Track : Control
{
public static readonly DirectProperty<Track, double> MinimumProperty =
RangeBase.MinimumProperty.AddOwner<Track>(o => o.Minimum, (o, v) => o.Minimum = v);
public static readonly DirectProperty<Track, double> MaximumProperty =
RangeBase.MaximumProperty.AddOwner<Track>(o => o.Maximum, (o, v) => o.Maximum = v);
public static readonly DirectProperty<Track, double> ValueProperty =
RangeBase.ValueProperty.AddOwner<Track>(o => o.Value, (o, v) => o.Value = v);
public static readonly StyledProperty<double> ViewportSizeProperty =
ScrollBar.ViewportSizeProperty.AddOwner<Track>();
public static readonly StyledProperty<Orientation> OrientationProperty =
ScrollBar.OrientationProperty.AddOwner<Track>();
public static readonly StyledProperty<Thumb> ThumbProperty =
AvaloniaProperty.Register<Track, Thumb>(nameof(Thumb));
public static readonly StyledProperty<Button> IncreaseButtonProperty =
AvaloniaProperty.Register<Track, Button>(nameof(IncreaseButton));
public static readonly StyledProperty<Button> DecreaseButtonProperty =
AvaloniaProperty.Register<Track, Button>(nameof(DecreaseButton));
public static readonly StyledProperty<bool> IsDirectionReversedProperty =
AvaloniaProperty.Register<Track, bool>(nameof(IsDirectionReversed));
public static readonly StyledProperty<bool> IgnoreThumbDragProperty =
AvaloniaProperty.Register<Track, bool>(nameof(IsThumbDragHandled));
private double _minimum;
private double _maximum = 100.0;
private double _value;
static Track()
{
ThumbProperty.Changed.AddClassHandler<Track>((x, e) => x.ThumbChanged(e));
IncreaseButtonProperty.Changed.AddClassHandler<Track>((x, e) => x.ButtonChanged(e));
DecreaseButtonProperty.Changed.AddClassHandler<Track>((x, e) => x.ButtonChanged(e));
AffectsArrange<Track>(MinimumProperty, MaximumProperty, ValueProperty, OrientationProperty);
}
public Track()
{
UpdatePseudoClasses(Orientation);
}
public double Minimum
{
get { return _minimum; }
set { SetAndRaise(MinimumProperty, ref _minimum, value); }
}
public double Maximum
{
get { return _maximum; }
set { SetAndRaise(MaximumProperty, ref _maximum, value); }
}
public double Value
{
get { return _value; }
set { SetAndRaise(ValueProperty, ref _value, value); }
}
public double ViewportSize
{
get { return GetValue(ViewportSizeProperty); }
set { SetValue(ViewportSizeProperty, value); }
}
public Orientation Orientation
{
get { return GetValue(OrientationProperty); }
set { SetValue(OrientationProperty, value); }
}
[Content]
public Thumb Thumb
{
get { return GetValue(ThumbProperty); }
set { SetValue(ThumbProperty, value); }
}
public Button IncreaseButton
{
get { return GetValue(IncreaseButtonProperty); }
set { SetValue(IncreaseButtonProperty, value); }
}
public Button DecreaseButton
{
get { return GetValue(DecreaseButtonProperty); }
set { SetValue(DecreaseButtonProperty, value); }
}
public bool IsDirectionReversed
{
get { return GetValue(IsDirectionReversedProperty); }
set { SetValue(IsDirectionReversedProperty, value); }
}
public bool IsThumbDragHandled
{
get { return GetValue(IgnoreThumbDragProperty); }
set { SetValue(IgnoreThumbDragProperty, value); }
}
private double ThumbCenterOffset { get; set; }
private double Density { get; set; }
/// <summary>
/// Calculates the track value that corresponds to a point, measured from the
/// centre of the <see cref="Thumb"/>.
/// </summary>
/// <param name="point">The specified point.</param>
/// <returns>
/// The resulting value, clamped to the [<see cref="Minimum"/>, <see cref="Maximum"/>] range.
/// </returns>
public virtual double ValueFromPoint(Point point)
{
    // Resolve the displacement from the thumb centre along the active axis.
    double result = Orientation == Orientation.Horizontal
        ? Value + ValueFromDistance(point.X - ThumbCenterOffset, point.Y - (Bounds.Height * 0.5))
        : Value + ValueFromDistance(point.X - (Bounds.Width * 0.5), point.Y - ThumbCenterOffset);
    return Math.Max(Minimum, Math.Min(Maximum, result));
}
/// <summary>
/// Calculates the change in the <see cref="Value"/> of the <see cref="Track"/>
/// produced by a displacement of the <see cref="Thumb"/>.
/// </summary>
/// <param name="horizontal">The horizontal displacement of the thumb.</param>
/// <param name="vertical">The vertical displacement of the thumb.</param>
public virtual double ValueFromDistance(double horizontal, double vertical)
{
    double direction = IsDirectionReversed ? -1 : 1;
    // Horizontal displacement maps directly to value; increases in y move
    // the value down, hence the extra negation on the vertical axis.
    return Orientation == Orientation.Horizontal
        ? direction * horizontal * Density
        : -1 * direction * vertical * Density;
}
/// <summary>
/// Measures the track; only the <see cref="Thumb"/> contributes to the
/// desired size, and the scrolling axis collapses to zero when a viewport
/// size is set.
/// </summary>
protected override Size MeasureOverride(Size availableSize)
{
Size desiredSize = new Size(0.0, 0.0);
// Only measure thumb.
// Repeat buttons will be sized based on thumb
if (Thumb != null)
{
Thumb.Measure(availableSize);
desiredSize = Thumb.DesiredSize;
}
if (!double.IsNaN(ViewportSize))
{
// ScrollBar can shrink to 0 in the direction of scrolling
if (Orientation == Orientation.Vertical)
desiredSize = desiredSize.WithHeight(0.0);
else
desiredSize = desiredSize.WithWidth(0.0);
}
return desiredSize;
}
/// <summary>
/// Arranges the decrease button, thumb and increase button along the track's
/// orientation, honouring <see cref="IsDirectionReversed"/>, and records the
/// thumb centre for <see cref="ValueFromPoint"/>.
/// </summary>
protected override Size ArrangeOverride(Size arrangeSize)
{
double decreaseButtonLength, thumbLength, increaseButtonLength;
var isVertical = Orientation == Orientation.Vertical;
var viewportSize = Math.Max(0.0, ViewportSize);
// If viewport is NaN, compute thumb's size based on its desired size,
// otherwise compute the thumb base on the viewport and extent properties
if (double.IsNaN(ViewportSize))
{
ComputeSliderLengths(arrangeSize, isVertical, out decreaseButtonLength, out thumbLength, out increaseButtonLength);
}
else
{
// Don't arrange if there's not enough content or the track is too small
if (!ComputeScrollBarLengths(arrangeSize, viewportSize, isVertical, out decreaseButtonLength, out thumbLength, out increaseButtonLength))
{
return arrangeSize;
}
}
// Layout the pieces of track
var offset = new Point();
var pieceSize = arrangeSize;
var isDirectionReversed = IsDirectionReversed;
if (isVertical)
{
// Clamp each piece to the available height before positioning.
CoerceLength(ref decreaseButtonLength, arrangeSize.Height);
CoerceLength(ref increaseButtonLength, arrangeSize.Height);
CoerceLength(ref thumbLength, arrangeSize.Height);
// Vertical order (not reversed): increase button, thumb, decrease button.
offset = offset.WithY(isDirectionReversed ? decreaseButtonLength + thumbLength : 0.0);
pieceSize = pieceSize.WithHeight(increaseButtonLength);
if (IncreaseButton != null)
{
IncreaseButton.Arrange(new Rect(offset, pieceSize));
}
offset = offset.WithY(isDirectionReversed ? 0.0 : increaseButtonLength + thumbLength);
pieceSize = pieceSize.WithHeight(decreaseButtonLength);
if (DecreaseButton != null)
{
DecreaseButton.Arrange(new Rect(offset, pieceSize));
}
offset = offset.WithY(isDirectionReversed ? decreaseButtonLength : increaseButtonLength);
pieceSize = pieceSize.WithHeight(thumbLength);
if (Thumb != null)
{
Thumb.Arrange(new Rect(offset, pieceSize));
}
// Remember the thumb centre for hit-test to value conversion.
ThumbCenterOffset = offset.Y + (thumbLength * 0.5);
}
else
{
// Clamp each piece to the available width before positioning.
CoerceLength(ref decreaseButtonLength, arrangeSize.Width);
CoerceLength(ref increaseButtonLength, arrangeSize.Width);
CoerceLength(ref thumbLength, arrangeSize.Width);
// Horizontal order (not reversed): decrease button, thumb, increase button.
offset = offset.WithX(isDirectionReversed ? increaseButtonLength + thumbLength : 0.0);
pieceSize = pieceSize.WithWidth(decreaseButtonLength);
if (DecreaseButton != null)
{
DecreaseButton.Arrange(new Rect(offset, pieceSize));
}
offset = offset.WithX(isDirectionReversed ? 0.0 : decreaseButtonLength + thumbLength);
pieceSize = pieceSize.WithWidth(increaseButtonLength);
if (IncreaseButton != null)
{
IncreaseButton.Arrange(new Rect(offset, pieceSize));
}
offset = offset.WithX(isDirectionReversed ? increaseButtonLength : decreaseButtonLength);
pieceSize = pieceSize.WithWidth(thumbLength);
if (Thumb != null)
{
Thumb.Arrange(new Rect(offset, pieceSize));
}
// Remember the thumb centre for hit-test to value conversion.
ThumbCenterOffset = offset.X + (thumbLength * 0.5);
}
return arrangeSize;
}
// Keeps the :vertical/:horizontal pseudo classes in sync when Orientation changes.
protected override void OnPropertyChanged<T>(AvaloniaPropertyChangedEventArgs<T> change)
{
    base.OnPropertyChanged(change);
    if (change.Property == OrientationProperty)
    {
        UpdatePseudoClasses(change.NewValue.GetValueOrDefault<Orientation>());
    }
}
// Clamps a computed piece length into [0, trackLength]. NaN collapses to the
// full track length.
private static void CoerceLength(ref double componentLength, double trackLength)
{
    if (componentLength < 0)
    {
        componentLength = 0.0;
        return;
    }

    if (double.IsNaN(componentLength) || componentLength > trackLength)
    {
        componentLength = trackLength;
    }
}
// Splits the track for slider mode (no viewport): the thumb keeps its desired
// size and the two buttons divide the remaining length in proportion to where
// Value sits within [Minimum, Maximum].
private void ComputeSliderLengths(Size arrangeSize, bool isVertical, out double decreaseButtonLength, out double thumbLength, out double increaseButtonLength)
{
    double min = Minimum;
    double range = Math.Max(0.0, Maximum - min);
    double offset = Math.Min(range, Value - min);
    double trackLength;
    // Compute thumb size
    if (isVertical)
    {
        trackLength = arrangeSize.Height;
        thumbLength = Thumb == null ? 0 : Thumb.DesiredSize.Height;
    }
    else
    {
        trackLength = arrangeSize.Width;
        thumbLength = Thumb == null ? 0 : Thumb.DesiredSize.Width;
    }
    CoerceLength(ref thumbLength, trackLength);
    double remainingTrackLength = trackLength - thumbLength;
    // NOTE: if range is 0 this divides by zero; CoerceLength then maps the
    // resulting NaN to remainingTrackLength.
    decreaseButtonLength = remainingTrackLength * offset / range;
    CoerceLength(ref decreaseButtonLength, remainingTrackLength);
    increaseButtonLength = remainingTrackLength - decreaseButtonLength;
    CoerceLength(ref increaseButtonLength, remainingTrackLength);
    // Density converts pixel distances back into value units (see ValueFromDistance).
    Density = range / remainingTrackLength;
}
// Splits the track for scrollbar mode: thumb length is proportional to
// viewport/extent (clamped to a minimum length), and returns false when there
// is nothing to scroll so the caller can skip arranging the pieces entirely.
private bool ComputeScrollBarLengths(Size arrangeSize, double viewportSize, bool isVertical, out double decreaseButtonLength, out double thumbLength, out double increaseButtonLength)
{
    var min = Minimum;
    var range = Math.Max(0.0, Maximum - min);
    var offset = Math.Min(range, Value - min);
    var extent = Math.Max(0.0, range) + viewportSize;
    var trackLength = isVertical ? arrangeSize.Height : arrangeSize.Width;
    // Default minimum thumb size; a MinWidth/MinHeight set on the thumb wins.
    double thumbMinLength = 10;
    StyledProperty<double> minLengthProperty = isVertical ? MinHeightProperty : MinWidthProperty;
    var thumb = Thumb;
    if (thumb != null && thumb.IsSet(minLengthProperty))
    {
        thumbMinLength = thumb.GetValue(minLengthProperty);
    }
    thumbLength = trackLength * viewportSize / extent;
    CoerceLength(ref thumbLength, trackLength);
    thumbLength = Math.Max(thumbMinLength, thumbLength);
    // If we don't have enough content to scroll, disable the track.
    var notEnoughContentToScroll = MathUtilities.LessThanOrClose(range, 0.0);
    var thumbLongerThanTrack = thumbLength > trackLength;
    // if there's not enough content or the thumb is longer than the track,
    // hide the track and don't arrange the pieces
    if (notEnoughContentToScroll || thumbLongerThanTrack)
    {
        ShowChildren(false);
        ThumbCenterOffset = Double.NaN;
        Density = Double.NaN;
        decreaseButtonLength = 0.0;
        increaseButtonLength = 0.0;
        return false; // don't arrange
    }
    else
    {
        ShowChildren(true);
    }
    // Compute lengths of increase and decrease button
    double remainingTrackLength = trackLength - thumbLength;
    decreaseButtonLength = remainingTrackLength * offset / range;
    CoerceLength(ref decreaseButtonLength, remainingTrackLength);
    increaseButtonLength = remainingTrackLength - decreaseButtonLength;
    CoerceLength(ref increaseButtonLength, remainingTrackLength);
    Density = range / remainingTrackLength;
    return true;
}
// Handles Thumb property changes: detaches the outgoing thumb (drag handler and
// logical/visual trees) before wiring up the replacement.
private void ThumbChanged(AvaloniaPropertyChangedEventArgs e)
{
    if (e.OldValue != null)
    {
        var detached = (Thumb)e.OldValue;
        detached.DragDelta -= ThumbDragged;
        LogicalChildren.Remove(detached);
        VisualChildren.Remove(detached);
    }

    if (e.NewValue != null)
    {
        var attached = (Thumb)e.NewValue;
        attached.DragDelta += ThumbDragged;
        LogicalChildren.Add(attached);
        VisualChildren.Add(attached);
    }
}
// Handles IncreaseButton/DecreaseButton property changes by swapping the button
// in the logical and visual trees.
private void ButtonChanged(AvaloniaPropertyChangedEventArgs e)
{
    if (e.OldValue != null)
    {
        var removed = (Button)e.OldValue;
        LogicalChildren.Remove(removed);
        VisualChildren.Remove(removed);
    }

    if (e.NewValue != null)
    {
        var added = (Button)e.NewValue;
        LogicalChildren.Add(added);
        VisualChildren.Add(added);
    }
}
// Translates a thumb drag delta into a clamped Value change, unless a consumer
// (e.g. a templated parent) has claimed drag handling.
private void ThumbDragged(object sender, VectorEventArgs e)
{
    if (!IsThumbDragHandled)
    {
        var delta = ValueFromDistance(e.Vector.X, e.Vector.Y);
        Value = MathUtilities.Clamp(Value + delta, Minimum, Maximum);
    }
}
private void ShowChildren(bool visible)
{
    // WPF sets Visible = Hidden here but we don't have that, and setting IsVisible = false
    // will cause us to stop being laid out. Instead show/hide the child controls.
    var thumb = Thumb;
    if (thumb != null)
    {
        thumb.IsVisible = visible;
    }

    var increase = IncreaseButton;
    if (increase != null)
    {
        increase.IsVisible = visible;
    }

    var decrease = DecreaseButton;
    if (decrease != null)
    {
        decrease.IsVisible = visible;
    }
}
// Reflects the current orientation in the :vertical/:horizontal pseudo classes.
private void UpdatePseudoClasses(Orientation o)
{
    var isVertical = o == Orientation.Vertical;
    var isHorizontal = o == Orientation.Horizontal;
    PseudoClasses.Set(":vertical", isVertical);
    PseudoClasses.Set(":horizontal", isHorizontal);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
namespace System.IO
{
internal static partial class FileSystem
{
internal const int GENERIC_READ = unchecked((int)0x80000000);
// Copies sourceFullPath to destFullPath via the Kernel32 CopyFile shim
// (which returns a Win32 error code rather than setting last-error), then
// translates failures into the exception callers expect.
public static void CopyFile(string sourceFullPath, string destFullPath, bool overwrite)
{
    // CopyFile's last argument is failIfExists — the inverse of overwrite.
    int errorCode = Interop.Kernel32.CopyFile(sourceFullPath, destFullPath, !overwrite);
    if (errorCode != Interop.Errors.ERROR_SUCCESS)
    {
        string fileName = destFullPath;
        if (errorCode != Interop.Errors.ERROR_FILE_EXISTS)
        {
            // For a number of error codes (sharing violation, path not found, etc) we don't know if the problem was with
            // the source or dest file. Try reading the source file.
            using (SafeFileHandle handle = Interop.Kernel32.CreateFile(sourceFullPath, GENERIC_READ, FileShare.Read, FileMode.Open, 0))
            {
                if (handle.IsInvalid)
                    fileName = sourceFullPath;
            }
            if (errorCode == Interop.Errors.ERROR_ACCESS_DENIED)
            {
                // Copying onto an existing directory reports ACCESS_DENIED;
                // surface a clearer "file is a directory" IOException instead.
                if (DirectoryExists(destFullPath))
                    throw new IOException(SR.Format(SR.Arg_FileIsDirectory_Name, destFullPath), Interop.Errors.ERROR_ACCESS_DENIED);
            }
        }
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fileName);
    }
}
// Replaces destFullPath with sourceFullPath, optionally keeping a backup and
// ignoring metadata merge errors; thin wrapper over Win32 ReplaceFile.
public static void ReplaceFile(string sourceFullPath, string destFullPath, string destBackupFullPath, bool ignoreMetadataErrors)
{
    int flags = ignoreMetadataErrors ? Interop.Kernel32.REPLACEFILE_IGNORE_MERGE_ERRORS : 0;
    // Note the Win32 argument order: the file being replaced comes first,
    // the replacement second.
    if (!Interop.Kernel32.ReplaceFile(destFullPath, sourceFullPath, destBackupFullPath, flags, IntPtr.Zero, IntPtr.Zero))
    {
        throw Win32Marshal.GetExceptionForWin32Error(Marshal.GetLastWin32Error());
    }
}
// Creates fullPath and any missing intermediate directories, walking from the
// leaf back toward the root to find the missing portion, then creating them
// root-first. Tolerates races with other creators and ACL-hidden ancestors.
public static void CreateDirectory(string fullPath)
{
    // We can save a bunch of work if the directory we want to create already exists. This also
    // saves us in the case where sub paths are inaccessible (due to ERROR_ACCESS_DENIED) but the
    // final path is accessible and the directory already exists. For example, consider trying
    // to create c:\Foo\Bar\Baz, where everything already exists but ACLS prevent access to c:\Foo
    // and c:\Foo\Bar. In that case, this code will think it needs to create c:\Foo, and c:\Foo\Bar
    // and fail to due so, causing an exception to be thrown. This is not what we want.
    if (DirectoryExists(fullPath))
        return;
    List<string> stackDir = new List<string>();
    // Attempt to figure out which directories don't exist, and only
    // create the ones we need. Note that InternalExists may fail due
    // to Win32 ACL's preventing us from seeing a directory, and this
    // isn't threadsafe.
    bool somepathexists = false;
    int length = fullPath.Length;
    // We need to trim the trailing slash or the code will try to create 2 directories of the same name.
    if (length >= 2 && PathHelpers.EndsInDirectorySeparator(fullPath))
        length--;
    int lengthRoot = PathInternal.GetRootLength(fullPath);
    if (length > lengthRoot)
    {
        // Special case root (fullpath = X:\\)
        int i = length - 1;
        // Walk from the leaf toward the root, pushing each missing directory;
        // stop as soon as an existing ancestor is found.
        while (i >= lengthRoot && !somepathexists)
        {
            string dir = fullPath.Substring(0, i + 1);
            if (!DirectoryExists(dir)) // Create only the ones missing
                stackDir.Add(dir);
            else
                somepathexists = true;
            while (i > lengthRoot && !PathInternal.IsDirectorySeparator(fullPath[i])) i--;
            i--;
        }
    }
    int count = stackDir.Count;
    // If we were passed a DirectorySecurity, convert it to a security
    // descriptor and set it in he call to CreateDirectory.
    Interop.Kernel32.SECURITY_ATTRIBUTES secAttrs = default;
    bool r = true;
    int firstError = 0;
    string errorString = fullPath;
    // If all the security checks succeeded create all the directories
    // (stackDir holds leaf-first entries, so popping from the end creates
    // the directories root-first).
    while (stackDir.Count > 0)
    {
        string name = stackDir[stackDir.Count - 1];
        stackDir.RemoveAt(stackDir.Count - 1);
        r = Interop.Kernel32.CreateDirectory(name, ref secAttrs);
        if (!r && (firstError == 0))
        {
            int currentError = Marshal.GetLastWin32Error();
            // While we tried to avoid creating directories that don't
            // exist above, there are at least two cases that will
            // cause us to see ERROR_ALREADY_EXISTS here. InternalExists
            // can fail because we didn't have permission to the
            // directory. Secondly, another thread or process could
            // create the directory between the time we check and the
            // time we try using the directory. Thirdly, it could
            // fail because the target does exist, but is a file.
            if (currentError != Interop.Errors.ERROR_ALREADY_EXISTS)
                firstError = currentError;
            else
            {
                // If there's a file in this directory's place, or if we have ERROR_ACCESS_DENIED when checking if the directory already exists throw.
                if (File.InternalExists(name) || (!DirectoryExists(name, out currentError) && currentError == Interop.Errors.ERROR_ACCESS_DENIED))
                {
                    firstError = currentError;
                    errorString = name;
                }
            }
        }
    }
    // We need this check to mask OS differences
    // Handle CreateDirectory("X:\\") when X: doesn't exist. Similarly for n/w paths.
    if ((count == 0) && !somepathexists)
    {
        string root = Directory.InternalGetDirectoryRoot(fullPath);
        if (!DirectoryExists(root))
            throw Win32Marshal.GetExceptionForWin32Error(Interop.Errors.ERROR_PATH_NOT_FOUND, root);
        return;
    }
    // Only throw an exception if creating the exact directory we
    // wanted failed to work correctly.
    if (!r && (firstError != 0))
        throw Win32Marshal.GetExceptionForWin32Error(firstError, errorString);
}
// Deletes the file at fullPath. Deleting a file that is already gone is
// treated as success to match .NET's File.Delete semantics.
public static void DeleteFile(string fullPath)
{
    if (Interop.Kernel32.DeleteFile(fullPath))
        return;

    int errorCode = Marshal.GetLastWin32Error();
    if (errorCode != Interop.Errors.ERROR_FILE_NOT_FOUND)
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
}
// Returns true when fullPath exists and is a directory. The error code from
// the underlying attribute query isn't needed here, so it is discarded
// (previously bound to an unused local).
public static bool DirectoryExists(string fullPath)
{
    return DirectoryExists(fullPath, out _);
}
// Returns true when path exists and is a directory; lastError receives the
// Win32 error from the attribute query (0 on success).
private static bool DirectoryExists(string path, out int lastError)
{
    Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    lastError = FillAttributeInfo(path, ref data, returnErrorOnNotFound: true);
    // -1 is the "uninitialized attributes" sentinel; also require the
    // directory bit to be set.
    return (lastError == 0) && (data.dwFileAttributes != -1)
        && ((data.dwFileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_DIRECTORY) != 0);
}
// Enumerates path strings under fullPath matching searchPattern, dispatching
// to the appropriate lazy enumerable for the requested search target.
public static IEnumerable<string> EnumeratePaths(string fullPath, string searchPattern, SearchOption searchOption, SearchTarget searchTarget)
{
    FindEnumerableFactory.NormalizeInputs(ref fullPath, ref searchPattern);

    bool recursive = searchOption == SearchOption.AllDirectories;

    if (searchTarget == SearchTarget.Files)
        return FindEnumerableFactory.UserFiles(fullPath, searchPattern, recursive);
    if (searchTarget == SearchTarget.Directories)
        return FindEnumerableFactory.UserDirectories(fullPath, searchPattern, recursive);
    if (searchTarget == SearchTarget.Both)
        return FindEnumerableFactory.UserEntries(fullPath, searchPattern, recursive);

    throw new ArgumentOutOfRangeException(nameof(searchTarget));
}
// Enumerates FileSystemInfo objects under fullPath matching searchPattern,
// dispatching to the appropriate lazy enumerable for the search target.
public static IEnumerable<FileSystemInfo> EnumerateFileSystemInfos(string fullPath, string searchPattern, SearchOption searchOption, SearchTarget searchTarget)
{
    FindEnumerableFactory.NormalizeInputs(ref fullPath, ref searchPattern);

    bool recursive = searchOption == SearchOption.AllDirectories;

    if (searchTarget == SearchTarget.Directories)
        return FindEnumerableFactory.DirectoryInfos(fullPath, searchPattern, recursive);
    if (searchTarget == SearchTarget.Files)
        return FindEnumerableFactory.FileInfos(fullPath, searchPattern, recursive);
    if (searchTarget == SearchTarget.Both)
        return FindEnumerableFactory.FileSystemInfos(fullPath, searchPattern, recursive);

    throw new ArgumentException(SR.ArgumentOutOfRange_Enum, nameof(searchTarget));
}
/// <summary>
/// Returns 0 on success, otherwise a Win32 error code. Note that
/// classes should use -1 as the uninitialized state for dataInitialized.
/// </summary>
/// <param name="returnErrorOnNotFound">Return the error code for not found errors?</param>
internal static int FillAttributeInfo(string path, ref Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data, bool returnErrorOnNotFound)
{
    int errorCode = Interop.Errors.ERROR_SUCCESS;
    // Neither GetFileAttributes or FindFirstFile like trailing separators
    path = path.TrimEnd(PathHelpers.DirectorySeparatorChars);
    // Suppress the "insert a disk" dialog the OS may show for removable drives.
    using (DisableMediaInsertionPrompt.Create())
    {
        if (!Interop.Kernel32.GetFileAttributesEx(path, Interop.Kernel32.GET_FILEEX_INFO_LEVELS.GetFileExInfoStandard, ref data))
        {
            errorCode = Marshal.GetLastWin32Error();
            if (errorCode == Interop.Errors.ERROR_ACCESS_DENIED)
            {
                // Files that are marked for deletion will not let you GetFileAttributes,
                // ERROR_ACCESS_DENIED is given back without filling out the data struct.
                // FindFirstFile, however, will. Historically we always gave back attributes
                // for marked-for-deletion files.
                var findData = new Interop.Kernel32.WIN32_FIND_DATA();
                using (SafeFindHandle handle = Interop.Kernel32.FindFirstFile(path, ref findData))
                {
                    if (handle.IsInvalid)
                    {
                        errorCode = Marshal.GetLastWin32Error();
                    }
                    else
                    {
                        errorCode = Interop.Errors.ERROR_SUCCESS;
                        data.PopulateFrom(ref findData);
                    }
                }
            }
        }
    }
    // When the caller doesn't care about not-found, fold those errors into a
    // success with the -1 "no attributes" sentinel.
    if (errorCode != Interop.Errors.ERROR_SUCCESS && !returnErrorOnNotFound)
    {
        switch (errorCode)
        {
            case Interop.Errors.ERROR_FILE_NOT_FOUND:
            case Interop.Errors.ERROR_PATH_NOT_FOUND:
            case Interop.Errors.ERROR_NOT_READY: // Removable media not ready
                // Return default value for backward compatibility
                data.dwFileAttributes = -1;
                return Interop.Errors.ERROR_SUCCESS;
        }
    }
    return errorCode;
}
// Returns true when fullPath exists and is NOT a directory (i.e. it is a file).
public static bool FileExists(string fullPath)
{
    var data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    int errorCode = FillAttributeInfo(fullPath, ref data, returnErrorOnNotFound: true);

    // -1 is the "uninitialized attributes" sentinel.
    if (errorCode != 0 || data.dwFileAttributes == -1)
        return false;

    return (data.dwFileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_DIRECTORY) == 0;
}
// Reads the file attributes for fullPath, throwing (including for not-found)
// when the attribute query fails.
public static FileAttributes GetAttributes(string fullPath)
{
    Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    int errorCode = FillAttributeInfo(fullPath, ref data, returnErrorOnNotFound: true);
    if (errorCode != 0)
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    return (FileAttributes)data.dwFileAttributes;
}
// Returns the process's current directory, expanding 8.3 short names back to
// long form when one is detected (indicated by '~' in the path).
public static string GetCurrentDirectory()
{
    StringBuilder sb = StringBuilderCache.Acquire(Interop.Kernel32.MAX_PATH + 1);
    if (Interop.Kernel32.GetCurrentDirectory(sb.Capacity, sb) == 0)
        throw Win32Marshal.GetExceptionForLastWin32Error();
    string currentDirectory = sb.ToString();
    // Note that if we have somehow put our command prompt into short
    // file name mode (i.e. by running edlin or a DOS grep, etc), then
    // this will return a short file name.
    if (currentDirectory.IndexOf('~') >= 0)
    {
        int r = Interop.Kernel32.GetLongPathName(currentDirectory, sb, sb.Capacity);
        if (r == 0 || r >= Interop.Kernel32.MAX_PATH)
        {
            int errorCode = Marshal.GetLastWin32Error();
            if (r >= Interop.Kernel32.MAX_PATH)
                errorCode = Interop.Errors.ERROR_FILENAME_EXCED_RANGE;
            // A handful of errors are tolerated here: the short name is
            // returned as-is rather than failing the whole call.
            if (errorCode != Interop.Errors.ERROR_FILE_NOT_FOUND &&
                errorCode != Interop.Errors.ERROR_PATH_NOT_FOUND &&
                errorCode != Interop.Errors.ERROR_INVALID_FUNCTION && // by design - enough said.
                errorCode != Interop.Errors.ERROR_ACCESS_DENIED)
                throw Win32Marshal.GetExceptionForWin32Error(errorCode);
        }
        currentDirectory = sb.ToString();
    }
    // NOTE(review): sb is not returned to StringBuilderCache on the throwing
    // paths above — presumably the cache simply drops it; confirm intended.
    StringBuilderCache.Release(sb);
    return currentDirectory;
}
// Returns the creation time of fullPath. Not-found errors are folded into a
// default value by FillAttributeInfo (returnErrorOnNotFound: false).
public static DateTimeOffset GetCreationTime(string fullPath)
{
    Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    int errorCode = FillAttributeInfo(fullPath, ref data, returnErrorOnNotFound: false);
    if (errorCode != 0)
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    return data.ftCreationTime.ToDateTimeOffset();
}
// Wraps fullPath in the appropriate FileSystemInfo subtype; the caller has
// already decided whether the path names a directory or a file.
public static FileSystemInfo GetFileSystemInfo(string fullPath, bool asDirectory)
{
    if (asDirectory)
    {
        return new DirectoryInfo(fullPath, null);
    }

    return new FileInfo(fullPath, null);
}
// Returns the last-access time of fullPath. Not-found errors are folded into a
// default value by FillAttributeInfo (returnErrorOnNotFound: false).
public static DateTimeOffset GetLastAccessTime(string fullPath)
{
    Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    int errorCode = FillAttributeInfo(fullPath, ref data, returnErrorOnNotFound: false);
    if (errorCode != 0)
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    return data.ftLastAccessTime.ToDateTimeOffset();
}
// Returns the last-write time of fullPath. Not-found errors are folded into a
// default value by FillAttributeInfo (returnErrorOnNotFound: false).
public static DateTimeOffset GetLastWriteTime(string fullPath)
{
    Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    int errorCode = FillAttributeInfo(fullPath, ref data, returnErrorOnNotFound: false);
    if (errorCode != 0)
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    return data.ftLastWriteTime.ToDateTimeOffset();
}
// Moves a directory via Win32 MoveFile, remapping a couple of error codes for
// historical compatibility before throwing.
public static void MoveDirectory(string sourceFullPath, string destFullPath)
{
    if (!Interop.Kernel32.MoveFile(sourceFullPath, destFullPath))
    {
        int errorCode = Marshal.GetLastWin32Error();
        // FILE_NOT_FOUND is reported as PATH_NOT_FOUND for a directory move.
        if (errorCode == Interop.Errors.ERROR_FILE_NOT_FOUND)
            throw Win32Marshal.GetExceptionForWin32Error(Interop.Errors.ERROR_PATH_NOT_FOUND, sourceFullPath);
        // This check was originally put in for Win9x (unfortunately without special casing it to be for Win9x only). We can't change the NT codepath now for backcomp reasons.
        if (errorCode == Interop.Errors.ERROR_ACCESS_DENIED) // WinNT throws IOException. This check is for Win9x. We can't change it for backcomp.
            throw new IOException(SR.Format(SR.UnauthorizedAccess_IODenied_Path, sourceFullPath), Win32Marshal.MakeHRFromErrorCode(errorCode));
        throw Win32Marshal.GetExceptionForWin32Error(errorCode);
    }
}
// Moves a file via Win32 MoveFile, surfacing any failure as the mapped
// .NET exception.
public static void MoveFile(string sourceFullPath, string destFullPath)
{
    bool moved = Interop.Kernel32.MoveFile(sourceFullPath, destFullPath);
    if (!moved)
    {
        throw Win32Marshal.GetExceptionForLastWin32Error();
    }
}
// Opens a writable handle to fullPath (with backup semantics when asDirectory),
// used by the SetXxxTime methods. Rejects bare volume paths up front.
private static SafeFileHandle OpenHandle(string fullPath, bool asDirectory)
{
    string root = fullPath.Substring(0, PathInternal.GetRootLength(fullPath));
    if (root == fullPath && root[1] == Path.VolumeSeparatorChar)
    {
        // intentionally not fullpath, most upstack public APIs expose this as path.
        throw new ArgumentException(SR.Arg_PathIsVolume, "path");
    }
    // FILE_FLAG_BACKUP_SEMANTICS is required to open a directory handle.
    SafeFileHandle handle = Interop.Kernel32.CreateFile(
        fullPath,
        Interop.Kernel32.GenericOperations.GENERIC_WRITE,
        FileShare.ReadWrite | FileShare.Delete,
        FileMode.Open,
        asDirectory ? Interop.Kernel32.FileOperations.FILE_FLAG_BACKUP_SEMANTICS : 0);
    if (handle.IsInvalid)
    {
        int errorCode = Marshal.GetLastWin32Error();
        // NT5 oddity - when trying to open "C:\" as a File,
        // we usually get ERROR_PATH_NOT_FOUND from the OS. We should
        // probably be consistent w/ every other directory.
        if (!asDirectory && errorCode == Interop.Errors.ERROR_PATH_NOT_FOUND && fullPath.Equals(Directory.GetDirectoryRoot(fullPath)))
            errorCode = Interop.Errors.ERROR_ACCESS_DENIED;
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    }
    return handle;
}
// Removes the directory at fullPath; when recursive, deletes contents first.
// Reparse points are never recursed into — only the link itself is removed.
public static void RemoveDirectory(string fullPath, bool recursive)
{
    // Do not recursively delete through reparse points.
    if (!recursive || IsReparsePoint(fullPath))
    {
        RemoveDirectoryInternal(fullPath, topLevel: true);
        return;
    }
    // We want extended syntax so we can delete "extended" subdirectories and files
    // (most notably ones with trailing whitespace or periods)
    fullPath = PathInternal.EnsureExtendedPrefix(fullPath);
    // A single find-data struct is reused across the whole recursive walk.
    Interop.Kernel32.WIN32_FIND_DATA findData = new Interop.Kernel32.WIN32_FIND_DATA();
    RemoveDirectoryRecursive(fullPath, ref findData, topLevel: true);
}
// Returns true when fullPath has the ReparsePoint attribute (symlink, mount
// point, etc); throws if the attributes cannot be read.
private static bool IsReparsePoint(string fullPath)
{
    Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA data = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();
    int errorCode = FillAttributeInfo(fullPath, ref data, returnErrorOnNotFound: true);
    if (errorCode != Interop.Errors.ERROR_SUCCESS)
    {
        // File not found doesn't make much sense coming from a directory delete.
        if (errorCode == Interop.Errors.ERROR_FILE_NOT_FOUND)
            errorCode = Interop.Errors.ERROR_PATH_NOT_FOUND;
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    }
    return (((FileAttributes)data.dwFileAttributes & FileAttributes.ReparsePoint) != 0);
}
// Depth-first delete of fullPath's contents followed by the directory itself.
// The first failure is remembered (deletion of siblings continues) and
// rethrown after the enumeration finishes. findData is shared by-ref across
// the whole walk to avoid reallocating the large struct per level.
private static void RemoveDirectoryRecursive(string fullPath, ref Interop.Kernel32.WIN32_FIND_DATA findData, bool topLevel)
{
    int errorCode;
    Exception exception = null;
    using (SafeFindHandle handle = Interop.Kernel32.FindFirstFile(Directory.EnsureTrailingDirectorySeparator(fullPath) + "*", ref findData))
    {
        if (handle.IsInvalid)
            throw Win32Marshal.GetExceptionForLastWin32Error(fullPath);
        do
        {
            if ((findData.dwFileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_DIRECTORY) == 0)
            {
                // File
                string fileName = findData.cFileName.GetStringFromFixedBuffer();
                if (!Interop.Kernel32.DeleteFile(Path.Combine(fullPath, fileName)) && exception == null)
                {
                    errorCode = Marshal.GetLastWin32Error();
                    // We don't care if something else deleted the file first
                    if (errorCode != Interop.Errors.ERROR_FILE_NOT_FOUND)
                    {
                        exception = Win32Marshal.GetExceptionForWin32Error(errorCode, fileName);
                    }
                }
            }
            else
            {
                // Directory, skip ".", "..".
                if (findData.cFileName.FixedBufferEqualsString(".") || findData.cFileName.FixedBufferEqualsString(".."))
                    continue;
                string fileName = findData.cFileName.GetStringFromFixedBuffer();
                if ((findData.dwFileAttributes & (int)FileAttributes.ReparsePoint) == 0)
                {
                    // Not a reparse point, recurse.
                    try
                    {
                        RemoveDirectoryRecursive(
                            Path.Combine(fullPath, fileName),
                            findData: ref findData,
                            topLevel: false);
                    }
                    catch (Exception e)
                    {
                        if (exception == null)
                            exception = e;
                    }
                }
                else
                {
                    // Reparse point, don't recurse, just remove. (dwReserved0 is documented for this flag)
                    if (findData.dwReserved0 == Interop.Kernel32.IOReparseOptions.IO_REPARSE_TAG_MOUNT_POINT)
                    {
                        // Mount point. Unmount using full path plus a trailing '\'.
                        // (Note: This doesn't remove the underlying directory)
                        string mountPoint = Path.Combine(fullPath, fileName + PathHelpers.DirectorySeparatorCharAsString);
                        if (!Interop.Kernel32.DeleteVolumeMountPoint(mountPoint) && exception == null)
                        {
                            errorCode = Marshal.GetLastWin32Error();
                            if (errorCode != Interop.Errors.ERROR_SUCCESS &&
                                errorCode != Interop.Errors.ERROR_PATH_NOT_FOUND)
                            {
                                exception = Win32Marshal.GetExceptionForWin32Error(errorCode, fileName);
                            }
                        }
                    }
                    // Note that RemoveDirectory on a symbolic link will remove the link itself.
                    if (!Interop.Kernel32.RemoveDirectory(Path.Combine(fullPath, fileName)) && exception == null)
                    {
                        errorCode = Marshal.GetLastWin32Error();
                        if (errorCode != Interop.Errors.ERROR_PATH_NOT_FOUND)
                        {
                            exception = Win32Marshal.GetExceptionForWin32Error(errorCode, fileName);
                        }
                    }
                }
            }
        } while (Interop.Kernel32.FindNextFile(handle, ref findData));
        if (exception != null)
            throw exception;
        // FindNextFile returning false is normal at end of enumeration
        // (ERROR_NO_MORE_FILES); anything else is a real failure.
        errorCode = Marshal.GetLastWin32Error();
        if (errorCode != Interop.Errors.ERROR_SUCCESS && errorCode != Interop.Errors.ERROR_NO_MORE_FILES)
            throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    }
    // As we successfully removed all of the files we shouldn't care about the directory itself
    // not being empty. As file deletion is just a marker to remove the file when all handles
    // are closed we could still have contents hanging around.
    RemoveDirectoryInternal(fullPath, topLevel: topLevel, allowDirectoryNotEmpty: true);
}
// Removes a single (empty) directory, mapping Win32 error codes onto the
// exception behavior .NET callers expect. Non-top-level path-not-found and
// (optionally) not-empty errors are swallowed.
private static void RemoveDirectoryInternal(string fullPath, bool topLevel, bool allowDirectoryNotEmpty = false)
{
    if (!Interop.Kernel32.RemoveDirectory(fullPath))
    {
        int errorCode = Marshal.GetLastWin32Error();
        switch (errorCode)
        {
            case Interop.Errors.ERROR_FILE_NOT_FOUND:
                // File not found doesn't make much sense coming from a directory delete.
                errorCode = Interop.Errors.ERROR_PATH_NOT_FOUND;
                goto case Interop.Errors.ERROR_PATH_NOT_FOUND;
            case Interop.Errors.ERROR_PATH_NOT_FOUND:
                // We only throw for the top level directory not found, not for any contents.
                if (!topLevel)
                    return;
                break;
            case Interop.Errors.ERROR_DIR_NOT_EMPTY:
                if (allowDirectoryNotEmpty)
                    return;
                break;
            case Interop.Errors.ERROR_ACCESS_DENIED:
                // This conversion was originally put in for Win9x. Keeping for compatibility.
                throw new IOException(SR.Format(SR.UnauthorizedAccess_IODenied_Path, fullPath));
        }
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    }
}
// Sets the file attributes for fullPath; invalid attribute combinations are
// reported by the OS as ERROR_INVALID_PARAMETER and surfaced as ArgumentException.
public static void SetAttributes(string fullPath, FileAttributes attributes)
{
    if (!Interop.Kernel32.SetFileAttributes(fullPath, (int)attributes))
    {
        int errorCode = Marshal.GetLastWin32Error();
        if (errorCode == Interop.Errors.ERROR_INVALID_PARAMETER)
            throw new ArgumentException(SR.Arg_InvalidFileAttrs, nameof(attributes));
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    }
}
// Sets the creation time on a file or directory via a writable handle.
public static void SetCreationTime(string fullPath, DateTimeOffset time, bool asDirectory)
{
    using (SafeFileHandle handle = OpenHandle(fullPath, asDirectory))
    {
        if (!Interop.Kernel32.SetFileTime(handle, creationTime: time.ToFileTime()))
        {
            throw Win32Marshal.GetExceptionForLastWin32Error(fullPath);
        }
    }
}
// Changes the process's current directory, remapping FILE_NOT_FOUND to
// PATH_NOT_FOUND so the thrown exception talks about a missing path.
public static void SetCurrentDirectory(string fullPath)
{
    if (!Interop.Kernel32.SetCurrentDirectory(fullPath))
    {
        // If path doesn't exist, this sets last error to 2 (File
        // not Found). LEGACY: This may potentially have worked correctly
        // on Win9x, maybe.
        int errorCode = Marshal.GetLastWin32Error();
        if (errorCode == Interop.Errors.ERROR_FILE_NOT_FOUND)
            errorCode = Interop.Errors.ERROR_PATH_NOT_FOUND;
        throw Win32Marshal.GetExceptionForWin32Error(errorCode, fullPath);
    }
}
// Sets the last-access time on a file or directory via a writable handle.
public static void SetLastAccessTime(string fullPath, DateTimeOffset time, bool asDirectory)
{
    using (SafeFileHandle handle = OpenHandle(fullPath, asDirectory))
    {
        if (!Interop.Kernel32.SetFileTime(handle, lastAccessTime: time.ToFileTime()))
        {
            throw Win32Marshal.GetExceptionForLastWin32Error(fullPath);
        }
    }
}
// Sets the last-write time on a file or directory via a writable handle.
public static void SetLastWriteTime(string fullPath, DateTimeOffset time, bool asDirectory)
{
    using (SafeFileHandle handle = OpenHandle(fullPath, asDirectory))
    {
        if (!Interop.Kernel32.SetFileTime(handle, lastWriteTime: time.ToFileTime()))
        {
            throw Win32Marshal.GetExceptionForLastWin32Error(fullPath);
        }
    }
}
// Delegates logical-drive enumeration to the shared DriveInfo implementation.
public static string[] GetLogicalDrives() => DriveInfoInternal.GetLogicalDrives();
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using LogShark.Plugins.Backgrounder;
using LogShark.Plugins.Backgrounder.Model;
using LogShark.Shared;
using LogShark.Shared.LogReading.Containers;
using LogShark.Tests.Plugins.Helpers;
using Moq;
using Xunit;
namespace LogShark.Tests.Plugins.Backgrounder
{
public class BackgrounderEventParserTests : InvariantCultureTestsBase
{
private static readonly LogFileInfo TestLogFileInfo = new LogFileInfo("backgrounder-1.log.2018-07-11",
@"folder1/backgrounder-1.log.2018-07-11", "worker0", DateTime.MinValue);
private readonly Mock<IBackgrounderEventPersister> _persisterMock;
// xUnit constructs a fresh test-class instance per test, so each test gets
// its own persister mock.
public BackgrounderEventParserTests()
{
    _persisterMock = new Mock<IBackgrounderEventPersister>();
}
// Lines that are null, don't match the backgrounder regex, or lack an error/ID
// must never reach the persister; only the first two should report errors.
[Fact]
public void BadOrNoOpInput()
{
    var processingNotificationsCollector = new ProcessingNotificationsCollector(10);
    var parser = new BackgrounderEventParser(_persisterMock.Object, processingNotificationsCollector);
    var logLine = _startTestCases[0].GetLogLine();
    parser.ParseAndPersistLine(logLine, null); // Bad input
    parser.ParseAndPersistLine(logLine, "I am not a backgrounder line!"); // Good input, but doesn't match regex
    parser.ParseAndPersistLine(logLine, "2018-08-08 11:17:13.491 +1000 (,,,,,:purge_expired_wgsessions,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Running job of type PurgeExpiredWgSessions; no timeout; priority: 0; id: 9; args: null"); // Good input, but not error nor has ID
    _persisterMock.VerifyNoOtherCalls();
    // Only the first two lines count as parse errors; the third is a no-op.
    processingNotificationsCollector.TotalErrorsReported.Should().Be(2);
}
// Well-formed lines that carry no persistable backgrounder event must be
// silently ignored: nothing persisted, no errors reported.
[Theory]
//valid 2020.1 lines that should not have error messages
[InlineData("2020-04-03 02:00:55.731 -0400 (,,,,3752759,:viz_recommendations_trainer,2680d163-b7cd-496c-a720-2264720faee8) pool-111-thread-1 backgrounder: INFO com.tableausoftware.recommendations.service.VizRecommendationsDataValidator { method=validateViewsStats, site=1 } - Start validating viewsStats. 9 records.")]
[InlineData("2020-04-03 03:01:02.166 -0400 (CareBI,,,,3752821,:refresh_extracts,25fb3ebd-8aa0-481b-8a9e-affb480d469d) ActiveMQ Task-1 backgrounder: INFO org.apache.activemq.transport.failover.FailoverTransport - Successfully connected to ssl")]
public void ValidLinesThatAreNotPersisted(string input)
{
    var processingNotificationsCollector = new ProcessingNotificationsCollector(10);
    var parser = new BackgrounderEventParser(_persisterMock.Object, processingNotificationsCollector);
    var logLine = _startTestCases[0].GetLogLine();
    parser.ParseAndPersistLine(logLine, input);
    _persisterMock.VerifyNoOtherCalls();
    processingNotificationsCollector.TotalErrorsReported.Should().Be(0);
}
// Error lines should be persisted as BackgrounderJobError events, one per test case.
[Fact]
public void ErrorEvents()
{
    var captured = new List<BackgrounderJobError>();
    _persisterMock
        .Setup(m => m.AddErrorEvent(It.IsAny<BackgrounderJobError>()))
        .Callback<BackgrounderJobError>(job => captured.Add(job));

    RunTestCasesAndAssertOutput(_errorTestCases, captured, _persisterMock.Object);

    _persisterMock.Verify(m => m.AddErrorEvent(It.IsAny<BackgrounderJobError>()), Times.Exactly(_errorTestCases.Count));
    _persisterMock.VerifyNoOtherCalls();
}
// Job-start lines should be persisted as start events, one per test case.
[Fact]
public void StartEvents()
{
    var captured = new List<BackgrounderJob>();
    _persisterMock
        .Setup(m => m.AddStartEvent(It.IsAny<BackgrounderJob>()))
        .Callback<BackgrounderJob>(job => captured.Add(job));

    RunTestCasesAndAssertOutput(_startTestCases, captured, _persisterMock.Object);

    _persisterMock.Verify(m => m.AddStartEvent(It.IsAny<BackgrounderJob>()), Times.Exactly(_startTestCases.Count));
    _persisterMock.VerifyNoOtherCalls();
}
// Job-end lines should be persisted as end events, one per test case.
[Fact]
public void EndEvents()
{
    var captured = new List<BackgrounderJob>();
    _persisterMock
        .Setup(m => m.AddEndEvent(It.IsAny<BackgrounderJob>()))
        .Callback<BackgrounderJob>(job => captured.Add(job));

    RunTestCasesAndAssertOutput(_endTestCases, captured, _persisterMock.Object);

    _persisterMock.Verify(m => m.AddEndEvent(It.IsAny<BackgrounderJob>()), Times.Exactly(_endTestCases.Count));
    _persisterMock.VerifyNoOtherCalls();
}
[Fact]
public void ExtractJobDetailsEvents()
{
// Capture every persisted extract-job detail record for comparison.
var persistedDetails = new List<BackgrounderExtractJobDetail>();
_persisterMock
.Setup(m => m.AddExtractJobDetails(It.IsAny<BackgrounderExtractJobDetail>()))
.Callback<BackgrounderExtractJobDetail>(persistedDetails.Add);

RunTestCasesAndAssertOutput(_extractJobDetailTestCases, persistedDetails, _persisterMock.Object);

// Exactly one AddExtractJobDetails call per test case, and nothing else.
_persisterMock.Verify(m => m.AddExtractJobDetails(It.IsAny<BackgrounderExtractJobDetail>()), Times.Exactly(_extractJobDetailTestCases.Count));
_persisterMock.VerifyNoOtherCalls();
}
[Fact]
public void SubscriptionJobDetailsEvents()
{
// Capture every persisted subscription-job detail record for comparison.
var persistedDetails = new List<BackgrounderSubscriptionJobDetail>();
_persisterMock
.Setup(m => m.AddSubscriptionJobDetails(It.IsAny<BackgrounderSubscriptionJobDetail>()))
.Callback<BackgrounderSubscriptionJobDetail>(persistedDetails.Add);

RunTestCasesAndAssertOutput(_subscriptionJobDetailTestCases, persistedDetails, _persisterMock.Object);

// Exactly one AddSubscriptionJobDetails call per test case, and nothing else.
_persisterMock.Verify(m => m.AddSubscriptionJobDetails(It.IsAny<BackgrounderSubscriptionJobDetail>()), Times.Exactly(_subscriptionJobDetailTestCases.Count));
_persisterMock.VerifyNoOtherCalls();
}
// Feeds every test case through a parser wired to the supplied persister, then
// asserts that whatever the persister recorded (outputList) matches the cases'
// expected outputs.
private static void RunTestCasesAndAssertOutput<T>(List<PluginTestCase> testCases, List<T> outputList, IBackgrounderEventPersister persister)
{
var parser = new BackgrounderEventParser(persister, null);
testCases.ForEach(tc => parser.ParseAndPersistLine(tc.GetLogLine(), tc.LogContents.ToString()));

outputList.Should().BeEquivalentTo(testCases.Select(tc => tc.ExpectedOutput).ToList());
}
// ERROR/FATAL backgrounder log lines and the error records the parser is
// expected to persist for them.
private readonly List<PluginTestCase> _errorTestCases = new List<PluginTestCase>
{
// ERROR-severity line carrying a job id (1369448) in the structured prefix.
new PluginTestCase
{
LogContents = "2018-07-12 23:37:17.201 -0700 (Default,,,,1369448,:refresh_extracts,-) pool-4-thread-1 backgrounder: ERROR com.tableausoftware.core.configuration.ConfigurationSupportService - unable to convert site id string: to integer for extract refresh time out overrides list skipping this site, will continue with the remainder.",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
BackgrounderJobId = 1369448,
Class = "com.tableausoftware.core.configuration.ConfigurationSupportService",
File = TestLogFileInfo.FileName,
Line = 123,
Message = "unable to convert site id string: to integer for extract refresh time out overrides list skipping this site, will continue with the remainder.",
Severity = "ERROR",
Site = "Default",
Thread = "pool-4-thread-1",
Timestamp = new DateTime(2018, 7, 12, 23, 37, 17, 201)
}},
// Same line at FATAL severity — must also be captured.
new PluginTestCase
{
LogContents = "2018-07-12 23:37:17.201 -0700 (Default,,,,1369448,:refresh_extracts,-) pool-4-thread-1 backgrounder: FATAL com.tableausoftware.core.configuration.ConfigurationSupportService - unable to convert site id string: to integer for extract refresh time out overrides list skipping this site, will continue with the remainder.",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
BackgrounderJobId = 1369448,
Class = "com.tableausoftware.core.configuration.ConfigurationSupportService",
File = TestLogFileInfo.FileName,
Line = 124,
Message = "unable to convert site id string: to integer for extract refresh time out overrides list skipping this site, will continue with the remainder.",
Severity = "FATAL",
Site = "Default",
Thread = "pool-4-thread-1",
Timestamp = new DateTime(2018, 7, 12, 23, 37, 17, 201)
}},
// FATAL line whose prefix contains no job id — BackgrounderJobId must be null.
new PluginTestCase
{
LogContents = "2018-07-12 23:37:17.201 -0700 (Default,,,,,:refresh_extracts,-) pool-4-thread-1 backgrounder: FATAL com.tableausoftware.core.configuration.ConfigurationSupportService - unable to convert site id string: to integer for extract refresh time out overrides list skipping this site, will continue with the remainder.",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 125,
ExpectedOutput = new
{
BackgrounderJobId = (int?) null,
Class = "com.tableausoftware.core.configuration.ConfigurationSupportService",
File = TestLogFileInfo.FileName,
Line = 125,
Message = "unable to convert site id string: to integer for extract refresh time out overrides list skipping this site, will continue with the remainder.",
Severity = "FATAL",
Site = "Default",
Thread = "pool-4-thread-1",
Timestamp = new DateTime(2018, 7, 12, 23, 37, 17, 201)
}},
};
// "Running job ..." start lines (pre-2020 and 2020.1 formats) and the
// BackgrounderJob start records the parser is expected to persist.
private readonly List<PluginTestCase> _startTestCases = new List<PluginTestCase>
{
// "no timeout" and null args.
new PluginTestCase
{
LogContents =
"2018-08-08 11:17:13.491 +1000 (,,,,9,:purge_expired_wgsessions,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Running job of type PurgeExpiredWgSessions; no timeout; priority: 0; id: 9; args: null",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
Args = (string) null,
BackgrounderId = 1,
EndFile = (string) null,
EndLine = (int?) null,
EndTime = (DateTime?) null,
ErrorMessage = (string) null,
JobId = 9,
JobType = "purge_expired_wgsessions",
Notes = (string) null,
Priority = 0,
RunTime = (int?) null,
StartFile = TestLogFileInfo.FileName,
StartLine = 123,
StartTime = new DateTime(2018, 8, 8, 11, 17, 13, 491),
Success = (bool?) null,
Timeout = (int?) null,
TotalTime = (int?) null,
WorkerId = "worker0",
}
},
// Non-zero priority, still no timeout.
new PluginTestCase
{
LogContents =
"2018-08-08 14:45:17.152 +1000 (,,,,326,:enqueue_data_alerts,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Running job of type EnqueueDataAlerts; no timeout; priority: 10; id: 326; args: null",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
Args = (string) null,
BackgrounderId = 1,
EndFile = (string) null,
EndLine = (int?) null,
EndTime = (DateTime?) null,
ErrorMessage = (string) null,
JobId = 326,
JobType = "enqueue_data_alerts",
Notes = (string) null,
Priority = 10,
RunTime = (int?) null,
StartFile = TestLogFileInfo.FileName,
StartLine = 124,
StartTime = new DateTime(2018, 8, 8, 14, 45, 17, 152),
Success = (bool?) null,
Timeout = (int?) null,
TotalTime = (int?) null,
WorkerId = "worker0",
}
},
// Explicit timeout and non-null args string.
new PluginTestCase
{
LogContents =
"2018-08-08 14:46:17.152 +1000 (,,,,327,:enqueue_data_alerts,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Running job of type EnqueueDataAlerts; timeout: 9000; priority: 10; id: 327; args: test1 test2",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 125,
ExpectedOutput = new
{
Args = "test1 test2",
BackgrounderId = 1,
EndFile = (string) null,
EndLine = (int?) null,
EndTime = (DateTime?) null,
ErrorMessage = (string) null,
JobId = 327,
JobType = "enqueue_data_alerts",
Notes = (string) null,
Priority = 10,
RunTime = (int?) null,
StartFile = TestLogFileInfo.FileName,
StartLine = 125,
StartTime = new DateTime(2018, 8, 8, 14, 46, 17, 152),
Success = (bool?) null,
Timeout = 9000,
TotalTime = (int?) null,
WorkerId = "worker0",
}
},
// 2020.1 structured "activity=backgrounder-job-start" key/value format;
// args come from the quoted bracketed list.
new PluginTestCase
{
LogContents =
"2020-04-05 23:00:59.601 -0500 (kpi,,,,1968280,:refresh_extracts,6ee0e44f-0e70-4d31-bb34-57a6e09a6d72) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - activity=backgrounder-job-start job_id=1968280 job_type=RefreshExtracts request_id=6ee0e44f-0e70-4d31-bb34-57a6e09a6d72 args=\"[Workbook, 9, Test COSMOS, 243, null]\" site=kpi site_id=3 timeout=9000",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 125,
ExpectedOutput = new
{
Args = "Workbook, 9, Test COSMOS, 243, null",
BackgrounderId = 1,
EndFile = (string) null,
EndLine = (int?) null,
EndTime = (DateTime?) null,
ErrorMessage = (string) null,
JobId = 1968280,
JobType = "refresh_extracts",
Notes = (string) null,
Priority = 0,
RunTime = (int?) null,
StartFile = TestLogFileInfo.FileName,
StartLine = 125,
StartTime = new DateTime(2020, 4, 5, 23, 00, 59, 601),
Success = (bool?) null,
Timeout = 9000,
TotalTime = (int?) null,
WorkerId = "worker0",
}
}
};
// "Job finished" end lines and the BackgrounderJob end records the parser is
// expected to persist (start-side fields stay at their defaults).
private readonly List<PluginTestCase> _endTestCases = new List<PluginTestCase>
{
// SUCCESS with null notes.
new PluginTestCase
{
LogContents =
"2018-08-08 11:17:13.402 +1000 (,,,,7,:reap_auto_saves,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Job finished: SUCCESS; name: Reap Auto Saves; type :reap_auto_saves; id: 7; notes: null; total time: 1 sec; run time: 0 sec",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
Args = (string) null,
BackgrounderId = (int?) null,
EndFile = TestLogFileInfo.FileName,
EndLine = 123,
EndTime = new DateTime(2018, 8, 8, 11, 17, 13, 402),
ErrorMessage = (string) null,
JobId = 7,
JobType = (string) null,
Notes = (string) null,
Priority = 0,
RunTime = 0,
StartFile = (string) null,
StartLine = 0,
StartTime = default(DateTime),
Success = true,
Timeout = (int?) null,
TotalTime = 1,
WorkerId = (string) null,
}
},
// SUCCESS with a real notes string.
new PluginTestCase
{
LogContents =
"2018-08-08 11:17:13.402 +1000 (,,,,7,:reap_auto_saves,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Job finished: SUCCESS; name: Reap Auto Saves; type :reap_auto_saves; id: 7; notes: test note here; total time: 1 sec; run time: 0 sec",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
Args = (string) null,
BackgrounderId = (int?) null,
EndFile = TestLogFileInfo.FileName,
EndLine = 124,
EndTime = new DateTime(2018, 8, 8, 11, 17, 13, 402),
ErrorMessage = (string) null,
JobId = 7,
JobType = (string) null,
Notes = "test note here",
Priority = 0,
RunTime = 0,
StartFile = (string) null,
StartLine = 0,
StartTime = default(DateTime),
Success = true,
Timeout = (int?) null,
TotalTime = 1,
WorkerId = (string) null,
}
},
// ERROR outcome — the entire message becomes ErrorMessage and Success=false.
new PluginTestCase
{
LogContents =
"2018-08-08 11:16:32.386 +1000 (,,,,2,:sanitize_dataserver_workbooks,-) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Job finished: ERROR; name: Sanitize Data Server Workbooks; type :sanitize_dataserver_workbooks; id: 2; notes: null; total time: 598 sec; run time: 0 sec",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 125,
ExpectedOutput = new
{
Args = (string) null,
BackgrounderId = (int?) null,
EndFile = TestLogFileInfo.FileName,
EndLine = 125,
EndTime = new DateTime(2018, 8, 8, 11, 16, 32, 386),
ErrorMessage =
"Job finished: ERROR; name: Sanitize Data Server Workbooks; type :sanitize_dataserver_workbooks; id: 2; notes: null; total time: 598 sec; run time: 0 sec",
JobId = 2,
JobType = (string) null,
Notes = (string) null,
Priority = 0,
RunTime = (int?) null,
StartFile = (string) null,
StartLine = 0,
StartTime = default(DateTime),
Success = false,
Timeout = (int?) null,
TotalTime = (int?) null,
WorkerId = (string) null,
}
},
// 2020-era line with no "notes:" segment and a job id exceeding Int32 usage (long).
new PluginTestCase
{
LogContents =
"2020-05-13 19:00:46.479 -0500 (,,,,5470253,:sos_reconcile,bded9cdd-acfd-4378-acf3-3f65760a6706) scheduled-background-job-runner-1 backgrounder: INFO com.tableausoftware.backgrounder.runner.BackgroundJobRunner - Job finished: SUCCESS; name: Simple Object Storage Reconcile; type :sos_reconcile; id: 5470253; total time: 7 sec; run time: 0 sec",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
Args = (string) null,
BackgrounderId = (int?) null,
EndFile = TestLogFileInfo.FileName,
EndLine = 123,
EndTime = new DateTime(2020, 05, 13, 19, 00, 46, 479),
ErrorMessage = (string) null,
JobId = 5470253L,
JobType = (string) null,
Notes = (string) null,
Priority = 0,
RunTime = 0,
StartFile = (string) null,
StartLine = 0,
StartTime = default(DateTime),
Success = true,
Timeout = (int?) null,
TotalTime = 7,
WorkerId = (string) null,
}
},
};
// Extract-refresh detail lines (legacy "Storing to SOS" format and the newer
// pipe-delimited ExtractTimingSuccess format) with expected detail records.
private readonly List<PluginTestCase> _extractJobDetailTestCases = new List<PluginTestCase>
{
new PluginTestCase // old format: sizes parsed from the "Storing to SOS" message
{
LogContents = "2018-07-13 02:05:24.969 -0700 (Default,,,D7A2D1F664E5466B87C4637ABBC31D63,1369448,:refresh_extracts,-) pool-4-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.service.VqlSessionService - Storing to SOS: MDAPP2018_1_2/extract reducedDataId:bd5c5cc4-1c35-443f-bac7-3a4acac54a4b size:71878 (twb) + 1048641536 (guid={5EEC2CCA-6F82-4EFF-9DBC-FDB471269B06}) = 1048713414",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
BackgrounderJobId = 1369448,
ExtractGuid = "5EEC2CCA-6F82-4EFF-9DBC-FDB471269B06",
ExtractId = "bd5c5cc4-1c35-443f-bac7-3a4acac54a4b",
ExtractSize = 1048641536L,
ExtractUrl = "MDAPP2018_1_2",
JobNotes = (string) null,
ResourceName = (string) null,
ResourceType = (string) null,
ScheduleName = (string) null,
Site = (string) null,
TotalSize = 1048713414L,
TwbSize = 71878L,
VizqlSessionId = "D7A2D1F664E5466B87C4637ABBC31D63",
}},
new PluginTestCase // new format: key/value pairs, workbook refresh
{
LogContents = "2019-08-09 21:50:17.641 +0000 (Default,,,,201,:refresh_extracts,ee6dd62e-f472-4252-a931-caf4dfb0009f) pool-12-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.workers.RefreshExtractsWorker - |status=ExtractTimingSuccess|jobId=201|jobLuid=ee6dd62e-f472-4252-a931-caf4dfb0009f|siteName=\"Default\"|workbookName=\"Large1\"|refreshedAt=\"2019-08-09T21:50:17.638Z\"|sessionId=F7162DFF82CB48D386850188BD5B190A-1:1|scheduleName=\"Weekday early mornings\"|scheduleType=\"FullRefresh\"|jobName=\"Refresh Extracts\"|jobType=\"RefreshExtracts\"|totalTimeSeconds=48|runTimeSeconds=46|queuedTime=\"2019-08-09T21:49:29.076Z\"|startedTime=\"2019-08-09T21:49:31.262Z\"|endTime=\"2019-08-09T21:50:17.638Z\"|correlationId=65|priority=0|serialId=null|extractsSizeBytes=57016320|jobNotes=\"Finished refresh of extracts (new extract id:{78C1FCC2-E70E-4B25-BFFE-7B7F0096A4FE}) for Workbook 'Large1' \"",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
BackgrounderJobId = 201,
ExtractGuid = (string) null,
ExtractId = "78C1FCC2-E70E-4B25-BFFE-7B7F0096A4FE",
ExtractSize = 57016320,
ExtractUrl = "Large1",
JobNotes = "Finished refresh of extracts (new extract id:{78C1FCC2-E70E-4B25-BFFE-7B7F0096A4FE}) for Workbook 'Large1' ",
ResourceName = (string) null,
ResourceType = (string) null,
ScheduleName = "Weekday early mornings",
Site = "Default",
TotalSize = (long?) null,
TwbSize = (long?) null,
VizqlSessionId = "F7162DFF82CB48D386850188BD5B190A-1:1",
}},
new PluginTestCase // new format for a data source (datasourceName instead of workbookName)
{
LogContents = "2019-08-09 21:50:17.641 +0000 (Default,,,,201,:refresh_extracts,ee6dd62e-f472-4252-a931-caf4dfb0009f) pool-12-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.workers.RefreshExtractsWorker - |status=ExtractTimingSuccess|jobId=201|jobLuid=ee6dd62e-f472-4252-a931-caf4dfb0009f|siteName=\"Sales\"|datasourceName=\"Sales data\"|refreshedAt=\"2019-08-09T21:50:17.638Z\"|sessionId=F7162DFF82CB48D386850188BD5B190A-1:1|scheduleName=\"Weekday early mornings\"|scheduleType=\"FullRefresh\"|jobName=\"Refresh Extracts\"|jobType=\"RefreshExtracts\"|totalTimeSeconds=48|runTimeSeconds=46|queuedTime=\"2019-08-09T21:49:29.076Z\"|startedTime=\"2019-08-09T21:49:31.262Z\"|endTime=\"2019-08-09T21:50:17.638Z\"|correlationId=65|priority=0|serialId=null|extractsSizeBytes=57016320|jobNotes=\"Finished refresh of extracts (new extract id:{1811744A-39A0-47AA-9234-594A7891DCBE}) for Data Source 'Sales data' \"",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
BackgrounderJobId = 201,
ExtractGuid = (string) null,
ExtractId = "1811744A-39A0-47AA-9234-594A7891DCBE",
ExtractSize = 57016320,
ExtractUrl = "Sales data",
JobNotes = "Finished refresh of extracts (new extract id:{1811744A-39A0-47AA-9234-594A7891DCBE}) for Data Source 'Sales data' ",
ResourceName = (string) null,
ResourceType = (string) null,
ScheduleName = "Weekday early mornings",
Site = "Sales",
TotalSize = (long?) null,
TwbSize = (long?) null,
VizqlSessionId = "F7162DFF82CB48D386850188BD5B190A-1:1",
}},
new PluginTestCase // new format - partial event: missing keys map to nulls
{
LogContents = "2019-08-09 21:50:17.641 +0000 (Default,,,,201,:refresh_extracts,ee6dd62e-f472-4252-a931-caf4dfb0009f) pool-12-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.workers.RefreshExtractsWorker - |status=ExtractTimingSuccess|jobId=201|jobLuid=ee6dd62e-f472-4252-a931-caf4dfb0009f|siteName=\"Default\"|workbookName=\"Large1\"",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
BackgrounderJobId = 201,
ExtractGuid = (string) null,
ExtractId = (string) null,
ExtractSize = (long?) null,
ExtractUrl = "Large1",
JobNotes = (string) null,
ResourceName = (string) null,
ResourceType = (string) null,
ScheduleName = (string) null,
Site = "Default",
TotalSize = (long?) null,
TwbSize = (long?) null,
VizqlSessionId = (string) null
}},
};
// Subscription-notification detail lines (session creation, subscription start
// in several wordings, and e-mail send) with expected detail records.
private readonly List<PluginTestCase> _subscriptionJobDetailTestCases = new List<PluginTestCase>
{
// VqlSessionService "Created session" line -> VizqlSessionId only.
new PluginTestCase
{
LogContents = "2018-07-11 16:00:53.506 -0700 (Default,john.doe,,FA88A9BC626A40A29228ECE09F04A76B,1367091,:single_subscription_notify,-) pool-4-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.service.VqlSessionService - Created session id:FA88A9BC626A40A29228ECE09F04A76B",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 123,
ExpectedOutput = new
{
BackgrounderJobId = 1367091,
RecipientEmail = (string) null,
SenderEmail = (string) null,
SmtpServer = (string) null,
SubscriptionName = (string) null,
VizqlSessionId = "FA88A9BC626A40A29228ECE09F04A76B",
}},
// Legacy "Starting subscription" line with quoted subscription name.
new PluginTestCase
{
LogContents = "2018-07-11 16:00:53.445 -0700 (Default,john.doe,,,1367091,:single_subscription_notify,-) pool-4-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.service.subscriptions.SubscriptionRunner - Starting subscription Id 66 for User John.Smith \"Weekly Report\"",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
BackgrounderJobId = 1367091,
RecipientEmail = (string) null,
SenderEmail = (string) null,
SmtpServer = (string) null,
SubscriptionName = "Weekly Report",
VizqlSessionId = (string) null,
}},
// 2019.x wording "Starting Subscription ... with Subject <name>".
new PluginTestCase
{
LogContents = "2019-12-03 08:24:37.802 +0100 (Default,TestUser,,,1993727,:single_subscription_notify,1865c919-2e6a-45c7-994e-f281a78de6fa) pool-5-thread-1 backgrounder: INFO com.tableausoftware.domain.subscription.SubscriptionRunner - Starting Subscription Id 97 for User TestUser Created by TestUser with Subject test test (12BB)",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
BackgrounderJobId = 1993727,
RecipientEmail = (string) null,
SenderEmail = (string) null,
SmtpServer = (string) null,
SubscriptionName = "test test (12BB)",
VizqlSessionId = (string) null,
}},
// Unquoted subscription name.
new PluginTestCase
{
LogContents = "2018-07-11 16:00:53.445 -0700 (Default,john.doe,,,1367091,:single_subscription_notify,-) pool-4-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.service.subscriptions.SubscriptionRunner - Starting Subscription Id 66 for User John.Smith Weekly Report",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
BackgrounderJobId = 1367091,
RecipientEmail = (string) null,
SenderEmail = (string) null,
SmtpServer = (string) null,
SubscriptionName = "Weekly Report",
VizqlSessionId = (string) null,
}},
// Subscription name containing embedded quotes.
new PluginTestCase
{
LogContents = "2019-11-24 08:16:44.452 +0800 (RBAC-AE,TestUser,,,1253164,:single_subscription_notify,-) pool-3-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.service.subscriptions.SubscriptionRunner - Starting Subscription Id 34 for User TestUser Subject Pin \"Bent\" 1423",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 124,
ExpectedOutput = new
{
BackgrounderJobId = 1253164,
RecipientEmail = (string) null,
SenderEmail = (string) null,
SmtpServer = (string) null,
SubscriptionName = "Pin \"Bent\" 1423",
VizqlSessionId = (string) null,
}},
// EmailHelper "Sending email" line -> sender, recipient and SMTP server.
new PluginTestCase
{
LogContents = "2018-07-11 16:01:00.629 -0700 (Default,john.doe,,,1367091,:single_subscription_notify,-) pool-4-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.util.EmailHelper - Sending email from tableau@test.com to john.doe@test.com from server mail.test.com",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 125,
ExpectedOutput = new
{
BackgrounderJobId = 1367091,
RecipientEmail = "john.doe@test.com",
SenderEmail = "tableau@test.com",
SmtpServer = "mail.test.com",
SubscriptionName = (string) null,
VizqlSessionId = (string) null,
}},
// Degenerate "Sending email" line: empty sender, literal "null" recipient.
new PluginTestCase
{
LogContents = "2018-07-12 16:01:00.629 -0700 (Default,john.doe,,,1367091,:single_subscription_notify,-) pool-4-thread-1 backgrounder: INFO com.tableausoftware.model.workgroup.util.EmailHelper - Sending email from to null from server smtp.testmailserver.com",
LogType = LogType.BackgrounderJava,
LogFileInfo = TestLogFileInfo,
LineNumber = 126,
ExpectedOutput = new
{
BackgrounderJobId = 1367091,
RecipientEmail = "null",
SenderEmail = string.Empty,
SmtpServer = "smtp.testmailserver.com",
SubscriptionName = (string) null,
VizqlSessionId = (string) null,
}},
};
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Threading.Tasks;
namespace System.Net.Sockets
{
// The System.Net.Sockets.UdpClient class provides access to UDP services at a higher abstraction
// level than the System.Net.Sockets.Socket class. System.Net.Sockets.UdpClient is used to
// connect to a remote host and to receive connections from a remote client.
public partial class UdpClient : IDisposable
{
// Largest possible UDP datagram (64 KiB); also sizes the shared receive buffer.
private const int MaxUDPSize = 0x10000;
// Underlying socket; created by CreateClientSocket() and exposed via Client.
private Socket _clientSocket;
// Set when a connection to a remote host has been made (see Active property).
private bool _active;
// Shared receive buffer used by BeginReceive/EndReceive.
private byte[] _buffer = new byte[MaxUDPSize];
// Address family of the client socket; defaults to IPv4.
private AddressFamily _family = AddressFamily.InterNetwork;
// Initializes a new instance of the System.Net.Sockets.UdpClient class,
// defaulting to IPv4.
public UdpClient() : this(AddressFamily.InterNetwork)
{
}
// Initializes a new instance of the System.Net.Sockets.UdpClient class for the
// given address family. Only IPv4 and IPv6 are supported.
public UdpClient(AddressFamily family)
{
bool supportedFamily = family == AddressFamily.InterNetwork || family == AddressFamily.InterNetworkV6;
if (!supportedFamily)
{
throw new ArgumentException(SR.Format(SR.net_protocol_invalid_family, "UDP"), nameof(family));
}

_family = family;
CreateClientSocket();
}
// Creates a new instance of the UdpClient class that communicates on the
// specified port number (IPv4 only).
//
// NOTE: We should obsolete this overload; it also breaks IPv6-only scenarios.
// Fixing it has many complications, so the decision was to not fix it and
// instead obsolete it later.
public UdpClient(int port) : this(port, AddressFamily.InterNetwork)
{
}
// Creates a new instance of the UdpClient class bound to the wildcard address
// of the given family on the specified port number.
public UdpClient(int port, AddressFamily family)
{
if (!TcpValidationHelpers.ValidatePortNumber(port))
{
throw new ArgumentOutOfRangeException(nameof(port));
}

bool supportedFamily = family == AddressFamily.InterNetwork || family == AddressFamily.InterNetworkV6;
if (!supportedFamily)
{
throw new ArgumentException(SR.net_protocol_invalid_family, nameof(family));
}

_family = family;

// Bind to the family's "any" address on the requested port.
IPEndPoint localEP = _family == AddressFamily.InterNetwork
? new IPEndPoint(IPAddress.Any, port)
: new IPEndPoint(IPAddress.IPv6Any, port);

CreateClientSocket();
_clientSocket.Bind(localEP);
}
// Creates a new instance of the UdpClient class bound to the specified local
// end point.
public UdpClient(IPEndPoint localEP)
{
// Validate input parameters.
if (localEP == null)
{
throw new ArgumentNullException(nameof(localEP));
}
// IPv6 Changes: Set the AddressFamily of this object before
// creating the client socket, so the socket matches the endpoint's family.
_family = localEP.AddressFamily;
CreateClientSocket();
_clientSocket.Bind(localEP);
}
// Used by the class to indicate that a connection to a remote host has been made.
protected bool Active
{
get { return _active; }
set { _active = value; }
}

// Number of bytes available to read on the underlying socket.
public int Available
{
get { return _clientSocket.Available; }
}

// The underlying Socket; settable so callers can take manual control.
public Socket Client
{
get { return _clientSocket; }
set { _clientSocket = value; }
}

// The remaining properties simply forward to the underlying socket.
public short Ttl
{
get { return _clientSocket.Ttl; }
set { _clientSocket.Ttl = value; }
}

public bool DontFragment
{
get { return _clientSocket.DontFragment; }
set { _clientSocket.DontFragment = value; }
}

public bool MulticastLoopback
{
get { return _clientSocket.MulticastLoopback; }
set { _clientSocket.MulticastLoopback = value; }
}

public bool EnableBroadcast
{
get { return _clientSocket.EnableBroadcast; }
set { _clientSocket.EnableBroadcast = value; }
}

public bool ExclusiveAddressUse
{
get { return _clientSocket.ExclusiveAddressUse; }
set { _clientSocket.ExclusiveAddressUse = value; }
}
// Enables or restricts NAT traversal by adjusting the socket's IP protection level.
public void AllowNatTraversal(bool allowed)
{
IPProtectionLevel level;
if (allowed)
{
level = IPProtectionLevel.Unrestricted;
}
else
{
level = IPProtectionLevel.EdgeRestricted;
}
_clientSocket.SetIPProtectionLevel(level);
}
private bool _cleanedUp = false;

// Closes the underlying socket, releasing the effect of the Bind() call and
// freeing the bound IPEndPoint. Idempotent: subsequent calls are no-ops.
private void FreeResources()
{
if (_cleanedUp)
{
return;
}

// If the NetworkStream wasn't retrieved, the Socket might still be there
// and needs to be closed/disposed here.
Socket socketToClose = _clientSocket;
if (socketToClose != null)
{
socketToClose.InternalShutdown(SocketShutdown.Both);
socketToClose.Dispose();
_clientSocket = null;
}

_cleanedUp = true;
}
// Releases all resources used by the UdpClient.
public void Dispose()
{
Dispose(true);
}

// Standard dispose pattern; managed resources are freed and finalization is
// suppressed once disposal has been requested.
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this);
FreeResources();
GC.SuppressFinalize(this);
}
}
private bool _isBroadcast;

// Detects a send to IPAddress.Broadcast (the only broadcast address we detect;
// others exist) and, if found, enables SocketOptionName.Broadcast on the socket.
// Users wanting complete control over broadcast addresses should derive from
// UdpClient and manage the Socket themselves.
private void CheckForBroadcast(IPAddress ipAddress)
{
if (_clientSocket == null || _isBroadcast || !IsBroadcast(ipAddress))
{
return;
}

// Set the Broadcast socket option; once set it is never reset.
_isBroadcast = true;
_clientSocket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Broadcast, 1);
}
// True when the address is the IPv4 broadcast address. IPv6 has no broadcast
// addresses, so any IPv6 address yields false.
private bool IsBroadcast(IPAddress address)
{
return address.AddressFamily != AddressFamily.InterNetworkV6
&& address.Equals(IPAddress.Broadcast);
}
// Begins sending `bytes` bytes of `datagram` to `endPoint` (or to the connected
// remote host when endPoint is null). Throws if disposed, if datagram is null,
// if bytes is out of range, or if a destination is given while connected.
public IAsyncResult BeginSend(byte[] datagram, int bytes, IPEndPoint endPoint, AsyncCallback requestCallback, object state)
{
if (_cleanedUp)
{
throw new ObjectDisposedException(this.GetType().FullName);
}
if (datagram == null)
{
throw new ArgumentNullException(nameof(datagram));
}
if (bytes < 0 || bytes > datagram.Length)
{
throw new ArgumentOutOfRangeException(nameof(bytes));
}
if (_active && endPoint != null)
{
// A connected socket may not send to an arbitrary destination.
throw new InvalidOperationException(SR.net_udpconnected);
}

if (endPoint != null)
{
CheckForBroadcast(endPoint.Address);
return _clientSocket.BeginSendTo(datagram, 0, bytes, SocketFlags.None, endPoint, requestCallback, state);
}

return _clientSocket.BeginSend(datagram, 0, bytes, SocketFlags.None, requestCallback, state);
}
// Resolves `hostname` and begins sending to the first resolved address that
// matches this client's address family. Passing a null hostname or port 0
// falls through to a connected-socket send.
public IAsyncResult BeginSend(byte[] datagram, int bytes, string hostname, int port, AsyncCallback requestCallback, object state)
{
if (_active && ((hostname != null) || (port != 0)))
{
// A connected socket may not send to an arbitrary destination.
throw new InvalidOperationException(SR.net_udpconnected);
}

IPEndPoint resolvedEndPoint = null;
if (hostname != null && port != 0)
{
IPAddress[] addresses = Dns.GetHostAddressesAsync(hostname).GetAwaiter().GetResult();

// Scan for the first address matching this client's address family.
int index = 0;
while (index < addresses.Length && addresses[index].AddressFamily != _family)
{
index++;
}

if (addresses.Length == 0 || index == addresses.Length)
{
throw new ArgumentException(SR.net_invalidAddressList, nameof(hostname));
}

CheckForBroadcast(addresses[index]);
resolvedEndPoint = new IPEndPoint(addresses[index], port);
}

return BeginSend(datagram, bytes, resolvedEndPoint, requestCallback, state);
}
// Begins sending on the connected socket (no explicit destination endpoint).
public IAsyncResult BeginSend(byte[] datagram, int bytes, AsyncCallback requestCallback, object state)
{
return BeginSend(datagram, bytes, null, requestCallback, state);
}
// Completes an asynchronous send and returns the number of bytes sent.
// A connected client issued BeginSend; an unconnected one issued BeginSendTo.
public int EndSend(IAsyncResult asyncResult)
{
if (_cleanedUp)
{
throw new ObjectDisposedException(this.GetType().FullName);
}

return _active
? _clientSocket.EndSend(asyncResult)
: _clientSocket.EndSendTo(asyncResult);
}
// Begins receiving a datagram into the shared buffer.
// ReceiveFrom's ref-parameter convention forces us to pass an EndPoint, so we
// start from the wildcard endpoint of this client's address family.
public IAsyncResult BeginReceive(AsyncCallback requestCallback, object state)
{
if (_cleanedUp)
{
throw new ObjectDisposedException(this.GetType().FullName);
}

EndPoint tempRemoteEP = _family == AddressFamily.InterNetwork
? IPEndPointStatics.Any
: IPEndPointStatics.IPv6Any;

return _clientSocket.BeginReceiveFrom(_buffer, 0, MaxUDPSize, SocketFlags.None, ref tempRemoteEP, requestCallback, state);
}
// Completes an asynchronous receive, reports the sender through remoteEP, and
// returns a byte array sized exactly to the received datagram.
public byte[] EndReceive(IAsyncResult asyncResult, ref IPEndPoint remoteEP)
{
if (_cleanedUp)
{
throw new ObjectDisposedException(this.GetType().FullName);
}

EndPoint tempRemoteEP = _family == AddressFamily.InterNetwork
? IPEndPointStatics.Any
: IPEndPointStatics.IPv6Any;

int received = _clientSocket.EndReceiveFrom(asyncResult, ref tempRemoteEP);
remoteEP = (IPEndPoint)tempRemoteEP;

// The shared buffer is MaxUDPSize bytes; since we don't return a length we
// must hand back an array whose length equals the datagram size.
if (received == MaxUDPSize)
{
return _buffer;
}

byte[] trimmed = new byte[received];
Buffer.BlockCopy(_buffer, 0, trimmed, 0, received);
return trimmed;
}
// Joins a multicast address group. The group address must match this client's
// address family; the IPv4 and IPv6 cases use different option types/levels.
public void JoinMulticastGroup(IPAddress multicastAddr)
{
if (_cleanedUp)
{
throw new ObjectDisposedException(this.GetType().FullName);
}
if (multicastAddr == null)
{
throw new ArgumentNullException(nameof(multicastAddr));
}
if (multicastAddr.AddressFamily != _family)
{
throw new ArgumentException(SR.Format(SR.net_protocol_invalid_multicast_family, "UDP"), nameof(multicastAddr));
}

if (_family == AddressFamily.InterNetwork)
{
_clientSocket.SetSocketOption(
SocketOptionLevel.IP,
SocketOptionName.AddMembership,
new MulticastOption(multicastAddr));
}
else
{
_clientSocket.SetSocketOption(
SocketOptionLevel.IPv6,
SocketOptionName.AddMembership,
new IPv6MulticastOption(multicastAddr));
}
}
/// <summary>
/// Joins the multicast group <paramref name="multicastAddr"/> on the local
/// interface identified by <paramref name="localAddress"/>. IPv4 only.
/// </summary>
public void JoinMulticastGroup(IPAddress multicastAddr, IPAddress localAddress)
{
    // Validate input parameters.
    if (_cleanedUp)
    {
        throw new ObjectDisposedException(this.GetType().FullName);
    }
    // This overload selects the joining interface by an IPv4 local address,
    // so it is only supported for IPv4 clients (same error order as before:
    // family is checked ahead of the argument values).
    if (_family != AddressFamily.InterNetwork)
    {
        throw new SocketException((int)SocketError.OperationNotSupported);
    }
    // Explicit null checks for consistency with the other JoinMulticastGroup
    // overloads; previously a null argument surfaced from the MulticastOption
    // constructor with its internal parameter name instead of the caller's.
    if (multicastAddr == null)
    {
        throw new ArgumentNullException(nameof(multicastAddr));
    }
    if (localAddress == null)
    {
        throw new ArgumentNullException(nameof(localAddress));
    }
    MulticastOption mcOpt = new MulticastOption(multicastAddr, localAddress);
    _clientSocket.SetSocketOption(
        SocketOptionLevel.IP,
        SocketOptionName.AddMembership,
        mcOpt);
}
// Joins an IPv6 multicast address group.
/// <summary>
/// Joins an IPv6 multicast group on the interface with the given index.
/// </summary>
public void JoinMulticastGroup(int ifindex, IPAddress multicastAddr)
{
    if (_cleanedUp)
    {
        throw new ObjectDisposedException(this.GetType().FullName);
    }
    if (multicastAddr == null)
    {
        throw new ArgumentNullException(nameof(multicastAddr));
    }
    if (ifindex < 0)
    {
        throw new ArgumentException(SR.net_value_cannot_be_negative, nameof(ifindex));
    }
    // Interface-index joins are an IPv6-only concept; reject other families
    // with the same error WinSock would produce.
    if (_family != AddressFamily.InterNetworkV6)
    {
        throw new SocketException((int)SocketError.OperationNotSupported);
    }
    var membership = new IPv6MulticastOption(multicastAddr, ifindex);
    _clientSocket.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.AddMembership, membership);
}
// Joins a multicast address group with the specified time to live (TTL).
/// <summary>
/// Joins the multicast group and then applies the requested multicast
/// time-to-live (hop limit) to outgoing datagrams.
/// </summary>
public void JoinMulticastGroup(IPAddress multicastAddr, int timeToLive)
{
    if (_cleanedUp)
    {
        throw new ObjectDisposedException(this.GetType().FullName);
    }
    if (multicastAddr == null)
    {
        throw new ArgumentNullException(nameof(multicastAddr));
    }
    // Multicast TTL is a single octet: 0..255.
    if (!RangeValidationHelpers.ValidateRange(timeToLive, 0, 255))
    {
        throw new ArgumentOutOfRangeException(nameof(timeToLive));
    }
    // Join first (this also validates the address family), then set the TTL
    // at the option level matching the client's family.
    JoinMulticastGroup(multicastAddr);
    SocketOptionLevel level;
    if (_family == AddressFamily.InterNetwork)
    {
        level = SocketOptionLevel.IP;
    }
    else
    {
        level = SocketOptionLevel.IPv6;
    }
    _clientSocket.SetSocketOption(level, SocketOptionName.MulticastTimeToLive, timeToLive);
}
// Leaves a multicast address group.
/// <summary>
/// Leaves the multicast group identified by <paramref name="multicastAddr"/>.
/// </summary>
public void DropMulticastGroup(IPAddress multicastAddr)
{
    if (_cleanedUp)
    {
        throw new ObjectDisposedException(this.GetType().FullName);
    }
    if (multicastAddr == null)
    {
        throw new ArgumentNullException(nameof(multicastAddr));
    }
    // The group address must match the client's address family (IPv4 vs IPv6).
    if (multicastAddr.AddressFamily != _family)
    {
        throw new ArgumentException(SR.Format(SR.net_protocol_invalid_multicast_family, "UDP"), nameof(multicastAddr));
    }
    // Remove the family-specific membership registered by JoinMulticastGroup.
    if (_family == AddressFamily.InterNetwork)
    {
        _clientSocket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.DropMembership, new MulticastOption(multicastAddr));
    }
    else
    {
        _clientSocket.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.DropMembership, new IPv6MulticastOption(multicastAddr));
    }
}
// Leaves an IPv6 multicast address group.
/// <summary>
/// Leaves an IPv6 multicast group on the interface with the given index.
/// </summary>
public void DropMulticastGroup(IPAddress multicastAddr, int ifindex)
{
    if (_cleanedUp)
    {
        throw new ObjectDisposedException(this.GetType().FullName);
    }
    if (multicastAddr == null)
    {
        throw new ArgumentNullException(nameof(multicastAddr));
    }
    if (ifindex < 0)
    {
        throw new ArgumentException(SR.net_value_cannot_be_negative, nameof(ifindex));
    }
    // Interface-index membership is IPv6-only; reject other families with the
    // same error WinSock would produce.
    if (_family != AddressFamily.InterNetworkV6)
    {
        throw new SocketException((int)SocketError.OperationNotSupported);
    }
    var membership = new IPv6MulticastOption(multicastAddr, ifindex);
    _clientSocket.SetSocketOption(SocketOptionLevel.IPv6, SocketOptionName.DropMembership, membership);
}
/// <summary>
/// Sends <paramref name="bytes"/> bytes of <paramref name="datagram"/>
/// asynchronously, wrapping the BeginSend/EndSend APM pair in a Task.
/// </summary>
public Task<int> SendAsync(byte[] datagram, int bytes)
{
// The client instance rides along as the async state so the lambdas do not
// capture locals (no closure allocation per call).
return Task<int>.Factory.FromAsync(
(targetDatagram, targetBytes, callback, state) => ((UdpClient)state).BeginSend(targetDatagram, targetBytes, callback, state),
asyncResult => ((UdpClient)asyncResult.AsyncState).EndSend(asyncResult),
datagram,
bytes,
state: this);
}
/// <summary>
/// Sends a datagram to <paramref name="endPoint"/> asynchronously, wrapping
/// the BeginSend/EndSend APM pair in a Task.
/// </summary>
public Task<int> SendAsync(byte[] datagram, int bytes, IPEndPoint endPoint)
{
// The client instance rides along as the async state so the lambdas do not
// capture locals (no closure allocation per call).
return Task<int>.Factory.FromAsync(
(targetDatagram, targetBytes, targetEndpoint, callback, state) => ((UdpClient)state).BeginSend(targetDatagram, targetBytes, targetEndpoint, callback, state),
asyncResult => ((UdpClient)asyncResult.AsyncState).EndSend(asyncResult),
datagram,
bytes,
endPoint,
state: this);
}
/// <summary>
/// Sends a datagram to a host name and port asynchronously, wrapping the
/// BeginSend/EndSend APM pair in a Task.
/// </summary>
public Task<int> SendAsync(byte[] datagram, int bytes, string hostname, int port)
{
    // FromAsync threads at most three typed arguments through to the begin
    // delegate, so the datagram and hostname are packed into one tuple.
    Tuple<byte[], string> packed = Tuple.Create(datagram, hostname);
    return Task<int>.Factory.FromAsync(
        (packedArgs, sendBytes, sendPort, callback, state) =>
            ((UdpClient)state).BeginSend(packedArgs.Item1, sendBytes, packedArgs.Item2, sendPort, callback, state),
        asyncResult => ((UdpClient)asyncResult.AsyncState).EndSend(asyncResult),
        packed,
        bytes,
        port,
        state: this);
}
/// <summary>
/// Receives one datagram asynchronously, wrapping BeginReceive/EndReceive;
/// the result carries both the payload and the sender's endpoint.
/// </summary>
public Task<UdpReceiveResult> ReceiveAsync()
{
return Task<UdpReceiveResult>.Factory.FromAsync(
(callback, state) => ((UdpClient)state).BeginReceive(callback, state),
asyncResult =>
{
var client = (UdpClient)asyncResult.AsyncState;
// EndReceive fills in the remote endpoint via its ref parameter.
IPEndPoint remoteEP = null;
byte[] buffer = client.EndReceive(asyncResult, ref remoteEP);
return new UdpReceiveResult(buffer, remoteEP);
},
state: this);
}
/// <summary>
/// Creates the underlying UDP datagram socket for this client's address family.
/// </summary>
private void CreateClientSocket()
{
// Common initialization code.
//
// IPv6 Changes: Use the AddressFamily of this class rather than hardcode.
_clientSocket = new Socket(_family, SocketType.Dgram, ProtocolType.Udp);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Security.Permissions
{
using System;
using SiteString = System.Security.Util.SiteString;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Serialization;
/// <summary>
/// Code Access Security identity permission representing the web site(s) an
/// assembly originates from. State is either "unrestricted" or a set of
/// <see cref="SiteString"/> entries; the lattice operations (subset,
/// intersect, union) operate element-wise over that set.
/// </summary>
[System.Runtime.InteropServices.ComVisible(true)]
sealed public class SiteIdentityPermission : CodeAccessPermission, IBuiltInPermission
{
//------------------------------------------------------
//
// PRIVATE STATE DATA
//
//------------------------------------------------------
// True = matches any site. [OptionalField] keeps serialized v1 payloads loadable.
[OptionalField(VersionAdded = 2)]
private bool m_unrestricted;
// The site identities this permission represents; null or empty means "no site".
[OptionalField(VersionAdded = 2)]
private SiteString[] m_sites;
//------------------------------------------------------
//
// PUBLIC CONSTRUCTORS
//
//------------------------------------------------------
/// <summary>
/// Creates a permission that is either fully unrestricted or empty.
/// </summary>
public SiteIdentityPermission(PermissionState state)
{
if (state == PermissionState.Unrestricted)
{
m_unrestricted = true;
}
else if (state == PermissionState.None)
{
m_unrestricted = false;
}
else
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidPermissionState"));
}
}
/// <summary>
/// Creates a permission for a single site (validation happens in the
/// <see cref="Site"/> setter via the SiteString constructor).
/// </summary>
public SiteIdentityPermission( String site )
{
Site = site;
}
//------------------------------------------------------
//
// PUBLIC ACCESSOR METHODS
//
//------------------------------------------------------
/// <summary>
/// Single-site accessor. Setting replaces all existing state (and clears
/// the unrestricted flag); getting throws when more than one site is held.
/// </summary>
public String Site
{
set
{
m_unrestricted = false;
m_sites = new SiteString[1];
m_sites[0] = new SiteString( value );
}
get
{
if(m_sites == null)
return "";
if(m_sites.Length == 1)
return m_sites[0].ToString();
// Multiple sites cannot be reported through a single-valued property.
throw new NotSupportedException(Environment.GetResourceString("NotSupported_AmbiguousIdentity"));
}
}
//------------------------------------------------------
//
// PRIVATE AND PROTECTED HELPERS FOR ACCESSORS AND CONSTRUCTORS
//
//------------------------------------------------------
//------------------------------------------------------
//
// CODEACCESSPERMISSION IMPLEMENTATION
//
//------------------------------------------------------
//------------------------------------------------------
//
// IPERMISSION IMPLEMENTATION
//
//------------------------------------------------------
/// <summary>
/// Deep copy: the unrestricted flag and each SiteString are duplicated.
/// </summary>
public override IPermission Copy()
{
SiteIdentityPermission perm = new SiteIdentityPermission( PermissionState.None );
perm.m_unrestricted = this.m_unrestricted;
if (this.m_sites != null)
{
perm.m_sites = new SiteString[this.m_sites.Length];
int n;
for(n = 0; n < this.m_sites.Length; n++)
perm.m_sites[n] = (SiteString)this.m_sites[n].Copy();
}
return perm;
}
/// <summary>
/// True when every site here is a subset of some site in <paramref name="target"/>.
/// A null target is treated as the empty permission.
/// </summary>
public override bool IsSubsetOf(IPermission target)
{
if (target == null)
{
// Only an empty, non-unrestricted permission is a subset of nothing.
if(m_unrestricted)
return false;
if(m_sites == null)
return true;
if(m_sites.Length == 0)
return true;
return false;
}
SiteIdentityPermission that = target as SiteIdentityPermission;
if(that == null)
throw new ArgumentException(Environment.GetResourceString("Argument_WrongType", this.GetType().FullName));
if(that.m_unrestricted)
return true;
if(m_unrestricted)
return false;
if(this.m_sites != null)
{
// Each of our sites must match at least one of the target's sites.
// (Matching semantics are defined by SiteString.IsSubsetOf —
// presumably wildcard-aware; see System.Security.Util.SiteString.)
foreach(SiteString ssThis in this.m_sites)
{
bool bOK = false;
if(that.m_sites != null)
{
foreach(SiteString ssThat in that.m_sites)
{
if(ssThis.IsSubsetOf(ssThat))
{
bOK = true;
break;
}
}
}
if(!bOK)
return false;
}
}
return true;
}
/// <summary>
/// Pairwise intersection of the two site sets; returns null when the
/// intersection is empty. Unrestricted operands act as identity.
/// </summary>
public override IPermission Intersect(IPermission target)
{
if (target == null)
return null;
SiteIdentityPermission that = target as SiteIdentityPermission;
if(that == null)
throw new ArgumentException(Environment.GetResourceString("Argument_WrongType", this.GetType().FullName));
if(this.m_unrestricted && that.m_unrestricted)
{
SiteIdentityPermission res = new SiteIdentityPermission(PermissionState.None);
res.m_unrestricted = true;
return res;
}
if(this.m_unrestricted)
return that.Copy();
if(that.m_unrestricted)
return this.Copy();
if(this.m_sites == null || that.m_sites == null || this.m_sites.Length == 0 || that.m_sites.Length == 0)
return null;
List<SiteString> alSites = new List<SiteString>();
foreach(SiteString ssThis in this.m_sites)
{
foreach(SiteString ssThat in that.m_sites)
{
SiteString ssInt = (SiteString)ssThis.Intersect(ssThat);
if(ssInt != null)
alSites.Add(ssInt);
}
}
if(alSites.Count == 0)
return null;
SiteIdentityPermission result = new SiteIdentityPermission(PermissionState.None);
result.m_sites = alSites.ToArray();
return result;
}
/// <summary>
/// Set union of the two site sets (deduplicated via SiteString.Equals);
/// any unrestricted operand yields an unrestricted result.
/// </summary>
public override IPermission Union(IPermission target)
{
if (target == null)
{
if((this.m_sites == null || this.m_sites.Length == 0) && !this.m_unrestricted)
return null;
return this.Copy();
}
SiteIdentityPermission that = target as SiteIdentityPermission;
if(that == null)
throw new ArgumentException(Environment.GetResourceString("Argument_WrongType", this.GetType().FullName));
if(this.m_unrestricted || that.m_unrestricted)
{
SiteIdentityPermission res = new SiteIdentityPermission(PermissionState.None);
res.m_unrestricted = true;
return res;
}
if (this.m_sites == null || this.m_sites.Length == 0)
{
if(that.m_sites == null || that.m_sites.Length == 0)
return null;
return that.Copy();
}
if(that.m_sites == null || that.m_sites.Length == 0)
return this.Copy();
List<SiteString> alSites = new List<SiteString>();
foreach(SiteString ssThis in this.m_sites)
alSites.Add(ssThis);
foreach(SiteString ssThat in that.m_sites)
{
bool bDupe = false;
foreach(SiteString ss in alSites)
{
if(ssThat.Equals(ss))
{
bDupe = true;
break;
}
}
if(!bDupe)
alSites.Add(ssThat);
}
SiteIdentityPermission result = new SiteIdentityPermission(PermissionState.None);
result.m_sites = alSites.ToArray();
return result;
}
/// <internalonly/>
int IBuiltInPermission.GetTokenIndex()
{
return SiteIdentityPermission.GetTokenIndex();
}
// Stable index of this permission type in the built-in permission table.
internal static int GetTokenIndex()
{
return BuiltInPermissionIndex.SiteIdentityPermissionIndex;
}
}
}
| |
using AutoMapper.Configuration;
using AutoMapper.Mappers;
using AutoMapper.QueryableExtensions.Impl;
using static System.Linq.Expressions.Expression;
namespace AutoMapper
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Runtime.CompilerServices;
using Configuration;
using Execution;
/// <summary>
/// Describes how a custom <see cref="IValueResolver"/> is obtained for a
/// member mapping: a pre-built instance, a type to instantiate, or a factory.
/// </summary>
public class ValueResolverConfiguration
{
    /// <summary>Configuration that resolves the value resolver by type.</summary>
    public ValueResolverConfiguration(Type type)
    {
        Type = type;
    }
    /// <summary>Configuration that uses an existing resolver instance.</summary>
    public ValueResolverConfiguration(IValueResolver instance)
    {
        Instance = instance;
    }
    /// <summary>Pre-built resolver instance; null when resolving by <see cref="Type"/>.</summary>
    public IValueResolver Instance { get; }
    /// <summary>Resolver type to construct; null when <see cref="Instance"/> is set.</summary>
    public Type Type { get; }
    /// <summary>Optional expression selecting the source member fed to the resolver.</summary>
    public LambdaExpression SourceMember { get; set; }
    /// <summary>Optional name of the source member fed to the resolver.</summary>
    public string SourceMemberName { get; set; }
    /// <summary>Optional factory used to build the resolver.</summary>
    public Func<IValueResolver> Constructor { get; set; }
}
/// <summary>
/// Maps a single destination member: records where its value comes from
/// (member chain, custom resolver, expression, or constant) and, once sealed,
/// holds a compiled delegate that writes the resolved value onto a
/// destination instance.
/// </summary>
[DebuggerDisplay("{DestinationProperty.Name}")]
public class PropertyMap
{
// Owning type map; supplies the source/destination types used when the
// mapping expression is built.
internal readonly TypeMap _typeMap;
// Chain of getters walked from the source root to produce the value.
internal readonly List<IMemberGetter> _memberChain = new List<IMemberGetter>();
internal bool _ignored;
internal int _mappingOrder;
// User-supplied resolver taking (source, context) -> value.
internal Func<object, ResolutionContext, object> _customResolverFunc;
internal bool _sealed;
// Post-resolution condition: (resolvedValue, destinationValue, context).
internal Func<object, object, ResolutionContext, bool> _condition;
// Pre-resolution condition evaluated before any value is computed.
internal Func<ResolutionContext, bool> _preCondition;
// Compiled (destination, context) mapper produced by Seal().
internal Action<object, ResolutionContext> _mapperFunc;
internal MemberInfo _sourceMember;
internal LambdaExpression _customExpression;
internal Expression<Action<object, object, ResolutionContext>> _finalMapperExpr;
internal LambdaExpression _mapperExpr;
/// <summary>Creates a map for one destination member of the given type map.</summary>
public PropertyMap(IMemberAccessor destinationProperty, TypeMap typeMap)
{
_typeMap = typeMap;
// By default the existing destination value is read and reused.
UseDestinationValue = true;
DestinationProperty = destinationProperty;
}
/// <summary>Creates a map that inherits configuration from a base map's property.</summary>
public PropertyMap(PropertyMap inheritedMappedProperty, TypeMap typeMap)
: this(inheritedMappedProperty.DestinationProperty, typeMap)
{
ApplyInheritedPropertyMap(inheritedMappedProperty);
}
public IMemberAccessor DestinationProperty { get; }
public Type DestinationPropertyType => DestinationProperty.MemberType;
public IEnumerable<IMemberGetter> SourceMembers => _memberChain;
// Setting a custom expression also fixes the source type to its return type.
public LambdaExpression CustomExpression
{
get { return _customExpression; }
private set
{
_customExpression = value;
if (value != null)
SourceType = value.ReturnType;
}
}
public Type SourceType { get; private set; }
// Explicitly assigned source member wins; otherwise the end of the chain.
public MemberInfo SourceMember
{
get
{
return _sourceMember ?? _memberChain.LastOrDefault()?.MemberInfo;
}
internal set
{
_sourceMember = value;
if (value != null)
SourceType = value.GetMemberType();
}
}
public bool UseDestinationValue { get; set; }
public bool ExplicitExpansion { get; set; }
public object CustomValue { get; private set; }
public object NullSubstitute { get; private set; }
public ValueResolverConfiguration ValueResolverConfig { get; set; }
/// <summary>Appends getters to the member chain and tracks the final member type.</summary>
public void ChainMembers(IEnumerable<IMemberGetter> members)
{
var getters = members as IList<IMemberGetter> ?? members.ToList();
_memberChain.AddRange(getters);
SourceType = getters.LastOrDefault()?.MemberType;
}
/// <summary>
/// Copies configuration from an inherited (base) property map for every
/// aspect this map has not configured itself.
/// </summary>
public void ApplyInheritedPropertyMap(PropertyMap inheritedMappedProperty)
{
if (!CanResolveValue() && inheritedMappedProperty.IsIgnored())
{
Ignore();
}
CustomExpression = CustomExpression ?? inheritedMappedProperty.CustomExpression;
_customResolverFunc = _customResolverFunc ?? inheritedMappedProperty._customResolverFunc;
if (_condition == null && inheritedMappedProperty._condition != null)
{
ApplyCondition(inheritedMappedProperty._condition);
}
if (NullSubstitute == null)
{
SetNullSubstitute(inheritedMappedProperty.NullSubstitute);
}
if (_mappingOrder == 0)
{
SetMappingOrder(inheritedMappedProperty._mappingOrder);
}
SourceType = SourceType ?? inheritedMappedProperty.SourceType;
CustomValue = CustomValue ?? inheritedMappedProperty.CustomValue;
}
/// <summary>
/// Compiles this property map into <c>_mapperFunc</c>. Idempotent; a map
/// that cannot resolve a value compiles to a no-op.
/// </summary>
internal void Seal(TypeMapRegistry typeMapRegistry)
{
if (_sealed)
{
return;
}
if (!CanResolveValue())
{
_mapperFunc = (_, __) => { };
return;
}
// Wrap the typed mapping lambda so it can be invoked with untyped
// (object) source/destination arguments.
var srcParam = Parameter(typeof(object), "src");
var source = Convert(srcParam, _typeMap.SourceType);
var destParam = Parameter(typeof(object), "dest");
var destination = Convert(destParam, _typeMap.DestinationType);
_mapperExpr = this.CreateExpression(typeMapRegistry);
var exp = _mapperExpr.ReplaceParameters(source, destination);
_finalMapperExpr = Lambda<Action<object, object, ResolutionContext>>(exp, srcParam, destParam, _mapperExpr.Parameters[2]);
var mapperFunc = _finalMapperExpr.Compile();
_mapperFunc = (dest, ctxt) => GetValue(mapperFunc, ctxt, dest);
_sealed = true;
}
// Adapter: pulls the source value off the context and invokes the compiled mapper.
private void GetValue(Action<object, object, ResolutionContext> mapperFunc, ResolutionContext ctxt, object dest)
{
mapperFunc(ctxt.SourceValue, dest, ctxt);
}
/// <summary>Installs a typed custom resolver and pins the source type to TMember.</summary>
public void AssignCustomExpression<TSource, TMember>(Func<TSource, ResolutionContext, TMember> resolverFunc)
{
//Expression<Func<TSource, ResolutionContext, TMember>> expr = (s, c) => resolverFunc(s, c);
_customResolverFunc = (s, c) => resolverFunc((TSource) s, c);
SourceType = typeof (TMember);
//AssignCustomExpression(expr);
}
public void Ignore()
{
_ignored = true;
}
public bool IsIgnored()
{
return _ignored;
}
public void SetMappingOrder(int mappingOrder)
{
_mappingOrder = mappingOrder;
}
public int GetMappingOrder()
{
return _mappingOrder;
}
/// <summary>True when any source of a value is configured, or the map is ignored.</summary>
public bool IsMapped()
{
return _memberChain.Count > 0
|| ValueResolverConfig != null
|| _customResolverFunc != null
|| SourceMember != null
|| CustomValue != null
|| CustomExpression != null
|| _ignored;
}
/// <summary>True when a value can actually be produced (configured and not ignored).</summary>
public bool CanResolveValue()
{
return (_memberChain.Count > 0
|| ValueResolverConfig != null
|| _customResolverFunc != null
|| SourceMember != null
|| CustomValue != null
|| CustomExpression != null) && !_ignored;
}
public void SetNullSubstitute(object nullSubstitute)
{
NullSubstitute = nullSubstitute;
}
public void AssignCustomValue(object value)
{
CustomValue = value;
}
// Equality is defined solely by the destination member being mapped.
public bool Equals(PropertyMap other)
{
if (ReferenceEquals(null, other)) return false;
if (ReferenceEquals(this, other)) return true;
return Equals(other.DestinationProperty, DestinationProperty);
}
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj)) return false;
if (ReferenceEquals(this, obj)) return true;
if (obj.GetType() != typeof (PropertyMap)) return false;
return Equals((PropertyMap) obj);
}
public override int GetHashCode()
{
return DestinationProperty.GetHashCode();
}
public void ApplyCondition(Func<object, object, ResolutionContext, bool> condition)
{
_condition = condition;
}
public void ApplyPreCondition(Func<ResolutionContext, bool> condition)
{
_preCondition = condition;
}
// NOTE(review): throws NullReferenceException if no condition was applied —
// callers presumably only invoke this after ApplyCondition; confirm.
public bool ShouldAssignValue(object resolvedValue, object destinationValue, ResolutionContext context)
{
return _condition(resolvedValue, destinationValue, context);
}
// NOTE(review): same null expectation as ShouldAssignValue, for _preCondition.
public bool ShouldAssignValuePreResolving(ResolutionContext context)
{
return _preCondition(context);
}
/// <summary>
/// Installs a source-member projection; also records the innermost member
/// access found in the expression as the source member.
/// </summary>
public void SetCustomValueResolverExpression<TSource, TMember>(Expression<Func<TSource, TMember>> sourceMember)
{
var finder = new MemberFinderVisitor();
finder.Visit(sourceMember);
if (finder.Member != null)
{
SourceMember = finder.Member.Member;
}
CustomExpression = sourceMember;
_ignored = false;
}
/// <summary>Reads the current destination value, or null when not reusing it.</summary>
public object GetDestinationValue(object mappedObject)
{
return UseDestinationValue
? DestinationProperty.GetValue(mappedObject)
: null;
}
/// <summary>Runs the sealed mapper against a destination instance.</summary>
public void MapValue(object mappedObject, ResolutionContext context)
{
_mapperFunc(mappedObject, context);
}
// Captures the last MemberExpression visited; used to discover the source
// member inside a projection lambda.
private class MemberFinderVisitor : ExpressionVisitor
{
public MemberExpression Member { get; private set; }
protected override Expression VisitMember(MemberExpression node)
{
Member = node;
return base.VisitMember(node);
}
}
}
/// <summary>
/// Rewrites references to one lambda parameter so they apply to a replacement
/// parameter of a different type, inserting a Convert around the new
/// parameter for member accesses and method calls on the old one.
/// </summary>
internal class ConvertingVisitor : ExpressionVisitor
{
    private readonly ParameterExpression _newParam;
    private readonly ParameterExpression _oldParam;
    public ConvertingVisitor(ParameterExpression oldParam, ParameterExpression newParam)
    {
        _newParam = newParam;
        _oldParam = oldParam;
    }
    protected override Expression VisitMember(MemberExpression node)
    {
        return node.Expression == _oldParam
            ? MakeMemberAccess(Convert(_newParam, _oldParam.Type), node.Member)
            : base.VisitMember(node);
    }
    protected override Expression VisitParameter(ParameterExpression node)
    {
        return node == _oldParam ? _newParam : base.VisitParameter(node);
    }
    protected override Expression VisitMethodCall(MethodCallExpression node)
    {
        // BUG FIX: the previous version rebuilt the call without node.Arguments,
        // which threw ArgumentException for any non-nullary method and silently
        // assumed calls on the parameter were argument-free. Preserve the
        // arguments, visiting each so nested references to the old parameter
        // are rewritten too.
        return node.Object == _oldParam
            ? Call(Convert(_newParam, _oldParam.Type), node.Method, node.Arguments.Select(Visit))
            : base.VisitMethodCall(node);
    }
}
/// <summary>
/// Wraps each member access in a null-propagating guard produced by
/// DelegateFactory.IfNotNullExpression.
/// </summary>
internal class IfNotNullVisitor : ExpressionVisitor
{
    // Member accesses already wrapped; prevents re-wrapping the nodes produced
    // by IfNotNullExpression (and thus infinite recursion).
    private readonly IList<MemberExpression> _alreadyWrapped = new List<MemberExpression>();
    protected override Expression VisitMember(MemberExpression node)
    {
        if (!_alreadyWrapped.Contains(node))
        {
            _alreadyWrapped.Add(node);
            return Visit(DelegateFactory.IfNotNullExpression(node));
        }
        return base.VisitMember(node);
    }
}
/// <summary>
/// Substitutes one expression node (matched by reference) with another
/// everywhere it occurs in a tree.
/// </summary>
internal class ReplaceExpressionVisitor : ExpressionVisitor
{
    private readonly Expression _searchFor;
    private readonly Expression _replaceWith;
    public ReplaceExpressionVisitor(Expression oldExpression, Expression newExpression)
    {
        _searchFor = oldExpression;
        _replaceWith = newExpression;
    }
    public override Expression Visit(Expression node)
    {
        if (node == _searchFor)
        {
            return _replaceWith;
        }
        return base.Visit(node);
    }
}
/// <summary>
/// Pipes the body of a visited expression into an override lambda: lambdas
/// and parameters are traversed, any other node becomes the argument of
/// the override expression (converted from object to the parameter type
/// when necessary).
/// </summary>
internal class ExpressionConcatVisitor : ExpressionVisitor
{
    private readonly LambdaExpression _overrideExpression;
    public ExpressionConcatVisitor(LambdaExpression overrideExpression)
    {
        _overrideExpression = overrideExpression;
    }
    public override Expression Visit(Expression node)
    {
        // No override configured: pass everything through untouched.
        if (_overrideExpression == null)
        {
            return node;
        }
        bool keepWalking = node.NodeType == ExpressionType.Lambda || node.NodeType == ExpressionType.Parameter;
        if (keepWalking)
        {
            return base.Visit(node);
        }
        Expression argument = node;
        if (node.Type == typeof(object))
        {
            argument = Convert(node, _overrideExpression.Parameters[0].Type);
        }
        return _overrideExpression.ReplaceParameters(argument);
    }
    protected override Expression VisitLambda<T>(Expression<T> node)
    {
        return Lambda(Visit(node.Body), node.Parameters);
    }
}
/// <summary>
/// Extension helpers for composing, rewriting, and compiling the
/// member-mapping expressions used by <see cref="PropertyMap"/>.
/// </summary>
internal static class ExpressionVisitors
{
// Shared stateless-per-call visitor instance for IfNotNull.
// NOTE(review): IfNotNullVisitor keeps per-visit state in an instance list —
// sharing one static instance across calls looks unintended; confirm.
private static readonly ExpressionVisitor IfNullVisitor = new IfNotNullVisitor();
/// <summary>
/// Substitutes the lambda's parameters positionally with the supplied
/// expressions and returns the rewritten body (extra parameters or
/// replacements are ignored).
/// </summary>
public static Expression ReplaceParameters(this LambdaExpression exp, params Expression[] replace)
{
var replaceExp = exp.Body;
for (var i = 0; i < Math.Min(replace.Count(), exp.Parameters.Count()); i++)
replaceExp = replaceExp.Replace(exp.Parameters[i], replace[i]);
return replaceExp;
}
/// <summary>
/// Like ReplaceParameters, but inserts casts so replacement parameters of a
/// different type can stand in for the originals (via ConvertingVisitor).
/// </summary>
public static Expression ConvertReplaceParameters(this LambdaExpression exp, params ParameterExpression[] replace)
{
var replaceExp = exp.Body;
for (var i = 0; i < Math.Min(replace.Count(), exp.Parameters.Count()); i++)
replaceExp = new ConvertingVisitor(exp.Parameters[i], replace[i]).Visit(replaceExp);
return replaceExp;
}
// Replaces one node with another by reference.
public static Expression Replace(this Expression exp, Expression old, Expression replace) => new ReplaceExpressionVisitor(old, replace).Visit(exp);
// Feeds 'concat' through 'expr' (see ExpressionConcatVisitor).
public static LambdaExpression Concat(this LambdaExpression expr, LambdaExpression concat) => (LambdaExpression)new ExpressionConcatVisitor(expr).Visit(concat);
// Wraps member accesses in null-propagation guards.
public static Expression IfNotNull(this Expression expression) => IfNullVisitor.Visit(expression);
/// <summary>
/// Builds a chained conditional: each expression is used when non-default,
/// otherwise evaluation falls through to the next alternative.
/// </summary>
public static Expression IfNullElse(this Expression expression, params Expression[] ifElse)
{
return ifElse.Any()
? Condition(NotEqual(expression, Default(expression.Type)), expression, ifElse.First().IfNullElse(ifElse.Skip(1).ToArray()))
: expression;
}
/// <summary>
/// Builds the (src, dest, ctxt) lambda that resolves and assigns one
/// destination member, including conditional mapping, nested type maps,
/// and read-only member handling.
/// </summary>
public static LambdaExpression CreateExpression(this PropertyMap propertyMap, TypeMapRegistry typeMapRegistry)
{
var srcParam = Parameter(propertyMap._typeMap.SourceType, "src");
var destParam = Parameter(propertyMap._typeMap.DestinationType, "dest");
var ctxtParam = Parameter(typeof(ResolutionContext), "ctxt");
// Expression producing the raw resolved value from the source.
var valueResolverExpr = BuildValueResolverFunc(propertyMap, typeMapRegistry, srcParam, ctxtParam);
var innerResolverExpr = valueResolverExpr;
var destMember = MakeMemberAccess(destParam,propertyMap.DestinationProperty.MemberInfo);
// Reading the destination member: fall back to a default value when the
// property has no getter at all.
Expression getter;
if (propertyMap.DestinationProperty.MemberInfo is PropertyInfo &&
((PropertyInfo)propertyMap.DestinationProperty.MemberInfo).GetGetMethod(true) == null)
{
getter = Default(propertyMap._typeMap.DestinationType);
}
else
{
getter = destMember;
}
var destValueExpr = propertyMap.UseDestinationValue
? getter
: Constant(null, propertyMap.DestinationPropertyType);
// Decide whether the resolved value must go through the runtime mapper
// (unknown source type, enumerables, registered nested maps, enum cases,
// or incompatible assignment).
// NOTE(review): the ((!EnumToEnum || EnumToNullable) && EnumToEnum) clause
// reduces to (EnumToEnum && EnumToNullable) — looks suspicious; confirm
// the intended enum-mapping condition.
if (propertyMap.SourceType == null
|| (propertyMap.SourceType.IsEnumerableType() && propertyMap.SourceType != typeof(string))
|| typeMapRegistry.GetTypeMap(new TypePair(propertyMap.SourceType, propertyMap.DestinationPropertyType)) != null
|| ((!EnumMapper.EnumToEnumMapping(new TypePair(propertyMap.SourceType, propertyMap.DestinationPropertyType)) ||
EnumMapper.EnumToNullableTypeMapping(new TypePair(propertyMap.SourceType, propertyMap.DestinationPropertyType))) &&
EnumMapper.EnumToEnumMapping(new TypePair(propertyMap.SourceType, propertyMap.DestinationPropertyType)))
|| !propertyMap.DestinationPropertyType.IsAssignableFrom(propertyMap.SourceType))
{
// Source type for the runtime Map call: statically known type when
// available, else the context's source type, else the value's runtime type.
var ifTrue = propertyMap.SourceType != null
? (Expression)Constant(propertyMap.SourceType, typeof(Type))
: MakeMemberAccess(ctxtParam, typeof(ResolutionContext).GetProperty("SourceType"));
var ifFalse = Call(valueResolverExpr, typeof(object).GetMethod("GetType"));
var a = Condition(Equal(valueResolverExpr.ToObject(), Constant(null)),
ifTrue,
ifFalse);
var mapperProp = MakeMemberAccess(ctxtParam, typeof(ResolutionContext).GetProperty("Mapper"));
var mapMethod = typeof(IRuntimeMapper).GetMethod("Map", new[] { typeof(object), typeof(object), typeof(Type), typeof(Type), typeof(ResolutionContext) });
var second = Call(
mapperProp,
mapMethod,
valueResolverExpr.ToObject(),
destValueExpr.ToObject(),
a,
Constant(propertyMap.DestinationPropertyType),
ctxtParam
);
valueResolverExpr = Convert(second, propertyMap.DestinationPropertyType);
}
// Post-resolution condition: keep the existing destination value when the
// user condition rejects the resolved value.
if (propertyMap._condition != null)
{
valueResolverExpr =
Condition(
Invoke(
Constant(propertyMap._condition),
innerResolverExpr.ToObject(),
destValueExpr.ToObject(),
ctxtParam
),
Convert(valueResolverExpr, propertyMap.DestinationPropertyType),
destValueExpr
);
}
// Assignment: fields assign directly; properties go through the setter
// (or evaluate the value only, when no setter exists).
Expression mapperExpr;
if (propertyMap.DestinationProperty.MemberInfo is FieldInfo)
{
{
if (propertyMap.SourceType != propertyMap.DestinationPropertyType)
mapperExpr = Assign(destMember, Convert(valueResolverExpr, propertyMap.DestinationPropertyType));
else
mapperExpr = Assign(getter,valueResolverExpr);
}
}
else
{
var setter = ((PropertyInfo)propertyMap.DestinationProperty.MemberInfo).GetSetMethod(true);
if (setter == null)
{
mapperExpr = valueResolverExpr;
}
else
{
if (propertyMap.SourceType != propertyMap.DestinationPropertyType)
mapperExpr = Assign(destMember, Convert(valueResolverExpr, propertyMap.DestinationPropertyType));
else
mapperExpr = Assign(destMember, valueResolverExpr);
}
}
// Pre-resolution condition gates the whole mapping statement.
if (propertyMap._preCondition != null)
{
mapperExpr = IfThen(
Invoke(Constant(propertyMap._preCondition), ctxtParam),
mapperExpr
);
}
return Lambda(mapperExpr, srcParam, destParam, ctxtParam);
}
/// <summary>
/// Builds the expression that produces the raw resolved value, choosing
/// among (in priority order): configured value resolver, constant custom
/// value, custom resolver delegate, custom expression, explicit source
/// member, member chain — then applies ToString coercion, null substitutes,
/// or non-null defaults.
/// </summary>
private static Expression BuildValueResolverFunc(PropertyMap propertyMap, TypeMapRegistry typeMapRegistry,
ParameterExpression srcParam,
ParameterExpression ctxtParam)
{
Expression valueResolverFunc;
var valueResolverConfig = propertyMap.ValueResolverConfig;
var typeMap = propertyMap._typeMap;
if (valueResolverConfig != null)
{
// Obtain the resolver: instance > factory > service-constructed type.
Expression ctor;
if (valueResolverConfig.Instance != null)
{
ctor = Constant(valueResolverConfig.Instance);
}
else if (valueResolverConfig.Constructor != null)
{
ctor = Invoke(Constant(valueResolverConfig.Constructor));
}
else
{
ctor = Convert(
Invoke(
MakeMemberAccess(
MakeMemberAccess(ctxtParam, typeof(ResolutionContext).GetProperty("Options")),
typeof(MappingOperationOptions).GetProperty("ServiceCtor"))
, Constant(valueResolverConfig.Type)),
typeof(IValueResolver)
);
}
// The resolver input: configured member expression, named member, or
// the whole source object.
Expression sourceFunc;
if (valueResolverConfig.SourceMember != null)
{
sourceFunc = valueResolverConfig.SourceMember.ReplaceParameters(srcParam);
}
else if (valueResolverConfig.SourceMemberName != null)
{
sourceFunc = MakeMemberAccess(srcParam,
typeMap.SourceType.GetFieldOrProperty(valueResolverConfig.SourceMemberName));
}
else
{
sourceFunc = srcParam;
}
valueResolverFunc = Convert(Call(ctor, typeof(IValueResolver).GetMethod("Resolve"), sourceFunc.ToObject(), ctxtParam), propertyMap.DestinationPropertyType);
}
else if (propertyMap.CustomValue != null)
{
valueResolverFunc = Convert(Constant(propertyMap.CustomValue), propertyMap.DestinationPropertyType);
}
else if (propertyMap._customResolverFunc != null)
{
// Custom resolver exceptions are swallowed into a default value.
valueResolverFunc = TryCatch(Convert(Invoke(Constant(propertyMap._customResolverFunc), srcParam, ctxtParam), propertyMap.DestinationPropertyType), Catch(typeof(Exception), Default(propertyMap.DestinationPropertyType)));
}
else if (propertyMap.CustomExpression != null)
{
valueResolverFunc = propertyMap.CustomExpression.ReplaceParameters(srcParam).IfNotNull();
}
else if (propertyMap._sourceMember != null)
{
valueResolverFunc = MakeMemberAccess(srcParam, propertyMap._sourceMember);
}
else if (propertyMap._memberChain.Any()
&& propertyMap.SourceType != null
)
{
var last = propertyMap._memberChain.Last();
if (last.MemberInfo is PropertyInfo && ((PropertyInfo)last.MemberInfo).GetGetMethod(true) == null)
{
valueResolverFunc = Default(last.MemberType);
}
else
{
// Fold the getter chain into nested member accesses / calls,
// handling static and instance members and method-based getters.
valueResolverFunc = propertyMap._memberChain.Aggregate(
(Expression)srcParam,
(inner, getter) => getter.MemberInfo is MethodInfo
? getter.MemberInfo.IsStatic()
? Call(null, (MethodInfo)getter.MemberInfo, inner)
: (Expression)Call(inner, (MethodInfo)getter.MemberInfo)
: MakeMemberAccess(getter.MemberInfo.IsStatic() ? null : inner, getter.MemberInfo)
);
valueResolverFunc = valueResolverFunc.IfNotNull();
}
}
else
{
valueResolverFunc = Throw(Constant(new Exception("I done blowed up")));
}
// String destinations with no dedicated type map: coerce via ToString().
if (propertyMap.DestinationPropertyType == typeof(string) && valueResolverFunc.Type != typeof(string)
&& typeMapRegistry.GetTypeMap(new TypePair(valueResolverFunc.Type, propertyMap.DestinationPropertyType)) == null)
{
valueResolverFunc = Call(valueResolverFunc, valueResolverFunc.Type.GetMethod("ToString", new Type[0]));
}
if (propertyMap.NullSubstitute != null)
{
Expression value = Constant(propertyMap.NullSubstitute);
if (propertyMap.NullSubstitute.GetType() != propertyMap.DestinationPropertyType)
value = Convert(value, propertyMap.DestinationPropertyType);
valueResolverFunc = MakeBinary(ExpressionType.Coalesce, valueResolverFunc, value);
}
else if (!typeMap.Profile.AllowNullDestinationValues)
{
var toCreate = propertyMap.SourceType ?? propertyMap.DestinationPropertyType;
if (!toCreate.GetTypeInfo().IsValueType)
{
// NOTE(review): the created value is converted to SourceType even
// though toCreate may be DestinationPropertyType when SourceType is
// null — looks inconsistent; confirm intended type here.
valueResolverFunc = MakeBinary(ExpressionType.Coalesce,
valueResolverFunc,
Convert(Call(
typeof(ObjectCreator).GetMethod("CreateNonNullValue"),
Constant(toCreate)
), propertyMap.SourceType));
}
}
return valueResolverFunc;
}
}
}
| |
// This file is part of the Harvest Management library for LANDIS-II.
using Landis.Utilities;
using Landis.SpatialModeling;
using System;
using System.Collections.Generic;
using Landis.Core;
using System.Linq;
namespace Landis.Library.HarvestManagement
{
/// <summary>
/// The application of a repeat-harvest to a management area.
/// </summary>
public class AppliedRepeatHarvest
: AppliedPrescription
{
// Strategy hook: how a just-harvested stand is set aside for its next entry
// (single follow-up vs. repeat-until-end-of-run).
private delegate void SetAsideMethod(Stand stand);
//---------------------------------------------------------------------
// The repeat-harvest prescription this application wraps.
private RepeatHarvest repeatHarvest;
// True when the prescription is not a SingleRepeatHarvest (see constructor).
private bool isMultipleRepeatHarvest;
// Bound in the constructor to the matching SetAsideFor* method.
private SetAsideMethod setAside;
// tjs 2009.01.09
private bool hasBeenHarvested;
// The queue is in the chronological order.
public Queue<ReservedStand> reservedStands;
// NOTE(review): per-species totals; populated outside this chunk — confirm usage.
IDictionary<ISpecies, double> totalBiomassBySpecies = new Dictionary<ISpecies, double>();
// The management area the prescription is currently acting on
public ManagementArea ActiveMgmtArea;
//---------------------------------------------------------------------
/// <summary>
/// Applies a repeat-harvest prescription over a time window, choosing the
/// set-aside strategy that matches the prescription's repeat behavior.
/// </summary>
public AppliedRepeatHarvest(RepeatHarvest repeatHarvest,
                            Percentage percentageToHarvest,
                            Percentage percentStandsToHarvest,
                            int beginTime,
                            int endTime)
    : base(repeatHarvest,
           percentageToHarvest,
           percentStandsToHarvest,
           beginTime,
           endTime)
{
    this.repeatHarvest = repeatHarvest;
    // tjs 2009.01.09
    hasBeenHarvested = false;
    // A SingleRepeatHarvest gets exactly one follow-up entry; any other
    // repeat harvest keeps its stands reserved for the rest of the run.
    isMultipleRepeatHarvest = !(repeatHarvest is SingleRepeatHarvest);
    setAside = isMultipleRepeatHarvest
        ? new SetAsideMethod(SetAsideForMultipleHarvests)
        : new SetAsideMethod(SetAsideForSingleHarvest);
    this.reservedStands = new Queue<ReservedStand>();
    this.ActiveMgmtArea = null;
}
//---------------------------------------------------------------------
/// <summary>
/// Has this ever been harvested - tjs 2009.01.09
/// </summary>
public bool HasBeenHarvested
{
get
{
return hasBeenHarvested;
}
set
{
hasBeenHarvested = value;
}
}
// <summary>
// Time interval for repeat harvest - tjs 2008.12.17
// </summary>
public int Interval
{
get
{
return repeatHarvest.Interval;
}
}
// <summary>
// Whether the prescription is a mutliple repeat harvest.
// </summary>
public bool IsMultipleRepeatHarvest
{
get {
return isMultipleRepeatHarvest;
}
}
//---------------------------------------------------------------------
/// <summary>
/// Sets a stand aside for a single additional harvest.
/// </summary>
public void SetAsideForSingleHarvest(Stand stand)
{
stand.SetAsideUntil(Model.Core.CurrentTime + repeatHarvest.Interval);
}
//---------------------------------------------------------------------
/// <summary>
/// Sets a stand aside for multiple additional harvests. These will be set aside until the end of the run
/// </summary>
public void SetAsideForMultipleHarvests(Stand stand)
{
stand.SetAsideUntil(Model.Core.EndTime);
}
//---------------------------------------------------------------------
/// <summary>
/// Harvests the highest-ranked stand which hasn't been harvested yet
/// during the current timestep.
/// </summary>
public override void HarvestHighestRankedStand()
{
base.HarvestHighestRankedStand();
foreach (Stand stand in repeatHarvest.HarvestedStands)
{
if (!stand.IsSetAside)
{
setAside(stand);
ScheduleNextHarvest(stand);
}
}
}
//---------------------------------------------------------------------
/// <summary>
/// Schedules the next harvest for a stand that's been set aside
/// (reserved).
/// </summary>
protected void ScheduleNextHarvest(Stand stand)
{
int nextTimeToHarvest = Model.Core.CurrentTime + repeatHarvest.Interval;
if (nextTimeToHarvest <= Model.Core.EndTime && stand.RepeatNumber < this.repeatHarvest.TimesToRepeat)
{
reservedStands.Enqueue(new ReservedStand(stand, nextTimeToHarvest));
}
else
{
if (stand.RepeatNumber >= this.repeatHarvest.TimesToRepeat)
{
stand.SetAsideUntil(Model.Core.CurrentTime);
}
stand.ResetRepeatNumber();
}
}
//---------------------------------------------------------------------
/// <summary>
/// Harvests the stands that have repeat harvests scheduled for the
/// current time step.
/// </summary>
public void HarvestReservedStands()
{
while (reservedStands.Count > 0 &&
reservedStands.Peek().NextTimeToHarvest <= Model.Core.CurrentTime)
{
//Stand stand = reservedStands.Dequeue().Stand;
Stand stand = reservedStands.Peek().Stand;
uint repeat = stand.RepeatNumber;
repeatHarvest.Harvest(stand);
stand.SetRepeatHarvested();
stand.IncrementRepeat();
HarvestExtensionMain.OnRepeatStandHarvest(this, stand, stand.RepeatNumber);
stand = reservedStands.Dequeue().Stand;
// Record every instance of a repeat harvest
if (reservedStands.Count > 0 && reservedStands.Peek().Stand.RepeatNumber != repeat)
{
HarvestExtensionMain.OnRepeatHarvestFinished(this, this, this.ActiveMgmtArea, repeat + 1, false);
}
else if (reservedStands.Count == 0 || reservedStands.Peek().NextTimeToHarvest > Model.Core.CurrentTime)
{
HarvestExtensionMain.OnRepeatHarvestFinished(this, this, this.ActiveMgmtArea, repeat + 1, true);
}
if (isMultipleRepeatHarvest)
{
ScheduleNextHarvest(stand);
}
else
{
stand.ResetRepeatNumber();
}
}
}
}
}
| |
using J2N.Runtime.CompilerServices;
using Lucene.Net.Diagnostics;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.CompilerServices;
using JCG = J2N.Collections.Generic;
namespace Lucene.Net.Codecs.PerField
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
using BytesRef = Lucene.Net.Util.BytesRef;
using FieldInfo = Lucene.Net.Index.FieldInfo;
using IBits = Lucene.Net.Util.IBits;
using IOUtils = Lucene.Net.Util.IOUtils;
using NumericDocValues = Lucene.Net.Index.NumericDocValues;
using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
using SegmentReadState = Lucene.Net.Index.SegmentReadState;
using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
using SortedDocValues = Lucene.Net.Index.SortedDocValues;
using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
/// <summary>
/// Enables per field docvalues support.
/// <para/>
/// Note, when extending this class, the name (<see cref="DocValuesFormat.Name"/>) is
/// written into the index. In order for the field to be read, the
/// name must resolve to your implementation via <see cref="DocValuesFormat.ForName(string)"/>.
/// This method uses <see cref="IDocValuesFormatFactory.GetDocValuesFormat(string)"/> to resolve format names.
/// See <see cref="DefaultDocValuesFormatFactory"/> for information about how to implement your own <see cref="DocValuesFormat"/>.
/// <para/>
/// Files written by each docvalues format have an additional suffix containing the
/// format name. For example, in a per-field configuration instead of <c>_1.dat</c>
/// filenames would look like <c>_1_Lucene40_0.dat</c>.
/// <para/>
/// @lucene.experimental
/// </summary>
/// <seealso cref="IDocValuesFormatFactory"/>
/// <seealso cref="DefaultDocValuesFormatFactory"/>
    [DocValuesFormatName("PerFieldDV40")]
    public abstract class PerFieldDocValuesFormat : DocValuesFormat
    {
        // LUCENENET specific: Removing this static variable, since name is now determined by the DocValuesFormatNameAttribute.
        ///// <summary>
        ///// Name of this <seealso cref="PostingsFormat"/>. </summary>
        //public static readonly string PER_FIELD_NAME = "PerFieldDV40";
        /// <summary>
        /// <see cref="FieldInfo"/> attribute name used to store the
        /// format name for each field.
        /// </summary>
        public static readonly string PER_FIELD_FORMAT_KEY = typeof(PerFieldDocValuesFormat).Name + ".format";
        /// <summary>
        /// <see cref="FieldInfo"/> attribute name used to store the
        /// segment suffix name for each field.
        /// </summary>
        public static readonly string PER_FIELD_SUFFIX_KEY = typeof(PerFieldDocValuesFormat).Name + ".suffix";
        /// <summary>
        /// Sole constructor. </summary>
        protected PerFieldDocValuesFormat() // LUCENENET: CA1012: Abstract types should not have constructors (marked protected)
            : base()
        {
        }
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public override sealed DocValuesConsumer FieldsConsumer(SegmentWriteState state)
        {
            return new FieldsWriter(this, state);
        }
        /// <summary>
        /// Pairs a per-format <see cref="DocValuesConsumer"/> with the integer
        /// suffix assigned to that format within the segment. Disposing it
        /// disposes the wrapped consumer.
        /// </summary>
        internal class ConsumerAndSuffix : IDisposable
        {
            internal DocValuesConsumer Consumer { get; set; }
            internal int Suffix { get; set; }
            public void Dispose()
            {
                Consumer.Dispose();
            }
        }
        /// <summary>
        /// Writer that delegates each field's doc values to the consumer of
        /// the <see cref="DocValuesFormat"/> selected for that field, creating
        /// one consumer (and one suffix) per distinct format.
        /// </summary>
        private class FieldsWriter : DocValuesConsumer
        {
            private readonly PerFieldDocValuesFormat outerInstance;
            // One consumer per distinct format used in this segment.
            internal readonly IDictionary<DocValuesFormat, ConsumerAndSuffix> formats = new Dictionary<DocValuesFormat, ConsumerAndSuffix>();
            // Next suffix bookkeeping, keyed by format name.
            internal readonly IDictionary<string, int?> suffixes = new Dictionary<string, int?>();
            internal readonly SegmentWriteState segmentWriteState;
            public FieldsWriter(PerFieldDocValuesFormat outerInstance, SegmentWriteState state)
            {
                this.outerInstance = outerInstance;
                segmentWriteState = state;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
            {
                GetInstance(field).AddNumericField(field, values);
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
            {
                GetInstance(field).AddBinaryField(field, values);
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
            {
                GetInstance(field).AddSortedField(field, values, docToOrd);
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
            {
                GetInstance(field).AddSortedSetField(field, values, docToOrdCount, ords);
            }
            /// <summary>
            /// Returns the consumer for <paramref name="field"/>, creating it on
            /// first use for that field's format, and records the chosen format
            /// name and suffix in the field's attributes so readers can resolve
            /// them later.
            /// </summary>
            internal virtual DocValuesConsumer GetInstance(FieldInfo field)
            {
                DocValuesFormat format = null;
                if (field.DocValuesGen != -1)
                {
                    string formatName = field.GetAttribute(PER_FIELD_FORMAT_KEY);
                    // this means the field never existed in that segment, yet is applied updates
                    if (formatName != null)
                    {
                        format = DocValuesFormat.ForName(formatName);
                    }
                }
                if (format is null)
                {
                    format = outerInstance.GetDocValuesFormatForField(field.Name);
                }
                if (format is null)
                {
                    throw IllegalStateException.Create("invalid null DocValuesFormat for field=\"" + field.Name + "\"");
                }
                string formatName_ = format.Name;
                string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName_);
                if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesGen != -1 || previousValue is null,"formatName={0} prevValue={1}", formatName_, previousValue);
                int? suffix = null;
                if (!formats.TryGetValue(format, out ConsumerAndSuffix consumer) || consumer is null)
                {
                    // First time we are seeing this format; create a new instance
                    if (field.DocValuesGen != -1)
                    {
                        string suffixAtt = field.GetAttribute(PER_FIELD_SUFFIX_KEY);
                        // even when dvGen is != -1, it can still be a new field, that never
                        // existed in the segment, and therefore doesn't have the recorded
                        // attributes yet.
                        if (suffixAtt != null)
                        {
                            suffix = Convert.ToInt32(suffixAtt, CultureInfo.InvariantCulture);
                        }
                    }
                    if (suffix is null)
                    {
                        // bump the suffix
                        if (!suffixes.TryGetValue(formatName_, out suffix) || suffix is null)
                        {
                            suffix = 0;
                        }
                        else
                        {
                            suffix = suffix + 1;
                        }
                    }
                    suffixes[formatName_] = suffix;
                    string segmentSuffix = GetFullSegmentSuffix(segmentWriteState.SegmentSuffix, GetSuffix(formatName_, Convert.ToString(suffix, CultureInfo.InvariantCulture)));
                    consumer = new ConsumerAndSuffix();
                    consumer.Consumer = format.FieldsConsumer(new SegmentWriteState(segmentWriteState, segmentSuffix));
                    consumer.Suffix = suffix.Value; // LUCENENET NOTE: At this point suffix cannot be null
                    formats[format] = consumer;
                }
                else
                {
                    // we've already seen this format, so just grab its suffix
                    if (Debugging.AssertsEnabled) Debugging.Assert(suffixes.ContainsKey(formatName_));
                    suffix = consumer.Suffix;
                }
                previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
                if (Debugging.AssertsEnabled) Debugging.Assert(field.DocValuesGen != -1 || previousValue is null,"suffix={0} prevValue={1}", suffix, previousValue);
                // TODO: we should only provide the "slice" of FIS
                // that this DVF actually sees ...
                return consumer.Consumer;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            protected override void Dispose(bool disposing)
            {
                if (disposing)
                {
                    // Close all subs
                    IOUtils.Dispose(formats.Values);
                }
            }
        }
        /// <summary>Combines a format name and its per-format suffix into one file-name suffix token.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static string GetSuffix(string formatName, string suffix)
        {
            return formatName + "_" + suffix;
        }
        /// <summary>Prepends the outer segment suffix (when present) to the per-format suffix.</summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static string GetFullSegmentSuffix(string outerSegmentSuffix, string segmentSuffix)
        {
            if (outerSegmentSuffix.Length == 0)
            {
                return segmentSuffix;
            }
            else
            {
                return outerSegmentSuffix + "_" + segmentSuffix;
            }
        }
        /// <summary>
        /// Reader that resolves each field to the producer of the format
        /// recorded in that field's attributes when the segment was written.
        /// </summary>
        private class FieldsReader : DocValuesProducer
        {
            private readonly PerFieldDocValuesFormat outerInstance;
            // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
            internal readonly IDictionary<string, DocValuesProducer> fields = new JCG.SortedDictionary<string, DocValuesProducer>(StringComparer.Ordinal);
            internal readonly IDictionary<string, DocValuesProducer> formats = new Dictionary<string, DocValuesProducer>();
            public FieldsReader(PerFieldDocValuesFormat outerInstance, SegmentReadState readState)
            {
                this.outerInstance = outerInstance;
                // Read _X.per and init each format:
                bool success = false;
                try
                {
                    // Read field name -> format name
                    foreach (FieldInfo fi in readState.FieldInfos)
                    {
                        if (fi.HasDocValues)
                        {
                            string fieldName = fi.Name;
                            string formatName = fi.GetAttribute(PER_FIELD_FORMAT_KEY);
                            if (formatName != null)
                            {
                                // null formatName means the field is in fieldInfos, but has no docvalues!
                                string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
                                if (Debugging.AssertsEnabled) Debugging.Assert(suffix != null);
                                DocValuesFormat format = DocValuesFormat.ForName(formatName);
                                string segmentSuffix = GetFullSegmentSuffix(readState.SegmentSuffix, GetSuffix(formatName, suffix));
                                // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
                                if (!formats.TryGetValue(segmentSuffix, out DocValuesProducer field))
                                {
                                    formats[segmentSuffix] = field = format.FieldsProducer(new SegmentReadState(readState, segmentSuffix));
                                }
                                fields[fieldName] = field;
                            }
                        }
                    }
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        // Partially-initialized producers must be released on failure.
                        IOUtils.DisposeWhileHandlingException(formats.Values);
                    }
                }
            }
            /// <summary>
            /// Copy constructor used by <see cref="Clone()"/>. NOTE: this is a
            /// shallow copy — the underlying producers are shared with
            /// <paramref name="other"/>, not re-opened.
            /// </summary>
            internal FieldsReader(PerFieldDocValuesFormat outerInstance, FieldsReader other)
            {
                this.outerInstance = outerInstance;
                IDictionary<DocValuesProducer, DocValuesProducer> oldToNew = new JCG.Dictionary<DocValuesProducer, DocValuesProducer>(IdentityEqualityComparer<DocValuesProducer>.Default);
                // First clone all formats
                foreach (KeyValuePair<string, DocValuesProducer> ent in other.formats)
                {
                    DocValuesProducer values = ent.Value;
                    formats[ent.Key] = values;
                    oldToNew[ent.Value] = values;
                }
                // Then rebuild fields:
                foreach (KeyValuePair<string, DocValuesProducer> ent in other.fields)
                {
                    oldToNew.TryGetValue(ent.Value, out DocValuesProducer producer);
                    if (Debugging.AssertsEnabled) Debugging.Assert(producer != null);
                    fields[ent.Key] = producer;
                }
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override NumericDocValues GetNumeric(FieldInfo field)
            {
                if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null)
                {
                    return producer.GetNumeric(field);
                }
                return null;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override BinaryDocValues GetBinary(FieldInfo field)
            {
                if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null)
                {
                    return producer.GetBinary(field);
                }
                return null;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override SortedDocValues GetSorted(FieldInfo field)
            {
                if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null)
                {
                    return producer.GetSorted(field);
                }
                return null;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override SortedSetDocValues GetSortedSet(FieldInfo field)
            {
                if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null)
                {
                    return producer.GetSortedSet(field);
                }
                return null;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override IBits GetDocsWithField(FieldInfo field)
            {
                if (fields.TryGetValue(field.Name, out DocValuesProducer producer) && producer != null)
                {
                    return producer.GetDocsWithField(field);
                }
                return null;
            }
            protected override void Dispose(bool disposing)
            {
                if (disposing)
                {
                    IOUtils.Dispose(formats.Values);
                }
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public object Clone()
            {
                return new FieldsReader(outerInstance, this);
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override long RamBytesUsed()
            {
                // Charges each map key's string storage plus each producer's own estimate.
                long size = 0;
                foreach (KeyValuePair<string, DocValuesProducer> entry in formats)
                {
                    size += (entry.Key.Length * RamUsageEstimator.NUM_BYTES_CHAR)
                        + entry.Value.RamBytesUsed();
                }
                return size;
            }
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public override void CheckIntegrity()
            {
                foreach (DocValuesProducer format in formats.Values)
                {
                    format.CheckIntegrity();
                }
            }
        }
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public override sealed DocValuesProducer FieldsProducer(SegmentReadState state)
        {
            return new FieldsReader(this, state);
        }
        /// <summary>
        /// Returns the doc values format that should be used for writing
        /// new segments of <paramref name="field"/>.
        /// <para/>
        /// The field to format mapping is written to the index, so
        /// this method is only invoked when writing, not when reading.
        /// </summary>
        public abstract DocValuesFormat GetDocValuesFormatForField(string field);
    }
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
namespace ASC.Mail.Net.Mime
{
#region usings
using System;
using System.Globalization;
using System.IO;
using System.Text;
using StringReader=ASC.Mail.Net.StringReader;
#endregion
/// <summary>
/// Provides mime related utility methods.
/// </summary>
[Obsolete("See LumiSoft.Net.MIME or LumiSoft.Net.Mail namepaces for replacement.")]
public class MimeUtils
{
// TODO get rid of this method, only IMAP uses it
#region Methods
        /// <summary>
        /// Parses rfc 2822 datetime.
        /// </summary>
        /// <param name="date">Date string.</param>
        /// <returns>Returns the parsed value converted to local time when a time-of-day
        /// was present; otherwise a date-only value (midnight, unspecified kind).</returns>
        /// <exception cref="Exception">Thrown when the day, month, year, hour or minute token cannot be parsed.</exception>
        public static DateTime ParseDate(string date)
        {
            /* Rfc 2822 3.3. Date and Time Specification.
                date-time = [ day-of-week "," ] date FWS time [CFWS]
                date = day month year
                time = hour ":" minute [ ":" second ] FWS zone
            */
            /* IMAP date format.
                date-time = date FWS time [CFWS]
                date = day-month-year
                time = hour ":" minute [ ":" second ] FWS zone
            */
            // zone = (( "+" / "-" ) 4DIGIT)
            //--- Replace timezone constants -------//
            /*
                UT -0000
                GMT -0000
                EDT -0400
                EST -0500
                CDT -0500
                CST -0600
                MDT -0600
                MST -0700
                PDT -0700
                PST -0800
                BST +0100 British Summer Time
            */
            // NOTE(review): these are plain substring replacements on the
            // lower-cased string, so a token appearing inside another word
            // would be replaced too — assumes well-formed RFC 2822 input.
            date = date.ToLower();
            date = date.Replace("ut", "-0000");
            date = date.Replace("gmt", "-0000");
            date = date.Replace("edt", "-0400");
            date = date.Replace("est", "-0500");
            date = date.Replace("cdt", "-0500");
            date = date.Replace("cst", "-0600");
            date = date.Replace("mdt", "-0600");
            date = date.Replace("mst", "-0700");
            date = date.Replace("pdt", "-0700");
            date = date.Replace("pst", "-0800");
            date = date.Replace("bst", "+0100");
            //----------------------------------------//
            //--- Replace month constants ---//
            date = date.Replace("jan", "01");
            date = date.Replace("feb", "02");
            date = date.Replace("mar", "03");
            date = date.Replace("apr", "04");
            date = date.Replace("may", "05");
            date = date.Replace("jun", "06");
            date = date.Replace("jul", "07");
            date = date.Replace("aug", "08");
            date = date.Replace("sep", "09");
            date = date.Replace("oct", "10");
            date = date.Replace("nov", "11");
            date = date.Replace("dec", "12");
            //-------------------------------//
            // If date contains optional "day-of-week,", remove it
            if (date.IndexOf(',') > -1)
            {
                date = date.Substring(date.IndexOf(',') + 1);
            }
            // Remove () from date. "Mon, 13 Oct 2003 20:50:57 +0300 (EEST)"
            if (date.IndexOf(" (") > -1)
            {
                date = date.Substring(0, date.IndexOf(" ("));
            }
            int year = 1900;
            int month = 1;
            int day = 1;
            int hour = -1;
            int minute = -1;
            int second = -1;
            // NOTE(review): zoneMinutes defaults to -1, so if a time is parsed
            // below but no zone can be read, AddMinutes(-1) shifts the result
            // by one minute — verify this is intended.
            int zoneMinutes = -1;
            StringReader s = new StringReader(date);
            //--- Pase date --------------------------------------------------------------------//
            try
            {
                day = Convert.ToInt32(s.ReadWord(true, new[] {'.', '-', ' '}, true));
            }
            catch
            {
                throw new Exception("Invalid date value '" + date + "', invalid day value !");
            }
            try
            {
                month = Convert.ToInt32(s.ReadWord(true, new[] {'.', '-', ' '}, true));
            }
            catch
            {
                throw new Exception("Invalid date value '" + date + "', invalid month value !");
            }
            try
            {
                year = Convert.ToInt32(s.ReadWord(true, new[] {'.', '-', ' '}, true));
            }
            catch
            {
                throw new Exception("Invalid date value '" + date + "', invalid year value !");
            }
            //----------------------------------------------------------------------------------//
            //--- Parse time -------------------------------------------------------------------//
            // Time is optional, so parse it if its included.
            if (s.Available > 0)
            {
                try
                {
                    hour = Convert.ToInt32(s.ReadWord(true, new[] {':'}, true));
                }
                catch
                {
                    throw new Exception("Invalid date value '" + date + "', invalid hour value !");
                }
                try
                {
                    minute = Convert.ToInt32(s.ReadWord(true, new[] {':'}, false));
                }
                catch
                {
                    throw new Exception("Invalid date value '" + date + "', invalid minute value !");
                }
                s.ReadToFirstChar();
                if (s.StartsWith(":"))
                {
                    // Optional seconds follow the second ':'.
                    s.ReadSpecifiedLength(1);
                    try
                    {
                        string secondString = s.ReadWord(true, new[] {' '}, true);
                        // Milli seconds specified, remove them.
                        if (secondString.IndexOf('.') > -1)
                        {
                            secondString = secondString.Substring(0, secondString.IndexOf('.'));
                        }
                        second = Convert.ToInt32(secondString);
                    }
                    catch
                    {
                        throw new Exception("Invalid date value '" + date + "', invalid second value !");
                    }
                }
                s.ReadToFirstChar();
                if (s.Available > 3)
                {
                    string timezone = s.SourceString.Replace(":", "");
                    if (timezone.StartsWith("+") || timezone.StartsWith("-"))
                    {
                        bool utc_add_time = timezone.StartsWith("+");
                        // Remove +/- sign
                        timezone = timezone.Substring(1);
                        // padd time zone to 4 symbol. For example 200, will be 0200.
                        while (timezone.Length < 4)
                        {
                            timezone = "0" + timezone;
                        }
                        try
                        {
                            // time zone format hours|minutes
                            int h = Convert.ToInt32(timezone.Substring(0, 2));
                            int m = Convert.ToInt32(timezone.Substring(2));
                            // zoneMinutes is the correction to reach UTC, so a
                            // "+hhmm" offset becomes a negative minute delta.
                            if (utc_add_time)
                            {
                                zoneMinutes = 0 - ((h*60) + m);
                            }
                            else
                            {
                                zoneMinutes = (h*60) + m;
                            }
                        }
                        catch
                        {
                            // Just skip time zone, if can't parse
                        }
                    }
                }
            }
            //---------------------------------------------------------------------------------//
            // Convert time to UTC
            if (hour != -1 && minute != -1 && second != -1)
            {
                DateTime d = new DateTime(year, month, day, hour, minute, second).AddMinutes(zoneMinutes);
                return
                    new DateTime(d.Year, d.Month, d.Day, d.Hour, d.Minute, d.Second, DateTimeKind.Utc).
                        ToLocalTime();
            }
            else
            {
                // No complete time-of-day parsed: return the bare date.
                return new DateTime(year, month, day);
            }
        }
/// <summary>
/// Converts date to rfc 2822 date time string.
/// </summary>
/// <param name="dateTime">Date time value.</param>
/// <returns></returns>
public static string DateTimeToRfc2822(DateTime dateTime)
{
return dateTime.ToUniversalTime().ToString("r", DateTimeFormatInfo.InvariantInfo);
}
/// <summary>
/// Parses headers from message or mime entry.
/// </summary>
/// <param name="entryStrm">Stream from where to read headers.</param>
/// <returns>Returns header lines.</returns>
public static string ParseHeaders(Stream entryStrm)
{
/* Rfc 2822 3.1. GENERAL DESCRIPTION
A message consists of header fields and, optionally, a body.
The body is simply a sequence of lines containing ASCII charac-
ters. It is separated from the headers by a null line (i.e., a
line with nothing preceding the CRLF).
*/
byte[] crlf = new[] {(byte) '\r', (byte) '\n'};
MemoryStream msHeaders = new MemoryStream();
StreamLineReader r = new StreamLineReader(entryStrm);
byte[] lineData = r.ReadLine();
while (lineData != null)
{
if (lineData.Length == 0)
{
break;
}
msHeaders.Write(lineData, 0, lineData.Length);
msHeaders.Write(crlf, 0, crlf.Length);
lineData = r.ReadLine();
}
return Encoding.Default.GetString(msHeaders.ToArray());
}
/// <summary>
/// Parse header specified header field value.
///
/// Use this method only if you need to get only one header field, otherwise use
/// MimeParser.ParseHeaderField(string fieldName,string headers).
/// This avoid parsing headers multiple times.
/// </summary>
/// <param name="fieldName">Header field which to parse. Eg. Subject: .</param>
/// <param name="entryStrm">Stream from where to read headers.</param>
/// <returns></returns>
public static string ParseHeaderField(string fieldName, Stream entryStrm)
{
return ParseHeaderField(fieldName, ParseHeaders(entryStrm));
}
/// <summary>
/// Parse header specified header field value.
/// </summary>
/// <param name="fieldName">Header field which to parse. Eg. Subject: .</param>
/// <param name="headers">Full headers string. Use MimeParser.ParseHeaders() to get this value.</param>
public static string ParseHeaderField(string fieldName, string headers)
{
/* Rfc 2822 2.2 Header Fields
Header fields are lines composed of a field name, followed by a colon
(":"), followed by a field body, and terminated by CRLF. A field
name MUST be composed of printable US-ASCII characters (i.e.,
characters that have values between 33 and 126, inclusive), except
colon. A field body may be composed of any US-ASCII characters,
except for CR and LF. However, a field body may contain CRLF when
used in header "folding" and "unfolding" as described in section
2.2.3. All field bodies MUST conform to the syntax described in
sections 3 and 4 of this standard.
Rfc 2822 2.2.3 (Multiline header fields)
The process of moving from this folded multiple-line representation
of a header field to its single line representation is called
"unfolding". Unfolding is accomplished by simply removing any CRLF
that is immediately followed by WSP. Each header field should be
treated in its unfolded form for further syntactic and semantic
evaluation.
Example:
Subject: aaaaa<CRLF>
<TAB or SP>aaaaa<CRLF>
*/
using (TextReader r = new StreamReader(new MemoryStream(Encoding.Default.GetBytes(headers))))
{
string line = r.ReadLine();
while (line != null)
{
// Find line where field begins
if (line.ToUpper().StartsWith(fieldName.ToUpper()))
{
// Remove field name and start reading value
string fieldValue = line.Substring(fieldName.Length).Trim();
// see if multi line value. See commnt above.
line = r.ReadLine();
while (line != null && (line.StartsWith("\t") || line.StartsWith(" ")))
{
fieldValue += line;
line = r.ReadLine();
}
return fieldValue;
}
line = r.ReadLine();
}
}
return "";
}
/// <summary>
/// Parses header field parameter value.
/// For example: CONTENT-TYPE: application\octet-stream; name="yourFileName.xxx",
/// fieldName="CONTENT-TYPE:" and subFieldName="name".
/// </summary>
/// <param name="fieldName">Main header field name.</param>
/// <param name="parameterName">Header field's parameter name.</param>
/// <param name="headers">Full headrs string.</param>
/// <returns></returns>
public static string ParseHeaderFiledParameter(string fieldName, string parameterName, string headers)
{
string mainFiled = ParseHeaderField(fieldName, headers);
// Parse sub field value
if (mainFiled.Length > 0)
{
int index = mainFiled.ToUpper().IndexOf(parameterName.ToUpper());
if (index > -1)
{
mainFiled = mainFiled.Substring(index + parameterName.Length + 1);
// Remove "subFieldName="
// subFieldName value may be in "" and without
if (mainFiled.StartsWith("\""))
{
return mainFiled.Substring(1, mainFiled.IndexOf("\"", 1) - 1);
}
// value without ""
else
{
int endIndex = mainFiled.Length;
if (mainFiled.IndexOf(" ") > -1)
{
endIndex = mainFiled.IndexOf(" ");
}
return mainFiled.Substring(0, endIndex);
}
}
}
return "";
}
/// <summary>
/// Parses MediaType_enum from <b>Content-Type:</b> header field value.
/// </summary>
/// <param name="headerFieldValue"><b>Content-Type:</b> header field value. This value can be null, then MediaType_enum.NotSpecified.</param>
/// <returns></returns>
public static MediaType_enum ParseMediaType(string headerFieldValue)
{
if (headerFieldValue == null)
{
return MediaType_enum.NotSpecified;
}
string contentType = TextUtils.SplitString(headerFieldValue, ';')[0].ToLower();
//--- Text/xxx --------------------------------//
if (contentType.IndexOf("text/plain") > -1)
{
return MediaType_enum.Text_plain;
}
else if (contentType.IndexOf("text/html") > -1)
{
return MediaType_enum.Text_html;
}
else if (contentType.IndexOf("text/xml") > -1)
{
return MediaType_enum.Text_xml;
}
else if (contentType.IndexOf("text/rtf") > -1)
{
return MediaType_enum.Text_rtf;
}
else if (contentType.IndexOf("text") > -1)
{
return MediaType_enum.Text;
}
//---------------------------------------------//
//--- Image/xxx -------------------------------//
else if (contentType.IndexOf("image/gif") > -1)
{
return MediaType_enum.Image_gif;
}
else if (contentType.IndexOf("image/tiff") > -1)
{
return MediaType_enum.Image_tiff;
}
else if (contentType.IndexOf("image/jpeg") > -1)
{
return MediaType_enum.Image_jpeg;
}
else if (contentType.IndexOf("image") > -1)
{
return MediaType_enum.Image;
}
//---------------------------------------------//
//--- Audio/xxx -------------------------------//
else if (contentType.IndexOf("audio") > -1)
{
return MediaType_enum.Audio;
}
//---------------------------------------------//
//--- Video/xxx -------------------------------//
else if (contentType.IndexOf("video") > -1)
{
return MediaType_enum.Video;
}
//---------------------------------------------//
//--- Application/xxx -------------------------//
else if (contentType.IndexOf("application/octet-stream") > -1)
{
return MediaType_enum.Application_octet_stream;
}
else if (contentType.IndexOf("application") > -1)
{
return MediaType_enum.Application;
}
//---------------------------------------------//
//--- Multipart/xxx ---------------------------//
else if (contentType.IndexOf("multipart/mixed") > -1)
{
return MediaType_enum.Multipart_mixed;
}
else if (contentType.IndexOf("multipart/alternative") > -1)
{
return MediaType_enum.Multipart_alternative;
}
else if (contentType.IndexOf("multipart/parallel") > -1)
{
return MediaType_enum.Multipart_parallel;
}
else if (contentType.IndexOf("multipart/related") > -1)
{
return MediaType_enum.Multipart_related;
}
else if (contentType.IndexOf("multipart/signed") > -1)
{
return MediaType_enum.Multipart_signed;
}
else if (contentType.IndexOf("multipart") > -1)
{
return MediaType_enum.Multipart;
}
//---------------------------------------------//
//--- Message/xxx -----------------------------//
else if (contentType.IndexOf("message/rfc822") > -1)
{
return MediaType_enum.Message_rfc822;
}
else if (contentType.IndexOf("message") > -1)
{
return MediaType_enum.Message;
}
//---------------------------------------------//
else
{
return MediaType_enum.Unknown;
}
}
/// <summary>
/// Converts MediaType_enum to string. NOTE: Returns null for MediaType_enum.NotSpecified.
/// </summary>
/// <param name="mediaType">MediaType_enum value to convert.</param>
/// <returns>Returns the media type string, or null if there is no string form for the value.</returns>
public static string MediaTypeToString(MediaType_enum mediaType)
{
    switch (mediaType)
    {
        //--- Text/xxx --------------------------------//
        case MediaType_enum.Text_plain:
            return "text/plain";
        case MediaType_enum.Text_html:
            return "text/html";
        case MediaType_enum.Text_xml:
            return "text/xml";
        case MediaType_enum.Text_rtf:
            return "text/rtf";
        case MediaType_enum.Text:
            return "text";
        //--- Image/xxx -------------------------------//
        case MediaType_enum.Image_gif:
            return "image/gif";
        case MediaType_enum.Image_tiff:
            return "image/tiff";
        case MediaType_enum.Image_jpeg:
            return "image/jpeg";
        case MediaType_enum.Image:
            return "image";
        //--- Audio/xxx -------------------------------//
        case MediaType_enum.Audio:
            return "audio";
        //--- Video/xxx -------------------------------//
        case MediaType_enum.Video:
            return "video";
        //--- Application/xxx -------------------------//
        case MediaType_enum.Application_octet_stream:
            return "application/octet-stream";
        case MediaType_enum.Application:
            return "application";
        //--- Multipart/xxx ---------------------------//
        case MediaType_enum.Multipart_mixed:
            return "multipart/mixed";
        case MediaType_enum.Multipart_alternative:
            return "multipart/alternative";
        case MediaType_enum.Multipart_parallel:
            return "multipart/parallel";
        case MediaType_enum.Multipart_related:
            return "multipart/related";
        case MediaType_enum.Multipart_signed:
            return "multipart/signed";
        case MediaType_enum.Multipart:
            return "multipart";
        //--- Message/xxx -----------------------------//
        case MediaType_enum.Message_rfc822:
            return "message/rfc822";
        case MediaType_enum.Message:
            return "message";
        case MediaType_enum.Unknown:
            return "unknown";
        // NotSpecified (or any unexpected value) has no string form.
        default:
            return null;
    }
}
/// <summary>
/// Parses ContentTransferEncoding_enum from <b>Content-Transfer-Encoding:</b> header field value.
/// </summary>
/// <param name="headerFieldValue"><b>Content-Transfer-Encoding:</b> header field value. This value can be null, then ContentTransferEncoding_enum.NotSpecified.</param>
/// <returns>Returns the parsed transfer encoding; Unknown for any unrecognized value.</returns>
public static ContentTransferEncoding_enum ParseContentTransferEncoding(string headerFieldValue)
{
    // A missing header means the encoding simply was not specified.
    if (headerFieldValue == null)
    {
        return ContentTransferEncoding_enum.NotSpecified;
    }
    // Encoding names are case-insensitive.
    switch (headerFieldValue.ToLower())
    {
        case "7bit":
            return ContentTransferEncoding_enum._7bit;
        case "quoted-printable":
            return ContentTransferEncoding_enum.QuotedPrintable;
        case "base64":
            return ContentTransferEncoding_enum.Base64;
        case "8bit":
            return ContentTransferEncoding_enum._8bit;
        case "binary":
            return ContentTransferEncoding_enum.Binary;
        default:
            return ContentTransferEncoding_enum.Unknown;
    }
}
/// <summary>
/// Converts ContentTransferEncoding_enum to string. NOTE: Returns null for ContentTransferEncoding_enum.NotSpecified.
/// </summary>
/// <param name="encoding">ContentTransferEncoding_enum value to convert.</param>
/// <returns>Returns the transfer encoding string, or null if there is no string form for the value.</returns>
public static string ContentTransferEncodingToString(ContentTransferEncoding_enum encoding)
{
    switch (encoding)
    {
        case ContentTransferEncoding_enum._7bit:
            return "7bit";
        case ContentTransferEncoding_enum.QuotedPrintable:
            return "quoted-printable";
        case ContentTransferEncoding_enum.Base64:
            return "base64";
        case ContentTransferEncoding_enum._8bit:
            return "8bit";
        case ContentTransferEncoding_enum.Binary:
            return "binary";
        case ContentTransferEncoding_enum.Unknown:
            return "unknown";
        // NotSpecified (or any unexpected value) has no string form.
        default:
            return null;
    }
}
/// <summary>
/// Parses ContentDisposition_enum from <b>Content-Disposition:</b> header field value.
/// </summary>
/// <param name="headerFieldValue"><b>Content-Disposition:</b> header field value. This value can be null, then ContentDisposition_enum.NotSpecified.</param>
/// <returns>Returns the parsed disposition; Unknown for any unrecognized value.</returns>
public static ContentDisposition_enum ParseContentDisposition(string headerFieldValue)
{
    // A missing header means the disposition simply was not specified.
    if (headerFieldValue == null)
    {
        return ContentDisposition_enum.NotSpecified;
    }
    string disposition = headerFieldValue.ToLower();
    // Substring match (not equality) so values with parameters,
    // e.g. 'attachment; filename="x"', are still recognized.
    if (disposition.Contains("attachment"))
    {
        return ContentDisposition_enum.Attachment;
    }
    if (disposition.Contains("inline"))
    {
        return ContentDisposition_enum.Inline;
    }
    return ContentDisposition_enum.Unknown;
}
/// <summary>
/// Converts ContentDisposition_enum to string. NOTE: Returns null for ContentDisposition_enum.NotSpecified.
/// </summary>
/// <param name="disposition">ContentDisposition_enum value to convert.</param>
/// <returns>Returns the disposition string, or null if there is no string form for the value.</returns>
public static string ContentDispositionToString(ContentDisposition_enum disposition)
{
    switch (disposition)
    {
        case ContentDisposition_enum.Attachment:
            return "attachment";
        case ContentDisposition_enum.Inline:
            return "inline";
        case ContentDisposition_enum.Unknown:
            return "unknown";
        // NotSpecified (or any unexpected value) has no string form.
        default:
            return null;
    }
}
/// <summary>
/// Encodes specified text as "encoded-word" if encode is required. For more information see RFC 2047.
/// </summary>
/// <param name="text">Text to encode. May be null.</param>
/// <returns>Returns encoded word, or the input unchanged if no encoding was needed.</returns>
public static string EncodeWord(string text)
{
    // Nothing to encode.
    if (text == null)
    {
        return null;
    }
    // Pure 7-bit ASCII never needs encoded-word wrapping.
    if (Core.IsAscii(text))
    {
        return text;
    }
    /* RFC 2047 2. Syntax of encoded-words (summary):
        encoded-word = "=?" charset "?" encoding "?" encoded-text "?="
       An encoded-word may not be more than 75 characters long, and unencoded
       white space is FORBIDDEN inside it — SPACE chars must be encoded too,
       e.g. =?iso-8859-1?q?this=20is=20some=20text?=
    */
    return Core.CanonicalEncode(text, "utf-8");
}
/// <summary>
/// Decodes "encoded-word"'s from the specified text. For more information see RFC 2047.
/// </summary>
/// <param name="text">Text to decode. May be null.</param>
/// <returns>Returns decoded text, or null if <paramref name="text"/> was null.</returns>
public static string DecodeWords(string text)
{
if (text == null)
{
return null;
}
/* RFC 2047 2. Syntax of encoded-words (summary).
encoded-word = "=?" charset "?" encoding "?" encoded-text "?="
Both 'encoding' and 'charset' names are case-independent; 'encoding' is
"Q" (quoted-printable style) or "B" (base64).
Unencoded white space characters are FORBIDDEN within an 'encoded-word',
so SPACE chars are encoded as well, e.g. =?iso-8859-1?q?this=20is=20some=20text?=
If more text must be encoded than fits in 75 characters, multiple
'encoded-word's (separated by CRLF SPACE) may be used.
*/
StringReader r = new StringReader(text);
StringBuilder retVal = new StringBuilder();
// We need to loop all words: if a word is an encoded-word, decode it, otherwise just append it to the return value.
bool lastIsEncodedWord = false;
while (r.Available > 0)
{
string whiteSpaces = r.ReadToFirstChar();
// Probably is encoded-word, we try to parse it.
if (r.StartsWith("=?") && r.SourceString.IndexOf("?=") > -1)
{
// Accumulates the raw encoded-word text so it can be emitted unchanged if decoding fails.
StringBuilder encodedWord = new StringBuilder();
string decodedWord = null;
try
{
// NOTE: We can't read encoded word and then split !!!, we need to read each part.
// Remove =?
encodedWord.Append(r.ReadSpecifiedLength(2));
// Read charset
string charset = r.QuotedReadToDelimiter('?');
encodedWord.Append(charset + "?");
// Read encoding
string encoding = r.QuotedReadToDelimiter('?');
encodedWord.Append(encoding + "?");
// Read text
string encodedText = r.QuotedReadToDelimiter('?');
encodedWord.Append(encodedText + "?");
// We must have remaining '=' here
if (r.StartsWith("="))
{
encodedWord.Append(r.ReadSpecifiedLength(1));
// Unknown charsets leave decodedWord null, so the raw encoded-word is kept below.
Encoding c = EncodingTools.GetEncodingByCodepageName(charset);
if (c != null)
{
if (encoding.ToLower() == "q")
{
decodedWord = Core.QDecode(c, encodedText);
}
else if (encoding.ToLower() == "b")
{
decodedWord =
c.GetString(Core.Base64Decode(Encoding.Default.GetBytes(encodedText)));
}
}
}
}
catch
{
// Not encoded-word or contains unknown charset/encoding, so leave
// encoded-word as is.
}
/* RFC 2047 6.2.
When displaying a particular header field that contains multiple
'encoded-word's, any 'linear-white-space' that separates a pair of
adjacent 'encoded-word's is ignored. (This is to allow the use of
multiple 'encoded-word's to represent long strings of unencoded text,
without having to separate 'encoded-word's where spaces occur in the
unencoded text.)
*/
if (!lastIsEncodedWord)
{
retVal.Append(whiteSpaces);
}
// Decoding failed for that encoded-word, leave encoded-word as is.
if (decodedWord == null)
{
retVal.Append(encodedWord.ToString());
}
// We decoded the encoded-word successfully.
else
{
retVal.Append(decodedWord);
}
lastIsEncodedWord = true;
}
// Normal word.
else if (r.StartsWithWord())
{
retVal.Append(whiteSpaces + r.ReadWord(false));
lastIsEncodedWord = false;
}
// We have some separator or parenthesize.
else
{
retVal.Append(whiteSpaces + r.ReadSpecifiedLength(1));
}
}
return retVal.ToString();
}
/// <summary>
/// Encodes header field with quoted-printable encoding, if value contains ANSI or UNICODE chars.
/// </summary>
/// <param name="text">Text to encode.</param>
/// <returns>Returns the encoded header field value (unchanged if the input was pure ASCII).</returns>
public static string EncodeHeaderField(string text)
{
// Pure 7-bit ASCII needs no encoding.
if (Core.IsAscii(text))
{
return text;
}
// First try to encode quoted strings("unicode-text") only, if no
// quoted strings, encode full text.
if (text.IndexOf("\"") > -1)
{
string retVal = text;
int offset = 0;
while (offset < retVal.Length - 1)
{
int quoteStartIndex = retVal.IndexOf("\"", offset);
// There are no more quoted strings, but there is some text left
if (quoteStartIndex == -1)
{
break;
}
int quoteEndIndex = retVal.IndexOf("\"", quoteStartIndex + 1);
// If there isn't a closing quote, encode full text
if (quoteEndIndex == -1)
{
break;
}
string leftPart = retVal.Substring(0, quoteStartIndex);
string rightPart = retVal.Substring(quoteEndIndex + 1);
string quotedString = retVal.Substring(quoteStartIndex + 1,
quoteEndIndex - quoteStartIndex - 1);
// Encode only not ASCII text
if (!Core.IsAscii(quotedString))
{
string quotedStringCEncoded = Core.CanonicalEncode(quotedString, "utf-8");
retVal = leftPart + "\"" + quotedStringCEncoded + "\"" + rightPart;
// NOTE(review): '+=' adds the absolute index quoteEndIndex to offset rather than
// setting 'offset = quoteEndIndex + 1 + (length delta)'; this can jump past later
// quote pairs. It appears harmless because any remaining non-ASCII text falls
// through to the full-text encode below — but confirm the original intent.
offset += quoteEndIndex + 1 + quotedStringCEncoded.Length - quotedString.Length;
}
else
{
// NOTE(review): same absolute-index accumulation as above — verify.
offset += quoteEndIndex + 1;
}
}
// See if all encoded ok, if not encode all text
if (Core.IsAscii(retVal))
{
return retVal;
}
else
{
// Some non-ASCII text remained outside the quoted strings; encode the original text as a whole.
return Core.CanonicalEncode(text, "utf-8");
}
}
return Core.CanonicalEncode(text, "utf-8");
}
/// <summary>
/// Creates Rfc 2822 3.6.4 message-id. Syntax: '<' id-left '@' id-right '>'.
/// </summary>
/// <returns>Returns a new globally unique message-id.</returns>
public static string CreateMessageID()
{
    // Two random GUIDs ("N" format = 32 hex digits, no dashes) supply
    // a globally unique id-left / id-right pair.
    string idLeft = Guid.NewGuid().ToString("N");
    string idRight = Guid.NewGuid().ToString("N");
    return "<" + idLeft + "@" + idRight + ">";
}
/// <summary>
/// Folds long data line to folded lines.
/// </summary>
/// <param name="data">Data to fold.</param>
/// <returns>Returns the folded data (unchanged if it fits on one 76-char line).</returns>
public static string FoldData(string data)
{
    /* Folding rules:
        *) Line may not be bigger than 76 chars.
        *) If possible fold between TAB or SP
        *) If no fold point, just fold from char 76
    */
    // Short enough — no folding needed.
    if (data.Length <= 76)
    {
        return data;
    }
    StringBuilder folded = new StringBuilder();
    int segmentStart = 0;
    // Index of the last SP/TAB seen in the current segment, -1 if none yet.
    int foldCandidate = -1;
    for (int pos = 0; pos < data.Length; pos++)
    {
        char current = data[pos];
        // Remember the most recent possible fold point.
        if (current == ' ' || current == '\t')
        {
            foldCandidate = pos;
        }
        if (pos == data.Length - 1)
        {
            // End of data — flush the remainder.
            folded.Append(data.Substring(segmentStart));
        }
        else if (pos - segmentStart >= 76)
        {
            // No whitespace in this segment (word longer than a line): hard-fold here.
            if (foldCandidate == -1)
            {
                foldCandidate = pos;
            }
            folded.Append(data.Substring(segmentStart, foldCandidate - segmentStart));
            folded.Append("\r\n\t");
            // Resume scanning from the fold point; the fold char itself stays in the next segment.
            pos = foldCandidate;
            segmentStart = foldCandidate;
            foldCandidate = -1;
        }
    }
    return folded.ToString();
}
#endregion
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using QuantConnect.Data;
using QuantConnect.Interfaces;
using QuantConnect.Orders;
using QuantConnect.Securities;
namespace QuantConnect.Algorithm.CSharp
{
/// <summary>
/// This regression algorithm tests Out of The Money (OTM) future option expiry for short calls.
/// We expect 2 orders from the algorithm, which are:
///
/// * Initial entry, sell ES Call Option (expiring OTM)
/// - Profit the option premium, since the option was not assigned.
///
/// * Liquidation of ES call OTM contract on the last trade date
///
/// Additionally, we test delistings for future options and assert that our
/// portfolio holdings reflect the orders the algorithm has submitted.
/// </summary>
public class FutureOptionShortCallOTMExpiryRegressionAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
{
// Underlying: E-mini S&P 500 future expiring 2020-06-19.
private Symbol _es19m20;
// The call option contract actually selected from the chain.
private Symbol _esOption;
// The contract the chain filter is expected to resolve to; used as a sanity check.
private Symbol _expectedContract;
/// <summary>
/// Adds the future and the OTM call option contract, validates the selected
/// contract and schedules the initial short entry at the next market open.
/// </summary>
public override void Initialize()
{
SetStartDate(2020, 1, 5);
SetEndDate(2020, 6, 30);
_es19m20 = AddFutureContract(
QuantConnect.Symbol.CreateFuture(
Futures.Indices.SP500EMini,
Market.CME,
new DateTime(2020, 6, 19)),
Resolution.Minute).Symbol;
// Select a future option expiring OTM, and add it to the algorithm.
// Calls struck at or above 3400 are out of the money here; take the lowest such strike.
_esOption = AddFutureOptionContract(OptionChainProvider.GetOptionContractList(_es19m20, Time)
.Where(x => x.ID.StrikePrice >= 3400m && x.ID.OptionRight == OptionRight.Call)
.OrderBy(x => x.ID.StrikePrice)
.Take(1)
.Single(), Resolution.Minute).Symbol;
_expectedContract = QuantConnect.Symbol.CreateOption(_es19m20, Market.CME, OptionStyle.American, OptionRight.Call, 3400m, new DateTime(2020, 6, 19));
if (_esOption != _expectedContract)
{
throw new Exception($"Contract {_expectedContract} was not found in the chain");
}
// Sell one contract one minute after the next market open.
Schedule.On(DateRules.Tomorrow, TimeRules.AfterMarketOpen(_es19m20, 1), () =>
{
MarketOrder(_esOption, -1);
});
}
/// <summary>
/// Asserts that delisting warning and delisted events arrive on the expected dates.
/// </summary>
public override void OnData(Slice data)
{
// Assert delistings, so that we can make sure that we receive the delisting warnings at
// the expected time. These assertions detect bug #4872
foreach (var delisting in data.Delistings.Values)
{
if (delisting.Type == DelistingType.Warning)
{
if (delisting.Time != new DateTime(2020, 6, 19))
{
throw new Exception($"Delisting warning issued at unexpected date: {delisting.Time}");
}
}
if (delisting.Type == DelistingType.Delisted)
{
if (delisting.Time != new DateTime(2020, 6, 20))
{
throw new Exception($"Delisting happened at unexpected date: {delisting.Time}");
}
}
}
}
/// <summary>
/// Validates fills: only the expected option contract should ever generate order events.
/// </summary>
public override void OnOrderEvent(OrderEvent orderEvent)
{
if (orderEvent.Status != OrderStatus.Filled)
{
// There's lots of noise with OnOrderEvent, but we're only interested in fills.
return;
}
if (!Securities.ContainsKey(orderEvent.Symbol))
{
throw new Exception($"Order event Symbol not found in Securities collection: {orderEvent.Symbol}");
}
var security = Securities[orderEvent.Symbol];
if (security.Symbol == _es19m20)
{
throw new Exception($"Expected no order events for underlying Symbol {security.Symbol}");
}
if (security.Symbol == _expectedContract)
{
AssertFutureOptionContractOrder(orderEvent, security);
}
else
{
throw new Exception($"Received order event for unknown Symbol: {orderEvent.Symbol}");
}
Log($"{orderEvent}");
}
/// <summary>
/// Asserts the holdings transitions for the option contract: -1 after the short
/// entry, flat after the close, and never an assignment (the call expires OTM).
/// </summary>
private void AssertFutureOptionContractOrder(OrderEvent orderEvent, Security optionContract)
{
if (orderEvent.Direction == OrderDirection.Sell && optionContract.Holdings.Quantity != -1)
{
throw new Exception($"No holdings were created for option contract {optionContract.Symbol}");
}
if (orderEvent.Direction == OrderDirection.Buy && optionContract.Holdings.Quantity != 0)
{
throw new Exception("Expected no options holdings after closing position");
}
if (orderEvent.IsAssignment)
{
throw new Exception($"Assignment was not expected for {orderEvent.Symbol}");
}
}
/// <summary>
/// Ran at the end of the algorithm to ensure the algorithm has no holdings
/// </summary>
/// <exception cref="Exception">The algorithm has holdings</exception>
public override void OnEndOfAlgorithm()
{
if (Portfolio.Invested)
{
throw new Exception($"Expected no holdings at end of algorithm, but are invested in: {string.Join(", ", Portfolio.Keys)}");
}
}
/// <summary>
/// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
/// </summary>
public bool CanRunLocally { get; } = true;
/// <summary>
/// This is used by the regression test system to indicate which languages this algorithm is written in.
/// </summary>
public Language[] Languages { get; } = { Language.CSharp, Language.Python };
/// <summary>
/// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
/// </summary>
public Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
{
{"Total Trades", "2"},
{"Average Win", "1.81%"},
{"Average Loss", "0%"},
{"Compounding Annual Return", "3.756%"},
{"Drawdown", "0.000%"},
{"Expectancy", "0"},
{"Net Profit", "1.811%"},
{"Sharpe Ratio", "1.183"},
{"Probabilistic Sharpe Ratio", "60.811%"},
{"Loss Rate", "0%"},
{"Win Rate", "100%"},
{"Profit-Loss Ratio", "0"},
{"Alpha", "0.026"},
{"Beta", "-0.001"},
{"Annual Standard Deviation", "0.022"},
{"Annual Variance", "0"},
{"Information Ratio", "0.012"},
{"Tracking Error", "0.375"},
{"Treynor Ratio", "-24.052"},
{"Total Fees", "$1.85"},
{"Estimated Strategy Capacity", "$78000000.00"},
{"Lowest Capacity Asset", "ES XFH59UPNF7B8|ES XFH59UK0MYO1"},
{"Fitness Score", "0"},
{"Kelly Criterion Estimate", "0"},
{"Kelly Criterion Probability Value", "0"},
{"Sortino Ratio", "79228162514264337593543950335"},
{"Return Over Maximum Drawdown", "95.594"},
{"Portfolio Turnover", "0"},
{"Total Insights Generated", "0"},
{"Total Insights Closed", "0"},
{"Total Insights Analysis Completed", "0"},
{"Long Insight Count", "0"},
{"Short Insight Count", "0"},
{"Long/Short Ratio", "100%"},
{"Estimated Monthly Alpha Value", "$0"},
{"Total Accumulated Estimated Alpha Value", "$0"},
{"Mean Population Estimated Insight Value", "$0"},
{"Mean Population Direction", "0%"},
{"Mean Population Magnitude", "0%"},
{"Rolling Averaged Population Direction", "0%"},
{"Rolling Averaged Population Magnitude", "0%"},
{"OrderListHash", "8cb012d36057103bf26a897fe5fa54d6"}
};
}
}
| |
using UnityEngine;
using System.Collections.Generic;
// Empty namespace declaration to avoid errors in the free version
// Which does not have any classes in the RVO namespace
namespace Pathfinding.RVO {}
namespace Pathfinding {
using Pathfinding.Util;
#if UNITY_5_0
/** Used in Unity 5.0 since the HelpURLAttribute was first added in Unity 5.1.
 * This empty stub keeps [HelpURL] usages in the project compiling on Unity 5.0. */
public class HelpURLAttribute : Attribute {}
#endif
[System.Serializable]
/** Stores editor colors */
public class AstarColor {
// Instance (serialized, inspector-editable) colors. OnEnable copies these into
// the static fields below, which are what gizmo rendering actually reads.
public Color _NodeConnection;
public Color _UnwalkableNode;
public Color _BoundsHandles;
public Color _ConnectionLowLerp;
public Color _ConnectionHighLerp;
public Color _MeshEdgeColor;
/** Holds user set area colors.
* Use GetAreaColor to get an area color */
public Color[] _AreaColors;
// Static mirrors of the instance fields above. These defaults match the values
// assigned in the constructor and are overwritten by OnEnable.
public static Color NodeConnection = new Color(1, 1, 1, 0.9F);
public static Color UnwalkableNode = new Color(1, 0, 0, 0.5F);
public static Color BoundsHandles = new Color(0.29F, 0.454F, 0.741F, 0.9F);
public static Color ConnectionLowLerp = new Color(0, 1, 0, 0.5F);
public static Color ConnectionHighLerp = new Color(1, 0, 0, 0.5F);
public static Color MeshEdgeColor = new Color(0, 0, 0, 0.5F);
/** Holds user set area colors.
* Use GetAreaColor to get an area color */
private static Color[] AreaColors;
/** Returns a color for an area, uses both user set ones and calculated.
* If the user has set a color for the area, it is used, but otherwise the color is calculated using Mathfx.IntToColor
* \see #AreaColors */
public static Color GetAreaColor (uint area) {
// Fall back to a procedurally generated color when no user color exists for this area.
if (AreaColors == null || area >= AreaColors.Length) {
return AstarMath.IntToColor((int)area, 1F);
}
return AreaColors[(int)area];
}
/** Pushes all local variables out to static ones.
* This is done because that makes it so much easier to access the colors during Gizmo rendering
* and it has a positive performance impact as well (gizmo rendering is hot code).
*/
public void OnEnable () {
NodeConnection = _NodeConnection;
UnwalkableNode = _UnwalkableNode;
BoundsHandles = _BoundsHandles;
ConnectionLowLerp = _ConnectionLowLerp;
ConnectionHighLerp = _ConnectionHighLerp;
MeshEdgeColor = _MeshEdgeColor;
AreaColors = _AreaColors;
}
public AstarColor () {
// Set default colors (intentionally duplicates the static initializers above:
// the statics may already have been overwritten by a previous OnEnable call).
_NodeConnection = new Color(1, 1, 1, 0.9F);
_UnwalkableNode = new Color(1, 0, 0, 0.5F);
_BoundsHandles = new Color(0.29F, 0.454F, 0.741F, 0.9F);
_ConnectionLowLerp = new Color(0, 1, 0, 0.5F);
_ConnectionHighLerp = new Color(1, 0, 0, 0.5F);
_MeshEdgeColor = new Color(0, 0, 0, 0.5F);
}
}
/** Returned by graph ray- or linecasts containing info about the hit. This will only be set up if something was hit. */
public struct GraphHitInfo {
	/** Start of the line/ray */
	public Vector3 origin;
	/** Hit point */
	public Vector3 point;
	/** Node which contained the edge which was hit */
	public GraphNode node;
	/** Where the tangent starts. tangentOrigin and tangent together actually describe the edge which was hit */
	public Vector3 tangentOrigin;
	/** Tangent of the edge which was hit */
	public Vector3 tangent;

	/** Distance from #origin to #point */
	public float distance {
		get {
			return Vector3.Distance(point, origin);
		}
	}

	public GraphHitInfo (Vector3 point) {
		this.point = point;
		origin = Vector3.zero;
		tangentOrigin = Vector3.zero;
		tangent = Vector3.zero;
		node = null;
	}
}
/** Nearest node constraint. Constrains which nodes will be returned by the GetNearest function */
public class NNConstraint {
	/** Graphs treated as valid to search on.
	 * Bitmask: bit N set means that graph number N in the graphs list may be included in the search.
	 * \code
	 * //Enables the first and third graphs to be included, but not the rest
	 * myNNConstraint.graphMask = (1 << 0) | (1 << 2);
	 * \endcode
	 * \note This only affects which nodes are returned from a GetNearest call; if an invalid graph is linked to from a valid graph, it might be searched anyway.
	 *
	 * \see AstarPath.GetNearest */
	public int graphMask = -1;

	/** Only treat nodes in the area #area as suitable. Does not affect anything if #area is less than 0 (zero) */
	public bool constrainArea;

	/** Area ID to constrain to. Will not affect anything if less than 0 (zero) or if #constrainArea is false */
	public int area = -1;

	/** Only treat nodes with the walkable flag set to the same as #walkable as suitable */
	public bool constrainWalkability = true;

	/** What the walkable flag on a node must be for it to be suitable. Does not affect anything if #constrainWalkability is false */
	public bool walkable = true;

	/** If available, do an XZ check instead of checking on all axes. The RecastGraph supports this */
	public bool distanceXZ;

	/** Sets if tags should be constrained */
	public bool constrainTags = true;

	/** Nodes which have any of these tags set are suitable. This is a bitmask, i.e bit 0 indicates that tag 0 is good, bit 3 indicates tag 3 is good etc. */
	public int tags = -1;

	/** Constrain distance to node.
	 * Uses distance from AstarPath.maxNearestNodeDistance.
	 * If this is false, the distance limit is ignored entirely.
	 * \note This value is not used in this class, it is used by the AstarPath.GetNearest function.
	 */
	public bool constrainDistance = true;

	/** Returns whether or not the graph conforms to this NNConstraint's rules.
	 * Note that only the first 31 graphs are considered using this function.
	 * If the graphMask has bit 31 set (i.e the last graph possible to fit in the mask), all graphs
	 * above index 31 will also be considered suitable.
	 */
	public virtual bool SuitableGraph (int graphIndex, NavGraph graph) {
		return (graphMask & (1 << graphIndex)) != 0;
	}

	/** Returns whether or not the node conforms to this NNConstraint's rules.
	 * Each individual check is only applied when its corresponding constrain* flag is enabled. */
	public virtual bool Suitable (GraphNode node) {
		return (!constrainWalkability || node.Walkable == walkable) &&
			(!constrainArea || area < 0 || node.Area == area) &&
			(!constrainTags || ((tags >> (int)node.Tag) & 0x1) != 0);
	}

	/** The default NNConstraint.
	 * Equivalent to new NNConstraint ().
	 * This NNConstraint has settings which work for most cases: it only finds walkable nodes
	 * and it constrains distance as set by A* Inspector -> Settings -> Max Nearest Node Distance */
	public static NNConstraint Default {
		get {
			return new NNConstraint();
		}
	}

	/** Returns a constraint which will not filter the results */
	public static NNConstraint None {
		get {
			var none = new NNConstraint();
			none.constrainWalkability = false;
			none.constrainArea = false;
			none.constrainTags = false;
			none.constrainDistance = false;
			none.graphMask = -1;
			return none;
		}
	}

	/** Default constructor. Equals to the property #Default */
	public NNConstraint () {
	}
}
/** A special NNConstraint which can use different logic for the start node and end node in a path.
 * A PathNNConstraint can be assigned to the Path.nnConstraint field, the path will first search for the start node, then it will call #SetStart and proceed with searching for the end node (nodes in the case of a MultiTargetPath).\n
 * The default PathNNConstraint will constrain the end point to lie inside the same area as the start point.
 */
public class PathNNConstraint : NNConstraint {
	public static new PathNNConstraint Default {
		get {
			var constraint = new PathNNConstraint();
			constraint.constrainArea = true;
			return constraint;
		}
	}

	/** Called after the start node has been found. This is used to get different search logic for the start and end nodes in a path */
	public virtual void SetStart (GraphNode node) {
		if (node == null) {
			// No start node found: do not restrict the end node by area.
			constrainArea = false;
			return;
		}
		area = (int)node.Area;
	}
}
/** Internal result of a nearest node query.
 * \see NNInfo
 */
public struct NNInfoInternal {
	/** Closest node found.
	 * This node is not necessarily accepted by any NNConstraint passed.
	 * \see constrainedNode
	 */
	public GraphNode node;

	/** Optional to be filled in.
	 * If the search will be able to find the constrained node without any extra effort it can fill it in. */
	public GraphNode constrainedNode;

	/** The position clamped to the closest point on the #node. */
	public Vector3 clampedPosition;

	/** Clamped position for the optional constrainedNode */
	public Vector3 constClampedPosition;

	public NNInfoInternal (GraphNode node) {
		this.node = node;
		constrainedNode = null;
		clampedPosition = Vector3.zero;
		constClampedPosition = Vector3.zero;
		UpdateInfo();
	}

	/** Updates #clampedPosition and #constClampedPosition from the current node positions */
	public void UpdateInfo () {
		if (node != null) {
			clampedPosition = (Vector3)node.position;
		} else {
			clampedPosition = Vector3.zero;
		}
		if (constrainedNode != null) {
			constClampedPosition = (Vector3)constrainedNode.position;
		} else {
			constClampedPosition = Vector3.zero;
		}
	}
}
/** Result of a nearest node query */
public struct NNInfo {
	/** Closest node */
	public readonly GraphNode node;

	/** Closest point on the navmesh.
	 * This is the query position clamped to the closest point on the #node.
	 */
	public readonly Vector3 position;

	public NNInfo (NNInfoInternal internalInfo) {
		this.node = internalInfo.node;
		this.position = internalInfo.clampedPosition;
	}

	/** Closest point on the navmesh.
	 * \deprecated This field has been renamed to #position
	 */
	[System.Obsolete("This field has been renamed to 'position'")]
	public Vector3 clampedPosition {
		get { return position; }
	}

	public static explicit operator Vector3 (NNInfo ob) {
		return ob.position;
	}

	public static explicit operator GraphNode (NNInfo ob) {
		return ob.node;
	}
}
/** Progress info for e.g a progressbar.
 * Used by the scan functions in the project
 * \see AstarPath.ScanAsync
 */
public struct Progress {
	/** Progress value, formatted with one decimal in #ToString */
	public readonly float progress;
	/** Description of the current stage */
	public readonly string description;

	public Progress (float p, string d) {
		this.progress = p;
		this.description = d;
	}

	public override string ToString () {
		return string.Format("{0:0.0} {1}", progress, description);
	}
}
/** Graphs which can be updated during runtime */
public interface IUpdatableGraph {
/** Updates an area using the specified GraphUpdateObject.
*
* Notes to implementors.
* This function should (in order):
* -# Call o.WillUpdateNode on the GUO for every node it will update, it is important that this is called BEFORE any changes are made to the nodes.
* -# Update walkability using special settings such as the usePhysics flag used with the GridGraph.
* -# Call Apply on the GUO for every node which should be updated with the GUO.
* -# Update eventual connectivity info if appropriate (GridGraphs updates connectivity, but most other graphs don't since then the connectivity cannot be recovered later).
*/
void UpdateArea (GraphUpdateObject o);
/** May be called on the Unity thread before starting the update.
* \see CanUpdateAsync
*/
void UpdateAreaInit (GraphUpdateObject o);
/** May be called on the Unity thread after executing the update.
* \see CanUpdateAsync
*/
void UpdateAreaPost (GraphUpdateObject o);
/** Reports how the update may be executed (e.g on the Unity thread or asynchronously).
* \see UpdateAreaInit
* \see UpdateAreaPost
*/
GraphUpdateThreading CanUpdateAsync (GraphUpdateObject o);
}
/** Holds a tagmask.
 * This is used to store which tags to change and what to set them to in a Pathfinding.GraphUpdateObject.
 * All variables are bitmasks.\n
 * It is a class (not a struct) due to technical limitations when working with Unity's GenericMenu,
 * so be wary that it is passed by reference, not by value as e.g LayerMask.
 *
 * \deprecated This class is being phased out
 */
[System.Serializable]
public class TagMask {
	/** Bitmask of which tags to change */
	public int tagsChange;
	/** Bitmask of the values to set the changed tags to */
	public int tagsSet;

	public TagMask () {}

	public TagMask (int change, int set) {
		this.tagsChange = change;
		this.tagsSet = set;
	}

	/** Both masks rendered in binary, separated by a newline */
	public override string ToString () {
		return System.Convert.ToString(tagsChange, 2) + "\n" + System.Convert.ToString(tagsSet, 2);
	}
}
/** Represents a collection of settings used to update nodes in a specific region of a graph.
 * \see AstarPath.UpdateGraphs
 * \see \ref graph-updates
 */
public class GraphUpdateObject {
	/** The bounds to update nodes within.
	 * Defined in world space.
	 */
	public Bounds bounds;

	/** Controlls if a flood fill will be carried out after this GUO has been applied.
	 * Disabling this can be used to gain a performance boost, but use with care.
	 * If you are sure that a GUO will not modify walkability or connections. You can set this to false.
	 * For example when only updating penalty values it can save processing power when setting this to false. Especially on large graphs.
	 * \note If you set this to false, even though it does change e.g walkability, it can lead to paths returning that they failed even though there is a path,
	 * or the try to search the whole graph for a path even though there is none, and will in the processes use wast amounts of processing power.
	 *
	 * If using the basic GraphUpdateObject (not a derived class), a quick way to check if it is going to need a flood fill is to check if #modifyWalkability is true or #updatePhysics is true.
	 */
	public bool requiresFloodFill = true;

	/** Use physics checks to update nodes.
	 * When updating a grid graph and this is true, the nodes' position and walkability will be updated using physics checks
	 * with settings from "Collision Testing" and "Height Testing".
	 *
	 * When updating a PointGraph, setting this to true will make it re-evaluate all connections in the graph which passes through the #bounds.
	 * This has no effect when updating GridGraphs if #modifyWalkability is turned on.
	 *
	 * On RecastGraphs, having this enabled will trigger a complete recalculation of all tiles intersecting the bounds.
	 * This is quite slow (but powerful). If you only want to update e.g penalty on existing nodes, leave it disabled.
	 */
	public bool updatePhysics = true;

	/** When #updatePhysics is true, GridGraphs will normally reset penalties, with this option you can override it.
	 * Good to use when you want to keep old penalties even when you update the graph.
	 *
	 * \shadowimage{resetPenaltyOnPhysics_False.png}
	 * \shadowimage{resetPenaltyOnPhysics_True.png}
	 */
	public bool resetPenaltyOnPhysics = true;

	/** Update Erosion for GridGraphs.
	 * When enabled, erosion will be recalculated for grid graphs
	 * after the GUO has been applied.
	 *
	 * \shadowimage{updateErosion.png}
	 *
	 * \see Pathfinding.GridGraph
	 */
	public bool updateErosion = true;

	/** NNConstraint to use.
	 * The Pathfinding.NNConstraint.SuitableGraph function will be called on the NNConstraint to enable filtering of which graphs to update.\n
	 * \note As the Pathfinding.NNConstraint.SuitableGraph function is A* Pathfinding Project Pro only, this variable doesn't really affect anything in the free version.
	 *
	 * \astarpro */
	public NNConstraint nnConstraint = NNConstraint.None;

	/** Penalty to add to the nodes.
	 * A penalty of 1000 is equivalent to the cost of moving 1 world unit.
	 */
	public int addPenalty;

	/** If true, all nodes' \a walkable variable will be set to #setWalkability */
	public bool modifyWalkability;

	/** If #modifyWalkability is true, the nodes' \a walkable variable will be set to this value */
	public bool setWalkability;

	/** If true, all nodes' \a tag will be set to #setTag */
	public bool modifyTag;

	/** If #modifyTag is true, all nodes' \a tag will be set to this value */
	public int setTag;

	/** Track which nodes are changed and save backup data.
	 * Used internally to revert changes if needed.
	 */
	public bool trackChangedNodes;

	/** Nodes which were updated by this GraphUpdateObject.
	 * Will only be filled if #trackChangedNodes is true.
	 * \note It might take a few frames for graph update objects to be applied.
	 * If you need this info directly, use AstarPath.FlushGraphUpdates.
	 */
	public List<GraphNode> changedNodes;

	// Backup storage for RevertFromBackup. backupData holds Penalty, Flags and
	// (for grid nodes) InternalGridFlags packed sequentially per node.
	private List<uint> backupData;
	private List<Int3> backupPositionData;

	/** A shape can be specified if a bounds object does not give enough precision.
	 * Note that if you set this, you should set the bounds so that it encloses the shape
	 * because the bounds will be used as an initial fast check for which nodes that should
	 * be updated.
	 */
	public GraphUpdateShape shape;

	/** Should be called on every node which is updated with this GUO before it is updated.
	 * \param node The node to save fields for. If null, nothing will be done
	 * \see #trackChangedNodes
	 */
	public virtual void WillUpdateNode (GraphNode node) {
		if (trackChangedNodes && node != null) {
			// Lazily claim pooled lists on first use (or after RevertFromBackup released them)
			if (changedNodes == null) { changedNodes = ListPool<GraphNode>.Claim(); backupData = ListPool<uint>.Claim(); backupPositionData = ListPool<Int3>.Claim(); }
			changedNodes.Add(node);
			backupPositionData.Add(node.position);
			backupData.Add(node.Penalty);
			backupData.Add(node.Flags);
			var gridNode = node as GridNode;
			if (gridNode != null) backupData.Add(gridNode.InternalGridFlags);
		}
	}

	/** Reverts penalties and flags (which includes walkability) on every node which was updated using this GUO.
	 * Data for reversion is only saved if #trackChangedNodes is true.
	 * \throws System.InvalidOperationException if #trackChangedNodes is false.
	 */
	public virtual void RevertFromBackup () {
		if (trackChangedNodes) {
			if (changedNodes == null) return;

			int counter = 0;
			for (int i = 0; i < changedNodes.Count; i++) {
				changedNodes[i].Penalty = backupData[counter];
				counter++;
				changedNodes[i].Flags = backupData[counter];
				counter++;
				// Grid nodes store an extra backup entry (see WillUpdateNode)
				var gridNode = changedNodes[i] as GridNode;
				if (gridNode != null) {
					gridNode.InternalGridFlags = (ushort)backupData[counter];
					counter++;
				}
				changedNodes[i].position = backupPositionData[i];
			}

			ListPool<GraphNode>.Release(changedNodes);
			ListPool<uint>.Release(backupData);
			ListPool<Int3>.Release(backupPositionData);
			// FIX: clear the references after releasing the lists back to the pool.
			// Previously the fields kept pointing at pooled lists, so reusing this
			// GUO (or calling RevertFromBackup twice) mutated/released lists that
			// other code may already have claimed from the pool.
			changedNodes = null;
			backupData = null;
			backupPositionData = null;
		} else {
			throw new System.InvalidOperationException("Changed nodes have not been tracked, cannot revert from backup");
		}
	}

	/** Updates the specified node using this GUO's settings */
	public virtual void Apply (GraphNode node) {
		if (shape == null || shape.Contains(node)) {
			//Update penalty and walkability
			node.Penalty = (uint)(node.Penalty+addPenalty);
			if (modifyWalkability) {
				node.Walkable = setWalkability;
			}

			//Update tags
			if (modifyTag) node.Tag = (uint)setTag;
		}
	}

	public GraphUpdateObject () {
	}

	/** Creates a new GUO with the specified bounds */
	public GraphUpdateObject (Bounds b) {
		bounds = b;
	}
}
/** Graphs implementing this interface expose the transformation from graph space to world space. */
public interface ITransformedGraph {
	GraphTransform transform { get; }
}
/** Graphs which support checking if a straight line between two points is obstructed.
 * Each overload returns true if the line from \a start to \a end hit an obstacle.
 */
public interface IRaycastableGraph {
	bool Linecast (Vector3 start, Vector3 end);
	/** \param hint A node close to the start point, may speed up the query */
	bool Linecast (Vector3 start, Vector3 end, GraphNode hint);
	/** \param hit Filled with information about where the line was obstructed */
	bool Linecast (Vector3 start, Vector3 end, GraphNode hint, out GraphHitInfo hit);
	/** \param trace If not null, filled with the nodes the line passed through */
	bool Linecast (Vector3 start, Vector3 end, GraphNode hint, out GraphHitInfo hit, List<GraphNode> trace);
}
/** Holds info about one pathfinding thread.
 * Mainly used to send information about how the thread should execute when starting it
 */
public struct PathThreadInfo {
	/** Zero-based index of the pathfinding thread this info belongs to. */
	public readonly int threadIndex;
	/** The AstarPath instance that owns the thread. */
	public readonly AstarPath astar;
	/** Per-thread path calculation data. */
	public readonly PathHandler runData;

	public PathThreadInfo (int index, AstarPath astar, PathHandler runData) {
		this.threadIndex = index;
		this.astar = astar;
		this.runData = runData;
	}
}
/** Integer Rectangle.
 * Works almost like UnityEngine.Rect but with integer coordinates.
 * The bounds are inclusive: a rect where xmin == xmax is one node wide.
 */
public struct IntRect {
	public int xmin, ymin, xmax, ymax;

	public IntRect (int xmin, int ymin, int xmax, int ymax) {
		this.xmin = xmin;
		this.xmax = xmax;
		this.ymin = ymin;
		this.ymax = ymax;
	}

	/** Returns true if the point is inside the rect (bounds inclusive). */
	public bool Contains (int x, int y) {
		return !(x < xmin || y < ymin || x > xmax || y > ymax);
	}

	/** Width in nodes. Since bounds are inclusive this is xmax-xmin+1. */
	public int Width {
		get {
			return xmax-xmin+1;
		}
	}

	/** Height in nodes. Since bounds are inclusive this is ymax-ymin+1. */
	public int Height {
		get {
			return ymax-ymin+1;
		}
	}

	/** Returns if this rectangle is valid.
	 * An invalid rect could have e.g xmin > xmax.
	 * Rectangles with a zero area are invalid.
	 */
	public bool IsValid () {
		return xmin <= xmax && ymin <= ymax;
	}

	public static bool operator == (IntRect a, IntRect b) {
		return a.xmin == b.xmin && a.xmax == b.xmax && a.ymin == b.ymin && a.ymax == b.ymax;
	}

	public static bool operator != (IntRect a, IntRect b) {
		return a.xmin != b.xmin || a.xmax != b.xmax || a.ymin != b.ymin || a.ymax != b.ymax;
	}

	public override bool Equals (System.Object obj) {
		// FIX: the previous implementation cast unconditionally, which threw
		// a NullReferenceException/InvalidCastException when obj was null or
		// not an IntRect. Object.Equals must return false in those cases.
		if (!(obj is IntRect)) return false;
		var rect = (IntRect)obj;
		return xmin == rect.xmin && xmax == rect.xmax && ymin == rect.ymin && ymax == rect.ymax;
	}

	public override int GetHashCode () {
		return xmin*131071 ^ xmax*3571 ^ ymin*3109 ^ ymax*7;
	}

	/** Returns the intersection rect between the two rects.
	 * The intersection rect is the area which is inside both rects.
	 * If the rects do not have an intersection, an invalid rect is returned.
	 * \see IsValid
	 */
	public static IntRect Intersection (IntRect a, IntRect b) {
		return new IntRect(
			System.Math.Max(a.xmin, b.xmin),
			System.Math.Max(a.ymin, b.ymin),
			System.Math.Min(a.xmax, b.xmax),
			System.Math.Min(a.ymax, b.ymax)
			);
	}

	/** Returns if the two rectangles intersect each other
	 */
	public static bool Intersects (IntRect a, IntRect b) {
		return !(a.xmin > b.xmax || a.ymin > b.ymax || a.xmax < b.xmin || a.ymax < b.ymin);
	}

	/** Returns a new rect which contains both input rects.
	 * This rectangle may contain areas outside both input rects as well in some cases.
	 */
	public static IntRect Union (IntRect a, IntRect b) {
		return new IntRect(
			System.Math.Min(a.xmin, b.xmin),
			System.Math.Min(a.ymin, b.ymin),
			System.Math.Max(a.xmax, b.xmax),
			System.Math.Max(a.ymax, b.ymax)
			);
	}

	/** Returns a new IntRect which is expanded to contain the point */
	public IntRect ExpandToContain (int x, int y) {
		return new IntRect(
			System.Math.Min(xmin, x),
			System.Math.Min(ymin, y),
			System.Math.Max(xmax, x),
			System.Math.Max(ymax, y)
			);
	}

	/** Returns a new rect which is expanded by \a range in all directions.
	 * \param range How far to expand. Negative values are permitted.
	 */
	public IntRect Expand (int range) {
		return new IntRect(xmin-range,
			ymin-range,
			xmax+range,
			ymax+range
			);
	}

	/** Matrices for rotation.
	 * Each group of 4 elements is a 2x2 matrix.
	 * The XZ position is multiplied by this.
	 * So
	 * \code
	 * //A rotation by 90 degrees clockwise, second matrix in the array
	 * (5,2) * ((0, 1), (-1, 0)) = (2,-5)
	 * \endcode
	 */
	private static readonly int[] Rotations = {
		1, 0,  //Identity matrix
		0, 1,

		0, 1,
		-1, 0,

		-1, 0,
		0, -1,

		0, -1,
		1, 0
	};

	/** Returns a new rect rotated around the origin 90*r degrees.
	 * Ensures that a valid rect is returned.
	 */
	public IntRect Rotate (int r) {
		int mx1 = Rotations[r*4+0];
		int mx2 = Rotations[r*4+1];
		int my1 = Rotations[r*4+2];
		int my2 = Rotations[r*4+3];

		// Rotate the two corner points and rebuild a valid (min/max ordered) rect
		int p1x = mx1*xmin + mx2*ymin;
		int p1y = my1*xmin + my2*ymin;

		int p2x = mx1*xmax + mx2*ymax;
		int p2y = my1*xmax + my2*ymax;

		return new IntRect(
			System.Math.Min(p1x, p2x),
			System.Math.Min(p1y, p2y),
			System.Math.Max(p1x, p2x),
			System.Math.Max(p1y, p2y)
			);
	}

	/** Returns a new rect which is offset by the specified amount.
	 */
	public IntRect Offset (Int2 offset) {
		return new IntRect(xmin+offset.x, ymin + offset.y, xmax + offset.x, ymax + offset.y);
	}

	/** Returns a new rect which is offset by the specified amount.
	 */
	public IntRect Offset (int x, int y) {
		return new IntRect(xmin+x, ymin + y, xmax + x, ymax + y);
	}

	public override string ToString () {
		return "[x: "+xmin+"..."+xmax+", y: " + ymin +"..."+ymax+"]";
	}

	/** Draws some debug lines representing the rect */
	public void DebugDraw (GraphTransform transform, Color color) {
		Vector3 p1 = transform.Transform(new Vector3(xmin, 0, ymin));
		Vector3 p2 = transform.Transform(new Vector3(xmin, 0, ymax));
		Vector3 p3 = transform.Transform(new Vector3(xmax, 0, ymax));
		Vector3 p4 = transform.Transform(new Vector3(xmax, 0, ymin));

		Debug.DrawLine(p1, p2, color);
		Debug.DrawLine(p2, p3, color);
		Debug.DrawLine(p3, p4, color);
		Debug.DrawLine(p4, p1, color);
	}
}
#region Delegates

/** Delegate with one Path object as parameter.
 * This is used for callbacks when a path has finished calculation.\n
 * Example function:
 * \code
 * public void Start () {
 *  //Assumes a Seeker component is attached to the GameObject
 *  Seeker seeker = GetComponent<Seeker>();
 *
 *  //seeker.pathCallback is a OnPathDelegate, we add the function OnPathComplete to it so it will be called whenever a path has finished calculating on that seeker
 *  seeker.pathCallback += OnPathComplete;
 * }
 *
 * public void OnPathComplete (Path p) {
 *  Debug.Log ("This is called when a path is completed on the seeker attached to this GameObject");
 * }
 * \endcode
 */
public delegate void OnPathDelegate (Path p);

/** Callback invoked with a single graph, e.g when a graph has been scanned. */
public delegate void OnGraphDelegate (NavGraph graph);

/** Callback invoked with the AstarPath instance, e.g before/after a scan. */
public delegate void OnScanDelegate (AstarPath script);

/** Callback reporting scan progress, e.g for a progressbar. \see Progress */
public delegate void OnScanStatus (Progress progress);

#endregion
#region Enums
/** Flags describing on which thread(s) the stages of a graph update should run.
 * \see IUpdatableGraph.CanUpdateAsync
 */
public enum GraphUpdateThreading {
	/** Call UpdateArea in the unity thread.
	 * This is the default value.
	 * Not compatible with SeparateThread.
	 */
	UnityThread = 0,
	/** Call UpdateArea in a separate thread. Not compatible with UnityThread. */
	SeparateThread = 1 << 0,
	/** Calls UpdateAreaInit in the Unity thread before everything else */
	UnityInit = 1 << 1,
	/** Calls UpdateAreaPost in the Unity thread after everything else.
	 * This is used together with SeparateThread to apply the result of the multithreaded
	 * calculations to the graph without modifying it at the same time as some other script
	 * might be using it (e.g calling GetNearest).
	 */
	UnityPost = 1 << 2,
	/** Convenience combination: separate-thread update with Unity-thread init. */
	SeparateAndUnityInit = SeparateThread | UnityInit
}
/** How path results are logged by the system */
public enum PathLog {
	/** Does not log anything. This is recommended for release since logging path results has a performance overhead. */
	None,
	/** Logs basic info about the paths */
	Normal,
	/** Includes additional info */
	Heavy,
	/** Same as heavy, but displays the info in-game using GUI */
	InGame,
	/** Same as normal, but logs only paths which returned an error */
	OnlyErrors
}
/** Heuristic to use. Heuristic is the estimated cost from the current node to the target */
public enum Heuristic {
	/** Manhattan distance (sum of per-axis deltas). */
	Manhattan,
	/** Manhattan distance adjusted for diagonal movement. */
	DiagonalManhattan,
	/** Straight-line (euclidean) distance. */
	Euclidean,
	/** No heuristic; the search degrades to Dijkstra's algorithm. */
	None
}
/** What data to draw the graph debugging with */
public enum GraphDebugMode {
	/** Color nodes by connected-component (area) id. */
	Areas,
	/** Color nodes by cost from the start node. */
	G,
	/** Color nodes by heuristic (estimated cost to target). */
	H,
	/** Color nodes by F score (G + H). */
	F,
	/** Color nodes by penalty. */
	Penalty,
	/** Draw node connections. */
	Connections,
	/** Color nodes by tag. */
	Tags
}
/** Number of pathfinding threads to use.
 * Negative values select an automatic count based on the machine's load/core count;
 * None (0) calculates paths in a coroutine on the Unity thread instead.
 */
public enum ThreadCount {
	AutomaticLowLoad = -1,
	AutomaticHighLoad = -2,
	None = 0,
	One = 1,
	Two,
	Three,
	Four,
	Five,
	Six,
	Seven,
	Eight
}
/** Internal lifecycle state of a path, in the order the stages occur. */
public enum PathState {
	Created = 0,
	PathQueue = 1,
	Processing = 2,
	ReturnQueue = 3,
	Returned = 4
}
/** Result state of a path calculation. */
public enum PathCompleteState {
	/** The path has not been calculated yet. */
	NotCalculated = 0,
	/** The path calculation failed. */
	Error = 1,
	/** The complete path was found. */
	Complete = 2,
	/** Only a partial path to the closest reachable node was found. */
	Partial = 3
}
#endregion
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Diagnostics;
using System.Windows.Controls;
using Microsoft.CodeAnalysis.Editor.Implementation.IntelliSense.QuickInfo;
using Microsoft.CodeAnalysis.Editor.UnitTests.Classification;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.UnitTests.QuickInfo
{
/// <summary>
/// Base class for semantic Quick Info tests. Provides classification helpers
/// (thin wrappers over <see cref="ClassificationBuilder"/>) and factory methods
/// that build verification callbacks for the individual Quick Info sections.
/// </summary>
public abstract class AbstractSemanticQuickInfoSourceTests
{
    protected readonly ClassificationBuilder ClassificationBuilder;

    protected AbstractSemanticQuickInfoSourceTests()
    {
        ClassificationBuilder = new ClassificationBuilder();
    }

    // Classification wrappers. Each simply forwards to the builder so tests can
    // write e.g. Keyword("void") instead of ClassificationBuilder.Keyword("void").
    [DebuggerStepThrough]
    protected Tuple<string, string> Struct(string value) => ClassificationBuilder.Struct(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Enum(string value) => ClassificationBuilder.Enum(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Interface(string value) => ClassificationBuilder.Interface(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Class(string value) => ClassificationBuilder.Class(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Delegate(string value) => ClassificationBuilder.Delegate(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> TypeParameter(string value) => ClassificationBuilder.TypeParameter(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> String(string value) => ClassificationBuilder.String(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Verbatim(string value) => ClassificationBuilder.Verbatim(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Keyword(string value) => ClassificationBuilder.Keyword(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> WhiteSpace(string value) => ClassificationBuilder.WhiteSpace(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Text(string value) => ClassificationBuilder.Text(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> NumericLiteral(string value) => ClassificationBuilder.NumericLiteral(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> PPKeyword(string value) => ClassificationBuilder.PPKeyword(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> PPText(string value) => ClassificationBuilder.PPText(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Identifier(string value) => ClassificationBuilder.Identifier(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Inactive(string value) => ClassificationBuilder.Inactive(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Comment(string value) => ClassificationBuilder.Comment(value);

    [DebuggerStepThrough]
    protected Tuple<string, string> Number(string value) => ClassificationBuilder.Number(value);

    protected ClassificationBuilder.PunctuationClassificationTypes Punctuation => ClassificationBuilder.Punctuation;

    protected ClassificationBuilder.OperatorClassificationTypes Operators => ClassificationBuilder.Operator;

    protected ClassificationBuilder.XmlDocClassificationTypes XmlDoc => ClassificationBuilder.XmlDoc;

    /// <summary>Joins lines with CRLF, the line ending the tests expect.</summary>
    protected string Lines(params string[] lines) => string.Join("\r\n", lines);

    /// <summary>Identity helper; exists purely for test readability.</summary>
    protected Tuple<string, string>[] ExpectedClassifications(
        params Tuple<string, string>[] expectedClassifications) => expectedClassifications;

    /// <summary>Returns null, signalling that no classifications should be verified.</summary>
    protected Tuple<string, string>[] NoClassifications() => null;

    /// <summary>
    /// Blocks until the deferred documentation-comment computation for
    /// <paramref name="content"/> has finished, if there is one.
    /// </summary>
    protected void WaitForDocumentationComment(object content)
    {
        var quickInfoContent = content as QuickInfoDisplayDeferredContent;
        if (quickInfoContent != null)
        {
            var docComment = quickInfoContent.Documentation as DocumentationCommentDeferredContent;
            if (docComment != null)
            {
                docComment.WaitForDocumentationCommentTask_ForTestingPurposesOnly();
            }
        }
    }

    /// <summary>Builds a callback asserting the symbol glyph shown in Quick Info.</summary>
    internal Action<object> SymbolGlyph(Glyph expectedGlyph)
    {
        return content =>
        {
            var glyphContent = ((QuickInfoDisplayDeferredContent)content).SymbolGlyph;
            Assert.Equal(expectedGlyph, glyphContent.Glyph);
        };
    }

    /// <summary>
    /// Builds a callback verifying the main description text (and optionally its
    /// classifications), for either full Quick Info content or bare classifiable content.
    /// </summary>
    protected Action<object> MainDescription(
        string expectedText,
        Tuple<string, string>[] expectedClassifications = null)
    {
        return content =>
            content.TypeSwitch(
                (QuickInfoDisplayDeferredContent quickInfo) =>
                    ClassificationTestHelper.Verify(expectedText, expectedClassifications, quickInfo.MainDescription.ClassifiableContent),
                (ClassifiableDeferredContent classifiable) =>
                    ClassificationTestHelper.Verify(expectedText, expectedClassifications, classifiable.ClassifiableContent));
    }

    /// <summary>
    /// Builds a callback verifying the documentation section, whichever of the two
    /// deferred-content representations it uses.
    /// </summary>
    protected Action<object> Documentation(
        string expectedText,
        Tuple<string, string>[] expectedClassifications = null)
    {
        return content =>
        {
            var documentation = ((QuickInfoDisplayDeferredContent)content).Documentation;
            documentation.TypeSwitch(
                (DocumentationCommentDeferredContent docComment) =>
                {
                    var textBlock = (TextBlock)docComment.Create();
                    Assert.Equal(expectedText, textBlock.Text);
                },
                (ClassifiableDeferredContent classifiable) =>
                {
                    var classifiableContent = classifiable.ClassifiableContent;
                    Assert.Equal(expectedText, classifiableContent.GetFullText());
                    ClassificationTestHelper.Verify(expectedText, expectedClassifications, classifiableContent);
                });
        };
    }

    /// <summary>Builds a callback verifying the type parameter map section.</summary>
    protected Action<object> TypeParameterMap(
        string expectedText,
        Tuple<string, string>[] expectedClassifications = null)
    {
        // NOTE: the type parameter map historically carried an extra leading line
        // break; the comparison below intentionally uses expectedText as given.
        return content =>
            ClassificationTestHelper.Verify(
                expectedText,
                expectedClassifications,
                ((QuickInfoDisplayDeferredContent)content).TypeParameterMap.ClassifiableContent);
    }

    /// <summary>Builds a callback verifying the anonymous types section.</summary>
    protected Action<object> AnonymousTypes(
        string expectedText,
        Tuple<string, string>[] expectedClassifications = null)
    {
        // NOTE: same leading-line-break caveat as TypeParameterMap above.
        return content =>
            ClassificationTestHelper.Verify(
                expectedText,
                expectedClassifications,
                ((QuickInfoDisplayDeferredContent)content).AnonymousTypes.ClassifiableContent);
    }

    /// <summary>Callback asserting the type parameter map section is empty.</summary>
    protected Action<object> NoTypeParameterMap
    {
        get
        {
            return content =>
                Assert.Equal(string.Empty, ((QuickInfoDisplayDeferredContent)content).TypeParameterMap.ClassifiableContent.GetFullText());
        }
    }

    /// <summary>
    /// Builds a callback verifying the usage text and whether a warning glyph is shown.
    /// </summary>
    protected Action<object> Usage(string expectedText, bool expectsWarningGlyph = false)
    {
        return content =>
        {
            var quickInfoContent = (QuickInfoDisplayDeferredContent)content;
            Assert.Equal(expectedText, quickInfoContent.UsageText.ClassifiableContent.GetFullText());

            var hasWarningGlyph = quickInfoContent.WarningGlyph != null && quickInfoContent.WarningGlyph.Glyph == Glyph.CompletionWarning;
            Assert.Equal(expectsWarningGlyph, hasWarningGlyph);
        };
    }

    /// <summary>
    /// True if the token at <paramref name="position"/> is inside a member body,
    /// i.e. a speculative semantic model can be used there.
    /// </summary>
    protected static bool CanUseSpeculativeSemanticModel(Document document, int position)
    {
        var syntaxFacts = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
        var node = document.GetSyntaxRootAsync().Result.FindToken(position).Parent;

        return !syntaxFacts.GetMemberBodySpanForSpeculativeBinding(node).IsEmpty;
    }

    protected abstract void Test(string markup, params Action<object>[] expectedResults);
}
}
| |
// "Therefore those skilled at the unorthodox
// are infinite as heaven and earth,
// inexhaustible as the great rivers.
// When they come to an end,
// they begin again,
// like the days and months;
// they die and are reborn,
// like the four seasons."
//
// - Sun Tsu,
// "The Art of War"
using System;
using System.Drawing;
using System.Drawing.Drawing2D;
using TheArtOfDev.HtmlRenderer.Adapters.Entities;
using TheArtOfDev.HtmlRenderer.Core.Utils;
using TheArtOfDev.HtmlRenderer.Adapters;
using TheArtOfDev.HtmlRenderer.WinForms.Utilities;
namespace TheArtOfDev.HtmlRenderer.WinForms.Adapters
{
/// <summary>
/// Adapter for WinForms Graphics for core.
/// </summary>
internal sealed class GraphicsAdapter : RGraphics
{
#region Fields and Consts
/// <summary>
/// used for <see cref="MeasureString(string,RFont,double,out int,out double)"/> calculation.
/// </summary>
private static readonly int[] _charFit = new int[1];
/// <summary>
/// used for <see cref="MeasureString(string,RFont,double,out int,out double)"/> calculation.
/// </summary>
private static readonly int[] _charFitWidth = new int[1000];
/// <summary>
/// Used for GDI+ measure string.
/// </summary>
private static readonly CharacterRange[] _characterRanges = new CharacterRange[1];
/// <summary>
/// The string format to use for measuring strings for GDI+ text rendering
/// </summary>
private static readonly StringFormat _stringFormat;
/// <summary>
/// The string format to use for rendering strings for GDI+ text rendering
/// </summary>
private static readonly StringFormat _stringFormat2;
/// <summary>
/// The wrapped WinForms graphics object
/// </summary>
private readonly Graphics _g;
/// <summary>
/// Use GDI+ text rendering to measure/draw text.
/// </summary>
private readonly bool _useGdiPlusTextRendering;
#if !MONO
/// <summary>
/// the initialized HDC used
/// </summary>
private IntPtr _hdc;
#endif
/// <summary>
/// if to release the graphics object on dispose
/// </summary>
private readonly bool _releaseGraphics;
/// <summary>
/// If text alignment was set to RTL
/// </summary>
private bool _setRtl;
#endregion
/// <summary>
/// Init static resources.
/// </summary>
static GraphicsAdapter()
{
    // Measuring format: must not clip and must count trailing spaces, otherwise
    // measured widths disagree with what DrawString actually renders.
    _stringFormat = new StringFormat(StringFormat.GenericTypographic);
    _stringFormat.FormatFlags = StringFormatFlags.NoClip | StringFormatFlags.MeasureTrailingSpaces;

    // Rendering format: plain typographic format (RTL flag toggled at draw time).
    _stringFormat2 = new StringFormat(StringFormat.GenericTypographic);
}
/// <summary>
/// Init.
/// </summary>
/// <param name="g">the win forms graphics object to use</param>
/// <param name="useGdiPlusTextRendering">Use GDI+ text rendering to measure/draw text</param>
/// <param name="releaseGraphics">optional: if to release the graphics object on dispose (default - false)</param>
public GraphicsAdapter(Graphics g, bool useGdiPlusTextRendering, bool releaseGraphics = false)
    : base(WinFormsAdapter.Instance, Utils.Convert(g.ClipBounds))
{
    ArgChecker.AssertArgNotNull(g, "g");

    _g = g;
    _releaseGraphics = releaseGraphics;

#if MONO
    // Mono has no usable GDI text path, so GDI+ rendering is forced on.
    _useGdiPlusTextRendering = true;
#else
    _useGdiPlusTextRendering = useGdiPlusTextRendering;
#endif
}
/// <summary>
/// Discard the current clip and restore the previous one from the stack.
/// </summary>
public override void PopClip()
{
    ReleaseHdc();
    _clipStack.Pop();
    var restoredClip = _clipStack.Peek();
    _g.SetClip(Utils.Convert(restoredClip), CombineMode.Replace);
}
/// <summary>
/// Push the given rectangle onto the clip stack and make it the active clip.
/// </summary>
public override void PushClip(RRect rect)
{
    ReleaseHdc();
    _clipStack.Push(rect);
    var newClip = Utils.Convert(rect);
    _g.SetClip(newClip, CombineMode.Replace);
}
/// <summary>
/// Exclude the given rectangle from the current clip. The stack keeps the
/// unchanged outer clip so PopClip restores it correctly.
/// </summary>
public override void PushClipExclude(RRect rect)
{
    ReleaseHdc();
    var currentClip = _clipStack.Peek();
    _clipStack.Push(currentClip);
    _g.SetClip(Utils.Convert(rect), CombineMode.Exclude);
}
/// <summary>
/// Switch the graphics object to anti-aliased smoothing and return the
/// previous mode so it can be restored via ReturnPreviousSmoothingMode.
/// </summary>
public override Object SetAntiAliasSmoothingMode()
{
    ReleaseHdc();
    var previousMode = _g.SmoothingMode;
    _g.SmoothingMode = SmoothingMode.AntiAlias;
    return previousMode;
}
/// <summary>
/// Restore a smoothing mode previously captured by SetAntiAliasSmoothingMode.
/// A null argument is ignored.
/// </summary>
public override void ReturnPreviousSmoothingMode(Object prevMode)
{
    if (prevMode == null)
        return;

    ReleaseHdc();
    _g.SmoothingMode = (SmoothingMode)prevMode;
}
/// <summary>
/// Measure the rendered size of <paramref name="str"/> in the given font,
/// using either GDI+ (MeasureCharacterRanges) or raw GDI (GetTextExtentPoint32)
/// depending on the configured rendering mode. As a side effect, lazily fills
/// the font adapter's height/underline metrics on first use (font.Height &lt; 0).
/// </summary>
public override RSize MeasureString(string str, RFont font)
{
    if (_useGdiPlusTextRendering)
    {
        ReleaseHdc();
        var fontAdapter = (FontAdapter)font;
        var realFont = fontAdapter.Font;
        _characterRanges[0] = new CharacterRange(0, str.Length);
        _stringFormat.SetMeasurableCharacterRanges(_characterRanges);
        var size = _g.MeasureCharacterRanges(str, realFont, RectangleF.Empty, _stringFormat)[0].GetBounds(_g).Size;

        if (font.Height < 0)
        {
            // Derive the underline offset from the font's descent ratio; the
            // rounding constant differs between Mono and .NET GDI+ rendering.
            var height = realFont.Height;
            var descent = realFont.Size * realFont.FontFamily.GetCellDescent(realFont.Style) / realFont.FontFamily.GetEmHeight(realFont.Style);
#if !MONO
            fontAdapter.SetMetrics(height, (int)Math.Round((height - descent + .5f)));
#else
            fontAdapter.SetMetrics(height, (int)Math.Round((height - descent + 1f)));
#endif
        }

        return Utils.Convert(size);
    }
    else
    {
#if !MONO
        SetFont(font);
        var size = new Size();
        Win32Utils.GetTextExtentPoint32(_hdc, str, str.Length, ref size);

        if (font.Height < 0)
        {
            // Compute metrics from the GDI TEXTMETRIC structure instead.
            TextMetric lptm;
            Win32Utils.GetTextMetrics(_hdc, out lptm);
            ((FontAdapter)font).SetMetrics(size.Height, lptm.tmHeight - lptm.tmDescent + lptm.tmUnderlined + 1);
        }

        return Utils.Convert(size);
#else
        // Unreachable: on Mono _useGdiPlusTextRendering is forced true in the ctor.
        throw new InvalidProgramException("Invalid Mono code");
#endif
    }
}
/// <summary>
/// Measure how many leading characters of <paramref name="str"/> fit within
/// <paramref name="maxWidth"/>, returning the count in <paramref name="charFit"/>
/// and their width in <paramref name="charFitWidth"/>. The GDI+ path measures
/// incrementally prefix-by-prefix; the GDI path uses GetTextExtentExPoint.
/// </summary>
public override void MeasureString(string str, RFont font, double maxWidth, out int charFit, out double charFitWidth)
{
    charFit = 0;
    charFitWidth = 0;
    if (_useGdiPlusTextRendering)
    {
        ReleaseHdc();

        var size = MeasureString(str, font);

        // Grow the prefix until it no longer fits within maxWidth.
        for (int i = 1; i <= str.Length; i++)
        {
            charFit = i - 1;
            RSize pSize = MeasureString(str.Substring(0, i), font);
            if (pSize.Height <= size.Height && pSize.Width < maxWidth)
                charFitWidth = pSize.Width;
            else
                break;
        }
    }
    else
    {
#if !MONO
        SetFont(font);
        var size = new Size();
        // _charFit/_charFitWidth are preallocated static scratch buffers.
        Win32Utils.GetTextExtentExPoint(_hdc, str, str.Length, (int)Math.Round(maxWidth), _charFit, _charFitWidth, ref size);
        charFit = _charFit[0];
        charFitWidth = charFit > 0 ? _charFitWidth[charFit - 1] : 0;
#endif
    }
}
/// <summary>
/// Draw a string at the given point, via GDI+ or raw GDI. In RTL mode the GDI+
/// path offsets the X origin by the string width since alignment is right-based.
/// The GDI path uses opaque TextOut for fully opaque colors and a slower
/// alpha-blended path for transparent ones.
/// </summary>
public override void DrawString(string str, RFont font, RColor color, RPoint point, RSize size, bool rtl)
{
    if (_useGdiPlusTextRendering)
    {
        ReleaseHdc();
        SetRtlAlignGdiPlus(rtl);
        var brush = ((BrushAdapter)_adapter.GetSolidBrush(color)).Brush;
        _g.DrawString(str, ((FontAdapter)font).Font, brush, (int)(Math.Round(point.X) + (rtl ? size.Width : 0)), (int)Math.Round(point.Y), _stringFormat2);
    }
    else
    {
#if !MONO
        var pointConv = Utils.ConvertRound(point);
        var colorConv = Utils.Convert(color);

        if (color.A == 255)
        {
            // Opaque text: fast direct GDI TextOut.
            SetFont(font);
            SetTextColor(colorConv);
            SetRtlAlignGdi(rtl);

            Win32Utils.TextOut(_hdc, pointConv.X, pointConv.Y, str, str.Length);
        }
        else
        {
            // Transparent text: GDI has no alpha support, so render through
            // an intermediate buffer and alpha-blend it.
            InitHdc();
            SetRtlAlignGdi(rtl);
            DrawTransparentText(_hdc, str, font, pointConv, Utils.ConvertRound(size), colorConv);
        }
#endif
    }
}
/// <summary>
/// Create a texture brush from the given image, translated so the texture
/// origin lines up with <paramref name="translateTransformLocation"/>.
/// The returned adapter owns the brush and disposes it.
/// </summary>
public override RBrush GetTextureBrush(RImage image, RRect dstRect, RPoint translateTransformLocation)
{
    var sourceImage = ((ImageAdapter)image).Image;
    var textureBrush = new TextureBrush(sourceImage, Utils.Convert(dstRect));
    textureBrush.TranslateTransform((float)translateTransformLocation.X, (float)translateTransformLocation.Y);
    return new BrushAdapter(textureBrush, true);
}
/// <summary>
/// Create a new, empty graphics path adapter for building figures.
/// </summary>
public override RGraphicsPath GetGraphicsPath()
{
    return new GraphicsPathAdapter();
}
/// <summary>
/// Release the HDC if one is held, optionally dispose the wrapped Graphics,
/// and undo any RTL state left on the shared string format.
/// </summary>
public override void Dispose()
{
    ReleaseHdc();
    if (_releaseGraphics)
        _g.Dispose();

    // _stringFormat2 is a shared static; if this instance toggled it to RTL,
    // toggle the flag back so the next adapter starts from LTR state.
    if (_useGdiPlusTextRendering && _setRtl)
        _stringFormat2.FormatFlags ^= StringFormatFlags.DirectionRightToLeft;
}
#region Delegate graphics methods
/// <summary>
/// Draw a line between the two points with the given pen (GDI+ path; any
/// held HDC is released first so Graphics calls are valid).
/// </summary>
public override void DrawLine(RPen pen, double x1, double y1, double x2, double y2)
{
    ReleaseHdc();
    _g.DrawLine(((PenAdapter)pen).Pen, (float)x1, (float)y1, (float)x2, (float)y2);
}
/// <summary>
/// Draw the outline of a rectangle with the given pen.
/// </summary>
public override void DrawRectangle(RPen pen, double x, double y, double width, double height)
{
    ReleaseHdc();
    var gdiPen = ((PenAdapter)pen).Pen;
    _g.DrawRectangle(gdiPen, (float)x, (float)y, (float)width, (float)height);
}
/// <summary>
/// Fill a rectangle with the given brush.
/// </summary>
public override void DrawRectangle(RBrush brush, double x, double y, double width, double height)
{
    ReleaseHdc();
    var gdiBrush = ((BrushAdapter)brush).Brush;
    _g.FillRectangle(gdiBrush, (float)x, (float)y, (float)width, (float)height);
}
/// <summary>
/// Draw the given portion of the image into the destination rectangle.
/// </summary>
public override void DrawImage(RImage image, RRect destRect, RRect srcRect)
{
    ReleaseHdc();
    var gdiImage = ((ImageAdapter)image).Image;
    _g.DrawImage(gdiImage, Utils.Convert(destRect), Utils.Convert(srcRect), GraphicsUnit.Pixel);
}
/// <summary>
/// Draw the whole image into the destination rectangle.
/// </summary>
public override void DrawImage(RImage image, RRect destRect)
{
    ReleaseHdc();
    var gdiImage = ((ImageAdapter)image).Image;
    _g.DrawImage(gdiImage, Utils.Convert(destRect));
}
/// <summary>
/// Draw the outline of the given path with the given pen.
/// </summary>
public override void DrawPath(RPen pen, RGraphicsPath path)
{
    // Graphics members cannot be used while a device context obtained via GetHdc()
    // is outstanding, so release it first — every other delegate method in this
    // section does the same; this overload was the only one missing the call.
    // ReleaseHdc() is a no-op when no HDC is held, so this is safe in all cases.
    ReleaseHdc();
    _g.DrawPath(((PenAdapter)pen).Pen, ((GraphicsPathAdapter)path).GraphicsPath);
}
/// <summary>
/// Fill the interior of the given path with the given brush.
/// </summary>
public override void DrawPath(RBrush brush, RGraphicsPath path)
{
    ReleaseHdc();
    var gdiPath = ((GraphicsPathAdapter)path).GraphicsPath;
    _g.FillPath(((BrushAdapter)brush).Brush, gdiPath);
}
/// <summary>
/// Fill the polygon described by the given points with the given brush.
/// Does nothing for a null or empty point set.
/// </summary>
public override void DrawPolygon(RBrush brush, RPoint[] points)
{
    if (points == null || points.Length == 0)
        return;

    ReleaseHdc();
    _g.FillPolygon(((BrushAdapter)brush).Brush, Utils.Convert(points));
}
#endregion
#region Private methods
/// <summary>
/// Release current HDC to be able to use <see cref="Graphics"/> methods.
/// No-op when no HDC is currently held.
/// </summary>
private void ReleaseHdc()
{
#if !MONO
    if (_hdc != IntPtr.Zero)
    {
        // Drop the clip region installed by InitHdc before handing the DC back.
        Win32Utils.SelectClipRgn(_hdc, IntPtr.Zero);
        _g.ReleaseHdc(_hdc);
        _hdc = IntPtr.Zero;
    }
#endif
}
#if !MONO
/// <summary>
/// Init HDC for the current graphics object to be used to call GDI directly.
/// No-op when an HDC is already held.
/// </summary>
private void InitHdc()
{
    if (_hdc == IntPtr.Zero)
    {
        // Capture the Graphics clip as a GDI region and re-apply it to the raw DC
        // so direct GDI drawing honors the same clipping.
        var clip = _g.Clip.GetHrgn(_g);
        _hdc = _g.GetHdc();
        _setRtl = false;
        // 1 = TRANSPARENT background mode, so text is drawn without an opaque box.
        Win32Utils.SetBkMode(_hdc, 1);
        Win32Utils.SelectClipRgn(_hdc, clip);
        // SelectClipRgn works on a copy of the region, so the handle can be freed now.
        Win32Utils.DeleteObject(clip);
    }
}
/// <summary>
/// Set a resource (e.g. a font) for the specified device context.
/// WARNING: Calling Font.ToHfont() many times without releasing the font handle crashes the app.
/// </summary>
private void SetFont(RFont font)
{
    InitHdc();
    // HFont is presumably a handle cached by FontAdapter, avoiding the repeated
    // ToHfont() leak noted above — confirm in FontAdapter.
    Win32Utils.SelectObject(_hdc, ((FontAdapter)font).HFont);
}
/// <summary>
/// Set the text color of the device context.
/// </summary>
private void SetTextColor(Color color)
{
    InitHdc();
    // GDI COLORREF layout is 0x00BBGGRR.
    int colorRef = color.R | (color.G & 0xFF) << 8 | (color.B & 0xFF) << 16;
    Win32Utils.SetTextColor(_hdc, colorRef);
}
/// <summary>
/// Change text align to Left-to-Right or Right-to-Left if required.
/// The Win32 call is made only when the direction actually changes.
/// </summary>
private void SetRtlAlignGdi(bool rtl)
{
    if (rtl != _setRtl)
    {
        Win32Utils.SetTextAlign(_hdc, rtl ? Win32Utils.TextAlignRtl : Win32Utils.TextAlignDefault);
        _setRtl = rtl;
    }
}
/// <summary>
/// Special draw logic to draw transparent text using GDI.<br/>
/// 1. Create in-memory DC<br/>
/// 2. Copy background to in-memory DC<br/>
/// 3. Draw the text to in-memory DC<br/>
/// 4. Copy the in-memory DC to the proper location with alpha blend<br/>
/// </summary>
private static void DrawTransparentText(IntPtr hdc, string str, RFont font, Point point, Size size, Color color)
{
    IntPtr dib;
    var memoryHdc = Win32Utils.CreateMemoryHdc(hdc, size.Width, size.Height, out dib);
    try
    {
        // copy target background to memory HDC so when copied back it will have the proper background
        Win32Utils.BitBlt(memoryHdc, 0, 0, size.Width, size.Height, hdc, point.X, point.Y, Win32Utils.BitBltCopy);

        // Create and select font
        Win32Utils.SelectObject(memoryHdc, ((FontAdapter)font).HFont);

        // GDI COLORREF layout is 0x00BBGGRR.
        Win32Utils.SetTextColor(memoryHdc, (color.B & 0xFF) << 16 | (color.G & 0xFF) << 8 | color.R);

        // Draw text to memory HDC
        Win32Utils.TextOut(memoryHdc, 0, 0, str, str.Length);

        // copy from memory HDC to normal HDC with alpha blend so achieve the transparent text
        Win32Utils.AlphaBlend(hdc, point.X, point.Y, size.Width, size.Height, memoryHdc, 0, 0, size.Width, size.Height, new BlendFunction(color.A));
    }
    finally
    {
        // Always free the memory DC and its DIB section, even if a GDI call throws.
        Win32Utils.ReleaseMemoryHdc(memoryHdc, dib);
    }
}
#endif
/// <summary>
/// Change text align to Left-to-Right or Right-to-Left if required.
/// The format flag is touched only on an actual direction transition.
/// </summary>
private void SetRtlAlignGdiPlus(bool rtl)
{
    bool wasRtl = _setRtl;
    if (rtl && !wasRtl)
        _stringFormat2.FormatFlags |= StringFormatFlags.DirectionRightToLeft;
    else if (!rtl && wasRtl)
        _stringFormat2.FormatFlags ^= StringFormatFlags.DirectionRightToLeft;
    _setRtl = rtl;
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.CSharp.RuntimeBinder;
using Xunit;
namespace System.Dynamic.Tests
{
/// <summary>
/// Tests for dynamic unary operations (increment/decrement, negation, logical not,
/// ones-complement, is-true/is-false) in checked and unchecked contexts, plus
/// argument validation and property behavior of <see cref="UnaryOperationBinder"/>.
/// The exact C# operator syntax (e.g. <c>--d</c>, <c>checked(-d)</c>) is significant:
/// each form binds a specific unary operation on the runtime binder.
/// </summary>
public class UnaryOperationTests
{
    // Binder overriding only the abstract members; used to test the base class'
    // constructor validation and properties, never to actually bind an operation.
    private class MinimumOverrideUnaryOperationBinder : UnaryOperationBinder
    {
        public MinimumOverrideUnaryOperationBinder(ExpressionType operation)
            : base(operation)
        {
        }

        public override DynamicMetaObject FallbackUnaryOperation(DynamicMetaObject target, DynamicMetaObject errorSuggestion)
        {
            throw new NotSupportedException();
        }
    }

    // Representative values including the boundaries where checked arithmetic overflows.
    private static readonly int[] SomeInt32 = { 0, 1, 2, -1, int.MinValue, int.MaxValue, int.MaxValue - 1 };

    private static IEnumerable<object[]> Int32Args() => SomeInt32.Select(i => new object[] {i});

    private static IEnumerable<object[]> BooleanArgs()
    {
        yield return new object[] {false};
        yield return new object[] {true};
    }

    // Every ExpressionType the UnaryOperationBinder constructor accepts.
    private static IEnumerable<object[]> UnaryExpressionTypes()
    {
        yield return new object[] {ExpressionType.Decrement};
        yield return new object[] {ExpressionType.Extension};
        yield return new object[] {ExpressionType.Increment};
        yield return new object[] {ExpressionType.IsFalse};
        yield return new object[] {ExpressionType.IsTrue};
        yield return new object[] {ExpressionType.Negate};
        yield return new object[] {ExpressionType.Not};
        yield return new object[] {ExpressionType.OnesComplement};
        yield return new object[] {ExpressionType.UnaryPlus};
    }

    // Every other ExpressionType; the constructor must reject all of these.
    private static IEnumerable<object[]> NonUnaryExpressionTypes()
    {
        yield return new object[] {ExpressionType.Add};
        yield return new object[] {ExpressionType.AddAssign};
        yield return new object[] {ExpressionType.AddAssignChecked};
        yield return new object[] {ExpressionType.AddChecked};
        yield return new object[] {ExpressionType.And};
        yield return new object[] {ExpressionType.AndAlso};
        yield return new object[] {ExpressionType.AndAssign};
        yield return new object[] {ExpressionType.ArrayIndex};
        yield return new object[] {ExpressionType.ArrayLength};
        yield return new object[] {ExpressionType.Assign};
        yield return new object[] {ExpressionType.Block};
        yield return new object[] {ExpressionType.Call};
        yield return new object[] {ExpressionType.Coalesce};
        yield return new object[] {ExpressionType.Conditional};
        yield return new object[] {ExpressionType.Constant};
        yield return new object[] {ExpressionType.Convert};
        yield return new object[] {ExpressionType.ConvertChecked};
        yield return new object[] {ExpressionType.DebugInfo};
        yield return new object[] {ExpressionType.Default};
        yield return new object[] {ExpressionType.Divide};
        yield return new object[] {ExpressionType.DivideAssign};
        yield return new object[] {ExpressionType.Dynamic};
        yield return new object[] {ExpressionType.Equal};
        yield return new object[] {ExpressionType.ExclusiveOr};
        yield return new object[] {ExpressionType.ExclusiveOrAssign};
        yield return new object[] {ExpressionType.Goto};
        yield return new object[] {ExpressionType.GreaterThan};
        yield return new object[] {ExpressionType.GreaterThanOrEqual};
        yield return new object[] {ExpressionType.Index};
        yield return new object[] {ExpressionType.Invoke};
        yield return new object[] {ExpressionType.Label};
        yield return new object[] {ExpressionType.Lambda};
        yield return new object[] {ExpressionType.LeftShift};
        yield return new object[] {ExpressionType.LeftShiftAssign};
        yield return new object[] {ExpressionType.LessThan};
        yield return new object[] {ExpressionType.LessThanOrEqual};
        yield return new object[] {ExpressionType.ListInit};
        yield return new object[] {ExpressionType.Loop};
        yield return new object[] {ExpressionType.MemberAccess};
        yield return new object[] {ExpressionType.MemberInit};
        yield return new object[] {ExpressionType.Modulo};
        yield return new object[] {ExpressionType.ModuloAssign};
        yield return new object[] {ExpressionType.Multiply};
        yield return new object[] {ExpressionType.MultiplyAssign};
        yield return new object[] {ExpressionType.MultiplyAssignChecked};
        yield return new object[] {ExpressionType.MultiplyChecked};
        yield return new object[] {ExpressionType.NegateChecked};
        yield return new object[] {ExpressionType.New};
        yield return new object[] {ExpressionType.NewArrayBounds};
        yield return new object[] {ExpressionType.NewArrayInit};
        yield return new object[] {ExpressionType.NotEqual};
        yield return new object[] {ExpressionType.Or};
        yield return new object[] {ExpressionType.OrAssign};
        yield return new object[] {ExpressionType.OrElse};
        yield return new object[] {ExpressionType.Parameter};
        yield return new object[] {ExpressionType.PostDecrementAssign};
        yield return new object[] {ExpressionType.PostIncrementAssign};
        yield return new object[] {ExpressionType.Power};
        yield return new object[] {ExpressionType.PowerAssign};
        yield return new object[] {ExpressionType.PreDecrementAssign};
        yield return new object[] {ExpressionType.PreIncrementAssign};
        yield return new object[] {ExpressionType.Quote};
        yield return new object[] {ExpressionType.RightShift};
        yield return new object[] {ExpressionType.RightShiftAssign};
        yield return new object[] {ExpressionType.RuntimeVariables};
        yield return new object[] {ExpressionType.Subtract};
        yield return new object[] {ExpressionType.SubtractAssign};
        yield return new object[] {ExpressionType.SubtractAssignChecked};
        yield return new object[] {ExpressionType.SubtractChecked};
        yield return new object[] {ExpressionType.Switch};
        yield return new object[] {ExpressionType.Throw};
        yield return new object[] {ExpressionType.Try};
        yield return new object[] {ExpressionType.TypeAs};
        yield return new object[] {ExpressionType.TypeEqual};
        yield return new object[] {ExpressionType.TypeIs};
        yield return new object[] {ExpressionType.Unbox};
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void DecrementPrefixInt32(int x)
    {
        dynamic d = x;
        unchecked
        {
            Assert.Equal(x - 1, --d);
            Assert.Equal(x - 1, d);
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void DecrementPostfixInt32(int x)
    {
        dynamic d = x;
        unchecked
        {
            Assert.Equal(x, d--);
            Assert.Equal(x - 1, d);
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void DecrementPrefixOvfInt32(int x)
    {
        dynamic d = x;
        if (x == int.MinValue)
        {
            Assert.Throws<OverflowException>(() => checked(--d));
        }
        else
        {
            checked
            {
                Assert.Equal(x - 1, --d);
                Assert.Equal(x - 1, d);
            }
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void DecrementPostfixOvfInt32(int x)
    {
        dynamic d = x;
        if (x == int.MinValue)
        {
            Assert.Throws<OverflowException>(() => checked(d--));
        }
        else
        {
            checked
            {
                Assert.Equal(x, d--);
                Assert.Equal(x - 1, d);
            }
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void IncrementPrefixInt32(int x)
    {
        dynamic d = x;
        unchecked
        {
            Assert.Equal(x + 1, ++d);
            Assert.Equal(x + 1, d);
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void IncrementPostfixInt32(int x)
    {
        dynamic d = x;
        unchecked
        {
            Assert.Equal(x, d++);
            Assert.Equal(x + 1, d);
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void IncrementPrefixOvfInt32(int x)
    {
        dynamic d = x;
        if (x == int.MaxValue)
        {
            Assert.Throws<OverflowException>(() => checked(++d));
        }
        else
        {
            checked
            {
                Assert.Equal(x + 1, ++d);
                Assert.Equal(x + 1, d);
            }
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void IncrementPostfixOvfInt32(int x)
    {
        dynamic d = x;
        if (x == int.MaxValue)
        {
            Assert.Throws<OverflowException>(() => checked(d++));
        }
        else
        {
            checked
            {
                Assert.Equal(x, d++);
                Assert.Equal(x + 1, d);
            }
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void NegateInt32(int x)
    {
        dynamic d = x;
        Assert.Equal(unchecked(-x), unchecked(-d));
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void NegateOvfInt32(int x)
    {
        dynamic d = x;
        if (x == int.MinValue)
        {
            Assert.Throws<OverflowException>(() => checked(-d));
        }
        else
        {
            // FIX: negate inside a checked context so the checked-negate operation is
            // exercised for non-overflowing values too, matching the other *Ovf* tests;
            // previously this branch performed an unchecked negation.
            checked
            {
                Assert.Equal(-x, -d);
            }
        }
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void UnaryPlusInt32(int x)
    {
        dynamic d = x;
        Assert.Equal(x, +d);
    }

    [Theory, MemberData(nameof(Int32Args))]
    public void OnesComplementInt32(int x)
    {
        dynamic d = x;
        Assert.Equal(~x, ~d);
    }

    [Theory, MemberData(nameof(BooleanArgs))]
    public void NotBoolean(bool x)
    {
        dynamic d = x;
        Assert.Equal(!x, !d);
    }

    [Theory, MemberData(nameof(BooleanArgs))]
    public void IsTrueBoolean(bool x)
    {
        // The conditional operator on a dynamic operand binds the IsTrue operation.
        dynamic d = x;
        Assert.Equal(x ? 1 : 2, d ? 1 : 2);
    }

    [Theory, MemberData(nameof(BooleanArgs))]
    public void IsFalse(bool x)
    {
        // Short-circuiting && on a dynamic operand binds the IsFalse operation.
        dynamic d = x;
        Assert.Equal(x, d && true);
    }

    [Theory, MemberData(nameof(NonUnaryExpressionTypes))]
    public void NonUnaryOperation(ExpressionType type)
    {
        AssertExtensions.Throws<ArgumentException>("operation", () => new MinimumOverrideUnaryOperationBinder(type));
    }

    [Theory, MemberData(nameof(UnaryExpressionTypes))]
    public void ReturnType(ExpressionType type)
    {
        // IsTrue and IsFalse return bool; all other unary operations return object.
        // NOTE(review): the comparison relies on IsTrue/IsFalse being the
        // highest-numbered members of ExpressionType among those tested here.
        Assert.Equal(
            type >= ExpressionType.IsTrue ? typeof(bool) : typeof(object),
            new MinimumOverrideUnaryOperationBinder(type).ReturnType);
    }

    [Theory, MemberData(nameof(UnaryExpressionTypes))]
    public void ExpressionTypeMatches(ExpressionType type)
    {
        Assert.Equal(type, new MinimumOverrideUnaryOperationBinder(type).Operation);
    }

    [Fact]
    public void NullTarget()
    {
        var binder = new MinimumOverrideUnaryOperationBinder(ExpressionType.Negate);
        AssertExtensions.Throws<ArgumentNullException>("target", () => binder.Bind(null, null));
    }

    [Fact]
    public void ArgumentPassed()
    {
        // Unary Bind must receive no extra arguments; passing one is rejected.
        var target = new DynamicMetaObject(Expression.Parameter(typeof(object), null), BindingRestrictions.Empty);
        var arg = new DynamicMetaObject(Expression.Parameter(typeof(object), null), BindingRestrictions.Empty);
        var binder = new MinimumOverrideUnaryOperationBinder(ExpressionType.Negate);
        AssertExtensions.Throws<ArgumentException>("args", () => binder.Bind(target, new[] {arg}));
    }

    [Fact]
    public void InvalidOperationForType()
    {
        // Operations unsupported by the operand's runtime type fail at bind time.
        dynamic d = "23";
        Assert.Throws<RuntimeBinderException>(() => -d);
        d = 23;
        Assert.Throws<RuntimeBinderException>(() => !d);
    }
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.Projection
// Description: The basic module for MapWindow version 6.0
// ********************************************************************************************************
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 8/14/2009 4:16:32 PM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
// Name | Date | Comment
// --------------------|------------|------------------------------------------------------------
// Ted Dunsford | 5/3/2010 | Updated project to DotSpatial.Projection and license to LGPL
// ********************************************************************************************************
#pragma warning disable 1591
namespace DotSpatial.Projections.GeographicCategories
{
/// <summary>
/// SolarSystem — geographic (longitude/latitude) coordinate systems for planets,
/// moons and other solar-system bodies, each defined by a proj4 spheroid and the
/// Esri-style "GCS_*" / "D_*" naming convention.
/// </summary>
public class SolarSystem : CoordinateSystemCategory
{
    #region Private Variables

    public readonly ProjectionInfo Adrastea2000;
    public readonly ProjectionInfo Amalthea2000;
    public readonly ProjectionInfo Ananke2000;
    public readonly ProjectionInfo Ariel2000;
    public readonly ProjectionInfo Atlas2000;
    public readonly ProjectionInfo Belinda2000;
    public readonly ProjectionInfo Bianca2000;
    public readonly ProjectionInfo Callisto2000;
    public readonly ProjectionInfo Calypso2000;
    public readonly ProjectionInfo Carme2000;
    public readonly ProjectionInfo Charon2000;
    public readonly ProjectionInfo Cordelia2000;
    public readonly ProjectionInfo Cressida2000;
    public readonly ProjectionInfo Deimos2000;
    public readonly ProjectionInfo Desdemona2000;
    public readonly ProjectionInfo Despina2000;
    public readonly ProjectionInfo Dione2000;
    public readonly ProjectionInfo Elara2000;
    public readonly ProjectionInfo Enceladus2000;
    public readonly ProjectionInfo Epimetheus2000;
    public readonly ProjectionInfo Europa2000;
    public readonly ProjectionInfo Galatea2000;
    public readonly ProjectionInfo Ganymede2000;
    public readonly ProjectionInfo Helene2000;
    public readonly ProjectionInfo Himalia2000;
    public readonly ProjectionInfo Hyperion2000;
    public readonly ProjectionInfo Iapetus2000;
    public readonly ProjectionInfo Io2000;
    public readonly ProjectionInfo Janus2000;
    public readonly ProjectionInfo Juliet2000;
    public readonly ProjectionInfo Jupiter2000;
    public readonly ProjectionInfo Larissa2000;
    public readonly ProjectionInfo Leda2000;
    public readonly ProjectionInfo Lysithea2000;
    public readonly ProjectionInfo Mars1979;
    public readonly ProjectionInfo Mars2000;
    public readonly ProjectionInfo Mercury2000;
    public readonly ProjectionInfo Metis2000;
    public readonly ProjectionInfo Mimas2000;
    public readonly ProjectionInfo Miranda2000;
    public readonly ProjectionInfo Moon2000;
    public readonly ProjectionInfo Naiad2000;
    public readonly ProjectionInfo Neptune2000;
    public readonly ProjectionInfo Nereid2000;
    public readonly ProjectionInfo Oberon2000;
    public readonly ProjectionInfo Ophelia2000;
    public readonly ProjectionInfo Pan2000;
    public readonly ProjectionInfo Pandora2000;
    public readonly ProjectionInfo Pasiphae2000;
    public readonly ProjectionInfo Phobos2000;
    public readonly ProjectionInfo Phoebe2000;
    public readonly ProjectionInfo Pluto2000;
    public readonly ProjectionInfo Portia2000;
    public readonly ProjectionInfo Prometheus2000;
    public readonly ProjectionInfo Proteus2000;
    public readonly ProjectionInfo Puck2000;
    public readonly ProjectionInfo Rhea2000;
    public readonly ProjectionInfo Rosalind2000;
    public readonly ProjectionInfo Saturn2000;
    public readonly ProjectionInfo Sinope2000;
    public readonly ProjectionInfo Telesto2000;
    public readonly ProjectionInfo Tethys2000;
    public readonly ProjectionInfo Thalassa2000;
    public readonly ProjectionInfo Thebe2000;
    public readonly ProjectionInfo Titan2000;
    public readonly ProjectionInfo Titania2000;
    public readonly ProjectionInfo Triton2000;
    public readonly ProjectionInfo Umbriel2000;
    public readonly ProjectionInfo Uranus2000;
    public readonly ProjectionInfo Venus1985;
    public readonly ProjectionInfo Venus2000;

    #endregion

    #region Constructors

    /// <summary>
    /// Creates a new instance of SolarSystem.
    /// Each body's proj4 definition and naming are consolidated into a single
    /// call so the three formerly separate assignment lists cannot drift apart.
    /// </summary>
    public SolarSystem()
    {
        Adrastea2000 = CreateBody("+proj=longlat +a=8200 +b=8200 +no_defs ", "Adrastea_2000");
        Amalthea2000 = CreateBody("+proj=longlat +a=83500 +b=83500 +no_defs ", "Amalthea_2000");
        Ananke2000 = CreateBody("+proj=longlat +a=10000 +b=10000 +no_defs ", "Ananke_2000");
        Ariel2000 = CreateBody("+proj=longlat +a=578900 +b=578900 +no_defs ", "Ariel_2000");
        Atlas2000 = CreateBody("+proj=longlat +a=16000 +b=16000 +no_defs ", "Atlas_2000");
        Belinda2000 = CreateBody("+proj=longlat +a=33000 +b=33000 +no_defs ", "Belinda_2000");
        Bianca2000 = CreateBody("+proj=longlat +a=21000 +b=21000 +no_defs ", "Bianca_2000");
        Callisto2000 = CreateBody("+proj=longlat +a=2409300 +b=2409300 +no_defs ", "Callisto_2000");
        Calypso2000 = CreateBody("+proj=longlat +a=9500 +b=9500 +no_defs ", "Calypso_2000");
        Carme2000 = CreateBody("+proj=longlat +a=15000 +b=15000 +no_defs ", "Carme_2000");
        Charon2000 = CreateBody("+proj=longlat +a=593000 +b=593000 +no_defs ", "Charon_2000");
        Cordelia2000 = CreateBody("+proj=longlat +a=13000 +b=13000 +no_defs ", "Cordelia_2000");
        Cressida2000 = CreateBody("+proj=longlat +a=31000 +b=31000 +no_defs ", "Cressida_2000");
        Deimos2000 = CreateBody("+proj=longlat +a=6200 +b=6200 +no_defs ", "Deimos_2000");
        Desdemona2000 = CreateBody("+proj=longlat +a=27000 +b=27000 +no_defs ", "Desdemona_2000");
        Despina2000 = CreateBody("+proj=longlat +a=74000 +b=74000 +no_defs ", "Despina_2000");
        Dione2000 = CreateBody("+proj=longlat +a=560000 +b=560000 +no_defs ", "Dione_2000");
        Elara2000 = CreateBody("+proj=longlat +a=40000 +b=40000 +no_defs ", "Elara_2000");
        Enceladus2000 = CreateBody("+proj=longlat +a=249400 +b=249400 +no_defs ", "Enceladus_2000");
        Epimetheus2000 = CreateBody("+proj=longlat +a=59500 +b=59500 +no_defs ", "Epimetheus_2000");
        Europa2000 = CreateBody("+proj=longlat +a=1562090 +b=1562090 +no_defs ", "Europa_2000");
        Galatea2000 = CreateBody("+proj=longlat +a=79000 +b=79000 +no_defs ", "Galatea_2000");
        Ganymede2000 = CreateBody("+proj=longlat +a=2632345 +b=2632345 +no_defs ", "Ganymede_2000");
        Helene2000 = CreateBody("+proj=longlat +a=17500 +b=700.0000000000046 +no_defs ", "Helene_2000");
        Himalia2000 = CreateBody("+proj=longlat +a=85000 +b=85000 +no_defs ", "Himalia_2000");
        Hyperion2000 = CreateBody("+proj=longlat +a=133000 +b=133000 +no_defs ", "Hyperion_2000");
        Iapetus2000 = CreateBody("+proj=longlat +a=718000 +b=718000 +no_defs ", "Iapetus_2000");
        Io2000 = CreateBody("+proj=longlat +a=1821460 +b=1821460 +no_defs ", "Io_2000");
        Janus2000 = CreateBody("+proj=longlat +a=888000 +b=888000 +no_defs ", "Janus_2000");
        Juliet2000 = CreateBody("+proj=longlat +a=42000 +b=42000 +no_defs ", "Juliet_2000");
        Jupiter2000 = CreateBody("+proj=longlat +a=71492000 +b=66853999.99999999 +no_defs ", "Jupiter_2000");
        Larissa2000 = CreateBody("+proj=longlat +a=104000 +b=89000 +no_defs ", "Larissa_2000");
        Leda2000 = CreateBody("+proj=longlat +a=5000 +b=5000 +no_defs ", "Leda_2000");
        Lysithea2000 = CreateBody("+proj=longlat +a=12000 +b=12000 +no_defs ", "Lysithea_2000");
        Mars1979 = CreateBody("+proj=longlat +a=3393400 +b=3375730 +no_defs ", "Mars_1979");
        Mars2000 = CreateBody("+proj=longlat +a=3396190 +b=3376200 +no_defs ", "Mars_2000");
        Mercury2000 = CreateBody("+proj=longlat +a=2439700 +b=2439700 +no_defs ", "Mercury_2000");
        Metis2000 = CreateBody("+proj=longlat +a=30000 +b=20000 +no_defs ", "Metis_2000");
        Mimas2000 = CreateBody("+proj=longlat +a=1986300 +b=1986300 +no_defs ", "Mimas_2000");
        Miranda2000 = CreateBody("+proj=longlat +a=235800 +b=235800 +no_defs ", "Miranda_2000");
        Moon2000 = CreateBody("+proj=longlat +a=1737400 +b=1737400 +no_defs ", "Moon_2000");
        Naiad2000 = CreateBody("+proj=longlat +a=29000 +b=29000 +no_defs ", "Naiad_2000");
        Neptune2000 = CreateBody("+proj=longlat +a=24764000 +b=24341000 +no_defs ", "Neptune_2000");
        Nereid2000 = CreateBody("+proj=longlat +a=170000 +b=170000 +no_defs ", "Nereid_2000");
        Oberon2000 = CreateBody("+proj=longlat +a=761400 +b=761400 +no_defs ", "Oberon_2000");
        Ophelia2000 = CreateBody("+proj=longlat +a=15000 +b=15000 +no_defs ", "Ophelia_2000");
        Pan2000 = CreateBody("+proj=longlat +a=10000 +b=10000 +no_defs ", "Pan_2000");
        Pandora2000 = CreateBody("+proj=longlat +a=41900 +b=41900 +no_defs ", "Pandora_2000");
        Pasiphae2000 = CreateBody("+proj=longlat +a=18000 +b=18000 +no_defs ", "Pasiphae_2000");
        Phobos2000 = CreateBody("+proj=longlat +a=11100 +b=11100 +no_defs ", "Phobos_2000");
        Phoebe2000 = CreateBody("+proj=longlat +a=110000 +b=110000 +no_defs ", "Phoebe_2000");
        Pluto2000 = CreateBody("+proj=longlat +a=1195000 +b=1195000 +no_defs ", "Pluto_2000");
        Portia2000 = CreateBody("+proj=longlat +a=54000 +b=54000 +no_defs ", "Portia_2000");
        Prometheus2000 = CreateBody("+proj=longlat +a=50100 +b=50100 +no_defs ", "Prometheus_2000");
        Proteus2000 = CreateBody("+proj=longlat +a=208000 +b=208000 +no_defs ", "Proteus_2000");
        Puck2000 = CreateBody("+proj=longlat +a=77000 +b=77000 +no_defs ", "Puck_2000");
        Rhea2000 = CreateBody("+proj=longlat +a=764000 +b=764000 +no_defs ", "Rhea_2000");
        Rosalind2000 = CreateBody("+proj=longlat +a=27000 +b=27000 +no_defs ", "Rosalind_2000");
        Saturn2000 = CreateBody("+proj=longlat +a=60268000 +b=54364000 +no_defs ", "Saturn_2000");
        Sinope2000 = CreateBody("+proj=longlat +a=14000 +b=14000 +no_defs ", "Sinope_2000");
        Telesto2000 = CreateBody("+proj=longlat +a=11000 +b=11000 +no_defs ", "Telesto_2000");
        Tethys2000 = CreateBody("+proj=longlat +a=529800 +b=529800 +no_defs ", "Tethys_2000");
        Thalassa2000 = CreateBody("+proj=longlat +a=40000 +b=40000 +no_defs ", "Thalassa_2000");
        Thebe2000 = CreateBody("+proj=longlat +a=49300 +b=49300 +no_defs ", "Thebe_2000");
        Titan2000 = CreateBody("+proj=longlat +a=2575000 +b=2575000 +no_defs ", "Titan_2000");
        Titania2000 = CreateBody("+proj=longlat +a=788900 +b=788900 +no_defs ", "Titania_2000");
        Triton2000 = CreateBody("+proj=longlat +a=1352600 +b=1352600 +no_defs ", "Triton_2000");
        Umbriel2000 = CreateBody("+proj=longlat +a=584700 +b=584700 +no_defs ", "Umbriel_2000");
        Uranus2000 = CreateBody("+proj=longlat +a=25559000 +b=24973000 +no_defs ", "Uranus_2000");
        Venus1985 = CreateBody("+proj=longlat +a=6051000 +b=6051000 +no_defs ", "Venus_1985");
        Venus2000 = CreateBody("+proj=longlat +a=6051800 +b=6051800 +no_defs ", "Venus_2000");
    }

    #endregion

    /// <summary>
    /// Builds a geographic coordinate system from the given proj4 definition and
    /// applies the Esri naming convention: "GCS_" + name for the geographic info
    /// and "D_" + name for its datum.
    /// </summary>
    /// <param name="proj4">the proj4 longlat definition string</param>
    /// <param name="name">the body name including year suffix, e.g. "Mars_2000"</param>
    /// <returns>the configured projection info</returns>
    private static ProjectionInfo CreateBody(string proj4, string name)
    {
        ProjectionInfo info = ProjectionInfo.FromProj4String(proj4);
        info.GeographicInfo.Name = "GCS_" + name;
        info.GeographicInfo.Datum.Name = "D_" + name;
        return info;
    }
}
}
#pragma warning restore 1591
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.