context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
/* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.ComponentModel;
using XenAdmin.Controls;
using XenAPI;
using XenAdmin.Core;
using XenAdmin.Properties;
using System.Threading;
using System.Drawing;
using System.Drawing.Design;
using System.Collections.ObjectModel;
using XenAdmin.Network;
namespace XenAdmin.Commands
{
/// <summary>
/// This is the base ToolStripMenuItem for StartVMOnHostToolStripMenuItem, ResumeVMOnHostToolStripMenuItem and MigrateVMToolStripMenuItem.
/// </summary>
internal abstract class VMOperationToolStripMenuItem : CommandToolStripMenuItem
{
    private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

    // Which operation this menu item launches: start_on, resume_on or pool_migrate.
    private readonly vm_operations _operation;

    // True only for resume_on; forwarded to CrossPoolMigrateCommand so the VM is
    // resumed after a cross-pool migration.
    private readonly bool _resumeAfter;

    /// <summary>
    /// Creates a menu item for the given VM placement operation.
    /// </summary>
    /// <param name="command">Command supplying the current selection and main-window interface.</param>
    /// <param name="inContextMenu">Whether the item is hosted in a context menu.</param>
    /// <param name="operation">Must be start_on, resume_on or pool_migrate.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="operation"/> is not one of the three supported operations.</exception>
    protected VMOperationToolStripMenuItem(Command command, bool inContextMenu, vm_operations operation)
        : base(command, inContextMenu)
    {
        if (operation != vm_operations.start_on && operation != vm_operations.resume_on && operation != vm_operations.pool_migrate)
        {
            throw new ArgumentException("Invalid operation", "operation");
        }
        if (operation.Equals(vm_operations.resume_on))
            _resumeAfter = true;
        _operation = operation;
        // Placeholder child so the item renders with a drop-down arrow before the
        // real sub-items are built in OnDropDownOpening.
        base.DropDownItems.Add(new ToolStripMenuItem());
    }

    /// <summary>
    /// Rebuilds the drop-down each time it opens: one leading item for the home
    /// server (or WLB optimal server when WLB is enabled), then one "updating"
    /// placeholder per host. A thread-pool work item then decides which entries
    /// to enable and with which command.
    /// </summary>
    protected override void OnDropDownOpening(EventArgs e)
    {
        base.DropDownItems.Clear();
        // Work around bug in tool kit where disabled menu items show their dropdown menus
        if (!Enabled)
        {
            ToolStripMenuItem emptyMenuItem = new ToolStripMenuItem(Messages.HOST_MENU_EMPTY);
            emptyMenuItem.Font = Program.DefaultFont;
            emptyMenuItem.Enabled = false;
            base.DropDownItems.Add(emptyMenuItem);
            return;
        }
        VisualMenuItemAlignData.ParentStrip = this;
        IXenConnection connection = Command.GetSelection()[0].Connection;
        bool wlb = Helpers.WlbEnabled(connection);
        // First entry: WLB "optimal server" when WLB is enabled, otherwise the home server.
        if (wlb)
        {
            base.DropDownItems.Add(new VMOperationToolStripMenuSubItem(Messages.WLB_OPT_MENU_OPTIMAL_SERVER, Images.StaticImages._000_ServerWlb_h32bit_16));
        }
        else
        {
            base.DropDownItems.Add(new VMOperationToolStripMenuSubItem(Messages.HOME_SERVER_MENU_ITEM, Images.StaticImages._000_ServerHome_h32bit_16));
        }
        // One placeholder per host, tagged with the host so the Enable* methods
        // can find and update it later.
        List<Host> hosts = new List<Host>(connection.Cache.Hosts);
        hosts.Sort();
        foreach (Host host in hosts)
        {
            VMOperationToolStripMenuSubItem item = new VMOperationToolStripMenuSubItem(String.Format(Messages.MAINWINDOW_CONTEXT_UPDATING, host.name_label.EscapeAmpersands()), Images.StaticImages._000_ServerDisconnected_h32bit_16);
            item.Tag = host;
            base.DropDownItems.Add(item);
        }
        // start a new thread to evaluate which hosts can be used.
        ThreadPool.QueueUserWorkItem(delegate
        {
            SelectedItemCollection selection = Command.GetSelection();
            Session session = selection[0].Connection.DuplicateSession();
            WlbRecommendations recommendations = new WlbRecommendations(selection.AsXenObjects<VM>(), session);
            recommendations.Initialize();
            if (recommendations.IsError)
            {
                // WLB recommendations unavailable: fall back to plain capability checks.
                EnableAppropriateHostsNoWlb(session);
            }
            else
            {
                EnableAppropriateHostsWlb(session, recommendations);
            }
        });
    }

    /// <summary>
    /// Populates the drop-down with WLB-backed commands and orders the host
    /// entries by their WLB star rating. Runs on a background thread; all UI
    /// mutation is marshalled through Program.Invoke.
    /// </summary>
    private void EnableAppropriateHostsWlb(Session session, WlbRecommendations recommendations)
    {
        SelectedItemCollection selection = Command.GetSelection();
        // set the first menu item to be the WLB optimal server menu item
        Program.Invoke(Program.MainWindow, delegate
        {
            VMOperationToolStripMenuSubItem firstItem = (VMOperationToolStripMenuSubItem)base.DropDownItems[0];
            firstItem.Command = new VMOperationWlbOptimalServerCommand(Command.MainWindowCommandInterface, selection, _operation, recommendations);
        });
        List<VMOperationToolStripMenuSubItem> hostMenuItems = new List<VMOperationToolStripMenuSubItem>();
        Program.Invoke(Program.MainWindow, delegate
        {
            // Collect the host-tagged items and attach a WLB host command carrying
            // that host's star rating.
            foreach (VMOperationToolStripMenuSubItem item in base.DropDownItems)
            {
                Host host = item.Tag as Host;
                if (host != null)
                {
                    item.Command = new VMOperationWlbHostCommand(Command.MainWindowCommandInterface, selection, host, _operation, recommendations.GetStarRating(host));
                    hostMenuItems.Add(item);
                }
            }
        });
        // Shuffle the list to make it look cool
        Helpers.ShuffleList(hostMenuItems);
        // sort the hostMenuItems by star rating
        hostMenuItems.Sort(new WlbHostStarCompare());
        // refresh the drop-down-items from the menuItems.
        Program.Invoke(Program.MainWindow, delegate()
        {
            // Re-insert in rating order, after the optimal-server entry at index 0.
            foreach (VMOperationToolStripMenuSubItem menuItem in hostMenuItems)
            {
                base.DropDownItems.Insert(hostMenuItems.IndexOf(menuItem) + 1, menuItem);
            }
        });
        Program.Invoke(Program.MainWindow, () => AddAdditionalMenuItems(selection));
    }

    /// <summary>
    /// Populates the drop-down without WLB: the first entry targets the home
    /// server (falling back to a cross-pool migrate command when the classic
    /// command cannot run), and each host entry gets either a classic or a
    /// cross-pool command. Runs on a background thread; UI mutation is
    /// marshalled through Program.Invoke.
    /// </summary>
    private void EnableAppropriateHostsNoWlb(Session session)
    {
        SelectedItemCollection selection = Command.GetSelection();
        IXenConnection connection = selection[0].Connection;
        VMOperationCommand cmdHome = new VMOperationHomeServerCommand(Command.MainWindowCommandInterface, selection, _operation, session);
        Host affinityHost = connection.Resolve(((VM)Command.GetSelection()[0].XenObject).affinity);
        VMOperationCommand cpmCmdHome = new CrossPoolMigrateToHomeCommand(Command.MainWindowCommandInterface, selection, affinityHost);
        Program.Invoke(Program.MainWindow, delegate
        {
            var firstItem = (VMOperationToolStripMenuSubItem)base.DropDownItems[0];
            bool oldMigrateToHomeCmdCanRun = cmdHome.CanExecute();
            // Prefer the classic command; use cross-pool migrate-to-home only when
            // the classic one cannot run but the cross-pool one can.
            if (affinityHost == null || _operation == vm_operations.start_on || !oldMigrateToHomeCmdCanRun && !cpmCmdHome.CanExecute())
                firstItem.Command = cmdHome;
            else
                firstItem.Command = oldMigrateToHomeCmdCanRun ? cmdHome : cpmCmdHome;
        });
        // Snapshot the items so iteration is independent of later UI changes.
        List<VMOperationToolStripMenuSubItem> dropDownItems = DropDownItems.Cast<VMOperationToolStripMenuSubItem>().ToList();
        foreach (VMOperationToolStripMenuSubItem item in dropDownItems)
        {
            Host host = item.Tag as Host;
            if (host != null)
            {
                VMOperationCommand cmd = new VMOperationHostCommand(Command.MainWindowCommandInterface, selection, delegate { return host; }, host.Name.EscapeAmpersands(), _operation, session);
                CrossPoolMigrateCommand cpmCmd = new CrossPoolMigrateCommand(Command.MainWindowCommandInterface, selection, host, _resumeAfter);
                // Capture the loop variable for the delegate below.
                VMOperationToolStripMenuSubItem tempItem = item;
                Program.Invoke(Program.MainWindow, delegate
                {
                    bool oldMigrateCmdCanRun = cmd.CanExecute();
                    if (_operation == vm_operations.start_on || (!oldMigrateCmdCanRun && !cpmCmd.CanExecute() && string.IsNullOrEmpty(cpmCmd.CantExecuteReason)))
                        tempItem.Command = cmd;
                    else
                        tempItem.Command = oldMigrateCmdCanRun ? cmd : cpmCmd;
                });
            }
        }
        Program.Invoke(Program.MainWindow, () => AddAdditionalMenuItems(selection));
    }

    /// <summary>
    /// Hook to add additional members to the menu item
    /// Note: Called on main window thread by executing code
    /// </summary>
    /// <param name="selection">The current selection the menu was built for.</param>
    protected virtual void AddAdditionalMenuItems(SelectedItemCollection selection) { return; }

    /// <summary>
    /// This class is an implementation of the 'IComparer' interface
    /// for sorting vm placement menuItem List when wlb is enabled
    /// </summary>
    private class WlbHostStarCompare : IComparer<VMOperationToolStripMenuSubItem>
    {
        public int Compare(VMOperationToolStripMenuSubItem x, VMOperationToolStripMenuSubItem y)
        {
            int result = 0;
            // if x and y are enabled, compare their star rating (higher rating sorts first)
            if (x.Enabled && y.Enabled)
                result = y.StarRating.CompareTo(x.StarRating);
            // if x and y are disabled, they are equal
            else if (!x.Enabled && !y.Enabled)
                result = 0;
            // if x is disabled, y is greater
            else if (!x.Enabled)
                result = 1;
            // if y is disabled, x is greater
            else if (!y.Enabled)
                result = -1;
            return result;
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
using System.Xml;
namespace XmlDocumentTests.XmlNodeTests
{
/// <summary>
/// Tests for XmlNode.HasChildNodes across node kinds: documents, elements,
/// attributes, fragments, and leaf node types (PI, comment, text, CDATA).
/// </summary>
public class HasChildNodesTests
{
    [Fact]
    public static void ElementWithManyChildren()
    {
        // A root element mixing text, nested elements, comments, PIs and CDATA.
        string markup = "<root>\r\n text node one\r\n <elem1 child1=\"\" child2=\"duu\" child3=\"e1;e2;\" child4=\"a1\" child5=\"goody\">\r\n text node two e1; text node three\r\n </elem1><!-- comment3 --><?PI3 processing instruction?>e2;<foo /><![CDATA[ <opentag> without an </endtag> and & <! are all ok here ]]><elem2 att1=\"id1\" att2=\"up\" att3=\"attribute3\"><a /></elem2><elem2> \r\n elem2-text1\r\n <a> \r\n this-is-a \r\n </a> \r\n\r\n elem2-text2\r\n e3;e4;<!-- elem2-comment1-->\r\n elem2-text3\r\n\r\n <b> \r\n this-is-b\r\n </b>\r\n\r\n elem2-text4\r\n <?elem2_PI elem2-PI?>\r\n elem2-text5\r\n\r\n </elem2></root>";
        XmlDocument document = new XmlDocument();
        document.LoadXml(markup);

        Assert.True(document.HasChildNodes);
    }

    [Fact]
    public static void DocumentWithOnlyDocumentElement()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<doc/>");

        // The root element is empty, but the document owns that element.
        Assert.False(document.DocumentElement.HasChildNodes);
        Assert.True(document.HasChildNodes);
    }

    [Fact]
    public static void AddAnAttributeToAnElementWithNoChild()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<doc><elem1/></doc>");
        XmlElement element = (XmlElement)document.DocumentElement.FirstChild;

        // Attributes are not child nodes, so setting one must not flip HasChildNodes.
        element.SetAttribute("att1", "foo");
        Assert.False(element.HasChildNodes);
    }

    [Fact]
    public static void AppendAnElementNOdeToAnElementNodeWithNoChild()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<doc><elem1 att1='foo'/></doc>");
        XmlElement element = (XmlElement)document.DocumentElement.FirstChild;
        XmlElement appended = document.CreateElement("newElem");

        Assert.False(element.HasChildNodes);
        element.AppendChild(appended);
        Assert.True(element.HasChildNodes);
    }

    [Fact]
    public static void AttributeWithEmptyString()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<root attr='value'/>");

        // An attribute stores its value as a text child node.
        Assert.True(document.DocumentElement.Attributes[0].HasChildNodes);
    }

    [Fact]
    public static void AttributeWithStringValue()
    {
        XmlDocument document = new XmlDocument();
        XmlAttribute attribute = document.CreateAttribute("attribute");
        attribute.Value = "attribute_value";

        Assert.True(attribute.HasChildNodes);
    }

    [Fact]
    public static void ElementWithAttributeAndNoChild()
    {
        XmlDocument document = new XmlDocument();
        XmlElement element = document.CreateElement("elem1");
        XmlAttribute attribute = document.CreateAttribute("attrib");
        attribute.Value = "foo";
        element.Attributes.Append(attribute);

        Assert.False(element.HasChildNodes);
    }

    [Fact]
    public static void CloneAnElementWithChildNode()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<elem1 att1='foo'>text<a /></elem1>");

        // A deep clone keeps the children; a shallow clone drops them.
        XmlNode deepClone = document.DocumentElement.CloneNode(true);
        XmlNode shallowClone = document.DocumentElement.CloneNode(false);
        Assert.True(deepClone.HasChildNodes);
        Assert.False(shallowClone.HasChildNodes);
    }

    [Fact]
    public static void RemoveAllChildren()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<root><child1/><child2/><child3/></root>");
        XmlElement root = document.DocumentElement;
        Assert.Equal(3, root.ChildNodes.Count);
        Assert.True(root.HasChildNodes);

        // Strip the children one at a time from the front.
        while (root.HasChildNodes)
            root.RemoveChild(root.FirstChild);

        Assert.Equal(0, root.ChildNodes.Count);
        Assert.False(root.HasChildNodes);
    }

    [Fact]
    public static void RemoveAllChildrenAddAnElement()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<root><child1/><child2/><child3/></root>");
        XmlElement root = document.DocumentElement;
        Assert.Equal(3, root.ChildNodes.Count);
        Assert.True(root.HasChildNodes);

        while (root.HasChildNodes)
            root.RemoveChild(root.FirstChild);
        Assert.Equal(0, root.ChildNodes.Count);
        Assert.False(root.HasChildNodes);

        // Appending after a full clear must make the node report children again.
        XmlElement replacement = document.CreateElement("elem");
        root.AppendChild(replacement);
        Assert.Equal(1, root.ChildNodes.Count);
        Assert.True(root.HasChildNodes);
    }

    [Fact]
    public static void RemoveOnlyChildOfNode()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<root><child1/></root>");
        XmlElement root = document.DocumentElement;
        Assert.Equal(1, root.ChildNodes.Count);
        Assert.True(root.HasChildNodes);

        root.RemoveChild(root.ChildNodes[0]);
        Assert.Equal(0, root.ChildNodes.Count);
        Assert.False(root.HasChildNodes);
    }

    [Fact]
    public static void InsertAChildToDocumentFragment()
    {
        XmlDocument document = new XmlDocument();
        XmlDocumentFragment fragment = document.CreateDocumentFragment();
        fragment.AppendChild(document.CreateElement("elem"));

        Assert.True(fragment.HasChildNodes);
    }

    [Fact]
    public static void CheckNoChildrenOnPI()
    {
        XmlDocument document = new XmlDocument();
        Assert.False(document.CreateProcessingInstruction("PI", "info").HasChildNodes);
    }

    [Fact]
    public static void CheckNoChildrenOnComment()
    {
        XmlDocument document = new XmlDocument();
        Assert.False(document.CreateComment("info").HasChildNodes);
    }

    [Fact]
    public static void CheckNoChildrenOnText()
    {
        XmlDocument document = new XmlDocument();
        Assert.False(document.CreateTextNode("info").HasChildNodes);
    }

    [Fact]
    public static void CheckNoChildrenOnCData()
    {
        XmlDocument document = new XmlDocument();
        Assert.False(document.CreateCDataSection("info").HasChildNodes);
    }

    [Fact]
    public static void ReplaceNodeWithChildrenWithEmptyNode()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<root><child1/><child2/></root>");
        XmlElement replacement = document.CreateElement("newElement");
        Assert.True(document.DocumentElement.HasChildNodes);

        // Swap the populated root for a childless element.
        document.ReplaceChild(replacement, document.DocumentElement);
        Assert.False(document.DocumentElement.HasChildNodes);
    }
}
}
| |
// Presidents.cs (c) 2004 Kari Laitinen
// http://www.naturalprogramming.com
// 2004-11-06 Last modification.
// Compilation: csc Presidents.cs Date.cs
using System ;
// Holds one U.S. president's personal data and term of office, parsed from
// the textual dates supplied to the constructor.
class President
{
    // All fields are assigned once in the constructor and never modified.
    string name;
    Date birthDay;
    string birthState;
    string party;
    Date firstDayInOffice;
    Date lastDayInOffice;
    string vicePresidents;

    public President( string given_president_name,
                      string birth_date_as_string,
                      string given_birth_state,
                      string given_party_name,
                      string inauguration_date_as_string,
                      string last_day_in_office_as_string,
                      string given_vice_president_name )
    {
        name = given_president_name;
        birthDay = new Date( birth_date_as_string );
        birthState = given_birth_state;
        party = given_party_name;
        firstDayInOffice = new Date( inauguration_date_as_string );
        lastDayInOffice = new Date( last_day_in_office_as_string );
        vicePresidents = given_vice_president_name;
    }

    // Accessor for the president's full name.
    public string get_president_name()
    {
        return name;
    }

    // True when the given date falls inside this president's term.
    public bool was_president_on( Date given_date )
    {
        return given_date.is_within_dates( firstDayInOffice, lastDayInOffice );
    }

    // One-line summary: padded name followed by the term dates.
    public string get_brief_president_info()
    {
        return "\n " + name.PadRight( 25 )
             + " president from " + firstDayInOffice
             + " to " + lastDayInOffice;
    }

    // Multi-line report: birth data, term dates, term length, party and VPs.
    public string get_full_president_data()
    {
        int years_in_office, months_in_office, days_in_office;
        firstDayInOffice.get_distance_to( lastDayInOffice,
                                          out years_in_office,
                                          out months_in_office,
                                          out days_in_office );
        return "\n "
             + name + " born "
             + birthDay + ", "
             + birthState
             + "\n Inauguration date : " + firstDayInOffice
             + "\n Last day in office : " + lastDayInOffice
             + "\n Total time in office: " + years_in_office
             + " years, " + months_in_office + " months, and "
             + days_in_office + " days."
             + "\n Party: " + party
             + "\n Vice president(s): " + vicePresidents;
    }
}
// Console application logic: a table of U.S. presidents plus menu-driven
// searches by name and by date.
// Fixes applied relative to the original data:
//  - Benjamin Harrison's birth date corrected from 08/20/1933 to 08/20/1833
//    (he was president 1889-1893, so 1933 is impossible).
//  - "Dwight David Eisenhover" corrected to "Dwight David Eisenhower".
//  - "Thomas A. Hendrics" corrected to "Thomas A. Hendricks".
//  - run() and search_president_by_name() no longer crash on empty/EOF input.
class PresidentInfoApplication
{
    // Oversized so future presidents can be added without resizing; only the
    // first number_of_presidents_in_table slots are populated.
    President[] president_table = new President[ 80 ] ;
    int number_of_presidents_in_table ;

    // Index of the president printed most recently; print_data_of_next_president()
    // continues from here.
    int index_of_last_printing ;

    // States of the small state machine driving the linear searches below.
    const int SEARCH_NOT_READY = 1 ;
    const int SEARCH_IS_READY = 2 ;   // kept for completeness; currently unused
    const int SEARCH_IS_SUCCESSFUL = 3 ;
    const int SEARCH_NOT_SUCCESSFUL = 4 ;

    // Populates the table in chronological order of inauguration.
    public PresidentInfoApplication()
    {
        president_table[ 0 ] = new
            President( "George Washington", "02/22/1732", "Virginia",
                       "Federalist", "04/30/1789", "03/03/1797", "John Adams");
        president_table[ 1 ] = new
            President("John Adams", "10/30/1735", "Massachusetts",
                      "Federalist", "03/04/1797", "03/03/1801", "Thomas Jefferson");
        president_table[ 2 ] = new
            President("Thomas Jefferson", "04/13/1743", "Virginia", "Dem.-Rep.",
                      "03/04/1801", "03/03/1809", "Aaron Burr + George Clinton");
        president_table[ 3 ] = new
            President("James Madison", "03/16/1751", "Virginia", "Dem.-Rep.",
                      "03/04/1809", "03/03/1817", "George Clinton + Elbridge Gerry" );
        president_table[ 4 ] = new
            President( "James Monroe", "04/28/1758", "Virginia", "Dem.-Rep.",
                       "03/04/1817", "03/03/1825", "Daniel D. Tompkins" );
        president_table[ 5 ] = new
            President( "John Quincy Adams", "07/11/1767", "Massachusetts",
                       "Dem.-Rep.", "03/04/1825", "03/03/1829", "John C. Calhoun" );
        president_table[ 6 ] = new
            President( "Andrew Jackson", "03/15/1767", "South Carolina","Democrat",
                       "03/04/1829", "03/03/1837", "John C. Calhoun + Martin Van Buren" );
        president_table[ 7 ] = new
            President( "Martin Van Buren", "12/05/1782", "New York",
                       "Democrat", "03/04/1837", "03/03/1841", "Richard M. Johnson" );
        president_table[ 8 ] = new
            President( "William Henry Harrison", "02/09/1773", "Virginia",
                       "Whig", "03/04/1841", "04/04/1841", "John Tyler" );
        president_table[ 9 ] = new
            President( "John Tyler", "03/29/1790", "Virginia",
                       "Whig", "04/06/1841", "03/03/1845", "" );
        president_table[ 10 ] = new
            President( "James Knox Polk", "11/02/1795", "North Carolina",
                       "Democrat", "03/04/1845", "03/03/1849", "George M. Dallas" );
        president_table[ 11 ] = new
            President( "Zachary Taylor", "11/24/1784", "Virginia",
                       "Whig", "03/05/1849", "07/09/1850", "Millard Fillmore" );
        president_table[ 12 ] = new
            President( "Millard Fillmore", "01/07/1800", "New York",
                       "Whig", "07/10/1850", "03/03/1853", "" );
        president_table[ 13 ] = new
            President( "Franklin Pierce", "11/23/1804", "New Hampshire",
                       "Democrat", "03/04/1853", "03/03/1857", "William R. King" );
        president_table[ 14 ] = new
            President( "James Buchanan", "04/23/1791", "Pennsylvania",
                       "Democrat", "03/04/1857", "03/03/1861", "John C. Breckinridge");
        president_table[ 15 ] = new
            President( "Abraham Lincoln", "02/12/1809", "Kentucky", "Republican",
                       "03/04/1861", "04/15/1865", "Hannibal Hamlin + Andrew Johnson" );
        president_table[ 16 ] = new
            President( "Andrew Johnson", "12/29/1808", "North Carolina",
                       "Democrat", "04/15/1865", "03/03/1869", "" );
        president_table[ 17 ] = new
            President( "Ulysses Simpson Grant", "04/27/1822", "Ohio", "Republican",
                       "03/04/1869", "03/03/1877", "Schuyler Colfax + Henry Wilson" );
        president_table[ 18 ] = new
            President( "Rutherford Birchard Hayes", "10/04/1822", "Ohio",
                       "Republican", "03/04/1877", "03/03/1881", "William A. Wheeler");
        president_table[ 19 ] = new
            President( "James Abram Garfield", "11/19/1831", "Ohio",
                       "Republican", "03/04/1881", "09/19/1881", "Chester Alan Arthur");
        president_table[ 20 ] = new
            President( "Chester Alan Arthur", "10/05/1829", "Vermont",
                       "Republican", "09/20/1881", "03/03/1885", "" );
        president_table[ 21 ] = new
            President( "Grover Cleveland", "03/18/1837", "New Jersey",
                       "Democrat", "03/04/1885", "03/03/1889", "Thomas A. Hendricks" );
        // Birth year fixed: Benjamin Harrison was born in 1833, not 1933.
        president_table[ 22 ] = new
            President( "Benjamin Harrison", "08/20/1833", "Ohio",
                       "Republican", "03/04/1889", "03/03/1893", "Levi P. Morton" );
        president_table[ 23 ] = new
            President( "Grover Cleveland", "03/18/1837", "New Jersey",
                       "Democrat", "03/04/1893", "03/03/1897", "Adlai E. Stevenson" );
        president_table[ 24 ] = new
            President( "William McKinley", "01/29/1843", "Ohio", "Republican",
                       "03/04/1897", "09/14/1901", "Garret A. Hobart + Theodore Roosevelt" );
        president_table[ 25 ] = new
            President( "Theodore Roosevelt", "10/27/1858", "New York",
                       "Republican", "09/14/1901","03/03/1909","Charles W. Fairbanks");
        president_table[ 26 ] = new
            President( "William Howard Taft", "09/15/1857", "Ohio",
                       "Republican", "03/04/1909", "03/03/1913", "James S. Sherman");
        president_table[ 27 ] = new
            President( "Woodrow Wilson", "12/28/1856", "Virginia",
                       "Democrat", "03/04/1913", "03/03/1921", "Thomas R. Marshall" );
        president_table[ 28 ] = new
            President( "Warren Gamaliel Harding", "11/02/1865", "Ohio",
                       "Republican", "03/04/1921", "08/02/1923", "Calvin Coolidge" );
        president_table[ 29 ] = new
            President( "Calvin Coolidge", "07/04/1872", "Vermont",
                       "Republican", "08/03/1923", "03/03/1929", "Charles G. Dawes" );
        president_table[ 30 ] = new
            President( "Herbert Clark Hoover", "08/10/1874", "Iowa",
                       "Republican", "03/04/1929", "03/03/1933", "Charles Curtis" );
        president_table[ 31 ] = new
            President( "Franklin Delano Roosevelt","01/30/1882","New York",
                       "Democrat", "03/04/1933", "04/12/1945",
                       "John N. Garner + Henry A. Wallace + Harry S. Truman" );
        president_table[ 32 ] = new
            President( "Harry S. Truman", "05/08/1884", "Missouri",
                       "Democrat", "04/12/1945", "01/20/1953", "Alben W. Barkley" );
        president_table[ 33 ] = new
            President( "Dwight David Eisenhower", "10/14/1890", "Texas",
                       "Republican","01/20/1953","01/20/1961","Richard Milhous Nixon");
        president_table[ 34 ] = new
            President( "John Fitzgerald Kennedy", "05/29/1917", "Massachusetts",
                       "Democrat", "01/20/1961", "11/22/1963", "Lyndon Baines Johnson" );
        president_table[ 35 ] = new
            President( "Lyndon Baines Johnson", "08/27/1908", "Texas",
                       "Democrat", "11/22/1963", "01/20/1969", "Hubert H. Humphrey");
        president_table[ 36 ] = new
            President( "Richard Milhous Nixon", "01/09/1913", "California",
                       "Republican", "01/20/1969", "08/09/1974",
                       "Spiro T. Agnew + Gerald Rudolph Ford");
        president_table[ 37 ] = new
            President( "Gerald Rudolph Ford", "07/14/1913", "Nebraska",
                       "Republican","08/09/1974","01/20/1977","Nelson A. Rockefeller");
        president_table[ 38 ] = new
            President( "Jimmy (James Earl) Carter", "10/01/1924", "Georgia",
                       "Democrat", "01/20/1977", "01/20/1981", "Walter F. Mondale" );
        president_table[ 39 ] = new
            President( "Ronald Wilson Reagan", "02/06/1911", "Illinois",
                       "Republican", "01/20/1981", "01/20/1989", "George Bush" ) ;
        president_table[ 40 ] = new
            President( "George Bush", "06/12/1924", "Massachusetts",
                       "Republican", "01/20/1989", "01/20/1993", "Dan Quayle" ) ;
        president_table[ 41 ] = new
            President( "Bill Clinton", "08/19/1946", "Arkansas",
                       "Democrat", "01/20/1993", "01/20/2001", "Albert Gore" ) ;
        president_table[ 42 ] = new
            President( "George W. Bush", "07/06/1946", "Connecticut",
                       "Republican", "01/20/2001", "01/20/2009", "Richard Cheney" ) ;
        // The value of the following variable must be updated
        // when new presidents are added to president_table.
        number_of_presidents_in_table = 43 ;
        index_of_last_printing = 0 ;
    }

    // Prompts for a (partial) name and prints full data for the first
    // president whose name contains the given string.
    public void search_president_by_name()
    {
        Console.Write( "\n Enter first, last, or full name of president: ") ;
        string given_president_name = Console.ReadLine() ;
        if ( given_president_name == null )
        {
            // End-of-input: search for the empty string instead of letting
            // IndexOf(null) throw below.
            given_president_name = "" ;
        }
        int president_index = 0 ;
        int array_search_status = SEARCH_NOT_READY ;
        // Linear search driven by the state machine constants.
        while ( array_search_status == SEARCH_NOT_READY )
        {
            if ( president_table[ president_index ].get_president_name()
                    .IndexOf( given_president_name ) != -1 )
            {
                array_search_status = SEARCH_IS_SUCCESSFUL ;
            }
            else if ( president_index >= number_of_presidents_in_table - 1 )
            {
                array_search_status = SEARCH_NOT_SUCCESSFUL ;
            }
            else
            {
                president_index ++ ;
            }
        }
        if ( array_search_status == SEARCH_IS_SUCCESSFUL )
        {
            Console.Write( "\n\n THE #" + ( president_index + 1 )
                + " PRESIDENT OF THE UNITED STATES: \n"
                + president_table[ president_index ].get_full_president_data() ) ;
            index_of_last_printing = president_index ;
        }
        else
        {
            Console.Write( "\n\n Sorry, could not find \""
                + given_president_name + "\" in table.\n" ) ;
        }
    }

    // Prompts for a date and prints the president whose term contains it.
    // NOTE(review): assumes Date's constructor tolerates whatever string the
    // user types -- input validation happens (or not) inside Date.
    public void search_president_for_given_date()
    {
        Console.Write( "\n Please, type in a date in form MM/DD/YYYY "
            + "\n Use two digits for days and months, and "
            + "\n four digits for year: " ) ;
        string date_as_string = Console.ReadLine() ;
        Date date_of_interest = new Date( date_as_string ) ;
        int president_index = 0 ;
        int array_search_status = SEARCH_NOT_READY ;
        while ( array_search_status == SEARCH_NOT_READY )
        {
            if ( president_table[ president_index ].was_president_on( date_of_interest ) )
            {
                array_search_status = SEARCH_IS_SUCCESSFUL ;
            }
            else if ( president_index >= number_of_presidents_in_table - 1)
            {
                array_search_status = SEARCH_NOT_SUCCESSFUL ;
            }
            else
            {
                president_index ++ ;
            }
        }
        if ( array_search_status == SEARCH_IS_SUCCESSFUL )
        {
            Console.Write( "\n\n ON " + date_of_interest
                + ", THE PRESIDENT OF THE UNITED STATES WAS: \n"
                + president_table[ president_index ].get_full_president_data() ) ;
            index_of_last_printing = president_index ;
        }
        else
        {
            Console.Write( "\n\n Sorry, no president was on duty on "
                + date_of_interest + ".\n" ) ;
        }
    }

    // Prints the president after the most recently printed one, if any remain.
    public void print_data_of_next_president()
    {
        index_of_last_printing ++ ;
        if ( index_of_last_printing < number_of_presidents_in_table )
        {
            Console.Write( "\n\n THE #" + ( index_of_last_printing + 1 )
                + " PRESIDENT OF THE UNITED STATES: \n"
                + president_table[ index_of_last_printing ].get_full_president_data() ) ;
        }
        else
        {
            Console.Write( "\n Sorry, no more presidents in table." ) ;
        }
    }

    // Prints brief one-line entries for every president, pausing every 15 lines.
    public void print_list_of_all_presidents()
    {
        int president_index = 0 ;
        while ( president_index < number_of_presidents_in_table )
        {
            Console.Write( president_table[ president_index ].get_brief_president_info() ) ;
            president_index ++ ;
            if ( ( president_index % 15 ) == 0 )
            {
                Console.Write( "\nPress <Enter> to continue ....." ) ;
                Console.ReadLine() ;   // wait for the user; input is discarded
            }
        }
    }

    // Main menu loop: dispatches on the first character of each input line
    // until the user types 'e'.
    public void run()
    {
        string user_selection = "????" ;
        Console.Write("\n This program provides information about all"
            + "\n presidents of the U.S.A. Please, select from"
            + "\n the following menu by typing in a letter. ") ;
        while ( user_selection[ 0 ] != 'e' )
        {
            Console.Write("\n\n p Search president by name."
                + "\n d Search president for a given date."
                + "\n n Print data of next president."
                + "\n a Print list of all presidents."
                + "\n e Exit the program.\n\n " ) ;
            user_selection = Console.ReadLine() ;
            if ( string.IsNullOrEmpty( user_selection ) )
            {
                // Empty line or end-of-input: indexing [0] would throw, so
                // treat it as "no selection" and show the menu again.
                user_selection = "?" ;
                continue ;
            }
            if ( user_selection[ 0 ] == 'p' )
            {
                search_president_by_name() ;
            }
            else if ( user_selection[ 0 ] == 'd' )
            {
                search_president_for_given_date() ;
            }
            else if ( user_selection[ 0 ] == 'n' )
            {
                print_data_of_next_president() ;
            }
            else if ( user_selection[ 0 ] == 'a' )
            {
                print_list_of_all_presidents() ;
            }
        }
    }
}
// Entry point: construct the application object and run its menu loop.
class PresidentInfoApplicationRunner
{
    static void Main()
    {
        new PresidentInfoApplication().run() ;
    }
}
| |
using NUnit.Framework;
using OpenQA.Selenium.Environment;
using OpenQA.Selenium.Internal;
using System;
using System.Collections.ObjectModel;
using System.Drawing;
namespace OpenQA.Selenium.Interactions
{
[TestFixture]
public class CombinedInputActionsTest : DriverTestFixture
{
[SetUp]
public void Setup()
{
    // Sending Keys.Null releases any modifier keys a previous test left pressed.
    new Actions(driver).SendKeys(Keys.Null).Perform();
    if (driver is IActionExecutor executor)
    {
        executor.ResetInputState();
    }
}
[TearDown]
public void ReleaseModifierKeys()
{
    // Mirror of Setup: clear modifier state and reset the driver's input state
    // so the next test starts clean.
    new Actions(driver).SendKeys(Keys.Null).Perform();
    if (driver is IActionExecutor executor)
    {
        executor.ResetInputState();
    }
}
[Test]
[IgnoreBrowser(Browser.IE, "IE reports [0,0] as location for <option> elements")]
public void PlainClickingOnMultiSelectionList()
{
    driver.Url = formSelectionPage;

    // Plain (unmodified) clicks on three options: each click replaces the
    // previous selection, so only the last option should remain selected.
    ReadOnlyCollection<IWebElement> options = driver.FindElements(By.TagName("option"));
    new Actions(driver)
        .Click(options[1])
        .Click(options[2])
        .Click(options[3])
        .Build()
        .Perform();

    driver.FindElement(By.Name("showselected")).Click();

    IWebElement resultElement = driver.FindElement(By.Id("result"));
    Assert.AreEqual("cheddar", resultElement.Text, "Should have picked the third option only.");
}
[Test]
[IgnoreBrowser(Browser.IE, "IE reports [0,0] as location for <option> elements")]
public void ShiftClickingOnMultiSelectionList()
{
    driver.Url = formSelectionPage;

    // Click one option, then shift-click two more to extend the selection.
    ReadOnlyCollection<IWebElement> options = driver.FindElements(By.TagName("option"));
    new Actions(driver)
        .Click(options[1])
        .KeyDown(Keys.Shift)
        .Click(options[2])
        .Click(options[3])
        .KeyUp(Keys.Shift)
        .Build()
        .Perform();

    driver.FindElement(By.Name("showselected")).Click();

    IWebElement resultElement = driver.FindElement(By.Id("result"));
    Assert.AreEqual("roquefort parmigiano cheddar", resultElement.Text, "Should have picked the last three options.");
}
[Test]
[IgnoreBrowser(Browser.IE, "IE reports [0,0] as location for <option> elements")]
[IgnoreBrowser(Browser.Safari, "Control + click in macOS results in context menu, not multiselect.")]
public void ControlClickingOnMultiSelectionList()
{
    driver.Url = formSelectionPage;

    // Click one option, then control-click another to add it to the selection
    // without clearing the first.
    ReadOnlyCollection<IWebElement> options = driver.FindElements(By.TagName("option"));
    new Actions(driver)
        .Click(options[1])
        .KeyDown(Keys.Control)
        .Click(options[3])
        .KeyUp(Keys.Control)
        .Build()
        .Perform();

    driver.FindElement(By.Name("showselected")).Click();

    IWebElement resultElement = driver.FindElement(By.Id("result"));
    Assert.AreEqual("roquefort cheddar", resultElement.Text, "Should have picked the first and third options.");
}
[Test]
public void ControlClickingOnCustomMultiSelectionList()
{
    driver.Url = selectableItemsPage;

    IWebElement reportingElement = driver.FindElement(By.Id("infodiv"));
    Assert.AreEqual("no info", reportingElement.Text);

    // Control-click three list items; the page should report all three selected.
    ReadOnlyCollection<IWebElement> listItems = driver.FindElements(By.TagName("li"));
    new Actions(driver)
        .KeyDown(Keys.Control)
        .Click(listItems[1])
        .Click(listItems[3])
        .Click(listItems[5])
        .KeyUp(Keys.Control)
        .Build()
        .Perform();
    Assert.AreEqual("#item2 #item4 #item6", reportingElement.Text);

    // Now click on another element, make sure that's the only one selected.
    new Actions(driver).Click(listItems[6]).Build().Perform();
    Assert.AreEqual("#item7", reportingElement.Text);
}
[Test]
public void CanMoveMouseToAnElementInAnIframeAndClick()
{
    driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("click_tests/click_in_iframe.html");
    WaitFor<IWebElement>(() => driver.FindElement(By.Id("ifr")), "Did not find element");
    driver.SwitchTo().Frame("ifr");
    try
    {
        // Move the pointer onto the link inside the iframe, then click it.
        IWebElement link = driver.FindElement(By.Id("link"));
        new Actions(driver).MoveToElement(link).Click().Perform();
        WaitFor(() => driver.Title == "Submitted Successfully!", "Browser title not correct");
    }
    finally
    {
        // Always restore the top-level browsing context for subsequent tests.
        driver.SwitchTo().DefaultContent();
    }
}
[Test]
public void CanClickOnLinks()
{
    // Delegates to the shared helper so plain link-clicking has its own test entry.
    NavigateToClicksPageAndClickLink();
}
[Test]
public void CanClickOnLinksWithAnOffset()
{
    driver.Url = clicksPage;
    WaitFor(() => driver.FindElement(By.Id("normal")), "Could not find element with id 'normal'");

    // Click at offset (1,1) inside the link rather than at its centre.
    IWebElement link = driver.FindElement(By.Id("normal"));
    new Actions(driver)
        .MoveToElement(link, 1, 1)
        .Click()
        .Perform();

    WaitFor(() => driver.Title == "XHTML Test Page", "Browser title is not 'XHTML Test Page'");
}
[Test]
public void ClickAfterMoveToAnElementWithAnOffsetShouldUseLastMousePosition()
{
    driver.Url = clickEventPage;
    IWebElement eventTarget = driver.FindElement(By.Id("eventish"));
    Point targetLocation = eventTarget.Location;
    new Actions(driver)
        .MoveToElement(eventTarget, 20, 10)
        .Click()
        .Perform();
    WaitFor<IWebElement>(() => driver.FindElement(By.Id("pageX")), "Did not find element with ID pageX");
    // NOTE: reading pageX/pageY fails for IE 9 or earlier; those browsers
    // would have to fall back to the clientX/clientY elements instead
    // (guarded by TestUtilities.IsInternetExplorer / !TestUtilities.IsIE10OrHigher).
    int reportedX = int.Parse(driver.FindElement(By.Id("pageX")).Text);
    int reportedY = int.Parse(driver.FindElement(By.Id("pageY")).Text);
    Assert.That(FuzzyPositionMatching(targetLocation.X + 20, targetLocation.Y + 10, string.Format("{0},{1}", reportedX, reportedY)), Is.True);
}
// This test demonstrates the following problem: when the driver's internal
// representation of the mouse keeps stale state across a page navigation,
// subsequent mouse movement ends up at the wrong coordinates.
[Test]
public void MouseMovementWorksWhenNavigatingToAnotherPage()
{
    NavigateToClicksPageAndClickLink();
    new Actions(driver)
        .MoveToElement(driver.FindElement(By.Id("linkId")), 1, 1)
        .Click()
        .Perform();
    WaitFor(() => driver.Title == "We Arrive Here", "Browser title is not 'We Arrive Here'");
}
[Test]
public void ChordControlCutAndPaste()
{
// FIXME: Macs don't have a CONTROL key, so this chord cannot run there.
// The original Java test also skipped Windows IE/Firefox because the native
// events library did not support storing modifier state:
//if (getEffectivePlatform().is(Platform.MAC)) {
// return;
//}
//if (getEffectivePlatform().is(Platform.WINDOWS) &&
// (isInternetExplorer(driver) || isFirefox(driver))) {
// System.out.println("Skipping testChordControlCutAndPaste on Windows: native events library" +
// " does not support storing modifiers state yet.");
// return;
//}
driver.Url = javascriptPage;
IWebElement element = driver.FindElement(By.Id("keyReporter"));
// Must scroll element into view for W3C-compliant drivers.
((IJavaScriptExecutor)driver).ExecuteScript("arguments[0].scrollIntoView()", element);
new Actions(driver)
.SendKeys(element, "abc def")
.Perform();
Assert.AreEqual("abc def", element.GetAttribute("value"));
// Ctrl is held for both keys: "a" selects all, "x" cuts to the clipboard.
//TODO: Figure out why calling sendKey(Key.CONTROL + "a") and then
//sendKeys("x") does not work on Linux.
new Actions(driver).KeyDown(Keys.Control)
.SendKeys("a" + "x")
.Perform();
// Release keys before next step.
new Actions(driver).SendKeys(Keys.Null).Perform();
Assert.AreEqual(string.Empty, element.GetAttribute("value"));
// Paste twice (Ctrl held, "v" sent twice) to get the text back doubled.
new Actions(driver).KeyDown(Keys.Control)
.SendKeys("v")
.SendKeys("v")
.Perform();
new Actions(driver).SendKeys(Keys.Null).Perform();
Assert.AreEqual("abc defabc def", element.GetAttribute("value"));
}
[Test]
[NeedsFreshDriver(IsCreatedBeforeTest = true)]
[IgnoreBrowser(Browser.Opera)]
public void CombiningShiftAndClickResultsInANewWindow()
{
    driver.Url = linkedImage;
    IWebElement imageLink = driver.FindElement(By.Id("link"));
    string titleBefore = driver.Title;
    // Shift-clicking a link should open it in a new window.
    new Actions(driver)
        .MoveToElement(imageLink)
        .KeyDown(Keys.Shift)
        .Click()
        .KeyUp(Keys.Shift)
        .Perform();
    WaitFor(() => driver.WindowHandles.Count > 1, "Did not receive new window");
    Assert.AreEqual(2, driver.WindowHandles.Count, "Should have opened a new window.");
    Assert.AreEqual(titleBefore, driver.Title, "Should not have navigated away.");
    // Close every window except the original one, then switch back to it.
    string initialHandle = driver.CurrentWindowHandle;
    foreach (string handle in driver.WindowHandles)
    {
        if (handle == initialHandle)
        {
            continue;
        }
        driver.SwitchTo().Window(handle);
        driver.Close();
    }
    driver.SwitchTo().Window(initialHandle);
}
[Test]
[IgnoreBrowser(Browser.Opera)]
public void HoldingDownShiftKeyWhileClicking()
{
    driver.Url = clickEventPage;
    IWebElement clickTarget = driver.FindElement(By.Id("eventish"));
    // The page reports whether shift was held during the click event.
    new Actions(driver)
        .MoveToElement(clickTarget)
        .KeyDown(Keys.Shift)
        .Click()
        .KeyUp(Keys.Shift)
        .Perform();
    IWebElement shiftReport = WaitFor(() => driver.FindElement(By.Id("shiftKey")), "Could not find element with id 'shiftKey'");
    Assert.AreEqual("true", shiftReport.Text);
}
[Test]
public void CanClickOnSuckerFishStyleMenu()
{
    driver.Url = javascriptPage;
    // Park the mouse on an unrelated element so it is not hovering over the
    // element with id 'item1' left over from a previous test.
    new Actions(driver).MoveToElement(driver.FindElement(By.Id("dynamo"))).Build().Perform();
    IWebElement menu = driver.FindElement(By.Id("menu1"));
    IWebElement menuItem = driver.FindElement(By.Id("item1"));
    Assert.AreEqual(string.Empty, menuItem.Text);
    ((IJavaScriptExecutor)driver).ExecuteScript("arguments[0].style.background = 'green'", menu);
    new Actions(driver).MoveToElement(menu).Build().Perform();
    // Intentionally wait to make sure the hover state persists.
    System.Threading.Thread.Sleep(2000);
    menuItem.Click();
    IWebElement resultElement = driver.FindElement(By.Id("result"));
    WaitFor(() => resultElement.Text.Contains("item 1"), "Result element does not contain text 'item 1'");
}
[Test]
public void CanClickOnSuckerFishMenuItem()
{
    driver.Url = javascriptPage;
    // Park the mouse on an unrelated element so it is not hovering over the
    // element with id 'item1' left over from a previous test.
    new Actions(driver).MoveToElement(driver.FindElement(By.Id("dynamo"))).Build().Perform();
    IWebElement menu = driver.FindElement(By.Id("menu1"));
    new Actions(driver).MoveToElement(menu).Build().Perform();
    IWebElement menuItem = driver.FindElement(By.Id("item1"));
    Assert.That(menuItem.Displayed, "Target element was not displayed");
    menuItem.Click();
    IWebElement resultElement = driver.FindElement(By.Id("result"));
    WaitFor(() => resultElement.Text.Contains("item 1"), "Result element does not contain text 'item 1'");
}
// Returns true when the "x,y" pair in locationTuple is within a few pixels
// of the expected coordinates.
private bool FuzzyPositionMatching(int expectedX, int expectedY, string locationTuple)
{
    // Everything within 5 pixels range is OK.
    const int AllowedDeviation = 5;
    string[] parts = locationTuple.Split(',');
    int actualX = int.Parse(parts[0].Trim());
    int actualY = int.Parse(parts[1].Trim());
    return Math.Abs(expectedX - actualX) < AllowedDeviation
        && Math.Abs(expectedY - actualY) < AllowedDeviation;
}
// Shared helper: open the clicks page, click the 'normal' link, and wait
// for the destination page to load.
private void NavigateToClicksPageAndClickLink()
{
    driver.Url = clicksPage;
    WaitFor(() => driver.FindElement(By.Id("normal")), "Could not find element with id 'normal'");
    IWebElement normalLink = driver.FindElement(By.Id("normal"));
    new Actions(driver).Click(normalLink).Perform();
    WaitFor(() => driver.Title == "XHTML Test Page", "Browser title is not 'XHTML Test Page'");
}
}
}
| |
using System;
using System.Runtime.InteropServices;
using System.Text;
using System.Collections.Generic;
using OpenHome.Net.Core;
namespace OpenHome.Net.Device.Providers
{
// Interface for the av.openhome.org:Exakt:1 UPnP service provider.
public interface IDvProviderAvOpenhomeOrgExakt1 : IDisposable
{
/// <summary>
/// Set the value of the DeviceList property
/// </summary>
/// <param name="aValue">New value for the property</param>
/// <returns>true if the value has been updated; false if aValue was the same as the previous value</returns>
bool SetPropertyDeviceList(string aValue);
/// <summary>
/// Get a copy of the value of the DeviceList property
/// </summary>
/// <returns>Value of the DeviceList property.</returns>
string PropertyDeviceList();
/// <summary>
/// Set the value of the ConnectionStatus property
/// </summary>
/// <param name="aValue">New value for the property</param>
/// <returns>true if the value has been updated; false if aValue was the same as the previous value</returns>
bool SetPropertyConnectionStatus(string aValue);
/// <summary>
/// Get a copy of the value of the ConnectionStatus property
/// </summary>
/// <returns>Value of the ConnectionStatus property.</returns>
string PropertyConnectionStatus();
}
/// <summary>
/// Provider for the av.openhome.org:Exakt:1 UPnP service
/// </summary>
/// <remarks>
/// Provider boilerplate: derived classes call the EnableProperty* /
/// EnableAction* methods for the state variables and actions the device
/// supports, and override the corresponding virtual action methods.
/// </remarks>
public class DvProviderAvOpenhomeOrgExakt1 : DvProvider, IDisposable, IDvProviderAvOpenhomeOrgExakt1
{
private GCHandle iGch;
// Action callbacks handed to EnableAction; stored in fields (presumably so
// they stay reachable while the native layer holds them -- confirm).
private ActionDelegate iDelegateDeviceList;
private ActionDelegate iDelegateDeviceSettings;
private ActionDelegate iDelegateConnectionStatus;
private ActionDelegate iDelegateSet;
private ActionDelegate iDelegateReprogram;
private ActionDelegate iDelegateReprogramFallback;
// Evented string properties; null until the matching EnableProperty* call.
private PropertyString iPropertyDeviceList;
private PropertyString iPropertyConnectionStatus;
/// <summary>
/// Constructor
/// </summary>
/// <param name="aDevice">Device which owns this provider</param>
protected DvProviderAvOpenhomeOrgExakt1(DvDevice aDevice)
: base(aDevice, "av.openhome.org", "Exakt", 1)
{
// Allocate a handle to this instance so the static Do* callbacks can
// recover it via GCHandle.FromIntPtr; freed again in Dispose().
iGch = GCHandle.Alloc(this);
}
/// <summary>
/// Enable the DeviceList property.
/// </summary>
public void EnablePropertyDeviceList()
{
List<String> allowedValues = new List<String>();
iPropertyDeviceList = new PropertyString(new ParameterString("DeviceList", allowedValues));
AddProperty(iPropertyDeviceList);
}
/// <summary>
/// Enable the ConnectionStatus property.
/// </summary>
public void EnablePropertyConnectionStatus()
{
List<String> allowedValues = new List<String>();
iPropertyConnectionStatus = new PropertyString(new ParameterString("ConnectionStatus", allowedValues));
AddProperty(iPropertyConnectionStatus);
}
/// <summary>
/// Set the value of the DeviceList property
/// </summary>
/// <remarks>Can only be called if EnablePropertyDeviceList has previously been called.</remarks>
/// <param name="aValue">New value for the property</param>
/// <returns>true if the value has been updated; false if aValue was the same as the previous value</returns>
public bool SetPropertyDeviceList(string aValue)
{
if (iPropertyDeviceList == null)
throw new PropertyDisabledError();
return SetPropertyString(iPropertyDeviceList, aValue);
}
/// <summary>
/// Get a copy of the value of the DeviceList property
/// </summary>
/// <remarks>Can only be called if EnablePropertyDeviceList has previously been called.</remarks>
/// <returns>Value of the DeviceList property.</returns>
public string PropertyDeviceList()
{
if (iPropertyDeviceList == null)
throw new PropertyDisabledError();
return iPropertyDeviceList.Value();
}
/// <summary>
/// Set the value of the ConnectionStatus property
/// </summary>
/// <remarks>Can only be called if EnablePropertyConnectionStatus has previously been called.</remarks>
/// <param name="aValue">New value for the property</param>
/// <returns>true if the value has been updated; false if aValue was the same as the previous value</returns>
public bool SetPropertyConnectionStatus(string aValue)
{
if (iPropertyConnectionStatus == null)
throw new PropertyDisabledError();
return SetPropertyString(iPropertyConnectionStatus, aValue);
}
/// <summary>
/// Get a copy of the value of the ConnectionStatus property
/// </summary>
/// <remarks>Can only be called if EnablePropertyConnectionStatus has previously been called.</remarks>
/// <returns>Value of the ConnectionStatus property.</returns>
public string PropertyConnectionStatus()
{
if (iPropertyConnectionStatus == null)
throw new PropertyDisabledError();
return iPropertyConnectionStatus.Value();
}
/// <summary>
/// Signal that the action DeviceList is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// DeviceList must be overridden if this is called.
/// NOTE(review): the output parameter is related to iPropertyDeviceList, so
/// EnablePropertyDeviceList should be called before this -- confirm.</remarks>
protected void EnableActionDeviceList()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("DeviceList");
action.AddOutputParameter(new ParameterRelated("List", iPropertyDeviceList));
iDelegateDeviceList = new ActionDelegate(DoDeviceList);
EnableAction(action, iDelegateDeviceList, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action DeviceSettings is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// DeviceSettings must be overridden if this is called.</remarks>
protected void EnableActionDeviceSettings()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("DeviceSettings");
List<String> allowedValues = new List<String>();
action.AddInputParameter(new ParameterString("DeviceId", allowedValues));
action.AddOutputParameter(new ParameterString("Settings", allowedValues));
iDelegateDeviceSettings = new ActionDelegate(DoDeviceSettings);
EnableAction(action, iDelegateDeviceSettings, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action ConnectionStatus is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// ConnectionStatus must be overridden if this is called.
/// NOTE(review): the output parameter is related to iPropertyConnectionStatus, so
/// EnablePropertyConnectionStatus should be called before this -- confirm.</remarks>
protected void EnableActionConnectionStatus()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("ConnectionStatus");
action.AddOutputParameter(new ParameterRelated("ConnectionStatus", iPropertyConnectionStatus));
iDelegateConnectionStatus = new ActionDelegate(DoConnectionStatus);
EnableAction(action, iDelegateConnectionStatus, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action Set is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// Set must be overridden if this is called.</remarks>
protected void EnableActionSet()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("Set");
List<String> allowedValues = new List<String>();
action.AddInputParameter(new ParameterString("DeviceId", allowedValues));
action.AddInputParameter(new ParameterUint("BankId"));
action.AddInputParameter(new ParameterString("FileUri", allowedValues));
action.AddInputParameter(new ParameterBool("Mute"));
action.AddInputParameter(new ParameterBool("Persist"));
iDelegateSet = new ActionDelegate(DoSet);
EnableAction(action, iDelegateSet, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action Reprogram is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// Reprogram must be overridden if this is called.</remarks>
protected void EnableActionReprogram()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("Reprogram");
List<String> allowedValues = new List<String>();
action.AddInputParameter(new ParameterString("DeviceId", allowedValues));
action.AddInputParameter(new ParameterString("FileUri", allowedValues));
iDelegateReprogram = new ActionDelegate(DoReprogram);
EnableAction(action, iDelegateReprogram, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// Signal that the action ReprogramFallback is supported.
/// </summary>
/// <remarks>The action's availability will be published in the device's service.xml.
/// ReprogramFallback must be overridden if this is called.</remarks>
protected void EnableActionReprogramFallback()
{
OpenHome.Net.Core.Action action = new OpenHome.Net.Core.Action("ReprogramFallback");
List<String> allowedValues = new List<String>();
action.AddInputParameter(new ParameterString("DeviceId", allowedValues));
action.AddInputParameter(new ParameterString("FileUri", allowedValues));
iDelegateReprogramFallback = new ActionDelegate(DoReprogramFallback);
EnableAction(action, iDelegateReprogramFallback, GCHandle.ToIntPtr(iGch));
}
/// <summary>
/// DeviceList action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// DeviceList action for the owning device.
///
/// Must be implemented iff EnableActionDeviceList was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aList"></param>
protected virtual void DeviceList(IDvInvocation aInvocation, out string aList)
{
throw (new ActionDisabledError());
}
/// <summary>
/// DeviceSettings action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// DeviceSettings action for the owning device.
///
/// Must be implemented iff EnableActionDeviceSettings was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aDeviceId"></param>
/// <param name="aSettings"></param>
protected virtual void DeviceSettings(IDvInvocation aInvocation, string aDeviceId, out string aSettings)
{
throw (new ActionDisabledError());
}
/// <summary>
/// ConnectionStatus action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// ConnectionStatus action for the owning device.
///
/// Must be implemented iff EnableActionConnectionStatus was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aConnectionStatus"></param>
protected virtual void ConnectionStatus(IDvInvocation aInvocation, out string aConnectionStatus)
{
throw (new ActionDisabledError());
}
/// <summary>
/// Set action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// Set action for the owning device.
///
/// Must be implemented iff EnableActionSet was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aDeviceId"></param>
/// <param name="aBankId"></param>
/// <param name="aFileUri"></param>
/// <param name="aMute"></param>
/// <param name="aPersist"></param>
protected virtual void Set(IDvInvocation aInvocation, string aDeviceId, uint aBankId, string aFileUri, bool aMute, bool aPersist)
{
throw (new ActionDisabledError());
}
/// <summary>
/// Reprogram action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// Reprogram action for the owning device.
///
/// Must be implemented iff EnableActionReprogram was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aDeviceId"></param>
/// <param name="aFileUri"></param>
protected virtual void Reprogram(IDvInvocation aInvocation, string aDeviceId, string aFileUri)
{
throw (new ActionDisabledError());
}
/// <summary>
/// ReprogramFallback action.
/// </summary>
/// <remarks>Will be called when the device stack receives an invocation of the
/// ReprogramFallback action for the owning device.
///
/// Must be implemented iff EnableActionReprogramFallback was called.</remarks>
/// <param name="aInvocation">Interface allowing querying of aspects of this particular action invocation.</param>
/// <param name="aDeviceId"></param>
/// <param name="aFileUri"></param>
protected virtual void ReprogramFallback(IDvInvocation aInvocation, string aDeviceId, string aFileUri)
{
throw (new ActionDisabledError());
}
// Static callback invoked by the stack for a DeviceList invocation.
// Recovers the provider instance from the handle, reads the inputs,
// dispatches to the virtual DeviceList, then writes the outputs.
// Returns 0 on success, -1 on error (after reporting it on the invocation).
private static int DoDeviceList(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderAvOpenhomeOrgExakt1 self = (DvProviderAvOpenhomeOrgExakt1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
string list;
try
{
invocation.ReadStart();
invocation.ReadEnd();
self.DeviceList(invocation, out list);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "DeviceList");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "DeviceList" }));
return -1;
}
catch (Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "DeviceList" });
System.Diagnostics.Debug.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteString("List", list);
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "DeviceList" });
System.Diagnostics.Debug.WriteLine("         Only ActionError can be thrown by action response writer");
}
return 0;
}
// Static callback for DeviceSettings; same pattern as DoDeviceList.
private static int DoDeviceSettings(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderAvOpenhomeOrgExakt1 self = (DvProviderAvOpenhomeOrgExakt1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
string deviceId;
string settings;
try
{
invocation.ReadStart();
deviceId = invocation.ReadString("DeviceId");
invocation.ReadEnd();
self.DeviceSettings(invocation, deviceId, out settings);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "DeviceSettings");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "DeviceSettings" }));
return -1;
}
catch (Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "DeviceSettings" });
System.Diagnostics.Debug.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteString("Settings", settings);
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "DeviceSettings" });
System.Diagnostics.Debug.WriteLine("         Only ActionError can be thrown by action response writer");
}
return 0;
}
// Static callback for ConnectionStatus; same pattern as DoDeviceList.
private static int DoConnectionStatus(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderAvOpenhomeOrgExakt1 self = (DvProviderAvOpenhomeOrgExakt1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
string connectionStatus;
try
{
invocation.ReadStart();
invocation.ReadEnd();
self.ConnectionStatus(invocation, out connectionStatus);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "ConnectionStatus");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "ConnectionStatus" }));
return -1;
}
catch (Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "ConnectionStatus" });
System.Diagnostics.Debug.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteString("ConnectionStatus", connectionStatus);
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "ConnectionStatus" });
System.Diagnostics.Debug.WriteLine("         Only ActionError can be thrown by action response writer");
}
return 0;
}
// Static callback for Set; same pattern as DoDeviceList (no outputs).
private static int DoSet(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderAvOpenhomeOrgExakt1 self = (DvProviderAvOpenhomeOrgExakt1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
string deviceId;
uint bankId;
string fileUri;
bool mute;
bool persist;
try
{
invocation.ReadStart();
deviceId = invocation.ReadString("DeviceId");
bankId = invocation.ReadUint("BankId");
fileUri = invocation.ReadString("FileUri");
mute = invocation.ReadBool("Mute");
persist = invocation.ReadBool("Persist");
invocation.ReadEnd();
self.Set(invocation, deviceId, bankId, fileUri, mute, persist);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "Set");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "Set" }));
return -1;
}
catch (Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Set" });
System.Diagnostics.Debug.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Set" });
System.Diagnostics.Debug.WriteLine("         Only ActionError can be thrown by action response writer");
}
return 0;
}
// Static callback for Reprogram; same pattern as DoDeviceList (no outputs).
private static int DoReprogram(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderAvOpenhomeOrgExakt1 self = (DvProviderAvOpenhomeOrgExakt1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
string deviceId;
string fileUri;
try
{
invocation.ReadStart();
deviceId = invocation.ReadString("DeviceId");
fileUri = invocation.ReadString("FileUri");
invocation.ReadEnd();
self.Reprogram(invocation, deviceId, fileUri);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "Reprogram");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "Reprogram" }));
return -1;
}
catch (Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Reprogram" });
System.Diagnostics.Debug.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "Reprogram" });
System.Diagnostics.Debug.WriteLine("         Only ActionError can be thrown by action response writer");
}
return 0;
}
// Static callback for ReprogramFallback; same pattern as DoReprogram.
private static int DoReprogramFallback(IntPtr aPtr, IntPtr aInvocation)
{
GCHandle gch = GCHandle.FromIntPtr(aPtr);
DvProviderAvOpenhomeOrgExakt1 self = (DvProviderAvOpenhomeOrgExakt1)gch.Target;
DvInvocation invocation = new DvInvocation(aInvocation);
string deviceId;
string fileUri;
try
{
invocation.ReadStart();
deviceId = invocation.ReadString("DeviceId");
fileUri = invocation.ReadString("FileUri");
invocation.ReadEnd();
self.ReprogramFallback(invocation, deviceId, fileUri);
}
catch (ActionError e)
{
invocation.ReportActionError(e, "ReprogramFallback");
return -1;
}
catch (PropertyUpdateError)
{
invocation.ReportError(501, String.Format("Invalid value for property {0}", new object[] { "ReprogramFallback" }));
return -1;
}
catch (Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "ReprogramFallback" });
System.Diagnostics.Debug.WriteLine("         Only ActionError or PropertyUpdateError should be thrown by actions");
return -1;
}
try
{
invocation.WriteStart();
invocation.WriteEnd();
}
catch (ActionError)
{
return -1;
}
catch (System.Exception e)
{
System.Diagnostics.Debug.WriteLine("WARNING: unexpected exception {0} thrown by {1}", new object[] { e, "ReprogramFallback" });
System.Diagnostics.Debug.WriteLine("         Only ActionError can be thrown by action response writer");
}
return 0;
}
/// <summary>
/// Must be called for each class instance. Must be called before Core.Library.Close().
/// </summary>
public virtual void Dispose()
{
// Free the pinned self-handle once the native provider has been torn down.
if (DisposeProvider())
iGch.Free();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Xml;
using Umbraco.Core;
using Umbraco.Core.Models;
using Umbraco.Core.Models.EntityBase;
using Umbraco.Core.Persistence.Caching;
using umbraco.cms.businesslogic.web;
using umbraco.DataLayer;
using umbraco.BusinessLogic;
using System.IO;
using System.Text.RegularExpressions;
using System.ComponentModel;
using Umbraco.Core.IO;
using System.Collections;
using umbraco.cms.businesslogic.task;
using umbraco.cms.businesslogic.workflow;
using umbraco.cms.businesslogic.Tags;
using File = System.IO.File;
using Media = umbraco.cms.businesslogic.media.Media;
using Task = umbraco.cms.businesslogic.task.Task;
namespace umbraco.cms.businesslogic
{
/// <summary>
/// CMSNode class serves as the base class for many of the other components in the cms.businesslogic.xx namespaces.
/// Providing the basic hierarchical data structure and properties Text (name), Creator, Createdate, updatedate etc.
/// which are shared by most umbraco objects.
///
/// The child classes are required to implement an identifier (Guid) which is used as the objecttype identifier, for
/// distinguishing the different types of CMSNodes (ex. Documents/Medias/Stylesheets/documenttypes and so forth).
/// </summary>
[Obsolete("Obsolete, This class will eventually be phased out", false)]
public class CMSNode : BusinessLogic.console.IconI
{
#region Private Members
// Backing fields for the node's basic data: name, identifiers, position in
// the tree (parent/level/path/sort order), creator and creation date.
private string _text;
private int _id = 0;
private Guid _uniqueID;
private int _parentid;
private Guid _nodeObjectType;
private int _level;
private string _path;
// Lazily initialized; see _hasChildrenInitialized below.
private bool _hasChildren;
private int _sortOrder;
private int _userId;
private DateTime _createDate;
// Tracks whether _hasChildren has been computed yet.
private bool _hasChildrenInitialized;
private string m_image = "default.png";
// Null until the trashed state has been read.
private bool? _isTrashed = null;
// New-API entity backing this node, when one has been associated.
private IUmbracoEntity _entity;
#endregion
#region Private static
// Stylesheet that declares the default tree icon css classes.
private static readonly string DefaultIconCssFile = IOHelper.MapPath(SystemDirectories.UmbracoClient + "/Tree/treeIcons.css");
// Cache of css class names parsed from DefaultIconCssFile (populated lazily).
private static readonly List<string> InternalDefaultIconClasses = new List<string>();
// Guards lazy initialization of InternalDefaultIconClasses.
private static readonly ReaderWriterLockSlim Locker = new ReaderWriterLockSlim();
/// <summary>
/// Parses the default icon css file and adds each css selector found to
/// <see cref="InternalDefaultIconClasses"/>. Invoked once, under a write
/// lock, from the <see cref="DefaultIconClasses"/> property getter.
/// </summary>
private static void InitializeIconClasses()
{
    // Read the whole stylesheet in one go. The previous implementation
    // leaked the StreamReader when ReadLine threw (no using/finally) and
    // rebuilt the content string with O(n^2) concatenation.
    string content;
    using (StreamReader reader = File.OpenText(DefaultIconCssFile))
    {
        content = reader.ReadToEnd();
    }
    // Parse each "selector { declarations }" pair; group 1 is the selector.
    var matches = Regex.Matches(content, "([^{]*){([^}]*)}", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace);
    foreach (Match match in matches)
    {
        var cssClass = match.Groups[1].Value
            .Replace("\n", "")
            .Replace("\r", "")
            .Trim()
            .Trim(Environment.NewLine.ToCharArray());
        if (string.IsNullOrEmpty(cssClass) == false)
        {
            InternalDefaultIconClasses.Add(cssClass);
        }
    }
}
// Selects a single umbracoNode row by id.
private const string SqlSingle = "SELECT id, createDate, trashed, parentId, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text FROM umbracoNode WHERE id = @id";
// Selects every node whose comma-delimited path contains the formatted id
// (i.e. the node's descendants). {0} is substituted via string.Format.
private const string SqlDescendants = @"
SELECT id, createDate, trashed, parentId, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text
FROM umbracoNode
WHERE path LIKE '%,{0},%'";
#endregion
#region Public static
/// <summary>
/// Get a count on all CMSNodes given the objecttype
/// </summary>
/// <param name="objectType">The objecttype identifier</param>
/// <returns>
/// The number of CMSNodes of the given objecttype
/// </returns>
public static int CountByObjectType(Guid objectType)
{
    var typeParameter = SqlHelper.CreateParameter("@type", objectType);
    return SqlHelper.ExecuteScalar<int>(
        "SELECT COUNT(*) from umbracoNode WHERE nodeObjectType = @type",
        typeParameter);
}
/// <summary>
/// Counts the nodes whose comma-delimited path contains the given id --
/// i.e. the node itself plus all of its descendants.
/// </summary>
/// <param name="Id">The CMSNode Id</param>
/// <returns>The number of matching nodes</returns>
public static int CountSubs(int Id)
{
    string sql = string.Format(
        "SELECT COUNT(*) FROM umbracoNode WHERE ','+path+',' LIKE '%,{0},%'",
        Id);
    return SqlHelper.ExecuteScalar<int>(sql);
}
/// <summary>
/// Returns the number of leaf nodes from the newParent id for a given object type
/// </summary>
/// <param name="parentId">Id of the parent node</param>
/// <param name="objectType">The objecttype identifier</param>
/// <returns>The number of direct children of the given parent</returns>
public static int CountLeafNodes(int parentId, Guid objectType)
{
    const string sql = "Select count(uniqueID) from umbracoNode where nodeObjectType = @type And parentId = @parentId";
    var typeParameter = SqlHelper.CreateParameter("@type", objectType);
    var parentParameter = SqlHelper.CreateParameter("@parentId", parentId);
    return SqlHelper.ExecuteScalar<int>(sql, typeParameter, parentParameter);
}
/// <summary>
/// Gets the default icon classes.
/// </summary>
/// <value>The default icon classes, lazily parsed from the icon css file
/// on first access.</value>
public static List<string> DefaultIconClasses
{
get
{
using (var l = new UpgradeableReadLock(Locker))
{
if (InternalDefaultIconClasses.Count == 0)
{
// First access: upgrade to a write lock so only one thread
// runs the one-time parse of the css file.
l.UpgradeToWriteLock();
InitializeIconClasses();
}
return InternalDefaultIconClasses;
}
}
}
/// <summary>
/// Method for checking if a CMSNode exists with the given Guid
/// </summary>
/// <param name="uniqueID">Identifier</param>
/// <returns>True if there is a CMSNode with the given Guid</returns>
public static bool IsNode(Guid uniqueID)
{
    // The parameter name in the SQL text must match the one passed to
    // CreateParameter. The original used "@uniqueID" in the query but
    // "@uniqueId" for the parameter, which only works on providers that match
    // parameter names case-insensitively; use one consistent casing.
    return (SqlHelper.ExecuteScalar<int>("select count(id) from umbracoNode where uniqueID = @uniqueId", SqlHelper.CreateParameter("@uniqueId", uniqueID)) > 0);
}
/// <summary>
/// Method for checking if a CMSNode exists with the given id
/// </summary>
/// <param name="Id">Identifier</param>
/// <returns>True if there is a CMSNode with the given id</returns>
public static bool IsNode(int Id)
{
    return (SqlHelper.ExecuteScalar<int>("select count(id) from umbracoNode where id = @id", SqlHelper.CreateParameter("@id", Id)) > 0);
}
/// <summary>
/// Retrieve a list of the unique ids of all CMSNodes of the given object type.
/// </summary>
/// <param name="objectType">The object type identifier</param>
/// <returns>
/// A list of all unique identifiers which each are associated to a CMSNode
/// </returns>
public static Guid[] getAllUniquesFromObjectType(Guid objectType)
{
    // using ensures the reader is closed even if an exception occurs
    // (the original leaked the reader on error), and a generic list avoids
    // the ArrayList boxing/copy-out dance.
    var uniqueIds = new List<Guid>();
    using (IRecordsReader dr = SqlHelper.ExecuteReader("Select uniqueID from umbracoNode where nodeObjectType = @type",
        SqlHelper.CreateParameter("@type", objectType)))
    {
        while (dr.Read())
            uniqueIds.Add(dr.GetGuid("uniqueID"));
    }
    return uniqueIds.ToArray();
}
/// <summary>
/// Retrieve a list of the node ids of all CMSNodes of the given object type.
/// </summary>
/// <param name="objectType">The object type identifier</param>
/// <returns>
/// A list of all node ids which each are associated to a CMSNode
/// </returns>
public static int[] getAllUniqueNodeIdsFromObjectType(Guid objectType)
{
    var ids = new List<int>();
    using (IRecordsReader dr = SqlHelper.ExecuteReader("Select id from umbracoNode where nodeObjectType = @type",
        SqlHelper.CreateParameter("@type", objectType)))
    {
        while (dr.Read())
            ids.Add(dr.GetInt("id"));
    }
    return ids.ToArray();
}
/// <summary>
/// Retrieves the top level nodes (parentId = -1) in the hierarchy, ordered by sortOrder.
/// </summary>
/// <param name="ObjectType">The Guid identifier of the type of objects</param>
/// <returns>
/// A list of all top level nodes of the given object type
/// </returns>
public static Guid[] TopMostNodeIds(Guid ObjectType)
{
    var uniqueIds = new List<Guid>();
    using (IRecordsReader dr = SqlHelper.ExecuteReader("Select uniqueID from umbracoNode where nodeObjectType = @type And parentId = -1 order by sortOrder",
        SqlHelper.CreateParameter("@type", ObjectType)))
    {
        while (dr.Read())
            uniqueIds.Add(dr.GetGuid("uniqueID"));
    }
    return uniqueIds.ToArray();
}
#endregion
#region Protected static
/// <summary>
/// Creates a new node row in umbracoNode and returns the corresponding CMSNode.
/// Given the protected modifier, CMSNode.MakeNew can only be accessed by
/// derived classes, which by definition know their own objectType.
/// </summary>
/// <param name="parentId">The parent CMSNode id</param>
/// <param name="objectType">The objecttype identifier</param>
/// <param name="userId">Creator</param>
/// <param name="level">The level in the tree hierarchy</param>
/// <param name="text">The name of the CMSNode</param>
/// <param name="uniqueID">The unique identifier</param>
/// <returns>The newly created CMSNode</returns>
protected static CMSNode MakeNew(int parentId, Guid objectType, int userId, int level, string text, Guid uniqueID)
{
    CMSNode parent = null;
    string path = "";
    int sortOrder = 0;
    // level > 0 means this is not a root node: inherit path from the parent
    // and append at the end of the parent's sort order.
    if (level > 0)
    {
        parent = new CMSNode(parentId);
        sortOrder = GetNewDocumentSortOrder(parentId);
        path = parent.Path;
    }
    else
        path = "-1";
    // Ruben 8/1/2007: I replace this with a parameterized version.
    // NOTE: 'level++' is a postfix increment of a local just before it goes out
    // of scope, so it has no effect — the pre-increment value is what is
    // inserted. Left as-is to preserve the exact historical behavior.
    SqlHelper.ExecuteNonQuery("INSERT INTO umbracoNode(trashed, parentID, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text, createDate) VALUES(@trashed, @parentID, @nodeObjectType, @nodeUser, @level, @path, @sortOrder, @uniqueID, @text, @createDate)",
    SqlHelper.CreateParameter("@trashed", 0),
    SqlHelper.CreateParameter("@parentID", parentId),
    SqlHelper.CreateParameter("@nodeObjectType", objectType),
    SqlHelper.CreateParameter("@nodeUser", userId),
    SqlHelper.CreateParameter("@level", level++),
    SqlHelper.CreateParameter("@path", path),
    SqlHelper.CreateParameter("@sortOrder", sortOrder),
    SqlHelper.CreateParameter("@uniqueID", uniqueID),
    SqlHelper.CreateParameter("@text", text),
    SqlHelper.CreateParameter("@createDate", DateTime.Now));
    // Re-load the row we just inserted, then complete the path with our own id
    // (the id is only known after the insert).
    CMSNode retVal = new CMSNode(uniqueID);
    retVal.Path = path + "," + retVal.Id.ToString();
    // NH 4.7.1 duplicate permissions because of refactor: the new node inherits
    // the parent's per-user permissions.
    if (parent != null)
    {
        IEnumerable<Permission> permissions = Permission.GetNodePermissions(parent);
        foreach (Permission p in permissions)
        {
            Permission.MakeNew(User.GetUser(p.UserId), retVal, p.PermissionId);
        }
    }
    // Raise the after-new event so subscribers can react to the creation.
    NewEventArgs e = new NewEventArgs();
    retVal.FireAfterNew(e);
    return retVal;
}
// Returns the next free sortOrder under the given parent for Document-type
// nodes: MAX(existing sortOrder) + 1, or 0 when there are no document children.
// NOTE(review): when no rows match, MAX returns a single NULL row; the result
// then depends on how IRecordsReader.GetInt handles NULL — confirm it yields
// the intended 0/1 rather than throwing.
private static int GetNewDocumentSortOrder(int parentId)
{
    var sortOrder = 0;
    using (IRecordsReader dr = SqlHelper.ExecuteReader(
    "SELECT MAX(sortOrder) AS sortOrder FROM umbracoNode WHERE parentID = @parentID AND nodeObjectType = @GuidForNodesOfTypeDocument",
    SqlHelper.CreateParameter("@parentID", parentId),
    SqlHelper.CreateParameter("@GuidForNodesOfTypeDocument", Document._objectType)
    ))
    {
        while (dr.Read())
            sortOrder = dr.GetInt("sortOrder") + 1;
    }
    return sortOrder;
}
/// <summary>
/// Retrieve the ids of all CMSNodes of the given object type whose name starts
/// with the given letter.
/// </summary>
/// <param name="objectType">The object type identifier</param>
/// <param name="letter">First letter of the node name</param>
/// <returns>
/// The ids of all matching CMSNodes
/// </returns>
protected static int[] getUniquesFromObjectTypeAndFirstLetter(Guid objectType, char letter)
{
    var matchingIds = new List<int>();
    using (IRecordsReader reader = SqlHelper.ExecuteReader(
        "Select id from umbracoNode where nodeObjectType = @objectType AND text like @letter",
        SqlHelper.CreateParameter("@objectType", objectType),
        SqlHelper.CreateParameter("@letter", letter.ToString() + "%")))
    {
        while (reader.Read())
        {
            matchingIds.Add(reader.GetInt("id"));
        }
    }
    return matchingIds.ToArray();
}
/// <summary>
/// Gets the SQL helper used by all database access in this class,
/// delegating to the application-wide instance.
/// </summary>
/// <value>The SQL helper.</value>
protected static ISqlHelper SqlHelper
{
    get { return Application.SqlHelper; }
}
#endregion
#region Constructors
/// <summary>
/// Parameterless construction is not supported: a CMSNode must always be
/// created from an id, a unique id, a data reader or an entity.
/// </summary>
public CMSNode()
{
    throw new NotSupportedException();
}
/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class from a node id,
/// loading its data from the database.
/// </summary>
/// <param name="Id">The node id.</param>
public CMSNode(int Id)
    : this(Id, false)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class from a node id.
/// </summary>
/// <param name="id">The node id.</param>
/// <param name="noSetup">if set to <c>true</c>, the node data is not loaded from the database.</param>
public CMSNode(int id, bool noSetup)
{
    _id = id;
    if (noSetup == false)
    {
        setupNode();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class from its unique id,
/// loading its data from the database.
/// </summary>
/// <param name="uniqueID">The unique ID.</param>
public CMSNode(Guid uniqueID)
    : this(uniqueID, false)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CMSNode"/> class from its unique id,
/// resolving the integer id with a single lookup.
/// </summary>
/// <param name="uniqueID">The unique ID.</param>
/// <param name="noSetup">if set to <c>true</c>, the node data is not loaded from the database.</param>
public CMSNode(Guid uniqueID, bool noSetup)
{
    _id = SqlHelper.ExecuteScalar<int>("SELECT id FROM umbracoNode WHERE uniqueID = @uniqueId", SqlHelper.CreateParameter("@uniqueId", uniqueID));
    if (noSetup == false)
    {
        setupNode();
    }
}
/// <summary>
/// Initializes a new instance from an open data reader positioned on a node row;
/// no additional database calls are made.
/// </summary>
protected internal CMSNode(IRecordsReader reader)
{
    _id = reader.GetInt("id");
    PopulateCMSNodeFromReader(reader);
}
/// <summary>
/// Initializes a new instance backed by an IUmbracoEntity; property setters will
/// keep the entity in sync.
/// </summary>
protected internal CMSNode(IUmbracoEntity entity)
{
    _id = entity.Id;
    _entity = entity;
}
#endregion
#region Public Methods
/// <summary>
/// Two CMSNodes are considered equal when they carry the same id.
/// </summary>
/// <param name="obj">The object to compare against.</param>
/// <returns>True when <paramref name="obj"/> is a CMSNode with the same id.</returns>
public override bool Equals(object obj)
{
    var other = obj as CMSNode;
    return other != null && _id.Equals(other._id);
}
/// <summary>
/// Hash code is derived from the id, matching the equality contract above.
/// </summary>
/// <returns>The id's hash code.</returns>
public override int GetHashCode()
{
    return Id.GetHashCode();
}
/// <summary>
/// An xml representation of the CMSNode.
/// </summary>
/// <param name="xd">Xml document context</param>
/// <param name="Deep">If true the xml will append the CMSNode's child xml</param>
/// <returns>The CMSNode xml representation</returns>
public virtual XmlNode ToXml(XmlDocument xd, bool Deep)
{
    XmlNode node = xd.CreateNode(XmlNodeType.Element, "node", "");
    XmlPopulate(xd, node, Deep);
    return node;
}
/// <summary>
/// Returns the preview xml for this node, generating and persisting it first
/// if no preview xml exists yet.
/// </summary>
/// <param name="xd">Xml document context the returned node is imported into</param>
public virtual XmlNode ToPreviewXml(XmlDocument xd)
{
    bool alreadyStored = PreviewExists(UniqueId);
    if (alreadyStored == false)
    {
        SavePreviewXml(ToXml(xd, false), UniqueId);
    }
    return GetPreviewXml(xd, UniqueId);
}
/// <summary>
/// Loads the preview xml rows for this node's subtree (or only its children),
/// ordered top-down by level and sortOrder.
/// </summary>
/// <param name="childrenOnly">If true, exclude this node itself and match only paths below it.</param>
/// <returns>The matching preview nodes.</returns>
public virtual List<CMSPreviewNode> GetNodesForPreview(bool childrenOnly)
{
    List<CMSPreviewNode> nodes = new List<CMSPreviewNode>();
    // Fix: umbracoNode.uniqueID must be in the select list — the reader loop
    // below calls dr.GetGuid("uniqueID"), which fails if the column is missing
    // (the original query never selected it).
    string sql = @"
select umbracoNode.id, umbracoNode.uniqueID, umbracoNode.parentId, umbracoNode.level, umbracoNode.sortOrder, cmsPreviewXml.xml from umbracoNode
inner join cmsPreviewXml on cmsPreviewXml.nodeId = umbracoNode.id
where trashed = 0 and path like '{0}'
order by level,sortOrder";
    // The path expression is built from this node's Path (comma-separated
    // integer ids), so formatting it into the SQL text cannot inject SQL.
    string pathExp = childrenOnly ? Path + ",%" : Path;
    // using ensures the reader is closed even if row materialization throws.
    using (IRecordsReader dr = SqlHelper.ExecuteReader(String.Format(sql, pathExp)))
    {
        while (dr.Read())
            nodes.Add(new CMSPreviewNode(dr.GetInt("id"), dr.GetGuid("uniqueID"), dr.GetInt("parentId"), dr.GetShort("level"), dr.GetInt("sortOrder"), dr.GetString("xml")));
    }
    return nodes;
}
/// <summary>
/// Used to persist object changes to the database. In Version3.0 it's just a
/// stub for future compatibility: it only raises the before/after save events.
/// </summary>
public virtual void Save()
{
    var e = new SaveEventArgs();
    FireBeforeSave(e);
    if (e.Cancel)
        return;
    //In the future there will be SQL stuff happening here...
    FireAfterSave(e);
}
/// <summary>
/// Renders the node's identity (id, text, parent id) for diagnostics, falling
/// back to the default representation for uninitialized instances.
/// </summary>
public override string ToString()
{
    bool hasIdentity = Id != int.MinValue || !string.IsNullOrEmpty(Text);
    if (hasIdentity == false)
    {
        return base.ToString();
    }
    return string.Format("{{ Id: {0}, Text: {1}, ParentId: {2} }}",
        Id,
        Text,
        _parentid
    );
}
/// <summary>
/// Moves this node (and, recursively, its children) under the given new parent:
/// updates sortOrder/level/path, maintains the trashed flag when entering or
/// leaving a recycle bin, and regenerates xml for document/media parents.
/// </summary>
/// <param name="newParent">The node that becomes the new parent.</param>
private void Move(CMSNode newParent)
{
    MoveEventArgs e = new MoveEventArgs();
    FireBeforeMove(e);
    if (!e.Cancel)
    {
        //first we need to establish if the node already exists under the newParent node
        //var isNewParentInPath = (Path.Contains("," + newParent.Id + ","));
        //if it's the same newParent, we can save some SQL calls since we know these wont change.
        //level and path might change even if it's the same newParent because the newParent could be moving somewhere.
        if (ParentId != newParent.Id)
        {
            // Append at the end of the new parent's children.
            int maxSortOrder = SqlHelper.ExecuteScalar<int>("select coalesce(max(sortOrder),0) from umbracoNode where parentid = @parentId",
                SqlHelper.CreateParameter("@parentId", newParent.Id));
            this.Parent = newParent;
            this.sortOrder = maxSortOrder + 1;
        }
        //detect if we have moved, then update the level and path
        // issue: http://issues.umbraco.org/issue/U4-1579
        if (this.Path != newParent.Path + "," + this.Id.ToString())
        {
            this.Level = newParent.Level + 1;
            this.Path = newParent.Path + "," + this.Id.ToString();
        }
        //this code block should not be here but since the class structure is very poor and doesn't use
        //overrides (instead using shadows/new) for the Children property, when iterating over the children
        //and calling Move(), the super classes overridden OnMove or Move methods never get fired, so
        //we now need to hard code this here :(
        if (Path.Contains("," + ((int)RecycleBin.RecycleBinType.Content).ToString() + ",")
            || Path.Contains("," + ((int)RecycleBin.RecycleBinType.Media).ToString() + ","))
        {
            //if we've moved this to the recyle bin, we need to update the trashed property
            if (!IsTrashed) IsTrashed = true; //don't update if it's not necessary
        }
        else
        {
            if (IsTrashed) IsTrashed = false; //don't update if it's not necessary
        }
        //make sure the node type is a document/media, if it is a recycle bin then this will not be equal
        if (!IsTrashed && newParent.nodeObjectType == Document._objectType)
        {
            // regenerate the xml of the current document
            var movedDocument = new Document(this.Id);
            movedDocument.XmlGenerate(new XmlDocument());
            //regenerate the xml for the newParent node
            var parentDocument = new Document(newParent.Id);
            parentDocument.XmlGenerate(new XmlDocument());
        }
        else if (!IsTrashed && newParent.nodeObjectType == Media._objectType)
        {
            //regenerate the xml for the newParent node
            var m = new Media(newParent.Id);
            m.XmlGenerate(new XmlDocument());
        }
        // Recursively move children so their level/path/trashed state follow.
        var children = this.Children;
        foreach (CMSNode c in children)
        {
            c.Move(this);
        }
        //TODO: Properly refactor this, we're just clearing the cache so the changes will also be visible in the backoffice
        InMemoryCacheProvider.Current.Clear();
        FireAfterMove(e);
    }
}
/// <summary>
/// Moves the CMSNode from the current position in the hierarchy to the target
/// </summary>
/// <param name="newParentId">Target CMSNode id</param>
[Obsolete("Obsolete, Use Umbraco.Core.Services.ContentService.Move() or Umbraco.Core.Services.MediaService.Move()", false)]
public virtual void Move(int newParentId)
{
    Move(new CMSNode(newParentId));
}
/// <summary>
/// Deletes this node and its directly associated data: relations, tasks,
/// notifications, permissions and tag associations are removed before the
/// umbracoNode row itself is deleted. Cancelable via the BeforeDelete event.
/// </summary>
public virtual void delete()
{
    DeleteEventArgs e = new DeleteEventArgs();
    FireBeforeDelete(e);
    if (!e.Cancel)
    {
        // remove relations
        var rels = Relations;
        foreach (relation.Relation rel in rels)
        {
            rel.Delete();
        }
        //removes tasks
        foreach (Task t in Tasks)
        {
            t.Delete();
        }
        //remove notifications
        Notification.DeleteNotifications(this);
        //remove permissions
        Permission.DeletePermissions(this);
        //removes tag associations (i know the key is set to cascade but do it anyways)
        Tag.RemoveTagsFromNode(this.Id);
        // Finally remove the node row itself, keyed by unique id.
        SqlHelper.ExecuteNonQuery("DELETE FROM umbracoNode WHERE uniqueID= @uniqueId", SqlHelper.CreateParameter("@uniqueId", _uniqueID));
        FireAfterDelete(e);
    }
}
/// <summary>
/// Does the current CMSNode have any child nodes. The answer is looked up
/// lazily with one COUNT query and cached for subsequent reads.
/// </summary>
/// <value>
/// <c>true</c> if this instance has children; otherwise, <c>false</c>.
/// </value>
public virtual bool HasChildren
{
    get
    {
        if (_hasChildrenInitialized == false)
        {
            var childCount = SqlHelper.ExecuteScalar<int>("select count(id) from umbracoNode where ParentId = @id",
                SqlHelper.CreateParameter("@id", Id));
            // Route through the setter so the initialized flag is recorded too.
            HasChildren = childCount > 0;
        }
        return _hasChildren;
    }
    set
    {
        _hasChildrenInitialized = true;
        _hasChildren = value;
    }
}
/// <summary>
/// Returns all descendant nodes from this node (rows whose path contains this
/// node's id), each populated from the single descendant query.
/// </summary>
/// <returns></returns>
/// <remarks>
/// This doesn't return a strongly typed IEnumerable object so that we can
/// override it in super classes, and since this class isn't a generic
/// (though it should be) this is not strongly typed.
/// </remarks>
public virtual IEnumerable GetDescendants()
{
    var result = new List<CMSNode>();
    using (IRecordsReader reader = SqlHelper.ExecuteReader(string.Format(SqlDescendants, Id)))
    {
        while (reader.Read())
        {
            // noSetup avoids a second lookup; the row we already have holds
            // everything PopulateCMSNodeFromReader needs.
            var descendant = new CMSNode(reader.GetInt("id"), true);
            descendant.PopulateCMSNodeFromReader(reader);
            result.Add(descendant);
        }
    }
    return result;
}
#endregion
#region Public properties
/// <summary>
/// Determines if the node is in the recycle bin.
/// This is only relevant for node types that support a recycle bin (such as
/// Document/Media). Lazily read from the database and cached; the setter
/// writes through immediately.
/// </summary>
public virtual bool IsTrashed
{
    get
    {
        if (_isTrashed == null)
        {
            var trashed = SqlHelper.ExecuteScalar<object>("SELECT trashed FROM umbracoNode where id=@id",
                SqlHelper.CreateParameter("@id", this.Id));
            _isTrashed = Convert.ToBoolean(trashed);
        }
        return _isTrashed.Value;
    }
    set
    {
        _isTrashed = value;
        SqlHelper.ExecuteNonQuery("update umbracoNode set trashed = @trashed where id = @id",
            SqlHelper.CreateParameter("@trashed", value),
            SqlHelper.CreateParameter("@id", this.Id));
    }
}
/// <summary>
/// Gets or sets the sort order. The setter writes through to the database and
/// keeps any backing IUmbracoEntity in sync.
/// </summary>
/// <value>The sort order.</value>
public virtual int sortOrder
{
    get { return _sortOrder; }
    set
    {
        _sortOrder = value;
        // Parameterized instead of string-built SQL, consistent with the rest
        // of this class (the old form also quoted the int as a string literal).
        SqlHelper.ExecuteNonQuery("update umbracoNode set sortOrder = @sortOrder where id = @id",
            SqlHelper.CreateParameter("@sortOrder", value),
            SqlHelper.CreateParameter("@id", this.Id));
        if (_entity != null)
            _entity.SortOrder = value;
    }
}
/// <summary>
/// Gets or sets the create date time. The setter writes through to the database.
/// </summary>
/// <value>The create date time.</value>
public virtual DateTime CreateDateTime
{
    get { return _createDate; }
    set
    {
        _createDate = value;
        // Fully parameterized (the original concatenated the id into the SQL).
        SqlHelper.ExecuteNonQuery("update umbracoNode set createDate = @createDate where id = @id",
            SqlHelper.CreateParameter("@createDate", _createDate),
            SqlHelper.CreateParameter("@id", this.Id));
    }
}
/// <summary>
/// Gets the creator, resolved by the stored creator user id.
/// </summary>
/// <value>The user.</value>
public BusinessLogic.User User
{
    get
    {
        return BusinessLogic.User.GetUser(_userId);
    }
}
/// <summary>
/// Gets the node id.
/// </summary>
/// <value>The id.</value>
public int Id
{
    get { return _id; }
}
/// <summary>
/// Gets the parent id of the node (internally settable so moves can update it
/// without an extra database round-trip).
/// </summary>
public virtual int ParentId
{
    get { return _parentid; }
    internal set { _parentid = value; }
}
/// <summary>
/// Given the hierarchical tree structure a CMSNode has only one parent but can
/// have many children. The setter writes the new parent id through to the
/// database and keeps any backing IUmbracoEntity in sync.
/// </summary>
/// <value>The parent node.</value>
/// <exception cref="ArgumentException">Thrown by the getter for top-level nodes (Level == 1).</exception>
public CMSNode Parent
{
    get
    {
        if (Level == 1) throw new ArgumentException("No newParent node");
        return new CMSNode(_parentid);
    }
    set
    {
        _parentid = value.Id;
        // Parameterized instead of string-concatenated SQL, consistent with
        // the rest of this class.
        SqlHelper.ExecuteNonQuery("update umbracoNode set parentId = @parentId where id = @id",
            SqlHelper.CreateParameter("@parentId", value.Id),
            SqlHelper.CreateParameter("@id", this.Id));
        if (_entity != null)
            _entity.ParentId = value.Id;
    }
}
/// <summary>
/// A comma separated string consisting of integer node ids that indicates the
/// path from the topmost node to the given node. The setter writes through to
/// the database and keeps any backing IUmbracoEntity in sync.
/// </summary>
/// <value>The path.</value>
public virtual string Path
{
    get { return _path; }
    set
    {
        _path = value;
        // Parameterized instead of concatenating the path string into the SQL
        // text — safe regardless of the value's content and consistent with
        // the rest of this class.
        SqlHelper.ExecuteNonQuery("update umbracoNode set path = @path where id = @id",
            SqlHelper.CreateParameter("@path", _path),
            SqlHelper.CreateParameter("@id", this.Id));
        if (_entity != null)
            _entity.Path = value;
    }
}
/// <summary>
/// Returns an integer value that indicates at which level of the tree structure
/// the given node is. The setter writes through to the database and keeps any
/// backing IUmbracoEntity in sync.
/// </summary>
/// <value>The level.</value>
public virtual int Level
{
    get { return _level; }
    set
    {
        _level = value;
        // Parameterized instead of string-concatenated SQL.
        SqlHelper.ExecuteNonQuery("update umbracoNode set level = @level where id = @id",
            SqlHelper.CreateParameter("@level", _level),
            SqlHelper.CreateParameter("@id", this.Id));
        if (_entity != null)
            _entity.Level = value;
    }
}
/// <summary>
/// All CMSNodes have an object type (ie. Webpage, StyleSheet etc.), used to
/// distinguish between the different object types for fast loading of children
/// to the tree.
/// </summary>
/// <value>The type of the node object.</value>
public Guid nodeObjectType
{
    get { return _nodeObjectType; }
}
/// <summary>
/// Besides the hierarchy it's possible to relate one CMSNode to another; use
/// this for alternative, non-strict hierarchy.
/// </summary>
/// <value>The relations this node participates in.</value>
public relation.Relation[] Relations
{
    get { return relation.Relation.GetRelations(this.Id); }
}
/// <summary>
/// Returns all tasks associated with this node.
/// </summary>
public Tasks Tasks
{
    get { return Task.GetTasks(this.Id); }
}
/// <summary>
/// The number of direct children of this node, across all object types.
/// </summary>
public virtual int ChildCount
{
    get
    {
        return SqlHelper.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM umbracoNode where ParentID = @parentId",
            SqlHelper.CreateParameter("@parentId", this.Id));
    }
}
/// <summary>
/// The basic recursive tree pattern: the direct children of this node that
/// share its object type, ordered by sortOrder.
/// </summary>
/// <value>The children.</value>
public virtual BusinessLogic.console.IconI[] Children
{
    get
    {
        var children = new List<CMSNode>();
        using (IRecordsReader dr = SqlHelper.ExecuteReader("SELECT id, createDate, trashed, parentId, nodeObjectType, nodeUser, level, path, sortOrder, uniqueID, text FROM umbracoNode WHERE ParentID = @ParentID AND nodeObjectType = @type order by sortOrder",
            SqlHelper.CreateParameter("@type", this.nodeObjectType),
            // Fix: the parameter name passed to CreateParameter was missing the
            // "@" prefix, inconsistent with the query text and every other call
            // in this class.
            SqlHelper.CreateParameter("@ParentID", this.Id)))
        {
            while (dr.Read())
            {
                children.Add(new CMSNode(dr));
            }
        }
        return children.ToArray();
    }
}
/// <summary>
/// Retrieve all direct children regardless of object type.
/// Use with care: each child is loaded with its own database round-trip.
/// </summary>
/// <value>The children of all object types.</value>
public BusinessLogic.console.IconI[] ChildrenOfAllObjectTypes
{
    get
    {
        // Collect the ids first (disposing the reader before opening the
        // per-child lookups), then materialize the nodes.
        var childIds = new List<int>();
        using (IRecordsReader dr = SqlHelper.ExecuteReader("select id from umbracoNode where ParentID = @parentId order by sortOrder",
            SqlHelper.CreateParameter("@parentId", this.Id)))
        {
            while (dr.Read())
                childIds.Add(dr.GetInt("Id"));
        }
        var retval = new CMSNode[childIds.Count];
        for (int i = 0; i < childIds.Count; i++)
            retval[i] = new CMSNode(childIds[i]);
        return retval;
    }
}
#region IconI members
// Unique identifier of the given node
/// <summary>
/// Unique identifier of the CMSNode, used when locating data.
/// </summary>
public Guid UniqueId
{
    get { return _uniqueID; }
}
/// <summary>
/// Human readable name/label. The setter writes through to the database and
/// keeps any backing IUmbracoEntity in sync.
/// NOTE(review): the database receives value.Trim() while the in-memory field
/// and the entity keep the untrimmed value — confirm this asymmetry is
/// intentional.
/// </summary>
public virtual string Text
{
    get { return _text; }
    set
    {
        _text = value;
        SqlHelper.ExecuteNonQuery("UPDATE umbracoNode SET text = @text WHERE id = @id",
            SqlHelper.CreateParameter("@text", value.Trim()),
            SqlHelper.CreateParameter("@id", this.Id));
        if (_entity != null)
            _entity.Name = value;
    }
}
/// <summary>
/// The menu items used in the tree view; always empty here.
/// </summary>
[Obsolete("this is not used anywhere")]
public virtual BusinessLogic.console.MenuItemI[] MenuItems
{
    get { return new BusinessLogic.console.MenuItemI[0]; }
}
/// <summary>
/// Not implemented, always returns "about:blank".
/// </summary>
public virtual string DefaultEditorURL
{
    get { return "about:blank"; }
}
/// <summary>
/// The icon in the tree.
/// </summary>
public virtual string Image
{
    get { return m_image; }
    set { m_image = value; }
}
/// <summary>
/// The "open/active" icon in the tree; empty by default.
/// </summary>
public virtual string OpenImage
{
    get { return ""; }
}
#endregion
#endregion
#region Protected methods
/// <summary>
/// Allows inheritors to set the underlying text field without persisting the
/// change to the database (keeps any backing IUmbracoEntity in sync).
/// </summary>
/// <param name="txt">The new text value.</param>
protected void SetText(string txt)
{
    _text = txt;
    if (_entity != null)
    {
        _entity.Name = txt;
    }
}
/// <summary>
/// Loads the internal data of the CMSNode from the database; used by the
/// various constructors.
/// </summary>
/// <exception cref="ArgumentException">Thrown when no row exists for this id.</exception>
protected virtual void setupNode()
{
    using (IRecordsReader reader = SqlHelper.ExecuteReader(SqlSingle,
        SqlHelper.CreateParameter("@id", this.Id)))
    {
        if (reader.Read() == false)
        {
            throw new ArgumentException(string.Format("No node exists with id '{0}'", Id));
        }
        PopulateCMSNodeFromReader(reader);
    }
}
/// <summary>
/// Sets up the node for the content tree; this makes no database calls, it
/// just assigns the underlying fields directly.
/// </summary>
/// <param name="uniqueID">The unique ID.</param>
/// <param name="nodeObjectType">Type of the node object.</param>
/// <param name="leve">The level (parameter name is a historical typo for "level"; kept for compatibility with named-argument callers).</param>
/// <param name="parentId">The parent id.</param>
/// <param name="userId">The creator user id.</param>
/// <param name="path">The path.</param>
/// <param name="text">The text.</param>
/// <param name="createDate">The create date.</param>
/// <param name="hasChildren">if set to <c>true</c> [has children].</param>
protected void SetupNodeForTree(Guid uniqueID, Guid nodeObjectType, int leve, int parentId, int userId, string path, string text,
DateTime createDate, bool hasChildren)
{
    _uniqueID = uniqueID;
    _nodeObjectType = nodeObjectType;
    _level = leve;
    _parentid = parentId;
    _userId = userId;
    _path = path;
    _text = text;
    _createDate = createDate;
    // Route through the property so the initialized flag is recorded too.
    HasChildren = hasChildren;
}
/// <summary>
/// Updates the in-memory path for the content tree without persisting it.
/// </summary>
/// <param name="Path">The path.</param>
protected void UpdateTempPathForTree(string Path)
{
    this._path = Path;
}
/// <summary>
/// Loads the stored preview xml for this node/version and imports it into the
/// supplied xml document context.
/// </summary>
/// <param name="xd">The document the returned node is imported into.</param>
/// <param name="version">The version id of the preview xml.</param>
protected virtual XmlNode GetPreviewXml(XmlDocument xd, Guid version)
{
    var previewDoc = new XmlDocument();
    using (XmlReader xmlRdr = SqlHelper.ExecuteXmlReader(
        "select xml from cmsPreviewXml where nodeID = @nodeId and versionId = @versionId",
        SqlHelper.CreateParameter("@nodeId", Id),
        SqlHelper.CreateParameter("@versionId", version)))
    {
        previewDoc.Load(xmlRdr);
    }
    return xd.ImportNode(previewDoc.FirstChild, true);
}
/// <summary>
/// Whether a preview xml row exists for this node and the given version.
/// </summary>
protected internal virtual bool PreviewExists(Guid versionId)
{
    int matches = SqlHelper.ExecuteScalar<int>("SELECT COUNT(nodeId) FROM cmsPreviewXml WHERE nodeId=@nodeId and versionId = @versionId",
        SqlHelper.CreateParameter("@nodeId", Id), SqlHelper.CreateParameter("@versionId", versionId));
    return matches != 0;
}
/// <summary>
/// Inserts or updates the preview xml for this node/version.
/// This needs to be synchronized since we are doing multiple sql operations in
/// one method (exists check followed by insert/update).
/// </summary>
/// <param name="x">The xml to store.</param>
/// <param name="versionId">The version id the xml belongs to.</param>
[MethodImpl(MethodImplOptions.Synchronized)]
protected void SavePreviewXml(XmlNode x, Guid versionId)
{
    string sql;
    if (PreviewExists(versionId))
    {
        sql = "UPDATE cmsPreviewXml SET xml = @xml, timestamp = @timestamp WHERE nodeId=@nodeId AND versionId = @versionId";
    }
    else
    {
        sql = "INSERT INTO cmsPreviewXml(nodeId, versionId, timestamp, xml) VALUES (@nodeId, @versionId, @timestamp, @xml)";
    }
    SqlHelper.ExecuteNonQuery(sql,
        SqlHelper.CreateParameter("@nodeId", Id),
        SqlHelper.CreateParameter("@versionId", versionId),
        SqlHelper.CreateParameter("@timestamp", DateTime.Now),
        SqlHelper.CreateParameter("@xml", x.OuterXml));
}
/// <summary>
/// Populates this node's fields from a data reader positioned on a umbracoNode
/// row (no database calls of its own).
/// </summary>
/// <param name="dr">Reader positioned on the row to read from.</param>
protected void PopulateCMSNodeFromReader(IRecordsReader dr)
{
    // testing purposes only > original umbraco data hasn't any unique values ;)
    // And we need to have a newParent in order to create a new node ..
    // Should automatically add an unique value if no exists (or throw a decent exception)
    if (dr.IsNull("uniqueID")) _uniqueID = Guid.NewGuid();
    else _uniqueID = dr.GetGuid("uniqueID");
    _nodeObjectType = dr.GetGuid("nodeObjectType");
    _level = dr.GetShort("level");
    _path = dr.GetString("path");
    _parentid = dr.GetInt("parentId");
    _text = dr.GetString("text");
    _sortOrder = dr.GetInt("sortOrder");
    _userId = dr.GetInt("nodeUser");
    _createDate = dr.GetDateTime("createDate");
    _isTrashed = dr.GetBoolean("trashed");
}
/// <summary>
/// Populates this node's fields from an IUmbracoEntity and keeps a reference to
/// the entity so property setters stay in sync with it.
/// </summary>
/// <param name="content">The entity to copy state from.</param>
/// <param name="objectType">The node object type to assign (not carried by the entity).</param>
internal protected void PopulateCMSNodeFromUmbracoEntity(IUmbracoEntity content, Guid objectType)
{
    _uniqueID = content.Key;
    _nodeObjectType = objectType;
    _level = content.Level;
    _path = content.Path;
    _parentid = content.ParentId;
    _text = content.Name;
    _sortOrder = content.SortOrder;
    _userId = content.CreatorId;
    _createDate = content.CreateDate;
    _isTrashed = content.Trashed;
    _entity = content;
}
#endregion
#region Private Methods
/// <summary>
/// Writes this node's standard attributes onto the given xml element and,
/// when Deep is true, appends the xml of all children.
/// </summary>
/// <param name="xd">Owning xml document.</param>
/// <param name="x">The element to populate.</param>
/// <param name="Deep">Whether to recurse into children.</param>
private void XmlPopulate(XmlDocument xd, XmlNode x, bool Deep)
{
    // attributes
    x.Attributes.Append(xmlHelper.addAttribute(xd, "id", this.Id.ToString()));
    // Top-level nodes (Level <= 1) have no real parent row; emit -1 instead of
    // hitting the Parent getter (which throws at Level 1).
    if (this.Level > 1)
        x.Attributes.Append(xmlHelper.addAttribute(xd, "parentID", this.Parent.Id.ToString()));
    else
        x.Attributes.Append(xmlHelper.addAttribute(xd, "parentID", "-1"));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "level", this.Level.ToString()));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "writerID", this.User.Id.ToString()));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "sortOrder", this.sortOrder.ToString()));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "createDate", this.CreateDateTime.ToString("s")));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "nodeName", this.Text));
    x.Attributes.Append(xmlHelper.addAttribute(xd, "path", this.Path));
    if (Deep)
    {
        //store children array here because iterating over an Array property object is very inefficient.
        // NOTE(review): each child is cast to Content here, so Deep serialization
        // presumably only runs for content subclasses — confirm against callers.
        var children = this.Children;
        foreach (Content c in children)
            x.AppendChild(c.ToXml(xd, true));
    }
}
#endregion
#region Events
/// <summary>
/// Calls the subscribers of a cancelable event handler,
/// stopping at the event handler which cancels the event (if any).
/// </summary>
/// <typeparam name="T">Type of the event arguments.</typeparam>
/// <param name="cancelableEvent">The event to fire.</param>
/// <param name="sender">Sender of the event.</param>
/// <param name="eventArgs">Event arguments.</param>
protected virtual void FireCancelableEvent<T>(EventHandler<T> cancelableEvent, object sender, T eventArgs) where T : CancelEventArgs
{
    if (cancelableEvent != null)
    {
        // Invoke through the typed delegate rather than DynamicInvoke:
        // DynamicInvoke wraps any exception thrown by a handler in a
        // TargetInvocationException (hiding the real error from callers) and
        // is considerably slower due to reflection.
        foreach (EventHandler<T> invocation in cancelableEvent.GetInvocationList())
        {
            invocation(sender, eventArgs);
            if (eventArgs.Cancel)
                break;
        }
    }
}
/// <summary>
/// Occurs before a node is saved.
/// </summary>
public static event EventHandler<SaveEventArgs> BeforeSave;
/// <summary>
/// Raises the <see cref="E:BeforeSave"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireBeforeSave(SaveEventArgs e)
{
    FireCancelableEvent(BeforeSave, this, e);
}
/// <summary>
/// Occurs after a node is saved.
/// </summary>
public static event EventHandler<SaveEventArgs> AfterSave;
/// <summary>
/// Raises the <see cref="E:AfterSave"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterSave(SaveEventArgs e)
{
    // Copy the static event delegate to a local before the null check so a
    // concurrent unsubscribe between check and invoke cannot cause a
    // NullReferenceException.
    var handler = AfterSave;
    if (handler != null)
        handler(this, e);
}
/// <summary>
/// Occurs after a new node is created.
/// </summary>
public static event EventHandler<NewEventArgs> AfterNew;
/// <summary>
/// Raises the <see cref="E:AfterNew"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterNew(NewEventArgs e)
{
    // Snapshot to a local to avoid the check-then-invoke race on the static event.
    var handler = AfterNew;
    if (handler != null)
        handler(this, e);
}
/// <summary>
/// Occurs before a node is deleted.
/// </summary>
public static event EventHandler<DeleteEventArgs> BeforeDelete;
/// <summary>
/// Raises the <see cref="E:BeforeDelete"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireBeforeDelete(DeleteEventArgs e)
{
    FireCancelableEvent(BeforeDelete, this, e);
}
/// <summary>
/// Occurs after a node is deleted.
/// </summary>
public static event EventHandler<DeleteEventArgs> AfterDelete;
/// <summary>
/// Raises the <see cref="E:AfterDelete"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterDelete(DeleteEventArgs e)
{
    // Snapshot to a local to avoid the check-then-invoke race on the static event.
    var handler = AfterDelete;
    if (handler != null)
        handler(this, e);
}
/// <summary>
/// Occurs before a node is moved.
/// </summary>
public static event EventHandler<MoveEventArgs> BeforeMove;
/// <summary>
/// Raises the <see cref="E:BeforeMove"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireBeforeMove(MoveEventArgs e)
{
    FireCancelableEvent(BeforeMove, this, e);
}
/// <summary>
/// Occurs after a node is moved.
/// </summary>
public static event EventHandler<MoveEventArgs> AfterMove;
/// <summary>
/// Raises the <see cref="E:AfterMove"/> event.
/// </summary>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected virtual void FireAfterMove(MoveEventArgs e)
{
    // Snapshot to a local to avoid the check-then-invoke race on the static event.
    var handler = AfterMove;
    if (handler != null)
        handler(this, e);
}
#endregion
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Security;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.Region.CoreModules.World.Estate
{
public class EstateManagementModule : IEstateModule
{
// Per-type log4net logger.
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
// Delegate used to run avatar name lookups asynchronously (see LookupUUID / LookupUUIDsAsync).
private delegate void LookupUUIDS(List<UUID> uuidLst);
// The scene this module instance is attached to (set in Initialise).
private Scene m_scene;
// The single in-progress terrain upload, or null when no upload is active.
private EstateTerrainXferHandler TerrainUploader;
#region Packet Data Responders
/// <summary>
/// Sends the full estate data set (estate info plus the manager, access,
/// group and ban lists) to one client, e.g. when the estate dialog opens.
/// </summary>
/// <param name="remote_client">Client requesting the estate data.</param>
/// <param name="invoice">Transaction id the viewer uses to correlate replies.</param>
private void sendDetailedEstateData(IClientAPI remote_client, UUID invoice)
{
uint sun = 0;
// Only send an explicit sun position when the estate overrides global time.
// offset 0x1800 / scale 1024 is the viewer's fixed-point sun encoding.
if (!m_scene.RegionInfo.EstateSettings.UseGlobalTime)
sun=(uint)(m_scene.RegionInfo.EstateSettings.SunPosition*1024.0) + 0x1800;
UUID estateOwner;
// Fall back to the master avatar when no estate owner is configured.
if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero)
estateOwner = m_scene.RegionInfo.EstateSettings.EstateOwner;
else
estateOwner = m_scene.RegionInfo.MasterAvatarAssignedUUID;
// Present gods as the owner so the viewer enables owner-only controls.
if (m_scene.Permissions.IsGod(remote_client.AgentId))
estateOwner = remote_client.AgentId;
remote_client.SendDetailedEstateData(invoice,
m_scene.RegionInfo.EstateSettings.EstateName,
m_scene.RegionInfo.EstateSettings.EstateID,
m_scene.RegionInfo.EstateSettings.ParentEstateID,
GetEstateFlags(),
sun,
m_scene.RegionInfo.RegionSettings.Covenant,
m_scene.RegionInfo.EstateSettings.AbuseEmail,
estateOwner);
// Send each access list in its own packet, tagged with its codex value.
remote_client.SendEstateList(invoice,
(int)Constants.EstateAccessCodex.EstateManagers,
m_scene.RegionInfo.EstateSettings.EstateManagers,
m_scene.RegionInfo.EstateSettings.EstateID);
remote_client.SendEstateList(invoice,
(int)Constants.EstateAccessCodex.AccessOptions,
m_scene.RegionInfo.EstateSettings.EstateAccess,
m_scene.RegionInfo.EstateSettings.EstateID);
remote_client.SendEstateList(invoice,
(int)Constants.EstateAccessCodex.AllowedGroups,
m_scene.RegionInfo.EstateSettings.EstateGroups,
m_scene.RegionInfo.EstateSettings.EstateID);
remote_client.SendBannedUserList(invoice,
m_scene.RegionInfo.EstateSettings.EstateBans,
m_scene.RegionInfo.EstateSettings.EstateID);
}
/// <summary>
/// Applies the viewer's "set region info" flags to the region settings,
/// persists them and broadcasts the updated region info to all clients.
/// </summary>
private void estateSetRegionInfoHandler(bool blockTerraform, bool noFly, bool allowDamage, bool blockLandResell, int maxAgents, float objectBonusFactor,
                                        int matureLevel, bool restrictPushObject, bool allowParcelChanges)
{
    // Copy the viewer-supplied flags straight into the region settings.
    m_scene.RegionInfo.RegionSettings.BlockTerraform = blockTerraform;
    m_scene.RegionInfo.RegionSettings.BlockFly = noFly;
    m_scene.RegionInfo.RegionSettings.AllowDamage = allowDamage;
    // The viewer sends "block resell"; the setting stores the inverse.
    m_scene.RegionInfo.RegionSettings.AllowLandResell = !blockLandResell;
    m_scene.RegionInfo.RegionSettings.AgentLimit = (byte) maxAgents;
    m_scene.RegionInfo.RegionSettings.ObjectBonus = objectBonusFactor;
    // Collapse the viewer maturity value onto the stored 0/1/2 scale.
    if (matureLevel <= 13)
        m_scene.RegionInfo.RegionSettings.Maturity = 0;
    else if (matureLevel <= 21)
        m_scene.RegionInfo.RegionSettings.Maturity = 1;
    else
        m_scene.RegionInfo.RegionSettings.Maturity = 2;
    m_scene.RegionInfo.RegionSettings.RestrictPushing = restrictPushObject;
    m_scene.RegionInfo.RegionSettings.AllowLandJoinDivide = allowParcelChanges;
    m_scene.RegionInfo.RegionSettings.Save();
    sendRegionInfoPacketToAll();
}
/// <summary>
/// Stores the given texture UUID in one of the four terrain texture slots,
/// saves the settings and broadcasts updated region info.
/// </summary>
/// <param name="remoteClient">Requesting client (not used here).</param>
/// <param name="corner">Texture slot index, 0-3; other values are ignored.</param>
/// <param name="texture">Texture asset UUID; UUID.Zero is rejected.</param>
public void setEstateTerrainBaseTexture(IClientAPI remoteClient, int corner, UUID texture)
{
if (texture == UUID.Zero)
return;
switch (corner)
{
case 0:
m_scene.RegionInfo.RegionSettings.TerrainTexture1 = texture;
break;
case 1:
m_scene.RegionInfo.RegionSettings.TerrainTexture2 = texture;
break;
case 2:
m_scene.RegionInfo.RegionSettings.TerrainTexture3 = texture;
break;
case 3:
m_scene.RegionInfo.RegionSettings.TerrainTexture4 = texture;
break;
}
// Persist and notify every connected client.
m_scene.RegionInfo.RegionSettings.Save();
sendRegionInfoPacketToAll();
}
/// <summary>
/// Sets the low/high terrain texture blend elevations for one region corner
/// (0=SW, 1=NW, 2=SE, 3=NE), saves and broadcasts updated region info.
/// </summary>
/// <param name="client">Requesting client (not used here).</param>
/// <param name="corner">Corner index, 0-3; other values are ignored.</param>
/// <param name="lowValue">Elevation where the low texture starts.</param>
/// <param name="highValue">Elevation where the high texture takes over.</param>
public void setEstateTerrainTextureHeights(IClientAPI client, int corner, float lowValue, float highValue)
{
switch (corner)
{
case 0:
m_scene.RegionInfo.RegionSettings.Elevation1SW = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2SW = highValue;
break;
case 1:
m_scene.RegionInfo.RegionSettings.Elevation1NW = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2NW = highValue;
break;
case 2:
m_scene.RegionInfo.RegionSettings.Elevation1SE = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2SE = highValue;
break;
case 3:
m_scene.RegionInfo.RegionSettings.Elevation1NE = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2NE = highValue;
break;
}
m_scene.RegionInfo.RegionSettings.Save();
sendRegionInfoPacketToAll();
}
/// <summary>
/// Called when the client commits its terrain texture changes: a fresh region
/// handshake pushes the new textures/heights to every connected client.
/// </summary>
private void handleCommitEstateTerrainTextureRequest(IClientAPI remoteClient)
{
sendRegionHandshakeToAll();
}
/// <summary>
/// Applies region water height, terraform limits and sun settings, triggers a
/// sun update, broadcasts new region info and persists the settings.
/// </summary>
/// <remarks>
/// NOTE(review): UseGlobal, EstateFixedSun and EstateSunHour are accepted but
/// not used in this method — presumably the estate-level sun state is updated
/// elsewhere; confirm with the callers (e.g. changeWaterHeight).
/// </remarks>
public void setRegionTerrainSettings(float WaterHeight,
float TerrainRaiseLimit, float TerrainLowerLimit,
bool UseEstateSun, bool UseFixedSun, float SunHour,
bool UseGlobal, bool EstateFixedSun, float EstateSunHour)
{
// Water Height
m_scene.RegionInfo.RegionSettings.WaterHeight = WaterHeight;
// Terraforming limits
m_scene.RegionInfo.RegionSettings.TerrainRaiseLimit = TerrainRaiseLimit;
m_scene.RegionInfo.RegionSettings.TerrainLowerLimit = TerrainLowerLimit;
// Time of day / fixed sun
m_scene.RegionInfo.RegionSettings.UseEstateSun = UseEstateSun;
m_scene.RegionInfo.RegionSettings.FixedSun = UseFixedSun;
m_scene.RegionInfo.RegionSettings.SunPosition = SunHour;
TriggerEstateToolsSunUpdate();
//m_log.Debug("[ESTATE]: UFS: " + UseFixedSun.ToString());
//m_log.Debug("[ESTATE]: SunHour: " + SunHour.ToString());
sendRegionInfoPacketToAll();
m_scene.RegionInfo.RegionSettings.Save();
}
/// <summary>
/// Schedules a region restart after the given delay in seconds.
/// </summary>
private void handleEstateRestartSimRequest(IClientAPI remoteClient, int timeInSeconds)
{
m_scene.Restart(timeInSeconds);
}
/// <summary>
/// Stores the new covenant notecard UUID in the region settings and persists it.
/// </summary>
private void handleChangeEstateCovenantRequest(IClientAPI remoteClient, UUID estateCovenantID)
{
m_scene.RegionInfo.RegionSettings.Covenant = estateCovenantID;
m_scene.RegionInfo.RegionSettings.Save();
}
/// <summary>
/// Handles the viewer's "estate access delta" request: adds/removes estate
/// managers, allowed users, allowed groups and banned users. Multiple bits of
/// <paramref name="estateAccessType"/> may be set, so each section below is an
/// independent if (not else-if).
/// </summary>
/// <param name="remote_client">Client issuing the change.</param>
/// <param name="invoice">Transaction id echoed back in the list replies.</param>
/// <param name="estateAccessType">Bitfield: 4=user add, 8=user remove,
/// 16=group add, 32=group remove, 64=ban add, 128=ban remove,
/// 256=manager add, 512=manager remove.</param>
/// <param name="user">Target user or group UUID.</param>
private void handleEstateAccessDeltaRequest(IClientAPI remote_client, UUID invoice, int estateAccessType, UUID user)
{
// EstateAccessDelta handles Estate Managers, Sim Access, Sim Banlist, allowed Groups.. etc.
// The estate owner / master avatar can never be added to or removed from any list.
if (user == m_scene.RegionInfo.EstateSettings.EstateOwner)
return; // never process EO
if (user == m_scene.RegionInfo.MasterAvatarAssignedUUID)
return; // never process owner
if ((estateAccessType & 4) != 0) // User add
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, true) || m_scene.Permissions.BypassPermissions())
{
m_scene.RegionInfo.EstateSettings.AddEstateUser(user);
m_scene.RegionInfo.EstateSettings.Save();
// Echo the updated list back so the viewer UI refreshes.
remote_client.SendEstateList(invoice, (int)Constants.EstateAccessCodex.AccessOptions, m_scene.RegionInfo.EstateSettings.EstateAccess, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
if ((estateAccessType & 8) != 0) // User remove
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, true) || m_scene.Permissions.BypassPermissions())
{
m_scene.RegionInfo.EstateSettings.RemoveEstateUser(user);
m_scene.RegionInfo.EstateSettings.Save();
remote_client.SendEstateList(invoice, (int)Constants.EstateAccessCodex.AccessOptions, m_scene.RegionInfo.EstateSettings.EstateAccess, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
if ((estateAccessType & 16) != 0) // Group add
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, true) || m_scene.Permissions.BypassPermissions())
{
m_scene.RegionInfo.EstateSettings.AddEstateGroup(user);
m_scene.RegionInfo.EstateSettings.Save();
remote_client.SendEstateList(invoice, (int)Constants.EstateAccessCodex.AllowedGroups, m_scene.RegionInfo.EstateSettings.EstateGroups, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
if ((estateAccessType & 32) != 0) // Group remove
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, true) || m_scene.Permissions.BypassPermissions())
{
m_scene.RegionInfo.EstateSettings.RemoveEstateGroup(user);
m_scene.RegionInfo.EstateSettings.Save();
remote_client.SendEstateList(invoice, (int)Constants.EstateAccessCodex.AllowedGroups, m_scene.RegionInfo.EstateSettings.EstateGroups, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
// Note: ban operations require only owner-level permission (second arg false),
// unlike the list operations above which pass true.
if ((estateAccessType & 64) != 0) // Ban add
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, false) || m_scene.Permissions.BypassPermissions())
{
EstateBan[] banlistcheck = m_scene.RegionInfo.EstateSettings.EstateBans;
bool alreadyInList = false;
for (int i = 0; i < banlistcheck.Length; i++)
{
if (user == banlistcheck[i].BannedUserID)
{
alreadyInList = true;
break;
}
}
if (!alreadyInList)
{
EstateBan item = new EstateBan();
item.BannedUserID = user;
item.EstateID = m_scene.RegionInfo.EstateSettings.EstateID;
item.BannedHostAddress = "0.0.0.0";
item.BannedHostIPMask = "0.0.0.0";
m_scene.RegionInfo.EstateSettings.AddBan(item);
m_scene.RegionInfo.EstateSettings.Save();
// If the banned avatar is currently a root agent here, send them home.
ScenePresence s = m_scene.GetScenePresence(user);
if (s != null)
{
if (!s.IsChildAgent)
{
s.ControllingClient.SendTeleportLocationStart();
m_scene.TeleportClientHome(user, s.ControllingClient);
}
}
}
else
{
remote_client.SendAlertMessage("User is already on the region ban list");
}
//m_scene.RegionInfo.regionBanlist.Add(Manager(user);
remote_client.SendBannedUserList(invoice, m_scene.RegionInfo.EstateSettings.EstateBans, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
if ((estateAccessType & 128) != 0) // Ban remove
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, false) || m_scene.Permissions.BypassPermissions())
{
EstateBan[] banlistcheck = m_scene.RegionInfo.EstateSettings.EstateBans;
bool alreadyInList = false;
EstateBan listitem = null;
for (int i = 0; i < banlistcheck.Length; i++)
{
if (user == banlistcheck[i].BannedUserID)
{
alreadyInList = true;
listitem = banlistcheck[i];
break;
}
}
if (alreadyInList && listitem != null)
{
m_scene.RegionInfo.EstateSettings.RemoveBan(listitem.BannedUserID);
m_scene.RegionInfo.EstateSettings.Save();
}
else
{
remote_client.SendAlertMessage("User is not on the region ban list");
}
//m_scene.RegionInfo.regionBanlist.Add(Manager(user);
remote_client.SendBannedUserList(invoice, m_scene.RegionInfo.EstateSettings.EstateBans, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
if ((estateAccessType & 256) != 0) // Manager add
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, true) || m_scene.Permissions.BypassPermissions())
{
m_scene.RegionInfo.EstateSettings.AddEstateManager(user);
m_scene.RegionInfo.EstateSettings.Save();
remote_client.SendEstateList(invoice, (int)Constants.EstateAccessCodex.EstateManagers, m_scene.RegionInfo.EstateSettings.EstateManagers, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
if ((estateAccessType & 512) != 0) // Manager remove
{
if (m_scene.Permissions.CanIssueEstateCommand(remote_client.AgentId, true) || m_scene.Permissions.BypassPermissions())
{
m_scene.RegionInfo.EstateSettings.RemoveEstateManager(user);
m_scene.RegionInfo.EstateSettings.Save();
remote_client.SendEstateList(invoice, (int)Constants.EstateAccessCodex.EstateManagers, m_scene.RegionInfo.EstateSettings.EstateManagers, m_scene.RegionInfo.EstateSettings.EstateID);
}
else
{
remote_client.SendAlertMessage("Method EstateAccessDelta Failed, you don't have permissions");
}
}
}
/// <summary>
/// Forwards a blue-box notification to every user in this region via the
/// dialog module; silently does nothing if no dialog module is registered.
/// </summary>
private void SendSimulatorBlueBoxMessage(
    IClientAPI remote_client, UUID invoice, UUID senderID, UUID sessionID, string senderName, string message)
{
    IDialogModule dialogModule = m_scene.RequestModuleInterface<IDialogModule>();
    if (dialogModule == null)
        return;
    dialogModule.SendNotificationToUsersInRegion(senderID, senderName, message);
}
/// <summary>
/// Forwards a blue-box notification to every user in the estate via the
/// dialog module; silently does nothing if no dialog module is registered.
/// </summary>
private void SendEstateBlueBoxMessage(
    IClientAPI remote_client, UUID invoice, UUID senderID, UUID sessionID, string senderName, string message)
{
    IDialogModule dialogModule = m_scene.RequestModuleInterface<IDialogModule>();
    if (dialogModule == null)
        return;
    dialogModule.SendNotificationToUsersInEstate(senderID, senderName, message);
}
/// <summary>
/// Applies the estate debug toggles (disable physics / scripts / collisions),
/// persists them and pushes them into the running scene.
/// </summary>
private void handleEstateDebugRegionRequest(IClientAPI remote_client, UUID invoice, UUID senderID, bool scripted, bool collisionEvents, bool physics)
{
    // Each flag maps one-to-one onto a "Disable*" region setting.
    m_scene.RegionInfo.RegionSettings.DisablePhysics = physics;
    m_scene.RegionInfo.RegionSettings.DisableScripts = scripted;
    m_scene.RegionInfo.RegionSettings.DisableCollisions = collisionEvents;
    m_scene.RegionInfo.RegionSettings.Save();
    m_scene.SetSceneCoreDebug(scripted, collisionEvents, physics);
}
/// <summary>
/// Teleports a single avatar home, if a target was specified and that avatar
/// is currently present in this scene.
/// </summary>
private void handleEstateTeleportOneUserHomeRequest(IClientAPI remover_client, UUID invoice, UUID senderID, UUID prey)
{
    // Nothing to do when no target avatar was specified.
    if (prey == UUID.Zero)
        return;
    ScenePresence target = m_scene.GetScenePresence(prey);
    if (target == null)
        return;
    target.ControllingClient.SendTeleportLocationStart();
    m_scene.TeleportClientHome(prey, target.ControllingClient);
}
/// <summary>
/// Teleports every root avatar except the requester home.
/// </summary>
private void handleEstateTeleportAllUsersHomeRequest(IClientAPI remover_client, UUID invoice, UUID senderID)
{
    // Snapshot the presence list so teleports don't mutate what we iterate over.
    ScenePresence[] presences = m_scene.GetScenePresences();
    foreach (ScenePresence presence in presences)
    {
        if (presence.UUID == senderID)
            continue;
        // Re-fetch: the avatar may have left while we worked down a long list.
        ScenePresence current = m_scene.GetScenePresence(presence.UUID);
        if (current == null)
            continue;
        // Only root agents are actually in the region.
        if (current.IsChildAgent)
            continue;
        current.ControllingClient.SendTeleportLocationStart();
        m_scene.TeleportClientHome(current.UUID, current.ControllingClient);
    }
}
/// <summary>
/// Client-initiated abort of an in-progress terrain upload: if the XferID
/// matches the active upload, detaches the xfer handlers and frees the slot.
/// </summary>
private void AbortTerrainXferHandler(IClientAPI remoteClient, ulong XferID)
{
// NOTE(review): TerrainUploader is checked outside the lock and set to null
// inside it, so a concurrent completion (HandleTerrainApplication) could race
// this check — confirm whether these callbacks can overlap in practice.
if (TerrainUploader != null)
{
lock (TerrainUploader)
{
if (XferID == TerrainUploader.XferID)
{
remoteClient.OnXferReceive -= TerrainUploader.XferReceive;
remoteClient.OnAbortXfer -= AbortTerrainXferHandler;
TerrainUploader.TerrainUploadDone -= HandleTerrainApplication;
TerrainUploader = null;
remoteClient.SendAlertMessage("Terrain Upload aborted by the client");
}
}
}
}
/// <summary>
/// Completion callback for a terrain upload: writes the received bytes to a
/// temp file (extension chosen from the upload size) and loads it through the
/// terrain module, alerting the client on success or failure.
/// </summary>
/// <param name="filename">Client-side file name (not used).</param>
/// <param name="terrainData">Raw uploaded terrain bytes.</param>
/// <param name="remoteClient">Client that performed the upload.</param>
private void HandleTerrainApplication(string filename, byte[] terrainData, IClientAPI remoteClient)
{
    // Upload finished: detach the xfer handlers and free the uploader slot.
    lock (TerrainUploader)
    {
        remoteClient.OnXferReceive -= TerrainUploader.XferReceive;
        remoteClient.OnAbortXfer -= AbortTerrainXferHandler;
        TerrainUploader.TerrainUploadDone -= HandleTerrainApplication;
        TerrainUploader = null;
    }
    remoteClient.SendAlertMessage("Terrain Upload Complete. Loading....");
    ITerrainModule terr = m_scene.RequestModuleInterface<ITerrainModule>();
    if (terr != null)
    {
        m_log.Warn("[CLIENT]: Got Request to Send Terrain in region " + m_scene.RegionInfo.RegionName);
        try
        {
            // Infer the file format (and hence extension) from the upload size
            // so the terrain module picks the matching loader.
            string localfilename = "terrain.raw";
            if (terrainData.Length == 851968)
            {
                localfilename = Path.Combine(Util.dataDir(),"terrain.raw"); // It's a .LLRAW
            }
            if (terrainData.Length == 196662) // 24-bit 256x256 Bitmap
                localfilename = Path.Combine(Util.dataDir(), "terrain.bmp");
            if (terrainData.Length == 256 * 256 * 4) // It's a .R32
                localfilename = Path.Combine(Util.dataDir(), "terrain.r32");
            if (terrainData.Length == 256 * 256 * 8) // It's a .R64
                localfilename = Path.Combine(Util.dataDir(), "terrain.r64");
            if (File.Exists(localfilename))
            {
                File.Delete(localfilename);
            }
            // FIX: wrap the stream in "using" so the file handle is released
            // even if Write throws (the original leaked it on failure).
            using (FileStream output = new FileStream(localfilename, FileMode.CreateNew))
            {
                output.Write(terrainData, 0, terrainData.Length);
            }
            FileInfo x = new FileInfo(localfilename);
            terr.LoadFromFile(localfilename);
            remoteClient.SendAlertMessage("Your terrain was loaded as a ." + x.Extension + " file. It may take a few moments to appear.");
        }
        catch (IOException e)
        {
            m_log.ErrorFormat("[TERRAIN]: Error Saving a terrain file uploaded via the estate tools. It gave us the following error: {0}", e.ToString());
            remoteClient.SendAlertMessage("There was an IO Exception loading your terrain. Please check free space.");
            return;
        }
        catch (SecurityException e)
        {
            m_log.ErrorFormat("[TERRAIN]: Error Saving a terrain file uploaded via the estate tools. It gave us the following error: {0}", e.ToString());
            remoteClient.SendAlertMessage("There was a security Exception loading your terrain. Please check the security on the simulator drive");
            return;
        }
        catch (UnauthorizedAccessException e)
        {
            m_log.ErrorFormat("[TERRAIN]: Error Saving a terrain file uploaded via the estate tools. It gave us the following error: {0}", e.ToString());
            remoteClient.SendAlertMessage("There was a security Exception loading your terrain. Please check the security on the simulator drive");
            return;
        }
        catch (Exception e)
        {
            m_log.ErrorFormat("[TERRAIN]: Error loading a terrain file uploaded via the estate tools. It gave us the following error: {0}", e.ToString());
            remoteClient.SendAlertMessage("There was a general error loading your terrain. Please fix the terrain file and try again");
        }
    }
    else
    {
        remoteClient.SendAlertMessage("Unable to apply terrain. Cannot get an instance of the terrain module");
    }
}
/// <summary>
/// Starts a terrain upload from the client, if no other upload is active.
/// Only one upload may run at a time (tracked via TerrainUploader).
/// </summary>
private void handleUploadTerrain(IClientAPI remote_client, string clientFileName)
{
// NOTE(review): the null check is outside the lock, so two simultaneous
// requests could both pass it — confirm whether client callbacks can overlap.
if (TerrainUploader == null)
{
TerrainUploader = new EstateTerrainXferHandler(remote_client, clientFileName);
lock (TerrainUploader)
{
remote_client.OnXferReceive += TerrainUploader.XferReceive;
remote_client.OnAbortXfer += AbortTerrainXferHandler;
TerrainUploader.TerrainUploadDone += HandleTerrainApplication;
}
TerrainUploader.RequestStartXfer(remote_client);
}
else
{
remote_client.SendAlertMessage("Another Terrain Upload is in progress. Please wait your turn!");
}
}
/// <summary>
/// Handles a client request to download the region terrain: saves the current
/// heightmap to terrain.raw and offers the bytes to the client through the
/// xfer manager.
/// </summary>
private void handleTerrainRequest(IClientAPI remote_client, string clientFileName)
{
    // Save terrain here
    ITerrainModule terr = m_scene.RequestModuleInterface<ITerrainModule>();
    if (terr != null)
    {
        m_log.Warn("[CLIENT]: Got Request to Send Terrain in region " + m_scene.RegionInfo.RegionName);
        string terrainPath = Util.dataDir() + "/terrain.raw";
        if (File.Exists(terrainPath))
        {
            File.Delete(terrainPath);
        }
        terr.SaveToFile(terrainPath);
        // FIX: the original opened a FileStream it never closed (handle leak)
        // and ignored Read's return value (a short read would truncate the
        // download). File.ReadAllBytes closes the file and reads it fully.
        byte[] bdata = File.ReadAllBytes(terrainPath);
        remote_client.SendAlertMessage("Terrain file written, starting download...");
        m_scene.XferManager.AddNewFile("terrain.raw", bdata);
        // Tell client about it
        m_log.Warn("[CLIENT]: Sending Terrain to " + remote_client.Name);
        remote_client.SendInitiateDownload("terrain.raw", clientFileName);
    }
}
/// <summary>
/// Builds and sends the region information block shown in the estate menu.
/// </summary>
private void HandleRegionInfoRequest(IClientAPI remote_client)
{
    RegionInfoForEstateMenuArgs regionInfo = new RegionInfoForEstateMenuArgs();
    // Estate-level values.
    regionInfo.estateID = m_scene.RegionInfo.EstateSettings.EstateID;
    regionInfo.parentEstateID = m_scene.RegionInfo.EstateSettings.ParentEstateID;
    regionInfo.billableFactor = m_scene.RegionInfo.EstateSettings.BillableFactor;
    regionInfo.pricePerMeter = m_scene.RegionInfo.EstateSettings.PricePerMeter;
    regionInfo.redirectGridX = m_scene.RegionInfo.EstateSettings.RedirectGridX;
    regionInfo.redirectGridY = m_scene.RegionInfo.EstateSettings.RedirectGridY;
    // Region-level values.
    regionInfo.maxAgents = (byte)m_scene.RegionInfo.RegionSettings.AgentLimit;
    regionInfo.objectBonusFactor = (float)m_scene.RegionInfo.RegionSettings.ObjectBonus;
    regionInfo.regionFlags = GetRegionFlags();
    regionInfo.simAccess = m_scene.RegionInfo.AccessLevel;
    regionInfo.sunHour = (float)m_scene.RegionInfo.RegionSettings.SunPosition;
    regionInfo.terrainLowerLimit = (float)m_scene.RegionInfo.RegionSettings.TerrainLowerLimit;
    regionInfo.terrainRaiseLimit = (float)m_scene.RegionInfo.RegionSettings.TerrainRaiseLimit;
    regionInfo.useEstateSun = m_scene.RegionInfo.RegionSettings.UseEstateSun;
    regionInfo.waterHeight = (float)m_scene.RegionInfo.RegionSettings.WaterHeight;
    regionInfo.simName = m_scene.RegionInfo.RegionName;
    remote_client.SendRegionInfoToEstateMenu(regionInfo);
}
/// <summary>
/// Sends the current covenant notecard UUID to the requesting client.
/// </summary>
private void HandleEstateCovenantRequest(IClientAPI remote_client)
{
remote_client.SendEstateCovenantInformation(m_scene.RegionInfo.RegionSettings.Covenant);
}
/// <summary>
/// Builds and sends a land-stat report (top colliders for reportType 1, top
/// scripts for reportType 0), optionally filtered by owner or task name.
/// Unresolved owner names are reported as "waiting" and looked up
/// asynchronously so they are cached for the next request.
/// </summary>
/// <param name="parcelID">Parcel id from the request (not used here).</param>
/// <param name="reportType">1 = top colliders, 0 = top scripts.</param>
/// <param name="requestFlags">Viewer flags, echoed back in the reply.</param>
/// <param name="filter">Substring filter on owner or task name; empty = no filter.</param>
/// <param name="remoteClient">Client to send the report to.</param>
private void HandleLandStatRequest(int parcelID, uint reportType, uint requestFlags, string filter, IClientAPI remoteClient)
{
Dictionary<uint, float> SceneData = new Dictionary<uint,float>();
List<UUID> uuidNameLookupList = new List<UUID>();
if (reportType == 1)
{
SceneData = m_scene.PhysicsScene.GetTopColliders();
}
else if (reportType == 0)
{
SceneData = m_scene.SceneGraph.GetTopScripts();
}
List<LandStatReportItem> SceneReport = new List<LandStatReportItem>();
// Lock the stats dictionary while we walk it; keys are object local ids.
lock (SceneData)
{
foreach (uint obj in SceneData.Keys)
{
SceneObjectPart prt = m_scene.GetSceneObjectPart(obj);
if (prt != null)
{
if (prt.ParentGroup != null)
{
SceneObjectGroup sog = prt.ParentGroup;
if (sog != null)
{
LandStatReportItem lsri = new LandStatReportItem();
lsri.LocationX = sog.AbsolutePosition.X;
lsri.LocationY = sog.AbsolutePosition.Y;
lsri.LocationZ = sog.AbsolutePosition.Z;
lsri.Score = SceneData[obj];
lsri.TaskID = sog.UUID;
lsri.TaskLocalID = sog.LocalId;
lsri.TaskName = sog.GetPartName(obj);
if (m_scene.CommsManager.UUIDNameCachedTest(sog.OwnerID))
{
lsri.OwnerName = m_scene.CommsManager.UUIDNameRequestString(sog.OwnerID);
}
else
{
// Name not cached yet: report a placeholder and queue a lookup.
lsri.OwnerName = "waiting";
lock (uuidNameLookupList)
uuidNameLookupList.Add(sog.OwnerID);
}
// Skip entries that match neither the owner nor the task name.
if (filter.Length != 0)
{
if ((lsri.OwnerName.Contains(filter) || lsri.TaskName.Contains(filter)))
{
}
else
{
continue;
}
}
SceneReport.Add(lsri);
}
}
}
}
}
remoteClient.SendLandStatReply(reportType, requestFlags, (uint)SceneReport.Count,SceneReport.ToArray());
// Resolve any missing owner names in the background for the next request.
if (uuidNameLookupList.Count > 0)
LookupUUID(uuidNameLookupList);
}
/// <summary>
/// Async callback that completes the fire-and-forget UUID name lookup
/// started by <see cref="LookupUUID"/>.
/// </summary>
private static void LookupUUIDSCompleted(IAsyncResult iar)
{
LookupUUIDS icon = (LookupUUIDS)iar.AsyncState;
icon.EndInvoke(iar);
}
/// <summary>
/// Starts an asynchronous name lookup for the given avatar UUIDs so the
/// results are cached for subsequent land-stat requests.
/// </summary>
private void LookupUUID(List<UUID> uuidLst)
{
LookupUUIDS d = LookupUUIDsAsync;
d.BeginInvoke(uuidLst,
LookupUUIDSCompleted,
d);
}
/// <summary>
/// Background worker: requests a name for each UUID purely to warm the
/// comms manager's name cache; the results themselves are discarded.
/// </summary>
private void LookupUUIDsAsync(List<UUID> uuidLst)
{
    UUID[] ids;
    // Copy under the lock so callers may keep mutating the shared list.
    lock (uuidLst)
    {
        ids = uuidLst.ToArray();
    }
    foreach (UUID id in ids)
    {
        // Return value intentionally dropped — it gets cached, so we're
        // ready for the next request.
        m_scene.CommsManager.UUIDNameRequestString(id);
    }
}
#endregion
#region Outgoing Packets
/// <summary>
/// Pushes the current region info block to every avatar's client.
/// </summary>
public void sendRegionInfoPacketToAll()
{
    foreach (ScenePresence avatar in m_scene.GetAvatars())
    {
        HandleRegionInfoRequest(avatar.ControllingClient);
    }
}
/// <summary>
/// Sends the region handshake (terrain textures/heights, water height, region
/// flags, owner and estate-manager status) to a single client.
/// </summary>
public void sendRegionHandshake(IClientAPI remoteClient)
{
RegionHandshakeArgs args = new RegionHandshakeArgs();
args.isEstateManager = m_scene.RegionInfo.EstateSettings.IsEstateManager(remoteClient.AgentId);
// The estate owner always counts as an estate manager.
if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero && m_scene.RegionInfo.EstateSettings.EstateOwner == remoteClient.AgentId)
args.isEstateManager = true;
args.billableFactor = m_scene.RegionInfo.EstateSettings.BillableFactor;
// Terrain texture blend elevations for the four corners (SW/NW/SE/NE).
args.terrainStartHeight0 = (float)m_scene.RegionInfo.RegionSettings.Elevation1SW;
args.terrainHeightRange0 = (float)m_scene.RegionInfo.RegionSettings.Elevation2SW;
args.terrainStartHeight1 = (float)m_scene.RegionInfo.RegionSettings.Elevation1NW;
args.terrainHeightRange1 = (float)m_scene.RegionInfo.RegionSettings.Elevation2NW;
args.terrainStartHeight2 = (float)m_scene.RegionInfo.RegionSettings.Elevation1SE;
args.terrainHeightRange2 = (float)m_scene.RegionInfo.RegionSettings.Elevation2SE;
args.terrainStartHeight3 = (float)m_scene.RegionInfo.RegionSettings.Elevation1NE;
args.terrainHeightRange3 = (float)m_scene.RegionInfo.RegionSettings.Elevation2NE;
args.simAccess = m_scene.RegionInfo.AccessLevel;
args.waterHeight = (float)m_scene.RegionInfo.RegionSettings.WaterHeight;
args.regionFlags = GetRegionFlags();
args.regionName = m_scene.RegionInfo.RegionName;
// Fall back to the master avatar when no estate owner is configured.
if (m_scene.RegionInfo.EstateSettings.EstateOwner != UUID.Zero)
args.SimOwner = m_scene.RegionInfo.EstateSettings.EstateOwner;
else
args.SimOwner = m_scene.RegionInfo.MasterAvatarAssignedUUID;
// Fudge estate owner
//if (m_scene.Permissions.IsGod(remoteClient.AgentId))
// args.SimOwner = remoteClient.AgentId;
// Base textures are unused here; only the detail textures are sent.
args.terrainBase0 = UUID.Zero;
args.terrainBase1 = UUID.Zero;
args.terrainBase2 = UUID.Zero;
args.terrainBase3 = UUID.Zero;
args.terrainDetail0 = m_scene.RegionInfo.RegionSettings.TerrainTexture1;
args.terrainDetail1 = m_scene.RegionInfo.RegionSettings.TerrainTexture2;
args.terrainDetail2 = m_scene.RegionInfo.RegionSettings.TerrainTexture3;
args.terrainDetail3 = m_scene.RegionInfo.RegionSettings.TerrainTexture4;
remoteClient.SendRegionHandshake(m_scene.RegionInfo,args);
}
/// <summary>
/// Sends a region handshake to every connected client.
/// </summary>
public void sendRegionHandshakeToAll()
{
m_scene.ForEachClient(sendRegionHandshake);
}
/// <summary>
/// Applies an "estate change info" request: unpacks the sun position from
/// parms2 and the estate flag bits from parms1, saves the estate settings,
/// updates the sun and sends the refreshed estate data back to the client.
/// </summary>
public void handleEstateChangeInfo(IClientAPI remoteClient, UUID invoice, UUID senderID, UInt32 parms1, UInt32 parms2)
{
    // parms2 carries the sun position; zero means "track the global time".
    if (parms2 == 0)
    {
        m_scene.RegionInfo.EstateSettings.UseGlobalTime = true;
        m_scene.RegionInfo.EstateSettings.SunPosition = 0.0;
    }
    else
    {
        m_scene.RegionInfo.EstateSettings.UseGlobalTime = false;
        // Undo the viewer's fixed-point encoding (offset 0x1800, scale 1024).
        m_scene.RegionInfo.EstateSettings.SunPosition = (parms2 - 0x1800)/1024.0;
    }
    // parms1 is a bitfield of estate flags; unpack each bit into its setting.
    m_scene.RegionInfo.EstateSettings.FixedSun = (parms1 & 0x00000010) != 0;
    m_scene.RegionInfo.EstateSettings.PublicAccess = (parms1 & 0x00008000) != 0;
    m_scene.RegionInfo.EstateSettings.AllowVoice = (parms1 & 0x10000000) != 0;
    m_scene.RegionInfo.EstateSettings.AllowDirectTeleport = (parms1 & 0x00100000) != 0;
    m_scene.RegionInfo.EstateSettings.DenyAnonymous = (parms1 & 0x00800000) != 0;
    m_scene.RegionInfo.EstateSettings.DenyIdentified = (parms1 & 0x01000000) != 0;
    m_scene.RegionInfo.EstateSettings.DenyTransacted = (parms1 & 0x02000000) != 0;
    m_scene.RegionInfo.EstateSettings.DenyMinors = (parms1 & 0x40000000) != 0;
    m_scene.RegionInfo.EstateSettings.Save();
    TriggerEstateToolsSunUpdate();
    sendDetailedEstateData(remoteClient, invoice);
}
#endregion
#region IRegionModule Members
/// <summary>
/// IRegionModule entry point: registers this module with the scene, hooks the
/// new-client and water-height events and installs the console commands.
/// </summary>
/// <param name="scene">Scene this module instance serves.</param>
/// <param name="source">Module configuration (not used).</param>
public void Initialise(Scene scene, IConfigSource source)
{
m_scene = scene;
m_scene.RegisterModuleInterface<IEstateModule>(this);
m_scene.EventManager.OnNewClient += EventManager_OnNewClient;
m_scene.EventManager.OnRequestChangeWaterHeight += changeWaterHeight;
m_scene.AddCommand(this, "set terrain texture",
"set terrain texture <number> <uuid> [<x>] [<y>]",
"Sets the terrain <number> to <uuid>, if <x> or <y> are specified, it will only " +
"set it on regions with a matching coordinate. Specify -1 in <x> or <y> to wildcard" +
" that coordinate.",
consoleSetTerrainTexture);
m_scene.AddCommand(this, "set terrain heights",
"set terrain heights <corner> <min> <max> [<x>] [<y>]",
"Sets the terrain texture heights on corner #<corner> to <min>/<max>, if <x> or <y> are specified, it will only " +
"set it on regions with a matching coordinate. Specify -1 in <x> or <y> to wildcard" +
" that coordinate. Corner # SW = 0, NW = 1, SE = 2, NE = 3.",
consoleSetTerrainHeights);
}
#region Console Commands
/// <summary>
/// Console command "set terrain texture &lt;number&gt; &lt;uuid&gt; [&lt;x&gt;] [&lt;y&gt;]":
/// sets one terrain texture slot, optionally restricted to a region whose
/// grid coordinates match (-1 wildcards a coordinate).
/// </summary>
/// <param name="module">Console module name (unused).</param>
/// <param name="args">Tokenized command; args[0..2] are the command words.</param>
public void consoleSetTerrainTexture(string module, string[] args)
{
// args[0..2] hold "set terrain texture"; the parameters start at index 3.
string num = args[3];
string uuid = args[4];
int x = (args.Length > 5 ? int.Parse(args[5]) : -1);
int y = (args.Length > 6 ? int.Parse(args[6]) : -1);
// Only apply when this region's grid location matches (or is wildcarded).
if (x == -1 || m_scene.RegionInfo.RegionLocX == x)
{
if (y == -1 || m_scene.RegionInfo.RegionLocY == y)
{
int corner = int.Parse(num);
UUID texture = UUID.Parse(uuid);
m_log.Debug("[ESTATEMODULE] Setting terrain textures for " + m_scene.RegionInfo.RegionName +
string.Format(" (C#{0} = {1})", corner, texture));
switch (corner)
{
case 0:
m_scene.RegionInfo.RegionSettings.TerrainTexture1 = texture;
break;
case 1:
m_scene.RegionInfo.RegionSettings.TerrainTexture2 = texture;
break;
case 2:
m_scene.RegionInfo.RegionSettings.TerrainTexture3 = texture;
break;
case 3:
m_scene.RegionInfo.RegionSettings.TerrainTexture4 = texture;
break;
}
m_scene.RegionInfo.RegionSettings.Save();
sendRegionInfoPacketToAll();
}
}
}
/// <summary>
/// Console command "set terrain heights &lt;corner&gt; &lt;min&gt; &lt;max&gt; [&lt;x&gt;] [&lt;y&gt;]":
/// sets the texture blend elevations for one corner (SW=0, NW=1, SE=2, NE=3),
/// optionally restricted to a region whose grid coordinates match (-1 wildcards).
/// </summary>
/// <param name="module">Console module name (unused).</param>
/// <param name="args">Tokenized command; args[0..2] are the command words.</param>
public void consoleSetTerrainHeights(string module, string[] args)
{
// args[0..2] hold "set terrain heights"; the parameters start at index 3.
string num = args[3];
string min = args[4];
string max = args[5];
int x = (args.Length > 6 ? int.Parse(args[6]) : -1);
int y = (args.Length > 7 ? int.Parse(args[7]) : -1);
if (x == -1 || m_scene.RegionInfo.RegionLocX == x)
{
if (y == -1 || m_scene.RegionInfo.RegionLocY == y)
{
int corner = int.Parse(num);
// Parse with the configured culture so decimal separators are consistent.
float lowValue = float.Parse(min, Culture.NumberFormatInfo);
float highValue = float.Parse(max, Culture.NumberFormatInfo);
m_log.Debug("[ESTATEMODULE] Setting terrain heights " + m_scene.RegionInfo.RegionName +
string.Format(" (C{0}, {1}-{2}", corner, lowValue, highValue));
switch (corner)
{
case 0:
m_scene.RegionInfo.RegionSettings.Elevation1SW = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2SW = highValue;
break;
case 1:
m_scene.RegionInfo.RegionSettings.Elevation1NW = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2NW = highValue;
break;
case 2:
m_scene.RegionInfo.RegionSettings.Elevation1SE = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2SE = highValue;
break;
case 3:
m_scene.RegionInfo.RegionSettings.Elevation1NE = lowValue;
m_scene.RegionInfo.RegionSettings.Elevation2NE = highValue;
break;
}
m_scene.RegionInfo.RegionSettings.Save();
sendRegionHandshakeToAll();
}
}
}
#endregion
/// <summary>
/// Second IRegionModule initialisation phase.
/// </summary>
public void PostInitialise()
{
// Sets up the sun module based no the saved Estate and Region Settings
// DO NOT REMOVE or the sun will stop working
TriggerEstateToolsSunUpdate();
}
/// <summary>
/// IRegionModule shutdown hook; this module has nothing to release.
/// </summary>
public void Close()
{
}
/// <summary>Gets this module's name.</summary>
public string Name
{
get { return "EstateManagementModule"; }
}
/// <summary>One instance per scene, not shared across regions.</summary>
public bool IsSharedModule
{
get { return false; }
}
#endregion
#region Other Functions
/// <summary>
/// Recomputes the effective sun position from either the estate or the region
/// settings and fires the estate-tools sun update event for this region.
/// </summary>
private void TriggerEstateToolsSunUpdate()
{
float sun;
if (m_scene.RegionInfo.RegionSettings.UseEstateSun)
{
// Estate-controlled sun; global time overrides the stored position.
sun = (float)m_scene.RegionInfo.EstateSettings.SunPosition;
if (m_scene.RegionInfo.EstateSettings.UseGlobalTime)
{
// Convert the Linden hour to the internal offset (-6h).
sun = m_scene.EventManager.GetCurrentTimeAsSunLindenHour() - 6.0f;
}
//
m_scene.EventManager.TriggerEstateToolsSunUpdate(
m_scene.RegionInfo.RegionHandle,
m_scene.RegionInfo.EstateSettings.FixedSun,
m_scene.RegionInfo.RegionSettings.UseEstateSun,
sun);
}
else
{
// Use the Sun Position from the Region Settings
sun = (float)m_scene.RegionInfo.RegionSettings.SunPosition - 6.0f;
m_scene.EventManager.TriggerEstateToolsSunUpdate(
m_scene.RegionInfo.RegionHandle,
m_scene.RegionInfo.RegionSettings.FixedSun,
m_scene.RegionInfo.RegionSettings.UseEstateSun,
sun);
}
}
/// <summary>
/// IEstateModule hook: sets a new water height while re-applying all other
/// current terrain/sun settings unchanged, then broadcasts region info.
/// </summary>
/// <param name="height">New water height in meters.</param>
public void changeWaterHeight(float height)
{
setRegionTerrainSettings(height,
(float)m_scene.RegionInfo.RegionSettings.TerrainRaiseLimit,
(float)m_scene.RegionInfo.RegionSettings.TerrainLowerLimit,
m_scene.RegionInfo.RegionSettings.UseEstateSun,
m_scene.RegionInfo.RegionSettings.FixedSun,
(float)m_scene.RegionInfo.RegionSettings.SunPosition,
m_scene.RegionInfo.EstateSettings.UseGlobalTime,
m_scene.RegionInfo.EstateSettings.FixedSun,
(float)m_scene.RegionInfo.EstateSettings.SunPosition);
sendRegionInfoPacketToAll();
}
#endregion
/// <summary>
/// Wires all estate-related client event handlers for a newly connected
/// client and immediately sends it the region handshake.
/// </summary>
private void EventManager_OnNewClient(IClientAPI client)
{
client.OnDetailedEstateDataRequest += sendDetailedEstateData;
client.OnSetEstateFlagsRequest += estateSetRegionInfoHandler;
// client.OnSetEstateTerrainBaseTexture += setEstateTerrainBaseTexture;
client.OnSetEstateTerrainDetailTexture += setEstateTerrainBaseTexture;
client.OnSetEstateTerrainTextureHeights += setEstateTerrainTextureHeights;
client.OnCommitEstateTerrainTextureRequest += handleCommitEstateTerrainTextureRequest;
client.OnSetRegionTerrainSettings += setRegionTerrainSettings;
client.OnEstateRestartSimRequest += handleEstateRestartSimRequest;
client.OnEstateChangeCovenantRequest += handleChangeEstateCovenantRequest;
client.OnEstateChangeInfo += handleEstateChangeInfo;
client.OnUpdateEstateAccessDeltaRequest += handleEstateAccessDeltaRequest;
client.OnSimulatorBlueBoxMessageRequest += SendSimulatorBlueBoxMessage;
client.OnEstateBlueBoxMessageRequest += SendEstateBlueBoxMessage;
client.OnEstateDebugRegionRequest += handleEstateDebugRegionRequest;
client.OnEstateTeleportOneUserHomeRequest += handleEstateTeleportOneUserHomeRequest;
client.OnEstateTeleportAllUsersHomeRequest += handleEstateTeleportAllUsersHomeRequest;
client.OnRequestTerrain += handleTerrainRequest;
client.OnUploadTerrain += handleUploadTerrain;
client.OnRegionInfoRequest += HandleRegionInfoRequest;
client.OnEstateCovenantRequest += HandleEstateCovenantRequest;
client.OnLandStatRequest += HandleLandStatRequest;
sendRegionHandshake(client);
}
/// <summary>
/// Translates the persisted region settings into the viewer's RegionFlags
/// bit field.
/// </summary>
public uint GetRegionFlags()
{
    var settings = m_scene.RegionInfo.RegionSettings;
    RegionFlags flags = RegionFlags.None;

    // Fully implemented settings-driven flags.
    if (settings.AllowDamage)
        flags |= RegionFlags.AllowDamage;
    if (settings.BlockTerraform)
        flags |= RegionFlags.BlockTerraform;
    if (!settings.AllowLandResell)
        flags |= RegionFlags.BlockLandResell;
    if (settings.DisableCollisions)
        flags |= RegionFlags.SkipCollisions;
    if (settings.DisableScripts)
        flags |= RegionFlags.SkipScripts;
    if (settings.DisablePhysics)
        flags |= RegionFlags.SkipPhysics;
    if (settings.BlockFly)
        flags |= RegionFlags.NoFly;
    if (settings.RestrictPushing)
        flags |= RegionFlags.RestrictPushObject;
    if (settings.AllowLandJoinDivide)
        flags |= RegionFlags.AllowParcelChanges;
    if (settings.BlockShowInSearch)
        flags |= (RegionFlags)(1 << 29); // no named enum member for this bit
    if (settings.FixedSun)
        flags |= RegionFlags.SunFixed;
    if (settings.Sandbox)
        flags |= RegionFlags.Sandbox;

    // Fudge these to always on, so the menu options activate.
    flags |= RegionFlags.AllowLandmark;
    flags |= RegionFlags.AllowSetHome;

    // TODO: SkipUpdateInterestList
    // Omitted: NullLayer (what is that?)
    // Omitted: SkipAgentAction (what does it do?)
    return (uint)flags;
}
/// <summary>
/// Translates the persisted estate settings into the viewer's RegionFlags
/// bit field.
/// </summary>
public uint GetEstateFlags()
{
    var estate = m_scene.RegionInfo.EstateSettings;
    RegionFlags flags = RegionFlags.None;

    if (estate.FixedSun)
        flags |= RegionFlags.SunFixed;
    if (estate.PublicAccess)
        flags |= (RegionFlags.PublicAllowed |
                  RegionFlags.ExternallyVisible);
    if (estate.AllowVoice)
        flags |= RegionFlags.AllowVoice;
    if (estate.AllowDirectTeleport)
        flags |= RegionFlags.AllowDirectTeleport;
    if (estate.DenyAnonymous)
        flags |= RegionFlags.DenyAnonymous;
    if (estate.DenyIdentified)
        flags |= RegionFlags.DenyIdentified;
    if (estate.DenyTransacted)
        flags |= RegionFlags.DenyTransacted;
    if (estate.AbuseEmailToEstateOwner)
        flags |= RegionFlags.AbuseEmailToEstateOwner;
    if (estate.BlockDwell)
        flags |= RegionFlags.BlockDwell;
    if (estate.EstateSkipScripts)
        flags |= RegionFlags.EstateSkipScripts;
    if (estate.ResetHomeOnTeleport)
        flags |= RegionFlags.ResetHomeOnTeleport;
    if (estate.TaxFree)
        flags |= RegionFlags.TaxFree;
    if (estate.DenyMinors)
        flags |= (RegionFlags)(1 << 30); // no named enum member for "deny minors"

    return (uint)flags;
}
/// <summary>
/// Returns true when the avatar is the region master avatar, the estate
/// owner, or one of the listed estate managers.
/// </summary>
public bool IsManager(UUID avatarID)
{
    if (avatarID == m_scene.RegionInfo.MasterAvatarAssignedUUID ||
        avatarID == m_scene.RegionInfo.EstateSettings.EstateOwner)
    {
        return true;
    }

    // Fall back to the estate-manager list.
    List<UUID> managers = new List<UUID>(m_scene.RegionInfo.EstateSettings.EstateManagers);
    return managers.Contains(avatarID);
}
}
}
| |
// <copyright file="Evaluate.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2013 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
// <contribution>
// CERN - European Laboratory for Particle Physics
// http://www.docjar.com/html/api/cern/jet/math/Bessel.java.html
// Copyright 1999 CERN - European Laboratory for Particle Physics.
// Permission to use, copy, modify, distribute and sell this software and its documentation for any purpose
// is hereby granted without fee, provided that the above copyright notice appear in all copies and
// that both that copyright notice and this permission notice appear in supporting documentation.
// CERN makes no representations about the suitability of this software for any purpose.
// It is provided "as is" without expressed or implied warranty.
// TOMS757 - Uncommon Special Functions (Fortran77) by Allan McLeod
// http://people.sc.fsu.edu/~jburkardt/f77_src/toms757/toms757.html
// Wei Wu
// Cephes Math Library, Stephen L. Moshier
// ALGLIB 2.0.1, Sergey Bochkanov
// </contribution>
using System;
#if !NOSYSNUMERICS
using System.Numerics;
#endif
// ReSharper disable CheckNamespace
namespace MathNet.Numerics
// ReSharper restore CheckNamespace
{
/// <summary>
/// Evaluation functions, useful for function approximation.
/// </summary>
public static class Evaluate
{
    /// <summary>
    /// Evaluate a polynomial at point z using Horner's scheme.
    /// Coefficients are ordered by power with power k at index k.
    /// Example: coefficients [3,-1,2] represent y=2x^2-x+3.
    /// </summary>
    /// <param name="z">The location where to evaluate the polynomial at.</param>
    /// <param name="coefficients">The coefficients of the polynomial, coefficient for power k at index k.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="coefficients"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when <paramref name="coefficients"/> is empty
    /// (previously this surfaced as an opaque IndexOutOfRangeException).</exception>
    public static double Polynomial(double z, params double[] coefficients)
    {
        if (coefficients == null)
        {
            throw new ArgumentNullException("coefficients");
        }
        if (coefficients.Length == 0)
        {
            throw new ArgumentException("At least one polynomial coefficient is required.", "coefficients");
        }

        // Horner's scheme: fold from the highest power down.
        double sum = coefficients[coefficients.Length - 1];
        for (int i = coefficients.Length - 2; i >= 0; --i)
        {
            sum *= z;
            sum += coefficients[i];
        }
        return sum;
    }

    /// <summary>
    /// Evaluate a polynomial with real coefficients at complex point z using Horner's scheme.
    /// Coefficients are ordered by power with power k at index k.
    /// Example: coefficients [3,-1,2] represent y=2x^2-x+3.
    /// </summary>
    /// <param name="z">The location where to evaluate the polynomial at.</param>
    /// <param name="coefficients">The coefficients of the polynomial, coefficient for power k at index k.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="coefficients"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when <paramref name="coefficients"/> is empty.</exception>
    public static Complex Polynomial(Complex z, params double[] coefficients)
    {
        if (coefficients == null)
        {
            throw new ArgumentNullException("coefficients");
        }
        if (coefficients.Length == 0)
        {
            throw new ArgumentException("At least one polynomial coefficient is required.", "coefficients");
        }

        Complex sum = coefficients[coefficients.Length - 1];
        for (int i = coefficients.Length - 2; i >= 0; --i)
        {
            sum *= z;
            sum += coefficients[i];
        }
        return sum;
    }

    /// <summary>
    /// Evaluate a polynomial with complex coefficients at complex point z using Horner's scheme.
    /// Coefficients are ordered by power with power k at index k.
    /// Example: coefficients [3,-1,2] represent y=2x^2-x+3.
    /// </summary>
    /// <param name="z">The location where to evaluate the polynomial at.</param>
    /// <param name="coefficients">The coefficients of the polynomial, coefficient for power k at index k.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="coefficients"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when <paramref name="coefficients"/> is empty.</exception>
    public static Complex Polynomial(Complex z, params Complex[] coefficients)
    {
        if (coefficients == null)
        {
            throw new ArgumentNullException("coefficients");
        }
        if (coefficients.Length == 0)
        {
            throw new ArgumentException("At least one polynomial coefficient is required.", "coefficients");
        }

        Complex sum = coefficients[coefficients.Length - 1];
        for (int i = coefficients.Length - 2; i >= 0; --i)
        {
            sum *= z;
            sum += coefficients[i];
        }
        return sum;
    }

    /// <summary>
    /// Numerically stable series summation using Kahan compensated summation.
    /// Terminates once the current summand is negligible (smaller than
    /// |sum| / 2^16) relative to the accumulated sum; the summand sequence
    /// must decay for the loop to terminate.
    /// </summary>
    /// <param name="nextSummand">provides the summands sequentially</param>
    /// <returns>Sum</returns>
    internal static double Series(Func<double> nextSummand)
    {
        double compensation = 0.0;
        double current;
        const double factor = 1 << 16;

        double sum = nextSummand();
        do
        {
            // Kahan Summation
            // NOTE (ruegg): do NOT optimize. Now, how to tell that the compiler?
            current = nextSummand();
            double y = current - compensation;
            double t = sum + y;
            compensation = t - sum;
            compensation -= y;
            sum = t;
        }
        while (Math.Abs(sum) < Math.Abs(factor*current));

        return sum;
    }

    /// <summary> Evaluates the series of Chebyshev polynomials Ti at argument x/2.
    /// Coefficients are stored in reverse order, i.e. the zero
    /// order term is last in the array. Note N is the number of
    /// coefficients, not the order.
    /// <p/>
    /// If coefficients are for the interval a to b, x must
    /// have been transformed to x -> 2(2x - b - a)/(b-a) before
    /// entering the routine. This maps x from (a, b) to (-1, 1),
    /// over which the Chebyshev polynomials are defined.
    /// <p/>
    /// If the coefficients are for the inverted interval, in
    /// which (a, b) is mapped to (1/b, 1/a), the transformation
    /// required is x -> 2(2ab/x - b - a)/(b-a). If b is infinity,
    /// this becomes x -> 4a/x - 1.
    /// <p/>
    /// SPEED: taking advantage of the recurrence properties of the
    /// Chebyshev polynomials, the routine requires one more
    /// addition per loop than evaluating a nested polynomial of
    /// the same degree.
    /// </summary>
    /// <param name="coefficients">The coefficients of the polynomial. Must contain at least two entries.</param>
    /// <param name="x">Argument to the polynomial.</param>
    /// <remarks>
    /// Reference: https://bpm2.svn.codeplex.com/svn/Common.Numeric/Arithmetic.cs
    /// <p/>
    /// Marked as Deprecated in
    /// http://people.apache.org/~isabel/mahout_site/mahout-matrix/apidocs/org/apache/mahout/jet/math/Arithmetic.html
    /// </remarks>
    internal static double ChebyshevA(double[] coefficients, double x)
    {
        // TODO: Unify, normalize, then make public
        // Clenshaw-style recurrence; statement order is significant.
        double b2;

        int p = 0;
        double b0 = coefficients[p++];
        double b1 = 0.0;

        int i = coefficients.Length - 1;
        do
        {
            b2 = b1;
            b1 = b0;
            b0 = x*b1 - b2 + coefficients[p++];
        }
        while (--i > 0);

        return 0.5*(b0 - b2);
    }

    /// <summary>
    /// Summation of Chebyshev polynomials, using the Clenshaw method with Reinsch modification.
    /// </summary>
    /// <param name="n">The no. of terms in the sequence.</param>
    /// <param name="coefficients">The coefficients of the Chebyshev series, length n+1.</param>
    /// <param name="x">The value at which the series is to be evaluated.</param>
    /// <remarks>
    /// ORIGINAL AUTHOR:
    /// Dr. Allan J. MacLeod; Dept. of Mathematics and Statistics, University of Paisley; High St., PAISLEY, SCOTLAND
    /// REFERENCES:
    /// "An error analysis of the modified Clenshaw method for evaluating Chebyshev and Fourier series"
    /// J. Oliver, J.I.M.A., vol. 20, 1977, pp379-391
    /// </remarks>
    internal static double ChebyshevSum(int n, double[] coefficients, double x)
    {
        // TODO: Unify, normalize, then make public

        // If |x| < 0.6 use the standard Clenshaw method
        if (Math.Abs(x) < 0.6)
        {
            double u0 = 0.0;
            double u1 = 0.0;
            double u2 = 0.0;
            double xx = x + x;

            for (int i = n; i >= 0; i--)
            {
                u2 = u1;
                u1 = u0;
                u0 = xx*u1 + coefficients[i] - u2;
            }

            return (u0 - u2)/2.0;
        }

        // If ABS ( T ) > = 0.6 use the Reinsch modification
        // T > = 0.6 code
        if (x > 0.0)
        {
            double u1 = 0.0;
            double d1 = 0.0;
            double d2 = 0.0;
            double xx = (x - 0.5) - 0.5;
            xx = xx + xx;

            for (int i = n; i >= 0; i--)
            {
                d2 = d1;
                double u2 = u1;
                d1 = xx*u2 + coefficients[i] + d2;
                u1 = d1 + u2;
            }

            return (d1 + d2)/2.0;
        }
        else
        {
            // T < = -0.6 code
            double u1 = 0.0;
            double d1 = 0.0;
            double d2 = 0.0;
            double xx = (x + 0.5) + 0.5;
            xx = xx + xx;

            for (int i = n; i >= 0; i--)
            {
                d2 = d1;
                double u2 = u1;
                d1 = xx*u2 + coefficients[i] - d2;
                u1 = d1 - u2;
            }

            return (d1 - d2)/2.0;
        }
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace TestUtilities
{
public static class FileUtils
{
    /// <summary>
    /// Recursively copies <paramref name="sourceDir"/> into <paramref name="destDir"/>,
    /// creating only directories and copying only files that do not already
    /// exist in the destination. Entries whose resulting path exceeds the
    /// Windows path limits are skipped with a debug message instead of throwing.
    /// </summary>
    public static void CopyDirectory(string sourceDir, string destDir)
    {
        sourceDir = sourceDir.TrimEnd('\\');
        destDir = destDir.TrimEnd('\\');
        try
        {
            Directory.CreateDirectory(destDir);
        }
        catch (IOException)
        {
            // Destination may already exist; best-effort.
        }

        // Relative directory paths present in the source but not the destination.
        // BUG FIX: the prefix checks previously used the culture-sensitive
        // StartsWith(string) overload (CA1310); file-system paths should be
        // compared ordinally, case-insensitively, matching the
        // StringComparer.OrdinalIgnoreCase used for the sets.
        var newDirectories = new HashSet<string>(from d in Directory.EnumerateDirectories(sourceDir, "*", SearchOption.AllDirectories)
                                                 where d.StartsWith(sourceDir, StringComparison.OrdinalIgnoreCase)
                                                 select d.Substring(sourceDir.Length + 1), StringComparer.OrdinalIgnoreCase);
        newDirectories.ExceptWith(from d in Directory.EnumerateDirectories(destDir, "*", SearchOption.AllDirectories)
                                  where d.StartsWith(destDir, StringComparison.OrdinalIgnoreCase)
                                  select d.Substring(destDir.Length + 1));

        // Create shallower directories first so parents exist before children.
        foreach (var newDir in newDirectories.OrderBy(i => i.Length).Select(i => Path.Combine(destDir, i)))
        {
            try
            {
                if (newDir.Length < NativeMethods.MAX_FOLDER_PATH)
                {
                    Directory.CreateDirectory(newDir);
                }
            }
            catch
            {
                Debug.WriteLine("Failed to create directory " + newDir);
            }
        }

        // Relative file paths present in the source but not the destination.
        var newFiles = new HashSet<string>(from f in Directory.EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories)
                                           where f.StartsWith(sourceDir, StringComparison.OrdinalIgnoreCase)
                                           select f.Substring(sourceDir.Length + 1), StringComparer.OrdinalIgnoreCase);
        newFiles.ExceptWith(from f in Directory.EnumerateFiles(destDir, "*", SearchOption.AllDirectories)
                            where f.StartsWith(destDir, StringComparison.OrdinalIgnoreCase)
                            select f.Substring(destDir.Length + 1));

        foreach (var newFile in newFiles)
        {
            var copyFrom = Path.Combine(sourceDir, newFile);
            var copyTo = Path.Combine(destDir, newFile);
            try
            {
                if (copyTo.Length < NativeMethods.MAX_PATH && copyFrom.Length < NativeMethods.MAX_PATH)
                {
                    var copyToDir = Path.GetDirectoryName(copyTo);
                    if (copyToDir.Length < NativeMethods.MAX_FOLDER_PATH)
                    {
                        File.Copy(copyFrom, copyTo);
                        // Strip read-only etc. so later cleanup can delete the copy.
                        File.SetAttributes(copyTo, FileAttributes.Normal);
                    }
                    else
                    {
                        Debug.WriteLine("Failed to copy " + copyFrom + " to " + copyTo + " due to max path limit");
                    }
                }
                else
                {
                    Debug.WriteLine("Failed to copy " + copyFrom + " to " + copyTo + " due to max path limit");
                }
            }
            catch
            {
                Debug.WriteLine("Failed to copy " + copyFrom + " to " + copyTo + " for unknown reason");
            }
        }
    }

    /// <summary>
    /// Takes a backup copy of <paramref name="path"/>; disposing the returned
    /// object restores the file's original contents.
    /// </summary>
    public static IDisposable Backup(string path)
    {
        var backup = Path.GetTempFileName();
        // GetTempFileName creates an empty file; remove it so File.Copy succeeds.
        File.Delete(backup);
        File.Copy(path, backup);
        return new FileRestorer(path, backup);
    }

    /// <summary>
    /// Creates a temporary file containing <paramref name="content"/>; disposing
    /// the returned object deletes it. Retries with fresh random names on
    /// collision; fails the test after 100 attempts.
    /// </summary>
    public static IDisposable TemporaryTextFile(out string path, string content)
    {
        var tempPath = TestData.GetTempPath();
        for (int retries = 100; retries > 0; --retries)
        {
            path = Path.Combine(tempPath, Path.GetRandomFileName());
            try
            {
                using (var stream = new FileStream(path, FileMode.CreateNew, FileAccess.Write, FileShare.None))
                using (var writer = new StreamWriter(stream, Encoding.Default, 128, true))
                {
                    writer.Write(content);
                    return new FileDeleter(path);
                }
            }
            catch (IOException)
            {
                // Name collision or transient failure; try another name.
            }
            catch (UnauthorizedAccessException)
            {
            }
        }

        Assert.Fail("Failed to create temporary file.");
        // Unreachable (Assert.Fail throws), but satisfies definite assignment/return.
        throw new InvalidOperationException();
    }

    /// <summary>Deletes a file on dispose, retrying transient failures.</summary>
    private sealed class FileDeleter : IDisposable
    {
        private readonly string _path;

        public FileDeleter(string path)
        {
            _path = path;
        }

        public void Dispose()
        {
            // Best-effort delete: give up silently after 10 attempts.
            for (int retries = 10; retries > 0; --retries)
            {
                try
                {
                    File.Delete(_path);
                    return;
                }
                catch (IOException)
                {
                }
                catch (UnauthorizedAccessException)
                {
                    // Strip read-only and retry.
                    try
                    {
                        File.SetAttributes(_path, FileAttributes.Normal);
                    }
                    catch (IOException)
                    {
                    }
                    catch (UnauthorizedAccessException)
                    {
                    }
                }
                Thread.Sleep(100);
            }
        }
    }

    /// <summary>Restores a backed-up file on dispose, retrying transient failures.</summary>
    private sealed class FileRestorer : IDisposable
    {
        private readonly string _original, _backup;

        public FileRestorer(string original, string backup)
        {
            _original = original;
            _backup = backup;
        }

        public void Dispose()
        {
            for (int retries = 10; retries > 0; --retries)
            {
                try
                {
                    File.Delete(_original);
                    File.Move(_backup, _original);
                    return;
                }
                catch (IOException)
                {
                }
                catch (UnauthorizedAccessException)
                {
                    // Strip read-only and retry.
                    try
                    {
                        File.SetAttributes(_original, FileAttributes.Normal);
                    }
                    catch (IOException)
                    {
                    }
                    catch (UnauthorizedAccessException)
                    {
                    }
                }
                Thread.Sleep(100);
            }

            // Unlike FileDeleter, failing to restore invalidates the test run.
            Assert.Fail("Failed to restore {0} from {1}", _original, _backup);
        }
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Completion;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Extensions.ContextQuery;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using Microsoft.CodeAnalysis.Completion.Providers;
namespace Microsoft.CodeAnalysis.CSharp.Completion.Providers
{
/// <summary>
/// Completion provider that offers named-argument completions ("name:") inside
/// invocation, element-access, constructor-initializer and object-creation
/// argument lists. Implements IEqualityComparer so parameters can be
/// de-duplicated by name across candidate overloads.
/// </summary>
internal partial class NamedParameterCompletionProvider : CommonCompletionProvider, IEqualityComparer<IParameterSymbol>
{
private const string ColonString = ":";
// Explicitly remove ":" from the set of filter characters because (by default)
// any character that appears in DisplayText gets treated as a filter char.
private static readonly CompletionItemRules s_rules = CompletionItemRules.Default
.WithFilterCharacterRule(CharacterSetModificationRule.Create(CharacterSetModificationKind.Remove, ':'));
internal override bool IsInsertionTrigger(SourceText text, int characterPosition, OptionSet options)
{
return CompletionUtilities.IsTriggerCharacter(text, characterPosition, options);
}
/// <summary>
/// Adds one completion item per parameter that could still be supplied as a
/// named argument at the current caret position.
/// </summary>
public override async Task ProvideCompletionsAsync(CompletionContext context)
{
var document = context.Document;
var position = context.Position;
var cancellationToken = context.CancellationToken;
var syntaxTree = await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
if (syntaxTree.IsInNonUserCode(position, cancellationToken))
{
return;
}
// Named arguments can only start right after "(", "[" or ",".
var token = syntaxTree
.FindTokenOnLeftOfPosition(position, cancellationToken)
.GetPreviousTokenIfTouchingWord(position);
if (!token.IsKind(SyntaxKind.OpenParenToken, SyntaxKind.OpenBracketToken, SyntaxKind.CommaToken))
{
return;
}
var argumentList = token.Parent as BaseArgumentListSyntax;
if (argumentList == null)
{
return;
}
var semanticModel = await document.GetSemanticModelForNodeAsync(argumentList, cancellationToken).ConfigureAwait(false);
var parameterLists = GetParameterLists(semanticModel, position, argumentList.Parent, cancellationToken);
if (parameterLists == null)
{
return;
}
// Keep only overloads compatible with the named arguments already typed,
// then offer parameters not yet specified, de-duplicated by name (this
// class is its own IEqualityComparer).
var existingNamedParameters = GetExistingNamedParameters(argumentList, position);
parameterLists = parameterLists.Where(pl => IsValid(pl, existingNamedParameters));
var unspecifiedParameters = parameterLists.SelectMany(pl => pl)
.Where(p => !existingNamedParameters.Contains(p.Name))
.Distinct(this);
if (!unspecifiedParameters.Any())
{
return;
}
// Where only a named argument is syntactically legal, suppress other
// completion providers.
if (token.IsMandatoryNamedParameterPosition())
{
context.IsExclusive = true;
}
var text = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);
var workspace = document.Project.Solution.Workspace;
foreach (var parameter in unspecifiedParameters)
{
// Note: the filter text does not include the ':'. We want to ensure that if
// the user types the name exactly (up to the colon) that it is selected as an
// exact match.
var escapedName = parameter.Name.ToIdentifierToken().ToString();
context.AddItem(SymbolCompletionItem.Create(
displayText: escapedName + ColonString,
insertionText: null,
symbol: parameter,
contextPosition: token.SpanStart,
filterText: escapedName,
rules: s_rules,
matchPriority: SymbolMatchPriority.PreferNamedArgument));
}
}
public override Task<CompletionDescription> GetDescriptionAsync(Document document, CompletionItem item, CancellationToken cancellationToken)
{
return SymbolCompletionItem.GetDescriptionAsync(item, document, cancellationToken);
}
private bool IsValid(ImmutableArray<IParameterSymbol> parameterList, ISet<string> existingNamedParameters)
{
// A parameter list is valid if it has parameters that match in name all the existing
// named parameters that have been provided.
return existingNamedParameters.Except(parameterList.Select(p => p.Name)).IsEmpty();
}
// Names already used as named arguments before the caret position.
private ISet<string> GetExistingNamedParameters(BaseArgumentListSyntax argumentList, int position)
{
var existingArguments = argumentList.Arguments.Where(a => a.Span.End <= position && a.NameColon != null)
.Select(a => a.NameColon.Name.Identifier.ValueText);
return existingArguments.ToSet();
}
// Dispatches on the node kind that owns the argument list; returns null for
// unsupported nodes (callers treat null as "no completions").
private IEnumerable<ImmutableArray<IParameterSymbol>> GetParameterLists(
SemanticModel semanticModel,
int position,
SyntaxNode invocableNode,
CancellationToken cancellationToken)
{
return invocableNode.TypeSwitch(
(InvocationExpressionSyntax invocationExpression) => GetInvocationExpressionParameterLists(semanticModel, position, invocationExpression, cancellationToken),
(ConstructorInitializerSyntax constructorInitializer) => GetConstructorInitializerParameterLists(semanticModel, position, constructorInitializer, cancellationToken),
(ElementAccessExpressionSyntax elementAccessExpression) => GetElementAccessExpressionParameterLists(semanticModel, position, elementAccessExpression, cancellationToken),
(ObjectCreationExpressionSyntax objectCreationExpression) => GetObjectCreationExpressionParameterLists(semanticModel, position, objectCreationExpression, cancellationToken));
}
// Parameter lists of accessible constructors for "new T(...)".
// Delegate creations are excluded.
private IEnumerable<ImmutableArray<IParameterSymbol>> GetObjectCreationExpressionParameterLists(
SemanticModel semanticModel,
int position,
ObjectCreationExpressionSyntax objectCreationExpression,
CancellationToken cancellationToken)
{
var type = semanticModel.GetTypeInfo(objectCreationExpression, cancellationToken).Type as INamedTypeSymbol;
var within = semanticModel.GetEnclosingNamedType(position, cancellationToken);
if (type != null && within != null && type.TypeKind != TypeKind.Delegate)
{
return type.InstanceConstructors.Where(c => c.IsAccessibleWithin(within))
.Select(c => c.Parameters);
}
return null;
}
// Parameter lists of accessible indexers for "expr[...]".
private IEnumerable<ImmutableArray<IParameterSymbol>> GetElementAccessExpressionParameterLists(
SemanticModel semanticModel,
int position,
ElementAccessExpressionSyntax elementAccessExpression,
CancellationToken cancellationToken)
{
var expressionSymbol = semanticModel.GetSymbolInfo(elementAccessExpression.Expression, cancellationToken).GetAnySymbol();
var expressionType = semanticModel.GetTypeInfo(elementAccessExpression.Expression, cancellationToken).Type;
if (expressionSymbol != null && expressionType != null)
{
var indexers = semanticModel.LookupSymbols(position, expressionType, WellKnownMemberNames.Indexer).OfType<IPropertySymbol>();
var within = semanticModel.GetEnclosingNamedTypeOrAssembly(position, cancellationToken);
if (within != null)
{
return indexers.Where(i => i.IsAccessibleWithin(within, throughTypeOpt: expressionType))
.Select(i => i.Parameters);
}
}
return null;
}
// Parameter lists of accessible constructors for "this(...)" / "base(...)".
private IEnumerable<ImmutableArray<IParameterSymbol>> GetConstructorInitializerParameterLists(
SemanticModel semanticModel,
int position,
ConstructorInitializerSyntax constructorInitializer,
CancellationToken cancellationToken)
{
var within = semanticModel.GetEnclosingNamedType(position, cancellationToken);
if (within != null &&
(within.TypeKind == TypeKind.Struct || within.TypeKind == TypeKind.Class))
{
var type = constructorInitializer.Kind() == SyntaxKind.BaseConstructorInitializer
? within.BaseType
: within;
if (type != null)
{
return type.InstanceConstructors.Where(c => c.IsAccessibleWithin(within))
.Select(c => c.Parameters);
}
}
return null;
}
// Parameter lists for method invocations; falls back to the delegate's
// Invoke parameters when the expression is a delegate value.
private IEnumerable<ImmutableArray<IParameterSymbol>> GetInvocationExpressionParameterLists(
SemanticModel semanticModel,
int position,
InvocationExpressionSyntax invocationExpression,
CancellationToken cancellationToken)
{
var within = semanticModel.GetEnclosingNamedTypeOrAssembly(position, cancellationToken);
if (within != null)
{
var methodGroup = semanticModel.GetMemberGroup(invocationExpression.Expression, cancellationToken).OfType<IMethodSymbol>();
var expressionType = semanticModel.GetTypeInfo(invocationExpression.Expression, cancellationToken).Type as INamedTypeSymbol;
if (methodGroup.Any())
{
return methodGroup.Where(m => m.IsAccessibleWithin(within))
.Select(m => m.Parameters);
}
// NOTE(review): expressionType may be null here; this relies on
// IsDelegateType() being a null-tolerant extension — confirm.
else if (expressionType.IsDelegateType())
{
var delegateType = expressionType;
return SpecializedCollections.SingletonEnumerable(delegateType.DelegateInvokeMethod.Parameters);
}
}
return null;
}
// Parameters are considered equal when their names match (used by Distinct above).
bool IEqualityComparer<IParameterSymbol>.Equals(IParameterSymbol x, IParameterSymbol y)
{
return x.Name.Equals(y.Name);
}
int IEqualityComparer<IParameterSymbol>.GetHashCode(IParameterSymbol obj)
{
return obj.Name.GetHashCode();
}
// On commit, insert only the parameter name: strip the trailing ":" that is
// part of the display text.
protected override Task<TextChange?> GetTextChangeAsync(CompletionItem selectedItem, char? ch, CancellationToken cancellationToken)
{
return Task.FromResult<TextChange?>(new TextChange(
selectedItem.Span,
selectedItem.DisplayText.Substring(0, selectedItem.DisplayText.Length - ColonString.Length)));
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using Newtonsoft.Json.Linq;
using NuGet.Client.Installation;
using NuGet.Client.Resolution;
using NuGet.Versioning;
using NuGet.VisualStudio;
using NuGetConsole;
using Resx = NuGet.Client.VisualStudio.UI.Resources;
namespace NuGet.Client.VisualStudio.UI
{
/// <summary>
/// Interaction logic for PackageManagerControl.xaml
/// </summary>
public partial class PackageManagerControl : UserControl
{
private const int PageSize = 10;
// Copied from file Constants.cs in NuGet.Core:
// This is temporary until we fix the gallery to have proper first class support for this.
// The magic unpublished date is 1900-01-01T00:00:00
public static readonly DateTimeOffset Unpublished = new DateTimeOffset(1900, 1, 1, 0, 0, 0, TimeSpan.FromHours(-8));
private bool _initialized;
// used to prevent starting new search when we update the package sources
// list in response to PackageSourcesChanged event.
private bool _dontStartNewSearch;
private int _busyCount;
public PackageManagerModel Model { get; private set; }
public SourceRepositoryManager Sources
{
get
{
return Model.Sources;
}
}
public InstallationTarget Target
{
get
{
return Model.Target;
}
}
private IConsole _outputConsole;
internal IUserInterfaceService UI { get; private set; }
private PackageRestoreBar _restoreBar;
private IPackageRestoreManager _packageRestoreManager;
// Builds the package manager UI: populates the filter combo, shows either the
// project-level or solution-level detail pane, wires the restore bar, and
// subscribes to package-source changes. Initialization order matters here
// (InitializeComponent before any control access; _initialized last).
public PackageManagerControl(PackageManagerModel model, IUserInterfaceService ui)
{
UI = ui;
Model = model;
InitializeComponent();
_searchControl.Text = model.SearchText;
_filter.Items.Add(Resx.Resources.Filter_All);
_filter.Items.Add(Resx.Resources.Filter_Installed);
_filter.Items.Add(Resx.Resources.Filter_UpdateAvailable);
// TODO: Relocate to v3 API.
_packageRestoreManager = ServiceLocator.GetInstance<IPackageRestoreManager>();
AddRestoreBar();
// Both detail panes start hidden; the relevant one is shown below.
_packageDetail.Visibility = System.Windows.Visibility.Collapsed;
_packageDetail.Control = this;
_packageSolutionDetail.Visibility = System.Windows.Visibility.Collapsed;
_packageSolutionDetail.Control = this;
_busyCount = 0;
if (Target.IsSolution)
{
_packageSolutionDetail.Visibility = System.Windows.Visibility.Visible;
}
else
{
_packageDetail.Visibility = System.Windows.Visibility.Visible;
}
var outputConsoleProvider = ServiceLocator.GetInstance<IOutputConsoleProvider>();
_outputConsole = outputConsoleProvider.CreateOutputConsole(requirePowerShellHost: false);
InitSourceRepoList();
this.Unloaded += PackageManagerControl_Unloaded;
_initialized = true;
Model.Sources.PackageSourcesChanged += Sources_PackageSourcesChanged;
}
// Set the PackageStatus property of the given package.
/// <summary>
/// Sets <c>package.Status</c> based on whether (and at what version) the
/// package is installed anywhere in the target.
/// BUG FIX: this static method previously referenced instance members
/// (Target, _packageDetail) and undefined identifiers (Id, selectedPackage)
/// in a trailing block that could not compile — that misplaced detail-pane
/// code has been removed, and the installed-package lookup now uses
/// package.Id.
/// </summary>
private static void SetPackageStatus(
    UiSearchResultPackage package,
    InstallationTarget target)
{
    // Latest non-prerelease version offered by the source.
    var latestStableVersion = package.AllVersions
        .Where(p => !p.Version.IsPrerelease)
        .Max(p => p.Version);

    // Get the minimum version installed in any target project/solution.
    var minimumInstalledPackage = target.GetAllTargetsRecursively()
        .Select(t => t.InstalledPackages.GetInstalledPackage(package.Id))
        .Where(p => p != null)
        .OrderBy(r => r.Identity.Version)
        .FirstOrDefault();

    PackageStatus status;
    if (minimumInstalledPackage != null)
    {
        // Installed somewhere: flag an update when the lowest installed
        // version lags the latest stable version.
        status = minimumInstalledPackage.Identity.Version < latestStableVersion
            ? PackageStatus.UpdateAvailable
            : PackageStatus.Installed;
    }
    else
    {
        status = PackageStatus.NotInstalled;
    }

    package.Status = status;
}
/// <summary>
/// Rebuilds the package-source drop-down when the configured sources change,
/// preserving the current selection when possible.
/// </summary>
private void Sources_PackageSourcesChanged(object sender, EventArgs e)
{
    // Suppress the search that _sourceRepoList_SelectionChanged() would
    // otherwise start while we mutate the list; this handler starts the
    // search itself when the active source actually changed.
    _dontStartNewSearch = true;
    try
    {
        var previousSource = _sourceRepoList.SelectedItem as PackageSource;
        var currentSources = new List<PackageSource>(Sources.AvailableSources);

        // Replace the drop-down contents with the new source list.
        _sourceRepoList.Items.Clear();
        foreach (var source in currentSources)
        {
            _sourceRepoList.Items.Add(source);
        }

        if (previousSource != null && currentSources.Contains(previousSource))
        {
            // Active source unchanged — restore the selection without
            // triggering a new search.
            _sourceRepoList.SelectedItem = previousSource;
        }
        else
        {
            // Active source changed: fall back to the first available
            // source (or none) and search explicitly.
            _sourceRepoList.SelectedItem = currentSources.Count > 0
                ? currentSources[0]
                : null;
            SearchPackageInActivePackageSource();
        }
    }
    finally
    {
        _dontStartNewSearch = false;
    }
}
// Detach the restore bar (and its event subscription) when the control leaves
// the visual tree, so the restore manager does not keep us alive.
private void PackageManagerControl_Unloaded(object sender, RoutedEventArgs e)
{
RemoveRestoreBar();
}
// Creates the package-restore banner, adds it to the root panel, and
// subscribes to missing-package notifications.
private void AddRestoreBar()
{
_restoreBar = new PackageRestoreBar(_packageRestoreManager);
_root.Children.Add(_restoreBar);
_packageRestoreManager.PackagesMissingStatusChanged += packageRestoreManager_PackagesMissingStatusChanged;
}
// Tears down the restore banner and unsubscribes from the restore manager
// (mirror of AddRestoreBar).
private void RemoveRestoreBar()
{
_restoreBar.CleanUp();
_packageRestoreManager.PackagesMissingStatusChanged -= packageRestoreManager_PackagesMissingStatusChanged;
}
/// <summary>
/// Reacts to package-restore status changes; intended to refresh the UI once
/// missing packages have been restored.
/// </summary>
private void packageRestoreManager_PackagesMissingStatusChanged(object sender, PackagesMissingStatusEventArgs e)
{
    // PackageRestoreManager fires this event even when solution is closed.
    // Don't do anything if solution is closed.
    if (!Target.IsAvailable)
    {
        return;
    }

    // Packages still missing — nothing to refresh yet.
    if (e.PackagesMissing)
    {
        return;
    }

    // Packages are restored. Update the UI.
    if (Target.IsSolution)
    {
        // TODO: update UI here
    }
    else
    {
        // TODO: update UI here
    }
}
/// <summary>
/// Sets the header label and fills the package-source drop-down, pre-selecting
/// the active repository when one is configured.
/// </summary>
private void InitSourceRepoList()
{
    _label.Text = string.Format(
        CultureInfo.CurrentCulture,
        Resx.Resources.Label_PackageManager,
        Target.Name);

    // Populate the source drop-down from the known sources.
    _sourceRepoList.Items.Clear();
    foreach (var source in Sources.AvailableSources)
    {
        _sourceRepoList.Items.Add(source);
    }

    var activeRepository = Sources.ActiveRepository;
    if (activeRepository != null)
    {
        _sourceRepoList.SelectedItem = activeRepository.Source;
    }
}
// Reference-counted busy indicator: nested/overlapping operations each call
// SetBusy(true) and later SetBusy(false); the busy overlay stays visible
// (and the control disabled) until the count returns to zero.
private void SetBusy(bool busy)
{
    if (busy)
    {
        _busyCount++;
        if (_busyCount > 0)
        {
            _busyControl.Visibility = System.Windows.Visibility.Visible;
            this.IsEnabled = false;
        }
    }
    else
    {
        // Clamp at zero: in the original code an unbalanced SetBusy(false)
        // drove the counter negative, so a later SetBusy(true) moved it to 0
        // and failed the "> 0" check — the busy overlay never appeared.
        if (_busyCount > 0)
        {
            _busyCount--;
        }
        if (_busyCount <= 0)
        {
            _busyControl.Visibility = System.Windows.Visibility.Collapsed;
            this.IsEnabled = true;
        }
    }
}
// Filtering options captured at search time and applied by PackageLoader.
private class PackageLoaderOption
{
// When false, prerelease versions are excluded from search results.
public bool IncludePrerelease { get; set; }
// When true, only packages whose status is UpdateAvailable are returned.
public bool ShowUpdatesAvailable { get; set; }
}
// Loads pages of search results on demand for the package list. Wraps a
// paging delegate that returns raw JSON packages, converts them into
// UiSearchResultPackage / UiDetailedPackage view models, and applies the
// prerelease and updates-only filters from PackageLoaderOption.
private class PackageLoader : ILoader
{
// where to get the package list
private Func<int, CancellationToken, Task<IEnumerable<JObject>>> _loader;
// Installation target used to compute each package's install/update status.
private InstallationTarget _target;
// Filtering options captured when the search was started.
private PackageLoaderOption _option;
public PackageLoader(
Func<int, CancellationToken, Task<IEnumerable<JObject>>> loader,
InstallationTarget target,
PackageLoaderOption option,
string searchText)
{
_loader = loader;
_target = target;
_option = option;
// "Loading" while browsing; "Searching for <text>" when the user typed a query.
LoadingMessage = string.IsNullOrWhiteSpace(searchText) ?
Resx.Resources.Text_Loading :
string.Format(
CultureInfo.CurrentCulture,
Resx.Resources.Text_Searching,
searchText);
}
// Message displayed by the list control while a page is being fetched.
public string LoadingMessage
{
get;
private set;
}
// Fetches one page on a thread-pool thread and materializes it to a list.
// NOTE(review): this blocks on r1.Result inside Task.Factory.StartNew
// (sync-over-async), occupying a thread-pool thread while the inner task
// runs. Also note the 'loader' parameter is ignored in favor of the
// _loader field — presumably intentional, but worth confirming.
private Task<List<JObject>> InternalLoadItems(
int startIndex,
CancellationToken ct,
Func<int, CancellationToken, Task<IEnumerable<JObject>>> loader)
{
return Task.Factory.StartNew(() =>
{
var r1 = _loader(startIndex, ct);
return r1.Result.ToList();
});
}
// Projects a search-result package into a detailed package carrying only
// the fields available on the search result (Id, Version, Summary).
private UiDetailedPackage ToDetailedPackage(UiSearchResultPackage package)
{
var detailedPackage = new UiDetailedPackage();
detailedPackage.Id = package.Id;
detailedPackage.Version = package.Version;
detailedPackage.Summary = package.Summary;
return detailedPackage;
}
// Loads one page of results starting at startIndex, converting raw JSON
// into view models and applying the prerelease / updates-only filters.
// Throws OperationCanceledException when ct is signaled.
public async Task<LoadResult> LoadItems(int startIndex, CancellationToken ct)
{
var results = await InternalLoadItems(startIndex, ct, _loader);
List<UiSearchResultPackage> packages = new List<UiSearchResultPackage>();
foreach (var package in results)
{
ct.ThrowIfCancellationRequested();
// As a debugging aide, I am intentionally NOT using an object initializer -anurse
var searchResultPackage = new UiSearchResultPackage();
searchResultPackage.Id = package.Value<string>(Properties.PackageId);
searchResultPackage.Version = NuGetVersion.Parse(package.Value<string>(Properties.LatestVersion));
if (searchResultPackage.Version.IsPrerelease && !_option.IncludePrerelease)
{
// don't include prerelease version if includePrerelease is false
continue;
}
searchResultPackage.IconUrl = GetUri(package, Properties.IconUrl);
var allVersions = LoadVersions(
package.Value<JArray>(Properties.Packages),
searchResultPackage.Version);
if (!allVersions.Select(v => v.Version).Contains(searchResultPackage.Version))
{
// make sure allVersions contains searchResultPackage itself.
allVersions.Add(ToDetailedPackage(searchResultPackage));
}
searchResultPackage.AllVersions = allVersions;
SetPackageStatus(searchResultPackage, _target);
if (_option.ShowUpdatesAvailable &&
searchResultPackage.Status != PackageStatus.UpdateAvailable)
{
// "Updates available" view: drop anything without a pending update.
continue;
}
searchResultPackage.Summary = package.Value<string>(Properties.Summary);
if (string.IsNullOrWhiteSpace(searchResultPackage.Summary))
{
// summary is empty. Use its description instead.
var self = searchResultPackage.AllVersions.FirstOrDefault(p => p.Version == searchResultPackage.Version);
if (self != null)
{
searchResultPackage.Summary = self.Description;
}
}
packages.Add(searchResultPackage);
}
ct.ThrowIfCancellationRequested();
return new LoadResult()
{
Items = packages,
// A full page implies more results may follow; note that client-side
// filtering above can make this heuristic report false on a page that
// was full server-side.
HasMoreItems = packages.Count == PageSize
};
}
// Get all versions of the package
private List<UiDetailedPackage> LoadVersions(JArray versions, NuGetVersion searchResultVersion)
{
var retValue = new List<UiDetailedPackage>();
// If repo is AggregateRepository, the package duplicates can be returned by
// FindPackagesById(), so Distinct is needed here to remove the duplicates.
// NOTE(review): despite the comment above, no Distinct is applied in this
// loop — duplicates may survive; confirm whether that is intended.
foreach (var token in versions)
{
Debug.Assert(token.Type == JTokenType.Object);
JObject version = (JObject)token;
var detailedPackage = new UiDetailedPackage();
detailedPackage.Id = version.Value<string>(Properties.PackageId);
detailedPackage.Version = NuGetVersion.Parse(version.Value<string>(Properties.Version));
if (detailedPackage.Version.IsPrerelease &&
!_option.IncludePrerelease &&
detailedPackage.Version != searchResultVersion)
{
// don't include prerelease version if includePrerelease is false
continue;
}
string publishedStr = version.Value<string>(Properties.Published);
if (!String.IsNullOrEmpty(publishedStr))
{
// NOTE(review): DateTime.Parse uses the current culture here;
// if the feed emits a culture-invariant timestamp this should
// probably pass CultureInfo.InvariantCulture — confirm.
detailedPackage.Published = DateTime.Parse(publishedStr);
if (detailedPackage.Published <= Unpublished &&
detailedPackage.Version != searchResultVersion)
{
// don't include unlisted package
continue;
}
}
detailedPackage.Summary = version.Value<string>(Properties.Summary);
detailedPackage.Description = version.Value<string>(Properties.Description);
detailedPackage.Authors = version.Value<string>(Properties.Authors);
detailedPackage.Owners = version.Value<string>(Properties.Owners);
detailedPackage.IconUrl = GetUri(version, Properties.IconUrl);
detailedPackage.LicenseUrl = GetUri(version, Properties.LicenseUrl);
detailedPackage.ProjectUrl = GetUri(version, Properties.ProjectUrl);
detailedPackage.Tags = String.Join(" ", (version.Value<JArray>(Properties.Tags) ?? Enumerable.Empty<JToken>()).Select(t => t.ToString()));
detailedPackage.DownloadCount = version.Value<int>(Properties.DownloadCount);
detailedPackage.DependencySets = (version.Value<JArray>(Properties.DependencyGroups) ?? Enumerable.Empty<JToken>()).Select(obj => LoadDependencySet((JObject)obj));
detailedPackage.HasDependencies = detailedPackage.DependencySets.Any(
set => set.Dependencies != null && set.Dependencies.Count > 0);
retValue.Add(detailedPackage);
}
return retValue;
}
// Reads a property from the JSON object as a Uri; returns null when the
// property is absent or empty.
private Uri GetUri(JObject json, string property)
{
if (json[property] == null)
{
return null;
}
string str = json[property].ToString();
if (String.IsNullOrEmpty(str))
{
return null;
}
return new Uri(str);
}
// Converts one JSON dependency group into a UiPackageDependencySet,
// parsing its (possibly shortened) target framework name.
private UiPackageDependencySet LoadDependencySet(JObject set)
{
var fxName = set.Value<string>(Properties.TargetFramework);
return new UiPackageDependencySet(
String.IsNullOrEmpty(fxName) ? null : FrameworkNameHelper.ParsePossiblyShortenedFrameworkName(fxName),
(set.Value<JArray>(Properties.Dependencies) ?? Enumerable.Empty<JToken>()).Select(obj => LoadDependency((JObject)obj)));
}
// Converts one JSON dependency entry into a UiPackageDependency; a missing
// version range yields a null range.
private UiPackageDependency LoadDependency(JObject dep)
{
var ver = dep.Value<string>(Properties.Range);
return new UiPackageDependency(
dep.Value<string>(Properties.PackageId),
String.IsNullOrEmpty(ver) ? null : VersionRange.Parse(ver));
}
// Joins a JSON array into a comma-separated string; null for empty/absent.
// NOTE(review): not referenced anywhere in this class — candidate dead code.
private string StringCollectionToString(JArray v)
{
if (v == null)
{
return null;
}
string retValue = String.Join(", ", v.Select(t => t.ToString()));
if (retValue == String.Empty)
{
return null;
}
return retValue;
}
}
// True when the filter combo box is set to the "Installed" view.
private bool ShowInstalled
{
    get
    {
        var selectedFilter = _filter.SelectedItem;
        return Resx.Resources.Filter_Installed.Equals(selectedFilter);
    }
}
// True when the filter combo box is set to the "Updates available" view.
private bool ShowUpdatesAvailable
{
    get
    {
        var selectedFilter = _filter.SelectedItem;
        return Resx.Resources.Filter_UpdateAvailable.Equals(selectedFilter);
    }
}
// Whether prerelease packages should be included in search results.
// IsChecked is a bool?; an indeterminate (null) state counts as unchecked.
public bool IncludePrerelease
{
    get { return _checkboxPrerelease.IsChecked.GetValueOrDefault(); }
}
// Creates a SourceRepository for the package source currently selected in
// the source combo box; returns null when nothing (or a non-PackageSource
// item) is selected.
internal SourceRepository CreateActiveRepository()
{
    var selectedSource = _sourceRepoList.SelectedItem as PackageSource;
    return selectedSource == null
        ? null
        : Sources.CreateSourceRepository(selectedSource);
}
// Starts a (new) package search against the currently selected package
// source, respecting the current filter (Installed / Updates available /
// Browse) and the prerelease checkbox, and hands the resulting loader to
// the package list, which fetches pages lazily.
private void SearchPackageInActivePackageSource()
{
var searchText = _searchControl.Text;
var supportedFrameworks = Target.GetSupportedFrameworks();
// search online
var activeSource = _sourceRepoList.SelectedItem as PackageSource;
var sourceRepository = Sources.CreateSourceRepository(activeSource);
PackageLoaderOption option = new PackageLoaderOption()
{
IncludePrerelease = this.IncludePrerelease,
ShowUpdatesAvailable = this.ShowUpdatesAvailable
};
if (ShowInstalled || ShowUpdatesAvailable)
{
// search installed packages
var loader = new PackageLoader(
(startIndex, ct) =>
Target.SearchInstalled(
sourceRepository,
searchText,
startIndex,
PageSize,
ct),
Target,
option,
searchText);
_packageList.Loader = loader;
}
else
{
// search in active package source
if (activeSource == null)
{
// No source selected: install a loader that yields no results so the
// list still renders (empty) instead of failing.
var loader = new PackageLoader(
(startIndex, ct) =>
{
return Task.Factory.StartNew(() =>
{
return Enumerable.Empty<JObject>();
});
},
Target,
option,
searchText);
_packageList.Loader = loader;
}
else
{
var loader = new PackageLoader(
(startIndex, ct) =>
sourceRepository.Search(
searchText,
new SearchFilter()
{
SupportedFrameworks = supportedFrameworks,
IncludePrerelease = option.IncludePrerelease
},
startIndex,
PageSize,
ct),
Target,
option,
searchText);
_packageList.Loader = loader;
}
}
}
// Opens the NuGet options dialog (package source configuration).
private void SettingsButtonClick(object sender, RoutedEventArgs e) => UI.LaunchNuGetOptionsDialog();
// Keep the detail pane in sync with the package-list selection.
private void PackageList_SelectionChanged(object sender, SelectionChangedEventArgs e) => UpdateDetailPane();
/// <summary>
/// Refreshes the detail pane so it reflects the package currently selected
/// in the package list; clears both detail views when nothing is selected.
/// </summary>
private void UpdateDetailPane()
{
    var selected = _packageList.SelectedItem as UiSearchResultPackage;
    if (selected == null)
    {
        _packageDetail.DataContext = null;
        _packageSolutionDetail.DataContext = null;
        return;
    }

    if (Target.IsSolution)
    {
        // Solution-level target: bind the solution detail model.
        _packageSolutionDetail.DataContext = new PackageSolutionDetailControlModel(selected, (VsSolution)Target);
    }
    else
    {
        // Project-level target: look up the installed version (if any) and
        // bind the single-project detail model.
        var installedPackage = Target.InstalledPackages.GetInstalledPackage(selected.Id);
        var installedVersion = installedPackage == null ? null : installedPackage.Identity.Version;
        _packageDetail.DataContext = new PackageDetailControlModel(selected, installedVersion);
    }
}
// The user picked a different package source: persist it as the active
// source and re-run the search against it.
private void _sourceRepoList_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    // Suppressed while the source list is being rebuilt programmatically.
    if (_dontStartNewSearch)
    {
        return;
    }

    var selectedSource = _sourceRepoList.SelectedItem as PackageSource;
    if (selectedSource != null)
    {
        Sources.ChangeActiveSource(selectedSource);
    }
    SearchPackageInActivePackageSource();
}
// Filter view changed (Browse / Installed / Updates available): re-run the
// search, unless the control is still initializing.
private void _filter_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    if (!_initialized)
    {
        return;
    }

    SearchPackageInActivePackageSource();
}
// Refreshes install/update status after an action completes. The
// Installed / Updates views filter on status, so they need a full reload;
// the Browse view keeps its items and only their status badges change.
internal void UpdatePackageStatus()
{
    if (ShowInstalled || ShowUpdatesAvailable)
    {
        _packageList.Reload();
        return;
    }

    // Only update the status of items already in the list.
    foreach (var package in _packageList.Items.OfType<UiSearchResultPackage>())
    {
        SetPackageStatus(package, Target);
    }
}
// Prompts the user to accept licenses for any to-be-installed packages that
// require license acceptance. Returns true when no prompt was needed or the
// user accepted; false when the user declined (callers abort the operation).
public bool ShowLicenseAgreement(IEnumerable<PackageAction> operations)
{
var licensePackages = operations.Where(op =>
op.ActionType == PackageActionType.Install &&
op.Package.Value<bool>("requireLicenseAcceptance"));
// display license window if necessary
if (licensePackages.Any())
{
// Hacky distinct without writing a custom comparer
var licenseModels = licensePackages
.GroupBy(a => Tuple.Create(a.Package["id"], a.Package["version"]))
.Select(g =>
{
// dynamic access to the package JSON: licenseUrl/id/authors fields.
dynamic p = g.First().Package;
string licenseUrl = (string)p.licenseUrl;
string id = (string)p.id;
string authors = (string)p.authors;
return new PackageLicenseInfo(
id,
licenseUrl == null ? null : new Uri(licenseUrl),
authors);
})
.Where(pli => pli.LicenseUrl != null); // Shouldn't get nulls, but just in case
bool accepted = this.UI.PromptForLicenseAcceptance(licenseModels);
if (!accepted)
{
return false;
}
}
return true;
}
// Shows a modal preview of the resolved actions before they are executed.
private void PreviewActions(IEnumerable<PackageAction> actions)
{
    var previewWindow = new PreviewWindow
    {
        DataContext = new PreviewWindowModel(actions, Target)
    };
    previewWindow.ShowModal();
}
// preview user selected action
// Resolves the actions for the user's selection and shows them in the
// preview dialog without executing anything. Any resolution failure is
// surfaced in an error dialog.
// NOTE(review): async void is acceptable here only because this is a
// top-level UI entry point and the whole body is wrapped in try/catch.
internal async void Preview(IDetailControl detailControl)
{
SetBusy(true);
try
{
_outputConsole.Clear();
var actions = await detailControl.ResolveActionsAsync();
PreviewActions(actions);
}
catch (Exception ex)
{
// Surface resolution errors to the user rather than crashing the UI.
var errorDialog = new ErrorReportingDialog(
ex.Message,
ex.ToString());
errorDialog.ShowModal();
}
finally
{
// Always release the busy indicator, even on failure.
SetBusy(false);
}
}
// perform the user selected action
// Resolves and executes the actions for the user's selection: prompts for
// license acceptance first, streams progress into a modal-less progress
// dialog, then refreshes package status and the detail control.
// NOTE(review): async void — tolerable only as a top-level UI entry point
// with the body fully wrapped in try/catch/finally.
internal async void PerformAction(IDetailControl detailControl)
{
SetBusy(true);
_outputConsole.Clear();
var progressDialog = new ProgressDialog(_outputConsole);
progressDialog.Owner = Window.GetWindow(this);
progressDialog.WindowStartupLocation = WindowStartupLocation.CenterOwner;
try
{
var actions = await detailControl.ResolveActionsAsync();
// show license agreeement
bool acceptLicense = ShowLicenseAgreement(actions);
if (!acceptLicense)
{
// User declined a license; the finally block still closes the
// (not-yet-shown) dialog and clears the busy state.
return;
}
// Create the executor and execute the actions
progressDialog.FileConflictAction = detailControl.FileConflictAction;
progressDialog.Show();
var executor = new ActionExecutor();
await executor.ExecuteActionsAsync(actions, logger: progressDialog, cancelToken: CancellationToken.None);
// Reflect the new installed/update state in the list and detail pane.
UpdatePackageStatus();
detailControl.Refresh();
}
catch (Exception ex)
{
// Surface execution errors to the user rather than crashing the UI.
var errorDialog = new ErrorReportingDialog(
ex.Message,
ex.ToString());
errorDialog.ShowModal();
}
finally
{
progressDialog.RequestToClose();
SetBusy(false);
}
}
// A search was requested from the search box; ignore requests raised
// before the control finishes initializing.
private void _searchControl_SearchStart(object sender, EventArgs e)
{
    if (_initialized)
    {
        SearchPackageInActivePackageSource();
    }
}
// The prerelease checkbox was toggled: re-run the search with the new
// setting, unless the control is still initializing.
private void _checkboxPrerelease_CheckChanged(object sender, RoutedEventArgs e)
{
    if (_initialized)
    {
        SearchPackageInActivePackageSource();
    }
}
}
}
| |
// Copyright 2014 The Rector & Visitors of the University of Virginia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using SensusService.Anonymization;
using SensusService.Anonymization.Anonymizers;
using SensusService.Probes.User.ProbeTriggerProperties;
using System.Collections;
using System.Linq;
using System.Collections.Generic;
using SensusUI.Inputs;
namespace SensusService.Probes.User
{
/// <summary>
/// Datum produced when a user responds to a scripted survey input. Captures
/// the response itself plus the identifiers of the script/group/input/run
/// that produced it, optional location context, and per-input completion
/// records.
/// </summary>
public class ScriptDatum : Datum
{
    private string _scriptId;
    private string _scriptName;
    private string _groupId;
    private string _inputId;
    private string _runId;
    private object _response;
    private string _triggerDatumId;
    private double? _latitude;
    private double? _longitude;
    private DateTimeOffset _presentationTimestamp;
    private DateTimeOffset? _locationTimestamp;
    private List<InputCompletionRecord> _completionRecords;

    public string ScriptId
    {
        get { return _scriptId; }
        set { _scriptId = value; }
    }

    public string ScriptName
    {
        get { return _scriptName; }
        set { _scriptName = value; }
    }

    public string GroupId
    {
        get { return _groupId; }
        set { _groupId = value; }
    }

    public string InputId
    {
        get { return _inputId; }
        set { _inputId = value; }
    }

    public string RunId
    {
        get { return _runId; }
        set { _runId = value; }
    }

    public object Response
    {
        get { return _response; }
        set { _response = value; }
    }

    [Anonymizable("Triggering Datum ID:", typeof(StringHashAnonymizer), false)]
    public string TriggerDatumId
    {
        get { return _triggerDatumId; }
        set { _triggerDatumId = value; }
    }

    [DoubleProbeTriggerProperty]
    [Anonymizable(null, new Type[] { typeof(DoubleRoundingTenthsAnonymizer), typeof(DoubleRoundingHundredthsAnonymizer), typeof(DoubleRoundingThousandthsAnonymizer) }, -1)]
    public double? Latitude
    {
        get { return _latitude; }
        set { _latitude = value; }
    }

    [DoubleProbeTriggerProperty]
    [Anonymizable(null, new Type[] { typeof(DoubleRoundingTenthsAnonymizer), typeof(DoubleRoundingHundredthsAnonymizer), typeof(DoubleRoundingThousandthsAnonymizer) }, -1)]
    public double? Longitude
    {
        get { return _longitude; }
        set { _longitude = value; }
    }

    public DateTimeOffset PresentationTimestamp
    {
        get { return _presentationTimestamp; }
        set { _presentationTimestamp = value; }
    }

    public DateTimeOffset? LocationTimestamp
    {
        get { return _locationTimestamp; }
        set { _locationTimestamp = value; }
    }

    public List<InputCompletionRecord> CompletionRecords
    {
        get { return _completionRecords; }
        // need setter in order for anonymizer to pick up the property (only includes writable properties)
        set { _completionRecords = value; }
    }

    // Short human-readable summary of the response for display in lists.
    public override string DisplayDetail
    {
        get
        {
            if (_response == null)
                return "No response.";

            var responseList = _response as IList;
            if (responseList != null)
                return responseList.Count + " response" + (responseList.Count == 1 ? "" : "s") + ".";

            return _response.ToString();
        }
    }

    /// <summary>
    /// For JSON deserialization.
    /// </summary>
    private ScriptDatum()
    {
        _completionRecords = new List<InputCompletionRecord>();
    }

    public ScriptDatum(DateTimeOffset timestamp, string scriptId, string scriptName, string groupId, string inputId, string runId, object response, string triggerDatumId, double? latitude, double? longitude, DateTimeOffset presentationTimestamp, DateTimeOffset? locationTimestamp, List<InputCompletionRecord> completionRecords)
        : base(timestamp)
    {
        _scriptId = scriptId;
        _scriptName = scriptName;
        _groupId = groupId;
        _inputId = inputId;
        _runId = runId;
        _response = response;
        // Normalize a missing trigger datum id to the empty string.
        _triggerDatumId = triggerDatumId ?? "";
        _latitude = latitude;
        _longitude = longitude;
        _presentationTimestamp = presentationTimestamp;
        _locationTimestamp = locationTimestamp;
        _completionRecords = completionRecords;
    }

    public override string ToString()
    {
        return string.Join(Environment.NewLine, new string[]
        {
            base.ToString(),
            "Script: " + _scriptId,
            "Group: " + _groupId,
            "Input: " + _inputId,
            "Run: " + _runId,
            "Response: " + _response,
            "Latitude: " + _latitude,
            "Longitude: " + _longitude,
            "Presentation Timestamp: " + _presentationTimestamp,
            "Location Timestamp: " + _locationTimestamp
        });
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Gate.Utils;
using Owin;
namespace Gate
{
// A helper class for creating, modifying, or consuming request data in an Environment dictionary.
internal class Request
{
IDictionary<string, object> environment;
static readonly char[] CommaSemicolon = new[] { ',', ';' };
public Request()
: this(new Dictionary<string, object>())
{
Environment.Set(OwinConstants.RequestHeaders, Gate.Headers.New());
Environment.Set(OwinConstants.ResponseHeaders, Gate.Headers.New());
}
public Request(IDictionary<string, object> environment)
{
this.environment = environment;
}
public IDictionary<string, object> Environment
{
get { return environment; }
set { environment = value; }
}
public IDictionary<string, string[]> Headers
{
get { return Environment.Get<IDictionary<string, string[]>>(OwinConstants.RequestHeaders); }
set { Environment.Set<IDictionary<string, string[]>>(OwinConstants.RequestHeaders, value); }
}
public Stream Body
{
get { return Environment.Get<Stream>(OwinConstants.RequestBody); }
set { Environment.Set<Stream>(OwinConstants.RequestBody, value); }
}
public Task Completed
{
get { return Environment.Get<Task>(OwinConstants.CallCompleted); }
set { Environment.Set<Task>(OwinConstants.CallCompleted, value); }
}
/// <summary>
/// "owin.Version" The string "1.0" indicating OWIN version 1.0.
/// </summary>
public string Version
{
get { return Environment.Get<string>(OwinConstants.Version); }
set { Environment.Set<string>(OwinConstants.Version, value); }
}
/// <summary>
/// "owin.RequestProtocol" A string containing the protocol name and version (e.g. "HTTP/1.0" or "HTTP/1.1").
/// </summary>
public string Protocol
{
get { return Environment.Get<string>(OwinConstants.RequestProtocol); }
set { Environment.Set<string>(OwinConstants.RequestProtocol, value); }
}
/// <summary>
/// "owin.RequestMethod" A string containing the HTTP request method of the request (e.g., "GET", "POST").
/// </summary>
public string Method
{
get { return Environment.Get<string>(OwinConstants.RequestMethod); }
set { Environment.Set<string>(OwinConstants.RequestMethod, value); }
}
/// <summary>
/// "owin.RequestScheme" A string containing the URI scheme used for the request (e.g., "http", "https").
/// </summary>
public string Scheme
{
get { return Environment.Get<string>(OwinConstants.RequestScheme); }
set { Environment.Set<string>(OwinConstants.RequestScheme, value); }
}
/// <summary>
/// "owin.RequestPathBase" A string containing the portion of the request path corresponding to the "root" of the application delegate. The value may be an empty string.
/// </summary>
public string PathBase
{
get { return Environment.Get<string>(OwinConstants.RequestPathBase); }
set { Environment.Set<string>(OwinConstants.RequestPathBase, value); }
}
/// <summary>
/// "owin.RequestPath" A string containing the request path. The path must be relative to the "root" of the application delegate.
/// </summary>
public string Path
{
get { return Environment.Get<string>(OwinConstants.RequestPath); }
set { Environment.Set<string>(OwinConstants.RequestPath, value); }
}
/// <summary>
/// "owin.QueryString" A string containing the query string component of the HTTP request URI (e.g., "foo=bar&baz=quux"). The value may be an empty string.
/// </summary>
public string QueryString
{
get { return Environment.Get<string>(OwinConstants.RequestQueryString); }
set { Environment.Set<string>(OwinConstants.RequestQueryString, value); }
}
/// <summary>
/// "host.TraceOutput" A TextWriter that directs trace or logger output to an appropriate place for the host
/// </summary>
public TextWriter TraceOutput
{
get { return Environment.Get<TextWriter>(OwinConstants.TraceOutput); }
set { Environment.Set<TextWriter>(OwinConstants.TraceOutput, value); }
}
public IDictionary<string, string> Query
{
get
{
var text = QueryString;
if (Environment.Get<string>("Gate.Request.Query#text") != text ||
Environment.Get<IDictionary<string, string>>("Gate.Request.Query") == null)
{
Environment.Set<string>("Gate.Request.Query#text", text);
Environment.Set<IDictionary<string, string>>("Gate.Request.Query", ParamDictionary.Parse(text));
}
return Environment.Get<IDictionary<string, string>>("Gate.Request.Query");
}
}
public IDictionary<string, string> Cookies
{
get
{
var cookies = Environment.Get<IDictionary<string, string>>("Gate.Request.Cookies#dictionary");
if (cookies == null)
{
cookies = new Dictionary<string, string>(StringComparer.Ordinal);
Environment.Set("Gate.Request.Cookies#dictionary", cookies);
}
var text = Headers.GetHeader("Cookie");
if (Environment.Get<string>("Gate.Request.Cookies#text") != text)
{
cookies.Clear();
foreach (var kv in ParamDictionary.ParseToEnumerable(text, CommaSemicolon))
{
if (!cookies.ContainsKey(kv.Key))
cookies.Add(kv);
}
Environment.Set("Gate.Request.Cookies#text", text);
}
return cookies;
}
}
public bool HasFormData
{
get
{
var mediaType = MediaType;
return (Method == "POST" && string.IsNullOrEmpty(mediaType))
|| mediaType == "application/x-www-form-urlencoded"
|| mediaType == "multipart/form-data";
}
}
public bool HasParseableData
{
get
{
var mediaType = MediaType;
return mediaType == "application/x-www-form-urlencoded"
|| mediaType == "multipart/form-data";
}
}
public string ContentType
{
get
{
return Headers.GetHeader("Content-Type");
}
}
public string MediaType
{
get
{
var contentType = ContentType;
if (contentType == null)
return null;
var delimiterPos = contentType.IndexOfAny(CommaSemicolon);
return delimiterPos < 0 ? contentType : contentType.Substring(0, delimiterPos);
}
}
public Task CopyToStreamAsync(Stream stream)
{
if (Body == null)
{
return TaskHelpers.Completed();
}
if (Body.CanSeek)
{
Body.Seek(0, SeekOrigin.Begin);
}
return Body.CopyToAsync(stream);
}
public void CopyToStream(Stream stream)
{
if (Body == null)
{
return;
}
if (Body.CanSeek)
{
Body.Seek(0, SeekOrigin.Begin);
}
Body.CopyTo(stream);
}
public Task<string> ReadTextAsync()
{
var text = Environment.Get<string>("Gate.Request.Text");
var thisInput = Body;
var lastInput = Environment.Get<object>("Gate.Request.Text#input");
if (text != null && ReferenceEquals(thisInput, lastInput))
{
return TaskHelpers.FromResult(text);
}
var buffer = new MemoryStream();
//TODO: determine encoding from request content type
return CopyToStreamAsync(buffer)
.Then(() =>
{
buffer.Seek(0, SeekOrigin.Begin);
text = new StreamReader(buffer).ReadToEnd();
Environment["Gate.Request.Text#input"] = thisInput;
Environment["Gate.Request.Text"] = text;
return text;
});
}
public string ReadText()
{
var text = Environment.Get<string>("Gate.Request.Text");
var thisInput = Body;
var lastInput = Environment.Get<object>("Gate.Request.Text#input");
if (text != null && ReferenceEquals(thisInput, lastInput))
{
return text;
}
if (thisInput != null)
{
if (thisInput.CanSeek)
{
thisInput.Seek(0, SeekOrigin.Begin);
}
text = new StreamReader(thisInput).ReadToEnd();
}
Environment.Set("Gate.Request.Text#input", thisInput);
Environment.Set("Gate.Request.Text", text);
return text;
}
public Task<IDictionary<string, string>> ReadFormAsync()
{
if (!HasFormData && !HasParseableData)
{
return TaskHelpers.FromResult(ParamDictionary.Parse(""));
}
var form = Environment.Get<IDictionary<string, string>>("Gate.Request.Form");
var thisInput = Body;
var lastInput = Environment.Get<object>("Gate.Request.Form#input");
if (form != null && ReferenceEquals(thisInput, lastInput))
{
return TaskHelpers.FromResult(form);
}
return ReadTextAsync().Then(text =>
{
form = ParamDictionary.Parse(text);
Environment.Set("Gate.Request.Form#input", thisInput);
Environment.Set("Gate.Request.Form", form);
return form;
});
}
public IDictionary<string, string> ReadForm()
{
if (!HasFormData && !HasParseableData)
{
return ParamDictionary.Parse("");
}
var form = Environment.Get<IDictionary<string, string>>("Gate.Request.Form");
var thisInput = Body;
var lastInput = Environment.Get<object>("Gate.Request.Form#input");
if (form != null && ReferenceEquals(thisInput, lastInput))
{
return form;
}
var text = ReadText();
form = ParamDictionary.Parse(text);
Environment.Set("Gate.Request.Form#input", thisInput);
Environment.Set("Gate.Request.Form", form);
return form;
}
public string HostWithPort
{
get
{
var hostHeader = Headers.GetHeader("Host");
if (!string.IsNullOrWhiteSpace(hostHeader))
{
return hostHeader;
}
var localIpAddress = Environment.Get<string>(OwinConstants.LocalIpAddress);
if (string.IsNullOrWhiteSpace(localIpAddress))
{
localIpAddress = "localhost";
}
var localPort = Environment.Get<string>(OwinConstants.LocalPort);
if (string.IsNullOrWhiteSpace(localPort))
{
localPort = string.Equals(Scheme, "https", StringComparison.OrdinalIgnoreCase) ? "443" : "80";
}
return localIpAddress + ":" + localPort;
}
set
{
if (string.IsNullOrWhiteSpace(value))
{
Headers.Remove("Host");
}
else
{
Headers.SetHeader("Host", value);
}
}
}
public string Host
{
get
{
var hostWithPort = HostWithPort;
if (!string.IsNullOrWhiteSpace(hostWithPort))
{
var delimiter = hostWithPort.IndexOf(':');
return delimiter < 0 ? hostWithPort : hostWithPort.Substring(0, delimiter);
}
return string.Empty;
}
set
{
var port = Port;
if (string.IsNullOrWhiteSpace(value) ||
string.IsNullOrWhiteSpace(port))
{
HostWithPort = value;
}
else
{
HostWithPort = value + ":" + port;
}
}
}
public string Port
{
get
{
var hostHeader = Headers.GetHeader("Host");
if (!string.IsNullOrWhiteSpace(hostHeader))
{
var delimiter = hostHeader.IndexOf(':');
if (delimiter != -1)
{
return hostHeader.Substring(delimiter + 1);
}
}
var localPort = Environment.Get<string>(OwinConstants.LocalPort);
if (string.IsNullOrWhiteSpace(localPort))
{
return string.Equals(Scheme, "https", StringComparison.OrdinalIgnoreCase) ? "443" : "80";
}
return localPort;
}
set
{
string host = Host;
if (string.IsNullOrWhiteSpace(value))
{
HostWithPort = host; // Truncate port
}
else
{
HostWithPort = host + ":" + value;
}
}
}
}
}
| |
//
// Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.UnitTests.Targets.Wrappers
{
using System;
using System.Collections.Generic;
using System.Threading;
using NLog.Common;
using NLog.Targets;
using NLog.Targets.Wrappers;
using Xunit;
// Unit tests for RandomizeGroupTarget: a wrapper that forwards each write to
// one of its wrapped targets. Includes two stub Target implementations used
// as write sinks.
public class RandomizeGroupTargetTests : NLogTestBase
{
// Writes 10 events through a group of three targets and verifies that all
// writes succeed, the writes are distributed across the group (total = 10),
// and Flush reaches every wrapped target exactly once.
[Fact]
public void RandomizeGroupSyncTest1()
{
var myTarget1 = new MyTarget();
var myTarget2 = new MyTarget();
var myTarget3 = new MyTarget();
var wrapper = new RandomizeGroupTarget()
{
Targets = { myTarget1, myTarget2, myTarget3 },
};
myTarget1.Initialize(null);
myTarget2.Initialize(null);
myTarget3.Initialize(null);
wrapper.Initialize(null);
List<Exception> exceptions = new List<Exception>();
// no exceptions
for (int i = 0; i < 10; ++i)
{
wrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
}
Assert.Equal(10, exceptions.Count);
foreach (var e in exceptions)
{
Assert.Null(e);
}
Assert.Equal(10, myTarget1.WriteCount + myTarget2.WriteCount + myTarget3.WriteCount);
Exception flushException = null;
var flushHit = new ManualResetEvent(false);
wrapper.Flush(ex => { flushException = ex; flushHit.Set(); });
flushHit.WaitOne();
if (flushException != null)
{
Assert.True(false, flushException.ToString());
}
Assert.Equal(1, myTarget1.FlushCount);
Assert.Equal(1, myTarget2.FlushCount);
Assert.Equal(1, myTarget3.FlushCount);
}
// Verifies the degenerate case: a group with no targets still completes
// every write continuation without error and flushes synchronously.
[Fact]
public void RandomizeGroupSyncTest2()
{
var wrapper = new RandomizeGroupTarget()
{
// no targets
};
wrapper.Initialize(null);
List<Exception> exceptions = new List<Exception>();
// no exceptions
for (int i = 0; i < 10; ++i)
{
wrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
}
Assert.Equal(10, exceptions.Count);
foreach (var e in exceptions)
{
Assert.Null(e);
}
// Sentinel exception: if the flush continuation runs synchronously it
// overwrites this with null (success); otherwise the test fails below.
Exception flushException = new Exception("Flush not hit synchronously.");
wrapper.Flush(ex => flushException = ex);
if (flushException != null)
{
Assert.True(false, flushException.ToString());
}
}
// Stub target that completes writes asynchronously on the thread pool.
public class MyAsyncTarget : Target
{
public int FlushCount { get; private set; }
public int WriteCount { get; private set; }
public MyAsyncTarget() : base()
{
}
public MyAsyncTarget(string name) : this()
{
Name = name;
}
// Synchronous write path is not expected to be used by these tests.
protected override void Write(LogEventInfo logEvent)
{
throw new NotSupportedException();
}
protected override void Write(AsyncLogEventInfo logEvent)
{
Assert.True(FlushCount <= WriteCount);
WriteCount++;
ThreadPool.QueueUserWorkItem(
s =>
{
if (ThrowExceptions)
{
// NOTE(review): the continuation is deliberately invoked twice in
// both branches — presumably to exercise duplicate-continuation
// handling in the wrapper; confirm before changing.
logEvent.Continuation(new InvalidOperationException("Some problem!"));
logEvent.Continuation(new InvalidOperationException("Some problem!"));
}
else
{
logEvent.Continuation(null);
logEvent.Continuation(null);
}
});
}
// Completes the flush continuation asynchronously on the thread pool.
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
FlushCount++;
ThreadPool.QueueUserWorkItem(
s => asyncContinuation(null));
}
// When true, write continuations receive an exception instead of success.
public bool ThrowExceptions { get; set; }
}
// Stub target that counts writes/flushes synchronously and can be made to
// fail a configurable number of writes via FailCounter.
class MyTarget : Target
{
public int FlushCount { get; set; }
public int WriteCount { get; set; }
public int FailCounter { get; set; }
public MyTarget() : base()
{
}
public MyTarget(string name) : this()
{
Name = name;
}
protected override void Write(LogEventInfo logEvent)
{
Assert.True(FlushCount <= WriteCount);
WriteCount++;
if (FailCounter > 0)
{
FailCounter--;
throw new InvalidOperationException("Some failure.");
}
}
// Completes the flush continuation synchronously.
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
FlushCount++;
asyncContinuation(null);
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Collections;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using OpenLiveWriter.Api;
using OpenLiveWriter.BlogClient;
using OpenLiveWriter.BlogClient.Clients;
using OpenLiveWriter.CoreServices.Progress;
using OpenLiveWriter.Extensibility.BlogClient;
using OpenLiveWriter.FileDestinations;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.Interop.Windows;
using OpenLiveWriter.Localization;
namespace OpenLiveWriter.PostEditor
{
/// <summary>
/// Base class for uploading a post's supporting files (images, attachments) to a
/// destination. Concrete subclasses implement the actual transfer (weblog API or
/// FTP); this class provides the shared file-name templating and upload-tracking
/// logic keyed by DestinationContext.
/// </summary>
public abstract class BlogFileUploader : IDisposable
{
/// <param name="destinationContext">Identifies the upload destination (used to track uploaded files).</param>
/// <param name="postContext">Post-specific context string, expected as "{title}_{randomizer}".</param>
/// <param name="blogId">Id of the blog being published to.</param>
protected BlogFileUploader(string destinationContext, string postContext, string blogId)
{
DestinationContext = destinationContext;
PostContext = postContext ;
BlogId = blogId;
}
public virtual void Dispose()
{
try
{
Disconnect();
}
catch(Exception ex)
{
// Dispose must not throw; record the failure and continue.
Trace.Fail("Unexpected exception attempting to disconnect from BlogFileUploader: " + ex.ToString());
}
GC.SuppressFinalize(this);
}
~BlogFileUploader()
{
// Finalizer only exists to flag a missed Dispose during debugging.
Debug.Fail("Failed to Dispose BlogFileUploader!");
}
public virtual void Connect() {}
public virtual void Disconnect() {}
/// <summary>Uploads the file before the post is published; returns the public URI of the uploaded file.</summary>
public abstract Uri DoUploadWorkBeforePublish( IFileUploadContext uploadContext ) ;
/// <summary>Optional post-publish upload step; no-op by default.</summary>
public virtual void DoUploadWorkAfterPublish( IFileUploadContext uploadContext )
{
}
/// <summary>Formats the destination file name/path for the given local file name.</summary>
public abstract string FormatUploadFileName(string filename, string conflictToken);
public readonly string BlogId ;
protected readonly string PostContext ;
public readonly string DestinationContext ;
/// <summary>
/// Expands a file-name template. Supported tokens: {FileName},
/// {FileNameWithoutExtension}, {AsciiFileName}, {AsciiFileNameWithoutExtension},
/// {FileNameConflictToken[:fmt]}, {FileExtension}, {PostRandomizer}, {PostTitle},
/// {OpenLiveWriter}, {UploadDate[:fmt]}, {Randomizer}.
/// </summary>
protected string FormatFileName(string format, string filename, string conflictToken)
{
// Split PostContext at its last '_' into the post title and the randomizer token.
string postTitle = PostContext;
string postRandomizerToken = PostContext;
int uniqueIndex = PostContext.LastIndexOf("_");
if(uniqueIndex != -1)
{
postTitle = postTitle.Substring(0, uniqueIndex);
// NOTE(review): the index comes from the untrimmed string but the substring is
// taken from the '/'-trimmed one — assumes any trailing '/' follows the '_'; confirm.
postRandomizerToken = PostContext.TrimEnd('/').Substring(uniqueIndex+1);
}
if(format == String.Empty)
{
// Default template when the destination supplies none.
format = "{PostTitle}_{PostRandomizer}/{FileName}";
}
string formattedName = format.Replace("{FileName}", filename);
formattedName = formattedName.Replace("{FileNameWithoutExtension}", Path.GetFileNameWithoutExtension(filename));
formattedName = formattedName.Replace("{AsciiFileName}", FileHelper.GetValidAnsiFileName(filename));
formattedName = formattedName.Replace("{AsciiFileNameWithoutExtension}", Path.GetFileNameWithoutExtension(FileHelper.GetValidAnsiFileName(filename)));
// Conflict token supports an optional format suffix (e.g. {FileNameConflictToken:_?});
// when conflictToken is null the whole token is removed.
formattedName = ReplaceVariableFormatted(formattedName, "FileNameConflictToken", conflictToken != null ? new StringFormatter(conflictToken) : null);
formattedName = formattedName.Replace("{FileExtension}", Path.GetExtension(filename));
formattedName = formattedName.Replace("{PostRandomizer}", postRandomizerToken);
formattedName = formattedName.Replace("{PostTitle}", postTitle);
formattedName = formattedName.Replace("{OpenLiveWriter}", FileHelper.GetValidAnsiFileName(ApplicationEnvironment.ProductName));
formattedName = ReplaceVariableFormatted(formattedName, "UploadDate", DateTime.Now);
formattedName = formattedName.Replace("{Randomizer}", GuidHelper.GetVeryShortGuid());
return formattedName;
}
/// <summary>
/// Replaces every {variableName[:fmt]} occurrence in the input with val formatted
/// via String.Format; removes the token entirely when val is null.
/// </summary>
string ReplaceVariableFormatted(string input, string variableName, object val)
{
// The match covers "{Name" plus everything up to (but not including) the closing '}';
// group 1 captures the optional ":fmt" suffix.
MatchCollection mc = Regex.Matches(input, "{" + variableName + "([^}]*)");
StringBuilder sb = new StringBuilder();
int inputIndex = 0;
foreach(Match m in mc)
{
if(m.Success)
{
sb.Append(input.Substring(inputIndex, m.Index - inputIndex));
Group tokenFormatGroup = null;
if(m.Groups.Count > 1)
tokenFormatGroup = m.Groups[1];
if(val != null)
{
if(tokenFormatGroup != null)
{
// Apply the captured format suffix, e.g. ":_?" -> "{0:_?}".
val = String.Format(CultureInfo.InvariantCulture, "{0" + tokenFormatGroup.Value + "}", val);
}
sb.Append(val);
}
// +1 skips the closing '}' which the regex deliberately did not consume.
inputIndex = m.Index + m.Length + 1;
}
}
if(inputIndex < input.Length)
sb.Append(input.Substring(inputIndex));
return sb.ToString();
}
/// <summary>
/// Formattable wrapper for a string that allows the string's format to print itself.
/// Note: the format syntax uses a ? character to replace its value
/// Example: String.Format{"0:---?----"}, new StringFormatter("test")) outputs: "---test----"
/// </summary>
private class StringFormatter : IFormattable
{
private string _value;
public StringFormatter(string val)
{
_value = val;
}
public string ToString(string format, IFormatProvider formatProvider)
{
string formattedString = _value;
if(format != null)
// '?' in the custom format marks where the wrapped value is inserted.
formattedString = String.Format(CultureInfo.InvariantCulture, format.Replace("?", "{0}"), _value);
return formattedString;
}
public override string ToString()
{
return _value;
}
}
/// <summary>
/// Factory: creates the uploader matching the blog's configured upload mechanism
/// (weblog API or FTP). Unknown values fall back to the weblog uploader.
/// </summary>
public static BlogFileUploader CreateFileUploader( Blog blog, string postContextName)
{
string destinationContext = GetFileUploadDestinationContext(blog.Id, blog.FileUploadSupport, blog.FileUploadSettings);
switch( blog.FileUploadSupport )
{
case FileUploadSupport.Weblog:
return new WeblogBlogFileUploader( destinationContext, postContextName, blog.Id, blog.HostBlogId) ;
case FileUploadSupport.FTP:
return new FTPBlogFileUploader(destinationContext, postContextName, new FtpUploaderSettings(blog.FileUploadSettings), blog.Id ) ;
default:
Trace.Fail("Unexpected value for blog.FileUploadSupport: " + blog.FileUploadSupport.ToString());
goto case FileUploadSupport.Weblog;
}
}
/// <summary>
/// Returns the file upload destination context string for the specified blog based on its upload settings.
/// (used for persisting upload information in the ISupportingFile).
/// </summary>
/// <param name="blogId"></param>
/// <returns></returns>
public static string GetFileUploadDestinationContext(string blogId)
{
using(BlogSettings blogSettings = BlogSettings.ForBlogId(blogId))
{
IBlogFileUploadSettings uploadSettings = blogSettings.FileUploadSettings;
FileUploadSupport fileUploadSupport = blogSettings.FileUploadSupport;
return GetFileUploadDestinationContext(blogId, fileUploadSupport, uploadSettings);
}
}
// Weblog destinations are keyed by blog id; FTP destinations by a
// "ftp://user@server/path" string so distinct servers/accounts don't collide.
static string GetFileUploadDestinationContext(string blogId, FileUploadSupport fileUploadSupport, IBlogFileUploadSettings uploadSettings)
{
switch( fileUploadSupport )
{
case FileUploadSupport.Weblog:
return blogId ;
case FileUploadSupport.FTP:
FtpUploaderSettings ftpSettings = new FtpUploaderSettings(uploadSettings);
return String.Format(CultureInfo.InvariantCulture, "ftp://{0}@{1}{2}", ftpSettings.Username, ftpSettings.FtpServer, ftpSettings.PublishPath);
default:
Trace.Fail("Unexpected value for fileUploadSupport: " + fileUploadSupport.ToString());
goto case FileUploadSupport.Weblog;
}
}
/// <summary>
/// True when the file has not yet been uploaded to this DestinationContext;
/// false (skip upload) when it is already recorded as uploaded.
/// </summary>
public virtual bool DoesFileNeedUpload(ISupportingFile file, IFileUploadContext uploadContext)
{
// Check to see if we have already uploaded this file.
if(!file.IsUploaded(DestinationContext))
{
return true;
}
else
{
Debug.WriteLine(String.Format(CultureInfo.InvariantCulture, "File is up-to-date: {0}", file.FileName));
return false;
}
}
}
/// <summary>
/// Null-object uploader for destinations that do not support file upload:
/// any upload attempt throws BlogClientFileUploadNotSupportedException and
/// file names pass through unchanged.
/// </summary>
public class NullBlogFileUploader : BlogFileUploader
{
    public NullBlogFileUploader(string destinationContext, string contextName, string blogId)
        : base(destinationContext, contextName, blogId)
    {
    }

    /// <summary>
    /// Always throws: this uploader represents a destination with no upload support.
    /// </summary>
    public override Uri DoUploadWorkBeforePublish(IFileUploadContext uploadContext)
    {
        throw new BlogClientFileUploadNotSupportedException();
    }

    /// <summary>
    /// No destination-specific formatting is applied; the name is returned as-is.
    /// </summary>
    public override string FormatUploadFileName(string filename, string conflictToken)
    {
        return filename;
    }
}
/// <summary>
/// Uploads supporting files through the blog's own publishing API (e.g.
/// newMediaObject), delegating the transfer to the IBlogClient for the blog.
/// </summary>
public class WeblogBlogFileUploader : BlogFileUploader
{
/// <param name="localBlogId">Local settings id; used to load BlogSettings and build the client.</param>
/// <param name="remoteBlogId">Server-side blog id; becomes the base-class BlogId.</param>
public WeblogBlogFileUploader( string destinationContext, string postContext, string localBlogId, string remoteBlogId )
: base( destinationContext,
postContext,
remoteBlogId )
{
_blogSettings = BlogSettings.ForBlogId(localBlogId);
_blogClient = BlogClientManager.CreateClient(_blogSettings) ;
_blogHomepageUrl = _blogSettings.HomepageUrl;
}
public override void Dispose()
{
base.Dispose();
// Settings were opened in the constructor and are owned by this instance.
_blogSettings.Dispose();
}
public override Uri DoUploadWorkBeforePublish(IFileUploadContext uploadContext)
{
try
{
string uploadUrl = _blogClient.DoBeforePublishUploadWork(uploadContext);
// Some services return a relative URL; resolve it against the blog homepage.
if(!UrlHelper.IsUrl(uploadUrl))
{
string baseURL;
if(uploadUrl.StartsWith("/"))
// Root-relative: resolve against scheme + host only.
baseURL = UrlHelper.GetBaseUrl(_blogHomepageUrl);
else
// Path-relative: resolve against the homepage's base path.
baseURL = UrlHelper.GetBasePathUrl(_blogHomepageUrl);
uploadUrl = UrlHelper.UrlCombineIfRelative(baseURL, uploadUrl);
}
return new Uri(uploadUrl) ;
}
catch(BlogClientOperationCancelledException)
{
throw; // No need to assert when an operation is cancelled
}
catch(IOException ex)
{
Trace.Fail(ex.ToString());
// Wrap I/O failures with the local file that was being uploaded.
throw new BlogClientIOException( new FileInfo(uploadContext.GetContentsLocalFilePath()), ex) ;
}
catch(BlogClientAuthenticationException ex)
{
Trace.Fail(ex.ToString());
throw ;
}
catch(BlogClientProviderException ex)
{
Trace.Fail(ex.ToString());
// provider exceptions that are not authentication exceptions are presumed
// to be lack of support for newMediaObject we may want to filter this down
// further -- not sure how to do this other than by trial and error with the
// various services.
throw new BlogClientFileUploadNotSupportedException(ex.ErrorCode, ex.ErrorString);
}
catch(BlogClientException ex)
{
Trace.Fail(ex.ToString());
throw ;
}
catch(Exception ex)
{
Trace.Fail(ex.ToString());
if (ex is WebException)
{
// Capture HTTP diagnostics before wrapping the exception.
HttpRequestHelper.LogException((WebException) ex);
}
throw new BlogClientException(Res.Get(StringId.FileUploadFailedException), ex.Message);
}
}
public override void DoUploadWorkAfterPublish( IFileUploadContext uploadContext )
{
try
{
_blogClient.DoAfterPublishUploadWork(uploadContext);
}
catch(IOException ex)
{
Trace.Fail(ex.ToString());
throw new BlogClientIOException( new FileInfo(uploadContext.GetContentsLocalFilePath()), ex) ;
}
catch(BlogClientException ex)
{
Trace.Fail(ex.ToString());
throw ;
}
catch(Exception ex)
{
Trace.Fail(ex.ToString());
throw new BlogClientException(Res.Get(StringId.FileUploadFailedException), ex.Message);
}
return;
}
public override string FormatUploadFileName(string filename, string conflictToken)
{
string format = _blogClient.Options.FileUploadNameFormat;
if(format == String.Empty)
{
// Default template when the provider does not specify one.
format = "{OpenLiveWriter}/{PostTitle}_{PostRandomizer}/{FileNameWithoutExtension}{FileNameConflictToken:_?}{FileExtension}";
}
return base.FormatFileName(format, filename, conflictToken);
}
public override bool DoesFileNeedUpload(ISupportingFile file, IFileUploadContext uploadContext)
{
// Let the blog client decide if it wants to upload this file or not
bool? shouldUpload = _blogClient.DoesFileNeedUpload(uploadContext);
// Check to see if the blog client made a decision, if so, then use it
if(shouldUpload != null)
{
return shouldUpload.Value;
}
// Check to see if it was already uploaded and saved in the content for this post
return base.DoesFileNeedUpload(file, uploadContext);
}
private BlogSettings _blogSettings;
private string _blogHomepageUrl;
private IBlogClient _blogClient ;
}
/// <summary>
/// Uploads supporting files to an FTP server configured for the blog. Connects
/// lazily on first upload, prompting for credentials as needed, and maps the
/// uploaded path to a public URL via the configured URL mapping.
/// </summary>
public class FTPBlogFileUploader : BlogFileUploader
{
// Process-wide cache of validated credentials, keyed by DestinationContext
// ("ftp://user@server/path"), so repeated uploads don't re-prompt.
private static Hashtable _credentials = new Hashtable();
public FTPBlogFileUploader( string destinationContext, string postContext, FtpUploaderSettings settings, string blogId )
: base(destinationContext, postContext, blogId)
{
_settings = settings ;
}
/// <summary>Drops cached credentials for the given blog's FTP destination.</summary>
public static void ClearCachedCredentials(string blogId)
{
String destinationContext = GetFileUploadDestinationContext(blogId);
_credentials.Remove(destinationContext);
}
// Lazily connects to the FTP destination, prompting for (and caching)
// credentials until login succeeds, then ensures the post's directory exists.
private void ConnectForUpload()
{
if(_fileDestination == null)
{
try
{
bool loggedIn = false;
// Prefer cached (previously validated) credentials over stored settings.
FtpCredentials credentials = (FtpCredentials) _credentials[DestinationContext];
string username = credentials != null ? credentials.Username : _settings.Username;
string password = credentials != null ? credentials.Password : _settings.Password;
while(!loggedIn)
{
// Empty password means "not stored" (or a prior login failed) -> prompt.
if(password == String.Empty)
{
CredentialsDomain cd = new CredentialsDomain(Res.Get(StringId.FtpLoginDomain), _settings.FtpServer, null, FtpIconBytes);
CredentialsPromptResult result = CredentialsHelper.PromptForCredentials(ref username, ref password, cd);
if(result == CredentialsPromptResult.Cancel || result == CredentialsPromptResult.Abort)
{
throw new OperationCancelledException();
}
else
{
//save the user/pass as appropriate
if(result == CredentialsPromptResult.SaveUsername)
{
_settings.Username = username;
_settings.Password = String.Empty;
}
else if(result == CredentialsPromptResult.SaveUsernameAndPassword)
{
_settings.Username = username;
_settings.Password = password;
}
}
}
try
{
// create and connect to the destination
_fileDestination = new WinInetFTPFileDestination(
_settings.FtpServer,
_settings.PublishPath,
username,
password ) ;
_fileDestination.Connect();
//save the validated credentials so we don't need to prompt again later
_credentials[DestinationContext] = new FtpCredentials(DestinationContext, username, password);
loggedIn = true;
}
catch(LoginException)
{
// Bad credentials: clear the password so the loop re-prompts.
loggedIn = false;
password = String.Empty;
_credentials.Remove(DestinationContext);
}
}
// calculate the target path and ensure that it exists
_fileDestination.InsureDirectoryExists(PostContext) ;
}
catch(Exception ex)
{
// Surface any connect failure as a user-presentable transfer exception.
WebPublishMessage message = WebPublishUtils.ExceptionToErrorMessage(ex) ;
throw new BlogClientFileTransferException(Res.Get(StringId.BCEFileTransferConnectingToDestination), message.Title, message.Text);
}
}
}
// Lazily-loaded icon bytes shown in the FTP credentials prompt.
private static byte[] ftpIconBytes;
private static byte[] FtpIconBytes
{
get
{
if(ftpIconBytes == null)
{
MemoryStream memStream = new MemoryStream();
ResourceHelper.SaveAssemblyResourceToStream("Images.FtpIcon.png", memStream) ;
ftpIconBytes = memStream.ToArray();
}
return ftpIconBytes;
}
}
// Simple credential record stored in the static _credentials cache.
private class FtpCredentials
{
public FtpCredentials(string connectionId, string username, string password)
{
ConnectionId = connectionId;
Username = username;
Password = password;
}
public string Username;
public string Password;
public string ConnectionId;
}
public override void Disconnect()
{
try
{
if ( _fileDestination != null )
{
_fileDestination.Disconnect();
_fileDestination.Dispose();
_fileDestination = null ;
}
}
catch(Exception ex)
{
WebPublishMessage message = WebPublishUtils.ExceptionToErrorMessage(ex) ;
throw new BlogClientFileTransferException( "disconnecting from file destination", message.Text, message.Title ) ;
}
}
public override Uri DoUploadWorkBeforePublish( IFileUploadContext uploadContext )
{
try
{
ConnectForUpload();
// A previously recorded path means this file was uploaded before:
// re-upload to the same path with overwrite.
string uploadPath = uploadContext.Settings.GetString(UPLOAD_PATH, null);
bool overwrite = uploadPath != null;
if(uploadPath == null)
{
// First upload: build folder/file from the formatted name, sanitizing
// every path segment to a valid ANSI file name.
string uploadFolder = null;
string fileName = uploadContext.PreferredFileName;
string filePath = uploadContext.FormatFileName(fileName);
string[] pathParts = filePath.Split('/');
if(pathParts.Length > 1)
{
uploadFolder = FileHelper.GetValidAnsiFileName(pathParts[0]);
for(int i=1; i<pathParts.Length - 1; i++)
uploadFolder = uploadFolder + "/" + FileHelper.GetValidAnsiFileName(pathParts[i]);
}
fileName = FileHelper.GetValidAnsiFileName(pathParts[pathParts.Length-1]);
uploadPath = _fileDestination.CombinePath(uploadFolder, fileName);
if(_fileDestination.FileExists(uploadPath))
{
// Name conflict: probe "_3", "_4", ... against the remote listing.
// NOTE(review): suffixing starts at 3, not 2 — presumably intentional; confirm.
string fileBaseName = Path.GetFileNameWithoutExtension(fileName);
string fileExtension = Path.GetExtension(fileName);
try
{
Hashtable existingFiles = new Hashtable();
foreach(string name in _fileDestination.ListFiles(uploadFolder))
existingFiles[name] = name;
for(int i=3; i<Int32.MaxValue && existingFiles.ContainsKey(fileName); i++)
{
fileName = FileHelper.GetValidAnsiFileName(fileBaseName + "_" + i + fileExtension);
}
}
catch(Exception e)
{
Debug.Fail("Error while calculating unique filename", e.ToString());
}
uploadPath = _fileDestination.CombinePath(uploadFolder, fileName);
if(_fileDestination.FileExists(uploadPath))
{
// Last resort: guarantee uniqueness with a GUID suffix.
Debug.Fail("Failed to calculate unique filename");
fileName = FileHelper.GetValidAnsiFileName(fileBaseName + Guid.NewGuid().ToString() + fileExtension);
uploadPath = _fileDestination.CombinePath(uploadFolder, fileName);
}
}
}
// transfer the file
_fileDestination.DoTransfer(
uploadContext.GetContentsLocalFilePath(),
uploadPath,
overwrite);
// Remember the path so future uploads of this file overwrite in place.
uploadContext.Settings.SetString(UPLOAD_PATH, uploadPath);
// return the url to the transferred file
string baseUrl = UrlHelper.InsureTrailingSlash(_settings.UrlMapping) ;
string relativeUrl = uploadPath;
return new Uri( UrlHelper.UrlCombine(baseUrl, relativeUrl) ) ;
}
catch(Exception ex)
{
WebPublishMessage message = WebPublishUtils.ExceptionToErrorMessage(ex) ;
throw new BlogClientFileTransferException( new FileInfo(uploadContext.GetContentsLocalFilePath()), message.Title, message.Text ) ;
}
}
// Per-file setting key under which the remote upload path is persisted.
private const string UPLOAD_PATH = "upload.path";
public override string FormatUploadFileName(string filename, string conflictToken)
{
string format = _settings.FileUploadFormat;
return base.FormatFileName(format, filename, conflictToken);
}
private FtpUploaderSettings _settings ;
private FileDestination _fileDestination ;
}
/// <summary>
/// Typesafe wrapper over an IBlogFileUploadSettings store for the FTP upload
/// configuration (server, paths, credentials, file-name format). The password
/// is persisted Base64-encoded and encrypted.
/// </summary>
public class FtpUploaderSettings
{
/// <summary>Copies all FTP upload settings from one settings store to another.</summary>
public static void Copy(IBlogFileUploadSettings source, IBlogFileUploadSettings destination)
{
// create typesafe wrappers for source and destination
FtpUploaderSettings sourceSettings = new FtpUploaderSettings(source);
FtpUploaderSettings destinationSettings = new FtpUploaderSettings(destination);
// copy the values
destinationSettings.FtpServer = sourceSettings.FtpServer ;
destinationSettings.PublishPath = sourceSettings.PublishPath ;
destinationSettings.UrlMapping = sourceSettings.UrlMapping ;
destinationSettings.Username = sourceSettings.Username ;
// Note: copying Password decrypts via the getter and re-encrypts via the setter.
destinationSettings.Password = sourceSettings.Password ;
}
public FtpUploaderSettings(IBlogFileUploadSettings settings)
{
_settings = settings ;
}
public string FtpServer
{
get { return _settings.GetValue(FTP_SERVER); }
set { _settings.SetValue(FTP_SERVER, value);}
}
private const string FTP_SERVER = "FtpServer" ;
// Remote directory files are published into.
public string PublishPath
{
get { return _settings.GetValue(PUBLISH_PATH); }
set { _settings.SetValue(PUBLISH_PATH, value);}
}
private const string PUBLISH_PATH = "PublishPath" ;
// Public base URL corresponding to PublishPath (used to build file URLs).
public string UrlMapping
{
get { return _settings.GetValue(URL_MAPPING); }
set { _settings.SetValue(URL_MAPPING, value);}
}
private const string URL_MAPPING = "UrlMapping" ;
public string Username
{
get { return _settings.GetValue(USERNAME); }
set { _settings.SetValue(USERNAME, value);}
}
private const string USERNAME = "Username" ;
/// <summary>
/// Decrypted FTP password. Returns String.Empty when unset or when decryption
/// fails. Legacy plaintext values are detected heuristically and re-saved encrypted.
/// </summary>
public string Password
{
get
{
//load the decrypted password
try
{
string base64EncodedPass = _settings.GetValue(PASSWORD);
if(!string.IsNullOrEmpty(base64EncodedPass))
{
if(base64EncodedPass.Length < 200) //encoded passwords are always larger than 200, and non-encoded passwords are unlikely to be
{
//then this password is not stored encrypted (probably because of a bug introduced in M2, so resave the password in encrupted form)
Password = base64EncodedPass;
base64EncodedPass = _settings.GetValue(PASSWORD);
Trace.WriteLine("FTP password was auto-encrypted");
}
byte[] encrypted = Convert.FromBase64String(base64EncodedPass);
string password = CryptHelper.Decrypt(encrypted);
return password;
}
}
catch(Exception e)
{
// Best-effort: an unreadable password falls through to String.Empty below.
Trace.Fail("Failed to decrypt password: " + e);
}
return String.Empty;
}
set
{
if(value != String.Empty)
{
//save an encrypted password
try
{
_settings.SetValue(PASSWORD, Convert.ToBase64String(CryptHelper.Encrypt(value)));
}
catch(Exception e)
{
//if an exception occurs, just leave the password empty
Trace.Fail("Failed to encrypt password: " + e);
}
}
else
// Empty means "do not store a password"; write it through unencrypted.
_settings.SetValue(PASSWORD, value);
}
}
private const string PASSWORD = "Password" ;
/// <summary>Template for uploaded file names; defaults to "{PostTitle}_{PostRandomizer}/{FileName}".</summary>
public string FileUploadFormat
{
get
{
string format = _settings.GetValue(FILE_UPLOAD_FORMAT);
if(format == null)
{
format = "{PostTitle}_{PostRandomizer}/{FileName}";
}
return format;
}
set { _settings.SetValue(FILE_UPLOAD_FORMAT, value);}
}
private const string FILE_UPLOAD_FORMAT = "FileUploadFormat" ;
private IBlogFileUploadSettings _settings ;
}
}
| |
using System;
using System.Diagnostics;
using Alachisoft.NCache.Common.Interop;
using Microsoft.Win32;
namespace Alachisoft.NCache.Common
{
/// <summary>
/// Utility class to help with common tasks.
/// </summary>
/// <summary>
/// Utility class to help with common tasks.
/// </summary>
public class AppUtil
{
static bool isRunningAsWow64 = false;
static string installDir = null;
// Reassigned (prefixed with installDir) in the static constructor; the
// readonly modifier only prevents assignment outside static construction.
public readonly static string DeployedAssemblyDir = "deploy\\";
// Event-log verbosity bitmask; compared against EventLogEntryType values
// (error=1, warning=3 covers error+warning, all=7). Default: log everything.
static int s_logLevel = 7;
static string javaLibDir = null;
static AppUtil()
{
try
{
isRunningAsWow64 = Win32.InternalCheckIsWow64();
}
catch (Exception ex)
{
LogEvent("Win32.InternalCheckIsWow64() Error " + ex.Message, EventLogEntryType.Error);
}
installDir = GetInstallDir();
javaLibDir = GetJavaLibDir();
DeployedAssemblyDir = installDir + DeployedAssemblyDir;
// Optional app.config override of the event-log verbosity.
string logLevel = System.Configuration.ConfigurationSettings.AppSettings["NCacheServer.EventLogLevel"];
if (logLevel != null && logLevel != "")
{
logLevel = logLevel.ToLower();
switch (logLevel)
{
case "error":
s_logLevel = 1;
break;
case "warning":
s_logLevel = 3;
break;
case "all":
s_logLevel = 7;
break;
}
}
}
public static bool IsRunningAsWow64
{
get { return isRunningAsWow64; }
}
public static bool IsNew { get { return true; } }
// Resolves the install directory: app.config "InstallDir" first, then the
// registry via GetAppSetting; null when neither yields a value.
private static string GetInstallDir()
{
string installPath = System.Configuration.ConfigurationSettings.AppSettings["InstallDir"];
if (installPath != null && installPath != string.Empty)
{
return installPath;
}
// NOTE(review): this default is overwritten unconditionally by the registry
// lookup below; the catch{throw;} is also a no-op rethrow.
string path = System.Environment.CurrentDirectory + "\\";
try
{
path = GetAppSetting("InstallDir");
}
catch (Exception)
{
throw;
}
if (path == null || path.Length == 0)
return null;
return path;
}
/// <summary>
/// Reads the value/data pair from the NCache registry key.
/// Automatically caters for wow64/win32.
/// </summary>
/// <param name="key">Name of the value to be read.</param>
/// <returns>Data of the value.</returns>
public static string GetAppSetting(string key)
{
return GetAppSetting("", key);
}
/// <summary>
/// Reads the value/data pair from the NCache registry key.
/// Automatically caters for wow64/win32.
/// </summary>
/// <param name="section">Section from which key is to be read.</param>
/// <param name="key">Name of the value to be read.</param>
/// <returns>Data of the value.</returns>
public static string GetAppSetting(string section, string key)
{
// ROOT_KEY is only prepended when not running under WOW64 — presumably
// RegHelper handles the redirected path itself in that case; confirm.
if (!IsRunningAsWow64)
section = RegHelper.ROOT_KEY + section;
object tempVal = RegHelper.GetRegValue(section, key,0);
// Non-string registry data (e.g. DWORD) is converted to its string form.
if (! (tempVal is String))
{
return Convert.ToString(tempVal);
}
return (String)tempVal;
}
/// <summary>
/// Get decrypted value from section.
/// Automatically caters for wow64/win32.
/// </summary>
/// <param name="section">">Section from which key is to be read.</param>
/// <param name="key">key</param>
/// <returns>value retrieved</returns>
public static string GetDecryptedAppSetting(string section, string key)
{
section = RegHelper.ROOT_KEY + section;
return (string)RegHelper.GetDecryptedRegValue(section, key,0);
}
/// <summary>
/// Write the value to the NCache registry key.
/// Automatically caters for wow64/win32.
/// </summary>
/// <param name="section">">Section from which key is to be read.</param>
/// <param name="key">Name of the value to be write.</param>
/// <param name="value">New value of key</param>
public static void SetAppSetting(string section, string key, string value, short prodId)
{
section = RegHelper.ROOT_KEY + section;
RegHelper.SetRegValue(section, key, value,prodId);
}
/// <summary>
/// Write the value to the NCache registry key after encrypting it.
/// Automatically caters for wow64/win32.
/// </summary>
/// <param name="section">">Section from which key is to be read.</param>
/// <param name="key">Name of the value to be write.</param>
/// <param name="value">New value of key</param>
public static void SetEncryptedAppSetting(string section, string key, string value)
{
section = RegHelper.ROOT_KEY + section;
RegHelper.SetEncryptedRegValue(section, key, value);
}
/// <summary>
/// Check if the section has preceeding \. If not then append one
/// </summary>
/// <param name="section">Section</param>
/// <returns>Checked and completed section</returns>
private static string CompleteSection(string section)
{
return section.StartsWith("\\") ? section : "\\" + section;
}
/// <summary>
/// Gets the install directory of NCache.
/// Returns null if registry key does not exist.
/// </summary>
public static string InstallDir
{
get { return installDir; }
}
private static string GetJavaLibDir()
{
return AppUtil.InstallDir + "Java\\Lib\\";
}
public static string JavaLibDir
{
get { return javaLibDir; }
}
/// <summary>
/// Writes an error, warning, information, success audit, or failure audit
/// entry with the given message text to the event log.
/// </summary>
/// <param name="msg">The string to write to the event log.</param>
/// <param name="type">One of the <c>EventLogEntryType</c> values.</param>
public static void LogEvent(string source, string msg, EventLogEntryType type, short category, int eventId)
{
try
{
// Bitmask filter: write only when all bits of the entry type are
// enabled in s_logLevel. Note: the category parameter is not passed
// to WriteEntry.
int level = (int)type;
if ((level & s_logLevel) == level)
{
using (EventLog ncLog = new EventLog("Application"))
{
ncLog.Source = source;
ncLog.WriteEntry(msg, type, eventId);
}
}
}
// Event logging is best-effort; failures are deliberately ignored.
catch (Exception) { }
}
/// <summary>
/// Writes an error, warning, information, success audit, or failure audit
/// entry with the given message text to the event log.
/// </summary>
/// <param name="msg">The string to write to the event log.</param>
/// <param name="type">One of the <c>EventLogEntryType</c> values.</param>
public static void LogEvent(string msg, EventLogEntryType type)
{
string cacheserver="NCache";
if (type == EventLogEntryType.Information)
LogEvent(cacheserver, msg, type, EventCategories.Information, EventID.GeneralInformation);
else
LogEvent(cacheserver, msg, type, EventCategories.Warning, EventID.GeneralError);
}
/// <summary>
/// Returns lg(Log2) of a number.
/// </summary>
/// <param name="val"></param>
/// <returns></returns>
public static byte Lg(int val)
{
// Integer floor(log2) via repeated right-shift; returns 0 for val <= 1.
byte i = 0;
while (val > 1)
{
val >>= 1;
i++;
}
return i;
}
/// <summary>
/// Store all date time values as a difference to this time
/// </summary>
private static DateTime START_DT = new DateTime(2004, 12, 31, 0, 0, 0, 0, DateTimeKind.Utc);
/// <summary>
/// Convert DateTime to integer taking 31-12-2004 as base
/// and removing millisecond information
/// </summary>
/// <param name="dt"></param>
/// <returns></returns>
public static int DiffSeconds(DateTime dt)
{
dt = dt.ToUniversalTime();
TimeSpan interval = dt - START_DT;
return (int)interval.TotalSeconds;
}
public static int DiffMilliseconds(DateTime dt)
{
dt = dt.ToUniversalTime();
TimeSpan interval = dt - START_DT;
// NOTE(review): TimeSpan.Milliseconds is the 0-999 millisecond COMPONENT,
// not the total elapsed milliseconds — inconsistent with DiffSeconds
// (TotalSeconds) and DiffTicks (Ticks). Likely should be TotalMilliseconds;
// confirm against callers before changing.
return (int)interval.Milliseconds;
}
public static long DiffTicks(DateTime dt)
{
dt = dt.ToUniversalTime();
TimeSpan interval = dt - START_DT;
return interval.Ticks;
}
/// <summary>
/// Convert DateTime to integer taking 31-12-2004 as base
/// and removing millisecond information
/// </summary>
/// <param name="dt"></param>
/// <returns></returns>
public static DateTime GetDateTime(int absoluteTime)
{
// Inverse of DiffSeconds: re-add the elapsed seconds to the epoch.
DateTime dt = new DateTime(START_DT.Ticks, DateTimeKind.Utc);
return dt.AddSeconds(absoluteTime);
}
/// <summary>
/// Checks environment to verify if there is 'Any' version of Visual Studio installed.
/// and removing millisecond information
/// </summary>
public static bool IsVSIdeInstalled()
{
// Only VS 2005 (8.0) and VS 2008 (9.0) are probed.
RegistryKey rKey8 = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Microsoft\\VisualStudio\\8.0");
RegistryKey rKey9 = Registry.LocalMachine.OpenSubKey("SOFTWARE\\Microsoft\\VisualStudio\\9.0");
if (rKey8 != null)
{
if (rKey8.GetValue("InstallDir", "").ToString().Length != 0)
return true;
}
if (rKey9 != null)
{
if (rKey9.GetValue("InstallDir", "").ToString().Length != 0)
return true;
}
return false;
}
/// <summary>
/// Hashcode algorithm returning same hash code for both 32bit and 64 bit apps.
/// Used for data distribution under por/partitioned topologies.
/// </summary>
/// <param name="strArg"></param>
/// <returns></returns>
public static unsafe int GetHashCode(string strArg)
{
// Processes the string as 32-bit chunks (two chars at a time) with two
// rotating accumulators; the i <= 2 check stops before numPtr[1] would
// read past a short remaining tail.
fixed (void* str = strArg)
{
char* chPtr = (char*)str;
int num = 0x15051505;
int num2 = num;
int* numPtr = (int*)chPtr;
for (int i = strArg.Length; i > 0; i -= 4)
{
num = (((num << 5) + num) + (num >> 0x1b)) ^ numPtr[0];
if (i <= 2)
{
break;
}
num2 = (((num2 << 5) + num2) + (num2 >> 0x1b)) ^ numPtr[1];
numPtr += 2;
}
return (num + (num2 * 0x5d588b65));
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging;
using Orleans.Streams;
namespace Orleans.Providers.Streams.Common
{
/// <summary>
/// The PooledQueueCache is a cache that is intended to serve as a message cache in an IQueueCache.
/// It is capable of storing large numbers of messages (gigs worth of messages) for extended periods
/// of time (minutes to indefinite), while incurring a minimal performance hit due to garbage collection.
/// This pooled cache allocates memory and never releases it. It keeps freed resources available in pools
/// that remain in application use through the life of the service. This means these objects go to gen2,
/// are compacted, and then stay there. This is relatively cheap, as the only cost they now incur is
/// the cost of checking to see if they should be freed in each collection cycle. Since this cache uses
/// small numbers of large objects with relatively simple object graphs, they are less costly to check
/// then large numbers of smaller objects with more complex object graphs.
/// For performance reasons this cache is designed to more closely align with queue specific data. This is,
/// in part, why, unlike the SimpleQueueCache, this cache does not implement IQueueCache. It is intended
/// to be used in queue specific implementations of IQueueCache.
/// </summary>
public class PooledQueueCache: IPurgeObservable
{
// linked list of message bocks. First is newest.
private readonly LinkedList<CachedMessageBlock> messageBlocks;
private readonly CachedMessagePool pool;
private readonly ICacheDataAdapter cacheDataAdapter;
private readonly ILogger logger;
private readonly ICacheMonitor cacheMonitor;
private readonly PeriodicAction periodicMonitoring;
/// <summary>
/// Cached message most recently added
/// </summary>
public CachedMessage? Newest
{
get
{
if (IsEmpty)
return null;
return messageBlocks.First.Value.NewestMessage;
}
}
/// <summary>
/// Oldest message in cache
/// </summary>
public CachedMessage? Oldest
{
get
{
if (IsEmpty)
return null;
return messageBlocks.Last.Value.OldestMessage;
}
}
/// <summary>
/// Cached message count
/// </summary>
public int ItemCount { get; private set; }
/// <summary>
/// Pooled queue cache is a cache of message that obtains resource from a pool
/// </summary>
/// <param name="cacheDataAdapter"></param>
/// <param name="logger"></param>
/// <param name="cacheMonitor"></param>
/// <param name="cacheMonitorWriteInterval">cache monitor write interval. Only triggered for active caches.</param>
public PooledQueueCache(ICacheDataAdapter cacheDataAdapter, ILogger logger, ICacheMonitor cacheMonitor, TimeSpan? cacheMonitorWriteInterval)
{
this.cacheDataAdapter = cacheDataAdapter ?? throw new ArgumentNullException("cacheDataAdapter");
this.logger = logger ?? throw new ArgumentNullException("logger");
this.ItemCount = 0;
pool = new CachedMessagePool(cacheDataAdapter);
messageBlocks = new LinkedList<CachedMessageBlock>();
this.cacheMonitor = cacheMonitor;
if (this.cacheMonitor != null && cacheMonitorWriteInterval.HasValue)
{
this.periodicMonitoring = new PeriodicAction(cacheMonitorWriteInterval.Value, this.ReportCacheMessageStatistics);
}
}
/// <summary>
/// Indicates whether the cache is empty
/// </summary>
public bool IsEmpty => messageBlocks.Count == 0 || (messageBlocks.Count == 1 && messageBlocks.First.Value.IsEmpty);
/// <summary>
/// Acquires a cursor to enumerate through the messages in the cache at the provided sequenceToken,
/// filtered on the specified stream.
/// </summary>
/// <param name="streamIdentity">stream identity</param>
/// <param name="sequenceToken"></param>
/// <returns></returns>
public object GetCursor(IStreamIdentity streamIdentity, StreamSequenceToken sequenceToken)
{
var cursor = new Cursor(streamIdentity);
SetCursor(cursor, sequenceToken);
return cursor;
}
private void ReportCacheMessageStatistics()
{
if (this.IsEmpty)
{
this.cacheMonitor.ReportMessageStatistics(null, null, null, this.ItemCount);
}
else
{
var newestMessage = this.Newest.Value;
var oldestMessage = this.Oldest.Value;
var now = DateTime.UtcNow;
var newestMessageEnqueueTime = newestMessage.EnqueueTimeUtc;
var oldestMessageEnqueueTime = oldestMessage.EnqueueTimeUtc;
var oldestMessageDequeueTime = oldestMessage.DequeueTimeUtc;
this.cacheMonitor.ReportMessageStatistics(oldestMessageEnqueueTime, oldestMessageDequeueTime, newestMessageEnqueueTime, this.ItemCount);
}
}
private void SetCursor(Cursor cursor, StreamSequenceToken sequenceToken)
{
// If nothing in cache, unset token, and wait for more data.
if (messageBlocks.Count == 0)
{
cursor.State = CursorStates.Unset;
cursor.SequenceToken = sequenceToken;
return;
}
LinkedListNode<CachedMessageBlock> newestBlock = messageBlocks.First;
// if sequenceToken is null, iterate from newest message in cache
if (sequenceToken == null)
{
cursor.State = CursorStates.Idle;
cursor.CurrentBlock = newestBlock;
cursor.Index = newestBlock.Value.NewestMessageIndex;
cursor.SequenceToken = newestBlock.Value.GetNewestSequenceToken(cacheDataAdapter);
return;
}
// If sequenceToken is too new to be in cache, unset token, and wait for more data.
CachedMessage newestMessage = newestBlock.Value.NewestMessage;
if (newestMessage.Compare(sequenceToken) < 0)
{
cursor.State = CursorStates.Unset;
cursor.SequenceToken = sequenceToken;
return;
}
// Check to see if sequenceToken is too old to be in cache
CachedMessage oldestMessage = messageBlocks.Last.Value.OldestMessage;
if (oldestMessage.Compare(sequenceToken) > 0)
{
// throw cache miss exception
throw new QueueCacheMissException(sequenceToken,
messageBlocks.Last.Value.GetOldestSequenceToken(cacheDataAdapter),
messageBlocks.First.Value.GetNewestSequenceToken(cacheDataAdapter));
}
// Find block containing sequence number, starting from the newest and working back to oldest
LinkedListNode<CachedMessageBlock> node = messageBlocks.First;
while (true)
{
CachedMessage oldestMessageInBlock = node.Value.OldestMessage;
if (oldestMessageInBlock.Compare(sequenceToken) <= 0)
{
break;
}
node = node.Next;
}
// return cursor from start.
cursor.CurrentBlock = node;
cursor.Index = node.Value.GetIndexOfFirstMessageLessThanOrEqualTo(sequenceToken);
// if cursor has been idle, move to next message after message specified by sequenceToken
if(cursor.State == CursorStates.Idle)
{
// if there are more messages in this block, move to next message
if (!cursor.IsNewestInBlock)
{
cursor.Index++;
}
// if this is the newest message in this block, move to oldest message in newer block
else if (node.Previous != null)
{
cursor.CurrentBlock = node.Previous;
cursor.Index = cursor.CurrentBlock.Value.OldestMessageIndex;
}
else
{
cursor.State = CursorStates.Idle;
return;
}
}
cursor.SequenceToken = cursor.CurrentBlock.Value.GetSequenceToken(cursor.Index, cacheDataAdapter);
cursor.State = CursorStates.Set;
}
/// <summary>
/// Acquires the next message in the cache at the provided cursor
/// </summary>
/// <param name="cursorObj"></param>
/// <param name="message"></param>
/// <returns></returns>
public bool TryGetNextMessage(object cursorObj, out IBatchContainer message)
{
message = null;
if (cursorObj == null)
{
throw new ArgumentNullException("cursorObj");
}
var cursor = cursorObj as Cursor;
if (cursor == null)
{
throw new ArgumentOutOfRangeException("cursorObj", "Cursor is bad");
}
if (cursor.State != CursorStates.Set)
{
SetCursor(cursor, cursor.SequenceToken);
if (cursor.State != CursorStates.Set)
{
return false;
}
}
// has this message been purged
CachedMessage oldestMessage = messageBlocks.Last.Value.OldestMessage;
if (oldestMessage.Compare(cursor.SequenceToken) > 0)
{
throw new QueueCacheMissException(cursor.SequenceToken,
messageBlocks.Last.Value.GetOldestSequenceToken(cacheDataAdapter),
messageBlocks.First.Value.GetNewestSequenceToken(cacheDataAdapter));
}
// Iterate forward (in time) in the cache until we find a message on the stream or run out of cached messages.
// Note that we get the message from the current cursor location, then move it forward. This means that if we return true, the cursor
// will point to the next message after the one we're returning.
while (cursor.State == CursorStates.Set)
{
CachedMessage currentMessage = cursor.Message;
// Have we caught up to the newest event, if so set cursor to idle.
if (cursor.CurrentBlock == messageBlocks.First && cursor.IsNewestInBlock)
{
cursor.State = CursorStates.Idle;
cursor.SequenceToken = messageBlocks.First.Value.GetNewestSequenceToken(cacheDataAdapter);
}
else // move to next
{
int index;
if (cursor.IsNewestInBlock)
{
cursor.CurrentBlock = cursor.CurrentBlock.Previous;
cursor.CurrentBlock.Value.TryFindFirstMessage(cursor.StreamIdentity, this.cacheDataAdapter, out index);
}
else
{
cursor.CurrentBlock.Value.TryFindNextMessage(cursor.Index + 1, cursor.StreamIdentity, this.cacheDataAdapter, out index);
}
cursor.Index = index;
}
// check if this message is in the cursor's stream
if (currentMessage.CompareStreamId(cursor.StreamIdentity))
{
message = cacheDataAdapter.GetBatchContainer(ref currentMessage);
cursor.SequenceToken = cursor.CurrentBlock.Value.GetSequenceToken(cursor.Index, cacheDataAdapter);
return true;
}
}
return false;
}
/// <summary>
/// Add a list of queue message to the cache
/// </summary>
/// <param name="messages"></param>
/// <param name="dequeueTime"></param>
/// <returns></returns>
public void Add(List<CachedMessage> messages, DateTime dequeueTime)
{
foreach (var message in messages)
{
this.Add(message);
}
this.cacheMonitor?.TrackMessagesAdded(messages.Count);
periodicMonitoring?.TryAction(dequeueTime);
}
private void Add(CachedMessage message)
{
// allocate message from pool
CachedMessageBlock block = pool.AllocateMessage(message);
// If new block, add message block to linked list
if (block != messageBlocks.FirstOrDefault())
messageBlocks.AddFirst(block.Node);
ItemCount++;
}
/// <summary>
/// Remove oldest message in the cache, remove oldest block too if the block is empty
/// </summary>
public void RemoveOldestMessage()
{
this.messageBlocks.Last.Value.Remove();
this.ItemCount--;
CachedMessageBlock lastCachedMessageBlock = this.messageBlocks.Last.Value;
// if block is currently empty, but all capacity has been exausted, remove
if (lastCachedMessageBlock.IsEmpty && !lastCachedMessageBlock.HasCapacity)
{
lastCachedMessageBlock.Dispose();
this.messageBlocks.RemoveLast();
}
}
private enum CursorStates
{
Unset, // Not yet set, or points to some data in the future.
Set, // Points to a message in the cache
Idle, // Has iterated over all relevant events in the cache and is waiting for more data on the stream.
}
private class Cursor
{
public readonly IStreamIdentity StreamIdentity;
public Cursor(IStreamIdentity streamIdentity)
{
StreamIdentity = streamIdentity;
State = CursorStates.Unset;
}
public CursorStates State;
// current sequence token
public StreamSequenceToken SequenceToken;
// reference into cache
public LinkedListNode<CachedMessageBlock> CurrentBlock;
public int Index;
// utilities
public bool IsNewestInBlock => Index == CurrentBlock.Value.NewestMessageIndex;
public CachedMessage Message => CurrentBlock.Value[Index];
}
}
}
| |
#pragma warning disable 0414
using UnityEngine;
using UniRx.UI;
using System.Collections;
using UniRx;
using System.Threading;
using System.Collections.Generic;
using System;
using System.Text;
using UniRx.Diagnostics;
#if !(UNITY_METRO || UNITY_WP8) && (UNITY_4_3 || UNITY_4_2 || UNITY_4_1 || UNITY_4_0_1 || UNITY_4_0 || UNITY_3_5 || UNITY_3_4 || UNITY_3_3 || UNITY_3_2 || UNITY_3_1 || UNITY_3_0_0 || UNITY_3_0 || UNITY_2_6_1 || UNITY_2_6)
// Fallback for Unity versions below 4.5
using Hash = System.Collections.Hashtable;
using HashEntry = System.Collections.DictionaryEntry;
#else
using Hash = System.Collections.Generic.Dictionary<string, string>;
using HashEntry = System.Collections.Generic.KeyValuePair<string, string>;
using UniRx.InternalUtil;
#endif
namespace UniRx.ObjectTest
{
// test sandbox
    /// <summary>
    /// Manual test harness for UniRx: each OnGUI button exercises one operator
    /// (DelayFrame, SampleFrame, ThrottleFrame, TimeoutFrame, ReactiveProperty, ...)
    /// and appends its observations to an on-screen log box.
    /// </summary>
    public class UniRxTestSandbox : MonoBehaviour
    {
        //readonly static Logger logger = new Logger("UniRx.Test.NewBehaviour");

        // On-screen log buffer rendered by OnGUI.
        StringBuilder logtext = new StringBuilder();
        //[ThreadStatic]
        static object threadstaticobj;

        public void Awake()
        {
            Debug.Log("Awake");
            //ObservableLogger.Listener.LogToUnityDebug();
            //MainThreadDispatcher.Initialize();
            threadstaticobj = new object();
            /*
            ObservableLogger.Listener.ObserveOnMainThread().Subscribe(x =>
            {
                logtext.AppendLine(x.Message);
            });
            */
        }

        public void Start()
        {
            Debug.Log("Start");
            // DoubleCLick Sample of
            // The introduction to Reactive Programming you've been missing
            // https://gist.github.com/staltz/868e7e9bc2a7b8c1f754
            /*
            var clickStream = Observable.EveryUpdate()
                .Where(_ => Input.GetMouseButtonDown(0));
            clickStream.Buffer(clickStream.Throttle(TimeSpan.FromMilliseconds(250)))
                .Where(xs => xs.Count >= 2)
                .Subscribe(xs => Debug.Log("DoubleClick Detected! Count:" + xs.Count));
            */
        }

        public void Update()
        {
            // logtext.AppendLine(Time.frameCount.ToString());
        }

        // Subscription handle for the "Yield"/"YieldCancel" buttons in the commented-out section.
        IDisposable yieldCancel = null;
        // Subscriber subscriber = new Subscriber();
        // All button-created subscriptions land here so "Clear" can drop them at once.
        CompositeDisposable disposables = new CompositeDisposable();

        IEnumerator Hoge()
        {
            while (true)
            {
                // logtext.AppendLine(Time.frameCount.ToString());
                yield return null;
            }
        }

        // Fed by the "ThrottleClick" button; consumed by ThrottleFrame/TimeoutFrame tests.
        Subject<Unit> throttleSubject = new Subject<Unit>();

        public void OnGUI()
        {
            //var xpos = 0;
            //var ypos = 0;
            if (GUILayout.Button("Clear"))
            {
                logtext.Length = 0;
                disposables.Clear();
            }
            if (GUILayout.Button("DelayFrame"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Observable.Return(100)
                    .DelayFrame(3)
                    .Subscribe(x => logtext.AppendLine(x.ToString() + ":" + Time.frameCount), () => logtext.AppendLine("completed" + ":" + Time.frameCount));
            }
            if (GUILayout.Button("DelayFrameEmpty"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Observable.Empty<int>()
                    .DelayFrame(3)
                    .Subscribe(x => logtext.AppendLine(x.ToString() + ":" + Time.frameCount), () => logtext.AppendLine("completed" + ":" + Time.frameCount));
            }
            if (GUILayout.Button("NextFrame"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Observable.NextFrame()
                    .Subscribe(x => logtext.AppendLine(x.ToString() + ":" + Time.frameCount), () => logtext.AppendLine("completed" + Time.frameCount))
                    .AddTo(disposables);
            }
            if (GUILayout.Button("IntervalFrame"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Observable.IntervalFrame(3)
                    .Subscribe(x => logtext.AppendLine(x.ToString() + ":" + Time.frameCount), () => logtext.AppendLine("completed" + Time.frameCount))
                    .AddTo(disposables);
            }
            if (GUILayout.Button("TimerFrame1"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Observable.TimerFrame(3)
                    .Subscribe(x => logtext.AppendLine(x.ToString() + ":" + Time.frameCount), () => logtext.AppendLine("completed" + Time.frameCount))
                    .AddTo(disposables);
            }
            if (GUILayout.Button("TimerFrame2"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Observable.TimerFrame(5, 3)
                    .Subscribe(x => logtext.AppendLine(x.ToString() + ":" + Time.frameCount), () => logtext.AppendLine("completed" + Time.frameCount))
                    .AddTo(disposables);
            }
            if (GUILayout.Button("TimeScaleZero"))
            {
                logtext.AppendLine("StartFrame:" + Time.frameCount);
                Time.timeScale = 0f;
                // MainThreadIgnoreTimeScale should still fire even though timeScale is 0.
                Scheduler.MainThreadIgnoreTimeScale.Schedule(TimeSpan.FromSeconds(3), () =>
                {
                    logtext.AppendLine(Time.frameCount.ToString());
                });
            }
            if (GUILayout.Button("SampleFrame"))
            {
                logtext.AppendLine("SampleFrame:" + Time.frameCount);
                Observable.IntervalFrame(10)
                    .SampleFrame(25)
                    .Take(6)
                    .Subscribe(x =>
                    {
                        logtext.AppendLine("Sample:" + Time.frameCount.ToString());
                    }, () =>
                    {
                        logtext.AppendLine("Complete:" + Time.frameCount.ToString());
                    })
                    .AddTo(disposables);
            }
            if (GUILayout.Button("ThrottleClick"))
            {
                logtext.AppendLine("ClickFrame:" + Time.frameCount);
                throttleSubject.OnNext(Unit.Default);
            }
            if (GUILayout.Button("ThrottleFrame"))
            {
                logtext.AppendLine("ThrottleFrame:" + Time.frameCount);
                throttleSubject
                    .ThrottleFrame(60)
                    .Subscribe(x =>
                    {
                        logtext.AppendLine("Throttle:" + Time.frameCount.ToString());
                    }, () =>
                    {
                        logtext.AppendLine("Complete:" + Time.frameCount.ToString());
                    })
                    .AddTo(disposables);
            }
            if (GUILayout.Button("TimeoutFrame"))
            {
                logtext.AppendLine("TimeoutFrame:" + Time.frameCount);
                throttleSubject
                    .TimeoutFrame(60)
                    .Subscribe(x =>
                    {
                        logtext.AppendLine("Throttle:" + Time.frameCount.ToString());
                    }, ex =>
                    {
                        logtext.AppendLine("Timeout:" + ex.ToString());
                    }, () =>
                    {
                        logtext.AppendLine("Complete:" + Time.frameCount.ToString());
                    })
                    .AddTo(disposables);
            }
            if (GUILayout.Button("ReactiveProperty"))
            {
                var enemy = new Enemy(1000);
                enemy.CurrentHp.Subscribe(x => logtext.AppendLine(x.ToString())).AddTo(disposables);
                enemy.CurrentHp.Value -= 900;
                var person = new Person("hoge", "huga");
                person.FullName.Subscribe(x => logtext.AppendLine(x)).AddTo(disposables);
                person.GivenName.Value = "aiueo";
                person.FamilyName.Value = "kakikukeko";
            }

            //if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Clear"))
            //{
            //    logtext.Length = 0;
            //    disposables.Clear();
            //}
            //ypos += 100;
            //if (GUI.Button(new Rect(xpos, ypos, 100, 100), "CurrentThreadScheduler"))
            //{
            //    try
            //    {
            //        Scheduler.CurrentThread.Schedule(() =>
            //        {
            //            try
            //            {
            //                logtext.AppendLine("test threadscheduler");
            //            }
            //            catch (Exception ex)
            //            {
            //                logtext.AppendLine("innner ex" + ex.ToString());
            //            }
            //        });
            //    }
            //    catch (Exception ex)
            //    {
            //        logtext.AppendLine("outer ex" + ex.ToString());
            //    }
            //}
            //ypos += 100;
            //if (GUI.Button(new Rect(xpos, ypos, 100, 100), "EveryUpdate"))
            //{
            //    Observable.EveryUpdate()
            //        .Subscribe(x => logtext.AppendLine(x.ToString()), ex => logtext.AppendLine("ex:" + ex.ToString()))
            //        .AddTo(disposables);
            //}
            //ypos += 100;
            //if (GUI.Button(new Rect(xpos, ypos, 100, 100), "FromCoroutinePure"))
            //{
            //    Observable.Create<Unit>(observer =>
            //    {
            //        var cancel = new BooleanDisposable();
            //        MainThreadDispatcher.StartCoroutine(Hoge(observer));
            //        return cancel;
            //    })
            //    .Subscribe(x => logtext.AppendLine(x.ToString()), ex => logtext.AppendLine("ex:" + ex.ToString()));
            //}
            //ypos += 100;
            //if (GUI.Button(new Rect(xpos, ypos, 100, 100), "FromCoroutine"))
            //{
            //    Observable.FromCoroutine<Unit>(Hoge)
            //        .Subscribe(x => logtext.AppendLine(x.ToString()), ex => logtext.AppendLine("ex:" + ex.ToString()));
            //}
            /*
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "TimeScale-1"))
            {
                Time.timeScale -= 1f;
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "TimeScale+1"))
            {
                Time.timeScale += 1f;
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "TimeScale=0"))
            {
                Time.timeScale = 0;
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "TimeScale=100"))
            {
                Time.timeScale = 100;
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Scheduler0"))
            {
                logger.Debug("run");
                Scheduler.MainThread.Schedule(TimeSpan.FromMilliseconds(5000), () =>
                {
                    logger.Debug(DateTime.Now);
                });
            }
            xpos += 100;
            ypos = 0;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Scheduler1"))
            {
                logger.Debug("Before Start");
                Scheduler.MainThread.Schedule(() => logger.Debug("immediate"));
                Scheduler.MainThread.Schedule(TimeSpan.Zero, () => logger.Debug("zero span"));
                Scheduler.MainThread.Schedule(TimeSpan.FromMilliseconds(1), () => logger.Debug("0.1 span"));
                logger.Debug("After Start");
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Scheduler2"))
            {
                logger.Debug("M:Before Start");
                Scheduler.MainThread.Schedule(TimeSpan.FromSeconds(5), () => logger.Debug("M:after 5 minutes"));
                Scheduler.MainThread.Schedule(TimeSpan.FromMilliseconds(5500), () => logger.Debug("M:after 5.5 minutes"));
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Realtime"))
            {
                logger.Debug("R:Before Start");
                Scheduler.MainThreadIgnoreTimeScale.Schedule(TimeSpan.FromSeconds(5), () => logger.Debug("R:after 5 minutes"));
                Scheduler.MainThreadIgnoreTimeScale.Schedule(TimeSpan.FromMilliseconds(5500), () => logger.Debug("R:after 5.5 minutes"));
            }
#if !UNITY_METRO
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "ManagedThreadId"))
            {
                logger.Debug("Current:" + Thread.CurrentThread.ManagedThreadId);
                new Thread(_ => logger.Debug("NewThread:" + Thread.CurrentThread.ManagedThreadId)).Start();
                ThreadPool.QueueUserWorkItem(_ =>
                {
                    logger.Debug("ThraedPool:" + Thread.CurrentThread.ManagedThreadId);
                    this.transform.position = new Vector3(0, 0, 0); // exception
                });
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "ThreadStatic"))
            {
                logger.Debug(threadstaticobj != null);
                new Thread(_ => logger.Debug(threadstaticobj != null)).Start();
                ThreadPool.QueueUserWorkItem(_ => logger.Debug(threadstaticobj != null));
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Log"))
            {
                logger.Debug("test", this);
                ThreadPool.QueueUserWorkItem(_ => logger.Debug("test2", this));
            }
#endif
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "POST"))
            {
                var form = new WWWForm();
                form.AddField("test", "abcdefg");
                ObservableWWW.PostWWW("http://localhost:53395/Handler1.ashx", form, new Hash
                {
                    {"aaaa", "bbb"},
                    {"User-Agent", "HugaHuga"}
                })
                .Subscribe(x => logger.Debug(x.text));
            }
            xpos += 100;
            ypos = 0;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Yield"))
            {
                yieldCancel = Observable.FromCoroutineValue<string>(StringYield, false)
                    .Subscribe(x => logger.Debug(x), ex => logger.Debug("E-x:" + ex));
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "YieldCancel"))
            {
                yieldCancel.Dispose();
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "ThreadPool"))
            {
                Observable.Timer(TimeSpan.FromMilliseconds(400), Scheduler.ThreadPool)
                    .ObserveOnMainThread()
                    .Subscribe(x => logger.Debug(x));
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Subscribe"))
            {
                subscriber.InitSubscriptions();
                logger.Debug("Subscribe++ : " + subscriber.SubscriptionCount);
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Push"))
            {
                Publisher.foo();
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "Unsubscriber"))
            {
                subscriber.RemoveSubscriptions();
                logger.Debug("UnsubscribeAll : " + subscriber.SubscriptionCount);
            }
            ypos += 100;
            if (GUI.Button(new Rect(xpos, ypos, 100, 100), "DistinctUntilChanged"))
            {
                new[] { "hoge", null, null, "huga", "huga", "hoge" }
                    .ToObservable()
                    .DistinctUntilChanged()
                    .Subscribe(x => logger.Debug(x));
            }
             * */

            // Time
            var sb = new StringBuilder();
            sb.AppendLine("CaptureFramerate:" + Time.captureFramerate);
            sb.AppendLine("deltaTime:" + Time.deltaTime);
            sb.AppendLine("fixedDeltaTime:" + Time.fixedDeltaTime);
            sb.AppendLine("fixedTime:" + Time.fixedTime);
            sb.AppendLine("frameCount:" + Time.frameCount);
            sb.AppendLine("maximumDeltaTime:" + Time.maximumDeltaTime);
            sb.AppendLine("realtimeSinceStartup:" + Time.realtimeSinceStartup);
            sb.AppendLine("renderedFrameCount:" + Time.renderedFrameCount);
            sb.AppendLine("smoothDeltaTime:" + Time.smoothDeltaTime);
            sb.AppendLine("time:" + Time.time);
            sb.AppendLine("timeScale:" + Time.timeScale);
            sb.AppendLine("timeSinceLevelLoad:" + Time.timeSinceLevelLoad);
            sb.AppendLine("unscaledDeltaTime:" + Time.unscaledDeltaTime);
            sb.AppendLine("unscaledTime:" + Time.unscaledTime);

            //GUI.Box(new Rect(Screen.width - 300, Screen.height - 300, 300, 300), "Time");
            //GUI.Label(new Rect(Screen.width - 290, Screen.height - 290, 290, 290), sb.ToString());

            // logtext only
            GUI.Box(new Rect(Screen.width - 300, Screen.height - 300, 300, 300), "logtext");
            GUI.Label(new Rect(Screen.width - 290, Screen.height - 290, 290, 290), logtext.ToString());

            // Log
            //GUI.Box(new Rect(Screen.width - 300, 0, 300, 300), "Log");
            //GUI.Label(new Rect(Screen.width - 290, 10, 290, 290), logtext.ToString());
        }

        /*
        IEnumerator StringYield()
        {
            try
            {
                yield return "aaa";
                yield return "bbb";
                yield return new WaitForSeconds(5);
                yield return "ccc";
                yield return null;
                throw new Exception("ex!!!");
            }
            finally
            {
                logger.Debug("finally!");
            }
        }
        IEnumerator Work()
        {
            var t1 = Observable.Interval(TimeSpan.FromSeconds(1)).Take(4).ToLazyTask();
            var t2 = Observable.Interval(TimeSpan.FromSeconds(1)).Select(x => x * x).Take(4).ToLazyTask();
            var t3 = Observable.Throw<Unit>(new Exception()).ToLazyTask();
            yield return LazyTask.WhenAll(t1, t2, t3);
            logger.Debug(t1.Result + ":" + t2.Result);
            logger.Debug(t3.Exception);
        }
        IEnumerator Test()
        {
            logger.Debug("first");
            yield return 1000;
            logger.Debug("second");
        }
        // Question from UnityForum #45
        public static class Publisher
        {
            private static readonly object _Lock = new object();
            private static Subject<bool> item = new UniRx.Subject<bool>();
            public static IObservable<bool> Item
            {
                get
                {
                    return item; // no needs lock
                }
            }
            public static void foo()
            {
                item.OnNext(true);
            }
        }
        public class Subscriber
        {
            private CompositeDisposable m_Subscriptions = new CompositeDisposable();
            public int SubscriptionCount { get { return m_Subscriptions.Count; } }
            public void InitSubscriptions()
            {
                m_Subscriptions.Add(Publisher.Item.Subscribe(UniRx.Observer.Create<bool>(result => this.HandleItem(result), ex => this.HandleError(ex), () => { })));
            }
            void HandleItem(bool args)
            {
                logger.Debug("Received Item: " + args);
            }
            void HandleError(Exception ex)
            {
                logger.Debug("Exception: " + ex.Message);
            }
            public void RemoveSubscriptions()
            {
                m_Subscriptions.Clear();
            }
        }
         * */
    }
public class Enemy
{
public ReactiveProperty<long> CurrentHp { get; private set; }
public ReactiveProperty<bool> IsDead { get; private set; }
public Enemy(int initialHp)
{
CurrentHp = new ReactiveProperty<long>(initialHp);
IsDead = CurrentHp.Select(x => x <= 0).ToReactiveProperty();
}
}
public class Person
{
public ReactiveProperty<string> GivenName { get; private set; }
public ReactiveProperty<string> FamilyName { get; private set; }
public ReadOnlyReactiveProperty<string> FullName { get; private set; }
public Person(string givenName, string familyName)
{
GivenName = new ReactiveProperty<string>(givenName);
FamilyName = new ReactiveProperty<string>(familyName);
// If change the givenName or familyName, notify with fullName!
FullName = GivenName.CombineLatest(FamilyName, (x, y) => x + " " + y).ToReadOnlyReactiveProperty();
}
}
}
#pragma warning restore 0414
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Globalization;
namespace System.Management
{
/// <summary>
/// <para> Provides methods to convert DMTF datetime and time interval to CLR compliant
/// <see cref='System.DateTime'/> and <see cref='System.TimeSpan'/> format and vice versa.
/// </para>
/// </summary>
/// <example>
/// <code lang='C#'>
/// using System;
/// using System.Management;
///
/// // The sample below demonstrates the various conversions that can be done using ManagementDateTimeConverter class
/// class Sample_ManagementDateTimeConverterClass
/// {
/// public static int Main(string[] args)
/// {
/// string dmtfDate = "20020408141835.999999-420";
/// string dmtfTimeInterval = "00000010122532:123456:000";
///
/// // Converting DMTF datetime to System.DateTime
/// DateTime dt = ManagementDateTimeConverter.ToDateTime(dmtfDate);
///
    /// // Converting System.DateTime to DMTF datetime
    /// dmtfDate = ManagementDateTimeConverter.ToDmtfDateTime(DateTime.Now);
///
/// // Converting DMTF timeinterval to System.TimeSpan
    /// System.TimeSpan tsRet = ManagementDateTimeConverter.ToTimeSpan(dmtfTimeInterval);
///
/// //Converting System.TimeSpan to DMTF time interval format
/// System.TimeSpan ts = new System.TimeSpan(10,12,25,32,456);
/// string dmtfTimeInt = ManagementDateTimeConverter.ToDmtfTimeInterval(ts);
///
/// return 0;
///
/// }
/// }
/// </code>
/// <code lang='VB'>
/// Imports System
/// Imports System.Management
///
/// 'The sample below demonstrates the various conversions that can be done using ManagementDateTimeConverter class
/// Class Sample_ManagementClass
/// Overloads Public Shared Function Main(args() As String) As Integer
/// Dim dmtfDate As String = "20020408141835.999999-420"
/// Dim dmtfTimeInterval As String = "00000010122532:123456:000"
///
/// 'Converting DMTF datetime and intervals to System.DateTime
/// Dim dt As DateTime = ManagementDateTimeConverter.ToDateTime(dmtfDate)
///
    /// 'Converting System.DateTime to DMTF datetime
    /// dmtfDate = ManagementDateTimeConverter.ToDmtfDateTime(DateTime.Now)
///
/// ' Converting DMTF timeinterval to System.TimeSpan
/// Dim tsRet As System.TimeSpan = ManagementDateTimeConverter.ToTimeSpan(dmtfTimeInterval)
///
/// 'Converting System.TimeSpan to DMTF time interval format
/// Dim ts As System.TimeSpan = New System.TimeSpan(10, 12, 25, 32, 456)
    /// Dim dmtfTimeInt As String = ManagementDateTimeConverter.ToDmtfTimeInterval(ts)
///
/// Return 0
/// End Function
/// End Class
///
/// </code>
/// </example>
public sealed class ManagementDateTimeConverter
{
        // Constants used by the DMTF conversions below.
        private const int SIZEOFDMTFDATETIME = 25;      // length of a complete DMTF datetime string
        private const int MAXSIZE_UTC_DMTF = 999;       // largest UTC offset (in minutes) expressible in a DMTF suffix
        private const long MAXDATE_INTIMESPAN = 99999999; // maximum day count representable in a DMTF time interval

        // Private constructor: this type only exposes static conversion helpers,
        // so it is never instantiated.
        private ManagementDateTimeConverter()
        {
        }
/// <summary>
/// <para>Converts a given DMTF datetime to <see cref='System.DateTime'/> object. The returned DateTime will be in the
/// current TimeZone of the system.</para>
/// </summary>
/// <param name='dmtfDate'>A string representing the datetime in DMTF format.</param>
/// <returns>
/// <para>A <see cref='System.DateTime'/> object that represents the given DMTF datetime.</para>
/// </returns>
/// <remarks>
/// <para> Date and time in WMI is represented in DMTF datetime format. This format is explained in WMI SDK documentation.
/// DMTF datetime string has an UTC offset which this datetime string represents.
/// During conversion to <see cref='System.DateTime'/>, UTC offset is used to convert the date to the
/// current timezone. According to DMTF format a particular field can be represented by the character
/// '*'. This will be converted to the MinValue of this field that can be represented in <see cref='System.DateTime'/>.
/// </para>
/// </remarks>
/// <example>
/// <code lang='C#'>
/// // Convert a DMTF datetime to System.DateTime
/// DateTime date = ManagementDateTimeConverter.ToDateTime("20020408141835.999999-420");
/// </code>
/// <code lang='VB'>
/// ' Convert a DMTF datetime to System.DateTime
/// Dim date as DateTime = ManagementDateTimeConverter.ToDateTime("20020408141835.999999-420")
/// </code>
/// </example>
        public static DateTime ToDateTime(string dmtfDate)
        {
            // Seed each field with the corresponding DateTime.MinValue component so
            // that a wildcard ('*') field in the DMTF string falls back to the
            // minimum representable value for that field.
            int year = DateTime.MinValue.Year;
            int month = DateTime.MinValue.Month;
            int day = DateTime.MinValue.Day;
            int hour = DateTime.MinValue.Hour;
            int minute = DateTime.MinValue.Minute;
            int second = DateTime.MinValue.Second;
            string dmtf = dmtfDate;

            // If the string passed is empty or null then throw
            // an exception
            if (dmtf == null)
            {
                throw new ArgumentOutOfRangeException(nameof(dmtfDate));
            }
            if (dmtf.Length == 0)
            {
                throw new ArgumentOutOfRangeException(nameof(dmtfDate));
            }

            // if the length of the string is not equal to the
            // standard length of the DMTF datetime then throw an exception
            if (dmtf.Length != SIZEOFDMTFDATETIME)
            {
                throw new ArgumentOutOfRangeException(nameof(dmtfDate));
            }

            // Invariant culture: DMTF fields are machine-readable digits, never localized.
            IFormatProvider frmInt32 = (IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(int));
            long ticks = 0;      // sub-second portion, in DateTime ticks
            int utcOffset = 0;   // signed UTC offset in minutes, parsed from the suffix
            try
            {
                // Layout: yyyymmddHHMMSS.mmmmmmsUUU — fixed-position substrings;
                // any field may be all '*' (wildcard) and is then left at its minimum.
                var tempString = dmtf.Substring(0, 4);
                if (("****" != tempString))
                {
                    year = int.Parse(tempString, frmInt32);
                }
                tempString = dmtf.Substring(4, 2);
                if (("**" != tempString))
                {
                    month = int.Parse(tempString, frmInt32);
                }
                tempString = dmtf.Substring(6, 2);
                if (("**" != tempString))
                {
                    day = int.Parse(tempString, frmInt32);
                }
                tempString = dmtf.Substring(8, 2);
                if (("**" != tempString))
                {
                    hour = int.Parse(tempString, frmInt32);
                }
                tempString = dmtf.Substring(10, 2);
                if (("**" != tempString))
                {
                    minute = int.Parse(tempString, frmInt32);
                }
                tempString = dmtf.Substring(12, 2);
                if (("**" != tempString))
                {
                    second = int.Parse(tempString, frmInt32);
                }
                // Microseconds field: converted to DateTime ticks (1 tick = 100 ns).
                tempString = dmtf.Substring(15, 6);
                if (("******" != tempString))
                {
                    ticks = (long.Parse(tempString, (IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(long)))) * (TimeSpan.TicksPerMillisecond / 1000);
                }
                // UTC offset suffix: re-read from position 21 to include the sign character.
                tempString = dmtf.Substring(22, 3);
                if (("***" != tempString))
                {
                    tempString = dmtf.Substring(21, 4);
                    utcOffset = int.Parse(tempString, frmInt32);
                }
                if (year < 0 || month < 0 || day < 0 || hour < 0 || minute < 0 || second < 0 || ticks < 0)
                {
                    throw new ArgumentOutOfRangeException(nameof(dmtfDate));
                }
            }
            catch
            {
                // Any parse failure is surfaced uniformly as an out-of-range argument,
                // matching the documented contract of this API.
                throw new ArgumentOutOfRangeException(nameof(dmtfDate));
            }

            // Construct a new System.DateTime object, netfx uses date kind unspecified so use the same
            var datetime = new DateTime(year, month, day, hour, minute, second, 0, DateTimeKind.Unspecified);
            // Then add the ticks calculated from the microseconds
            datetime = datetime.AddTicks(ticks);
            // Then adjust the offset, using a manual calculation to keep the same possible range as netfx:
            // shift from the string's UTC offset into the local zone's offset at that instant.
            datetime = datetime.AddMinutes(-(utcOffset - TimeZoneInfo.Local.GetUtcOffset(datetime).Ticks / TimeSpan.TicksPerMinute));
            return datetime;
        }
/// <summary>
/// <para>Converts a given <see cref='System.DateTime'/> object to DMTF format.</para>
///
/// </summary>
/// <param name='date'>A <see cref='System.DateTime'/> object representing the datetime to be converted to DMTF datetime.</param>
/// <returns>
/// <para>A string that represents the DMTF datetime for the given DateTime object.</para>
/// </returns>
/// <remarks>
/// <para> Date and time in WMI is represented in DMTF datetime format. This format is explained in WMI SDK documentation.
/// The DMTF datetime string represented will be with respect to the UTC offset of the
/// current timezone. The lowest precision in DMTF is microseconds and
/// in <see cref='System.DateTime'/> is Ticks , which is equivalent to 100 of nanoseconds.
/// During conversion these Ticks are converted to microseconds and rounded
/// off to the nearest microsecond.
/// </para>
/// </remarks>
/// <example>
/// <code lang='C#'>
/// // Convert the current time in System.DateTime to DMTF format
/// string dmtfDateTime = ManagementDateTimeConverter.ToDmtfDateTime(DateTime.Now);
/// </code>
/// <code lang='VB'>
/// ' Convert the current time in System.DateTime to DMTF format
/// Dim dmtfDateTime as String = ManagementDateTimeConverter.ToDmtfDateTime(DateTime.Now)
/// </code>
/// </example>
public static string ToDmtfDateTime(DateTime date)
{
string UtcString = string.Empty;
// Fill up the UTC field in the DMTF date with the current
// zones UTC value. If date kind is UTC use offset of zero to match netfx (i.e.: TimeZone.GetUtcOffset)
TimeSpan tickOffset = date.Kind == DateTimeKind.Utc ? TimeSpan.Zero : TimeZoneInfo.Local.GetUtcOffset(date);
long OffsetMins = (tickOffset.Ticks / System.TimeSpan.TicksPerMinute);
IFormatProvider frmInt32 = (IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(int));
// If the offset is more than that what can be specified in DMTF format, then
// convert the date to UniversalTime
if (Math.Abs(OffsetMins) > MAXSIZE_UTC_DMTF)
{
date = date.ToUniversalTime();
UtcString = "+000";
}
else
if ((tickOffset.Ticks >= 0))
{
UtcString = "+" + ((tickOffset.Ticks / System.TimeSpan.TicksPerMinute)).ToString(frmInt32).PadLeft(3, '0');
}
else
{
string strTemp = OffsetMins.ToString(frmInt32);
UtcString = "-" + strTemp.Substring(1, strTemp.Length - 1).PadLeft(3, '0');
}
string dmtfDateTime = date.Year.ToString(frmInt32).PadLeft(4, '0');
dmtfDateTime = (dmtfDateTime + date.Month.ToString(frmInt32).PadLeft(2, '0'));
dmtfDateTime = (dmtfDateTime + date.Day.ToString(frmInt32).PadLeft(2, '0'));
dmtfDateTime = (dmtfDateTime + date.Hour.ToString(frmInt32).PadLeft(2, '0'));
dmtfDateTime = (dmtfDateTime + date.Minute.ToString(frmInt32).PadLeft(2, '0'));
dmtfDateTime = (dmtfDateTime + date.Second.ToString(frmInt32).PadLeft(2, '0'));
dmtfDateTime = (dmtfDateTime + ".");
// Construct a DateTime with the precision to Second as same as the passed DateTime and so get
// the ticks difference so that the microseconds can be calculated
DateTime dtTemp = new DateTime(date.Year, date.Month, date.Day, date.Hour, date.Minute, date.Second, 0);
long microsec = ((date.Ticks - dtTemp.Ticks) * 1000) / System.TimeSpan.TicksPerMillisecond;
// fill the microseconds field
string strMicrosec = microsec.ToString((IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(long)));
if (strMicrosec.Length > 6)
{
strMicrosec = strMicrosec.Substring(0, 6);
}
dmtfDateTime = dmtfDateTime + strMicrosec.PadLeft(6, '0');
// adding the UTC offset
dmtfDateTime = dmtfDateTime + UtcString;
return dmtfDateTime;
}
/// <summary>
/// <para>Converts a given DMTF time interval to <see cref='System.TimeSpan'/> object.</para>
/// </summary>
/// <param name='dmtfTimespan'>A string represesentation of the DMTF time interval.</param>
/// <returns>
/// <para>A <see cref='System.TimeSpan'/> object that represents the given DMTF time interval.</para>
/// </returns>
/// <remarks>
/// <para> Time interval in WMI is represented in DMTF format. This format is explained in WMI SDK documentation.
/// If the DMTF time interval value is more than that of
/// <see cref='System.TimeSpan.MaxValue'/> then <see cref='System.ArgumentOutOfRangeException'/> is thrown.
/// </para>
/// </remarks>
/// <example>
/// <code lang='C#'>
/// // Convert a DMTF time interval to System.TimeSpan
/// TimeSpan dmtfTimeInterval = ManagementDateTimeConverter.ToTimeSpan("00000010122532:123456:000");
/// </code>
/// <code lang='VB'>
/// ' Convert a DMTF time interval to System.TimeSpan
/// Dim ts as TimeSpan = ManagementDateTimeConverter.ToTimeSpan("00000010122532:123456:000")
/// </code>
/// </example>
public static TimeSpan ToTimeSpan(string dmtfTimespan)
{
int days = 0;
int hours = 0;
int minutes = 0;
int seconds = 0;
IFormatProvider frmInt32 = (IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(int));
string dmtfts = dmtfTimespan;
TimeSpan timespan = TimeSpan.MinValue;
if (dmtfts == null)
{
throw new System.ArgumentOutOfRangeException(nameof(dmtfTimespan));
}
if (dmtfts.Length == 0)
{
throw new System.ArgumentOutOfRangeException(nameof(dmtfTimespan));
}
if (dmtfts.Length != SIZEOFDMTFDATETIME)
{
throw new System.ArgumentOutOfRangeException(nameof(dmtfTimespan));
}
if (dmtfts.Substring(21, 4) != ":000")
{
throw new System.ArgumentOutOfRangeException(nameof(dmtfTimespan));
}
long ticks = 0;
try
{
string tempString = string.Empty;
tempString = dmtfts.Substring(0, 8);
days = int.Parse(tempString, frmInt32);
tempString = dmtfts.Substring(8, 2);
hours = int.Parse(tempString, frmInt32);
tempString = dmtfts.Substring(10, 2);
minutes = int.Parse(tempString, frmInt32);
tempString = dmtfts.Substring(12, 2);
seconds = int.Parse(tempString, frmInt32);
tempString = dmtfts.Substring(15, 6);
ticks = (long.Parse(tempString, (IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(long)))) * (System.TimeSpan.TicksPerMillisecond / 1000);
}
catch
{
throw new System.ArgumentOutOfRangeException(nameof(dmtfTimespan));
}
if (days < 0 || hours < 0 || minutes < 0 || seconds < 0 || ticks < 0)
{
throw new System.ArgumentOutOfRangeException(nameof(dmtfTimespan));
}
timespan = new System.TimeSpan(days, hours, minutes, seconds, 0);
// Get a timepan for the additional ticks obtained for the microsecond part of DMTF time interval
// and then add it to the original timespan
TimeSpan tsTemp = System.TimeSpan.FromTicks(ticks);
timespan = timespan + tsTemp;
return timespan;
}
/// <summary>
/// <para>Converts a given <see cref='System.TimeSpan'/> object to DMTF time interval.</para>
/// </summary>
/// <param name='timespan'> A <see cref='System.TimeSpan'/> object representing the datetime to be converted to DMTF time interval.
/// </param>
/// <returns>
/// <para>A string that represents the DMTF time interval for the given TimeSpan object.</para>
/// </returns>
/// <remarks>
/// <para> Time interval in WMI is represented in DMTF datetime format. This format
/// is explained in WMI SDK documentation. The lowest precision in
/// DMTF is microseconds and in <see cref='System.TimeSpan'/> is Ticks , which is equivalent
/// to 100 of nanoseconds.During conversion these Ticks are converted to
/// microseconds and rounded off to the nearest microsecond.
/// </para>
/// </remarks>
/// <example>
/// <code lang='C#'>
/// // Construct a Timespan object and convert it to DMTF format
/// System.TimeSpan ts = new System.TimeSpan(10,12,25,32,456);
/// String dmtfTimeInterval = ManagementDateTimeConverter.ToDmtfTimeInterval(ts);
/// </code>
/// <code lang='VB'>
/// // Construct a Timespan object and convert it to DMTF format
/// Dim ts as System.TimeSpan = new System.TimeSpan(10,12,25,32,456)
/// Dim dmtfTimeInterval as String = ManagementDateTimeConverter.ToDmtfTimeInterval(ts)
/// </code>
/// </example>
public static string ToDmtfTimeInterval(TimeSpan timespan)
{
string dmtftimespan = timespan.Days.ToString((IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(int))).PadLeft(8, '0');
IFormatProvider frmInt32 = (IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(int));
// Days that can be represented is more than what can be represented
// then throw an exception
// and also negative timespan cannot be represented in DMTF
if (timespan.Days > MAXDATE_INTIMESPAN || timespan < TimeSpan.Zero)
{
throw new System.ArgumentOutOfRangeException();
}
dmtftimespan = (dmtftimespan + timespan.Hours.ToString(frmInt32).PadLeft(2, '0'));
dmtftimespan = (dmtftimespan + timespan.Minutes.ToString(frmInt32).PadLeft(2, '0'));
dmtftimespan = (dmtftimespan + timespan.Seconds.ToString(frmInt32).PadLeft(2, '0'));
dmtftimespan = (dmtftimespan + ".");
// Construct a DateTime with the precision to Second as same as the passed DateTime and so get
// the ticks difference so that the microseconds can be calculated
TimeSpan tsTemp = new TimeSpan(timespan.Days, timespan.Hours, timespan.Minutes, timespan.Seconds, 0);
long microsec = ((timespan.Ticks - tsTemp.Ticks) * 1000) / System.TimeSpan.TicksPerMillisecond;
// fill the microseconds field
string strMicrosec = microsec.ToString((IFormatProvider)CultureInfo.InvariantCulture.GetFormat(typeof(long)));
if (strMicrosec.Length > 6)
{
strMicrosec = strMicrosec.Substring(0, 6);
}
dmtftimespan = dmtftimespan + strMicrosec.PadLeft(6, '0');
dmtftimespan = dmtftimespan + ":000";
return dmtftimespan;
}
} // ManagementDateTimeConverter
}
| |
// Visual Studio Shared Project
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using System.Text;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
using OleConstants = Microsoft.VisualStudio.OLE.Interop.Constants;
using VsCommands2K = Microsoft.VisualStudio.VSConstants.VSStd2KCmdID;
#if DEV14_OR_LATER
using Microsoft.VisualStudio.Imaging;
using Microsoft.VisualStudio.Imaging.Interop;
#endif
namespace Microsoft.VisualStudioTools.Project {
    /// <summary>
    /// Base hierarchy node for project references (assembly, project, COM, ...).
    /// Concrete subclasses supply the reference data binding and a resolved Url.
    /// </summary>
    internal abstract class ReferenceNode : HierarchyNode {
        // Callback handed back by CanAddReference when an add is vetoed; AddReference
        // invokes it so the subclass can show the user why the reference was rejected.
        internal delegate void CannotAddReferenceErrorMessage();
        #region ctors
        /// <summary>
        /// constructor for the ReferenceNode
        /// </summary>
        protected ReferenceNode(ProjectNode root, ProjectElement element)
            : base(root, element) {
            // Reference nodes never participate in source control operations.
            this.ExcludeNodeFromScc = true;
        }
        /// <summary>
        /// constructor for the ReferenceNode
        /// </summary>
        internal ReferenceNode(ProjectNode root)
            : base(root) {
            this.ExcludeNodeFromScc = true;
        }
        #endregion
        #region overridden properties
        // Context menu shown for this node: the standard VS "Reference" context menu.
        public override int MenuCommandId {
            get { return VsMenus.IDM_VS_CTXT_REFERENCE; }
        }
        // References have no project item type GUID.
        public override Guid ItemTypeGuid {
            get { return Guid.Empty; }
        }
        // Base implementation has no path; subclasses override with the resolved
        // location of the referenced assembly/project.
        public override string Url {
            get {
                return String.Empty;
            }
        }
        // Base implementation has no caption; subclasses override with the display name.
        public override string Caption {
            get {
                return String.Empty;
            }
        }
        #endregion
        #region overridden methods
        // Properties-window object for a reference node.
        protected override NodeProperties CreatePropertiesObject() {
            return new ReferenceNodeProperties(this);
        }
        /// <summary>
        /// Get an instance of the automation object for ReferenceNode
        /// </summary>
        /// <returns>An instance of Automation.OAReferenceItem type if succeeded;
        /// null when the owning project is unavailable or already closed.</returns>
        public override object GetAutomationObject() {
            if (this.ProjectMgr == null || this.ProjectMgr.IsClosed) {
                return null;
            }
            return new Automation.OAReferenceItem(this.ProjectMgr.GetAutomationObject() as Automation.OAProject, this);
        }
        /// <summary>
        /// Disable inline editing of Caption of a ReferenceNode
        /// </summary>
        /// <returns>null</returns>
        public override string GetEditLabel() {
            return null;
        }
#if DEV14_OR_LATER
        // Dev14+ uses image monikers instead of image-list indices.
        protected override bool SupportsIconMonikers {
            get { return true; }
        }
        // Warning icon signals a dangling/unresolved reference.
        protected override ImageMoniker GetIconMoniker(bool open) {
            return CanShowDefaultIcon() ? KnownMonikers.Reference : KnownMonikers.ReferenceWarning;
        }
#else
        public override int ImageIndex {
            get {
                return ProjectMgr.GetIconIndex(CanShowDefaultIcon() ?
                    ProjectNode.ImageName.Reference :
                    ProjectNode.ImageName.DanglingReference
                );
            }
        }
#endif
        /// <summary>
        /// Not supported. References cannot be excluded from the project.
        /// </summary>
        internal override int ExcludeFromProject() {
            return (int)OleConstants.OLECMDERR_E_NOTSUPPORTED;
        }
        // Removes the reference and notifies the parent References folder so it can
        // raise its child-removed event after the base removal has completed.
        public override void Remove(bool removeFromStorage) {
            ReferenceContainerNode parent = Parent as ReferenceContainerNode;
            base.Remove(removeFromStorage);
            if (parent != null) {
                parent.FireChildRemoved(this);
            }
        }
        /// <summary>
        /// References node cannot be dragged.
        /// </summary>
        /// <returns>null, which disables the clipboard/drag operation.</returns>
        protected internal override string PrepareSelectedNodesForClipBoard() {
            return null;
        }
        // Enables the "View in Object Browser" (quick object search) command for
        // this node; all other command groups fall through to the base handling.
        internal override int QueryStatusOnNode(Guid cmdGroup, uint cmd, IntPtr pCmdText, ref QueryStatusResult result) {
            if (cmdGroup == VsMenus.guidStandardCommandSet2K) {
                if ((VsCommands2K)cmd == VsCommands2K.QUICKOBJECTSEARCH) {
                    result |= QueryStatusResult.SUPPORTED | QueryStatusResult.ENABLED;
                    return VSConstants.S_OK;
                }
            } else {
                return (int)OleConstants.OLECMDERR_E_UNKNOWNGROUP;
            }
            return base.QueryStatusOnNode(cmdGroup, cmd, pCmdText, ref result);
        }
        // Executes the quick-object-search command by opening the Object Browser.
        internal override int ExecCommandOnNode(Guid cmdGroup, uint cmd, uint nCmdexecopt, IntPtr pvaIn, IntPtr pvaOut) {
            if (cmdGroup == VsMenus.guidStandardCommandSet2K) {
                if ((VsCommands2K)cmd == VsCommands2K.QUICKOBJECTSEARCH) {
                    return this.ShowObjectBrowser();
                }
            }
            return base.ExecCommandOnNode(cmdGroup, cmd, nCmdexecopt, pvaIn, pvaOut);
        }
        protected internal override void ShowDeleteMessage(IList<HierarchyNode> nodes, __VSDELETEITEMOPERATION action, out bool cancel, out bool useStandardDialog) {
            // Don't prompt if all the nodes are references
            useStandardDialog = !nodes.All(n => n is ReferenceNode);
            cancel = false;
        }
        #endregion
        #region methods
        /// <summary>
        /// Links a reference node to the project and hierarchy.
        /// Must be called on the UI thread. If CanAddReference vetoes the add, the
        /// veto's error-message delegate (if any) is invoked and nothing is added.
        /// </summary>
        public virtual void AddReference() {
            ProjectMgr.Site.GetUIThread().MustBeCalledFromUIThread();
            ReferenceContainerNode referencesFolder = this.ProjectMgr.GetReferenceContainer() as ReferenceContainerNode;
            Utilities.CheckNotNull(referencesFolder, "Could not find the References node");
            CannotAddReferenceErrorMessage referenceErrorMessageHandler = null;
            if (!this.CanAddReference(out referenceErrorMessageHandler)) {
                if (referenceErrorMessageHandler != null) {
                    referenceErrorMessageHandler.DynamicInvoke(new object[] { });
                }
                return;
            }
            // Link the node to the project file.
            this.BindReferenceData();
            // At this point force the item to be refreshed
            this.ItemNode.RefreshProperties();
            referencesFolder.AddChild(this);
            return;
        }
        /// <summary>
        /// Refreshes a reference by re-resolving it and redrawing the icon.
        /// </summary>
        internal virtual void RefreshReference() {
            this.ResolveReference();
            ProjectMgr.ReDrawNode(this, UIHierarchyElement.Icon);
        }
        /// <summary>
        /// Resolves references. Base implementation is a no-op; subclasses override.
        /// </summary>
        protected virtual void ResolveReference() {
        }
        /// <summary>
        /// Validates that a reference can be added.
        /// </summary>
        /// <param name="errorHandler">A CannotAddReferenceErrorMessage delegate to show the error message.</param>
        /// <returns>true if the reference can be added.</returns>
        protected virtual bool CanAddReference(out CannotAddReferenceErrorMessage errorHandler) {
            // When this method is called this reference has not yet been added to the hierarchy, only instantiated.
            errorHandler = null;
            if (this.IsAlreadyAdded()) {
                return false;
            }
            return true;
        }
        /// <summary>
        /// Checks if a reference is already added. The method parses all references and compares the Url.
        /// </summary>
        /// <returns>true if the assembly has already been added.</returns>
        protected virtual bool IsAlreadyAdded() {
            ReferenceContainerNode referencesFolder = this.ProjectMgr.GetReferenceContainer() as ReferenceContainerNode;
            Utilities.CheckNotNull(referencesFolder, "Could not find the References node");
            // Walk the sibling chain of the References folder looking for a node with the same path.
            for (HierarchyNode n = referencesFolder.FirstChild; n != null; n = n.NextSibling) {
                ReferenceNode refererenceNode = n as ReferenceNode;
                if (null != refererenceNode) {
                    // We check if the Url of the assemblies is the same.
                    if (CommonUtils.IsSamePath(refererenceNode.Url, this.Url)) {
                        return true;
                    }
                }
            }
            return false;
        }
        /// <summary>
        /// Shows the Object Browser navigated to this reference's library.
        /// Returns OLECMDERR_E_NOTSUPPORTED when the referenced file does not exist.
        /// </summary>
        /// <returns>An HRESULT: S_OK on success, or the COM error code on failure.</returns>
        protected virtual int ShowObjectBrowser() {
            if (!File.Exists(this.Url)) {
                return (int)OleConstants.OLECMDERR_E_NOTSUPPORTED;
            }
            // Request unmanaged code permission in order to be able to create the unmanaged memory representing the guid.
            new SecurityPermission(SecurityPermissionFlag.UnmanagedCode).Demand();
            Guid guid = VSConstants.guidCOMPLUSLibrary;
            // The library GUID must be passed to the browser as unmanaged memory;
            // it is freed in the finally block below.
            IntPtr ptr = System.Runtime.InteropServices.Marshal.AllocCoTaskMem(guid.ToByteArray().Length);
            System.Runtime.InteropServices.Marshal.StructureToPtr(guid, ptr, false);
            int returnValue = VSConstants.S_OK;
            try {
                VSOBJECTINFO[] objInfo = new VSOBJECTINFO[1];
                objInfo[0].pguidLib = ptr;
                objInfo[0].pszLibName = this.Url;
                IVsObjBrowser objBrowser = this.ProjectMgr.Site.GetService(typeof(SVsObjBrowser)) as IVsObjBrowser;
                ErrorHandler.ThrowOnFailure(objBrowser.NavigateTo(objInfo, 0));
            } catch (COMException e) {
                Trace.WriteLine("Exception" + e.ErrorCode);
                returnValue = e.ErrorCode;
            } finally {
                if (ptr != IntPtr.Zero) {
                    System.Runtime.InteropServices.Marshal.FreeCoTaskMem(ptr);
                }
            }
            return returnValue;
        }
        // References may only be removed from the project, never deleted from disk.
        internal override bool CanDeleteItem(__VSDELETEITEMOPERATION deleteOperation) {
            if (deleteOperation == __VSDELETEITEMOPERATION.DELITEMOP_RemoveFromProject) {
                return true;
            }
            return false;
        }
        // Subclasses persist the reference into the project file (MSBuild item).
        protected abstract void BindReferenceData();
        #endregion
    }
}
| |
using System;
using System.Globalization;
using System.Collections.Generic;
using Sasoma.Utils;
using Sasoma.Microdata.Interfaces;
using Sasoma.Languages.Core;
using Sasoma.Microdata.Properties;
namespace Sasoma.Microdata.Types
{
/// <summary>
/// The footer section of the page.
/// </summary>
public class WPFooter_Core : TypeCore, IWebPageElement
{
public WPFooter_Core()
{
this._TypeId = 289;
this._Id = "WPFooter";
this._Schema_Org_Url = "http://schema.org/WPFooter";
string label = "";
GetLabel(out label, "WPFooter", typeof(WPFooter_Core));
this._Label = label;
this._Ancestors = new int[]{266,78,294};
this._SubTypes = new int[0];
this._SuperTypes = new int[]{294};
this._Properties = new int[]{67,108,143,229,0,2,10,12,18,20,24,26,21,50,51,54,57,58,59,61,62,64,70,72,81,97,100,110,115,116,126,138,151,178,179,180,199,211,219,230,231};
}
/// <summary>
/// The subject matter of the content.
/// </summary>
private About_Core about;
public About_Core About
{
get
{
return about;
}
set
{
about = value;
SetPropertyInstance(about);
}
}
/// <summary>
/// Specifies the Person that is legally accountable for the CreativeWork.
/// </summary>
private AccountablePerson_Core accountablePerson;
public AccountablePerson_Core AccountablePerson
{
get
{
return accountablePerson;
}
set
{
accountablePerson = value;
SetPropertyInstance(accountablePerson);
}
}
/// <summary>
/// The overall rating, based on a collection of reviews or ratings, of the item.
/// </summary>
private Properties.AggregateRating_Core aggregateRating;
public Properties.AggregateRating_Core AggregateRating
{
get
{
return aggregateRating;
}
set
{
aggregateRating = value;
SetPropertyInstance(aggregateRating);
}
}
/// <summary>
/// A secondary title of the CreativeWork.
/// </summary>
private AlternativeHeadline_Core alternativeHeadline;
public AlternativeHeadline_Core AlternativeHeadline
{
get
{
return alternativeHeadline;
}
set
{
alternativeHeadline = value;
SetPropertyInstance(alternativeHeadline);
}
}
/// <summary>
/// The media objects that encode this creative work. This property is a synonym for encodings.
/// </summary>
private AssociatedMedia_Core associatedMedia;
public AssociatedMedia_Core AssociatedMedia
{
get
{
return associatedMedia;
}
set
{
associatedMedia = value;
SetPropertyInstance(associatedMedia);
}
}
/// <summary>
/// An embedded audio object.
/// </summary>
private Audio_Core audio;
public Audio_Core Audio
{
get
{
return audio;
}
set
{
audio = value;
SetPropertyInstance(audio);
}
}
/// <summary>
/// The author of this content. Please note that author is special in that HTML 5 provides a special mechanism for indicating authorship via the rel tag. That is equivalent to this and may be used interchangabely.
/// </summary>
private Author_Core author;
public Author_Core Author
{
get
{
return author;
}
set
{
author = value;
SetPropertyInstance(author);
}
}
/// <summary>
/// Awards won by this person or for this creative work.
/// </summary>
private Awards_Core awards;
public Awards_Core Awards
{
get
{
return awards;
}
set
{
awards = value;
SetPropertyInstance(awards);
}
}
/// <summary>
/// Comments, typically from users, on this CreativeWork.
/// </summary>
private Comment_Core comment;
public Comment_Core Comment
{
get
{
return comment;
}
set
{
comment = value;
SetPropertyInstance(comment);
}
}
/// <summary>
/// The location of the content.
/// </summary>
private ContentLocation_Core contentLocation;
public ContentLocation_Core ContentLocation
{
get
{
return contentLocation;
}
set
{
contentLocation = value;
SetPropertyInstance(contentLocation);
}
}
/// <summary>
/// Official rating of a piece of content\u2014for example,'MPAA PG-13'.
/// </summary>
private ContentRating_Core contentRating;
public ContentRating_Core ContentRating
{
get
{
return contentRating;
}
set
{
contentRating = value;
SetPropertyInstance(contentRating);
}
}
/// <summary>
/// A secondary contributor to the CreativeWork.
/// </summary>
private Contributor_Core contributor;
public Contributor_Core Contributor
{
get
{
return contributor;
}
set
{
contributor = value;
SetPropertyInstance(contributor);
}
}
/// <summary>
/// The party holding the legal copyright to the CreativeWork.
/// </summary>
private CopyrightHolder_Core copyrightHolder;
public CopyrightHolder_Core CopyrightHolder
{
get
{
return copyrightHolder;
}
set
{
copyrightHolder = value;
SetPropertyInstance(copyrightHolder);
}
}
/// <summary>
/// The year during which the claimed copyright for the CreativeWork was first asserted.
/// </summary>
private CopyrightYear_Core copyrightYear;
public CopyrightYear_Core CopyrightYear
{
get
{
return copyrightYear;
}
set
{
copyrightYear = value;
SetPropertyInstance(copyrightYear);
}
}
/// <summary>
/// The creator/author of this CreativeWork or UserComments. This is the same as the Author property for CreativeWork.
/// </summary>
private Creator_Core creator;
public Creator_Core Creator
{
get
{
return creator;
}
set
{
creator = value;
SetPropertyInstance(creator);
}
}
/// <summary>
/// The date on which the CreativeWork was created.
/// </summary>
private DateCreated_Core dateCreated;
public DateCreated_Core DateCreated
{
get
{
return dateCreated;
}
set
{
dateCreated = value;
SetPropertyInstance(dateCreated);
}
}
/// <summary>
/// The date on which the CreativeWork was most recently modified.
/// </summary>
private DateModified_Core dateModified;
public DateModified_Core DateModified
{
get
{
return dateModified;
}
set
{
dateModified = value;
SetPropertyInstance(dateModified);
}
}
/// <summary>
/// Date of first broadcast/publication.
/// </summary>
private DatePublished_Core datePublished;
public DatePublished_Core DatePublished
{
get
{
return datePublished;
}
set
{
datePublished = value;
SetPropertyInstance(datePublished);
}
}
/// <summary>
/// A short description of the item.
/// </summary>
private Description_Core description;
public Description_Core Description
{
get
{
return description;
}
set
{
description = value;
SetPropertyInstance(description);
}
}
/// <summary>
/// A link to the page containing the comments of the CreativeWork.
/// </summary>
private DiscussionURL_Core discussionURL;
public DiscussionURL_Core DiscussionURL
{
get
{
return discussionURL;
}
set
{
discussionURL = value;
SetPropertyInstance(discussionURL);
}
}
/// <summary>
/// Specifies the Person who edited the CreativeWork.
/// </summary>
private Editor_Core editor;
public Editor_Core Editor
{
get
{
return editor;
}
set
{
editor = value;
SetPropertyInstance(editor);
}
}
/// <summary>
/// The media objects that encode this creative work
/// </summary>
private Encodings_Core encodings;
public Encodings_Core Encodings
{
get
{
return encodings;
}
set
{
encodings = value;
SetPropertyInstance(encodings);
}
}
/// <summary>
/// Genre of the creative work
/// </summary>
private Genre_Core genre;
public Genre_Core Genre
{
get
{
return genre;
}
set
{
genre = value;
SetPropertyInstance(genre);
}
}
/// <summary>
/// Headline of the article
/// </summary>
private Headline_Core headline;
public Headline_Core Headline
{
get
{
return headline;
}
set
{
headline = value;
SetPropertyInstance(headline);
}
}
/// <summary>
/// URL of an image of the item.
/// </summary>
private Image_Core image;
public Image_Core Image
{
get
{
return image;
}
set
{
image = value;
SetPropertyInstance(image);
}
}
/// <summary>
/// The language of the content. please use one of the language codes from the <a href=\http://tools.ietf.org/html/bcp47\>IETF BCP 47 standard.</a>
/// </summary>
private InLanguage_Core inLanguage;
public InLanguage_Core InLanguage
{
get
{
return inLanguage;
}
set
{
inLanguage = value;
SetPropertyInstance(inLanguage);
}
}
/// <summary>
/// A count of a specific user interactions with this item\u2014for example, <code>20 UserLikes</code>, <code>5 UserComments</code>, or <code>300 UserDownloads</code>. The user interaction type should be one of the sub types of <a href=\http://schema.org/UserInteraction\>UserInteraction</a>.
/// </summary>
private InteractionCount_Core interactionCount;
public InteractionCount_Core InteractionCount
{
get
{
return interactionCount;
}
set
{
interactionCount = value;
SetPropertyInstance(interactionCount);
}
}
/// <summary>
/// Indicates whether this content is family friendly.
/// </summary>
private IsFamilyFriendly_Core isFamilyFriendly;
public IsFamilyFriendly_Core IsFamilyFriendly
{
get
{
return isFamilyFriendly;
}
set
{
isFamilyFriendly = value;
SetPropertyInstance(isFamilyFriendly);
}
}
/// <summary>
/// The keywords/tags used to describe this content.
/// </summary>
private Keywords_Core keywords;
public Keywords_Core Keywords
{
get
{
return keywords;
}
set
{
keywords = value;
SetPropertyInstance(keywords);
}
}
/// <summary>
/// Indicates that the CreativeWork contains a reference to, but is not necessarily about a concept.
/// </summary>
private Mentions_Core mentions;
public Mentions_Core Mentions
{
get
{
return mentions;
}
set
{
mentions = value;
SetPropertyInstance(mentions);
}
}
/// <summary>
/// The name of the item.
/// </summary>
private Name_Core name;
public Name_Core Name
{
get
{
return name;
}
set
{
name = value;
SetPropertyInstance(name);
}
}
/// <summary>
/// An offer to sell this item\u2014for example, an offer to sell a product, the DVD of a movie, or tickets to an event.
/// </summary>
private Offers_Core offers;
public Offers_Core Offers
{
get
{
return offers;
}
set
{
offers = value;
SetPropertyInstance(offers);
}
}
/// <summary>
/// Specifies the Person or Organization that distributed the CreativeWork.
/// </summary>
private Provider_Core provider;
public Provider_Core Provider
{
get
{
return provider;
}
set
{
provider = value;
SetPropertyInstance(provider);
}
}
/// <summary>
/// The publisher of the creative work.
/// </summary>
private Publisher_Core publisher;
public Publisher_Core Publisher
{
get
{
return publisher;
}
set
{
publisher = value;
SetPropertyInstance(publisher);
}
}
/// <summary>
/// Link to page describing the editorial principles of the organization primarily responsible for the creation of the CreativeWork.
/// </summary>
private PublishingPrinciples_Core publishingPrinciples;
public PublishingPrinciples_Core PublishingPrinciples
{
get
{
return publishingPrinciples;
}
set
{
publishingPrinciples = value;
SetPropertyInstance(publishingPrinciples);
}
}
/// <summary>
/// Review of the item.
/// </summary>
private Reviews_Core reviews;
public Reviews_Core Reviews
{
get
{
return reviews;
}
set
{
reviews = value;
SetPropertyInstance(reviews);
}
}
/// <summary>
/// The Organization on whose behalf the creator was working.
/// </summary>
private SourceOrganization_Core sourceOrganization;
public SourceOrganization_Core SourceOrganization
{
get
{
return sourceOrganization;
}
set
{
sourceOrganization = value;
SetPropertyInstance(sourceOrganization);
}
}
/// <summary>
/// A thumbnail image relevant to the Thing.
/// </summary>
private ThumbnailURL_Core thumbnailURL;
public ThumbnailURL_Core ThumbnailURL
{
get
{
return thumbnailURL;
}
set
{
thumbnailURL = value;
SetPropertyInstance(thumbnailURL);
}
}
/// <summary>
/// URL of the item.
/// </summary>
private Properties.URL_Core uRL;
public Properties.URL_Core URL
{
get
{
return uRL;
}
set
{
uRL = value;
SetPropertyInstance(uRL);
}
}
/// <summary>
/// The version of the CreativeWork embodied by a specified resource.
/// </summary>
private Version_Core version;
public Version_Core Version
{
get
{
return version;
}
set
{
version = value;
SetPropertyInstance(version);
}
}
/// <summary>
/// An embedded video object.
/// </summary>
private Video_Core video;
public Video_Core Video
{
get
{
return video;
}
set
{
video = value;
SetPropertyInstance(video);
}
}
}
}
| |
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
// Author(s):
//
// Erik Ejlskov Jensen, http://erikej.blogspot.com/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
[assembly: Elmah.Scc("$Id: SqlServerCompactErrorLog.cs 925 2011-12-23 22:46:09Z azizatif $")]
namespace Elmah
{
#region Imports
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlServerCe;
using System.IO;
using IDictionary = System.Collections.IDictionary;
#endregion
/// <summary>
/// An <see cref="ErrorLog"/> implementation that uses SQL Server
/// Compact 4 as its backing store.
/// </summary>
public class SqlServerCompactErrorLog : ErrorLog
{
private readonly string _connectionString;
/// <summary>
/// Initializes a new instance of the <see cref="SqlServerCompactErrorLog"/> class
/// using a dictionary of configured settings.
/// </summary>
public SqlServerCompactErrorLog(IDictionary config)
{
if (config == null)
throw new ArgumentNullException("config");
string connectionString = ConnectionStringHelper.GetConnectionString(config, true);
//
// If there is no connection string to use then throw an
// exception to abort construction.
//
if (connectionString.Length == 0)
throw new Elmah.ApplicationException("Connection string is missing for the SQL Server Compact error log.");
_connectionString = connectionString;
InitializeDatabase();
ApplicationName = (string) config["applicationName"] ?? string.Empty;
}
/// <summary>
/// Initializes a new instance of the <see cref="SqlServerCompactErrorLog"/> class
/// to use a specific connection string for connecting to the database.
/// </summary>
public SqlServerCompactErrorLog(string connectionString)
{
if (connectionString == null)
throw new ArgumentNullException("connectionString");
if (connectionString.Length == 0)
throw new ArgumentException(null, "connectionString");
_connectionString = ConnectionStringHelper.GetResolvedConnectionString(connectionString);
InitializeDatabase();
}
private void InitializeDatabase()
{
string connectionString = ConnectionString;
Debug.AssertStringNotEmpty(connectionString);
string dbFilePath = ConnectionStringHelper.GetDataSourceFilePath(connectionString);
if (File.Exists(dbFilePath))
return;
using (SqlCeEngine engine = new SqlCeEngine(ConnectionString))
{
engine.CreateDatabase();
}
using (SqlCeConnection conn = new SqlCeConnection(ConnectionString))
{
using (SqlCeCommand cmd = new SqlCeCommand())
{
conn.Open();
SqlCeTransaction transaction = conn.BeginTransaction();
try
{
cmd.Connection = conn;
cmd.Transaction = transaction;
cmd.CommandText = @"
SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = N'ELMAH_Error'";
object obj = cmd.ExecuteScalar();
if (obj == null)
{
cmd.CommandText = @"
CREATE TABLE ELMAH_Error (
[ErrorId] UNIQUEIDENTIFIER NOT NULL PRIMARY KEY DEFAULT newid(),
[Application] NVARCHAR(60) NOT NULL,
[Host] NVARCHAR(50) NOT NULL,
[Type] NVARCHAR(100) NOT NULL,
[Source] NVARCHAR(60) NOT NULL,
[Message] NVARCHAR(500) NOT NULL,
[User] NVARCHAR(50) NOT NULL,
[StatusCode] INT NOT NULL,
[TimeUtc] DATETIME NOT NULL,
[Sequence] INT IDENTITY (1, 1) NOT NULL,
[AllXml] NTEXT NOT NULL
)";
cmd.ExecuteNonQuery();
cmd.CommandText = @"
CREATE NONCLUSTERED INDEX [IX_Error_App_Time_Seq] ON [ELMAH_Error]
(
[Application] ASC,
[TimeUtc] DESC,
[Sequence] DESC
)";
cmd.ExecuteNonQuery();
}
transaction.Commit(CommitMode.Immediate);
}
catch (SqlCeException)
{
transaction.Rollback();
throw;
}
}
}
}
/// <summary>
/// Gets the name of this error log implementation.
/// </summary>
public override string Name
{
    get { return "SQL Server Compact Error Log"; } // human-readable implementation name
}
/// <summary>
/// Gets the connection string used by the log to connect to the database.
/// </summary>
/// <remarks>
/// The value is resolved once in the constructor via
/// ConnectionStringHelper.GetResolvedConnectionString.
/// </remarks>
public virtual string ConnectionString
{
    get { return _connectionString; }
}
/// <summary>
/// Logs an error to the database.
/// </summary>
/// <remarks>
/// All errors are stored for an indefinite time; this implementation
/// applies no retention policy of its own.
/// </remarks>
/// <param name="error">The error to persist; must not be null.</param>
/// <returns>The generated error id (a GUID) as a string.</returns>
public override string Log(Error error)
{
    if (error == null)
        throw new ArgumentNullException("error");

    string errorXml = ErrorXml.EncodeString(error);
    Guid id = Guid.NewGuid();

    const string query = @"
INSERT INTO ELMAH_Error (
[ErrorId], [Application], [Host],
[Type], [Source], [Message], [User], [StatusCode],
[TimeUtc], [AllXml] )
VALUES (
@ErrorId, @Application, @Host,
@Type, @Source, @Message, @User, @StatusCode,
@TimeUtc, @AllXml);";

    using (SqlCeConnection connection = new SqlCeConnection(ConnectionString))
    using (SqlCeCommand command = new SqlCeCommand(query, connection))
    {
        SqlCeParameterCollection parameters = command.Parameters;
        parameters.Add("@ErrorId", SqlDbType.UniqueIdentifier).Value = id;
        parameters.Add("@Application", SqlDbType.NVarChar, 60).Value = ApplicationName;
        // FIX: size was 30 but the ELMAH_Error schema declares [Host] NVARCHAR(50);
        // the smaller parameter size could truncate or reject longer host names.
        parameters.Add("@Host", SqlDbType.NVarChar, 50).Value = error.HostName;
        parameters.Add("@Type", SqlDbType.NVarChar, 100).Value = error.Type;
        parameters.Add("@Source", SqlDbType.NVarChar, 60).Value = error.Source;
        parameters.Add("@Message", SqlDbType.NVarChar, 500).Value = error.Message;
        parameters.Add("@User", SqlDbType.NVarChar, 50).Value = error.User;
        parameters.Add("@StatusCode", SqlDbType.Int).Value = error.StatusCode;
        // Times are normalized to UTC on write; readers convert back to local time.
        parameters.Add("@TimeUtc", SqlDbType.DateTime).Value = error.Time.ToUniversalTime();
        parameters.Add("@AllXml", SqlDbType.NText).Value = errorXml;

        // (Removed redundant "command.Connection = connection" — the constructor
        // overload above already associates the command with the connection.)
        connection.Open();
        command.ExecuteNonQuery();
        return id.ToString();
    }
}
/// <summary>
/// Returns a page of errors from the database in descending order
/// of logged time.
/// </summary>
/// <returns>The total number of errors in the log (not the page size).</returns>
public override int GetErrors(int pageIndex, int pageSize, ICollection<ErrorLogEntry> errorEntryList)
{
    if (pageIndex < 0)
        throw new ArgumentOutOfRangeException("pageIndex", pageIndex, null);
    if (pageSize < 0)
        throw new ArgumentOutOfRangeException("pageSize", pageSize, null);
    // NOTE(review): OFFSET ... FETCH paging requires SQL Server Compact 4.0 —
    // TODO confirm the targeted SQL CE version.
    const string sql = @"
SELECT
[ErrorId],
[Application],
[Host],
[Type],
[Source],
[Message],
[User],
[StatusCode],
[TimeUtc]
FROM
[ELMAH_Error]
ORDER BY
[TimeUtc] DESC,
[Sequence] DESC
OFFSET @PageSize * @PageIndex ROWS FETCH NEXT @PageSize ROWS ONLY;
";
    const string getCount = @"
SELECT COUNT(*) FROM [ELMAH_Error]";
    using (SqlCeConnection connection = new SqlCeConnection(ConnectionString))
    {
        connection.Open();
        using (SqlCeCommand command = new SqlCeCommand(sql, connection))
        {
            SqlCeParameterCollection parameters = command.Parameters;
            parameters.Add("@PageIndex", SqlDbType.Int).Value = pageIndex;
            parameters.Add("@PageSize", SqlDbType.Int).Value = pageSize;
            // NOTE(review): @Application is bound here but never referenced by the
            // SQL above — either dead code or a missing
            // "WHERE [Application] = @Application" filter; confirm the intent
            // (as written, errors of ALL applications are returned and counted).
            parameters.Add("@Application", SqlDbType.NVarChar, 60).Value = ApplicationName;
            using (SqlCeDataReader reader = command.ExecuteReader())
            {
                // A null list is allowed: the caller then only wants the total count.
                if (errorEntryList != null)
                {
                    while (reader.Read())
                    {
                        string id = reader["ErrorId"].ToString();
                        Elmah.Error error = new Elmah.Error();
                        error.ApplicationName = reader["Application"].ToString();
                        error.HostName = reader["Host"].ToString();
                        error.Type = reader["Type"].ToString();
                        error.Source = reader["Source"].ToString();
                        error.Message = reader["Message"].ToString();
                        error.User = reader["User"].ToString();
                        error.StatusCode = Convert.ToInt32(reader["StatusCode"]);
                        // Stored as UTC; converted back to local time for display.
                        error.Time = Convert.ToDateTime(reader["TimeUtc"]).ToLocalTime();
                        errorEntryList.Add(new ErrorLogEntry(this, id, error));
                    }
                }
            }
        }
        // Second round-trip: total row count across the whole table.
        using (SqlCeCommand command = new SqlCeCommand(getCount, connection))
        {
            return (int)command.ExecuteScalar();
        }
    }
}
/// <summary>
/// Returns the specified error from the database, or null
/// if it does not exist.
/// </summary>
public override ErrorLogEntry GetError(string id)
{
    if (id == null)
        throw new ArgumentNullException("id");
    if (id.Length == 0)
        throw new ArgumentException(null, "id");

    // Parse the identifier up front so a malformed id surfaces as an
    // argument error rather than a database error.
    Guid errorId;
    try
    {
        errorId = new Guid(id);
    }
    catch (FormatException e)
    {
        throw new ArgumentException(e.Message, "id", e);
    }

    const string query = @"
SELECT
[AllXml]
FROM
[ELMAH_Error]
WHERE
[ErrorId] = @ErrorId";

    using (SqlCeConnection connection = new SqlCeConnection(ConnectionString))
    using (SqlCeCommand command = new SqlCeCommand(query, connection))
    {
        command.Parameters.Add("@ErrorId", SqlDbType.UniqueIdentifier).Value = errorId;
        connection.Open();

        // ExecuteScalar yields null when no row matched the id.
        string xml = (string)command.ExecuteScalar();
        return xml == null
            ? null
            : new ErrorLogEntry(this, id, ErrorXml.DecodeString(xml));
    }
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright company="TheNucleus">
// Copyright (c) TheNucleus. All rights reserved.
// Licensed under the Apache License, Version 2.0 license. See LICENCE.md file in the project root for full license information.
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.IO.Abstractions;
using System.Security.AccessControl;
using System.Text;
namespace Test.Mocks
{
[SuppressMessage(
    "Microsoft.Performance",
    "CA1812:AvoidUninstantiatedInternalClasses",
    Justification = "This class is used in other assemblies")]
internal sealed class MockFile : FileBase
{
    /// <summary>Path-to-content lookup that backs the fake file system.</summary>
    private readonly Dictionary<string, string> _content = new Dictionary<string, string>();

    /// <summary>Records every Copy/Move call as source -> destination for test inspection.</summary>
    private readonly Dictionary<string, string> _copiedOrMovedFiles = new Dictionary<string, string>();

    [SuppressMessage(
        "Microsoft.Performance",
        "CA1811:AvoidUncalledPrivateCode",
        Justification = "This method may be used in other projects.")]
    public MockFile(string path, string content)
    {
        _content.Add(path, content);
    }

    [SuppressMessage(
        "Microsoft.Performance",
        "CA1811:AvoidUncalledPrivateCode",
        Justification = "This method may be used in other projects.")]
    public MockFile(string path, string content, Dictionary<string, string> copiedFiles)
    {
        _content.Add(path, content);
        _copiedOrMovedFiles = copiedFiles;
    }

    [SuppressMessage(
        "Microsoft.Performance",
        "CA1811:AvoidUncalledPrivateCode",
        Justification = "This method may be used in other projects.")]
    public MockFile(Dictionary<string, string> files)
    {
        _content = files;
    }

    [SuppressMessage(
        "Microsoft.Performance",
        "CA1811:AvoidUncalledPrivateCode",
        Justification = "This method may be used in other projects.")]
    public MockFile(Dictionary<string, string> files, Dictionary<string, string> copiedFiles)
    {
        _content = files;
        _copiedOrMovedFiles = copiedFiles;
    }

    // --- Append operations: intentionally no-ops for this mock. ---

    public override void AppendAllText(string path, string contents)
    {
        // Intentionally a no-op.
    }

    public override void AppendAllText(string path, string contents, Encoding encoding)
    {
        // Intentionally a no-op.
    }

    public override StreamWriter AppendText(string path) => throw new NotImplementedException();

    // Copy/Move only record the operation; nothing is actually transferred.
    public override void Copy(string sourceFileName, string destFileName) =>
        _copiedOrMovedFiles.Add(sourceFileName, destFileName);

    public override void Copy(string sourceFileName, string destFileName, bool overwrite) =>
        _copiedOrMovedFiles.Add(sourceFileName, destFileName);

    public override Stream Create(string path) => throw new NotImplementedException();

    public override Stream Create(string path, int bufferSize) => throw new NotImplementedException();

    public override Stream Create(string path, int bufferSize, FileOptions options) =>
        throw new NotImplementedException();

    public override Stream Create(string path, int bufferSize, FileOptions options, FileSecurity fileSecurity) =>
        throw new NotImplementedException();

    public override StreamWriter CreateText(string path) => throw new NotImplementedException();

    public override void Decrypt(string path)
    {
        // Intentionally a no-op.
    }

    public override void Delete(string path)
    {
        // Intentionally a no-op.
    }

    public override void Encrypt(string path)
    {
        // Intentionally a no-op.
    }

    /// <summary>A file "exists" when its path was registered in the content map.</summary>
    public override bool Exists(string path) => _content.ContainsKey(path);

    public override FileSecurity GetAccessControl(string path) => throw new NotImplementedException();

    public override FileSecurity GetAccessControl(string path, AccessControlSections includeSections) =>
        throw new NotImplementedException();

    public override FileAttributes GetAttributes(string path) => throw new NotImplementedException();

    // All timestamps report "one hour ago" so files always look slightly stale.
    public override DateTime GetCreationTime(string path) => DateTime.Now.AddHours(-1);

    public override DateTime GetCreationTimeUtc(string path) => DateTime.Now.AddHours(-1);

    public override DateTime GetLastAccessTime(string path) => DateTime.Now.AddHours(-1);

    public override DateTime GetLastAccessTimeUtc(string path) => DateTime.Now.AddHours(-1);

    public override DateTime GetLastWriteTime(string path) => DateTime.Now.AddHours(-1);

    public override DateTime GetLastWriteTimeUtc(string path) => DateTime.Now.AddHours(-1);

    public override void Move(string sourceFileName, string destFileName) =>
        _copiedOrMovedFiles.Add(sourceFileName, destFileName);

    public override Stream Open(string path, FileMode mode) => Open(path, mode, FileAccess.ReadWrite);

    public override Stream Open(string path, FileMode mode, FileAccess access) =>
        Open(path, mode, access, FileShare.ReadWrite);

    [SuppressMessage(
        "Microsoft.Reliability",
        "CA2000:Dispose objects before losing scope",
        Justification = "Disposing of the output stream should be done by the caller.")]
    public override Stream Open(string path, FileMode mode, FileAccess access, FileShare share)
    {
        // Materialize the registered content into a fresh stream. The writer's
        // backing stream is copied into a second stream so the returned one
        // survives the writer's disposal.
        var result = new MemoryStream();
        using (var writer = new StreamWriter(new MemoryStream()))
        {
            writer.Write(_content[path]);
            writer.Flush();
            writer.BaseStream.Position = 0;
            writer.BaseStream.CopyTo(result);
            result.Position = 0;
        }

        return result;
    }

    public override Stream OpenRead(string path) =>
        Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);

    public override StreamReader OpenText(string path) => throw new NotImplementedException();

    public override Stream OpenWrite(string path) =>
        Open(path, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);

    public override byte[] ReadAllBytes(string path) => throw new NotImplementedException();

    public override string[] ReadAllLines(string path) => throw new NotImplementedException();

    public override string[] ReadAllLines(string path, Encoding encoding) =>
        throw new NotImplementedException();

    public override string ReadAllText(string path) => throw new NotImplementedException();

    public override string ReadAllText(string path, Encoding encoding) =>
        throw new NotImplementedException();

    public override void Replace(string sourceFileName, string destinationFileName, string destinationBackupFileName)
    {
        // Intentionally a no-op.
    }

    public override void Replace(string sourceFileName, string destinationFileName, string destinationBackupFileName, bool ignoreMetadataErrors)
    {
        // Intentionally a no-op.
    }

    public override void SetAccessControl(string path, FileSecurity fileSecurity)
    {
        // Intentionally a no-op.
    }

    public override void SetAttributes(string path, FileAttributes fileAttributes)
    {
        // Intentionally a no-op.
    }

    public override void SetCreationTime(string path, DateTime creationTime)
    {
        // Intentionally a no-op.
    }

    public override void SetCreationTimeUtc(string path, DateTime creationTimeUtc)
    {
        // Intentionally a no-op.
    }

    public override void SetLastAccessTime(string path, DateTime lastAccessTime)
    {
        // Intentionally a no-op.
    }

    public override void SetLastAccessTimeUtc(string path, DateTime lastAccessTimeUtc)
    {
        // Intentionally a no-op.
    }

    public override void SetLastWriteTime(string path, DateTime lastWriteTime)
    {
        // Intentionally a no-op.
    }

    public override void SetLastWriteTimeUtc(string path, DateTime lastWriteTimeUtc)
    {
        // Intentionally a no-op.
    }

    public override void WriteAllBytes(string path, byte[] bytes)
    {
        // Intentionally a no-op.
    }

    public override void WriteAllLines(string path, string[] contents)
    {
        // Intentionally a no-op.
    }

    public override void WriteAllLines(string path, string[] contents, Encoding encoding)
    {
        // Intentionally a no-op.
    }

    public override void WriteAllText(string path, string contents)
    {
        // Intentionally a no-op.
    }

    public override void WriteAllText(string path, string contents, Encoding encoding)
    {
        // Intentionally a no-op.
    }

    public override void AppendAllLines(string path, IEnumerable<string> contents)
    {
        // Intentionally a no-op.
    }

    public override void AppendAllLines(string path, IEnumerable<string> contents, Encoding encoding)
    {
        // Intentionally a no-op.
    }

    public override IEnumerable<string> ReadLines(string path) => throw new NotImplementedException();

    public override IEnumerable<string> ReadLines(string path, Encoding encoding) =>
        throw new NotImplementedException();

    public override void WriteAllLines(string path, IEnumerable<string> contents)
    {
        // Intentionally a no-op.
    }

    public override void WriteAllLines(string path, IEnumerable<string> contents, Encoding encoding)
    {
        // Intentionally a no-op.
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Nethereum.ABI.FunctionEncoding.AttributeEncoding;
using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.ABI.Model;
using Nethereum.Hex.HexConvertors.Extensions;
namespace Nethereum.ABI.FunctionEncoding
{
/// <summary>
/// Decodes ABI-encoded byte/hex output into .NET objects whose properties are
/// annotated with <see cref="ParameterAttribute"/>, including nested tuple
/// (struct) and tuple-array parameters.
/// </summary>
public class ParameterDecoder
{
    // Fills in tuple component ABI metadata from [Parameter] attributes on nested types.
    private readonly AttributesToABIExtractor attributesToABIExtractor;

    public ParameterDecoder()
    {
        attributesToABIExtractor = new AttributesToABIExtractor();
    }

    /// <summary>
    /// Creates a new instance of <paramref name="objectType"/> (must have a
    /// parameterless constructor) and populates its [Parameter]-annotated
    /// properties from the ABI-encoded <paramref name="output"/>.
    /// </summary>
    public object DecodeAttributes(byte[] output, Type objectType)
    {
        var properties = PropertiesExtractor.GetPropertiesWithParameterAttribute(objectType);
        var objectResult = Activator.CreateInstance(objectType);
        return DecodeAttributes(output, objectResult, properties.ToArray());
    }

    /// <summary>Hex-string overload of <see cref="DecodeAttributes(byte[], Type)"/>.</summary>
    public object DecodeAttributes(string output, Type objectType)
    {
        return DecodeAttributes(output.HexToByteArray(), objectType);
    }

    /// <summary>
    /// Decodes <paramref name="output"/> into the given <paramref name="result"/>
    /// instance, assigning each decoded value to its matching property.
    /// Returns <paramref name="result"/> unchanged when output is null/empty.
    /// </summary>
    public object DecodeAttributes(byte[] output, object result, params PropertyInfo[] properties)
    {
        if (output == null || output.Length == 0) return result;
        var parameterObjects = GetParameterOutputsFromAttributes(properties);
        // ABI output is positional: decode in declared parameter order.
        var orderedParameters = parameterObjects.OrderBy(x => x.Parameter.Order).ToArray();
        var parameterResults = DecodeOutput(output, orderedParameters);
        foreach (var parameterResult in parameterResults)
        {
            var parameter = (ParameterOutputProperty)parameterResult;
            var propertyInfo = parameter.PropertyInfo;
            var decodedResult = parameter.Result;
            // Tuples decode to a List<ParameterOutput>; build the target CLR type
            // and assign its members recursively instead.
            if (parameter.Parameter.ABIType is TupleType tupleType)
            {
                decodedResult = Activator.CreateInstance(propertyInfo.PropertyType);
                AssingValuesFromPropertyList(decodedResult, parameter);
            }
#if DOTNET35
            propertyInfo.SetValue(result, decodedResult, null);
#else
            propertyInfo.SetValue(result, decodedResult);
#endif
        }
        return result;
    }

    /// <summary>Hex-string overload; "0x" (empty payload) returns result untouched.</summary>
    public object DecodeAttributes(string output, object result, params PropertyInfo[] properties)
    {
        if (output == "0x") return result;
        return DecodeAttributes(output.HexToByteArray(), result, properties);
    }

    /// <summary>Generic convenience wrapper around the object-typed overload.</summary>
    public T DecodeAttributes<T>(string output, T result, params PropertyInfo[] properties)
    {
        return (T)DecodeAttributes(output, (object)result, properties);
    }

    /// <summary>
    /// Recursively assigns decoded tuple member values onto <paramref name="instance"/>,
    /// matching children to decoded outputs by parameter order.
    /// NOTE: the misspelling ("Assing") is preserved — this is a public API member.
    /// </summary>
    public void AssingValuesFromPropertyList(object instance, ParameterOutputProperty result)
    {
        if (result.Parameter.ABIType is TupleType)
        {
            var childrenProperties = result.ChildrenProperties;
            if (result.Result != null)
            {
                var outputResult = (List<ParameterOutput>) result.Result;
                foreach (var parameterOutput in outputResult)
                {
                    // Order, not name, links a decoded value to its property.
                    var childrenProperty =
                        childrenProperties.FirstOrDefault(x =>
                            x.Parameter.Order == parameterOutput.Parameter.Order);
                    if (childrenProperty != null)
                    {
                        var decodedResult = parameterOutput.Result;
                        if (childrenProperty.Parameter.ABIType is TupleType)
                        {
                            //Adding the result to the children property for assignment to the instance
                            childrenProperty.Result = parameterOutput.Result;
                            //creating a new instance of our object property
                            decodedResult = Activator.CreateInstance(childrenProperty.PropertyInfo.PropertyType);
                            AssingValuesFromPropertyList(decodedResult, childrenProperty);
                        }
#if DOTNET35
                        childrenProperty.PropertyInfo.SetValue(instance, decodedResult, null);
#else
                        childrenProperty.PropertyInfo.SetValue(instance, decodedResult);
#endif
                    }
                }
            }
        }
    }

    /// <summary>
    /// Builds ParameterOutputProperty descriptors for every property carrying a
    /// [Parameter] attribute, wiring tuple/tuple-array component metadata and the
    /// CLR type each value should decode to.
    /// </summary>
    public List<ParameterOutputProperty> GetParameterOutputsFromAttributes(PropertyInfo[] properties)
    {
        var parameterObjects = new List<ParameterOutputProperty>();
        foreach (var property in properties)
            if (property.IsDefined(typeof(ParameterAttribute), true))
            {
#if DOTNET35
                var parameterAttribute =
                    (ParameterAttribute)property.GetCustomAttributes(typeof(ParameterAttribute), true)[0];
#else
                var parameterAttribute = property.GetCustomAttribute<ParameterAttribute>(true);
#endif
                var parameterOutputProperty = new ParameterOutputProperty
                {
                    Parameter = parameterAttribute.Parameter,
                    PropertyInfo = property,
                };
                if (parameterAttribute.Parameter.ABIType is TupleType tupleType)
                {
                    // Tuples: recurse into the property's own [Parameter] members.
                    attributesToABIExtractor.InitTupleComponentsFromTypeAttributes(property.PropertyType,
                        tupleType);
                    parameterOutputProperty.ChildrenProperties =
                        GetParameterOutputsFromAttributes(property.PropertyType);
                }
                else if (parameterAttribute.Parameter.ABIType is ArrayType arrayType)
                {
                    // Tuple arrays must map to IList<T>; the element type T supplies
                    // the component metadata.
                    if (arrayType.ElementType is TupleType tupleTypeElement)
                    {
#if NETSTANDARD1_1 || PCL && !NET35
                        var type = property.PropertyType.GenericTypeArguments.FirstOrDefault();
#else
                        var type = property.PropertyType.GetGenericArguments().FirstOrDefault();
#endif
                        if (type == null) throw new Exception("Tuple array has to decode to a IList<T>: " + parameterAttribute.Parameter.Name);
                        attributesToABIExtractor.InitTupleComponentsFromTypeAttributes(type,
                            tupleTypeElement);
                    }
                    parameterAttribute.Parameter.DecodedType = property.PropertyType;
                }
                else
                {
                    parameterAttribute.Parameter.DecodedType = property.PropertyType;
                }
                parameterObjects.Add(parameterOutputProperty);
            }
        return parameterObjects;
    }

    /// <summary>Type-based overload: reflects [Parameter] properties from <paramref name="type"/>.</summary>
    public List<ParameterOutputProperty> GetParameterOutputsFromAttributes(Type type)
    {
        var properties = PropertiesExtractor.GetPropertiesWithParameterAttribute(type);
        return GetParameterOutputsFromAttributes(properties.ToArray());
    }

    /// <summary>
    /// Decodes raw data using each parameter's default decoding type
    /// (no attribute/property mapping involved).
    /// </summary>
    public List<ParameterOutput> DecodeDefaultData(byte[] data, params Parameter[] inputParameters)
    {
        var parameterOutputs = new List<ParameterOutput>();
        foreach (var inputParameter in inputParameters)
        {
            inputParameter.DecodedType = inputParameter.ABIType.GetDefaultDecodingType();
            parameterOutputs.Add(new ParameterOutput
            {
                Parameter = inputParameter
            });
        }
        return DecodeOutput(data, parameterOutputs.ToArray());
    }

    /// <summary>Hex-string overload of <see cref="DecodeDefaultData(byte[], Parameter[])"/>.</summary>
    public List<ParameterOutput> DecodeDefaultData(string data, params Parameter[] inputParameters)
    {
        return DecodeDefaultData(data.HexToByteArray(), inputParameters);
    }

    /// <summary>
    /// Core ABI decoding. Two passes over the head section: static parameters are
    /// decoded in place, while dynamic parameters contribute a 32-byte offset
    /// (DataIndexStart) into the tail. The tail is then sliced between consecutive
    /// dynamic offsets and each slice decoded. Order of operations is significant.
    /// </summary>
    public List<ParameterOutput> DecodeOutput(byte[] outputBytes, params ParameterOutput[] outputParameters)
    {
        var currentIndex = 0;
        // Head section is laid out in declared parameter order.
        Array.Sort(outputParameters, (x, y) => x.Parameter.Order.CompareTo(y.Parameter.Order));
        foreach (var outputParam in outputParameters)
        {
            var param = outputParam.Parameter;
            if (param.ABIType.IsDynamic())
            {
                // Dynamic slot holds only the byte offset of the real data.
                outputParam.DataIndexStart =
                    EncoderDecoderHelpers.GetNumberOfBytes(outputBytes.Skip(currentIndex).ToArray());
                currentIndex = currentIndex + 32;
            }
            else
            {
                var bytes = outputBytes.Skip(currentIndex).Take(param.ABIType.FixedSize).ToArray();
                outputParam.Result = param.ABIType.Decode(bytes, outputParam.Parameter.DecodedType);
                currentIndex = currentIndex + param.ABIType.FixedSize;
            }
        }
        // Each dynamic parameter's data runs from its offset to the next dynamic
        // parameter's offset; the last one runs to the end of the buffer.
        ParameterOutput currentDataItem = null;
        foreach (
            var nextDataItem in outputParameters.Where(outputParam => outputParam.Parameter.ABIType.IsDynamic()))
        {
            if (currentDataItem != null)
            {
                var bytes =
                    outputBytes.Skip(currentDataItem.DataIndexStart).Take(nextDataItem.DataIndexStart - currentDataItem.DataIndexStart).ToArray();
                currentDataItem.Result = currentDataItem.Parameter.ABIType.Decode(bytes, currentDataItem.Parameter.DecodedType);
            }
            currentDataItem = nextDataItem;
        }
        if (currentDataItem != null)
        {
            var bytes = outputBytes.Skip(currentDataItem.DataIndexStart).ToArray();
            currentDataItem.Result = currentDataItem.Parameter.ABIType.Decode(bytes, currentDataItem.Parameter.DecodedType);
        }
        return outputParameters.ToList();
    }

    /// <summary>Hex-string overload of <see cref="DecodeOutput(byte[], ParameterOutput[])"/>.</summary>
    public List<ParameterOutput> DecodeOutput(string output, params ParameterOutput[] outputParameters)
    {
        var outputBytes = output.HexToByteArray();
        return DecodeOutput(outputBytes, outputParameters);
    }

    /// <summary>Lazily initializes tuple component metadata if not already set.</summary>
    protected void InitTupleElementComponents(Type type, TupleType tupleType)
    {
        if (tupleType.Components == null)
            attributesToABIExtractor.InitTupleComponentsFromTypeAttributes(type,
                tupleType);
    }
}
}
| |
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;
using Lucene.Net.Util.Automaton;
using System;
using System.Globalization;
using System.Text;
using Assert = Lucene.Net.TestFramework.Assert;
using BitSet = Lucene.Net.Util.OpenBitSet;
namespace Lucene.Net.Search
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Simple base class for checking search equivalence.
/// Extend it, and write tests that create <see cref="RandomTerm()"/>s
/// (all terms are single characters a-z), and use
/// <see cref="AssertSameSet(Query, Query)"/> and
/// <see cref="AssertSubsetOf(Query, Query)"/>.
/// </summary>
public abstract class SearchEquivalenceTestBase : LuceneTestCase
#if TESTFRAMEWORK_XUNIT
    , Xunit.IClassFixture<BeforeAfterClass>
{
    public SearchEquivalenceTestBase(BeforeAfterClass beforeAfter)
        : base(beforeAfter)
    {
#if !FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        beforeAfter.SetBeforeAfterClassActions(BeforeClass, AfterClass);
#endif
    }
#else
{
#endif
    // Two searchers over the same reader; queries are run against both to
    // compare result sets.
    protected static IndexSearcher m_s1, m_s2;
    protected static Directory m_directory;
    protected static IndexReader m_reader;
    protected static Analyzer m_analyzer;
    protected static string m_stopword; // we always pick a character as a stopword

    //#if TESTFRAMEWORK_MSTEST
    //    private static readonly IList<string> initalizationLock = new List<string>();
    //    // LUCENENET TODO: Add support for attribute inheritance when it is released (2.0.0)
    //    //[Microsoft.VisualStudio.TestTools.UnitTesting.ClassInitialize(Microsoft.VisualStudio.TestTools.UnitTesting.InheritanceBehavior.BeforeEachDerivedClass)]
    //    new public static void BeforeClass(Microsoft.VisualStudio.TestTools.UnitTesting.TestContext context)
    //    {
    //        lock (initalizationLock)
    //        {
    //            if (!initalizationLock.Contains(context.FullyQualifiedTestClassName))
    //                initalizationLock.Add(context.FullyQualifiedTestClassName);
    //            else
    //                return; // Only allow this class to initialize once (MSTest bug)
    //        }
    //#else
#if TESTFRAMEWORK_NUNIT
    [NUnit.Framework.OneTimeSetUp]
#endif
    //    new public static void BeforeClass()
    //    {
    //#endif
    // LUCENENET specific
    // Is non-static because ClassEnvRule is no longer static.
    ////[OneTimeSetUp]
    /// <summary>
    /// One-time setup: builds a random index of at least 1000 docs whose "field"
    /// contains random a-z single-character terms, picks one character as a
    /// stopword, deletes ~5% of docs, then opens two searchers over the reader.
    /// </summary>
    public override void BeforeClass()
    {
        base.BeforeClass();
        Random random = Random;
        m_directory = NewDirectory();
        m_stopword = "" + GetRandomChar();
        CharacterRunAutomaton stopset = new CharacterRunAutomaton(BasicAutomata.MakeString(m_stopword));
        m_analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false, stopset);
        RandomIndexWriter iw = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
            this,
#endif
            random, m_directory, m_analyzer);
        Document doc = new Document();
        // Reusable field instances: values are overwritten for each added doc.
        Field id = new StringField("id", "", Field.Store.NO);
        Field field = new TextField("field", "", Field.Store.NO);
        doc.Add(id);
        doc.Add(field);
        // index some docs
        int numDocs = AtLeast(1000);
        for (int i = 0; i < numDocs; i++)
        {
            id.SetStringValue(Convert.ToString(i, CultureInfo.InvariantCulture));
            field.SetStringValue(RandomFieldContents());
            iw.AddDocument(doc);
        }
        // delete some docs
        int numDeletes = numDocs / 20;
        for (int i = 0; i < numDeletes; i++)
        {
            Term toDelete = new Term("id", Convert.ToString(random.Next(numDocs), CultureInfo.InvariantCulture));
            // Exercise both deletion code paths (by term and by query).
            if (random.NextBoolean())
            {
                iw.DeleteDocuments(toDelete);
            }
            else
            {
                iw.DeleteDocuments(new TermQuery(toDelete));
            }
        }
        m_reader = iw.GetReader();
        m_s1 = NewSearcher(m_reader);
        m_s2 = NewSearcher(m_reader);
        iw.Dispose();
    }

    //#if TESTFRAMEWORK_MSTEST
    //    // LUCENENET TODO: Add support for attribute inheritance when it is released (2.0.0)
    //    //[Microsoft.VisualStudio.TestTools.UnitTesting.ClassCleanup(Microsoft.VisualStudio.TestTools.UnitTesting.InheritanceBehavior.BeforeEachDerivedClass)]
    //# el
#if TESTFRAMEWORK_NUNIT
    [NUnit.Framework.OneTimeTearDown]
#endif
    //    new public static void AfterClass()
    //#else
    //[OneTimeTearDown]
    /// <summary>
    /// One-time teardown: disposes reader/directory/analyzer and nulls all
    /// shared state so a failed run cannot leak into the next class.
    /// </summary>
    public override void AfterClass()
    {
        m_reader.Dispose();
        m_directory.Dispose();
        m_analyzer.Dispose();
        m_reader = null;
        m_directory = null;
        m_analyzer = null;
        m_s1 = m_s2 = null;
        base.AfterClass();
    }

    /// <summary>
    /// Populate a field with random contents.
    /// Terms should be single characters in lowercase (a-z)
    /// tokenization can be assumed to be on whitespace.
    /// </summary>
    internal static string RandomFieldContents()
    {
        // TODO: zipf-like distribution
        StringBuilder sb = new StringBuilder();
        int numTerms = Random.Next(15);
        for (int i = 0; i < numTerms; i++)
        {
            if (sb.Length > 0)
            {
                sb.Append(' '); // whitespace
            }
            sb.Append(GetRandomChar());
        }
        return sb.ToString();
    }

    /// <summary>
    /// Returns random character (a-z)
    /// </summary>
    internal static char GetRandomChar()
    {
        return (char)TestUtil.NextInt32(Random, 'a', 'z');
    }

    /// <summary>
    /// Returns a term suitable for searching.
    /// Terms are single characters in lowercase (a-z).
    /// </summary>
    protected virtual Term RandomTerm()
    {
        return new Term("field", "" + GetRandomChar());
    }

    /// <summary>
    /// Returns a random filter over the document set.
    /// </summary>
    protected virtual Filter RandomFilter()
    {
        return new QueryWrapperFilter(TermRangeQuery.NewStringRange("field", "a", "" + GetRandomChar(), true, true));
    }

    /// <summary>
    /// Asserts that the documents returned by <paramref name="q1"/>
    /// are the same as of those returned by <paramref name="q2"/>.
    /// </summary>
    public virtual void AssertSameSet(Query q1, Query q2)
    {
        // Set equality == mutual subset containment.
        AssertSubsetOf(q1, q2);
        AssertSubsetOf(q2, q1);
    }

    /// <summary>
    /// Asserts that the documents returned by <paramref name="q1"/>
    /// are a subset of those returned by <paramref name="q2"/>.
    /// </summary>
    public virtual void AssertSubsetOf(Query q1, Query q2)
    {
        // test without a filter
        AssertSubsetOf(q1, q2, null);
        // test with a filter (this will sometimes cause advance'ing enough to test it)
        AssertSubsetOf(q1, q2, RandomFilter());
    }

    /// <summary>
    /// Asserts that the documents returned by <paramref name="q1"/>
    /// are a subset of those returned by <paramref name="q2"/>.
    /// <para/>
    /// Both queries will be filtered by <paramref name="filter"/>.
    /// </summary>
    protected virtual void AssertSubsetOf(Query q1, Query q2, Filter filter)
    {
        // TRUNK ONLY: test both filter code paths
        if (filter != null && Random.NextBoolean())
        {
            q1 = new FilteredQuery(q1, filter, TestUtil.RandomFilterStrategy(Random));
            q2 = new FilteredQuery(q2, filter, TestUtil.RandomFilterStrategy(Random));
            filter = null;
        }
        // not efficient, but simple!
        TopDocs td1 = m_s1.Search(q1, filter, m_reader.MaxDoc);
        TopDocs td2 = m_s2.Search(q2, filter, m_reader.MaxDoc);
        Assert.IsTrue(td1.TotalHits <= td2.TotalHits);
        // fill the superset into a bitset
        var bitset = new BitSet(td2.ScoreDocs.Length);
        for (int i = 0; i < td2.ScoreDocs.Length; i++)
        {
            bitset.Set(td2.ScoreDocs[i].Doc);
        }
        // check in the subset, that every bit was set by the super
        for (int i = 0; i < td1.ScoreDocs.Length; i++)
        {
            Assert.IsTrue(bitset.Get(td1.ScoreDocs[i].Doc));
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
namespace Wc3o.Pages.Game {
public partial class Command_aspx : System.Web.UI.Page {
Player player;
Sector sector;
protected void Page_PreInit(object sender, EventArgs e)
{
    // Apply the game's configured UI theme; must happen in PreInit,
    // the last point at which Page.Theme may be set.
    Theme = Wc3o.Game.Theme;
}
protected void Page_Load(object sender, EventArgs e) {
if (Request.QueryString["Action"] == null) {
CloseWindow(0);
return;
}
player = Wc3o.Game.CurrentPlayer;
sector = Wc3o.Game.CurrentSector;
if (!IsPostBack) {
pnlDestroy.Visible = false;
pnlSplit.Visible = false;
pnlMove.Visible = false;
#region " Train Units "
if (Request.QueryString["Action"] == "Train") {
UnitInfo i = UnitInfo.Get((UnitType)Enum.Parse(typeof(UnitType), Request.QueryString["Unit"]));
int c = 0;
for (int j = 1; j <= int.Parse(Request.QueryString["Number"]); j++) {
if (!Wc3o.Game.IsAvailable(player, sector, i))
break;
c++;
DateTime d = DateTime.Now;
foreach (Unit u in sector.Units)
if (u.IsInTraining && Wc3o.Game.TrainedInSameBuilding(i, u.UnitInfo) && u.Date > d)
d = u.Date;
new Unit(i.Type, sector, player, d.AddMinutes(i.Minutes));
player.Gold -= i.Gold;
player.Lumber -= i.Lumber;
}
if (c <= 0)
Wc3o.Game.Message(this, "You cannot train a " + i.Name + ".", MessageType.Error);
else
Wc3o.Game.Message(this, "You started the training of " + Wc3o.Game.Format(c) + " " + i.Name + ".", MessageType.Acknowledgement);
RefreshPage(sector.Coordinate.ToString(), "Training");
CloseWindow(3000);
}
#endregion
#region " Construct Buildings "
else if (Request.QueryString["Action"] == "Construct") {
BuildingInfo i = BuildingInfo.Get((BuildingType)Enum.Parse(typeof(BuildingType), Request.QueryString["Building"]));
int c = 0;
for (int j = 1; j <= int.Parse(Request.QueryString["Number"]); j++) {
if (!Wc3o.Game.IsAvailable(player, sector, i))
break;
c++;
DateTime d = DateTime.Now;
foreach (Building b in sector.Buildings)
if (b.IsInConstruction && !b.IsUpgrading && b.Date > d)
d = b.Date;
new Building(i.Type, sector, d.AddMinutes(i.Minutes));
player.Gold -= i.Gold;
player.Lumber -= i.Lumber;
}
if (c <= 0)
Wc3o.Game.Message(this, "You cannot construct a " + i.Name + ".", MessageType.Error);
else
Wc3o.Game.Message(this, "You started the construction of " + Wc3o.Game.Format(c) + " " + i.Name + ".", MessageType.Acknowledgement);
RefreshPage(sector.Coordinate.ToString(), "Constructing");
CloseWindow(3000);
}
#endregion
#region " Destroy Unit(s) "
else if (Request.QueryString["Action"] == "DestroyUnit") {
int i = int.Parse(Request.QueryString["Unit"]);
if (i == -1) {
lblDestroy.Text = "Are you sure to destroy all units?";
imgDestroy.Visible = false;
}
else if (i == -2) {
lblDestroy.Text = "Are you sure to stop the training of all units?";
imgDestroy.Visible = false;
}
else {
Unit u = player.GetUnitByHashcode(i);
if (u == null) {
CloseWindow(0);
return;
}
imgDestroy.ImageUrl = player.Gfx + u.Info.Image;
if (u.IsInTraining)
lblDestroy.Text = "Are you sure you want to stop the training of this " + u.Info.Name + "?";
else
lblDestroy.Text = "Are you sure you want to destroy " + Wc3o.Game.Format(u.Number) + " " + u.Info.Name + "?";
}
pnlDestroy.Visible = true;
}
#endregion
#region " Destroy Building(s) "
else if (Request.QueryString["Action"] == "DestroyBuilding") {
int i = int.Parse(Request.QueryString["Building"]);
if (i == -1) {
lblDestroy.Text = "Are you sure to destroy all buildings?";
imgDestroy.Visible = false;
}
else if (i == -2) {
lblDestroy.Text = "Are you sure to stop the construction of all buildings?";
imgDestroy.Visible = false;
}
else {
Building b = player.GetBuildingByHashcode(i);
if (b == null) {
CloseWindow(0);
return;
}
imgDestroy.ImageUrl = player.Gfx + b.Info.Image;
if (b.IsInConstruction) {
if (b.UpgradingFrom == BuildingType.None)
lblDestroy.Text = "Are you sure you want to stop the construction of this " + b.Info.Name + "?";
else
lblDestroy.Text = "Are you sure you want to stop upgrading of this " + b.Info.Name + "?";
}
else
lblDestroy.Text = "Are you sure you want to destroy " + Wc3o.Game.Format(b.Number) + " " + b.Info.Name + "?";
}
pnlDestroy.Visible = true;
}
#endregion
#region " Upgrade Buildings "
else if (Request.QueryString["Action"] == "UpgradeBuilding") {
Building b = player.GetBuildingByHashcode(int.Parse(Request.QueryString["Building"].ToString()));
BuildingInfo i = BuildingInfo.Get((BuildingType)Enum.Parse(typeof(BuildingType), Request.QueryString["To"]));
int c = 0;
for (int j = 1; j <= b.Number; j++)
if (Wc3o.Game.IsAvailable(player, sector, i, true)) {
player.Gold -= i.Gold;
player.Lumber -= i.Lumber;
c++;
}
else
break;
if (c <= 0)
Wc3o.Game.Message(this, "You cannot upgrade to " + i.Name + ".", MessageType.Error);
else {
b.Number -= c;
if (b.Number <= 0)
b.Destroy();
Building newBuilding = new Building(i.Type, b.Sector, DateTime.Now.AddMinutes(i.Minutes * c));
newBuilding.Number = c;
newBuilding.UpgradingFrom = b.Type;
newBuilding.Hitpoints -= b.Info.Hitpoints - b.Hitpoints;
Wc3o.Game.Message(this, "You started upgrading to " + i.Name + " of " + Wc3o.Game.Format(c) + " buildings.", MessageType.Acknowledgement);
}
Refresh(7, b.Sector.Coordinate.ToString());
Refresh(5, "");
CloseWindow(3000);
}
#endregion
#region " Work For Gold "
else if (Request.QueryString["Action"] == "WorkForGold") {
Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
if (u == null || !u.IsAvailable || !u.Sector.Owner.IsAlly(player)) {
CloseWindow(0);
return;
}
int i = 0;
foreach (Unit unit in sector.Units)
if (u.IsWorking && u.Action == UnitAction.WorkForGold)
i += unit.Number;
if (i + u.Number > Configuration.Max_Gold_Worker_Per_Sector) {
Wc3o.Game.Message(this, "You can only send a maximum of " + Configuration.Max_Gold_Worker_Per_Sector + " workers per sector to mine gold.", MessageType.Error);
CloseWindow(3000);
return;
}
u.Action = UnitAction.WorkForGold;
Wc3o.Game.Message(this, Wc3o.Game.Format(u.Number) + " " + u.Info.Name + " started to mine gold.", MessageType.Acknowledgement);
Refresh(6, u.Sector.Coordinate.ToString());
CloseWindow(3000);
}
#endregion
#region " Work For Lumber "
else if (Request.QueryString["Action"] == "WorkForLumber") {
Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
if (u == null || !u.IsAvailable || !u.Sector.Owner.IsAlly(player)) {
CloseWindow(0);
return;
}
u.Action = UnitAction.WorkForLumber;
Wc3o.Game.Message(this, Wc3o.Game.Format(u.Number) + " " + u.Info.Name + " started to cut lumber.", MessageType.Acknowledgement);
Refresh(6, u.Sector.Coordinate.ToString());
CloseWindow(3000);
}
#endregion
#region " Stop Working "
else if (Request.QueryString["Action"] == "StopWorking") {
Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
if (u == null || !u.IsWorking) {
CloseWindow(0);
return;
}
u.Action = UnitAction.None;
Wc3o.Game.Message(this, Wc3o.Game.Format(u.Number) + " " + u.Info.Name + " stopped working.", MessageType.Acknowledgement);
Refresh(6, u.Sector.Coordinate.ToString());
CloseWindow(3000);
}
#endregion
#region " Merge Unit "
else if (Request.QueryString["Action"] == "MergeUnits") {
int i = int.Parse(Request.QueryString["Unit"]);
if (i == -1) {
Sector s = Wc3o.Game.GameData.Sectors[new Coordinate(Request.QueryString["Sector"])];
List<Entity> l = new List<Entity>();
foreach (Unit u in s.Units)
if (u.IsAvailable && u.Owner == player)
l.Add(u);
Wc3o.Game.Merge(l);
Wc3o.Game.Message(this, "You merged all your units.", MessageType.Acknowledgement);
Refresh(6, s.Coordinate.ToString());
CloseWindow(3000);
}
else {
Unit unit = player.GetUnitByHashcode(i);
List<Entity> l = new List<Entity>();
foreach (Unit u in unit.Sector.Units)
if (u.IsAvailable && u.Owner == player && u.Type == unit.Type)
l.Add(u);
Wc3o.Game.Merge(l);
Wc3o.Game.Message(this, "You merged your " + unit.Info.Name + ".", MessageType.Acknowledgement);
Refresh(6, unit.Sector.Coordinate.ToString());
CloseWindow(3000);
}
}
#endregion
#region " Merge Building "
else if (Request.QueryString["Action"] == "MergeBuildings") {
int i = int.Parse(Request.QueryString["Building"]);
if (i == -1) {
Sector s = Wc3o.Game.GameData.Sectors[new Coordinate(Request.QueryString["Sector"])];
List<Entity> l = new List<Entity>();
foreach (Building b in s.Buildings)
if (b.IsAvailable && s.Owner == player)
l.Add(b);
Wc3o.Game.Merge(l);
Wc3o.Game.Message(this, "You merged all your buildings.", MessageType.Acknowledgement);
Refresh(7, s.Coordinate.ToString());
CloseWindow(3000);
}
else {
Building building = player.GetBuildingByHashcode(i);
List<Entity> l = new List<Entity>();
foreach (Building b in building.Sector.Buildings)
if (b.IsAvailable && b.Sector.Owner == player && b.Type == building.Type)
l.Add(b);
Wc3o.Game.Merge(l);
Wc3o.Game.Message(this, "You merged your " + building.Info.Name + ".", MessageType.Acknowledgement);
Refresh(7, building.Sector.Coordinate.ToString());
CloseWindow(3000);
}
}
#endregion
#region " Split Units "
else if (Request.QueryString["Action"] == "SplitUnits") {
Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
if (u == null || !u.IsAvailable) {
CloseWindow(0);
return;
}
pnlSplit.Visible = true;
imgSplit.ImageUrl = player.Gfx + u.Info.Image;
lblSplit.Text = "How many of your " + u.Info.Name + " do you want to split into a new unit?";
for (int i = 1; i < u.Number; i++)
drpSplit.Items.Add(i.ToString());
}
#endregion
#region " Split Buildings "
else if (Request.QueryString["Action"] == "SplitBuildings") {
Building b = player.GetBuildingByHashcode(int.Parse(Request.QueryString["Building"]));
if (b == null || b.IsInConstruction) {
CloseWindow(0);
return;
}
pnlSplit.Visible = true;
imgSplit.ImageUrl = player.Gfx + b.Info.Image;
lblSplit.Text = "How many of your " + b.Info.Name + " do you want to split?";
for (int i = 1; i < b.Number; i++)
drpSplit.Items.Add(i.ToString());
}
#endregion
#region " Move "
else if (Request.QueryString["Action"] == "Move") {
int i = int.Parse(Request.QueryString["Unit"]);
if (i == -1) {
Sector s = Wc3o.Game.GameData.Sectors[new Coordinate(Request.QueryString["Sector"])];
bool hasUnits = false;
foreach (Unit u in s.Units)
if (u.Owner == player) {
hasUnits = true;
break;
}
if (!hasUnits) {
Wc3o.Game.Message(this, "You have no units on this sector.", MessageType.Error);
CloseWindow(3000);
return;
}
pnlMove.Visible = true;
imgMove.Visible = false;
}
else {
Unit u = player.GetUnitByHashcode(i);
if (u == null || !u.IsAvailable) {
CloseWindow(0);
return;
}
pnlMove.Visible = true;
imgMove.ImageUrl = player.Gfx + u.Info.Image;
chkIgnore.Visible = false;
chkTime.Visible = false;
}
}
#endregion
#region " Return "
else if (Request.QueryString["Action"] == "Return") {
int i = int.Parse(Request.QueryString["Unit"]);
if (i == -1) {
Sector s = Wc3o.Game.GameData.Sectors[new Coordinate(Request.QueryString["Sector"])];
List<Unit> l = new List<Unit>();
foreach (Unit u in s.Units)
if (u.Owner == player && u.IsMoving)
l.Add(u);
foreach (Unit u in l)
Return(u);
Wc3o.Game.Message(this, "Your units return.", MessageType.Acknowledgement);
RefreshPage(s.Coordinate.ToString(), "Units");
CloseWindow(3000);
}
else {
Unit u = player.GetUnitByHashcode(i);
if (u == null || !u.IsMoving) {
CloseWindow(0);
return;
}
Return(u);
Wc3o.Game.Message(this, "Your " + u.Info.Name + " return.", MessageType.Acknowledgement);
RefreshPage(u.Sector.Coordinate.ToString(), "Units");
CloseWindow(3000);
}
}
#endregion
#region " Morph Units "
else if (Request.QueryString["Action"] == "MorphUnits") {
Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
double factor = (double)u.Hitpoints / (double)u.Info.Hitpoints;
Response.Write(factor.ToString());
int c = 0;
switch (u.Type) {
case UnitType.DruidOfTheClawBearForm:
c = u.Number;
u.Type = UnitType.DruidOfTheClawDruidForm;
break;
case UnitType.DruidOfTheClawDruidForm:
c = u.Number;
u.Type = UnitType.DruidOfTheClawBearForm;
break;
case UnitType.DruidOfTheTalonCrowForm:
c = u.Number;
u.Type = UnitType.DruidOfTheTalonDruidForm;
break;
case UnitType.DruidOfTheTalonDruidForm:
c = u.Number;
u.Type = UnitType.DruidOfTheTalonCrowForm;
break;
case UnitType.Peasant:
if (sector.Owner == player)
foreach (Building b in sector.Buildings)
if (!b.IsInConstruction && (b.Type == BuildingType.TownHall || b.Type == BuildingType.Keep || b.Type == BuildingType.Castle)) {
c = u.Number;
u.Type = UnitType.Militia;
break;
}
break;
case UnitType.Militia:
if (sector.Owner == player)
foreach (Building b in sector.Buildings)
if (!b.IsInConstruction && (b.Type == BuildingType.TownHall || b.Type == BuildingType.Keep || b.Type == BuildingType.Castle)) {
c = u.Number;
u.Type = UnitType.Peasant;
break;
}
break;
case UnitType.Hippogryph:
foreach (Unit unit in sector.Units)
if (unit.Owner == player && unit.IsAvailable && unit.Type == UnitType.Archer) {
if (unit.Number <= u.Number - c) {
c += unit.Number;
unit.Destroy();
}
else {
Unit newUnit = (Unit)unit.Clone();
newUnit.Number = unit.Number - (u.Number - c);
unit.Destroy();
c = u.Number;
}
}
Unit newHippgoryphRiders = new Unit(UnitType.HippogryphRider, sector, player, DateTime.Now);
newHippgoryphRiders.Number = c;
break;
case UnitType.HippogryphRider:
c = u.Number;
u.Type = UnitType.Hippogryph;
Unit newArchers = new Unit(UnitType.Archer, sector, player, DateTime.Now);
newArchers.Number = c;
break;
case UnitType.Acolyte:
if (sector.Owner == player)
foreach (Building b in sector.Buildings)
if (!b.IsInConstruction && b.Type == BuildingType.SacrificialPit) {
c = u.Number;
u.Type = UnitType.Shade;
break;
}
break;
case UnitType.ObsidianStatue:
c = u.Number;
u.Type = UnitType.Destroyer;
break;
}
u.Hitpoints = (int)(u.Info.Hitpoints * factor);
if (c > 0)
Wc3o.Game.Message(this, "You morphed " + Wc3o.Game.Format(c) + " units.", MessageType.Acknowledgement);
else
Wc3o.Game.Message(this, "You cannot morph.", MessageType.Error);
Refresh(6, u.Sector.Coordinate.ToString());
CloseWindow(3000);
}
#endregion
}
}
#region " Methods for javascript "
// Emits a client-side script that closes this popup window after the given
// delay in milliseconds (0 = close immediately on load).
void CloseWindow(int timeout) {
    lblScript.Text += string.Concat("<script language='JavaScript'>setTimeout('close()',", timeout, ");</script>");
}
// Emits a client-side call that asks the opener window to refresh one panel.
// type: 2=Training, 3=Constructing, 4=Overview, 5=Navigation,
//       6=Player Units, 7=Player Buildings (1 is unused).
void Refresh(int type, string sector) {
    lblScript.Text += string.Format("<script language='JavaScript'>RefreshParent({0},'{1}');</script>", type, sector);
}
// Navigates the opener window to the sector page, requesting a refresh of one
// section. refresh: Units | Buildings | Constructing | Training.
void RefreshPage(string sector, string refresh) {
    string url = "Sector.aspx?Sector=" + sector + "&Refresh=" + refresh;
    lblScript.Text += "<script language='JavaScript'>window.opener.document.location=\"" + url + "\";</script>";
}
#endregion
#region " Events "
// "No" button of the destroy confirmation dialog: close without doing anything.
protected void btnDestroyNo_Click(object sender, EventArgs e) {
    CloseWindow(0);
}
// "Yes" button of the destroy confirmation dialog. The "Unit"/"Building" query
// value selects the target: -1 = all available entities on the sector,
// -2 = everything still in training/construction, otherwise the hash code of a
// single entity. Afterwards the opener's sector page is refreshed.
protected void btnDestroyYes_Click(object sender, EventArgs e) {
    Sector s = sector;
    #region " Destroy Unit "
    if (Request.QueryString["Action"] == "DestroyUnit") {
        int i = int.Parse(Request.QueryString["Unit"]);
        if (i == -1) {
            // Destroy every available unit the player owns on this sector.
            List<Unit> l = new List<Unit>();
            foreach (Unit u in sector.Units)
                if (u.Owner == player && u.IsAvailable)
                    l.Add(u);
            foreach (Unit u in l) {
                s = u.Sector;
                DestroyUnit(u);
            }
        }
        else if (i == -2) {
            // Cancel the training of every queued unit.
            List<Unit> l = new List<Unit>();
            foreach (Unit u in sector.Units)
                if (u.Owner == player && u.IsInTraining)
                    l.Add(u);
            foreach (Unit u in l) {
                s = u.Sector;
                DestroyUnit(u);
            }
        }
        else {
            Unit u = player.GetUnitByHashcode(i);
            // BUG FIX: the unit may no longer exist (stale popup); every sibling
            // handler guards this lookup, this one dereferenced null.
            if (u == null) {
                CloseWindow(0);
                return;
            }
            s = u.Sector;
            DestroyUnit(u);
        }
        RefreshPage(s.Coordinate.ToString(), "Units");
        CloseWindow(0);
        return;
    }
    #endregion
    #region " Destroy Building "
    else if (Request.QueryString["Action"] == "DestroyBuilding") {
        int i = int.Parse(Request.QueryString["Building"]);
        if (i == -1) {
            // Destroy all available buildings; only allowed on an own sector.
            if (sector.Owner != player) {
                CloseWindow(0);
                return;
            }
            List<Building> l = new List<Building>();
            foreach (Building b in sector.Buildings)
                if (b.IsAvailable)
                    l.Add(b);
            foreach (Building b in l) {
                s = b.Sector;
                DestroyBuilding(b);
            }
        }
        else if (i == -2) {
            // Cancel all constructions; only allowed on an own sector.
            if (sector.Owner != player) {
                RefreshPage(s.Coordinate.ToString(), "Buildings");
                CloseWindow(0);
                return;
            }
            List<Building> l = new List<Building>();
            foreach (Building b in sector.Buildings)
                if (b.IsInConstruction)
                    l.Add(b);
            foreach (Building b in l) {
                s = b.Sector;
                DestroyBuilding(b);
            }
        }
        else {
            Building b = player.GetBuildingByHashcode(i);
            // BUG FIX: guard against a stale hash code (see unit branch above).
            if (b == null) {
                CloseWindow(0);
                return;
            }
            s = b.Sector;
            if (b.IsUpgrading) {
                // Cancelling an upgrade: refund the full upgrade cost and revert
                // the building to its previous type, preserving any damage it had.
                int hitpointsDiff = b.Info.Hitpoints - b.Hitpoints;
                b.Date = DateTime.Now;
                player.Gold += b.Info.Gold;
                player.Lumber += b.Info.Lumber;
                b.Type = b.UpgradingFrom;
                b.Hitpoints = b.Info.Hitpoints - hitpointsDiff;
            }
            else
                DestroyBuilding(b);
        }
        RefreshPage(s.Coordinate.ToString(), "Buildings");
        CloseWindow(0);
        return;
    }
    #endregion
    CloseWindow(0);
}
// Removes the unit. If it was still in the training queue, the queue behind it
// is compacted (units queued after it are moved forward) and the full resource
// cost is refunded.
protected void DestroyUnit(Unit u) {
    u.Destroy();
    // NOTE(review): IsInTraining is read after Destroy(); this assumes Destroy()
    // does not clear the training state - confirm in Unit.Destroy.
    if (u.IsInTraining) {
        DateTime date = DateTime.Now;
        foreach (Unit unit in sector.Units)
            if (unit.Owner==player && unit.IsInTraining && Wc3o.Game.TrainedInSameBuilding(u.UnitInfo,unit.UnitInfo))
                if (unit.Date < u.Date) { //units that are finished earlier are always on top of the queue
                    // Unit was ahead of the destroyed one: keep its slot, but
                    // remember the latest such finish time as the queue head.
                    if (unit.Date > date)
                        date = unit.Date;
                }
                else {
                    // Unit was behind the destroyed one: move it forward by
                    // re-chaining its finish time onto the running date.
                    date = date.AddMinutes(unit.Info.Minutes);
                    unit.Date = date;
                }
        // Full refund for a cancelled training order.
        player.Gold += u.Number * u.Info.Gold;
        player.Lumber += u.Number * u.Info.Lumber;
    }
}
// Removes the building. If it was still under construction, the construction
// queue behind it is compacted and a partial refund is granted (66% when it was
// an upgrade, 75% for a fresh construction).
protected void DestroyBuilding(Building b) {
    b.Destroy();
    // NOTE(review): IsInConstruction is read after Destroy(); assumes Destroy()
    // does not clear that state - confirm in Building.Destroy.
    if (b.IsInConstruction) {
        DateTime date = DateTime.Now;
        foreach (Building building in sector.Buildings)
            if (building.IsInConstruction && !building.IsUpgrading)
                if (building.Date < b.Date) { //buildings that finish earlier are always on top of the queue
                    if (building.Date > date)
                        date = building.Date;
                }
                else {
                    // Building was queued behind the destroyed one: pull its
                    // finish time forward.
                    date = date.AddMinutes(building.Info.Minutes);
                    building.Date = date;
                }
        if (b.IsUpgrading) {
            player.Gold += (int)(b.Number * b.Info.Gold * 0.66);
            player.Lumber += (int)(b.Number * b.Info.Lumber * 0.66);
        }
        else {
            player.Gold += (int)(b.Number * b.Info.Gold * 0.75);
            player.Lumber += (int)(b.Number * b.Info.Lumber * 0.75);
        }
    }
}
// Splits the selected count off a unit or building stack into a clone.
// Afterwards the opener is told which panel to refresh.
protected void btnSplit_Click(object sender, EventArgs e) {
    Sector s = sector;
    // Refresh type for the opener: 6 = player units, 7 = player buildings
    // (see the legend in Refresh()).
    int refreshType = 7;
    #region " Split Unit "
    if (Request.QueryString["Action"] == "SplitUnits") {
        Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
        if (u == null) {
            CloseWindow(0);
            return;
        }
        int i = int.Parse(drpSplit.SelectedItem.Text);
        Unit v = (Unit)u.Clone();
        u.Number -= i;
        v.Number = i;
        s = u.Sector;
        // BUG FIX: splitting units must refresh the unit panel (6); the old
        // code unconditionally refreshed the buildings panel (7).
        refreshType = 6;
    }
    #endregion
    #region " Split Building "
    else if (Request.QueryString["Action"] == "SplitBuildings") {
        Building b = player.GetBuildingByHashcode(int.Parse(Request.QueryString["Building"]));
        if (b == null) {
            CloseWindow(0);
            return;
        }
        int i = int.Parse(drpSplit.SelectedItem.Text);
        Building c = (Building)b.Clone();
        b.Number -= i;
        c.Number = i;
        s = b.Sector;
    }
    #endregion
    Refresh(refreshType, s.Coordinate.ToString());
    CloseWindow(0);
}
// Cancel button of the split dialog: just close the popup.
protected void btnSplitCancel_Click(object sender, EventArgs e) {
    CloseWindow(0);
}
// Cancel button of the move dialog: just close the popup.
protected void btnMoveCancel_Click(object sender, EventArgs e) {
    CloseWindow(0);
}
// Starts moving one unit (or all units of the sector, when Unit == -1) to a
// destination given either by sector name or by X/Y coordinates (coordinates
// take precedence when they parse).
protected void btnMove_Click(object sender, EventArgs e) {
    Sector s = null;
    if (txtSector.Text.Length > 0)
        s = Wc3o.Game.GetSectorByName(txtSector.Text);
    try {
        s = Wc3o.Game.GameData.Sectors[new Coordinate(int.Parse(txtX.Text), int.Parse(txtY.Text))];
    } catch {
        // Invalid or empty coordinates - keep the name-based lookup result.
    }
    if (s == null) {
        Wc3o.Game.Message(this, "This sector does not exist.", MessageType.Error);
        return;
    }
    if (Request.QueryString["Action"] == "Move" && Request.QueryString["Unit"] == "-1") {
        // Group move: every available own unit on this sector, optionally
        // leaving the gold/lumber workers behind.
        List<Unit> l = new List<Unit>();
        foreach (Unit u in sector.Units)
            if (u.Owner == player && u.IsAvailable && (!chkIgnore.Checked || (chkIgnore.Checked && !u.UnitInfo.ForLumber && !u.UnitInfo.ForGold)))
                l.Add(u);
        if (l.Count <= 0) {
            CloseWindow(0);
            return;
        }
        // Speed scales travel time per square (GetMoveTime multiplies by it),
        // so the maximum over the group yields the slowest unit's travel time.
        // BUG FIX: unitSector was only assigned inside the loop and stayed null
        // (NullReferenceException) when no unit had Speed > 1. All candidates
        // come from this sector, so initialize it accordingly.
        int speed = 1;
        Sector unitSector = sector;
        foreach (Unit u in l)
            if (u.UnitInfo.Speed > speed) {
                speed = u.UnitInfo.Speed;
                unitSector = u.Sector;
            }
        DateTime d = DateTime.Now.AddMinutes(GetMoveTime(speed, unitSector, s));
        foreach (Unit u in l) {
            if (chkTime.Checked)
                u.Date = d; // whole group arrives together, paced by the slowest unit
            else
                u.Date = DateTime.Now.AddMinutes(GetMoveTime(u.UnitInfo.Speed, unitSector, s));
            u.SourceSector = u.Sector;
            u.SourceDate = DateTime.Now;
            u.Sector = s;
            u.Action = UnitAction.Moving;
        }
        RefreshPage(s.Coordinate.ToString(), "Units");
        CloseWindow(0);
    }
    else {
        // Single-unit move.
        Unit u = player.GetUnitByHashcode(int.Parse(Request.QueryString["Unit"]));
        // BUG FIX: guard against a stale popup referencing a destroyed unit,
        // consistent with the other handlers in this page.
        if (u == null) {
            CloseWindow(0);
            return;
        }
        u.Date = DateTime.Now.AddMinutes(GetMoveTime(u.UnitInfo.Speed, u.Sector, s));
        u.SourceSector = u.Sector;
        u.SourceDate = DateTime.Now;
        u.Sector = s;
        u.Action = UnitAction.Moving;
        RefreshPage(s.Coordinate.ToString(), "Units");
        CloseWindow(0);
    }
}
#endregion
#region " Methods "
// Sends a moving unit back to its source sector; the trip back takes exactly
// as long as the unit has already been under way.
static void Return(Unit u) {
    u.Sector = u.SourceSector;
    double minutesUnderWay = (DateTime.Now - u.SourceDate).TotalMinutes;
    u.Date = DateTime.Now.AddMinutes(minutesUnderWay);
}
// Travel time in minutes: the Manhattan distance between the two sectors,
// scaled by the unit's per-square time factor i.
static double GetMoveTime(int i, Sector s, Sector t) {
    int dx = s.Coordinate.X - t.Coordinate.X;
    if (dx < 0)
        dx = -dx;
    int dy = s.Coordinate.Y - t.Coordinate.Y;
    if (dy < 0)
        dy = -dy;
    return (dx + dy) * i;
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using hw.Helper;
using JetBrains.Annotations;
namespace hw.DebugFormatter
{
/// <summary>
/// Base class that gives descendants a uniform debug-dump facility: a dump
/// string shown in debugger windows, and a stack-based method-trace mechanism
/// (StartMethodDump / ReturnMethodDump / EndMethodDump) that only activates
/// while a debugger is attached.
/// </summary>
[Dump("Dump")]
[DebuggerDisplay("{DebuggerDumpString}")]
public class Dumpable
{
    // One entry per traced method invocation; pushed by StartMethodDump,
    // popped by EndMethodDump, so nested traced calls unwind correctly.
    static readonly Stack<MethodDumpTraceItem> _methodDumpTraceSwitches = new Stack<MethodDumpTraceItem>();

    /// <summary>
    /// generate dump string to be shown in debug windows
    /// </summary>
    /// <returns> the dump string produced by <see cref="Tracer.Dump"/> </returns>
    public virtual string DebuggerDump() { return Tracer.Dump(this); }

    /// <summary>
    /// dump string to be shown in debug windows (newlines converted to CRLF)
    /// </summary>
    [DisableDump]
    [UsedImplicitly]
    public string DebuggerDumpString { get { return DebuggerDump().Replace("\n", "\r\n"); } }

    // Short alias for DebuggerDumpString, convenient in watch windows.
    [DisableDump]
    [UsedImplicitly]
    public string D => DebuggerDumpString;

    // Writes the dump string to the trace output.
    public void T() => Tracer.Line(DebuggerDumpString);

    /// <summary>
    /// Dumps a "not implemented" message for a static function, then breaks
    /// into the debugger.
    /// </summary>
    /// <param name="p"> the function's arguments, included in the dump </param>
    [DebuggerHidden]
    public static void NotImplementedFunction(params object[] p)
    {
        var os = Tracer.DumpMethodWithData("not implemented", null, p, 1);
        Tracer.Line(os);
        Tracer.TraceBreak();
    }

    /// <summary>
    /// Starts a traced method dump; must be paired with EndMethodDump.
    /// </summary>
    /// <param name="trace"> whether tracing is active for this invocation </param>
    /// <param name="p"> the method's arguments, included in the dump </param>
    [DebuggerHidden]
    protected void StartMethodDump(bool trace, params object[] p)
    {
        StartMethodDump(1, trace);
        if(IsMethodDumpTraceActive)
        {
            var os = Tracer.DumpMethodWithData("", this, p, 1);
            Tracer.Line(os);
            Tracer.IndentStart();
        }
    }

    /// <summary>
    /// Dumps a named value, but only while method-dump tracing is active.
    /// </summary>
    /// <param name="name"> label for the value </param>
    /// <param name="value"> the value to dump </param>
    [DebuggerHidden]
    public static void Dump(string name, object value)
    {
        if(IsMethodDumpTraceActive)
        {
            var os = Tracer.DumpData("", new[] {name, value}, 1);
            Tracer.Line(os);
        }
    }

    /// <summary>
    /// Dumps a lazily computed named value; the getter is only invoked while
    /// method-dump tracing is active.
    /// </summary>
    /// <param name="name"> label for the value </param>
    /// <param name="getValue"> deferred producer of the value </param>
    [DebuggerHidden]
    public static void Dump(string name, Func<object> getValue)
    {
        if (IsMethodDumpTraceActive)
        {
            var os = Tracer.DumpData("", new[] { name, getValue() }, 1);
            Tracer.Line(os);
        }
    }

    /// <summary>
    /// Dumps a method's return value and optionally breaks execution; passes
    /// the value through unchanged so it can wrap a return statement.
    /// </summary>
    /// <param name="rv"> the return value </param>
    /// <param name="breakExecution"> break into the debugger after dumping </param>
    [DebuggerHidden]
    protected static T ReturnMethodDump<T>(T rv, bool breakExecution = true)
    {
        if(IsMethodDumpTraceActive)
        {
            Tracer.IndentEnd();
            Tracer.Line(Tracer.MethodHeader(stackFrameDepth: 1) + "[returns] " + Tracer.Dump(rv));
            if(breakExecution)
                Tracer.TraceBreak();
        }
        return rv;
    }

    /// <summary>
    /// Dumps the return of a void method and optionally breaks execution.
    /// </summary>
    /// <param name="breakExecution"> break into the debugger after dumping </param>
    [DebuggerHidden]
    protected static void ReturnVoidMethodDump(bool breakExecution = true)
    {
        if(IsMethodDumpTraceActive)
        {
            Tracer.IndentEnd();
            Tracer.Line(Tracer.MethodHeader(stackFrameDepth: 1) + "[returns]");
            if(breakExecution)
                Tracer.TraceBreak();
        }
    }

    // Breaks into the debugger, but only while method-dump tracing is active.
    [DebuggerHidden]
    protected void BreakExecution()
    {
        if(IsMethodDumpTraceActive)
            Tracer.TraceBreak();
    }

    /// <summary>
    /// Ends a traced method dump; pops the trace switch pushed by
    /// StartMethodDump. Like StartMethodDump, it is a no-op without an
    /// attached debugger, keeping push/pop balanced.
    /// </summary>
    [DebuggerHidden]
    protected static void EndMethodDump()
    {
        if(!Debugger.IsAttached)
            return;
        CheckDumpLevel(1);
        _methodDumpTraceSwitches.Pop();
    }

    // Asserts that the current stack depth still matches the depth recorded
    // when the topmost traced method was started.
    static void CheckDumpLevel(int depth)
    {
        if(!Debugger.IsAttached)
            return;
        var top = _methodDumpTraceSwitches.Peek();
        if(top.Trace)
            Tracer.Assert(top.FrameCount == Tracer.CurrentFrameCount(depth + 1));
    }

    /// <summary>
    /// Unconditionally dumps this method's data with a message, then breaks
    /// into the debugger.
    /// </summary>
    /// <param name="text"> message to include in the dump </param>
    /// <param name="p"> the method's arguments, included in the dump </param>
    [DebuggerHidden]
    protected void DumpMethodWithBreak(string text, params object[] p)
    {
        var os = Tracer.DumpMethodWithData(text, this, p, 1);
        Tracer.Line(os);
        Tracer.TraceBreak();
    }

    /// <summary>
    /// Unconditionally dumps arbitrary data with a message, then breaks into
    /// the debugger.
    /// </summary>
    /// <param name="text"> message to include in the dump </param>
    /// <param name="p"> the values to dump </param>
    [DebuggerHidden]
    protected static void DumpDataWithBreak(string text, params object[] p)
    {
        var os = Tracer.DumpData(text, p, 1);
        Tracer.Line(os);
        Tracer.TraceBreak();
    }

    /// <summary>
    /// Dumps a "not implemented" message for an instance method and breaks
    /// into the debugger; throws instead when called while a dump is already
    /// in progress (to avoid recursive dumping).
    /// </summary>
    /// <param name="p"> the method's arguments, included in the dump </param>
    [DebuggerHidden]
    protected void NotImplementedMethod(params object[] p)
    {
        if(IsInDump)
            throw new NotImplementedException();
        var os = Tracer.DumpMethodWithData("not implemented", this, p, 1);
        Tracer.Line(os);
        Tracer.TraceBreak();
    }

    // Public dump entry point; tracks re-entrancy via _isInDump so recursive
    // object graphs render as "<recursion>" instead of looping forever.
    public string Dump()
    {
        var oldIsInDump = _isInDump;
        _isInDump = true;
        try
        {
            return Dump(oldIsInDump);
        }
        finally
        {
            _isInDump = oldIsInDump;
        }
    }

    // Renders "TypeName{data}" normally, "TypeName<recursion>" when this
    // object is already being dumped further up the call chain.
    protected virtual string Dump(bool isRecursion)
    {
        var surround = "<recursion>";
        if(!isRecursion)
            surround = DumpData().Surround("{", "}");
        return GetType().PrettyName() + surround;
    }

    /// <summary>
    /// Gets a value indicating whether this instance is in dump.
    /// </summary>
    /// <value>
    /// <c>true</c>
    /// if this instance is in dump; otherwise,
    /// <c>false</c>
    /// .
    /// </value>
    /// created 23.09.2006 17:39
    [DisableDump]
    public bool IsInDump { get { return _isInDump; } }

    bool _isInDump;
    // Three-state global override: true = suppress tracing, false = force it,
    // null = decide from the debugger/trace-switch state.
    public static bool? IsMethodDumpTraceInhibited;

    // Dumps this object's data; any exception from reflection-based dumping is
    // swallowed and rendered as a placeholder instead of propagating.
    public virtual string DumpData()
    {
        string result;
        try
        {
            result = Tracer.DumpData(this);
        }
        catch(Exception)
        {
            result = "<not implemented>";
        }
        return result;
    }

    // No-op. NOTE(review): the class does not declare IDisposable here -
    // confirm whether this is intended for use in using(...) blocks.
    public void Dispose() { }

    // Records the trace switch for a newly started traced method; the frame
    // count is only captured when tracing, and nothing is pushed without an
    // attached debugger (EndMethodDump mirrors this).
    static void StartMethodDump(int depth, bool trace)
    {
        if(!Debugger.IsAttached)
            return;
        var frameCount = trace ? Tracer.CurrentFrameCount(depth + 1) : 0;
        _methodDumpTraceSwitches.Push(new MethodDumpTraceItem(frameCount, trace));
    }

    // Immutable (frameCount, trace) pair recorded per traced method call.
    sealed class MethodDumpTraceItem
    {
        readonly int _frameCount;
        readonly bool _trace;
        public MethodDumpTraceItem(int frameCount, bool trace)
        {
            _frameCount = frameCount;
            _trace = trace;
        }
        internal int FrameCount { get { return _frameCount; } }
        internal bool Trace { get { return _trace; } }
    }

    // True when the topmost traced method has tracing on, subject to the
    // global inhibit override and debugger attachment.
    static bool IsMethodDumpTraceActive
    {
        get
        {
            if(IsMethodDumpTraceInhibited != null)
                return !IsMethodDumpTraceInhibited.Value;
            if(!Debugger.IsAttached)
                return false;
            //CheckDumpLevel(2);
            return _methodDumpTraceSwitches.Peek().Trace;
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using Norm;
using Norm.BSON;
using Norm.Collections;
using Norm.Commands.Modifiers;
using Norm.Responses;
using Xunit;
using Norm.Collections;
namespace Norm.Tests
{
public class QueryTests : IDisposable
{
private readonly IMongo _server;                       // connection to the NormTests database
private BuildInfoResponse _buildInfo = null;           // server build info, used to branch on MongoDB version
private readonly IMongoCollection<Person> _collection; // the "People" collection under test
// xUnit creates a fresh instance per test, so this runs before every test:
// connects to a local MongoDB (pooling disabled for isolation) and records
// the server version for the version-dependent Distinct tests.
public QueryTests()
{
    var admin = new MongoAdmin("mongodb://localhost/admin?pooling=false&strict=true");
    _server = Mongo.Create("mongodb://localhost/NormTests?pooling=false");
    _collection = _server.GetCollection<Person>("People");
    _buildInfo = admin.BuildInfo();
    //cause the collection to exist on the server by inserting, then deleting some things.
    _collection.Insert(new Person());
    _collection.Delete(new { });
}
// xUnit teardown (runs after every test): drop the People collection and the
// whole NormTests database so tests never see each other's data.
public void Dispose()
{
    _server.Database.DropCollection("People");
    using (var admin = new MongoAdmin("mongodb://localhost/NormTests?pooling=false"))
    {
        admin.DropDatabase();
    }
    _server.Dispose();
}
[Fact]
public void FindUsesLimit()
{
    // Find with limit 3 must return exactly 3 of the 4 inserted people.
    foreach (var name in new[] { "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    var fetched = _collection.Find(new { }, 3).ToArray();

    Assert.Equal(3, fetched.Length);
}
// BUG FIX: the [Fact] attribute was missing, so xUnit never discovered or ran
// this test. Verifies that AsQueryable() translates a LINQ Where into a query.
[Fact]
public void MongoCollection_Supports_LINQ()
{
    _collection.Insert(new Person { Name = "BBB" });
    _collection.Insert(new Person { Name = "CCC" });
    _collection.Insert(new Person { Name = "AAA" });
    _collection.Insert(new Person { Name = "DDD" });
    var result = _collection.AsQueryable().Where(y => y.Name == "AAA").ToArray();
    Assert.Equal(1, result.Length);
}
[Fact]
public void Count_Works()
{
    // An unfiltered Count() reports the size of the whole collection.
    foreach (var name in new[] { "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    Assert.Equal(4, _collection.Count());
}
[Fact]
public void Count_With_Filter_Works()
{
    // A filtered Count only counts matching documents.
    foreach (var name in new[] { "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    Assert.Equal(1, _collection.Count(new { Name = "AAA" }));
}
[Fact]
public void DateTime_GreaterThan_Qualifier_Works()
{
    // Only the 1930 birthday lies strictly after 1920-01-01.
    foreach (var year in new[] { 1910, 1920, 1930 })
        _collection.Insert(new Person { Birthday = new DateTime(year, 1, 1) });

    var matches = _collection.Find(new { Birthday = Q.GreaterThan(new DateTime(1920, 1, 1)) });

    Assert.Equal(1, matches.Count());
}
[Fact]
public void Element_Match_Matches()
{
    using (var db = Mongo.Create(TestHelper.ConnectionString()))
    {
        var coll = db.GetCollection<Post>();
        coll.Delete(new { });

        var postWithXabc = new Post
        {
            Comments = new Comment[] {
                new Comment { Text = "xabc" },
                new Comment { Text = "abc" }
            }
        };
        var taggedPost = new Post { Tags = new String[] { "hello", "world" } };
        var postWithXyz = new Post
        {
            Comments = new Comment[] {
                new Comment { Text = "xyz" },
                new Comment { Text = "abc" }
            }
        };
        coll.Insert(postWithXabc, taggedPost, postWithXyz);

        // Exactly one post has a comment with text "xyz"...
        Assert.Equal(1, coll.Find(new { Comments = Q.ElementMatch(new { Text = "xyz" }) }).Count());
        // ...and two posts have a comment whose text starts with "x".
        Assert.Equal(2, coll.Find(new { Comments = Q.ElementMatch(new { Text = Q.Matches("^x") }) }).Count());
    }
}
[Fact]
public void Where_Qualifier_Works()
{
    // $where with a server-side JavaScript predicate matches a single person.
    foreach (var name in new[] { "Gnomey", "kde", "Elfy" })
        _collection.Insert(new Person { Name = name });

    var matches = _collection.Find(Q.Where("this.Name === 'Elfy';"));

    Assert.Equal(1, matches.Count());
}
[Fact]
public void Find_Uses_Limit_Orderby_And_Skip()
{
    foreach (var name in new[] { "AAA", "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    // Descending by name, skipping the first ("DDD"), limited to three.
    var page = _collection.Find(new { Name = Q.NotEqual(new int?()) }, new { Name = OrderBy.Descending }, 3, 1).ToArray();

    Assert.Equal(3, page.Length);
    Assert.Equal("CCC", page[0].Name);
    Assert.Equal("BBB", page[1].Name);
    Assert.Equal("AAA", page[2].Name);
}
[Fact]
public void Find_Uses_Query_And_Orderby()
{
    foreach (var name in new[] { "AAA", "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    // Both "AAA" documents are excluded; the rest come back descending.
    var matches = _collection.Find(new { Name = Q.NotEqual("AAA") }, new { Name = OrderBy.Descending }).ToArray();

    Assert.Equal(3, matches.Length);
    Assert.Equal("DDD", matches[0].Name);
    Assert.Equal("CCC", matches[1].Name);
    Assert.Equal("BBB", matches[2].Name);
}
[Fact]
public void Find_Uses_Query_And_Orderby_And_Limit()
{
    foreach (var name in new[] { "AAA", "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    // Exclude "DDD", order descending, keep the first two.
    var page = _collection.Find(new { Name = Q.NotEqual("DDD") }, new { Name = OrderBy.Descending }, 2, 0).ToArray();

    Assert.Equal(2, page.Length);
    Assert.Equal("CCC", page[0].Name);
    Assert.Equal("BBB", page[1].Name);
}
[Fact]
public void Find_Uses_Null_Qualifier()
{
    foreach (var name in new string[] { null, "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    // IsNull matches only the document with a null Name.
    var nullMatches = _collection.Find(new { Name = Q.IsNull() }, new { Name = OrderBy.Descending }, 2, 0).ToArray();
    Assert.Equal(1, nullMatches.Length);
    Assert.Equal(null, nullMatches[0].Name);

    // IsNotNull matches the other four, returned in descending order.
    var notNullMatches = _collection.Find(new { Name = Q.IsNotNull() }, new { Name = OrderBy.Descending }).ToArray();
    Assert.Equal(4, notNullMatches.Length);
    Assert.Equal("DDD", notNullMatches[0].Name);
}
[Fact]
public void FindUsesLimitAndSkip()
{
    foreach (var name in new[] { "BBB", "CCC", "AAA", "DDD" })
        _collection.Insert(new Person { Name = name });

    // Skip 1 and take 1: the second document in insertion order.
    var page = _collection.Find(new { }, 1, 1).ToArray();

    Assert.Equal(1, page.Length);
    Assert.Equal("CCC", page[0].Name);
}
[Fact]
public void FindCanQueryEmbeddedArray()
{
    _collection.Delete(new { });
    var joe = new Person
    {
        Name = "Joe Cool",
        Address =
        {
            Street = "123 Main St",
            City = "Anytown",
            State = "CO",
            Zip = "45123"
        }
    };
    var sam = new Person
    {
        Name = "Sam Cool",
        Address =
        {
            Street = "300 Main St",
            City = "Anytown",
            State = "CO",
            Zip = "45123"
        },
        Relatives = new List<string>() { "Emma", "Bruce", "Charlie" }
    };
    _collection.Insert(joe);
    _collection.Insert(sam);

    // Matching a scalar against an array field matches any element of it.
    var query = new Expando();
    query["Relatives"] = "Charlie";
    var matches = _collection.Find(query).ToArray();

    Assert.Equal(1, matches.Length);
}
[Fact]
public void BasicQueryUsingProperty()
{
    _collection.Insert(new Person { Name = "Lisa Cool", Address = { Street = "300 Main St", City = "Anytown", State = "CO", Zip = "45123" } });
    _collection.Insert(new Person { Name = "Joe Cool", Address = { Street = "123 Main St", City = "Anytown", State = "CO", Zip = "45123" } });
    _collection.Insert(new Person { Name = "Sam Cool", Address = { Street = "300 Main St", City = "Anytown", State = "CO", Zip = "45123" } });

    // A .NET Regex in the query template becomes a server-side regex match;
    // "Lisa Cool" has 9 characters, so only the two 8-character names match.
    var pattern = new Regex("^.{4}Cool$");
    var matches = _collection.Find(new { Name = pattern }).ToArray();

    Assert.Equal(2, matches.Length);
    Assert.True(matches.All(y => pattern.IsMatch(y.Name)));
}
[Fact]
public void BasicQueryWithSort()
{
    // Ascending sort on LastContact orders the people First/Second/Third.
    _collection.Delete(new { });
    _collection.Insert(new Person { Name = "Third", LastContact = new DateTime(2010, 1, 1) });
    _collection.Insert(new Person { Name = "First", LastContact = new DateTime(2000, 1, 1) });
    _collection.Insert(new Person { Name = "Second", LastContact = new DateTime(2005, 1, 1) });

    var sorted = _collection.Find(new { }, new { LastContact = 1 }).ToArray();

    Assert.Equal(3, sorted.Length);
    Assert.Equal("First", sorted[0].Name);
    Assert.Equal("Second", sorted[1].Name);
    Assert.Equal("Third", sorted[2].Name);
}
[Fact]
public void BasicQueryWithMultiSortOrdering()
{
    // Primary key LastContact descending, ties broken by Name ascending:
    // "First" and "Second" share a date, so Name decides their order.
    _collection.Delete(new { });
    _collection.Insert(new Person { Name = "Third", LastContact = new DateTime(2010, 1, 1) });
    _collection.Insert(new Person { Name = "First", LastContact = new DateTime(2005, 1, 1) });
    _collection.Insert(new Person { Name = "Second", LastContact = new DateTime(2005, 1, 1) });

    var sorted = _collection.Find(new { }, new { LastContact = -1, Name = 1 }).ToArray();

    Assert.Equal(3, sorted.Length);
    Assert.Equal("Third", sorted[0].Name);
    Assert.Equal("First", sorted[1].Name);
    Assert.Equal("Second", sorted[2].Name);
}
[Fact]
public void BasicQueryUsingChildProperty()
{
    _collection.Insert(new Person { Name = "Joe Cool", Address = { Street = "123 Main St", City = "Anytown", State = "CO", Zip = "45123" } });
    _collection.Insert(new Person { Name = "Sam Cool", Address = { Street = "300 Main St", City = "Anytown", State = "CO", Zip = "45123" } });

    // Dot-notation ("Address.City") reaches into the embedded document.
    var query = new Expando();
    query["Address.City"] = Q.Equals<string>("Anytown");

    Assert.Equal(2, _collection.Find(query).Count());
}
[Fact]
public void QueryWithinEmbeddedArray()
{
    // Two people whose Relatives arrays share no elements.
    var first = new Person { Name = "First", Relatives = new List<String> { "comment1", "comment2" } };
    var second = new Person { Name = "Second", Relatives = new List<String> { "commentA", "commentB" } };
    _collection.Insert(first);
    _collection.Insert(second);

    // Querying a scalar against an array field matches documents where any
    // element of the array equals the value.
    var matches = _collection.Find(new { Relatives = "commentA" });
    Assert.Equal("Second", matches.First().Name);
}
[Fact]
public void Distinct_On_Collection_Should_Return_Arrays_As_Value_If_Earlier_Than_1_5_0()
{
    // Version gate: matches 0.x and 1.0-1.4 version strings, i.e. anything
    // before 1.5.0. The test body is skipped entirely on newer servers.
    var isLessThan150 = Regex.IsMatch(_buildInfo.Version, "^([01][.][01234])");
    // Any version earlier than MongoDB 1.5.0
    if (isLessThan150)
    {
        _collection.Insert(new Person { Name = "Joe Cool", Relatives = new List<string>(new[] { "Tom Cool", "Sam Cool" }) });
        _collection.Insert(new Person { Name = "Sam Cool", Relatives = new List<string>(new[] { "Joe Cool", "Jay Cool" }) });
        _collection.Insert(new Person { Name = "Ted Cool", Relatives = new List<string>(new[] { "Tom Cool", "Sam Cool" }) });
        _collection.Insert(new Person { Name = "Jay Cool", Relatives = new List<string>(new[] { "Sam Cool" }) });
        // Pre-1.5.0 servers treat each whole array as one distinct value:
        // the four documents above contain three unique Relatives arrays.
        var results = _collection.Distinct<string[]>("Relatives");
        Assert.Equal(3, results.Count());
    }
}
[Fact]
public void Distinct_On_Collection_Should_Return_Array_Values_In_1_5_0_Or_Later()
{
    // Version gate: matches 0.x and 1.0-1.4 version strings; the test body
    // only runs on MongoDB 1.5.0 or later.
    var isLessThan150 = Regex.IsMatch(_buildInfo.Version, "^([01][.][01234])");
    // Any version MongoDB 1.5.0 +
    if (!isLessThan150)
    {
        _collection.Insert(new Person { Name = "Joe Cool", Relatives = new List<string>(new[] { "Tom Cool", "Sam Cool" }) });
        _collection.Insert(new Person { Name = "Sam Cool", Relatives = new List<string>(new[] { "Joe Cool", "Jay Cool" }) });
        _collection.Insert(new Person { Name = "Ted Cool", Relatives = new List<string>(new[] { "Tom Cool", "Sam Cool" }) });
        _collection.Insert(new Person { Name = "Jay Cool", Relatives = new List<string>(new[] { "Sam Cool" }) });
        // 1.5.0+ servers unwind arrays for distinct: the individual elements
        // above are Tom, Sam, Joe, and Jay Cool — four unique values.
        var results = _collection.Distinct<string>("Relatives");
        Assert.Equal(4, results.Count());
    }
}
[Fact]
public void DistinctOnSimpleProperty()
{
    // Four people, each with a unique Name.
    var people = new[]
    {
        new Person { Name = "Joe Cool", Relatives = new List<string> { "Tom Cool", "Sam Cool" } },
        new Person { Name = "Sam Cool", Relatives = new List<string> { "Joe Cool", "Jay Cool" } },
        new Person { Name = "Ted Cool", Relatives = new List<string> { "Tom Cool", "Sam Cool" } },
        new Person { Name = "Jay Cool", Relatives = new List<string> { "Sam Cool" } },
    };
    foreach (var person in people)
    {
        _collection.Insert(person);
    }

    // Every Name is unique, so distinct on "Name" yields all four values.
    var distinctNames = _collection.Distinct<string>("Name");
    Assert.Equal(4, distinctNames.Count());
}
[Fact]
public void DistinctOnComplexProperty()
{
    // Two identical state-only CA addresses, one CA address with a zip,
    // and one NY address.
    foreach (var person in new[]
    {
        new Person { Name = "Joe Cool", Address = new Address { State = "CA" } },
        new Person { Name = "Sam Cool", Address = new Address { State = "CA" } },
        new Person { Name = "Ted Cool", Address = new Address { State = "CA", Zip = "90010" } },
        new Person { Name = "Jay Cool", Address = new Address { State = "NY" } },
    })
    {
        _collection.Insert(person);
    }

    // Distinct compares the whole embedded document, so the two bare-CA
    // addresses collapse into a single value, leaving three distinct results.
    var distinctAddresses = _collection.Distinct<Address>("Address");
    Assert.Equal(3, distinctAddresses.Count());
}
[Fact]
public void FindAndModify()
{
    _collection.Insert(new Person { Name = "Joe Cool", Age = 10 });
    // $inc increments Age by one on the server.
    var update = new Expando();
    update["$inc"] = new { Age = 1 };
    // FindAndModify returns the document as it was *before* the update.
    var result = _collection.FindAndModify(new { Name = "Joe Cool" }, update);
    Assert.Equal(10, result.Age);
    // Re-reading confirms the increment was applied.
    var result2 = _collection.Find(new { Name = "Joe Cool" }).FirstOrDefault();
    Assert.Equal(11, result2.Age);
}
[Fact]
public void FindAndModifyWithSort()
{
    // Two documents share the same Name but have different ages.
    _collection.Insert(new Person { Name = "Joe Cool", Age = 10 });
    _collection.Insert(new Person { Name = "Joe Cool", Age = 15 });

    var increment = new Expando();
    increment["$inc"] = new { Age = 1 };

    // With a descending Age sort the server targets the oldest match and
    // returns it as it was before the update was applied.
    var before = _collection.FindAndModify(new { Name = "Joe Cool" }, increment, new { Age = Norm.OrderBy.Descending });
    Assert.Equal(15, before.Age);

    // After the update, only the targeted document was incremented.
    var after = _collection.Find(new { Name = "Joe Cool" }).OrderByDescending(x => x.Age).ToList();
    Assert.Equal(16, after[0].Age);
    Assert.Equal(10, after[1].Age);
}
[Fact]
public void FindAndModifyReturnsNullWhenQueryNotFound()
{
    _collection.Insert(new Person { Name = "Joe Cool", Age = 10 });
    _collection.Insert(new Person { Name = "Joe Cool", Age = 15 });
    // $inc would bump Age by one, but the query below matches nothing.
    var update = new Expando();
    update["$inc"] = new { Age = 1 };
    // No document is named "Joe Cool1", so FindAndModify returns null...
    var result = _collection.FindAndModify(new { Name = "Joe Cool1" }, update, new { Age = Norm.OrderBy.Descending });
    Assert.Null(result);
    // ...and nothing was modified: the Age==15 document is still intact.
    var result2 = _collection.Find(new { Age = 15 }).ToList();
    Assert.Equal(1, result2.Count);
}
[Fact]
public void SliceOperatorBringsBackCorrectItems()
{
    // Gate on server version: skip on 0.x, 1.0-1.4, and exactly 1.5.0.
    // BUGFIX: the dots in the "1.5.0" alternative are now escaped ([.]) so
    // the pattern matches the literal version string; previously "." matched
    // any character (e.g. "1x5y0" would also have matched).
    var isLessThan151 = Regex.IsMatch(_buildInfo.Version, "^(([01][.][01234])|(1[.]5[.]0))");
    if (!isLessThan151)
    {
        Person p = new Person() { Relatives = new List<string>() { "Bob", "Joe", "Helen" } };
        _collection.Insert(p);

        // Project a slice of the Relatives array; the first element of the
        // sliced result is expected to be "Joe".
        var result = _collection.Find(new { }, new { _id = 1 }, new { Relatives = Q.Slice(1) }, 1, 0).FirstOrDefault();
        Assert.NotNull(result);
        Assert.Equal("Joe", result.Relatives.First());

        // Two-argument slice: the projected window is ["Joe", "Helen"].
        result = _collection.Find(new { }, new { _id = 1 }, new { Relatives = Q.Slice(1, 2) }, 1, 0).FirstOrDefault();
        Assert.NotNull(result);
        Assert.True((new[] { "Joe", "Helen" }).SequenceEqual(result.Relatives));
    }
}
}
}
| |
namespace Orleans.Serialization
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Reflection;
using Orleans.Runtime;
/// <summary>
/// Generates the serialization callbacks for a type — deep copier, serializer,
/// and deserializer — by emitting IL that reads and writes the type's fields.
/// </summary>
internal class ILSerializerGenerator
{
    private static readonly RuntimeTypeHandle IntPtrTypeHandle = typeof(IntPtr).TypeHandle;
    private static readonly RuntimeTypeHandle UintPtrTypeHandle = typeof(UIntPtr).TypeHandle;
    private static readonly TypeInfo DelegateTypeInfo = typeof(Delegate).GetTypeInfo();

    // Field types in this table are read/written with a dedicated stream
    // method instead of the generic SerializeInner/DeserializeInner path.
    private readonly Dictionary<RuntimeTypeHandle, SimpleTypeSerializer> directSerializers;

    // Reflected handles to the helper methods that the emitted IL calls into.
    private readonly ReflectedSerializationMethodInfo methods = new ReflectedSerializationMethodInfo();

    // Identity copier handed out for types that are shallow-copyable.
    private readonly SerializationManager.DeepCopier immutableTypeCopier = obj => obj;

    private readonly ILFieldBuilder fieldBuilder = new ILFieldBuilder();

    public ILSerializerGenerator()
    {
        // The lambdas below are expression trees: they are never executed,
        // only inspected by SimpleTypeSerializer to capture each MethodInfo.
        this.directSerializers = new Dictionary<RuntimeTypeHandle, SimpleTypeSerializer>
        {
            [typeof(int).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(int)), r => r.ReadInt()),
            [typeof(uint).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(uint)), r => r.ReadUInt()),
            [typeof(short).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(short)), r => r.ReadShort()),
            [typeof(ushort).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(ushort)), r => r.ReadUShort()),
            [typeof(long).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(long)), r => r.ReadLong()),
            [typeof(ulong).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(ulong)), r => r.ReadULong()),
            [typeof(byte).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(byte)), r => r.ReadByte()),
            [typeof(sbyte).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(sbyte)), r => r.ReadSByte()),
            [typeof(float).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(float)), r => r.ReadFloat()),
            [typeof(double).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(double)), r => r.ReadDouble()),
            [typeof(decimal).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(decimal)), r => r.ReadDecimal()),
            [typeof(string).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(string)), r => r.ReadString()),
            [typeof(char).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(char)), r => r.ReadChar()),
            [typeof(Guid).TypeHandle] = new SimpleTypeSerializer(w => w.Write(default(Guid)), r => r.ReadGuid()),
            [typeof(DateTime).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(DateTime)), r => r.ReadDateTime()),
            [typeof(TimeSpan).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(TimeSpan)), r => r.ReadTimeSpan()),
            [typeof(GrainId).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(GrainId)), r => r.ReadGrainId()),
            [typeof(ActivationId).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(ActivationId)), r => r.ReadActivationId()),
            [typeof(SiloAddress).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(SiloAddress)), r => r.ReadSiloAddress()),
            [typeof(ActivationAddress).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(ActivationAddress)), r => r.ReadActivationAddress()),
            [typeof(IPAddress).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(IPAddress)), r => r.ReadIPAddress()),
            [typeof(IPEndPoint).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(IPEndPoint)), r => r.ReadIPEndPoint()),
            [typeof(CorrelationId).TypeHandle] =
                new SimpleTypeSerializer(w => w.Write(default(CorrelationId)), r => r.ReadCorrelationId())
        };
    }

    /// <summary>
    /// Returns a value indicating whether the provided <paramref name="type"/> is supported.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>A value indicating whether the provided <paramref name="type"/> is supported.</returns>
    public static bool IsSupportedType(TypeInfo type)
    {
        // Concrete, non-array, non-enum types only; the type must also pass
        // the field-level checks (no pointers, native ints, or delegates).
        return !type.IsAbstract && !type.IsInterface && !type.IsArray && !type.IsEnum && IsSupportedFieldType(type);
    }

    /// <summary>
    /// Generates a serializer for the specified type.
    /// </summary>
    /// <param name="type">The type to generate the serializer for.</param>
    /// <param name="serializationFieldsFilter">
    /// The predicate used in addition to the default logic to select which fields are included in serialization and deserialization.
    /// </param>
    /// <param name="copyFieldsFilter">
    /// The predicate used in addition to the default logic to select which fields are included in copying.
    /// </param>
    /// <returns>The generated serializer.</returns>
    /// <exception cref="ILGenerationException">Wraps any failure during generation.</exception>
    public SerializationManager.SerializerMethods GenerateSerializer(
        Type type,
        Func<FieldInfo, bool> serializationFieldsFilter = null,
        Func<FieldInfo, bool> copyFieldsFilter = null)
    {
        try
        {
            var serializationFields = this.GetFields(type, serializationFieldsFilter);
            List<FieldInfo> copyFields;
            if (copyFieldsFilter == serializationFieldsFilter)
            {
                // Same filter delegate (including both null): reuse the list.
                copyFields = serializationFields;
            }
            else
            {
                copyFields = this.GetFields(type, copyFieldsFilter);
            }

            SerializationManager.DeepCopier copier;
            // Shallow-copyable types need no generated copier; hand back the
            // shared identity delegate instead of emitting IL.
            if (type.IsOrleansShallowCopyable()) copier = this.immutableTypeCopier;
            else copier = this.EmitCopier(type, copyFields).CreateDelegate();

            var serializer = this.EmitSerializer(type, serializationFields);
            var deserializer = this.EmitDeserializer(type, serializationFields);
            return new SerializationManager.SerializerMethods(
                copier,
                serializer.CreateDelegate(),
                deserializer.CreateDelegate());
        }
        catch (Exception exception)
        {
            // Surface the offending type in the wrapped exception for diagnosis.
            throw new ILGenerationException($"Serializer generation failed for type {type}", exception);
        }
    }

    /// <summary>
    /// Emits the deep-copier body for <paramref name="type"/>.
    /// </summary>
    /// <param name="type">The type being copied.</param>
    /// <param name="fields">The fields to copy, in deterministic order.</param>
    /// <returns>The builder holding the generated copier.</returns>
    private ILDelegateBuilder<SerializationManager.DeepCopier> EmitCopier(Type type, List<FieldInfo> fields)
    {
        var il = new ILDelegateBuilder<SerializationManager.DeepCopier>(
            this.fieldBuilder,
            type.Name + "DeepCopier",
            this.methods,
            this.methods.DeepCopierDelegate);

        // Declare local variables.
        var result = il.DeclareLocal(type);
        var typedInput = il.DeclareLocal(type);

        // Set the typed input variable from the method parameter.
        il.LoadArgument(0);
        il.CastOrUnbox(type);
        il.StoreLocal(typedInput);

        // Construct the result.
        il.CreateInstance(type, result);

        // Record the object (original -> result) in the current serialization
        // context before copying fields.
        il.Call(this.methods.GetCurrentSerializationContext);
        il.LoadArgument(0); // Load 'original' parameter.
        il.LoadLocal(result); // Load 'result' local.
        il.BoxIfValueType(type);
        il.Call(this.methods.RecordObjectWhileCopying);

        // Copy each field.
        foreach (var field in fields)
        {
            // Load the field.
            il.LoadLocalAsReference(type, result);
            il.LoadLocal(typedInput);
            il.LoadField(field);

            // Deep-copy the field if needed, otherwise just leave it as-is.
            if (!field.FieldType.IsOrleansShallowCopyable())
            {
                var copyMethod = this.methods.DeepCopyInner;
                il.BoxIfValueType(field.FieldType);
                il.Call(copyMethod);
                il.CastOrUnbox(field.FieldType);
            }

            // Store the copy of the field on the result.
            il.StoreField(field);
        }

        il.LoadLocal(result);
        il.BoxIfValueType(type);
        il.Return();
        return il;
    }

    /// <summary>
    /// Emits the serializer body for <paramref name="type"/>.
    /// </summary>
    /// <param name="type">The type being serialized.</param>
    /// <param name="fields">The fields to write, in deterministic order.</param>
    /// <returns>The builder holding the generated serializer.</returns>
    private ILDelegateBuilder<SerializationManager.Serializer> EmitSerializer(Type type, List<FieldInfo> fields)
    {
        var il = new ILDelegateBuilder<SerializationManager.Serializer>(
            this.fieldBuilder,
            type.Name + "Serializer",
            this.methods,
            this.methods.SerializerDelegate);

        // Declare local variables.
        var typedInput = il.DeclareLocal(type);

        // Set the typed input variable from the method parameter.
        il.LoadArgument(0);
        il.CastOrUnbox(type);
        il.StoreLocal(typedInput);

        // Serialize each field
        foreach (var field in fields)
        {
            SimpleTypeSerializer serializer;
            var fieldType = field.FieldType.GetTypeInfo();
            var typeHandle = field.FieldType.TypeHandle;
            // Enum fields are written as their underlying integral type.
            if (fieldType.IsEnum)
            {
                typeHandle = fieldType.GetEnumUnderlyingType().TypeHandle;
            }

            if (this.directSerializers.TryGetValue(typeHandle, out serializer))
            {
                // Fast path: the type has a dedicated stream write method.
                il.LoadArgument(1);
                il.LoadLocal(typedInput);
                il.LoadField(field);
                il.Call(serializer.WriteMethod);
            }
            else
            {
                // General path: box the value (if needed) and let
                // SerializeInner dispatch on the declared field type.
                var serializeMethod = this.methods.SerializeInner;

                // Load the field.
                il.LoadLocal(typedInput);
                il.LoadField(field);
                il.BoxIfValueType(field.FieldType);

                // Serialize the field.
                il.LoadArgument(1);
                il.LoadType(field.FieldType);
                il.Call(serializeMethod);
            }
        }

        il.Return();
        return il;
    }

    /// <summary>
    /// Emits the deserializer body for <paramref name="type"/>.
    /// </summary>
    /// <param name="type">The type being deserialized.</param>
    /// <param name="fields">The fields to read, in the same order the serializer wrote them.</param>
    /// <returns>The builder holding the generated deserializer.</returns>
    private ILDelegateBuilder<SerializationManager.Deserializer> EmitDeserializer(Type type, List<FieldInfo> fields)
    {
        var il = new ILDelegateBuilder<SerializationManager.Deserializer>(
            this.fieldBuilder,
            type.Name + "Deserializer",
            this.methods,
            this.methods.DeserializerDelegate);

        // Declare local variables.
        var result = il.DeclareLocal(type);

        // Construct the result.
        il.CreateInstance(type, result);

        // Record the object in the current deserialization context before
        // reading any fields.
        il.Call(this.methods.GetCurrentDeserializationContext);
        il.LoadLocal(result);
        il.BoxIfValueType(type);
        il.Call(this.methods.RecordObjectWhileDeserializing);

        // Deserialize each field.
        foreach (var field in fields)
        {
            // Deserialize the field.
            SimpleTypeSerializer serializer;
            var fieldType = field.FieldType.GetTypeInfo();
            if (fieldType.IsEnum)
            {
                // Enum fields were written as their underlying integral type;
                // read them back the same way.
                var typeHandle = fieldType.GetEnumUnderlyingType().TypeHandle;
                il.LoadLocalAsReference(type, result);
                il.LoadArgument(1);
                il.Call(this.directSerializers[typeHandle].ReadMethod);
                il.StoreField(field);
            }
            else if (this.directSerializers.TryGetValue(field.FieldType.TypeHandle, out serializer))
            {
                // Fast path: the type has a dedicated stream read method.
                il.LoadLocalAsReference(type, result);
                il.LoadArgument(1);
                il.Call(serializer.ReadMethod);
                il.StoreField(field);
            }
            else
            {
                // General path: DeserializeInner returns an object that must
                // be cast/unboxed back to the declared field type.
                var deserializeMethod = this.methods.DeserializeInner;
                il.LoadLocalAsReference(type, result);
                il.LoadType(field.FieldType);
                il.LoadArgument(1);
                il.Call(deserializeMethod);

                // Store the value on the result.
                il.CastOrUnbox(field.FieldType);
                il.StoreField(field);
            }
        }

        il.LoadLocal(result);
        il.BoxIfValueType(type);
        il.Return();
        return il;
    }

    /// <summary>
    /// Returns a sorted list of the fields of the provided type.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <param name="fieldFilter">The predicate used in addition to the default logic to select which fields are included.</param>
    /// <returns>A sorted list of the fields of the provided type.</returns>
    private List<FieldInfo> GetFields(Type type, Func<FieldInfo, bool> fieldFilter = null)
    {
        var result =
            type.GetAllFields()
                .Where(
                    field =>
                    field.GetCustomAttribute<NonSerializedAttribute>() == null && !field.IsStatic
                    && IsSupportedFieldType(field.FieldType.GetTypeInfo())
                    && (fieldFilter == null || fieldFilter(field)))
                .ToList();
        // Sort by name so the serializer and deserializer (which iterate the
        // same list) always agree on field order.
        result.Sort(FieldInfoComparer.Instance);
        return result;
    }

    /// <summary>
    /// Returns a value indicating whether the provided type is supported as a field by this class.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>A value indicating whether the provided type is supported as a field by this class.</returns>
    private static bool IsSupportedFieldType(TypeInfo type)
    {
        // Pointers, by-ref types, native ints, and delegates are excluded.
        if (type.IsPointer || type.IsByRef) return false;

        var handle = type.AsType().TypeHandle;
        if (handle.Equals(IntPtrTypeHandle)) return false;
        if (handle.Equals(UintPtrTypeHandle)) return false;
        if (DelegateTypeInfo.IsAssignableFrom(type)) return false;

        return true;
    }

    /// <summary>
    /// A comparer for <see cref="FieldInfo"/> which compares by name.
    /// </summary>
    private class FieldInfoComparer : IComparer<FieldInfo>
    {
        /// <summary>
        /// Gets the singleton instance of this class.
        /// </summary>
        public static FieldInfoComparer Instance { get; } = new FieldInfoComparer();

        public int Compare(FieldInfo x, FieldInfo y)
        {
            // Ordinal comparison keeps the ordering culture-independent.
            return string.Compare(x.Name, y.Name, StringComparison.Ordinal);
        }
    }

    /// <summary>
    /// Holds the writer and reader methods used to directly serialize a simple type.
    /// </summary>
    private class SimpleTypeSerializer
    {
        public SimpleTypeSerializer(
            Expression<Action<BinaryTokenStreamWriter>> write,
            Expression<Action<BinaryTokenStreamReader>> read)
        {
            // The expressions are only inspected to extract the target
            // MethodInfo; they are never compiled or invoked.
            this.WriteMethod = TypeUtils.Method(write);
            this.ReadMethod = TypeUtils.Method(read);
        }

        public MethodInfo WriteMethod { get; }

        public MethodInfo ReadMethod { get; }
    }
}
}
| |
/*
* Copyright (c) 2015, InWorldz Halcyon Developers
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of halcyon nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using OpenSim.Region.Framework.Scenes;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Physics.Manager;
using OpenSim.Region.Framework.Interfaces;
namespace OpenSim.Region.CoreModules.Agent.BotManager
{
/// <summary>
/// Coarse movement state of a bot.
/// </summary>
public enum BotState
{
    // Not currently moving.
    Idle,
    // Moving on the ground.
    Walking,
    // Moving through the air.
    Flying,
    // State could not be determined.
    Unknown
}
/// <summary>
/// Coordinates a bot's scripted movement (following an avatar, wandering, or
/// walking a navigation path) by running the active MovementAction once per
/// scene frame.
/// </summary>
public class BotMovementController
{
    #region Declares

    private Scene m_scene;
    private IBot m_bot;

    // Guards m_currentMovement and m_movementAction. The scene's OnFrame
    // event may fire concurrently with the public start/stop methods, so all
    // reads and writes of those fields happen under this lock.
    private object m_movementLock = new object();
    private MovementDescription m_currentMovement = null;
    private MovementAction m_movementAction = null;

    public Scene Scene { get { return m_scene; } }
    public IBot Bot { get { return m_bot; } }

    #endregion

    #region Properties

    /// <summary>
    /// Whether any movement (follow, wander, or path) is currently active.
    /// </summary>
    public bool MovementInProgress
    {
        get
        {
            lock (m_movementLock)
            {
                return m_currentMovement != null;
            }
        }
    }

    #endregion

    #region Constructor

    public BotMovementController(IBot bot)
    {
        m_bot = bot;
        m_scene = (Scene)bot.Scene;
    }

    #endregion

    #region Public Methods

    /// <summary>
    /// Starts following the given avatar, replacing any movement in progress.
    /// </summary>
    /// <param name="avatarID">The avatar to follow.</param>
    /// <param name="options">Follower-specific options.</param>
    public void StartFollowingAvatar(UUID avatarID, Dictionary<int, object> options)
    {
        lock (m_movementLock)
        {
            if (MovementInProgress)
                StopMovement();

            AvatarFollower.AvatarFollowerDescription desc =
                new AvatarFollower.AvatarFollowerDescription(avatarID, options);
            BeginTrackingFrames(desc);
        }
    }

    /// <summary>
    /// Starts moving along a series of waypoints, replacing any movement in
    /// progress.
    /// </summary>
    /// <param name="nodes">Waypoints to visit, in order.</param>
    /// <param name="travelModes">Travel mode for each leg of the path.</param>
    /// <param name="options">Navigation-specific options.</param>
    public void StartNavigationPath(List<Vector3> nodes, List<TravelMode> travelModes, Dictionary<int, object> options)
    {
        lock (m_movementLock)
        {
            if (MovementInProgress)
                StopMovement();

            NavigationPathAction.NavigationPathDescription desc =
                new NavigationPathAction.NavigationPathDescription(nodes, travelModes, options);
            BeginTrackingFrames(desc);
        }
    }

    /// <summary>
    /// Starts wandering around an origin, replacing any movement in progress.
    /// </summary>
    /// <param name="origin">Center of the wander area.</param>
    /// <param name="distance">Extent of the wander area.</param>
    /// <param name="options">Wander-specific options.</param>
    public void StartWandering(Vector3 origin, Vector3 distance, Dictionary<int, object> options)
    {
        lock (m_movementLock)
        {
            if (MovementInProgress)
                StopMovement();

            WanderingAction.WanderingDescription desc =
                new WanderingAction.WanderingDescription(origin, distance, options);
            BeginTrackingFrames(desc);
        }
    }

    /// <summary>
    /// Stops and tears down the current movement, if any.
    /// </summary>
    public void StopMovement()
    {
        lock (m_movementLock)
        {
            if (!MovementInProgress)
                return;

            StopTrackingFrames();
        }
    }

    /// <summary>
    /// Pauses the current movement without tearing it down.
    /// </summary>
    public void PauseMovement()
    {
        lock (m_movementLock)
        {
            if (!MovementInProgress)
                return;

            m_movementAction.PauseMovement();
        }
    }

    /// <summary>
    /// Resumes a previously paused movement.
    /// </summary>
    public void ResumeMovement()
    {
        lock (m_movementLock)
        {
            if (!MovementInProgress)
                return;

            m_movementAction.ResumeMovement();
        }
    }

    #endregion

    #region Private Methods

    /// <summary>
    /// MUST BE CALLED FROM WITHIN m_movementLock!
    /// Creates the MovementAction matching the description, starts it, and
    /// subscribes to the scene's per-frame event.
    /// </summary>
    /// <param name="movement">Description of the movement to begin.</param>
    private void BeginTrackingFrames(MovementDescription movement)
    {
        if (m_currentMovement != null)
            return;

        m_currentMovement = movement;
        if (movement is AvatarFollower.AvatarFollowerDescription)
            m_movementAction = new AvatarFollower(movement, this);
        else if (movement is WanderingAction.WanderingDescription)
            m_movementAction = new WanderingAction(movement, this);
        else
            m_movementAction = new NavigationPathAction(movement, this);

        m_movementAction.Start();
        m_movementAction.SetBeginningOfMovementFrame();
        m_scene.EventManager.OnFrame += Scene_OnFrame;
    }

    /// <summary>
    /// MUST BE CALLED FROM WITHIN m_movementLock!
    /// Stops the active action and unsubscribes from the per-frame event.
    /// </summary>
    private void StopTrackingFrames()
    {
        if (m_currentMovement == null)
            return;

        m_movementAction.Stop();
        m_movementAction = null;
        m_currentMovement = null;
        m_scene.EventManager.OnFrame -= Scene_OnFrame;
    }

    private void Scene_OnFrame()
    {
        // BUGFIX: the previous implementation read m_movementAction and called
        // Frame() on it without holding m_movementLock, racing with
        // StopTrackingFrames() which nulls the field under the lock (the field
        // could be cleared between the null check and the Frame() call).
        // Snapshot the field under the lock and operate on the snapshot.
        MovementAction action;
        lock (m_movementLock)
        {
            action = m_movementAction;
        }

        if (action == null)
            return;

        if (!action.Frame())
        {
            lock (m_movementLock)
            {
                // Only tear down if this action is still the active one; a
                // new movement may have started after the snapshot was taken.
                if (m_movementAction == action)
                    StopTrackingFrames();
            }
        }
    }

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Orleans.Runtime;
using Orleans.Storage;
using TestExtensions;
using UnitTests.Persistence;
using Xunit;
using Xunit.Abstractions;
namespace UnitTests.StorageTests
{
/// <summary>
/// Storage provider back-ends that the persistence tests can target.
/// </summary>
[Serializable]
public enum ProviderType
{
    AzureTable,
    Memory,
    Mock,
    File,
    AdoNet
}
[Collection(TestEnvironmentFixture.DefaultCollection)]
/// <summary>
/// Tests for the in-memory hierarchical key-value store used by the memory
/// storage provider: write, read, delete, and multi-row read round-trips.
/// </summary>
public class LocalStoreTests
{
    private readonly ITestOutputHelper output;
    private readonly TestEnvironmentFixture fixture;

    public LocalStoreTests(ITestOutputHelper output, TestEnvironmentFixture fixture)
    {
        this.output = output;
        this.fixture = fixture;
    }

    [Fact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MemoryStore")]
    public void Store_Read()
    {
        // Unique name per run so rows from earlier tests cannot collide.
        string name = Guid.NewGuid().ToString();//TestContext.TestName;
        ILocalDataStore store = new HierarchicalKeyStore(2);
        GrainReference reference = (GrainReference)this.fixture.InternalGrainFactory.GetGrain(LegacyGrainId.NewId());
        TestStoreGrainState state = new TestStoreGrainState();
        var stateProperties = AsDictionary(state);
        var keys = GetKeys(name, reference);
        store.WriteRow(keys, stateProperties, null);
        Stopwatch sw = new Stopwatch();
        sw.Start();
        var data = store.ReadRow(keys);
        TimeSpan readTime = sw.Elapsed;
        output.WriteLine("{0} - Read time = {1}", store.GetType().FullName, readTime);
        // The read-back row must reproduce each written property.
        Assert.Equal(state.A, data["A"]); // "A"
        Assert.Equal(state.B, data["B"]); // "B"
        Assert.Equal(state.C, data["C"]); // "C"
    }

    [Fact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MemoryStore")]
    public void Store_WriteRead()
    {
        string name = Guid.NewGuid().ToString();//TestContext.TestName;
        ILocalDataStore store = new HierarchicalKeyStore(2);
        GrainReference reference = (GrainReference)fixture.InternalGrainFactory.GetGrain(LegacyGrainId.NewId());
        var state = TestStoreGrainState.NewRandomState();
        Stopwatch sw = new Stopwatch();
        sw.Start();
        var keys = GetKeys(name, reference);
        var stateProperties = AsDictionary(state.State);
        store.WriteRow(keys, stateProperties, state.ETag);
        TimeSpan writeTime = sw.Elapsed;
        sw.Restart();
        var data = store.ReadRow(keys);
        TimeSpan readTime = sw.Elapsed;
        output.WriteLine("{0} - Write time = {1} Read time = {2}", store.GetType().FullName, writeTime, readTime);
        // Randomized state must round-trip through write + read unchanged.
        Assert.Equal(state.State.A, data["A"]); // "A"
        Assert.Equal(state.State.B, data["B"]); // "B"
        Assert.Equal(state.State.C, data["C"]); // "C"
    }

    [Fact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MemoryStore")]
    public void Store_Delete()
    {
        string name = Guid.NewGuid().ToString();//TestContext.TestName;
        ILocalDataStore store = new HierarchicalKeyStore(2);
        GrainReference reference = (GrainReference)this.fixture.InternalGrainFactory.GetGrain(LegacyGrainId.NewId());
        var data = TestStoreGrainState.NewRandomState();
        output.WriteLine("Using store = {0}", store.GetType().FullName);
        Stopwatch sw = new Stopwatch();
        var keys = GetKeys(name, reference);
        sw.Restart();
        string eTag = store.WriteRow(keys, AsDictionary(data.State), null);
        output.WriteLine("Write returned Etag={0} after {1} {2}", eTag, sw.Elapsed, StorageProviderUtils.PrintOneWrite(keys, data, eTag));
        sw.Restart();
        var storedData = store.ReadRow(keys);
        output.WriteLine("Read returned {0} after {1}", StorageProviderUtils.PrintOneWrite(keys, storedData, eTag), sw.Elapsed);
        // BUGFIX: assert on the data read back from the store; previously this
        // checked the local 'data' variable, which is trivially non-null.
        Assert.NotNull(storedData); // Should get some data from Read
        sw.Restart();
        bool ok = store.DeleteRow(keys, eTag);
        Assert.True(ok, $"Row deleted OK after {sw.Elapsed}. Etag={eTag} Keys={StorageProviderUtils.PrintKeys(keys)}");
        sw.Restart();
        storedData = store.ReadRow(keys); // Try to re-read after delete
        output.WriteLine("Re-Read took {0} and returned {1}", sw.Elapsed, StorageProviderUtils.PrintData(storedData));
        // BUGFIX: check the re-read result rather than the original state
        // object — a deleted row should read back as an empty (not null) set.
        Assert.NotNull(storedData); // Should not get null data from Re-Read
        Assert.True(storedData.Count == 0, $"Should get no data from Re-Read but got: {StorageProviderUtils.PrintData(storedData)}");
        sw.Restart();
        // Writing with a null etag after the delete must succeed and produce
        // a fresh etag that can then be used to delete again.
        const string oldEtag = null;
        eTag = store.WriteRow(keys, storedData, oldEtag);
        output.WriteLine("Write for Keys={0} Etag={1} Data={2} returned New Etag={3} after {4}",
            StorageProviderUtils.PrintKeys(keys), oldEtag, StorageProviderUtils.PrintData(storedData),
            eTag, sw.Elapsed);
        sw.Restart();
        ok = store.DeleteRow(keys, eTag);
        Assert.True(ok, $"Row deleted OK after {sw.Elapsed}. Etag={eTag} Keys={StorageProviderUtils.PrintKeys(keys)}");
    }

    [Fact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MemoryStore")]
    public void Store_ReadMulti()
    {
        string name = Guid.NewGuid().ToString();//TestContext.TestName;
        ILocalDataStore store = new HierarchicalKeyStore(2);
        // Write #1
        IList<Tuple<string, string>> keys = new[]
        {
            Tuple.Create("GrainType", name),
            Tuple.Create("GrainId", "1")
        }.ToList();
        var grainState = TestStoreGrainState.NewRandomState();
        var state = grainState.State;
        state.A = name;
        store.WriteRow(keys, AsDictionary(state), grainState.ETag);
        // Write #2
        keys = new[]
        {
            Tuple.Create("GrainType", name),
            Tuple.Create("GrainId", "2")
        }.ToList();
        grainState = TestStoreGrainState.NewRandomState();
        state = grainState.State;
        state.A = name;
        store.WriteRow(keys, AsDictionary(state), grainState.ETag);
        // Multi Read: querying by the GrainType prefix alone must return
        // both rows written above.
        keys = new[]
        {
            Tuple.Create("GrainType", name)
        }.ToList();
        var results = store.ReadMultiRow(keys);
        Assert.Equal(2, results.Count); // "Count"
    }

    [Fact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MemoryStore")]
    public void GrainState_Store_WriteRead()
    {
        string name = Guid.NewGuid().ToString();//TestContext.TestName;
        ILocalDataStore store = new HierarchicalKeyStore(2);
        GrainReference reference = (GrainReference)this.fixture.InternalGrainFactory.GetGrain(LegacyGrainId.NewId());
        var grainState = TestStoreGrainState.NewRandomState();
        var state = grainState.State;
        Stopwatch sw = new Stopwatch();
        sw.Start();
        IList<Tuple<string, string>> keys = new[]
        {
            Tuple.Create("GrainType", name),
            Tuple.Create("GrainId", reference.GrainId.ToString())
        }.ToList();
        store.WriteRow(keys, AsDictionary(state), grainState.ETag);
        TimeSpan writeTime = sw.Elapsed;
        sw.Restart();
        var data = store.ReadRow(keys);
        TimeSpan readTime = sw.Elapsed;
        output.WriteLine("{0} - Write time = {1} Read time = {2}", store.GetType().FullName, writeTime, readTime);
        Assert.Equal(state.A, data["A"]); // "A"
        Assert.Equal(state.B, data["B"]); // "B"
        Assert.Equal(state.C, data["C"]); // "C"
    }

    // ---------- Utility methods ----------

    /// <summary>
    /// Builds the two-level (GrainType, GrainId) key list used by the store.
    /// </summary>
    private static IList<Tuple<string, string>> GetKeys(string grainTypeName, GrainReference grain)
    {
        var keys = new[]
        {
            Tuple.Create("GrainType", grainTypeName),
            Tuple.Create("GrainId", grain.GrainId.ToString())
        };
        return keys.ToList();
    }

    /// <summary>
    /// Flattens an object's public properties into a name/value dictionary.
    /// </summary>
    private static Dictionary<string, object> AsDictionary(object state)
    {
        return state.GetType().GetProperties()
            .Select(v => new KeyValuePair<string, object>(v.Name, v.GetValue(state)))
            .ToDictionary(pair => pair.Key, pair => pair.Value);
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Composition;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeFixes.Suppression;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Extensions;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CodeFixes
{
using Microsoft.CodeAnalysis.ErrorLogger;
using DiagnosticId = String;
using LanguageKind = String;
[Export(typeof(ICodeFixService)), Shared]
internal partial class CodeFixService : ICodeFixService
{
private readonly IDiagnosticAnalyzerService _diagnosticService;

// Workspace-level fixers per language, lazily keyed by diagnostic ID.
private readonly ImmutableDictionary<LanguageKind, Lazy<ImmutableDictionary<DiagnosticId, ImmutableArray<CodeFixProvider>>>> _workspaceFixersMap;

// Project-level fixers, cached per analyzer-reference list via a weak table
// so the cache does not extend the references' lifetime.
private readonly ConditionalWeakTable<IReadOnlyList<AnalyzerReference>, ImmutableDictionary<DiagnosticId, List<CodeFixProvider>>> _projectFixersMap;

// Shared by project fixers and workspace fixers.
private ImmutableDictionary<CodeFixProvider, ImmutableArray<DiagnosticId>> _fixerToFixableIdsMap = ImmutableDictionary<CodeFixProvider, ImmutableArray<DiagnosticId>>.Empty;

// Per-language ordering of fixers (smaller value = higher priority —
// assumption based on usage; confirm against GetFixerPriorityPerLanguageMap).
private readonly ImmutableDictionary<LanguageKind, Lazy<ImmutableDictionary<CodeFixProvider, int>>> _fixerPriorityMap;
private readonly ConditionalWeakTable<AnalyzerReference, ProjectCodeFixProvider> _analyzerReferenceToFixersMap;
private readonly ConditionalWeakTable<AnalyzerReference, ProjectCodeFixProvider>.CreateValueCallback _createProjectCodeFixProvider;
private readonly ImmutableDictionary<LanguageKind, Lazy<ISuppressionFixProvider>> _suppressionProvidersMap;
private readonly IEnumerable<Lazy<IErrorLoggerService>> _errorLoggers;
private ImmutableDictionary<object, FixAllProviderInfo> _fixAllProviderMap;
[ImportingConstructor]
public CodeFixService(
    IDiagnosticAnalyzerService service,
    [ImportMany]IEnumerable<Lazy<IErrorLoggerService>> loggers,
    [ImportMany]IEnumerable<Lazy<CodeFixProvider, CodeChangeProviderMetadata>> fixers,
    [ImportMany]IEnumerable<Lazy<ISuppressionFixProvider, CodeChangeProviderMetadata>> suppressionProviders)
{
    _diagnosticService = service;
    _errorLoggers = loggers;

    // Bucket the imported providers by language once; every per-language map
    // below is derived from these groupings.
    var fixersByLanguage = fixers.ToPerLanguageMapWithMultipleLanguages();
    var suppressionProvidersByLanguage = suppressionProviders.ToPerLanguageMapWithMultipleLanguages();
    _workspaceFixersMap = GetFixerPerLanguageMap(fixersByLanguage, null);
    _suppressionProvidersMap = GetSuppressionProvidersPerLanguageMap(suppressionProvidersByLanguage);

    // REVIEW: a fixer's priority is statically declared by the fixer itself;
    // this could be made dynamic or configurable later.
    _fixerPriorityMap = GetFixerPriorityPerLanguageMap(fixersByLanguage);

    // Per-project fixers are resolved lazily per analyzer reference and held
    // in weak tables so cached entries don't outlive the references.
    _projectFixersMap = new ConditionalWeakTable<IReadOnlyList<AnalyzerReference>, ImmutableDictionary<string, List<CodeFixProvider>>>();
    _analyzerReferenceToFixersMap = new ConditionalWeakTable<AnalyzerReference, ProjectCodeFixProvider>();
    _createProjectCodeFixProvider = new ConditionalWeakTable<AnalyzerReference, ProjectCodeFixProvider>.CreateValueCallback(reference => new ProjectCodeFixProvider(reference));
    _fixAllProviderMap = ImmutableDictionary<object, FixAllProviderInfo>.Empty;
}
/// <summary>
/// Scans the diagnostics reported for <paramref name="range"/> in an open document and
/// returns the first one for which some fixer (or, optionally, the suppression provider)
/// can produce a fix. Returns the default result when the document is null/closed or no
/// fixable diagnostic intersects the range.
/// </summary>
public async Task<FirstDiagnosticResult> GetFirstDiagnosticWithFixAsync(Document document, TextSpan range, bool considerSuppressionFixes, CancellationToken cancellationToken)
{
    // Only open documents are considered; anything else yields the empty default result.
    if (document == null || !document.IsOpen())
    {
        return default(FirstDiagnosticResult);
    }

    using (var pooledList = SharedPools.Default<List<DiagnosticData>>().GetPooledObject())
    {
        var candidates = pooledList.Object;
        var upToDate = await _diagnosticService.TryAppendDiagnosticsForSpanAsync(document, range, candidates, cancellationToken: cancellationToken).ConfigureAwait(false);

        foreach (var candidate in candidates)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Skip diagnostics outside the requested span.
            if (!range.IntersectsWith(candidate.TextSpan))
            {
                continue;
            }

            // REVIEW: 2 possible designs.
            // 1. find the first fix and then return right away. if the lightbulb is actually expanded, find all fixes for the line synchronously. or
            // 2. kick off a task that finds all fixes for the given range here but return once we find the first one.
            // at the same time, let the task to run to finish. if the lightbulb is expanded, we just simply use the task to get all fixes.
            //
            // first approach is simpler, so I will implement that first. if the first approach turns out to be not good enough, then
            // I will try the second approach which will be more complex but quicker
            var fixable = await ContainsAnyFix(document, candidate, considerSuppressionFixes, cancellationToken).ConfigureAwait(false);
            if (fixable)
            {
                return new FirstDiagnosticResult(!upToDate, fixable, candidate);
            }
        }

        return new FirstDiagnosticResult(!upToDate, false, default(DiagnosticData));
    }
}
/// <summary>
/// Returns the code fix collections (ordinary fixes first, then optionally suppression
/// fixes) for all diagnostics reported in <paramref name="range"/>. Diagnostics are
/// grouped by text span so one CodeFixContext is produced per span.
/// </summary>
public async Task<IEnumerable<CodeFixCollection>> GetFixesAsync(Document document, TextSpan range, bool includeSuppressionFixes, CancellationToken cancellationToken)
{
// REVIEW: this is the first and simplest design. basically, when ctrl+. is pressed, it asks diagnostic service to give back
// current diagnostics for the given span, and it will use that to get fixes. internally diagnostic service will either return cached information
// (if it is up-to-date) or synchronously do the work at the spot.
//
// this design's weakness is that each side don't have enough information to narrow down works to do. it will most likely always do more works than needed.
// sometimes way more than it is needed. (compilation)
Dictionary<TextSpan, List<DiagnosticData>> aggregatedDiagnostics = null;
foreach (var diagnostic in await _diagnosticService.GetDiagnosticsForSpanAsync(document, range, cancellationToken: cancellationToken).ConfigureAwait(false))
{
// Already-suppressed diagnostics get no fixes.
if (diagnostic.IsSuppressed)
{
continue;
}
cancellationToken.ThrowIfCancellationRequested();
// Lazily allocate the map; group diagnostics by their exact text span.
aggregatedDiagnostics = aggregatedDiagnostics ?? new Dictionary<TextSpan, List<DiagnosticData>>();
aggregatedDiagnostics.GetOrAdd(diagnostic.TextSpan, _ => new List<DiagnosticData>()).Add(diagnostic);
}
var result = new List<CodeFixCollection>();
if (aggregatedDiagnostics == null)
{
return result;
}
foreach (var spanAndDiagnostic in aggregatedDiagnostics)
{
result = await AppendFixesAsync(document, spanAndDiagnostic.Key, spanAndDiagnostic.Value, result, cancellationToken).ConfigureAwait(false);
}
if (result.Any())
{
// sort the result to the order defined by the fixers
// NOTE(review): this comparison is not a strict weak ordering - when neither provider is in
// the priority map it returns 1 for both (d1,d2) and (d2,d1). Confirm List<T>.Sort tolerates
// this for the provider sets we actually encounter.
var priorityMap = _fixerPriorityMap[document.Project.Language].Value;
result.Sort((d1, d2) => priorityMap.ContainsKey((CodeFixProvider)d1.Provider) ? (priorityMap.ContainsKey((CodeFixProvider)d2.Provider) ? priorityMap[(CodeFixProvider)d1.Provider] - priorityMap[(CodeFixProvider)d2.Provider] : -1) : 1);
}
// Suppression fixes are appended after the sort so they always trail the regular fixes.
if (includeSuppressionFixes)
{
foreach (var spanAndDiagnostic in aggregatedDiagnostics)
{
result = await AppendSuppressionsAsync(document, spanAndDiagnostic.Key, spanAndDiagnostic.Value, result, cancellationToken).ConfigureAwait(false);
}
}
return result;
}
/// <summary>
/// Appends fixes for the diagnostics at <paramref name="span"/> to <paramref name="result"/>.
/// Consults both workspace (MEF) fixers and per-project (analyzer-reference) fixers.
/// </summary>
private async Task<List<CodeFixCollection>> AppendFixesAsync(
Document document,
TextSpan span,
IEnumerable<DiagnosticData> diagnosticDataCollection,
List<CodeFixCollection> result,
CancellationToken cancellationToken)
{
Lazy<ImmutableDictionary<DiagnosticId, ImmutableArray<CodeFixProvider>>> fixerMap;
bool hasAnySharedFixer = _workspaceFixersMap.TryGetValue(document.Project.Language, out fixerMap);
var projectFixersMap = GetProjectFixers(document.Project);
var hasAnyProjectFixer = projectFixersMap.Any();
if (!hasAnySharedFixer && !hasAnyProjectFixer)
{
return result;
}
ImmutableArray<CodeFixProvider> workspaceFixers;
List<CodeFixProvider> projectFixers;
// Collect every fixer that claims at least one of the diagnostic ids in play.
var allFixers = new List<CodeFixProvider>();
foreach (var diagnosticId in diagnosticDataCollection.Select(d => d.Id).Distinct())
{
cancellationToken.ThrowIfCancellationRequested();
if (hasAnySharedFixer && fixerMap.Value.TryGetValue(diagnosticId, out workspaceFixers))
{
allFixers.AddRange(workspaceFixers);
}
if (hasAnyProjectFixer && projectFixersMap.TryGetValue(diagnosticId, out projectFixers))
{
allFixers.AddRange(projectFixers);
}
}
var diagnostics = await DiagnosticData.ToDiagnosticsAsync(document.Project, diagnosticDataCollection, cancellationToken).ConfigureAwait(false);
var extensionManager = document.Project.Solution.Workspace.Services.GetService<IExtensionManager>();
// Run each distinct fixer once, even if it was registered for several diagnostic ids.
foreach (var fixer in allFixers.Distinct())
{
cancellationToken.ThrowIfCancellationRequested();
Func<Diagnostic, bool> hasFix = (d) => this.GetFixableDiagnosticIds(fixer, extensionManager).Contains(d.Id);
Func<ImmutableArray<Diagnostic>, Task<IEnumerable<CodeFix>>> getFixes =
async (dxs) =>
{
var fixes = new List<CodeFix>();
var context = new CodeFixContext(document, span, dxs,
// TODO: Can we share code between similar lambdas that we pass to this API in BatchFixAllProvider.cs, CodeFixService.cs and CodeRefactoringService.cs?
(a, d) =>
{
// Serialize access for thread safety - we don't know what thread the fix provider will call this delegate from.
lock (fixes)
{
fixes.Add(new CodeFix(a, d));
}
},
verifyArguments: false,
cancellationToken: cancellationToken);
// A provider may return null from RegisterCodeFixesAsync; treat that as completed.
var task = fixer.RegisterCodeFixesAsync(context) ?? SpecializedTasks.EmptyTask;
await task.ConfigureAwait(false);
return fixes;
};
await AppendFixesOrSuppressionsAsync(document, span, diagnostics, result, fixer,
hasFix, getFixes, cancellationToken).ConfigureAwait(false);
}
return result;
}
/// <summary>
/// Appends suppression fixes for the given diagnostics to <paramref name="result"/>,
/// provided a suppression fix provider is registered for the document's language.
/// </summary>
private async Task<List<CodeFixCollection>> AppendSuppressionsAsync(
    Document document, TextSpan span, IEnumerable<DiagnosticData> diagnosticDataCollection, List<CodeFixCollection> result, CancellationToken cancellationToken)
{
    Lazy<ISuppressionFixProvider> lazySuppressionProvider;
    var hasProvider =
        _suppressionProvidersMap.TryGetValue(document.Project.Language, out lazySuppressionProvider) &&
        lazySuppressionProvider.Value != null;
    if (!hasProvider)
    {
        // No suppression support for this language - nothing to add.
        return result;
    }

    var diagnostics = await DiagnosticData.ToDiagnosticsAsync(document.Project, diagnosticDataCollection, cancellationToken).ConfigureAwait(false);
    var provider = lazySuppressionProvider.Value;
    Func<Diagnostic, bool> canSuppress = (d) => provider.CanBeSuppressed(d);
    Func<ImmutableArray<Diagnostic>, Task<IEnumerable<CodeFix>>> computeFixes = (dxs) => provider.GetSuppressionsAsync(document, span, dxs, cancellationToken);
    await AppendFixesOrSuppressionsAsync(document, span, diagnostics, result, provider, canSuppress, computeFixes, cancellationToken).ConfigureAwait(false);
    return result;
}
/// <summary>
/// Shared tail for both ordinary fixes and suppressions: filters the diagnostics down
/// to those <paramref name="hasFix"/> accepts, asks <paramref name="getFixes"/> for the
/// fixes, and (if any were produced) appends a CodeFixCollection - including fix-all
/// support when the fixer advertises it - to <paramref name="result"/>.
/// Note: <paramref name="result"/> is mutated in place; the return value only matters
/// for the (unused here) null-result case.
/// </summary>
private async Task<List<CodeFixCollection>> AppendFixesOrSuppressionsAsync(
Document document,
TextSpan span,
IEnumerable<Diagnostic> diagnosticsWithSameSpan,
List<CodeFixCollection> result,
object fixer,
Func<Diagnostic, bool> hasFix,
Func<ImmutableArray<Diagnostic>, Task<IEnumerable<CodeFix>>> getFixes,
CancellationToken cancellationToken)
{
// Highest severity first, restricted to diagnostics this fixer can actually handle.
var diagnostics = diagnosticsWithSameSpan.Where(d => hasFix(d)).OrderByDescending(d => d.Severity).ToImmutableArray();
if (diagnostics.Length <= 0)
{
// this can happen for suppression case where all diagnostics can't be suppressed
return result;
}
var extensionManager = document.Project.Solution.Workspace.Services.GetService<IExtensionManager>();
// PerformFunctionAsync isolates us from fixer exceptions; on failure we get the empty default.
var fixes = await extensionManager.PerformFunctionAsync(fixer, () => getFixes(diagnostics), defaultValue: SpecializedCollections.EmptyEnumerable<CodeFix>()).ConfigureAwait(false);
if (fixes != null && fixes.Any())
{
// If the fix provider supports fix all occurrences, then get the corresponding FixAllProviderInfo and fix all context.
var fixAllProviderInfo = extensionManager.PerformFunction(fixer, () => ImmutableInterlocked.GetOrAdd(ref _fixAllProviderMap, fixer, FixAllProviderInfo.Create), defaultValue: null);
FixAllCodeActionContext fixAllContext = null;
if (fixAllProviderInfo != null)
{
// Suppression providers are wrapped so fix-all can treat them as a CodeFixProvider.
var codeFixProvider = (fixer as CodeFixProvider) ?? new WrapperCodeFixProvider((ISuppressionFixProvider)fixer, diagnostics);
fixAllContext = FixAllCodeActionContext.Create(document, fixAllProviderInfo, codeFixProvider, diagnostics, this.GetDocumentDiagnosticsAsync, this.GetProjectDiagnosticsAsync, cancellationToken);
}
result = result ?? new List<CodeFixCollection>();
var codeFix = new CodeFixCollection(fixer, span, fixes, fixAllContext);
result.Add(codeFix);
}
return result;
}
/// <summary>
/// Returns the language's suppression provider wrapped as a <see cref="CodeFixProvider"/>
/// for the given diagnostics, or null when no suppression provider is registered.
/// </summary>
public CodeFixProvider GetSuppressionFixer(string language, ImmutableArray<Diagnostic> diagnostics)
{
    Lazy<ISuppressionFixProvider> lazySuppressionProvider;
    if (_suppressionProvidersMap.TryGetValue(language, out lazySuppressionProvider) && lazySuppressionProvider.Value != null)
    {
        return new WrapperCodeFixProvider(lazySuppressionProvider.Value, diagnostics);
    }
    return null;
}
/// <summary>
/// Fetches the document-level diagnostics (restricted to <paramref name="diagnosticIds"/>)
/// used by fix-all, converted to <see cref="Diagnostic"/> instances.
/// </summary>
private async Task<IEnumerable<Diagnostic>> GetDocumentDiagnosticsAsync(Document document, ImmutableHashSet<string> diagnosticIds, CancellationToken cancellationToken)
{
    Contract.ThrowIfNull(document);

    var diagnosticData = await _diagnosticService.GetDiagnosticsForIdsAsync(
        document.Project.Solution, null, document.Id, diagnosticIds, cancellationToken: cancellationToken).ConfigureAwait(false);

    // A document-scoped query must only yield diagnostics that carry a document id.
    Contract.ThrowIfFalse(diagnosticData.All(d => d.DocumentId != null));

    return await DiagnosticData.ToDiagnosticsAsync(document.Project, diagnosticData, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Fetches project diagnostics for fix-all: either every diagnostic in the project
/// (including document-level ones) or only the project's no-location diagnostics.
/// </summary>
private async Task<IEnumerable<Diagnostic>> GetProjectDiagnosticsAsync(Project project, bool includeAllDocumentDiagnostics, ImmutableHashSet<string> diagnosticIds, CancellationToken cancellationToken)
{
    Contract.ThrowIfNull(project);

    if (!includeAllDocumentDiagnostics)
    {
        // Only the project-level (no-location) diagnostics; document diagnostics are excluded.
        var projectData = await _diagnosticService.GetProjectDiagnosticsForIdsAsync(project.Solution, project.Id, diagnosticIds, cancellationToken: cancellationToken).ConfigureAwait(false);
        Contract.ThrowIfFalse(projectData.All(d => d.DocumentId == null));
        return await DiagnosticData.ToDiagnosticsAsync(project, projectData, cancellationToken).ConfigureAwait(false);
    }

    // Everything in the project, including per-document diagnostics; the tree map lets
    // the converter resolve document locations.
    var allData = await _diagnosticService.GetDiagnosticsForIdsAsync(project.Solution, project.Id, diagnosticIds: diagnosticIds, cancellationToken: cancellationToken).ConfigureAwait(false);
    var documentIdsToTreeMap = await GetDocumentIdsToTreeMapAsync(project, cancellationToken).ConfigureAwait(false);
    return await DiagnosticData.ToDiagnosticsAsync(project, allData, documentIdsToTreeMap, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Builds a map from each document id in <paramref name="project"/> to its syntax tree.
/// </summary>
private static async Task<ImmutableDictionary<DocumentId, SyntaxTree>> GetDocumentIdsToTreeMapAsync(Project project, CancellationToken cancellationToken)
{
    var map = ImmutableDictionary.CreateBuilder<DocumentId, SyntaxTree>();
    foreach (var doc in project.Documents)
    {
        map.Add(doc.Id, await doc.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false));
    }
    return map.ToImmutable();
}
/// <summary>
/// Returns true if any registered fixer - workspace, per-project or (optionally) the
/// suppression provider - can actually produce a fix for <paramref name="diagnostic"/>.
/// The suppression check is a cheap predicate; ordinary fixers are probed by actually
/// running RegisterCodeFixesAsync and stopping at the first registered fix.
/// </summary>
private async Task<bool> ContainsAnyFix(Document document, DiagnosticData diagnostic, bool considerSuppressionFixes, CancellationToken cancellationToken)
{
ImmutableArray<CodeFixProvider> workspaceFixers = ImmutableArray<CodeFixProvider>.Empty;
List<CodeFixProvider> projectFixers = null;
Lazy<ImmutableDictionary<DiagnosticId, ImmutableArray<CodeFixProvider>>> fixerMap;
bool hasAnySharedFixer = _workspaceFixersMap.TryGetValue(document.Project.Language, out fixerMap) && fixerMap.Value.TryGetValue(diagnostic.Id, out workspaceFixers);
var hasAnyProjectFixer = GetProjectFixers(document.Project).TryGetValue(diagnostic.Id, out projectFixers);
Lazy<ISuppressionFixProvider> lazySuppressionProvider = null;
var hasSuppressionFixer =
considerSuppressionFixes &&
_suppressionProvidersMap.TryGetValue(document.Project.Language, out lazySuppressionProvider) &&
lazySuppressionProvider.Value != null;
if (!hasAnySharedFixer && !hasAnyProjectFixer && !hasSuppressionFixer)
{
return false;
}
var allFixers = ImmutableArray<CodeFixProvider>.Empty;
if (hasAnySharedFixer)
{
allFixers = workspaceFixers;
}
if (hasAnyProjectFixer)
{
allFixers = allFixers.AddRange(projectFixers);
}
var dx = await diagnostic.ToDiagnosticAsync(document.Project, cancellationToken).ConfigureAwait(false);
// Suppression is checked first since CanBeSuppressed is cheap relative to running fixers.
if (hasSuppressionFixer && lazySuppressionProvider.Value.CanBeSuppressed(dx))
{
return true;
}
var fixes = new List<CodeFix>();
var context = new CodeFixContext(document, dx,
// TODO: Can we share code between similar lambdas that we pass to this API in BatchFixAllProvider.cs, CodeFixService.cs and CodeRefactoringService.cs?
(a, d) =>
{
// Serialize access for thread safety - we don't know what thread the fix provider will call this delegate from.
lock (fixes)
{
fixes.Add(new CodeFix(a, d));
}
},
verifyArguments: false,
cancellationToken: cancellationToken);
var extensionManager = document.Project.Solution.Workspace.Services.GetService<IExtensionManager>();
// we do have fixer. now let's see whether it actually can fix it
foreach (var fixer in allFixers)
{
await extensionManager.PerformActionAsync(fixer, () => fixer.RegisterCodeFixesAsync(context) ?? SpecializedTasks.EmptyTask).ConfigureAwait(false);
if (!fixes.Any())
{
continue;
}
// First fixer that registers anything proves the diagnostic is fixable.
return true;
}
return false;
}
// Shared factory delegate for GetOrAdd calls, so lookups don't allocate a new lambda each time.
private static readonly Func<DiagnosticId, List<CodeFixProvider>> s_createList = _ => new List<CodeFixProvider>();
/// <summary>
/// Returns (and caches) the diagnostic ids <paramref name="fixer"/> claims to fix.
/// With an extension manager, exceptions from the fixer are handled/reported by it;
/// without one, exceptions are logged to <see cref="_errorLoggers"/> and an empty
/// array is returned (cancellation is always propagated).
/// </summary>
private ImmutableArray<DiagnosticId> GetFixableDiagnosticIds(CodeFixProvider fixer, IExtensionManager extensionManager)
{
// If we are passed a null extension manager it means we do not have access to a document so there is nothing to
// show the user. In this case we will log any exceptions that occur, but the user will not see them.
if (extensionManager != null)
{
return extensionManager.PerformFunction(
fixer,
() => ImmutableInterlocked.GetOrAdd(ref _fixerToFixableIdsMap, fixer, f => GetAndTestFixableDiagnosticIds(f)),
defaultValue: ImmutableArray<DiagnosticId>.Empty);
}
try
{
return ImmutableInterlocked.GetOrAdd(ref _fixerToFixableIdsMap, fixer, f => GetAndTestFixableDiagnosticIds(f));
}
catch (OperationCanceledException)
{
// Never swallow cancellation.
throw;
}
catch (Exception e)
{
foreach (var logger in _errorLoggers)
{
logger.Value.LogException(fixer, e);
}
return ImmutableArray<DiagnosticId>.Empty;
}
}
/// <summary>
/// Reads <see cref="CodeFixProvider.FixableDiagnosticIds"/> and throws a descriptive
/// <see cref="InvalidOperationException"/> when the provider left it default(uninitialized).
/// </summary>
private static ImmutableArray<string> GetAndTestFixableDiagnosticIds(CodeFixProvider codeFixProvider)
{
    var fixableIds = codeFixProvider.FixableDiagnosticIds;
    if (!fixableIds.IsDefault)
    {
        return fixableIds;
    }

    // Name the offending member ("SomeProvider.FixableDiagnosticIds") in the message.
    var memberName = codeFixProvider.GetType().Name + "." + nameof(CodeFixProvider.FixableDiagnosticIds);
    throw new InvalidOperationException(
        string.Format(WorkspacesResources.FixableDiagnosticIdsIncorrectlyInitialized, memberName));
}
/// <summary>
/// Builds, per language, a lazily-computed map from diagnostic id to the fixers that
/// can handle it. The inner map is lazy so FixableDiagnosticIds is not queried until
/// fixes are actually requested for that language.
/// </summary>
private ImmutableDictionary<LanguageKind, Lazy<ImmutableDictionary<DiagnosticId, ImmutableArray<CodeFixProvider>>>> GetFixerPerLanguageMap(
Dictionary<LanguageKind, List<Lazy<CodeFixProvider, CodeChangeProviderMetadata>>> fixersPerLanguage,
IExtensionManager extensionManager)
{
var fixerMap = ImmutableDictionary.Create<LanguageKind, Lazy<ImmutableDictionary<DiagnosticId, ImmutableArray<CodeFixProvider>>>>();
foreach (var languageKindAndFixers in fixersPerLanguage)
{
var lazyMap = new Lazy<ImmutableDictionary<DiagnosticId, ImmutableArray<CodeFixProvider>>>(() =>
{
var mutableMap = new Dictionary<DiagnosticId, List<CodeFixProvider>>();
foreach (var fixer in languageKindAndFixers.Value)
{
foreach (var id in this.GetFixableDiagnosticIds(fixer.Value, extensionManager))
{
// Blank/whitespace ids are silently dropped.
if (string.IsNullOrWhiteSpace(id))
{
continue;
}
var list = mutableMap.GetOrAdd(id, s_createList);
list.Add(fixer.Value);
}
}
// Freeze the per-id lists into immutable arrays.
var immutableMap = ImmutableDictionary.CreateBuilder<DiagnosticId, ImmutableArray<CodeFixProvider>>();
foreach (var diagnosticIdAndFixers in mutableMap)
{
immutableMap.Add(diagnosticIdAndFixers.Key, diagnosticIdAndFixers.Value.AsImmutableOrEmpty());
}
return immutableMap.ToImmutable();
}, isThreadSafe: true);
fixerMap = fixerMap.Add(languageKindAndFixers.Key, lazyMap);
}
return fixerMap;
}
/// <summary>
/// Builds, per language, a lazy holder for that language's single suppression provider.
/// SingleOrDefault throws if more than one provider is registered for a language.
/// NOTE(review): if a language key exists with an empty provider list, SingleOrDefault()
/// yields null and the .Value access inside the lambda would throw - confirm
/// ToPerLanguageMapWithMultipleLanguages never produces empty lists.
/// </summary>
private static ImmutableDictionary<LanguageKind, Lazy<ISuppressionFixProvider>> GetSuppressionProvidersPerLanguageMap(
    Dictionary<LanguageKind, List<Lazy<ISuppressionFixProvider, CodeChangeProviderMetadata>>> suppressionProvidersPerLanguage)
{
    var builder = ImmutableDictionary.CreateBuilder<LanguageKind, Lazy<ISuppressionFixProvider>>();
    foreach (var languageKindAndFixers in suppressionProvidersPerLanguage)
    {
        var providers = languageKindAndFixers.Value;
        builder.Add(languageKindAndFixers.Key, new Lazy<ISuppressionFixProvider>(() => providers.SingleOrDefault().Value));
    }
    return builder.ToImmutable();
}
/// <summary>
/// Builds, per language, a lazy map from fixer to its ordering index as produced by
/// <see cref="ExtensionOrderer"/> (lower index sorts first in GetFixesAsync).
/// </summary>
private static ImmutableDictionary<LanguageKind, Lazy<ImmutableDictionary<CodeFixProvider, int>>> GetFixerPriorityPerLanguageMap(
    Dictionary<LanguageKind, List<Lazy<CodeFixProvider, CodeChangeProviderMetadata>>> fixersPerLanguage)
{
    var languageMap = ImmutableDictionary.CreateBuilder<LanguageKind, Lazy<ImmutableDictionary<CodeFixProvider, int>>>();
    foreach (var languageAndFixers in fixersPerLanguage)
    {
        var lazyFixers = languageAndFixers.Value;
        var lazyMap = new Lazy<ImmutableDictionary<CodeFixProvider, int>>(() =>
        {
            // ExtensionOrderer honors the Before/After ordering attributes on the providers.
            var orderedFixers = ExtensionOrderer.Order(lazyFixers);
            var priorityMap = ImmutableDictionary.CreateBuilder<CodeFixProvider, int>();
            for (var index = 0; index < orderedFixers.Count; index++)
            {
                priorityMap.Add(orderedFixers[index].Value, index);
            }
            return priorityMap.ToImmutable();
        }, isThreadSafe: true);
        languageMap.Add(languageAndFixers.Key, lazyMap);
    }
    return languageMap.ToImmutable();
}
/// <summary>
/// Returns the per-project fixer map, computed once per distinct AnalyzerReferences
/// list and cached in the ConditionalWeakTable.
/// </summary>
private ImmutableDictionary<DiagnosticId, List<CodeFixProvider>> GetProjectFixers(Project project)
{
    // The callback ignores its key argument; the captured project supplies everything needed.
    return _projectFixersMap.GetValue(project.AnalyzerReferences, _ => ComputeProjectFixers(project));
}
/// <summary>
/// Builds the diagnostic-id -> fixers map for the project's analyzer references,
/// loading (and caching) a ProjectCodeFixProvider per reference.
/// </summary>
private ImmutableDictionary<DiagnosticId, List<CodeFixProvider>> ComputeProjectFixers(Project project)
{
var extensionManager = project.Solution.Workspace.Services.GetService<IExtensionManager>();
// Builder is allocated lazily: most projects have no fixer-bearing analyzer references.
ImmutableDictionary<DiagnosticId, List<CodeFixProvider>>.Builder builder = null;
foreach (var reference in project.AnalyzerReferences)
{
var projectCodeFixerProvider = _analyzerReferenceToFixersMap.GetValue(reference, _createProjectCodeFixProvider);
foreach (var fixer in projectCodeFixerProvider.GetFixers(project.Language))
{
var fixableIds = this.GetFixableDiagnosticIds(fixer, extensionManager);
foreach (var id in fixableIds)
{
// Blank/whitespace ids are silently dropped, matching GetFixerPerLanguageMap.
if (string.IsNullOrWhiteSpace(id))
{
continue;
}
builder = builder ?? ImmutableDictionary.CreateBuilder<DiagnosticId, List<CodeFixProvider>>();
var list = builder.GetOrAdd(id, s_createList);
list.Add(fixer);
}
}
}
if (builder == null)
{
return ImmutableDictionary<DiagnosticId, List<CodeFixProvider>>.Empty;
}
return builder.ToImmutable();
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Buffers;
using System.Buffers.Text;
using System.Globalization;
using System.Runtime.CompilerServices;
using Xunit;
namespace System.Text.Primitives.Tests
{
/// <summary>
/// Round-trip formatting tests for the primitive integer formatters: every value is
/// formatted via CustomFormatter.TryFormat and compared against the string produced by
/// the BCL's culture-invariant ToString with the same standard format.
/// </summary>
public class IntegerTests
{
    const int NumberOfRandomSamples = 1000;

    // Both invariant encodings are exercised for every case.
    static readonly SymbolTable[] SymbolTables = new SymbolTable[]
    {
        SymbolTable.InvariantUtf8,
        SymbolTable.InvariantUtf16,
    };

    // Numeric ('N'), decimal ('D') and hex ('x'/'X') formats at assorted precisions;
    // precision 255 (StandardFormat.NoPrecision) means "no precision specified".
    static readonly StandardFormat[] Formats = new StandardFormat[]
    {
        new StandardFormat('N', 0),
        new StandardFormat('N', 1),
        new StandardFormat('N', 10),
        new StandardFormat('N', 30),
        new StandardFormat('N', 255),
        new StandardFormat('D', 0),
        new StandardFormat('D', 1),
        new StandardFormat('D', 10),
        new StandardFormat('D', 30),
        new StandardFormat('D', 255),
        new StandardFormat('x', 0),
        new StandardFormat('x', 1),
        new StandardFormat('x', 10),
        new StandardFormat('x', 30),
        new StandardFormat('x', 255),
        new StandardFormat('X', 0),
        new StandardFormat('X', 1),
        new StandardFormat('X', 10),
        new StandardFormat('X', 30),
        new StandardFormat('X', 255),
    };

    /// <summary>
    /// Boundary values for each integer width: Min/Max, around each sign/width
    /// digit-count transition, -1, 0 and 1.
    /// </summary>
    [Fact]
    public void SpecificIntegerTests()
    {
        foreach (var symbolTable in SymbolTables)
        {
            foreach (var format in Formats)
            {
                Validate<ulong>(0, format, symbolTable);
                Validate<ulong>(1, format, symbolTable);
                Validate<ulong>(999999999999, format, symbolTable);
                Validate<ulong>(1000000000000, format, symbolTable);
                Validate<ulong>(ulong.MaxValue, format, symbolTable);

                Validate<uint>(0, format, symbolTable);
                Validate<uint>(1, format, symbolTable);
                Validate<uint>(999999999, format, symbolTable);
                Validate<uint>(1000000000, format, symbolTable);
                Validate<uint>(uint.MaxValue, format, symbolTable);

                Validate<ushort>(0, format, symbolTable);
                Validate<ushort>(1, format, symbolTable);
                Validate<ushort>(9999, format, symbolTable);
                Validate<ushort>(10000, format, symbolTable);
                Validate<ushort>(ushort.MaxValue, format, symbolTable);

                Validate<byte>(0, format, symbolTable);
                Validate<byte>(1, format, symbolTable);
                Validate<byte>(99, format, symbolTable);
                Validate<byte>(100, format, symbolTable);
                Validate<byte>(byte.MaxValue, format, symbolTable);

                Validate<long>(long.MinValue, format, symbolTable);
                Validate<long>(-1000000000000, format, symbolTable);
                Validate<long>(-999999999999, format, symbolTable);
                Validate<long>(-1, format, symbolTable);
                Validate<long>(0, format, symbolTable);
                Validate<long>(1, format, symbolTable);
                Validate<long>(999999999999, format, symbolTable);
                Validate<long>(1000000000000, format, symbolTable);
                Validate<long>(long.MaxValue, format, symbolTable);

                Validate<int>(int.MinValue, format, symbolTable);
                Validate<int>(-1000000000, format, symbolTable);
                Validate<int>(-999999999, format, symbolTable);
                Validate<int>(-1, format, symbolTable);
                Validate<int>(0, format, symbolTable);
                Validate<int>(1, format, symbolTable);
                Validate<int>(999999999, format, symbolTable);
                Validate<int>(1000000000, format, symbolTable);
                Validate<int>(int.MaxValue, format, symbolTable);

                Validate<short>(short.MinValue, format, symbolTable);
                Validate<short>(-10000, format, symbolTable);
                Validate<short>(-9999, format, symbolTable);
                Validate<short>(-1, format, symbolTable);
                Validate<short>(0, format, symbolTable);
                Validate<short>(1, format, symbolTable);
                Validate<short>(9999, format, symbolTable);
                Validate<short>(10000, format, symbolTable);
                Validate<short>(short.MaxValue, format, symbolTable);

                // BUG FIX: this sequence previously started with sbyte.MaxValue (copy/paste
                // from the last line), so sbyte.MinValue (-128) was never exercised. Every
                // other signed type starts with its MinValue.
                Validate<sbyte>(sbyte.MinValue, format, symbolTable);
                Validate<sbyte>(-100, format, symbolTable);
                Validate<sbyte>(-99, format, symbolTable);
                Validate<sbyte>(-1, format, symbolTable);
                Validate<sbyte>(0, format, symbolTable);
                Validate<sbyte>(1, format, symbolTable);
                Validate<sbyte>(99, format, symbolTable);
                Validate<sbyte>(100, format, symbolTable);
                Validate<sbyte>(sbyte.MaxValue, format, symbolTable);
            }
        }
    }

    /// <summary>
    /// Fuzz-style coverage: random bit patterns for every width, each checked against
    /// the BCL reference output for every symbol table / format combination.
    /// </summary>
    [Fact]
    public void RandomIntegerTests()
    {
        for (var i = 0; i < NumberOfRandomSamples; i++)
        {
            foreach (var symbolTable in SymbolTables)
            {
                foreach (var format in Formats)
                {
                    ValidateRandom<ulong>(format, symbolTable);
                    ValidateRandom<uint>(format, symbolTable);
                    ValidateRandom<ushort>(format, symbolTable);
                    ValidateRandom<byte>(format, symbolTable);
                    ValidateRandom<long>(format, symbolTable);
                    ValidateRandom<int>(format, symbolTable);
                    ValidateRandom<short>(format, symbolTable);
                    ValidateRandom<sbyte>(format, symbolTable);
                }
            }
        }
    }

    // Formats one freshly-drawn random value of T and checks it against the BCL output.
    static void ValidateRandom<T>(StandardFormat format, SymbolTable symbolTable)
    {
        Validate<T>(GetRandom<T>(), format, symbolTable);
    }

    // Convenience overload so signed literals/constants can be passed directly; the bit
    // pattern is preserved via an unchecked cast and reinterpreted by Validate below.
    static void Validate<T>(long value, StandardFormat format, SymbolTable symbolTable)
    {
        Validate<T>(unchecked((ulong)value), format, symbolTable);
    }

    /// <summary>
    /// Core check: reinterprets the low bits of <paramref name="value"/> as T, formats it
    /// with CustomFormatter.TryFormat, and asserts the output equals T.ToString with the
    /// equivalent format string under the invariant culture.
    /// </summary>
    static void Validate<T>(ulong value, StandardFormat format, SymbolTable symbolTable)
    {
        // Precision 255 (NoPrecision) maps to a format string without a precision digit.
        var formatString = format.Precision == 255 ? $"{format.Symbol}" : $"{format.Symbol}{format.Precision}";
        var span = new Span<byte>(new byte[128]);
        string expected;
        int written;
        if (typeof(T) == typeof(ulong))
        {
            expected = value.ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat(value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(uint))
        {
            expected = ((uint)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((uint)value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(ushort))
        {
            expected = ((ushort)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((ushort)value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(byte))
        {
            expected = ((byte)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((byte)value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(long))
        {
            expected = ((long)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((long)value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(int))
        {
            expected = ((int)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((int)value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(short))
        {
            expected = ((short)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((short)value, span, out written, format, symbolTable));
        }
        else if (typeof(T) == typeof(sbyte))
        {
            expected = ((sbyte)value).ToString(formatString, CultureInfo.InvariantCulture);
            Assert.True(CustomFormatter.TryFormat((sbyte)value, span, out written, format, symbolTable));
        }
        else
        {
            throw new NotSupportedException();
        }

        string actual = TestHelper.SpanToString(span.Slice(0, written), symbolTable);
        Assert.Equal(expected, actual);
    }

    // Fixed seed keeps the random suite deterministic across runs.
    static readonly Random Rnd = new Random(234922);

    // Draws Unsafe.SizeOf<T> random bytes and widens the resulting bit pattern to ulong
    // (sign-extended for the signed widths, so all bit patterns of T are reachable).
    static ulong GetRandom<T>()
    {
        int size = Unsafe.SizeOf<T>();
        byte[] data = new byte[size];
        Rnd.NextBytes(data);
        if (typeof(T) == typeof(ulong))
            return BitConverter.ToUInt64(data, 0);
        else if (typeof(T) == typeof(uint))
            return BitConverter.ToUInt32(data, 0);
        else if (typeof(T) == typeof(ushort))
            return BitConverter.ToUInt16(data, 0);
        else if (typeof(T) == typeof(long))
            return (ulong)BitConverter.ToInt64(data, 0);
        else if (typeof(T) == typeof(int))
            return (ulong)BitConverter.ToInt32(data, 0);
        else if (typeof(T) == typeof(short))
            return (ulong)BitConverter.ToInt16(data, 0);
        else if (typeof(T) == typeof(byte) || typeof(T) == typeof(sbyte))
            return data[0];
        else
            throw new NotSupportedException();
    }
}
}
| |
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Xunit;
namespace TestHelper
{
/// <summary>
/// Superclass of all Unit Tests for DiagnosticAnalyzers
/// </summary>
public abstract partial class DiagnosticVerifier
{
#region To be implemented by Test classes
/// <summary>
/// Gets the C# <see cref="DiagnosticAnalyzer"/> under test.
/// Override in a concrete test class; the default returns null (no C# analyzer).
/// </summary>
protected virtual DiagnosticAnalyzer GetCSharpDiagnosticAnalyzer() => null;
/// <summary>
/// Gets the Visual Basic <see cref="DiagnosticAnalyzer"/> under test.
/// (The original comment incorrectly said "C#".)
/// Override in a concrete test class; the default returns null (no VB analyzer).
/// </summary>
protected virtual DiagnosticAnalyzer GetBasicDiagnosticAnalyzer() => null;
#endregion
#region Verifier wrappers
/// <summary>
/// Runs the C# analyzer under test against a single source string and checks the
/// produced diagnostics against the expected results (one DiagnosticResult per
/// expected diagnostic).
/// </summary>
/// <param name="source">A class in the form of a string to run the analyzer on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the source</param>
protected void VerifyCSharpDiagnostic(string source, params DiagnosticResult[] expected)
{
    var sources = new[] { source };
    VerifyDiagnostics(sources, LanguageNames.CSharp, GetCSharpDiagnosticAnalyzer(), expected);
}
/// <summary>
/// Runs the Visual Basic analyzer under test against a single source string and checks
/// the produced diagnostics against the expected results (one DiagnosticResult per
/// expected diagnostic).
/// </summary>
/// <param name="source">A class in the form of a string to run the analyzer on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the source</param>
protected void VerifyBasicDiagnostic(string source, params DiagnosticResult[] expected)
{
    var sources = new[] { source };
    VerifyDiagnostics(sources, LanguageNames.VisualBasic, GetBasicDiagnosticAnalyzer(), expected);
}
/// <summary>
/// Runs the C# analyzer under test against multiple source strings (one document each)
/// and checks the produced diagnostics against the expected results.
/// </summary>
/// <param name="sources">An array of strings to create source documents from to run the analyzers on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the sources</param>
protected void VerifyCSharpDiagnostic(string[] sources, params DiagnosticResult[] expected)
{
    var analyzer = GetCSharpDiagnosticAnalyzer();
    VerifyDiagnostics(sources, LanguageNames.CSharp, analyzer, expected);
}
/// <summary>
/// Runs the Visual Basic analyzer under test against multiple source strings (one
/// document each) and checks the produced diagnostics against the expected results.
/// </summary>
/// <param name="sources">An array of strings to create source documents from to run the analyzers on</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the sources</param>
protected void VerifyBasicDiagnostic(string[] sources, params DiagnosticResult[] expected)
{
    var analyzer = GetBasicDiagnosticAnalyzer();
    VerifyDiagnostics(sources, LanguageNames.VisualBasic, analyzer, expected);
}
/// <summary>
/// Shared driver: compiles the sources for the given language, runs the analyzer to
/// collect its (sorted) diagnostics, then verifies each against the expected results.
/// </summary>
/// <param name="sources">An array of strings to create source documents from to run the analyzers on</param>
/// <param name="language">The language of the classes represented by the source strings</param>
/// <param name="analyzer">The analyzer to be run on the source code</param>
/// <param name="expected">DiagnosticResults that should appear after the analyzer is run on the sources</param>
private void VerifyDiagnostics(string[] sources, string language, DiagnosticAnalyzer analyzer, params DiagnosticResult[] expected)
{
    var actualDiagnostics = GetSortedDiagnostics(sources, language, analyzer);
    VerifyDiagnosticResults(actualDiagnostics, analyzer, expected);
}
#endregion
#region Actual comparisons and verifications
/// <summary>
/// Checks each of the actual Diagnostics found and compares them with the corresponding DiagnosticResult in the array of expected results.
/// Diagnostics are considered equal only if the DiagnosticResultLocation, Id, Severity, and Message of the DiagnosticResult match the actual diagnostic.
/// </summary>
/// <param name="actualResults">The Diagnostics found by the compiler after running the analyzer on the source code</param>
/// <param name="analyzer">The analyzer that was being run on the sources</param>
/// <param name="expectedResults">Diagnostic Results that should have appeared in the code</param>
protected static void VerifyDiagnosticResults(IEnumerable<Diagnostic> actualResults, DiagnosticAnalyzer analyzer, params DiagnosticResult[] expectedResults)
{
    // Materialize exactly once. The incoming sequence may be lazily produced, and the
    // previous implementation re-enumerated it via Count(), Any(), ToArray() and an
    // ElementAt(i) call on every loop iteration.
    Diagnostic[] actualArray = actualResults.ToArray();
    int expectedCount = expectedResults.Length;
    int actualCount = actualArray.Length;
    if (expectedCount != actualCount)
    {
        string diagnosticsOutput = actualArray.Length > 0 ? FormatDiagnostics(analyzer, actualArray) : " NONE.";
        Assert.True(false,
            string.Format("Mismatch between number of diagnostics returned, expected \"{0}\" actual \"{1}\"\r\n\r\nDiagnostics:\r\n{2}\r\n", expectedCount, actualCount, diagnosticsOutput));
    }
    for (int i = 0; i < expectedResults.Length; i++)
    {
        Diagnostic actual = actualArray[i];
        DiagnosticResult expected = expectedResults[i];
        if (expected.Line == -1 && expected.Column == -1)
        {
            // (-1, -1) is the convention for a project-wide diagnostic with no source location.
            if (actual.Location != Location.None)
            {
                Assert.True(false,
                    string.Format("Expected:\nA project diagnostic with No location\nActual:\n{0}",
                    FormatDiagnostics(analyzer, actual)));
            }
        }
        else
        {
            // Verify the primary location first, then each additional location in order.
            VerifyDiagnosticLocation(analyzer, actual, actual.Location, expected.Locations.First());
            var additionalLocations = actual.AdditionalLocations.ToArray();
            if (additionalLocations.Length != expected.Locations.Length - 1)
            {
                Assert.True(false,
                    string.Format("Expected {0} additional locations but got {1} for Diagnostic:\r\n {2}\r\n",
                        expected.Locations.Length - 1, additionalLocations.Length,
                        FormatDiagnostics(analyzer, actual)));
            }
            for (int j = 0; j < additionalLocations.Length; ++j)
            {
                VerifyDiagnosticLocation(analyzer, actual, additionalLocations[j], expected.Locations[j + 1]);
            }
        }
        if (actual.Id != expected.Id)
        {
            Assert.True(false,
                string.Format("Expected diagnostic id to be \"{0}\" was \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                    expected.Id, actual.Id, FormatDiagnostics(analyzer, actual)));
        }
        if (actual.Severity != expected.Severity)
        {
            Assert.True(false,
                string.Format("Expected diagnostic severity to be \"{0}\" was \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                    expected.Severity, actual.Severity, FormatDiagnostics(analyzer, actual)));
        }
        if (actual.GetMessage() != expected.Message)
        {
            Assert.True(false,
                string.Format("Expected diagnostic message to be \"{0}\" was \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                    expected.Message, actual.GetMessage(), FormatDiagnostics(analyzer, actual)));
        }
    }
}
/// <summary>
/// Helper method to VerifyDiagnosticResult that checks the location of a diagnostic and compares it with the location in the expected DiagnosticResult.
/// </summary>
/// <param name="analyzer">The analyzer that was being run on the sources</param>
/// <param name="diagnostic">The diagnostic that was found in the code</param>
/// <param name="actual">The Location of the Diagnostic found in the code</param>
/// <param name="expected">The DiagnosticResultLocation that should have been found</param>
private static void VerifyDiagnosticLocation(DiagnosticAnalyzer analyzer, Diagnostic diagnostic, Location actual, DiagnosticResultLocation expected)
{
    var span = actual.GetLineSpan();
    // "Test0." vs "Test." allows the generated per-document names to match the expected name.
    Assert.True(span.Path == expected.Path || (span.Path != null && span.Path.Contains("Test0.") && expected.Path.Contains("Test.")),
        string.Format("Expected diagnostic to be in file \"{0}\" was actually in file \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
            expected.Path, span.Path, FormatDiagnostics(analyzer, diagnostic)));
    var position = span.StartLinePosition;
    // Line/column comparisons are skipped when the real diagnostic reports 0 (no position);
    // positions are zero-based in Roslyn but one-based in the expected results.
    if (position.Line > 0 && position.Line + 1 != expected.Line)
    {
        Assert.True(false,
            string.Format("Expected diagnostic to be on line \"{0}\" was actually on line \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                expected.Line, position.Line + 1, FormatDiagnostics(analyzer, diagnostic)));
    }
    if (position.Character > 0 && position.Character + 1 != expected.Column)
    {
        Assert.True(false,
            string.Format("Expected diagnostic to start at column \"{0}\" was actually at column \"{1}\"\r\n\r\nDiagnostic:\r\n {2}\r\n",
                expected.Column, position.Character + 1, FormatDiagnostics(analyzer, diagnostic)));
    }
}
#endregion
#region Formatting Diagnostics
/// <summary>
/// Helper method to format a Diagnostic into an easily readable string
/// </summary>
/// <param name="analyzer">The analyzer that this verifier tests</param>
/// <param name="diagnostics">The Diagnostics to be formatted</param>
/// <returns>The Diagnostics formatted as a string</returns>
private static string FormatDiagnostics(DiagnosticAnalyzer analyzer, params Diagnostic[] diagnostics)
{
    var builder = new StringBuilder();
    for (int i = 0; i < diagnostics.Length; ++i)
    {
        builder.AppendLine("// " + diagnostics[i].ToString());
        var analyzerType = analyzer.GetType();
        var rules = analyzer.SupportedDiagnostics;
        foreach (var rule in rules)
        {
            if (rule != null && rule.Id == diagnostics[i].Id)
            {
                var location = diagnostics[i].Location;
                if (location == Location.None)
                {
                    builder.AppendFormat("GetGlobalResult({0}.{1})", analyzerType.Name, rule.Id);
                }
                else
                {
                    // BUGFIX: the format string previously took diagnostics[i] as an argument
                    // but contained no {0} placeholder, so the offending diagnostic was
                    // silently dropped from the assertion message.
                    Assert.True(location.IsInSource,
                        string.Format("Test base does not currently handle diagnostics in metadata locations. Diagnostic in metadata: {0}\r\n", diagnostics[i]));
                    string resultMethodName = diagnostics[i].Location.SourceTree.FilePath.EndsWith(".cs") ? "GetCSharpResultAt" : "GetBasicResultAt";
                    var linePosition = diagnostics[i].Location.GetLineSpan().StartLinePosition;
                    // Emit a ready-to-paste expectation call, converting zero-based positions
                    // to the one-based line/column the helpers expect.
                    builder.AppendFormat("{0}({1}, {2}, {3}.{4})",
                        resultMethodName,
                        linePosition.Line + 1,
                        linePosition.Character + 1,
                        analyzerType.Name,
                        rule.Id);
                }
                // Comma-separate entries so the output forms a valid C# argument list.
                if (i != diagnostics.Length - 1)
                {
                    builder.Append(',');
                }
                builder.AppendLine();
                break;
            }
        }
    }
    return builder.ToString();
}
#endregion
}
}
| |
//
// Copyright (c) 2004-2017 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.UnitTests.Internal.NetworkSenders
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using NLog.Internal.NetworkSenders;
using Xunit;
public class TcpNetworkSenderTests : NLogTestBase
{
[Fact]
// Happy path: all sends succeed. Runs the whole scenario twice, once with the mock
// socket completing callbacks synchronously and once asynchronously on the thread pool.
public void TcpHappyPathTest()
{
foreach (bool async in new[] { false, true })
{
var sender = new MyTcpNetworkSender("tcp://hostname:123", AddressFamily.Unspecified)
{
Async = async,
};
sender.Initialize();
byte[] buffer = Encoding.UTF8.GetBytes("quick brown fox jumps over the lazy dog");
var exceptions = new List<Exception>();
// Send prefixes of length 1, 2 and 4; each callback records its exception (expected null).
for (int i = 1; i < 8; i *= 2)
{
sender.Send(
buffer, 0, i, ex =>
{
lock (exceptions) exceptions.Add(ex);
});
}
var mre = new ManualResetEvent(false);
// Flush so all queued sends have completed before the log is inspected.
sender.FlushAsync(ex =>
{
lock (exceptions)
{
exceptions.Add(ex);
}
mre.Set();
});
mre.WaitOne();
var actual = sender.Log.ToString();
// The mock logs one line per socket operation; verify connect + the three sends happened.
Assert.True(actual.IndexOf("Parse endpoint address tcp://hostname:123/ Unspecified") != -1);
Assert.True(actual.IndexOf("create socket 10000 Stream Tcp") != -1);
Assert.True(actual.IndexOf("connect async to {mock end point: tcp://hostname:123/}") != -1);
Assert.True(actual.IndexOf("send async 0 1 'q'") != -1);
Assert.True(actual.IndexOf("send async 0 2 'qu'") != -1);
Assert.True(actual.IndexOf("send async 0 4 'quic'") != -1);
// Second round of sends on the already-open connection, then Close (the event is reused).
mre.Reset();
for (int i = 1; i < 8; i *= 2)
{
sender.Send(
buffer, 0, i, ex =>
{
lock (exceptions) exceptions.Add(ex);
});
}
sender.Close(ex =>
{
lock (exceptions)
{
exceptions.Add(ex);
}
mre.Set();
});
mre.WaitOne();
actual = sender.Log.ToString();
// Same prefix as before plus the second batch of sends and the close.
Assert.True(actual.IndexOf("Parse endpoint address tcp://hostname:123/ Unspecified") != -1);
Assert.True(actual.IndexOf("create socket 10000 Stream Tcp") != -1);
Assert.True(actual.IndexOf("connect async to {mock end point: tcp://hostname:123/}") != -1);
Assert.True(actual.IndexOf("send async 0 1 'q'") != -1);
Assert.True(actual.IndexOf("send async 0 2 'qu'") != -1);
Assert.True(actual.IndexOf("send async 0 4 'quic'") != -1);
Assert.True(actual.IndexOf("send async 0 1 'q'") != -1);
Assert.True(actual.IndexOf("send async 0 2 'qu'") != -1);
Assert.True(actual.IndexOf("send async 0 4 'quic'") != -1);
Assert.True(actual.IndexOf("close") != -1);
// No callback should have observed an error on the happy path.
foreach (var ex in exceptions)
{
Assert.Null(ex);
}
}
}
[Fact]
public void TcpProxyTest()
{
    // The real (non-mocked) sender must hand out sockets wrapped in SocketProxy.
    var tcpSender = new TcpNetworkSender("tcp://foo:1234", AddressFamily.Unspecified);
    var createdSocket = tcpSender.CreateSocket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
    Assert.IsType<SocketProxy>(createdSocket);
}
[Fact]
// When the connect fails, every queued send's callback must receive a non-null exception.
public void TcpConnectFailureTest()
{
var sender = new MyTcpNetworkSender("tcp://hostname:123", AddressFamily.Unspecified)
{
ConnectFailure = 1, // first (and only) ConnectAsync call will fail
Async = true,
};
sender.Initialize();
byte[] buffer = Encoding.UTF8.GetBytes("quick brown fox jumps over the lazy dog");
var exceptions = new List<Exception>();
var allSent = new ManualResetEvent(false);
// Queue 7 sends; the last completing callback signals allSent.
for (int i = 1; i < 8; i++)
{
sender.Send(
buffer, 0, i, ex =>
{
lock (exceptions)
{
exceptions.Add(ex);
if (exceptions.Count == 7)
{
allSent.Set();
}
}
});
}
// Callbacks run on the thread pool (Async = true); bound the wait to avoid a hung test.
Assert.True(allSent.WaitOne(3000, false));
var mre = new ManualResetEvent(false);
sender.FlushAsync(ex => mre.Set());
mre.WaitOne(3000, false);
var actual = sender.Log.ToString();
Assert.True(actual.IndexOf("Parse endpoint address tcp://hostname:123/ Unspecified") != -1);
Assert.True(actual.IndexOf("create socket 10000 Stream Tcp") != -1);
Assert.True(actual.IndexOf("connect async to {mock end point: tcp://hostname:123/}") != -1);
Assert.True(actual.IndexOf("failed") != -1);
// Every send must have been reported as failed because the connection never opened.
foreach (var ex in exceptions)
{
Assert.NotNull(ex);
}
}
[Fact]
// When a send fails mid-stream, sends completed before the failure succeed and all
// later ones (including the failing one) report an exception.
public void TcpSendFailureTest()
{
var sender = new MyTcpNetworkSender("tcp://hostname:123", AddressFamily.Unspecified)
{
SendFailureIn = 3, // will cause failure on 3rd send
Async = true,
};
sender.Initialize();
byte[] buffer = Encoding.UTF8.GetBytes("quick brown fox jumps over the lazy dog");
var exceptions = new Exception[9];
var writeFinished = new ManualResetEvent(false);
int remaining = exceptions.Length;
// Queue 9 sends; each records its result at a fixed slot so order is preserved.
for (int i = 1; i < 10; i++)
{
int pos = i - 1;
sender.Send(
buffer, 0, i, ex =>
{
lock (exceptions)
{
exceptions[pos] = ex;
if (--remaining == 0)
{
writeFinished.Set();
}
}
});
}
var mre = new ManualResetEvent(false);
writeFinished.WaitOne();
sender.Close(ex => mre.Set());
mre.WaitOne();
var actual = sender.Log.ToString();
Assert.True(actual.IndexOf("Parse endpoint address tcp://hostname:123/ Unspecified") != -1);
Assert.True(actual.IndexOf("create socket 10000 Stream Tcp") != -1);
Assert.True(actual.IndexOf("connect async to {mock end point: tcp://hostname:123/}") != -1);
Assert.True(actual.IndexOf("send async 0 1 'q'") != -1);
Assert.True(actual.IndexOf("send async 0 2 'qu'") != -1);
Assert.True(actual.IndexOf("send async 0 3 'qui'") != -1);
Assert.True(actual.IndexOf("failed") != -1);
Assert.True(actual.IndexOf("close") != -1);
// First two sends (before the injected failure) succeed; the rest must carry an error.
for (int i = 0; i < exceptions.Length; ++i)
{
if (i < 2)
{
Assert.Null(exceptions[i]);
}
else
{
Assert.NotNull(exceptions[i]);
}
}
}
/// <summary>
/// Test double for <see cref="TcpNetworkSender"/> that records every socket operation
/// into <see cref="Log"/> and lets tests inject connect/send failures via the mock socket.
/// </summary>
internal class MyTcpNetworkSender : TcpNetworkSender
{
    public MyTcpNetworkSender(string url, AddressFamily addressFamily)
        : base(url, addressFamily)
    {
        Log = new StringWriter();
    }

    // One text line is appended per socket operation; the tests scan this log.
    public StringWriter Log { get; set; }

    // Number of ConnectAsync calls that should be made to fail.
    public int ConnectFailure { get; set; }

    // When true, mock socket callbacks complete asynchronously on the thread pool.
    public bool Async { get; set; }

    // Countdown to the send that should fail (that send and all later ones fail).
    public int SendFailureIn { get; set; }

    protected internal override ISocket CreateSocket(AddressFamily addressFamily, SocketType socketType, ProtocolType protocolType)
        => new MockSocket(addressFamily, socketType, protocolType, this);

    protected override EndPoint ParseEndpointAddress(Uri uri, AddressFamily addressFamily)
    {
        Log.WriteLine("Parse endpoint address {0} {1}", uri, addressFamily);
        return new MockEndPoint(uri);
    }
}
// In-memory ISocket implementation that logs each operation to the owning sender's
// StringWriter and can simulate connect/send failures driven by the sender's
// ConnectFailure / SendFailureIn counters.
internal class MockSocket : ISocket
{
private readonly MyTcpNetworkSender sender;
private readonly StringWriter log;
// Once a failure is injected, the socket stays faulted for all later sends.
private bool faulted = false;
public MockSocket(AddressFamily addressFamily, SocketType socketType, ProtocolType protocolType, MyTcpNetworkSender sender)
{
this.sender = sender;
log = sender.Log;
log.WriteLine("create socket {0} {1} {2}", addressFamily, socketType, protocolType);
}
public bool ConnectAsync(SocketAsyncEventArgs args)
{
log.WriteLine("connect async to {0}", args.RemoteEndPoint);
// NOTE(review): lock(this) is generally discouraged; kept as-is since the lock
// object is private to this test double.
lock (this)
{
if (sender.ConnectFailure > 0)
{
sender.ConnectFailure--;
faulted = true;
args.SocketError = SocketError.SocketError;
log.WriteLine("failed");
}
}
return InvokeCallback(args);
}
// Mirrors the Socket.XxxAsync contract: returns true if completion will be raised
// asynchronously (Completed event fires later), false if it completed synchronously.
private bool InvokeCallback(SocketAsyncEventArgs args)
{
lock (this)
{
var args2 = args as TcpNetworkSender.MySocketAsyncEventArgs;
if (sender.Async)
{
// Small delay so completion genuinely races with the caller, as it would
// with a real socket.
ThreadPool.QueueUserWorkItem(s =>
{
Thread.Sleep(10);
args2.RaiseCompleted();
});
return true;
}
else
{
return false;
}
}
}
public void Close()
{
lock (this)
{
log.WriteLine("close");
}
}
public bool SendAsync(SocketAsyncEventArgs args)
{
lock (this)
{
log.WriteLine("send async {0} {1} '{2}'", args.Offset, args.Count, Encoding.UTF8.GetString(args.Buffer, args.Offset, args.Count));
// Count down to the injected failure; the send that reaches zero faults the
// socket, and every send from then on reports an error.
if (sender.SendFailureIn > 0)
{
sender.SendFailureIn--;
if (sender.SendFailureIn == 0)
{
faulted = true;
}
}
if (faulted)
{
log.WriteLine("failed");
args.SocketError = SocketError.SocketError;
}
}
return InvokeCallback(args);
}
public bool SendToAsync(SocketAsyncEventArgs args)
{
// NOTE(review): unlike SendAsync, the callback here is invoked while still holding
// the lock - presumably harmless for these tests, but worth confirming.
lock (this)
{
log.WriteLine("sendto async {0} {1} '{2}' {3}", args.Offset, args.Count, Encoding.UTF8.GetString(args.Buffer, args.Offset, args.Count), args.RemoteEndPoint);
return InvokeCallback(args);
}
}
}
/// <summary>
/// Minimal <see cref="EndPoint"/> stand-in that only remembers the original URI.
/// Its AddressFamily is a deliberately bogus value (10000) so the mock's log lines
/// are easy to recognize in assertions.
/// </summary>
internal class MockEndPoint : EndPoint
{
    private readonly Uri uri;

    public MockEndPoint(Uri uri)
    {
        this.uri = uri;
    }

    // Outside every real AddressFamily value on purpose.
    public override AddressFamily AddressFamily => (AddressFamily)10000;

    public override string ToString() => $"{{mock end point: {uri}}}";
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
// Auto-generated scenario driver (see the file header): runs every test scenario for
// Sse3.HorizontalAdd(Vector128<Single>, Vector128<Single>) when the intrinsic is
// supported, otherwise verifies PlatformNotSupportedException is thrown. Edits should
// normally be made in the generating template, not here.
public static partial class Program
{
private static void HorizontalAddSingle()
{
var test = new HorizontalBinaryOpTest__HorizontalAddSingle();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Sse.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Sse.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (Sse.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Sse.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (Sse.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (Sse.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (Sse.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (Sse.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
// Any scenario that observed a wrong result flips Succeeded to false.
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class HorizontalBinaryOpTest__HorizontalAddSingle
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector128<Single> _fld1;
public Vector128<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
return testStruct;
}
public void RunStructFldScenario(HorizontalBinaryOpTest__HorizontalAddSingle testClass)
{
var result = Sse3.HorizontalAdd(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(HorizontalBinaryOpTest__HorizontalAddSingle testClass)
{
fixed (Vector128<Single>* pFld1 = &_fld1)
fixed (Vector128<Single>* pFld2 = &_fld2)
{
var result = Sse3.HorizontalAdd(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 16;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector128<Single> _clsVar1;
private static Vector128<Single> _clsVar2;
private Vector128<Single> _fld1;
private Vector128<Single> _fld2;
private DataTable _dataTable;
static HorizontalBinaryOpTest__HorizontalAddSingle()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
}
public HorizontalBinaryOpTest__HorizontalAddSingle()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Sse3.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Sse3.HorizontalAdd(
Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Sse3.HorizontalAdd(
Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Sse3.HorizontalAdd(
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Sse3).GetMethod(nameof(Sse3.HorizontalAdd), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Sse3).GetMethod(nameof(Sse3.HorizontalAdd), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Sse3).GetMethod(nameof(Sse3.HorizontalAdd), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
.Invoke(null, new object[] {
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Sse3.HorizontalAdd(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector128<Single>* pClsVar1 = &_clsVar1)
fixed (Vector128<Single>* pClsVar2 = &_clsVar2)
{
var result = Sse3.HorizontalAdd(
Sse.LoadVector128((Single*)(pClsVar1)),
Sse.LoadVector128((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr);
var result = Sse3.HorizontalAdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr));
var op2 = Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr));
var result = Sse3.HorizontalAdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr));
var op2 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr));
var result = Sse3.HorizontalAdd(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new HorizontalBinaryOpTest__HorizontalAddSingle();
var result = Sse3.HorizontalAdd(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new HorizontalBinaryOpTest__HorizontalAddSingle();
fixed (Vector128<Single>* pFld1 = &test._fld1)
fixed (Vector128<Single>* pFld2 = &test._fld2)
{
var result = Sse3.HorizontalAdd(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Sse3.HorizontalAdd(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector128<Single>* pFld1 = &_fld1)
fixed (Vector128<Single>* pFld2 = &_fld2)
{
var result = Sse3.HorizontalAdd(
Sse.LoadVector128((Single*)(pFld1)),
Sse.LoadVector128((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Sse3.HorizontalAdd(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Sse3.HorizontalAdd(
Sse.LoadVector128((Single*)(&test._fld1)),
Sse.LoadVector128((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector128<Single> op1, Vector128<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
    // Copy the unmanaged operand and result buffers into managed arrays
    // before handing them to the element-wise validator.
    var leftValues = new Single[Op1ElementCount];
    var rightValues = new Single[Op2ElementCount];
    var resultValues = new Single[RetElementCount];

    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref leftValues[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Single>>());
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref rightValues[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Single>>());
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref resultValues[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());

    ValidateResult(leftValues, rightValues, resultValues, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
    // Compare raw bit patterns so NaN payloads and signed zeros must match exactly.
    bool SameBits(Single expected, Single actual) =>
        BitConverter.SingleToInt32Bits(actual) == BitConverter.SingleToInt32Bits(expected);

    bool succeeded = true;

    for (var block = 0; block < (LargestVectorSize / 16); block++)
    {
        for (var pair = 0; pair < (8 / sizeof(Single)); pair++)
        {
            var lo = (block * (16 / sizeof(Single))) + pair;        // result slot fed from 'left'
            var hi = lo + (8 / sizeof(Single));                     // result slot fed from 'right'
            var src = (block * (16 / sizeof(Single))) + (pair * 2); // adjacent source pair

            // HADDPS: each result element is the sum of one adjacent source pair.
            if (!SameBits(left[src] + left[src + 1], result[lo]) ||
                !SameBits(right[src] + right[src + 1], result[hi]))
            {
                succeeded = false;
                break;
            }
        }
    }

    if (!succeeded)
    {
        TestLibrary.TestFramework.LogInformation($"{nameof(Sse3)}.{nameof(Sse3.HorizontalAdd)}<Single>(Vector128<Single>, Vector128<Single>): {method} failed:");
        TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
        TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
        TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
        TestLibrary.TestFramework.LogInformation(string.Empty);
        Succeeded = false;
    }
}
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) 2014 Microsoft Corporation
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// ------------------------------------------------------------------------------
namespace Microsoft.Live.Serialization
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
/// <summary>
/// Responsible for deserializing from JSON.
/// </summary>
internal sealed class JsonReader : IDisposable
{
/// <summary>
/// The max depth a JSON object originating from the user can be.
/// </summary>
private const int MaxUserJsonDepth = 10;
/// <summary>
/// The reader containing the serialized JSON.
/// </summary>
private readonly TextReader reader;
/// <summary>
/// The maximum depth the current object is allowed to be.
/// 0 indicates no restriction.
/// </summary>
private readonly int maxObjectDepth;
/// <summary>
/// The current depth of the object being parsed.
/// </summary>
private int currentDepth;
/// <summary>
/// Whether this instance has been disposed.
/// </summary>
private bool isDisposed;
/// <summary>
/// Initializes a new instance of the JsonReader class.
/// The input is treated as trusted, so no nesting-depth limit applies.
/// </summary>
/// <param name="text">The text to pull the JSON from.</param>
public JsonReader(string text)
: this(text, true)
{
}
/// <summary>
/// Initializes a new instance of the JsonReader class.
/// </summary>
/// <param name="text">The text to pull the JSON from.</param>
/// <param name="trusted">Whether the JSON was received from a trusted source.
/// Untrusted input is limited to MaxUserJsonDepth levels of nesting.</param>
public JsonReader(string text, bool trusted)
{
this.reader = new StringReader(text);
// A limit of 0 means "no restriction"; see the depth check in ReadValue.
this.maxObjectDepth = trusted ? 0 : JsonReader.MaxUserJsonDepth;
}
/// <summary>
/// Disposes of resources used by the instance.
/// </summary>
public void Dispose()
{
if (!this.isDisposed)
{
if (this.reader != null)
{
this.reader.Dispose();
}
this.isDisposed = true;
}
}
/// <summary>
/// Returns an object containing the data in the stream.
/// </summary>
/// <returns>An object type depending on the type of the data.</returns>
public object ReadValue()
{
if (this.isDisposed)
{
throw new ObjectDisposedException("Object was disposed.");
}
// Track nesting depth: ReadArray/ReadObject recurse back into this method.
this.currentDepth++;
if (this.maxObjectDepth != 0 && this.currentDepth > this.maxObjectDepth)
{
throw new FormatException("JSON object is too deep.");
}
object value = null;
bool allowNull = false;
// Dispatch on the first significant character of the value.
char ch = this.PeekNextSignificantCharacter();
if (ch == '[')
{
value = this.ReadArray();
}
else if (ch == '{')
{
value = this.ReadObject();
}
else if (ch == '\'' || ch == '"')
{
value = this.ReadString();
}
else if (Char.IsDigit(ch) || ch == '-' || ch == '.')
{
value = this.ReadNumber();
}
else if (ch == 't' || ch == 'f')
{
value = this.ReadBoolean();
}
else if (ch == 'n')
{
// "null" is the only literal that may legitimately yield a null value.
this.ReadNull();
allowNull = true;
}
if (value == null && !allowNull)
{
throw new FormatException("Invalid JSON text.");
}
this.currentDepth--;
return value;
}
/// <summary>
/// Consumes characters until a non-whitespace character is read, and returns it.
/// NOTE(review): appears to be unused within this class.
/// </summary>
private char GetNextSignificantCharacter()
{
char ch;
do
{
ch = this.ReadCharFromReader();
}
while (ch != '\0' && Char.IsWhiteSpace(ch));
return ch;
}
/// <summary>
/// Reads exactly <paramref name="count"/> characters, or returns null if a
/// '\0' character is encountered first.
/// </summary>
private string GetCharacters(int count)
{
string s = String.Empty;
for (int i = 0; i < count; i++)
{
char ch = this.ReadCharFromReader();
if (ch == '\0')
{
return null;
}
s += ch;
}
return s;
}
/// <summary>
/// Skips whitespace and returns (without consuming) the next significant character.
/// </summary>
private char PeekNextSignificantCharacter()
{
char ch = this.PeekCharFromReader();
while (ch != '\0' && Char.IsWhiteSpace(ch))
{
this.ReadCharFromReader();
ch = this.PeekCharFromReader();
}
return ch;
}
/// <summary>
/// Reads a JSON array literal. Assumes the next significant character is '['.
/// </summary>
private IList ReadArray()
{
IList array = new List<object>();
// Consume the '['
this.ReadCharFromReader();
while (true)
{
char ch = this.PeekNextSignificantCharacter();
if (ch == '\0')
{
// NOTE(review): defensive only — PeekNextSignificantCharacter throws
// FormatException at end of input before '\0' can be observed.
throw new FormatException("Unterminated array literal.");
}
if (ch == ']')
{
this.ReadCharFromReader();
return array;
}
// Every element after the first must be preceded by a comma.
if (array.Count != 0)
{
if (ch != ',')
{
throw new FormatException("Invalid array literal.");
}
else
{
this.ReadCharFromReader();
}
}
object item = this.ReadValue();
array.Add(item);
}
}
/// <summary>
/// Reads a "true" or "false" literal.
/// </summary>
private bool ReadBoolean()
{
string s = this.ReadName(/* allowQuotes */ false);
if (s != null)
{
if (s.Equals("true", StringComparison.Ordinal))
{
return true;
}
else if (s.Equals("false", StringComparison.Ordinal))
{
return false;
}
}
throw new FormatException("Invalid boolean literal.");
}
/// <summary>
/// Peeks at the next character without consuming it.
/// Throws FormatException when the end of the input has been reached.
/// </summary>
private char PeekCharFromReader()
{
int val = this.reader.Peek();
if (val == -1)
{
throw new FormatException("Unexpected end of string.");
}
return (char)val;
}
/// <summary>
/// Consumes and returns the next character.
/// Throws FormatException when the end of the input has been reached.
/// </summary>
private char ReadCharFromReader()
{
int val = this.reader.Read();
if (val == -1)
{
throw new FormatException("Unexpected end of string.");
}
return (char)val;
}
/// <summary>
/// Reads an identifier-like name (letters, digits, '_'), or a quoted string
/// when <paramref name="allowQuotes"/> is true. Returns null when a quote
/// character is found but quotes are not allowed.
/// </summary>
private string ReadName(bool allowQuotes)
{
char ch = this.PeekNextSignificantCharacter();
if (ch == '"' || ch == '\'')
{
if (allowQuotes)
{
return this.ReadString();
}
}
else
{
var sb = new StringBuilder();
while (true)
{
ch = this.PeekCharFromReader();
if (ch == '_' || Char.IsLetterOrDigit(ch))
{
this.ReadCharFromReader();
sb.Append(ch);
}
else
{
return sb.ToString();
}
}
}
return null;
}
/// <summary>
/// Reads and validates a "null" literal.
/// </summary>
private void ReadNull()
{
string s = this.ReadName(/* allowQuotes */ false);
if ((s == null) || !s.Equals("null", StringComparison.Ordinal))
{
throw new FormatException("Invalid null literal.");
}
}
/// <summary>
/// Reads a numeric literal. Returns an Int32 when the value fits, an Int64
/// for larger integers, or a Single when the literal contains '.', an
/// exponent, or a sign character after the first position.
/// </summary>
private object ReadNumber()
{
char ch = this.ReadCharFromReader();
bool isFloat = (ch == '.');
var sb = new StringBuilder();
sb.Append(ch);
while (true)
{
// NOTE(review): peeking the next *significant* character skips whitespace,
// so whitespace embedded in a numeric literal (e.g. "12 3") is silently
// swallowed and the digits concatenated — confirm this is intended.
ch = this.PeekNextSignificantCharacter();
if (Char.IsDigit(ch))
{
// Do nothing
}
else if (ch == '.' || ch == '+' || ch == '-' || char.ToLowerInvariant(ch) == 'e')
{
// These characters are only allowed in floats (keep in mind the very first
// character will not come through here)
isFloat = true;
}
else
{
break;
}
this.ReadCharFromReader();
sb.Append(ch);
}
string s = sb.ToString();
if (isFloat)
{
float value;
if (Single.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out value))
{
return value;
}
}
else
{
int value;
if (Int32.TryParse(s, NumberStyles.Integer, CultureInfo.InvariantCulture, out value))
{
return value;
}
long val;
if (Int64.TryParse(s, NumberStyles.Integer, CultureInfo.InvariantCulture, out val))
{
return val;
}
}
throw new FormatException("Invalid numeric literal.");
}
/// <summary>
/// Reads a JSON object literal into a name/value dictionary.
/// Assumes the next significant character is '{'.
/// </summary>
private IDictionary<string, object> ReadObject()
{
IDictionary<string, object> record = new DynamicDictionary();
// Consume the '{'
this.ReadCharFromReader();
while (true)
{
char ch = this.PeekNextSignificantCharacter();
if (ch == '\0')
{
// NOTE(review): defensive only — PeekNextSignificantCharacter throws
// FormatException at end of input before '\0' can be observed.
throw new FormatException("Unterminated object literal.");
}
if (ch == '}')
{
this.ReadCharFromReader();
return record;
}
// Every name/value pair after the first must be preceded by a comma.
if (record.Count != 0)
{
if (ch != ',')
{
throw new FormatException("Invalid object literal.");
}
else
{
this.ReadCharFromReader();
}
}
string name = this.ReadName(/* allowQuotes */ true);
ch = this.PeekNextSignificantCharacter();
if (ch != ':')
{
throw new FormatException("Unexpected name/value pair syntax in object literal.");
}
else
{
this.ReadCharFromReader();
}
object item = this.ReadValue();
// A duplicate name silently overwrites the earlier value.
record[name] = item;
}
}
/// <summary>
/// Reads a string literal delimited by single or double quotes, processing
/// backslash escapes including \uXXXX. Unrecognized escapes (e.g. \" \\ \/)
/// fall through the switch and append the escaped character itself.
/// </summary>
private string ReadString()
{
var sb = new StringBuilder();
// The same character that opened the string must close it.
char endQuoteCharacter = this.ReadCharFromReader();
bool inEscape = false;
while (true)
{
char ch = this.ReadCharFromReader();
if (ch == '\0')
{
// NOTE(review): defensive only — ReadCharFromReader throws
// FormatException at end of input before '\0' can be observed.
throw new FormatException("Unterminated string literal.");
}
if (inEscape)
{
switch (ch)
{
case 'u':
// \uXXXX: the next four characters are a hex-encoded UTF-16 code unit.
string unicodeSequence = this.GetCharacters(4);
if (unicodeSequence == null)
{
throw new FormatException("Unterminated string literal.");
}
ch = (char)Int32.Parse(unicodeSequence, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
break;
case 'b':
ch = '\b';
break;
case 't':
ch = '\t';
break;
case 'n':
ch = '\n';
break;
case 'f':
ch = '\f';
break;
case 'r':
ch = '\r';
break;
}
sb.Append(ch);
inEscape = false;
continue;
}
if (ch == '\\')
{
inEscape = true;
continue;
}
if (ch == endQuoteCharacter)
{
return sb.ToString();
}
sb.Append(ch);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Text;
using System.Net.Mail;
using System.Globalization;
using System.Collections.Generic;
using System.Diagnostics;
namespace System.Net.Mime
{
internal static class MailBnfHelper
{
    // Character-class lookup tables, indexed by ASCII code (0..127).
    // characters allowed in atoms
    internal static readonly bool[] Atext = CreateCharactersAllowedInAtoms();
    // characters allowed in quoted strings (not including Unicode)
    internal static readonly bool[] Qtext = CreateCharactersAllowedInQuotedStrings();
    // characters allowed in domain literals
    internal static readonly bool[] Dtext = CreateCharactersAllowedInDomainLiterals();
    // characters allowed in header names
    internal static readonly bool[] Ftext = CreateCharactersAllowedInHeaderNames();
    // characters allowed in tokens
    internal static readonly bool[] Ttext = CreateCharactersAllowedInTokens();
    // characters allowed inside of comments
    internal static readonly bool[] Ctext = CreateCharactersAllowedInComments();

    internal static readonly int Ascii7bitMaxValue = 127;
    internal static readonly char Quote = '\"';
    internal static readonly char Space = ' ';
    internal static readonly char Tab = '\t';
    internal static readonly char CR = '\r';
    internal static readonly char LF = '\n';
    internal static readonly char StartComment = '(';
    internal static readonly char EndComment = ')';
    internal static readonly char Backslash = '\\';
    internal static readonly char At = '@';
    internal static readonly char EndAngleBracket = '>';
    internal static readonly char StartAngleBracket = '<';
    internal static readonly char StartSquareBracket = '[';
    internal static readonly char EndSquareBracket = ']';
    internal static readonly char Comma = ',';
    internal static readonly char Dot = '.';

    internal static readonly IList<char> Whitespace = CreateAllowedWhitespace();

    private static List<char> CreateAllowedWhitespace()
    {
        // all allowed whitespace characters
        var whitespace = new List<char>(4);
        whitespace.Add(Tab);
        whitespace.Add(Space);
        whitespace.Add(CR);
        whitespace.Add(LF);
        return whitespace;
    }

    // NOTE: See RFC 2822 for more detail. By default, every value in the array is false and only
    // those values which are allowed in that particular set are then set to true. The numbers
    // annotating each definition below are the range of ASCII values which are allowed in that definition.
    private static bool[] CreateCharactersAllowedInAtoms()
    {
        // atext = ALPHA / DIGIT / "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "/" / "=" / "?" / "^" / "_" / "`" / "{" / "|" / "}" / "~"
        var atext = new bool[128];
        for (int i = '0'; i <= '9'; i++) { atext[i] = true; }
        for (int i = 'A'; i <= 'Z'; i++) { atext[i] = true; }
        for (int i = 'a'; i <= 'z'; i++) { atext[i] = true; }
        atext['!'] = true;
        atext['#'] = true;
        atext['$'] = true;
        atext['%'] = true;
        atext['&'] = true;
        atext['\''] = true;
        atext['*'] = true;
        atext['+'] = true;
        atext['-'] = true;
        atext['/'] = true;
        atext['='] = true;
        atext['?'] = true;
        atext['^'] = true;
        atext['_'] = true;
        atext['`'] = true;
        atext['{'] = true;
        atext['|'] = true;
        atext['}'] = true;
        atext['~'] = true;
        return atext;
    }

    private static bool[] CreateCharactersAllowedInQuotedStrings()
    {
        // fqtext = %d1-9 / %d11 / %d12 / %d14-33 / %d35-91 / %d93-127
        var qtext = new bool[128];
        for (int i = 1; i <= 9; i++) { qtext[i] = true; }
        qtext[11] = true;
        qtext[12] = true;
        for (int i = 14; i <= 33; i++) { qtext[i] = true; }
        for (int i = 35; i <= 91; i++) { qtext[i] = true; }
        for (int i = 93; i <= 127; i++) { qtext[i] = true; }
        return qtext;
    }

    private static bool[] CreateCharactersAllowedInDomainLiterals()
    {
        // fdtext = %d1-8 / %d11 / %d12 / %d14-31 / %d33-90 / %d94-127
        var dtext = new bool[128];
        for (int i = 1; i <= 8; i++) { dtext[i] = true; }
        dtext[11] = true;
        dtext[12] = true;
        for (int i = 14; i <= 31; i++) { dtext[i] = true; }
        for (int i = 33; i <= 90; i++) { dtext[i] = true; }
        for (int i = 94; i <= 127; i++) { dtext[i] = true; }
        return dtext;
    }

    private static bool[] CreateCharactersAllowedInHeaderNames()
    {
        // ftext = %d33-57 / %d59-126
        var ftext = new bool[128];
        for (int i = 33; i <= 57; i++) { ftext[i] = true; }
        for (int i = 59; i <= 126; i++) { ftext[i] = true; }
        return ftext;
    }

    private static bool[] CreateCharactersAllowedInTokens()
    {
        // ttext = %d33-126 except '()<>@,;:\"/[]?='
        var ttext = new bool[128];
        for (int i = 33; i <= 126; i++) { ttext[i] = true; }
        ttext['('] = false;
        ttext[')'] = false;
        ttext['<'] = false;
        ttext['>'] = false;
        ttext['@'] = false;
        ttext[','] = false;
        ttext[';'] = false;
        ttext[':'] = false;
        ttext['\\'] = false;
        ttext['"'] = false;
        ttext['/'] = false;
        ttext['['] = false;
        ttext[']'] = false;
        ttext['?'] = false;
        ttext['='] = false;
        return ttext;
    }

    private static bool[] CreateCharactersAllowedInComments()
    {
        // ctext- %d1-8 / %d11 / %d12 / %d14-31 / %33-39 / %42-91 / %93-127
        var ctext = new bool[128];
        for (int i = 1; i <= 8; i++) { ctext[i] = true; }
        ctext[11] = true;
        ctext[12] = true;
        for (int i = 14; i <= 31; i++) { ctext[i] = true; }
        for (int i = 33; i <= 39; i++) { ctext[i] = true; }
        for (int i = 42; i <= 91; i++) { ctext[i] = true; }
        for (int i = 93; i <= 127; i++) { ctext[i] = true; }
        return ctext;
    }

    /// <summary>
    /// Advances <paramref name="offset"/> past RFC 2822 CFWS: folding whitespace
    /// (space/tab) and (possibly nested) parenthesized comments.
    /// </summary>
    /// <returns>true when a significant character was found; false at end of string.</returns>
    internal static bool SkipCFWS(string data, ref int offset)
    {
        int comments = 0;
        for (; offset < data.Length; offset++)
        {
            if (data[offset] > 127)
                throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, data[offset]));
            else if (data[offset] == '\\' && comments > 0)
                // NOTE(review): combined with the loop increment this skips the
                // backslash plus two more characters; an RFC 2822 quoted-pair is
                // backslash + one character — confirm the extra skip is intended.
                offset += 2;
            else if (data[offset] == '(')
                comments++;
            else if (data[offset] == ')')
                comments--;
            else if (data[offset] != ' ' && data[offset] != '\t' && comments == 0)
                return true;

            // More ')' than '(' seen so far: malformed comment.
            if (comments < 0)
            {
                throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, data[offset]));
            }
        }
        //returns false if end of string
        return false;
    }

    /// <summary>
    /// Throws FormatException when <paramref name="data"/> is empty or contains
    /// any character that is not valid in a header field name (ftext).
    /// </summary>
    internal static void ValidateHeaderName(string data)
    {
        int offset = 0;
        for (; offset < data.Length; offset++)
        {
            // FIX: was 'data[offset] > Ftext.Length', which let a character whose
            // code equals Ftext.Length (128) reach the indexer and raise
            // IndexOutOfRangeException instead of the intended FormatException.
            if (data[offset] >= Ftext.Length || !Ftext[data[offset]])
                throw new FormatException(SR.InvalidHeaderName);
        }
        if (offset == 0)
            throw new FormatException(SR.InvalidHeaderName);
    }

    /// <summary>
    /// Reads a quoted string starting at <paramref name="offset"/> (which must point
    /// at the opening quote). See the main overload for semantics.
    /// </summary>
    internal static string ReadQuotedString(string data, ref int offset, StringBuilder builder)
    {
        return ReadQuotedString(data, ref offset, builder, false, false);
    }

    /// <summary>
    /// Reads a (possibly escaped) quoted string. When <paramref name="builder"/> is
    /// supplied the content is appended to it and null is returned; otherwise the
    /// content is returned directly. <paramref name="offset"/> is left just past the
    /// closing quote (or at end of string when quotes are not required).
    /// </summary>
    internal static string ReadQuotedString(string data, ref int offset, StringBuilder builder, bool doesntRequireQuotes, bool permitUnicodeInDisplayName)
    {
        // assume first char is the opening quote
        if (!doesntRequireQuotes)
        {
            ++offset;
        }
        int start = offset;
        StringBuilder localBuilder = (builder != null ? builder : new StringBuilder());
        for (; offset < data.Length; offset++)
        {
            if (data[offset] == '\\')
            {
                // Flush up to the backslash and restart after it, dropping the escape.
                localBuilder.Append(data, start, offset - start);
                start = ++offset;
            }
            else if (data[offset] == '"')
            {
                localBuilder.Append(data, start, offset - start);
                offset++;
                return (builder != null ? null : localBuilder.ToString());
            }
            else if (data[offset] == '=' &&
                data.Length > offset + 3 &&
                data[offset + 1] == '\r' &&
                data[offset + 2] == '\n' &&
                (data[offset + 3] == ' ' || data[offset + 3] == '\t'))
            {
                //it's a soft crlf so it's ok
                offset += 3;
            }
            else if (permitUnicodeInDisplayName)
            {
                //if data contains Unicode and Unicode is permitted, then
                //it is valid in a quoted string in a header.
                if (data[offset] <= Ascii7bitMaxValue && !Qtext[data[offset]])
                    throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, data[offset]));
            }
            //not permitting Unicode, in which case Unicode is a formatting error
            else if (data[offset] > Ascii7bitMaxValue || !Qtext[data[offset]])
            {
                throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, data[offset]));
            }
        }
        if (doesntRequireQuotes)
        {
            localBuilder.Append(data, start, offset - start);
            return (builder != null ? null : localBuilder.ToString());
        }
        // Ran off the end of the string without finding the closing quote.
        throw new FormatException(SR.MailHeaderFieldMalformedHeader);
    }

    /// <summary>
    /// Skips CFWS and reads a parameter attribute token; returns null at end of string.
    /// </summary>
    internal static string ReadParameterAttribute(string data, ref int offset, StringBuilder builder)
    {
        if (!SkipCFWS(data, ref offset))
            return null; //
        return ReadToken(data, ref offset, null);
    }

    /// <summary>
    /// Reads a run of token (ttext) characters starting at <paramref name="offset"/>
    /// and advances the offset past it. Throws FormatException on a non-ASCII
    /// character or when no token character is present at the current position.
    /// </summary>
    internal static string ReadToken(string data, ref int offset, StringBuilder builder)
    {
        int start = offset;
        for (; offset < data.Length; offset++)
        {
            if (data[offset] > Ascii7bitMaxValue)
            {
                throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, data[offset]));
            }
            else if (!Ttext[data[offset]])
            {
                break;
            }
        }
        if (start == offset)
        {
            // NOTE(review): if called with offset == data.Length this indexer is out
            // of range; callers appear to guard via SkipCFWS — confirm.
            throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, data[offset]));
        }
        return data.Substring(start, offset - start);
    }

    // Month abbreviations, 1-based (index 0 unused) so value.Month indexes directly.
    private static string[] s_months = new string[] { null, "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };

    /// <summary>
    /// Formats <paramref name="value"/> as an RFC 2822 style date-time string,
    /// e.g. "1 Jan 2020 09:05:03 +0000", using the local machine's UTC offset.
    /// When <paramref name="builder"/> is supplied the text is appended to it and
    /// null is returned; otherwise the formatted string is returned.
    /// </summary>
    internal static string GetDateTimeString(DateTime value, StringBuilder builder)
    {
        StringBuilder localBuilder = (builder != null ? builder : new StringBuilder());
        localBuilder.Append(value.Day);
        localBuilder.Append(' ');
        localBuilder.Append(s_months[value.Month]);
        localBuilder.Append(' ');
        localBuilder.Append(value.Year);
        localBuilder.Append(' ');
        // Hours/minutes/seconds are zero-padded to two digits.
        if (value.Hour <= 9)
        {
            localBuilder.Append('0');
        }
        localBuilder.Append(value.Hour);
        localBuilder.Append(':');
        if (value.Minute <= 9)
        {
            localBuilder.Append('0');
        }
        localBuilder.Append(value.Minute);
        localBuilder.Append(':');
        if (value.Second <= 9)
        {
            localBuilder.Append('0');
        }
        localBuilder.Append(value.Second);

        // Emit the local UTC offset as "+HHMM"/"-HHMM".
        string offset = TimeZoneInfo.Local.GetUtcOffset(value).ToString();
        if (offset[0] != '-')
        {
            localBuilder.Append(" +");
        }
        else
        {
            localBuilder.Append(' ');
        }
        string[] offsetFields = offset.Split(':');
        localBuilder.Append(offsetFields[0]);
        localBuilder.Append(offsetFields[1]);
        return (builder != null ? null : localBuilder.ToString());
    }

    /// <summary>
    /// Appends <paramref name="data"/> to <paramref name="builder"/> either as a bare
    /// token, or — when it contains non-token characters or spaces — as a quoted
    /// string with backslash-escaping. Empty input becomes "".
    /// </summary>
    internal static void GetTokenOrQuotedString(string data, StringBuilder builder, bool allowUnicode)
    {
        int offset = 0, start = 0;
        for (; offset < data.Length; offset++)
        {
            if (CheckForUnicode(data[offset], allowUnicode))
            {
                continue;
            }

            if (!Ttext[data[offset]] || data[offset] == ' ')
            {
                // Found a character that cannot appear in a bare token:
                // re-emit the whole value as a quoted string.
                builder.Append('"');
                for (; offset < data.Length; offset++)
                {
                    if (CheckForUnicode(data[offset], allowUnicode))
                    {
                        continue;
                    }
                    else if (IsFWSAt(data, offset)) // Allow FWS == "\r\n "
                    {
                        // No-op, skip these three chars
                        offset++;
                        offset++;
                    }
                    else if (!Qtext[data[offset]])
                    {
                        // Escape the character by flushing and inserting a backslash.
                        builder.Append(data, start, offset - start);
                        builder.Append('\\');
                        start = offset;
                    }
                }
                builder.Append(data, start, offset - start);
                builder.Append('"');
                return;
            }
        }

        //always a quoted string if it was empty.
        if (data.Length == 0)
        {
            builder.Append("\"\"");
        }
        // Token, no quotes needed
        builder.Append(data);
    }

    /// <summary>
    /// Returns true for a non-ASCII character when Unicode is allowed; throws
    /// FormatException when it is not; returns false for ASCII characters.
    /// NOTE(review): uses '&lt;' so DEL (127) takes the Unicode path — confirm intended.
    /// </summary>
    private static bool CheckForUnicode(char ch, bool allowUnicode)
    {
        if (ch < Ascii7bitMaxValue)
        {
            return false;
        }
        if (!allowUnicode)
        {
            throw new FormatException(SR.Format(SR.MailHeaderFieldInvalidCharacter, ch));
        }
        return true;
    }

    /// <summary>
    /// Returns true when <paramref name="data"/> contains a CR or LF character.
    /// </summary>
    internal static bool HasCROrLF(string data)
    {
        for (int i = 0; i < data.Length; i++)
        {
            if (data[i] == '\r' || data[i] == '\n')
            {
                return true;
            }
        }
        return false;
    }

    // Is there a FWS ("\r\n " or "\r\n\t") starting at the given index?
    internal static bool IsFWSAt(string data, int index)
    {
        Debug.Assert(index >= 0);
        Debug.Assert(index < data.Length);

        return (data[index] == MailBnfHelper.CR
            && index + 2 < data.Length
            && data[index + 1] == MailBnfHelper.LF
            && (data[index + 2] == MailBnfHelper.Space
                || data[index + 2] == MailBnfHelper.Tab));
    }
}
}
| |
namespace SLeek
{
// Designer half of the Objects window ("Objects - SLeek"): a searchable list of
// in-world prims (lbxPrims) with Point At / Sit On / Touch actions (gbxInworld).
// Everything inside the designer region is generated by the Windows Forms
// Designer; hand edits to InitializeComponent will be lost on regeneration.
partial class frmObjects
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(frmObjects));
this.lbxPrims = new System.Windows.Forms.ListBox();
this.gbxInworld = new System.Windows.Forms.GroupBox();
this.btnTouch = new System.Windows.Forms.Button();
this.btnSitOn = new System.Windows.Forms.Button();
this.btnPointAt = new System.Windows.Forms.Button();
this.txtSearch = new System.Windows.Forms.TextBox();
this.timer1 = new System.Windows.Forms.Timer(this.components);
this.lblStatus = new System.Windows.Forms.Label();
this.label1 = new System.Windows.Forms.Label();
this.btnClear = new System.Windows.Forms.Button();
this.btnClose = new System.Windows.Forms.Button();
this.gbxInworld.SuspendLayout();
this.SuspendLayout();
//
// lbxPrims
//
this.lbxPrims.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.lbxPrims.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
this.lbxPrims.FormattingEnabled = true;
this.lbxPrims.IntegralHeight = false;
this.lbxPrims.ItemHeight = 18;
this.lbxPrims.Location = new System.Drawing.Point(12, 39);
this.lbxPrims.Name = "lbxPrims";
this.lbxPrims.Size = new System.Drawing.Size(362, 375);
this.lbxPrims.Sorted = true;
this.lbxPrims.TabIndex = 0;
this.lbxPrims.Visible = false;
this.lbxPrims.DrawItem += new System.Windows.Forms.DrawItemEventHandler(this.lbxPrims_DrawItem);
this.lbxPrims.SelectedIndexChanged += new System.EventHandler(this.lbxPrims_SelectedIndexChanged);
//
// gbxInworld
//
this.gbxInworld.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.gbxInworld.Controls.Add(this.btnTouch);
this.gbxInworld.Controls.Add(this.btnSitOn);
this.gbxInworld.Controls.Add(this.btnPointAt);
this.gbxInworld.Enabled = false;
this.gbxInworld.Location = new System.Drawing.Point(380, 12);
this.gbxInworld.Name = "gbxInworld";
this.gbxInworld.Size = new System.Drawing.Size(100, 116);
this.gbxInworld.TabIndex = 2;
this.gbxInworld.TabStop = false;
this.gbxInworld.Text = "In-world";
//
// btnTouch
//
this.btnTouch.Location = new System.Drawing.Point(6, 78);
this.btnTouch.Name = "btnTouch";
this.btnTouch.Size = new System.Drawing.Size(88, 23);
this.btnTouch.TabIndex = 2;
this.btnTouch.Text = "Touch/Click";
this.btnTouch.UseVisualStyleBackColor = true;
this.btnTouch.Click += new System.EventHandler(this.btnTouch_Click);
//
// btnSitOn
//
this.btnSitOn.Location = new System.Drawing.Point(6, 49);
this.btnSitOn.Name = "btnSitOn";
this.btnSitOn.Size = new System.Drawing.Size(88, 23);
this.btnSitOn.TabIndex = 1;
this.btnSitOn.Text = "Sit On";
this.btnSitOn.UseVisualStyleBackColor = true;
this.btnSitOn.Click += new System.EventHandler(this.btnSitOn_Click);
//
// btnPointAt
//
this.btnPointAt.Location = new System.Drawing.Point(6, 20);
this.btnPointAt.Name = "btnPointAt";
this.btnPointAt.Size = new System.Drawing.Size(88, 23);
this.btnPointAt.TabIndex = 0;
this.btnPointAt.Text = "Point At";
this.btnPointAt.UseVisualStyleBackColor = true;
this.btnPointAt.Click += new System.EventHandler(this.btnPointAt_Click);
//
// txtSearch
//
this.txtSearch.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.txtSearch.Enabled = false;
this.txtSearch.Location = new System.Drawing.Point(62, 12);
this.txtSearch.Name = "txtSearch";
this.txtSearch.Size = new System.Drawing.Size(246, 21);
this.txtSearch.TabIndex = 4;
this.txtSearch.TextChanged += new System.EventHandler(this.txtSearch_TextChanged);
//
// timer1
//
this.timer1.Enabled = true;
this.timer1.Interval = 1500;
this.timer1.Tick += new System.EventHandler(this.timer1_Tick);
//
// lblStatus
//
this.lblStatus.AutoSize = true;
this.lblStatus.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.lblStatus.Location = new System.Drawing.Point(12, 36);
this.lblStatus.Name = "lblStatus";
this.lblStatus.Size = new System.Drawing.Size(103, 13);
this.lblStatus.TabIndex = 6;
this.lblStatus.Text = "Getting objects...";
//
// label1
//
this.label1.AutoSize = true;
this.label1.Location = new System.Drawing.Point(12, 15);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(44, 13);
this.label1.TabIndex = 7;
this.label1.Text = "Search:";
//
// btnClear
//
this.btnClear.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.btnClear.Location = new System.Drawing.Point(314, 10);
this.btnClear.Name = "btnClear";
this.btnClear.Size = new System.Drawing.Size(60, 23);
this.btnClear.TabIndex = 8;
this.btnClear.Text = "Clear";
this.btnClear.UseVisualStyleBackColor = true;
this.btnClear.Click += new System.EventHandler(this.btnClear_Click);
//
// btnClose
//
this.btnClose.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.btnClose.Location = new System.Drawing.Point(380, 391);
this.btnClose.Name = "btnClose";
this.btnClose.Size = new System.Drawing.Size(100, 23);
this.btnClose.TabIndex = 9;
this.btnClose.Text = "Close";
this.btnClose.UseVisualStyleBackColor = true;
this.btnClose.Click += new System.EventHandler(this.btnClose_Click);
//
// frmObjects
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(492, 426);
this.Controls.Add(this.btnClose);
this.Controls.Add(this.btnClear);
this.Controls.Add(this.label1);
this.Controls.Add(this.lblStatus);
this.Controls.Add(this.txtSearch);
this.Controls.Add(this.gbxInworld);
this.Controls.Add(this.lbxPrims);
this.Font = new System.Drawing.Font("Tahoma", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.Name = "frmObjects";
this.Text = "Objects - SLeek";
this.Load += new System.EventHandler(this.frmObjects_Load);
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.frmObjects_FormClosing);
this.gbxInworld.ResumeLayout(false);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
// Control fields created and wired up by InitializeComponent.
private System.Windows.Forms.ListBox lbxPrims;
private System.Windows.Forms.GroupBox gbxInworld;
private System.Windows.Forms.Button btnSitOn;
private System.Windows.Forms.Button btnPointAt;
private System.Windows.Forms.Button btnTouch;
private System.Windows.Forms.TextBox txtSearch;
private System.Windows.Forms.Timer timer1;
private System.Windows.Forms.Label lblStatus;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.Button btnClear;
private System.Windows.Forms.Button btnClose;
}
}
| |
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !SILVERLIGHT && !__IOS__ && !__ANDROID__
namespace NLog.Targets
{
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Data;
using System.Data.Common;
using System.Globalization;
using System.Reflection;
using System.Text;
using System.Transactions;
using NLog.Common;
using NLog.Config;
using NLog.Internal;
using NLog.Layouts;
using ConfigurationManager = System.Configuration.ConfigurationManager;
/// <summary>
/// Writes log messages to the database using an ADO.NET provider.
/// </summary>
/// <seealso href="https://github.com/nlog/nlog/wiki/Database-target">Documentation on NLog Wiki</seealso>
/// <example>
/// <para>
/// The configuration is dependent on the database type, because
/// there are different methods of specifying connection string, SQL
/// command and command parameters.
/// </para>
/// <para>MS SQL Server using System.Data.SqlClient:</para>
/// <code lang="XML" source="examples/targets/Configuration File/Database/MSSQL/NLog.config" height="450" />
/// <para>Oracle using System.Data.OracleClient:</para>
/// <code lang="XML" source="examples/targets/Configuration File/Database/Oracle.Native/NLog.config" height="350" />
/// <para>Oracle using System.Data.OleDBClient:</para>
/// <code lang="XML" source="examples/targets/Configuration File/Database/Oracle.OleDB/NLog.config" height="350" />
/// <para>To set up the log target programmatically use code like this (an equivalent of MSSQL configuration):</para>
/// <code lang="C#" source="examples/targets/Configuration API/Database/MSSQL/Example.cs" height="630" />
/// </example>
[Target("Database")]
public sealed class DatabaseTarget : Target, IInstallable
{
    // Assembly hosting the built-in ADO.NET providers; used in InitializeTarget()
    // to resolve the SqlClient/OleDb/Odbc connection types by fully-qualified name.
    private static Assembly systemDataAssembly = typeof(IDbConnection).Assembly;

    // Currently open connection, or null. Kept open between writes only when
    // KeepConnection is true; otherwise closed in the finally block of Write().
    private IDbConnection activeConnection = null;

    // Connection string that activeConnection was opened with. Compared in
    // EnsureConnectionOpen() so that a different rendered connection string
    // forces a close-and-reopen.
    private string activeConnectionString;

    /// <summary>
    /// Initializes a new instance of the <see cref="DatabaseTarget" /> class.
    /// Defaults: provider "sqlserver", host "." (local), plain-text command type,
    /// and connection strings read from the application configuration file.
    /// </summary>
    public DatabaseTarget()
    {
        this.Parameters = new List<DatabaseParameterInfo>();
        this.InstallDdlCommands = new List<DatabaseCommandInfo>();
        this.UninstallDdlCommands = new List<DatabaseCommandInfo>();
        this.DBProvider = "sqlserver";
        this.DBHost = ".";
        this.ConnectionStringsSettings = ConfigurationManager.ConnectionStrings;
        this.CommandType = CommandType.Text;
    }

    /// <summary>
    /// Gets or sets the name of the database provider.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The parameter name should be a provider invariant name as registered in machine.config or app.config. Common values are:
    /// </para>
    /// <ul>
    /// <li><c>System.Data.SqlClient</c> - <see href="http://msdn.microsoft.com/en-us/library/system.data.sqlclient.aspx">SQL Server Client</see></li>
    /// <li><c>System.Data.SqlServerCe.3.5</c> - <see href="http://www.microsoft.com/sqlserver/2005/en/us/compact.aspx">SQL Server Compact 3.5</see></li>
    /// <li><c>System.Data.OracleClient</c> - <see href="http://msdn.microsoft.com/en-us/library/system.data.oracleclient.aspx">Oracle Client from Microsoft</see> (deprecated in .NET Framework 4)</li>
    /// <li><c>Oracle.DataAccess.Client</c> - <see href="http://www.oracle.com/technology/tech/windows/odpnet/index.html">ODP.NET provider from Oracle</see></li>
    /// <li><c>System.Data.SQLite</c> - <see href="http://sqlite.phxsoftware.com/">System.Data.SQLite driver for SQLite</see></li>
    /// <li><c>Npgsql</c> - <see href="http://npgsql.projects.postgresql.org/">Npgsql driver for PostgreSQL</see></li>
    /// <li><c>MySql.Data.MySqlClient</c> - <see href="http://www.mysql.com/downloads/connector/net/">MySQL Connector/Net</see></li>
    /// </ul>
    /// <para>(Note that provider invariant names are not supported on .NET Compact Framework).</para>
    /// <para>
    /// Alternatively the parameter value can be a fully qualified name of the provider
    /// connection type (class implementing <see cref="IDbConnection" />) or one of the following tokens:
    /// </para>
    /// <ul>
    /// <li><c>sqlserver</c>, <c>mssql</c>, <c>microsoft</c> or <c>msde</c> - SQL Server Data Provider</li>
    /// <li><c>oledb</c> - OLEDB Data Provider</li>
    /// <li><c>odbc</c> - ODBC Data Provider</li>
    /// </ul>
    /// </remarks>
    /// <docgen category='Connection Options' order='10' />
    [RequiredParameter]
    [DefaultValue("sqlserver")]
    public string DBProvider { get; set; }

    /// <summary>
    /// Gets or sets the name of the connection string (as specified in the <see href="http://msdn.microsoft.com/en-us/library/bf7sd233.aspx">&lt;connectionStrings&gt; configuration section</see>).
    /// When set, both the connection string and the provider factory are taken from configuration.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    public string ConnectionStringName { get; set; }

    /// <summary>
    /// Gets or sets the connection string. When provided, it overrides the values
    /// specified in DBHost, DBUserName, DBPassword, DBDatabase.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    public Layout ConnectionString { get; set; }

    /// <summary>
    /// Gets or sets the connection string used for installation and uninstallation. If not provided, the regular ConnectionString is used.
    /// </summary>
    /// <docgen category='Installation Options' order='10' />
    public Layout InstallConnectionString { get; set; }

    /// <summary>
    /// Gets the installation DDL commands.
    /// </summary>
    /// <docgen category='Installation Options' order='10' />
    [ArrayParameter(typeof(DatabaseCommandInfo), "install-command")]
    public IList<DatabaseCommandInfo> InstallDdlCommands { get; private set; }

    /// <summary>
    /// Gets the uninstallation DDL commands.
    /// </summary>
    /// <docgen category='Installation Options' order='10' />
    [ArrayParameter(typeof(DatabaseCommandInfo), "uninstall-command")]
    public IList<DatabaseCommandInfo> UninstallDdlCommands { get; private set; }

    /// <summary>
    /// Gets or sets a value indicating whether to keep the
    /// database connection open between the log events.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    [DefaultValue(false)]
    public bool KeepConnection { get; set; }

    /// <summary>
    /// Obsolete - value will be ignored! The logging code always runs outside of transaction.
    ///
    /// Gets or sets a value indicating whether to use database transactions.
    /// Some data providers require this.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    /// <remarks>
    /// This option was removed in NLog 4.0 because the logging code always runs outside of transaction.
    /// This ensures that the log gets written to the database if you rollback the main transaction because of an error and want to log the error.
    /// </remarks>
    [Obsolete("Obsolete - value will be ignored - logging code always runs outside of transaction. Will be removed in NLog 6.")]
    public bool? UseTransactions { get; set; }

    /// <summary>
    /// Gets or sets the database host name. If the ConnectionString is not provided
    /// this value will be used to construct the "Server=" part of the
    /// connection string.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    public Layout DBHost { get; set; }

    /// <summary>
    /// Gets or sets the database user name. If the ConnectionString is not provided
    /// this value will be used to construct the "User ID=" part of the
    /// connection string.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    public Layout DBUserName { get; set; }

    /// <summary>
    /// Gets or sets the database password. If the ConnectionString is not provided
    /// this value will be used to construct the "Password=" part of the
    /// connection string.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    public Layout DBPassword { get; set; }

    /// <summary>
    /// Gets or sets the database name. If the ConnectionString is not provided
    /// this value will be used to construct the "Database=" part of the
    /// connection string.
    /// </summary>
    /// <docgen category='Connection Options' order='10' />
    public Layout DBDatabase { get; set; }

    /// <summary>
    /// Gets or sets the text of the SQL command to be run on each log level.
    /// </summary>
    /// <remarks>
    /// Typically this is a SQL INSERT statement or a stored procedure call.
    /// It should use the database-specific parameters (marked as <c>@parameter</c>
    /// for SQL server or <c>:parameter</c> for Oracle, other data providers
    /// have their own notation) and not the layout renderers,
    /// because the latter is prone to SQL injection attacks.
    /// The layout renderers should be specified as &lt;parameter /&gt; elements instead.
    /// </remarks>
    /// <docgen category='SQL Statement' order='10' />
    [RequiredParameter]
    public Layout CommandText { get; set; }

    /// <summary>
    /// Gets or sets the type of the SQL command to be run on each log level.
    /// </summary>
    /// <remarks>
    /// This specifies how the command text is interpreted, as "Text" (default) or as "StoredProcedure".
    /// When using the value StoredProcedure, the commandText-property would
    /// normally be the name of the stored procedure. TableDirect method is not supported in this context.
    /// </remarks>
    /// <docgen category='SQL Statement' order='11' />
    [DefaultValue(CommandType.Text)]
    public CommandType CommandType { get; set; }

    /// <summary>
    /// Gets the collection of parameters. Each parameter contains a mapping
    /// between NLog layout and a database named or positional parameter.
    /// </summary>
    /// <docgen category='SQL Statement' order='12' />
    [ArrayParameter(typeof(DatabaseParameterInfo), "parameter")]
    public IList<DatabaseParameterInfo> Parameters { get; private set; }

    // Provider factory resolved from configuration or from the registered
    // factory classes; when null, ConnectionType is used instead.
    internal DbProviderFactory ProviderFactory { get; set; }

    // this is so we can mock the connection string without creating sub-processes
    internal ConnectionStringSettingsCollection ConnectionStringsSettings { get; set; }

    // Concrete IDbConnection type used when no DbProviderFactory was found.
    internal Type ConnectionType { get; set; }

    /// <summary>
    /// Performs installation which requires administrative permissions.
    /// </summary>
    /// <param name="installationContext">The installation context.</param>
    public void Install(InstallationContext installationContext)
    {
        this.RunInstallCommands(installationContext, this.InstallDdlCommands);
    }

    /// <summary>
    /// Performs uninstallation which requires administrative permissions.
    /// </summary>
    /// <param name="installationContext">The installation context.</param>
    public void Uninstall(InstallationContext installationContext)
    {
        this.RunInstallCommands(installationContext, this.UninstallDdlCommands);
    }

    /// <summary>
    /// Determines whether the item is installed.
    /// </summary>
    /// <param name="installationContext">The installation context.</param>
    /// <returns>
    /// Value indicating whether the item is installed or null if it is not possible to determine.
    /// </returns>
    public bool? IsInstalled(InstallationContext installationContext)
    {
        // Installation state cannot be queried generically across providers.
        return null;
    }

    /// <summary>
    /// Creates and opens a new connection for the given connection string,
    /// preferring the resolved provider factory over direct type activation.
    /// </summary>
    internal IDbConnection OpenConnection(string connectionString)
    {
        IDbConnection connection;
        if (this.ProviderFactory != null)
        {
            connection = this.ProviderFactory.CreateConnection();
        }
        else
        {
            // Fall back to instantiating the connection type resolved in InitializeTarget().
            connection = (IDbConnection)Activator.CreateInstance(this.ConnectionType);
        }
        connection.ConnectionString = connectionString;
        connection.Open();
        return connection;
    }

    /// <summary>
    /// Initializes the target. Can be used by inheriting classes
    /// to initialize logging.
    /// Resolves the provider factory (or connection type) in this order:
    /// named connection string from configuration, registered provider
    /// invariant name, then the well-known tokens / fully-qualified type name.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "connectionStrings", Justification = "Name of the config file section.")]
    protected override void InitializeTarget()
    {
        base.InitializeTarget();
#pragma warning disable 618
        if (UseTransactions.HasValue)
#pragma warning restore 618
        {
            InternalLogger.Warn("UseTransactions is obsolete and will not be used - will be removed in NLog 6");
        }
        bool foundProvider = false;
        if (!string.IsNullOrEmpty(this.ConnectionStringName))
        {
            // read connection string and provider factory from the configuration file
            var cs = this.ConnectionStringsSettings[this.ConnectionStringName];
            if (cs == null)
            {
                throw new NLogConfigurationException("Connection string '" + this.ConnectionStringName + "' is not declared in <connectionStrings /> section.");
            }
            // Escape so the literal connection string is not re-interpreted as a layout.
            this.ConnectionString = SimpleLayout.Escape(cs.ConnectionString);
            this.ProviderFactory = DbProviderFactories.GetFactory(cs.ProviderName);
            foundProvider = true;
        }
        if (!foundProvider)
        {
            // Scan registered providers for a matching invariant name
            // (the loop keeps scanning after a match; the last match wins).
            foreach (DataRow row in DbProviderFactories.GetFactoryClasses().Rows)
            {
                if ((string)row["InvariantName"] == this.DBProvider)
                {
                    this.ProviderFactory = DbProviderFactories.GetFactory(this.DBProvider);
                    foundProvider = true;
                }
            }
        }
        if (!foundProvider)
        {
            // No factory: resolve a concrete IDbConnection type from the
            // well-known shortcut tokens, or treat DBProvider as a type name.
            switch (this.DBProvider.ToUpper(CultureInfo.InvariantCulture))
            {
                case "SQLSERVER":
                case "MSSQL":
                case "MICROSOFT":
                case "MSDE":
                    this.ConnectionType = systemDataAssembly.GetType("System.Data.SqlClient.SqlConnection", true);
                    break;
                case "OLEDB":
                    this.ConnectionType = systemDataAssembly.GetType("System.Data.OleDb.OleDbConnection", true);
                    break;
                case "ODBC":
                    this.ConnectionType = systemDataAssembly.GetType("System.Data.Odbc.OdbcConnection", true);
                    break;
                default:
                    // Throws if the type cannot be found ('true' argument).
                    this.ConnectionType = Type.GetType(this.DBProvider, true);
                    break;
            }
        }
    }

    /// <summary>
    /// Closes the target and releases any unmanaged resources.
    /// </summary>
    protected override void CloseTarget()
    {
        base.CloseTarget();
        InternalLogger.Trace("DatabaseTarget: close connection because of CloseTarget");
        this.CloseConnection();
    }

    /// <summary>
    /// Writes the specified logging event to the database. It creates
    /// a new database command, prepares parameters for it by calculating
    /// layouts and executes the command.
    /// On error the connection is closed and the exception rethrown;
    /// with KeepConnection=false the connection is always closed afterwards.
    /// </summary>
    /// <param name="logEvent">The logging event.</param>
    protected override void Write(LogEventInfo logEvent)
    {
        try
        {
            this.WriteEventToDatabase(logEvent);
        }
        catch (Exception exception)
        {
            InternalLogger.Error(exception, "Error when writing to database.");
            if (exception.MustBeRethrownImmediately())
            {
                throw;
            }
            InternalLogger.Trace("DatabaseTarget: close connection because of error");
            this.CloseConnection();
            throw;
        }
        finally
        {
            if (!this.KeepConnection)
            {
                InternalLogger.Trace("DatabaseTarget: close connection (KeepConnection = false).");
                this.CloseConnection();
            }
        }
    }

    /// <summary>
    /// Writes an array of logging events to the log target. By default it iterates on all
    /// events and passes them to "Write" method. Inheriting classes can use this method to
    /// optimize batch writes.
    /// Events are bucketed by rendered connection string so that events sharing
    /// a connection are written together; each event's continuation is invoked
    /// with null on success or the exception on failure.
    /// </summary>
    /// <param name="logEvents">Logging events to be written out.</param>
    protected override void Write(AsyncLogEventInfo[] logEvents)
    {
        var buckets = SortHelpers.BucketSort(logEvents, c => this.BuildConnectionString(c.LogEvent));
        try
        {
            foreach (var kvp in buckets)
            {
                foreach (AsyncLogEventInfo ev in kvp.Value)
                {
                    try
                    {
                        this.WriteEventToDatabase(ev.LogEvent);
                        ev.Continuation(null);
                    }
                    catch (Exception exception)
                    {
                        // in case of exception, close the connection and report it
                        InternalLogger.Error(exception, "Error when writing to database.");
                        if (exception.MustBeRethrownImmediately())
                        {
                            throw;
                        }
                        InternalLogger.Trace("DatabaseTarget: close connection because of exception");
                        this.CloseConnection();
                        ev.Continuation(exception);
                        if (exception.MustBeRethrown())
                        {
                            throw;
                        }
                    }
                }
            }
        }
        finally
        {
            if (!this.KeepConnection)
            {
                InternalLogger.Trace("DatabaseTarget: close connection because of KeepConnection=false");
                this.CloseConnection();
            }
        }
    }

    /// <summary>
    /// Renders the command text and all configured parameters for the given
    /// event and executes the command against the (possibly reused) connection.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "It's up to the user to ensure proper quoting.")]
    private void WriteEventToDatabase(LogEventInfo logEvent)
    {
        //Always suppress transaction so that the caller does not roll back logging if they are rolling back their transaction.
        using (TransactionScope transactionScope = new TransactionScope(TransactionScopeOption.Suppress))
        {
            this.EnsureConnectionOpen(this.BuildConnectionString(logEvent));
            IDbCommand command = this.activeConnection.CreateCommand();
            command.CommandText = this.CommandText.Render(logEvent);
            command.CommandType = this.CommandType;
            InternalLogger.Trace("Executing {0}: {1}", command.CommandType, command.CommandText);
            foreach (DatabaseParameterInfo par in this.Parameters)
            {
                IDbDataParameter p = command.CreateParameter();
                p.Direction = ParameterDirection.Input;
                if (par.Name != null)
                {
                    p.ParameterName = par.Name;
                }
                // Size/Precision/Scale of 0 means "not configured" and the provider default is kept.
                if (par.Size != 0)
                {
                    p.Size = par.Size;
                }
                if (par.Precision != 0)
                {
                    p.Precision = par.Precision;
                }
                if (par.Scale != 0)
                {
                    p.Scale = par.Scale;
                }
                // Parameter values are always passed as rendered strings.
                string stringValue = par.Layout.Render(logEvent);
                p.Value = stringValue;
                command.Parameters.Add(p);
                InternalLogger.Trace("  Parameter: '{0}' = '{1}' ({2})", p.ParameterName, p.Value, p.DbType);
            }
            int result = command.ExecuteNonQuery();
            InternalLogger.Trace("Finished execution, result = {0}", result);
            //not really needed as there is no transaction at all.
            transactionScope.Complete();
        }
    }

    /// <summary>
    /// Returns the explicit ConnectionString rendered for the event, or builds
    /// one from DBHost/DBUserName/DBPassword/DBDatabase (using integrated
    /// security when no user name is configured).
    /// </summary>
    private string BuildConnectionString(LogEventInfo logEvent)
    {
        if (this.ConnectionString != null)
        {
            return this.ConnectionString.Render(logEvent);
        }
        var sb = new StringBuilder();
        sb.Append("Server=");
        sb.Append(this.DBHost.Render(logEvent));
        sb.Append(";");
        if (this.DBUserName == null)
        {
            sb.Append("Trusted_Connection=SSPI;");
        }
        else
        {
            sb.Append("User id=");
            sb.Append(this.DBUserName.Render(logEvent));
            sb.Append(";Password=");
            sb.Append(this.DBPassword.Render(logEvent));
            sb.Append(";");
        }
        if (this.DBDatabase != null)
        {
            sb.Append("Database=");
            sb.Append(this.DBDatabase.Render(logEvent));
        }
        return sb.ToString();
    }

    /// <summary>
    /// Makes sure an open connection for the given connection string exists,
    /// closing and reopening the active connection when the string changed.
    /// </summary>
    private void EnsureConnectionOpen(string connectionString)
    {
        if (this.activeConnection != null)
        {
            if (this.activeConnectionString != connectionString)
            {
                InternalLogger.Trace("DatabaseTarget: close connection because of opening new.");
                this.CloseConnection();
            }
        }
        if (this.activeConnection != null)
        {
            // Existing connection matches the requested connection string - reuse it.
            return;
        }
        InternalLogger.Trace("DatabaseTarget: open connection.");
        this.activeConnection = this.OpenConnection(connectionString);
        this.activeConnectionString = connectionString;
    }

    /// <summary>
    /// Closes and disposes the active connection, if any, and clears the cached
    /// connection string. Safe to call when no connection is open.
    /// </summary>
    private void CloseConnection()
    {
        if (this.activeConnection != null)
        {
            this.activeConnection.Close();
            this.activeConnection.Dispose();
            this.activeConnection = null;
            this.activeConnectionString = null;
        }
    }

    /// <summary>
    /// Executes the given DDL commands (install or uninstall), choosing per
    /// command the connection string: command-specific, then
    /// InstallConnectionString, then the regular connection string.
    /// Failures are warnings when IgnoreFailures is set, otherwise rethrown.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "It's up to the user to ensure proper quoting.")]
    private void RunInstallCommands(InstallationContext installationContext, IEnumerable<DatabaseCommandInfo> commands)
    {
        // create log event that will be used to render all layouts
        LogEventInfo logEvent = installationContext.CreateLogEvent();
        try
        {
            foreach (var commandInfo in commands)
            {
                string cs;
                if (commandInfo.ConnectionString != null)
                {
                    // if there is connection string specified on the command info, use it
                    cs = commandInfo.ConnectionString.Render(logEvent);
                }
                else if (this.InstallConnectionString != null)
                {
                    // next, try InstallConnectionString
                    cs = this.InstallConnectionString.Render(logEvent);
                }
                else
                {
                    // if it's not defined, fall back to regular connection string
                    cs = this.BuildConnectionString(logEvent);
                }
                this.EnsureConnectionOpen(cs);
                var command = this.activeConnection.CreateCommand();
                command.CommandType = commandInfo.CommandType;
                command.CommandText = commandInfo.Text.Render(logEvent);
                try
                {
                    installationContext.Trace("Executing {0} '{1}'", command.CommandType, command.CommandText);
                    command.ExecuteNonQuery();
                }
                catch (Exception exception)
                {
                    if (exception.MustBeRethrownImmediately())
                    {
                        throw;
                    }
                    if (commandInfo.IgnoreFailures || installationContext.IgnoreFailures)
                    {
                        installationContext.Warning(exception.Message);
                    }
                    else
                    {
                        installationContext.Error(exception.Message);
                        throw;
                    }
                }
            }
        }
        finally
        {
            InternalLogger.Trace("DatabaseTarget: close connection after install.");
            this.CloseConnection();
        }
    }
}
}
#endif
| |
/*
* @author John Casimiro
* @created_date 2014-01-30
* @last_modified_by Ammar Alammar
* @last_modified_date 2014-06-08
* @description Salesforce REST API wrapper for Unity 3d.
* @version 1.04
*
*/
using UnityEngine;
using System;
using System.Collections;
using System.Text;
using Boomlagoon.JSON;
using HutongGames.PlayMaker;
namespace SFDC
{
public class Salesforce : MonoBehaviour {
    // OAuth 2.0 token endpoint for the username-password flow.
    public string oAuthEndpoint = "https://login.salesforce.com/services/oauth2/token";
    // Connected-app credentials; must be filled in from Salesforce setup before use.
    public string clientSecret = "<<INSERT FROM SALESFORCE>>";
    public string clientId = "<<INSERT FROM SALESFORCE>>";
    public string personalSecurityToken;
    // Name prefix of the GameObjects that receive downloaded attachment textures.
    private string attachmentObjPrefix = "xRay";
    private bool playmakerOn = true; // Playmaker is a declarative (point & click) tool for building complex logic and behaviours in Unity3D
    // ******************************** DO NOT TOUCH BELOW THIS LINE ********************************
    public string grantType = "password";
    // Salesforce REST API version used to build request URLs.
    public string version = "v29.0";
    // OAuth access token and instance URL, populated by setToken() after login().
    public string token;
    public string instanceUrl;
    // Raw bytes of the most recently downloaded attachment body.
    public byte[] textureBytes;
    // holder for responses from the REST API
    public string response = null;
    /*
     * @author Cas
     * @date 2014-01-30
     * @description Executes the authorization of the application with Salesforce.
     * Saves the instance url and token to vars of the class.
     *
     * @param username The user's salesforce.com username.
     * @param password The user's salesforce.com password
     * NOTE(review): password-based OAuth normally requires the security token
     * appended to the password - personalSecurityToken is declared but never
     * used here; confirm callers append it themselves.
     */
    public void login(string username, string password){
        // check if Auth Token is already set
        if (token != null) return;
        WWWForm form = new WWWForm();
        form.AddField("username", username);
        form.AddField("password", password);
        form.AddField("client_secret", clientSecret);
        form.AddField("client_id", clientId);
        form.AddField("grant_type", grantType);
        WWW result = new WWW(oAuthEndpoint, form);
        StartCoroutine(setToken(result));
    }
    /*
     * @author Cas
     * @date 2014-01-30
     * @description Executes a query against salesforce.com. The results are stored
     * in the response variable.
     *
     * @param q The SOQL query to be executed
     */
    public void query(string q){
        string url = instanceUrl + "/services/data/" + version + "/query?q=" + WWW.EscapeURL(q);
        WWWForm form = new WWWForm();
        Hashtable headers = form.headers;
        headers["Authorization"] = "Bearer " + token;
        headers["Content-Type"] = "application/json";
        headers["Method"] = "GET";
        headers["X-PrettyPrint"] = 1;
        WWW www = new WWW(url, null, headers);
        request(www);
    }
    /*
     * @author Cas
     * @date 2014-01-30
     * @description Inserts a record into salesforce.
     *
     * @param sobject The object in salesforce(custom or standard) that you are
     * trying to insert a record to.
     * @param body The JSON for the data(fields and values) that will be inserted.
     */
    public void insert(string sobject, string body){
        string url = instanceUrl + "/services/data/" + version + "/sobjects/" + sobject;
        WWWForm form = new WWWForm();
        Hashtable headers = form.headers;
        headers["Authorization"] = "Bearer " + token;
        headers["Content-Type"] = "application/json";
        headers["Method"] = "POST";
        headers["X-PrettyPrint"] = 1;
        WWW www = new WWW(url, System.Text.Encoding.UTF8.GetBytes(body), headers);
        request(www);
    }
    /*
     * @author Cas
     * @date 2014-01-31
     * @description Updates a record in salesforce. Uses the _HttpMethod=PATCH
     * URL parameter workaround because WWW cannot issue a real PATCH request.
     *
     * @param id The salesforce id of the record you are trying to update.
     * @param sobject The sobject of the record you are trying to update.
     * @param body The JSON for the data(fields and values) that will be updated.
     */
    public void update(string id, string sobject, string body){
        string url = instanceUrl + "/services/data/" + version + "/sobjects/" + sobject + "/" + id + "?_HttpMethod=PATCH";
        WWWForm form = new WWWForm();
        Hashtable headers = form.headers;
        headers["Authorization"] = "Bearer " + token;
        headers["Content-Type"] = "application/json";
        headers["Method"] = "POST";
        headers["X-PrettyPrint"] = 1;
        WWW www = new WWW(url, System.Text.Encoding.UTF8.GetBytes(body), headers);
        request(www);
    }
    /*
     * @author Cas
     * @date 2014-01-31
     * @description Deletes a record in salesforce. Uses the _HttpMethod=DELETE
     * URL parameter workaround because WWW cannot issue a real DELETE request.
     *
     * @param id The salesforce id of the record you are trying to delete.
     * @param sobject The sobject of the record you are trying to delete.
     */
    public void delete(string id, string sobject){
        string url = instanceUrl + "/services/data/" + version + "/sobjects/" + sobject + "/" + id + "?_HttpMethod=DELETE";
        WWWForm form = new WWWForm();
        Hashtable headers = form.headers;
        headers["Authorization"] = "Bearer " + token;
        headers["Method"] = "POST";
        headers["X-PrettyPrint"] = 1;
        // need something in the body for DELETE to work for some reason
        String body = "DELETE";
        WWW www = new WWW(url, System.Text.Encoding.UTF8.GetBytes(body), headers);
        request(www);
    }
    /*
     * @author Ammar Alammar
     * @date 2014-07-05
     * @description Retrieves an attachment body (Base64 blob) from salesforce.com. The results are stored
     * in the response variable.
     *
     * @param url Executing a SOQL query on Attachments that selects the Body field will return
     * a URL, which in turn contains the Base64 body (retrieved via a GET operation).
     * @param seq Sequence number used to locate the target GameObject by name.
     * Review coroutines in Unity's documentation. It is a specifically important concept
     * in a game development environment, where methods cannot block the processing workflow.
     * In summary, a coroutine allows execution to be returned to the Unity engine and free up the frame.
     * When the next frame is executed, the code in the coroutine continues from where it left off.
     * This emulates "context switching" or interleaving of the blocking method's processing and the rest of the environment.
     */
    public void getAttachmentBody(string url, int seq){
        WWWForm form = new WWWForm();
        Hashtable headers = form.headers;
        headers["Authorization"] = "Bearer " + token;
        headers["Content-Type"] = "application/json";
        headers["Method"] = "GET";
        headers["X-PrettyPrint"] = 1;
        WWW www = new WWW(instanceUrl + url, null, headers);
        StartCoroutine(executeDownload(www,seq));
    }
    /*
     * @author Ammar Alammar
     * @date 2014-07-05
     * @description Wait for a response from the callout & wait for the whole attachment body
     * to be downloaded, then assign it as a texture to a game object named
     * attachmentObjPrefix + seq, and decrement the PlayMaker "numImagesToLoad" counter.
     *
     * @param www The WWW request being executed.
     * @param seq Sequence number identifying the target GameObject.
     */
    IEnumerator executeDownload(WWW www, int seq){
        yield return www;
        if (www.error == null){
            response = www.text;
        } else {
            response = www.error;
        }
        // Obtain the binary byte array of the textures
        textureBytes = www.bytes;
        // Assign Textures on Xray Objects
        Texture2D tex = new Texture2D(1024, 1024);
        tex.LoadImage(www.bytes);
        GameObject xrayObject = GameObject.Find (attachmentObjPrefix + seq);
        // NOTE(review): no null check - fails with NullReferenceException if the
        // named GameObject does not exist in the scene.
        xrayObject.renderer.material.mainTexture = tex;
        FsmVariables.GlobalVariables.FindFsmInt ("numImagesToLoad").Value -=1;
    }
    /*
     * @author Cas
     * @date 2014-01-30
     * @description Generic function that clears the response var and kicks off
     * the coroutine for the request.
     *
     * @param www The WWW request being executed.
     */
    public void request(WWW www){
        response = null;
        StartCoroutine(executeCall(www));
    }
    /*
     * @author Cas
     * @date 2014-01-30
     * @description Generic IEnumerator to wait for a response from the callout.
     * On success the body text is stored in response; on failure the error string is stored instead.
     *
     * @param www The WWW request being executed.
     */
    IEnumerator executeCall(WWW www){
        yield return www;
        if (www.error == null){
            response = www.text;
        } else {
            response = www.error;
        }
    }
    /*
     * @author John Casimiro
     * @created_date 2014-01-30
     * @last_modified_by Ammar Alammar
     * @last_modified_date 2014-06-08
     * @description IEnumerator to wait for the login response and set the auth token and instance url.
     *
     * @param www The WWW request being executed.
     */
    IEnumerator setToken(WWW www) {
        yield return www;
        if (www.error == null){
            // parse JSON Response
            var obj = JSONObject.Parse(www.text);
            // set token and instance url
            token = obj.GetString("access_token");
            instanceUrl = obj.GetString("instance_url");
            // Fire Playmaker event to inform the Playmaker engine that the login is complete.
            // Other integrations to salesforce can now reuse the token.
            // PlayMakerFSM targetFSM = gameObject.GetComponent<PlayMakerFSM>();
            // targetFSM.Fsm.Event ("tokenReady");
        } else {
            Debug.Log("Login Error: "+ www.error.ToString());
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;
using Marten.Internal;
using Marten.Internal.Operations;
using Marten.Internal.Sessions;
using Weasel.Postgresql;
using Marten.Services;
using Npgsql;
namespace Marten.Events.Daemon
{
/// <summary>
/// Incrementally built batch command for projection updates
/// </summary>
public class ProjectionUpdateBatch : IUpdateBatch, IDisposable, ISessionWorkTracker
{
public EventRange Range { get; }
private DocumentSessionBase _session;
private readonly CancellationToken _token;
private readonly IList<Page> _pages = new List<Page>();
private Page _current;
internal ProjectionUpdateBatch(EventGraph events, DocumentSessionBase session, EventRange range, CancellationToken token)
{
Range = range;
_session = session;
_token = token;
Queue = new ActionBlock<IStorageOperation>(processOperation,
new ExecutionDataflowBlockOptions {MaxDegreeOfParallelism = 1, EnsureOrdered = true, CancellationToken = token});
startNewPage(session);
var progressOperation = range.BuildProgressionOperation(events);
Queue.Post(progressOperation);
}
public ActionBlock<IStorageOperation> Queue { get; }
private void startNewPage(IMartenSession session)
{
_current = new Page(session);
_pages.Add(_current);
}
private void processOperation(IStorageOperation operation)
{
if (_token.IsCancellationRequested) return;
_current.Append(operation);
if (_current.Count >= _session.Options.UpdateBatchSize)
{
startNewPage(_session);
}
}
void IUpdateBatch.ApplyChanges(IMartenSession session)
{
if (_token.IsCancellationRequested) return;
var exceptions = new List<Exception>();
foreach (var page in _pages)
{
page.ApplyChanges(exceptions, session);
// Wanna fail fast here instead of trying the next batch
if (exceptions.Any())
{
throw new AggregateException(exceptions);
}
}
}
async Task IUpdateBatch.ApplyChangesAsync(IMartenSession session, CancellationToken token)
{
if (_token.IsCancellationRequested) return;
var exceptions = new List<Exception>();
foreach (var page in _pages)
{
await page.ApplyChangesAsync(exceptions, session, token).ConfigureAwait(false);
// Wanna fail fast here instead of trying the next batch
if (exceptions.Any())
{
throw new AggregateException(exceptions);
}
}
}
public class Page
{
private IMartenSession _session;
public int Count { get; private set; }
private readonly NpgsqlCommand _command = new NpgsqlCommand();
private readonly CommandBuilder _builder;
private readonly List<IStorageOperation> _operations = new List<IStorageOperation>();
public Page(IMartenSession session)
{
_session = session;
_builder = new CommandBuilder(_command);
}
public void Append(IStorageOperation operation)
{
Count++;
operation.ConfigureCommand(_builder, _session);
_builder.Append(";");
_operations.Add(operation);
}
public void ApplyChanges(IList<Exception> exceptions, IMartenSession session)
{
if (Count == 0)
{
return;
}
_command.CommandText = _builder.ToString();
using var reader = session.Database.ExecuteReader(_command);
UpdateBatch.ApplyCallbacks(_operations, reader, exceptions);
}
public async Task ApplyChangesAsync(IList<Exception> exceptions, IMartenSession session, CancellationToken token)
{
if (Count == 0)
{
return;
}
_command.CommandText = _builder.ToString();
using var reader = await session.Database.ExecuteReaderAsync(_command, token).ConfigureAwait(false);
await UpdateBatch.ApplyCallbacksAsync(_operations, reader, exceptions, token).ConfigureAwait(false);
}
public void ReleaseSession()
{
_session = null;
}
}
public void Dispose()
{
_session?.Dispose();
Queue.Complete();
}
// Explicit interface plumbing: this type participates in the IUnitOfWork / IChangeSet /
// ISessionWorkTracker contracts only far enough for batched operation ingestion.
// ISessionWorkTracker.Add(IStorageOperation) is the single supported member below;
// every other member deliberately throws NotSupportedException.
IEnumerable<IDeletion> IUnitOfWork.Deletions()
{
    throw new NotSupportedException();
}
IEnumerable<IDeletion> IUnitOfWork.DeletionsFor<T>()
{
    throw new NotSupportedException();
}
IEnumerable<IDeletion> IUnitOfWork.DeletionsFor(Type documentType)
{
    throw new NotSupportedException();
}
IEnumerable<object> IUnitOfWork.Updates()
{
    throw new NotSupportedException();
}
IEnumerable<object> IUnitOfWork.Inserts()
{
    throw new NotSupportedException();
}
IEnumerable<T> IUnitOfWork.UpdatesFor<T>()
{
    throw new NotSupportedException();
}
IEnumerable<T> IUnitOfWork.InsertsFor<T>()
{
    throw new NotSupportedException();
}
IEnumerable<T> IUnitOfWork.AllChangedFor<T>()
{
    throw new NotSupportedException();
}
IList<StreamAction> IUnitOfWork.Streams()
{
    throw new NotSupportedException();
}
IEnumerable<IStorageOperation> IUnitOfWork.Operations()
{
    throw new NotSupportedException();
}
IEnumerable<IStorageOperation> IUnitOfWork.OperationsFor<T>()
{
    throw new NotSupportedException();
}
IEnumerable<IStorageOperation> IUnitOfWork.OperationsFor(Type documentType)
{
    throw new NotSupportedException();
}
IEnumerable<object> IChangeSet.Updated => throw new NotSupportedException();
IEnumerable<object> IChangeSet.Inserted => throw new NotSupportedException();
IEnumerable<IDeletion> IChangeSet.Deleted => throw new NotSupportedException();
IEnumerable<IEvent> IChangeSet.GetEvents()
{
    throw new NotSupportedException();
}
IEnumerable<StreamAction> IChangeSet.GetStreams()
{
    throw new NotSupportedException();
}
IChangeSet IChangeSet.Clone()
{
    throw new NotSupportedException();
}
void ISessionWorkTracker.Reset()
{
    throw new NotSupportedException();
}
void ISessionWorkTracker.Add(IStorageOperation operation)
{
    // The one supported tracker operation: forward the storage operation to the batching queue.
    Queue.Post(operation);
}
void ISessionWorkTracker.Sort(StoreOptions options)
{
    throw new NotSupportedException();
}
List<StreamAction> ISessionWorkTracker.Streams => throw new NotSupportedException();
IReadOnlyList<IStorageOperation> ISessionWorkTracker.AllOperations => throw new NotSupportedException();
void ISessionWorkTracker.Eject<T>(T document)
{
    throw new NotSupportedException();
}
void ISessionWorkTracker.EjectAllOfType(Type type)
{
    throw new NotSupportedException();
}
bool ISessionWorkTracker.TryFindStream(string streamKey, out StreamAction stream)
{
    throw new NotSupportedException();
}
bool ISessionWorkTracker.TryFindStream(Guid streamId, out StreamAction stream)
{
    throw new NotSupportedException();
}
bool ISessionWorkTracker.HasOutstandingWork()
{
    throw new NotSupportedException();
}
/// <summary>
/// Asynchronously tears down the batch: detaches every page from the session,
/// then disposes and clears the session itself.
/// </summary>
public async ValueTask CloseSession()
{
    foreach (var page in _pages)
    {
        page.ReleaseSession();
    }

    // Mirror Dispose()'s null-conditional: tolerate a session that was already
    // released/disposed instead of throwing NullReferenceException on a repeat call.
    if (_session != null)
    {
        await _session.DisposeAsync().ConfigureAwait(false);
        _session = null;
    }
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/type/timeofday.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Type {
// Generated reflection bootstrap for google/type/timeofday.proto — do not hand-edit
// the descriptor payload; regenerate from the .proto instead.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public static partial class Timeofday {
  #region Descriptor
  // File descriptor describing the messages declared in timeofday.proto.
  public static pbr::FileDescriptor Descriptor {
    get { return descriptor; }
  }
  private static pbr::FileDescriptor descriptor;
  static Timeofday() {
    // The base64 blob is the serialized FileDescriptorProto emitted by protoc.
    byte[] descriptorData = global::System.Convert.FromBase64String(
        string.Concat(
          "Chtnb29nbGUvdHlwZS90aW1lb2ZkYXkucHJvdG8SC2dvb2dsZS50eXBlIksK",
          "CVRpbWVPZkRheRINCgVob3VycxgBIAEoBRIPCgdtaW51dGVzGAIgASgFEg8K",
          "B3NlY29uZHMYAyABKAUSDQoFbmFub3MYBCABKAVCJgoPY29tLmdvb2dsZS50",
          "eXBlQg5UaW1lT2ZEYXlQcm90b1ABoAEBYgZwcm90bzM="));
    descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
        new pbr::FileDescriptor[] { },
        new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
          new pbr::GeneratedCodeInfo(typeof(global::Google.Type.TimeOfDay), new[]{ "Hours", "Minutes", "Seconds", "Nanos" }, null, null, null)
        }));
  }
  #endregion
}
#region Messages
// Generated protobuf message for google.type.TimeOfDay (hours/minutes/seconds/nanos).
// Wire-format code is emitted by protoc — regenerate rather than hand-edit.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class TimeOfDay : pb::IMessage<TimeOfDay> {
  // Singleton parser used by the protobuf runtime to deserialize TimeOfDay instances.
  private static readonly pb::MessageParser<TimeOfDay> _parser = new pb::MessageParser<TimeOfDay>(() => new TimeOfDay());
  public static pb::MessageParser<TimeOfDay> Parser { get { return _parser; } }
  public static pbr::MessageDescriptor Descriptor {
    get { return global::Google.Type.Timeofday.Descriptor.MessageTypes[0]; }
  }
  pbr::MessageDescriptor pb::IMessage.Descriptor {
    get { return Descriptor; }
  }
  public TimeOfDay() {
    OnConstruction();
  }
  // Partial hook for user code to run at construction time.
  partial void OnConstruction();
  // Copy constructor: field-by-field copy of another instance.
  public TimeOfDay(TimeOfDay other) : this() {
    hours_ = other.hours_;
    minutes_ = other.minutes_;
    seconds_ = other.seconds_;
    nanos_ = other.nanos_;
  }
  public TimeOfDay Clone() {
    return new TimeOfDay(this);
  }
  public const int HoursFieldNumber = 1;
  private int hours_;
  public int Hours {
    get { return hours_; }
    set {
      hours_ = value;
    }
  }
  public const int MinutesFieldNumber = 2;
  private int minutes_;
  public int Minutes {
    get { return minutes_; }
    set {
      minutes_ = value;
    }
  }
  public const int SecondsFieldNumber = 3;
  private int seconds_;
  public int Seconds {
    get { return seconds_; }
    set {
      seconds_ = value;
    }
  }
  public const int NanosFieldNumber = 4;
  private int nanos_;
  public int Nanos {
    get { return nanos_; }
    set {
      nanos_ = value;
    }
  }
  public override bool Equals(object other) {
    return Equals(other as TimeOfDay);
  }
  // Value equality over all four fields.
  public bool Equals(TimeOfDay other) {
    if (ReferenceEquals(other, null)) {
      return false;
    }
    if (ReferenceEquals(other, this)) {
      return true;
    }
    if (Hours != other.Hours) return false;
    if (Minutes != other.Minutes) return false;
    if (Seconds != other.Seconds) return false;
    if (Nanos != other.Nanos) return false;
    return true;
  }
  // Fields left at their default (0) do not contribute to the hash, matching Equals.
  public override int GetHashCode() {
    int hash = 1;
    if (Hours != 0) hash ^= Hours.GetHashCode();
    if (Minutes != 0) hash ^= Minutes.GetHashCode();
    if (Seconds != 0) hash ^= Seconds.GetHashCode();
    if (Nanos != 0) hash ^= Nanos.GetHashCode();
    return hash;
  }
  public override string ToString() {
    return pb::JsonFormatter.Default.Format(this);
  }
  // Serializes set (non-zero) fields; 8/16/24/32 are the precomputed wire tags
  // for fields 1-4 as varints.
  public void WriteTo(pb::CodedOutputStream output) {
    if (Hours != 0) {
      output.WriteRawTag(8);
      output.WriteInt32(Hours);
    }
    if (Minutes != 0) {
      output.WriteRawTag(16);
      output.WriteInt32(Minutes);
    }
    if (Seconds != 0) {
      output.WriteRawTag(24);
      output.WriteInt32(Seconds);
    }
    if (Nanos != 0) {
      output.WriteRawTag(32);
      output.WriteInt32(Nanos);
    }
  }
  // Size in bytes of the serialized message (1 byte tag + varint per set field).
  public int CalculateSize() {
    int size = 0;
    if (Hours != 0) {
      size += 1 + pb::CodedOutputStream.ComputeInt32Size(Hours);
    }
    if (Minutes != 0) {
      size += 1 + pb::CodedOutputStream.ComputeInt32Size(Minutes);
    }
    if (Seconds != 0) {
      size += 1 + pb::CodedOutputStream.ComputeInt32Size(Seconds);
    }
    if (Nanos != 0) {
      size += 1 + pb::CodedOutputStream.ComputeInt32Size(Nanos);
    }
    return size;
  }
  // Merge semantics: only non-default (non-zero) fields of `other` overwrite this instance.
  public void MergeFrom(TimeOfDay other) {
    if (other == null) {
      return;
    }
    if (other.Hours != 0) {
      Hours = other.Hours;
    }
    if (other.Minutes != 0) {
      Minutes = other.Minutes;
    }
    if (other.Seconds != 0) {
      Seconds = other.Seconds;
    }
    if (other.Nanos != 0) {
      Nanos = other.Nanos;
    }
  }
  // Streaming parse: dispatch on each wire tag, skipping unknown fields.
  public void MergeFrom(pb::CodedInputStream input) {
    uint tag;
    while ((tag = input.ReadTag()) != 0) {
      switch(tag) {
        default:
          input.SkipLastField();
          break;
        case 8: {
          Hours = input.ReadInt32();
          break;
        }
        case 16: {
          Minutes = input.ReadInt32();
          break;
        }
        case 24: {
          Seconds = input.ReadInt32();
          break;
        }
        case 32: {
          Nanos = input.ReadInt32();
          break;
        }
      }
    }
  }
}
#endregion
}
#endregion Designer generated code
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using LightJson.Serialization;
namespace LightJson
{
/// <summary>
/// Represents a key-value pair collection of JsonValue objects.
/// </summary>
[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(JsonObjectDebugView))]
public sealed class JsonObject : IEnumerable<KeyValuePair<string, JsonValue>>, IEnumerable<JsonValue>
{
    // Backing store for the properties; assigned once in the constructor.
    private readonly IDictionary<string, JsonValue> properties;

    /// <summary>
    /// Gets the number of properties in this JsonObject.
    /// </summary>
    public int Count
    {
        get
        {
            return this.properties.Count;
        }
    }

    /// <summary>
    /// Gets or sets the property with the given key.
    /// </summary>
    /// <param name="key">The key of the property to get or set.</param>
    /// <remarks>
    /// The getter will return JsonValue.Null if the given key is not associated with any value.
    /// </remarks>
    public JsonValue this[string key]
    {
        get
        {
            if (this.properties.TryGetValue(key, out var value))
            {
                return value;
            }
            else
            {
                return JsonValue.Null;
            }
        }
        set
        {
            this.properties[key] = value;
        }
    }

    /// <summary>
    /// Initializes a new instance of JsonObject.
    /// </summary>
    public JsonObject()
    {
        this.properties = new Dictionary<string, JsonValue>();
    }

    /// <summary>
    /// Adds a key with a null value to this collection.
    /// </summary>
    /// <param name="key">The key of the property to be added.</param>
    /// <remarks>Returns this JsonObject.</remarks>
    public JsonObject Add(string key)
    {
        return Add(key, JsonValue.Null);
    }

    /// <summary>
    /// Adds a value associated with a key to this collection.
    /// </summary>
    /// <param name="key">The key of the property to be added.</param>
    /// <param name="value">The value of the property to be added.</param>
    /// <returns>Returns this JsonObject.</returns>
    public JsonObject Add(string key, JsonValue value)
    {
        this.properties.Add(key, value);
        return this;
    }

    /// <summary>
    /// Adds a value associated with a key to this collection only if the value is not null.
    /// </summary>
    /// <param name="key">The key of the property to be added.</param>
    /// <param name="value">The value of the property to be added.</param>
    /// <returns>Returns this JsonObject.</returns>
    public JsonObject AddIfNotNull(string key, JsonValue value)
    {
        if (!value.IsNull)
        {
            Add(key, value);
        }
        return this;
    }

    /// <summary>
    /// Removes a property with the given key.
    /// </summary>
    /// <param name="key">The key of the property to be removed.</param>
    /// <returns>
    /// Returns true if the given key is found and removed; otherwise, false.
    /// </returns>
    public bool Remove(string key)
    {
        return this.properties.Remove(key);
    }

    /// <summary>
    /// Clears the contents of this collection.
    /// </summary>
    /// <returns>Returns this JsonObject.</returns>
    public JsonObject Clear()
    {
        this.properties.Clear();
        return this;
    }

    /// <summary>
    /// Changes the key of one of the items in the collection.
    /// </summary>
    /// <remarks>
    /// This method has no effect if the <i>oldKey</i> does not exist.
    /// If the <i>newKey</i> already exists, the value will be overwritten.
    /// </remarks>
    /// <param name="oldKey">The name of the key to be changed.</param>
    /// <param name="newKey">The new name of the key.</param>
    /// <returns>Returns this JsonObject.</returns>
    public JsonObject Rename(string oldKey, string newKey)
    {
        if (this.properties.TryGetValue(oldKey, out var value))
        {
            // Remove first so renaming a key to itself does not drop the value.
            Remove(oldKey);
            this[newKey] = value;
        }
        return this;
    }

    /// <summary>
    /// Determines whether this collection contains an item associated with the given key.
    /// </summary>
    /// <param name="key">The key to locate in this collection.</param>
    /// <returns>Returns true if the key is found; otherwise, false.</returns>
    public bool ContainsKey(string key)
    {
        return this.properties.ContainsKey(key);
    }

    /// <summary>
    /// Determines whether this collection contains an item associated with the given key.
    /// </summary>
    /// <param name="key">The key to locate in this collection.</param>
    /// <param name="value">
    /// When this method returns, this value gets assigned the JsonValue associated with
    /// the key, if the key is found; otherwise, JsonValue.Null is assigned.
    /// </param>
    /// <returns>Returns true if the key is found; otherwise, false.</returns>
    public bool ContainsKey(string key, out JsonValue value)
    {
        return this.properties.TryGetValue(key, out value);
    }

    /// <summary>
    /// Determines whether this collection contains the given JsonValue.
    /// </summary>
    /// <param name="value">The value to locate in this collection.</param>
    /// <returns>Returns true if the value is found; otherwise, false.</returns>
    public bool Contains(JsonValue value)
    {
        return this.properties.Values.Contains(value);
    }

    /// <summary>
    /// Returns an enumerator that iterates through this collection.
    /// </summary>
    public IEnumerator<KeyValuePair<string, JsonValue>> GetEnumerator()
    {
        return this.properties.GetEnumerator();
    }

    /// <summary>
    /// Returns an enumerator that iterates through this collection.
    /// </summary>
    IEnumerator<JsonValue> IEnumerable<JsonValue>.GetEnumerator()
    {
        return this.properties.Values.GetEnumerator();
    }

    /// <summary>
    /// Returns an enumerator that iterates through this collection.
    /// </summary>
    System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
    {
        return this.GetEnumerator();
    }

    /// <summary>
    /// Returns a JSON string representing the state of the object.
    /// </summary>
    /// <remarks>
    /// The resulting string is safe to be inserted as is into dynamically
    /// generated JavaScript or JSON code.
    /// </remarks>
    public override string ToString()
    {
        return ToString(false);
    }

    /// <summary>
    /// Returns a JSON string representing the state of the object.
    /// </summary>
    /// <remarks>
    /// The resulting string is safe to be inserted as is into dynamically
    /// generated JavaScript or JSON code.
    /// </remarks>
    /// <param name="pretty">
    /// Indicates whether the resulting string should be formatted for human-readability.
    /// </param>
    public string ToString(bool pretty)
    {
        return JsonWriter.Serialize(this, pretty);
    }

    // Debugger proxy presenting the object as a flat list of key/value pairs.
    private class JsonObjectDebugView
    {
        private readonly JsonObject jsonObject;

        [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
        public KeyValuePair[] Keys
        {
            get
            {
                var keys = new KeyValuePair[jsonObject.Count];
                var i = 0;
                foreach (var property in jsonObject)
                {
                    keys[i] = new KeyValuePair(property.Key, property.Value);
                    i += 1;
                }
                return keys;
            }
        }

        public JsonObjectDebugView(JsonObject jsonObject)
        {
            this.jsonObject = jsonObject;
        }

        [DebuggerDisplay("{value.ToString(),nq}", Name = "{key}", Type = "JsonValue({Type})")]
        public class KeyValuePair
        {
            [DebuggerBrowsable(DebuggerBrowsableState.Never)]
            private readonly string key;

            [DebuggerBrowsable(DebuggerBrowsableState.Never)]
            private readonly JsonValue value;

            [DebuggerBrowsable(DebuggerBrowsableState.Never)]
            private JsonValueType Type
            {
                get
                {
                    return value.Type;
                }
            }

            // Unwraps the value so the debugger shows nested objects/arrays natively.
            [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
            public object View
            {
                get
                {
                    if (this.value.IsJsonObject)
                    {
                        return (JsonObject)this.value;
                    }
                    else if (this.value.IsJsonArray)
                    {
                        return (JsonArray)this.value;
                    }
                    else
                    {
                        return this.value;
                    }
                }
            }

            public KeyValuePair(string key, JsonValue value)
            {
                this.key = key;
                this.value = value;
            }
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#region Simple Binary format
/*
.-------------.----------------.
struct | base fields | derived fields |
'-------------'----------------'
.----------.----------. .----------.
fields | field | field |...| field |
'----------'----------' '----------'
.----------.
field | value |
'----------'
.---.---.---.---.---.---.---.---.
value bool | | | | | | | | v |
'---'---'---'---'---'---'---'---'
0
all integral types are written binary, native size, uncompressed, little endian
float, double little endian
.-------.------------.
string, wstring | count | characters |
'-------'------------'
count uint32 count of 1-byte or 2-byte characters (variable encoded in v2)
characters 1-byte or 2-byte characters
.-------. .-------.
blob, list, set, | count | | items |...
vector, nullable '-------' '-------'
count uint32 count of items (variable encoded in v2)
items each item encoded according to its type
.-------. .-----.--------.
map | count | | key | mapped |...
'-------' '-----'--------'
count uint32 count of {key,mapped} pairs (variable encoded in v2)
key, mapped each item encoded according to its type
*/
#endregion
namespace Bond.Protocols
{
using System;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text;
using Bond.IO;
// Untagged Simple Binary protocol writer: fields are written in schema order with no
// tags; lengths/counts are uint32 in v1 and varint-encoded in v2 (see WriteLength).
[Reader(typeof(SimpleBinaryReader<>))]
public struct SimpleBinaryWriter<O> : IProtocolWriter
    where O : IOutputStream
{
    const ushort Magic = (ushort)ProtocolType.SIMPLE_PROTOCOL;
    readonly O output;
    readonly ushort version;

    /// <summary> Construct a new SimpleBinaryWriter
    /// </summary>
    /// <param name="output">Serialize payload output</param>
    /// <param name="version">Protocol version (v2 variable-encodes lengths and counts)</param>
    public SimpleBinaryWriter(O output, ushort version = 1)
    {
        this.output = output;
        this.version = version;
    }

    /// <summary>
    /// Write protocol magic number and version
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteVersion()
    {
        output.WriteUInt16(Magic);
        output.WriteUInt16(version);
    }

    #region Complex Types
    #region Unused in tagged protocol
    // Simple Binary is untagged, so field/struct delimiters carry no payload: no-ops.
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteFieldBegin(BondDataType type, ushort id, Metadata metadata) { }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteFieldEnd() { }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteStructBegin(Metadata metadata) { }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteBaseBegin(Metadata metadata) { }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteStructEnd() { }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteBaseEnd() { }
    #endregion

    /// <summary>
    /// Indicate that field was omitted because it was set to its default value
    /// </summary>
    /// <param name="dataType">Type of the field</param>
    /// <param name="id">Identifier of the field</param>
    /// <param name="metadata">Metadata of the field</param>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteFieldOmitted(BondDataType dataType, ushort id, Metadata metadata)
    {
        // Simple doesn't support omitting fields so instead we write the default value
        Audit.ArgRule(!metadata.default_value.nothing, "Field set to nothing can't be serialized.");
        switch (dataType)
        {
            case BondDataType.BT_BOOL:
                WriteBool(0 != metadata.default_value.uint_value);
                break;
            case BondDataType.BT_UINT8:
                WriteUInt8((byte)metadata.default_value.uint_value);
                break;
            case BondDataType.BT_UINT16:
                WriteUInt16((UInt16)metadata.default_value.uint_value);
                break;
            case BondDataType.BT_UINT32:
                WriteUInt32((UInt32)metadata.default_value.uint_value);
                break;
            case BondDataType.BT_UINT64:
                WriteUInt64(metadata.default_value.uint_value);
                break;
            case BondDataType.BT_FLOAT:
                WriteFloat((float)metadata.default_value.double_value);
                break;
            case BondDataType.BT_DOUBLE:
                WriteDouble(metadata.default_value.double_value);
                break;
            case BondDataType.BT_STRING:
                WriteString(metadata.default_value.string_value);
                break;
            case BondDataType.BT_LIST:
            case BondDataType.BT_SET:
            case BondDataType.BT_MAP:
                // Default container value is simply an empty container (count of 0).
                WriteContainerBegin(0, dataType);
                break;
            case BondDataType.BT_INT8:
                WriteInt8((sbyte)metadata.default_value.int_value);
                break;
            case BondDataType.BT_INT16:
                WriteInt16((Int16)metadata.default_value.int_value);
                break;
            case BondDataType.BT_INT32:
                WriteInt32((Int32)metadata.default_value.int_value);
                break;
            case BondDataType.BT_INT64:
                WriteInt64(metadata.default_value.int_value);
                break;
            case BondDataType.BT_WSTRING:
                WriteWString(metadata.default_value.wstring_value);
                break;
            default:
                Throw.InvalidBondDataType(dataType);
                break;
        }
    }

    /// <summary>
    /// Start writing a list or set container
    /// </summary>
    /// <param name="count">Number of elements in the container</param>
    /// <param name="elementType">Type of the elements</param>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteContainerBegin(int count, BondDataType elementType)
    {
        WriteLength(count);
    }

    /// <summary>
    /// Start writing a map container
    /// </summary>
    /// <param name="count">Number of elements in the container</param>
    /// <param name="keyType">Type of the keys</param>
    /// <param name="valueType">Type of the values</param>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteContainerBegin(int count, BondDataType keyType, BondDataType valueType)
    {
        WriteLength(count);
    }

    /// <summary>
    /// End writing a container
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteContainerEnd() { }
    #endregion

    #region Primitive types
    /// <summary>
    /// Write an UInt8
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteUInt8(Byte value)
    {
        output.WriteUInt8(value);
    }

    /// <summary>
    /// Write an UInt16
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteUInt16(UInt16 value)
    {
        output.WriteUInt16(value);
    }

    /// <summary>
    /// Write an UInt32
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteUInt32(UInt32 value)
    {
        output.WriteUInt32(value);
    }

    /// <summary>
    /// Write an UInt64
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteUInt64(UInt64 value)
    {
        output.WriteUInt64(value);
    }

    /// <summary>
    /// Write array of bytes verbatim
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteBytes(ArraySegment<byte> data)
    {
        output.WriteBytes(data);
    }

    /// <summary>
    /// Write an Int8 (reinterpreted as an unsigned byte on the wire)
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteInt8(SByte value)
    {
        output.WriteUInt8(unchecked((Byte)value));
    }

    /// <summary>
    /// Write an Int16
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteInt16(Int16 value)
    {
        output.WriteUInt16(unchecked((UInt16) value));
    }

    /// <summary>
    /// Write an Int32
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteInt32(Int32 value)
    {
        output.WriteUInt32(unchecked((UInt32)value));
    }

    /// <summary>
    /// Write an Int64
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteInt64(Int64 value)
    {
        output.WriteUInt64(unchecked((UInt64)value));
    }

    /// <summary>
    /// Write a float
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteFloat(float value)
    {
        output.WriteFloat(value);
    }

    /// <summary>
    /// Write a double
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteDouble(double value)
    {
        output.WriteDouble(value);
    }

    /// <summary>
    /// Write a bool as a single byte (1 for true, 0 for false)
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteBool(bool value)
    {
        output.WriteUInt8((byte)(value ? 1 : 0));
    }

    /// <summary>
    /// Write a UTF-8 string (length prefix is the UTF-8 byte count)
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteString(string value)
    {
        if (value.Length == 0)
        {
            WriteLength(0);
        }
        else
        {
            var size = Encoding.UTF8.GetByteCount(value);
            WriteLength(size);
            output.WriteString(Encoding.UTF8, value, size);
        }
    }

    /// <summary>
    /// Write a UTF-16 string (length prefix is the number of 2-byte characters)
    /// </summary>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void WriteWString(string value)
    {
        if (value.Length == 0)
        {
            WriteLength(0);
        }
        else
        {
            WriteLength(value.Length);
            // Length << 1 converts the character count to the UTF-16 byte count.
            output.WriteString(Encoding.Unicode, value, value.Length << 1);
        }
    }
    #endregion

    // Lengths/counts are varint-encoded in protocol v2, fixed uint32 in v1.
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    void WriteLength(int value)
    {
        if (version == 2)
            output.WriteVarUInt32((uint)value);
        else
            output.WriteUInt32((uint)value);
    }
}
// Untagged Simple Binary protocol reader; mirrors SimpleBinaryWriter (same version
// semantics for lengths/counts: uint32 in v1, varint in v2 — see ReadLength).
public struct SimpleBinaryReader<I> : IUntaggedProtocolReader, ICloneable<SimpleBinaryReader<I>>
    where I : IInputStream, ICloneable<I>
{
    readonly I input;
    readonly ushort version;

    public SimpleBinaryReader(I reader, ushort version = 1)
    {
        input = reader;
        this.version = version;
    }

    // Clone this reader together with a clone of its input stream position.
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public SimpleBinaryReader<I> Clone()
    {
        return new SimpleBinaryReader<I>(input.Clone(), version);
    }

    #region Complex types
    // Simple Binary never omits fields, so this is always false.
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public bool ReadFieldOmitted()
    {
        return false;
    }

    /// <summary>
    /// Start reading a list or set container
    /// </summary>
    /// <returns>Number of elements</returns>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public int ReadContainerBegin()
    {
        return ReadLength();
    }

    /// <summary>
    /// End reading a container
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void ReadContainerEnd()
    {}
    #endregion

    #region Primitive types
    /// <summary>
    /// Read an UInt8
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public byte ReadUInt8()
    {
        return input.ReadUInt8();
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipUInt8()
    {
        input.SkipBytes(1);
    }

    /// <summary>
    /// Read an UInt16
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public ushort ReadUInt16()
    {
        return input.ReadUInt16();
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipUInt16()
    {
        input.SkipBytes(2);
    }

    /// <summary>
    /// Read an UInt32
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public uint ReadUInt32()
    {
        return input.ReadUInt32();
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipUInt32()
    {
        input.SkipBytes(4);
    }

    /// <summary>
    /// Read an UInt64
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public UInt64 ReadUInt64()
    {
        return input.ReadUInt64();
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipUInt64()
    {
        input.SkipBytes(8);
    }

    /// <summary>
    /// Read an Int8 (stored on the wire as an unsigned byte)
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public sbyte ReadInt8()
    {
        return unchecked((sbyte)input.ReadUInt8());
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipInt8()
    {
        input.SkipBytes(1);
    }

    /// <summary>
    /// Read an Int16
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public short ReadInt16()
    {
        return unchecked((short)input.ReadUInt16());
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipInt16()
    {
        input.SkipBytes(2);
    }

    /// <summary>
    /// Read an Int32
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public int ReadInt32()
    {
        return unchecked((int)input.ReadUInt32());
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipInt32()
    {
        input.SkipBytes(4);
    }

    /// <summary>
    /// Read an Int64
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public Int64 ReadInt64()
    {
        return unchecked((Int64)input.ReadUInt64());
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipInt64()
    {
        input.SkipBytes(8);
    }

    /// <summary>
    /// Read a bool (any non-zero byte is true)
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public bool ReadBool()
    {
        return input.ReadUInt8() != 0;
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipBool()
    {
        input.SkipBytes(1);
    }

    /// <summary>
    /// Read a float
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public float ReadFloat()
    {
        return input.ReadFloat();
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipFloat()
    {
        input.SkipBytes(4);
    }

    /// <summary>
    /// Read a double
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public double ReadDouble()
    {
        return input.ReadDouble();
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipDouble()
    {
        input.SkipBytes(8);
    }

    /// <summary>
    /// Read a UTF-8 string (length prefix is the UTF-8 byte count)
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public String ReadString()
    {
        var length = ReadLength();
        return length == 0 ? string.Empty : input.ReadString(Encoding.UTF8, length);
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipString()
    {
        input.SkipBytes(ReadLength());
    }

    /// <summary>
    /// Read a UTF-16 string (length prefix is the 2-byte character count,
    /// so the byte count is length &lt;&lt; 1)
    /// </summary>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public string ReadWString()
    {
        var length = ReadLength();
        return length == 0 ? string.Empty : input.ReadString(Encoding.Unicode, length << 1);
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipWString()
    {
        input.SkipBytes(ReadLength() << 1);
    }

    /// <summary>
    /// Read an array of bytes verbatim
    /// </summary>
    /// <param name="count">Number of bytes to read</param>
    /// <exception cref="EndOfStreamException"/>
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public ArraySegment<byte> ReadBytes(int count)
    {
        return input.ReadBytes(count);
    }
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    public void SkipBytes(int count)
    {
        input.SkipBytes(count);
    }
    #endregion

    // Lengths/counts are varint-encoded in protocol v2, fixed uint32 in v1
    // (must match SimpleBinaryWriter.WriteLength).
#if NET45
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
    int ReadLength()
    {
        return (int)((version == 2) ? input.ReadVarUInt32() : input.ReadUInt32());
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.CSharp.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.SymbolId
{
public abstract class SymbolKeyTestBase : CSharpTestBase
{
// Options controlling how two symbol keys are compared/resolved in these tests.
[Flags]
internal enum SymbolKeyComparison
{
    // Exact comparison: case-sensitive, assembly identities included.
    None = 0x0,
    // Compare symbol keys case-insensitively.
    IgnoreCase = 0x1,
    // Ignore assembly identity when comparing and resolving keys.
    IgnoreAssemblyIds = 0x2
}
// Bit flags selecting which kinds of source symbols GetSourceSymbols returns.
// NOTE(review): in a [Flags] enum a zero value conventionally means "none", yet here
// 0 is named All (and bit 0x1 is unused) — callers appear to rely on these literal
// values, so confirm before changing them.
[Flags]
internal enum SymbolCategory
{
    All = 0,
    DeclaredNamespace = 2,
    DeclaredType = 4,
    NonTypeMember = 8,
    Parameter = 16,
}
#region "Verification"
/// <summary>
/// Verifies that two symbol sequences correspond pairwise (after sorting by name):
/// same count, and each pair round-trips through symbol keys.
/// </summary>
internal static void ResolveAndVerifySymbolList(IEnumerable<ISymbol> newSymbols, IEnumerable<ISymbol> originalSymbols, CSharpCompilation originalComp)
{
    // Sort both sides by name so the sequences can be compared element-wise.
    var sortedNew = newSymbols.OrderBy(s => s.Name).ToList();
    var sortedOriginal = originalSymbols.OrderBy(s => s.Name).ToList();

    Assert.Equal(sortedOriginal.Count, sortedNew.Count);

    for (var index = 0; index < sortedNew.Count; index++)
    {
        ResolveAndVerifySymbol(sortedNew[index], sortedOriginal[index], originalComp);
    }
}
/// <summary>
/// Verifies that the type bound to <paramref name="node"/> matches <paramref name="sourceSymbol"/>.
/// Prefers the declared type; falls back to the converted type when none is declared.
/// </summary>
internal static void ResolveAndVerifyTypeSymbol(ExpressionSyntax node, ITypeSymbol sourceSymbol, SemanticModel model, CSharpCompilation sourceComp)
{
    var info = model.GetTypeInfo(node);
    var nodeType = info.Type ?? info.ConvertedType;
    ResolveAndVerifySymbol(nodeType, sourceSymbol, sourceComp);
}
/// <summary>
/// Verifies that the symbol bound to <paramref name="node"/> matches
/// <paramref name="sourceSymbol"/> under the given comparison options.
/// </summary>
internal static void ResolveAndVerifySymbol(ExpressionSyntax node, ISymbol sourceSymbol, SemanticModel model, CSharpCompilation sourceComp, SymbolKeyComparison comparison = SymbolKeyComparison.None)
{
    var boundSymbol = model.GetSymbolInfo(node).Symbol;
    ResolveAndVerifySymbol(boundSymbol, sourceSymbol, sourceComp, comparison);
}
/// <summary>
/// Verifies that two symbols have equivalent symbol keys, and that resolving
/// the first symbol's key against <paramref name="compilation2"/> yields the second.
/// </summary>
internal static void ResolveAndVerifySymbol(ISymbol symbol1, ISymbol symbol2, Compilation compilation2, SymbolKeyComparison comparison = SymbolKeyComparison.None)
{
    // Both symbols must produce equivalent keys...
    AssertSymbolKeysEqual(symbol1, symbol2, comparison);

    // ...and resolution against the other compilation must produce the exact symbol.
    var resolved = ResolveSymbol(symbol1, compilation2, comparison);
    Assert.NotNull(resolved);
    Assert.Equal(symbol2, resolved);
    Assert.Equal(symbol2.GetHashCode(), resolved.GetHashCode());
}
/// <summary>
/// Creates a symbol key for <paramref name="originalSymbol"/>, checks that the key
/// survives a serialization round trip, then resolves it in <paramref name="targetCompilation"/>.
/// </summary>
internal static ISymbol ResolveSymbol(ISymbol originalSymbol, Compilation targetCompilation, SymbolKeyComparison comparison)
{
    var symbolKey = SymbolKey.Create(originalSymbol, CancellationToken.None);

    // Round-trip the key through its string form to verify that serialization works.
    var roundTripped = new SymbolKey(symbolKey.ToString());
    var comparer = SymbolKey.GetComparer(ignoreCase: false, ignoreAssemblyKeys: false);
    Assert.True(comparer.Equals(symbolKey, roundTripped));

    var ignoreAssemblyIds = (comparison & SymbolKeyComparison.IgnoreAssemblyIds) == SymbolKeyComparison.IgnoreAssemblyIds;
    return symbolKey.Resolve(targetCompilation, ignoreAssemblyIds).Symbol;
}
/// <summary>
/// Asserts that the symbol keys of two symbols compare equal (or unequal when
/// <paramref name="expectEqual"/> is false) under the given comparison options.
/// </summary>
internal static void AssertSymbolKeysEqual(ISymbol symbol1, ISymbol symbol2, SymbolKeyComparison comparison, bool expectEqual = true)
{
    var key1 = SymbolKey.Create(symbol1, CancellationToken.None);
    var key2 = SymbolKey.Create(symbol2, CancellationToken.None);

    // Default comparison is case-sensitive and honors assembly identity.
    var ignoreCase = comparison.HasFlag(SymbolKeyComparison.IgnoreCase);
    var ignoreAssemblyIds = comparison.HasFlag(SymbolKeyComparison.IgnoreAssemblyIds);

    var description = string.Concat(
        ignoreCase ? "SymbolID IgnoreCase" : "SymbolID",
        ignoreAssemblyIds ? " IgnoreAssemblyIds " : " ",
        "Compare");

    var areEqual = CodeAnalysis.SymbolKey.GetComparer(ignoreCase, ignoreAssemblyIds).Equals(key2, key1);

    if (expectEqual)
    {
        Assert.True(areEqual, description);
    }
    else
    {
        Assert.False(areEqual, description);
    }
}
#endregion
#region "Utilities"
/// <summary>
/// Collects the block bodies of every syntax declaration of <paramref name="symbol"/>
/// that has a non-empty statement list. Declarations without a block body (e.g. an
/// accessor or method with no <see cref="BlockSyntax"/>) contribute nothing.
/// </summary>
internal static List<BlockSyntax> GetBlockSyntaxList(MethodSymbol symbol)
{
    var list = new List<BlockSyntax>();
    foreach (var node in symbol.DeclaringSyntaxReferences.Select(d => d.GetSyntax()))
    {
        BlockSyntax body = null;
        if (node is BaseMethodDeclarationSyntax)
        {
            body = (node as BaseMethodDeclarationSyntax).Body;
        }
        else if (node is AccessorDeclarationSyntax)
        {
            body = (node as AccessorDeclarationSyntax).Body;
        }

        // BUG FIX: this previously read 'body != null || body.Statements.Any()', which
        // dereferences 'body' (NullReferenceException) whenever the declaration has no
        // block body. '&&' matches the equivalent check in LocalSymbolDumper.GetLocalSymbols.
        if (body != null && body.Statements.Any())
        {
            list.Add(body);
        }
    }
    return list;
}
/// <summary>
/// Returns the explicitly declared source symbols of <paramref name="compilation"/>
/// whose <see cref="SymbolKind"/> falls into one of the requested
/// <paramref name="category"/> flags. Implicitly declared symbols are excluded.
/// </summary>
internal static IEnumerable<ISymbol> GetSourceSymbols(Microsoft.CodeAnalysis.CSharp.CSharpCompilation compilation, SymbolCategory category)
{
    // NYI for local symbols
    var list = GetSourceSymbols(compilation, includeLocal: false);

    // Translate the category flags into the set of SymbolKinds they cover.
    // A HashSet gives O(1) membership tests in the filter below instead of a
    // linear scan per symbol.
    var kinds = new HashSet<SymbolKind>();
    if ((category & SymbolCategory.DeclaredNamespace) != 0)
    {
        kinds.Add(SymbolKind.Namespace);
    }
    if ((category & SymbolCategory.DeclaredType) != 0)
    {
        kinds.Add(SymbolKind.NamedType);
        kinds.Add(SymbolKind.TypeParameter);
    }
    if ((category & SymbolCategory.NonTypeMember) != 0)
    {
        kinds.Add(SymbolKind.Field);
        kinds.Add(SymbolKind.Event);
        kinds.Add(SymbolKind.Property);
        kinds.Add(SymbolKind.Method);
    }
    if ((category & SymbolCategory.Parameter) != 0)
    {
        kinds.Add(SymbolKind.Parameter);
    }

    return list.Where(s => !s.IsImplicitlyDeclared && kinds.Contains(s.Kind));
}
/// <summary>
/// Collects all source symbols of the compilation: members of the global namespace
/// (recursively), using-alias symbols, the assembly symbol, and its modules. When
/// <paramref name="includeLocal"/> is true, locals/labels/anonymous symbols found in
/// method bodies and field initializers are included as well.
/// </summary>
internal static IList<ISymbol> GetSourceSymbols(CSharpCompilation compilation, bool includeLocal)
{
    var result = new List<ISymbol>();

    // A null dumper means "skip local symbols".
    LocalSymbolDumper localDumper = null;
    if (includeLocal)
    {
        localDumper = new LocalSymbolDumper(compilation);
    }

    GetSourceMemberSymbols(compilation.SourceModule.GlobalNamespace, result, localDumper);

    // ??
    // if (includeLocal)
    GetSourceAliasSymbols(compilation, result);

    result.Add(compilation.Assembly);
    result.AddRange(compilation.Assembly.Modules);
    return result;
}
#endregion
#region "Private Helpers"
/// <summary>
/// Recursively adds every member of <paramref name="symbol"/> to
/// <paramref name="list"/>. Methods also contribute their parameters; when
/// <paramref name="localDumper"/> is non-null, method bodies and field initializers
/// contribute their local-scope symbols as well.
/// </summary>
private static void GetSourceMemberSymbols(NamespaceOrTypeSymbol symbol, List<ISymbol> list, LocalSymbolDumper localDumper)
{
    foreach (var member in symbol.GetMembers())
    {
        list.Add(member);

        var kind = member.Kind;
        if (kind == SymbolKind.NamedType || kind == SymbolKind.Namespace)
        {
            // Recurse into nested types and namespaces.
            GetSourceMemberSymbols((NamespaceOrTypeSymbol)member, list, localDumper);
        }
        else if (kind == SymbolKind.Method)
        {
            var method = (MethodSymbol)member;
            foreach (var parameter in method.Parameters)
            {
                list.Add(parameter);
            }
            if (localDumper != null)
            {
                localDumper.GetLocalSymbols(method, list);
            }
        }
        else if (kind == SymbolKind.Field)
        {
            if (localDumper != null)
            {
                localDumper.GetLocalSymbols((FieldSymbol)member, list);
            }
        }
    }
}
/// <summary>
/// Adds the alias symbol declared by each aliased using-directive
/// ("using X = ...") across all syntax trees of <paramref name="comp"/>, skipping
/// duplicates already present in <paramref name="list"/>.
/// </summary>
private static void GetSourceAliasSymbols(CSharpCompilation comp, List<ISymbol> list)
{
    foreach (var tree in comp.SyntaxTrees)
    {
        var model = comp.GetSemanticModel(tree);

        foreach (var usingDirective in tree.GetRoot().DescendantNodes().OfType<UsingDirectiveSyntax>())
        {
            // Only using-directives with an alias declare a symbol.
            if (usingDirective.Alias == null)
            {
                continue;
            }

            // var sym = model.GetSymbolInfo(u.Alias.Identifier).Symbol;
            var aliasSymbol = model.GetDeclaredSymbol(usingDirective);
            if (aliasSymbol != null && !list.Contains(aliasSymbol))
            {
                list.Add(aliasSymbol);
            }
        }
    }
}
#endregion
/// <summary>
/// Collects symbols that only exist inside bodies and initializers: locals declared by
/// data-flow analysis (plus their array/pointer types), labels, and anonymous
/// functions/types (with the anonymous type's members).
/// </summary>
private class LocalSymbolDumper
{
    // Used to obtain semantic models for the declaring syntax trees.
    // IMPROVEMENT: only assigned in the constructor, so marked readonly.
    private readonly CSharpCompilation _compilation;

    public LocalSymbolDumper(CSharpCompilation compilation)
    {
        _compilation = compilation;
    }

    /// <summary>
    /// Collects local-scope symbols found in a field's initializer expression, if any.
    /// </summary>
    public void GetLocalSymbols(FieldSymbol symbol, List<ISymbol> list)
    {
        foreach (var node in symbol.DeclaringSyntaxReferences.Select(d => d.GetSyntax()))
        {
            var declarator = node as VariableDeclaratorSyntax;
            if (declarator != null && declarator.Initializer != null)
            {
                var model = _compilation.GetSemanticModel(declarator.SyntaxTree);
                // Expression
                var df = model.AnalyzeDataFlow(declarator.Initializer.Value);
                GetLocalAndType(df, list);
                GetAnonymousExprSymbols(declarator.Initializer.Value, model, list);
            }
        }
    }

    /// <summary>
    /// Collects local-scope symbols in a method's block body (locals, anonymous
    /// functions/types, labels) and, for constructors, in the arguments of the
    /// this/base initializer.
    /// </summary>
    public void GetLocalSymbols(MethodSymbol symbol, List<ISymbol> list)
    {
        foreach (var node in symbol.DeclaringSyntaxReferences.Select(d => d.GetSyntax()))
        {
            BlockSyntax body = null;
            if (node is BaseMethodDeclarationSyntax)
            {
                body = (node as BaseMethodDeclarationSyntax).Body;
            }
            else if (node is AccessorDeclarationSyntax)
            {
                body = (node as AccessorDeclarationSyntax).Body;
            }

            var model = _compilation.GetSemanticModel(node.SyntaxTree);
            if (body != null && body.Statements.Any())
            {
                var df = model.AnalyzeDataFlow(body);
                GetLocalAndType(df, list);
                GetAnonymousTypeOrFuncSymbols(body, model, list);
                GetLabelSymbols(body, model, list);
            }

            // C# specific (this|base access)
            var ctor = node as ConstructorDeclarationSyntax;
            if (ctor != null && ctor.Initializer != null)
            {
                foreach (var a in ctor.Initializer.ArgumentList.Arguments)
                {
                    var df = model.AnalyzeDataFlow(a.Expression);
                    // VisitLocals(arg, df);
                    list.AddRange(df.VariablesDeclared.OfType<Symbol>());
                    GetAnonymousExprSymbols(a.Expression, model, list);
                }
            }
        }
    }

    /// <summary>
    /// Adds each variable declared by the data-flow region; for locals of array or
    /// pointer type, the type symbol itself is added too.
    /// </summary>
    private void GetLocalAndType(DataFlowAnalysis df, List<ISymbol> list)
    {
        foreach (var v in df.VariablesDeclared)
        {
            list.Add((Symbol)v);

            var local = v as LocalSymbol;
            if (local != null && (local.Type.Kind == SymbolKind.ArrayType || local.Type.Kind == SymbolKind.PointerType))
            {
                list.Add(local.Type);
            }
        }
    }

    /// <summary>
    /// Adds the declared symbols of labeled statements and switch labels in the body.
    /// </summary>
    private void GetLabelSymbols(BlockSyntax body, SemanticModel model, List<ISymbol> list)
    {
        var labels = body.DescendantNodes().OfType<LabeledStatementSyntax>();
        foreach (var n in labels)
        {
            // Label: -> 'Label' is token
            var sym = model.GetDeclaredSymbol(n);
            list.Add(sym);
        }

        var swlabels = body.DescendantNodes().OfType<SwitchLabelSyntax>();
        foreach (var n in swlabels)
        {
            // label value has NO symbol, Type is expr's type
            // e.g. case "A": -> string type
            // var info1 = model.GetTypeInfo(n.Value);
            // var info2 = model.GetSymbolInfo(n.Value);
            var sym = model.GetDeclaredSymbol(n);
            list.Add(sym);
        }
    }

    /// <summary>
    /// Finds every lambda, anonymous method, and anonymous object creation in the body
    /// and collects their symbols.
    /// </summary>
    private void GetAnonymousTypeOrFuncSymbols(BlockSyntax body, SemanticModel model, List<ISymbol> list)
    {
        IEnumerable<ExpressionSyntax> exprs = body.DescendantNodes().OfType<SimpleLambdaExpressionSyntax>();
        IEnumerable<ExpressionSyntax> tmp = body.DescendantNodes().OfType<ParenthesizedLambdaExpressionSyntax>();
        exprs = exprs.Concat(tmp);
        tmp = body.DescendantNodes().OfType<AnonymousMethodExpressionSyntax>();
        exprs = exprs.Concat(tmp);
        tmp = body.DescendantNodes().OfType<AnonymousObjectCreationExpressionSyntax>();
        exprs = exprs.Concat(tmp);

        foreach (var expr in exprs)
        {
            GetAnonymousExprSymbols(expr, model, list);
        }
    }

    /// <summary>
    /// For an anonymous-function expression, adds its method symbol; for an anonymous
    /// object creation, adds the anonymous type and all of its members. Other
    /// expression kinds are ignored.
    /// </summary>
    private void GetAnonymousExprSymbols(ExpressionSyntax expr, SemanticModel model, List<ISymbol> list)
    {
        var kind = expr.Kind();
        if (kind != SyntaxKind.AnonymousObjectCreationExpression &&
            kind != SyntaxKind.AnonymousMethodExpression &&
            kind != SyntaxKind.ParenthesizedLambdaExpression &&
            kind != SyntaxKind.SimpleLambdaExpression)
        {
            return;
        }

        var tinfo = model.GetTypeInfo(expr);
        var conv = model.GetConversion(expr);
        if (conv.IsAnonymousFunction)
        {
            // Lambda has no Type unless in part of case expr (C# specific)
            // var f = (Func<int>)(() => { return 1; }); Type is delegate
            // method symbol
            var sinfo = model.GetSymbolInfo(expr);
            list.Add((Symbol)sinfo.Symbol);
        }
        else if (tinfo.Type != null && tinfo.Type.TypeKind != TypeKind.Delegate)
        {
            // bug#12625
            // GetSymbolInfo -> .ctor (part of members)
            list.Add((Symbol)tinfo.Type); // NamedType with empty name
            foreach (var m in tinfo.Type.GetMembers())
            {
                list.Add((Symbol)m);
            }
        }
    }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Identity.Client;
using Microsoft.Identity.Test.Common.Core.Mocks;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Microsoft.Identity.Test.Unit
{
/// <summary>
/// Tests for <see cref="WwwAuthenticateParameters"/>: parsing the WWW-Authenticate
/// challenge header (resource/authority/claims/error parameters) and deriving values
/// such as the tenant id from a resource's 401 response.
/// </summary>
[TestClass]
public class WwwAuthenticateParametersTests
{
    private const string WwwAuthenticateHeaderName = "WWW-Authenticate";
    private const string ClientIdKey = "client_id";
    private const string ResourceIdKey = "resource_id";
    private const string ResourceKey = "resource";
    private const string GraphGuid = "00000003-0000-0000-c000-000000000000";
    private const string AuthorizationUriKey = "authorization_uri";
    private const string AuthorizationKey = "authorization";
    private const string AuthorityKey = "authority";
    private const string AuthorizationValue = "https://login.microsoftonline.com/common/oauth2/authorize";
    private const string Realm = "realm";
    // Base64-encoded form of DecodedClaims.
    private const string EncodedClaims = "eyJpZF90b2tlbiI6eyJhdXRoX3RpbWUiOnsiZXNzZW50aWFsIjp0cnVlfSwiYWNyIjp7InZhbHVlcyI6WyJ1cm46bWFjZTppbmNvbW1vbjppYXA6c2lsdmVyIl19fX0=";
    private const string DecodedClaims = "{\"id_token\":{\"auth_time\":{\"essential\":true},\"acr\":{\"values\":[\"urn:mace:incommon:iap:silver\"]}}}";
    private const string DecodedClaimsHeader = "{\\\"id_token\\\":{\\\"auth_time\\\":{\\\"essential\\\":true},\\\"acr\\\":{\\\"values\\\":[\\\"urn:mace:incommon:iap:silver\\\"]}}}";
    private const string SomeClaims = "some_claims";
    private const string ClaimsKey = "claims";
    private const string ErrorKey = "error";

    // Parsing response headers yields the common authority regardless of which
    // resource/authorization parameter spelling the challenge uses.
    [TestMethod]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authorization_uri=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authorization=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authority=\"https://login.microsoftonline.com/common/\"")]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authority=\"https://login.microsoftonline.com/common\"")]
    [DataRow("resource_id=00000003-0000-0000-c000-000000000000", "authorization=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    [DataRow("resource=00000003-0000-0000-c000-000000000000", "authorization=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    public void CreateWwwAuthenticateResponse(string resource, string authorizationUri)
    {
        // Arrange
        HttpResponseMessage httpResponse = new HttpResponseMessage(HttpStatusCode.Unauthorized);
        httpResponse.Headers.Add(WwwAuthenticateHeaderName, $"Bearer realm=\"\", {resource}, {authorizationUri}");

        // Act
        var authParams = WwwAuthenticateParameters.CreateFromResponseHeaders(httpResponse.Headers);

        // Assert
        Assert.AreEqual(TestConstants.AuthorityCommonTenant.TrimEnd('/'), authParams.Authority);
        Assert.AreEqual(3, authParams.RawParameters.Count);
        Assert.IsNull(authParams.Claims);
        Assert.IsNull(authParams.Error);
    }

    // RawParameters exposes exactly the keys present in the challenge; absent keys throw.
    [TestMethod]
    [DataRow(ClientIdKey, AuthorizationUriKey)]
    [DataRow(ClientIdKey, AuthorizationKey)]
    [DataRow(ClientIdKey, AuthorityKey)]
    [DataRow(ResourceIdKey, AuthorizationUriKey)]
    [DataRow(ResourceIdKey, AuthorizationKey)]
    [DataRow(ResourceIdKey, AuthorityKey)]
    [DataRow(ResourceKey, AuthorizationUriKey)]
    [DataRow(ResourceKey, AuthorizationKey)]
    [DataRow(ResourceKey, AuthorityKey)]
    public void CreateRawParameters(string resourceHeaderKey, string authorizationUriHeaderKey)
    {
        // Arrange
        HttpResponseMessage httpResponse = CreateGraphHttpResponse(resourceHeaderKey, authorizationUriHeaderKey);

        // Act
        var authParams = WwwAuthenticateParameters.CreateFromResponseHeaders(httpResponse.Headers);

        // Assert
        Assert.IsTrue(authParams.RawParameters.ContainsKey(resourceHeaderKey));
        Assert.IsTrue(authParams.RawParameters.ContainsKey(authorizationUriHeaderKey));
        Assert.IsTrue(authParams.RawParameters.ContainsKey(Realm));
        Assert.AreEqual(string.Empty, authParams[Realm]);
        Assert.AreEqual(GraphGuid, authParams[resourceHeaderKey]);
        Assert.ThrowsException<KeyNotFoundException>(
            () => authParams[ErrorKey]);
        Assert.ThrowsException<KeyNotFoundException>(
            () => authParams[ClaimsKey]);
    }

    // Claims and error parameters are surfaced verbatim through RawParameters and the indexer.
    [TestMethod]
    [DataRow(DecodedClaimsHeader)]
    [DataRow(EncodedClaims)]
    [DataRow(SomeClaims)]
    public void CreateRawParameters_ClaimsAndErrorReturned(string claims)
    {
        // Arrange
        HttpResponseMessage httpResponse = CreateClaimsHttpResponse(claims);

        // Act
        var authParams = WwwAuthenticateParameters.CreateFromResponseHeaders(httpResponse.Headers);

        // Assert
        const string errorValue = "insufficient_claims";
        Assert.IsTrue(authParams.RawParameters.TryGetValue(AuthorizationUriKey, out string authorizationUri));
        Assert.AreEqual(AuthorizationValue, authorizationUri);
        Assert.AreEqual(AuthorizationValue, authParams[AuthorizationUriKey]);
        Assert.IsTrue(authParams.RawParameters.ContainsKey(Realm));
        Assert.IsTrue(authParams.RawParameters.TryGetValue(Realm, out string realmValue));
        Assert.AreEqual(string.Empty, realmValue);
        Assert.AreEqual(string.Empty, authParams[Realm]);
        Assert.IsTrue(authParams.RawParameters.TryGetValue(ClaimsKey, out string claimsValue));
        Assert.AreEqual(claims, claimsValue);
        Assert.AreEqual(claimsValue, authParams[ClaimsKey]);
        Assert.IsTrue(authParams.RawParameters.TryGetValue(ErrorKey, out string errorValueParam));
        Assert.AreEqual(errorValue, errorValueParam);
        Assert.AreEqual(errorValue, authParams[ErrorKey]);
    }

    // Same as CreateWwwAuthenticateResponse, but parsing the header VALUE directly.
    [TestMethod]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authorization_uri=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authorization=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authority=\"https://login.microsoftonline.com/common/\"")]
    [DataRow("client_id=00000003-0000-0000-c000-000000000000", "authority=\"https://login.microsoftonline.com/common\"")]
    [DataRow("resource_id=00000003-0000-0000-c000-000000000000", "authorization=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    [DataRow("resource=00000003-0000-0000-c000-000000000000", "authorization=\"https://login.microsoftonline.com/common/oauth2/authorize\"")]
    public void CreateWwwAuthenticateParamsFromWwwAuthenticateHeader(string clientId, string authorizationUri)
    {
        // Arrange
        HttpResponseMessage httpResponse = new HttpResponseMessage(HttpStatusCode.Unauthorized);
        httpResponse.Headers.Add(WwwAuthenticateHeaderName, $"Bearer realm=\"\", {clientId}, {authorizationUri}");
        var wwwAuthenticateResponse = httpResponse.Headers.WwwAuthenticate.First().Parameter;

        // Act
        var authParams = WwwAuthenticateParameters.CreateFromWwwAuthenticateHeaderValue(wwwAuthenticateResponse);

        // Assert
        Assert.AreEqual(TestConstants.AuthorityCommonTenant.TrimEnd('/'), authParams.Authority);
        Assert.AreEqual(3, authParams.RawParameters.Count);
        Assert.IsNull(authParams.Claims);
        Assert.IsNull(authParams.Error);
    }

    [TestMethod]
    public async Task CreateFromResourceResponseAsync_HttpClient_Arm_GetTenantId_Async()
    {
        const string resourceUri = "https://example.com/";
        string tenantId = Guid.NewGuid().ToString();
        var handler = new MockHttpMessageHandler
        {
            ExpectedMethod = HttpMethod.Get,
            ExpectedUrl = resourceUri,
            ResponseMessage = CreateInvalidTokenHttpErrorResponse(tenantId)
        };
        var httpClient = new HttpClient(handler);

        var authParams = await WwwAuthenticateParameters.CreateFromResourceResponseAsync(httpClient, resourceUri).ConfigureAwait(false);

        // BUG FIX: expected value must be the first argument to Assert.AreEqual so
        // failure messages report expected/actual correctly.
        Assert.AreEqual(tenantId, authParams.GetTenantId());
    }

    [TestMethod]
    public async Task CreateFromResourceResponseAsync_HttpClient_B2C_GetTenantId_Async()
    {
        const string resourceUri = "https://example.com/";
        const string tenantId = "tenant";
        var handler = new MockHttpMessageHandler
        {
            ExpectedMethod = HttpMethod.Get,
            ExpectedUrl = resourceUri,
            ResponseMessage = CreateInvalidTokenHttpErrorResponse(authority: TestConstants.B2CAuthority)
        };
        var httpClient = new HttpClient(handler);

        var authParams = await WwwAuthenticateParameters.CreateFromResourceResponseAsync(httpClient, resourceUri).ConfigureAwait(false);

        // BUG FIX: expected value must be the first argument to Assert.AreEqual so
        // failure messages report expected/actual correctly.
        Assert.AreEqual(tenantId, authParams.GetTenantId());
    }

    // ADFS authorities have no tenant segment, so no tenant id can be derived.
    [TestMethod]
    [DataRow(TestConstants.ADFSAuthority)]
    [DataRow(TestConstants.ADFSAuthority2)]
    public async Task CreateFromResourceResponseAsync_HttpClient_ADFS_GetTenantId_Null_Async(string authority)
    {
        const string resourceUri = "https://example.com/";
        string tenantId = Guid.NewGuid().ToString();
        var handler = new MockHttpMessageHandler
        {
            ExpectedMethod = HttpMethod.Get,
            ExpectedUrl = resourceUri,
            ResponseMessage = CreateInvalidTokenHttpErrorResponse(tenantId, authority)
        };
        var httpClient = new HttpClient(handler);

        var authParams = await WwwAuthenticateParameters.CreateFromResourceResponseAsync(httpClient, resourceUri).ConfigureAwait(false);

        Assert.IsNull(authParams.GetTenantId());
    }

    [DataRow(null)]
    [TestMethod]
    public async Task CreateFromResourceResponseAsync_HttpClient_Null_Async(HttpClient httpClient)
    {
        const string resourceUri = "https://example.com/";

        Func<Task> action = () => WwwAuthenticateParameters.CreateFromResourceResponseAsync(httpClient, resourceUri);

        await Assert.ThrowsExceptionAsync<ArgumentNullException>(action).ConfigureAwait(false);
    }

    [TestMethod]
    [DataRow(null)]
    [DataRow("")]
    public async Task CreateFromResourceResponseAsync_Incorrect_ResourceUri_Async(string resourceUri)
    {
        Func<Task> action = () => WwwAuthenticateParameters.CreateFromResourceResponseAsync(resourceUri);

        await Assert.ThrowsExceptionAsync<ArgumentNullException>(action).ConfigureAwait(false);
    }

    [TestMethod]
    public void ExtractClaimChallengeFromHeader()
    {
        // Arrange
        HttpResponseMessage httpResponse = CreateClaimsHttpResponse(DecodedClaimsHeader);

        // Act
        string extractedClaims = WwwAuthenticateParameters.GetClaimChallengeFromResponseHeaders(httpResponse.Headers);

        // Assert
        Assert.AreEqual(DecodedClaimsHeader, extractedClaims);
    }

    // Base64-encoded claims are decoded before being returned.
    [TestMethod]
    public void ExtractEncodedClaimChallengeFromHeader()
    {
        // Arrange
        HttpResponseMessage httpResponse = CreateClaimsHttpResponse(EncodedClaims);

        // Act
        string extractedClaims = WwwAuthenticateParameters.GetClaimChallengeFromResponseHeaders(httpResponse.Headers);

        // Assert
        Assert.AreEqual(DecodedClaims, extractedClaims);
    }

    // Claims are only extracted for the "insufficient_claims" error.
    [TestMethod]
    public void ExtractClaimChallengeFromHeader_IncorrectError_ReturnNull()
    {
        // Arrange
        HttpResponseMessage httpResponse = CreateClaimsHttpErrorResponse();

        // Act & Assert
        Assert.IsNull(WwwAuthenticateParameters.GetClaimChallengeFromResponseHeaders(httpResponse.Headers));
    }

    // Builds a 401 whose challenge carries error="insufficient_claims" and the given claims.
    private static HttpResponseMessage CreateClaimsHttpResponse(string claims)
    {
        HttpResponseMessage httpResponse = new HttpResponseMessage(HttpStatusCode.Unauthorized);
        httpResponse.Headers.Add(WwwAuthenticateHeaderName, $"Bearer realm=\"\", client_id=\"00000003-0000-0000-c000-000000000000\", authorization_uri=\"https://login.microsoftonline.com/common/oauth2/authorize\", error=\"insufficient_claims\", claims=\"{claims}\"");
        return httpResponse;
    }

    // Builds a 401 whose challenge carries a non-claims error ("some_error").
    private static HttpResponseMessage CreateClaimsHttpErrorResponse()
    {
        return new HttpResponseMessage(HttpStatusCode.Unauthorized)
        {
            Headers =
            {
                { WwwAuthenticateHeaderName, $"Bearer realm=\"\", client_id=\"00000003-0000-0000-c000-000000000000\", authorization_uri=\"https://login.microsoftonline.com/common/oauth2/authorize\", error=\"some_error\", claims=\"{DecodedClaimsHeader}\"" }
            }
        };
    }

    // Builds a 401 with resource and authorization parameters under the given key names.
    private static HttpResponseMessage CreateGraphHttpResponse(string resourceHeaderKey, string authorizationUriHeaderKey)
    {
        return new HttpResponseMessage(HttpStatusCode.Unauthorized)
        {
            Headers =
            {
                { WwwAuthenticateHeaderName, $"Bearer realm=\"\", {resourceHeaderKey}=\"{GraphGuid}\", {authorizationUriHeaderKey}=\"{AuthorizationValue}\"" }
            }
        };
    }

    // Builds a 401 with error="invalid_token" and an authorization_uri of {authority}/{tenantId}.
    private static HttpResponseMessage CreateInvalidTokenHttpErrorResponse(string tenantId = "", string authority = "https://login.windows.net")
    {
        return new HttpResponseMessage(HttpStatusCode.Unauthorized)
        {
            Headers =
            {
                { WwwAuthenticateHeaderName, $"Bearer authorization_uri=\"{authority}/{tenantId}\", error=\"invalid_token\", error_description=\"The authentication failed because of missing 'Authorization' header.\"" }
            }
        };
    }
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagve = Google.Ads.GoogleAds.V8.Enums;
using gagvr = Google.Ads.GoogleAds.V8.Resources;
using gaxgrpc = Google.Api.Gax.Grpc;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using NUnit.Framework;
using Google.Ads.GoogleAds.V8.Services;
namespace Google.Ads.GoogleAds.Tests.V8.Services
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedCustomerUserAccessInvitationServiceClientTest
{
// Generated test: the sync Get wrapper must forward the exact request object to the
// underlying gRPC stub and return the stubbed response unchanged.
[Category("Autogenerated")][Test]
public void GetCustomerUserAccessInvitationRequestObject()
{
    // Strict mock: any call not explicitly set up fails the test.
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    GetCustomerUserAccessInvitationRequest request = new GetCustomerUserAccessInvitationRequest
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
    };
    gagvr::CustomerUserAccessInvitation expectedResponse = new gagvr::CustomerUserAccessInvitation
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
        InvitationId = -4996900927385391354L,
        AccessRole = gagve::AccessRoleEnum.Types.AccessRole.Unspecified,
        EmailAddress = "email_addressf3aae0b5",
        CreationDateTime = "creation_date_time2f8c0159",
        InvitationStatus = gagve::AccessInvitationStatusEnum.Types.AccessInvitationStatus.Pending,
    };
    mockGrpcClient.Setup(x => x.GetCustomerUserAccessInvitation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    gagvr::CustomerUserAccessInvitation response = client.GetCustomerUserAccessInvitation(request);
    Assert.AreEqual(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: the async Get wrapper must forward the exact request object and
// surface the stubbed response for both the CallSettings and CancellationToken overloads.
[Category("Autogenerated")][Test]
public async stt::Task GetCustomerUserAccessInvitationRequestObjectAsync()
{
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    GetCustomerUserAccessInvitationRequest request = new GetCustomerUserAccessInvitationRequest
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
    };
    gagvr::CustomerUserAccessInvitation expectedResponse = new gagvr::CustomerUserAccessInvitation
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
        InvitationId = -4996900927385391354L,
        AccessRole = gagve::AccessRoleEnum.Types.AccessRole.Unspecified,
        EmailAddress = "email_addressf3aae0b5",
        CreationDateTime = "creation_date_time2f8c0159",
        InvitationStatus = gagve::AccessInvitationStatusEnum.Types.AccessInvitationStatus.Pending,
    };
    mockGrpcClient.Setup(x => x.GetCustomerUserAccessInvitationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::CustomerUserAccessInvitation>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    gagvr::CustomerUserAccessInvitation responseCallSettings = await client.GetCustomerUserAccessInvitationAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    Assert.AreEqual(expectedResponse, responseCallSettings);
    gagvr::CustomerUserAccessInvitation responseCancellationToken = await client.GetCustomerUserAccessInvitationAsync(request, st::CancellationToken.None);
    Assert.AreEqual(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the convenience overload that takes a plain resource-name string must
// build an equivalent request and return the stubbed response.
[Category("Autogenerated")][Test]
public void GetCustomerUserAccessInvitation()
{
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    GetCustomerUserAccessInvitationRequest request = new GetCustomerUserAccessInvitationRequest
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
    };
    gagvr::CustomerUserAccessInvitation expectedResponse = new gagvr::CustomerUserAccessInvitation
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
        InvitationId = -4996900927385391354L,
        AccessRole = gagve::AccessRoleEnum.Types.AccessRole.Unspecified,
        EmailAddress = "email_addressf3aae0b5",
        CreationDateTime = "creation_date_time2f8c0159",
        InvitationStatus = gagve::AccessInvitationStatusEnum.Types.AccessInvitationStatus.Pending,
    };
    mockGrpcClient.Setup(x => x.GetCustomerUserAccessInvitation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    gagvr::CustomerUserAccessInvitation response = client.GetCustomerUserAccessInvitation(request.ResourceName);
    Assert.AreEqual(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: the async string-resource-name overload, exercised via both the
// CallSettings and CancellationToken forms.
[Category("Autogenerated")][Test]
public async stt::Task GetCustomerUserAccessInvitationAsync()
{
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    GetCustomerUserAccessInvitationRequest request = new GetCustomerUserAccessInvitationRequest
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
    };
    gagvr::CustomerUserAccessInvitation expectedResponse = new gagvr::CustomerUserAccessInvitation
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
        InvitationId = -4996900927385391354L,
        AccessRole = gagve::AccessRoleEnum.Types.AccessRole.Unspecified,
        EmailAddress = "email_addressf3aae0b5",
        CreationDateTime = "creation_date_time2f8c0159",
        InvitationStatus = gagve::AccessInvitationStatusEnum.Types.AccessInvitationStatus.Pending,
    };
    mockGrpcClient.Setup(x => x.GetCustomerUserAccessInvitationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::CustomerUserAccessInvitation>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    gagvr::CustomerUserAccessInvitation responseCallSettings = await client.GetCustomerUserAccessInvitationAsync(request.ResourceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    Assert.AreEqual(expectedResponse, responseCallSettings);
    gagvr::CustomerUserAccessInvitation responseCancellationToken = await client.GetCustomerUserAccessInvitationAsync(request.ResourceName, st::CancellationToken.None);
    Assert.AreEqual(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the overload that takes a strongly-typed resource name
// (CustomerUserAccessInvitationName) must behave like the request-object overload.
[Category("Autogenerated")][Test]
public void GetCustomerUserAccessInvitationResourceNames()
{
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    GetCustomerUserAccessInvitationRequest request = new GetCustomerUserAccessInvitationRequest
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
    };
    gagvr::CustomerUserAccessInvitation expectedResponse = new gagvr::CustomerUserAccessInvitation
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
        InvitationId = -4996900927385391354L,
        AccessRole = gagve::AccessRoleEnum.Types.AccessRole.Unspecified,
        EmailAddress = "email_addressf3aae0b5",
        CreationDateTime = "creation_date_time2f8c0159",
        InvitationStatus = gagve::AccessInvitationStatusEnum.Types.AccessInvitationStatus.Pending,
    };
    mockGrpcClient.Setup(x => x.GetCustomerUserAccessInvitation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    gagvr::CustomerUserAccessInvitation response = client.GetCustomerUserAccessInvitation(request.ResourceNameAsCustomerUserAccessInvitationName);
    Assert.AreEqual(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
// Generated test: the async strongly-typed resource-name overload, exercised via both
// the CallSettings and CancellationToken forms.
[Category("Autogenerated")][Test]
public async stt::Task GetCustomerUserAccessInvitationResourceNamesAsync()
{
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    GetCustomerUserAccessInvitationRequest request = new GetCustomerUserAccessInvitationRequest
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
    };
    gagvr::CustomerUserAccessInvitation expectedResponse = new gagvr::CustomerUserAccessInvitation
    {
        ResourceNameAsCustomerUserAccessInvitationName = gagvr::CustomerUserAccessInvitationName.FromCustomerInvitation("[CUSTOMER_ID]", "[INVITATION_ID]"),
        InvitationId = -4996900927385391354L,
        AccessRole = gagve::AccessRoleEnum.Types.AccessRole.Unspecified,
        EmailAddress = "email_addressf3aae0b5",
        CreationDateTime = "creation_date_time2f8c0159",
        InvitationStatus = gagve::AccessInvitationStatusEnum.Types.AccessInvitationStatus.Pending,
    };
    mockGrpcClient.Setup(x => x.GetCustomerUserAccessInvitationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::CustomerUserAccessInvitation>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    gagvr::CustomerUserAccessInvitation responseCallSettings = await client.GetCustomerUserAccessInvitationAsync(request.ResourceNameAsCustomerUserAccessInvitationName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    Assert.AreEqual(expectedResponse, responseCallSettings);
    gagvr::CustomerUserAccessInvitation responseCancellationToken = await client.GetCustomerUserAccessInvitationAsync(request.ResourceNameAsCustomerUserAccessInvitationName, st::CancellationToken.None);
    Assert.AreEqual(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// Generated test: the sync Mutate wrapper must forward the exact request object and
// return the stubbed mutate response.
[Category("Autogenerated")][Test]
public void MutateCustomerUserAccessInvitationRequestObject()
{
    moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient> mockGrpcClient = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    MutateCustomerUserAccessInvitationRequest request = new MutateCustomerUserAccessInvitationRequest
    {
        CustomerId = "customer_id3b3724cb",
        Operation = new CustomerUserAccessInvitationOperation(),
    };
    MutateCustomerUserAccessInvitationResponse expectedResponse = new MutateCustomerUserAccessInvitationResponse
    {
        Result = new MutateCustomerUserAccessInvitationResult(),
    };
    mockGrpcClient.Setup(x => x.MutateCustomerUserAccessInvitation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(mockGrpcClient.Object, null);
    MutateCustomerUserAccessInvitationResponse response = client.MutateCustomerUserAccessInvitation(request);
    Assert.AreEqual(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task MutateCustomerUserAccessInvitationRequestObjectAsync()
{
    // Strict mock of the underlying gRPC client: any call not explicitly set up fails the test.
    var grpcMock = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    var request = new MutateCustomerUserAccessInvitationRequest
    {
        CustomerId = "customer_id3b3724cb",
        Operation = new CustomerUserAccessInvitationOperation(),
    };
    var expected = new MutateCustomerUserAccessInvitationResponse
    {
        Result = new MutateCustomerUserAccessInvitationResult(),
    };
    grpcMock.Setup(x => x.MutateCustomerUserAccessInvitationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateCustomerUserAccessInvitationResponse>(stt::Task.FromResult(expected), null, null, null, null));
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(grpcMock.Object, null);
    // Exercise both async overloads: CallSettings and CancellationToken.
    var viaCallSettings = await client.MutateCustomerUserAccessInvitationAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    Assert.AreEqual(expected, viaCallSettings);
    var viaCancellationToken = await client.MutateCustomerUserAccessInvitationAsync(request, st::CancellationToken.None);
    Assert.AreEqual(expected, viaCancellationToken);
    grpcMock.VerifyAll();
}
[Category("Autogenerated")][Test]
public void MutateCustomerUserAccessInvitation()
{
    // Strict mock of the underlying gRPC client: any call not explicitly set up fails the test.
    var grpcMock = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    var request = new MutateCustomerUserAccessInvitationRequest
    {
        CustomerId = "customer_id3b3724cb",
        Operation = new CustomerUserAccessInvitationOperation(),
    };
    var expected = new MutateCustomerUserAccessInvitationResponse
    {
        Result = new MutateCustomerUserAccessInvitationResult(),
    };
    grpcMock.Setup(x => x.MutateCustomerUserAccessInvitation(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(grpcMock.Object, null);
    // Call the flattened overload; the generated client should build an equivalent request.
    var actual = client.MutateCustomerUserAccessInvitation(request.CustomerId, request.Operation);
    Assert.AreEqual(expected, actual);
    grpcMock.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task MutateCustomerUserAccessInvitationAsync()
{
    // Strict mock of the underlying gRPC client: any call not explicitly set up fails the test.
    var grpcMock = new moq::Mock<CustomerUserAccessInvitationService.CustomerUserAccessInvitationServiceClient>(moq::MockBehavior.Strict);
    var request = new MutateCustomerUserAccessInvitationRequest
    {
        CustomerId = "customer_id3b3724cb",
        Operation = new CustomerUserAccessInvitationOperation(),
    };
    var expected = new MutateCustomerUserAccessInvitationResponse
    {
        Result = new MutateCustomerUserAccessInvitationResult(),
    };
    grpcMock.Setup(x => x.MutateCustomerUserAccessInvitationAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateCustomerUserAccessInvitationResponse>(stt::Task.FromResult(expected), null, null, null, null));
    CustomerUserAccessInvitationServiceClient client = new CustomerUserAccessInvitationServiceClientImpl(grpcMock.Object, null);
    // Exercise both flattened async overloads: CallSettings and CancellationToken.
    var viaCallSettings = await client.MutateCustomerUserAccessInvitationAsync(request.CustomerId, request.Operation, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    Assert.AreEqual(expected, viaCallSettings);
    var viaCancellationToken = await client.MutateCustomerUserAccessInvitationAsync(request.CustomerId, request.Operation, st::CancellationToken.None);
    Assert.AreEqual(expected, viaCancellationToken);
    grpcMock.VerifyAll();
}
}
}
| |
/*
* Copyright 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
namespace ZXing.QrCode.Internal
{
/// <summary>
///
/// </summary>
/// <author>Satoru Takabayashi</author>
/// <author>Daniel Switkin</author>
/// <author>Sean Owen</author>
public static class MaskUtil
{
    // Penalty weights from section 6.8.2.1 of the QR specification (JISX0510:2004).
    private const int N1 = 3;  // rule 1: runs of same-colored cells
    private const int N2 = 3;  // rule 2: 2x2 same-colored blocks
    private const int N3 = 40; // rule 3: finder-like patterns
    private const int N4 = 10; // rule 4: dark/light imbalance

    /// <summary>
    /// Apply mask penalty rule 1 and return the penalty. Find repetitive cells with the same color and
    /// give penalty to them. Example: 00000 or 11111.
    /// </summary>
    /// <param name="matrix">The matrix.</param>
    /// <returns>The rule 1 penalty: horizontal runs plus vertical runs.</returns>
    public static int applyMaskPenaltyRule1(ByteMatrix matrix)
    {
        return applyMaskPenaltyRule1Internal(matrix, true) + applyMaskPenaltyRule1Internal(matrix, false);
    }

    /// <summary>
    /// Apply mask penalty rule 2 and return the penalty. Find 2x2 blocks with the same color and give
    /// penalty to them. This is actually equivalent to the spec's rule, which is to find MxN blocks and give a
    /// penalty proportional to (M-1)x(N-1), because this is the number of 2x2 blocks inside such a block.
    /// </summary>
    /// <param name="matrix">The matrix.</param>
    /// <returns>The rule 2 penalty.</returns>
    public static int applyMaskPenaltyRule2(ByteMatrix matrix)
    {
        int penalty = 0;
        var array = matrix.Array;
        int width = matrix.Width;
        int height = matrix.Height;
        // Count every 2x2 block whose four cells share the same value.
        for (int y = 0; y < height - 1; y++)
        {
            for (int x = 0; x < width - 1; x++)
            {
                int value = array[y][x];
                if (value == array[y][x + 1] && value == array[y + 1][x] && value == array[y + 1][x + 1])
                {
                    penalty++;
                }
            }
        }
        return N2 * penalty;
    }

    /// <summary>
    /// Apply mask penalty rule 3 and return the penalty. Find consecutive cells of 00001011101 or
    /// 10111010000, and give penalty to them. If we find patterns like 000010111010000, we give
    /// penalties twice (i.e. 40 * 2).
    /// </summary>
    /// <param name="matrix">The matrix.</param>
    /// <returns>The rule 3 penalty.</returns>
    public static int applyMaskPenaltyRule3(ByteMatrix matrix)
    {
        int numPenalties = 0;
        byte[][] array = matrix.Array;
        int width = matrix.Width;
        int height = matrix.Height;
        for (int y = 0; y < height; y++)
        {
            // Hoisted out of the x loop: the row reference is invariant for a given y
            // (the original fetched it on every x iteration).
            byte[] arrayY = array[y];
            for (int x = 0; x < width; x++)
            {
                // Horizontal 1011101 with a run of at least 4 whites on either side.
                if (x + 6 < width &&
                    arrayY[x] == 1 &&
                    arrayY[x + 1] == 0 &&
                    arrayY[x + 2] == 1 &&
                    arrayY[x + 3] == 1 &&
                    arrayY[x + 4] == 1 &&
                    arrayY[x + 5] == 0 &&
                    arrayY[x + 6] == 1 &&
                    (isWhiteHorizontal(arrayY, x - 4, x) || isWhiteHorizontal(arrayY, x + 7, x + 11)))
                {
                    numPenalties++;
                }
                // Vertical 1011101 with a run of at least 4 whites above or below.
                if (y + 6 < height &&
                    array[y][x] == 1 &&
                    array[y + 1][x] == 0 &&
                    array[y + 2][x] == 1 &&
                    array[y + 3][x] == 1 &&
                    array[y + 4][x] == 1 &&
                    array[y + 5][x] == 0 &&
                    array[y + 6][x] == 1 &&
                    (isWhiteVertical(array, x, y - 4, y) || isWhiteVertical(array, x, y + 7, y + 11)))
                {
                    numPenalties++;
                }
            }
        }
        return numPenalties * N3;
    }

    // True when every cell of rowArray in [from, to), clamped to the row bounds, is white (not 1).
    private static bool isWhiteHorizontal(byte[] rowArray, int from, int to)
    {
        from = Math.Max(from, 0);
        to = Math.Min(to, rowArray.Length);
        for (int i = from; i < to; i++)
        {
            if (rowArray[i] == 1)
            {
                return false;
            }
        }
        return true;
    }

    // True when every cell of column col in rows [from, to), clamped to the matrix bounds, is white (not 1).
    private static bool isWhiteVertical(byte[][] array, int col, int from, int to)
    {
        from = Math.Max(from, 0);
        to = Math.Min(to, array.Length);
        for (int i = from; i < to; i++)
        {
            if (array[i][col] == 1)
            {
                return false;
            }
        }
        return true;
    }

    /// <summary>
    /// Apply mask penalty rule 4 and return the penalty. Calculate the ratio of dark cells and give
    /// penalty if the ratio is far from 50%. It gives 10 penalty for 5% distance.
    /// </summary>
    /// <param name="matrix">The matrix.</param>
    /// <returns>The rule 4 penalty.</returns>
    public static int applyMaskPenaltyRule4(ByteMatrix matrix)
    {
        int numDarkCells = 0;
        var array = matrix.Array;
        int width = matrix.Width;
        int height = matrix.Height;
        for (int y = 0; y < height; y++)
        {
            var arrayY = array[y];
            for (int x = 0; x < width; x++)
            {
                if (arrayY[x] == 1)
                {
                    numDarkCells++;
                }
            }
        }
        // Reuse the locals instead of re-reading the matrix properties.
        var numTotalCells = height * width;
        var darkRatio = (double)numDarkCells / numTotalCells;
        var fivePercentVariances = (int)(Math.Abs(darkRatio - 0.5) * 20.0); // * 100.0 / 5.0
        return fivePercentVariances * N4;
    }

    /// <summary>
    /// Return the mask bit for "getMaskPattern" at "x" and "y". See 8.8 of JISX0510:2004 for mask
    /// pattern conditions.
    /// </summary>
    /// <param name="maskPattern">The mask pattern, 0-7.</param>
    /// <param name="x">The x coordinate (column).</param>
    /// <param name="y">The y coordinate (row).</param>
    /// <returns>True when the cell at (x, y) should be flipped by this mask.</returns>
    /// <exception cref="ArgumentException">Thrown when maskPattern is outside 0-7.</exception>
    public static bool getDataMaskBit(int maskPattern, int x, int y)
    {
        int intermediate, temp;
        switch (maskPattern)
        {
            case 0:
                intermediate = (y + x) & 0x1;
                break;
            case 1:
                intermediate = y & 0x1;
                break;
            case 2:
                intermediate = x % 3;
                break;
            case 3:
                intermediate = (y + x) % 3;
                break;
            case 4:
                // (uint) cast gives a logical (unsigned) shift of y.
                intermediate = (((int)((uint)y >> 1)) + (x / 3)) & 0x1;
                break;
            case 5:
                temp = y * x;
                intermediate = (temp & 0x1) + (temp % 3);
                break;
            case 6:
                temp = y * x;
                intermediate = (((temp & 0x1) + (temp % 3)) & 0x1);
                break;
            case 7:
                temp = y * x;
                intermediate = (((temp % 3) + ((y + x) & 0x1)) & 0x1);
                break;
            default:
                throw new ArgumentException("Invalid mask pattern: " + maskPattern);
        }
        return intermediate == 0;
    }

    /// <summary>
    /// Helper function for applyMaskPenaltyRule1. We need this for doing this calculation in both
    /// vertical and horizontal orders respectively.
    /// </summary>
    /// <param name="matrix">The matrix.</param>
    /// <param name="isHorizontal">if set to <c>true</c>, scan rows; otherwise scan columns.</param>
    /// <returns>The accumulated run-length penalty for one orientation.</returns>
    private static int applyMaskPenaltyRule1Internal(ByteMatrix matrix, bool isHorizontal)
    {
        int penalty = 0;
        int iLimit = isHorizontal ? matrix.Height : matrix.Width;
        int jLimit = isHorizontal ? matrix.Width : matrix.Height;
        var array = matrix.Array;
        for (int i = 0; i < iLimit; i++)
        {
            int numSameBitCells = 0;
            int prevBit = -1; // sentinel: never matches a cell value, so the first cell starts a run
            for (int j = 0; j < jLimit; j++)
            {
                int bit = isHorizontal ? array[i][j] : array[j][i];
                if (bit == prevBit)
                {
                    numSameBitCells++;
                }
                else
                {
                    // Run ended: runs of 5+ cost N1 plus 1 per extra cell.
                    if (numSameBitCells >= 5)
                    {
                        penalty += N1 + (numSameBitCells - 5);
                    }
                    numSameBitCells = 1; // Include the cell itself.
                    prevBit = bit;
                }
            }
            // Account for the run that reaches the end of the row/column.
            if (numSameBitCells >= 5)
            {
                penalty += N1 + (numSameBitCells - 5);
            }
        }
        return penalty;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Swank.Extensions;
using Swank.Specification;
namespace Swank.Web.Handlers.App
{
public class BodyDescriptionService
{
    // Indentation unit prepended per nesting level of the rendered body.
    public const string Whitespace = " ";

    private readonly Configuration.Configuration _configuration;

    public BodyDescriptionService(Configuration.Configuration configuration)
    {
        _configuration = configuration;
    }

    /// <summary>
    /// Flattens a data-type graph into an ordered list of body definition rows,
    /// assigning each row a 1-based Index.
    /// </summary>
    /// <param name="type">The root data type to describe.</param>
    /// <returns>The flattened rows, in document order.</returns>
    public List<BodyDefinitionModel> Create(DataType type)
    {
        var data = new List<BodyDefinitionModel>();
        WalkGraph(data, type, 0);
        data.ForEach((x, i) => x.Index = i + 1);
        return data;
    }

    // Dispatches on the kind of type and appends its rows. At the root level the
    // first and last rows are flagged; the count guard keeps an unrecognized root
    // type (none of simple/array/dictionary/complex) from throwing on First()/Last().
    private void WalkGraph(List<BodyDefinitionModel> data, DataType type, int level,
        Action<BodyDefinitionModel> opening = null,
        Action<BodyDefinitionModel> closing = null)
    {
        if (type.IsSimple) WalkSimpleType(data, type, level, opening);
        else if (type.IsArray) WalkArray(data, type, level, opening, closing);
        else if (type.IsDictionary) WalkDictionary(data, type, level, opening, closing);
        else if (type.IsComplex) WalkComplexType(data, type, level, opening, closing);
        if (level == 0 && data.Count > 0)
        {
            data.First().IsFirst = true;
            data.Last().IsLast = true;
        }
    }

    // Emits a single row for a simple (leaf) type, classifying it by its XML type name.
    private void WalkSimpleType(
        List<BodyDefinitionModel> description,
        DataType type, int level,
        Action<BodyDefinitionModel> opening)
    {
        var data = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.Name,
            Comments = type.Comments,
            Namespace = type.Namespace,
            FullNamespace = type.FullNamespace,
            Whitespace = Whitespace.Repeat(level),
            IsSimpleType = true,
            Nullable = type.IsNullable,
            SampleValue = type.SampleValue
        };
        switch (type.Name)
        {
            // Integral and floating-point XML types all map to the same flag,
            // so the two original case groups are merged.
            case Xml.UnsignedLongType:
            case Xml.LongType:
            case Xml.UnsignedIntType:
            case Xml.IntType:
            case Xml.UnsignedShortType:
            case Xml.ShortType:
            case Xml.ByteType:
            case Xml.UnsignedByteType:
            case Xml.FloatType:
            case Xml.DoubleType:
            case Xml.DecimalType:
                data.IsNumeric = true;
                break;
            case Xml.BooleanType:
                data.IsBoolean = true;
                break;
            case Xml.DateTimeType:
                data.IsDateTime = true;
                break;
            case Xml.DurationType:
                data.IsDuration = true;
                break;
            case Xml.UuidType:
                data.IsGuid = true;
                break;
            default:
                // Anything unrecognized is treated as a string.
                data.IsString = true;
                break;
        }
        data.Enumeration = WalkOptions(type);
        opening?.Invoke(data);
        description.Add(data);
    }

    // Copies a type's enumeration (if any) into the view model; null when the type has none.
    private static Enumeration WalkOptions(DataType type)
    {
        if (type.Enumeration == null) return null;
        var enumeration = new Enumeration
        {
            Name = type.Enumeration.Name,
            Comments = type.Enumeration.Comments,
            Options = new List<Option>(type.Enumeration.Options.Select(x => new Option
            {
                Name = x.Name,
                Value = x.Value,
                Comments = x.Comments
            }))
        };
        return enumeration;
    }

    // Emits opening row, item rows (recursively), and closing row for an array type.
    private void WalkArray(List<BodyDefinitionModel> data, DataType type, int level,
        Action<BodyDefinitionModel> opening = null,
        Action<BodyDefinitionModel> closing = null)
    {
        var arrayOpening = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.ArrayItem.Type.Name,
            Namespace = type.ArrayItem.Type.Namespace,
            FullNamespace = type.ArrayItem.Type.FullNamespace,
            LogicalName = type.ArrayItem.Type.LogicalName,
            Comments = type.Comments,
            Whitespace = Whitespace.Repeat(level),
            IsOpening = true,
            IsArray = true,
            Enumeration = WalkOptions(type.ArrayItem.Type)
        };
        // NOTE: the original conditionally re-assigned TypeName to the identical
        // value for simple item types; that redundant assignment is removed.
        opening?.Invoke(arrayOpening);
        data.Add(arrayOpening);
        WalkGraph(data, type.ArrayItem.Type, level + 1,
            x =>
            {
                // ArrayItem is non-null here (it was dereferenced above), so the
                // original's null guard was dead code and is dropped.
                if (type.ArrayItem.Name != null)
                    x.Name = type.ArrayItem.Name;
                if (type.ArrayItem.Comments != null)
                    x.Comments = type.ArrayItem.Comments;
            },
            x =>
            {
                if (type.ArrayItem?.Name != null) x.Name = type.ArrayItem.Name;
            });
        var arrayClosing = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.ArrayItem.Type.Name,
            Whitespace = Whitespace.Repeat(level),
            IsClosing = true,
            IsArray = true
        };
        closing?.Invoke(arrayClosing);
        data.Add(arrayClosing);
    }

    // Emits opening row, key/value entry rows (recursively), and closing row for a dictionary type.
    private void WalkDictionary(List<BodyDefinitionModel> data, DataType type, int level,
        Action<BodyDefinitionModel> opening = null,
        Action<BodyDefinitionModel> closing = null)
    {
        var dictionaryOpening = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.DictionaryEntry.ValueType.Name,
            Namespace = type.DictionaryEntry.ValueType.Namespace,
            FullNamespace = type.DictionaryEntry.ValueType.FullNamespace,
            LogicalName = type.DictionaryEntry.ValueType.LogicalName,
            Comments = type.Comments,
            Whitespace = Whitespace.Repeat(level),
            IsOpening = true,
            IsDictionary = true,
            Enumeration = WalkOptions(type.DictionaryEntry.ValueType)
        };
        // NOTE: the original conditionally re-assigned TypeName to the identical
        // value for simple value types; that redundant assignment is removed.
        opening?.Invoke(dictionaryOpening);
        data.Add(dictionaryOpening);
        WalkGraph(data, type.DictionaryEntry.ValueType, level + 1,
            x =>
            {
                x.Name = type.DictionaryEntry.KeyName ??
                    _configuration.DefaultDictionaryKeyName;
                x.IsDictionaryEntry = true;
                if (type.DictionaryEntry.ValueComments != null)
                    x.Comments = type.DictionaryEntry.ValueComments;
                x.DictionaryKey = new KeyModel
                {
                    TypeName = type.DictionaryEntry.KeyType.Name,
                    Enumeration = WalkOptions(type.DictionaryEntry.KeyType),
                    Comments = type.DictionaryEntry.KeyComments
                };
            },
            x =>
            {
                x.Name = _configuration.DefaultDictionaryKeyName;
                x.IsDictionaryEntry = true;
            });
        var dictionaryClosing = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.DictionaryEntry.ValueType.Name,
            Whitespace = Whitespace.Repeat(level),
            IsClosing = true,
            IsDictionary = true
        };
        closing?.Invoke(dictionaryClosing);
        data.Add(dictionaryClosing);
    }

    // Emits opening row, one row set per member (recursively), and closing row for a complex type.
    private void WalkComplexType(List<BodyDefinitionModel> data,
        DataType type, int level,
        Action<BodyDefinitionModel> opening = null,
        Action<BodyDefinitionModel> closing = null)
    {
        var complexOpening = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.Name,
            Namespace = type.Namespace,
            FullNamespace = type.FullNamespace,
            LogicalName = type.LogicalName,
            Comments = type.Comments,
            Whitespace = Whitespace.Repeat(level),
            IsOpening = true,
            IsComplexType = true
        };
        opening?.Invoke(complexOpening);
        data.Add(complexOpening);
        // Hoisted out of the loop: the original called Members.Last() per member,
        // an accidental O(n^2). LastOrDefault keeps an empty member list a no-op.
        var last = type.Members.LastOrDefault();
        foreach (var member in type.Members)
        {
            var lastMember = member == last;
            WalkGraph(data, member.Type, level + 1,
                x => {
                    x.Name = member.Name;
                    x.Comments = member.Comments;
                    x.DefaultValue = member.DefaultValue;
                    if (member.SampleValue != null) x.SampleValue =
                        member.SampleValue.ToSampleValueString(_configuration);
                    x.IsMember = true;
                    if (lastMember) x.IsLastMember = true;
                    if (!member.Type.IsSimple) x.IsOpening = true;
                    x.Optional = member.Optional;
                    x.MaxLength = member.MaxLength;
                    x.Encoding = member.Encoding;
                    if (member.Deprecated)
                    {
                        x.IsDeprecated = true;
                        x.DeprecationMessage = member.DeprecationMessage;
                    }
                },
                x => {
                    x.Name = member.Name;
                    x.IsMember = true;
                    if (lastMember) x.IsLastMember = true;
                });
        }
        var complexClosing = new BodyDefinitionModel
        {
            Name = type.Name,
            TypeName = type.Name,
            Whitespace = Whitespace.Repeat(level),
            IsClosing = true,
            IsComplexType = true
        };
        closing?.Invoke(complexClosing);
        data.Add(complexClosing);
    }
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Compute.V1.Snippets
{
using Google.Api.Gax;
using System;
using System.Linq;
using System.Threading.Tasks;
using lro = Google.LongRunning;
/// <summary>Generated snippets.</summary>
public sealed class GeneratedRegionInstanceGroupsClientSnippets
{
/// <summary>Snippet for Get</summary>
public void GetRequestObject()
{
    // Snippet: Get(GetRegionInstanceGroupRequest, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real project/region/group values.
    GetRegionInstanceGroupRequest request = new GetRegionInstanceGroupRequest
    {
        InstanceGroup = "",
        Region = "",
        Project = "",
    };
    // Make the request
    InstanceGroup response = regionInstanceGroupsClient.Get(request);
    // End snippet
}
/// <summary>Snippet for GetAsync</summary>
public async Task GetRequestObjectAsync()
{
    // Snippet: GetAsync(GetRegionInstanceGroupRequest, CallSettings)
    // Additional: GetAsync(GetRegionInstanceGroupRequest, CancellationToken)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real project/region/group values.
    GetRegionInstanceGroupRequest request = new GetRegionInstanceGroupRequest
    {
        InstanceGroup = "",
        Region = "",
        Project = "",
    };
    // Make the request
    InstanceGroup response = await regionInstanceGroupsClient.GetAsync(request);
    // End snippet
}
/// <summary>Snippet for Get</summary>
public void Get()
{
    // Snippet: Get(string, string, string, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real identifiers.
    string project = "";
    string region = "";
    string instanceGroup = "";
    // Make the request
    InstanceGroup response = regionInstanceGroupsClient.Get(project, region, instanceGroup);
    // End snippet
}
/// <summary>Snippet for GetAsync</summary>
public async Task GetAsync()
{
    // Snippet: GetAsync(string, string, string, CallSettings)
    // Additional: GetAsync(string, string, string, CancellationToken)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real identifiers.
    string project = "";
    string region = "";
    string instanceGroup = "";
    // Make the request
    InstanceGroup response = await regionInstanceGroupsClient.GetAsync(project, region, instanceGroup);
    // End snippet
}
/// <summary>Snippet for List</summary>
public void ListRequestObject()
{
    // Snippet: List(ListRegionInstanceGroupsRequest, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real values.
    ListRegionInstanceGroupsRequest request = new ListRegionInstanceGroupsRequest
    {
        Region = "",
        OrderBy = "",
        Project = "",
        Filter = "",
        ReturnPartialSuccess = false,
    };
    // Make the request
    PagedEnumerable<RegionInstanceGroupList, InstanceGroup> response = regionInstanceGroupsClient.List(request);
    // Iterate over all response items, lazily performing RPCs as required
    foreach (InstanceGroup item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (RegionInstanceGroupList page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (InstanceGroup item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<InstanceGroup> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (InstanceGroup item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for ListAsync</summary>
public async Task ListRequestObjectAsync()
{
    // Snippet: ListAsync(ListRegionInstanceGroupsRequest, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real values.
    ListRegionInstanceGroupsRequest request = new ListRegionInstanceGroupsRequest
    {
        Region = "",
        OrderBy = "",
        Project = "",
        Filter = "",
        ReturnPartialSuccess = false,
    };
    // Make the request
    PagedAsyncEnumerable<RegionInstanceGroupList, InstanceGroup> response = regionInstanceGroupsClient.ListAsync(request);
    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((InstanceGroup item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });
    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((RegionInstanceGroupList page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (InstanceGroup item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<InstanceGroup> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (InstanceGroup item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for List</summary>
public void List()
{
    // Snippet: List(string, string, string, int?, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real identifiers.
    string project = "";
    string region = "";
    // Make the request
    PagedEnumerable<RegionInstanceGroupList, InstanceGroup> response = regionInstanceGroupsClient.List(project, region);
    // Iterate over all response items, lazily performing RPCs as required
    foreach (InstanceGroup item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (RegionInstanceGroupList page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (InstanceGroup item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<InstanceGroup> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (InstanceGroup item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for ListAsync</summary>
public async Task ListAsync()
{
    // Snippet: ListAsync(string, string, string, int?, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real identifiers.
    string project = "";
    string region = "";
    // Make the request
    PagedAsyncEnumerable<RegionInstanceGroupList, InstanceGroup> response = regionInstanceGroupsClient.ListAsync(project, region);
    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((InstanceGroup item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });
    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((RegionInstanceGroupList page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (InstanceGroup item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<InstanceGroup> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (InstanceGroup item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for ListInstances</summary>
public void ListInstancesRequestObject()
{
    // Snippet: ListInstances(ListInstancesRegionInstanceGroupsRequest, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real values.
    ListInstancesRegionInstanceGroupsRequest request = new ListInstancesRegionInstanceGroupsRequest
    {
        RegionInstanceGroupsListInstancesRequestResource = new RegionInstanceGroupsListInstancesRequest(),
        InstanceGroup = "",
        Region = "",
        OrderBy = "",
        Project = "",
        Filter = "",
        ReturnPartialSuccess = false,
    };
    // Make the request
    PagedEnumerable<RegionInstanceGroupsListInstances, InstanceWithNamedPorts> response = regionInstanceGroupsClient.ListInstances(request);
    // Iterate over all response items, lazily performing RPCs as required
    foreach (InstanceWithNamedPorts item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (RegionInstanceGroupsListInstances page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (InstanceWithNamedPorts item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<InstanceWithNamedPorts> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (InstanceWithNamedPorts item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for ListInstancesAsync</summary>
public async Task ListInstancesRequestObjectAsync()
{
    // Snippet: ListInstancesAsync(ListInstancesRegionInstanceGroupsRequest, CallSettings)
    // Create client
    RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
    // Initialize request argument(s)
    // NOTE: empty strings are generated placeholders — substitute real values.
    ListInstancesRegionInstanceGroupsRequest request = new ListInstancesRegionInstanceGroupsRequest
    {
        RegionInstanceGroupsListInstancesRequestResource = new RegionInstanceGroupsListInstancesRequest(),
        InstanceGroup = "",
        Region = "",
        OrderBy = "",
        Project = "",
        Filter = "",
        ReturnPartialSuccess = false,
    };
    // Make the request
    PagedAsyncEnumerable<RegionInstanceGroupsListInstances, InstanceWithNamedPorts> response = regionInstanceGroupsClient.ListInstancesAsync(request);
    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((InstanceWithNamedPorts item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });
    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((RegionInstanceGroupsListInstances page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (InstanceWithNamedPorts item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<InstanceWithNamedPorts> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (InstanceWithNamedPorts item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>Snippet for ListInstances</summary>
/// <remarks>Generated sample: synchronous flattened-argument overload; shows
/// per-item, per-raw-page and fixed-size-page iteration.</remarks>
public void ListInstances()
{
// Snippet: ListInstances(string, string, string, RegionInstanceGroupsListInstancesRequest, string, int?, CallSettings)
// Create client
RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
// Initialize request argument(s)
string project = "";
string region = "";
string instanceGroup = "";
RegionInstanceGroupsListInstancesRequest regionInstanceGroupsListInstancesRequestResource = new RegionInstanceGroupsListInstancesRequest();
// Make the request
PagedEnumerable<RegionInstanceGroupsListInstances, InstanceWithNamedPorts> response = regionInstanceGroupsClient.ListInstances(project, region, instanceGroup, regionInstanceGroupsListInstancesRequestResource);
// Iterate over all response items, lazily performing RPCs as required
foreach (InstanceWithNamedPorts item in response)
{
// Do something with each item
Console.WriteLine(item);
}
// Or iterate over pages (of server-defined size), performing one RPC per page
foreach (RegionInstanceGroupsListInstances page in response.AsRawResponses())
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (InstanceWithNamedPorts item in page)
{
// Do something with each item
Console.WriteLine(item);
}
}
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<InstanceWithNamedPorts> singlePage = response.ReadPage(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (InstanceWithNamedPorts item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for ListInstancesAsync</summary>
/// <remarks>Generated sample: asynchronous flattened-argument overload; mirrors
/// the synchronous ListInstances sample using the async paging APIs.</remarks>
public async Task ListInstancesAsync()
{
// Snippet: ListInstancesAsync(string, string, string, RegionInstanceGroupsListInstancesRequest, string, int?, CallSettings)
// Create client
RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
// Initialize request argument(s)
string project = "";
string region = "";
string instanceGroup = "";
RegionInstanceGroupsListInstancesRequest regionInstanceGroupsListInstancesRequestResource = new RegionInstanceGroupsListInstancesRequest();
// Make the request
PagedAsyncEnumerable<RegionInstanceGroupsListInstances, InstanceWithNamedPorts> response = regionInstanceGroupsClient.ListInstancesAsync(project, region, instanceGroup, regionInstanceGroupsListInstancesRequestResource);
// Iterate over all response items, lazily performing RPCs as required
await response.ForEachAsync((InstanceWithNamedPorts item) =>
{
// Do something with each item
Console.WriteLine(item);
});
// Or iterate over pages (of server-defined size), performing one RPC per page
await response.AsRawResponses().ForEachAsync((RegionInstanceGroupsListInstances page) =>
{
// Do something with each page of items
Console.WriteLine("A page of results:");
foreach (InstanceWithNamedPorts item in page)
{
// Do something with each item
Console.WriteLine(item);
}
});
// Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
int pageSize = 10;
Page<InstanceWithNamedPorts> singlePage = await response.ReadPageAsync(pageSize);
// Do something with the page of items
Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
foreach (InstanceWithNamedPorts item in singlePage)
{
// Do something with each item
Console.WriteLine(item);
}
// Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}
/// <summary>Snippet for SetNamedPorts</summary>
/// <remarks>Generated sample: request-object overload; demonstrates polling the
/// returned long-running operation to completion and re-attaching by name.</remarks>
public void SetNamedPortsRequestObject()
{
// Snippet: SetNamedPorts(SetNamedPortsRegionInstanceGroupRequest, CallSettings)
// Create client
RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
// Initialize request argument(s)
SetNamedPortsRegionInstanceGroupRequest request = new SetNamedPortsRegionInstanceGroupRequest
{
RegionInstanceGroupsSetNamedPortsRequestResource = new RegionInstanceGroupsSetNamedPortsRequest(),
RequestId = "",
InstanceGroup = "",
Region = "",
Project = "",
};
// Make the request
lro::Operation<Operation, Operation> response = regionInstanceGroupsClient.SetNamedPorts(request);
// Poll until the returned long-running operation is complete
lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Operation result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
lro::Operation<Operation, Operation> retrievedResponse = regionInstanceGroupsClient.PollOnceSetNamedPorts(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Operation retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for SetNamedPortsAsync</summary>
/// <remarks>Generated sample: async request-object overload of the
/// long-running SetNamedPorts operation.</remarks>
public async Task SetNamedPortsRequestObjectAsync()
{
// Snippet: SetNamedPortsAsync(SetNamedPortsRegionInstanceGroupRequest, CallSettings)
// Additional: SetNamedPortsAsync(SetNamedPortsRegionInstanceGroupRequest, CancellationToken)
// Create client
RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
// Initialize request argument(s)
SetNamedPortsRegionInstanceGroupRequest request = new SetNamedPortsRegionInstanceGroupRequest
{
RegionInstanceGroupsSetNamedPortsRequestResource = new RegionInstanceGroupsSetNamedPortsRequest(),
RequestId = "",
InstanceGroup = "",
Region = "",
Project = "",
};
// Make the request
lro::Operation<Operation, Operation> response = await regionInstanceGroupsClient.SetNamedPortsAsync(request);
// Poll until the returned long-running operation is complete
lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Operation result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
lro::Operation<Operation, Operation> retrievedResponse = await regionInstanceGroupsClient.PollOnceSetNamedPortsAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Operation retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for SetNamedPorts</summary>
/// <remarks>Generated sample: synchronous flattened-argument overload of the
/// long-running SetNamedPorts operation.</remarks>
public void SetNamedPorts()
{
// Snippet: SetNamedPorts(string, string, string, RegionInstanceGroupsSetNamedPortsRequest, CallSettings)
// Create client
RegionInstanceGroupsClient regionInstanceGroupsClient = RegionInstanceGroupsClient.Create();
// Initialize request argument(s)
string project = "";
string region = "";
string instanceGroup = "";
RegionInstanceGroupsSetNamedPortsRequest regionInstanceGroupsSetNamedPortsRequestResource = new RegionInstanceGroupsSetNamedPortsRequest();
// Make the request
lro::Operation<Operation, Operation> response = regionInstanceGroupsClient.SetNamedPorts(project, region, instanceGroup, regionInstanceGroupsSetNamedPortsRequestResource);
// Poll until the returned long-running operation is complete
lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
// Retrieve the operation result
Operation result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
lro::Operation<Operation, Operation> retrievedResponse = regionInstanceGroupsClient.PollOnceSetNamedPorts(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Operation retrievedResult = retrievedResponse.Result;
}
// End snippet
}
/// <summary>Snippet for SetNamedPortsAsync</summary>
/// <remarks>Generated sample: async flattened-argument overload of the
/// long-running SetNamedPorts operation.</remarks>
public async Task SetNamedPortsAsync()
{
// Snippet: SetNamedPortsAsync(string, string, string, RegionInstanceGroupsSetNamedPortsRequest, CallSettings)
// Additional: SetNamedPortsAsync(string, string, string, RegionInstanceGroupsSetNamedPortsRequest, CancellationToken)
// Create client
RegionInstanceGroupsClient regionInstanceGroupsClient = await RegionInstanceGroupsClient.CreateAsync();
// Initialize request argument(s)
string project = "";
string region = "";
string instanceGroup = "";
RegionInstanceGroupsSetNamedPortsRequest regionInstanceGroupsSetNamedPortsRequestResource = new RegionInstanceGroupsSetNamedPortsRequest();
// Make the request
lro::Operation<Operation, Operation> response = await regionInstanceGroupsClient.SetNamedPortsAsync(project, region, instanceGroup, regionInstanceGroupsSetNamedPortsRequestResource);
// Poll until the returned long-running operation is complete
lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
// Retrieve the operation result
Operation result = completedResponse.Result;
// Or get the name of the operation
string operationName = response.Name;
// This name can be stored, then the long-running operation retrieved later by name
lro::Operation<Operation, Operation> retrievedResponse = await regionInstanceGroupsClient.PollOnceSetNamedPortsAsync(operationName);
// Check if the retrieved long-running operation has completed
if (retrievedResponse.IsCompleted)
{
// If it has completed, then access the result
Operation retrievedResult = retrievedResponse.Result;
}
// End snippet
}
}
}
| |
using System;
using NUnit.Framework;
using Zu.AsyncChromeDriver.Tests.Environment;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Zu.AsyncWebDriver;
using Zu.WebBrowser.AsyncInteractions;
namespace Zu.AsyncChromeDriver.Tests
{
[TestFixture]
public class TextHandlingTest : DriverTestFixture
{
// Expected line separator in rendered text (WebDriver normalizes to CRLF).
private readonly string NewLine = "\r\n";
[Test]
public async Task ShouldReturnTheTextContentOfASingleElementWithNoChildren()
{
    await driver.GoToUrl(simpleTestPage);
    // FIX: NUnit's Assert.AreEqual signature is (expected, actual); the original
    // call had the arguments swapped, producing misleading failure messages.
    string selectText = await driver.FindElement(By.Id("oneline")).Text();
    Assert.AreEqual("A single line of text", selectText);
    // Fetch a second time to confirm Text() is stable across repeated reads.
    string getText = await driver.FindElement(By.Id("oneline")).Text();
    Assert.AreEqual("A single line of text", getText);
}
[Test]
// Text() must concatenate the visible text of all child elements.
public async Task ShouldReturnTheEntireTextContentOfChildElements()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("multiline")).Text();
Assert.That(text, Does.Contain("A div containing"));
Assert.That(text, Does.Contain("More than one line of text"));
Assert.That(text, Does.Contain("and block level elements"));
}
[Test]
// Script element content must not leak into the rendered text of its parent.
public async Task ShouldIgnoreScriptElements()
{
await driver.GoToUrl(javascriptEnhancedForm);
IWebElement labelForUsername = await driver.FindElement(By.Id("labelforusername"));
string text = await labelForUsername.Text();
Assert.AreEqual((await labelForUsername.FindElements(By.TagName("script"))).Count, 1);
Assert.That(text, Does.Not.Contain("document.getElementById"));
// NOTE(review): Assert.AreEqual here and below passes (actual, expected) —
// NUnit expects (expected, actual); failure messages will be misleading.
Assert.AreEqual(text, "Username:");
}
[Test]
// Block-level children are separated by the driver's newline sequence.
public async Task ShouldRepresentABlockLevelElementAsANewline()
{
await driver.GoToUrl(simpleTestPage);
string text = await driver.FindElement(By.Id("multiline")).Text();
Assert.That(text, Does.StartWith("A div containing" + NewLine));
Assert.That(text, Does.Contain("More than one line of text" + NewLine));
Assert.That(text, Does.EndWith("and block level elements"));
}
[Test]
// Runs of whitespace in the markup collapse to a single space, per CSS rules.
public async Task ShouldCollapseMultipleWhitespaceCharactersIntoASingleSpace()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("lotsofspaces")).Text();
Assert.AreEqual(text, "This line has lots of spaces.");
}
[Test]
// Leading/trailing whitespace is trimmed from the element's text.
public async Task ShouldTrimText()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("multiline")).Text();
Assert.That(text, Does.StartWith("A div containing"));
Assert.That(text, Does.EndWith("block level elements"));
}
[Test]
// &nbsp; is rendered as a plain space in the returned text.
public async Task ShouldConvertANonBreakingSpaceIntoANormalSpaceCharacter()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("nbsp")).Text();
Assert.AreEqual(text, "This line has a non-breaking space");
}
[Test]
// Unlike ordinary whitespace, consecutive non-breaking spaces must be preserved.
public async Task ShouldNotCollapseANonBreakingSpaces()
{
await driver.GoToUrl(simpleTestPage);
IWebElement element = await driver.FindElement(By.Id("nbspandspaces"));
string text = await element.Text();
Assert.AreEqual(text, "This line has a non-breaking space and spaces");
}
[Test]
// NBSPs at the end of a wrapped line survive (note the trailing space before NewLine).
public async Task ShouldNotTrimNonBreakingSpacesAtTheEndOfALineInTheMiddleOfText()
{
await driver.GoToUrl(simpleTestPage);
IWebElement element = await driver.FindElement(By.Id("multilinenbsp"));
string text = await element.Text();
string expectedStart = "These lines " + System.Environment.NewLine;
Assert.That(text, Does.StartWith(expectedStart));
}
[Test]
// NBSPs at the start of a wrapped line survive (leading spaces after NewLine).
public async Task ShouldNotTrimNonBreakingSpacesAtTheStartOfALineInTheMiddleOfText()
{
await driver.GoToUrl(simpleTestPage);
IWebElement element = await driver.FindElement(By.Id("multilinenbsp"));
string text = await element.Text();
string expectedContent = System.Environment.NewLine + " have";
Assert.That(text, Does.Contain(expectedContent));
}
[Test]
// Trailing NBSPs at the very end of the element's text are not trimmed.
public async Task ShouldNotTrimTrailingNonBreakingSpacesInMultilineText()
{
await driver.GoToUrl(simpleTestPage);
IWebElement element = await driver.FindElement(By.Id("multilinenbsp"));
string text = await element.Text();
string expectedEnd = "trailing NBSPs ";
Assert.That(text, Does.EndWith(expectedEnd));
}
[Test]
// Inline children (span/b/em...) must not introduce line breaks or extra spacing.
public async Task HavingInlineElementsShouldNotAffectHowTextIsReturned()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("inline")).Text();
Assert.AreEqual(text, "This line has text within elements that are meant to be displayed inline");
}
[Test]
// An inline element's own text is returned in full.
public async Task ShouldReturnTheEntireTextOfInlineElements()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("span")).Text();
Assert.AreEqual(text, "An inline element");
}
[Test]
// <pre> content keeps its original line breaks and indentation exactly.
public async Task ShouldRetainTheFormatingOfTextWithinAPreElement()
{
await driver.GoToUrl(simpleTestPage);
string text = await driver.FindElement(By.Id("preformatted")).Text();
Assert.That(text, Is.EqualTo(" This section has a preformatted" + System.Environment.NewLine +
" text block " + System.Environment.NewLine +
" split in four lines" + System.Environment.NewLine +
" "));
}
[Test]
// A <pre> nested in a normal block keeps its formatting while surrounding
// text is normalized.
public async Task ShouldRetainTheFormatingOfTextWithinAPreElementThatIsWithinARegularBlock()
{
await driver.GoToUrl(simpleTestPage);
string text = await driver.FindElement(By.Id("div-with-pre")).Text();
Assert.That(text, Is.EqualTo("before pre" + System.Environment.NewLine +
" This section has a preformatted" + System.Environment.NewLine +
" text block " + System.Environment.NewLine +
" split in four lines" + System.Environment.NewLine +
" " + System.Environment.NewLine +
"after pre"));
}
[Test]
// Typed multi-line input must round-trip through the textarea's value attribute.
public async Task ShouldBeAbleToSetMoreThanOneLineOfTextInATextArea()
{
await driver.GoToUrl(formsPage);
IWebElement textarea = await driver.FindElement(By.Id("withText"));
await textarea.Clear();
string expectedText = "I like cheese" + NewLine + NewLine + "It's really nice";
await textarea.SendKeys(expectedText);
string seenText = await textarea.GetAttribute("value");
Assert.AreEqual(seenText, expectedText);
}
[Test]
// Regression test: typing into one field after filling others must not mangle input.
public async Task ShouldBeAbleToEnterDatesAfterFillingInOtherValuesFirst()
{
await driver.GoToUrl(formsPage);
IWebElement input = await driver.FindElement(By.Id("working"));
string expectedValue = "10/03/2007 to 30/07/1993";
await input.SendKeys(expectedValue);
string seenValue = await input.GetAttribute("value");
Assert.AreEqual(seenValue, expectedValue);
}
[Test]
// Whitespace-only content trims down to the empty string.
public async Task ShouldReturnEmptyStringWhenTextIsOnlySpaces()
{
await driver.GoToUrl((xhtmlTestPage));
string text = await driver.FindElement(By.Id("spaces")).Text();
Assert.AreEqual(text, string.Empty);
}
[Test]
// An element with no content yields the empty string, not null.
public async Task ShouldReturnEmptyStringWhenTextIsEmpty()
{
await driver.GoToUrl((xhtmlTestPage));
string text = await driver.FindElement(By.Id("empty")).Text();
Assert.AreEqual(text, string.Empty);
}
[Test]
// Self-closing tags have no text content at all.
public async Task ShouldReturnEmptyStringWhenTagIsSelfClosing()
{
await driver.GoToUrl((xhtmlFormPage));
string text = await driver.FindElement(By.Id("self-closed")).Text();
Assert.AreEqual(text, string.Empty);
}
[Test]
// A soft line wrap inside a table cell must still keep the single separating space.
public async Task ShouldNotTrimSpacesWhenLineWraps()
{
await driver.GoToUrl(simpleTestPage);
string text = await driver.FindElement(By.XPath("//table/tbody/tr[1]/td[1]")).Text();
Assert.AreEqual("beforeSpace afterSpace", text);
}
[Test]
// Sibling block elements are joined with exactly one newline.
public async Task ShouldHandleSiblingBlockLevelElements()
{
await driver.GoToUrl(simpleTestPage);
string text = await driver.FindElement(By.Id("twoblocks")).Text();
Assert.AreEqual(text, "Some text" + NewLine + "Some more text");
}
[Test]
// Nested blocks flatten into newline-separated lines in document order.
public async Task ShouldHandleNestedBlockLevelElements()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("nestedblocks")).Text();
Assert.AreEqual("Cheese" + NewLine + "Some text" + NewLine + "Some more text" + NewLine
+ "and also" + NewLine + "Brie", text);
}
[Test]
// Whitespace around inline span boundaries collapses into the surrounding text.
public async Task ShouldHandleWhitespaceInInlineElements()
{
await driver.GoToUrl((simpleTestPage));
string text = await driver.FindElement(By.Id("inlinespan")).Text();
Assert.AreEqual(text, "line has text");
}
[Test]
// Sanity check that a large document's full source can be retrieved intact.
public async Task ReadALargeAmountOfData()
{
    await driver.GoToUrl(EnvironmentManager.Instance.UrlBuilder.WhereIs("macbeth.html"));
    // BUG FIX: the original wrote `await driver.PageSource().Trim().ToLower()`,
    // which applies Trim()/ToLower() to the Task<string> returned by PageSource()
    // rather than to its result. The await must complete first. ToLowerInvariant
    // is used so the comparison is culture-independent (the markup is ASCII).
    string source = (await driver.PageSource()).Trim().ToLowerInvariant();
    Assert.That(source, Does.EndWith("</html>"));
}
[Test]
// A <br> inside an inline label produces a line break in the text.
public async Task GetTextWithLineBreakForInlineElement()
{
await driver.GoToUrl(simpleTestPage);
IWebElement label = await driver.FindElement(By.Id("label1"));
string labelText = await label.Text();
Assert.That(new Regex("foo[\\n\\r]+bar").IsMatch(labelText), "Label text '" + labelText + "' did not match regular expression 'foo[\\n\\r]+bar'");
}
[Test]
// Hidden elements contribute no text; explicitly visible descendants still do.
public async Task ShouldOnlyIncludeVisibleText()
{
await driver.GoToUrl(javascriptPage);
string empty = await driver.FindElement(By.Id("suppressedParagraph")).Text();
string explicitText = await driver.FindElement(By.Id("outer")).Text();
Assert.AreEqual(string.Empty, empty);
Assert.AreEqual("sub-element that is explicitly visible", explicitText);
}
[Test]
// Hidden cells within a row are excluded from the row's text.
public async Task ShouldGetTextFromTableCells()
{
await driver.GoToUrl(tables);
IWebElement tr = await driver.FindElement(By.Id("hidden_text"));
String text = await tr.Text();
Assert.That(text, Does.Contain("some text"));
Assert.That(text, Does.Not.Contain("some more text"));
}
[Test]
// An <input>'s value is not part of its text content.
public async Task TextOfAnInputFieldShouldBeEmpty()
{
await driver.GoToUrl(formsPage);
IWebElement input = await driver.FindElement(By.Id("inputWithText"));
Assert.AreEqual(string.Empty, await input.Text());
}
[Test]
// A textarea's text reflects its default (markup) content.
public async Task TextOfATextAreaShouldBeEqualToItsDefaultText()
{
await driver.GoToUrl(formsPage);
IWebElement area = await driver.FindElement(By.Id("withText"));
Assert.AreEqual("Example text", await area.Text());
}
[Test]
// Typing changes the value property, but Text() keeps returning the default content.
public async Task TextOfATextAreaShouldBeEqualToItsDefaultTextEvenAfterTyping()
{
await driver.GoToUrl(formsPage);
IWebElement area = await driver.FindElement(By.Id("withText"));
string oldText = await area.Text();
await area.SendKeys("New Text");
Assert.AreEqual(oldText, await area.Text());
}
[Test]
// Same as above, but the value is changed via JavaScript instead of keystrokes.
public async Task TextOfATextAreaShouldBeEqualToItsDefaultTextEvenAfterChangingTheValue()
{
await driver.GoToUrl(formsPage);
IWebElement area = await driver.FindElement(By.Id("withText"));
string oldText = await area.GetAttribute("value");
await ((IJavaScriptExecutor)driver).ExecuteScript("arguments[0].value = arguments[1]", new CancellationToken(), area, "New Text");
Assert.AreEqual(oldText, await area.Text());
}
[Test]
// JSON-looking text must be returned verbatim, not parsed or re-serialized.
public async Task ShouldGetTextWhichIsAValidJSONObject()
{
await driver.GoToUrl(simpleTestPage);
IWebElement element = await driver.FindElement(By.Id("simpleJsonText"));
Assert.AreEqual("{a=\"b\", c=1, d=true}", await element.Text());
//assertEquals("{a=\"b\", \"c\"=d, e=true, f=\\123\\\\g\\\\\"\"\"\\\'}", element.getText());
}
[Test]
// Escaped quotes and backslashes inside JSON-like text survive untouched.
public async Task ShouldGetTextWhichIsAValidComplexJSONObject()
{
await driver.GoToUrl(simpleTestPage);
IWebElement element = await driver.FindElement(By.Id("complexJsonText"));
Assert.AreEqual("{a=\"\\\\b\\\\\\\"\'\\\'\"}", await element.Text());
}
[Test]
// Numeric-looking text (decimal point, comma, space separators) is not reformatted.
public async Task CanHandleTextThatLooksLikeANumber()
{
await driver.GoToUrl(EnvironmentManager.Instance.UrlBuilder.CreateInlinePage(
new InlinePage().WithBody("<div id='point'>12.345</div>",
"<div id='comma'>12,345</div>",
"<div id='space'>12 345</div>")));
Assert.That(await driver.FindElement(By.Id("point")).Text(), Is.EqualTo("12.345"));
Assert.That(await driver.FindElement(By.Id("comma")).Text(), Is.EqualTo("12,345"));
Assert.That(await driver.FindElement(By.Id("space")).Text(), Is.EqualTo("12 345"));
}
[Test]
// CSS text-transform is applied to the returned text (two accepted capitalizations
// cover browser differences in 'capitalize' handling of hyphenated words).
public async Task CanHandleTextTransformProperty()
{
await driver.GoToUrl(simpleTestPage);
Assert.That(await driver.FindElement(By.Id("capitalized")).Text(), Is.EqualTo("Hello, World! Bla-Bla-BLA").Or.EqualTo("Hello, World! Bla-bla-BLA"));
Assert.That(await driver.FindElement(By.Id("lowercased")).Text(), Is.EqualTo("hello, world! bla-bla-bla"));
Assert.That(await driver.FindElement(By.Id("uppercased")).Text(), Is.EqualTo("HELLO, WORLD! BLA-BLA-BLA"));
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Linq;
using Xunit;
namespace System.Collections.Immutable.Test
{
// Tests specific to ImmutableHashSet<T>: comparer handling, creation overloads,
// enumerator recycling, and hash-collision behavior. Shared set semantics are
// exercised via the ImmutableSetTest base class.
public class ImmutableHashSetTest : ImmutableSetTest
{
// Hash sets derive their behavior from GetHashCode, so the base class should
// run the GetHashCode-derivative test variants.
protected override bool IncludesGetHashCodeDerivative
{
get { return true; }
}
[Fact]
public void EmptyTest()
{
this.EmptyTestHelper(Empty<int>(), 5, null);
this.EmptyTestHelper(EmptyTyped<string>().WithComparer(StringComparer.OrdinalIgnoreCase), "a", StringComparer.OrdinalIgnoreCase);
}
[Fact]
// Ordinal comparer keeps both casings; case-insensitive comparer deduplicates.
public void CustomSort()
{
this.CustomSortTestHelper(
ImmutableHashSet<string>.Empty.WithComparer(StringComparer.Ordinal),
false,
new[] { "apple", "APPLE" },
new[] { "apple", "APPLE" });
this.CustomSortTestHelper(
ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase),
false,
new[] { "apple", "APPLE" },
new[] { "apple" });
}
[Fact]
// Switching to a coarser comparer must rehash and collapse now-equal elements.
public void ChangeUnorderedEqualityComparer()
{
var ordinalSet = ImmutableHashSet<string>.Empty
.WithComparer(StringComparer.Ordinal)
.Add("apple")
.Add("APPLE");
Assert.Equal(2, ordinalSet.Count); // claimed count
Assert.False(ordinalSet.Contains("aPpLe"));
var ignoreCaseSet = ordinalSet.WithComparer(StringComparer.OrdinalIgnoreCase);
Assert.Equal(1, ignoreCaseSet.Count);
Assert.True(ignoreCaseSet.Contains("aPpLe"));
}
[Fact]
public void ToSortTest()
{
var set = ImmutableHashSet<string>.Empty
.Add("apple")
.Add("APPLE");
var sorted = set.ToImmutableSortedSet();
CollectionAssertAreEquivalent(set.ToList(), sorted.ToList());
}
[Fact]
// BadHasher forces all elements into one bucket so enumeration walks collision lists.
public void EnumeratorWithHashCollisionsTest()
{
var emptySet = this.EmptyTyped<int>().WithComparer(new BadHasher<int>());
this.EnumeratorTestHelper(emptySet, null, 3, 1, 5);
}
[Fact]
// The enumerator is a struct whose internal state is pooled/recycled; a copy
// shares that state, so disposing one must invalidate both, and acquiring a
// fresh enumerator afterwards must work normally.
public void EnumeratorRecyclingMisuse()
{
var collection = ImmutableHashSet.Create<int>().Add(5);
var enumerator = collection.GetEnumerator();
var enumeratorCopy = enumerator;
Assert.True(enumerator.MoveNext());
Assert.False(enumerator.MoveNext());
enumerator.Dispose();
Assert.Throws<ObjectDisposedException>(() => enumerator.MoveNext());
Assert.Throws<ObjectDisposedException>(() => enumerator.Reset());
Assert.Throws<ObjectDisposedException>(() => enumerator.Current);
Assert.Throws<ObjectDisposedException>(() => enumeratorCopy.MoveNext());
Assert.Throws<ObjectDisposedException>(() => enumeratorCopy.Reset());
Assert.Throws<ObjectDisposedException>(() => enumeratorCopy.Current);
enumerator.Dispose(); // double-disposal should not throw
enumeratorCopy.Dispose();
// We expect that acquiring a new enumerator will use the same underlying Stack<T> object,
// but that it will not throw exceptions for the new enumerator.
enumerator = collection.GetEnumerator();
Assert.True(enumerator.MoveNext());
Assert.False(enumerator.MoveNext());
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
enumerator.Dispose();
}
[Fact]
// Every Create/CreateRange overload must honor the supplied comparer (or default).
public void Create()
{
var comparer = StringComparer.OrdinalIgnoreCase;
var set = ImmutableHashSet.Create<string>();
Assert.Equal(0, set.Count);
Assert.Same(EqualityComparer<string>.Default, set.KeyComparer);
set = ImmutableHashSet.Create<string>(comparer);
Assert.Equal(0, set.Count);
Assert.Same(comparer, set.KeyComparer);
set = ImmutableHashSet.Create("a");
Assert.Equal(1, set.Count);
Assert.Same(EqualityComparer<string>.Default, set.KeyComparer);
set = ImmutableHashSet.Create(comparer, "a");
Assert.Equal(1, set.Count);
Assert.Same(comparer, set.KeyComparer);
set = ImmutableHashSet.Create("a", "b");
Assert.Equal(2, set.Count);
Assert.Same(EqualityComparer<string>.Default, set.KeyComparer);
set = ImmutableHashSet.Create(comparer, "a", "b");
Assert.Equal(2, set.Count);
Assert.Same(comparer, set.KeyComparer);
set = ImmutableHashSet.CreateRange((IEnumerable<string>)new[] { "a", "b" });
Assert.Equal(2, set.Count);
Assert.Same(EqualityComparer<string>.Default, set.KeyComparer);
set = ImmutableHashSet.CreateRange(comparer, (IEnumerable<string>)new[] { "a", "b" });
Assert.Equal(2, set.Count);
Assert.Same(comparer, set.KeyComparer);
}
/// <summary>
/// Verifies the non-removal of an item that does not belong to the set,
/// but which happens to have a colliding hash code with another value
/// that *is* in the set.
/// </summary>
[Fact]
public void RemoveValuesFromCollidedHashCode()
{
var set = ImmutableHashSet.Create<int>(new BadHasher<int>(), 5, 6);
// Removing an absent value must return the same instance (no allocation).
Assert.Same(set, set.Remove(2));
var setAfterRemovingFive = set.Remove(5);
Assert.Equal(1, setAfterRemovingFive.Count);
Assert.Equal(new[] { 6 }, setAfterRemovingFive);
}
[Fact]
public void TryGetValueTest()
{
this.TryGetValueTestHelper(ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase));
}
[Fact]
public void DebuggerAttributesValid()
{
DebuggerAttributes.ValidateDebuggerDisplayReferences(ImmutableHashSet.Create<string>());
DebuggerAttributes.ValidateDebuggerTypeProxyProperties(ImmutableHashSet.Create<int>(1, 2, 3));
}
// Factory hooks consumed by the ImmutableSetTest base class.
protected override IImmutableSet<T> Empty<T>()
{
return ImmutableHashSet<T>.Empty;
}
protected ImmutableHashSet<T> EmptyTyped<T>()
{
return ImmutableHashSet<T>.Empty;
}
protected override ISet<T> EmptyMutable<T>()
{
return new HashSet<T>();
}
internal override IBinaryTree GetRootNode<T>(IImmutableSet<T> set)
{
return ((ImmutableHashSet<T>)set).Root;
}
/// <summary>
/// Tests various aspects of an unordered set.
/// </summary>
/// <typeparam name="T">The type of element stored in the set.</typeparam>
/// <param name="emptySet">The empty set.</param>
/// <param name="value">A value that could be placed in the set.</param>
/// <param name="comparer">The comparer used to obtain the empty set, if any.</param>
private void EmptyTestHelper<T>(IImmutableSet<T> emptySet, T value, IEqualityComparer<T> comparer)
{
Contract.Requires(emptySet != null);
this.EmptyTestHelper(emptySet);
Assert.Same(emptySet, emptySet.ToImmutableHashSet(comparer));
Assert.Same(comparer ?? EqualityComparer<T>.Default, ((IHashKeyCollection<T>)emptySet).KeyComparer);
if (comparer == null)
{
Assert.Same(emptySet, ImmutableHashSet<T>.Empty);
}
var reemptied = emptySet.Add(value).Clear();
Assert.Same(reemptied, reemptied.ToImmutableHashSet(comparer)); //, "Getting the empty set from a non-empty instance did not preserve the comparer.");
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Security.Cryptography.Tests;
using Xunit;
namespace System.Security.Cryptography.EcDsa.Tests
{
// Runs the shared ECDsa test suite through the Span-based Try* APIs
// (TrySignData/VerifyData over ReadOnlySpan<byte>) instead of the array overloads.
public sealed class ECDsaTests_Span : ECDsaTests
{
protected override bool VerifyData(ECDsa ecdsa, byte[] data, int offset, int count, byte[] signature, HashAlgorithmName hashAlgorithm) =>
ecdsa.VerifyData(new ReadOnlySpan<byte>(data, offset, count), signature, hashAlgorithm);
protected override byte[] SignData(ECDsa ecdsa, byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm) =>
TryWithOutputArray(dest => ecdsa.TrySignData(new ReadOnlySpan<byte>(data, offset, count), dest, hashAlgorithm, out int bytesWritten) ? (true, bytesWritten) : (false, 0));
// Span overloads must also throw ObjectDisposedException after disposal.
protected override void UseAfterDispose(ECDsa ecdsa, byte[] data, byte[] sig)
{
base.UseAfterDispose(ecdsa, data, sig);
byte[] hash = new byte[32];
Assert.Throws<ObjectDisposedException>(() => ecdsa.VerifyHash(hash.AsSpan(), sig.AsSpan()));
Assert.Throws<ObjectDisposedException>(() => ecdsa.TrySignHash(hash, sig, out _));
}
[Theory, MemberData(nameof(RealImplementations))]
// Null/empty/unknown hash algorithm names must be rejected before any signing work.
public void SignData_InvalidArguments_Throws(ECDsa ecdsa)
{
AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.TrySignData(ReadOnlySpan<byte>.Empty, Span<byte>.Empty, new HashAlgorithmName(null), out int bytesWritten));
AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.TrySignData(ReadOnlySpan<byte>.Empty, Span<byte>.Empty, new HashAlgorithmName(""), out int bytesWritten));
Assert.ThrowsAny<CryptographicException>(() => ecdsa.TrySignData(ReadOnlySpan<byte>.Empty, Span<byte>.Empty, new HashAlgorithmName(Guid.NewGuid().ToString("N")), out int bytesWritten));
}
[Theory, MemberData(nameof(RealImplementations))]
// Same invalid-algorithm contract for verification.
public void VerifyData_InvalidArguments_Throws(ECDsa ecdsa)
{
AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(ReadOnlySpan<byte>.Empty, ReadOnlySpan<byte>.Empty, new HashAlgorithmName(null)));
AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(ReadOnlySpan<byte>.Empty, ReadOnlySpan<byte>.Empty, new HashAlgorithmName("")));
Assert.ThrowsAny<CryptographicException>(() => ecdsa.VerifyData(ReadOnlySpan<byte>.Empty, Span<byte>.Empty, new HashAlgorithmName(Guid.NewGuid().ToString("N"))));
}
// Repeatedly invokes func with a doubling destination buffer until it reports
// success, then trims the array to the number of bytes actually written.
private static byte[] TryWithOutputArray(Func<byte[], (bool, int)> func)
{
for (int length = 1; ; length = checked(length * 2))
{
var result = new byte[length];
var (success, bytesWritten) = func(result);
if (success)
{
Array.Resize(ref result, bytesWritten);
return result;
}
}
}
}
public abstract partial class ECDsaTests : ECDsaTestsBase
{
[Fact]
// Setting KeySize must switch to a named curve of that size and regenerate the key.
public void KeySizeProp()
{
using (ECDsa e = ECDsaFactory.Create())
{
e.KeySize = 384;
Assert.Equal(384, e.KeySize);
ECParameters p384 = e.ExportParameters(false);
Assert.True(p384.Curve.IsNamed);
p384.Validate();
e.KeySize = 521;
Assert.Equal(521, e.KeySize);
ECParameters p521 = e.ExportParameters(false);
Assert.True(p521.Curve.IsNamed);
p521.Validate();
// Ensure the key was regenerated
Assert.NotEqual(p384.Curve.Oid.FriendlyName, p521.Curve.Oid.FriendlyName);
}
}
// Imports a key exported in explicit-parameter form into a fresh ECDsa, then
// regenerates keys over the same and over different curves, verifying that
// only the curve (never the key material) survives regeneration.
[Theory, MemberData(nameof(TestNewCurves))]
public void TestRegenKeyExplicit(CurveDef curveDef)
{
    ECParameters param, param2;
    ECDsa ec, newEc;
    using (ec = ECDsaFactory.Create(curveDef.Curve))
    {
        param = ec.ExportExplicitParameters(true);
        Assert.NotNull(param.D);
        using (newEc = ECDsaFactory.Create())
        {
            newEc.ImportParameters(param);
            // The curve name is not flowed on explicit export\import (by design) so this exercises logic
            // that regenerates based on current curve values
            newEc.GenerateKey(param.Curve);
            param2 = newEc.ExportExplicitParameters(true);
            // Only curve should match
            ComparePrivateKey(param, param2, false);
            ComparePublicKey(param.Q, param2.Q, false);
            CompareCurve(param.Curve, param2.Curve);
            // Specify same curve name
            newEc.GenerateKey(curveDef.Curve);
            Assert.Equal(curveDef.KeySize, newEc.KeySize);
            param2 = newEc.ExportExplicitParameters(true);
            // Only curve should match
            ComparePrivateKey(param, param2, false);
            ComparePublicKey(param.Q, param2.Q, false);
            CompareCurve(param.Curve, param2.Curve);
            // Specify different curve than current
            if (param.Curve.IsPrime)
            {
                if (curveDef.Curve.IsNamed &&
                    curveDef.Curve.Oid.FriendlyName != ECCurve.NamedCurves.nistP256.Oid.FriendlyName)
                {
                    // Specify different curve (nistP256) by explicit value
                    newEc.GenerateKey(ECCurve.NamedCurves.nistP256);
                    Assert.Equal(256, newEc.KeySize);
                    param2 = newEc.ExportExplicitParameters(true);
                    // Keys should not match
                    ComparePrivateKey(param, param2, false);
                    ComparePublicKey(param.Q, param2.Q, false);
                    // P,X,Y (and others) should not match
                    Assert.True(param2.Curve.IsPrime);
                    Assert.NotEqual(param.Curve.Prime, param2.Curve.Prime);
                    Assert.NotEqual(param.Curve.G.X, param2.Curve.G.X);
                    Assert.NotEqual(param.Curve.G.Y, param2.Curve.G.Y);
                    // Reset back to original
                    newEc.GenerateKey(param.Curve);
                    Assert.Equal(curveDef.KeySize, newEc.KeySize);
                    ECParameters copyOfParam1 = newEc.ExportExplicitParameters(true);
                    // Only curve should match
                    ComparePrivateKey(param, copyOfParam1, false);
                    ComparePublicKey(param.Q, copyOfParam1.Q, false);
                    CompareCurve(param.Curve, copyOfParam1.Curve);
                    // Set back to nistP256
                    newEc.GenerateKey(param2.Curve);
                    Assert.Equal(256, newEc.KeySize);
                    param2 = newEc.ExportExplicitParameters(true);
                    // Keys should not match
                    ComparePrivateKey(param, param2, false);
                    ComparePublicKey(param.Q, param2.Q, false);
                    // P,X,Y (and others) should not match
                    Assert.True(param2.Curve.IsPrime);
                    Assert.NotEqual(param.Curve.Prime, param2.Curve.Prime);
                    Assert.NotEqual(param.Curve.G.X, param2.Curve.G.X);
                    Assert.NotEqual(param.Curve.G.Y, param2.Curve.G.Y);
                }
            }
            else if (param.Curve.IsCharacteristic2)
            {
                if (curveDef.Curve.Oid.Value != ECDSA_Sect193r1_OID_VALUE)
                {
                    if (ECDsaFactory.IsCurveValid(new Oid(ECDSA_Sect193r1_OID_VALUE)))
                    {
                        // Specify different curve by name
                        newEc.GenerateKey(ECCurve.CreateFromValue(ECDSA_Sect193r1_OID_VALUE));
                        Assert.Equal(193, newEc.KeySize);
                        param2 = newEc.ExportExplicitParameters(true);
                        // Keys should not match
                        ComparePrivateKey(param, param2, false);
                        ComparePublicKey(param.Q, param2.Q, false);
                        // Polynomial,X,Y (and others) should not match
                        Assert.True(param2.Curve.IsCharacteristic2);
                        Assert.NotEqual(param.Curve.Polynomial, param2.Curve.Polynomial);
                        Assert.NotEqual(param.Curve.G.X, param2.Curve.G.X);
                        Assert.NotEqual(param.Curve.G.Y, param2.Curve.G.Y);
                    }
                }
            }
        }
    }
}
[Theory]
[MemberData(nameof(TestCurves))]
public void TestRegenKeyNamed(CurveDef curveDef)
{
    using (ECDsa ecdsa = ECDsaFactory.Create(curveDef.Curve))
    {
        // Export and sanity-check the initial private key.
        ECParameters before = ecdsa.ExportParameters(true);
        Assert.NotNull(before.D);
        before.Validate();

        // Regenerating on the same named curve must yield a fresh key pair.
        ecdsa.GenerateKey(before.Curve);
        ECParameters after = ecdsa.ExportParameters(true);
        after.Validate();

        // Only the curve should match; both halves of the key must differ.
        ComparePrivateKey(before, after, false);
        ComparePublicKey(before.Q, after.Q, false);
        CompareCurve(before.Curve, after.Curve);
    }
}
[ConditionalFact(nameof(ECExplicitCurvesSupported))]
public void TestRegenKeyNistP256()
{
    using (ECDsa ecdsa = ECDsaFactory.Create(256))
    {
        // Capture the generated key in explicit (non-named) curve form.
        ECParameters before = ecdsa.ExportExplicitParameters(true);
        Assert.NotNull(before.D);

        // Regenerate over the same explicit curve; the key must change.
        ecdsa.GenerateKey(before.Curve);
        ECParameters after = ecdsa.ExportExplicitParameters(true);

        // Only the curve should match.
        ComparePrivateKey(before, after, false);
        ComparePublicKey(before.Q, after.Q, false);
        CompareCurve(before.Curve, after.Curve);
    }
}
// Verifies that assigning KeySize on an instance created from a named curve
// abandons that curve and selects a different one matching the new size.
[Theory]
[MemberData(nameof(TestCurves))]
public void TestChangeFromNamedCurveToKeySize(CurveDef curveDef)
{
    // Only meaningful when the starting key was created from a named curve.
    if (!curveDef.Curve.IsNamed)
        return;
    using (ECDsa ec = ECDsaFactory.Create(curveDef.Curve))
    {
        ECParameters param = ec.ExportParameters(false);
        // Avoid comparing against same key as in curveDef
        if (ec.KeySize != 384 && ec.KeySize != 521)
        {
            ec.KeySize = 384;
            ECParameters param384 = ec.ExportParameters(false);
            Assert.NotEqual(param.Curve.Oid.FriendlyName, param384.Curve.Oid.FriendlyName);
            Assert.Equal(384, ec.KeySize);
            ec.KeySize = 521;
            ECParameters param521 = ec.ExportParameters(false);
            Assert.NotEqual(param384.Curve.Oid.FriendlyName, param521.Curve.Oid.FriendlyName);
            Assert.Equal(521, ec.KeySize);
        }
    }
}
[ConditionalFact(nameof(ECExplicitCurvesSupported))]
public void TestPositive256WithExplicitParameters()
{
    // Importing the canned NIST P-256 key in explicit-parameter form must make
    // the canned signature verify successfully.
    using (ECDsa ecdsa = ECDsaFactory.Create())
    {
        ECParameters explicitP256 = EccTestData.GetNistP256ExplicitTestData();
        ecdsa.ImportParameters(explicitP256);
        Verify256(ecdsa, true);
    }
}
[Fact]
public void TestNegative256WithRandomKey()
{
    // A freshly generated random key cannot verify the canned signature.
    using (ECDsa ecdsa = ECDsaFactory.Create(ECCurve.NamedCurves.nistP256))
    {
        Verify256(ecdsa, false);
    }
}
[Fact]
public void PublicKey_CannotSign()
{
    // Import only the public portion of a key pair into a second instance;
    // attempting to sign with that instance must fail.
    using (ECDsa keySource = ECDsaFactory.Create())
    using (ECDsa publicOnly = ECDsaFactory.Create())
    {
        ECParameters publicParameters = keySource.ExportParameters(false);
        publicOnly.ImportParameters(publicParameters);
        Assert.ThrowsAny<CryptographicException>(
            () => SignData(publicOnly, new byte[] { 1, 2, 3, 4, 5 }, HashAlgorithmName.SHA256));
    }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Drawing.Internal
{
using System.Diagnostics;
using System.Globalization;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using System.Text;
/// <include file='doc\DbgUtil.uex' path='docs/doc[@for="DbgUtil"]/*' />
/// <devdoc>
/// Debug help utility.
/// </devdoc>
[
ReflectionPermission(SecurityAction.Assert, MemberAccess = true),
EnvironmentPermission(SecurityAction.Assert, Unrestricted = true),
FileIOPermission(SecurityAction.Assert, Unrestricted = true),
SecurityPermission(SecurityAction.Assert, Flags = SecurityPermissionFlag.UnmanagedCode),
UIPermission(SecurityAction.Assert, Unrestricted = true)
]
internal sealed class DbgUtil
{
// FormatMessage flags (winbase.h). FORMAT_MESSAGE_DEFAULT requests the system
// message for an error code with insert sequences ignored.
public const int
    FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x00000100,
    FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200,
    FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000,
    FORMAT_MESSAGE_DEFAULT = FORMAT_MESSAGE_IGNORE_INSERTS | FORMAT_MESSAGE_FROM_SYSTEM;
// Current user's default locale id; used to format system error messages.
[DllImport(ExternDll.Kernel32, SetLastError = true, CharSet = System.Runtime.InteropServices.CharSet.Auto)]
public static extern int GetUserDefaultLCID();
[DllImport(ExternDll.Kernel32, SetLastError = true, CharSet = System.Runtime.InteropServices.CharSet.Auto)]
public static extern int FormatMessage(int dwFlags, HandleRef lpSource, int dwMessageId, int dwLanguageId, StringBuilder lpBuffer, int nSize, HandleRef arguments);
// Default number of stack frames captured by the StackFramesToStr helpers.
public static int gdipInitMaxFrameCount = 8;
// disable csharp compiler warning #0414: field assigned unused value
#pragma warning disable 0414
public static int gdiUseMaxFrameCount = 8;
public static int finalizeMaxFrameCount = 5;
#pragma warning restore 0414
// Methods
/// <devdoc>
///     Call this method from your Dispose(bool) to assert that unmanaged resources have been explicitly disposed.
///     No-op unless the GDI_FINALIZATION_WATCH build symbol is defined.
/// </devdoc>
[Conditional("DEBUG")] // This code will be compiled into the assembly anyways, it is up to the compiler to ignore the call.
public static void AssertFinalization(object obj, bool disposing)
{
#if GDI_FINALIZATION_WATCH
    if( disposing || AppDomain.CurrentDomain.IsFinalizingForUnload() )
    {
        return;
    }
    try
    {
        BindingFlags bindingFlags = BindingFlags.NonPublic | BindingFlags.GetField | BindingFlags.Static | BindingFlags.Instance;
        FieldInfo allocSiteFld = obj.GetType().GetField("AllocationSite", bindingFlags);
        string allocationSite = allocSiteFld != null ? allocSiteFld.GetValue( obj ).ToString() : "<Allocation site unavailable>";
        // ignore objects created by WindowsGraphicsCacheManager.
        if( allocationSite.Contains("WindowsGraphicsCacheManager") )
        {
            return;
        }
        Debug.Fail("Object Disposed through finalization - it should be explicitly disposed.");
        Debug.WriteLine("Allocation stack:\r\n" + allocationSite);
    }
    catch(Exception ex)
    {
        try
        {
            Debug.WriteLine("Exception thrown while trying to get allocation stack: " + ex);
        }
        catch
        {
        }
    }
#endif
}
/// <devdoc>
///     Debug-only: fails an assert with 'message' plus the calling thread's
///     last Win32 error when 'expression' is false.
/// </devdoc>
[Conditional("DEBUG")]
public static void AssertWin32(bool expression, string message)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    Debug.Fail(message + "\r\nError: " + DbgUtil.GetLastErrorStr());
#endif
}
/// <devdoc>
///     Debug-only: composite-formats the message with one argument and appends the last Win32 error.
/// </devdoc>
[Conditional("DEBUG")]
public static void AssertWin32(bool expression, string format, object arg1)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    AssertWin32Impl(expression, format, new object[] { arg1 });
#endif
}
/// <devdoc>
///     Debug-only: composite-formats the message with two arguments and appends the last Win32 error.
/// </devdoc>
[Conditional("DEBUG")]
public static void AssertWin32(bool expression, string format, object arg1, object arg2)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    AssertWin32Impl(expression, format, new object[] { arg1, arg2 });
#endif
}
/// <devdoc>
///     Debug-only: composite-formats the message with three arguments and appends the last Win32 error.
/// </devdoc>
[Conditional("DEBUG")]
public static void AssertWin32(bool expression, string format, object arg1, object arg2, object arg3)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    AssertWin32Impl(expression, format, new object[] { arg1, arg2, arg3 });
#endif
}
/// <devdoc>
///     Debug-only: composite-formats the message with four arguments and appends the last Win32 error.
/// </devdoc>
[Conditional("DEBUG")]
public static void AssertWin32(bool expression, string format, object arg1, object arg2, object arg3, object arg4)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    AssertWin32Impl(expression, format, new object[] { arg1, arg2, arg3, arg4 });
#endif
}
/// <devdoc>
///     Debug-only: composite-formats the message with five arguments and appends the last Win32 error.
/// </devdoc>
[Conditional("DEBUG")]
public static void AssertWin32(bool expression, string format, object arg1, object arg2, object arg3, object arg4, object arg5)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    AssertWin32Impl(expression, format, new object[] { arg1, arg2, arg3, arg4, arg5 });
#endif
}
/// <devdoc>
///     Shared implementation: formats with the current culture and fails the
///     assert with the message plus the last Win32 error.
/// </devdoc>
[Conditional("DEBUG")] // This code will be compiled into the assembly anyways, it is up to the compiler to ignore the call.
private static void AssertWin32Impl(bool expression, string format, object[] args)
{
#if DEBUG
    if (expression)
    {
        return;
    }
    string message = string.Format(CultureInfo.CurrentCulture, format, args);
    Debug.Fail(message + "\r\nError: " + DbgUtil.GetLastErrorStr());
#endif
}
//
// WARNING: Your PInvoke function needs to have the DllImport.SetLastError=true for this method
// to work properly. From the MSDN:
// GetLastWin32Error exposes the Win32 GetLastError API method from Kernel32.DLL. This method exists
// because it is not safe to make a direct platform invoke call to GetLastError to obtain this information.
// If you want to access this error code, you must call GetLastWin32Error rather than writing your own
// platform invoke definition for GetLastError and calling it. The common language runtime can make
// internal calls to APIs that overwrite the operating system maintained GetLastError.
//
// You can only use this method to obtain error codes if you apply the System.Runtime.InteropServices.DllImportAttribute
// to the method signature and set the SetLastError field to true.
//
/// <devdoc>
///     Returns "0x{code:x8} - {system message}" for the calling thread's last Win32 error.
/// </devdoc>
public static string GetLastErrorStr()
{
    int MAX_SIZE = 255;
    StringBuilder buffer = new StringBuilder(MAX_SIZE);
    string message = String.Empty;
    int err = 0;
    try
    {
        // Capture the error code before any other call can overwrite it.
        err = Marshal.GetLastWin32Error();
        int retVal = FormatMessage(
            FORMAT_MESSAGE_DEFAULT,
            new HandleRef(null, IntPtr.Zero),
            err,
            GetUserDefaultLCID(),
            buffer,
            MAX_SIZE,
            new HandleRef(null, IntPtr.Zero));
        // FormatMessage returns 0 on failure.
        message = retVal != 0 ? buffer.ToString() : "<error returned>";
    }
    catch (Exception ex)
    {
        if (DbgUtil.IsCriticalException(ex))
        {
            throw; //rethrow critical exception.
        }
        message = ex.ToString();
    }
    return String.Format(CultureInfo.CurrentCulture, "0x{0:x8} - {1}", err, message);
}
/// <devdoc>
///     Duplicated here from ClientUtils so this file can be compiled into both
///     System.Drawing and System.Windows.Forms without a shared dependency.
///     NullReferenceException is deliberately not treated as critical.
/// </devdoc>
private static bool IsCriticalException(Exception ex)
{
    if (ex is StackOverflowException)
    {
        return true;
    }
    if (ex is OutOfMemoryException)
    {
        return true;
    }
    return ex is System.Threading.ThreadAbortException;
}
/// <devdoc>
///     Gets the full stack trace of the current thread (Environment.StackTrace).
/// </devdoc>
public static string StackTrace
{
    get
    {
        return Environment.StackTrace;
    }
}
/// <devdoc>
///     Returns information about the top stack frames in a string format. The input param determines the number of
///     frames to include. Frames belonging to DbgUtil itself are skipped so the
///     caller's frames come first.
/// </devdoc>
public static string StackFramesToStr(int maxFrameCount)
{
    // Build incrementally so that, if an exception occurs mid-way, the partial
    // trace gathered so far is still returned (with the exception appended).
    StringBuilder trace = new StringBuilder();
    try
    {
        StackTrace st = new StackTrace(true);
        //
        // Ignore frames for methods on this library.
        // Note: The stack frame holds the latest frame at index 0.
        //
        int dbgUtilFrameCount = 0;
        while (dbgUtilFrameCount < st.FrameCount)
        {
            StackFrame sf = st.GetFrame(dbgUtilFrameCount);
            // GetMethod() can legitimately return null; the previous code only
            // guarded the frame itself and could throw here.
            MethodBase method = sf == null ? null : sf.GetMethod();
            if (method == null || method.DeclaringType != typeof(DbgUtil))
            {
                break;
            }
            dbgUtilFrameCount++;
        }
        maxFrameCount += dbgUtilFrameCount; // add ignored frames.
        if (maxFrameCount > st.FrameCount)
        {
            maxFrameCount = st.FrameCount;
        }
        for (int i = dbgUtilFrameCount; i < maxFrameCount; i++)
        {
            StackFrame sf = st.GetFrame(i);
            if (sf == null)
            {
                continue;
            }
            MethodBase mi = sf.GetMethod();
            if (mi == null)
            {
                continue;
            }
            // Keep only the file-name portion of the full source path.
            string fileName = sf.GetFileName();
            if (fileName != null)
            {
                int backSlashIndex = fileName.LastIndexOf('\\');
                if (backSlashIndex != -1)
                {
                    fileName = fileName.Substring(backSlashIndex + 1);
                }
            }
            // Comma-separated parameter type names (was an O(n^2) string concat).
            ParameterInfo[] parameters = mi.GetParameters();
            string[] paramTypeNames = new string[parameters.Length];
            for (int p = 0; p < parameters.Length; p++)
            {
                paramTypeNames[p] = parameters[p].ParameterType.Name;
            }
            string args = String.Join(", ", paramTypeNames);
            trace.AppendFormat(CultureInfo.CurrentCulture, "at {0} {1}.{2}({3})\r\n", fileName, mi.DeclaringType, mi.Name, args);
        }
    }
    catch (Exception ex)
    {
        if (DbgUtil.IsCriticalException(ex))
        {
            throw; //rethrow critical exception.
        }
        trace.Append(ex.ToString());
    }
    return trace.ToString();
}
/// <devdoc>
///     Returns information about the top stack frames in a string format,
///     using the default frame count (gdipInitMaxFrameCount).
/// </devdoc>
public static string StackFramesToStr()
{
    return StackFramesToStr(DbgUtil.gdipInitMaxFrameCount);
}
/// <devdoc>
///     Returns information about the top stack frames in a string format. The input param determines the number of
///     frames to include. The 'message' parameter is used as the header of the returned string.
/// </devdoc>
public static string StackTraceToStr(string message, int frameCount)
{
    return String.Format(CultureInfo.CurrentCulture, "{0}\r\nTop Stack Trace:\r\n{1}", message, DbgUtil.StackFramesToStr(frameCount));
}
/// <devdoc>
///     Returns information about the top stack frames in a string format, with the
///     default frame count. The 'message' parameter is used as the header of the returned string.
/// </devdoc>
public static string StackTraceToStr(string message)
{
    return StackTraceToStr(message, DbgUtil.gdipInitMaxFrameCount);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Text;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Internal.Cryptography;
namespace Internal.NativeCrypto
{
// Constant tables shared by the CNG (BCrypt) interop layer.
internal static partial class BCryptNative
{
    /// <summary>
    /// Well known algorithm names
    /// </summary>
    internal static class AlgorithmName
    {
        public const string ECDHP256 = "ECDH_P256";         // BCRYPT_ECDH_P256_ALGORITHM
        public const string ECDHP384 = "ECDH_P384";         // BCRYPT_ECDH_P384_ALGORITHM
        public const string ECDHP521 = "ECDH_P521";         // BCRYPT_ECDH_P521_ALGORITHM
        public const string ECDsaP256 = "ECDSA_P256";       // BCRYPT_ECDSA_P256_ALGORITHM
        public const string ECDsaP384 = "ECDSA_P384";       // BCRYPT_ECDSA_P384_ALGORITHM
        public const string ECDsaP521 = "ECDSA_P521";       // BCRYPT_ECDSA_P521_ALGORITHM
        public const string MD5 = "MD5";                    // BCRYPT_MD5_ALGORITHM
        public const string Sha1 = "SHA1";                  // BCRYPT_SHA1_ALGORITHM
        public const string Sha256 = "SHA256";              // BCRYPT_SHA256_ALGORITHM
        public const string Sha384 = "SHA384";              // BCRYPT_SHA384_ALGORITHM
        public const string Sha512 = "SHA512";              // BCRYPT_SHA512_ALGORITHM
    }
    /// <summary>
    /// Magic numbers identifying blob types.
    /// Each value is a 4-byte ASCII tag in little-endian order, e.g. 0x314B4345 == "ECK1".
    /// </summary>
    internal enum KeyBlobMagicNumber {
        ECDHPublicP256 = 0x314B4345,                        // BCRYPT_ECDH_PUBLIC_P256_MAGIC
        ECDHPublicP384 = 0x334B4345,                        // BCRYPT_ECDH_PUBLIC_P384_MAGIC
        ECDHPublicP521 = 0x354B4345,                        // BCRYPT_ECDH_PUBLIC_P521_MAGIC
        ECDsaPublicP256 = 0x31534345,                       // BCRYPT_ECDSA_PUBLIC_P256_MAGIC
        ECDsaPublicP384 = 0x33534345,                       // BCRYPT_ECDSA_PUBLIC_P384_MAGIC
        ECDsaPublicP521 = 0x35534345                        // BCRYPT_ECDSA_PUBLIC_P521_MAGIC
    }
    // Key-derivation function identifiers for BCryptDeriveKey-style APIs.
    internal static class KeyDerivationFunction
    {
        public const string Hash = "HASH";                  // BCRYPT_KDF_HASH
        public const string Hmac = "HMAC";                  // BCRYPT_KDF_HMAC
        public const string Tls = "TLS_PRF";                // BCRYPT_KDF_TLS_PRF
    }
}
//
// Interop layer around Windows CNG api.
//
internal static partial class Cng
{
    public const String CngDll = "BCrypt.dll";
    public const String Capi2Dll = "Crypt32.dll";
    [Flags]
    public enum OpenAlgorithmProviderFlags : int
    {
        NONE = 0x00000000,
        BCRYPT_ALG_HANDLE_HMAC_FLAG = 0x00000008,
    }
    public const string BCRYPT_3DES_ALGORITHM = "3DES";
    public const string BCRYPT_AES_ALGORITHM = "AES";
    public const string BCRYPT_CHAIN_MODE_CBC = "ChainingModeCBC";
    public const string BCRYPT_CHAIN_MODE_ECB = "ChainingModeECB";
    // Opens a CNG algorithm provider; throws CryptographicException on any non-success NTSTATUS.
    public static SafeAlgorithmHandle BCryptOpenAlgorithmProvider(String pszAlgId, String pszImplementation, OpenAlgorithmProviderFlags dwFlags)
    {
        SafeAlgorithmHandle hAlgorithm = null;
        NTSTATUS ntStatus = Interop.BCryptOpenAlgorithmProvider(out hAlgorithm, pszAlgId, pszImplementation, (int)dwFlags);
        if (ntStatus != NTSTATUS.STATUS_SUCCESS)
            throw CreateCryptographicException(ntStatus);
        return hAlgorithm;
    }
    // Imports raw key bytes as a BCRYPT_KEY_DATA_BLOB (header + key material).
    public static SafeKeyHandle BCryptImportKey(this SafeAlgorithmHandle hAlg, byte[] key)
    {
        unsafe
        {
            const String BCRYPT_KEY_DATA_BLOB = "KeyDataBlob";
            int keySize = key.Length;
            int blobSize = sizeof(BCRYPT_KEY_DATA_BLOB_HEADER) + keySize;
            byte[] blob = new byte[blobSize];
            // Write the native header at the front of the managed buffer.
            fixed (byte* pbBlob = blob)
            {
                BCRYPT_KEY_DATA_BLOB_HEADER* pBlob = (BCRYPT_KEY_DATA_BLOB_HEADER*)pbBlob;
                pBlob->dwMagic = BCRYPT_KEY_DATA_BLOB_HEADER.BCRYPT_KEY_DATA_BLOB_MAGIC;
                pBlob->dwVersion = BCRYPT_KEY_DATA_BLOB_HEADER.BCRYPT_KEY_DATA_BLOB_VERSION1;
                pBlob->cbKeyData = (uint)keySize;
            }
            Buffer.BlockCopy(key, 0, blob, sizeof(BCRYPT_KEY_DATA_BLOB_HEADER), keySize);
            SafeKeyHandle hKey;
            NTSTATUS ntStatus = Interop.BCryptImportKey(hAlg, IntPtr.Zero, BCRYPT_KEY_DATA_BLOB, out hKey, IntPtr.Zero, 0, blob, blobSize, 0);
            if (ntStatus != NTSTATUS.STATUS_SUCCESS)
                throw CreateCryptographicException(ntStatus);
            return hKey;
        }
    }
    // Managed mirror of the native BCRYPT_KEY_DATA_BLOB_HEADER structure.
    [StructLayout(LayoutKind.Sequential)]
    private struct BCRYPT_KEY_DATA_BLOB_HEADER
    {
        public UInt32 dwMagic;
        public UInt32 dwVersion;
        public UInt32 cbKeyData;
        public const UInt32 BCRYPT_KEY_DATA_BLOB_MAGIC = 0x4d42444b;    // "KDBM" tag.
        public const UInt32 BCRYPT_KEY_DATA_BLOB_VERSION1 = 0x1;
    }
    // Sets the chaining mode property (e.g. BCRYPT_CHAIN_MODE_CBC) on an algorithm handle.
    public static void SetCipherMode(this SafeAlgorithmHandle hAlg, string cipherMode)
    {
        // cbInput is in bytes and includes the trailing null: (chars + 1) * sizeof(WCHAR).
        NTSTATUS ntStatus = Interop.BCryptSetProperty(hAlg, "ChainingMode", cipherMode, (cipherMode.Length + 1) * 2, 0);
        if (ntStatus != NTSTATUS.STATUS_SUCCESS)
        {
            throw CreateCryptographicException(ntStatus);
        }
    }
    // Note: input and output are allowed to be the same buffer. BCryptEncrypt will correctly do the encryption in place according to CNG documentation.
    public static int BCryptEncrypt(this SafeKeyHandle hKey, byte[] input, int inputOffset, int inputCount, byte[] iv, byte[] output, int outputOffset, int outputCount)
    {
        Debug.Assert(input != null);
        Debug.Assert(inputOffset >= 0);
        Debug.Assert(inputCount >= 0);
        Debug.Assert(inputCount <= input.Length - inputOffset);
        Debug.Assert(output != null);
        Debug.Assert(outputOffset >= 0);
        Debug.Assert(outputCount >= 0);
        Debug.Assert(outputCount <= output.Length - outputOffset);
        unsafe
        {
            fixed (byte* pbInput = input)
            {
                fixed (byte* pbOutput = output)
                {
                    int cbResult;
                    NTSTATUS ntStatus = Interop.BCryptEncrypt(hKey, pbInput + inputOffset, inputCount, IntPtr.Zero, iv, iv == null ? 0 : iv.Length, pbOutput + outputOffset, outputCount, out cbResult, 0);
                    if (ntStatus != NTSTATUS.STATUS_SUCCESS)
                        throw CreateCryptographicException(ntStatus);
                    // Number of bytes actually written to the output buffer.
                    return cbResult;
                }
            }
        }
    }
    // Note: input and output are allowed to be the same buffer. BCryptDecrypt will correctly do the decryption in place according to CNG documentation.
    public static int BCryptDecrypt(this SafeKeyHandle hKey, byte[] input, int inputOffset, int inputCount, byte[] iv, byte[] output, int outputOffset, int outputCount)
    {
        Debug.Assert(input != null);
        Debug.Assert(inputOffset >= 0);
        Debug.Assert(inputCount >= 0);
        Debug.Assert(inputCount <= input.Length - inputOffset);
        Debug.Assert(output != null);
        Debug.Assert(outputOffset >= 0);
        Debug.Assert(outputCount >= 0);
        Debug.Assert(outputCount <= output.Length - outputOffset);
        unsafe
        {
            fixed (byte* pbInput = input)
            {
                fixed (byte* pbOutput = output)
                {
                    int cbResult;
                    NTSTATUS ntStatus = Interop.BCryptDecrypt(hKey, pbInput + inputOffset, inputCount, IntPtr.Zero, iv, iv == null ? 0 : iv.Length, pbOutput + outputOffset, outputCount, out cbResult, 0);
                    if (ntStatus != NTSTATUS.STATUS_SUCCESS)
                        throw CreateCryptographicException(ntStatus);
                    // Number of bytes actually written to the output buffer.
                    return cbResult;
                }
            }
        }
    }
    private static class BCryptGetPropertyStrings
    {
        public const String BCRYPT_HASH_LENGTH = "HashDigestLength";
    }
    // Formats an ASN.1-encoded structure as display text; returns null on failure.
    public static String CryptFormatObject(String oidValue, byte[] rawData, bool multiLine)
    {
        const int X509_ASN_ENCODING = 0x00000001;
        const int CRYPT_FORMAT_STR_MULTI_LINE = 0x00000001;
        int dwFormatStrType = multiLine ? CRYPT_FORMAT_STR_MULTI_LINE : 0;
        // First call (null buffer) obtains the required size in bytes.
        int cbFormat = 0;
        if (!Interop.CryptFormatObject(X509_ASN_ENCODING, 0, dwFormatStrType, IntPtr.Zero, oidValue, rawData, rawData.Length, null, ref cbFormat))
            return null;
        // Convert the byte count into a WCHAR capacity for the StringBuilder.
        StringBuilder sb = new StringBuilder((cbFormat + 1) / 2);
        if (!Interop.CryptFormatObject(X509_ASN_ENCODING, 0, dwFormatStrType, IntPtr.Zero, oidValue, rawData, rawData.Length, sb, ref cbFormat))
            return null;
        return sb.ToString();
    }
    // Subset of native NTSTATUS codes this layer cares about.
    private enum NTSTATUS : uint
    {
        STATUS_SUCCESS = 0x0,
        STATUS_NOT_FOUND = 0xc0000225,
        STATUS_INVALID_PARAMETER = 0xc000000d,
        STATUS_NO_MEMORY = 0xc0000017,
    }
    // Maps an NTSTATUS failure into a CryptographicException via an HRESULT.
    // NOTE(review): the classic HRESULT_FROM_NT mapping uses FACILITY_NT_BIT
    // (0x10000000); this code ORs 0x01000000 instead — confirm this is the
    // intended mapping before changing it.
    private static Exception CreateCryptographicException(NTSTATUS ntStatus)
    {
        int hr = ((int)ntStatus) | 0x01000000;
        return hr.ToCryptographicException();
    }
}
// Raw P/Invoke declarations backing the Cng wrapper methods above.
internal static partial class Cng
{
    private static class Interop
    {
        [DllImport(CngDll, CharSet = CharSet.Unicode)]
        public static extern NTSTATUS BCryptOpenAlgorithmProvider(out SafeAlgorithmHandle phAlgorithm, String pszAlgId, String pszImplementation, int dwFlags);
        [DllImport(CngDll, CharSet = CharSet.Unicode)]
        public static extern unsafe NTSTATUS BCryptSetProperty(SafeAlgorithmHandle hObject, String pszProperty, String pbInput, int cbInput, int dwFlags);
        [DllImport(CngDll, CharSet = CharSet.Unicode)]
        public static extern NTSTATUS BCryptImportKey(SafeAlgorithmHandle hAlgorithm, IntPtr hImportKey, String pszBlobType, out SafeKeyHandle hKey, IntPtr pbKeyObject, int cbKeyObject, byte[] pbInput, int cbInput, int dwFlags);
        [DllImport(CngDll, CharSet = CharSet.Unicode)]
        public static extern unsafe NTSTATUS BCryptEncrypt(SafeKeyHandle hKey, byte* pbInput, int cbInput, IntPtr paddingInfo, [In,Out] byte [] pbIV, int cbIV, byte* pbOutput, int cbOutput, out int cbResult, int dwFlags);
        [DllImport(CngDll, CharSet = CharSet.Unicode)]
        public static extern unsafe NTSTATUS BCryptDecrypt(SafeKeyHandle hKey, byte* pbInput, int cbInput, IntPtr paddingInfo, [In, Out] byte[] pbIV, int cbIV, byte* pbOutput, int cbOutput, out int cbResult, int dwFlags);
        [DllImport(Capi2Dll, CharSet = CharSet.Ansi, SetLastError = true, BestFitMapping = false)]
        public static extern bool CryptFormatObject(
            [In] int dwCertEncodingType,   // only valid value is X509_ASN_ENCODING
            [In] int dwFormatType,         // unused - pass 0.
            [In] int dwFormatStrType,      // select multiline
            [In] IntPtr pFormatStruct,     // unused - pass IntPtr.Zero
            [MarshalAs(UnmanagedType.LPStr)]
            [In] String lpszStructType,    // OID value
            [In] byte[] pbEncoded,         // Data to be formatted
            [In] int cbEncoded,            // Length of data to be formatted
            [MarshalAs(UnmanagedType.LPWStr)]
            [Out] StringBuilder pbFormat,  // Receives formatted string.
            [In, Out] ref int pcbFormat);  // Sends/receives length of formatted String.
    }
}
// Base SafeHandle for BCrypt handle types: a zero handle is the invalid value.
internal abstract class SafeBCryptHandle : SafeHandle
{
    public SafeBCryptHandle()
        : base(IntPtr.Zero, true)   // true: this instance owns and must release the handle.
    {
    }
    public sealed override bool IsInvalid
    {
        get
        {
            return handle == IntPtr.Zero;
        }
    }
}
// Owns a BCrypt algorithm-provider handle; released via BCryptCloseAlgorithmProvider.
internal sealed class SafeAlgorithmHandle : SafeBCryptHandle
{
    protected sealed override bool ReleaseHandle()
    {
        // A zero NTSTATUS (STATUS_SUCCESS) means the release succeeded.
        uint ntStatus = BCryptCloseAlgorithmProvider(handle, 0);
        return ntStatus == 0;
    }
    [DllImport(Cng.CngDll)]
    private static extern uint BCryptCloseAlgorithmProvider(IntPtr hAlgorithm, int dwFlags);
}
// Owns a BCrypt hash handle; released via BCryptDestroyHash.
internal sealed class SafeHashHandle : SafeBCryptHandle
{
    protected sealed override bool ReleaseHandle()
    {
        uint ntStatus = BCryptDestroyHash(handle);
        return ntStatus == 0;
    }
    [DllImport(Cng.CngDll)]
    private static extern uint BCryptDestroyHash(IntPtr hHash);
}
// Owns a BCrypt key handle; released via BCryptDestroyKey.
internal sealed class SafeKeyHandle : SafeBCryptHandle
{
    protected sealed override bool ReleaseHandle()
    {
        uint ntStatus = BCryptDestroyKey(handle);
        return ntStatus == 0;
    }
    [DllImport(Cng.CngDll)]
    private static extern uint BCryptDestroyKey(IntPtr hKey);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.Azure.AcceptanceTestsHeadExceptions
{
using Azure;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
/// <summary>
/// Test Infrastructure for AutoRest
/// </summary>
public partial class AutoRestHeadExceptionTestService : ServiceClient<AutoRestHeadExceptionTestService>, IAutoRestHeadExceptionTestService, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public System.Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
public ServiceClientCredentials Credentials { get; private set; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Gets the IHeadExceptionOperations.
/// </summary>
public virtual IHeadExceptionOperations HeadException { get; private set; }
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestHeadExceptionTestService(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
{
    Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestHeadExceptionTestService(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
    Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='baseUri'>
/// Required. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected AutoRestHeadExceptionTestService(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
    if (baseUri == null)
    {
        throw new System.ArgumentNullException("baseUri");
    }
    BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='baseUri'>
/// Required. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected AutoRestHeadExceptionTestService(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
    if (baseUri == null)
    {
        throw new System.ArgumentNullException("baseUri");
    }
    BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestHeadExceptionTestService(ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestHeadExceptionTestService(ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestHeadExceptionTestService(System.Uri baseUri, ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
    if (baseUri == null)
    {
        throw new System.ArgumentNullException("baseUri");
    }
    if (credentials == null)
    {
        throw new System.ArgumentNullException("credentials");
    }
    BaseUri = baseUri;
    Credentials = credentials;
    // The guard above guarantees Credentials is non-null here, so the
    // generated "if (Credentials != null)" re-check was dead code.
    Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the AutoRestHeadExceptionTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestHeadExceptionTestService(System.Uri baseUri, ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
    if (baseUri == null)
    {
        throw new System.ArgumentNullException("baseUri");
    }
    if (credentials == null)
    {
        throw new System.ArgumentNullException("credentials");
    }
    BaseUri = baseUri;
    Credentials = credentials;
    // The guard above guarantees Credentials is non-null here, so the
    // generated "if (Credentials != null)" re-check was dead code.
    Credentials.InitializeServiceClient(this);
}
/// <summary>
/// An optional partial-method to perform custom initialization.
/// Invoked from <c>Initialize()</c>, after the default client settings
/// and serializer settings have been applied.
/// </summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties: operation groups, default endpoint,
/// retry/language defaults, and the JSON serializer settings.
/// </summary>
private void Initialize()
{
    // Operation groups and default client settings.
    HeadException = new HeadExceptionOperations(this);
    BaseUri = new System.Uri("http://localhost");
    AcceptLanguage = "en-US";
    LongRunningOperationRetryTimeout = 30;
    GenerateClientRequestId = true;

    // Settings used when serializing request payloads.
    var serializationSettings = new Newtonsoft.Json.JsonSerializerSettings();
    serializationSettings.Formatting = Newtonsoft.Json.Formatting.Indented;
    serializationSettings.DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat;
    serializationSettings.DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc;
    serializationSettings.NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore;
    serializationSettings.ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize;
    serializationSettings.ContractResolver = new ReadOnlyJsonContractResolver();
    serializationSettings.Converters.Add(new Iso8601TimeSpanConverter());
    SerializationSettings = serializationSettings;

    // Settings used when deserializing response payloads.
    var deserializationSettings = new Newtonsoft.Json.JsonSerializerSettings();
    deserializationSettings.DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat;
    deserializationSettings.DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc;
    deserializationSettings.NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore;
    deserializationSettings.ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize;
    deserializationSettings.ContractResolver = new ReadOnlyJsonContractResolver();
    deserializationSettings.Converters.Add(new Iso8601TimeSpanConverter());
    DeserializationSettings = deserializationSettings;

    // Matches the generated ordering: CustomInitialize runs before the
    // CloudError converter is appended to the deserialization settings.
    CustomInitialize();
    DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
namespace System.Linq.Expressions.Interpreter
{
/// <summary>
/// Interpreter instruction implementing unchecked addition for the numeric
/// primitive types. Pops two operands and pushes their sum.
/// </summary>
internal abstract class AddInstruction : Instruction
{
    // Cached singleton instances, one per supported numeric type.
    private static Instruction s_int16, s_int32, s_int64, s_UInt16, s_UInt32, s_UInt64, s_single, s_double;

    public override int ConsumedStack => 2;
    public override int ProducedStack => 1;

    public override string InstructionName => "Add";

    private AddInstruction()
    {
    }

    internal sealed class AddInt32 : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            // A null operand produces a null result.
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : ScriptingRuntimeHelpers.Int32ToObject(unchecked((Int32)left + (Int32)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddInt16 : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)unchecked((Int16)((Int16)left + (Int16)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddInt64 : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)unchecked((Int64)((Int64)left + (Int64)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddUInt16 : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)unchecked((UInt16)((UInt16)left + (UInt16)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddUInt32 : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)unchecked((UInt32)((UInt32)left + (UInt32)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddUInt64 : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)unchecked((UInt64)((UInt64)left + (UInt64)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddSingle : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)((Single)((Single)left + (Single)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddDouble : AddInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)((Double)left + (Double)right);
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    /// <summary>
    /// Returns the cached add instruction for the (non-nullable form of the)
    /// given numeric type, creating it on first use.
    /// </summary>
    public static Instruction Create(Type type)
    {
        Debug.Assert(!type.GetTypeInfo().IsEnum);
        TypeCode typeCode = System.Dynamic.Utils.TypeExtensions.GetTypeCode(TypeUtils.GetNonNullableType(type));
        switch (typeCode)
        {
            case TypeCode.Int16: return s_int16 ?? (s_int16 = new AddInt16());
            case TypeCode.Int32: return s_int32 ?? (s_int32 = new AddInt32());
            case TypeCode.Int64: return s_int64 ?? (s_int64 = new AddInt64());
            case TypeCode.UInt16: return s_UInt16 ?? (s_UInt16 = new AddUInt16());
            case TypeCode.UInt32: return s_UInt32 ?? (s_UInt32 = new AddUInt32());
            case TypeCode.UInt64: return s_UInt64 ?? (s_UInt64 = new AddUInt64());
            case TypeCode.Single: return s_single ?? (s_single = new AddSingle());
            case TypeCode.Double: return s_double ?? (s_double = new AddDouble());
            default:
                throw Error.ExpressionNotSupportedForType("Add", type);
        }
    }

    public override string ToString() => "Add()";
}
/// <summary>
/// Interpreter instruction implementing overflow-checked addition for the
/// numeric primitive types. Pops two operands and pushes their sum.
/// </summary>
internal abstract class AddOvfInstruction : Instruction
{
    // Cached singleton instances, one per supported numeric type.
    private static Instruction s_int16, s_int32, s_int64, s_UInt16, s_UInt32, s_UInt64, s_single, s_double;

    public override int ConsumedStack => 2;
    public override int ProducedStack => 1;

    public override string InstructionName => "AddOvf";

    private AddOvfInstruction()
    {
    }

    internal sealed class AddOvfInt32 : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            // A null operand produces a null result.
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : ScriptingRuntimeHelpers.Int32ToObject(checked((Int32)left + (Int32)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfInt16 : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)checked((Int16)((Int16)left + (Int16)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfInt64 : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)checked((Int64)((Int64)left + (Int64)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfUInt16 : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)checked((UInt16)((UInt16)left + (UInt16)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfUInt32 : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)checked((UInt32)((UInt32)left + (UInt32)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfUInt64 : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)checked((UInt64)((UInt64)left + (UInt64)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfSingle : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            // Floating-point addition does not trap on overflow, so no
            // checked() context is used (matches the unchecked Add variant).
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)((Single)((Single)left + (Single)right));
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    internal sealed class AddOvfDouble : AddOvfInstruction
    {
        public override int Run(InterpretedFrame frame)
        {
            int top = frame.StackIndex;
            object left = frame.Data[top - 2];
            object right = frame.Data[top - 1];
            frame.Data[top - 2] = (left == null || right == null)
                ? null
                : (object)((Double)left + (Double)right);
            frame.StackIndex = top - 1;
            return +1;
        }
    }

    /// <summary>
    /// Returns the cached overflow-checked add instruction for the
    /// (non-nullable form of the) given numeric type, creating it on first use.
    /// </summary>
    public static Instruction Create(Type type)
    {
        Debug.Assert(!type.GetTypeInfo().IsEnum);
        TypeCode typeCode = System.Dynamic.Utils.TypeExtensions.GetTypeCode(TypeUtils.GetNonNullableType(type));
        switch (typeCode)
        {
            case TypeCode.Int16: return s_int16 ?? (s_int16 = new AddOvfInt16());
            case TypeCode.Int32: return s_int32 ?? (s_int32 = new AddOvfInt32());
            case TypeCode.Int64: return s_int64 ?? (s_int64 = new AddOvfInt64());
            case TypeCode.UInt16: return s_UInt16 ?? (s_UInt16 = new AddOvfUInt16());
            case TypeCode.UInt32: return s_UInt32 ?? (s_UInt32 = new AddOvfUInt32());
            case TypeCode.UInt64: return s_UInt64 ?? (s_UInt64 = new AddOvfUInt64());
            case TypeCode.Single: return s_single ?? (s_single = new AddOvfSingle());
            case TypeCode.Double: return s_double ?? (s_double = new AddOvfDouble());
            default:
                throw Error.ExpressionNotSupportedForType("AddOvf", type);
        }
    }

    public override string ToString() => "AddOvf()";
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Security.Cryptography;
using Common.Logging;
using Google.Protobuf;
using Ipfs.Registry;
using Newtonsoft.Json;
namespace Ipfs
{
/// <summary>
/// A protocol for differentiating outputs from various well-established cryptographic hash functions,
/// addressing size + encoding considerations.
/// </summary>
/// <remarks>
/// See the <see cref="HashingAlgorithm">registry</see> for supported algorithms.
/// </remarks>
/// <seealso href="https://github.com/jbenet/multihash"/>
[JsonConverter(typeof(MultiHash.Json))]
public class MultiHash : IEquatable<MultiHash>
{
    static readonly ILog log = LogManager.GetLogger<MultiHash>();

    /// <summary>
    /// The cached base-58 encoding of the multihash.
    /// </summary>
    string b58String;

    /// <summary>
    /// The default hashing algorithm is "sha2-256".
    /// </summary>
    public const string DefaultAlgorithmName = "sha2-256";

    /// <summary>
    /// Gets the <see cref="HashAlgorithm"/> with the specified IPFS multi-hash name.
    /// </summary>
    /// <param name="name">
    /// The name of a hashing algorithm, see <see href="https://github.com/multiformats/multicodec/blob/master/table.csv"/>
    /// for IPFS defined names.
    /// </param>
    /// <returns>
    /// The hashing implementation associated with the <paramref name="name"/>.
    /// After using the hashing algorithm it should be disposed.
    /// </returns>
    /// <exception cref="KeyNotFoundException">
    /// When <paramref name="name"/> is not registered.
    /// </exception>
    public static HashAlgorithm GetHashAlgorithm(string name = DefaultAlgorithmName)
    {
        try
        {
            return HashingAlgorithm.Names[name].Hasher();
        }
        catch (KeyNotFoundException)
        {
            throw new KeyNotFoundException($"Hash algorithm '{name}' is not registered.");
        }
    }

    /// <summary>
    /// Gets the name of hashing algorithm name with the specified code.
    /// </summary>
    /// <param name="code">
    /// The code of a hashing algorithm, see <see href="https://github.com/multiformats/multicodec/blob/master/table.csv"/>
    /// for IPFS defined codes.
    /// </param>
    /// <returns>
    /// The name assigned to <paramref name="code"/>.
    /// </returns>
    /// <exception cref="KeyNotFoundException">
    /// When <paramref name="code"/> is not registered.
    /// </exception>
    public static string GetHashAlgorithmName(int code)
    {
        try
        {
            return HashingAlgorithm.Codes[code].Name;
        }
        catch (KeyNotFoundException)
        {
            throw new KeyNotFoundException($"Hash algorithm with code '{code}' is not registered.");
        }
    }

    /// <summary>
    /// Occurs when an unknown hashing algorithm number is parsed.
    /// </summary>
    public static EventHandler<UnknownHashingAlgorithmEventArgs> UnknownHashingAlgorithm;

    /// <summary>
    /// Creates a new instance of the <see cref="MultiHash"/> class with the
    /// specified <see cref="HashingAlgorithm">Algorithm name</see> and <see cref="Digest"/> value.
    /// </summary>
    /// <param name="algorithmName">
    /// A valid IPFS hashing algorithm name, e.g. "sha2-256" or "sha2-512".
    /// </param>
    /// <param name="digest">
    /// The digest value as a byte array.
    /// </param>
    public MultiHash(string algorithmName, byte[] digest)
    {
        if (algorithmName == null)
            throw new ArgumentNullException(nameof(algorithmName));
        if (digest == null)
            throw new ArgumentNullException(nameof(digest));
        if (!HashingAlgorithm.Names.TryGetValue(algorithmName, out HashingAlgorithm a))
        {
            throw new ArgumentException(string.Format("The IPFS hashing algorithm '{0}' is unknown.", algorithmName));
        }
        Algorithm = a;
        // A registered digest size of 0 means "variable size"; otherwise the
        // supplied digest must match exactly.
        if (Algorithm.DigestSize != 0 && Algorithm.DigestSize != digest.Length)
            throw new ArgumentException(string.Format("The digest size for '{0}' is {1} bytes, not {2}.", algorithmName, Algorithm.DigestSize, digest.Length));
        Digest = digest;
    }

    /// <summary>
    /// Creates a new instance of the <see cref="MultiHash"/> class from the
    /// specified byte array.
    /// </summary>
    /// <param name="buffer">
    /// A sequence of bytes containing the binary representation of the
    /// <b>MultiHash</b>.
    /// </param>
    /// <remarks>
    /// Reads the binary representation of <see cref="MultiHash"/> from the <paramref name="buffer"/>.
    /// <para>
    /// The binary representation is a <see cref="Varint"/> of the <see cref="HashingAlgorithm.Code"/>,
    /// <see cref="Varint"/> of the <see cref="HashingAlgorithm.DigestSize"/> followed by the <see cref="Digest"/>.
    /// </para>
    /// <para>
    /// When an unknown <see cref="HashingAlgorithm.Code">hashing algorithm number</see> is encountered
    /// a new hashing algorithm is <see cref="HashingAlgorithm.Register">registered</see>. This new algorithm does not support
    /// matching nor computing a hash.
    /// This behaviour allows parsing of any well formed <see cref="MultiHash"/> even when
    /// the hashing algorithm is unknown.
    /// </para>
    /// </remarks>
    /// <seealso cref="ToArray"/>
    public MultiHash(byte[] buffer)
    {
        using (var ms = new MemoryStream(buffer, false))
        {
            Read(ms);
        }
    }

    /// <summary>
    /// Creates a new instance of the <see cref="MultiHash"/> class from the
    /// specified <see cref="Stream"/>.
    /// </summary>
    /// <param name="stream">
    /// A <see cref="Stream"/> containing the binary representation of the
    /// <b>MultiHash</b>.
    /// </param>
    /// <remarks>
    /// Reads the binary representation of <see cref="MultiHash"/> from the <paramref name="stream"/>.
    /// <para>
    /// The binary representation is a <see cref="Varint"/> of the <see cref="HashingAlgorithm.Code"/>,
    /// <see cref="Varint"/> of the <see cref="HashingAlgorithm.DigestSize"/> followed by the <see cref="Digest"/>.
    /// </para>
    /// <para>
    /// When an unknown <see cref="HashingAlgorithm.Code">hashing algorithm number</see> is encountered
    /// a new hashing algorithm is <see cref="HashingAlgorithm.Register">registered</see>. This new algorithm does not support
    /// matching nor computing a hash.
    /// This behaviour allows parsing of any well formed <see cref="MultiHash"/> even when
    /// the hashing algorithm is unknown.
    /// </para>
    /// </remarks>
    public MultiHash(Stream stream)
    {
        Read(stream);
    }

    /// <summary>
    /// Creates a new instance of the <see cref="MultiHash"/> class from the
    /// specified <see cref="CodedInputStream"/>.
    /// </summary>
    /// <param name="stream">
    /// A <see cref="CodedInputStream"/> containing the binary representation of the
    /// <b>MultiHash</b>.
    /// </param>
    /// <remarks>
    /// Reads the binary representation of <see cref="MultiHash"/> from the <paramref name="stream"/>.
    /// <para>
    /// The binary representation is a <see cref="Varint"/> of the <see cref="HashingAlgorithm.Code"/>,
    /// <see cref="Varint"/> of the <see cref="HashingAlgorithm.DigestSize"/> followed by the <see cref="Digest"/>.
    /// </para>
    /// <para>
    /// When an unknown <see cref="HashingAlgorithm.Code">hashing algorithm number</see> is encountered
    /// a new hashing algorithm is <see cref="HashingAlgorithm.Register">registered</see>. This new algorithm does not support
    /// matching nor computing a hash.
    /// This behaviour allows parsing of any well formed <see cref="MultiHash"/> even when
    /// the hashing algorithm is unknown.
    /// </para>
    /// </remarks>
    public MultiHash(CodedInputStream stream)
    {
        Read(stream);
    }

    /// <summary>
    /// Creates a new instance of the <see cref="MultiHash"/> class from the specified
    /// <see cref="Base58"/> encoded <see cref="string"/>.
    /// </summary>
    /// <param name="s">
    /// A <see cref="Base58"/> encoded <b>MultiHash</b>.
    /// </param>
    /// <remarks>
    /// <para>
    /// When an unknown <see cref="HashingAlgorithm.Code">hashing algorithm number</see> is encountered
    /// a new hashing algorithm is <see cref="HashingAlgorithm.Register">registered</see>. This new algorithm does not support
    /// matching nor computing a hash.
    /// This behaviour allows parsing of any well formed <see cref="MultiHash"/> even when
    /// the hashing algorithm is unknown.
    /// </para>
    /// </remarks>
    /// <seealso cref="ToBase58"/>
    public MultiHash(string s)
    {
        using (var ms = new MemoryStream(s.FromBase58(), false))
        {
            Read(ms);
        }
    }

    /// <summary>
    /// Implicit casting of a <see cref="string"/> to a <see cref="MultiHash"/>.
    /// </summary>
    /// <param name="s">
    /// A <see cref="Base58"/> encoded <b>MultiHash</b>.
    /// </param>
    /// <returns>
    /// A new <see cref="MultiHash"/>.
    /// </returns>
    /// <remarks>
    /// Equivalent to <code>new MultiHash(s)</code>
    /// </remarks>
    public static implicit operator MultiHash(string s)
    {
        return new MultiHash(s);
    }

    /// <summary>
    /// The hashing algorithm.
    /// </summary>
    /// <value>
    /// Details on the hashing algorithm.
    /// </value>
    public HashingAlgorithm Algorithm { get; private set; }

    /// <summary>
    /// The hashing algorithm's digest value.
    /// </summary>
    /// <value>
    /// The output of the hashing algorithm.
    /// </value>
    public byte[] Digest { get; private set; }

    /// <summary>
    /// Determines if the identity hash algorithm is in use.
    /// </summary>
    /// <value>
    /// <b>true</b> if the identity hash algorithm is used; otherwise, <b>false</b>.
    /// </value>
    /// <remarks>
    /// The identity hash is used to inline a small amount of data into a <see cref="Cid"/>.
    /// When <b>true</b>, the <see cref="Digest"/> is also the content.
    /// </remarks>
    public bool IsIdentityHash
    {
        get { return Algorithm.Code == 0; }
    }

    /// <summary>
    /// Writes the binary representation of the multihash to the specified <see cref="Stream"/>.
    /// </summary>
    /// <param name="stream">
    /// The <see cref="Stream"/> to write to.
    /// </param>
    /// <remarks>
    /// The binary representation is a <see cref="Varint"/> of the <see cref="HashingAlgorithm.Code"/>,
    /// <see cref="Varint"/> of the <see cref="HashingAlgorithm.DigestSize"/> followed by the <see cref="Digest"/>.
    /// </remarks>
    public void Write(Stream stream)
    {
        using (var cos = new CodedOutputStream(stream, true))
        {
            Write(cos);
        }
    }

    /// <summary>
    /// Writes the binary representation of the multihash to the specified <see cref="CodedOutputStream"/>.
    /// </summary>
    /// <param name="stream">
    /// The <see cref="CodedOutputStream"/> to write to.
    /// </param>
    /// <remarks>
    /// The binary representation is a <see cref="Varint"/> of the <see cref="HashingAlgorithm.Code"/>,
    /// <see cref="Varint"/> of the <see cref="HashingAlgorithm.DigestSize"/> followed by the <see cref="Digest"/>.
    /// </remarks>
    public void Write(CodedOutputStream stream)
    {
        if (stream == null)
            throw new ArgumentNullException(nameof(stream));
        stream.WriteInt32(Algorithm.Code);
        stream.WriteLength(Digest.Length);
        stream.WriteSomeBytes(Digest);
    }

    void Read(Stream stream)
    {
        using (var cis = new CodedInputStream(stream, true))
        {
            Read(cis);
        }
    }

    void Read(CodedInputStream stream)
    {
        var code = stream.ReadInt32();
        var digestSize = stream.ReadLength();
        HashingAlgorithm.Codes.TryGetValue(code, out HashingAlgorithm a);
        Algorithm = a;
        if (Algorithm == null)
        {
            // Unknown algorithm number: register a placeholder so the
            // multihash can still be parsed (see class remarks).
            Algorithm = HashingAlgorithm.Register("ipfs-" + code, code, digestSize);
            RaiseUnknownHashingAlgorithm(Algorithm);
        }
        else if (Algorithm.DigestSize != 0 && digestSize != Algorithm.DigestSize)
        {
            throw new InvalidDataException(string.Format("The digest size {0} is wrong for {1}; it should be {2}.", digestSize, Algorithm.Name, Algorithm.DigestSize));
        }
        Digest = stream.ReadSomeBytes(digestSize);
    }

    /// <inheritdoc />
    public override int GetHashCode()
    {
        return ToString().GetHashCode();
    }

    /// <inheritdoc />
    public override bool Equals(object obj)
    {
        var that = obj as MultiHash;
        return (that == null)
            ? false
            : this.Equals(that);
    }

    /// <inheritdoc />
    public bool Equals(MultiHash that)
    {
        // IEquatable<T> contract: comparing against null returns false
        // (the original dereferenced 'that' and threw NullReferenceException).
        if (that is null)
            return false;
        return this.Algorithm.Code == that.Algorithm.Code
            && this.Digest.SequenceEqual(that.Digest);
    }

    /// <summary>
    /// Value equality.
    /// </summary>
    public static bool operator ==(MultiHash a, MultiHash b)
    {
        if (object.ReferenceEquals(a, b)) return true;
        if (a is null) return false;
        if (b is null) return false;
        return a.Equals(b);
    }

    /// <summary>
    /// Value inequality.
    /// </summary>
    public static bool operator !=(MultiHash a, MultiHash b)
    {
        return !(a == b);
    }

    /// <summary>
    /// Returns the <see cref="Base58"/> encoding of the <see cref="MultiHash"/>.
    /// </summary>
    /// <returns>
    /// A base-58 representaton of the MultiHash.
    /// </returns>
    /// <seealso cref="ToBase58"/>
    public override string ToString()
    {
        return this.ToBase58();
    }

    /// <summary>
    /// Returns the <see cref="Base58"/> encoding of the <see cref="MultiHash"/>.
    /// </summary>
    /// <returns>
    /// The <see cref="Base58"/> representation of the <see cref="MultiHash"/>.
    /// </returns>
    public string ToBase58()
    {
        if (b58String != null)
        {
            return b58String;
        }
        using (var ms = new MemoryStream())
        {
            Write(ms);
            b58String = ms.ToArray().ToBase58();
            return b58String;
        }
    }

    /// <summary>
    /// Returns the <see cref="Base32"/> encoding of the <see cref="MultiHash"/>.
    /// </summary>
    /// <returns>
    /// The <see cref="Base32"/> representation of the <see cref="MultiHash"/>.
    /// </returns>
    public string ToBase32()
    {
        return ToArray().ToBase32();
    }

    /// <summary>
    /// Returns the IPFS binary representation as a byte array.
    /// </summary>
    /// <returns>
    /// A byte array.
    /// </returns>
    /// <remarks>
    /// The binary representation is a sequence of <see cref="MultiHash"/>.
    /// </remarks>
    public byte[] ToArray()
    {
        using (var ms = new MemoryStream())
        {
            Write(ms);
            return ms.ToArray();
        }
    }

    /// <summary>
    /// Determines if the data matches the hash.
    /// </summary>
    /// <param name="data">
    /// The data to check.
    /// </param>
    /// <returns>
    /// <b>true</b> if the data matches the <see cref="MultiHash"/>; otherwise, <b>false</b>.
    /// </returns>
    /// <remarks>
    /// <b>Matches</b> is used to ensure data integrity.
    /// </remarks>
    public bool Matches(byte[] data)
    {
        // Dispose the hasher; per GetHashAlgorithm's contract a hashing
        // algorithm should be disposed after use.
        using (var hasher = Algorithm.Hasher())
        {
            var digest = hasher.ComputeHash(data);
            // Compare full sequences so digests of different lengths never match.
            return digest.SequenceEqual(Digest);
        }
    }

    /// <summary>
    /// Determines if the stream data matches the hash.
    /// </summary>
    /// <param name="data">
    /// The <see cref="Stream"/> containing the data to check.
    /// </param>
    /// <returns>
    /// <b>true</b> if the data matches the <see cref="MultiHash"/>; otherwise, <b>false</b>.
    /// </returns>
    /// <remarks>
    /// <b>Matches</b> is used to ensure data integrity.
    /// </remarks>
    public bool Matches(Stream data)
    {
        // Dispose the hasher; per GetHashAlgorithm's contract a hashing
        // algorithm should be disposed after use.
        using (var hasher = Algorithm.Hasher())
        {
            var digest = hasher.ComputeHash(data);
            // Compare full sequences so digests of different lengths never match.
            return digest.SequenceEqual(Digest);
        }
    }

    void RaiseUnknownHashingAlgorithm(HashingAlgorithm algorithm)
    {
        if (log.IsWarnEnabled)
            log.WarnFormat("Unknown hashing algorithm number 0x{0:x2}.", algorithm.Code);
        var handler = UnknownHashingAlgorithm;
        if (handler != null)
        {
            var args = new UnknownHashingAlgorithmEventArgs { Algorithm = algorithm };
            handler(this, args);
        }
    }

    /// <summary>
    /// Generate the multihash for the specified byte array.
    /// </summary>
    /// <param name="data">
    /// The byte array containing the data to hash.
    /// </param>
    /// <param name="algorithmName">
    /// The name of the hashing algorithm to use; defaults to <see cref="DefaultAlgorithmName"/>.
    /// </param>
    /// <returns>
    /// A <see cref="MultiHash"/> for the <paramref name="data"/>.
    /// </returns>
    public static MultiHash ComputeHash(byte[] data, string algorithmName = DefaultAlgorithmName)
    {
        using (var alg = GetHashAlgorithm(algorithmName))
        {
            return new MultiHash(algorithmName, alg.ComputeHash(data));
        }
    }

    /// <summary>
    /// Generate the multihash for the specified <see cref="Stream"/>.
    /// </summary>
    /// <param name="data">
    /// The <see cref="Stream"/> containing the data to hash.
    /// </param>
    /// <param name="algorithmName">
    /// The name of the hashing algorithm to use; defaults to <see cref="DefaultAlgorithmName"/>.
    /// </param>
    /// <returns>
    /// A <see cref="MultiHash"/> for the <paramref name="data"/>.
    /// </returns>
    public static MultiHash ComputeHash(Stream data, string algorithmName = DefaultAlgorithmName)
    {
        using (var alg = GetHashAlgorithm(algorithmName))
        {
            return new MultiHash(algorithmName, alg.ComputeHash(data));
        }
    }

    /// <summary>
    /// Conversion of a <see cref="MultiHash"/> to and from JSON.
    /// </summary>
    /// <remarks>
    /// The JSON is just a single string value.
    /// </remarks>
    class Json : JsonConverter
    {
        public override bool CanConvert(Type objectType)
        {
            return true;
        }
        public override bool CanRead => true;
        public override bool CanWrite => true;
        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
        {
            var mh = value as MultiHash;
            writer.WriteValue(mh?.ToString());
        }
        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
        {
            var s = reader.Value as string;
            return s == null ? null : new MultiHash(s);
        }
    }
}
/// <summary>
/// Provides data for the unknown hashing algorithm event.
/// </summary>
/// <seealso cref="MultiHash.UnknownHashingAlgorithm"/>
public class UnknownHashingAlgorithmEventArgs : EventArgs
{
    /// <summary>
    /// The placeholder <see cref="HashingAlgorithm"/> that was registered for the
    /// unknown hashing number.
    /// </summary>
    public HashingAlgorithm Algorithm { get; set; }
}
}
| |
// MIT License
//
// Copyright (c) 2009-2017 Luca Piccioni
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// This file is automatically generated
#pragma warning disable 649, 1572, 1573
// ReSharper disable RedundantUsingDirective
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security;
using System.Text;
using Khronos;
// ReSharper disable CheckNamespace
// ReSharper disable InconsistentNaming
// ReSharper disable JoinDeclarationAndInitializer
namespace OpenGL
{
public partial class Wgl
{
// Token values defined by the WGL_AMD_gpu_association extension.
/// <summary>
/// [WGL] Value of WGL_GPU_VENDOR_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_VENDOR_AMD = 0x1F00;
/// <summary>
/// [WGL] Value of WGL_GPU_RENDERER_STRING_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_RENDERER_STRING_AMD = 0x1F01;
/// <summary>
/// [WGL] Value of WGL_GPU_OPENGL_VERSION_STRING_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_OPENGL_VERSION_STRING_AMD = 0x1F02;
/// <summary>
/// [WGL] Value of WGL_GPU_FASTEST_TARGET_GPUS_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_FASTEST_TARGET_GPUS_AMD = 0x21A2;
/// <summary>
/// [WGL] Value of WGL_GPU_RAM_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_RAM_AMD = 0x21A3;
/// <summary>
/// [WGL] Value of WGL_GPU_CLOCK_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_CLOCK_AMD = 0x21A4;
/// <summary>
/// [WGL] Value of WGL_GPU_NUM_PIPES_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_NUM_PIPES_AMD = 0x21A5;
/// <summary>
/// [WGL] Value of WGL_GPU_NUM_SIMD_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_NUM_SIMD_AMD = 0x21A6;
/// <summary>
/// [WGL] Value of WGL_GPU_NUM_RB_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_NUM_RB_AMD = 0x21A7;
/// <summary>
/// [WGL] Value of WGL_GPU_NUM_SPI_AMD symbol.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public const int GPU_NUM_SPI_AMD = 0x21A8;
/// <summary>
/// [WGL] wglGetGPUIDsAMD: Binding for wglGetGPUIDsAMD.
/// </summary>
/// <param name="maxCount">
/// A <see cref="T:uint"/>.
/// </param>
/// <param name="ids">
/// A <see cref="T:uint[]"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static uint GetGPUIDsAMD(uint maxCount, [Out] uint[] ids)
{
uint retValue;
unsafe {
fixed (uint* p_ids = ids)
{
Debug.Assert(Delegates.pwglGetGPUIDsAMD != null, "pwglGetGPUIDsAMD not implemented");
retValue = Delegates.pwglGetGPUIDsAMD(maxCount, p_ids);
LogCommand("wglGetGPUIDsAMD", retValue, maxCount, ids );
}
}
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglGetGPUInfoAMD: Binding for wglGetGPUInfoAMD.
/// </summary>
/// <param name="id">
/// A <see cref="T:uint"/>.
/// </param>
/// <param name="property">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="dataType">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="size">
/// A <see cref="T:uint"/>.
/// </param>
/// <param name="data">
/// A <see cref="T:IntPtr"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static int GetGPUInfoAMD(uint id, int property, int dataType, uint size, IntPtr data)
{
int retValue;
Debug.Assert(Delegates.pwglGetGPUInfoAMD != null, "pwglGetGPUInfoAMD not implemented");
retValue = Delegates.pwglGetGPUInfoAMD(id, property, dataType, size, data);
LogCommand("wglGetGPUInfoAMD", retValue, id, property, dataType, size, data );
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglGetContextGPUIDAMD: Binding for wglGetContextGPUIDAMD.
/// </summary>
/// <param name="hglrc">
/// A <see cref="T:IntPtr"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static uint GetContextGPUIDAMD(IntPtr hglrc)
{
uint retValue;
Debug.Assert(Delegates.pwglGetContextGPUIDAMD != null, "pwglGetContextGPUIDAMD not implemented");
retValue = Delegates.pwglGetContextGPUIDAMD(hglrc);
LogCommand("wglGetContextGPUIDAMD", retValue, hglrc );
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglCreateAssociatedContextAMD: Binding for wglCreateAssociatedContextAMD.
/// </summary>
/// <param name="id">
/// A <see cref="T:uint"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static IntPtr CreateAssociatedContextAMD(uint id)
{
IntPtr retValue;
Debug.Assert(Delegates.pwglCreateAssociatedContextAMD != null, "pwglCreateAssociatedContextAMD not implemented");
retValue = Delegates.pwglCreateAssociatedContextAMD(id);
LogCommand("wglCreateAssociatedContextAMD", retValue, id );
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglCreateAssociatedContextAttribsAMD: Binding for wglCreateAssociatedContextAttribsAMD.
/// </summary>
/// <param name="id">
/// A <see cref="T:uint"/>.
/// </param>
/// <param name="hShareContext">
/// A <see cref="T:IntPtr"/>.
/// </param>
/// <param name="attribList">
/// A <see cref="T:int[]"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static IntPtr CreateAssociatedContextAttribsAMD(uint id, IntPtr hShareContext, int[] attribList)
{
IntPtr retValue;
unsafe {
fixed (int* p_attribList = attribList)
{
Debug.Assert(Delegates.pwglCreateAssociatedContextAttribsAMD != null, "pwglCreateAssociatedContextAttribsAMD not implemented");
retValue = Delegates.pwglCreateAssociatedContextAttribsAMD(id, hShareContext, p_attribList);
LogCommand("wglCreateAssociatedContextAttribsAMD", retValue, id, hShareContext, attribList );
}
}
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglDeleteAssociatedContextAMD: Binding for wglDeleteAssociatedContextAMD.
/// </summary>
/// <param name="hglrc">
/// A <see cref="T:IntPtr"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static bool DeleteAssociatedContextAMD(IntPtr hglrc)
{
bool retValue;
Debug.Assert(Delegates.pwglDeleteAssociatedContextAMD != null, "pwglDeleteAssociatedContextAMD not implemented");
retValue = Delegates.pwglDeleteAssociatedContextAMD(hglrc);
LogCommand("wglDeleteAssociatedContextAMD", retValue, hglrc );
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglMakeAssociatedContextCurrentAMD: Binding for wglMakeAssociatedContextCurrentAMD.
/// </summary>
/// <param name="hglrc">
/// A <see cref="T:IntPtr"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static bool MakeAssociatedContextCurrentAMD(IntPtr hglrc)
{
bool retValue;
Debug.Assert(Delegates.pwglMakeAssociatedContextCurrentAMD != null, "pwglMakeAssociatedContextCurrentAMD not implemented");
retValue = Delegates.pwglMakeAssociatedContextCurrentAMD(hglrc);
LogCommand("wglMakeAssociatedContextCurrentAMD", retValue, hglrc );
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglGetCurrentAssociatedContextAMD: Binding for wglGetCurrentAssociatedContextAMD.
/// </summary>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static IntPtr GetCurrentAssociatedContextAMD()
{
IntPtr retValue;
Debug.Assert(Delegates.pwglGetCurrentAssociatedContextAMD != null, "pwglGetCurrentAssociatedContextAMD not implemented");
retValue = Delegates.pwglGetCurrentAssociatedContextAMD();
LogCommand("wglGetCurrentAssociatedContextAMD", retValue );
DebugCheckErrors(retValue);
return (retValue);
}
/// <summary>
/// [WGL] wglBlitContextFramebufferAMD: Binding for wglBlitContextFramebufferAMD.
/// </summary>
/// <param name="dstCtx">
/// A <see cref="T:IntPtr"/>.
/// </param>
/// <param name="srcX0">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="srcY0">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="srcX1">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="srcY1">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="dstX0">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="dstY0">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="dstX1">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="dstY1">
/// A <see cref="T:int"/>.
/// </param>
/// <param name="mask">
/// A <see cref="T:uint"/>.
/// </param>
/// <param name="filter">
/// A <see cref="T:int"/>.
/// </param>
[RequiredByFeature("WGL_AMD_gpu_association")]
public static void BlitContextFramebufferAMD(IntPtr dstCtx, int srcX0, int srcY0, int srcX1, int srcY1, int dstX0, int dstY0, int dstX1, int dstY1, uint mask, int filter)
{
Debug.Assert(Delegates.pwglBlitContextFramebufferAMD != null, "pwglBlitContextFramebufferAMD not implemented");
Delegates.pwglBlitContextFramebufferAMD(dstCtx, srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter);
LogCommand("wglBlitContextFramebufferAMD", null, dstCtx, srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter );
DebugCheckErrors(null);
}
internal static unsafe partial class Delegates
{
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate uint wglGetGPUIDsAMD(uint maxCount, uint* ids);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglGetGPUIDsAMD pwglGetGPUIDsAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate int wglGetGPUInfoAMD(uint id, int property, int dataType, uint size, IntPtr data);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglGetGPUInfoAMD pwglGetGPUInfoAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate uint wglGetContextGPUIDAMD(IntPtr hglrc);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglGetContextGPUIDAMD pwglGetContextGPUIDAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr wglCreateAssociatedContextAMD(uint id);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglCreateAssociatedContextAMD pwglCreateAssociatedContextAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr wglCreateAssociatedContextAttribsAMD(uint id, IntPtr hShareContext, int* attribList);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglCreateAssociatedContextAttribsAMD pwglCreateAssociatedContextAttribsAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate bool wglDeleteAssociatedContextAMD(IntPtr hglrc);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglDeleteAssociatedContextAMD pwglDeleteAssociatedContextAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate bool wglMakeAssociatedContextCurrentAMD(IntPtr hglrc);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglMakeAssociatedContextCurrentAMD pwglMakeAssociatedContextCurrentAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate IntPtr wglGetCurrentAssociatedContextAMD();
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglGetCurrentAssociatedContextAMD pwglGetCurrentAssociatedContextAMD;
[RequiredByFeature("WGL_AMD_gpu_association")]
[SuppressUnmanagedCodeSecurity]
internal delegate void wglBlitContextFramebufferAMD(IntPtr dstCtx, int srcX0, int srcY0, int srcX1, int srcY1, int dstX0, int dstY0, int dstX1, int dstY1, uint mask, int filter);
[RequiredByFeature("WGL_AMD_gpu_association")]
internal static wglBlitContextFramebufferAMD pwglBlitContextFramebufferAMD;
}
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SocketTcp.cs" company="Exit Games GmbH">
// Copyright (c) Exit Games GmbH. All rights reserved.
// </copyright>
// <summary>
// Internal class to encapsulate the network i/o functionality for the realtime libary.
// </summary>
// <author>developer@exitgames.com</author>
// --------------------------------------------------------------------------------------------------------------------
using System;
using System.Collections;
using UnityEngine;
#if !NoSocket
namespace ExitGames.Client.Photon
{
/// <summary>
/// Internal class to encapsulate the network i/o functionality for the realtime libary.
/// This transport runs over a WebSocket wrapper, driven by Unity coroutines hosted on a
/// dedicated GameObject that survives scene loads.
/// </summary>
internal class SocketWebTcp : IPhotonSocket, IDisposable
{
    // The underlying websocket. Null when not connected.
    private WebSocket sock;

    // Guards access to this.sock while disconnecting.
    private readonly object syncer = new object();

    // Host for the connect/receive coroutines; created in Connect(),
    // destroyed in Disconnect() (which ReceiveLoop calls on exit).
    GameObject websocketConnectionObject;

    internal const int ALL_HEADER_BYTES = 9;
    internal const int TCP_HEADER_BYTES = 7;
    internal const int MSG_HEADER_BYTES = 2;

    public SocketWebTcp(PeerBase npeer) : base(npeer)
    {
        ServerAddress = npeer.ServerAddress;
        if (this.ReportDebugOfLevel(DebugLevel.INFO))
        {
            Listener.DebugReturn(DebugLevel.INFO, "new SocketWebTcp() " + ServerAddress);
        }

        Protocol = ConnectionProtocol.Tcp;
        PollReceive = false;
    }

    /// <summary>
    /// Closes the socket (best effort) and marks this instance Disconnected.
    /// </summary>
    public void Dispose()
    {
        this.State = PhotonSocketState.Disconnecting;

        if (this.sock != null)
        {
            try
            {
                if (this.sock.Connected) this.sock.Close();
            }
            catch (Exception ex)
            {
                // best-effort close: report but never throw from Dispose
                this.EnqueueDebugReturn(DebugLevel.INFO, "Exception in Dispose(): " + ex);
            }
        }

        this.sock = null;
        this.State = PhotonSocketState.Disconnected;
    }

    /// <summary>
    /// Creates the coroutine-host GameObject, opens the websocket and starts the
    /// connect and receive coroutines. Always returns true (errors surface async).
    /// </summary>
    public override bool Connect()
    {
        State = PhotonSocketState.Connecting;

        // a leftover host object from a previous connect would keep stale coroutines alive
        if (websocketConnectionObject != null)
        {
            UnityEngine.Object.Destroy(websocketConnectionObject);
        }

        websocketConnectionObject = new GameObject("websocketConnectionObject");
        MonoBehaviour mb = websocketConnectionObject.AddComponent<MonoBehaviour>();
        // TODO: not hidden for debug
        //websocketConnectionObject.hideFlags = HideFlags.HideInHierarchy;
        UnityEngine.Object.DontDestroyOnLoad(websocketConnectionObject);

        this.sock = new WebSocket(new Uri(ServerAddress));
        mb.StartCoroutine(this.sock.Connect());
        mb.StartCoroutine(ReceiveLoop());
        return true;
    }

    /// <summary>
    /// Closes the socket and destroys the coroutine-host GameObject.
    /// </summary>
    public override bool Disconnect()
    {
        if (ReportDebugOfLevel(DebugLevel.INFO))
        {
            // fixed: previously logged "SocketTcp.Disconnect()" (wrong class name)
            this.Listener.DebugReturn(DebugLevel.INFO, "SocketWebTcp.Disconnect()");
        }

        State = PhotonSocketState.Disconnecting;

        lock (this.syncer)
        {
            if (this.sock != null)
            {
                try
                {
                    this.sock.Close();
                }
                catch (Exception ex)
                {
                    this.Listener.DebugReturn(DebugLevel.ERROR, "Exception in Disconnect(): " + ex);
                }
                this.sock = null;
            }
        }

        if (websocketConnectionObject != null)
        {
            UnityEngine.Object.Destroy(websocketConnectionObject);
        }

        State = PhotonSocketState.Disconnected;
        return true;
    }

    /// <summary>
    /// used by TPeer*
    /// </summary>
    public override PhotonSocketError Send(byte[] data, int length)
    {
        if (this.State != PhotonSocketState.Connected)
        {
            return PhotonSocketError.Skipped;
        }

        try
        {
            if (this.ReportDebugOfLevel(DebugLevel.ALL))
            {
                this.Listener.DebugReturn(DebugLevel.ALL, "Sending: " + SupportClass.ByteArrayToString(data));
            }

            this.sock.Send(data);
        }
        catch (Exception e)
        {
            this.Listener.DebugReturn(DebugLevel.ERROR, "Cannot send. " + e.Message);
            HandleException(StatusCode.Exception);
            return PhotonSocketError.Exception;
        }

        return PhotonSocketError.Success;
    }

    /// <summary>
    /// Not used by this transport: data is pushed via ReceiveLoop, never pulled.
    /// </summary>
    public override PhotonSocketError Receive(out byte[] data)
    {
        data = null;
        return PhotonSocketError.NoData;
    }

    /// <summary>
    /// Coroutine: waits for the websocket connect to succeed or fail, then polls
    /// for incoming frames until the socket leaves the Connected state, finally
    /// calls Disconnect() to clean up.
    /// </summary>
    public IEnumerator ReceiveLoop()
    {
        this.Listener.DebugReturn(DebugLevel.INFO, "ReceiveLoop()");

        // wait for the connect coroutine to settle
        while (!this.sock.Connected && this.sock.Error == null)
        {
            yield return new WaitForSeconds(0.1f);
        }

        if (this.sock.Error != null)
        {
            this.Listener.DebugReturn(DebugLevel.ERROR, "Exiting receive thread due to error: " + this.sock.Error);
            this.HandleException(StatusCode.ExceptionOnConnect);
        }
        else
        {
            if (this.ReportDebugOfLevel(DebugLevel.ALL))
            {
                this.Listener.DebugReturn(DebugLevel.ALL, "Receiving by websocket. this.State: " + State);
            }

            State = PhotonSocketState.Connected;
            while (State == PhotonSocketState.Connected)
            {
                if (this.sock.Error != null)
                {
                    this.Listener.DebugReturn(DebugLevel.ERROR, "Exiting receive thread (inside loop) due to error: " + this.sock.Error);
                    this.HandleException(StatusCode.ExceptionOnReceive);
                    break;
                }

                byte[] inBuff = this.sock.Recv();
                if (inBuff == null || inBuff.Length == 0)
                {
                    // nothing pending: idle briefly instead of busy-polling
                    yield return new WaitForSeconds(0.1f);
                    continue;
                }

                if (this.ReportDebugOfLevel(DebugLevel.ALL))
                {
                    this.Listener.DebugReturn(DebugLevel.ALL, "TCP << " + inBuff.Length + " = " + SupportClass.ByteArrayToString(inBuff));
                }

                // check if it's a ping-result (first byte = 0xF0). this is 9 bytes in total. no other headers!
                // note: its a coincidence that ping-result-size == header-size. if this changes we have to refactor this.
                // Ping results and regular datagrams were previously handled by two
                // byte-identical try/catch copies; both flow through one helper now.
                this.DispatchReceivedData(inBuff);
            }
        }

        Disconnect();
    }

    /// <summary>
    /// Hands one received frame to the peer; any exception during handling is
    /// reported and mapped to StatusCode.ExceptionOnReceive.
    /// </summary>
    private void DispatchReceivedData(byte[] inBuff)
    {
        try
        {
            HandleReceivedDatagram(inBuff, inBuff.Length, false);
        }
        catch (Exception e)
        {
            if (this.ReportDebugOfLevel(DebugLevel.ERROR))
            {
                this.EnqueueDebugReturn(DebugLevel.ERROR, "Receive issue. State: " + this.State + " Exception: " + e);
            }

            this.HandleException(StatusCode.ExceptionOnReceive);
        }
    }
}
}
#endif
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Automation
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// RunbookOperations operations.
/// </summary>
public partial interface IRunbookOperations
{
    /// <summary>
    /// Retrieve the content of runbook identified by runbook name.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The resource group name.
    /// </param>
    /// <param name='automationAccountName'>
    /// The automation account name.
    /// </param>
    /// <param name='runbookName'>
    /// The runbook name.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the runbook content stream and response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse<Stream>> GetContentWithHttpMessagesAsync(string resourceGroupName, string automationAccountName, string runbookName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

    /// <summary>
    /// Retrieve the runbook identified by runbook name.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The resource group name.
    /// </param>
    /// <param name='automationAccountName'>
    /// The automation account name.
    /// </param>
    /// <param name='runbookName'>
    /// The runbook name.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the runbook and response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse<Runbook>> GetWithHttpMessagesAsync(string resourceGroupName, string automationAccountName, string runbookName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

    /// <summary>
    /// Create the runbook identified by runbook name.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The resource group name.
    /// </param>
    /// <param name='automationAccountName'>
    /// The automation account name.
    /// </param>
    /// <param name='runbookName'>
    /// The runbook name.
    /// </param>
    /// <param name='parameters'>
    /// The create or update parameters for runbook. Provide either content
    /// link for a published runbook or draft, not both.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string automationAccountName, string runbookName, RunbookCreateOrUpdateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

    /// <summary>
    /// Update the runbook identified by runbook name.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The resource group name.
    /// </param>
    /// <param name='automationAccountName'>
    /// The automation account name.
    /// </param>
    /// <param name='runbookName'>
    /// The runbook name.
    /// </param>
    /// <param name='parameters'>
    /// The update parameters for runbook.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the updated runbook and response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse<Runbook>> UpdateWithHttpMessagesAsync(string resourceGroupName, string automationAccountName, string runbookName, RunbookUpdateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

    /// <summary>
    /// Delete the runbook by name.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The resource group name.
    /// </param>
    /// <param name='automationAccountName'>
    /// The automation account name.
    /// </param>
    /// <param name='runbookName'>
    /// The runbook name.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string automationAccountName, string runbookName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

    /// <summary>
    /// Retrieve a list of runbooks.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The resource group name.
    /// </param>
    /// <param name='automationAccountName'>
    /// The automation account name.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the first page of runbooks and response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse<IPage<Runbook>>> ListByAutomationAccountWithHttpMessagesAsync(string resourceGroupName, string automationAccountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

    /// <summary>
    /// Retrieve a list of runbooks.
    /// <see href="http://aka.ms/azureautomationsdk/runbookoperations" />
    /// </summary>
    /// <param name='nextPageLink'>
    /// The NextLink from the previous successful call to List operation.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <returns>
    /// A response object containing the next page of runbooks and response headers.
    /// </returns>
    /// <exception cref="ErrorResponseException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<AzureOperationResponse<IPage<Runbook>>> ListByAutomationAccountNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
| |
// ***********************************************************************
// Copyright (c) 2007-2016 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
#if PLATFORM_DETECTION
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using Microsoft.Win32;
namespace NUnit.Framework.Internal
{
/// <summary>
/// Enumeration identifying a common language
/// runtime implementation.
/// </summary>
// Names are parsed case-insensitively by RuntimeFramework.Parse.
public enum RuntimeType
{
    /// <summary>Any supported runtime framework</summary>
    Any,

    /// <summary>Microsoft .NET Framework</summary>
    Net,

    /// <summary>Microsoft Shared Source CLI</summary>
    SSCLI,

    /// <summary>Mono</summary>
    Mono,

    /// <summary>MonoTouch</summary>
    MonoTouch
}
/// <summary>
/// RuntimeFramework represents a particular version
/// of a common language runtime implementation.
/// </summary>
[Serializable]
public sealed class RuntimeFramework
{
// NOTE: This version of RuntimeFramework is for use
// within the NUnit framework assembly. It is simpler
// than the version in the test engine because it does
// not need to know what frameworks are available,
// only what framework is currently running.
#region Static and Instance Fields
/// <summary>
/// DefaultVersion is an empty Version, used to indicate that
/// NUnit should select the CLR version to use for the test.
/// </summary>
public static readonly Version DefaultVersion = new Version(0,0);

// Lazily detects the runtime we are executing under. Marker types identify
// Mono/MonoTouch; Environment.Version plus registry/type probes map the CLR
// version to a framework version.
private static readonly Lazy<RuntimeFramework> currentFramework = new Lazy<RuntimeFramework>(() =>
{
    // presence of these types identifies the runtime implementation
    Type monoRuntimeType = Type.GetType("Mono.Runtime", false);
    Type monoTouchType = Type.GetType("MonoTouch.UIKit.UIApplicationDelegate,monotouch");
    bool isMonoTouch = monoTouchType != null;
    bool isMono = monoRuntimeType != null;

    RuntimeType runtime = isMonoTouch
        ? RuntimeType.MonoTouch
        : isMono
            ? RuntimeType.Mono
            : RuntimeType.Net;

    int major = Environment.Version.Major;
    int minor = Environment.Version.Minor;

    if (isMono)
    {
        switch (major)
        {
            case 1:
                // Mono CLR 1.x is reported here as framework 1.0
                minor = 0;
                break;
            case 2:
                // Mono CLR 2 is mapped to framework 3.5
                major = 3;
                minor = 5;
                break;
        }
    }
    else /* It's windows */
    if (major == 2)
    {
        // CLR 2 hosts frameworks 2.0/3.0/3.5; probe the install root to tell which
        using (RegistryKey key = Registry.LocalMachine.OpenSubKey(@"SOFTWARE\Microsoft\.NETFramework"))
        {
            if (key != null)
            {
                string installRoot = key.GetValue("InstallRoot") as string;
                if (installRoot != null)
                {
                    if (Directory.Exists(Path.Combine(installRoot, "v3.5")))
                    {
                        major = 3;
                        minor = 5;
                    }
                    else if (Directory.Exists(Path.Combine(installRoot, "v3.0")))
                    {
                        major = 3;
                        minor = 0;
                    }
                }
            }
        }
    }
    else if (major == 4 && Type.GetType("System.Reflection.AssemblyMetadataAttribute") != null)
    {
        // AssemblyMetadataAttribute is only present from .NET 4.5 onwards
        minor = 5;
    }

    var currentFramework = new RuntimeFramework( runtime, new Version (major, minor) )
    {
        ClrVersion = Environment.Version
    };

    if (isMono)
    {
        // Mono exposes its display version through an internal static method
        MethodInfo getDisplayNameMethod = monoRuntimeType.GetMethod(
            "GetDisplayName", BindingFlags.Static | BindingFlags.NonPublic | BindingFlags.DeclaredOnly | BindingFlags.ExactBinding);
        if (getDisplayNameMethod != null)
            currentFramework.DisplayName = (string)getDisplayNameMethod.Invoke(null, new object[0]);
    }

    return currentFramework;
});
#endregion
#region Constructor
/// <summary>
/// Construct from a runtime type and version. If the version has
/// two parts, it is taken as a framework version. If it has three
/// or more, it is taken as a CLR version. In either case, the other
/// version is deduced based on the runtime type and provided version.
/// </summary>
/// <param name="runtime">The runtime type of the framework</param>
/// <param name="version">The version of the framework</param>
public RuntimeFramework( RuntimeType runtime, Version version)
{
    Runtime = runtime;

    // A two-part version ("3.5") has Build == -1 and denotes a framework
    // version; three or more parts ("2.0.50727") denote a CLR version.
    bool isFrameworkVersion = version.Build < 0;
    if (isFrameworkVersion)
        InitFromFrameworkVersion(version);
    else
        InitFromClrVersion(version);

    DisplayName = GetDefaultDisplayName(runtime, version);
}
/// <summary>
/// Derives the CLR version (and, for Mono 1.x, the effective framework
/// version) from a two-part framework version.
/// </summary>
private void InitFromFrameworkVersion(Version version)
{
    FrameworkVersion = ClrVersion = version;

    // Major 0 means "any version": leave both versions as given.
    if (version.Major == 0)
        return;

    // Mapping only applies to .NET, Mono and Any; other runtimes keep version as-is.
    if (Runtime != RuntimeType.Net && Runtime != RuntimeType.Mono && Runtime != RuntimeType.Any)
        return;

    if (version.Major == 1)
    {
        if (version.Minor == 0)
        {
            // Mono's 1.0 profile ran on its 1.1 CLR; Microsoft's on CLR 1.0.3705.
            ClrVersion = Runtime == RuntimeType.Mono
                ? new Version(1, 1, 4322)
                : new Version(1, 0, 3705);
        }
        else if (version.Minor == 1)
        {
            // Mono never had a 1.1 profile: normalize to framework 1.0.
            if (Runtime == RuntimeType.Mono)
                FrameworkVersion = new Version(1, 0);
            ClrVersion = new Version(1, 1, 4322);
        }
        else
        {
            ThrowInvalidFrameworkVersion(version);
        }
    }
    else if (version.Major == 2 || version.Major == 3)
    {
        // Frameworks 2.x and 3.x both run on CLR 2.0.50727.
        ClrVersion = new Version(2, 0, 50727);
    }
    else if (version.Major == 4)
    {
        ClrVersion = new Version(4, 0, 30319);
    }
    else
    {
        ThrowInvalidFrameworkVersion(version);
    }
}
// Shared throw helper so every invalid-version branch reports identically.
private static void ThrowInvalidFrameworkVersion(Version version)
{
    string message = "Unknown framework version " + version;
    throw new ArgumentException(message, nameof(version));
}
/// <summary>
/// Derives the two-part framework version from a full CLR version.
/// </summary>
private void InitFromClrVersion(Version version)
{
    // The framework version is the Major.Minor prefix of the CLR version,
    // except on Mono 1.x where the framework profile is always 1.0.
    bool isMono1 = Runtime == RuntimeType.Mono && version.Major == 1;
    FrameworkVersion = isMono1
        ? new Version(1, 0)
        : new Version(version.Major, version.Minor);
    ClrVersion = version;
}
#endregion
#region Properties
/// <summary>
/// Static method to return a RuntimeFramework object
/// for the framework that is currently in use.
/// </summary>
// Backed by the Lazy field, so detection runs at most once per process.
public static RuntimeFramework CurrentFramework
{
    get
    {
        return currentFramework.Value;
    }
}

/// <summary>
/// The type of this runtime framework
/// </summary>
public RuntimeType Runtime { get; }

/// <summary>
/// The framework version for this runtime framework
/// </summary>
public Version FrameworkVersion { get; private set; }

/// <summary>
/// The CLR version for this runtime framework
/// </summary>
public Version ClrVersion { get; private set; }

/// <summary>
/// Return true if any CLR version may be used in
/// matching this RuntimeFramework object.
/// </summary>
// True when ClrVersion is the sentinel DefaultVersion (0.0).
public bool AllowAnyVersion
{
    get { return ClrVersion == DefaultVersion; }
}

/// <summary>
/// Returns the Display name for this framework
/// </summary>
public string DisplayName { get; private set; }
#endregion
#region Public Methods
/// <summary>
/// Parses a string representing a RuntimeFramework.
/// The string may be just a RuntimeType name or just
/// a Version or a hyphenated RuntimeType-Version or
/// a Version prefixed by 'v'.
/// </summary>
/// <param name="s">A string such as "net-4.0", "v2.0", "mono" or "3.5".</param>
/// <returns>The RuntimeFramework described by the string.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="s"/> is null or empty.</exception>
public static RuntimeFramework Parse(string s)
{
    // Guard: the unchecked s[0] below previously threw NullReferenceException /
    // IndexOutOfRangeException for null or empty input.
    if (string.IsNullOrEmpty(s))
        throw new ArgumentException("RuntimeFramework string may not be null or empty", nameof(s));

    RuntimeType runtime = RuntimeType.Any;
    Version version = DefaultVersion;

    string[] parts = s.Split('-');
    if (parts.Length == 2)
    {
        // "net-4.0" form: runtime name, hyphen, optional version
        runtime = (RuntimeType)Enum.Parse(typeof(RuntimeType), parts[0], true);
        string vstring = parts[1];
        if (vstring != "")
            version = new Version(vstring);
    }
    else if (char.ToLower(s[0]) == 'v')
    {
        // "v4.0" form: version only, prefixed by 'v'
        version = new Version(s.Substring(1));
    }
    else if (IsRuntimeTypeName(s))
    {
        // "mono", "net", ... : runtime name only, any version
        runtime = (RuntimeType)Enum.Parse(typeof(RuntimeType), s, true);
    }
    else
    {
        // bare version such as "3.5"
        version = new Version(s);
    }

    return new RuntimeFramework(runtime, version);
}
/// <summary>
/// Overridden to return the short name of the framework,
/// e.g. "net-4.0", "v2.0" or "mono".
/// </summary>
/// <returns>The short, lower-case framework identifier.</returns>
public override string ToString()
{
    // Without a specific CLR version the runtime name alone identifies us.
    if (AllowAnyVersion)
        return Runtime.ToString().ToLower();
    string vstring = FrameworkVersion.ToString();
    return Runtime == RuntimeType.Any
        ? "v" + vstring
        : Runtime.ToString().ToLower() + "-" + vstring;
}
/// <summary>
/// Returns true if the current framework matches the
/// one supplied as an argument. Two frameworks match
/// if their runtime types are the same or either one
/// is RuntimeType.Any and all specified version components
/// are equal. Negative (i.e. unspecified) version
/// components are ignored.
/// </summary>
/// <param name="target">The RuntimeFramework to be matched.</param>
/// <returns>True on match, otherwise false</returns>
public bool Supports(RuntimeFramework target)
{
    // Runtimes must agree unless either side is a wildcard.
    bool runtimesCompatible =
        Runtime == RuntimeType.Any ||
        target.Runtime == RuntimeType.Any ||
        Runtime == target.Runtime;
    if (!runtimesCompatible)
        return false;
    // A wildcard CLR version on either side matches anything.
    if (AllowAnyVersion || target.AllowAnyVersion)
        return true;
    return VersionsMatch(ClrVersion, target.ClrVersion)
        && FrameworkVersion.Major >= target.FrameworkVersion.Major
        && FrameworkVersion.Minor >= target.FrameworkVersion.Minor;
}
#endregion
#region Helper Methods
/// <summary>
/// Returns true if the given string is (case-insensitively) the name
/// of a member of the RuntimeType enum.
/// </summary>
/// <param name="name">The candidate runtime type name.</param>
private static bool IsRuntimeTypeName(string name)
{
    // Fix: ToLower()-based comparison is culture-sensitive (e.g. the
    // Turkish dotless-i); OrdinalIgnoreCase compares the names safely.
    return Enum.GetNames(typeof(RuntimeType))
        .Any(item => item.Equals(name, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Builds the default display name for a runtime/version pair:
/// just the runtime name when the version is unspecified, "v" + version
/// when the runtime is unspecified, otherwise both.
/// </summary>
private static string GetDefaultDisplayName(RuntimeType runtime, Version version)
{
    if (version == DefaultVersion)
        return runtime.ToString();
    return runtime == RuntimeType.Any
        ? "v" + version
        : runtime + " " + version;
}
/// <summary>
/// Compares two versions, ignoring build/revision components that are
/// unspecified (negative) on either side. Major and minor must match exactly.
/// </summary>
private static bool VersionsMatch(Version v1, Version v2)
{
    if (v1.Major != v2.Major || v1.Minor != v2.Minor)
        return false;
    // Build is only significant when both sides specify it.
    if (v1.Build >= 0 && v2.Build >= 0 && v1.Build != v2.Build)
        return false;
    // Likewise for revision.
    return v1.Revision < 0 || v2.Revision < 0 || v1.Revision == v2.Revision;
}
#endregion
}
}
#endif
| |
// Copyright 2004-2012 Castle Project, Henrik Feldt &contributors - https://github.com/castleproject
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Castle.IO
{
using System;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Net;
using System.Text.RegularExpressions;
/// <summary>
/// Immutable path data holder and value object that overrides Equals,
/// implements IEquatable and overrides the == and != operators.
///
/// Invariant: no fields nor properties are null after c'tor.
/// </summary>
[DebuggerDisplay(@"PathInfo: \{ Root: {Root}, Rest: {NonRootPath} \}")]
public sealed class PathInfo : IEquatable<PathInfo>
{
// Matches the Windows long-path prefix "\\?" with an optional literal "\UNC".
internal const string UNCPrefixRegex = @"(?<UNC_prefix> \\\\\? (?<UNC_literal>\\UNC)? )";
// Matches a device root such as "\\.\name" or "\\.\{guid}", optionally
// preceded by the UNC long-path prefix.
internal const string DeviceRegex =
UNCPrefixRegex +
@"?
(?(UNC_prefix)|\\)
(?<device>
(?<dev_prefix>\\\.\\)
(
(?<dev_name>[\w\-]+)
|(?<dev_guid>\{[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}\})
)
)\\?";
// Matches a drive root such as "C:\" (1-3 drive letters, colon, slash),
// optionally preceded by the UNC long-path prefix.
private const string DriveRegex =
UNCPrefixRegex +
@"?
(?(UNC_prefix)\\)? # if we have an UNC prefix, there must be an extra backslash
(?<drive>
(?<drive_letter>[A-Z]{1,3})
: # the :-character after the drive letter
(\\|/) # the trailing slash
)";
// Matches a server root: "\\host", "\\1.2.3.4" or "\\[ipv6]" — a server
// literally named "UNC" is excluded so it is not confused with the prefix.
internal const string ServerRegex =
UNCPrefixRegex +
@"?
(?(UNC_prefix)|\\) #this is optional IIF we have the UNC_prefix, so only match \\ if we did not have it
(?<server>
(?<server_prefix>\\)
(?:
(?<ipv4>(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3})
|(?:\[(?<ipv6>[A-F0-9:]{3,39})\])
|(?<server_name>(?!UNC)[\w\-]+) # allow dashes in server names, but ignore servers named UNC
)
)\\?";
// The full path grammar: an optional root (drive, server, device, or a
// bare slash) followed by the optional non-root remainder, which may carry
// a relative drive specifier like "C:" without a slash.
private const string StrRegex =
@"
(?<root>
(" + DriveRegex + @")
|(" + ServerRegex + @")
|(" + DeviceRegex +
@")
|/
|\\
)?
(?<nonrootpath>
(?!\\)
(?<rel_drive>\w{1,3}:)?
(?<folders_files>.+))?";
// Compiled once; IgnorePatternWhitespace lets the patterns above carry
// inline # comments, Multiline/IgnoreCase match the grammar's intent.
private static readonly Regex regex = new Regex(StrRegex,
RegexOptions.Compiled |
RegexOptions.IgnorePatternWhitespace |
RegexOptions.IgnoreCase |
RegexOptions.Multiline);
// Captured regex group values, one per named group in the grammar above.
// Never null: absent groups are stored as the empty string (see GetMatch
// and the constructor contracts).
private readonly string root,
uncPrefix,
uncLiteral,
drive,
driveLetter,
server,
iPv4,
iPv6,
serverName,
device,
devicePrefix,
deviceName,
deviceGuid,
nonRootPath,
relDrive,
folderAndFiles;
// The static checker cannot verify the postcondition here, so contract
// verification is switched off for this method.
[ContractVerification(false)]
public static PathInfo Parse(string path)
{
    Contract.Requires(path != null);
    Contract.Ensures(Contract.Result<PathInfo>() != null);
    var matches = regex.Matches(path);
    // Extract every named group from the match set; groups that did not
    // participate come back as the empty string. Raw integer group indices
    // would be marginally faster, but the names keep this readable.
    return new PathInfo(
        GetMatch(matches, "root"),
        GetMatch(matches, "UNC_prefix"),
        GetMatch(matches, "UNC_literal"),
        GetMatch(matches, "drive"),
        GetMatch(matches, "drive_letter"),
        GetMatch(matches, "server"),
        GetMatch(matches, "ipv4"),
        GetMatch(matches, "ipv6"),
        GetMatch(matches, "server_name"),
        GetMatch(matches, "device"),
        GetMatch(matches, "dev_prefix"),
        GetMatch(matches, "dev_name"),
        GetMatch(matches, "dev_guid"),
        GetMatch(matches, "nonrootpath"),
        GetMatch(matches, "rel_drive"),
        GetMatch(matches, "folders_files"));
}
/// <summary>
/// Returns the value of the first successful capture of the named group
/// across all matches, or the empty string when no match captured it.
/// </summary>
internal static string GetMatch(MatchCollection matches,
                                string groupIndex)
{
    Contract.Requires(matches != null);
    Contract.Ensures(Contract.Result<string>() != null);
    foreach (Match match in matches)
    {
        var group = match.Groups[groupIndex];
        if (group.Success)
            return group.Value;
    }
    return string.Empty;
}
#region c'tor and non null invariants
// Private: instances are only built by Parse, one argument per captured
// regex group. Every argument must be non-null (empty string for "absent").
private PathInfo(string root, string uncPrefix, string uncLiteral, string drive, string driveLetter,
string server, string iPv4, string iPv6, string serverName, string device, string devicePrefix,
string deviceName, string deviceGuid, string nonRootPath, string relDrive, string folderAndFiles)
{
Contract.Requires(root != null);
Contract.Requires(uncPrefix != null);
Contract.Requires(uncLiteral != null);
Contract.Requires(drive != null);
Contract.Requires(driveLetter != null);
Contract.Requires(server != null);
Contract.Requires(iPv4 != null);
Contract.Requires(iPv6 != null);
Contract.Requires(serverName != null);
Contract.Requires(device != null);
Contract.Requires(devicePrefix != null);
Contract.Requires(deviceName != null);
Contract.Requires(deviceGuid != null);
Contract.Requires(nonRootPath != null);
Contract.Requires(relDrive != null);
Contract.Requires(folderAndFiles != null);
this.root = root;
this.uncPrefix = uncPrefix;
this.uncLiteral = uncLiteral;
this.drive = drive;
this.driveLetter = driveLetter;
this.server = server;
this.iPv4 = iPv4;
this.iPv6 = iPv6;
this.serverName = serverName;
this.device = device;
this.devicePrefix = devicePrefix;
this.deviceName = deviceName;
this.deviceGuid = deviceGuid;
this.nonRootPath = nonRootPath;
this.relDrive = relDrive;
this.folderAndFiles = folderAndFiles;
}
// Object invariant for the static checker: mirrors the constructor
// preconditions — no field is ever null after construction.
[ContractInvariantMethod]
private void Invariant()
{
Contract.Invariant(root != null);
Contract.Invariant(uncPrefix != null);
Contract.Invariant(uncLiteral != null);
Contract.Invariant(drive != null);
Contract.Invariant(driveLetter != null);
Contract.Invariant(server != null);
Contract.Invariant(iPv4 != null);
Contract.Invariant(iPv6 != null);
Contract.Invariant(serverName != null);
Contract.Invariant(device != null);
Contract.Invariant(devicePrefix != null);
Contract.Invariant(deviceName != null);
Contract.Invariant(deviceGuid != null);
Contract.Invariant(nonRootPath != null);
Contract.Invariant(relDrive != null);
Contract.Invariant(folderAndFiles != null);
}
#endregion
/// <summary>
/// Examples of return values:
/// <list>
/// <item>\\?\UNC\C:\</item>
/// <item>\\?\UNC\servername\</item>
/// <item>\\192.168.0.2\</item>
/// <item>C:\</item>
/// </list>
///
/// Definition: Returns part of the string that is in itself uniquely from the currently
/// executing CLR.
/// </summary>
[Pure]
public string Root
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return root;
}
}
/// <summary>
/// Gets the long-path prefix ("\\?", optionally followed by "\UNC")
/// captured from the path, or the empty string if there was none.
/// Examples of return values:
/// <list>
/// <item></item>
/// </list>
/// </summary>
[Pure]
public string UNCPrefix
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return uncPrefix;
}
}
/// <summary>
/// Gets the literal "\UNC" portion of a long-form UNC path,
/// or the empty string if there was none.
/// </summary>
[Pure]
public string UNCLiteral
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return uncLiteral;
}
}
/// <summary>
/// Gets the drive portion of the root including the colon and trailing
/// slash (e.g. "C:\"), or the empty string.
/// </summary>
[Pure]
public string Drive
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return drive;
}
}
/// <summary>
/// Gets the drive letter(s) without the colon (e.g. "C"),
/// or the empty string.
/// </summary>
[Pure]
public string DriveLetter
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return driveLetter;
}
}
/// <summary>
/// Gets the server portion of a UNC path (leading backslash plus host
/// name or IP literal), or the empty string.
/// </summary>
[Pure]
public string Server
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return server;
}
}
/// <summary>
/// Gets the IPv4 IP-address if any. <see cref = "IPAddress.None" />
/// if none was found.
/// </summary>
[Pure]
public IPAddress IPv4
{
get
{
Contract.Ensures(Contract.Result<IPAddress>() != null);
IPAddress addr;
return !string.IsNullOrEmpty(iPv4) && IPAddress.TryParse(iPv4, out addr)
? addr
: IPAddress.None;
}
}
/// <summary>
/// Gets the IPv6 IP-address if any. <see cref = "IPAddress.None" />
/// if non was found.
/// </summary>
[Pure]
public IPAddress IPv6
{
get
{
Contract.Ensures(Contract.Result<IPAddress>() != null);
IPAddress addr;
return !string.IsNullOrEmpty(iPv6) && IPAddress.TryParse(iPv6, out addr)
? addr
: IPAddress.IPv6None;
}
}
/// <summary>
/// Gets the host name matched inside the server portion,
/// or the empty string.
/// </summary>
[Pure]
public string ServerName
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return serverName;
}
}
/// <summary>
/// Gets the device portion of the path ("\\.\" prefix plus a device
/// name or GUID), or the empty string.
/// </summary>
[Pure]
public string Device
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return device;
}
}
/// <summary>
/// Gets the "\\.\" device prefix, or the empty string.
/// </summary>
[Pure]
public string DevicePrefix
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return devicePrefix;
}
}
/// <summary>
/// Gets the device name matched inside the device portion,
/// or the empty string.
/// </summary>
[Pure]
public string DeviceName
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return deviceName;
}
}
/// <summary>
/// Gets the device GUID in the form
/// <code>{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}</code>
/// i.e. 8-4-4-4-12 hex digits with curly brackets.
/// </summary>
[Pure]
public Guid DeviceGuid
{
// The dev_guid regex group constrains the capture to a parseable format.
get { return deviceGuid == string.Empty ? Guid.Empty : Guid.Parse(deviceGuid); }
}
/// <summary>
/// Gets a the part of the path that starts when the root ends.
/// The root in turn is any UNC-prefix plus device, drive, server or ip-prefix.
/// This string may not start with neither of '\' or '/'.
/// </summary>
[Pure]
public string NonRootPath
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return nonRootPath;
}
}
/// <summary>
/// Gets a relative drive specifier (up to three word characters plus a
/// colon, no slash) found in the non-root part, or the empty string.
/// </summary>
[Pure]
public string RelDrive
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return relDrive;
}
}
/// <summary>
/// The only time when this differs from <see cref = "NonRootPath" />
/// is when a path like this is used:
/// <code>C:../parent/a.txt</code>, otherwise, for all paths,
/// this property equals <see cref = "NonRootPath" />.
/// </summary>
[Pure]
public string FolderAndFiles
{
get
{
Contract.Ensures(Contract.Result<string>() != null);
return folderAndFiles;
}
}
/// <summary>
/// Classifies this path by which root component was captured.
/// Precedence: device, then server name, then IPv4, then IPv6,
/// then drive; a path with none of these is Relative.
/// </summary>
[Pure]
public PathType Type
{
get
{
if (!string.IsNullOrEmpty(Device))
return PathType.Device;
if (!string.IsNullOrEmpty(ServerName))
return PathType.Server;
if (IPv4 != IPAddress.None)
return PathType.IPv4;
if (IPv6 != IPAddress.IPv6None)
return PathType.IPv6;
if (!string.IsNullOrEmpty(Drive))
return PathType.Drive;
return PathType.Relative;
}
}
/// <summary>
/// Returns whether <see cref = "Root" /> is not an empty string.
/// </summary>
[Pure]
public bool IsRooted
{
get { return !string.IsNullOrEmpty(root); }
}
/// <summary>
/// Returns whether the current PathInfo is a valid parent of the child path info
/// passed as argument.
/// </summary>
/// <param name = "child">The path info to verify</param>
/// <returns>Whether it is true that the current path info is a parent of child.</returns>
/// <exception cref = "NotSupportedException">If this instance of path info and child aren't rooted.</exception>
public bool IsParentOf(PathInfo child)
{
Contract.Requires(child != null);
if (Root == string.Empty || child.Root == string.Empty)
throw new NotSupportedException("Non-rooted paths are not supported.");
// TODO: Normalize Path
// NOTE(review): culture-sensitive StartsWith — consider passing a
// StringComparison; confirm whether ordinal matching was intended.
var OK = child.FolderAndFiles.StartsWith(FolderAndFiles);
// The prefix test alone is not enough: the roots must also refer to the
// same device/server/IP/drive, so tighten per path classification.
switch (Type)
{
case PathType.Device:
OK &= child.DeviceName.Equals(DeviceName, StringComparison.InvariantCultureIgnoreCase);
break;
case PathType.Server:
OK &= child.ServerName.Equals(ServerName, StringComparison.InvariantCultureIgnoreCase);
break;
case PathType.IPv4:
OK &= child.IPv4.Equals(IPv4);
break;
case PathType.IPv6:
OK &= child.IPv6.Equals(IPv6);
break;
case PathType.Relative:
throw new NotSupportedException("Since root isn't empty we should never get relative paths.");
case PathType.Drive:
OK &= DriveLetter.ToLowerInvariant() == child.DriveLetter.ToLowerInvariant();
break;
}
return OK;
}
/// <summary>
/// Removes the path info passes as a parameter from the current root. Only works for two rooted paths with same root.
/// Does NOT cover all edge cases, please verify its intended results yourself.
/// <example>
/// </example>
/// </summary>
/// <param name = "other">The (shorter) path to strip from the front of this one.</param>
/// <returns>The remainder of this path after <paramref name="other"/>, with leading separators trimmed.</returns>
public string RemoveParameterFromRoot(PathInfo other)
{
Contract.Requires(Root == other.Root, "roots must match to be able to subtract");
Contract.Requires(FolderAndFiles.Length >= other.FolderAndFiles.Length,
"The folders and files part of the parameter must be shorter or equal to in length, than that path you wish to subtract from.");
// Identical non-root parts subtract to nothing.
if (other.FolderAndFiles == FolderAndFiles)
return string.Empty;
// NOTE(review): this is a pure length-based cut — it assumes other's
// FolderAndFiles is actually a prefix of ours; confirm with callers.
var startIndex = other.FolderAndFiles.Length;
Contract.Assume(startIndex <= FolderAndFiles.Length);
var substring = FolderAndFiles.Substring(startIndex);
return substring.TrimStart(Path.GetDirectorySeparatorChars());
}
// Value equality over the captured path components.
// NOTE(review): root, uncPrefix and uncLiteral are not compared here
// (consistent with GetHashCode) — confirm the exclusion is intentional.
public bool Equals(PathInfo other)
{
if (ReferenceEquals(null, other)) return false;
if (ReferenceEquals(this, other)) return true;
return Equals(other.drive, drive)
&& Equals(other.driveLetter, driveLetter)
&& Equals(other.server, server)
&& Equals(other.iPv4, iPv4)
&& Equals(other.iPv6, iPv6)
&& Equals(other.serverName, serverName)
&& Equals(other.device, device)
&& Equals(other.devicePrefix, devicePrefix)
&& Equals(other.deviceName, deviceName)
&& Equals(other.deviceGuid, deviceGuid)
&& Equals(other.nonRootPath, nonRootPath)
&& Equals(other.relDrive, relDrive)
&& Equals(other.folderAndFiles, folderAndFiles);
}
/// <summary>
/// Overridden object equality; defers to <see cref="Equals(PathInfo)"/>.
/// </summary>
public override bool Equals(object obj)
{
    if (ReferenceEquals(this, obj)) return true;
    // PathInfo is sealed, so an as-cast is equivalent to the exact-type check.
    var other = obj as PathInfo;
    return other != null && Equals(other);
}
// Hash combines exactly the fields compared by Equals(PathInfo), using the
// conventional multiply-by-prime (397) accumulation in an unchecked block.
public override int GetHashCode()
{
unchecked
{
var result = drive.GetHashCode();
result = (result*397) ^ driveLetter.GetHashCode();
result = (result*397) ^ server.GetHashCode();
result = (result*397) ^ iPv4.GetHashCode();
result = (result*397) ^ iPv6.GetHashCode();
result = (result*397) ^ serverName.GetHashCode();
result = (result*397) ^ device.GetHashCode();
result = (result*397) ^ devicePrefix.GetHashCode();
result = (result*397) ^ deviceName.GetHashCode();
result = (result*397) ^ deviceGuid.GetHashCode();
result = (result*397) ^ nonRootPath.GetHashCode();
result = (result*397) ^ relDrive.GetHashCode();
result = (result*397) ^ folderAndFiles.GetHashCode();
return result;
}
}
// Operator pair delegates to the static object.Equals overload, which
// handles null on either side before calling the IEquatable implementation.
public static bool operator ==(PathInfo left, PathInfo right)
{
return Equals(left, right);
}
public static bool operator !=(PathInfo left, PathInfo right)
{
return !Equals(left, right);
}
}
}
| |
//------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------------------------
namespace System.ServiceModel.Configuration
{
using System;
using System.Configuration;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.Security.Cryptography.X509Certificates;
using System.ServiceModel;
using System.ServiceModel.Security;
using System.Xml;
public sealed partial class IssuedTokenServiceElement : ConfigurationElement
{
public IssuedTokenServiceElement()
{
}
// Collection of audience URIs this service accepts in issued tokens.
[ConfigurationProperty(ConfigurationStrings.AllowedAudienceUris)]
public AllowedAudienceUriElementCollection AllowedAudienceUris
{
get { return (AllowedAudienceUriElementCollection)base[ConfigurationStrings.AllowedAudienceUris]; }
}
[ConfigurationProperty(ConfigurationStrings.AudienceUriMode, DefaultValue = IssuedTokenServiceCredential.DefaultAudienceUriMode)]
[ServiceModelEnumValidator(typeof(AudienceUriModeValidationHelper))]
public AudienceUriMode AudienceUriMode
{
get { return (AudienceUriMode)base[ConfigurationStrings.AudienceUriMode]; }
set { base[ConfigurationStrings.AudienceUriMode] = value; }
}
// Assembly-qualified type name of a custom X509CertificateValidator;
// a null assignment is normalized to the empty string.
[ConfigurationProperty(ConfigurationStrings.CustomCertificateValidatorType, DefaultValue = "")]
[StringValidator(MinLength = 0)]
public string CustomCertificateValidatorType
{
get { return (string)base[ConfigurationStrings.CustomCertificateValidatorType]; }
set
{
if (String.IsNullOrEmpty(value))
{
value = String.Empty;
}
base[ConfigurationStrings.CustomCertificateValidatorType] = value;
}
}
[ConfigurationProperty(ConfigurationStrings.CertificateValidationMode, DefaultValue = IssuedTokenServiceCredential.DefaultCertificateValidationMode)]
[ServiceModelEnumValidator(typeof(X509CertificateValidationModeHelper))]
public X509CertificateValidationMode CertificateValidationMode
{
get { return (X509CertificateValidationMode)base[ConfigurationStrings.CertificateValidationMode]; }
set { base[ConfigurationStrings.CertificateValidationMode] = value; }
}
[ConfigurationProperty(ConfigurationStrings.RevocationMode, DefaultValue = IssuedTokenServiceCredential.DefaultRevocationMode)]
[StandardRuntimeEnumValidator(typeof(X509RevocationMode))]
public X509RevocationMode RevocationMode
{
get { return (X509RevocationMode)base[ConfigurationStrings.RevocationMode]; }
set { base[ConfigurationStrings.RevocationMode] = value; }
}
[ConfigurationProperty(ConfigurationStrings.TrustedStoreLocation, DefaultValue = IssuedTokenServiceCredential.DefaultTrustedStoreLocation)]
[StandardRuntimeEnumValidator(typeof(StoreLocation))]
public StoreLocation TrustedStoreLocation
{
get { return (StoreLocation)base[ConfigurationStrings.TrustedStoreLocation]; }
set { base[ConfigurationStrings.TrustedStoreLocation] = value; }
}
// Assembly-qualified type name of a custom SamlSerializer; a null
// assignment is normalized to the empty string.
[ConfigurationProperty(ConfigurationStrings.SamlSerializerType, DefaultValue = "")]
[StringValidator(MinLength = 0)]
public string SamlSerializerType
{
get { return (string)base[ConfigurationStrings.SamlSerializerType]; }
set
{
if (String.IsNullOrEmpty(value))
{
value = String.Empty;
}
base[ConfigurationStrings.SamlSerializerType] = value;
}
}
// Certificates of token issuers this service trusts.
[ConfigurationProperty(ConfigurationStrings.KnownCertificates)]
public X509CertificateTrustedIssuerElementCollection KnownCertificates
{
get { return (X509CertificateTrustedIssuerElementCollection)base[ConfigurationStrings.KnownCertificates]; }
}
[ConfigurationProperty(ConfigurationStrings.AllowUntrustedRsaIssuers, DefaultValue = IssuedTokenServiceCredential.DefaultAllowUntrustedRsaIssuers)]
public bool AllowUntrustedRsaIssuers
{
get { return (bool)base[ConfigurationStrings.AllowUntrustedRsaIssuers]; }
set { base[ConfigurationStrings.AllowUntrustedRsaIssuers] = value; }
}
/// <summary>
/// Copies all settings from another IssuedTokenServiceElement into this one.
/// Collection properties are only copied when they were explicitly set on
/// the source (ValueOrigin != Default); scalar properties are always copied.
/// </summary>
/// <param name="from">The element to copy from; must not be null.</param>
public void Copy(IssuedTokenServiceElement from)
{
if (this.IsReadOnly())
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ConfigurationErrorsException(SR.GetString(SR.ConfigReadOnly)));
}
if (null == from)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("from");
}
this.SamlSerializerType = from.SamlSerializerType;
#pragma warning suppress 56506 // [....]; ElementInformation is never null.
PropertyInformationCollection propertyInfo = from.ElementInformation.Properties;
if (propertyInfo[ConfigurationStrings.KnownCertificates].ValueOrigin != PropertyValueOrigin.Default)
{
// Replace (not merge) our certificate list with deep copies.
this.KnownCertificates.Clear();
foreach (X509CertificateTrustedIssuerElement src in from.KnownCertificates)
{
X509CertificateTrustedIssuerElement copy = new X509CertificateTrustedIssuerElement();
copy.Copy(src);
this.KnownCertificates.Add(copy);
}
}
if (propertyInfo[ConfigurationStrings.AllowedAudienceUris].ValueOrigin != PropertyValueOrigin.Default)
{
// Replace (not merge) our audience URI list.
this.AllowedAudienceUris.Clear();
foreach (AllowedAudienceUriElement src in from.AllowedAudienceUris)
{
AllowedAudienceUriElement copy = new AllowedAudienceUriElement();
copy.AllowedAudienceUri = src.AllowedAudienceUri;
this.AllowedAudienceUris.Add(copy);
}
}
this.AllowUntrustedRsaIssuers = from.AllowUntrustedRsaIssuers;
this.CertificateValidationMode = from.CertificateValidationMode;
this.AudienceUriMode = from.AudienceUriMode;
this.CustomCertificateValidatorType = from.CustomCertificateValidatorType;
this.RevocationMode = from.RevocationMode;
this.TrustedStoreLocation = from.TrustedStoreLocation;
}
/// <summary>
/// Applies this configuration element's settings onto a runtime
/// IssuedTokenServiceCredential, instantiating any configured custom
/// validator/serializer types via reflection and loading known issuer
/// certificates from the configured stores.
/// </summary>
/// <param name="issuedToken">The credential to configure; must not be null.</param>
internal void ApplyConfiguration(IssuedTokenServiceCredential issuedToken)
{
if (issuedToken == null)
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("issuedToken");
}
issuedToken.CertificateValidationMode = this.CertificateValidationMode;
issuedToken.RevocationMode = this.RevocationMode;
issuedToken.TrustedStoreLocation = this.TrustedStoreLocation;
issuedToken.AudienceUriMode = this.AudienceUriMode;
if (!string.IsNullOrEmpty(this.CustomCertificateValidatorType))
{
// Type.GetType(..., true) throws if the type cannot be loaded; the
// assignability check below rejects types of the wrong base class.
Type type = System.Type.GetType(this.CustomCertificateValidatorType, true);
if (!typeof(X509CertificateValidator).IsAssignableFrom(type))
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ConfigurationErrorsException(
SR.GetString(SR.ConfigInvalidCertificateValidatorType, this.CustomCertificateValidatorType, typeof(X509CertificateValidator).ToString())));
}
issuedToken.CustomCertificateValidator = (X509CertificateValidator)Activator.CreateInstance(type);
}
if (!string.IsNullOrEmpty(this.SamlSerializerType))
{
Type type = System.Type.GetType(this.SamlSerializerType, true);
if (!typeof(SamlSerializer).IsAssignableFrom(type))
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ConfigurationErrorsException(
SR.GetString(SR.ConfigInvalidSamlSerializerType, this.SamlSerializerType, typeof(SamlSerializer).ToString())));
}
issuedToken.SamlSerializer = (SamlSerializer)Activator.CreateInstance(type);
}
// Collections are only applied when explicitly present in config.
PropertyInformationCollection propertyInfo = this.ElementInformation.Properties;
if (propertyInfo[ConfigurationStrings.KnownCertificates].ValueOrigin != PropertyValueOrigin.Default)
{
foreach (X509CertificateTrustedIssuerElement src in this.KnownCertificates)
{
issuedToken.KnownCertificates.Add(SecurityUtils.GetCertificateFromStore(src.StoreName, src.StoreLocation, src.X509FindType, src.FindValue, null));
}
}
if (propertyInfo[ConfigurationStrings.AllowedAudienceUris].ValueOrigin != PropertyValueOrigin.Default)
{
foreach (AllowedAudienceUriElement src in this.AllowedAudienceUris)
{
issuedToken.AllowedAudienceUris.Add(src.AllowedAudienceUri);
}
}
issuedToken.AllowUntrustedRsaIssuers = this.AllowUntrustedRsaIssuers;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Linq.Expressions;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.Pipeline;
using Azure.Data.Tables.Models;
using Azure.Data.Tables.Sas;
namespace Azure.Data.Tables
{
/// <summary>
/// The <see cref="TableServiceClient"/> provides synchronous and asynchronous methods to perform table level operations with Azure Tables hosted in either Azure storage accounts or Azure Cosmos DB table API.
/// </summary>
public class TableServiceClient
{
private readonly ClientDiagnostics _diagnostics;
// Generated REST clients: table-level operations plus service-level
// operations against the primary and secondary endpoints.
private readonly TableRestClient _tableOperations;
private readonly ServiceRestClient _serviceOperations;
private readonly ServiceRestClient _secondaryServiceOperations;
private readonly OdataMetadataFormat _format = OdataMetadataFormat.ApplicationJsonOdataMinimalmetadata;
private readonly string _version;
// True when the endpoint is a Cosmos DB table API endpoint (see IsPremiumEndpoint).
internal readonly bool _isCosmosEndpoint;
private readonly QueryOptions _defaultQueryOptions = new QueryOptions() { Format = OdataMetadataFormat.ApplicationJsonOdataMinimalmetadata };
// Lazily populated by the AccountName property when not supplied up front.
private string _accountName;
private readonly Uri _endpoint;
private readonly HttpPipeline _pipeline;
/// <summary>
/// The name of the table account with which this client instance will interact.
/// </summary>
public virtual string AccountName
{
get
{
// Parsed lazily from the endpoint URI on first access.
// NOTE(review): not synchronized — a race re-parses but yields the
// same value, so this appears benign; confirm if that matters here.
if (_accountName == null)
{
var builder = new TableUriBuilder(_endpoint);
_accountName = builder.AccountName;
}
return _accountName;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/> using the specified <see cref="Uri" /> containing a shared access signature (SAS)
/// token credential. See <see cref="TableClient.GetSasBuilder(TableSasPermissions, DateTimeOffset)" /> for creating a SAS token.
/// </summary>
/// <param name="endpoint">
/// A <see cref="Uri"/> referencing the table service account.
/// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
/// </param>
/// <param name="credential">The shared access signature credential used to sign requests.</param>
public TableServiceClient(Uri endpoint, AzureSasCredential credential)
// Delegates to the (endpoint, credential, options) overload with default options.
: this(endpoint, credential, null)
{
Argument.AssertNotNull(credential, nameof(credential));
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/> using the specified connection string.
/// </summary>
/// <param name="connectionString">
/// A connection string includes the authentication information
/// required for your application to access data in an Azure Storage
/// account at runtime.
///
/// For more information,
/// <see href="https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string">
/// Configure Azure Storage connection strings</see>.
/// </param>
public TableServiceClient(string connectionString)
// Delegates to the (connectionString, options) overload with default options.
: this(connectionString, null)
{ }
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/> using the specified <see cref="Uri" /> containing a shared access signature (SAS)
/// token credential. See <see cref="TableClient.GetSasBuilder(TableSasPermissions, DateTimeOffset)" /> for creating a SAS token.
/// </summary>
/// <param name="endpoint">
/// A <see cref="Uri"/> referencing the table service account.
/// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
/// </param>
/// <param name="credential">The shared access signature credential used to sign requests.</param>
/// <param name="options">
/// Optional client options that define the transport pipeline policies for authentication, retries, etc., that are applied to every request.
/// </param>
/// <exception cref="ArgumentException">When <paramref name="endpoint"/> is not an HTTPS endpoint.</exception>
public TableServiceClient(Uri endpoint, AzureSasCredential credential, TablesClientOptions options = null)
    : this(endpoint, default, credential, options)
{
    // SAS tokens are bearer secrets, so refuse to send them over plain HTTP.
    if (endpoint.Scheme != Uri.UriSchemeHttps)
    {
        // Fix: the original message said "Cannot use TokenCredential", but
        // this constructor authenticates with an AzureSasCredential.
        throw new ArgumentException("Cannot use AzureSasCredential without HTTPS.", nameof(endpoint));
    }
    Argument.AssertNotNull(credential, nameof(credential));
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/> using the specified table service <see cref="Uri" /> and <see cref="TableSharedKeyCredential" />.
/// </summary>
/// <param name="endpoint">
/// A <see cref="Uri"/> referencing the table service account.
/// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
/// </param>
/// <param name="credential">The shared key credential used to sign requests.</param>
public TableServiceClient(Uri endpoint, TableSharedKeyCredential credential)
// Wraps the shared key in a signing pipeline policy; no SAS credential.
: this(endpoint, new TableSharedKeyPipelinePolicy(credential), default, null)
{
Argument.AssertNotNull(credential, nameof(credential));
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/> using the specified table service <see cref="Uri" /> and <see cref="TableSharedKeyCredential" />.
/// </summary>
/// <param name="endpoint">
/// A <see cref="Uri"/> referencing the table service account.
/// This is likely to be similar to "https://{account_name}.table.core.windows.net/" or "https://{account_name}.table.cosmos.azure.com/".
/// </param>
/// <param name="credential">The shared key credential used to sign requests.</param>
/// <param name="options">
/// Optional client options that define the transport pipeline policies for authentication, retries, etc., that are applied to every request.
/// </param>
public TableServiceClient(Uri endpoint, TableSharedKeyCredential credential, TablesClientOptions options)
// Wraps the shared key in a signing pipeline policy; no SAS credential.
: this(endpoint, new TableSharedKeyPipelinePolicy(credential), default, options)
{
Argument.AssertNotNull(credential, nameof(credential));
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/> using the specified connection string.
/// </summary>
/// <param name="connectionString">
/// A connection string includes the authentication information
/// required for your application to access data in an Azure Storage
/// account at runtime.
///
/// For more information,
/// <see href="https://docs.microsoft.com/azure/storage/common/storage-configure-connection-string">
/// Configure Azure Storage connection strings</see>.
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline policies for authentication, retries, etc., that are applied to every request.
/// </param>
public TableServiceClient(string connectionString, TablesClientOptions options = null)
{
Argument.AssertNotNull(connectionString, nameof(connectionString));
TableConnectionString connString = TableConnectionString.Parse(connectionString);
_accountName = connString._accountName;
options ??= new TablesClientOptions();
var endpointString = connString.TableStorageUri.PrimaryUri.AbsoluteUri;
var secondaryEndpoint = connString.TableStorageUri.SecondaryUri?.AbsoluteUri;
// Cosmos endpoints need the PATCH transform applied on every call.
_isCosmosEndpoint = TableServiceClient.IsPremiumEndpoint(connString.TableStorageUri.PrimaryUri);
var perCallPolicies = _isCosmosEndpoint ? new[] { new CosmosPatchTransformPolicy() } : Array.Empty<HttpPipelinePolicy>();
// Only shared-key credentials produce a signing policy here.
// NOTE(review): for other credential types `policy` stays null and a
// null entry is passed in perRetryPolicies — confirm the pipeline
// builder tolerates that.
TableSharedKeyPipelinePolicy policy = connString.Credentials switch
{
TableSharedKeyCredential credential => new TableSharedKeyPipelinePolicy(credential),
_ => default
};
_pipeline = HttpPipelineBuilder.Build(
options,
perCallPolicies: perCallPolicies,
perRetryPolicies: new[] { policy },
new ResponseClassifier());
_version = options.VersionString;
_diagnostics = new TablesClientDiagnostics(options);
// Service-level operations are wired for both primary and secondary endpoints.
_tableOperations = new TableRestClient(_diagnostics, _pipeline, endpointString, _version);
_serviceOperations = new ServiceRestClient(_diagnostics, _pipeline, endpointString, _version);
_secondaryServiceOperations = new ServiceRestClient(_diagnostics, _pipeline, secondaryEndpoint, _version);
}
// Core constructor all the public Uri-based overloads funnel into.
// Exactly one of `policy` (shared-key signing) or `sasCredential` is
// expected; the SAS credential wins when both are supplied.
internal TableServiceClient(Uri endpoint, TableSharedKeyPipelinePolicy policy, AzureSasCredential sasCredential, TablesClientOptions options)
{
Argument.AssertNotNull(endpoint, nameof(endpoint));
_endpoint = endpoint;
options ??= new TablesClientOptions();
// Cosmos endpoints need the PATCH transform applied on every call.
_isCosmosEndpoint = IsPremiumEndpoint(endpoint);
var perCallPolicies = _isCosmosEndpoint ? new[] { new CosmosPatchTransformPolicy() } : Array.Empty<HttpPipelinePolicy>();
var endpointString = endpoint.AbsoluteUri;
string secondaryEndpoint = TableConnectionString.GetSecondaryUriFromPrimary(endpoint)?.AbsoluteUri;
HttpPipelinePolicy authPolicy = sasCredential switch
{
null => policy,
_ => new AzureSasCredentialSynchronousPolicy(sasCredential)
};
_pipeline = HttpPipelineBuilder.Build(
options,
perCallPolicies: perCallPolicies,
perRetryPolicies: new[] { authPolicy },
new ResponseClassifier());
_version = options.VersionString;
_diagnostics = new TablesClientDiagnostics(options);
// Service-level operations are wired for both primary and secondary endpoints.
_tableOperations = new TableRestClient(_diagnostics, _pipeline, endpointString, _version);
_serviceOperations = new ServiceRestClient(_diagnostics, _pipeline, endpointString, _version);
_secondaryServiceOperations = new ServiceRestClient(_diagnostics, _pipeline, secondaryEndpoint, _version);
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/>
/// class for mocking.
/// </summary>
internal TableServiceClient(TableRestClient internalClient)
{
    // Only the table operations client is populated; every other member stays at its
    // default value, which is sufficient for test doubles that stub the REST layer.
    _tableOperations = internalClient;
}
/// <summary>
/// Initializes a new instance of the <see cref="TableServiceClient"/>
/// class for mocking.
/// </summary>
protected TableServiceClient()
{ } // Intentionally empty: mocking frameworks need a parameterless constructor to derive from.
/// <summary>
/// Gets a <see cref="TableAccountSasBuilder"/> instance scoped to the current account.
/// </summary>
/// <param name="permissions"><see cref="TableAccountSasPermissions"/> containing the allowed permissions.</param>
/// <param name="resourceTypes"><see cref="TableAccountSasResourceTypes"/> containing the accessible resource types.</param>
/// <param name="expiresOn">The time at which the shared access signature becomes invalid.</param>
/// <returns>An instance of <see cref="TableAccountSasBuilder"/>.</returns>
public virtual TableAccountSasBuilder GetSasBuilder(
    TableAccountSasPermissions permissions,
    TableAccountSasResourceTypes resourceTypes,
    DateTimeOffset expiresOn)
{
    // Stamp the builder with this client's service version so generated SAS tokens
    // match the REST version the client sends.
    return new TableAccountSasBuilder(permissions, resourceTypes, expiresOn) { Version = _version };
}
/// <summary>
/// Gets a <see cref="TableAccountSasBuilder"/> instance scoped to the current account.
/// </summary>
/// <param name="rawPermissions">The permissions associated with the shared access signature. This string should contain one or more of the following permission characters in this order: "racwdl".</param>
/// <param name="resourceTypes"><see cref="TableAccountSasResourceTypes"/> containing the accessible resource types.</param>
/// <param name="expiresOn">The time at which the shared access signature becomes invalid.</param>
/// <returns>An instance of <see cref="TableAccountSasBuilder"/>.</returns>
public virtual TableAccountSasBuilder GetSasBuilder(string rawPermissions, TableAccountSasResourceTypes resourceTypes, DateTimeOffset expiresOn)
{
    // Stamp the builder with this client's service version so generated SAS tokens
    // match the REST version the client sends.
    return new TableAccountSasBuilder(rawPermissions, resourceTypes, expiresOn) { Version = _version };
}
/// <summary>
/// Gets an instance of a <see cref="TableClient"/> configured with the current <see cref="TableServiceClient"/> options, affinitized to the specified <paramref name="tableName"/>.
/// </summary>
/// <param name="tableName">The name of the table the returned client will operate on.</param>
/// <returns>A <see cref="TableClient"/> that shares this client's pipeline, diagnostics and service version.</returns>
public virtual TableClient GetTableClient(string tableName)
{
    Argument.AssertNotNull(tableName, nameof(tableName));

    // The new client reuses this client's pipeline and configuration, so no extra
    // network or authentication setup is performed here.
    return new TableClient(tableName, _tableOperations, _version, _diagnostics, _isCosmosEndpoint, _endpoint, _pipeline);
}
/// <summary>
/// Enumerates the tables in the storage account asynchronously.
/// </summary>
/// <param name="filter">
/// An OData filter restricting the tables returned.
/// For example, <c>"TableName eq 'foo'"</c> matches only a table named 'foo'.
/// </param>
/// <param name="maxPerPage">
/// The maximum number of tables returned in a single page.
/// Note: This value does not limit the total number of results if the result is fully enumerated.
/// </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>An <see cref="AsyncPageable{T}"/> containing a collection of <see cref="TableItem"/>s.</returns>
public virtual AsyncPageable<TableItem> QueryAsync(string filter = null, int? maxPerPage = null, CancellationToken cancellationToken = default)
{
    // Per-request query options; Top carries the page-size hint.
    QueryOptions BuildOptions(int? pageSizeHint) =>
        new QueryOptions() { Filter = filter, Select = null, Top = pageSizeHint, Format = _format };

    return PageableHelpers.CreateAsyncEnumerable(
        // First page: no continuation token yet.
        async pageSizeHint =>
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
            scope.Start();
            try
            {
                var result = await _tableOperations.QueryAsync(
                    null,
                    BuildOptions(pageSizeHint),
                    cancellationToken).ConfigureAwait(false);
                return Page.FromValues(result.Value.Value, result.Headers.XMsContinuationNextTableName, result.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        },
        // Subsequent pages: the continuation token is the next table name.
        async (continuationToken, pageSizeHint) =>
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
            scope.Start();
            try
            {
                var result = await _tableOperations.QueryAsync(
                    nextTableName: continuationToken,
                    BuildOptions(pageSizeHint),
                    cancellationToken).ConfigureAwait(false);
                return Page.FromValues(result.Value.Value, result.Headers.XMsContinuationNextTableName, result.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        },
        maxPerPage);
}
/// <summary>
/// Enumerates the tables in the storage account.
/// </summary>
/// <param name="filter">
/// An OData filter restricting the tables returned.
/// For example, <c>"TableName eq 'foo'"</c> matches only a table named 'foo'.
/// </param>
/// <param name="maxPerPage">
/// The maximum number of tables returned in a single page.
/// Note: This value does not limit the total number of results if the result is fully enumerated.
/// </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="Pageable{T}"/> containing a collection of <see cref="TableItem"/>s.</returns>
public virtual Pageable<TableItem> Query(string filter = null, int? maxPerPage = null, CancellationToken cancellationToken = default)
{
    // Per-request query options; Top carries the page-size hint.
    QueryOptions BuildOptions(int? pageSizeHint) =>
        new QueryOptions() { Filter = filter, Select = null, Top = pageSizeHint, Format = _format };

    return PageableHelpers.CreateEnumerable(
        // First page: no continuation token yet.
        pageSizeHint =>
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
            scope.Start();
            try
            {
                var result = _tableOperations.Query(
                    null,
                    BuildOptions(pageSizeHint),
                    cancellationToken);
                return Page.FromValues(result.Value.Value, result.Headers.XMsContinuationNextTableName, result.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        },
        // Subsequent pages: the continuation token is the next table name.
        (continuationToken, pageSizeHint) =>
        {
            using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
            scope.Start();
            try
            {
                var result = _tableOperations.Query(
                    continuationToken,
                    BuildOptions(pageSizeHint),
                    cancellationToken);
                return Page.FromValues(result.Value.Value, result.Headers.XMsContinuationNextTableName, result.GetRawResponse());
            }
            catch (Exception ex)
            {
                scope.Failed(ex);
                throw;
            }
        },
        maxPerPage);
}
/// <summary>
/// Gets a list of tables from the storage account.
/// </summary>
/// <param name="filter">
/// Returns only tables that satisfy the specified filter expression.
/// For example, the following would filter tables with a Name of 'foo': <c>"TableName eq {someStringVariable}"</c>.
/// The filter string will be properly quoted and escaped.
/// </param>
/// <param name="maxPerPage">
/// The maximum number of entities that will be returned per page.
/// Note: This value does not limit the total number of results if the result is fully enumerated.
/// </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>An <see cref="AsyncPageable{T}"/> containing a collection of <see cref="TableItem"/>s.</returns>
/// <exception cref="RequestFailedException">The server returned an error. See <see cref="Exception.Message"/> for details returned from the server.</exception>
public virtual AsyncPageable<TableItem> QueryAsync(FormattableString filter, int? maxPerPage = null, CancellationToken cancellationToken = default)
{
    // FIX: the diagnostic scope was previously attributed to TableClient; this is a
    // TableServiceClient operation, matching the other Query overloads in this class.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
    scope.Start();
    try
    {
        // Turn the interpolated string into a quoted/escaped OData filter, then
        // delegate to the string-based overload.
        return QueryAsync(TableOdataFilter.Create(filter), maxPerPage, cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Gets a list of tables from the storage account.
/// </summary>
/// <param name="filter">
/// Returns only tables that satisfy the specified filter expression.
/// For example, the following would filter tables with a Name of 'foo': <c>"TableName eq {someStringVariable}"</c>.
/// The filter string will be properly quoted and escaped.
/// </param>
/// <param name="maxPerPage">
/// The maximum number of entities that will be returned per page.
/// Note: This value does not limit the total number of results if the result is fully enumerated.
/// </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>An <see cref="Pageable{T}"/> containing a collection of <see cref="TableItem"/>.</returns>
/// <exception cref="RequestFailedException">The server returned an error. See <see cref="Exception.Message"/> for details returned from the server.</exception>
public virtual Pageable<TableItem> Query(FormattableString filter, int? maxPerPage = null, CancellationToken cancellationToken = default)
{
    // FIX: the diagnostic scope was previously attributed to TableClient; this is a
    // TableServiceClient operation, matching the other Query overloads in this class.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
    scope.Start();
    try
    {
        // Turn the interpolated string into a quoted/escaped OData filter, then
        // delegate to the string-based overload.
        return Query(TableOdataFilter.Create(filter), maxPerPage, cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Gets a list of tables from the storage account.
/// </summary>
/// <param name="filter">
/// Returns only tables that satisfy the specified filter expression.
/// For example, the following expression would filter tables with a Name of 'foo': <c>e => e.Name == "foo"</c>.
/// </param>
/// <param name="maxPerPage">
/// The maximum number of entities that will be returned per page.
/// Note: This value does not limit the total number of results if the result is fully enumerated.
/// </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>An <see cref="AsyncPageable{T}"/> containing a collection of <see cref="TableItem"/>s.</returns>
/// <exception cref="RequestFailedException">The server returned an error. See <see cref="Exception.Message"/> for details returned from the server.</exception>
public virtual AsyncPageable<TableItem> QueryAsync(
    Expression<Func<TableItem, bool>> filter,
    int? maxPerPage = null,
    CancellationToken cancellationToken = default)
{
    // FIX: the diagnostic scope was previously attributed to TableClient; this is a
    // TableServiceClient operation, matching the other Query overloads in this class.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
    scope.Start();
    try
    {
        // Translate the LINQ expression into an OData filter string, then delegate
        // to the string-based overload.
        return QueryAsync(TableClient.Bind(filter), maxPerPage, cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Gets a list of tables from the storage account.
/// </summary>
/// <param name="filter">
/// Returns only tables that satisfy the specified filter expression.
/// For example, the following expression would filter tables with a Name of 'foo': <c>e => e.Name == "foo"</c>.
/// </param>
/// <param name="maxPerPage">
/// The maximum number of entities that will be returned per page.
/// Note: This value does not limit the total number of results if the result is fully enumerated.
/// </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>An <see cref="Pageable{T}"/> containing a collection of <see cref="TableItem"/>.</returns>
/// <exception cref="RequestFailedException">The server returned an error. See <see cref="Exception.Message"/> for details returned from the server.</exception>
public virtual Pageable<TableItem> Query(Expression<Func<TableItem, bool>> filter, int? maxPerPage = null, CancellationToken cancellationToken = default)
{
    // FIX: the diagnostic scope was previously attributed to TableClient; this is a
    // TableServiceClient operation, matching the other Query overloads in this class.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(Query)}");
    scope.Start();
    try
    {
        // Translate the LINQ expression into an OData filter string, then delegate
        // to the string-based overload.
        return Query(TableClient.Bind(filter), maxPerPage, cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a new table in this storage account.
/// </summary>
/// <param name="tableName">The name of table to create.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="Response{TableItem}"/> containing properties of the table.</returns>
public virtual Response<TableItem> CreateTable(string tableName, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(tableName, nameof(tableName));
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTable)}");
    scope.Start();
    try
    {
        var properties = new TableProperties() { TableName = tableName };
        var created = _tableOperations.Create(
            properties,
            null,
            queryOptions: _defaultQueryOptions,
            cancellationToken: cancellationToken);
        // Surface the created table's metadata together with the raw HTTP response.
        return Response.FromValue(created.Value as TableItem, created.GetRawResponse());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a new table in this storage account.
/// </summary>
/// <param name="tableName">The name of table to create.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="Response{TableItem}"/> containing properties of the table.</returns>
public virtual async Task<Response<TableItem>> CreateTableAsync(string tableName, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(tableName, nameof(tableName));
    // Scope deliberately uses the sync method name so sync/async share one diagnostic name.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTable)}");
    scope.Start();
    try
    {
        var properties = new TableProperties() { TableName = tableName };
        var created = await _tableOperations.CreateAsync(
            properties,
            null,
            queryOptions: _defaultQueryOptions,
            cancellationToken: cancellationToken).ConfigureAwait(false);
        // Surface the created table's metadata together with the raw HTTP response.
        return Response.FromValue(created.Value as TableItem, created.GetRawResponse());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a new table in this storage account, unless a table with the same name already exists.
/// </summary>
/// <param name="tableName">The name of the table to create.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>If the table does not already exist, a <see cref="Response{TableItem}"/>. If the table already exists, <c>null</c>.</returns>
public virtual Response<TableItem> CreateTableIfNotExists(string tableName, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(tableName, nameof(tableName));
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTableIfNotExists)}");
    scope.Start();
    try
    {
        var properties = new TableProperties() { TableName = tableName };
        var created = _tableOperations.Create(
            properties,
            null,
            queryOptions: _defaultQueryOptions,
            cancellationToken: cancellationToken);
        return Response.FromValue(created.Value as TableItem, created.GetRawResponse());
    }
    catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.Conflict)
    {
        // A 409 means the table already exists; that is the expected "no-op" outcome.
        return default;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Creates a new table in this storage account, unless a table with the same name already exists.
/// </summary>
/// <param name="tableName">The name of the table to create.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>If the table does not already exist, a <see cref="Response{TableItem}"/>. If the table already exists, <c>null</c>.</returns>
public virtual async Task<Response<TableItem>> CreateTableIfNotExistsAsync(string tableName, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(tableName, nameof(tableName));
    // Scope deliberately uses the sync method name so sync/async share one diagnostic name.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(CreateTableIfNotExists)}");
    scope.Start();
    try
    {
        var properties = new TableProperties() { TableName = tableName };
        var created = await _tableOperations.CreateAsync(
            properties,
            null,
            queryOptions: _defaultQueryOptions,
            cancellationToken: cancellationToken).ConfigureAwait(false);
        return Response.FromValue(created.Value as TableItem, created.GetRawResponse());
    }
    catch (RequestFailedException ex) when (ex.Status == (int)HttpStatusCode.Conflict)
    {
        // A 409 means the table already exists; that is the expected "no-op" outcome.
        return default;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Deletes a table from this storage account.
/// </summary>
/// <param name="tableName">The name of the table to delete.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The <see cref="Response"/> indicating the result of the operation.</returns>
public virtual Response DeleteTable(string tableName, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(tableName, nameof(tableName));
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(DeleteTable)}");
    scope.Start();
    try
    {
        using var message = _tableOperations.CreateDeleteRequest(tableName);
        _pipeline.Send(message, cancellationToken);

        // 204 = deleted; 404 = table was already absent. Both are surfaced as success
        // so the operation is effectively idempotent; anything else is an error.
        int status = message.Response.Status;
        if (status == 204 || status == 404)
        {
            return message.Response;
        }
        throw _diagnostics.CreateRequestFailedException(message.Response);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Deletes a table from this storage account.
/// </summary>
/// <param name="tableName">The name of the table to delete.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The <see cref="Response"/> indicating the result of the operation.</returns>
public virtual async Task<Response> DeleteTableAsync(string tableName, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(tableName, nameof(tableName));
    // Scope deliberately uses the sync method name so sync/async share one diagnostic name.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(DeleteTable)}");
    scope.Start();
    try
    {
        using var message = _tableOperations.CreateDeleteRequest(tableName);
        await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);

        // 204 = deleted; 404 = table was already absent. Both are surfaced as success
        // so the operation is effectively idempotent; anything else is an error.
        int status = message.Response.Status;
        if (status == 204 || status == 404)
        {
            return message.Response;
        }
        throw _diagnostics.CreateRequestFailedException(message.Response);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary> Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
/// <param name="properties"> The Table Service properties. </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The <see cref="Response"/> indicating the result of the operation.</returns>
public virtual Response SetProperties(TableServiceProperties properties, CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(SetProperties)}");
    scope.Start();
    try
    {
        // Delegates directly to the service-level REST client.
        var result = _serviceOperations.SetProperties(properties, cancellationToken: cancellationToken);
        return result;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary> Sets properties for an account's Table service endpoint, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
/// <param name="properties"> The Table Service properties. </param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The <see cref="Response"/> indicating the result of the operation.</returns>
public virtual async Task<Response> SetPropertiesAsync(TableServiceProperties properties, CancellationToken cancellationToken = default)
{
    // Scope deliberately uses the sync method name so sync/async share one diagnostic name.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(SetProperties)}");
    scope.Start();
    try
    {
        // Delegates directly to the service-level REST client.
        var result = await _serviceOperations.SetPropertiesAsync(properties, cancellationToken: cancellationToken).ConfigureAwait(false);
        return result;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary> Gets the properties of an account's Table service, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The <see cref="Response{TableServiceProperties}"/> indicating the result of the operation.</returns>
public virtual Response<TableServiceProperties> GetProperties(CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetProperties)}");
    scope.Start();
    try
    {
        // Re-wrap the REST result so callers receive the typed value plus raw response.
        var result = _serviceOperations.GetProperties(cancellationToken: cancellationToken);
        return Response.FromValue(result.Value, result.GetRawResponse());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary> Gets the properties of an account's Table service, including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. </summary>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The <see cref="Response{TableServiceProperties}"/> indicating the result of the operation.</returns>
public virtual async Task<Response<TableServiceProperties>> GetPropertiesAsync(CancellationToken cancellationToken = default)
{
    // Scope deliberately uses the sync method name so sync/async share one diagnostic name.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetProperties)}");
    scope.Start();
    try
    {
        // Re-wrap the REST result so callers receive the typed value plus raw response.
        var result = await _serviceOperations.GetPropertiesAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
        return Response.FromValue(result.Value, result.GetRawResponse());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary> Retrieves statistics related to replication for the Table service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the account. </summary>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
public virtual async Task<Response<TableServiceStatistics>> GetStatisticsAsync(CancellationToken cancellationToken = default)
{
    // Scope deliberately uses the sync method name so sync/async share one diagnostic name.
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetStatistics)}");
    scope.Start();
    try
    {
        // Statistics are only served by the secondary (RA-GRS) endpoint's REST client.
        var result = await _secondaryServiceOperations.GetStatisticsAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
        return Response.FromValue(result.Value, result.GetRawResponse());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary> Retrieves statistics related to replication for the Table service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the account. </summary>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
public virtual Response<TableServiceStatistics> GetStatistics(CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetStatistics)}");
    scope.Start();
    try
    {
        // Statistics are only served by the secondary (RA-GRS) endpoint's REST client.
        var result = _secondaryServiceOperations.GetStatistics(cancellationToken: cancellationToken);
        return Response.FromValue(result.Value, result.GetRawResponse());
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
// Heuristic for detecting Cosmos DB (premium) Table endpoints, which need different
// request handling than standard Storage endpoints.
internal static bool IsPremiumEndpoint(Uri endpoint)
{
    string host = endpoint.Host;

    // Localhost on any port other than 10002 is treated as the Cosmos emulator
    // (10002 is the Azurite/Storage-emulator Tables port).
    bool isLocalCosmosEmulator =
        host.Equals("localhost", StringComparison.OrdinalIgnoreCase) && endpoint.Port != 10002;

    // Otherwise, match the current or legacy Cosmos table domain suffix in the host.
    bool hasCosmosDomain =
        host.IndexOf(TableConstants.CosmosTableDomain, StringComparison.OrdinalIgnoreCase) >= 0 ||
        host.IndexOf(TableConstants.LegacyCosmosTableDomain, StringComparison.OrdinalIgnoreCase) >= 0;

    return isLocalCosmosEmulator || hasCosmosDomain;
}
/// <summary>
/// Creates an OData filter query string from the provided expression.
/// For example, <c>e => e.Name == "foo"</c> produces <c>TableName eq 'foo'</c>-style OData text.
/// </summary>
/// <param name="filter">A LINQ filter expression over <see cref="TableItem"/>.</param>
/// <returns>The string representation of the filter expression.</returns>
public static string CreateQueryFilter(Expression<Func<TableItem, bool>> filter) => TableClient.Bind(filter);
/// <summary>
/// Create an OData filter expression from an interpolated string. The interpolated values will be quoted and escaped as necessary.
/// </summary>
/// <param name="filter">An interpolated filter string, e.g. <c>$"TableName eq {name}"</c>.</param>
/// <returns>A valid OData filter expression.</returns>
public static string CreateQueryFilter(FormattableString filter) => TableOdataFilter.Create(filter);
}
}
| |
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using System.Text;
using System.Text.RegularExpressions;
//using websuatreports.reports;
using DataAccess;
using Ext.Net;
public partial class MasterPage : BaseMaster
{
public bool IsRecordOpen;
public bool patConsents;
public bool HasPatientTransfers = false;
public string strInitialVisitID;
public string strMenuItems;
public string strToolbarItems;
public string strMilisecondsSessionExpire;
public string strSessionTimeout;
//common objects
protected CUser usr = new CUser();
protected CDataUtils utils = new CDataUtils();
protected CTreatment treatment = new CTreatment();
protected void Page_Load(object sender, EventArgs e)
{
if (!this.IsLoggedIn())
{
Response.Redirect("Default.aspx", true);
}
//set session time remaining
strMilisecondsSessionExpire = SessionTimeRemaining();
strSessionTimeout = SessionTimeRemaining();
// register Ext.NET library icons
ResourceManager1.RegisterIcon(Icon.Information);
//pass the master to the popup
ucPatLookup.BaseMstr = this;
ucLogin.BaseMstr = this;
ucVerticalMenu.BaseMstr = this;
ucEncounterType.BaseMstr = this;
if (!this.IsLoggedIn())
{
//put us back in login mode
ucLogin.SetMode(1);
this.ClosePatient();
}
//------------------------------------------------------------------------------
// Get "Last updated" info
string strLastUpdated = this.GetVSStringValue("LAST_UPDATED");
if (!String.IsNullOrEmpty(strLastUpdated))
{
divLastModified.Visible = true;
divLastModified.InnerText = strLastUpdated;
}
//remove PATIENTNAME session variable if
//not logged in and patient_id is empty
if (!this.IsLoggedIn() || String.IsNullOrEmpty(this.SelectedPatientID))
{
Session["PATIENTNAME"] = null;
}
//------------------------------------------------------------------------------
// NOT POSTBACK
//------------------------------------------------------------------------------
#region not_postback
if (!IsPostBack)
{
//get system settings
DataSet dsSys = new DataSet();
if(Session["SYSSETTINGS"] == null)
{
CSystemSettings sys = new CSystemSettings();
Session["SYSSETTINGS"] = sys.GetSystemSettingsDS(this);
}
dsSys = (DataSet)Session["SYSSETTINGS"];
//get site
if(Session["SiteID"] == null)
{
CSystemSettings SysSettings = new CSystemSettings();
DataSet dsSite = SysSettings.GetSiteDS(this);
string strSiteID = utils.GetStringValueFromDS(dsSite, "SITE_ID");
Session["SiteID"] = strSiteID;
}
}
#endregion
//------------------------------------------------------------------------------
// IS POSTBACK
//------------------------------------------------------------------------------
#region isPostBack
if (IsPostBack)
{
//get the postback control
string strPostBackControl = Request.Params["__EVENTTARGET"];
if (strPostBackControl != null)
{
#region PatientLookup
//did we do a patient lookup?
if (strPostBackControl.Equals("PATIENT_LOOKUP"))
{
//Clears previously looked up patient id, treatment id
this.SelectedPatientID = "";
this.SelectedTreatmentID = -1;
this.SelectedEncounterID = "";
this.SelectedProblemID = -1;
this.ClosePatient();
//get the patient id
string[] strArg = Request.Form["__EVENTARGUMENT"].Split('|');
//pass the patient id to the base, this will cache
//it in the db fx_session_value table
this.SelectedPatientID = strArg[0];
//check if it is an event lookup
if (strArg.Length > 1)
{
if (strArg[1].ToLower().Equals("event"))
{
Session["EVENT_LOOKUP"] = true;
}
}
//set the current treatment id, gets list of records with newest first
DataSet t_recs = treatment.GetRecordList(this,
this.SelectedPatientID,
2); //OPEN CASES - Revamp only uses this
if (t_recs != null && t_recs.Tables[0].Rows.Count > 0)
{
this.SelectedTreatmentID = Convert.ToInt32(t_recs.Tables[0].Rows[0]["treatment_id"].ToString());
}
//------------------------------------------------------------------------------
//GET INITIAL VISIT ID
//GET PATIENT NAME FOR THE DEMOGRAPHICS BLURB
if (this.IsLoggedIn() && !String.IsNullOrEmpty(this.SelectedPatientID))//must be logged in too...
{
if (Session["InitialVisit"] == null)
{
CEncounter patInitVisit = new CEncounter();
CDataUtils dUtils = new CDataUtils();
DataSet dsInitVisit = patInitVisit.GetInitialVisitDS(this, this.SelectedPatientID, this.SelectedTreatmentID);
Session["InitialVisit"] = dUtils.GetStringValueFromDS(dsInitVisit, "encounter_id");
}
if (Session["PATIENTNAME"] == null)
{
CPatient cpat = new CPatient();
Session["PATIENTNAME"] = cpat.GetPatientName(this);
}
//GET SELECTED PATIENT'S DEMOGRAPHICS
CPatient pat = new CPatient();
CDataUtils utils = new CDataUtils();
DataSet clientDemographics = new DataSet();
Session["PAT_DEMOGRAPHICS_DS"] = pat.GetPatientDemographicsDS(this);
clientDemographics = (DataSet)Session["PAT_DEMOGRAPHICS_DS"];
foreach (DataTable patTable in clientDemographics.Tables)
{
foreach (DataRow patRow in patTable.Rows)
{
this.APPMaster.PatientHasOpenCase = false;
if (!patRow.IsNull("OPENCASE_COUNT"))
{
if (Convert.ToInt32(patRow["OPENCASE_COUNT"]) > 0)
{
this.APPMaster.PatientHasOpenCase = true;
}
}
}
}
}
//--------------------------------------------------------------------------------------
//VERIFY STATUS OF THE PATIENT RECORD
CPatientLock plock = new CPatientLock(this);
string strLockProviderName = String.Empty;
string strLockProviderEmail = String.Empty;
this.IsPatientLocked = plock.IsPatientLocked(this.SelectedPatientID, out strLockProviderName, out strLockProviderEmail);
Session["PAT_LOCK_PROVIDER"] = strLockProviderName;
Session["PAT_LOCK_EMAIL"] = strLockProviderEmail;
//REDIRECT USER ------------------------------------------------------------------------
long lSoapNoteUR = (long)SUATUserRight.NoteSubjectiveUR
+ (long)SUATUserRight.NoteObjectiveUR
+ (long)SUATUserRight.NoteAssessmentUR
+ (long)SUATUserRight.NotePlanUR;
if (this.APPMaster.HasUserRight(lSoapNoteUR))
{
Response.Redirect("pat_summary.aspx", true);
}
else if (this.APPMaster.HasUserRight((long)SUATUserRight.ProcessNewPatientsUR))
{
Response.Redirect("pat_demographics.aspx", false);
}
else
{
Response.Redirect("revamp.aspx", true);
}
}
#endregion
#region OtherLookups
//did we do a user lookup?
if (strPostBackControl.Equals("USER_LOOKUP"))
{
//get the uidpwd
string strArg = Request.Form["__EVENTARGUMENT"];
//pass the patient id to the base, this will cache
//it in the db fx_session_value table
this.SelectedProviderID = strArg;
Response.Redirect("user_admin.aspx", true);
}
//did we do a Portal Patient Lookup?
if (strPostBackControl.Equals("PORTAL_PATIENT_LOOKUP"))
{
//get the uidpwd
string strArg = Request.Form["__EVENTARGUMENT"];
//pass the patient id to the base, this will cache
//it in the db fx_session_value table
this.SelectedPatientID = strArg;
Response.Redirect("pat_portal_account.aspx", true);
}
//-- 2/22/2011 close currently looked up patient
if (strPostBackControl.Equals("CLOSE_PATIENT"))
{
this.SelectedPatientID = "";
this.SelectedTreatmentID = -1;
this.LookupSearchCase = -1;
Response.Redirect("revamp.aspx", true);
}
#endregion
}
}
#endregion
#region ShowUsernameAndPatientDemographics
if (this.IsLoggedIn())
{
//Name Of User Currently logged on.
string strUserLoggedOn = String.Empty;
if (Session["USERLOGGEDON"] == null)
{
strUserLoggedOn += "<img alt=\"Account Activity\" src=\"Images/information.png\" style=\"cursor: pointer; vertical-align: middle; margin-right: 3px;\" onclick=\"showAccDetails();\" />";
strUserLoggedOn += UserLoggedOn();
strUserLoggedOn += " - ";
strUserLoggedOn += DateTime.Now.ToShortDateString();
strUserLoggedOn += " ";
strUserLoggedOn += DateTime.Now.ToShortTimeString();
lblUserLoggedOn.Text = strUserLoggedOn;
Session["USERLOGGEDON"] = strUserLoggedOn;
}
else
{
lblUserLoggedOn.Text = Session["USERLOGGEDON"].ToString();
}
//draw the patient info bar at the top
patDemoInfoBar.InnerHtml = "";
//GET PATIENT NAME for the demographics blurb
//Render Left Vertical Menu for selected patient
if (this.IsLoggedIn() && !String.IsNullOrEmpty(this.SelectedPatientID))//must be logged in too...
{
if (Session["PATIENTNAME"] == null)
{
CPatient cpat = new CPatient();
Session["PATIENTNAME"] = cpat.GetPatientName(this);
}
string[] strPatInfo = (string[])Session["PATIENTNAME"];
patDemoInfoBar.InnerHtml = strPatInfo[0];
//render vertical menu
ucVerticalMenu.RenderVerticalMenu();
}
else
{
this.ClosePatient();
}
}
#endregion
//load the patient treatment/encounter info
//if we are in group note, hide it...
bool bSkipSelectedPatientCheck = false;
string strPage = "";
strPage = this.GetPageName().ToLower();
if (strPage.IndexOf("pat_portal_account.aspx") > -1)
{
pnlTxTree.Visible = false;
bSkipSelectedPatientCheck = true;
}
else if (strPage.IndexOf("cms_menu_edit.aspx") > -1)
{
pnlTxTree.Visible = false;
bSkipSelectedPatientCheck = true;
if (IsPostBack) {
if (this.OnMasterSAVE()) {
BuildMenu();
}
}
}
else if (strPage.IndexOf("cms_page_edit.aspx") > -1)
{
pnlTxTree.Visible = false;
bSkipSelectedPatientCheck = true;
}
else
{
pnlTxTree.Visible = true;
patDemoInfoBar.Visible = true;
bSkipSelectedPatientCheck = false;
}
// ----------------
if (!bSkipSelectedPatientCheck)
{
if (!String.IsNullOrEmpty(this.SelectedPatientID))
{
pnlTxTree.Visible = true;
pnlDemoInfoBar.Visible = true;
}
else
{
pnlTxTree.Visible = false;
pnlDemoInfoBar.Visible = false;
}
}
if (!this.IsLoggedIn())
{
pnlLogoff.Visible = false;
}
else
{
pnlLogoff.Visible = true;
}
//get account activity details
GetAccountDetails();
// Build the Menu & Toolbar HTML string
BuildMenu();
//check if user has new messages
btnEmailNew.Attributes.CssStyle.Add("vertical-align", "middle");
btnEmailNew.Attributes.CssStyle.Add("margin-right", "8px");
btnEmailNew.Attributes.CssStyle.Add("cursor", "pointer");
btnEmailNew.Visible = this.HasNewMessage();
btnEmailNew.Attributes.Add("onclick","winrpt.showReport('messagescenter',['null'],{maximizable:false, width:($(window).width() - 50), height:($(window).height() - 50)});");
}
//Builds the application menu and toolbar HTML fragments for this page.
protected void BuildMenu() {
    //CAppMenu derives both fragments from the current page context.
    CAppMenu appMenu = new CAppMenu(this);
    //The two renders are independent of each other.
    strToolbarItems = appMenu.RenderToolbarHTML();
    strMenuItems = appMenu.RenderMenuHTML();
}
//does the patient consent to treatment?
//The consent flag is cached in session state as the string "1" when granted;
//anything else (missing entry included) means no consent.
public bool PatientConsents()
{
    object consentFlag = Session["PATIENT_CONSENTS"];
    return consentFlag != null && consentFlag.ToString() == "1";
}
//checks if encounter has counselor and patient assessments
//Returns true when at least one PATIENT or COUNSELOR intake row exists for
//the given encounter id in the first table of the dataset; false otherwise.
//Fixes: the encounter id is now escaped before being embedded in the
//DataTable.Select filter (an unescaped apostrophe would throw a syntax
//error or corrupt the filter), and a null dataset no longer throws NRE.
protected bool HasAssessments(DataSet ds, string strEncounterID)
{
    if (ds == null || ds.Tables.Count == 0)
    {
        return false;
    }
    //Double up single quotes per the filter-expression escaping rules.
    string strFilter = "encounter_id = '" + strEncounterID.Replace("'", "''") + "'";
    foreach (DataRow dr in ds.Tables[0].Select(strFilter))
    {
        long lIntakeType = Convert.ToInt64(dr["INTAKE_TYPE"]);
        //Return as soon as either assessment type is seen (same outcome as
        //the original count-then-test logic, without scanning every row).
        if (lIntakeType == (long)IntakeType.PATIENT || lIntakeType == (long)IntakeType.COUNSELOR)
        {
            return true;
        }
    }
    return false;
}
//Resolves the display name of the currently logged-on user. For staff
//(non-patient) users it also caches the graph preference and DMIS id on
//this page / the master page as a side effect of reading the profile row.
//Returns an empty string when no name could be resolved.
//Fixes: removed the unused CDataUtils local and the throwaway DataSet
//allocation that was immediately overwritten.
protected string UserLoggedOn()
{
    string strUserName = "";
    if (this.APPMaster.UserType == (long)(SUATUserType.PATIENT))
    {
        //Patients get their name from the patient profile.
        CPatient pat = new CPatient();
        strUserName = pat.GetPatientUserName(this);
    }
    else
    {
        //Staff users: pull the profile row(s) from the SUAT user table.
        CUserAdmin RevampUser = new CUserAdmin();
        DataSet dsRevampUser = RevampUser.GetSuatUserNameDS(this);
        if (dsRevampUser != null)
        {
            foreach (DataTable table0 in dsRevampUser.Tables)
            {
                foreach (DataRow row in table0.Rows)
                {
                    if (!row.IsNull("NAME"))
                    {
                        strUserName = row["NAME"].ToString();
                    }
                    if (!row.IsNull("GRAPH_PREF"))
                    {
                        //Remember the user's preferred graphing option.
                        this.GraphicOption = Convert.ToInt32(row["GRAPH_PREF"]);
                    }
                    if (!row.IsNull("DIMS_ID"))
                    {
                        this.APPMaster.UserDMISID = row["DIMS_ID"].ToString();
                    }
                }
            }
        }
    }
    return strUserName;
}
//Keep-alive ping from the client. If the server-side session row is gone,
//force a log-off; otherwise refresh the activity timestamp and, when a
//patient is open, renew the patient record lock.
protected void btnKeepAlive_OnClick(object sender, EventArgs e)
{
    string strUserId = "";
    if (!this.GetSessionValue("FX_USER_ID", out strUserId))
    {
        //Session no longer valid on the server side.
        this.LogOff();
        return;
    }
    Session["SESSION_INITIATED"] = DateTime.Now;
    //refresh patient's record lock
    if (!String.IsNullOrEmpty(this.SelectedPatientID))
    {
        CPatientLock plock = new CPatientLock(this);
        plock.RefreshPatientLock(this.SelectedPatientID);
    }
}
//Handler for the OK button on the system-feedback dialog: just dismiss it.
protected void btnFeedbackOK_OnClick(object sender, EventArgs e)
{
winSysFeedback.Hide();
}
//Returns the client-side countdown, in milliseconds, as a string:
//(session timeout minus one minute), so the browser can warn/act one
//minute before the ASP.NET session actually expires.
protected string SessionTimeRemaining()
{
    long lMillisRemaining = (Session.Timeout - 1) * 60 * 1000;
    return Convert.ToString(lMillisRemaining);
}
//Builds the "account activity" HTML blurb (last successful/unsuccessful
//logons and failed-attempt count) once per session and caches it in
//Session["ACC_DETAILS"]; also raises a status warning when the password
//is about to expire.
protected void GetAccountDetails()
{
    #region account_details
    //Nothing to do when logged out or already cached for this session.
    if (!this.IsLoggedIn() || Session["ACC_DETAILS"] != null)
    {
        return;
    }
    CUser user = new CUser();
    DataSet dsUser = user.GetLoginUserDS(this, this.FXUserID);
    if (dsUser != null)
    {
        CDataUtils utils = new CDataUtils();
        //Gather the raw login-history fields from the user dataset.
        DateTime dtLastLogin = utils.GetDSDateTimeValue(dsUser, "date_last_login");
        string strLastLogin = utils.GetDateTimeAsString(dtLastLogin);
        string strLastLoginIP = utils.GetDSStringValue(dsUser, "last_login_ip");
        DateTime dtFLastLogin = utils.GetDSDateTimeValue(dsUser, "last_flogin_date");
        string strFLastLogin = utils.GetDateTimeAsString(dtFLastLogin);
        string strFLastLoginIP = utils.GetDSStringValue(dsUser, "last_flogin_ip");
        long lFAttempts = utils.GetDSLongValue(dsUser, "flogin_attempts");
        //Compose the HTML shown in the account-details popup.
        string strDetails = "Unsuccessful Logon Attempts Since Last Successful Logon: " + Convert.ToString(lFAttempts) + "<br>";
        if (!String.IsNullOrEmpty(strFLastLoginIP))
        {
            strDetails += "Last Unsuccessful Logon:" + " " + strFLastLogin + " - IP Address: " + strFLastLoginIP;
        }
        else
        {
            strDetails += "No Unsuccessful Logons";
        }
        strDetails += "<br>" + "Last Successful Logon:" + " " + strLastLogin + " - IP Address: " + strLastLoginIP;
        Session["ACC_DETAILS"] = strDetails;
    }
    //Warn when the password expires within the next ten days.
    if (this.APPMaster.PasswordExpires > 0 && this.APPMaster.PasswordExpires <= 10)
    {
        this.StatusCode = 1;
        this.StatusComment = "Your account password will expire in " + Convert.ToString(this.APPMaster.PasswordExpires) + " days!";
    }
    #endregion
}
//Returns true when the current user has at least one unread message.
protected bool HasNewMessage() {
    CMessages msg = new CMessages(this);
    DataSet dsUnread = msg.GetUnreadMessagesDS();
    //A null dataset means the lookup yielded nothing — treat as "no new mail".
    return dsUnread != null && dsUnread.Tables[0].Rows.Count > 0;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: Platform independent integer
**
**
===========================================================*/
namespace System {
using System;
using System.Globalization;
using System.Runtime;
using System.Runtime.Serialization;
using System.Runtime.CompilerServices;
using System.Runtime.ConstrainedExecution;
using System.Security;
using System.Diagnostics.Contracts;
[Serializable]
[System.Runtime.InteropServices.ComVisible(true)]
// Platform-sized signed integer, used to represent native pointers and
// handles. The payload is 4 bytes when WIN32 is defined and 8 bytes
// otherwise (see the Size property).
public struct IntPtr : ISerializable
{
// Raw pointer payload; width follows the platform per the #if WIN32 blocks.
[SecurityCritical]
unsafe private void* m_value; // The compiler treats void* closest to uint hence explicit casts are required to preserve int behavior
// Canonical zero value (default-initialized static readonly).
public static readonly IntPtr Zero;
// fast way to compare IntPtr to (IntPtr)0 while IntPtr.Zero doesn't work due to slow statics access
[System.Security.SecuritySafeCritical] // auto-generated
[Pure]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
internal unsafe bool IsNull()
{
return (this.m_value == null);
}
// Constructs from a 32-bit value; the (long) cast sign-extends to 64 bits
// on non-WIN32 builds.
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public unsafe IntPtr(int value)
{
#if WIN32
m_value = (void *)value;
#else
m_value = (void *)(long)value;
#endif
}
// Constructs from a 64-bit value; on WIN32 builds the checked cast throws
// OverflowException when the value does not fit in an int.
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public unsafe IntPtr(long value)
{
#if WIN32
m_value = (void *)checked((int)value);
#else
m_value = (void *)value;
#endif
}
// Constructs directly from an unmanaged pointer; no range check needed.
[System.Security.SecurityCritical]
[CLSCompliant(false)]
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public unsafe IntPtr(void* value)
{
m_value = value;
}
// Deserialization constructor: the value is persisted as an Int64 under
// the key "value" and rejected on 32-bit platforms when out of int range.
[System.Security.SecurityCritical] // auto-generated
private unsafe IntPtr(SerializationInfo info, StreamingContext context) {
long l = info.GetInt64("value");
if (Size==4 && (l>Int32.MaxValue || l<Int32.MinValue)) {
throw new ArgumentException(Environment.GetResourceString("Serialization_InvalidPtrValue"));
}
m_value = (void *)l;
}
#if FEATURE_SERIALIZATION
// Serializes the pointer as a 64-bit integer under the key "value",
// mirroring the deserialization constructor above.
[System.Security.SecurityCritical]
unsafe void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) {
if (info==null) {
throw new ArgumentNullException("info");
}
Contract.EndContractBlock();
#if WIN32
info.AddValue("value", (long)((int)m_value));
#else
info.AddValue("value", (long)(m_value));
#endif
}
#endif
// Value equality: true only for another IntPtr holding the same address.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe override bool Equals(Object obj) {
if (obj is IntPtr) {
return (m_value == ((IntPtr)obj).m_value);
}
return false;
}
// Hash is the low 32 bits of the pointer value (unchecked narrowing).
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe override int GetHashCode() {
return unchecked((int)((long)m_value));
}
// Narrows to Int32; on non-WIN32 builds the checked cast throws
// OverflowException when the value exceeds int range.
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[System.Runtime.Versioning.NonVersionable]
public unsafe int ToInt32() {
#if WIN32
return (int)m_value;
#else
long l = (long)m_value;
return checked((int)l);
#endif
}
// Widens to Int64; always succeeds.
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[System.Runtime.Versioning.NonVersionable]
public unsafe long ToInt64() {
#if WIN32
return (long)(int)m_value;
#else
return (long)m_value;
#endif
}
// Decimal string form, culture-invariant.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe override String ToString() {
#if WIN32
return ((int)m_value).ToString(CultureInfo.InvariantCulture);
#else
return ((long)m_value).ToString(CultureInfo.InvariantCulture);
#endif
}
// Formatted string form (standard numeric format strings), culture-invariant.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe String ToString(String format)
{
Contract.Ensures(Contract.Result<String>() != null);
#if WIN32
return ((int)m_value).ToString(format, CultureInfo.InvariantCulture);
#else
return ((long)m_value).ToString(format, CultureInfo.InvariantCulture);
#endif
}
// The explicit conversion operators below mirror the constructors and the
// ToInt32/ToInt64 accessors.
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static explicit operator IntPtr (int value)
{
return new IntPtr(value);
}
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static explicit operator IntPtr (long value)
{
return new IntPtr(value);
}
[System.Security.SecurityCritical]
[CLSCompliant(false), ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static unsafe explicit operator IntPtr (void* value)
{
return new IntPtr(value);
}
[System.Security.SecuritySafeCritical] // auto-generated
[CLSCompliant(false)]
[System.Runtime.Versioning.NonVersionable]
public static unsafe explicit operator void* (IntPtr value)
{
return value.m_value;
}
// Explicit narrowing to int: checked on non-WIN32 builds, like ToInt32.
[System.Security.SecuritySafeCritical] // auto-generated
[System.Runtime.Versioning.NonVersionable]
public unsafe static explicit operator int (IntPtr value)
{
#if WIN32
return (int)value.m_value;
#else
long l = (long)value.m_value;
return checked((int)l);
#endif
}
// Explicit widening to long: always succeeds, like ToInt64.
[System.Security.SecuritySafeCritical] // auto-generated
[System.Runtime.Versioning.NonVersionable]
public unsafe static explicit operator long (IntPtr value)
{
#if WIN32
return (long)(int)value.m_value;
#else
return (long)value.m_value;
#endif
}
// Equality operators compare the raw addresses directly.
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[System.Runtime.Versioning.NonVersionable]
public unsafe static bool operator == (IntPtr value1, IntPtr value2)
{
return value1.m_value == value2.m_value;
}
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[System.Runtime.Versioning.NonVersionable]
public unsafe static bool operator != (IntPtr value1, IntPtr value2)
{
return value1.m_value != value2.m_value;
}
// Pointer arithmetic: the offset is added to the integer value in the
// default (unchecked) arithmetic context — no checked keyword appears here.
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static IntPtr Add(IntPtr pointer, int offset)
{
return pointer + offset;
}
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static IntPtr operator +(IntPtr pointer, int offset)
{
#if WIN32
return new IntPtr(pointer.ToInt32() + offset);
#else
return new IntPtr(pointer.ToInt64() + offset);
#endif
}
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static IntPtr Subtract(IntPtr pointer, int offset) {
return pointer - offset;
}
[ReliabilityContract(Consistency.MayCorruptInstance, Cer.MayFail)]
[System.Runtime.Versioning.NonVersionable]
public static IntPtr operator -(IntPtr pointer, int offset) {
#if WIN32
return new IntPtr(pointer.ToInt32() - offset);
#else
return new IntPtr(pointer.ToInt64() - offset);
#endif
}
// Size of an IntPtr in bytes: 4 on WIN32 builds, 8 otherwise.
public static int Size
{
[Pure]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[System.Runtime.Versioning.NonVersionable]
get
{
#if WIN32
return 4;
#else
return 8;
#endif
}
}
// Returns the raw unmanaged pointer.
[System.Security.SecuritySafeCritical] // auto-generated
[CLSCompliant(false)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
[System.Runtime.Versioning.NonVersionable]
public unsafe void* ToPointer()
{
return m_value;
}
}
}
| |
// Copyright (c) Umbraco.
// See LICENSE for more details.
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.PropertyEditors;
using Umbraco.Cms.Core.Scoping;
using Umbraco.Cms.Core.Serialization;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Infrastructure.Persistence.Repositories.Implement;
using Umbraco.Cms.Tests.Common.Builders;
using Umbraco.Cms.Tests.Common.Builders.Extensions;
using Umbraco.Cms.Tests.Common.Testing;
using Umbraco.Cms.Tests.Integration.Testing;
using Umbraco.Extensions;
namespace Umbraco.Cms.Tests.Integration.Umbraco.Infrastructure.Services
{
[TestFixture]
[UmbracoTest(
Database = UmbracoTestOptions.Database.NewSchemaPerTest,
PublishedRepositoryEvents = true,
WithApplication = true,
Logger = UmbracoTestOptions.Logger.Console)]
public class ContentServiceTagsTests : UmbracoIntegrationTest
{
// Services resolved lazily from the integration-test DI container.
private IContentTypeService ContentTypeService => GetRequiredService<IContentTypeService>();
private IContentService ContentService => GetRequiredService<IContentService>();
private ITagService TagService => GetRequiredService<ITagService>();
private IDataTypeService DataTypeService => GetRequiredService<IDataTypeService>();
private ILocalizationService LocalizationService => GetRequiredService<ILocalizationService>();
private IFileService FileService => GetRequiredService<IFileService>();
private IJsonSerializer Serializer => GetRequiredService<IJsonSerializer>();
public PropertyEditorCollection PropertyEditorCollection => GetRequiredService<PropertyEditorCollection>();
// Surface repository warnings as test failures while each test runs, then
// restore the default afterwards (ThrowOnWarning is a static flag).
[SetUp]
public void Setup() => ContentRepositoryBase.ThrowOnWarning = true;
[TearDown]
public void Teardown() => ContentRepositoryBase.ThrowOnWarning = false;
[Test]
// Tags assigned without a culture are stored invariantly (null language id)
// and can be read back without a culture.
public void TagsCanBeInvariant()
{
    // Arrange: a content type carrying an invariant tags property.
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);
    IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" });
    ContentService.SaveAndPublish(content1);
    // Act: reload and read the tags back.
    content1 = ContentService.GetById(content1.Id);
    string[] enTags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer).ToArray();
    Assert.AreEqual(4, enTags.Length);
    Assert.Contains("one", enTags);
    Assert.AreEqual(-1, enTags.IndexOf("plus"));
    // Materialize once: the deferred GroupBy query would otherwise re-hit
    // the tag service on every enumeration (Count + FirstOrDefault below).
    var tagGroups = TagService.GetAllTags().GroupBy(x => x.LanguageId).ToList();
    foreach (ITag tag in TagService.GetAllTags())
    {
        Console.WriteLine($"{tag.Group}:{tag.Text} {tag.LanguageId}");
    }
    // Invariant tags live in a single group keyed by a null language id.
    Assert.AreEqual(1, tagGroups.Count);
    IGrouping<int?, ITag> enTagGroup = tagGroups.FirstOrDefault(x => x.Key == null);
    Assert.IsNotNull(enTagGroup);
    Assert.AreEqual(4, enTagGroup.Count());
    Assert.IsTrue(enTagGroup.Any(x => x.Text == "one"));
    Assert.IsFalse(enTagGroup.Any(x => x.Text == "plus"));
}
[Test]
// Tags assigned per culture are stored against that culture's language id
// and do not leak into the other culture's tag set.
// Fixes: removed the unused 'languageService' local; stopped hard-coding
// database language ids (2 / 1) in favor of the ids the test itself
// created or looked up; materialized the GroupBy query once instead of
// re-running it per assertion.
public void TagsCanBeVariant()
{
    ILanguage language = new LanguageBuilder()
        .WithCultureInfo("fr-FR")
        .Build();
    LocalizationService.Save(language); // en-US is already there
    int enId = LocalizationService.GetLanguageIdByIsoCode("en-US").Value;
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType, ContentVariation.Culture);
    ContentTypeService.Save(contentType);
    IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.SetCultureName("name-fr", "fr-FR");
    content1.SetCultureName("name-en", "en-US");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" }, culture: "fr-FR");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" }, culture: "en-US");
    ContentService.SaveAndPublish(content1);
    // Act: reload and read each culture's tags back.
    content1 = ContentService.GetById(content1.Id);
    string[] frTags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "fr-FR").ToArray();
    Assert.AreEqual(5, frTags.Length);
    Assert.Contains("plus", frTags);
    Assert.AreEqual(-1, frTags.IndexOf("one"));
    string[] enTags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "en-US").ToArray();
    Assert.AreEqual(4, enTags.Length);
    Assert.Contains("one", enTags);
    Assert.AreEqual(-1, enTags.IndexOf("plus"));
    var tagGroups = TagService.GetAllTags(culture: "*").GroupBy(x => x.LanguageId).ToList();
    foreach (ITag tag in TagService.GetAllTags())
    {
        Console.WriteLine($"{tag.Group}:{tag.Text} {tag.LanguageId}");
    }
    Assert.AreEqual(2, tagGroups.Count);
    IGrouping<int?, ITag> frTagGroup = tagGroups.FirstOrDefault(x => x.Key == language.Id);
    Assert.IsNotNull(frTagGroup);
    Assert.AreEqual(5, frTagGroup.Count());
    Assert.IsTrue(frTagGroup.Any(x => x.Text == "plus"));
    Assert.IsFalse(frTagGroup.Any(x => x.Text == "one"));
    IGrouping<int?, ITag> enTagGroup = tagGroups.FirstOrDefault(x => x.Key == enId);
    Assert.IsNotNull(enTagGroup);
    Assert.AreEqual(4, enTagGroup.Count());
    Assert.IsTrue(enTagGroup.Any(x => x.Text == "one"));
    Assert.IsFalse(enTagGroup.Any(x => x.Text == "plus"));
}
[Test]
// Verifies the two-phase migration when a tags property becomes
// culture-variant: varying only the content type changes nothing; varying
// the property type itself moves the value and tags from the invariant
// slot to the default culture (en-US).
public void TagsCanBecomeVariant()
{
int enId = LocalizationService.GetLanguageIdByIsoCode("en-US").Value;
Template template = TemplateBuilder.CreateTextPageTemplate();
FileService.SaveTemplate(template);
ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
PropertyType propertyType = CreateAndAddTagsPropertyType(contentType);
ContentTypeService.Save(contentType);
IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" });
ContentService.SaveAndPublish(content1);
// Phase 1: vary the content type only — the property type stays invariant.
contentType.Variations = ContentVariation.Culture;
ContentTypeService.Save(contentType);
// no changes
content1 = ContentService.GetById(content1.Id);
string[] tags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer).ToArray();
Assert.AreEqual(4, tags.Length);
Assert.Contains("one", tags);
Assert.AreEqual(-1, tags.IndexOf("plus"));
IEnumerable<IGrouping<int?, ITag>> tagGroups = TagService.GetAllTags().GroupBy(x => x.LanguageId);
foreach (ITag tag in TagService.GetAllTags())
{
Console.WriteLine($"{tag.Group}:{tag.Text} {tag.LanguageId}");
}
// Tags are still invariant (single group with a null language id).
Assert.AreEqual(1, tagGroups.Count());
IGrouping<int?, ITag> enTagGroup = tagGroups.FirstOrDefault(x => x.Key == null);
Assert.IsNotNull(enTagGroup);
Assert.AreEqual(4, enTagGroup.Count());
Assert.IsTrue(enTagGroup.Any(x => x.Text == "one"));
Assert.IsFalse(enTagGroup.Any(x => x.Text == "plus"));
// Phase 2: vary the property type itself — this migrates value and tags.
propertyType.Variations = ContentVariation.Culture;
ContentTypeService.Save(contentType);
// changes
content1 = ContentService.GetById(content1.Id);
// property value has been moved from invariant to en-US
tags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer).ToArray();
Assert.IsEmpty(tags);
tags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "en-US").ToArray();
Assert.AreEqual(4, tags.Length);
Assert.Contains("one", tags);
Assert.AreEqual(-1, tags.IndexOf("plus"));
// tags have been copied from invariant to en-US
tagGroups = TagService.GetAllTags(culture: "*").GroupBy(x => x.LanguageId);
foreach (ITag tag in TagService.GetAllTags("*"))
{
Console.WriteLine($"{tag.Group}:{tag.Text} {tag.LanguageId}");
}
Assert.AreEqual(1, tagGroups.Count());
enTagGroup = tagGroups.FirstOrDefault(x => x.Key == enId);
Assert.IsNotNull(enTagGroup);
Assert.AreEqual(4, enTagGroup.Count());
Assert.IsTrue(enTagGroup.Any(x => x.Text == "one"));
Assert.IsFalse(enTagGroup.Any(x => x.Text == "plus"));
}
[Test]
// When a culture-variant content type becomes invariant, the en-US
// (default culture) value and tags move to the invariant slot and the
// fr-FR ones are discarded.
// Fixes: removed the unused 'enId' local; materialized the GroupBy query
// once instead of re-running it per assertion.
public void TagsCanBecomeInvariant()
{
    ILanguage language = new LanguageBuilder()
        .WithCultureInfo("fr-FR")
        .Build();
    LocalizationService.Save(language); // en-US is already there
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType, ContentVariation.Culture);
    ContentTypeService.Save(contentType);
    IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.SetCultureName("name-fr", "fr-FR");
    content1.SetCultureName("name-en", "en-US");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" }, culture: "fr-FR");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" }, culture: "en-US");
    ContentService.SaveAndPublish(content1);
    contentType.Variations = ContentVariation.Nothing;
    ContentTypeService.Save(contentType);
    // changes
    content1 = ContentService.GetById(content1.Id);
    // property value has been moved from en-US to invariant, fr-FR tags are gone
    Assert.IsEmpty(content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "fr-FR"));
    Assert.IsEmpty(content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "en-US"));
    string[] tags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer).ToArray();
    Assert.AreEqual(4, tags.Length);
    Assert.Contains("one", tags);
    Assert.AreEqual(-1, tags.IndexOf("plus"));
    // tags have been copied from en-US to invariant, fr-FR tags are gone
    var tagGroups = TagService.GetAllTags(culture: "*").GroupBy(x => x.LanguageId).ToList();
    foreach (ITag tag in TagService.GetAllTags("*"))
    {
        Console.WriteLine($"{tag.Group}:{tag.Text} {tag.LanguageId}");
    }
    Assert.AreEqual(1, tagGroups.Count);
    IGrouping<int?, ITag> enTagGroup = tagGroups.FirstOrDefault(x => x.Key == null);
    Assert.IsNotNull(enTagGroup);
    Assert.AreEqual(4, enTagGroup.Count());
    Assert.IsTrue(enTagGroup.Any(x => x.Text == "one"));
    Assert.IsFalse(enTagGroup.Any(x => x.Text == "plus"));
}
[Test]
// Regression test: switching a variant tags property to invariant must
// succeed even when multiple documents carry the same per-culture tags.
// Fix: removed the unused 'enId' local.
public void TagsCanBecomeInvariant2()
{
    ILanguage language = new LanguageBuilder()
        .WithCultureInfo("fr-FR")
        .Build();
    LocalizationService.Save(language); // en-US is already there
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    PropertyType propertyType = CreateAndAddTagsPropertyType(contentType, ContentVariation.Culture);
    ContentTypeService.Save(contentType);
    IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.SetCultureName("name-fr", "fr-FR");
    content1.SetCultureName("name-en", "en-US");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" }, culture: "fr-FR");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" }, culture: "en-US");
    ContentService.SaveAndPublish(content1);
    IContent content2 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 2", -1);
    content2.SetCultureName("name-fr", "fr-FR");
    content2.SetCultureName("name-en", "en-US");
    content2.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" }, culture: "fr-FR");
    content2.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" }, culture: "en-US");
    ContentService.SaveAndPublish(content2);
    //// pretend we already have invariant values
    // using (var scope = ScopeProvider.CreateScope())
    // {
    //     scope.Database.Execute("INSERT INTO [cmsTags] ([tag], [group], [languageId]) SELECT DISTINCT [tag], [group], NULL FROM [cmsTags] WHERE [languageId] IS NOT NULL");
    // }
    // this should work
    propertyType.Variations = ContentVariation.Nothing;
    Assert.DoesNotThrow(() => ContentTypeService.Save(contentType));
}
[Test]
// Making only the tags property type invariant (content type untouched)
// moves the en-US value/tags to the invariant slot and drops fr-FR ones.
// Fixes: removed the unused 'enId' local; materialized the GroupBy query
// once instead of re-running it per assertion.
public void TagsCanBecomeInvariantByPropertyType()
{
    ILanguage language = new LanguageBuilder()
        .WithCultureInfo("fr-FR")
        .Build();
    LocalizationService.Save(language); // en-US is already there
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    PropertyType propertyType = CreateAndAddTagsPropertyType(contentType, ContentVariation.Culture);
    ContentTypeService.Save(contentType);
    IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.SetCultureName("name-fr", "fr-FR");
    content1.SetCultureName("name-en", "en-US");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" }, culture: "fr-FR");
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" }, culture: "en-US");
    ContentService.SaveAndPublish(content1);
    propertyType.Variations = ContentVariation.Nothing;
    ContentTypeService.Save(contentType);
    // changes
    content1 = ContentService.GetById(content1.Id);
    // property value has been moved from en-US to invariant, fr-FR tags are gone
    Assert.IsEmpty(content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "fr-FR"));
    Assert.IsEmpty(content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer, "en-US"));
    string[] tags = content1.Properties["tags"].GetTagsValue(PropertyEditorCollection, DataTypeService, Serializer).ToArray();
    Assert.AreEqual(4, tags.Length);
    Assert.Contains("one", tags);
    Assert.AreEqual(-1, tags.IndexOf("plus"));
    // tags have been copied from en-US to invariant, fr-FR tags are gone
    var tagGroups = TagService.GetAllTags(culture: "*").GroupBy(x => x.LanguageId).ToList();
    foreach (ITag tag in TagService.GetAllTags("*"))
    {
        Console.WriteLine($"{tag.Group}:{tag.Text} {tag.LanguageId}");
    }
    Assert.AreEqual(1, tagGroups.Count);
    IGrouping<int?, ITag> enTagGroup = tagGroups.FirstOrDefault(x => x.Key == null);
    Assert.IsNotNull(enTagGroup);
    Assert.AreEqual(4, enTagGroup.Count());
    Assert.IsTrue(enTagGroup.Any(x => x.Text == "one"));
    Assert.IsFalse(enTagGroup.Any(x => x.Text == "plus"));
}
[Test]
// Round-trips the property type: variant -> invariant -> variant again.
// Documents a known defect (see the FIXME below); there are deliberately
// no result assertions yet.
public void TagsCanBecomeInvariantByPropertyTypeAndBackToVariant()
{
ILanguage language = new LanguageBuilder()
.WithCultureInfo("fr-FR")
.Build();
LocalizationService.Save(language); // en-US is already there
Template template = TemplateBuilder.CreateTextPageTemplate();
FileService.SaveTemplate(template);
ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
PropertyType propertyType = CreateAndAddTagsPropertyType(contentType, ContentVariation.Culture);
ContentTypeService.Save(contentType);
IContent content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
content1.SetCultureName("name-fr", "fr-FR");
content1.SetCultureName("name-en", "en-US");
content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" }, culture: "fr-FR");
content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "another", "one" }, culture: "en-US");
ContentService.SaveAndPublish(content1);
// First flip: variant -> invariant.
propertyType.Variations = ContentVariation.Nothing;
ContentTypeService.Save(contentType);
// FIXME: This throws due to index violations
// Second flip: invariant -> variant again.
propertyType.Variations = ContentVariation.Culture;
ContentTypeService.Save(contentType);
// TODO: Assert results
}
[Test]
// NOTE(review): despite the name, this test only verifies the tag counts
// and then moves content1 to the recycle bin — no un-trash step and no
// post-trash assertion is visible here. Confirm whether the remainder was
// intentionally removed or is asserted elsewhere.
public void TagsAreUpdatedWhenContentIsTrashedAndUnTrashed_One()
{
Template template = TemplateBuilder.CreateTextPageTemplate();
FileService.SaveTemplate(template);
ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
CreateAndAddTagsPropertyType(contentType);
ContentTypeService.Save(contentType);
// Two documents sharing four of five tags.
Content content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" });
ContentService.SaveAndPublish(content1);
Content content2 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 2", -1);
content2.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
ContentService.SaveAndPublish(content2);
// verify
IEnumerable<ITag> tags = TagService.GetTagsForEntity(content1.Id);
Assert.AreEqual(5, tags.Count());
IEnumerable<ITag> allTags = TagService.GetAllContentTags();
Assert.AreEqual(5, allTags.Count());
ContentService.MoveToRecycleBin(content1);
}
[Test]
public void TagsAreUpdatedWhenContentIsTrashedAndUnTrashed_All()
{
    // Set up a doc type carrying an invariant tags property.
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);

    // Publish two documents; the first carries one extra tag ("bam").
    Content firstContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    firstContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "bam" });
    ContentService.SaveAndPublish(firstContent);

    Content secondContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 2", -1);
    secondContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(secondContent);

    // Sanity-check the published tag data.
    IEnumerable<ITag> entityTags = TagService.GetTagsForEntity(firstContent.Id);
    Assert.AreEqual(5, entityTags.Count());
    IEnumerable<ITag> allContentTags = TagService.GetAllContentTags();
    Assert.AreEqual(5, allContentTags.Count());

    ContentService.Unpublish(firstContent);
    ContentService.Unpublish(secondContent);
}
[Test]
[Ignore("https://github.com/umbraco/Umbraco-CMS/issues/3821 (U4-8442), will need to be fixed.")]
public void TagsAreUpdatedWhenContentIsTrashedAndUnTrashed_Tree()
{
    // Arrange: parent + child documents, both published with tags.
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);
    Content content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "plus" });
    ContentService.SaveAndPublish(content1);
    Content content2 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 2", content1.Id);
    content2.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(content2);
    // verify
    IEnumerable<ITag> tags = TagService.GetTagsForEntity(content1.Id);
    Assert.AreEqual(5, tags.Count());
    IEnumerable<ITag> allTags = TagService.GetAllContentTags();
    Assert.AreEqual(5, allTags.Count());
    // Trashing the parent is expected to clear tags for the whole branch.
    ContentService.MoveToRecycleBin(content1);
    // no more tags
    tags = TagService.GetTagsForEntity(content1.Id);
    Assert.AreEqual(0, tags.Count());
    tags = TagService.GetTagsForEntity(content2.Id);
    Assert.AreEqual(0, tags.Count());
    // no more tags
    allTags = TagService.GetAllContentTags();
    Assert.AreEqual(0, allTags.Count());
    // Restoring from the recycle bin does not republish, so tags stay cleared.
    ContentService.Move(content1, -1);
    Assert.IsFalse(content1.Published);
    // no more tags
    tags = TagService.GetTagsForEntity(content1.Id);
    Assert.AreEqual(0, tags.Count());
    tags = TagService.GetTagsForEntity(content2.Id);
    Assert.AreEqual(0, tags.Count());
    // no more tags
    allTags = TagService.GetAllContentTags();
    Assert.AreEqual(0, allTags.Count());
    content1.PublishCulture(CultureImpact.Invariant);
    ContentService.SaveAndPublish(content1);
    Assert.IsTrue(content1.Published);
    // tags are back
    tags = TagService.GetTagsForEntity(content1.Id);
    Assert.AreEqual(5, tags.Count());
    // FIXME: tag & tree issue
    // when we publish, we 'just' publish the top one and not the ones below = fails
    // what we should do is... NOT clear tags when unpublishing or trashing or...
    // and just update the tag service to NOT return anything related to trashed or
    // unpublished entities (since trashed is set on ALL entities in the trashed branch)
    tags = TagService.GetTagsForEntity(content2.Id); // including that one!
    Assert.AreEqual(4, tags.Count());
    // tags are back
    allTags = TagService.GetAllContentTags();
    Assert.AreEqual(5, allTags.Count());
}
[Test]
public void TagsAreUpdatedWhenContentIsUnpublishedAndRePublished()
{
    // Set up a doc type carrying an invariant tags property.
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);

    // Publish two tagged documents, then unpublish both.
    Content firstContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    firstContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "bam" });
    ContentService.SaveAndPublish(firstContent);

    Content secondContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 2", -1);
    secondContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(secondContent);

    ContentService.Unpublish(firstContent);
    ContentService.Unpublish(secondContent);
}
[Test]
[Ignore("https://github.com/umbraco/Umbraco-CMS/issues/3821 (U4-8442), will need to be fixed.")]
public void TagsAreUpdatedWhenContentIsUnpublishedAndRePublished_Tree()
{
    // Arrange: parent + child documents, both published with tags.
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);
    Content content1 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 1", -1);
    content1.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags", "bam" });
    ContentService.SaveAndPublish(content1);
    Content content2 = ContentBuilder.CreateSimpleContent(contentType, "Tagged content 2", content1);
    content2.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(content2);
    // Unpublishing the parent is expected to clear tags for the whole branch.
    ContentService.Unpublish(content1);
    IEnumerable<ITag> tags = TagService.GetTagsForEntity(content1.Id);
    Assert.AreEqual(0, tags.Count());
    // FIXME: tag & tree issue
    // when we (un)publish, we 'just' publish the top one and not the ones below = fails
    // see similar note above
    tags = TagService.GetTagsForEntity(content2.Id);
    Assert.AreEqual(0, tags.Count());
    IEnumerable<ITag> allTags = TagService.GetAllContentTags();
    Assert.AreEqual(0, allTags.Count());
    // Republish the parent; the child's tags should reappear too.
    content1.PublishCulture(CultureImpact.Invariant);
    ContentService.SaveAndPublish(content1);
    tags = TagService.GetTagsForEntity(content2.Id);
    Assert.AreEqual(4, tags.Count());
    allTags = TagService.GetAllContentTags();
    Assert.AreEqual(5, allTags.Count());
}
[Test]
public void Create_Tag_Data_Bulk_Publish_Operation()
{
    // Arrange: configure the default tags data type for CSV storage.
    IDataType tagsDataType = DataTypeService.GetDataType(1041);
    tagsDataType.Configuration = new TagConfiguration
    {
        Group = "test",
        StorageType = TagsStorageType.Csv
    };

    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);
    contentType.AllowedContentTypes = new[] { new ContentTypeSort(new Lazy<int>(() => contentType.Id), 0, contentType.Alias) };

    // A small tree: one root with two children, all tagged but only saved.
    Content root = ContentBuilder.CreateSimpleContent(contentType, "Tagged content", -1);
    root.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.Save(root);

    Content firstChild = ContentBuilder.CreateSimpleContent(contentType, "child 1 content", root.Id);
    firstChild.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello1", "world1", "some1" });
    ContentService.Save(firstChild);

    Content secondChild = ContentBuilder.CreateSimpleContent(contentType, "child 2 content", root.Id);
    secondChild.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello2", "world2" });
    ContentService.Save(secondChild);

    // Act: publish the whole branch in one bulk operation.
    ContentService.SaveAndPublishBranch(root, true);

    // Assert: each node got its own tag relationships in the database.
    int tagsPropertyTypeId = contentType.PropertyTypes.Single(x => x.Alias == "tags").Id;
    using (IScope scope = ScopeProvider.CreateScope())
    {
        Assert.AreEqual(4, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = root.Id, propTypeId = tagsPropertyTypeId }));
        Assert.AreEqual(3, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = firstChild.Id, propTypeId = tagsPropertyTypeId }));
        Assert.AreEqual(2, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = secondChild.Id, propTypeId = tagsPropertyTypeId }));
        scope.Complete();
    }
}
[Test]
public void Does_Not_Create_Tag_Data_For_Non_Published_Version()
{
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    // Create a content type with a tag property.
    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);

    // Publish a document with four tags.
    Content taggedContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content", -1);
    taggedContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(taggedContent);

    // Merge an extra tag into the edited version, saving without publishing.
    taggedContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "another", "world" }, merge: true);
    ContentService.Save(taggedContent);

    // The edited property value carries all five distinct tags...
    Assert.AreEqual(5, taggedContent.Properties["tags"].GetValue().ToString().Split(',').Distinct().Count());

    // ...but the database still reflects the published version's four tags.
    int tagsPropertyTypeId = contentType.PropertyTypes.Single(x => x.Alias == "tags").Id;
    using (IScope scope = ScopeProvider.CreateScope())
    {
        Assert.AreEqual(4, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = taggedContent.Id, propTypeId = tagsPropertyTypeId }));
        scope.Complete();
    }
}
[Test]
public void Can_Replace_Tag_Data_To_Published_Content()
{
    // Arrange
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);

    Content taggedContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content", -1);

    // Act: assign four tags and publish.
    taggedContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(taggedContent);

    // Assert: property value and database agree on four distinct tags.
    Assert.AreEqual(4, taggedContent.Properties["tags"].GetValue().ToString().Split(',').Distinct().Count());
    int tagsPropertyTypeId = contentType.PropertyTypes.Single(x => x.Alias == "tags").Id;
    using (IScope scope = ScopeProvider.CreateScope())
    {
        Assert.AreEqual(4, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = taggedContent.Id, propTypeId = tagsPropertyTypeId }));
        scope.Complete();
    }
}
[Test]
public void Can_Append_Tag_Data_To_Published_Content()
{
    // Arrange
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);

    Content taggedContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content", -1);
    taggedContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(taggedContent);

    // Act: merge two more tags ("world" already exists) and republish.
    taggedContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "another", "world" }, merge: true);
    ContentService.SaveAndPublish(taggedContent);

    // Assert: five distinct tags in the property value and in the database.
    Assert.AreEqual(5, taggedContent.Properties["tags"].GetValue().ToString().Split(',').Distinct().Count());
    int tagsPropertyTypeId = contentType.PropertyTypes.Single(x => x.Alias == "tags").Id;
    using (IScope scope = ScopeProvider.CreateScope())
    {
        Assert.AreEqual(5, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = taggedContent.Id, propTypeId = tagsPropertyTypeId }));
        scope.Complete();
    }
}
[Test]
public void Can_Remove_Tag_Data_To_Published_Content()
{
    // Arrange
    Template template = TemplateBuilder.CreateTextPageTemplate();
    FileService.SaveTemplate(template);

    ContentType contentType = ContentTypeBuilder.CreateSimpleContentType("umbMandatory", "Mandatory Doc Type", mandatoryProperties: true, defaultTemplateId: template.Id);
    CreateAndAddTagsPropertyType(contentType);
    ContentTypeService.Save(contentType);

    Content taggedContent = ContentBuilder.CreateSimpleContent(contentType, "Tagged content", -1);
    taggedContent.AssignTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "hello", "world", "some", "tags" });
    ContentService.SaveAndPublish(taggedContent);

    // Act: remove two of the four tags and republish.
    taggedContent.RemoveTags(PropertyEditorCollection, DataTypeService, Serializer, "tags", new[] { "some", "world" });
    ContentService.SaveAndPublish(taggedContent);

    // Assert: two distinct tags remain in the property value and the database.
    Assert.AreEqual(2, taggedContent.Properties["tags"].GetValue().ToString().Split(',').Distinct().Count());
    int tagsPropertyTypeId = contentType.PropertyTypes.Single(x => x.Alias == "tags").Id;
    using (IScope scope = ScopeProvider.CreateScope())
    {
        Assert.AreEqual(2, scope.Database.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM cmsTagRelationship WHERE nodeId=@nodeId AND propertyTypeId=@propTypeId",
            new { nodeId = taggedContent.Id, propTypeId = tagsPropertyTypeId }));
        scope.Complete();
    }
}
private PropertyType CreateAndAddTagsPropertyType(ContentType contentType, ContentVariation variations = ContentVariation.Nothing)
{
    // Build a "tags" property wired to data type id 1041.
    PropertyType tagsProperty = new PropertyTypeBuilder()
        .WithPropertyEditorAlias("test")
        .WithAlias("tags")
        .WithDataTypeId(1041)
        .WithVariations(variations)
        .Build();

    // Attach it to the first group and align the type's variance with the property's.
    contentType.PropertyGroups.First().PropertyTypes.Add(tagsProperty);
    contentType.Variations = variations;
    return tagsProperty;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
/// <summary>
/// This interface should be implemented by any class whose instances are intended
/// to be executed by a thread. It mirrors Java's Runnable in this conversion layer.
/// </summary>
public interface IThreadRunnable
{
    /// <summary>
    /// This method has to be implemented in order that starting of the thread causes the object's
    /// run method to be called in that separately executing thread.
    /// </summary>
    void Run();
}
/// <summary>
/// Contains conversion support elements such as classes, interfaces and static methods.
/// </summary>
public class SupportClass
{
/// <summary>
/// Running checksum over a stream of bytes, modeled after java.util.zip.Checksum.
/// Method names keep the Java casing on purpose for easier porting.
/// </summary>
public interface Checksum
{
    // Resets the checksum to its initial value.
    void reset();
    // Feeds a single byte (passed as int) into the checksum.
    void update(int b);
    // Feeds every byte of b into the checksum.
    void update(byte[] b);
    // Feeds length bytes of b, starting at offset, into the checksum.
    void update(byte[] b, int offset, int length);
    // Returns the current checksum value (32 significant bits, widened to Int64).
    Int64 getValue();
}
public class CRC32 : Checksum
{
    // Lookup table for the reflected CRC-32 polynomial 0xEDB88320,
    // one entry per possible byte value.
    private static readonly UInt32[] crcTable = InitializeCRCTable();

    private static UInt32[] InitializeCRCTable()
    {
        UInt32[] table = new UInt32[256];
        for (UInt32 index = 0; index < 256; index++)
        {
            UInt32 entry = index;
            for (int bit = 0; bit < 8; bit++)
            {
                entry = ((entry & 1) != 0) ? 0xedb88320 ^ (entry >> 1) : entry >> 1;
            }
            table[index] = entry;
        }
        return table;
    }

    // Running checksum, kept in finalized (post-inversion) form between updates.
    private UInt32 crc = 0;

    public Int64 getValue()
    {
        // Widen through Int64 so the full unsigned 32-bit value comes back positive.
        return (Int64)crc & 0xffffffffL;
    }

    public void reset()
    {
        crc = 0;
    }

    public void update(int bval)
    {
        UInt32 working = ~crc;
        working = crcTable[(working ^ bval) & 0xff] ^ (working >> 8);
        crc = ~working;
    }

    public void update(byte[] buf, int off, int len)
    {
        UInt32 working = ~crc;
        for (int i = 0; i < len; i++)
            working = crcTable[(working ^ buf[off + i]) & 0xff] ^ (working >> 8);
        crc = ~working;
    }

    public void update(byte[] buf)
    {
        update(buf, 0, buf.Length);
    }
}
public class TextSupport
{
    /// <summary>
    /// Copies a range of characters from a String into a specified array of chars.
    /// Equivalent of Java's String.getChars(int, int, char[], int).
    /// </summary>
    /// <param name="sourceString">The String to get the chars from</param>
    /// <param name="sourceStart">Index of the first character to copy (inclusive)</param>
    /// <param name="sourceEnd">Index at which copying stops (exclusive)</param>
    /// <param name="destinationArray">Array that receives the chars</param>
    /// <param name="destinationStart">Index in the destination array at which to start storing the chars</param>
    public static void GetCharsFromString(string sourceString, int sourceStart, int sourceEnd, char[] destinationArray, int destinationStart)
    {
        // String.CopyTo performs the same element-wise copy as the original
        // manual loop (which also carried a redundant (char) cast), and
        // validates the ranges up front.
        sourceString.CopyTo(sourceStart, destinationArray, destinationStart, sourceEnd - sourceStart);
    }
}
public class CollectionsSupport
{
    /// <summary>
    /// Minimal port of java.util.BitSet on top of System.Collections.BitArray;
    /// grows automatically when a bit beyond the current size is set.
    /// </summary>
    public class BitSet
    {
        private System.Collections.BitArray bitArray = null;

        public BitSet()
            : this(0)
        {
        }

        public BitSet(int size)
        {
            bitArray = new System.Collections.BitArray(size, false);
        }

        /// <summary>
        /// Sets the bit at the given index, growing the underlying storage if needed.
        /// </summary>
        public void Set(int index)
        {
            if (index >= bitArray.Count)
                GrowBitArray(index + 1);
            bitArray.Set(index, true);
        }

        /// <summary>
        /// Returns the number of bits currently set to true.
        /// </summary>
        public int Cardinality()
        {
            int cardinality = 0;
            for (int i = 0; i < bitArray.Length; i++)
                if (bitArray.Get(i))
                    cardinality++;
            return cardinality;
        }

        /// <summary>
        /// Returns the next set bit at or after index, or -1 if no such bit exists.
        /// </summary>
        /// <param name="index">the index of the bit at which to start checking</param>
        /// <returns>the next set bit or -1</returns>
        public int NextSetBit(int index)
        {
            while (index < bitArray.Count)
            {
                // if index bit is set, return it; otherwise check next index bit
                if (bitArray.Get(index))
                    return index;
                else
                    index++;
            }
            // if no bits are set at or after index, return -1
            return -1;
        }

        // Grows the backing BitArray; BitArray.Length setter preserves existing
        // bits and initializes the new ones to false.
        private void GrowBitArray(int size)
        {
            bitArray.Length = size;
        }
    }

    /// <summary>Fills the whole array with fillValue (java.util.Arrays.fill equivalent).</summary>
    public static void ArrayFill(object[] array, object fillValue)
    {
        ArrayFill(array, 0, array.Length, fillValue);
    }

    /// <summary>Fills array[from..to) with fillValue.</summary>
    public static void ArrayFill(object[] array, int from, int to, object fillValue)
    {
        for (int i = from; i < to; i++)
            array[i] = fillValue;
    }

    /// <summary>Fills the whole array with fillValue.</summary>
    public static void ArrayFill(byte[] array, byte fillValue)
    {
        ArrayFill(array, 0, array.Length, fillValue);
    }

    /// <summary>Fills array[from..to) with fillValue.</summary>
    public static void ArrayFill(byte[] array, int from, int to, byte fillValue)
    {
        for (int i = from; i < to; i++)
            array[i] = fillValue;
    }

    /// <summary>Fills the whole array with fillValue.</summary>
    public static void ArrayFill(char[] array, char fillValue)
    {
        ArrayFill(array, 0, array.Length, fillValue);
    }

    /// <summary>Fills array[from..to) with fillValue.</summary>
    public static void ArrayFill(char[] array, int from, int to, char fillValue)
    {
        for (int i = from; i < to; i++)
            array[i] = fillValue;
    }

    /// <summary>Fills the whole array with fillValue.</summary>
    public static void ArrayFill(int[] array, int fillValue)
    {
        ArrayFill(array, 0, array.Length, fillValue);
    }

    /// <summary>Fills array[from..to) with fillValue.</summary>
    public static void ArrayFill(int[] array, int from, int to, int fillValue)
    {
        for (int i = from; i < to; i++)
            array[i] = fillValue;
    }

    /// <summary>Fills the whole array with fillValue.</summary>
    public static void ArrayFill(long[] array, long fillValue)
    {
        ArrayFill(array, 0, array.Length, fillValue);
    }

    /// <summary>Fills array[from..to) with fillValue.</summary>
    public static void ArrayFill(long[] array, int from, int to, long fillValue)
    {
        for (int i = from; i < to; i++)
            array[i] = fillValue;
    }

    /// <summary>Appends every element of source to destination (java.util.Collection.addAll equivalent).</summary>
    public static void AddAll(System.Collections.Generic.ICollection<byte[]> source, System.Collections.Generic.ICollection<byte[]> destination)
    {
        System.Collections.Generic.IEnumerator<byte[]> enumerator = source.GetEnumerator();
        while (enumerator.MoveNext())
            destination.Add(enumerator.Current);
    }

    /// <summary>Appends every element of source to destination.</summary>
    public static void AddAll(System.Collections.Generic.ICollection<string> source, System.Collections.Generic.ICollection<string> destination)
    {
        System.Collections.Generic.IEnumerator<string> enumerator = source.GetEnumerator();
        while (enumerator.MoveNext())
            destination.Add(enumerator.Current);
    }

    /// <summary>Appends every element of source to destination.</summary>
    public static void AddAll(System.Collections.Generic.IList<object> source, System.Collections.Generic.IList<object> destination)
    {
        System.Collections.Generic.IEnumerator<object> enumerator = source.GetEnumerator();
        while (enumerator.MoveNext())
            destination.Add(enumerator.Current);
    }

    /// <summary>
    /// Returns the portion of the map whose keys compare greater than or equal to
    /// fromKey (java.util.SortedMap.tailMap equivalent). A null or empty source
    /// yields an empty result map.
    /// </summary>
    public static System.Collections.Generic.SortedDictionary<string, Lucene.Net.Index.IndexReader> TailMap(System.Collections.Generic.SortedDictionary<string, Lucene.Net.Index.IndexReader> map, string fromKey)
    {
        // Guard before dereferencing: the original read map.Comparer first and
        // only null-checked map afterwards, which throws on a null map.
        if (map == null)
            return new System.Collections.Generic.SortedDictionary<string, Lucene.Net.Index.IndexReader>();

        System.Collections.Generic.SortedDictionary<string, Lucene.Net.Index.IndexReader> tailMap;
        if (map.Comparer != null)
            tailMap = new System.Collections.Generic.SortedDictionary<string, Lucene.Net.Index.IndexReader>(map.Comparer);
        else
            tailMap = new System.Collections.Generic.SortedDictionary<string, Lucene.Net.Index.IndexReader>();

        if (map.Count > 0)
        {
            System.Collections.Generic.IEnumerator<System.Collections.Generic.KeyValuePair<string, Lucene.Net.Index.IndexReader>> e = map.GetEnumerator();
            if (map.Comparer != null)
                while (e.MoveNext())
                {
                    if (map.Comparer.Compare(fromKey, e.Current.Key) <= 0)
                        tailMap[e.Current.Key] = e.Current.Value;
                }
            else
                // Fall back to ordinal comparison when no comparer is available.
                while (e.MoveNext())
                {
                    if (string.CompareOrdinal(fromKey, e.Current.Key) <= 0)
                        tailMap[e.Current.Key] = e.Current.Value;
                }
        }
        return tailMap;
    }

    /// <summary>Copies every entry of source into destination, overwriting duplicates.</summary>
    public static void PutAll(System.Collections.IDictionary source, System.Collections.IDictionary destination)
    {
        // using destination[key] = source[key] avoids exceptions on duplicate, and
        // preserves the most recent duplicate key, which is semantically equivalent
        // to the java.util.Map functionality
        System.Collections.IEnumerator enumerator = source.Keys.GetEnumerator();
        while (enumerator.MoveNext())
            destination[enumerator.Current] = source[enumerator.Current];
    }

    /// <summary>Copies every entry of source into destination, overwriting duplicates.</summary>
    public static void PutAll(System.Collections.Generic.IDictionary<object, object> source, System.Collections.Generic.IDictionary<object, object> destination)
    {
        // using destination[key] = source[key] avoids exceptions on duplicate, and
        // preserves the most recent duplicate key, which is semantically equivalent
        // to the java.util.Map functionality
        System.Collections.Generic.IEnumerator<object> enumerator = source.Keys.GetEnumerator();
        while (enumerator.MoveNext())
            destination[enumerator.Current] = source[enumerator.Current];
    }
}
/// <summary>
/// Support class used to handle threads
/// </summary>
public class ThreadClass : IThreadRunnable
{
    /// <summary>
    /// The instance of System.Threading.Thread
    /// </summary>
    private System.Threading.Thread threadField;
    /// <summary>
    /// Initializes a new instance of the ThreadClass class
    /// </summary>
    public ThreadClass()
    {
        threadField = new System.Threading.Thread(new System.Threading.ThreadStart(Run));
        // NOTE(review): 'This' is [ThreadStatic], so this registers the wrapper on the
        // CREATING thread, not on the thread that will eventually execute Run.
        This = this;
    }
    /// <summary>
    /// Initializes a new instance of the Thread class.
    /// </summary>
    /// <param name="Name">The name of the thread</param>
    public ThreadClass(System.String Name)
    {
        threadField = new System.Threading.Thread(new System.Threading.ThreadStart(Run));
        this.Name = Name;
        This = this;
    }
    /// <summary>
    /// Initializes a new instance of the Thread class.
    /// </summary>
    /// <param name="Start">A ThreadStart delegate that references the methods to be invoked when this thread begins executing</param>
    public ThreadClass(System.Threading.ThreadStart Start)
    {
        threadField = new System.Threading.Thread(Start);
        This = this;
    }
    /// <summary>
    /// Initializes a new instance of the Thread class.
    /// </summary>
    /// <param name="Start">A ThreadStart delegate that references the methods to be invoked when this thread begins executing</param>
    /// <param name="Name">The name of the thread</param>
    public ThreadClass(System.Threading.ThreadStart Start, System.String Name)
    {
        threadField = new System.Threading.Thread(Start);
        this.Name = Name;
        This = this;
    }
    /// <summary>
    /// This method has no functionality unless the method is overridden
    /// </summary>
    public virtual void Run()
    {
    }
    /// <summary>
    /// Causes the operating system to change the state of the current thread instance to ThreadState.Running
    /// </summary>
    public virtual void Start()
    {
        threadField.Start();
        // NOTE(review): this reads the CALLER's thread-static 'This'; the branch only
        // runs when the calling thread has no wrapper registered yet.
        if (This == null)
        {
            This = this;
            This.Instance = threadField;
        }
    }
    /// <summary>
    /// Interrupts a thread that is in the WaitSleepJoin thread state
    /// </summary>
    public virtual void Interrupt()
    {
        threadField.Interrupt();
    }
    /// <summary>
    /// Gets the current thread instance
    /// </summary>
    public System.Threading.Thread Instance
    {
        get
        {
            return threadField;
        }
        set
        {
            threadField = value;
        }
    }
    /// <summary>
    /// Gets or sets the name of the thread. The setter is write-once: it only
    /// assigns when the underlying thread has no name yet (Thread.Name throws
    /// if set twice).
    /// </summary>
    public System.String Name
    {
        get
        {
            return threadField.Name;
        }
        set
        {
            if (threadField.Name == null)
                threadField.Name = value;
        }
    }
    // Maps Java's Thread.setDaemon onto .NET's background-thread flag.
    public void SetDaemon(bool isDaemon)
    {
        threadField.IsBackground = isDaemon;
    }
    /// <summary>
    /// Gets or sets a value indicating the scheduling priority of a thread.
    /// Both accessors swallow exceptions (e.g. for a dead thread) and fall back
    /// to Normal / no-op.
    /// </summary>
    public System.Threading.ThreadPriority Priority
    {
        get
        {
            try
            {
                return threadField.Priority;
            }
            catch
            {
                return System.Threading.ThreadPriority.Normal;
            }
        }
        set
        {
            try
            {
                threadField.Priority = value;
            }
            catch{}
        }
    }
    /// <summary>
    /// Gets a value indicating the execution status of the current thread
    /// </summary>
    public bool IsAlive
    {
        get
        {
            return threadField.IsAlive;
        }
    }
    /// <summary>
    /// Gets or sets a value indicating whether or not a thread is a background thread.
    /// </summary>
    public bool IsBackground
    {
        get
        {
            return threadField.IsBackground;
        }
        set
        {
            threadField.IsBackground = value;
        }
    }
    /// <summary>
    /// Blocks the calling thread until a thread terminates.
    /// NOTE(review): locks on 'this' (a known anti-pattern) while joining; any
    /// other code locking the same wrapper instance will serialize with Join.
    /// </summary>
    public void Join()
    {
        lock (this)
        {
            threadField.Join();
        }
    }
    /// <summary>
    /// Blocks the calling thread until a thread terminates or the specified time elapses
    /// </summary>
    /// <param name="MiliSeconds">Time of wait in milliseconds</param>
    public void Join(long MiliSeconds)
    {
        lock (this)
        {
            // 1 ms == 10000 ticks; TimeSpan's constructor takes ticks.
            threadField.Join(new System.TimeSpan(MiliSeconds * 10000));
        }
    }
    /// <summary>
    /// Blocks the calling thread until a thread terminates or the specified time elapses
    /// </summary>
    /// <param name="MiliSeconds">Time of wait in milliseconds</param>
    /// <param name="NanoSeconds">Time of wait in nanoseconds</param>
    public void Join(long MiliSeconds, int NanoSeconds)
    {
        lock (this)
        {
            // 100 ns == 1 tick; nanoseconds below tick resolution are lost.
            threadField.Join(new System.TimeSpan(MiliSeconds * 10000 + NanoSeconds * 100));
        }
    }
    /// <summary>
    /// Resumes a thread that has been suspended.
    /// NOTE(review): implemented as Monitor.PulseAll on the Thread object — this
    /// only works if the target thread is waiting via Suspend() below, and
    /// Monitor.PulseAll requires the caller to hold the lock on threadField;
    /// called bare it throws SynchronizationLockException. Verify call sites.
    /// </summary>
    public void Resume()
    {
        System.Threading.Monitor.PulseAll(threadField);
    }
    /// <summary>
    /// Raises a ThreadAbortException in the thread on which it is invoked,
    /// to begin the process of terminating the thread. Calling this method
    /// usually terminates the thread
    /// </summary>
    public void Abort()
    {
        threadField.Abort();
    }
    /// <summary>
    /// Raises a ThreadAbortException in the thread on which it is invoked,
    /// to begin the process of terminating the thread while also providing
    /// exception information about the thread termination.
    /// Calling this method usually terminates the thread.
    /// </summary>
    /// <param name="stateInfo">An object that contains application-specific information, such as state, which can be used by the thread being aborted</param>
    public void Abort(object stateInfo)
    {
        lock (this)
        {
            threadField.Abort(stateInfo);
        }
    }
    /// <summary>
    /// Suspends the thread, if the thread is already suspended it has no effect.
    /// NOTE(review): implemented as Monitor.Wait on the Thread object — this
    /// suspends the CALLING thread, and requires the caller to already hold the
    /// lock on threadField or it throws SynchronizationLockException.
    /// </summary>
    public void Suspend()
    {
        System.Threading.Monitor.Wait(threadField);
    }
    /// <summary>
    /// Obtain a String that represents the current object
    /// </summary>
    /// <returns>A String that represents the current object</returns>
    public override System.String ToString()
    {
        return "Thread[" + Name + "," + Priority.ToString() + "," + "" + "]";
    }
    // Per-thread wrapper instance, mimicking Java's Thread.currentThread().
    [ThreadStatic]
    static ThreadClass This = null;
    // named as the Java version
    public static ThreadClass CurrentThread()
    {
        return Current();
    }
    public static void Sleep(long ms)
    {
        // casting long ms to int ms could lose resolution, however unlikely
        // that someone would want to sleep for that long...
        System.Threading.Thread.Sleep((int)ms);
    }
    /// <summary>
    /// Gets the currently running thread, lazily creating a wrapper around
    /// Thread.CurrentThread for threads not started through ThreadClass.
    /// </summary>
    /// <returns>The currently running thread</returns>
    public static ThreadClass Current()
    {
        if (This == null)
        {
            This = new ThreadClass();
            This.Instance = System.Threading.Thread.CurrentThread;
        }
        return This;
    }
}
/// <summary>
/// Represents the methods to support some operations over files.
/// </summary>
public class FileSupport
{
    /// <summary>
    /// Returns an array of abstract pathnames representing the files and directories of the specified path.
    /// </summary>
    /// <param name="path">The abstract pathname whose children to list.</param>
    /// <returns>A FileInfo for each child of the path, or null when the path is not a directory.</returns>
    public static System.IO.FileInfo[] GetFiles(System.IO.FileInfo path)
    {
        // Only directories have children; mirror Java's File.listFiles() null contract.
        if ((path.Attributes & System.IO.FileAttributes.Directory) == 0)
            return null;

        String[] entries = System.IO.Directory.GetFileSystemEntries(path.FullName);
        System.IO.FileInfo[] children = new System.IO.FileInfo[entries.Length];
        for (int i = 0; i < entries.Length; i++)
            children[i] = new System.IO.FileInfo(entries[i]);
        return children;
    }
    /// <summary>
    /// Returns a list of files in a given directory that the filter accepts.
    /// </summary>
    /// <param name="fullName">The full path name to the directory.</param>
    /// <param name="indexFileNameFilter">Filter deciding which file names to include.</param>
    /// <returns>An array containing the accepted file names.</returns>
    public static System.String[] GetLuceneIndexFiles(System.String fullName,
        Lucene.Net.Index.IndexFileNameFilter indexFileNameFilter)
    {
        System.IO.DirectoryInfo directory = new System.IO.DirectoryInfo(fullName);
        System.Collections.ArrayList accepted = new System.Collections.ArrayList();
        foreach (System.IO.FileInfo candidate in directory.GetFiles())
        {
            if (indexFileNameFilter.Accept(candidate, candidate.Name))
            {
                accepted.Add(candidate.Name);
            }
        }
        System.String[] names = new System.String[accepted.Count];
        accepted.CopyTo(names);
        return names;
    }
}
/// <summary>
/// A simple class for number conversions (Java-style helpers).
/// The default string form of a long is base 36 (digits 0-9 then a-z).
/// </summary>
public class Number
{
    /// <summary>
    /// Min radix value.
    /// </summary>
    public const int MIN_RADIX = 2;
    /// <summary>
    /// Max radix value.
    /// </summary>
    public const int MAX_RADIX = 36;
    // Digit alphabet shared by all conversions; position == digit value.
    private const System.String digits = "0123456789abcdefghijklmnopqrstuvwxyz";
    /// <summary>
    /// Converts a number to its base-36 System.String representation.
    /// </summary>
    /// <param name="number">The value to convert.</param>
    /// <returns>The base-36 representation, with a leading '-' for negatives.</returns>
    public static System.String ToString(long number)
    {
        if (number == System.Int64.MinValue)
        {
            // -number would overflow for MinValue; delegate to the radix-aware
            // overload, which accumulates in negative space and handles it.
            return ToString(number, MAX_RADIX);
        }
        System.Text.StringBuilder s = new System.Text.StringBuilder();
        if (number == 0)
        {
            s.Append("0");
        }
        else
        {
            if (number < 0)
            {
                s.Append("-");
                number = -number;
            }
            // Emit base-36 digits least-significant first, inserting at the front.
            while (number > 0)
            {
                char c = digits[(int)number % 36];
                s.Insert(0, c);
                number = number / 36;
            }
        }
        return s.ToString();
    }
    /// <summary>
    /// Converts a float to System.String, always including a decimal part
    /// (e.g. 2 becomes "2.0"), using the invariant culture otherwise.
    /// </summary>
    /// <param name="f">The value to convert.</param>
    /// <returns>The string representation.</returns>
    public static System.String ToString(float f)
    {
        if (((float)(int)f) == f)
        {
            return ((int)f).ToString() + ".0";
        }
        else
        {
            return f.ToString(System.Globalization.NumberFormatInfo.InvariantInfo);
        }
    }
    /// <summary>
    /// Converts a number to System.String in the specified radix.
    /// </summary>
    /// <param name="i">A number to be converted.</param>
    /// <param name="radix">A radix; values outside [MIN_RADIX, MAX_RADIX] fall back to 10.</param>
    /// <returns>A System.String representation of the number in the specified radix.</returns>
    public static System.String ToString(long i, int radix)
    {
        if (radix < MIN_RADIX || radix > MAX_RADIX)
            radix = 10;
        // 64 binary digits plus a sign is the worst case.
        char[] buf = new char[65];
        int charPos = 64;
        bool negative = (i < 0);
        // Accumulate in NEGATIVE space so long.MinValue (which has no
        // positive counterpart) is handled correctly — same trick as
        // Java's Long.toString(long, int).
        if (!negative)
        {
            i = -i;
        }
        while (i <= -radix)
        {
            buf[charPos--] = digits[(int)(-(i % radix))];
            i = i / radix;
        }
        buf[charPos] = digits[(int)(-i)];
        if (negative)
        {
            buf[--charPos] = '-';
        }
        return new System.String(buf, charPos, (65 - charPos));
    }
    /// <summary>
    /// Parses a non-negative number in the specified radix.
    /// </summary>
    /// <param name="s">An input System.String (case-insensitive digits, no sign).</param>
    /// <param name="radix">A radix in [MIN_RADIX, MAX_RADIX].</param>
    /// <returns>The parsed number in the specified radix.</returns>
    /// <exception cref="ArgumentException">When s is null.</exception>
    /// <exception cref="NotSupportedException">When radix is out of range.</exception>
    /// <exception cref="FormatException">When s contains a character that is not a valid digit for the radix.</exception>
    public static long Parse(System.String s, int radix)
    {
        if (s == null)
        {
            throw new ArgumentException("null");
        }
        if (radix < MIN_RADIX)
        {
            throw new NotSupportedException("radix " + radix +
                                            " less than Number.MIN_RADIX");
        }
        if (radix > MAX_RADIX)
        {
            throw new NotSupportedException("radix " + radix +
                                            " greater than Number.MAX_RADIX");
        }
        long result = 0;
        long mult = 1;
        // Invariant lowercasing: culture-sensitive ToLower() maps 'I' to a
        // dotless ı under the Turkish culture, breaking hex digit lookup.
        s = s.ToLowerInvariant();
        for (int i = s.Length - 1; i >= 0; i--)
        {
            int weight = digits.IndexOf(s[i]);
            // A digit must both exist in the alphabet AND be legal for the
            // radix (the original accepted e.g. 'z' in radix 2).
            if (weight == -1 || weight >= radix)
                throw new FormatException("Invalid number for the specified radix");
            result += (weight * mult);
            mult *= radix;
        }
        return result;
    }
    /// <summary>
    /// Performs an unsigned bitwise right shift with the specified number
    /// </summary>
    /// <param name="number">Number to operate on</param>
    /// <param name="bits">Amount of bits to shift</param>
    /// <returns>The resulting number from the shift operation</returns>
    public static int URShift(int number, int bits)
    {
        if (number >= 0)
            return number >> bits;
        else
            // (2 << ~bits) adds back the bit the arithmetic shift sign-extended.
            return (number >> bits) + (2 << ~bits);
    }
    /// <summary>
    /// Performs an unsigned bitwise right shift with the specified number
    /// </summary>
    /// <param name="number">Number to operate on</param>
    /// <param name="bits">Amount of bits to shift</param>
    /// <returns>The resulting number from the shift operation</returns>
    public static long URShift(long number, int bits)
    {
        if (number >= 0)
            return number >> bits;
        else
            return (number >> bits) + (2 << ~bits);
    }
    /// <summary>
    /// Returns the index of the first bit that is set to true that occurs
    /// on or after the specified starting index. If no such bit exists
    /// then -1 is returned.
    /// </summary>
    /// <param name="bits">The BitArray object.</param>
    /// <param name="fromIndex">The index to start checking from (inclusive).</param>
    /// <returns>The index of the next set bit.</returns>
    public static int NextSetBit(System.Collections.BitArray bits, int fromIndex)
    {
        for (int i = fromIndex; i < bits.Length; i++)
        {
            if (bits[i] == true)
            {
                return i;
            }
        }
        return -1;
    }
    /// <summary>
    /// Returns the number of bits set to true in this BitSet.
    /// </summary>
    /// <param name="bits">The BitArray object.</param>
    /// <returns>The number of bits set to true in this BitSet.</returns>
    public static int Cardinality(System.Collections.BitArray bits)
    {
        int count = 0;
        for (int i = 0; i < bits.Count; i++)
        {
            if (bits[i])
                count++;
        }
        return count;
    }
    /// <summary>
    /// Converts a base-36 System.String (lowercase digits expected,
    /// optional leading '-') to a long.
    /// </summary>
    /// <param name="s">The string to convert.</param>
    /// <returns>The parsed value.</returns>
    /// <exception cref="ArgumentException">When s contains an unsupported character.</exception>
    public static long ToInt64(System.String s)
    {
        long number = 0;
        // The factor must be a long: an int overflows after about six
        // base-36 digits (36^6 > int.MaxValue), silently corrupting results.
        long factor;
        // handle negative number
        if (s.StartsWith("-"))
        {
            s = s.Substring(1);
            factor = -1;
        }
        else
        {
            factor = 1;
        }
        // generate number
        for (int i = s.Length - 1; i > -1; i--)
        {
            int n = digits.IndexOf(s[i]);
            // not supporting fractional or scientific notations
            if (n < 0)
                throw new System.ArgumentException("Invalid or unsupported character in number: " + s[i]);
            number += (n * factor);
            factor *= 36;
        }
        return number;
    }
}
/// <summary>
/// Mimics Java's Character class.
/// </summary>
public class Character
{
    private const char charNull = '\0';
    private const char charZero = '0';
    private const char charA = 'a';
    /// <summary>
    /// Largest radix supported for digit conversion (36).
    /// </summary>
    public static int MAX_RADIX
    {
        get { return 36; }
    }
    /// <summary>
    /// Smallest radix supported for digit conversion (2).
    /// </summary>
    public static int MIN_RADIX
    {
        get { return 2; }
    }
    /// <summary>
    /// Returns the character that represents the given digit in the given
    /// radix, mirroring java.lang.Character.forDigit: '0'-'9' for values
    /// below ten, 'a' onward for larger values.
    /// </summary>
    /// <param name="digit">The digit value to convert.</param>
    /// <param name="radix">The radix in which the digit is interpreted.</param>
    /// <returns>The digit character, or the null character ('\0') when
    /// either argument is out of range.</returns>
    public static char ForDigit(int digit, int radix)
    {
        bool radixInRange = radix >= Character.MIN_RADIX && radix <= Character.MAX_RADIX;
        bool digitInRange = digit >= 0 && digit < radix;
        if (!radixInRange || !digitInRange)
            return charNull;
        return digit < 10
            ? (char)((int)charZero + digit)
            : (char)((int)charA + digit - 10);
    }
}
/// <summary>
/// Java-style date helper.
/// </summary>
public class Date
{
    /// <summary>
    /// Returns the milliseconds since the Unix epoch for the given date,
    /// mirroring java.util.Date.getTime(): the date is treated as local
    /// time and corrected by the current time zone's UTC offset.
    /// </summary>
    /// <param name="dateTime">The date to convert.</param>
    /// <returns>Milliseconds elapsed since 1970-01-01T00:00:00 UTC.</returns>
    static public long GetTime(DateTime dateTime)
    {
        DateTime epoch = new DateTime(1970, 1, 1);
        TimeSpan sinceEpoch = dateTime.Subtract(epoch)
            - TimeZone.CurrentTimeZone.GetUtcOffset(dateTime);
        return sinceEpoch.Ticks / TimeSpan.TicksPerMillisecond;
    }
}
/// <summary>
/// Java-style float parsing/formatting helpers. A trailing 'f' or 'F'
/// suffix (as in Java float literals) is stripped before parsing.
/// The catch (FormatException) { throw fex; } wrappers of the original
/// were removed: rethrowing the caught exception object only destroys
/// the stack trace (CA2200) and changes nothing for callers.
/// </summary>
public class Single
{
    // Strips a trailing Java float-literal suffix ('f' or 'F'), if present.
    private static System.String StripSuffix(System.String s)
    {
        if (s.EndsWith("f") || s.EndsWith("F"))
            return s.Substring(0, s.Length - 1);
        return s;
    }
    // Maps the Java '.' decimal separator to the current culture's
    // separator before handing off to the culture-sensitive parsers.
    private static System.String ToCurrentCulture(System.String s)
    {
        return s.Replace(".", System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator);
    }
    /// <summary>
    /// Parses a float with the given style and format provider.
    /// </summary>
    /// <param name="s">The string to parse; may carry an 'f'/'F' suffix.</param>
    /// <param name="style">Number style flags.</param>
    /// <param name="provider">Format provider supplying culture rules.</param>
    /// <returns>The parsed float.</returns>
    /// <exception cref="System.FormatException">When s is not a valid float.</exception>
    public static System.Single Parse(System.String s, System.Globalization.NumberStyles style, System.IFormatProvider provider)
    {
        return System.Single.Parse(StripSuffix(s), style, provider);
    }
    /// <summary>
    /// Parses a float with the given format provider.
    /// </summary>
    /// <param name="s">The string to parse; may carry an 'f'/'F' suffix.</param>
    /// <param name="provider">Format provider supplying culture rules.</param>
    /// <returns>The parsed float.</returns>
    /// <exception cref="System.FormatException">When s is not a valid float.</exception>
    public static System.Single Parse(System.String s, System.IFormatProvider provider)
    {
        return System.Single.Parse(StripSuffix(s), provider);
    }
    /// <summary>
    /// Parses a float with the given number style.
    /// </summary>
    /// <param name="s">The string to parse; may carry an 'f'/'F' suffix.</param>
    /// <param name="style">Number style flags.</param>
    /// <returns>The parsed float.</returns>
    /// <exception cref="System.FormatException">When s is not a valid float.</exception>
    public static System.Single Parse(System.String s, System.Globalization.NumberStyles style)
    {
        return System.Single.Parse(StripSuffix(s), style);
    }
    /// <summary>
    /// Parses a float using the current culture, treating '.' as the
    /// decimal separator (Java semantics).
    /// </summary>
    /// <param name="s">The string to parse; may carry an 'f'/'F' suffix.</param>
    /// <returns>The parsed float.</returns>
    /// <exception cref="System.FormatException">When s is not a valid float.</exception>
    public static System.Single Parse(System.String s)
    {
        return System.Single.Parse(ToCurrentCulture(StripSuffix(s)));
    }
    /// <summary>
    /// Attempts to parse a float using the current culture, treating '.'
    /// as the decimal separator (Java semantics).
    /// </summary>
    /// <param name="s">The string to parse; may carry an 'f'/'F' suffix.</param>
    /// <param name="f">Receives the parsed value, or 0 on failure.</param>
    /// <returns>true when parsing succeeded.</returns>
    public static bool TryParse(System.String s, out float f)
    {
        return System.Single.TryParse(ToCurrentCulture(StripSuffix(s)), out f);
    }
    /// <summary>
    /// Formats a float with '.' as the decimal separator regardless of
    /// the current culture.
    /// </summary>
    /// <param name="f">The value to format.</param>
    /// <returns>The formatted string.</returns>
    public static string ToString(float f)
    {
        return f.ToString().Replace(System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ".");
    }
    /// <summary>
    /// Formats a float with the given format string and '.' as the
    /// decimal separator regardless of the current culture.
    /// </summary>
    /// <param name="f">The value to format.</param>
    /// <param name="format">A standard or custom numeric format string.</param>
    /// <returns>The formatted string.</returns>
    public static string ToString(float f, string format)
    {
        return f.ToString(format).Replace(System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ".");
    }
}
/// <summary>
/// Typed application settings: an in-memory dictionary of overrides with
/// fall-through to the application configuration file (values read from
/// the configuration file are cached back into the dictionary).
/// </summary>
public class AppSettings
{
    // In-memory overrides / cache; always consulted before the config file.
    static System.Collections.Specialized.ListDictionary settings = new System.Collections.Specialized.ListDictionary();
    /// <summary>
    /// Stores an int value under the given key.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">The value to store.</param>
    public static void Set(System.String key, int defValue)
    {
        settings[key] = defValue;
        //System.Configuration.ConfigurationManager.AppSettings.Set(key, defValue.ToString()); // {{Aroush-2.3.1}} try this instead
    }
    /// <summary>
    /// Stores a long value under the given key.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">The value to store.</param>
    public static void Set(System.String key, long defValue)
    {
        settings[key] = defValue;
        //System.Configuration.ConfigurationManager.AppSettings.Set(key, defValue.ToString()); // {{Aroush-2.3.1}} try this instead
    }
    /// <summary>
    /// Stores a string value under the given key.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">The value to store.</param>
    public static void Set(System.String key, System.String defValue)
    {
        settings[key] = defValue;
        //System.Configuration.ConfigurationManager.AppSettings.Set(key, defValue); // {{Aroush-2.3.1}} try this instead
    }
    /// <summary>
    /// Stores a bool value under the given key.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">The value to store.</param>
    public static void Set(System.String key, bool defValue)
    {
        settings[key] = defValue;
        //System.Configuration.ConfigurationManager.AppSettings.Set(key, defValue); // {{Aroush-2.3.1}} try this instead
    }
    /// <summary>
    /// Reads an int setting: cached value first, then the configuration
    /// file, then the supplied default.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">Returned when the key is unknown everywhere.</param>
    /// <returns>The resolved value.</returns>
    public static int Get(System.String key, int defValue)
    {
        object cached = settings[key];
        if (cached != null)
            return (int)cached;
        System.String raw = System.Configuration.ConfigurationManager.AppSettings.Get(key);
        if (raw == null)
            return defValue;
        int parsed = System.Convert.ToInt32(raw.Trim());
        settings[key] = parsed;
        return parsed;
    }
    /// <summary>
    /// Reads a long setting: cached value first, then the configuration
    /// file, then the supplied default.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">Returned when the key is unknown everywhere.</param>
    /// <returns>The resolved value.</returns>
    public static long Get(System.String key, long defValue)
    {
        object cached = settings[key];
        if (cached != null)
            return (long)cached;
        System.String raw = System.Configuration.ConfigurationManager.AppSettings.Get(key);
        if (raw == null)
            return defValue;
        long parsed = System.Convert.ToInt64(raw.Trim());
        settings[key] = parsed;
        return parsed;
    }
    /// <summary>
    /// Reads a string setting: cached value first, then the configuration
    /// file, then the supplied default.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">Returned when the key is unknown everywhere.</param>
    /// <returns>The resolved value.</returns>
    public static System.String Get(System.String key, System.String defValue)
    {
        object cached = settings[key];
        if (cached != null)
            return (System.String)cached;
        System.String raw = System.Configuration.ConfigurationManager.AppSettings.Get(key);
        if (raw == null)
            return defValue;
        settings[key] = raw;
        return raw;
    }
    /// <summary>
    /// Reads a bool setting: cached value first, then the configuration
    /// file, then the supplied default.
    /// </summary>
    /// <param name="key">The setting name.</param>
    /// <param name="defValue">Returned when the key is unknown everywhere.</param>
    /// <returns>The resolved value.</returns>
    public static bool Get(System.String key, bool defValue)
    {
        object cached = settings[key];
        if (cached != null)
            return (bool)cached;
        System.String raw = System.Configuration.ConfigurationManager.AppSettings.Get(key);
        if (raw == null)
            return defValue;
        bool parsed = System.Convert.ToBoolean(raw.Trim());
        settings[key] = parsed;
        return parsed;
    }
}
/// <summary>
/// This class provides supporting methods of java.util.BitSet
/// that are not present in System.Collections.BitArray.
/// </summary>
public class BitSetSupport
{
    /// <summary>
    /// Returns the index of the first set bit at or after docId,
    /// or -1 when no set bit remains.
    /// </summary>
    /// <param name="bitArray">The bit array to scan.</param>
    /// <param name="docId">The index at which scanning starts (inclusive).</param>
    /// <returns>The index of the next set bit, or -1.</returns>
    public static int NextSetBit(System.Collections.BitArray bitArray, int docId)
    {
        for (int index = docId; index < bitArray.Length; index++)
        {
            if (bitArray.Get(index))
                return index;
        }
        return -1;
    }
    // Static utility holder; not meant to be instantiated.
    private BitSetSupport() { }
}
/// <summary>
/// Equality helpers for arrays (test support).
/// </summary>
public class Compare
{
    /// <summary>
    /// Compares two Term arrays for equality.
    /// </summary>
    /// <param name="t1">First Term array to compare</param>
    /// <param name="t2">Second Term array to compare</param>
    /// <returns>true if the Terms are equal in both arrays, false otherwise</returns>
    public static bool CompareTermArrays(Lucene.Net.Index.Term[] t1, Lucene.Net.Index.Term[] t2)
    {
        if (t1.Length != t2.Length)
            return false;
        for (int i = 0; i < t1.Length; i++)
        {
            // Bug fix: the original returned true as soon as ONE pair
            // compared equal (and false only when none did). For array
            // equality, ALL pairs must match; bail out on the first
            // mismatch instead.
            if (t1[i].CompareTo(t2[i]) != 0)
            {
                return false;
            }
        }
        return true;
    }
    /// <summary>
    /// Compares two string arrays for equality.
    /// </summary>
    /// <param name="l1">First string array list to compare</param>
    /// <param name="l2">Second string array list to compare</param>
    /// <returns>true if the strings are equal in both arrays, false otherwise</returns>
    public static bool CompareStringArrays(System.String[] l1, System.String[] l2)
    {
        if (l1.Length != l2.Length)
            return false;
        for (int i = 0; i < l1.Length; i++)
        {
            if (l1[i] != l2[i])
                return false;
        }
        return true;
    }
}
/// <summary>
/// Use for .NET 1.1 Framework only.
/// </summary>
public class CompressionSupport
{
    public interface ICompressionAdapter
    {
        byte[] Compress(byte[] input, int offset, int length);
        byte[] Uncompress(byte[] input);
    }
#if SHARP_ZIP_LIB
    private static ICompressionAdapter compressionAdapter = new Lucene.Net.Index.Compression.SharpZipLibAdapter();
#else
    // Loaded lazily from configuration by CheckCompressionSupport().
    private static ICompressionAdapter compressionAdapter;
#endif
    /// <summary>
    /// Decompresses the given data via the configured adapter.
    /// </summary>
    /// <param name="input">The compressed bytes.</param>
    /// <returns>The decompressed bytes.</returns>
    public static byte[] Uncompress(byte[] input)
    {
        CheckCompressionSupport();
        return compressionAdapter.Uncompress(input);
    }
    /// <summary>
    /// Compresses a slice of the given data via the configured adapter.
    /// </summary>
    /// <param name="input">The source bytes.</param>
    /// <param name="offset">Start of the slice to compress.</param>
    /// <param name="length">Length of the slice to compress.</param>
    /// <returns>The compressed bytes.</returns>
    public static byte[] Compress(byte[] input, int offset, int length)
    {
        CheckCompressionSupport();
        return compressionAdapter.Compress(input, offset, length);
    }
    // Lazily instantiates the adapter named by the
    // "Lucene.Net.CompressionLib.class" application setting.
    private static void CheckCompressionSupport()
    {
        if (compressionAdapter != null)
            return;
        System.String compressionLibClassName = SupportClass.AppSettings.Get("Lucene.Net.CompressionLib.class", null);
        if (compressionLibClassName == null)
            throw new System.SystemException("Compression support not configured");
        Type compressionLibClass = Type.GetType(compressionLibClassName, true);
        object adapterInstance = Activator.CreateInstance(compressionLibClass);
        compressionAdapter = adapterInstance as ICompressionAdapter;
        if (compressionAdapter == null)
            throw new System.SystemException("Compression adapter does not support the ICompressionAdapter interface");
    }
}
#region WEAKHASHTABLE
/// <summary>
/// A Hashtable which holds weak references to its keys so they
/// can be collected during GC.
/// </summary>
[System.Diagnostics.DebuggerDisplay("Count = {Values.Count}")]
public class WeakHashTable : Hashtable, IEnumerable
{
    /// <summary>
    /// A weak reference wrapper for the hashtable keys. Whenever a key\value pair
    /// is added to the hashtable, the key is wrapped using a WeakKey. WeakKey saves the
    /// value of the original object hashcode for fast comparison.
    /// </summary>
    class WeakKey : WeakReference
    {
        // Hash code captured at construction so lookups keep working
        // (and stay stable) even after the target has been collected.
        int hashCode;
        public WeakKey(object key)
            : base(key)
        {
            if (key == null)
                throw new ArgumentNullException("key");
            hashCode = key.GetHashCode();
        }
        public override int GetHashCode()
        {
            return hashCode;
        }
    }
    /// <summary>
    /// A Dictionary enumerator which wraps the original hashtable enumerator
    /// and performs 2 tasks: Extract the real key from a WeakKey and skip keys
    /// that were already collected.
    /// </summary>
    class WeakDictionaryEnumerator : IDictionaryEnumerator
    {
        IDictionaryEnumerator baseEnumerator;
        // Strong references to the current pair; holding the key here also
        // keeps it alive for the duration of the enumeration step.
        object currentKey;
        object currentValue;
        public WeakDictionaryEnumerator(IDictionaryEnumerator baseEnumerator)
        {
            this.baseEnumerator = baseEnumerator;
        }
        public DictionaryEntry Entry
        {
            get
            {
                return new DictionaryEntry(this.currentKey, this.currentValue);
            }
        }
        public object Key
        {
            get
            {
                return this.currentKey;
            }
        }
        public object Value
        {
            get
            {
                return this.currentValue;
            }
        }
        public object Current
        {
            get
            {
                return Entry;
            }
        }
        public bool MoveNext()
        {
            // Advance until a pair whose key is still alive is found;
            // collected keys (Target == null) are silently skipped.
            while (baseEnumerator.MoveNext())
            {
                object key = ((WeakKey)baseEnumerator.Key).Target;
                if (key != null)
                {
                    this.currentKey = key;
                    this.currentValue = baseEnumerator.Value;
                    return true;
                }
            }
            return false;
        }
        public void Reset()
        {
            baseEnumerator.Reset();
            this.currentKey = null;
            this.currentValue = null;
        }
    }
    /// <summary>
    /// Serves as a simple "GC Monitor" that indicates whether cleanup is needed.
    /// If collectableObject.IsAlive is false, GC has occurred and we should perform cleanup
    /// </summary>
    WeakReference collectableObject = new WeakReference(new Object());
    /// <summary>
    /// Customize the hashtable lookup process by overriding KeyEquals. KeyEquals
    /// will compare both WeakKey to WeakKey and WeakKey to real keys
    /// </summary>
    protected override bool KeyEquals(object x, object y)
    {
        if (x == y)
            return true;
        // Unwrap either side if it is a WeakKey; a collected target
        // (null) can never equal anything.
        if (x is WeakKey)
        {
            x = ((WeakKey)x).Target;
            if (x == null)
                return false;
        }
        if (y is WeakKey)
        {
            y = ((WeakKey)y).Target;
            if (y == null)
                return false;
        }
        return x.Equals(y);
    }
    // Hashtable hook: WeakKey overrides GetHashCode to return the cached
    // hash of its (possibly collected) target, so buckets stay consistent.
    protected override int GetHash(object key)
    {
        return key.GetHashCode();
    }
    /// <summary>
    /// Perform cleanup if GC occurred
    /// </summary>
    private void CleanIfNeeded()
    {
        // The sentinel's target disappears only after a GC has run;
        // that is the trigger for purging dead keys.
        if (collectableObject.Target == null)
        {
            Clean();
            collectableObject = new WeakReference(new Object());
        }
    }
    /// <summary>
    /// Iterate over all keys and remove keys that were collected
    /// </summary>
    private void Clean()
    {
        // Collect dead keys first, then remove them — removing while
        // enumerating base.Keys would invalidate the enumerator.
        ArrayList keysToDelete = new ArrayList();
        foreach (WeakKey wtk in base.Keys)
        {
            if (!wtk.IsAlive)
            {
                keysToDelete.Add(wtk);
            }
        }
        foreach (WeakKey wtk in keysToDelete)
            Remove(wtk);
    }
    /// <summary>
    /// Wrap each key with a WeakKey and add it to the hashtable
    /// </summary>
    public override void Add(object key, object value)
    {
        CleanIfNeeded();
        base.Add(new WeakKey(key), value);
    }
    // Wraps the base enumerator so callers see real keys and never
    // see entries whose keys were collected.
    public override IDictionaryEnumerator GetEnumerator()
    {
        return new WeakDictionaryEnumerator(base.GetEnumerator());
    }
    /// <summary>
    /// Create a temporary copy of the real keys and return that
    /// </summary>
    public override ICollection Keys
    {
        get
        {
            ArrayList keys = new ArrayList(Count);
            foreach (WeakKey key in base.Keys)
            {
                object realKey = key.Target;
                if (realKey != null)
                    keys.Add(realKey);
            }
            return keys;
        }
    }
    // Lookup works with the raw key because KeyEquals/GetHash above make
    // raw keys and WeakKey wrappers interchangeable; stores wrap the key.
    public override object this[object key]
    {
        get
        {
            return base[key];
        }
        set
        {
            CleanIfNeeded();
            base[new WeakKey(key)] = value;
        }
    }
    // Copies DictionaryEntry items (real keys, dead entries skipped)
    // into the destination array.
    public override void CopyTo(Array array, int index)
    {
        int arrayIndex = index;
        foreach (DictionaryEntry de in this)
        {
            array.SetValue(de, arrayIndex++);
        }
    }
    // NOTE(review): Count purges dead keys first but still counts entries
    // whose keys die between the purge and the read — it is an upper bound.
    public override int Count
    {
        get
        {
            CleanIfNeeded();
            return base.Count;
        }
    }
    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }
}
#endregion
/// <summary>
/// Supplies the hash algorithm Lucene.Net uses internally, honoring an
/// optional FIPS-compliance switch.
/// </summary>
public class Cryptography
{
    // When true, a FIPS-certified algorithm (SHA-1) is returned instead of MD5.
    static public bool FIPSCompliant = false;
    /// <summary>
    /// Creates a new hash algorithm instance: SHA-1 when FIPS compliance
    /// is requested, MD5 otherwise.
    /// </summary>
    /// <returns>A freshly created HashAlgorithm; the caller owns it.</returns>
    static public System.Security.Cryptography.HashAlgorithm GetHashAlgorithm()
    {
        //LUCENENET-175
        //No Assumptions should be made on the HashAlgorithm. It may change in time.
        //SHA256 SHA384 SHA512 etc.
        return FIPSCompliant
            ? (System.Security.Cryptography.HashAlgorithm)System.Security.Cryptography.SHA1.Create()
            : System.Security.Cryptography.MD5.Create();
    }
}
/// <summary>
/// Mimics Java's FileDescriptor.sync(): forces buffered writes for a
/// file stream out toward the storage device.
/// </summary>
public class FileStream
{
    /// <summary>
    /// Flushes the stream's internal buffer AND asks the operating system
    /// to write its file buffers to disk.
    /// </summary>
    /// <param name="fs">The open, writable file stream to synchronize.</param>
    public static void Sync(System.IO.FileStream fs)
    {
        // Flush(true) also flushes the OS file buffers (the managed
        // equivalent of the FlushFileBuffers P/Invoke this class
        // previously sketched in commented-out code). A bare Flush()
        // only empties the .NET-level buffer, which does not give the
        // durability guarantee "Sync" promises.
        fs.Flush(true);
    }
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using OpenSim.Framework.Servers.HttpServer;
using System;
using System.Collections.Generic;
using System.Net;
using System.Text;
using System.Threading;
namespace OpenSim.Framework.Servers
{
public class MainServer
{
    // private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
    // The "main" server: the one that listens on the default port.
    private static BaseHttpServer instance = null;
    // All registered servers, keyed by listen port; guarded by m_ServersRwLock.
    private static Dictionary<uint, BaseHttpServer> m_Servers = new Dictionary<uint, BaseHttpServer>();
    private static ReaderWriterLock m_ServersRwLock = new ReaderWriterLock();
    private static int s_debugLevel;
    /// <summary>
    /// Control the printing of certain debug messages.
    /// </summary>
    /// <remarks>
    /// If DebugLevel >= 1 then short warnings are logged when receiving bad input data.
    /// If DebugLevel >= 2 then long warnings are logged when receiving bad input data.
    /// If DebugLevel >= 3 then short notices about all incoming non-poll HTTP requests are logged.
    /// If DebugLevel >= 4 then the time taken to fulfill the request is logged.
    /// If DebugLevel >= 5 then the start of the body of incoming non-poll HTTP requests will be logged.
    /// If DebugLevel >= 6 then the entire body of incoming non-poll HTTP requests will be logged.
    /// Setting this property propagates the level to every registered server.
    /// </remarks>
    public static int DebugLevel
    {
        get { return s_debugLevel; }
        set
        {
            s_debugLevel = value;
            m_ServersRwLock.AcquireReaderLock(-1);
            try
            {
                foreach (BaseHttpServer server in m_Servers.Values)
                    server.DebugLevel = s_debugLevel;
            }
            finally
            {
                m_ServersRwLock.ReleaseReaderLock();
            }
        }
    }
    /// <summary>
    /// Set the main HTTP server instance.
    /// </summary>
    /// <remarks>
    /// This will be used to register all handlers that listen to the default port.
    /// </remarks>
    /// <exception cref='Exception'>
    /// Thrown if the HTTP server has not already been registered via AddHttpServer()
    /// </exception>
    public static BaseHttpServer Instance
    {
        get { return instance; }
        set
        {
            m_ServersRwLock.AcquireWriterLock(-1);
            try
            {
                if (!m_Servers.ContainsValue(value))
                    throw new Exception("HTTP server must already have been registered to be set as the main instance");
                instance = value;
            }
            finally
            {
                m_ServersRwLock.ReleaseWriterLock();
            }
        }
    }
    /// <summary>
    /// Get all the registered servers.
    /// </summary>
    /// <remarks>
    /// Returns a copy of the dictionary so this can be iterated through without locking.
    /// </remarks>
    /// <value></value>
    public static Dictionary<uint, BaseHttpServer> Servers
    {
        get { return new Dictionary<uint, BaseHttpServer>(m_Servers); }
    }
    /// <summary>
    /// Register an already started HTTP server to the collection of known servers.
    /// </summary>
    /// <param name='server'></param>
    /// <exception cref='Exception'>Thrown when a server is already registered for the port.</exception>
    public static void AddHttpServer(BaseHttpServer server)
    {
        m_ServersRwLock.AcquireWriterLock(-1);
        try
        {
            if (m_Servers.ContainsKey(server.Port))
                throw new Exception(string.Format("HTTP server for port {0} already exists.", server.Port));
            m_Servers.Add(server.Port, server);
        }
        finally
        {
            m_ServersRwLock.ReleaseWriterLock();
        }
    }
    /// <summary>
    /// Does this collection of servers contain one with the given port?
    /// </summary>
    /// <remarks>
    /// Unlike GetHttpServer, this will not instantiate a server if one does not exist on that port.
    /// </remarks>
    /// <param name='port'></param>
    /// <returns>true if a server with the given port is registered, false otherwise.</returns>
    public static bool ContainsHttpServer(uint port)
    {
        m_ServersRwLock.AcquireReaderLock(-1);
        try
        {
            return m_Servers.ContainsKey(port);
        }
        finally
        {
            m_ServersRwLock.ReleaseReaderLock();
        }
    }
    /// <summary>
    /// Get the default http server or an http server for a specific port.
    /// </summary>
    /// <remarks>
    /// If the requested HTTP server doesn't already exist then a new one is instantiated and started.
    /// </remarks>
    /// <returns></returns>
    /// <param name='port'>If 0 then the default HTTP server is returned.</param>
    public static IHttpServer GetHttpServer(uint port)
    {
        return GetHttpServer(port, null);
    }
    /// <summary>
    /// Get the default http server, an http server for a specific port
    /// and/or an http server bound to a specific address
    /// </summary>
    /// <remarks>
    /// If the requested HTTP server doesn't already exist then a new one is instantiated and started.
    /// </remarks>
    /// <returns></returns>
    /// <param name='port'>If 0 then the default HTTP server is returned.</param>
    /// <param name='ipaddr'>A specific IP address to bind to. If null then the default IP address is used.</param>
    public static IHttpServer GetHttpServer(uint port, IPAddress ipaddr)
    {
        if (port == 0)
            return Instance;
        if (instance != null && port == Instance.Port)
            return Instance;
        m_ServersRwLock.AcquireReaderLock(-1);
        try
        {
            if (m_Servers.ContainsKey(port))
                return m_Servers[port];
            LockCookie lc = m_ServersRwLock.UpgradeToWriterLock(-1);
            try
            {
                // Bug fix: UpgradeToWriterLock releases the reader lock
                // while waiting for the writer lock, so another thread may
                // have registered this port in the meantime. Re-check under
                // the writer lock before creating (and starting) a server,
                // otherwise a second server could be created for the same
                // port and silently overwrite the first.
                BaseHttpServer server;
                if (!m_Servers.TryGetValue(port, out server))
                {
                    server = new BaseHttpServer(port);
                    if (ipaddr != null)
                        server.ListenIPAddress = ipaddr;
                    m_Servers[port] = server;
                    server.Start();
                }
                return server;
            }
            finally
            {
                m_ServersRwLock.DowngradeFromWriterLock(ref lc);
            }
        }
        finally
        {
            m_ServersRwLock.ReleaseReaderLock();
        }
    }
    /// <summary>
    /// Register the "show http-handlers" and "debug http" console commands.
    /// </summary>
    /// <param name='console'>Console to register the commands on.</param>
    public static void RegisterHttpConsoleCommands(ICommandConsole console)
    {
        console.Commands.AddCommand(
            "Comms", false, "show http-handlers",
            "show http-handlers",
            "Show all registered http handlers", HandleShowHttpHandlersCommand);
        console.Commands.AddCommand(
            "Debug", false, "debug http", "debug http <in|out|all> [<level>]",
            "Turn on http request logging.",
            "If in or all and\n"
                + "  level <= 0 then no extra logging is done.\n"
                + "  level >= 1 then short warnings are logged when receiving bad input data.\n"
                + "  level >= 2 then long warnings are logged when receiving bad input data.\n"
                + "  level >= 3 then short notices about all incoming non-poll HTTP requests are logged.\n"
                + "  level >= 4 then the time taken to fulfill the request is logged.\n"
                + "  level >= 5 then a sample from the beginning of the data is logged.\n"
                + "  level >= 6 then the entire data is logged.\n"
                + "  no level is specified then the current level is returned.\n\n"
                + "If out or all and\n"
                + "  level >= 3 then short notices about all outgoing requests going through WebUtil are logged.\n"
                + "  level >= 4 then the time taken to fulfill the request is logged.\n"
                + "  level >= 5 then a sample from the beginning of the data is logged.\n"
                + "  level >= 6 then the entire data is logged.\n",
            HandleDebugHttpCommand);
    }
    /// <summary>
    /// Removes the http server listening on the given port.
    /// </summary>
    /// <remarks>
    /// It is the responsibility of the caller to do clean up.
    /// </remarks>
    /// <param name='port'></param>
    /// <returns></returns>
    public static bool RemoveHttpServer(uint port)
    {
        m_ServersRwLock.AcquireWriterLock(-1);
        try
        {
            // Clear the main-instance reference too when it is the one removed.
            if (instance != null && instance.Port == port)
                instance = null;
            return m_Servers.Remove(port);
        }
        finally
        {
            m_ServersRwLock.ReleaseWriterLock();
        }
    }
    /// <summary>
    /// Turn on some debugging values for OpenSim.
    /// </summary>
    /// <param name="module">Invoking module (unused).</param>
    /// <param name="cmdparams">Parsed console arguments: "debug http &lt;in|out|all&gt; [level]".</param>
    private static void HandleDebugHttpCommand(string module, string[] cmdparams)
    {
        if (cmdparams.Length < 3)
        {
            MainConsole.Instance.Output("Usage: debug http <in|out|all> 0..6");
            return;
        }
        bool inReqs = false;
        bool outReqs = false;
        bool allReqs = false;
        string subCommand = cmdparams[2];
        if (subCommand.ToLower() == "in")
        {
            inReqs = true;
        }
        else if (subCommand.ToLower() == "out")
        {
            outReqs = true;
        }
        else if (subCommand.ToLower() == "all")
        {
            allReqs = true;
        }
        else
        {
            MainConsole.Instance.Output("You must specify in, out or all");
            return;
        }
        if (cmdparams.Length >= 4)
        {
            string rawNewDebug = cmdparams[3];
            int newDebug;
            if (!int.TryParse(rawNewDebug, out newDebug))
            {
                MainConsole.Instance.OutputFormat("{0} is not a valid debug level", rawNewDebug);
                return;
            }
            if (newDebug < 0 || newDebug > 6)
            {
                MainConsole.Instance.OutputFormat("{0} is outside the valid debug level range of 0..6", newDebug);
                return;
            }
            if (allReqs || inReqs)
            {
                MainServer.DebugLevel = newDebug;
                MainConsole.Instance.OutputFormat("IN debug level set to {0}", newDebug);
            }
            if (allReqs || outReqs)
            {
                WebUtil.DebugLevel = newDebug;
                MainConsole.Instance.OutputFormat("OUT debug level set to {0}", newDebug);
            }
        }
        else
        {
            // No level argument: report the current level(s) instead of setting.
            if (allReqs || inReqs)
                MainConsole.Instance.OutputFormat("Current IN debug level is {0}", MainServer.DebugLevel);
            if (allReqs || outReqs)
                MainConsole.Instance.OutputFormat("Current OUT debug level is {0}", WebUtil.DebugLevel);
        }
    }
    // Console handler: dumps every registered handler key (XMLRPC, HTTP,
    // poll, JSONRPC, LLSD, stream) for each registered server.
    private static void HandleShowHttpHandlersCommand(string module, string[] args)
    {
        if (args.Length != 2)
        {
            MainConsole.Instance.Output("Usage: show http-handlers");
            return;
        }
        StringBuilder handlers = new StringBuilder();
        m_ServersRwLock.AcquireReaderLock(-1);
        try
        {
            foreach (BaseHttpServer httpServer in m_Servers.Values)
            {
                handlers.AppendFormat(
                    "Registered HTTP Handlers for server at {0}:{1}\n", httpServer.ListenIPAddress, httpServer.Port);
                handlers.AppendFormat("* XMLRPC:\n");
                foreach (String s in httpServer.GetXmlRpcHandlerKeys())
                    handlers.AppendFormat("\t{0}\n", s);
                handlers.AppendFormat("* HTTP:\n");
                foreach (String s in httpServer.GetHTTPHandlerKeys())
                    handlers.AppendFormat("\t{0}\n", s);
                handlers.AppendFormat("* HTTP (poll):\n");
                foreach (String s in httpServer.GetPollServiceHandlerKeys())
                    handlers.AppendFormat("\t{0}\n", s);
                handlers.AppendFormat("* JSONRPC:\n");
                foreach (String s in httpServer.GetJsonRpcHandlerKeys())
                    handlers.AppendFormat("\t{0}\n", s);
                // handlers.AppendFormat("* Agent:\n");
                // foreach (String s in httpServer.GetAgentHandlerKeys())
                //     handlers.AppendFormat("\t{0}\n", s);
                handlers.AppendFormat("* LLSD:\n");
                foreach (String s in httpServer.GetLLSDHandlerKeys())
                    handlers.AppendFormat("\t{0}\n", s);
                handlers.AppendFormat("* StreamHandlers ({0}):\n", httpServer.GetStreamHandlerKeys().Count);
                foreach (String s in httpServer.GetStreamHandlerKeys())
                    handlers.AppendFormat("\t{0}\n", s);
                handlers.Append("\n");
            }
        }
        finally
        {
            m_ServersRwLock.ReleaseReaderLock();
        }
        MainConsole.Instance.Output(handlers.ToString());
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Reflection;
using UnityEngine;
namespace InControl
{
public class InputManager
{
    public static readonly VersionInfo Version = VersionInfo.InControlVersion();
    public static readonly VersionInfo UnityVersion = VersionInfo.UnityVersion();

    // Fired once right after setup completes, then cleared.
    public static event Action OnSetup;
    // Fired every update with the current tick and the frame's delta time.
    public static event Action<ulong,float> OnUpdate;
    public static event Action<InputDevice> OnDeviceAttached;
    public static event Action<InputDevice> OnDeviceDetached;
    public static event Action<InputDevice> OnActiveDeviceChanged;

    static List<InputDeviceManager> inputDeviceManagers = new List<InputDeviceManager>();

    static InputDevice activeDevice = InputDevice.Null;
    static List<InputDevice> devices = new List<InputDevice>();
    // Public read-only view over the internal device list; (re)created in SetupInternal().
    public static ReadOnlyCollection<InputDevice> Devices;

    // Uppercased "<operating system> <device model>" string, set during setup.
    public static string Platform { get; private set; }
    // True when any device reported its menu button pressed this update.
    public static bool MenuWasPressed { get; private set; }

    public static bool InvertYAxis;

    static bool enableXInput;
    static bool isSetup;

    // Timekeeping: currentTime is seconds since setup, currentTick counts updates.
    static float initialTime;
    static float currentTime;
    static float lastUpdateTime;
    static ulong currentTick;

    /// <summary>
    /// DEPRECATED: Use the InControlManager component instead.
    /// </summary>
    [Obsolete( "Calling InputManager.Setup() manually is deprecated. Use the InControlManager component instead." )]
    public static void Setup()
    {
        SetupInternal();
    }

    // Initializes all static state. Subsequent calls are no-ops until
    // ResetInternal() clears the isSetup flag.
    internal static void SetupInternal()
    {
        if (isSetup)
        {
            return;
        }

        Platform = (SystemInfo.operatingSystem + " " + SystemInfo.deviceModel).ToUpper();

        initialTime = 0.0f;
        currentTime = 0.0f;
        lastUpdateTime = 0.0f;
        currentTick = 0;

        inputDeviceManagers.Clear();
        devices.Clear();
        Devices = new ReadOnlyCollection<InputDevice>( devices );
        activeDevice = InputDevice.Null;

        isSetup = true;

#if UNITY_STANDALONE_WIN || UNITY_EDITOR
        // XInput is opt-in; EnableXInput must be set before setup runs.
        if (enableXInput)
        {
            XInputDeviceManager.Enable();
        }
#endif

        // OnSetup subscribers are notified exactly once, then released.
        if (OnSetup != null)
        {
            OnSetup.Invoke();
            OnSetup = null;
        }

        var addUnityInputDeviceManager = true;

#if UNITY_ANDROID && INCONTROL_OUYA && !UNITY_EDITOR
        // OUYA builds skip the default Unity device manager.
        addUnityInputDeviceManager = false;
#endif

        if (addUnityInputDeviceManager)
        {
            AddDeviceManager<UnityInputDeviceManager>();
        }
    }

    /// <summary>
    /// DEPRECATED: Use the InControlManager component instead.
    /// </summary>
    [Obsolete( "Calling InputManager.Reset() manually is deprecated. Use the InControlManager component instead." )]
    public static void Reset()
    {
        ResetInternal();
    }

    // Clears all events, managers and devices, and marks the manager as
    // uninitialized so SetupInternal() can run again.
    internal static void ResetInternal()
    {
        OnSetup = null;
        OnUpdate = null;
        OnActiveDeviceChanged = null;
        OnDeviceAttached = null;
        OnDeviceDetached = null;

        inputDeviceManagers.Clear();
        devices.Clear();
        activeDevice = InputDevice.Null;

        isSetup = false;
    }

    // Throws if SetupInternal() has not been called yet.
    static void AssertIsSetup()
    {
        if (!isSetup)
        {
            throw new Exception( "InputManager is not initialized. Call InputManager.Setup() first." );
        }
    }

    /// <summary>
    /// DEPRECATED: Use the InControlManager component instead.
    /// </summary>
    [Obsolete( "Calling InputManager.Update() manually is deprecated. Use the InControlManager component instead." )]
    public static void Update()
    {
        UpdateInternal();
    }

    // Runs one full input update: device managers first, then device
    // pre/main/post updates, then active-device detection.
    internal static void UpdateInternal()
    {
        AssertIsSetup();

        // Subscribers that attached after setup still get notified once.
        if (OnSetup != null)
        {
            OnSetup.Invoke();
            OnSetup = null;
        }

        currentTick++;
        UpdateCurrentTime();
        var deltaTime = currentTime - lastUpdateTime;

        UpdateDeviceManagers( deltaTime );

        PreUpdateDevices( deltaTime );
        UpdateDevices( deltaTime );
        PostUpdateDevices( deltaTime );

        UpdateActiveDevice();

        lastUpdateTime = currentTime;
    }

    // Zeroes every control on every device when the application loses focus,
    // so inputs don't appear "stuck" when focus returns.
    internal static void OnApplicationFocus( bool focusState )
    {
        if (!focusState)
        {
            int deviceCount = devices.Count;
            for (int i = 0; i < deviceCount; i++)
            {
                var inputControls = devices[i].Controls;
                var inputControlCount = inputControls.Length;
                for (int j = 0; j < inputControlCount; j++)
                {
                    var inputControl = inputControls[j];
                    if (inputControl != null)
                    {
                        inputControl.SetZeroTick();
                    }
                }
            }
        }
    }

    internal static void OnApplicationPause( bool pauseState )
    {
    }

    internal static void OnApplicationQuit()
    {
    }

    // Promotes the most recently changed device to ActiveDevice and raises
    // OnActiveDeviceChanged when it differs from the previous one.
    static void UpdateActiveDevice()
    {
        var lastActiveDevice = ActiveDevice;

        int deviceCount = devices.Count;
        for (int i = 0; i < deviceCount; i++)
        {
            var inputDevice = devices[i];
            if (ActiveDevice == InputDevice.Null ||
                inputDevice.LastChangedAfter( ActiveDevice ))
            {
                ActiveDevice = inputDevice;
            }
        }

        if (lastActiveDevice != ActiveDevice)
        {
            if (OnActiveDeviceChanged != null)
            {
                OnActiveDeviceChanged( ActiveDevice );
            }
        }
    }

    // Registers a device manager and gives it an immediate update so its
    // devices can attach right away.
    public static void AddDeviceManager( InputDeviceManager inputDeviceManager )
    {
        AssertIsSetup();

        inputDeviceManagers.Add( inputDeviceManager );
        inputDeviceManager.Update( currentTick, currentTime - lastUpdateTime );
    }

    // Adds a device manager of type T unless one is already registered.
    public static void AddDeviceManager<T>() where T : InputDeviceManager, new()
    {
        if (!HasDeviceManager<T>())
        {
            AddDeviceManager( new T() );
        }
    }

    // True when a device manager of type T (or a subtype) is registered.
    public static bool HasDeviceManager<T>() where T : InputDeviceManager
    {
        int inputDeviceManagerCount = inputDeviceManagers.Count;
        for (int i = 0; i < inputDeviceManagerCount; i++)
        {
            if (inputDeviceManagers[i] is T)
            {
                return true;
            }
        }
        return false;
    }

    static void UpdateCurrentTime()
    {
        // Have to do this hack since Time.realtimeSinceStartup is not set until AFTER Awake().
        if (initialTime < float.Epsilon)
        {
            initialTime = Time.realtimeSinceStartup;
        }
        currentTime = Mathf.Max( 0.0f, Time.realtimeSinceStartup - initialTime );
    }

    static void UpdateDeviceManagers( float deltaTime )
    {
        int inputDeviceManagerCount = inputDeviceManagers.Count;
        for (int i = 0; i < inputDeviceManagerCount; i++)
        {
            var inputDeviceManager = inputDeviceManagers[i];
            inputDeviceManager.Update( currentTick, deltaTime );
        }
    }

    // Resets the per-frame MenuWasPressed flag, then pre-updates each device.
    static void PreUpdateDevices( float deltaTime )
    {
        MenuWasPressed = false;

        int deviceCount = devices.Count;
        for (int i = 0; i < deviceCount; i++)
        {
            var device = devices[i];
            device.PreUpdate( currentTick, deltaTime );
        }
    }

    // Updates each device, then fires the public OnUpdate event.
    static void UpdateDevices( float deltaTime )
    {
        int deviceCount = devices.Count;
        for (int i = 0; i < deviceCount; i++)
        {
            var device = devices[i];
            device.Update( currentTick, deltaTime );
        }

        if (OnUpdate != null)
        {
            OnUpdate.Invoke( currentTick, deltaTime );
        }
    }

    // Post-updates each device and latches MenuWasPressed for this frame.
    static void PostUpdateDevices( float deltaTime )
    {
        int deviceCount = devices.Count;
        for (int i = 0; i < deviceCount; i++)
        {
            var device = devices[i];
            device.PostUpdate( currentTick, deltaTime );

            if (device.MenuWasPressed)
            {
                MenuWasPressed = true;
            }
        }
    }

    // Adds a device (if supported on this platform), keeps the list sorted by
    // SortOrder, raises OnDeviceAttached, and makes it active if none was.
    public static void AttachDevice( InputDevice inputDevice )
    {
        AssertIsSetup();

        if (!inputDevice.IsSupportedOnThisPlatform)
        {
            return;
        }

        devices.Add( inputDevice );
        devices.Sort( ( d1, d2 ) => d1.SortOrder.CompareTo( d2.SortOrder ) );

        if (OnDeviceAttached != null)
        {
            OnDeviceAttached( inputDevice );
        }

        if (ActiveDevice == InputDevice.Null)
        {
            ActiveDevice = inputDevice;
        }
    }

    // Removes a device, clears ActiveDevice if it was the one removed, and
    // raises OnDeviceDetached.
    public static void DetachDevice( InputDevice inputDevice )
    {
        AssertIsSetup();

        devices.Remove( inputDevice );
        devices.Sort( ( d1, d2 ) => d1.SortOrder.CompareTo( d2.SortOrder ) );

        if (ActiveDevice == inputDevice)
        {
            ActiveDevice = InputDevice.Null;
        }

        if (OnDeviceDetached != null)
        {
            OnDeviceDetached( inputDevice );
        }
    }

    // Hides all devices whose profile is the given UnityInputDeviceProfile subclass.
    public static void HideDevicesWithProfile( Type type )
    {
#if !UNITY_EDITOR && UNITY_WINRT
        if (type.GetTypeInfo().IsAssignableFrom( typeof( UnityInputDeviceProfile ).GetTypeInfo() ))
#else
        if (type.IsSubclassOf( typeof(UnityInputDeviceProfile) ))
#endif
        {
            UnityInputDeviceProfile.Hide( type );
        }
    }

    // First attached device, or InputDevice.Null when none are attached.
    // NOTE(review): not referenced within this class — possibly kept for API users.
    static InputDevice DefaultActiveDevice
    {
        get
        {
            return (devices.Count > 0) ? devices[0] : InputDevice.Null;
        }
    }

    // Never returns null: null is normalized to InputDevice.Null on both get and set.
    public static InputDevice ActiveDevice
    {
        get
        {
            return (activeDevice == null) ? InputDevice.Null : activeDevice;
        }

        private set
        {
            activeDevice = (value == null) ? InputDevice.Null : value;
        }
    }

    // Opt-in flag for native XInput support; only consulted during setup
    // (Windows standalone / editor builds), so set it before setup runs.
    public static bool EnableXInput
    {
        get
        {
            return enableXInput;
        }

        set
        {
            enableXInput = value;
        }
    }
}
}
| |
// Copyright 2009 Auxilium B.V. - http://www.auxilium.nl/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace JelloScrum.Web.Controllers
{
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using Castle.Components.DictionaryAdapter;
using Castle.MonoRail.ActiveRecordSupport;
using Castle.MonoRail.Framework;
using Container;
using Filter;
using JelloScrum.Repositories.Exceptions;
using Model.IRepositories;
/// <summary>
/// The base controller for JelloScrum.
/// All controllers must inherit from this base controller.
/// </summary>
[Layout("default")]
[Rescue("generalerror")]
[Filter(ExecuteWhen.AfterAction, typeof(TitelFilter))]
public abstract class JelloScrumControllerBase : ARSmartDispatcherController
{
    // Collected error messages for the current request.
    public IList<string> errors;

    private DictionaryAdapterFactory adapterFactory;
    private string titel = string.Empty;

    /// <summary>
    /// Gets an adapterfactory for wrapping dictionaries.
    /// Used for the session and componentparams dictionary in combination with the IDictionary interface.
    /// </summary>
    public DictionaryAdapterFactory AdapterFactory
    {
        get
        {
            // Lazily created on first access.
            if (adapterFactory == null)
                adapterFactory = new DictionaryAdapterFactory();
            return adapterFactory;
        }
    }

    /// <summary>
    /// Title of the page, used by the default layouts.
    /// </summary>
    public string Titel
    {
        get { return titel; }
        set { titel = value; }
    }

    #region Services Properties

    /// <summary>
    /// Gets the user repository.
    /// </summary>
    /// <value>The user repository.</value>
    public static IUserRepository GebruikerRepository
    {
        get { return IoC.Resolve<IUserRepository>(); }
    }

    /// <summary>
    /// Gets the project repository.
    /// </summary>
    /// <value>The project repository.</value>
    public static IProjectRepository ProjectRepository
    {
        get { return IoC.Resolve<IProjectRepository>(); }
    }

    /// <summary>
    /// Gets the project short-list repository.
    /// </summary>
    /// <value>The project short-list repository.</value>
    public static IProjectShortListRepository ProjectShortListRepository
    {
        get { return IoC.Resolve<IProjectShortListRepository>(); }
    }

    /// <summary>
    /// Gets the sprint repository.
    /// </summary>
    /// <value>The sprint repository.</value>
    public static ISprintRepository SprintRepository
    {
        get { return IoC.Resolve<ISprintRepository>(); }
    }

    /// <summary>
    /// Gets the sprint user repository.
    /// </summary>
    /// <value>The sprint user repository.</value>
    public static ISprintUserRepository SprintGebruikerRepository
    {
        get { return IoC.Resolve<ISprintUserRepository>(); }
    }

    /// <summary>
    /// Gets the sprint story repository.
    /// </summary>
    /// <value>The sprint story repository.</value>
    public static ISprintStoryRepository SprintStoryRepository
    {
        get { return IoC.Resolve<ISprintStoryRepository>(); }
    }

    /// <summary>
    /// Gets the story repository.
    /// </summary>
    /// <value>The story repository.</value>
    public static IStoryRepository StoryRepository
    {
        get { return IoC.Resolve<IStoryRepository>(); }
    }

    /// <summary>
    /// Gets the task repository.
    /// </summary>
    /// <value>The task repository.</value>
    public static ITaskRepository TaskRepository
    {
        get { return IoC.Resolve<ITaskRepository>(); }
    }

    #endregion

    #region Generieke meldingen

    /// <summary>
    /// Enables generic error message handling.
    /// </summary>
    /// <param name="errorMessage">The error message.</param>
    protected virtual void AddErrorMessageToPropertyBag(string errorMessage)
    {
        AddMessageToPropertyBag(errorMessage, "errorMessages");
    }

    /// <summary>
    /// Enables generic positive message handling.
    /// </summary>
    /// <param name="positiveMessage">The positive message.</param>
    protected virtual void AddPositiveMessageToPropertyBag(string positiveMessage)
    {
        AddMessageToPropertyBag(positiveMessage, "positiveMessages");
    }

    /// <summary>
    /// Enables generic informational message handling.
    /// </summary>
    /// <param name="infoMessage">The informational message.</param>
    protected virtual void AddInfoMessageToPropertyBag(string infoMessage)
    {
        AddMessageToPropertyBag(infoMessage, "infoMessages");
    }

    /// <summary>
    /// Does the same as AddErrorMessageToPropertyBag but uses the flash bag.
    /// Use this method whenever a RedirectToReferrer is invoked.
    /// </summary>
    /// <param name="errorMessage">The error message.</param>
    protected virtual void AddErrorMessageToFlashBag(string errorMessage)
    {
        AddMessageToFlashBag(errorMessage, "errorMessages");
    }

    /// <summary>
    /// Does the same as AddPositiveMessageToPropertyBag but uses the flash bag.
    /// Use this method whenever a RedirectToReferrer is invoked.
    /// </summary>
    /// <param name="positiveMessage">The positive message.</param>
    protected virtual void AddPositiveMessageToFlashBag(string positiveMessage)
    {
        AddMessageToFlashBag(positiveMessage, "positiveMessages");
    }

    /// <summary>
    /// Does the same as AddInfoMessageToPropertyBag but uses the flash bag.
    /// Use this method whenever a RedirectToReferrer is invoked.
    /// </summary>
    /// <param name="infoMessage">The informational message.</param>
    protected virtual void AddInfoMessageToFlashBag(string infoMessage)
    {
        AddMessageToFlashBag(infoMessage, "infoMessages");
    }

    // Appends a message to the list stored in the PropertyBag under the given
    // key, preserving any messages already present. Empty messages are ignored
    // (existing messages are left untouched in that case).
    private void AddMessageToPropertyBag(string message, string Type)
    {
        List<string> messages = new List<string>();
        if (PropertyBag.Contains(Type) && PropertyBag[Type] != null)
        {
            messages.AddRange((IEnumerable<string>) PropertyBag[Type]);
        }
        if (string.IsNullOrEmpty(message))
            return;
        messages.Add(message);
        PropertyBag[Type] = messages;
    }

    // Same as AddMessageToPropertyBag, but stores the list in the Flash bag so
    // it survives a redirect.
    private void AddMessageToFlashBag(string message, string Type)
    {
        List<string> messages = new List<string>();
        if (Flash.Contains(Type) && Flash[Type] != null)
        {
            messages.AddRange((IEnumerable<string>) Flash[Type]);
        }
        if (string.IsNullOrEmpty(message))
            return;
        messages.Add(message);
        Flash[Type] = messages;
    }

    #endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
//
// This file was autogenerated by a tool.
// Do not modify it.
//
namespace Microsoft.Azure.Batch
{
using Models = Microsoft.Azure.Batch.Protocol.Models;
using System;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// Information used to connect to an Azure Storage Container using Blobfuse.
/// </summary>
public partial class AzureBlobFileSystemConfiguration : ITransportObjectProvider<Models.AzureBlobFileSystemConfiguration>, IPropertyMetadata
{
    // Backing store: one PropertyAccessor per public property, with binding
    // state controlling which properties are writable.
    private class PropertyContainer : PropertyCollection
    {
        public readonly PropertyAccessor<string> AccountKeyProperty;
        public readonly PropertyAccessor<string> AccountNameProperty;
        public readonly PropertyAccessor<string> BlobfuseOptionsProperty;
        public readonly PropertyAccessor<string> ContainerNameProperty;
        public readonly PropertyAccessor<ComputeNodeIdentityReference> IdentityReferenceProperty;
        public readonly PropertyAccessor<string> RelativeMountPathProperty;
        public readonly PropertyAccessor<string> SasKeyProperty;

        // Unbound container: every property is readable and writable.
        public PropertyContainer() : base(BindingState.Unbound)
        {
            this.AccountKeyProperty = this.CreatePropertyAccessor<string>(nameof(AccountKey), BindingAccess.Read | BindingAccess.Write);
            this.AccountNameProperty = this.CreatePropertyAccessor<string>(nameof(AccountName), BindingAccess.Read | BindingAccess.Write);
            this.BlobfuseOptionsProperty = this.CreatePropertyAccessor<string>(nameof(BlobfuseOptions), BindingAccess.Read | BindingAccess.Write);
            this.ContainerNameProperty = this.CreatePropertyAccessor<string>(nameof(ContainerName), BindingAccess.Read | BindingAccess.Write);
            this.IdentityReferenceProperty = this.CreatePropertyAccessor<ComputeNodeIdentityReference>(nameof(IdentityReference), BindingAccess.Read | BindingAccess.Write);
            this.RelativeMountPathProperty = this.CreatePropertyAccessor<string>(nameof(RelativeMountPath), BindingAccess.Read | BindingAccess.Write);
            this.SasKeyProperty = this.CreatePropertyAccessor<string>(nameof(SasKey), BindingAccess.Read | BindingAccess.Write);
        }

        // Bound container: values are snapshots of the protocol object and
        // are read-only.
        public PropertyContainer(Models.AzureBlobFileSystemConfiguration protocolObject) : base(BindingState.Bound)
        {
            this.AccountKeyProperty = this.CreatePropertyAccessor(
                protocolObject.AccountKey,
                nameof(AccountKey),
                BindingAccess.Read);
            this.AccountNameProperty = this.CreatePropertyAccessor(
                protocolObject.AccountName,
                nameof(AccountName),
                BindingAccess.Read);
            this.BlobfuseOptionsProperty = this.CreatePropertyAccessor(
                protocolObject.BlobfuseOptions,
                nameof(BlobfuseOptions),
                BindingAccess.Read);
            this.ContainerNameProperty = this.CreatePropertyAccessor(
                protocolObject.ContainerName,
                nameof(ContainerName),
                BindingAccess.Read);
            this.IdentityReferenceProperty = this.CreatePropertyAccessor(
                UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.IdentityReference, o => new ComputeNodeIdentityReference(o).Freeze()),
                nameof(IdentityReference),
                BindingAccess.Read);
            this.RelativeMountPathProperty = this.CreatePropertyAccessor(
                protocolObject.RelativeMountPath,
                nameof(RelativeMountPath),
                BindingAccess.Read);
            this.SasKeyProperty = this.CreatePropertyAccessor(
                protocolObject.SasKey,
                nameof(SasKey),
                BindingAccess.Read);
        }
    }

    private readonly PropertyContainer propertyContainer;

    #region Constructors

    /// <summary>
    /// Initializes a new instance of the <see cref="AzureBlobFileSystemConfiguration"/> class.
    /// </summary>
    /// <param name='accountName'>The Azure Storage account name.</param>
    /// <param name='containerName'>The Azure Blob Storage Container name.</param>
    /// <param name='relativeMountPath'>The relative path on the compute node where the file system will be mounted.</param>
    /// <param name='accountKey'>The Azure Storage Account key. This property is mutually exclusive with <see cref="SasKey"/>.</param>
    /// <param name='sasKey'>The Azure Storage SAS token. This property is mutually exclusive with <see cref="AccountKey"/>.</param>
    /// <param name='blobfuseOptions'>Additional command line options to pass to the mount command.</param>
    /// <param name='identityReference'>The reference to the user assigned identity to use to access containerName</param>
    internal AzureBlobFileSystemConfiguration(
        string accountName,
        string containerName,
        string relativeMountPath,
        string accountKey = default(string),
        string sasKey = default(string),
        string blobfuseOptions = default(string),
        ComputeNodeIdentityReference identityReference = default(ComputeNodeIdentityReference))
    {
        this.propertyContainer = new PropertyContainer();
        this.AccountName = accountName;
        this.ContainerName = containerName;
        this.RelativeMountPath = relativeMountPath;
        this.AccountKey = accountKey;
        this.SasKey = sasKey;
        this.BlobfuseOptions = blobfuseOptions;
        this.IdentityReference = identityReference;
    }

    /// <summary>
    /// Default constructor to support mocking the <see cref="AzureBlobFileSystemConfiguration"/> class.
    /// </summary>
    protected AzureBlobFileSystemConfiguration()
    {
        this.propertyContainer = new PropertyContainer();
    }

    // Wraps a protocol-layer object in a bound (read-only) container.
    internal AzureBlobFileSystemConfiguration(Models.AzureBlobFileSystemConfiguration protocolObject)
    {
        this.propertyContainer = new PropertyContainer(protocolObject);
    }

    #endregion Constructors

    #region AzureBlobFileSystemConfiguration

    /// <summary>
    /// Gets the Azure Storage Account key. This property is mutually exclusive with <see cref="SasKey"/>.
    /// </summary>
    public string AccountKey
    {
        get { return this.propertyContainer.AccountKeyProperty.Value; }
        private set { this.propertyContainer.AccountKeyProperty.Value = value; }
    }

    /// <summary>
    /// Gets the Azure Storage account name.
    /// </summary>
    public string AccountName
    {
        get { return this.propertyContainer.AccountNameProperty.Value; }
        private set { this.propertyContainer.AccountNameProperty.Value = value; }
    }

    /// <summary>
    /// Gets additional command line options to pass to the mount command.
    /// </summary>
    /// <remarks>
    /// These are 'net use' options in Windows and 'mount' options in Linux.
    /// </remarks>
    public string BlobfuseOptions
    {
        get { return this.propertyContainer.BlobfuseOptionsProperty.Value; }
        private set { this.propertyContainer.BlobfuseOptionsProperty.Value = value; }
    }

    /// <summary>
    /// Gets the Azure Blob Storage Container name.
    /// </summary>
    public string ContainerName
    {
        get { return this.propertyContainer.ContainerNameProperty.Value; }
        private set { this.propertyContainer.ContainerNameProperty.Value = value; }
    }

    /// <summary>
    /// Gets or sets the reference to the user assigned identity to use to access containerName
    /// </summary>
    /// <remarks>
    /// This property is mutually exclusive with both accountKey and sasKey; exactly one must be specified.
    /// </remarks>
    public ComputeNodeIdentityReference IdentityReference
    {
        get { return this.propertyContainer.IdentityReferenceProperty.Value; }
        set { this.propertyContainer.IdentityReferenceProperty.Value = value; }
    }

    /// <summary>
    /// Gets the relative path on the compute node where the file system will be mounted.
    /// </summary>
    /// <remarks>
    /// All file systems are mounted relative to the Batch mounts directory, accessible via the AZ_BATCH_NODE_MOUNTS_DIR
    /// environment variable.
    /// </remarks>
    public string RelativeMountPath
    {
        get { return this.propertyContainer.RelativeMountPathProperty.Value; }
        private set { this.propertyContainer.RelativeMountPathProperty.Value = value; }
    }

    /// <summary>
    /// Gets the Azure Storage SAS token. This property is mutually exclusive with <see cref="AccountKey"/>.
    /// </summary>
    public string SasKey
    {
        get { return this.propertyContainer.SasKeyProperty.Value; }
        private set { this.propertyContainer.SasKeyProperty.Value = value; }
    }

    #endregion // AzureBlobFileSystemConfiguration

    #region IPropertyMetadata

    bool IModifiable.HasBeenModified
    {
        get { return this.propertyContainer.HasBeenModified; }
    }

    bool IReadOnly.IsReadOnly
    {
        get { return this.propertyContainer.IsReadOnly; }
        set { this.propertyContainer.IsReadOnly = value; }
    }

    #endregion //IPropertyMetadata

    #region Internal/private methods

    /// <summary>
    /// Return a protocol object of the requested type.
    /// </summary>
    /// <returns>The protocol object of the requested type.</returns>
    Models.AzureBlobFileSystemConfiguration ITransportObjectProvider<Models.AzureBlobFileSystemConfiguration>.GetTransportObject()
    {
        Models.AzureBlobFileSystemConfiguration result = new Models.AzureBlobFileSystemConfiguration()
        {
            AccountKey = this.AccountKey,
            AccountName = this.AccountName,
            BlobfuseOptions = this.BlobfuseOptions,
            ContainerName = this.ContainerName,
            IdentityReference = UtilitiesInternal.CreateObjectWithNullCheck(this.IdentityReference, (o) => o.GetTransportObject()),
            RelativeMountPath = this.RelativeMountPath,
            SasKey = this.SasKey,
        };

        return result;
    }

    #endregion // Internal/private methods
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text.Encodings.Web;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing.Patterns;
using Microsoft.Extensions.ObjectPool;
namespace Microsoft.AspNetCore.Routing.Template
{
/// <summary>
/// Supports processing and binding parameter values in a route template.
/// </summary>
public class TemplateBinder
{
private readonly UrlEncoder _urlEncoder;
private readonly ObjectPool<UriBuildingContext> _pool;

// (parameterName, constraint) pairs evaluated when producing a URI.
private readonly (string parameterName, IRouteConstraint constraint)[] _constraints;
private readonly RouteValueDictionary? _defaults;
// Defaults that have no corresponding route parameter; a value supplied for
// one of these must match the default.
private readonly KeyValuePair<string, object?>[] _filters;
private readonly (string parameterName, IOutboundParameterTransformer transformer)[] _parameterTransformers;
private readonly RoutePattern _pattern;
// Keys used to decide whether the ambient route values apply.
private readonly string[] _requiredKeys;

// A pre-allocated template for the 'known' route values that this template binder uses.
//
// We always make a copy of this and operate on the copy, so that we don't mutate shared state.
private readonly KeyValuePair<string, object?>[] _slots;
/// <summary>
/// Creates a new instance of <see cref="TemplateBinder"/>.
/// </summary>
/// <param name="urlEncoder">The <see cref="UrlEncoder"/>.</param>
/// <param name="pool">The <see cref="ObjectPool{T}"/>.</param>
/// <param name="template">The <see cref="RouteTemplate"/> to bind values to.</param>
/// <param name="defaults">The default values for <paramref name="template"/>.</param>
internal TemplateBinder(
    UrlEncoder urlEncoder,
    ObjectPool<UriBuildingContext> pool,
    RouteTemplate template,
    RouteValueDictionary defaults)
    // A null template produces a null pattern here; the delegated constructor
    // then throws ArgumentNullException(nameof(pattern)).
    : this(urlEncoder, pool, template?.ToRoutePattern()!, defaults, requiredKeys: null, parameterPolicies: null)
{
}
/// <summary>
/// Creates a new instance of <see cref="TemplateBinder"/>.
/// </summary>
/// <param name="urlEncoder">The <see cref="UrlEncoder"/>.</param>
/// <param name="pool">The <see cref="ObjectPool{T}"/>.</param>
/// <param name="pattern">The <see cref="RoutePattern"/> to bind values to.</param>
/// <param name="defaults">The default values for <paramref name="pattern"/>. Optional.</param>
/// <param name="requiredKeys">Keys used to determine if the ambient values apply. Optional.</param>
/// <param name="parameterPolicies">
/// A list of (<see cref="string"/>, <see cref="IParameterPolicy"/>) pairs to evaluate when producing a URI.
/// </param>
internal TemplateBinder(
    UrlEncoder urlEncoder,
    ObjectPool<UriBuildingContext> pool,
    RoutePattern pattern,
    RouteValueDictionary? defaults,
    IEnumerable<string>? requiredKeys,
    IEnumerable<(string parameterName, IParameterPolicy policy)>? parameterPolicies)
{
    if (urlEncoder == null)
    {
        throw new ArgumentNullException(nameof(urlEncoder));
    }
    if (pool == null)
    {
        throw new ArgumentNullException(nameof(pool));
    }
    if (pattern == null)
    {
        throw new ArgumentNullException(nameof(pattern));
    }

    _urlEncoder = urlEncoder;
    _pool = pool;
    _pattern = pattern;
    _defaults = defaults;
    _requiredKeys = requiredKeys?.ToArray() ?? Array.Empty<string>();

    // Any default that doesn't have a corresponding parameter is a 'filter' and if a value
    // is provided for that 'filter' it must match the value in defaults.
    var filters = new RouteValueDictionary(_defaults);
    for (var i = 0; i < pattern.Parameters.Count; i++)
    {
        filters.Remove(pattern.Parameters[i].Name);
    }
    _filters = filters.ToArray();

    // Split the parameter policies into the two kinds this binder evaluates:
    // route constraints and outbound parameter transformers.
    _constraints = parameterPolicies
        ?.Where(p => p.policy is IRouteConstraint)
        .Select(p => (p.parameterName, (IRouteConstraint)p.policy))
        .ToArray() ?? Array.Empty<(string, IRouteConstraint)>();
    _parameterTransformers = parameterPolicies
        ?.Where(p => p.policy is IOutboundParameterTransformer)
        .Select(p => (p.parameterName, (IOutboundParameterTransformer)p.policy))
        .ToArray() ?? Array.Empty<(string, IOutboundParameterTransformer)>();

    _slots = AssignSlots(_pattern, _filters);
}
// Endpoint-routing constructor: defaults and required keys come from the
// RoutePattern itself rather than being passed separately.
internal TemplateBinder(
    UrlEncoder urlEncoder,
    ObjectPool<UriBuildingContext> pool,
    RoutePattern pattern,
    IEnumerable<(string parameterName, IParameterPolicy policy)> parameterPolicies)
{
    if (urlEncoder == null)
    {
        throw new ArgumentNullException(nameof(urlEncoder));
    }
    if (pool == null)
    {
        throw new ArgumentNullException(nameof(pool));
    }
    if (pattern == null)
    {
        throw new ArgumentNullException(nameof(pattern));
    }

    // Parameter policies can be null.
    _urlEncoder = urlEncoder;
    _pool = pool;
    _pattern = pattern;
    _defaults = new RouteValueDictionary(pattern.Defaults);
    _requiredKeys = pattern.RequiredValues.Keys.ToArray();

    // Any default that doesn't have a corresponding parameter is a 'filter' and if a value
    // is provided for that 'filter' it must match the value in defaults.
    var filters = new RouteValueDictionary(_defaults);
    for (var i = 0; i < pattern.Parameters.Count; i++)
    {
        filters.Remove(pattern.Parameters[i].Name);
    }
    _filters = filters.ToArray();

    // Split the parameter policies into route constraints and outbound
    // parameter transformers, mirroring the other constructor overload.
    _constraints = parameterPolicies
        ?.Where(p => p.policy is IRouteConstraint)
        .Select(p => (p.parameterName, (IRouteConstraint)p.policy))
        .ToArray() ?? Array.Empty<(string, IRouteConstraint)>();
    _parameterTransformers = parameterPolicies
        ?.Where(p => p.policy is IOutboundParameterTransformer)
        .Select(p => (p.parameterName, (IOutboundParameterTransformer)p.policy))
        .ToArray() ?? Array.Empty<(string, IOutboundParameterTransformer)>();

    _slots = AssignSlots(_pattern, _filters);
}
/// <summary>
/// Generates the parameter values in the route.
/// </summary>
/// <param name="ambientValues">The values associated with the current request.</param>
/// <param name="values">The route values to process.</param>
/// <returns>A <see cref="TemplateValuesResult"/> instance. Can be null.</returns>
public TemplateValuesResult? GetValues(RouteValueDictionary? ambientValues, RouteValueDictionary values)
{
// Make a new copy of the slots array, we'll use this as 'scratch' space
// and then the RVD will take ownership of it.
var slots = new KeyValuePair<string, object?>[_slots.Length];
Array.Copy(_slots, 0, slots, 0, slots.Length);
// Keeping track of the number of 'values' we've processed can be used to avoid doing
// some expensive 'merge' operations later.
var valueProcessedCount = 0;
// Start by copying all of the values out of the 'values' and into the slots. There's no success
// case where we *don't* use all of the 'values' so there's no reason not to do this up front
// to avoid visiting the values dictionary again and again.
for (var i = 0; i < slots.Length; i++)
{
var key = slots[i].Key;
if (values.TryGetValue(key, out var value))
{
// We will need to know later if the value in the 'values' was an null value.
// This affects how we process ambient values. Since the 'slots' are initialized
// with null values, we use the null-object-pattern to track 'explicit null', which means that
// null means omitted.
value = IsRoutePartNonEmpty(value) ? value : SentinullValue.Instance;
slots[i] = new KeyValuePair<string, object?>(key, value);
// Track the count of processed values - this allows a fast path later.
valueProcessedCount++;
}
}
// In Endpoint Routing, patterns can have logical parameters that appear 'to the left' of
// the route template. This governs whether or not the template can be selected (they act like
// filters), and whether the remaining ambient values should be used.
// should be used.
// For example, in case of MVC it flattens out a route template like below
// {controller}/{action}/{id?}
// to
// Products/Index/{id?},
// defaults: new { controller = "Products", action = "Index" },
// requiredValues: new { controller = "Products", action = "Index" }
// In the above example, "controller" and "action" are no longer parameters.
var copyAmbientValues = ambientValues != null;
if (copyAmbientValues)
{
var requiredKeys = _requiredKeys;
for (var i = 0; i < requiredKeys.Length; i++)
{
// For each required key, the values and ambient values need to have the same value.
var key = requiredKeys[i];
var hasExplicitValue = values.TryGetValue(key, out var value);
if (ambientValues == null || !ambientValues.TryGetValue(key, out var ambientValue))
{
ambientValue = null;
}
// For now, only check ambient values with required values that don't have a parameter
// Ambient values for parameters are processed below
var hasParameter = _pattern.GetParameter(key) != null;
if (!hasParameter)
{
if (!_pattern.RequiredValues.TryGetValue(key, out var requiredValue))
{
throw new InvalidOperationException($"Unable to find required value '{key}' on route pattern.");
}
if (!RoutePartsEqual(ambientValue, _pattern.RequiredValues[key]) &&
!RoutePattern.IsRequiredValueAny(_pattern.RequiredValues[key]))
{
copyAmbientValues = false;
break;
}
if (hasExplicitValue && !RoutePartsEqual(value, ambientValue))
{
copyAmbientValues = false;
break;
}
}
}
}
// We can now process the rest of the parameters (from left to right) and copy the ambient
// values as long as the conditions are met.
//
// Find out which entries in the URI are valid for the URI we want to generate.
// If the URI had ordered parameters a="1", b="2", c="3" and the new values
// specified that b="9", then we need to invalidate everything after it. The new
// values should then be a="1", b="9", c=<no value>.
//
// We also handle the case where a parameter is optional but has no value - we shouldn't
// accept additional parameters that appear *after* that parameter.
var parameters = _pattern.Parameters;
var parameterCount = _pattern.Parameters.Count;
for (var i = 0; i < parameterCount; i++)
{
var key = slots[i].Key;
var value = slots[i].Value;
// Whether or not the value was explicitly provided is significant when comparing
// ambient values. Remember that we're using a special sentinel value so that we
// can tell the difference between an omitted value and an explicitly specified null.
var hasExplicitValue = value != null;
var hasAmbientValue = false;
var ambientValue = (object?)null;
var parameter = parameters[i];
// We are copying **all** ambient values
if (copyAmbientValues)
{
hasAmbientValue = ambientValues != null && ambientValues.TryGetValue(key, out ambientValue);
if (hasExplicitValue && hasAmbientValue && !RoutePartsEqual(ambientValue, value))
{
// Stop copying current values when we find one that doesn't match
copyAmbientValues = false;
}
if (!hasExplicitValue &&
!hasAmbientValue &&
_defaults?.ContainsKey(parameter.Name) != true)
{
// This is an unsatisfied parameter value and there are no defaults. We might still
// be able to generate a URL but we should stop 'accepting' ambient values.
//
// This might be a case like:
// template: a/{b?}/{c?}
// ambient: { c = 17 }
// values: { }
//
// We can still generate a URL from this ("/a") but we shouldn't accept 'c' because
// we can't use it.
//
// In the example above we should fall into this block for 'b'.
copyAmbientValues = false;
}
}
// This might be an ambient value that matches a required value. We want to use these even if we're
// not bulk-copying ambient values.
//
// This comes up in a case like the following:
// ambient-values: { page = "/DeleteUser", area = "Admin", }
// values: { controller = "Home", action = "Index", }
// pattern: {area}/{controller}/{action}/{id?}
// required-values: { area = "Admin", controller = "Home", action = "Index", page = (string)null, }
//
// OR in plain English... when linking from a page in an area to an action in the same area, it should
// be possible to use the area as an ambient value.
if (!copyAmbientValues && !hasExplicitValue && _pattern.RequiredValues.TryGetValue(key, out var requiredValue))
{
hasAmbientValue = ambientValues != null && ambientValues.TryGetValue(key, out ambientValue);
if (hasAmbientValue &&
(RoutePartsEqual(requiredValue, ambientValue) || RoutePattern.IsRequiredValueAny(requiredValue)))
{
// Treat this as an explicit value to *force it*.
slots[i] = new KeyValuePair<string, object?>(key, ambientValue);
hasExplicitValue = true;
value = ambientValue;
}
}
// If the parameter is a match, add it to the list of values we will use for URI generation
if (hasExplicitValue && !ReferenceEquals(value, SentinullValue.Instance))
{
// Already has a value in the list, do nothing
}
else if (copyAmbientValues && hasAmbientValue)
{
slots[i] = new KeyValuePair<string, object?>(key, ambientValue);
}
else if (parameter.IsOptional || parameter.IsCatchAll)
{
// Value isn't needed for optional or catchall parameters - wipe out the key, so it
// will be omitted from the RVD.
slots[i] = default;
}
else if (_defaults != null && _defaults.TryGetValue(parameter.Name, out var defaultValue))
{
// Add the default value only if there isn't already a new value for it and
// only if it actually has a default value.
slots[i] = new KeyValuePair<string, object?>(key, defaultValue);
}
else
{
// If we get here, this parameter needs a value, but doesn't have one. This is a
// failure case.
return null;
}
}
// Any default values that don't appear as parameters are treated like filters. Any new values
// provided must match these defaults.
var filters = _filters;
for (var i = 0; i < filters.Length; i++)
{
var key = filters[i].Key;
var value = slots[i + parameterCount].Value;
// We use a sentinel value here so we can track the difference between omission and explicit null.
// 'real null' means that the value was omitted.
var hasExplictValue = value != null;
if (hasExplictValue)
{
// If there is a non-parameterized value in the route and there is a
// new value for it and it doesn't match, this route won't match.
if (!RoutePartsEqual(value, filters[i].Value))
{
return null;
}
}
else
{
// If no value was provided, then blank out this slot so that it doesn't show up in accepted values.
slots[i + parameterCount] = default;
}
}
// At this point we've captured all of the 'known' route values, but we haven't
// handled any extra route values that were provided in 'values'. These all
// need to be included in the accepted values.
var acceptedValues = RouteValueDictionary.FromArray(slots);
if (valueProcessedCount < values.Count)
{
// There are some values in 'value' that are unaccounted for, merge them into
// the dictionary.
foreach (var kvp in values)
{
if (!_defaults!.ContainsKey(kvp.Key))
{
#if RVD_TryAdd
acceptedValues.TryAdd(kvp.Key, kvp.Value);
#else
if (!acceptedValues.ContainsKey(kvp.Key))
{
acceptedValues.Add(kvp.Key, kvp.Value);
}
#endif
}
}
}
// Currently this copy is required because BindValues will mutate the accepted values :(
var combinedValues = new RouteValueDictionary(acceptedValues);
// Add any ambient values that don't match parameters - they need to be visible to constraints
// but they will ignored by link generation.
CopyNonParameterAmbientValues(
ambientValues: ambientValues,
acceptedValues: acceptedValues,
combinedValues: combinedValues);
return new TemplateValuesResult()
{
AcceptedValues = acceptedValues,
CombinedValues = combinedValues,
};
}
// Step 1.5: Process constraints
/// <summary>
/// Processes the constraints **if** they were passed in to the TemplateBinder constructor.
/// </summary>
/// <param name="httpContext">The <see cref="HttpContext"/> associated with the current request.</param>
/// <param name="combinedValues">A dictionary that contains the parameters for the route.</param>
/// <param name="parameterName">When this method returns <see langword="false"/>, the name of the parameter whose constraint failed; otherwise <see langword="null"/>.</param>
/// <param name="constraint">When this method returns <see langword="false"/>, the constraint that rejected the values; otherwise <see langword="null"/>.</param>
/// <returns><see langword="true"/> if all constraints were processed successfully and false otherwise.</returns>
public bool TryProcessConstraints(HttpContext? httpContext, RouteValueDictionary combinedValues, out string? parameterName, out IRouteConstraint? constraint)
{
    // Constraints are evaluated in declaration order; the first failure wins and
    // is reported through the out parameters.
    foreach (var (name, routeConstraint) in _constraints)
    {
        if (!routeConstraint.Match(httpContext, NullRouter.Instance, name, combinedValues, RouteDirection.UrlGeneration))
        {
            parameterName = name;
            constraint = routeConstraint;
            return false;
        }
    }
    // Every constraint matched - there is no offending parameter to report.
    parameterName = null;
    constraint = null;
    return true;
}
// Step 2: If the route is a match generate the appropriate URI
/// <summary>
/// Returns a string representation of the URI associated with the route.
/// </summary>
/// <param name="acceptedValues">A dictionary that contains the parameters for the route.</param>
/// <returns>The string representation of the route, or <see langword="null"/> when binding fails.</returns>
public string? BindValues(RouteValueDictionary acceptedValues)
{
    // Rent a building context from the pool and make sure it is always returned.
    var context = _pool.Get();
    try
    {
        if (!TryBindValuesCore(context, acceptedValues))
        {
            return null;
        }
        return context.ToString();
    }
    finally
    {
        _pool.Return(context);
    }
}
// Step 2: If the route is a match generate the appropriate URI
// Binds the accepted values and splits the result into its path and query
// components, honoring per-call LinkOptions with fallback to the global options.
internal bool TryBindValues(
    RouteValueDictionary acceptedValues,
    LinkOptions? options,
    LinkOptions globalOptions,
    out (PathString path, QueryString query) result)
{
    var context = _pool.Get();
    // Per-call options take precedence over the global defaults; each flag
    // ultimately defaults to false when neither source specifies it.
    context.AppendTrailingSlash = options?.AppendTrailingSlash ?? globalOptions.AppendTrailingSlash ?? false;
    context.LowercaseQueryStrings = options?.LowercaseQueryStrings ?? globalOptions.LowercaseQueryStrings ?? false;
    context.LowercaseUrls = options?.LowercaseUrls ?? globalOptions.LowercaseUrls ?? false;
    try
    {
        var bound = TryBindValuesCore(context, acceptedValues);
        result = bound ? (context.ToPathString(), context.ToQueryString()) : default;
        return bound;
    }
    finally
    {
        // Always return the pooled context, even when binding fails or throws.
        _pool.Return(context);
    }
}
// Renders 'acceptedValues' into the route pattern: literal and separator parts are
// copied through, parameter parts are substituted (and consumed from
// 'acceptedValues'), and whatever values remain afterwards become the query string.
// Returns false when a required part cannot be rendered.
private bool TryBindValuesCore(UriBuildingContext context, RouteValueDictionary acceptedValues)
{
    // If we have any output parameter transformers, allow them a chance to influence the parameter values
    // before we build the URI.
    var parameterTransformers = _parameterTransformers;
    for (var i = 0; i < parameterTransformers.Length; i++)
    {
        (var parameterName, var transformer) = parameterTransformers[i];
        if (acceptedValues.TryGetValue(parameterName, out var value))
        {
            acceptedValues[parameterName] = transformer.TransformOutbound(value);
        }
    }
    var segments = _pattern.PathSegments;
    // Read interface .Count once rather than per iteration
    var segmentsCount = segments.Count;
    for (var i = 0; i < segmentsCount; i++)
    {
        Debug.Assert(context.BufferState == SegmentState.Beginning);
        Debug.Assert(context.UriState == SegmentState.Beginning);
        var parts = segments[i].Parts;
        // Read interface .Count once rather than per iteration
        var partsCount = parts.Count;
        for (var j = 0; j < partsCount; j++)
        {
            var part = parts[j];
            if (part is RoutePatternLiteralPart literalPart)
            {
                if (!context.Accept(literalPart.Content))
                {
                    return false;
                }
            }
            else if (part is RoutePatternSeparatorPart separatorPart)
            {
                if (!context.Accept(separatorPart.Content))
                {
                    return false;
                }
            }
            else if (part is RoutePatternParameterPart parameterPart)
            {
                // If it's a parameter, get its value. Remove() both fetches the value
                // and consumes it so it cannot also appear in the query string.
                acceptedValues.Remove(parameterPart.Name, out var value);
                var isSameAsDefault = false;
                if (_defaults != null &&
                    _defaults.TryGetValue(parameterPart.Name, out var defaultValue) &&
                    RoutePartsEqual(value, defaultValue))
                {
                    isSameAsDefault = true;
                }
                // InvariantCulture: route values are machine-readable, not UI text.
                var converted = Convert.ToString(value, CultureInfo.InvariantCulture);
                if (isSameAsDefault)
                {
                    // If the accepted value is the same as the default value buffer it since
                    // we won't necessarily add it to the URI we generate.
                    if (!context.Buffer(converted))
                    {
                        return false;
                    }
                }
                else
                {
                    // If the value is not accepted, it is null or empty value in the
                    // middle of the segment. We accept this if the parameter is an
                    // optional parameter and it is preceded by an optional separator.
                    // In this case, we need to remove the optional separator that we
                    // have added to the URI
                    // Example: template = {id}.{format?}. parameters: id=5
                    // In this case after we have generated "5.", we won't find any value
                    // for format, so we remove '.' and generate 5.
                    if (!context.Accept(converted, parameterPart.EncodeSlashes))
                    {
                        RoutePatternSeparatorPart? nullablePart;
                        if (j != 0 && parameterPart.IsOptional && (nullablePart = parts[j - 1] as RoutePatternSeparatorPart) != null)
                        {
                            // 'separatorPart' (declared by the earlier pattern match) is reused
                            // here to back out the separator that was already emitted.
                            separatorPart = nullablePart;
                            context.Remove(separatorPart.Content);
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
            }
        }
        context.EndSegment();
    }
    // Generate the query string from the remaining values
    var wroteFirst = false;
    foreach (var kvp in acceptedValues)
    {
        if (_defaults != null && _defaults.ContainsKey(kvp.Key))
        {
            // This value is a 'filter' we don't need to put it in the query string.
            continue;
        }
        var values = kvp.Value as IEnumerable;
        if (values != null && !(values is string))
        {
            // Multi-valued entries produce one query pair per element. Strings are
            // excluded: string implements IEnumerable<char> but is a single value.
            foreach (var value in values)
            {
                wroteFirst |= AddQueryKeyValueToContext(context, kvp.Key, value, wroteFirst);
            }
        }
        else
        {
            wroteFirst |= AddQueryKeyValueToContext(context, kvp.Key, kvp.Value, wroteFirst);
        }
    }
    return true;
}
// Appends one 'key=value' pair to the query string being built, prefixing it with
// '?' for the first pair and '&' thereafter. Returns true when a pair was written;
// null/empty values are skipped and produce no output.
private bool AddQueryKeyValueToContext(UriBuildingContext context, string key, object? value, bool wroteFirst)
{
    // InvariantCulture: query values are machine-readable, not UI text.
    var text = Convert.ToString(value, CultureInfo.InvariantCulture);
    if (string.IsNullOrEmpty(text))
    {
        return false;
    }
    if (context.LowercaseQueryStrings)
    {
        key = key.ToLowerInvariant();
        text = text.ToLowerInvariant();
    }
    context.QueryWriter.Write(wroteFirst ? '&' : '?');
    _urlEncoder.Encode(context.QueryWriter, key);
    context.QueryWriter.Write('=');
    _urlEncoder.Encode(context.QueryWriter, text);
    return true;
}
/// <summary>
/// Compares two objects for equality as parts of a case-insensitive path.
/// </summary>
/// <param name="a">An object to compare.</param>
/// <param name="b">An object to compare.</param>
/// <returns>True if the objects are equal, otherwise false.</returns>
public static bool RoutePartsEqual(object? a, object? b)
{
    // Normalize to strings where possible: the 'explicit null' sentinel is
    // treated as an empty string for comparison purposes.
    var sa = a as string ?? (ReferenceEquals(SentinullValue.Instance, a) ? string.Empty : null);
    var sb = b as string ?? (ReferenceEquals(SentinullValue.Instance, b) ? string.Empty : null);
    if (sa != null && sb != null)
    {
        // For strings do a case-insensitive comparison
        return string.Equals(sa, sb, StringComparison.OrdinalIgnoreCase);
    }
    // In case of strings, consider empty and null the same.
    // Since null cannot tell us the type, consider it to be a string if the other value is a string.
    if ((sa == string.Empty && sb == null) || (sb == string.Empty && sa == null))
    {
        return true;
    }
    if (a != null && b != null)
    {
        // Explicitly call .Equals() in case it is overridden in the type
        return a.Equals(b);
    }
    // At least one of them is null. Return true if they both are.
    return a == b;
}
// Returns true when 'part' carries a usable value: not null, not the
// explicit-null sentinel, and not an empty string.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static bool IsRoutePartNonEmpty(object? part)
{
    if (part is null || ReferenceEquals(SentinullValue.Instance, part))
    {
        return false;
    }
    // An empty string counts the same as an omitted value.
    return !(part is string stringPart && stringPart.Length == 0);
}
// Forwards ambient values that are neither pattern parameters nor already present
// in the accepted values into 'combinedValues', so constraints can observe them.
// (Link generation itself does not use these entries.)
private void CopyNonParameterAmbientValues(
    RouteValueDictionary? ambientValues,
    RouteValueDictionary acceptedValues,
    RouteValueDictionary combinedValues)
{
    if (ambientValues == null)
    {
        return;
    }
    foreach (var kvp in ambientValues)
    {
        // Omitted/empty/sentinel values contribute nothing.
        if (!IsRoutePartNonEmpty(kvp.Value))
        {
            continue;
        }
        // Only copy values that do not correspond to a pattern parameter and
        // were not already accepted.
        if (_pattern.GetParameter(kvp.Key) == null && !acceptedValues.ContainsKey(kvp.Key))
        {
            combinedValues.Add(kvp.Key, kvp.Value);
        }
    }
}
// Builds the slot array used during value resolution: one entry per pattern
// parameter followed by one entry per filter, each initialized with a null value.
private static KeyValuePair<string, object?>[] AssignSlots(RoutePattern pattern, KeyValuePair<string, object?>[] filters)
{
    var parameters = pattern.Parameters;
    var slots = new KeyValuePair<string, object?>[parameters.Count + filters.Length];
    var index = 0;
    foreach (var parameter in parameters)
    {
        slots[index++] = new KeyValuePair<string, object?>(parameter.Name, null);
    }
    foreach (var filter in filters)
    {
        slots[index++] = new KeyValuePair<string, object?>(filter.Key, null);
    }
    return slots;
}
// This represents an 'explicit null' in the slots array. Callers detect it via
// ReferenceEquals against Instance, so the singleton identity must never change.
[DebuggerDisplay("explicit null")]
private sealed class SentinullValue
{
    // readonly: the field was mutable in the original, but the sentinel's
    // identity is relied upon (ReferenceEquals checks throughout this file);
    // reassignment would silently break 'explicit null' tracking.
    public static readonly object Instance = new SentinullValue();
    // Private constructor enforces the singleton.
    private SentinullValue()
    {
    }
    // Renders as an empty string so it behaves like an empty route part.
    public override string ToString() => string.Empty;
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Speech.V1.Snippets
{
using Google.Api.Gax.Grpc;
using Google.LongRunning;
using System.Threading.Tasks;
/// <summary>
/// Generated snippets: runnable examples for each <c>SpeechClient</c> RPC
/// (Recognize, LongRunningRecognize, StreamingRecognize), in both synchronous
/// and asynchronous form. Generated code - do not edit by hand.
/// </summary>
public sealed class GeneratedSpeechClientSnippets
{
    /// <summary>Snippet for Recognize, passing a fully-formed <see cref="RecognizeRequest"/> object.</summary>
    public void RecognizeRequestObject()
    {
        // Snippet: Recognize(RecognizeRequest, CallSettings)
        // Create client
        SpeechClient speechClient = SpeechClient.Create();
        // Initialize request argument(s)
        RecognizeRequest request = new RecognizeRequest
        {
            Config = new RecognitionConfig(),
            Audio = new RecognitionAudio(),
        };
        // Make the request
        RecognizeResponse response = speechClient.Recognize(request);
        // End snippet
    }
    /// <summary>Snippet for RecognizeAsync, passing a fully-formed <see cref="RecognizeRequest"/> object.</summary>
    public async Task RecognizeRequestObjectAsync()
    {
        // Snippet: RecognizeAsync(RecognizeRequest, CallSettings)
        // Additional: RecognizeAsync(RecognizeRequest, CancellationToken)
        // Create client
        SpeechClient speechClient = await SpeechClient.CreateAsync();
        // Initialize request argument(s)
        RecognizeRequest request = new RecognizeRequest
        {
            Config = new RecognitionConfig(),
            Audio = new RecognitionAudio(),
        };
        // Make the request
        RecognizeResponse response = await speechClient.RecognizeAsync(request);
        // End snippet
    }
    /// <summary>Snippet for Recognize, passing the config and audio as separate arguments.</summary>
    public void Recognize()
    {
        // Snippet: Recognize(RecognitionConfig, RecognitionAudio, CallSettings)
        // Create client
        SpeechClient speechClient = SpeechClient.Create();
        // Initialize request argument(s)
        RecognitionConfig config = new RecognitionConfig();
        RecognitionAudio audio = new RecognitionAudio();
        // Make the request
        RecognizeResponse response = speechClient.Recognize(config, audio);
        // End snippet
    }
    /// <summary>Snippet for RecognizeAsync, passing the config and audio as separate arguments.</summary>
    public async Task RecognizeAsync()
    {
        // Snippet: RecognizeAsync(RecognitionConfig, RecognitionAudio, CallSettings)
        // Additional: RecognizeAsync(RecognitionConfig, RecognitionAudio, CancellationToken)
        // Create client
        SpeechClient speechClient = await SpeechClient.CreateAsync();
        // Initialize request argument(s)
        RecognitionConfig config = new RecognitionConfig();
        RecognitionAudio audio = new RecognitionAudio();
        // Make the request
        RecognizeResponse response = await speechClient.RecognizeAsync(config, audio);
        // End snippet
    }
    /// <summary>Snippet for LongRunningRecognize, passing a fully-formed <see cref="LongRunningRecognizeRequest"/> object and polling the long-running operation.</summary>
    public void LongRunningRecognizeRequestObject()
    {
        // Snippet: LongRunningRecognize(LongRunningRecognizeRequest, CallSettings)
        // Create client
        SpeechClient speechClient = SpeechClient.Create();
        // Initialize request argument(s)
        LongRunningRecognizeRequest request = new LongRunningRecognizeRequest
        {
            Config = new RecognitionConfig(),
            Audio = new RecognitionAudio(),
            OutputConfig = new TranscriptOutputConfig(),
        };
        // Make the request
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> response = speechClient.LongRunningRecognize(request);
        // Poll until the returned long-running operation is complete
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> completedResponse = response.PollUntilCompleted();
        // Retrieve the operation result
        LongRunningRecognizeResponse result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> retrievedResponse = speechClient.PollOnceLongRunningRecognize(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            LongRunningRecognizeResponse retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }
    /// <summary>Snippet for LongRunningRecognizeAsync, passing a fully-formed <see cref="LongRunningRecognizeRequest"/> object and polling the long-running operation.</summary>
    public async Task LongRunningRecognizeRequestObjectAsync()
    {
        // Snippet: LongRunningRecognizeAsync(LongRunningRecognizeRequest, CallSettings)
        // Additional: LongRunningRecognizeAsync(LongRunningRecognizeRequest, CancellationToken)
        // Create client
        SpeechClient speechClient = await SpeechClient.CreateAsync();
        // Initialize request argument(s)
        LongRunningRecognizeRequest request = new LongRunningRecognizeRequest
        {
            Config = new RecognitionConfig(),
            Audio = new RecognitionAudio(),
            OutputConfig = new TranscriptOutputConfig(),
        };
        // Make the request
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> response = await speechClient.LongRunningRecognizeAsync(request);
        // Poll until the returned long-running operation is complete
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        LongRunningRecognizeResponse result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> retrievedResponse = await speechClient.PollOnceLongRunningRecognizeAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            LongRunningRecognizeResponse retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }
    /// <summary>Snippet for LongRunningRecognize, passing the config and audio as separate arguments and polling the long-running operation.</summary>
    public void LongRunningRecognize()
    {
        // Snippet: LongRunningRecognize(RecognitionConfig, RecognitionAudio, CallSettings)
        // Create client
        SpeechClient speechClient = SpeechClient.Create();
        // Initialize request argument(s)
        RecognitionConfig config = new RecognitionConfig();
        RecognitionAudio audio = new RecognitionAudio();
        // Make the request
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> response = speechClient.LongRunningRecognize(config, audio);
        // Poll until the returned long-running operation is complete
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> completedResponse = response.PollUntilCompleted();
        // Retrieve the operation result
        LongRunningRecognizeResponse result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> retrievedResponse = speechClient.PollOnceLongRunningRecognize(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            LongRunningRecognizeResponse retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }
    /// <summary>Snippet for LongRunningRecognizeAsync, passing the config and audio as separate arguments and polling the long-running operation.</summary>
    public async Task LongRunningRecognizeAsync()
    {
        // Snippet: LongRunningRecognizeAsync(RecognitionConfig, RecognitionAudio, CallSettings)
        // Additional: LongRunningRecognizeAsync(RecognitionConfig, RecognitionAudio, CancellationToken)
        // Create client
        SpeechClient speechClient = await SpeechClient.CreateAsync();
        // Initialize request argument(s)
        RecognitionConfig config = new RecognitionConfig();
        RecognitionAudio audio = new RecognitionAudio();
        // Make the request
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> response = await speechClient.LongRunningRecognizeAsync(config, audio);
        // Poll until the returned long-running operation is complete
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        LongRunningRecognizeResponse result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        Operation<LongRunningRecognizeResponse, LongRunningRecognizeMetadata> retrievedResponse = await speechClient.PollOnceLongRunningRecognizeAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            LongRunningRecognizeResponse retrievedResult = retrievedResponse.Result;
        }
        // End snippet
    }
    /// <summary>Snippet for StreamingRecognize: bidirectional streaming with interleaved request writes and a response-reading task.</summary>
    public async Task StreamingRecognize()
    {
        // Snippet: StreamingRecognize(CallSettings, BidirectionalStreamingSettings)
        // Create client
        SpeechClient speechClient = SpeechClient.Create();
        // Initialize streaming call, retrieving the stream object
        SpeechClient.StreamingRecognizeStream response = speechClient.StreamingRecognize();
        // Sending requests and retrieving responses can be arbitrarily interleaved
        // Exact sequence will depend on client/server behavior
        // Create task to do something with responses from server
        Task responseHandlerTask = Task.Run(async () =>
        {
            // Note that C# 8 code can use await foreach
            AsyncResponseStream<StreamingRecognizeResponse> responseStream = response.GetResponseStream();
            while (await responseStream.MoveNextAsync())
            {
                StreamingRecognizeResponse responseItem = responseStream.Current;
                // Do something with streamed response
            }
            // The response stream has completed
        });
        // Send requests to the server
        bool done = false;
        while (!done)
        {
            // Initialize a request
            StreamingRecognizeRequest request = new StreamingRecognizeRequest
            {
                StreamingConfig = new StreamingRecognitionConfig(),
            };
            // Stream a request to the server
            await response.WriteAsync(request);
            // Set "done" to true when sending requests is complete
        }
        // Complete writing requests to the stream
        await response.WriteCompleteAsync();
        // Await the response handler
        // This will complete once all server responses have been processed
        await responseHandlerTask;
        // End snippet
    }
}
}
| |
/*
* Copyright (c) 2006, Brendan Grant (grantb@dahat.com)
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * All original and modified versions of this source code must include the
* above copyright notice, this list of conditions and the following
* disclaimer.
* * This code may not be used with or within any modules or code that is
* licensed in any way that that compels or requires users or modifiers
* to release their source code or changes as a requirement for
* the use, modification or distribution of binary, object or source code
* based on the licensed source code. (ex: Cannot be used with GPL code.)
* * The name of Brendan Grant may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY BRENDAN GRANT ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL BRENDAN GRANT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using Microsoft.Win32;
using System.Xml.Serialization;
namespace BrendanGrant.Helpers.FileAssociation
{
#region Public Enums
/// <summary>
/// Broad categories of system recognized file format types.
/// </summary>
public enum PerceivedTypes
{
    /// <summary>
    /// No perceived type.
    /// </summary>
    None,
    /// <summary>
    /// Image file
    /// </summary>
    Image,
    /// <summary>
    /// Text file
    /// </summary>
    Text,
    /// <summary>
    /// Audio file
    /// </summary>
    Audio,
    /// <summary>
    /// Video file
    /// </summary>
    Video,
    /// <summary>
    /// Compressed file
    /// </summary>
    Compressed,
    /// <summary>
    /// System file
    /// </summary>
    System,
}
#endregion
/// <summary>
/// Provides instance methods for the creation, modification, and deletion of file extension associations in the Windows registry.
/// </summary>
public class FileAssociationInfo
{
private RegistryWrapper registryWrapper = new RegistryWrapper();
/// <summary>
/// Gets array containing known file extensions from HKEY_CLASSES_ROOT.
/// </summary>
/// <returns>String array containing dot-prefixed extensions (e.g. ".txt").</returns>
public static string[] GetExtensions()
{
    RegistryKey root = Registry.ClassesRoot;
    List<string> extensionList = new List<string>();
    string[] subKeys = root.GetSubKeyNames();
    foreach (string subKey in subKeys)
    {
        // Keys that start with a dot are extension registrations; everything else
        // under HKEY_CLASSES_ROOT (ProgIDs, CLSIDs, ...) is skipped.
        // Ordinal comparison: registry key names are not culture-sensitive text,
        // and the parameterless StartsWith overload is culture-dependent (CA1310).
        if (subKey.StartsWith(".", StringComparison.Ordinal))
        {
            extensionList.Add(subKey);
        }
    }
    // Original ended with a stray empty statement ("ToArray(); ;") - removed.
    return extensionList.ToArray();
}
// The dot-prefixed extension this instance wraps (e.g. ".txt"); assigned by the
// constructor and the Extension property.
private string extension;
/// <summary>
/// Gets or sets a value that determines the MIME type of the file.
/// </summary>
// Delegates to the static GetContentType/SetContentType helpers defined elsewhere in this class.
public string ContentType
{
    get { return GetContentType(this); }
    set { SetContentType(this, value); }
}
/// <summary>
/// Gets a value indicating whether the extension key exists under HKEY_CLASSES_ROOT.
/// </summary>
public bool Exists
{
    get
    {
        try
        {
            // RegistryKey is IDisposable; the original leaked the opened handle.
            using (RegistryKey key = Registry.ClassesRoot.OpenSubKey(extension))
            {
                // OpenSubKey returns null when the key does not exist.
                return key != null;
            }
        }
        catch (Exception)
        {
            // Registry access can fail (security, malformed name); treat any failure
            // as "does not exist". The original also wrote the exception to the
            // console, which a library property should not do.
            return false;
        }
    }
}
/// <summary>
/// Gets or sets the dot-prefixed extension name (e.g. ".txt").
/// </summary>
public string Extension
{
    get { return extension; }
    set { extension = value; }
}
/// <summary>
/// Gets or sets an array containing program file names which should be displayed in the Open With list.
/// </summary>
/// <example>notepad.exe, wordpad.exe, othertexteditor.exe</example>
// Delegates to the static GetOpenWithList/SetOpenWithList helpers defined elsewhere in this class.
public string[] OpenWithList
{
    get { return GetOpenWithList(this); }
    set { SetOpenWithList(this, value); }
}
/// <summary>
/// Gets or sets a value that determines the <see cref="PerceivedType"/> of the file -
/// the broad category (image, text, audio, ...) registered for the extension.
/// </summary>
// Delegates to the static GetPerceivedType/SetPerceivedType helpers defined elsewhere in this class.
public PerceivedTypes PerceivedType
{
    get { return GetPerceivedType(this); }
    set { SetPerceivedType(this, value); }
}
/// <summary>
/// Gets or sets a value that indicates (by GUID) the filter component that is used to search for text within documents of this type.
/// </summary>
// Delegates to the static GetPersistentHandler/SetPersistentHandler helpers defined elsewhere in this class.
public Guid PersistentHandler
{
    get { return GetPersistentHandler(this); }
    set { SetPersistentHandler(this, value); }
}
/// <summary>
/// Gets or sets a value that indicates the name of the associated application with the behavior to handle this extension.
/// </summary>
// Delegates to the static GetProgID/SetProgID helpers defined elsewhere in this class.
[XmlAttribute()]
public string ProgID
{
    get { return GetProgID(this); }
    set { SetProgID(this, value); }
}
/// <summary>
/// Creates the extension key for this instance's extension.
/// </summary>
public void Create()
{
    // Delegates to the static Create(FileAssociationInfo) overload defined elsewhere in this class.
    Create(this);
}
/// <summary>
/// Deletes the extension key for this instance's extension.
/// </summary>
public void Delete()
{
    // Delegates to the static Delete(FileAssociationInfo) overload defined elsewhere in this class.
    Delete(this);
}
/// <summary>
/// Verifies that the given extension exists and is associated with the given program id.
/// </summary>
/// <param name="extension">Extension to be checked for.</param>
/// <param name="progId">progId to be checked for.</param>
/// <returns>True if the association exists, false if it does not.</returns>
public bool IsValid(string extension, string progId)
{
    // The association is valid only when the extension key exists and its
    // registered ProgID matches the expected one exactly.
    FileAssociationInfo info = new FileAssociationInfo(extension);
    return info.Exists && progId == info.ProgID;
}
/// <summary>
/// Initializes a new instance of the <see cref="FileAssociationInfo"/> class, which acts as a wrapper for a file extension within the registry.
/// </summary>
/// <param name="extension">The dot prefixed extension.</param>
/// <example>FileAssociationInfo(".mp3")
/// FileAssociationInfo(".txt")
/// FileAssociationInfo(".doc")</example>
public FileAssociationInfo(string extension)
{
    this.extension = extension;
}
#region Public Functions - Creators
/// <summary>
/// Creates actual extension association key in registry for the specified extension and supplied attributes.
/// </summary>
/// <param name="progId">Name of expected handling program.</param>
/// <returns>FileAssociationInfo instance referring to specified extension.</returns>
public FileAssociationInfo Create(string progId)
{
    // Convenience overload: no perceived type, no MIME type, no Open With list.
    return Create(progId, PerceivedTypes.None, string.Empty, null);
}
/// <summary>
/// Creates the extension association key in the registry with the supplied
/// handling program and perceived type; no MIME type or Open With list is set.
/// </summary>
/// <param name="progId">Name of expected handling program.</param>
/// <param name="perceivedType"><see cref="PerceivedTypes"/> of the file type.</param>
/// <returns>FileAssociationInfo instance referring to specified extension.</returns>
public FileAssociationInfo Create(string progId, PerceivedTypes perceivedType)
{
    return Create(progId, perceivedType, string.Empty, null);
}
/// <summary>
/// Creates the extension association key in the registry with the supplied
/// handling program, perceived type and MIME content type.
/// </summary>
/// <param name="progId">Name of expected handling program.</param>
/// <param name="perceivedType"><see cref="PerceivedTypes"/> of the file type.</param>
/// <param name="contentType">MIME type of the file type.</param>
/// <returns>FileAssociationInfo instance referring to specified extension.</returns>
public FileAssociationInfo Create(string progId, PerceivedTypes perceivedType, string contentType)
{
    // BUG FIX: this overload used to forward PerceivedTypes.None, silently
    // discarding the caller-supplied perceivedType.
    return Create(progId, perceivedType, contentType, null);
}
/// <summary>
/// Creates the extension association key in the registry for this extension
/// with the supplied attributes. Any pre-existing key is deleted first so no
/// stale attributes survive.
/// </summary>
/// <param name="progId">Name of expected handling program.</param>
/// <param name="perceivedType"><see cref="PerceivedTypes"/> of the file type; None is not written.</param>
/// <param name="contentType">MIME type of the file type; null/empty is not written.</param>
/// <param name="openwithList">Program file names for the Open With list; null is not written.</param>
/// <returns>FileAssociationInfo instance referring to specified extension.</returns>
public FileAssociationInfo Create(string progId, PerceivedTypes perceivedType, string contentType, string[] openwithList)
{
    FileAssociationInfo fai = new FileAssociationInfo(extension);
    if (fai.Exists)
    {
        fai.Delete();
    }
    fai.Create();
    fai.ProgID = progId;
    if (perceivedType != PerceivedTypes.None)
        fai.PerceivedType = perceivedType;
    // Robustness fix: the old check (contentType != string.Empty) let a null
    // content type slip through and be written to the registry.
    if (!string.IsNullOrEmpty(contentType))
        fai.ContentType = contentType;
    if (openwithList != null)
        fai.OpenWithList = openwithList;
    return fai;
}
#endregion
#region Private Functions - Property backend
/// <summary>
/// Gets the program file names which should be displayed in the Open With
/// list (the subkey names of the extension's "OpenWithList" key).
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be read.</param>
/// <returns>Program file names; an empty array when no Open With list exists.</returns>
protected string[] GetOpenWithList(FileAssociationInfo file)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");

    // Dispose the registry keys we open; the original leaked both handles
    // and would NRE if the extension key vanished after the Exists check.
    using (RegistryKey extensionKey = Registry.ClassesRoot.OpenSubKey(file.extension))
    {
        if (extensionKey == null)
            return new string[0];
        using (RegistryKey listKey = extensionKey.OpenSubKey("OpenWithList"))
        {
            if (listKey == null)
                return new string[0];
            return listKey.GetSubKeyNames();
        }
    }
}
/// <summary>
/// Sets the program file names which should be displayed in the Open With
/// list. Any existing "OpenWithList" subkey is replaced wholesale.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be changed.</param>
/// <param name="programList">Program file names</param>
protected void SetOpenWithList(FileAssociationInfo file, string[] programList)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");

    // Dispose every registry key we open; the original leaked the handles.
    using (RegistryKey extensionKey = Registry.ClassesRoot.OpenSubKey(file.extension, true))
    {
        bool hasList;
        using (RegistryKey existing = extensionKey.OpenSubKey("OpenWithList"))
        {
            hasList = existing != null;
        }
        if (hasList)
            extensionKey.DeleteSubKeyTree("OpenWithList");

        using (RegistryKey listKey = extensionKey.CreateSubKey("OpenWithList"))
        {
            foreach (string program in programList)
            {
                listKey.CreateSubKey(program).Close();
            }
        }
    }
    ShellNotification.NotifyOfChange();
}
/// <summary>
/// Gets the <see cref="PerceivedTypes"/> of the file type, read from the
/// extension's "PerceivedType" registry value.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be read.</param>
/// <returns>Perceived Type of the extension; None when absent or unrecognised.</returns>
protected PerceivedTypes GetPerceivedType(FileAssociationInfo file)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");

    object val = registryWrapper.Read(file.extension, "PerceivedType");
    if (val == null)
        return PerceivedTypes.None;

    // Enum.TryParse replaces the old try/catch around Enum.Parse, which
    // dumped the exception text to the console from library code; an
    // unrecognised registry value now simply maps to None.
    PerceivedTypes actualType;
    if (!Enum.TryParse(val.ToString(), true, out actualType))
        actualType = PerceivedTypes.None;
    return actualType;
}
/// <summary>
/// Sets the <see cref="PerceivedTypes"/> of the file type by writing the
/// extension's "PerceivedType" registry value, then notifies the shell.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be changed.</param>
/// <param name="type"><see cref="PerceivedTypes"/> to be set that specifies Perceived Type of extension.</param>
protected void SetPerceivedType(FileAssociationInfo file, PerceivedTypes type)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");
    registryWrapper.Write(file.extension, "PerceivedType", type.ToString());
    ShellNotification.NotifyOfChange();
}
/// <summary>
/// Gets the filter component that is used to search for text within
/// documents of this type, read from the extension's "PersistentHandler"
/// subkey's default value.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be read.</param>
/// <returns>Guid of the filter component; Guid.Empty when not set.</returns>
protected Guid GetPersistentHandler(FileAssociationInfo file)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");

    object val = registryWrapper.Read(file.extension + "\\PersistentHandler", string.Empty);
    return val == null ? Guid.Empty : new Guid(val.ToString());
}
/// <summary>
/// Sets the filter component that is used to search for text within
/// documents of this type. Guid.Empty is a no-op.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be changed.</param>
/// <param name="persistentHandler">Guid of filter component.</param>
protected void SetPersistentHandler(FileAssociationInfo file, Guid persistentHandler)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");
    if (persistentHandler == Guid.Empty)
        return;

    // BUG FIX: the subkey path was previously built with the PersistentHandler
    // *property* (a Guid read back from the registry) instead of the literal
    // "PersistentHandler" subkey name that GetPersistentHandler reads.
    registryWrapper.Write(file.extension + "\\PersistentHandler", string.Empty, persistentHandler);
    ShellNotification.NotifyOfChange();
}
/// <summary>
/// Gets the MIME type of the file type, read from the extension's
/// "Content Type" registry value.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be read.</param>
/// <returns>MIME content type of the extension; empty string when not set.</returns>
protected string GetContentType(FileAssociationInfo file)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");

    object val = registryWrapper.Read(file.extension, "Content Type");
    return val == null ? string.Empty : val.ToString();
}
/// <summary>
/// Sets the MIME type of the file type by writing the extension's
/// "Content Type" registry value, then notifies the shell.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be changed.</param>
/// <param name="type">MIME content type of extension.</param>
protected void SetContentType(FileAssociationInfo file, string type)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");
    registryWrapper.Write(file.extension, "Content Type", type);
    ShellNotification.NotifyOfChange();
}
/// <summary>
/// Gets the Program ID of the application associated with this extension,
/// read from the default (unnamed) value of the extension's key.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be read.</param>
/// <returns>Associated Program ID; empty string when not set.</returns>
protected string GetProgID(FileAssociationInfo file)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");

    object val = registryWrapper.Read(file.extension, string.Empty);
    return val == null ? string.Empty : val.ToString();
}
/// <summary>
/// Sets the Program ID of the application associated with this extension by
/// writing the default (unnamed) value of the extension's key, then notifies
/// the shell.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> that provides specifics of the extension to be changed.</param>
/// <param name="progId">Associated Program ID of handling program.</param>
protected void SetProgID(FileAssociationInfo file, string progId)
{
    if (!file.Exists)
        throw new Exception("Extension does not exist");
    registryWrapper.Write(file.extension, string.Empty, progId);
    ShellNotification.NotifyOfChange();
}
#endregion
/// <summary>
/// Creates the file extension entry under HKEY_CLASSES_ROOT, replacing any
/// existing entry.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> instance that contains specifics on extension to be created.</param>
protected void Create(FileAssociationInfo file)
{
    if (file.Exists)
    {
        file.Delete();
    }
    // CreateSubKey returns an open RegistryKey; close it so the handle is
    // not leaked (the original discarded it without closing).
    RegistryKey key = Registry.ClassesRoot.CreateSubKey(file.extension);
    if (key != null)
        key.Close();
}
/// <summary>
/// Deletes the file extension entry (and all its subkeys) from
/// HKEY_CLASSES_ROOT.
/// </summary>
/// <param name="file"><see cref="FileAssociationInfo"/> instance that contains specifics on extension to be deleted.</param>
protected void Delete(FileAssociationInfo file)
{
    if (!file.Exists)
        throw new Exception("Key not found.");

    Registry.ClassesRoot.DeleteSubKeyTree(file.extension);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
namespace System.IO
{
/*
* This class is used to access a contiguous block of memory, likely outside
* the GC heap (or pinned in place in the GC heap, but a MemoryStream may
* make more sense in those cases). It's great if you have a pointer and
* a length for a section of memory mapped in by someone else and you don't
* want to copy this into the GC heap. UnmanagedMemoryStream assumes these
* two things:
*
* 1) All the memory in the specified block is readable or writable,
* depending on the values you pass to the constructor.
* 2) The lifetime of the block of memory is at least as long as the lifetime
* of the UnmanagedMemoryStream.
* 3) You clean up the memory when appropriate. The UnmanagedMemoryStream
* currently will do NOTHING to free this memory.
* 4) All calls to Write and WriteByte may not be threadsafe currently.
*
* It may become necessary to add in some sort of
* DeallocationMode enum, specifying whether we unmap a section of memory,
* call free, run a user-provided delegate to free the memory, etc.
* We'll suggest user write a subclass of UnmanagedMemoryStream that uses
* a SafeHandle subclass to hold onto the memory.
*
*/
/// <summary>
/// Stream over a memory pointer or over a SafeBuffer
/// </summary>
public class UnmanagedMemoryStream : Stream
{
private SafeBuffer _buffer;
private unsafe byte* _mem;
private long _length;
private long _capacity;
private long _position;
private long _offset;
private FileAccess _access;
private bool _isOpen;
private Task<Int32> _lastReadTask; // The last successful task returned from ReadAsync
/// <summary>
/// Creates a closed stream.
/// </summary>
// Needed for subclasses that need to map a file, etc.
protected UnmanagedMemoryStream()
{
unsafe
{
_mem = null;
}
_isOpen = false;
}
/// <summary>
/// Creates a stream over a SafeBuffer.
/// </summary>
/// <param name="buffer"></param>
/// <param name="offset"></param>
/// <param name="length"></param>
public UnmanagedMemoryStream(SafeBuffer buffer, long offset, long length)
{
Initialize(buffer, offset, length, FileAccess.Read);
}
/// <summary>
/// Creates a stream over a SafeBuffer.
/// </summary>
public UnmanagedMemoryStream(SafeBuffer buffer, long offset, long length, FileAccess access)
{
Initialize(buffer, offset, length, access);
}
/// <summary>
/// Subclasses must call this method (or the other overload) to properly initialize all instance fields.
/// Validates the arguments, verifies the (offset, length) window fits inside
/// the SafeBuffer without pointer wraparound, and publishes the stream state.
/// </summary>
/// <param name="buffer">SafeBuffer wrapping the unmanaged memory.</param>
/// <param name="offset">Offset into the buffer at which the stream starts.</param>
/// <param name="length">Number of bytes the stream exposes; also becomes the capacity.</param>
/// <param name="access">Read/write access granted to stream callers.</param>
protected void Initialize(SafeBuffer buffer, long offset, long length, FileAccess access)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer));
    }
    if (offset < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (length < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(length), SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (buffer.ByteLength < (ulong)(offset + length))
    {
        throw new ArgumentException(SR.Argument_InvalidSafeBufferOffLen);
    }
    if (access < FileAccess.Read || access > FileAccess.ReadWrite)
    {
        throw new ArgumentOutOfRangeException(nameof(access));
    }
    // Guard against double initialization from a subclass.
    if (_isOpen)
    {
        throw new InvalidOperationException(SR.InvalidOperation_CalledTwice);
    }
    // check for wraparound: pointer + offset + length must not overflow the
    // address space. The pointer is acquired only for this check.
    unsafe
    {
        byte* pointer = null;
        // PrepareConstrainedRegions + try/finally so ReleasePointer runs even
        // under asynchronous exceptions (constrained execution region pattern).
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            buffer.AcquirePointer(ref pointer);
            if ((pointer + offset + length) < pointer)
            {
                throw new ArgumentException(SR.ArgumentOutOfRange_UnmanagedMemStreamWrapAround);
            }
        }
        finally
        {
            if (pointer != null)
            {
                buffer.ReleasePointer();
            }
        }
    }
    // All validation passed; publish state. _isOpen is set last so the
    // stream only reports itself open once fully initialized.
    _offset = offset;
    _buffer = buffer;
    _length = length;
    _capacity = length;
    _access = access;
    _isOpen = true;
}
/// <summary>
/// Creates a stream over a byte*.
/// </summary>
[CLSCompliant(false)]
public unsafe UnmanagedMemoryStream(byte* pointer, long length)
{
Initialize(pointer, length, length, FileAccess.Read);
}
/// <summary>
/// Creates a stream over a byte*.
/// </summary>
[CLSCompliant(false)]
public unsafe UnmanagedMemoryStream(byte* pointer, long length, long capacity, FileAccess access)
{
Initialize(pointer, length, capacity, access);
}
/// <summary>
/// Subclasses must call this method (or the other overload) to properly initialize all instance fields.
/// </summary>
[CLSCompliant(false)]
protected unsafe void Initialize(byte* pointer, long length, long capacity, FileAccess access)
{
if (pointer == null)
throw new ArgumentNullException(nameof(pointer));
if (length < 0 || capacity < 0)
throw new ArgumentOutOfRangeException((length < 0) ? nameof(length) : nameof(capacity), SR.ArgumentOutOfRange_NeedNonNegNum);
if (length > capacity)
throw new ArgumentOutOfRangeException(nameof(length), SR.ArgumentOutOfRange_LengthGreaterThanCapacity);
// Check for wraparound.
if (((byte*)((long)pointer + capacity)) < pointer)
throw new ArgumentOutOfRangeException(nameof(capacity), SR.ArgumentOutOfRange_UnmanagedMemStreamWrapAround);
if (access < FileAccess.Read || access > FileAccess.ReadWrite)
throw new ArgumentOutOfRangeException(nameof(access), SR.ArgumentOutOfRange_Enum);
if (_isOpen)
throw new InvalidOperationException(SR.InvalidOperation_CalledTwice);
_mem = pointer;
_offset = 0;
_length = length;
_capacity = capacity;
_access = access;
_isOpen = true;
}
/// <summary>
/// Returns true if the stream can be read; otherwise returns false.
/// </summary>
public override bool CanRead
{
get { return _isOpen && (_access & FileAccess.Read) != 0; }
}
/// <summary>
/// Returns true if the stream can seek; otherwise returns false.
/// </summary>
public override bool CanSeek
{
get { return _isOpen; }
}
/// <summary>
/// Returns true if the stream can be written to; otherwise returns false.
/// </summary>
public override bool CanWrite
{
get { return _isOpen && (_access & FileAccess.Write) != 0; }
}
/// <summary>
/// Closes the stream. The unmanaged memory itself is NOT freed here — its
/// lifetime must be managed separately by the caller (or the SafeBuffer).
/// </summary>
/// <param name="disposing">True when called from Dispose(); false when called from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    _isOpen = false;
    unsafe { _mem = null; }
    // Stream may allocate WaitHandles for async calls; calling
    // base.Dispose(disposing) releases them eagerly instead of waiting for
    // their finalizers to run.
    base.Dispose(disposing);
}
private void EnsureNotClosed()
{
if (!_isOpen)
throw Error.GetStreamIsClosed();
}
private void EnsureReadable()
{
if (!CanRead)
throw Error.GetReadNotSupported();
}
private void EnsureWriteable()
{
if (!CanWrite)
throw Error.GetWriteNotSupported();
}
/// <summary>
/// Since it's a memory stream, this method does nothing.
/// </summary>
public override void Flush()
{
EnsureNotClosed();
}
/// <summary>
/// Since it's a memory stream, this method does nothing specific.
/// </summary>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public override Task FlushAsync(CancellationToken cancellationToken)
{
if (cancellationToken.IsCancellationRequested)
return Task.FromCanceled(cancellationToken);
try
{
Flush();
return Task.CompletedTask;
}
catch (Exception ex)
{
return Task.FromException(ex);
}
}
/// <summary>
/// Number of bytes in the stream.
/// </summary>
public override long Length
{
get
{
EnsureNotClosed();
return Interlocked.Read(ref _length);
}
}
/// <summary>
/// Number of bytes that can be written to the stream.
/// </summary>
public long Capacity
{
get
{
EnsureNotClosed();
return _capacity;
}
}
/// <summary>
/// ReadByte will read byte at the Position in the stream
/// </summary>
public override long Position
{
get
{
if (!CanSeek) throw Error.GetStreamIsClosed();
return Interlocked.Read(ref _position);
}
set
{
if (value < 0) throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_NeedNonNegNum);
if (!CanSeek) throw Error.GetStreamIsClosed();
Interlocked.Exchange(ref _position, value);
}
}
/// <summary>
/// Pointer to memory at the current Position in the stream.
/// </summary>
[CLSCompliant(false)]
public unsafe byte* PositionPointer
{
get
{
if (_buffer != null)
throw new NotSupportedException(SR.NotSupported_UmsSafeBuffer);
EnsureNotClosed();
// Use a temp to avoid a race
long pos = Interlocked.Read(ref _position);
if (pos > _capacity)
throw new IndexOutOfRangeException(SR.IndexOutOfRange_UMSPosition);
byte* ptr = _mem + pos;
return ptr;
}
set
{
if (_buffer != null)
throw new NotSupportedException(SR.NotSupported_UmsSafeBuffer);
EnsureNotClosed();
if (value < _mem)
throw new IOException(SR.IO_SeekBeforeBegin);
long newPosition = (long)value - (long)_mem;
if (newPosition < 0)
throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_UnmanagedMemStreamLength);
Interlocked.Exchange(ref _position, newPosition);
}
}
/// <summary>
/// Reads bytes from stream and puts them into the buffer
/// </summary>
/// <param name="buffer">Buffer to read the bytes to.</param>
/// <param name="offset">Starting index in the buffer.</param>
/// <param name="count">Maximum number of bytes to read.</param>
/// <returns>Number of bytes actually read.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (buffer == null)
throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
if (offset < 0)
throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
if (buffer.Length - offset < count)
throw new ArgumentException(SR.Argument_InvalidOffLen);
return ReadCore(new Span<byte>(buffer, offset, count));
}
public override int Read(Span<byte> destination)
{
if (GetType() == typeof(UnmanagedMemoryStream))
{
return ReadCore(destination);
}
else
{
// UnmanagedMemoryStream is not sealed, and a derived type may have overridden Read(byte[], int, int) prior
// to this Read(Span<byte>) overload being introduced. In that case, this Read(Span<byte>) overload
// should use the behavior of Read(byte[],int,int) overload.
return base.Read(destination);
}
}
// Copies up to destination.Length bytes from the current position into
// destination and advances the position. Returns the number of bytes read
// (0 at or beyond EOF). Shared implementation for both Read overloads.
internal int ReadCore(Span<byte> destination)
{
    EnsureNotClosed();
    EnsureReadable();
    // Use a local variable to avoid a race where another thread
    // changes our position after we decide we can read some bytes.
    long pos = Interlocked.Read(ref _position);
    long len = Interlocked.Read(ref _length);
    long n = Math.Min(len - pos, destination.Length);
    if (n <= 0)
    {
        return 0;
    }
    int nInt = (int)n; // Safe because n <= count, which is an Int32
    if (nInt < 0)
    {
        return 0; // _position could be beyond EOF
    }
    Debug.Assert(pos + nInt >= 0, "_position + n >= 0"); // len is less than 2^63 -1.
    unsafe
    {
        fixed (byte* pBuffer = &MemoryMarshal.GetReference(destination))
        {
            if (_buffer != null)
            {
                // SafeBuffer-backed stream: acquire/release the pointer under a
                // constrained execution region so it is always released.
                byte* pointer = null;
                RuntimeHelpers.PrepareConstrainedRegions();
                try
                {
                    _buffer.AcquirePointer(ref pointer);
                    // _offset is the stream's start within the SafeBuffer.
                    Buffer.Memcpy(pBuffer, pointer + pos + _offset, nInt);
                }
                finally
                {
                    if (pointer != null)
                    {
                        _buffer.ReleasePointer();
                    }
                }
            }
            else
            {
                // Raw-pointer-backed stream.
                Buffer.Memcpy(pBuffer, _mem + pos, nInt);
            }
        }
    }
    Interlocked.Exchange(ref _position, pos + n);
    return nInt;
}
/// <summary>
/// Reads bytes from stream and puts them into the buffer. The read itself
/// completes synchronously; only the returned Task is asynchronous in shape.
/// </summary>
/// <param name="buffer">Buffer to read the bytes to.</param>
/// <param name="offset">Starting index in the buffer.</param>
/// <param name="count">Maximum number of bytes to read.</param>
/// <param name="cancellationToken">Token that can be used to cancel this operation.</param>
/// <returns>Task that can be used to access the number of bytes actually read.</returns>
public override Task<Int32> ReadAsync(Byte[] buffer, Int32 offset, Int32 count, CancellationToken cancellationToken)
{
    if (buffer == null)
        throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
    if (offset < 0)
        throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
    if (count < 0)
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
    if (buffer.Length - offset < count)
        throw new ArgumentException(SR.Argument_InvalidOffLen);
    if (cancellationToken.IsCancellationRequested)
        return Task.FromCanceled<Int32>(cancellationToken);
    try
    {
        Int32 n = Read(buffer, offset, count);
        // Reuse the last completed task when it carries the same byte count,
        // avoiding a Task allocation for streams read in uniform chunks.
        Task<Int32> t = _lastReadTask;
        return (t != null && t.Result == n) ? t : (_lastReadTask = Task.FromResult<Int32>(n));
    }
    catch (Exception ex)
    {
        Debug.Assert(!(ex is OperationCanceledException));
        return Task.FromException<Int32>(ex);
    }
}
/// <summary>
/// Reads bytes from stream and puts them into the buffer
/// </summary>
/// <param name="destination">Buffer to read the bytes to.</param>
/// <param name="cancellationToken">Token that can be used to cancel this operation.</param>
public override ValueTask<int> ReadAsync(Memory<byte> destination, CancellationToken cancellationToken = default(CancellationToken))
{
if (cancellationToken.IsCancellationRequested)
{
return new ValueTask<int>(Task.FromCanceled<int>(cancellationToken));
}
try
{
// ReadAsync(Memory<byte>,...) needs to delegate to an existing virtual to do the work, in case an existing derived type
// has changed or augmented the logic associated with reads. If the Memory wraps an array, we could delegate to
// ReadAsync(byte[], ...), but that would defeat part of the purpose, as ReadAsync(byte[], ...) often needs to allocate
// a Task<int> for the return value, so we want to delegate to one of the synchronous methods. We could always
// delegate to the Read(Span<byte>) method, and that's the most efficient solution when dealing with a concrete
// UnmanagedMemoryStream, but if we're dealing with a type derived from UnmanagedMemoryStream, Read(Span<byte>) will end up delegating
// to Read(byte[], ...), which requires it to get a byte[] from ArrayPool and copy the data. So, we special-case the
// very common case of the Memory<byte> wrapping an array: if it does, we delegate to Read(byte[], ...) with it,
// as that will be efficient in both cases, and we fall back to Read(Span<byte>) if the Memory<byte> wrapped something
// else; if this is a concrete UnmanagedMemoryStream, that'll be efficient, and only in the case where the Memory<byte> wrapped
// something other than an array and this is an UnmanagedMemoryStream-derived type that doesn't override Read(Span<byte>) will
// it then fall back to doing the ArrayPool/copy behavior.
return new ValueTask<int>(
destination.TryGetArray(out ArraySegment<byte> destinationArray) ?
Read(destinationArray.Array, destinationArray.Offset, destinationArray.Count) :
Read(destination.Span));
}
catch (Exception ex)
{
return new ValueTask<int>(Task.FromException<int>(ex));
}
}
/// <summary>
/// Returns the byte at the stream current Position and advances the Position.
/// </summary>
/// <returns></returns>
public override int ReadByte()
{
EnsureNotClosed();
EnsureReadable();
long pos = Interlocked.Read(ref _position); // Use a local to avoid a race condition
long len = Interlocked.Read(ref _length);
if (pos >= len)
return -1;
Interlocked.Exchange(ref _position, pos + 1);
int result;
if (_buffer != null)
{
unsafe
{
byte* pointer = null;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
_buffer.AcquirePointer(ref pointer);
result = *(pointer + pos + _offset);
}
finally
{
if (pointer != null)
{
_buffer.ReleasePointer();
}
}
}
}
else
{
unsafe
{
result = _mem[pos];
}
}
return result;
}
/// <summary>
/// Moves the Position to a specific location in the stream. Seeking past the
/// end is allowed (no exception); seeking before the beginning throws.
/// </summary>
/// <param name="offset">Offset from the loc parameter.</param>
/// <param name="loc">Origin for the offset parameter.</param>
/// <returns>The new absolute position within the stream.</returns>
public override long Seek(long offset, SeekOrigin loc)
{
    EnsureNotClosed();
    switch (loc)
    {
        case SeekOrigin.Begin:
            if (offset < 0)
                throw new IOException(SR.IO_SeekBeforeBegin);
            Interlocked.Exchange(ref _position, offset);
            break;
        case SeekOrigin.Current:
            long pos = Interlocked.Read(ref _position);
            if (offset + pos < 0)
                throw new IOException(SR.IO_SeekBeforeBegin);
            Interlocked.Exchange(ref _position, offset + pos);
            break;
        case SeekOrigin.End:
            long len = Interlocked.Read(ref _length);
            if (len + offset < 0)
                throw new IOException(SR.IO_SeekBeforeBegin);
            Interlocked.Exchange(ref _position, len + offset);
            break;
        default:
            throw new ArgumentException(SR.Argument_InvalidSeekOrigin);
    }
    long finalPos = Interlocked.Read(ref _position);
    Debug.Assert(finalPos >= 0, "_position >= 0");
    return finalPos;
}
/// <summary>
/// Sets the Length of the stream. Not supported for SafeBuffer-backed
/// streams, and the new length may not exceed the fixed capacity. When
/// growing, the newly exposed bytes are zeroed before the length is
/// published; when shrinking below the current position, the position is
/// clamped to the new length.
/// </summary>
/// <param name="value">The new length, in bytes.</param>
public override void SetLength(long value)
{
    if (value < 0)
        throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_NeedNonNegNum);
    if (_buffer != null)
        throw new NotSupportedException(SR.NotSupported_UmsSafeBuffer);
    EnsureNotClosed();
    EnsureWriteable();
    if (value > _capacity)
        throw new IOException(SR.IO_FixedCapacity);
    long pos = Interlocked.Read(ref _position);
    long len = Interlocked.Read(ref _length);
    if (value > len)
    {
        unsafe
        {
            // Zero first so readers never observe unzeroed memory inside the
            // stream's published length.
            Buffer.ZeroMemory(_mem + len, value - len);
        }
    }
    Interlocked.Exchange(ref _length, value);
    if (pos > value)
    {
        Interlocked.Exchange(ref _position, value);
    }
}
/// <summary>
/// Writes buffer into the stream
/// </summary>
/// <param name="buffer">Buffer that will be written.</param>
/// <param name="offset">Starting index in the buffer.</param>
/// <param name="count">Number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (buffer == null)
throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
if (offset < 0)
throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
if (buffer.Length - offset < count)
throw new ArgumentException(SR.Argument_InvalidOffLen);
WriteCore(new Span<byte>(buffer, offset, count));
}
public override void Write(ReadOnlySpan<byte> source)
{
if (GetType() == typeof(UnmanagedMemoryStream))
{
WriteCore(source);
}
else
{
// UnmanagedMemoryStream is not sealed, and a derived type may have overridden Write(byte[], int, int) prior
// to this Write(Span<byte>) overload being introduced. In that case, this Write(Span<byte>) overload
// should use the behavior of Write(byte[],int,int) overload.
base.Write(source);
}
}
internal unsafe void WriteCore(ReadOnlySpan<byte> source)
{
EnsureNotClosed();
EnsureWriteable();
long pos = Interlocked.Read(ref _position); // Use a local to avoid a race condition
long len = Interlocked.Read(ref _length);
long n = pos + source.Length;
// Check for overflow
if (n < 0)
{
throw new IOException(SR.IO_StreamTooLong);
}
if (n > _capacity)
{
throw new NotSupportedException(SR.IO_FixedCapacity);
}
if (_buffer == null)
{
// Check to see whether we are now expanding the stream and must
// zero any memory in the middle.
if (pos > len)
{
Buffer.ZeroMemory(_mem + len, pos - len);
}
// set length after zeroing memory to avoid race condition of accessing unzeroed memory
if (n > len)
{
Interlocked.Exchange(ref _length, n);
}
}
fixed (byte* pBuffer = &MemoryMarshal.GetReference(source))
{
if (_buffer != null)
{
long bytesLeft = _capacity - pos;
if (bytesLeft < source.Length)
{
throw new ArgumentException(SR.Arg_BufferTooSmall);
}
byte* pointer = null;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
_buffer.AcquirePointer(ref pointer);
Buffer.Memcpy(pointer + pos + _offset, pBuffer, source.Length);
}
finally
{
if (pointer != null)
{
_buffer.ReleasePointer();
}
}
}
else
{
Buffer.Memcpy(_mem + pos, pBuffer, source.Length);
}
}
Interlocked.Exchange(ref _position, n);
return;
}
/// <summary>
/// Writes buffer into the stream. The operation completes synchronously.
/// </summary>
/// <param name="buffer">Buffer that will be written.</param>
/// <param name="offset">Starting index in the buffer.</param>
/// <param name="count">Number of bytes to write.</param>
/// <param name="cancellationToken">Token that can be used to cancel the operation.</param>
/// <returns>Task that can be awaited </returns>
public override Task WriteAsync(Byte[] buffer, Int32 offset, Int32 count, CancellationToken cancellationToken)
{
if (buffer == null)
throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
if (offset < 0)
throw new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
if (buffer.Length - offset < count)
throw new ArgumentException(SR.Argument_InvalidOffLen);
if (cancellationToken.IsCancellationRequested)
return Task.FromCanceled(cancellationToken);
try
{
Write(buffer, offset, count);
return Task.CompletedTask;
}
catch (Exception ex)
{
Debug.Assert(!(ex is OperationCanceledException));
return Task.FromException(ex);
}
}
/// <summary>
/// Writes buffer into the stream. The operation completes synchronously.
/// </summary>
/// <param name="buffer">Buffer that will be written.</param>
/// <param name="cancellationToken">Token that can be used to cancel the operation.</param>
public override Task WriteAsync(ReadOnlyMemory<byte> source, CancellationToken cancellationToken = default(CancellationToken))
{
if (cancellationToken.IsCancellationRequested)
{
return Task.FromCanceled(cancellationToken);
}
try
{
// See corresponding comment in ReadAsync for why we don't just always use Write(ReadOnlySpan<byte>).
// Unlike ReadAsync, we could delegate to WriteAsync(byte[], ...) here, but we don't for consistency.
if (MemoryMarshal.TryGetArray(source, out ArraySegment<byte> sourceArray))
{
Write(sourceArray.Array, sourceArray.Offset, sourceArray.Count);
}
else
{
Write(source.Span);
}
return Task.CompletedTask;
}
catch (Exception ex)
{
return Task.FromException(ex);
}
}
        /// <summary>
        /// Writes a byte to the stream and advances the current Position.
        /// </summary>
        /// <param name="value">The byte to write at the current position.</param>
        public override void WriteByte(byte value)
        {
            EnsureNotClosed();
            EnsureWriteable();
            long pos = Interlocked.Read(ref _position); // Use a local to avoid a race condition
            long len = Interlocked.Read(ref _length);
            long n = pos + 1;
            if (pos >= len)
            {
                // Check for overflow
                if (n < 0)
                    throw new IOException(SR.IO_StreamTooLong);
                // The stream is fixed-capacity: growing past _capacity is not supported.
                if (n > _capacity)
                    throw new NotSupportedException(SR.IO_FixedCapacity);
                // Check to see whether we are now expanding the stream and must
                // zero any memory in the middle.
                // don't do if created from SafeBuffer
                if (_buffer == null)
                {
                    // Position was seeked past the current length: the gap [len, pos)
                    // has never been written, so clear it before exposing it.
                    if (pos > len)
                    {
                        unsafe
                        {
                            Buffer.ZeroMemory(_mem + len, pos - len);
                        }
                    }
                    // set length after zeroing memory to avoid race condition of accessing unzeroed memory
                    Interlocked.Exchange(ref _length, n);
                }
            }
            if (_buffer != null)
            {
                // SafeBuffer-backed stream: acquire/release the pointer inside a
                // constrained region so the release cannot be skipped by an async
                // exception (thread abort / OOM).
                unsafe
                {
                    byte* pointer = null;
                    RuntimeHelpers.PrepareConstrainedRegions();
                    try
                    {
                        _buffer.AcquirePointer(ref pointer);
                        *(pointer + pos + _offset) = value;
                    }
                    finally
                    {
                        if (pointer != null)
                        {
                            _buffer.ReleasePointer();
                        }
                    }
                }
            }
            else
            {
                // Raw-memory stream: write directly through the unmanaged pointer.
                // NOTE(review): this path indexes _mem[pos] without adding _offset —
                // presumably _mem already points at the offset base; confirm against
                // the constructor that initializes _mem.
                unsafe
                {
                    _mem[pos] = value;
                }
            }
            Interlocked.Exchange(ref _position, n);
        }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using GlmSharp.Swizzle;
// ReSharper disable InconsistentNaming
namespace GlmSharp
{
/// <summary>
/// A matrix of type Half with 4 columns and 2 rows.
/// </summary>
[Serializable]
[StructLayout(LayoutKind.Sequential)]
public struct hmat4x2 : IEnumerable<Half>, IEquatable<hmat4x2>
{
#region Fields
/// <summary>
/// Column 0, Rows 0
/// </summary>
public Half m00;
/// <summary>
/// Column 0, Rows 1
/// </summary>
public Half m01;
/// <summary>
/// Column 1, Rows 0
/// </summary>
public Half m10;
/// <summary>
/// Column 1, Rows 1
/// </summary>
public Half m11;
/// <summary>
/// Column 2, Rows 0
/// </summary>
public Half m20;
/// <summary>
/// Column 2, Rows 1
/// </summary>
public Half m21;
/// <summary>
/// Column 3, Rows 0
/// </summary>
public Half m30;
/// <summary>
/// Column 3, Rows 1
/// </summary>
public Half m31;
#endregion
#region Constructors
/// <summary>
/// Component-wise constructor
/// </summary>
public hmat4x2(Half m00, Half m01, Half m10, Half m11, Half m20, Half m21, Half m30, Half m31)
{
this.m00 = m00;
this.m01 = m01;
this.m10 = m10;
this.m11 = m11;
this.m20 = m20;
this.m21 = m21;
this.m30 = m30;
this.m31 = m31;
}
/// <summary>
/// Constructs this matrix from a hmat2. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat2 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = Half.Zero;
this.m21 = Half.Zero;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat3x2. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat3x2 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = m.m20;
this.m21 = m.m21;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat4x2. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat4x2 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = m.m20;
this.m21 = m.m21;
this.m30 = m.m30;
this.m31 = m.m31;
}
/// <summary>
/// Constructs this matrix from a hmat2x3. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat2x3 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = Half.Zero;
this.m21 = Half.Zero;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat3. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat3 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = m.m20;
this.m21 = m.m21;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat4x3. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat4x3 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = m.m20;
this.m21 = m.m21;
this.m30 = m.m30;
this.m31 = m.m31;
}
/// <summary>
/// Constructs this matrix from a hmat2x4. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat2x4 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = Half.Zero;
this.m21 = Half.Zero;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat3x4. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat3x4 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = m.m20;
this.m21 = m.m21;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat4. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hmat4 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m10 = m.m10;
this.m11 = m.m11;
this.m20 = m.m20;
this.m21 = m.m21;
this.m30 = m.m30;
this.m31 = m.m31;
}
/// <summary>
/// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hvec2 c0, hvec2 c1)
{
this.m00 = c0.x;
this.m01 = c0.y;
this.m10 = c1.x;
this.m11 = c1.y;
this.m20 = Half.Zero;
this.m21 = Half.Zero;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hvec2 c0, hvec2 c1, hvec2 c2)
{
this.m00 = c0.x;
this.m01 = c0.y;
this.m10 = c1.x;
this.m11 = c1.y;
this.m20 = c2.x;
this.m21 = c2.y;
this.m30 = Half.Zero;
this.m31 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat4x2(hvec2 c0, hvec2 c1, hvec2 c2, hvec2 c3)
{
this.m00 = c0.x;
this.m01 = c0.y;
this.m10 = c1.x;
this.m11 = c1.y;
this.m20 = c2.x;
this.m21 = c2.y;
this.m30 = c3.x;
this.m31 = c3.y;
}
#endregion
#region Properties
/// <summary>
/// Creates a 2D array with all values (address: Values[x, y])
/// </summary>
public Half[,] Values => new[,] { { m00, m01 }, { m10, m11 }, { m20, m21 }, { m30, m31 } };
/// <summary>
/// Creates a 1D array with all values (internal order)
/// </summary>
public Half[] Values1D => new[] { m00, m01, m10, m11, m20, m21, m30, m31 };
/// <summary>
/// Gets or sets the column nr 0
/// </summary>
public hvec2 Column0
{
get
{
return new hvec2(m00, m01);
}
set
{
m00 = value.x;
m01 = value.y;
}
}
/// <summary>
/// Gets or sets the column nr 1
/// </summary>
public hvec2 Column1
{
get
{
return new hvec2(m10, m11);
}
set
{
m10 = value.x;
m11 = value.y;
}
}
/// <summary>
/// Gets or sets the column nr 2
/// </summary>
public hvec2 Column2
{
get
{
return new hvec2(m20, m21);
}
set
{
m20 = value.x;
m21 = value.y;
}
}
/// <summary>
/// Gets or sets the column nr 3
/// </summary>
public hvec2 Column3
{
get
{
return new hvec2(m30, m31);
}
set
{
m30 = value.x;
m31 = value.y;
}
}
/// <summary>
/// Gets or sets the row nr 0
/// </summary>
public hvec4 Row0
{
get
{
return new hvec4(m00, m10, m20, m30);
}
set
{
m00 = value.x;
m10 = value.y;
m20 = value.z;
m30 = value.w;
}
}
/// <summary>
/// Gets or sets the row nr 1
/// </summary>
public hvec4 Row1
{
get
{
return new hvec4(m01, m11, m21, m31);
}
set
{
m01 = value.x;
m11 = value.y;
m21 = value.z;
m31 = value.w;
}
}
#endregion
#region Static Properties
/// <summary>
/// Predefined all-zero matrix
/// </summary>
public static hmat4x2 Zero { get; } = new hmat4x2(Half.Zero, Half.Zero, Half.Zero, Half.Zero, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-ones matrix
/// </summary>
public static hmat4x2 Ones { get; } = new hmat4x2(Half.One, Half.One, Half.One, Half.One, Half.One, Half.One, Half.One, Half.One);
/// <summary>
/// Predefined identity matrix
/// </summary>
public static hmat4x2 Identity { get; } = new hmat4x2(Half.One, Half.Zero, Half.Zero, Half.One, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-MaxValue matrix
/// </summary>
public static hmat4x2 AllMaxValue { get; } = new hmat4x2(Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue);
/// <summary>
/// Predefined diagonal-MaxValue matrix
/// </summary>
public static hmat4x2 DiagonalMaxValue { get; } = new hmat4x2(Half.MaxValue, Half.Zero, Half.Zero, Half.MaxValue, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-MinValue matrix
/// </summary>
public static hmat4x2 AllMinValue { get; } = new hmat4x2(Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue);
/// <summary>
/// Predefined diagonal-MinValue matrix
/// </summary>
public static hmat4x2 DiagonalMinValue { get; } = new hmat4x2(Half.MinValue, Half.Zero, Half.Zero, Half.MinValue, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-Epsilon matrix
/// </summary>
public static hmat4x2 AllEpsilon { get; } = new hmat4x2(Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon);
/// <summary>
/// Predefined diagonal-Epsilon matrix
/// </summary>
public static hmat4x2 DiagonalEpsilon { get; } = new hmat4x2(Half.Epsilon, Half.Zero, Half.Zero, Half.Epsilon, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-NaN matrix
/// </summary>
public static hmat4x2 AllNaN { get; } = new hmat4x2(Half.NaN, Half.NaN, Half.NaN, Half.NaN, Half.NaN, Half.NaN, Half.NaN, Half.NaN);
/// <summary>
/// Predefined diagonal-NaN matrix
/// </summary>
public static hmat4x2 DiagonalNaN { get; } = new hmat4x2(Half.NaN, Half.Zero, Half.Zero, Half.NaN, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-NegativeInfinity matrix
/// </summary>
public static hmat4x2 AllNegativeInfinity { get; } = new hmat4x2(Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity);
/// <summary>
/// Predefined diagonal-NegativeInfinity matrix
/// </summary>
public static hmat4x2 DiagonalNegativeInfinity { get; } = new hmat4x2(Half.NegativeInfinity, Half.Zero, Half.Zero, Half.NegativeInfinity, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-PositiveInfinity matrix
/// </summary>
public static hmat4x2 AllPositiveInfinity { get; } = new hmat4x2(Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity);
/// <summary>
/// Predefined diagonal-PositiveInfinity matrix
/// </summary>
public static hmat4x2 DiagonalPositiveInfinity { get; } = new hmat4x2(Half.PositiveInfinity, Half.Zero, Half.Zero, Half.PositiveInfinity, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
#endregion
#region Functions
/// <summary>
/// Returns an enumerator that iterates through all fields.
/// </summary>
public IEnumerator<Half> GetEnumerator()
{
yield return m00;
yield return m01;
yield return m10;
yield return m11;
yield return m20;
yield return m21;
yield return m30;
yield return m31;
}
/// <summary>
/// Returns an enumerator that iterates through all fields.
/// </summary>
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
#endregion
/// <summary>
/// Returns the number of Fields (4 x 2 = 8).
/// </summary>
public int Count => 8;
/// <summary>
/// Gets/Sets a specific indexed component (a bit slower than direct access).
/// </summary>
public Half this[int fieldIndex]
{
get
{
switch (fieldIndex)
{
case 0: return m00;
case 1: return m01;
case 2: return m10;
case 3: return m11;
case 4: return m20;
case 5: return m21;
case 6: return m30;
case 7: return m31;
default: throw new ArgumentOutOfRangeException("fieldIndex");
}
}
set
{
switch (fieldIndex)
{
case 0: this.m00 = value; break;
case 1: this.m01 = value; break;
case 2: this.m10 = value; break;
case 3: this.m11 = value; break;
case 4: this.m20 = value; break;
case 5: this.m21 = value; break;
case 6: this.m30 = value; break;
case 7: this.m31 = value; break;
default: throw new ArgumentOutOfRangeException("fieldIndex");
}
}
}
/// <summary>
/// Gets/Sets a specific 2D-indexed component (a bit slower than direct access).
/// </summary>
public Half this[int col, int row]
{
get
{
return this[col * 2 + row];
}
set
{
this[col * 2 + row] = value;
}
}
/// <summary>
/// Returns true iff this equals rhs component-wise.
/// </summary>
public bool Equals(hmat4x2 rhs) => (((m00.Equals(rhs.m00) && m01.Equals(rhs.m01)) && (m10.Equals(rhs.m10) && m11.Equals(rhs.m11))) && ((m20.Equals(rhs.m20) && m21.Equals(rhs.m21)) && (m30.Equals(rhs.m30) && m31.Equals(rhs.m31))));
/// <summary>
/// Returns true iff this equals rhs type- and component-wise.
/// </summary>
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj)) return false;
return obj is hmat4x2 && Equals((hmat4x2) obj);
}
/// <summary>
/// Returns true iff this equals rhs component-wise.
/// </summary>
public static bool operator ==(hmat4x2 lhs, hmat4x2 rhs) => lhs.Equals(rhs);
/// <summary>
/// Returns true iff this does not equal rhs (component-wise).
/// </summary>
public static bool operator !=(hmat4x2 lhs, hmat4x2 rhs) => !lhs.Equals(rhs);
/// <summary>
/// Returns a hash code for this instance.
/// </summary>
public override int GetHashCode()
{
unchecked
{
return ((((((((((((((m00.GetHashCode()) * 397) ^ m01.GetHashCode()) * 397) ^ m10.GetHashCode()) * 397) ^ m11.GetHashCode()) * 397) ^ m20.GetHashCode()) * 397) ^ m21.GetHashCode()) * 397) ^ m30.GetHashCode()) * 397) ^ m31.GetHashCode();
}
}
/// <summary>
/// Returns a transposed version of this matrix.
/// </summary>
public hmat2x4 Transposed => new hmat2x4(m00, m10, m20, m30, m01, m11, m21, m31);
/// <summary>
/// Returns the minimal component of this matrix.
/// </summary>
public Half MinElement => Half.Min(Half.Min(Half.Min(Half.Min(Half.Min(Half.Min(Half.Min(m00, m01), m10), m11), m20), m21), m30), m31);
/// <summary>
/// Returns the maximal component of this matrix.
/// </summary>
public Half MaxElement => Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(m00, m01), m10), m11), m20), m21), m30), m31);
/// <summary>
/// Returns the euclidean length of this matrix.
/// </summary>
public float Length => (float)Math.Sqrt((((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31))));
/// <summary>
/// Returns the squared euclidean length of this matrix.
/// </summary>
public float LengthSqr => (((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31)));
/// <summary>
/// Returns the sum of all fields.
/// </summary>
public Half Sum => (((m00 + m01) + (m10 + m11)) + ((m20 + m21) + (m30 + m31)));
/// <summary>
/// Returns the euclidean norm of this matrix.
/// </summary>
public float Norm => (float)Math.Sqrt((((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31))));
/// <summary>
/// Returns the one-norm of this matrix.
/// </summary>
public float Norm1 => (((Half.Abs(m00) + Half.Abs(m01)) + (Half.Abs(m10) + Half.Abs(m11))) + ((Half.Abs(m20) + Half.Abs(m21)) + (Half.Abs(m30) + Half.Abs(m31))));
/// <summary>
/// Returns the two-norm of this matrix.
/// </summary>
public float Norm2 => (float)Math.Sqrt((((m00*m00 + m01*m01) + (m10*m10 + m11*m11)) + ((m20*m20 + m21*m21) + (m30*m30 + m31*m31))));
/// <summary>
/// Returns the max-norm of this matrix.
/// </summary>
public Half NormMax => Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(Half.Abs(m00), Half.Abs(m01)), Half.Abs(m10)), Half.Abs(m11)), Half.Abs(m20)), Half.Abs(m21)), Half.Abs(m30)), Half.Abs(m31));
/// <summary>
/// Returns the p-norm of this matrix.
/// </summary>
public double NormP(double p) => Math.Pow((((Math.Pow((double)Half.Abs(m00), p) + Math.Pow((double)Half.Abs(m01), p)) + (Math.Pow((double)Half.Abs(m10), p) + Math.Pow((double)Half.Abs(m11), p))) + ((Math.Pow((double)Half.Abs(m20), p) + Math.Pow((double)Half.Abs(m21), p)) + (Math.Pow((double)Half.Abs(m30), p) + Math.Pow((double)Half.Abs(m31), p)))), 1 / p);
/// <summary>
/// Executes a matrix-matrix-multiplication hmat4x2 * hmat2x4 -> hmat2.
/// </summary>
public static hmat2 operator*(hmat4x2 lhs, hmat2x4 rhs) => new hmat2(((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01) + (lhs.m20 * rhs.m02 + lhs.m30 * rhs.m03)), ((lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01) + (lhs.m21 * rhs.m02 + lhs.m31 * rhs.m03)), ((lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11) + (lhs.m20 * rhs.m12 + lhs.m30 * rhs.m13)), ((lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11) + (lhs.m21 * rhs.m12 + lhs.m31 * rhs.m13)));
/// <summary>
/// Executes a matrix-matrix-multiplication hmat4x2 * hmat3x4 -> hmat3x2.
/// </summary>
public static hmat3x2 operator*(hmat4x2 lhs, hmat3x4 rhs) => new hmat3x2(((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01) + (lhs.m20 * rhs.m02 + lhs.m30 * rhs.m03)), ((lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01) + (lhs.m21 * rhs.m02 + lhs.m31 * rhs.m03)), ((lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11) + (lhs.m20 * rhs.m12 + lhs.m30 * rhs.m13)), ((lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11) + (lhs.m21 * rhs.m12 + lhs.m31 * rhs.m13)), ((lhs.m00 * rhs.m20 + lhs.m10 * rhs.m21) + (lhs.m20 * rhs.m22 + lhs.m30 * rhs.m23)), ((lhs.m01 * rhs.m20 + lhs.m11 * rhs.m21) + (lhs.m21 * rhs.m22 + lhs.m31 * rhs.m23)));
/// <summary>
/// Executes a matrix-matrix-multiplication hmat4x2 * hmat4 -> hmat4x2.
/// </summary>
public static hmat4x2 operator*(hmat4x2 lhs, hmat4 rhs) => new hmat4x2(((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01) + (lhs.m20 * rhs.m02 + lhs.m30 * rhs.m03)), ((lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01) + (lhs.m21 * rhs.m02 + lhs.m31 * rhs.m03)), ((lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11) + (lhs.m20 * rhs.m12 + lhs.m30 * rhs.m13)), ((lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11) + (lhs.m21 * rhs.m12 + lhs.m31 * rhs.m13)), ((lhs.m00 * rhs.m20 + lhs.m10 * rhs.m21) + (lhs.m20 * rhs.m22 + lhs.m30 * rhs.m23)), ((lhs.m01 * rhs.m20 + lhs.m11 * rhs.m21) + (lhs.m21 * rhs.m22 + lhs.m31 * rhs.m23)), ((lhs.m00 * rhs.m30 + lhs.m10 * rhs.m31) + (lhs.m20 * rhs.m32 + lhs.m30 * rhs.m33)), ((lhs.m01 * rhs.m30 + lhs.m11 * rhs.m31) + (lhs.m21 * rhs.m32 + lhs.m31 * rhs.m33)));
/// <summary>
/// Executes a matrix-vector-multiplication.
/// </summary>
public static hvec2 operator*(hmat4x2 m, hvec4 v) => new hvec2(((m.m00 * v.x + m.m10 * v.y) + (m.m20 * v.z + m.m30 * v.w)), ((m.m01 * v.x + m.m11 * v.y) + (m.m21 * v.z + m.m31 * v.w)));
/// <summary>
/// Executes a component-wise * (multiply).
/// </summary>
public static hmat4x2 CompMul(hmat4x2 A, hmat4x2 B) => new hmat4x2(A.m00 * B.m00, A.m01 * B.m01, A.m10 * B.m10, A.m11 * B.m11, A.m20 * B.m20, A.m21 * B.m21, A.m30 * B.m30, A.m31 * B.m31);
/// <summary>
/// Executes a component-wise / (divide).
/// </summary>
public static hmat4x2 CompDiv(hmat4x2 A, hmat4x2 B) => new hmat4x2(A.m00 / B.m00, A.m01 / B.m01, A.m10 / B.m10, A.m11 / B.m11, A.m20 / B.m20, A.m21 / B.m21, A.m30 / B.m30, A.m31 / B.m31);
/// <summary>
/// Executes a component-wise + (add).
/// </summary>
public static hmat4x2 CompAdd(hmat4x2 A, hmat4x2 B) => new hmat4x2(A.m00 + B.m00, A.m01 + B.m01, A.m10 + B.m10, A.m11 + B.m11, A.m20 + B.m20, A.m21 + B.m21, A.m30 + B.m30, A.m31 + B.m31);
/// <summary>
/// Executes a component-wise - (subtract).
/// </summary>
public static hmat4x2 CompSub(hmat4x2 A, hmat4x2 B) => new hmat4x2(A.m00 - B.m00, A.m01 - B.m01, A.m10 - B.m10, A.m11 - B.m11, A.m20 - B.m20, A.m21 - B.m21, A.m30 - B.m30, A.m31 - B.m31);
/// <summary>
/// Executes a component-wise + (add).
/// </summary>
public static hmat4x2 operator+(hmat4x2 lhs, hmat4x2 rhs) => new hmat4x2(lhs.m00 + rhs.m00, lhs.m01 + rhs.m01, lhs.m10 + rhs.m10, lhs.m11 + rhs.m11, lhs.m20 + rhs.m20, lhs.m21 + rhs.m21, lhs.m30 + rhs.m30, lhs.m31 + rhs.m31);
/// <summary>
/// Executes a component-wise + (add) with a scalar.
/// </summary>
public static hmat4x2 operator+(hmat4x2 lhs, Half rhs) => new hmat4x2(lhs.m00 + rhs, lhs.m01 + rhs, lhs.m10 + rhs, lhs.m11 + rhs, lhs.m20 + rhs, lhs.m21 + rhs, lhs.m30 + rhs, lhs.m31 + rhs);
/// <summary>
/// Executes a component-wise + (add) with a scalar.
/// </summary>
public static hmat4x2 operator+(Half lhs, hmat4x2 rhs) => new hmat4x2(lhs + rhs.m00, lhs + rhs.m01, lhs + rhs.m10, lhs + rhs.m11, lhs + rhs.m20, lhs + rhs.m21, lhs + rhs.m30, lhs + rhs.m31);
/// <summary>
/// Executes a component-wise - (subtract).
/// </summary>
public static hmat4x2 operator-(hmat4x2 lhs, hmat4x2 rhs) => new hmat4x2(lhs.m00 - rhs.m00, lhs.m01 - rhs.m01, lhs.m10 - rhs.m10, lhs.m11 - rhs.m11, lhs.m20 - rhs.m20, lhs.m21 - rhs.m21, lhs.m30 - rhs.m30, lhs.m31 - rhs.m31);
/// <summary>
/// Executes a component-wise - (subtract) with a scalar.
/// </summary>
public static hmat4x2 operator-(hmat4x2 lhs, Half rhs) => new hmat4x2(lhs.m00 - rhs, lhs.m01 - rhs, lhs.m10 - rhs, lhs.m11 - rhs, lhs.m20 - rhs, lhs.m21 - rhs, lhs.m30 - rhs, lhs.m31 - rhs);
/// <summary>
/// Executes a component-wise - (subtract) with a scalar.
/// </summary>
public static hmat4x2 operator-(Half lhs, hmat4x2 rhs) => new hmat4x2(lhs - rhs.m00, lhs - rhs.m01, lhs - rhs.m10, lhs - rhs.m11, lhs - rhs.m20, lhs - rhs.m21, lhs - rhs.m30, lhs - rhs.m31);
/// <summary>
/// Executes a component-wise / (divide) with a scalar.
/// </summary>
public static hmat4x2 operator/(hmat4x2 lhs, Half rhs) => new hmat4x2(lhs.m00 / rhs, lhs.m01 / rhs, lhs.m10 / rhs, lhs.m11 / rhs, lhs.m20 / rhs, lhs.m21 / rhs, lhs.m30 / rhs, lhs.m31 / rhs);
/// <summary>
/// Executes a component-wise / (divide) with a scalar.
/// </summary>
public static hmat4x2 operator/(Half lhs, hmat4x2 rhs) => new hmat4x2(lhs / rhs.m00, lhs / rhs.m01, lhs / rhs.m10, lhs / rhs.m11, lhs / rhs.m20, lhs / rhs.m21, lhs / rhs.m30, lhs / rhs.m31);
/// <summary>
/// Executes a component-wise * (multiply) with a scalar.
/// </summary>
public static hmat4x2 operator*(hmat4x2 lhs, Half rhs) => new hmat4x2(lhs.m00 * rhs, lhs.m01 * rhs, lhs.m10 * rhs, lhs.m11 * rhs, lhs.m20 * rhs, lhs.m21 * rhs, lhs.m30 * rhs, lhs.m31 * rhs);
/// <summary>
/// Executes a component-wise * (multiply) with a scalar.
/// </summary>
public static hmat4x2 operator*(Half lhs, hmat4x2 rhs) => new hmat4x2(lhs * rhs.m00, lhs * rhs.m01, lhs * rhs.m10, lhs * rhs.m11, lhs * rhs.m20, lhs * rhs.m21, lhs * rhs.m30, lhs * rhs.m31);
/// <summary>
/// Executes a component-wise lesser-than comparison.
/// </summary>
public static bmat4x2 operator<(hmat4x2 lhs, hmat4x2 rhs) => new bmat4x2(lhs.m00 < rhs.m00, lhs.m01 < rhs.m01, lhs.m10 < rhs.m10, lhs.m11 < rhs.m11, lhs.m20 < rhs.m20, lhs.m21 < rhs.m21, lhs.m30 < rhs.m30, lhs.m31 < rhs.m31);
/// <summary>
/// Executes a component-wise lesser-than comparison with a scalar.
/// </summary>
public static bmat4x2 operator<(hmat4x2 lhs, Half rhs) => new bmat4x2(lhs.m00 < rhs, lhs.m01 < rhs, lhs.m10 < rhs, lhs.m11 < rhs, lhs.m20 < rhs, lhs.m21 < rhs, lhs.m30 < rhs, lhs.m31 < rhs);
/// <summary>
/// Executes a component-wise lesser-than comparison with a scalar.
/// </summary>
public static bmat4x2 operator<(Half lhs, hmat4x2 rhs) => new bmat4x2(lhs < rhs.m00, lhs < rhs.m01, lhs < rhs.m10, lhs < rhs.m11, lhs < rhs.m20, lhs < rhs.m21, lhs < rhs.m30, lhs < rhs.m31);
/// <summary>
/// Executes a component-wise lesser-or-equal comparison.
/// </summary>
public static bmat4x2 operator<=(hmat4x2 lhs, hmat4x2 rhs) => new bmat4x2(lhs.m00 <= rhs.m00, lhs.m01 <= rhs.m01, lhs.m10 <= rhs.m10, lhs.m11 <= rhs.m11, lhs.m20 <= rhs.m20, lhs.m21 <= rhs.m21, lhs.m30 <= rhs.m30, lhs.m31 <= rhs.m31);
/// <summary>
/// Executes a component-wise lesser-or-equal comparison with a scalar.
/// </summary>
public static bmat4x2 operator<=(hmat4x2 lhs, Half rhs) => new bmat4x2(lhs.m00 <= rhs, lhs.m01 <= rhs, lhs.m10 <= rhs, lhs.m11 <= rhs, lhs.m20 <= rhs, lhs.m21 <= rhs, lhs.m30 <= rhs, lhs.m31 <= rhs);
/// <summary>
/// Executes a component-wise lesser-or-equal comparison with a scalar.
/// </summary>
public static bmat4x2 operator<=(Half lhs, hmat4x2 rhs) => new bmat4x2(lhs <= rhs.m00, lhs <= rhs.m01, lhs <= rhs.m10, lhs <= rhs.m11, lhs <= rhs.m20, lhs <= rhs.m21, lhs <= rhs.m30, lhs <= rhs.m31);
/// <summary>
/// Executes a component-wise greater-than comparison.
/// </summary>
public static bmat4x2 operator>(hmat4x2 lhs, hmat4x2 rhs) => new bmat4x2(lhs.m00 > rhs.m00, lhs.m01 > rhs.m01, lhs.m10 > rhs.m10, lhs.m11 > rhs.m11, lhs.m20 > rhs.m20, lhs.m21 > rhs.m21, lhs.m30 > rhs.m30, lhs.m31 > rhs.m31);
/// <summary>
/// Executes a component-wise greater-than comparison with a scalar.
/// </summary>
public static bmat4x2 operator>(hmat4x2 lhs, Half rhs) => new bmat4x2(lhs.m00 > rhs, lhs.m01 > rhs, lhs.m10 > rhs, lhs.m11 > rhs, lhs.m20 > rhs, lhs.m21 > rhs, lhs.m30 > rhs, lhs.m31 > rhs);
/// <summary>
/// Executes a component-wise greater-than comparison with a scalar.
/// </summary>
public static bmat4x2 operator>(Half lhs, hmat4x2 rhs) => new bmat4x2(lhs > rhs.m00, lhs > rhs.m01, lhs > rhs.m10, lhs > rhs.m11, lhs > rhs.m20, lhs > rhs.m21, lhs > rhs.m30, lhs > rhs.m31);
/// <summary>
/// Executes a component-wise greater-or-equal comparison.
/// </summary>
public static bmat4x2 operator>=(hmat4x2 lhs, hmat4x2 rhs) => new bmat4x2(lhs.m00 >= rhs.m00, lhs.m01 >= rhs.m01, lhs.m10 >= rhs.m10, lhs.m11 >= rhs.m11, lhs.m20 >= rhs.m20, lhs.m21 >= rhs.m21, lhs.m30 >= rhs.m30, lhs.m31 >= rhs.m31);
/// <summary>
/// Executes a component-wise greater-or-equal comparison with a scalar.
/// </summary>
public static bmat4x2 operator>=(hmat4x2 lhs, Half rhs) => new bmat4x2(lhs.m00 >= rhs, lhs.m01 >= rhs, lhs.m10 >= rhs, lhs.m11 >= rhs, lhs.m20 >= rhs, lhs.m21 >= rhs, lhs.m30 >= rhs, lhs.m31 >= rhs);
/// <summary>
/// Executes a component-wise greater-or-equal comparison with a scalar.
/// </summary>
public static bmat4x2 operator>=(Half lhs, hmat4x2 rhs) => new bmat4x2(lhs >= rhs.m00, lhs >= rhs.m01, lhs >= rhs.m10, lhs >= rhs.m11, lhs >= rhs.m20, lhs >= rhs.m21, lhs >= rhs.m30, lhs >= rhs.m31);
}
}
| |
using System;
using System.Collections.Generic;
using Microsoft.Data.Entity.Migrations;
namespace RPG_Character_Scry.Migrations
{
    /// <summary>
    /// Initial EF migration: creates the Book, Player, AbilityScore, Attack, Feature,
    /// Spell, SkillToollInfo and ClassInfo tables (SQLite, autoincrement PKs).
    /// NOTE(review): "SkillToollInfo" and "Profinincy" look like typos of
    /// "SkillToolInfo"/"Proficiency", but they must match the model snapshot —
    /// do not rename them here; fix the model and add a new migration instead.
    /// </summary>
    public partial class InitMigration : Migration
    {
        /// <summary>
        /// Applies the migration. Parent tables (Book, Player) are created first so
        /// the dependent tables' foreign keys can reference them; all FKs use
        /// ON DELETE RESTRICT.
        /// </summary>
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            // Book: source book lookup (no dependencies).
            migrationBuilder.CreateTable(
                name: "Book",
                columns: table => new
                {
                    BookId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    Name = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Book", x => x.BookId);
                });
            // Player: character sheet root (no dependencies).
            migrationBuilder.CreateTable(
                name: "Player",
                columns: table => new
                {
                    PlayerId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    Name = table.Column<string>(nullable: true),
                    PlayerName = table.Column<string>(nullable: true),
                    Race = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Player", x => x.PlayerId);
                });
            // AbilityScore: owned by Player (conventional shadow FK "PlayerPlayerId").
            migrationBuilder.CreateTable(
                name: "AbilityScore",
                columns: table => new
                {
                    AbilityScoreId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    Mod = table.Column<int>(nullable: false),
                    Name = table.Column<string>(nullable: true),
                    PlayerPlayerId = table.Column<int>(nullable: true),
                    ShortName = table.Column<string>(nullable: true),
                    Value = table.Column<int>(nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AbilityScore", x => x.AbilityScoreId);
                    table.ForeignKey(
                        name: "FK_AbilityScore_Player_PlayerPlayerId",
                        column: x => x.PlayerPlayerId,
                        principalTable: "Player",
                        principalColumn: "PlayerId",
                        onDelete: ReferentialAction.Restrict);
                });
            // Attack: owned by Player.
            migrationBuilder.CreateTable(
                name: "Attack",
                columns: table => new
                {
                    AttackId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    Bonus = table.Column<int>(nullable: false),
                    Damage = table.Column<string>(nullable: true),
                    Description = table.Column<string>(nullable: true),
                    Name = table.Column<string>(nullable: true),
                    PlayerPlayerId = table.Column<int>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Attack", x => x.AttackId);
                    table.ForeignKey(
                        name: "FK_Attack_Player_PlayerPlayerId",
                        column: x => x.PlayerPlayerId,
                        principalTable: "Player",
                        principalColumn: "PlayerId",
                        onDelete: ReferentialAction.Restrict);
                });
            // Feature: references both Book and Player.
            migrationBuilder.CreateTable(
                name: "Feature",
                columns: table => new
                {
                    FeatureId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    BookBookId = table.Column<int>(nullable: true),
                    Description = table.Column<string>(nullable: true),
                    Level = table.Column<int>(nullable: false),
                    Limit = table.Column<int>(nullable: false),
                    Name = table.Column<string>(nullable: true),
                    PlayerPlayerId = table.Column<int>(nullable: true),
                    StartPage = table.Column<int>(nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Feature", x => x.FeatureId);
                    table.ForeignKey(
                        name: "FK_Feature_Book_BookBookId",
                        column: x => x.BookBookId,
                        principalTable: "Book",
                        principalColumn: "BookId",
                        onDelete: ReferentialAction.Restrict);
                    table.ForeignKey(
                        name: "FK_Feature_Player_PlayerPlayerId",
                        column: x => x.PlayerPlayerId,
                        principalTable: "Player",
                        principalColumn: "PlayerId",
                        onDelete: ReferentialAction.Restrict);
                });
            // Spell: references Book and Player; (System, Name) is an alternate key.
            migrationBuilder.CreateTable(
                name: "Spell",
                columns: table => new
                {
                    SpellId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    BookBookId = table.Column<int>(nullable: true),
                    CastingTime = table.Column<string>(nullable: true),
                    Components = table.Column<string>(nullable: true),
                    Description = table.Column<string>(nullable: true),
                    Duration = table.Column<string>(nullable: true),
                    Name = table.Column<string>(nullable: true),
                    Page = table.Column<int>(nullable: false),
                    PlayerPlayerId = table.Column<int>(nullable: true),
                    Range = table.Column<string>(nullable: true),
                    School = table.Column<int>(nullable: false),
                    System = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Spell", x => x.SpellId);
                    table.UniqueConstraint("AK_Spell_System_Name", x => new { x.System, x.Name });
                    table.ForeignKey(
                        name: "FK_Spell_Book_BookBookId",
                        column: x => x.BookBookId,
                        principalTable: "Book",
                        principalColumn: "BookId",
                        onDelete: ReferentialAction.Restrict);
                    table.ForeignKey(
                        name: "FK_Spell_Player_PlayerPlayerId",
                        column: x => x.PlayerPlayerId,
                        principalTable: "Player",
                        principalColumn: "PlayerId",
                        onDelete: ReferentialAction.Restrict);
                });
            // SkillToollInfo: references AbilityScore and Player.
            migrationBuilder.CreateTable(
                name: "SkillToollInfo",
                columns: table => new
                {
                    SkillToollInfoId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    AbilityScoreAbilityScoreId = table.Column<int>(nullable: true),
                    Name = table.Column<string>(nullable: true),
                    PlayerPlayerId = table.Column<int>(nullable: true),
                    Profinincy = table.Column<bool>(nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_SkillToollInfo", x => x.SkillToollInfoId);
                    table.ForeignKey(
                        name: "FK_SkillToollInfo_AbilityScore_AbilityScoreAbilityScoreId",
                        column: x => x.AbilityScoreAbilityScoreId,
                        principalTable: "AbilityScore",
                        principalColumn: "AbilityScoreId",
                        onDelete: ReferentialAction.Restrict);
                    table.ForeignKey(
                        name: "FK_SkillToollInfo_Player_PlayerPlayerId",
                        column: x => x.PlayerPlayerId,
                        principalTable: "Player",
                        principalColumn: "PlayerId",
                        onDelete: ReferentialAction.Restrict);
                });
            // ClassInfo: references Spell (must be created after Spell).
            migrationBuilder.CreateTable(
                name: "ClassInfo",
                columns: table => new
                {
                    ClassInfoId = table.Column<int>(nullable: false)
                        .Annotation("Sqlite:Autoincrement", true),
                    Name = table.Column<string>(nullable: true),
                    SpellSpellId = table.Column<int>(nullable: true),
                    System = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_ClassInfo", x => x.ClassInfoId);
                    table.ForeignKey(
                        name: "FK_ClassInfo_Spell_SpellSpellId",
                        column: x => x.SpellSpellId,
                        principalTable: "Spell",
                        principalColumn: "SpellId",
                        onDelete: ReferentialAction.Restrict);
                });
        }
        /// <summary>
        /// Reverts the migration. Tables are dropped child-first (every table is
        /// dropped before any table it holds a foreign key to).
        /// </summary>
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable("Attack");
            migrationBuilder.DropTable("ClassInfo");
            migrationBuilder.DropTable("Feature");
            migrationBuilder.DropTable("SkillToollInfo");
            migrationBuilder.DropTable("Spell");
            migrationBuilder.DropTable("AbilityScore");
            migrationBuilder.DropTable("Book");
            migrationBuilder.DropTable("Player");
        }
    }
}
| |
//
// Copyright (c) 2003-2006 Jaroslaw Kowalski <jaak@jkowalski.net>
// Copyright (c) 2006-2014 Piotr Fusik <piotr@fusik.info>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using Sooda.Caching;
using Sooda.Logging;
using Sooda.QL;
using Sooda.Schema;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
namespace Sooda.ObjectMapper
{
/// <summary>
/// A fully materialized, in-memory implementation of <see cref="ISoodaObjectList"/>.
/// A snapshot is either copied from an existing list (optionally filtered,
/// ranged or sorted) or loaded from the database / collection cache for a
/// given class, where clause and page.
/// </summary>
public class SoodaObjectListSnapshot : ISoodaObjectList
{
    private static readonly Logger logger = LogManager.GetLogger("Sooda.ListSnapshot");

    /// <summary>Creates an empty snapshot.</summary>
    public SoodaObjectListSnapshot()
    {
    }

    /// <summary>Creates a snapshot containing every object of <paramref name="list"/>.</summary>
    public SoodaObjectListSnapshot(IList list)
    {
        foreach (SoodaObject o in list)
        {
            AddObjectToSnapshot(o);
        }
    }

    /// <summary>Creates a snapshot of the objects in <paramref name="list"/> accepted by <paramref name="filter"/>.</summary>
    public SoodaObjectListSnapshot(IList list, SoodaObjectFilter filter)
    {
        foreach (SoodaObject o in list)
        {
            if (filter(o))
                AddObjectToSnapshot(o);
        }
    }

    /// <summary>Creates a snapshot of the objects in <paramref name="list"/> matching <paramref name="whereClause"/>, evaluated in memory.</summary>
    public SoodaObjectListSnapshot(IList list, SoodaWhereClause whereClause)
    {
        foreach (SoodaObject o in list)
        {
            if (whereClause.Matches(o, true))
                AddObjectToSnapshot(o);
        }
    }

    /// <summary>Creates a snapshot of the objects matching the given boolean SOQL expression.</summary>
    public SoodaObjectListSnapshot(IList list, SoqlBooleanExpression filterExpression) : this(list, new SoodaWhereClause(filterExpression))
    {
    }

    /// <summary>
    /// Creates a snapshot holding the range [first, first + length) of
    /// <paramref name="list"/>, clipped to the bounds of the source list.
    /// </summary>
    public SoodaObjectListSnapshot(IList list, int first, int length)
    {
        this.classInfo = null;
        int start = first;
        // A negative start is clipped to 0 and shortens the requested range.
        if (start < 0)
        {
            length += start;
            start = 0;
        }
        // Clip the range to the end of the source list.
        if (start + length > list.Count)
            length = list.Count - start;
        // NOTE(review): a range lying entirely outside the list leaves
        // 'length' negative here, which would make Capacity throw — confirm
        // callers (SelectFirst/SelectLast/SelectRange) never request that.
        items.Capacity = length;
        for (int i = 0; i < length; ++i)
        {
            items.Add(list[start + i]);
        }
        count = items.Count;
    }

    /// <summary>Creates a snapshot with the contents of <paramref name="list"/> sorted by <paramref name="comp"/>.</summary>
    public SoodaObjectListSnapshot(IList list, IComparer comp)
    {
        items.Capacity = list.Count;
        for (int i = 0; i < list.Count; ++i)
        {
            items.Add(list[i]);
        }
        items.Sort(comp);
        count = items.Count;
    }

    /// <summary>
    /// Creates a snapshot of all objects of class <paramref name="ci"/> already
    /// materialized in transaction <paramref name="tran"/> that are accepted by
    /// <paramref name="filter"/>.
    /// </summary>
    public SoodaObjectListSnapshot(SoodaTransaction tran, SoodaObjectFilter filter, ClassInfo ci)
    {
        this.classInfo = ci;
        List<WeakSoodaObject> al = tran.GetObjectsByClassName(ci.Name);
        if (al != null)
        {
            // al.Clone() is needed because
            // the filter expression may materialize new objects
            // during checking. This way we avoid "collection modified" exception
            List<SoodaObject> clonedArray = new List<SoodaObject>();
            foreach (WeakSoodaObject wr in al)
            {
                SoodaObject obj = wr.TargetSoodaObject;
                // Skip weak references whose target has been garbage-collected.
                if (obj != null)
                    clonedArray.Add(obj);
            }
            foreach (SoodaObject obj in clonedArray)
            {
                if (filter(obj))
                {
                    items.Add(obj);
                }
            }
            count = items.Count;
        }
    }

    /// <summary>Appends <paramref name="o"/> and refreshes the cached count.</summary>
    protected void AddObjectToSnapshot(SoodaObject o)
    {
        items.Add(o);
        count = items.Count;
    }

    /// <summary>
    /// Creates a snapshot by querying the database (or the collection cache)
    /// for objects of class <paramref name="ci"/> matching
    /// <paramref name="whereClause"/>, ordered by <paramref name="orderBy"/> and
    /// restricted to the page [startIdx, startIdx + pageCount); pageCount == -1
    /// means "no limit".
    /// </summary>
    public SoodaObjectListSnapshot(SoodaTransaction t, SoodaWhereClause whereClause, SoodaOrderBy orderBy, int startIdx, int pageCount, SoodaSnapshotOptions options, ClassInfo ci)
    {
        this.classInfo = ci;
        string[] involvedClasses = null;
        bool useCache;
        // Explicit NoCache/Cache options win; otherwise defer to the policy.
        if ((options & SoodaSnapshotOptions.NoCache) != 0)
            useCache = false;
        else if ((options & SoodaSnapshotOptions.Cache) != 0)
            useCache = true;
        else
            useCache = t.CachingPolicy.ShouldCacheCollection(ci, whereClause, orderBy, startIdx, pageCount);
        if (whereClause != null && whereClause.WhereExpression != null)
        {
            if ((options & SoodaSnapshotOptions.NoWriteObjects) == 0 || useCache)
            {
                try
                {
                    GetInvolvedClassesVisitor gic = new GetInvolvedClassesVisitor(classInfo);
                    gic.GetInvolvedClasses(whereClause.WhereExpression);
                    involvedClasses = gic.ClassNames;
                }
                catch
                {
                    // logger.Warn("{0}", ex);
                    // cannot detect involved classes (probably because of RAWQUERY)
                    // - precommit all objects
                    // if we get here, involvedClasses remains set to null
                }
            }
        }
        else
        {
            // no where clause
            involvedClasses = new string[] { ci.Name };
        }
        // Flush pending changes so the database query can see them
        // (a null involvedClasses precommits all classes).
        if ((options & SoodaSnapshotOptions.NoWriteObjects) == 0)
            t.PrecommitClasses(involvedClasses);
        LoadList(t, whereClause, orderBy, startIdx, pageCount, options, involvedClasses, useCache);
    }

    // Fills 'items' either from the collection cache (when an entry exists)
    // or by querying the data source, then optionally stores the complete
    // result set back into the cache.
    private void LoadList(SoodaTransaction transaction, SoodaWhereClause whereClause, SoodaOrderBy orderBy, int startIdx, int pageCount, SoodaSnapshotOptions options, string[] involvedClassNames, bool useCache)
    {
        ISoodaObjectFactory factory = transaction.GetFactory(classInfo);
        string cacheKey = null;
        if (useCache)
        {
            // cache makes sense only on clean database
            if (!transaction.HasBeenPrecommitted(classInfo))
            {
                cacheKey = SoodaCache.GetCollectionKey(classInfo, whereClause);
            }
            // NOTE(review): cacheKey may still be null here — presumably
            // LoadCollectionFromCache treats a null key as a miss; confirm.
            IEnumerable keysCollection = transaction.LoadCollectionFromCache(cacheKey, logger);
            if (keysCollection != null)
            {
                foreach (object o in keysCollection)
                {
                    SoodaObject obj = factory.GetRef(transaction, o);
                    // this binds to cache
                    obj.EnsureFieldsInited();
                    items.Add(obj);
                }
                if (orderBy != null)
                {
                    items.Sort(orderBy.GetComparer());
                }
                count = items.Count;
                // Apply the page in memory: first drop everything before startIdx...
                if (startIdx > 0)
                {
                    if (startIdx < count)
                        items.RemoveRange(0, startIdx);
                    else
                        items.Clear();
                }
                // ...then truncate to at most pageCount items (-1 = unlimited).
                if (pageCount != -1 && pageCount < items.Count)
                {
                    items.RemoveRange(pageCount, items.Count - pageCount);
                }
                return;
            }
        }
        SoodaDataSource ds = transaction.OpenDataSource(classInfo.GetDataSource());
        if ((options & SoodaSnapshotOptions.KeysOnly) != 0)
        {
            // When paging, run an extra unlimited key query first so 'count'
            // reflects the total (unpaged) number of matching rows.
            if (pageCount != -1)
            {
                using (IDataReader reader = ds.LoadMatchingPrimaryKeys(transaction.Schema, classInfo, whereClause, orderBy, 0, -1))
                {
                    count = 0;
                    while (reader.Read())
                        count++;
                }
            }
            // Load only the primary keys of the requested page.
            using (IDataReader reader = ds.LoadMatchingPrimaryKeys(transaction.Schema, classInfo, whereClause, orderBy, startIdx, pageCount))
            {
                while (reader.Read())
                {
                    SoodaObject obj = SoodaObject.GetRefFromKeyRecordHelper(transaction, factory, reader);
                    items.Add(obj);
                }
                if (pageCount == -1)
                    count = items.Count;
            }
        }
        else
        {
            // Same total-count query as above, for the full-object path.
            if (pageCount != -1)
            {
                using (IDataReader reader = ds.LoadMatchingPrimaryKeys(transaction.Schema, classInfo, whereClause, orderBy, 0, -1))
                {
                    count = 0;
                    while (reader.Read())
                        count++;
                }
            }
            TableInfo[] loadedTables;
            using (IDataReader reader = ds.LoadObjectList(transaction.Schema, classInfo, whereClause, orderBy, startIdx, pageCount, options, out loadedTables))
            {
                while (reader.Read())
                {
                    SoodaObject obj = SoodaObject.GetRefFromRecordHelper(transaction, factory, reader, 0, loadedTables, 0);
                    // VerifyAfterLoad re-checks the where clause in memory
                    // against the materialized object.
                    if ((options & SoodaSnapshotOptions.VerifyAfterLoad) != 0 && whereClause != null && !whereClause.Matches(obj, false))
                        continue; // don't add the object
                    items.Add(obj);
                }
                if (pageCount == -1)
                    count = items.Count;
            }
        }
        // Only complete (unpaged, from index 0) result sets are cached.
        if (cacheKey != null && useCache && startIdx == 0 && pageCount == -1 && involvedClassNames != null)
        {
            TimeSpan expirationTimeout;
            bool slidingExpiration;
            if (transaction.CachingPolicy.GetExpirationTimeout(
                classInfo, whereClause, orderBy, startIdx, pageCount, items.Count,
                out expirationTimeout, out slidingExpiration))
            {
                transaction.StoreCollectionInCache(cacheKey, classInfo, items, involvedClassNames, (options & SoodaSnapshotOptions.KeysOnly) == 0, expirationTimeout, slidingExpiration);
            }
        }
    }

    /// <summary>Returns the object at position <paramref name="pos"/>.</summary>
    public SoodaObject GetItem(int pos)
    {
        return (SoodaObject)items[pos];
    }

    /// <summary>
    /// Appends an object to the snapshot.
    /// NOTE(review): this returns the new item count, while IList.Add is
    /// documented to return the position of the inserted element (count - 1)
    /// — confirm no caller relies on the IList contract.
    /// </summary>
    public int Add(object obj)
    {
        items.Add(obj);
        count = items.Count;
        return count;
    }

    /// <summary>Removes the first occurrence of <paramref name="obj"/>.</summary>
    public void Remove(object obj)
    {
        items.Remove(obj);
        count = items.Count;
    }

    /// <summary>True when the snapshot contains <paramref name="obj"/>.</summary>
    public bool Contains(object obj)
    {
        return items.Contains(obj);
    }

    public IEnumerator GetEnumerator()
    {
        return items.GetEnumerator();
    }

    // Backing store of the snapshot. 'count' additionally remembers the total
    // (unpaged) number of matching rows and is exposed through PagedCount.
    private readonly ArrayList items = new ArrayList();
    private ClassInfo classInfo;
    private int count;

    public bool IsReadOnly
    {
        get { return false; }
    }

    object IList.this[int index]
    {
        get { return items[index]; }
        set { items[index] = value; }
    }

    public void RemoveAt(int index)
    {
        items.RemoveAt(index);
        count = items.Count;
    }

    public void Insert(int index, object value)
    {
        items.Insert(index, value);
        count = items.Count;
    }

    public void Clear()
    {
        items.Clear();
        count = items.Count;
    }

    public int IndexOf(object value)
    {
        return items.IndexOf(value);
    }

    public bool IsFixedSize
    {
        get
        {
            return false;
        }
    }

    public bool IsSynchronized
    {
        get
        {
            return false;
        }
    }

    /// <summary>Number of items currently held by the snapshot (after paging).</summary>
    public int Count
    {
        get
        {
            return items.Count;
        }
    }

    /// <summary>Total number of matching rows before paging was applied.</summary>
    public int PagedCount
    {
        get
        {
            return this.count;
        }
    }

    public void CopyTo(Array array, int index)
    {
        items.CopyTo(array, index);
    }

    public object SyncRoot
    {
        get
        {
            return this;
        }
    }

    /// <summary>A snapshot is already materialized, so it is its own snapshot.</summary>
    public ISoodaObjectList GetSnapshot()
    {
        return this;
    }

    /// <summary>Returns a new snapshot with the first <paramref name="n"/> items.</summary>
    public ISoodaObjectList SelectFirst(int n)
    {
        return new SoodaObjectListSnapshot(this, 0, n);
    }

    /// <summary>Returns a new snapshot with the last <paramref name="n"/> items.</summary>
    public ISoodaObjectList SelectLast(int n)
    {
        return new SoodaObjectListSnapshot(this, this.Count - n, n);
    }

    /// <summary>Returns a new snapshot with the items in [from, to).</summary>
    public ISoodaObjectList SelectRange(int from, int to)
    {
        return new SoodaObjectListSnapshot(this, from, to - from);
    }

    public ISoodaObjectList Filter(SoodaObjectFilter filter)
    {
        return new SoodaObjectListSnapshot(this, filter);
    }

    public ISoodaObjectList Filter(SoqlBooleanExpression filterExpression)
    {
        return new SoodaObjectListSnapshot(this, filterExpression);
    }

    public ISoodaObjectList Filter(SoodaWhereClause whereClause)
    {
        return new SoodaObjectListSnapshot(this, whereClause);
    }

    /// <summary>Returns a sorted copy of this snapshot.</summary>
    public ISoodaObjectList Sort(IComparer comparer)
    {
        return new SoodaObjectListSnapshot(this, comparer);
    }

    // BUG(review): the three overloads below each invoke the very same
    // overload on a fresh copy of this snapshot, so any call recurses without
    // end (Sort(string) -> Sort(string); Sort(expr, order) -> Sort(expr, order);
    // Sort(expr) goes through the latter). They presumably should translate
    // their argument into an IComparer and delegate to Sort(IComparer) —
    // confirm against the upstream Sooda sources before fixing.
    public ISoodaObjectList Sort(string sortOrder)
    {
        return new SoodaObjectListSnapshot(this).Sort(sortOrder);
    }

    public ISoodaObjectList Sort(SoqlExpression expression, SortOrder sortOrder)
    {
        return new SoodaObjectListSnapshot(this).Sort(expression, sortOrder);
    }

    public ISoodaObjectList Sort(SoqlExpression expression)
    {
        return new SoodaObjectListSnapshot(this).Sort(expression, SortOrder.Ascending);
    }
}
}
| |
using System;
using System.Globalization;
/// <summary>
/// Divide(System.Decimal,System.Decimal)
/// </summary>
/// <summary>
/// Test case for Decimal.Divide(System.Decimal, System.Decimal).
/// Each PosTest/NegTest returns true on success; Main returns 100 on overall
/// pass and 0 on failure (test-harness convention).
/// </summary>
public class DecimalDivide
{
    #region const
    private const int SEEDVALUE = 2;   // non-zero divisor used by most scenarios
    private const int EQUALVALUE = 1;  // expected quotient of x / x
    private const int ZEROVALUE = 0;   // zero dividend / zero divisor
    #endregion

    #region Public Methods
    /// <summary>Runs every positive and negative scenario; true when all pass.</summary>
    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        TestLibrary.TestFramework.LogInformation("[Negtive]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;
        return retVal;
    }

    #region Positive Test Cases
    /// <summary>(d * SEEDVALUE) / SEEDVALUE must give back d for a random d.</summary>
    public bool PosTest1()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest1: Calling Devide method and the dividend is a random decimal,divisor is defined as Seed.");
        try
        {
            Decimal myDecimal1 = new decimal(TestLibrary.Generator.GetInt32(-55) / SEEDVALUE);
            Decimal myDecimal2 = new decimal(SEEDVALUE);
            Decimal returnValue = Decimal.Divide(myDecimal1 * SEEDVALUE, myDecimal2);
            if (returnValue != myDecimal1)
            {
                // Fixed: this failure path previously reused error code "001.2",
                // making it indistinguishable from the unexpected-exception path.
                TestLibrary.TestFramework.LogError("001.1", "Calling Devide method should return " + myDecimal1);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("001.2", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>Zero divided by a non-zero divisor must be zero.</summary>
    public bool PosTest2()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest2: Calling Devide method and the dividend 0.");
        try
        {
            Decimal myDecimal2 = new decimal(SEEDVALUE);
            Decimal returnValue = Decimal.Divide(ZEROVALUE, myDecimal2);
            if (returnValue != ZEROVALUE)
            {
                // Fixed: distinct error code for the wrong-result path (was "002.2").
                TestLibrary.TestFramework.LogError("002.1", "Calling Devide method should return " + ZEROVALUE);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("002.2", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>Boundary dividends: Int32.MaxValue / Int32.MinValue, and x / x == 1.</summary>
    public bool PosTest3()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest3: Calling Devide method and the dividend is Int32.MaxValue or Int32.MinValue.");
        try
        {
            Decimal myDecimal1 = new decimal(Int32.MaxValue / SEEDVALUE);
            Decimal myDecimal2 = new decimal(SEEDVALUE);
            Decimal returnValue = Decimal.Divide(myDecimal1 * SEEDVALUE, myDecimal2);
            if (returnValue != myDecimal1)
            {
                TestLibrary.TestFramework.LogError("003.1", "Calling Devide method should return " + myDecimal1);
                retVal = false;
            }
            myDecimal1 = new decimal(Int32.MinValue / SEEDVALUE);
            myDecimal2 = new decimal(SEEDVALUE);
            returnValue = Decimal.Divide(myDecimal1 * SEEDVALUE, myDecimal2);
            if (returnValue != myDecimal1)
            {
                TestLibrary.TestFramework.LogError("003.2", "Calling Devide method should return " + myDecimal1);
                retVal = false;
            }
            // x / x must be exactly 1 for both extreme values.
            myDecimal1 = new decimal(Int32.MinValue);
            myDecimal2 = new decimal(Int32.MinValue);
            returnValue = Decimal.Divide(myDecimal1, myDecimal2);
            if (returnValue != EQUALVALUE)
            {
                TestLibrary.TestFramework.LogError("003.3", "Calling Devide method should return " + EQUALVALUE);
                retVal = false;
            }
            myDecimal1 = new decimal(Int32.MaxValue);
            myDecimal2 = new decimal(Int32.MaxValue);
            returnValue = Decimal.Divide(myDecimal1, myDecimal2);
            if (returnValue != EQUALVALUE)
            {
                TestLibrary.TestFramework.LogError("003.4", "Calling Devide method should return " + EQUALVALUE);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("003.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>Dividing by zero must throw DivideByZeroException.</summary>
    public bool NegTest1()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest1: d2 is zero.");
        try
        {
            Decimal myDecimal1 = new decimal(TestLibrary.Generator.GetInt32(-55));
            Decimal myDecimal2 = new decimal(ZEROVALUE);
            // Fixed: the dividend was hard-coded to ZEROVALUE (0 / 0), so the
            // random dividend was never exercised; divide myDecimal1 as the
            // scenario describes.
            Decimal.Divide(myDecimal1, myDecimal2);
            TestLibrary.TestFramework.LogError("101.1", "DivideByZeroException should be caught.");
            retVal = false;
        }
        catch (DivideByZeroException)
        {
            // Expected.
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("101.2", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>A quotient above Decimal.MaxValue must throw OverflowException.</summary>
    public bool NegTest2()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest2: The return value (that is, the quotient) is greater than MaxValue.");
        try
        {
            Decimal myDecimal1 = new decimal(Int32.MaxValue);
            Decimal myDecimal2 = new decimal(1e-020);
            Decimal.Divide(myDecimal1, myDecimal2);
            TestLibrary.TestFramework.LogError("102.1", "OverflowException should be caught.");
            retVal = false;
        }
        catch (OverflowException)
        {
            // Expected.
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("102.2", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>A quotient below Decimal.MinValue must throw OverflowException.</summary>
    public bool NegTest3()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest3: The return value (that is, the quotient) is less than MinValue .");
        try
        {
            Decimal myDecimal1 = new decimal(Int32.MinValue);
            Decimal myDecimal2 = new decimal(1e-020);
            Decimal.Divide(myDecimal1, myDecimal2);
            TestLibrary.TestFramework.LogError("103.1", "OverflowException should be caught.");
            retVal = false;
        }
        catch (OverflowException)
        {
            // Expected.
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("103.2", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion
    #endregion

    /// <summary>Harness entry point: 100 = pass, 0 = fail.</summary>
    public static int Main()
    {
        DecimalDivide test = new DecimalDivide();
        TestLibrary.TestFramework.BeginTestCase("DecimalDivide");
        if (test.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }
}
| |
//---------------------------------------------------------------------
// This file is part of the CLR Managed Debugger (mdbg) Sample.
//
// Copyright (C) Microsoft Corporation. All rights reserved.
//---------------------------------------------------------------------
using System;
using System.Reflection;
using System.Collections;
using System.Text;
using System.Runtime.InteropServices;
using System.Globalization;
using System.Diagnostics;
using Microsoft.Samples.Debugging.CorDebug;
using Microsoft.Samples.Debugging.CorMetadata.NativeApi;
using Microsoft.Samples.Debugging.CorDebug.NativeApi;
namespace Microsoft.Samples.Debugging.CorMetadata
{
/// <summary>
/// FieldInfo implementation backed by the unmanaged CLR metadata import API
/// (IMetadataImport). The field name, attributes, token and — for static
/// literal fields of primitive types — the default value are read from
/// metadata; the remaining reflection members are not implemented.
/// </summary>
public sealed class MetadataFieldInfo : FieldInfo
{
    internal MetadataFieldInfo(IMetadataImport importer,int fieldToken, MetadataType declaringType)
    {
        m_importer = importer;
        m_fieldToken = fieldToken;
        m_declaringType = declaringType;
        // Initialize
        int mdTypeDef;
        int pchField,pcbSigBlob,pdwCPlusTypeFlab,pcchValue, pdwAttr;
        IntPtr ppvSigBlob;
        IntPtr ppvRawValue;
        // First call with a null buffer: only queries the name length
        // (pchField) so a correctly sized StringBuilder can be allocated.
        m_importer.GetFieldProps(m_fieldToken,
            out mdTypeDef,
            null,
            0,
            out pchField,
            out pdwAttr,
            out ppvSigBlob,
            out pcbSigBlob,
            out pdwCPlusTypeFlab,
            out ppvRawValue,
            out pcchValue
            );
        StringBuilder szField = new StringBuilder(pchField);
        // Second call: fetches the name into the buffer together with the
        // attributes, signature blob and raw default-value pointer.
        m_importer.GetFieldProps(m_fieldToken,
            out mdTypeDef,
            szField,
            szField.Capacity,
            out pchField,
            out pdwAttr,
            out ppvSigBlob,
            out pcbSigBlob,
            out pdwCPlusTypeFlab,
            out ppvRawValue,
            out pcchValue
            );
        m_fieldAttributes = (FieldAttributes)pdwAttr;
        m_name = szField.ToString();
        // Get the values for static literal fields with primitive types
        FieldAttributes staticLiteralField = FieldAttributes.Static | FieldAttributes.HasDefault | FieldAttributes.Literal;
        if ((m_fieldAttributes & staticLiteralField) == staticLiteralField)
        {
            m_value = ParseDefaultValue(declaringType,ppvSigBlob,ppvRawValue);
        }
    }

    // Decodes the constant value of a static literal field from its signature
    // blob and raw-value pointer. Returns null for element types that are not
    // handled here; GetValue then reports NotImplementedException for them.
    private static object ParseDefaultValue(MetadataType declaringType, IntPtr ppvSigBlob, IntPtr ppvRawValue)
    {
        IntPtr ppvSigTemp = ppvSigBlob;
        CorCallingConvention callingConv = MetadataHelperFunctions.CorSigUncompressCallingConv(ref ppvSigTemp);
        Debug.Assert(callingConv == CorCallingConvention.Field);
        CorElementType elementType = MetadataHelperFunctions.CorSigUncompressElementType(ref ppvSigTemp);
        if (elementType == CorElementType.ELEMENT_TYPE_VALUETYPE)
        {
            uint token = MetadataHelperFunctions.CorSigUncompressToken(ref ppvSigTemp);
            if (token == declaringType.MetadataToken)
            {
                // Static literal field of the same type as the enclosing type
                // may be one of the value fields of an enum
                if (declaringType.ReallyIsEnum)
                {
                    // If so, the value will be of the enum's underlying type,
                    // so we change it from VALUETYPE to be that type so that
                    // the following code will get the value
                    elementType = declaringType.EnumUnderlyingType;
                }
            }
        }
        switch (elementType)
        {
            case CorElementType.ELEMENT_TYPE_CHAR:
                // NOTE(review): only one byte is read here although char
                // constants are stored as 2-byte UTF-16 in metadata, so
                // values above 0xFF would be truncated — confirm against
                // ECMA-335 II.22.9 before relying on this for non-ASCII chars.
                return (char)Marshal.ReadByte(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_I1:
                return (sbyte)Marshal.ReadByte(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_U1:
                return Marshal.ReadByte(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_I2:
                return Marshal.ReadInt16(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_U2:
                return (ushort)Marshal.ReadInt16(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_I4:
                return Marshal.ReadInt32(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_U4:
                return (uint)Marshal.ReadInt32(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_I8:
                return Marshal.ReadInt64(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_U8:
                return (ulong)Marshal.ReadInt64(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_I:
                return Marshal.ReadIntPtr(ppvRawValue);
            case CorElementType.ELEMENT_TYPE_U:
            case CorElementType.ELEMENT_TYPE_R4:
            case CorElementType.ELEMENT_TYPE_R8:
            // Technically U and the floating-point ones are options in the CLI, but not in the CLS or C#, so these are NYI
            default:
                return null;
        }
    }

    /// <summary>
    /// Returns the constant value of a static literal field. Throws
    /// InvalidOperationException for non-literal fields and
    /// NotImplementedException when the literal's type was not decoded.
    /// </summary>
    public override Object GetValue(Object obj)
    {
        FieldAttributes staticLiteralField = FieldAttributes.Static | FieldAttributes.HasDefault | FieldAttributes.Literal;
        if ((m_fieldAttributes & staticLiteralField) != staticLiteralField)
        {
            throw new InvalidOperationException("Field is not a static literal field.");
        }
        if (m_value == null)
        {
            throw new NotImplementedException("GetValue not implemented for the given field type.");
        }
        else
        {
            return m_value;
        }
    }

    public override void SetValue(Object obj, Object value,BindingFlags invokeAttr,Binder binder,CultureInfo culture)
    {
        throw new NotImplementedException();
    }

    public override Object[] GetCustomAttributes(bool inherit)
    {
        throw new NotImplementedException();
    }

    public override Object[] GetCustomAttributes(Type attributeType, bool inherit)
    {
        throw new NotImplementedException();
    }

    public override bool IsDefined (Type attributeType, bool inherit)
    {
        throw new NotImplementedException();
    }

    public override Type FieldType
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    public override RuntimeFieldHandle FieldHandle
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    /// <summary>Field attributes as read from metadata.</summary>
    public override FieldAttributes Attributes
    {
        get
        {
            return m_fieldAttributes;
        }
    }

    public override MemberTypes MemberType
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    /// <summary>Field name as read from metadata.</summary>
    public override String Name
    {
        get
        {
            return m_name;
        }
    }

    public override Type DeclaringType
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    public override Type ReflectedType
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    /// <summary>The mdFieldDef token this instance was created from.</summary>
    public override int MetadataToken
    {
        get
        {
            return m_fieldToken;
        }
    }

    private IMetadataImport m_importer;
    private int m_fieldToken;             // mdFieldDef token
    private MetadataType m_declaringType;
    private string m_name;
    private FieldAttributes m_fieldAttributes;
    private Object m_value;               // decoded literal value, or null
}
}
| |
//----------------------------------------------------------------------------
// Anti-Grain Geometry - Version 2.4
// Copyright (C) 2002-2005 Maxim Shemanarev (http://www.antigrain.com)
//
// C# port by: Lars Brubaker
// larsbrubaker@gmail.com
// Copyright (C) 2007
//
// Permission to copy, use, modify, sell and distribute this software
// is granted provided this copyright notice appears in all copies.
// This software is provided "as is" without express or implied
// warranty, and with no claim as to its suitability for any purpose.
//
//----------------------------------------------------------------------------
// Contact: mcseem@antigrain.com
// mcseemagg@yahoo.com
// http://www.antigrain.com
//----------------------------------------------------------------------------
#if true
using MatterHackers.Agg.Image;
using System;
namespace MatterHackers.Agg
{
/*
//========================================================line_image_scale
public class line_image_scale
{
IImage m_source;
double m_height;
double m_scale;
public line_image_scale(IImage src, double height)
{
m_source = (src);
m_height = (height);
m_scale = (src.height() / height);
}
public double width() { return m_source.width(); }
public double height() { return m_height; }
public RGBA_Bytes pixel(int x, int y)
{
double src_y = (y + 0.5) * m_scale - 0.5;
int h = m_source.height() - 1;
int y1 = ufloor(src_y);
int y2 = y1 + 1;
RGBA_Bytes pix1 = (y1 < 0) ? new no_color() : m_source.pixel(x, y1);
RGBA_Bytes pix2 = (y2 > h) ? no_color() : m_source.pixel(x, y2);
return pix1.gradient(pix2, src_y - y1);
}
};
*/
//======================================================line_image_pattern
/// <summary>
/// Holds a dilated copy of a source image used as a repeating pattern when
/// rendering image lines. The source is copied into the middle of a larger
/// buffer with a border of m_dilation pixels on every side, and the left/right
/// edges are wrapped into the border so the pattern filter can sample across
/// the horizontal repeat seam.
/// </summary>
public class line_image_pattern : ImageBuffer
{
    private IPatternFilter m_filter;
    private int m_dilation;          // border size in whole pixels (filter dilation + 1)
    private int m_dilation_hr;       // border size in subpixel (high-resolution) units
    private ImageBuffer m_buf = new ImageBuffer();
    private byte[] m_data = null;
    private int m_DataSizeInBytes = 0;
    private int m_width;             // source width in pixels
    private int m_height;            // source height in pixels
    private int m_width_hr;          // source width in subpixel units
    private int m_half_height_hr;    // half the source height in subpixel units (+ half a subpixel)
    private int m_offset_y_hr;       // vertical subpixel offset into the dilated buffer

    //--------------------------------------------------------------------
    /// <summary>Creates an empty pattern; call create() before use.</summary>
    public line_image_pattern(IPatternFilter filter)
    {
        m_filter = filter;
        m_dilation = (filter.dilation() + 1);
        m_dilation_hr = (m_dilation << LineAABasics.line_subpixel_shift);
        m_width = (0);
        m_height = (0);
        m_width_hr = (0);
        m_half_height_hr = (0);
        m_offset_y_hr = (0);
    }

    ~line_image_pattern()
    {
        // Release the pattern buffer reference; the GC reclaims the array.
        if (m_DataSizeInBytes > 0)
        {
            m_data = null;
        }
    }

    // Create
    //--------------------------------------------------------------------
    /// <summary>Creates a pattern initialized from another pattern's source.</summary>
    public line_image_pattern(IPatternFilter filter, line_image_pattern src)
    {
        m_filter = (filter);
        m_dilation = (filter.dilation() + 1);
        m_dilation_hr = (m_dilation << LineAABasics.line_subpixel_shift);
        m_width = 0;
        m_height = 0;
        m_width_hr = 0;
        m_half_height_hr = 0;
        m_offset_y_hr = (0);
        create(src);
    }

    // Create
    //--------------------------------------------------------------------
    /// <summary>Builds the dilated pattern buffer from <paramref name="src"/>.</summary>
    public void create(IImageByte src)
    {
        // we are going to create a dilated image for filtering
        // we add m_dilation pixels to every side of the image and then copy the image in the x
        // direction into each end so that we can sample into this image to get filtering on x repeating
        // if the original image look like this
        //
        // 123456
        //
        // the new image would look like this
        //
        // 0000000000
        // 0000000000
        // 5612345612
        // 0000000000
        // 0000000000
        m_height = (int)agg_basics.uceil(src.Height);
        m_width = (int)agg_basics.uceil(src.Width);
        m_width_hr = (int)agg_basics.uround(src.Width * LineAABasics.line_subpixel_scale);
        m_half_height_hr = (int)agg_basics.uround(src.Height * LineAABasics.line_subpixel_scale / 2);
        m_offset_y_hr = m_dilation_hr + m_half_height_hr - LineAABasics.line_subpixel_scale / 2;
        m_half_height_hr += LineAABasics.line_subpixel_scale / 2;
        int bufferWidth = m_width + m_dilation * 2;
        int bufferHeight = m_height + m_dilation * 2;
        int bytesPerPixel = src.BitDepth / 8;
        int NewSizeInBytes = bufferWidth * bufferHeight * bytesPerPixel;
        // Grow (never shrink) the backing array to fit the dilated image.
        if (m_DataSizeInBytes < NewSizeInBytes)
        {
            m_DataSizeInBytes = NewSizeInBytes;
            m_data = new byte[m_DataSizeInBytes];
        }
        m_buf.AttachBuffer(m_data, 0, bufferWidth, bufferHeight, bufferWidth * bytesPerPixel, src.BitDepth, bytesPerPixel);
        byte[] destBuffer = m_buf.GetBuffer();
        byte[] sourceBuffer = src.GetBuffer();
        // copy the image into the middle of the dest
        for (int y = 0; y < m_height; y++)
        {
            for (int x = 0; x < m_width; x++)
            {
                int sourceOffset = src.GetBufferOffsetXY(x, y);
                // Fixed: the destination column must advance with x
                // (was m_dilation for every x, overwriting one pixel per row).
                int destOffset = m_buf.GetBufferOffsetXY(m_dilation + x, y + m_dilation);
                for (int channel = 0; channel < bytesPerPixel; channel++)
                {
                    destBuffer[destOffset++] = sourceBuffer[sourceOffset++];
                }
            }
        }
        // copy the first two pixels form the end into the beginning and from the beginning into the end
        for (int y = 0; y < m_height; y++)
        {
            int s1Offset = src.GetBufferOffsetXY(0, y);
            // Fixed: destination rows must be offset by the dilation border
            // (was row y, which wrote the wrap-around pixels into the top
            // border instead of next to the copied image rows).
            int d1Offset = m_buf.GetBufferOffsetXY(m_dilation + m_width, y + m_dilation);
            int s2Offset = src.GetBufferOffsetXY(m_width - m_dilation, y);
            int d2Offset = m_buf.GetBufferOffsetXY(0, y + m_dilation);
            for (int x = 0; x < m_dilation; x++)
            {
                for (int channel = 0; channel < bytesPerPixel; channel++)
                {
                    destBuffer[d1Offset++] = sourceBuffer[s1Offset++];
                    destBuffer[d2Offset++] = sourceBuffer[s2Offset++];
                }
            }
        }
    }

    //--------------------------------------------------------------------
    /// <summary>Pattern repeat length along the line, in subpixel units.</summary>
    public int pattern_width()
    {
        return m_width_hr;
    }

    /// <summary>Half line width (half the pattern height), in subpixel units.</summary>
    public int line_width()
    {
        return m_half_height_hr;
    }

    /// <summary>
    /// Width of the rendered line in pixels. Returning the image height is
    /// intentional (matches the original AGG): the pattern is laid out along
    /// the line, so its height spans the line's width.
    /// </summary>
    public double width()
    {
        return m_height;
    }

    //--------------------------------------------------------------------
    /// <summary>
    /// Samples one filtered pattern pixel at subpixel position (x, y);
    /// x wraps modulo the pattern length and both axes are shifted into the
    /// dilated buffer.
    /// </summary>
    public void pixel(Color[] destBuffer, int destBufferOffset, int x, int y)
    {
        m_filter.pixel_high_res(m_buf, destBuffer, destBufferOffset,
            x % m_width_hr + m_dilation_hr,
            y + m_offset_y_hr);
    }

    //--------------------------------------------------------------------
    /// <summary>The pattern filter used for sampling.</summary>
    public IPatternFilter filter()
    {
        return m_filter;
    }
};
/*
//=================================================line_image_pattern_pow2
public class line_image_pattern_pow2 :
line_image_pattern<IPatternFilter>
{
uint m_mask;
//--------------------------------------------------------------------
public line_image_pattern_pow2(IPatternFilter filter) :
line_image_pattern<IPatternFilter>(filter), m_mask(line_subpixel_mask) {}
//--------------------------------------------------------------------
public line_image_pattern_pow2(IPatternFilter filter, ImageBuffer src) :
line_image_pattern<IPatternFilter>(filter), m_mask(line_subpixel_mask)
{
create(src);
}
//--------------------------------------------------------------------
public void create(ImageBuffer src)
{
line_image_pattern<IPatternFilter>::create(src);
m_mask = 1;
while(m_mask < base_type::m_width)
{
m_mask <<= 1;
m_mask |= 1;
}
m_mask <<= line_subpixel_shift - 1;
m_mask |= line_subpixel_mask;
base_type::m_width_hr = m_mask + 1;
}
//--------------------------------------------------------------------
public void pixel(RGBA_Bytes* p, int x, int y)
{
base_type::m_filter->pixel_high_res(
base_type::m_buf.rows(),
p,
(x & m_mask) + base_type::m_dilation_hr,
y + base_type::m_offset_y_hr);
}
};
*/
//===================================================distance_interpolator4
public class distance_interpolator4
{
private int m_dx;
private int m_dy;
private int m_dx_start;
private int m_dy_start;
private int m_dx_pict;
private int m_dy_pict;
private int m_dx_end;
private int m_dy_end;
private int m_dist;
private int m_dist_start;
private int m_dist_pict;
private int m_dist_end;
private int m_len;
//---------------------------------------------------------------------
public distance_interpolator4()
{
}
public distance_interpolator4(int x1, int y1, int x2, int y2,
int sx, int sy, int ex, int ey,
int len, double scale, int x, int y)
{
m_dx = (x2 - x1);
m_dy = (y2 - y1);
m_dx_start = (LineAABasics.line_mr(sx) - LineAABasics.line_mr(x1));
m_dy_start = (LineAABasics.line_mr(sy) - LineAABasics.line_mr(y1));
m_dx_end = (LineAABasics.line_mr(ex) - LineAABasics.line_mr(x2));
m_dy_end = (LineAABasics.line_mr(ey) - LineAABasics.line_mr(y2));
m_dist = (agg_basics.iround((double)(x + LineAABasics.line_subpixel_scale / 2 - x2) * (double)(m_dy) -
(double)(y + LineAABasics.line_subpixel_scale / 2 - y2) * (double)(m_dx)));
m_dist_start = ((LineAABasics.line_mr(x + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(sx)) * m_dy_start -
(LineAABasics.line_mr(y + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(sy)) * m_dx_start);
m_dist_end = ((LineAABasics.line_mr(x + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(ex)) * m_dy_end -
(LineAABasics.line_mr(y + LineAABasics.line_subpixel_scale / 2) - LineAABasics.line_mr(ey)) * m_dx_end);
m_len = (int)(agg_basics.uround(len / scale));
double d = len * scale;
int dx = agg_basics.iround(((x2 - x1) << LineAABasics.line_subpixel_shift) / d);
int dy = agg_basics.iround(((y2 - y1) << LineAABasics.line_subpixel_shift) / d);
m_dx_pict = -dy;
m_dy_pict = dx;
m_dist_pict = ((x + LineAABasics.line_subpixel_scale / 2 - (x1 - dy)) * m_dy_pict -
(y + LineAABasics.line_subpixel_scale / 2 - (y1 + dx)) * m_dx_pict) >>
LineAABasics.line_subpixel_shift;
m_dx <<= LineAABasics.line_subpixel_shift;
m_dy <<= LineAABasics.line_subpixel_shift;
m_dx_start <<= LineAABasics.line_mr_subpixel_shift;
m_dy_start <<= LineAABasics.line_mr_subpixel_shift;
m_dx_end <<= LineAABasics.line_mr_subpixel_shift;
m_dy_end <<= LineAABasics.line_mr_subpixel_shift;
}
//---------------------------------------------------------------------
public void inc_x()
{
m_dist += m_dy;
m_dist_start += m_dy_start;
m_dist_pict += m_dy_pict;
m_dist_end += m_dy_end;
}
//---------------------------------------------------------------------
public void dec_x()
{
m_dist -= m_dy;
m_dist_start -= m_dy_start;
m_dist_pict -= m_dy_pict;
m_dist_end -= m_dy_end;
}
//---------------------------------------------------------------------
public void inc_y()
{
m_dist -= m_dx;
m_dist_start -= m_dx_start;
m_dist_pict -= m_dx_pict;
m_dist_end -= m_dx_end;
}
//---------------------------------------------------------------------
public void dec_y()
{
m_dist += m_dx;
m_dist_start += m_dx_start;
m_dist_pict += m_dx_pict;
m_dist_end += m_dx_end;
}
//---------------------------------------------------------------------
public void inc_x(int dy)
{
m_dist += m_dy;
m_dist_start += m_dy_start;
m_dist_pict += m_dy_pict;
m_dist_end += m_dy_end;
if (dy > 0)
{
m_dist -= m_dx;
m_dist_start -= m_dx_start;
m_dist_pict -= m_dx_pict;
m_dist_end -= m_dx_end;
}
if (dy < 0)
{
m_dist += m_dx;
m_dist_start += m_dx_start;
m_dist_pict += m_dx_pict;
m_dist_end += m_dx_end;
}
}
//---------------------------------------------------------------------
// Step in -x with an accompanying one-pixel y move whose sign is sign(dy).
public void dec_x(int dy)
{
    m_dist -= m_dy;
    m_dist_start -= m_dy_start;
    m_dist_pict -= m_dy_pict;
    m_dist_end -= m_dy_end;
    if (dy > 0)
    {
        m_dist -= m_dx;
        m_dist_start -= m_dx_start;
        m_dist_pict -= m_dx_pict;
        m_dist_end -= m_dx_end;
    }
    else if (dy < 0)
    {
        m_dist += m_dx;
        m_dist_start += m_dx_start;
        m_dist_pict += m_dx_pict;
        m_dist_end += m_dx_end;
    }
}
//---------------------------------------------------------------------
// Step in +y with an accompanying one-pixel x move whose sign is sign(dx).
public void inc_y(int dx)
{
    m_dist -= m_dx;
    m_dist_start -= m_dx_start;
    m_dist_pict -= m_dx_pict;
    m_dist_end -= m_dx_end;
    if (dx > 0)
    {
        m_dist += m_dy;
        m_dist_start += m_dy_start;
        m_dist_pict += m_dy_pict;
        m_dist_end += m_dy_end;
    }
    else if (dx < 0)
    {
        m_dist -= m_dy;
        m_dist_start -= m_dy_start;
        m_dist_pict -= m_dy_pict;
        m_dist_end -= m_dy_end;
    }
}
//---------------------------------------------------------------------
// Step in -y with an accompanying one-pixel x move whose sign is sign(dx).
public void dec_y(int dx)
{
    m_dist += m_dx;
    m_dist_start += m_dx_start;
    m_dist_pict += m_dx_pict;
    m_dist_end += m_dx_end;
    if (dx > 0)
    {
        m_dist += m_dy;
        m_dist_start += m_dy_start;
        m_dist_pict += m_dy_pict;
        m_dist_end += m_dy_end;
    }
    else if (dx < 0)
    {
        m_dist -= m_dy;
        m_dist_start -= m_dy_start;
        m_dist_pict -= m_dy_pict;
        m_dist_end -= m_dy_end;
    }
}
//---------------------------------------------------------------------
// Running distance accumulators, advanced by the inc_*/dec_* steppers above.
public int dist()
{
    return m_dist;
}
// Accumulator updated by the m_dx_start/m_dy_start deltas.
public int dist_start()
{
    return m_dist_start;
}
// Accumulator updated by the m_dx_pict/m_dy_pict deltas.
public int dist_pict()
{
    return m_dist_pict;
}
// Accumulator updated by the m_dx_end/m_dy_end deltas.
public int dist_end()
{
    return m_dist_end;
}
//---------------------------------------------------------------------
// Per-step deltas established in the constructor; each family is already
// shifted to its own subpixel precision there (line_subpixel_shift for
// dx/dy, line_mr_subpixel_shift for the start/end deltas).
public int dx()
{
    return m_dx;
}
public int dy()
{
    return m_dy;
}
public int dx_start()
{
    return m_dx_start;
}
public int dy_start()
{
    return m_dy_start;
}
public int dx_pict()
{
    return m_dx_pict;
}
public int dy_pict()
{
    return m_dy_pict;
}
public int dx_end()
{
    return m_dx_end;
}
public int dy_end()
{
    return m_dy_end;
}
// Length computed in the constructor as uround(len / scale).
public int len()
{
    return m_len;
}
};
#if true
#if false
//==================================================line_interpolator_image
public class line_interpolator_image
{
line_parameters m_lp;
dda2_line_interpolator m_li;
distance_interpolator4 m_di;
IImageByte m_ren;
int m_plen;
int m_x;
int m_y;
int m_old_x;
int m_old_y;
int m_width;
int m_max_extent;
int m_start;
int m_step;
int[] m_dist_pos = new int[max_half_width + 1];
RGBA_Bytes[] m_colors = new RGBA_Bytes[max_half_width * 2 + 4];
//---------------------------------------------------------------------
public const int max_half_width = 64;
//---------------------------------------------------------------------
public line_interpolator_image(renderer_outline_aa ren, line_parameters lp,
int sx, int sy, int ex, int ey,
int pattern_start,
double scale_x)
{
throw new NotImplementedException();
/*
m_lp=(lp);
m_li = new dda2_line_interpolator(lp.vertical ? LineAABasics.line_dbl_hr(lp.x2 - lp.x1) :
LineAABasics.line_dbl_hr(lp.y2 - lp.y1),
lp.vertical ? Math.Abs(lp.y2 - lp.y1) :
Math.Abs(lp.x2 - lp.x1) + 1);
m_di = new distance_interpolator4(lp.x1, lp.y1, lp.x2, lp.y2, sx, sy, ex, ey, lp.len, scale_x,
lp.x1 & ~LineAABasics.line_subpixel_mask, lp.y1 & ~LineAABasics.line_subpixel_mask);
m_ren=ren;
m_x = (lp.x1 >> LineAABasics.line_subpixel_shift);
m_y = (lp.y1 >> LineAABasics.line_subpixel_shift);
m_old_x=(m_x);
m_old_y=(m_y);
m_count = ((lp.vertical ? Math.Abs((lp.y2 >> LineAABasics.line_subpixel_shift) - m_y) :
Math.Abs((lp.x2 >> LineAABasics.line_subpixel_shift) - m_x)));
m_width=(ren.subpixel_width());
//m_max_extent(m_width >> (LineAABasics.line_subpixel_shift - 2));
m_max_extent = ((m_width + LineAABasics.line_subpixel_scale) >> LineAABasics.line_subpixel_shift);
m_start=(pattern_start + (m_max_extent + 2) * ren.pattern_width());
m_step=(0);
dda2_line_interpolator li = new dda2_line_interpolator(0, lp.vertical ?
(lp.dy << LineAABasics.line_subpixel_shift) :
(lp.dx << LineAABasics.line_subpixel_shift),
lp.len);
uint i;
int stop = m_width + LineAABasics.line_subpixel_scale * 2;
for(i = 0; i < max_half_width; ++i)
{
m_dist_pos[i] = li.y();
if(m_dist_pos[i] >= stop) break;
++li;
}
m_dist_pos[i] = 0x7FFF0000;
int dist1_start;
int dist2_start;
int npix = 1;
if(lp.vertical)
{
do
{
--m_li;
m_y -= lp.inc;
m_x = (m_lp.x1 + m_li.y()) >> LineAABasics.line_subpixel_shift;
if(lp.inc > 0) m_di.dec_y(m_x - m_old_x);
else m_di.inc_y(m_x - m_old_x);
m_old_x = m_x;
dist1_start = dist2_start = m_di.dist_start();
int dx = 0;
if(dist1_start < 0) ++npix;
do
{
dist1_start += m_di.dy_start();
dist2_start -= m_di.dy_start();
if(dist1_start < 0) ++npix;
if(dist2_start < 0) ++npix;
++dx;
}
while(m_dist_pos[dx] <= m_width);
if(npix == 0) break;
npix = 0;
}
while(--m_step >= -m_max_extent);
}
else
{
do
{
--m_li;
m_x -= lp.inc;
m_y = (m_lp.y1 + m_li.y()) >> LineAABasics.line_subpixel_shift;
if(lp.inc > 0) m_di.dec_x(m_y - m_old_y);
else m_di.inc_x(m_y - m_old_y);
m_old_y = m_y;
dist1_start = dist2_start = m_di.dist_start();
int dy = 0;
if(dist1_start < 0) ++npix;
do
{
dist1_start -= m_di.dx_start();
dist2_start += m_di.dx_start();
if(dist1_start < 0) ++npix;
if(dist2_start < 0) ++npix;
++dy;
}
while(m_dist_pos[dy] <= m_width);
if(npix == 0) break;
npix = 0;
}
while(--m_step >= -m_max_extent);
}
m_li.adjust_forward();
m_step -= m_max_extent;
*/
}
//---------------------------------------------------------------------
public bool step_hor()
{
throw new NotImplementedException();
/*
++m_li;
m_x += m_lp.inc;
m_y = (m_lp.y1 + m_li.y()) >> LineAABasics.line_subpixel_shift;
if(m_lp.inc > 0) m_di.inc_x(m_y - m_old_y);
else m_di.dec_x(m_y - m_old_y);
m_old_y = m_y;
int s1 = m_di.dist() / m_lp.len;
int s2 = -s1;
if(m_lp.inc < 0) s1 = -s1;
int dist_start;
int dist_pict;
int dist_end;
int dy;
int dist;
dist_start = m_di.dist_start();
dist_pict = m_di.dist_pict() + m_start;
dist_end = m_di.dist_end();
RGBA_Bytes* p0 = m_colors + max_half_width + 2;
RGBA_Bytes* p1 = p0;
int npix = 0;
p1->clear();
if(dist_end > 0)
{
if(dist_start <= 0)
{
m_ren.pixel(p1, dist_pict, s2);
}
++npix;
}
++p1;
dy = 1;
while((dist = m_dist_pos[dy]) - s1 <= m_width)
{
dist_start -= m_di.dx_start();
dist_pict -= m_di.dx_pict();
dist_end -= m_di.dx_end();
p1->clear();
if(dist_end > 0 && dist_start <= 0)
{
if(m_lp.inc > 0) dist = -dist;
m_ren.pixel(p1, dist_pict, s2 - dist);
++npix;
}
++p1;
++dy;
}
dy = 1;
dist_start = m_di.dist_start();
dist_pict = m_di.dist_pict() + m_start;
dist_end = m_di.dist_end();
while((dist = m_dist_pos[dy]) + s1 <= m_width)
{
dist_start += m_di.dx_start();
dist_pict += m_di.dx_pict();
dist_end += m_di.dx_end();
--p0;
p0->clear();
if(dist_end > 0 && dist_start <= 0)
{
if(m_lp.inc > 0) dist = -dist;
m_ren.pixel(p0, dist_pict, s2 + dist);
++npix;
}
++dy;
}
m_ren.blend_color_vspan(m_x,
m_y - dy + 1,
(uint)(p1 - p0),
p0);
return npix && ++m_step < m_count;
*/
}
//---------------------------------------------------------------------
public bool step_ver()
{
throw new NotImplementedException();
/*
++m_li;
m_y += m_lp.inc;
m_x = (m_lp.x1 + m_li.y()) >> LineAABasics.line_subpixel_shift;
if(m_lp.inc > 0) m_di.inc_y(m_x - m_old_x);
else m_di.dec_y(m_x - m_old_x);
m_old_x = m_x;
int s1 = m_di.dist() / m_lp.len;
int s2 = -s1;
if(m_lp.inc > 0) s1 = -s1;
int dist_start;
int dist_pict;
int dist_end;
int dist;
int dx;
dist_start = m_di.dist_start();
dist_pict = m_di.dist_pict() + m_start;
dist_end = m_di.dist_end();
RGBA_Bytes* p0 = m_colors + max_half_width + 2;
RGBA_Bytes* p1 = p0;
int npix = 0;
p1->clear();
if(dist_end > 0)
{
if(dist_start <= 0)
{
m_ren.pixel(p1, dist_pict, s2);
}
++npix;
}
++p1;
dx = 1;
while((dist = m_dist_pos[dx]) - s1 <= m_width)
{
dist_start += m_di.dy_start();
dist_pict += m_di.dy_pict();
dist_end += m_di.dy_end();
p1->clear();
if(dist_end > 0 && dist_start <= 0)
{
if(m_lp.inc > 0) dist = -dist;
m_ren.pixel(p1, dist_pict, s2 + dist);
++npix;
}
++p1;
++dx;
}
dx = 1;
dist_start = m_di.dist_start();
dist_pict = m_di.dist_pict() + m_start;
dist_end = m_di.dist_end();
while((dist = m_dist_pos[dx]) + s1 <= m_width)
{
dist_start -= m_di.dy_start();
dist_pict -= m_di.dy_pict();
dist_end -= m_di.dy_end();
--p0;
p0->clear();
if(dist_end > 0 && dist_start <= 0)
{
if(m_lp.inc > 0) dist = -dist;
m_ren.pixel(p0, dist_pict, s2 - dist);
++npix;
}
++dx;
}
m_ren.blend_color_hspan(m_x - dx + 1,
m_y,
(uint)(p1 - p0),
p0);
return npix && ++m_step < m_count;
*/
}
//---------------------------------------------------------------------
public int pattern_end() { return m_start + m_di.len(); }
//---------------------------------------------------------------------
public bool vertical() { return m_lp.vertical; }
public int width() { return m_width; }
}
#endif
//===================================================renderer_outline_image
//template<class BaseRenderer, class ImagePattern>
// C# port of AGG's renderer_outline_image: strokes lines by sampling an
// image pattern rather than a solid color. Several members are still
// unported and throw NotImplementedException.
public class ImageLineRenderer : LineRenderer
{
    private IImageByte m_ren;               // destination image
    private line_image_pattern m_pattern;   // pattern sampled along the line
    private int m_start;                    // pattern start, line-subpixel units
    private double m_scale_x;               // horizontal pattern scale
    private RectangleInt m_clip_box;        // kept for the unported clipping path
    //private bool m_clipping;

    public ImageLineRenderer(IImageByte ren, line_image_pattern patt)
    {
        m_ren = ren;
        m_pattern = patt;
        m_start = 0;
        m_scale_x = 1.0;
        m_clip_box = new RectangleInt(0, 0, 0, 0);
        //m_clipping = false;
    }

    // Redirect output to a different destination image.
    public void attach(IImageByte ren)
    {
        m_ren = ren;
    }

    public void pattern(line_image_pattern p)
    {
        m_pattern = p;
    }

    public line_image_pattern pattern()
    {
        return m_pattern;
    }

    public void reset_clipping()
    {
        //m_clipping = false;
    }

    // Stores the clip rectangle; clipping itself is in the unported line3 path.
    public void clip_box(double x1, double y1, double x2, double y2)
    {
        m_clip_box.Left = line_coord_sat.conv(x1);
        m_clip_box.Bottom = line_coord_sat.conv(y1);
        m_clip_box.Right = line_coord_sat.conv(x2);
        m_clip_box.Top = line_coord_sat.conv(y2);
        //m_clipping = true;
    }

    public void scale_x(double s)
    {
        m_scale_x = s;
    }

    public double scale_x()
    {
        return m_scale_x;
    }

    // Pattern start position: exposed in pixels, stored in subpixel units.
    public void start_x(double s)
    {
        m_start = agg_basics.iround(s * LineAABasics.line_subpixel_scale);
    }

    public double start_x()
    {
        return (double)(m_start) / LineAABasics.line_subpixel_scale;
    }

    public int subpixel_width()
    {
        return m_pattern.line_width();
    }

    public int pattern_width()
    {
        return m_pattern.pattern_width();
    }

    public double width()
    {
        return (double)(subpixel_width()) / LineAABasics.line_subpixel_scale;
    }

    public void pixel(Color[] p, int offset, int x, int y)
    {
        // Unported; the original delegated to m_pattern.pixel(p, x, y).
        throw new NotImplementedException();
    }

    public void blend_color_hspan(int x, int y, uint len, Color[] colors, int colorsOffset)
    {
        // Unported; the original delegated to m_ren.blend_color_hspan(...).
        throw new NotImplementedException();
    }

    public void blend_color_vspan(int x, int y, uint len, Color[] colors, int colorsOffset)
    {
        // Unported; the original delegated to m_ren.blend_color_vspan(...).
        throw new NotImplementedException();
    }

    public static bool accurate_join_only()
    {
        return true;
    }

    // Caps, joins and pies are intentionally no-ops for this renderer.
    public override void semidot(CompareFunction cmp, int xc1, int yc1, int xc2, int yc2)
    {
    }

    public override void semidot_hline(CompareFunction cmp,
                                       int xc1, int yc1, int xc2, int yc2,
                                       int x1, int y1, int x2)
    {
    }

    public override void pie(int xc, int yc, int x1, int y1, int x2, int y2)
    {
    }

    public override void line0(line_parameters lp)
    {
    }

    public override void line1(line_parameters lp, int sx, int sy)
    {
    }

    public override void line2(line_parameters lp, int ex, int ey)
    {
    }

    public void line3_no_clip(line_parameters lp,
                              int sx, int sy, int ex, int ey)
    {
        // Unported. The AGG original splits segments longer than
        // line_max_length, fixes degenerate bisectrices and then drives a
        // line_interpolator_image over the segment.
        throw new NotImplementedException();
    }

    public override void line3(line_parameters lp,
                               int sx, int sy, int ex, int ey)
    {
        // Unported. The AGG original clips the segment against m_clip_box
        // and forwards the visible portion to line3_no_clip.
        throw new NotImplementedException();
    }
};
#endif
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Runtime.Serialization;
using System.Diagnostics;
namespace System.Xml
{
internal enum StringHandleConstStringType
{
    // The numeric values double as indexes into StringHandle.s_constStrings
    // ("type", "root", "item") — keep both in sync.
    Type = 0,
    Root = 1,
    Item = 2
}
// A lightweight handle onto a string that may live in one of four forms:
// raw UTF-8 bytes in the reader's buffer, escaped UTF-8, a dictionary entry
// referenced by key, or one of the well-known constant strings.
internal class StringHandle : IEquatable<StringHandle>
{
    private XmlBufferReader _bufferReader;
    private StringHandleType _type;
    private int _key;      // Dictionary key, or index into s_constStrings for ConstString
    private int _offset;   // Buffer offset for UTF8/EscapedUTF8 values
    private int _length;   // Buffer length for UTF8/EscapedUTF8 values
    // Order must match StringHandleConstStringType.
    private static string[] s_constStrings = {
        "type",
        "root",
        "item"
    };
    public StringHandle(XmlBufferReader bufferReader)
    {
        _bufferReader = bufferReader;
        SetValue(0, 0);
    }
    // Points the handle at a UTF-8 span in the reader's buffer.
    public void SetValue(int offset, int length)
    {
        _type = StringHandleType.UTF8;
        _offset = offset;
        _length = length;
    }
    public void SetConstantValue(StringHandleConstStringType constStringType)
    {
        _type = StringHandleType.ConstString;
        _key = (int)constStringType;
    }
    public void SetValue(int offset, int length, bool escaped)
    {
        _type = (escaped ? StringHandleType.EscapedUTF8 : StringHandleType.UTF8);
        _offset = offset;
        _length = length;
    }
    // Points the handle at a dictionary string by key.
    public void SetValue(int key)
    {
        _type = StringHandleType.Dictionary;
        _key = key;
    }
    public void SetValue(StringHandle value)
    {
        _type = value._type;
        _key = value._key;
        _offset = value._offset;
        _length = value._length;
    }
    public bool IsEmpty
    {
        get
        {
            // Fast path for the common UTF8 case; otherwise fall back to a
            // full comparison against the empty string.
            if (_type == StringHandleType.UTF8)
                return _length == 0;
            return Equals2(string.Empty);
        }
    }
    public bool IsXmlns
    {
        get
        {
            if (_type == StringHandleType.UTF8)
            {
                // Compare the raw bytes directly to avoid materializing a string.
                if (_length != 5)
                    return false;
                byte[] buffer = _bufferReader.Buffer;
                int offset = _offset;
                return buffer[offset + 0] == 'x' &&
                       buffer[offset + 1] == 'm' &&
                       buffer[offset + 2] == 'l' &&
                       buffer[offset + 3] == 'n' &&
                       buffer[offset + 4] == 's';
            }
            return Equals2("xmlns");
        }
    }
    public void ToPrefixHandle(PrefixHandle prefix)
    {
        DiagnosticUtility.DebugAssert(_type == StringHandleType.UTF8, "");
        prefix.SetValue(_offset, _length);
    }
    // Resolves the handle to a string interned in the given name table.
    public string GetString(XmlNameTable nameTable)
    {
        StringHandleType type = _type;
        if (type == StringHandleType.UTF8)
            return _bufferReader.GetString(_offset, _length, nameTable);
        if (type == StringHandleType.Dictionary)
            return nameTable.Add(_bufferReader.GetDictionaryString(_key).Value);
        DiagnosticUtility.DebugAssert(type == StringHandleType.ConstString, "Should be ConstString");
        //If not Utf8 then the StringHandleType is ConstString
        return nameTable.Add(s_constStrings[_key]);
    }
    public string GetString()
    {
        StringHandleType type = _type;
        if (type == StringHandleType.UTF8)
            return _bufferReader.GetString(_offset, _length);
        if (type == StringHandleType.Dictionary)
            return _bufferReader.GetDictionaryString(_key).Value;
        DiagnosticUtility.DebugAssert(type == StringHandleType.ConstString, "Should be ConstString");
        //If not Utf8 then the StringHandleType is ConstString
        return s_constStrings[_key];
    }
    // Returns the UTF-8 bytes of the value; for the UTF8 case this aliases the
    // reader's buffer, otherwise a fresh array is allocated.
    public byte[] GetString(out int offset, out int length)
    {
        StringHandleType type = _type;
        if (type == StringHandleType.UTF8)
        {
            offset = _offset;
            length = _length;
            return _bufferReader.Buffer;
        }
        if (type == StringHandleType.Dictionary)
        {
            byte[] buffer = _bufferReader.GetDictionaryString(_key).ToUTF8();
            offset = 0;
            length = buffer.Length;
            return buffer;
        }
        if (type == StringHandleType.ConstString)
        {
            byte[] buffer = XmlConverter.ToBytes(s_constStrings[_key]);
            offset = 0;
            length = buffer.Length;
            return buffer;
        }
        else
        {
            DiagnosticUtility.DebugAssert(type == StringHandleType.EscapedUTF8, "");
            byte[] buffer = XmlConverter.ToBytes(_bufferReader.GetEscapedString(_offset, _length));
            offset = 0;
            length = buffer.Length;
            return buffer;
        }
    }
    public bool TryGetDictionaryString(out XmlDictionaryString value)
    {
        if (_type == StringHandleType.Dictionary)
        {
            value = _bufferReader.GetDictionaryString(_key);
            return true;
        }
        else if (IsEmpty)
        {
            value = XmlDictionaryString.Empty;
            return true;
        }
        value = null;
        return false;
    }
    public override string ToString()
    {
        return GetString();
    }
    private bool Equals2(int key2, XmlBufferReader bufferReader2)
    {
        StringHandleType type = _type;
        if (type == StringHandleType.Dictionary)
            return _bufferReader.Equals2(_key, key2, bufferReader2);
        if (type == StringHandleType.UTF8)
            return _bufferReader.Equals2(_offset, _length, bufferReader2.GetDictionaryString(key2).Value);
        DiagnosticUtility.DebugAssert(type == StringHandleType.EscapedUTF8 || type == StringHandleType.ConstString, "");
        return GetString() == _bufferReader.GetDictionaryString(key2).Value;
    }
    private bool Equals2(XmlDictionaryString xmlString2)
    {
        StringHandleType type = _type;
        if (type == StringHandleType.Dictionary)
            return _bufferReader.Equals2(_key, xmlString2);
        if (type == StringHandleType.UTF8)
            return _bufferReader.Equals2(_offset, _length, xmlString2.ToUTF8());
        DiagnosticUtility.DebugAssert(type == StringHandleType.EscapedUTF8 || type == StringHandleType.ConstString, "");
        return GetString() == xmlString2.Value;
    }
    private bool Equals2(string s2)
    {
        StringHandleType type = _type;
        if (type == StringHandleType.Dictionary)
            return _bufferReader.GetDictionaryString(_key).Value == s2;
        if (type == StringHandleType.UTF8)
            return _bufferReader.Equals2(_offset, _length, s2);
        DiagnosticUtility.DebugAssert(type == StringHandleType.ConstString, "");
        return GetString() == s2;
    }
    private bool Equals2(int offset2, int length2, XmlBufferReader bufferReader2)
    {
        StringHandleType type = _type;
        if (type == StringHandleType.Dictionary)
            return bufferReader2.Equals2(offset2, length2, _bufferReader.GetDictionaryString(_key).Value);
        if (type == StringHandleType.UTF8)
            return _bufferReader.Equals2(_offset, _length, bufferReader2, offset2, length2);
        DiagnosticUtility.DebugAssert(type == StringHandleType.EscapedUTF8 || type == StringHandleType.ConstString, "");
        return GetString() == _bufferReader.GetString(offset2, length2);
    }
    public bool Equals(StringHandle other)
    {
        if (ReferenceEquals(other, null))
            return false;
        StringHandleType type = other._type;
        if (type == StringHandleType.Dictionary)
            return Equals2(other._key, other._bufferReader);
        if (type == StringHandleType.UTF8)
            return Equals2(other._offset, other._length, other._bufferReader);
        DiagnosticUtility.DebugAssert(type == StringHandleType.EscapedUTF8 || type == StringHandleType.ConstString, "");
        return Equals2(other.GetString());
    }
    public static bool operator ==(StringHandle s1, XmlDictionaryString xmlString2)
    {
        return s1.Equals2(xmlString2);
    }
    public static bool operator !=(StringHandle s1, XmlDictionaryString xmlString2)
    {
        return !s1.Equals2(xmlString2);
    }
    public static bool operator ==(StringHandle s1, string s2)
    {
        return s1.Equals2(s2);
    }
    public static bool operator !=(StringHandle s1, string s2)
    {
        return !s1.Equals2(s2);
    }
    public static bool operator ==(StringHandle s1, StringHandle s2)
    {
        // Fix: the previous implementation dereferenced s1 unconditionally,
        // so a null left operand threw NullReferenceException instead of
        // producing a comparison result.
        if (ReferenceEquals(s1, s2))
            return true;
        if (ReferenceEquals(s1, null) || ReferenceEquals(s2, null))
            return false;
        return s1.Equals(s2);
    }
    public static bool operator !=(StringHandle s1, StringHandle s2)
    {
        return !(s1 == s2);
    }
    public int CompareTo(StringHandle that)
    {
        // Byte-wise comparison when both sides live in a buffer; otherwise
        // fall back to ordinal string comparison.
        if (_type == StringHandleType.UTF8 && that._type == StringHandleType.UTF8)
            return _bufferReader.Compare(_offset, _length, that._offset, that._length);
        else
            return string.Compare(this.GetString(), that.GetString(), StringComparison.Ordinal);
    }
    public override bool Equals(object obj)
    {
        return Equals(obj as StringHandle);
    }
    public override int GetHashCode()
    {
        return GetString().GetHashCode();
    }
    private enum StringHandleType
    {
        Dictionary,
        UTF8,
        EscapedUTF8,
        ConstString
    }
}
}
| |
using System;
using System.Threading.Tasks;
using Ceen;
using Ceen.Mvc;
using Ceen.Database;
using Newtonsoft.Json;
using System.Net.Http;
using System.Text;
namespace Ceen.PaaS.API
{
/// <summary>
/// Handler class for the signup page
/// </summary>
public class SignupHandler : ControllerBase, IAPIv1
{
    /// <summary>
    /// The error codes for signup
    /// </summary>
    public enum StatusCode
    {
        /// <summary>The signup was a success, wait for the activation email</summary>
        [Code("SUCCESS")]
        [Description(null, "We received your signup. We sent you an email with a confirmation link. You must click the link within 24 hours to confirm the signup. If you do not see the email in your inbox, please check your spam folder.")]
        Success,
        /// <summary>One or more fields were invalid or missing</summary>
        [Code("INVALID_FIELD")]
        [Description(null, "One or more fields do not have valid values")]
        InvalidField,
        /// <summary>The supplied token was invalid</summary>
        [Code("INVALID_TOKEN")]
        [Description(null, "Your browser did not send a valid message. Reload the page and try again. You must have javascript activated in your browser to use our signup form.")]
        InvalidToken,
        /// <summary>The supplied token was valid, but cannot be used yet</summary>
        [Code("TOO_FAST")]
        [Description(null, "You typed very fast, please wait a few seconds and try again")]
        TooFastInput,
        /// <summary>The account is already signed up</summary>
        [Code("ALREADY_ACTIVE")]
        [Description(null, "You are already signed up and have confirmed it.")]
        AlreadyActivated,
        /// <summary>A new activation email was sent</summary>
        [Code("CHECK_EMAIL")]
        // Fixed typo in the user-facing text: "check you spam folder" -> "check your spam folder".
        [Description(null, "We sent you a new activation email, please check your spam folder and make sure you click the activation link within 24 hours.")]
        SentActivationEmail,
        /// <summary>An activation email is already on the way</summary>
        [Code("WAIT_FOR_EMAIL")]
        [Description(null, "We recently sent you an activation email. Please wait a few minutes for the email to arrive and be sure to check your spam folder.")]
        WaitForActivationEmail,
        /// <summary>The caller's IP has issued too many signup requests</summary>
        [Code("TOO_MANY_REQUESTS")]
        [Description(null, "Too many signup requests from this IP. Please wait an hour and try again.")]
        TooManyRequestFromIp,
    }
    /// <summary>
    /// Attempts to locate a localized message, and returns the default message if none is found
    /// </summary>
    /// <param name="code">The status code to respond</param>
    /// <param name="language">The preferred user language</param>
    /// <param name="fieldname">The field to report the error in, if any</param>
    /// <returns>A result</returns>
    private Task<SignupResult> GetTranslatedMessageAsync(StatusCode code, string language, string fieldname = null)
        // TODO: Consider a cache for these
        => DB.RunInTransactionAsync(db =>
            GetTranslatedMessage(db, code, language, fieldname)
        );
    /// <summary>
    /// Attempts to locate a localized message, and returns the default message if none is found
    /// </summary>
    /// <param name="db">The database instance to use</param>
    /// <param name="code">The status code to respond</param>
    /// <param name="language">The preferred user language</param>
    /// <param name="fieldname">The field to report the error in, if any</param>
    /// <returns>A result</returns>
    private SignupResult GetTranslatedMessage(System.Data.IDbConnection db, StatusCode code, string language, string fieldname = null)
    {
        // TODO: Consider a cache for these
        var res = Services.TextHelper.GetTextFromDb(db, TextConstants.SignupMessagesPrefix + code.ToString(), language);
        return new SignupResult(code, language, fieldname, res);
    }
    /// <summary>
    /// The class for reporting a signup result
    /// </summary>
    public class SignupResult : SignalResponseBase<StatusCode>
    {
        /// <summary>
        /// The field with an error, if any
        /// </summary>
        public readonly string Fieldname;
        /// <summary>
        /// Constructs a new signup result class
        /// </summary>
        /// <param name="code">The status code to use</param>
        /// <param name="language">The language to use</param>
        /// <param name="fieldname">The field with an error, if any</param>
        /// <param name="overridemessage">The overridemessage, if any</param>
        public SignupResult(StatusCode code, string language, string fieldname, string overridemessage)
            : base(code, language, overridemessage)
        {
            Fieldname = fieldname;
        }
    }
    // TODO: Use loader context, and allow configuration
    /// <summary>
    /// The minimum time the user is expected to use when entering their information
    /// </summary>
    public static readonly TimeSpan MIN_INPUT_TIME = TimeSpan.FromSeconds(5);
    /// <summary>
    /// The minimum time the user must wait before we re-send the activation email
    /// </summary>
    public static readonly TimeSpan MIN_EMAIL_TIME = TimeSpan.FromMinutes(5);
    /// <summary>
    /// The maximum time a token is valid
    /// </summary>
    public static readonly TimeSpan MAX_TOKEN_LIFETIME = TimeSpan.FromDays(1);
    /// <summary>
    /// Creates a token for signing up
    /// </summary>
    [HttpPost]
    public async Task<IResult> Create()
    {
        var rec = new Database.Signup.RequestToken() {
            WhenCreated = DateTime.Now
        };
        await DB.RunInTransactionAsync(db => {
            db.InsertItem(rec);
        });
        return Json(new { Token = rec.ID });
    }
    /// <summary>
    /// Handles a confirmation request
    /// </summary>
    /// <param name="code">The activation code</param>
    [HttpPost]
    public Task<IResult> Confirm(string code)
    {
        return DB.RunInTransactionAsync(db =>
        {
            var entry = db.SelectSingle<Database.Signup.SignupEntry>(x => x.ActivationCode == code);
            if (entry == null)
                return Status(Forbidden, "Invalid token");
            if (DateTime.Now - entry.When > MAX_TOKEN_LIFETIME)
                return Status(BadRequest, "Token exists but is no longer valid, please sign up again");
            entry.Status = Database.Signup.SignupStatus.Confirmed;
            db.UpdateItem(entry);
            return OK;
        });
    }
    /// <summary>
    /// The request from the client for a signup
    /// </summary>
    public class SignupData
    {
        /// <summary>
        /// The name to use
        /// </summary>
        public string Name;
        /// <summary>
        /// The email to use
        /// </summary>
        public string Email;
        /// <summary>
        /// The token to use
        /// </summary>
        public string Token;
    }
    /// <summary>
    /// Handles a signup request
    /// </summary>
    /// <param name="data">The submitted name, email and anti-bot token</param>
    [HttpPost]
    public async Task<IResult> Index(SignupData data)
    {
        var language = Services.LocaleHelper.GetBestLocale(Context.Request);
        // Validate the individual fields before touching the database
        if (string.IsNullOrWhiteSpace(data.Name))
            return await GetTranslatedMessageAsync(StatusCode.InvalidField, language, "name");
        if (string.IsNullOrWhiteSpace(data.Email))
            return await GetTranslatedMessageAsync(StatusCode.InvalidField, language, "email");
        if (string.IsNullOrWhiteSpace(data.Token))
            return await GetTranslatedMessageAsync(StatusCode.InvalidToken, language);
        if (!Services.PasswordPolicy.IsValidEmail(data.Email))
            return await GetTranslatedMessageAsync(StatusCode.InvalidField, language, "email");
        Database.Signup.SignupEntry entry = null;
        var res = await DB.RunInTransactionAsync(async db => {
            var item = db.SelectItemById<Database.Signup.RequestToken>(data.Token);
            if (item == null)
                return GetTranslatedMessage(db, StatusCode.InvalidToken, language);
            // Tokens used faster than a human could type are treated as bots
            if ((DateTime.Now - item.WhenCreated) < MIN_INPUT_TIME)
                return GetTranslatedMessage(db, StatusCode.TooFastInput, language);
            // Delete this instance
            db.DeleteItem(item);
            // And delete everything older than 1h
            var deleteLimit = DateTime.Now - TimeSpan.FromHours(1);
            db.Delete<Database.Signup.RequestToken>(x => x.WhenCreated < deleteLimit);
            entry = db.SelectSingle<Database.Signup.SignupEntry>(x => x.Email == data.Email);
            if (entry != null)
            {
                if (entry.Status == Database.Signup.SignupStatus.Activated || entry.Status == Database.Signup.SignupStatus.Confirmed)
                    return GetTranslatedMessage(db, StatusCode.AlreadyActivated, language);
                // NOTE(review): SelectSingle has no explicit ordering here —
                // presumably there is at most one log row per recipient; confirm.
                var last_sent = db.SelectSingle<Database.SentEmailLog>(x => x.To == entry.Email);
                if (last_sent != null && (DateTime.Now - last_sent.When) < MIN_EMAIL_TIME)
                    return GetTranslatedMessage(db, StatusCode.WaitForActivationEmail, language);
                if (await Services.SendEmail.SignupEmail.ViolatesIPRateLimit())
                    return GetTranslatedMessage(db, StatusCode.TooManyRequestFromIp, language);
                // Create a new code, to avoid attacks based on the stale code
                entry.ActivationCode = Services.PasswordPolicy.GenerateActivationCode();
                entry.LastAttempt = DateTime.Now;
                db.UpdateItem(entry);
                return GetTranslatedMessage(db, StatusCode.SentActivationEmail, language);
            }
            // NOTE(review): the IP rate limit above is only enforced for re-sends;
            // confirm that brand-new signups are intentionally exempt.
            db.InsertItem(entry = new Database.Signup.SignupEntry()
            {
                Email = data.Email,
                Name = data.Name,
                ActivationCode = Services.PasswordPolicy.GenerateActivationCode(),
                Status = Database.Signup.SignupStatus.Created,
                LastAttempt = DateTime.Now,
                Locale = language
            });
            return GetTranslatedMessage(db, StatusCode.Success, language);
        });
        // Queue the sending without hogging the database lock
        if ((res.CodeValue == StatusCode.Success) || (res.CodeValue == StatusCode.SentActivationEmail))
            await Queues.SendSignupConfirmationEmailAsync(entry.Name, entry.Email, entry.ID, language);
        return res;
    }
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.Azure.Management.RecoveryServices.Backup;
using Microsoft.Azure.Management.RecoveryServices.Backup.Models;
using Newtonsoft.Json.Linq;
namespace Microsoft.Azure.Management.RecoveryServices.Backup
{
/// <summary>
/// The Resource Manager API includes operations for triggering and
/// managing restore actions of the items protected by your Recovery
/// Services Vault.
/// </summary>
internal partial class RestoreOperations : IServiceOperations<RecoveryServicesBackupManagementClient>, IRestoreOperations
{
/// <summary>
/// Initializes a new instance of the RestoreOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <remarks>
/// Generated code: manual edits may be lost when the client is regenerated.
/// </remarks>
internal RestoreOperations(RecoveryServicesBackupManagementClient client)
{
    this._client = client;
}
private RecoveryServicesBackupManagementClient _client;  // Owning service client, assigned once in the constructor.

/// <summary>
/// Gets a reference to the
/// Microsoft.Azure.Management.RecoveryServices.Backup.RecoveryServicesBackupManagementClient.
/// </summary>
public RecoveryServicesBackupManagementClient Client
{
    get { return this._client; }
}
/// <summary>
/// The Trigger Restore Operation starts an operation in the service
/// which triggers the restore of the specified item in the specified
/// container in your Recovery Services Vault based on the specified
/// recovery point ID. This is an asynchronous operation. To determine
/// whether the backend service has finished processing the request,
/// call Get Protected Item Operation Result API.
/// </summary>
/// <param name='resourceGroupName'>
/// Required. Resource group name of your recovery services vault.
/// </param>
/// <param name='resourceName'>
/// Required. Name of your recovery services vault.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <param name='fabricName'>
/// Optional. Fabric name of the protected item.
/// </param>
/// <param name='containerName'>
/// Optional. Name of the container where the protected item belongs to.
/// </param>
/// <param name='protectedItemName'>
/// Optional. Name of the protected item whose recovery points are to
/// be fetched.
/// </param>
/// <param name='recoveryPointId'>
/// Optional. ID of the recovery point whose details are to be fetched.
/// </param>
/// <param name='request'>
/// Optional. Restore request for the backup item.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Base recovery job response for all the asynchronous operations.
/// </returns>
public async Task<BaseRecoveryServicesJobResponse> TriggerRestoreAsync(string resourceGroupName, string resourceName, CustomRequestHeaders customRequestHeaders, string fabricName, string containerName, string protectedItemName, string recoveryPointId, TriggerRestoreRequest request, CancellationToken cancellationToken)
{
// Validate
if (resourceGroupName == null)
{
throw new ArgumentNullException("resourceGroupName");
}
if (resourceName == null)
{
throw new ArgumentNullException("resourceName");
}
if (request != null)
{
if (request.Item != null)
{
if (request.Item.Properties == null)
{
throw new ArgumentNullException("request.Item.Properties");
}
}
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("resourceName", resourceName);
tracingParameters.Add("customRequestHeaders", customRequestHeaders);
tracingParameters.Add("fabricName", fabricName);
tracingParameters.Add("containerName", containerName);
tracingParameters.Add("protectedItemName", protectedItemName);
tracingParameters.Add("recoveryPointId", recoveryPointId);
tracingParameters.Add("request", request);
TracingAdapter.Enter(invocationId, this, "TriggerRestoreAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + "/subscriptions/";
if (this.Client.Credentials.SubscriptionId != null)
{
url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
}
url = url + "/resourceGroups/";
url = url + Uri.EscapeDataString(resourceGroupName);
url = url + "/providers/";
if (this.Client.ResourceNamespace != null)
{
url = url + Uri.EscapeDataString(this.Client.ResourceNamespace);
}
url = url + "/";
url = url + "vaults";
url = url + "/";
url = url + Uri.EscapeDataString(resourceName);
url = url + "/backupFabrics/";
if (fabricName != null)
{
url = url + Uri.EscapeDataString(fabricName);
}
url = url + "/protectionContainers/";
if (containerName != null)
{
url = url + Uri.EscapeDataString(containerName);
}
url = url + "/protectedItems/";
if (protectedItemName != null)
{
url = url + Uri.EscapeDataString(protectedItemName);
}
url = url + "/recoveryPoints/";
if (recoveryPointId != null)
{
url = url + Uri.EscapeDataString(recoveryPointId);
}
url = url + "/restore";
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2016-05-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Post;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("Accept-Language", customRequestHeaders.Culture);
httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Serialize Request
string requestContent = null;
JToken requestDoc = null;
if (request != null)
{
if (request.Item != null)
{
JObject itemValue = new JObject();
requestDoc = itemValue;
JObject propertiesValue = new JObject();
itemValue["properties"] = propertiesValue;
if (request.Item.Properties is IaasVMRestoreRequest)
{
propertiesValue["objectType"] = "IaasVMRestoreRequest";
IaasVMRestoreRequest derived = ((IaasVMRestoreRequest)request.Item.Properties);
if (derived.RecoveryPointId != null)
{
propertiesValue["recoveryPointId"] = derived.RecoveryPointId;
}
if (derived.RecoveryType != null)
{
propertiesValue["recoveryType"] = derived.RecoveryType;
}
if (derived.StorageAccountId != null)
{
propertiesValue["storageAccountId"] = derived.StorageAccountId;
}
propertiesValue["createNewCloudService"] = derived.CreateNewCloudService;
if (derived.VirtualNetworkId != null)
{
propertiesValue["virtualNetworkId"] = derived.VirtualNetworkId;
}
if (derived.Region != null)
{
propertiesValue["region"] = derived.Region;
}
if (derived.AffinityGroup != null)
{
propertiesValue["affinityGroup"] = derived.AffinityGroup;
}
if (derived.SubnetId != null)
{
propertiesValue["subnetId"] = derived.SubnetId;
}
if (derived.SourceResourceId != null)
{
propertiesValue["sourceResourceId"] = derived.SourceResourceId;
}
if (derived.TargetVirtualMachineId != null)
{
propertiesValue["targetVirtualMachineId"] = derived.TargetVirtualMachineId;
}
if (derived.TargetResourceGroupId != null)
{
propertiesValue["targetResourceGroupId"] = derived.TargetResourceGroupId;
}
if (derived.TargetDomainNameId != null)
{
propertiesValue["targetDomainNameId"] = derived.TargetDomainNameId;
}
if (derived.EncryptionDetails != null)
{
JObject encryptionDetailsValue = new JObject();
propertiesValue["encryptionDetails"] = encryptionDetailsValue;
encryptionDetailsValue["encryptionEnabled"] = derived.EncryptionDetails.EncryptionEnabled;
if (derived.EncryptionDetails.KekUrl != null)
{
encryptionDetailsValue["kekUrl"] = derived.EncryptionDetails.KekUrl;
}
if (derived.EncryptionDetails.SecretKeyUrl != null)
{
encryptionDetailsValue["secretKeyUrl"] = derived.EncryptionDetails.SecretKeyUrl;
}
if (derived.EncryptionDetails.KekVaultId != null)
{
encryptionDetailsValue["kekVaultId"] = derived.EncryptionDetails.KekVaultId;
}
if (derived.EncryptionDetails.SecretKeyVaultId != null)
{
encryptionDetailsValue["secretKeyVaultId"] = derived.EncryptionDetails.SecretKeyVaultId;
}
}
}
}
}
requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json");
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.Accepted)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
BaseRecoveryServicesJobResponse result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.Accepted)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new BaseRecoveryServicesJobResponse();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
JToken locationValue = responseDoc["location"];
if (locationValue != null && locationValue.Type != JTokenType.Null)
{
string locationInstance = ((string)locationValue);
result.Location = locationInstance;
}
JToken azureAsyncOperationValue = responseDoc["azureAsyncOperation"];
if (azureAsyncOperationValue != null && azureAsyncOperationValue.Type != JTokenType.Null)
{
string azureAsyncOperationInstance = ((string)azureAsyncOperationValue);
result.AzureAsyncOperation = azureAsyncOperationInstance;
}
JToken retryAfterValue = responseDoc["retryAfter"];
if (retryAfterValue != null && retryAfterValue.Type != JTokenType.Null)
{
string retryAfterInstance = ((string)retryAfterValue);
result.RetryAfter = retryAfterInstance;
}
JToken statusValue = responseDoc["Status"];
if (statusValue != null && statusValue.Type != JTokenType.Null)
{
OperationStatus statusInstance = ((OperationStatus)Enum.Parse(typeof(OperationStatus), ((string)statusValue), true));
result.Status = statusInstance;
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("Azure-AsyncOperation"))
{
result.AzureAsyncOperation = httpResponse.Headers.GetValues("Azure-AsyncOperation").FirstOrDefault();
}
if (httpResponse.Headers.Contains("Location"))
{
result.Location = httpResponse.Headers.GetValues("Location").FirstOrDefault();
}
if (httpResponse.Headers.Contains("Retry-After"))
{
result.RetryAfter = httpResponse.Headers.GetValues("Retry-After").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
}
}
| |
/*
Copyright (C) 2013-2015 MetaMorph Software, Inc
Permission is hereby granted, free of charge, to any person obtaining a
copy of this data, including any software or models in source or binary
form, as well as any drawings, specifications, and documentation
(collectively "the Data"), to deal in the Data without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Data, and to
permit persons to whom the Data is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Data.
THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
=======================
This version of the META tools is a fork of an original version produced
by Vanderbilt University's Institute for Software Integrated Systems (ISIS).
Their license statement:
Copyright (C) 2011-2014 Vanderbilt University
Developed with the sponsorship of the Defense Advanced Research Projects
Agency (DARPA) and delivered to the U.S. Government with Unlimited Rights
as defined in DFARS 252.227-7013.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this data, including any software or models in source or binary
form, as well as any drawings, specifications, and documentation
(collectively "the Data"), to deal in the Data without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Data, and to
permit persons to whom the Data is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Data.
THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Xunit;
using System.IO;
using GME.MGA;
namespace DynamicsTeamTest.Projects
{
// Shared xUnit fixture: imports MSD_FailChecker.xme once per test class and
// exposes the resulting .mga path through the XmeImportFixture base class.
public class MSD_FailCheckerFixture : XmeImportFixture
{
    // Relative path of the .xme model the base fixture imports.
    protected override string xmeFilename
    {
        get { return Path.Combine("MSD_FailChecker", "MSD_FailChecker.xme"); }
    }
}
public partial class MSD_FailChecker : IUseFixture<MSD_FailCheckerFixture>
{
// Path of the .mga produced by the shared fixture's import.
internal string mgaFile { get { return this.fixture.mgaFile; } }
// Shared per-class fixture instance, supplied via SetFixture (IUseFixture).
private MSD_FailCheckerFixture fixture { get; set; }
/// <summary>
/// IUseFixture hook: receives the shared fixture before the tests run.
/// </summary>
public void SetFixture(MSD_FailCheckerFixture data)
{
    this.fixture = data;
}
//[Fact]
//[Trait("Model", "MSD_FailChecker")]
//[Trait("ProjectImport/Open", "MSD_FailChecker")]
//public void ProjectXmeImport()
//{
// Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");
//}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("ProjectImport/Open", "MSD_FailChecker")]
public void ProjectMgaOpen()
{
    // Opening and closing the imported project must leave the .mga on disk.
    string projectConnection = "MGA=" + mgaFile;

    MgaProject project = new MgaProject();
    project.OpenEx(projectConnection, "CyPhyML", null);
    project.Close(true);

    Assert.True(File.Exists(projectConnection.Substring("MGA=".Length)));
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_PropertyParameter_Testbench_MSD_DamperParameterHasNoValueNoConnection()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "PropertyParameter_Testbench_MSD_DamperParameterHasNoValueNoConnection",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@PropertyParameter|kind=Testing|relpos=0/@Testbench_MSD_DamperParameterHasNoValueNoConnection|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_PropertyParameter_Testbench_TestBenchPropertyHasNoValue()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "PropertyParameter_Testbench_TestBenchPropertyHasNoValue",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@PropertyParameter|kind=Testing|relpos=0/@Testbench_TestBenchPropertyHasNoValue|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_PropertyParameter_Testbench_TestBenchParamHasNoValue()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "PropertyParameter_Testbench_TestBenchParamHasNoValue",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@PropertyParameter|kind=Testing|relpos=0/@Testbench_TestBenchParamHasNoValue|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldPass", "MSD_FailChecker")]
public void Pass_ModelicaConnector_Testbench_FloatingModelicaConnector_damper()
{
    // The referenced test bench is expected to pass the checker.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaConnector_Testbench_FloatingModelicaConnector_damper",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaConnector|kind=Testing|relpos=0/@Testbench_FloatingModelicaConnector_damper|kind=TestBench|relpos=0");

    Assert.True(checkerPassed, "CyPhy2Modelica_v2 should have succeeded, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldPass", "MSD_FailChecker")]
public void Pass_ModelicaConnector_Testbench_FloatingModelicaConnector_CA()
{
    // The referenced test bench is expected to pass the checker.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaConnector_Testbench_FloatingModelicaConnector_CA",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaConnector|kind=Testing|relpos=0/@Testbench_FloatingModelicaConnector_CA|kind=TestBench|relpos=0");

    Assert.True(checkerPassed, "CyPhy2Modelica_v2 should have succeeded, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldPass", "MSD_FailChecker")]
public void Pass_ModelicaConnector_Testbench_FloatingModelicaConnector_TestComponent()
{
    // The referenced test bench is expected to pass the checker.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaConnector_Testbench_FloatingModelicaConnector_TestComponent",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaConnector|kind=Testing|relpos=0/@Testbench_FloatingModelicaConnector_TestComponent|kind=TestBench|relpos=0");

    Assert.True(checkerPassed, "CyPhy2Modelica_v2 should have succeeded, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ModelicaConnector_Testbench_ModelicaConnectorClassConflict_2errors()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaConnector_Testbench_ModelicaConnectorClassConflict_2errors",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaConnector|kind=Testing|relpos=0/@Testbench_ModelicaConnectorClassConflict_2errors|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ModelicaConnector_Testbench_ModelicaConnectorClassInvalid_1()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaConnector_Testbench_ModelicaConnectorClassInvalid_1",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaConnector|kind=Testing|relpos=0/@Testbench_ModelicaConnectorClassInvalid_1|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ModelicaConnector_Testbench_ModelicaConnectorClassInvalid_2()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaConnector_Testbench_ModelicaConnectorClassInvalid_2",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaConnector|kind=Testing|relpos=0/@Testbench_ModelicaConnectorClassInvalid_2|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_PostProcessingPathEmpty()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_PostProcessingPathEmpty",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_PostProcessingPathEmpty|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_PostProcessingFileDoesNotExist()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_PostProcessingFileDoesNotExist",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_PostProcessingFileDoesNotExist|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_2Workflows()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_2Workflows",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_2Workflows|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_PostProcessingScriptIsNotPython()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_PostProcessingScriptIsNotPython",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_PostProcessingScriptIsNotPython|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_WorkflowRefIsNull()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_WorkflowRefIsNull",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_WorkflowRefIsNull|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_2SolverSettings()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_2SolverSettings",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_2SolverSettings|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_2SUTs()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_2SUTs",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_2SUTs|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_WorkflowHasNoTask()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_WorkflowHasNoTask",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_WorkflowHasNoTask|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_TestbenchSetup_Testbench_workflowWrongTask()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "TestbenchSetup_Testbench_workflowWrongTask",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@TestbenchSetup|kind=Testing|relpos=0/@Testbench_workflowWrongTask|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NameInvalidComponent()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NameInvalidComponent",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NameInvalidComponent|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NameInvalidComponentRef()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NameInvalidComponentRef",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NameInvalidComponentRef|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NameInvalidModelicaParameter()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NameInvalidModelicaParameter",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NameInvalidModelicaParameter|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NameInvalidParameter()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NameInvalidParameter",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NameInvalidParameter|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NameInvalidProperty()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NameInvalidProperty",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NameInvalidProperty|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NonUniqueNames_CA()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NonUniqueNames_CA",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NonUniqueNames_CA|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_Names_Testbench_NonUniqueNamesTestComponents()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "Names_Testbench_NonUniqueNamesTestComponents",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Names|kind=Testing|relpos=0/@Testbench_NonUniqueNamesTestComponents|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ModelicaModelURI_Testbench_ModelicaUriInvalid_2()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaModelURI_Testbench_ModelicaUriInvalid_2",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaModelURI|kind=Testing|relpos=0/@Testbench_ModelicaUriInvalid_2|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ModelicaModelURI_Testbench_ModelicaUriInvalid_1()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ModelicaModelURI_Testbench_ModelicaUriInvalid_1",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ModelicaModelURI|kind=Testing|relpos=0/@Testbench_ModelicaUriInvalid_1|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ComponentsInTree_ComponentDefinedInCA()
{
    // The referenced test bench models a rule violation; the checker must fail.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    bool checkerPassed = CyPhy2ModelicaRunner.Run(
        "ComponentsInTree_ComponentDefinedInCA",
        mgaFile,
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ComponentsInTree|kind=Testing|relpos=0/@ComponentDefinedInCA|kind=TestBench|relpos=0");

    Assert.False(checkerPassed, "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ComponentsInTree_ComponentDefininedInOtherCA()
{
    // "Definined" (sic) matches the object's name in the model; the test name
    // is kept in sync with it rather than corrected.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ComponentsInTree|kind=Testing|relpos=0/@ComponentDefininedInOtherCA|kind=TestBench|relpos=0";

    // This model violates a checker rule, so the run is expected to be rejected.
    Assert.False(
        CyPhy2ModelicaRunner.Run("ComponentsInTree_ComponentDefininedInOtherCA", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ComponentsInTree_ComponentDefinedInOtherTB()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ComponentsInTree|kind=Testing|relpos=0/@ComponentDefinedInOtherTB|kind=TestBench|relpos=0";

    // This model violates a checker rule, so the run is expected to be rejected.
    Assert.False(
        CyPhy2ModelicaRunner.Run("ComponentsInTree_ComponentDefinedInOtherTB", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CheckerShouldFail", "MSD_FailChecker")]
public void Fail_ComponentsInTree_ComponentDefinedInThisTB()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@ComponentsInTree|kind=Testing|relpos=0/@ComponentDefinedInThisTB|kind=TestBench|relpos=0";

    // This model violates a checker rule, so the run is expected to be rejected.
    Assert.False(
        CyPhy2ModelicaRunner.Run("ComponentsInTree_ComponentDefinedInThisTB", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 should have failed, but did not.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesFailures_Testbench()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesFailures|kind=Testing|relpos=0/@Testbench|kind=TestBench|relpos=0";

    // Generation is expected to succeed for this bench.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesFailures_Testbench", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesWarnings_Testbench_Damper_ParameterHasNoValue()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesWarnings|kind=Testing|relpos=0/@Testbench_Damper_ParameterHasNoValue|kind=TestBench|relpos=0";

    // Warning-level rule violations must not abort generation.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesWarnings_Testbench_Damper_ParameterHasNoValue", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesWarnings_Testbench_TestComponentParamHasNoValue()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesWarnings|kind=Testing|relpos=0/@Testbench_TestComponentParamHasNoValue|kind=TestBench|relpos=0";

    // Warning-level rule violations must not abort generation.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesWarnings_Testbench_TestComponentParamHasNoValue", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesWarnings_Testbench_Damper_PropertyHasNoValue()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesWarnings|kind=Testing|relpos=0/@Testbench_Damper_PropertyHasNoValue|kind=TestBench|relpos=0";

    // Warning-level rule violations must not abort generation.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesWarnings_Testbench_Damper_PropertyHasNoValue", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesWarnings_Testbench_TestComponentPropertyHasNoValue()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesWarnings|kind=Testing|relpos=0/@Testbench_TestComponentPropertyHasNoValue|kind=TestBench|relpos=0";

    // Warning-level rule violations must not abort generation.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesWarnings_Testbench_TestComponentPropertyHasNoValue", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesWarnings_Testbench_CAPropertyHasNoValue()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesWarnings|kind=Testing|relpos=0/@Testbench_CAPropertyHasNoValue|kind=TestBench|relpos=0";

    // Warning-level rule violations must not abort generation.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesWarnings_Testbench_CAPropertyHasNoValue", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void RulesWarnings_Testbench_CAParameterHasNoValue()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@RulesWarnings|kind=Testing|relpos=0/@Testbench_CAParameterHasNoValue|kind=TestBench|relpos=0";

    // Warning-level rule violations must not abort generation.
    Assert.True(
        CyPhy2ModelicaRunner.Run("RulesWarnings_Testbench_CAParameterHasNoValue", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
[Fact]
[Trait("Model", "MSD_FailChecker")]
[Trait("CyPhy2Modelica", "MSD_FailChecker")]
public void Test_Benches_MassSpringDamperTest()
{
    // The shared .mga project must exist before the interpreter can be invoked.
    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");

    const string testBenchPath =
        "/@Test Benches|kind=Testing|relpos=0/@MassSpringDamperTest|kind=TestBench|relpos=0";

    // Generation is expected to succeed for this bench.
    Assert.True(
        CyPhy2ModelicaRunner.Run("Test Benches_MassSpringDamperTest", mgaFile, testBenchPath),
        "CyPhy2Modelica_v2 failed.");
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Composition;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.CSharp.LineSeparator
{
[ExportLanguageService(typeof(ILineSeparatorService), LanguageNames.CSharp), Shared]
internal class CSharpLineSeparatorService : ILineSeparatorService
{
    /// <summary>
    /// Given a tree returns line separator spans.
    /// The operation may take fairly long time on a big tree so it is cancelable.
    /// </summary>
    public async Task<IEnumerable<TextSpan>> GetLineSeparatorsAsync(
        Document document,
        TextSpan textSpan,
        CancellationToken cancellationToken)
    {
        var tree = await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
        var node = await tree.GetRootAsync(cancellationToken).ConfigureAwait(false);

        var spans = new List<TextSpan>();

        // Only containers (types, namespaces, the compilation unit) can hold
        // separable members; visit each one intersecting the requested span.
        var blocks = node.Traverse<SyntaxNode>(textSpan, IsSeparableContainer);

        foreach (var block in blocks)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return SpecializedCollections.EmptyEnumerable<TextSpan>();
            }

            var typeBlock = block as TypeDeclarationSyntax;
            if (typeBlock != null)
            {
                ProcessNodeList(typeBlock.Members, spans, cancellationToken);
                continue;
            }

            var namespaceBlock = block as NamespaceDeclarationSyntax;
            if (namespaceBlock != null)
            {
                ProcessUsings(namespaceBlock.Usings, spans, cancellationToken);
                ProcessNodeList(namespaceBlock.Members, spans, cancellationToken);
                continue;
            }

            var progBlock = block as CompilationUnitSyntax;
            if (progBlock != null)
            {
                ProcessUsings(progBlock.Usings, spans, cancellationToken);
                ProcessNodeList(progBlock.Members, spans, cancellationToken);
            }
        }

        return spans;
    }

    /// <summary>Node types that are interesting for line separation.</summary>
    private static bool IsSeparableBlock(SyntaxNode node)
    {
        if (SyntaxFacts.IsTypeDeclaration(node.Kind()))
        {
            return true;
        }

        switch (node.Kind())
        {
            case SyntaxKind.NamespaceDeclaration:
            case SyntaxKind.MethodDeclaration:
            case SyntaxKind.PropertyDeclaration:
            case SyntaxKind.EventDeclaration:
            case SyntaxKind.IndexerDeclaration:
            case SyntaxKind.ConstructorDeclaration:
            case SyntaxKind.DestructorDeclaration:
            case SyntaxKind.OperatorDeclaration:
            case SyntaxKind.ConversionOperatorDeclaration:
                return true;

            default:
                return false;
        }
    }

    /// <summary>Node types that may contain separable blocks.</summary>
    private static bool IsSeparableContainer(SyntaxNode node)
    {
        return node is TypeDeclarationSyntax ||
            node is NamespaceDeclarationSyntax ||
            node is CompilationUnitSyntax;
    }

    /// <summary>
    /// True when <paramref name="body"/> exists but either of its braces is
    /// missing, i.e. the enclosing declaration is syntactically incomplete.
    /// Shared by all the member-kind checks below.
    /// </summary>
    private static bool HasMissingBrace(BlockSyntax body)
    {
        return body != null &&
            (body.OpenBraceToken.IsMissing || body.CloseBraceToken.IsMissing);
    }

    private static bool IsBadType(SyntaxNode node)
    {
        var typeDecl = node as TypeDeclarationSyntax;
        return typeDecl != null &&
            (typeDecl.OpenBraceToken.IsMissing || typeDecl.CloseBraceToken.IsMissing);
    }

    private static bool IsBadEnum(SyntaxNode node)
    {
        var enumDecl = node as EnumDeclarationSyntax;
        return enumDecl != null &&
            (enumDecl.OpenBraceToken.IsMissing || enumDecl.CloseBraceToken.IsMissing);
    }

    private static bool IsBadMethod(SyntaxNode node)
    {
        var methodDecl = node as MethodDeclarationSyntax;
        return methodDecl != null && HasMissingBrace(methodDecl.Body);
    }

    private static bool IsBadProperty(SyntaxNode node)
    {
        return IsBadAccessorList(node as PropertyDeclarationSyntax);
    }

    private static bool IsBadEvent(SyntaxNode node)
    {
        return IsBadAccessorList(node as EventDeclarationSyntax);
    }

    private static bool IsBadIndexer(SyntaxNode node)
    {
        return IsBadAccessorList(node as IndexerDeclarationSyntax);
    }

    private static bool IsBadAccessorList(BasePropertyDeclarationSyntax baseProperty)
    {
        if (baseProperty?.AccessorList == null)
        {
            return false;
        }

        return baseProperty.AccessorList.OpenBraceToken.IsMissing ||
            baseProperty.AccessorList.CloseBraceToken.IsMissing;
    }

    private static bool IsBadConstructor(SyntaxNode node)
    {
        var constructorDecl = node as ConstructorDeclarationSyntax;
        return constructorDecl != null && HasMissingBrace(constructorDecl.Body);
    }

    private static bool IsBadDestructor(SyntaxNode node)
    {
        var destructorDecl = node as DestructorDeclarationSyntax;
        return destructorDecl != null && HasMissingBrace(destructorDecl.Body);
    }

    private static bool IsBadOperator(SyntaxNode node)
    {
        var operatorDecl = node as OperatorDeclarationSyntax;
        return operatorDecl != null && HasMissingBrace(operatorDecl.Body);
    }

    private static bool IsBadConversionOperator(SyntaxNode node)
    {
        var conversionDecl = node as ConversionOperatorDeclarationSyntax;
        return conversionDecl != null && HasMissingBrace(conversionDecl.Body);
    }

    /// <summary>A node too incomplete to safely draw a separator under.</summary>
    private static bool IsBadNode(SyntaxNode node)
    {
        if (node is IncompleteMemberSyntax)
        {
            return true;
        }

        return IsBadType(node) ||
            IsBadEnum(node) ||
            IsBadMethod(node) ||
            IsBadProperty(node) ||
            IsBadEvent(node) ||
            IsBadIndexer(node) ||
            IsBadConstructor(node) ||
            IsBadDestructor(node) ||
            IsBadOperator(node) ||
            IsBadConversionOperator(node);
    }

    private static void ProcessUsings(SyntaxList<UsingDirectiveSyntax> usings, List<TextSpan> spans, CancellationToken cancellationToken)
    {
        Contract.ThrowIfNull(spans);

        // A single separator after the last using directive divides the
        // directives from the members that follow.
        if (usings.Any())
        {
            AddLineSeparatorSpanForNode(usings.Last(), spans, cancellationToken);
        }
    }

    /// <summary>
    /// If node is separable and not the last in its container => add line separator after the node
    /// If node is separable and not the first in its container => ensure separator before the node
    /// last separable node in Program needs separator after it.
    /// </summary>
    private static void ProcessNodeList<T>(SyntaxList<T> children, List<TextSpan> spans, CancellationToken cancellationToken) where T : SyntaxNode
    {
        Contract.ThrowIfNull(spans);

        if (children.Count == 0)
        {
            // nothing to separate
            return;
        }

        // first child needs no separator
        var seenSeparator = true;
        for (int i = 0; i < children.Count - 1; i++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var cur = children[i];
            if (!IsSeparableBlock(cur))
            {
                seenSeparator = false;
            }
            else
            {
                if (!seenSeparator)
                {
                    // the previous child was not separable, so no separator was
                    // emitted after it; add one above the current block.
                    var prev = children[i - 1];
                    AddLineSeparatorSpanForNode(prev, spans, cancellationToken);
                }

                AddLineSeparatorSpanForNode(cur, spans, cancellationToken);
                seenSeparator = true;
            }
        }

        // last child may need separator only before it
        var lastChild = children.Last();
        if (IsSeparableBlock(lastChild))
        {
            if (!seenSeparator)
            {
                var nextToLast = children[children.Count - 2];
                AddLineSeparatorSpanForNode(nextToLast, spans, cancellationToken);
            }

            if (lastChild.IsParentKind(SyntaxKind.CompilationUnit))
            {
                AddLineSeparatorSpanForNode(lastChild, spans, cancellationToken);
            }
        }
    }

    private static void AddLineSeparatorSpanForNode(SyntaxNode node, List<TextSpan> spans, CancellationToken cancellationToken)
    {
        if (IsBadNode(node))
        {
            return;
        }

        var span = GetLineSeparatorSpanForNode(node);

        if (IsLegalSpanForLineSeparator(node.SyntaxTree, span, cancellationToken))
        {
            spans.Add(span);
        }
    }

    private static bool IsLegalSpanForLineSeparator(SyntaxTree syntaxTree, TextSpan textSpan, CancellationToken cancellationToken)
    {
        // A span is a legal location for a line separator if the following line
        // contains only whitespace or the span is the last line in the buffer.
        // Fetch the source text once rather than once per check.
        var lines = syntaxTree.GetText(cancellationToken).Lines;

        var line = lines.IndexOf(textSpan.End);
        if (line == lines.Count - 1)
        {
            return true;
        }

        if (string.IsNullOrWhiteSpace(lines[line + 1].ToString()))
        {
            return true;
        }

        return false;
    }

    private static TextSpan GetLineSeparatorSpanForNode(SyntaxNode node)
    {
        // we only want to underline the node with a long line
        // for this purpose the last token is as good as the whole node, but has
        // simpler and typically single line geometry (so it will be easier to find "bottom")
        return node.GetLastToken().Span;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Net.NetworkInformation;
using System.Threading;
using PcapDotNet.Base;
using PcapDotNet.Core.Extensions;
using PcapDotNet.Packets;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using PcapDotNet.Packets.Ethernet;
using PcapDotNet.Packets.IpV6;
using PcapDotNet.Packets.TestUtils;
using PcapDotNet.TestUtils;
namespace PcapDotNet.Core.Test
{
/// <summary>
/// Summary description for LivePacketDeviceTests
/// </summary>
[TestClass]
[ExcludeFromCodeCoverage]
public class LivePacketDeviceTests
{
// No per-test state to initialize; all tests build their own communicator.
public LivePacketDeviceTests()
{
}
/// <summary>
/// Gets or sets the test context which provides
/// information about and functionality for the current test run.
/// </summary>
public TestContext TestContext { get; set; }
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
[TestMethod]
public void SendAndReceievePacketTest()   // (sic) "Receieve" typo kept: renaming would change the published test name.
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPacketsToSend = 10;

    // NOTE(review): 100 appears to be the snapshot length — received packets
    // are asserted to be truncated to 100 bytes below — confirm against
    // OpenLiveDevice's parameter meaning.
    using (PacketCommunicator communicator = OpenLiveDevice(100))
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet packet;

        // Nothing has been sent yet, so this read should time out after
        // roughly one second (see the IsInRange bounds below).
        DateTime startWaiting = DateTime.Now;
        PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out packet);
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(PacketCommunicatorReceiveResult.Timeout, result);
        Assert.AreEqual<uint>(0, communicator.TotalStatistics.PacketsCaptured);
        MoreAssert.IsInRange(TimeSpan.FromSeconds(0.99), TimeSpan.FromSeconds(1.075), finishedWaiting - startWaiting);

        // Send packets matching the filter, then read every one back.
        Packet sentPacket = _random.NextEthernetPacket(200, 300, SourceMac, DestinationMac);

        DateTime startSendingTime = DateTime.Now;
        for (int i = 0; i != NumPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        DateTime endSendingTime = DateTime.Now;

        for (int i = 0; i != NumPacketsToSend; ++i)
        {
            result = communicator.ReceivePacket(out packet);
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            // Captured data is truncated to 100 bytes while the original
            // on-the-wire length (200) is preserved.
            Assert.AreEqual(100, packet.Length);
            Assert.AreEqual<uint>(200, packet.OriginalLength);
            MoreAssert.IsInRange(startSendingTime - TimeSpan.FromSeconds(1), endSendingTime + TimeSpan.FromSeconds(30), packet.Timestamp);
        }

        Assert.AreEqual<uint>(NumPacketsToSend, communicator.TotalStatistics.PacketsCaptured);
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentNullException), AllowDerivedTypes = false)]
public void SendNullPacketTest()
{
    // Sending a null packet must be rejected with ArgumentNullException.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SendPacket(null);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(ArgumentNullException), AllowDerivedTypes = false)]
public void SetNullFilterTest()
{
    // A null filter must be rejected with ArgumentNullException.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        // Cast selects the BerkeleyPacketFilter overload, as in the original.
        liveCommunicator.SetFilter((BerkeleyPacketFilter)null);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(ArgumentNullException), AllowDerivedTypes = false)]
public void SetNullSamplingMethodTest()
{
    // A null sampling method must be rejected with ArgumentNullException.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.SetSamplingMethod(null);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
public void ReceiveSomePacketsTest()
{
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    // NOTE(review): TestReceiveSomePackets is declared elsewhere in this file;
    // the arguments look like (numToSend, numToWaitFor, breakLoopAfter,
    // packetSize, nonBlocking, expectedResult, expectedNumReceived,
    // expectedMinSeconds, expectedMaxSeconds) — confirm against the helper.

    // Test normal mode
    TestReceiveSomePackets(0, 0, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, 0, 1, 1.06);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, 0, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, -1, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.028);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend + 1, int.MaxValue, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.031);

    // Test non blocking
    TestReceiveSomePackets(0, 0, int.MaxValue, PacketSize, true, PacketCommunicatorReceiveResult.Ok, 0, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, int.MaxValue, PacketSize, true, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.02);

    // Test break loop
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, PacketSize, false, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend / 2, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, PacketSize, true, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend / 2, 0, 0.02);
    TestReceiveSomePackets(NumPacketsToSend, NumPacketsToSend, 0, PacketSize, false, PacketCommunicatorReceiveResult.BreakLoop, 0, 0, 0.02);
}
[TestMethod]
public void ReceivePacketsTest()
{
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    // NOTE(review): TestReceivePackets is declared elsewhere in this file; the
    // arguments look like (numToSend, numToWaitFor, breakLoopAfter, seconds,
    // packetSize, expectedResult, expectedNumReceived, expectedMinSeconds,
    // expectedMaxSeconds) — confirm against the helper.

    // Normal
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend, 0, 0.12);

    // Wait for less packets
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend / 2, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.Ok, NumPacketsToSend / 2, 0, 0.04);

    // Wait for more packets
    TestReceivePackets(NumPacketsToSend, 0, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.None, NumPacketsToSend, 2, 2.45);
    TestReceivePackets(NumPacketsToSend, -1, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.None, NumPacketsToSend, 2, 2.3);
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend + 1, int.MaxValue, 2, PacketSize, PacketCommunicatorReceiveResult.None, NumPacketsToSend, 2, 2.16);

    // Break loop
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend, 0, 2, PacketSize, PacketCommunicatorReceiveResult.BreakLoop, 0, 0, 0.027);
    TestReceivePackets(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, 2, PacketSize, PacketCommunicatorReceiveResult.BreakLoop, NumPacketsToSend / 2, 0, 0.046);
}
[TestMethod]
public void ReceivePacketsEnumerableTest()
{
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    // NOTE(review): TestReceivePacketsEnumerable is declared elsewhere in this
    // file; argument order appears to mirror TestReceivePackets minus the
    // expected-result value — confirm against the helper.

    // Normal
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend, int.MaxValue, 2, PacketSize, NumPacketsToSend, 0, 0.3);

    // Wait for less packets
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend / 2, int.MaxValue, 2, PacketSize, NumPacketsToSend / 2, 0, 0.032);

    // Wait for more packets
    TestReceivePacketsEnumerable(NumPacketsToSend, -1, int.MaxValue, 2, PacketSize, NumPacketsToSend, 2, 2.14);
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend + 1, int.MaxValue, 2, PacketSize, NumPacketsToSend, 2, 2.13);

    // Break loop
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend, 0, 2, PacketSize, 0, 0, 0.051);
    TestReceivePacketsEnumerable(NumPacketsToSend, NumPacketsToSend, NumPacketsToSend / 2, 2, PacketSize, NumPacketsToSend / 2, 0, 0.1);
}
[TestMethod]
[Timeout(10 * 1000)]
public void ReceivePacketsGcCollectTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPackets = 2;

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet sentPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int sent = 0; sent < NumPackets; ++sent)
        {
            communicator.SendPacket(sentPacket);
        }

        // Collecting inside the callback must not disturb the receive loop.
        PacketCommunicatorReceiveResult result =
            communicator.ReceivePackets(NumPackets, delegate { GC.Collect(); });

        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
    }
}
[TestMethod]
public void ReceiveSomePacketsGcCollectTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPackets = 2;

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet sentPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int sent = 0; sent < NumPackets; ++sent)
        {
            communicator.SendPacket(sentPacket);
        }

        // Collecting inside the callback must not disturb the receive loop.
        int numGot;
        PacketCommunicatorReceiveResult result = communicator.ReceiveSomePackets(
            out numGot,
            NumPackets,
            delegate { GC.Collect(); });

        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
        Assert.AreEqual(NumPackets, numGot);
    }
}
[TestMethod]
public void ReceiveStatisticsGcCollectTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumStatistics = 2;

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        communicator.Mode = PacketCommunicatorMode.Statistics;

        // Collecting inside the callback must not disturb the statistics loop.
        PacketCommunicatorReceiveResult result =
            communicator.ReceiveStatistics(NumStatistics, delegate { GC.Collect(); });

        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
    }
}
[TestMethod]
public void ReceiveStatisticsTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPacketsToSend = 100;
    const int PacketSize = 100;

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.Mode = PacketCommunicatorMode.Statistics;
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        Packet sentPacket = _random.NextEthernetPacket(PacketSize, SourceMac, DestinationMac);

        // First sample: nothing sent yet, so no packets/bytes accepted.
        PacketSampleStatistics statistics;
        PacketCommunicatorReceiveResult result = communicator.ReceiveStatistics(out statistics);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
        MoreAssert.IsInRange(DateTime.Now.AddSeconds(-1), DateTime.Now.AddSeconds(1), statistics.Timestamp);
        Assert.AreEqual<ulong>(0, statistics.AcceptedPackets);
        Assert.AreEqual<ulong>(0, statistics.AcceptedBytes);

        for (int i = 0; i != NumPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);

        // Second sample must account for every packet just sent.
        result = communicator.ReceiveStatistics(out statistics);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
        MoreAssert.IsInRange(DateTime.Now.AddSeconds(-1), DateTime.Now.AddSeconds(1), statistics.Timestamp);
        Assert.AreEqual<ulong>(NumPacketsToSend, statistics.AcceptedPackets, "AcceptedPackets");
        // Todo check byte statistics. See http://www.winpcap.org/pipermail/winpcap-users/2015-February/004931.html
        // Assert.AreEqual<long>((sentPacket.Length * NumPacketsToSend), statistics.AcceptedBytes,
        //                     "AcceptedBytes. Diff Per Packet: " +
        //                     (statistics.AcceptedBytes - sentPacket.Length * NumPacketsToSend) /
        //                     ((double)NumPacketsToSend));
    }
}
[TestMethod]
public void GetStatisticsTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    const int NumPacketsToSend = 100;
    const int NumStatisticsToGather = 3;
    const int PacketSize = 100;

    // NOTE(review): TestGetStatistics is declared elsewhere in this file; the
    // arguments look like (srcMac, dstMac, numToSend, numStatsToGather,
    // breakLoopAfter, seconds, packetSize, expectedResult, expectedNumStats,
    // expectedNumPackets, expectedMinSeconds, expectedMaxSeconds) — confirm
    // against the helper.

    // Normal
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather, int.MaxValue, 5, PacketSize,
        PacketCommunicatorReceiveResult.Ok, NumStatisticsToGather, NumPacketsToSend, NumStatisticsToGather, NumStatisticsToGather + 0.16);

    // Wait for less statistics
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather / 2, int.MaxValue, 5, PacketSize,
        PacketCommunicatorReceiveResult.Ok, NumStatisticsToGather / 2, NumPacketsToSend, NumStatisticsToGather / 2, NumStatisticsToGather / 2 + 0.17);

    // Wait for more statistics
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, 0, int.MaxValue, 5.5, PacketSize,
        PacketCommunicatorReceiveResult.None, 5, NumPacketsToSend, 5.5, 5.85);

    // Break loop
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather, 0, 5, PacketSize,
        PacketCommunicatorReceiveResult.BreakLoop, 0, 0, 0, 0.04);
    TestGetStatistics(SourceMac, DestinationMac, NumPacketsToSend, NumStatisticsToGather, NumStatisticsToGather / 2, 5, PacketSize,
        PacketCommunicatorReceiveResult.BreakLoop, NumStatisticsToGather / 2, NumPacketsToSend, NumStatisticsToGather / 2, NumStatisticsToGather / 2 + 0.22);
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void GetStatisticsOnCaptureModeErrorTest()
{
    // Statistics reads are illegal while the communicator is in capture mode.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        PacketSampleStatistics ignored;
        liveCommunicator.ReceiveStatistics(out ignored);
    }
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void GetPacketOnStatisticsModeErrorTest()
{
    // Packet reads are illegal while the communicator is in statistics mode.
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        liveCommunicator.Mode = PacketCommunicatorMode.Statistics;

        Packet ignored;
        liveCommunicator.ReceivePacket(out ignored);
    }
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetInvalidModeErrorTest()
{
    using (PacketCommunicator liveCommunicator = OpenLiveDevice())
    {
        // A value outside the PacketCommunicatorMode enum must be rejected.
        const int InvalidModeValue = -99;
        liveCommunicator.Mode = (PacketCommunicatorMode)InvalidModeValue;
    }
}
// this test is removed for now since it doesn't throw an exception for such a big value
// [TestMethod]
// [ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
// public void SetBigKernelBufferSizeErrorTest()
// {
// using (PacketCommunicator communicator = OpenLiveDevice())
// {
// communicator.SetKernelBufferSize(1024 * 1024 * 1024);
// }
// }
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetPacketErrorTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        // Receiving with a 10-byte kernel buffer is expected to throw.
        communicator.SetKernelBufferSize(10);

        Packet sentPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        communicator.SendPacket(sentPacket);

        Packet receivedPacket;
        communicator.ReceivePacket(out receivedPacket);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetSomePacketsErrorTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        // Receiving with a 10-byte kernel buffer is expected to throw.
        communicator.SetKernelBufferSize(10);

        Packet sentPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        communicator.SendPacket(sentPacket);

        int numPacketsGot;
        communicator.ReceiveSomePackets(out numPacketsGot, 1, delegate { });
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetPacketsErrorTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // Receiving with a 10-byte kernel buffer is expected to throw.
        communicator.SetKernelBufferSize(10);
        Packet packet = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        communicator.SendPacket(packet);

        // ReceivePackets blocks, so run it on a worker thread, capture any
        // exception, and rethrow it here so ExpectedException can observe it.
        Exception exception = null;
        Thread thread = new Thread(delegate()
        {
            try
            {
                communicator.ReceivePackets(1, delegate { });
            }
            catch (Exception e)
            {
                exception = e;
            }
        });
        thread.Start();
        // NOTE(review): Thread.Abort is deprecated and unsupported on modern
        // .NET; revisit this safety-net if the test suite is ever ported.
        if (!thread.Join(TimeSpan.FromSeconds(5)))
            thread.Abort();
        if (exception != null)
            throw exception;
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetNextStatisticsErrorTest()
{
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.Mode = PacketCommunicatorMode.Statistics;

        // Sampling statistics with a 10-byte kernel buffer is expected to throw.
        communicator.SetKernelBufferSize(10);

        PacketSampleStatistics ignored;
        communicator.ReceiveStatistics(out ignored);
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetSmallKernelBufferSizeGetStatisticsErrorTest()
{
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.Mode = PacketCommunicatorMode.Statistics;

        // Sampling statistics with a 10-byte kernel buffer is expected to
        // throw before the callback ever runs.
        communicator.SetKernelBufferSize(10);
        communicator.ReceiveStatistics(1, delegate { Assert.Fail(); });
    }

    // Unreachable when the expected exception is thrown.
    Assert.Fail();
}
[TestMethod]
public void SetBigKernelMinimumBytesToCopyTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";

    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);

        // With a 1 MB minimum-bytes-to-copy, a single 100-byte packet is not
        // expected to be delivered immediately — the elapsed-time assertion
        // below distinguishes this from the small-minimum case tested in
        // SetSmallKernelMinimumBytesToCopyTest.
        communicator.SetKernelMinimumBytesToCopy(1024 * 1024);

        Packet expectedPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int i = 0; i != 5; ++i)
        {
            communicator.SendPacket(expectedPacket);
            Packet packet;
            DateTime start = DateTime.Now;
            PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out packet);
            DateTime end = DateTime.Now;
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(expectedPacket, packet);
            MoreAssert.IsBigger(TimeSpan.FromSeconds(0.9), end - start);
        }
    }
}
[TestMethod]
public void SetSmallKernelMinimumBytesToCopyTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator device = OpenLiveDevice())
    {
        device.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // With a 1-byte minimum-copy threshold each packet should be handed to
        // user land immediately, so every read must complete quickly.
        device.SetKernelMinimumBytesToCopy(1);
        Packet expectedPacket = _random.NextEthernetPacket(100, SourceMac, DestinationMac);
        for (int attempt = 0; attempt < 100; ++attempt)
        {
            device.SendPacket(expectedPacket);
            Packet received;
            DateTime before = DateTime.Now;
            PacketCommunicatorReceiveResult result = device.ReceivePacket(out received);
            DateTime after = DateTime.Now;
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(expectedPacket, received);
            MoreAssert.IsSmallerOrEqual(TimeSpan.FromSeconds(0.07), after - before);
        }
    }
}
[TestMethod]
public void SetSamplingMethodOneEveryNTest()
{
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator device = OpenLiveDevice())
    {
        device.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        // Sample one of every 5 packets.
        device.SetSamplingMethod(new SamplingMethodOneEveryCount(5));
        // Send 20 packets with strictly increasing sizes so the sampled ones can
        // be identified by their length.
        for (int sent = 0; sent != 20; ++sent)
        {
            Packet packetToSend = _random.NextEthernetPacket(60 * (sent + 1), SourceMac, DestinationMac);
            device.SendPacket(packetToSend);
        }
        PacketCommunicatorReceiveResult result;
        Packet received;
        // The asserted lengths (300, 600, 900, 1200) correspond to every 5th packet sent.
        for (int sampleIndex = 0; sampleIndex != 4; ++sampleIndex)
        {
            result = device.ReceivePacket(out received);
            Assert.AreEqual(PacketCommunicatorReceiveResult.Ok, result);
            Assert.AreEqual(60 * 5 * (sampleIndex + 1), received.Length);
        }
        // No fifth sample exists: the read times out and yields no packet.
        result = device.ReceivePacket(out received);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Timeout, result);
        Assert.IsNull(received);
    }
}
[TestMethod]
public void SetSamplingMethodFirstAfterIntervalTest()
{
    Random random = new Random();
    MacAddress sourceMac = random.NextMacAddress();
    MacAddress destinationMac = random.NextMacAddress();
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + sourceMac + " and ether dst " + destinationMac);
        // Sample only the first packet arriving in each 1-second interval.
        communicator.SetSamplingMethod(new SamplingMethodFirstAfterInterval(TimeSpan.FromSeconds(1)));
        int numPacketsGot;
        // Drain anything already pending so the timing below starts clean.
        communicator.ReceiveSomePackets(out numPacketsGot, 100, p => { });
        // Packet i has length 60*(i+1), so samples can be identified by size.
        Packet[] packetsToSend = new Packet[11];
        packetsToSend[0] = _random.NextEthernetPacket(60, sourceMac, destinationMac);
        for (int i = 0; i != 10; ++i)
            packetsToSend[i + 1] = _random.NextEthernetPacket(60 * (i + 2), sourceMac, destinationMac);
        List<Packet> packets = new List<Packet>(6);
        Thread thread = new Thread(() => packets.AddRange(communicator.ReceivePackets(6)));
        thread.Start();
        // Send the first packet immediately, then one every 0.55s (after an
        // initial 0.7s gap), so roughly every second packet opens a new interval.
        communicator.SendPacket(packetsToSend[0]);
        Thread.Sleep(TimeSpan.FromSeconds(0.7));
        for (int i = 0; i != 10; ++i)
        {
            communicator.SendPacket(packetsToSend[i + 1]);
            Thread.Sleep(TimeSpan.FromSeconds(0.55));
        }
        if (!thread.Join(TimeSpan.FromSeconds(10)))
            thread.Abort();
        // On failure, report the timestamps and sizes actually captured.
        Assert.AreEqual(6, packets.Count, packets.Select(p => (p.Timestamp-packets[0].Timestamp).TotalSeconds + "(" + p.Length + ")").SequenceToString(", "));
        Packet packet;
        // Expect every second sent packet: lengths 60, 180, 300, 420, 540, 660.
        for (int i = 0; i != 6; ++i)
            Assert.AreEqual(60 * (i * 2 + 1), packets[i].Length, i.ToString());
        // Nothing further should be sampled.
        PacketCommunicatorReceiveResult result = communicator.ReceivePacket(out packet);
        Assert.AreEqual(PacketCommunicatorReceiveResult.Timeout, result);
        Assert.IsNull(packet);
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodOneEveryNErrorTest()
{
    // A zero count is invalid for one-every-N sampling and must be rejected.
    using (PacketCommunicator device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodOneEveryCount(0));
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodFirstAfterIntervalNegativeMsErrorTest()
{
    // A negative millisecond interval is invalid and must be rejected.
    using (PacketCommunicator device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodFirstAfterInterval(-1));
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodFirstAfterIntervalNegativeTimespanErrorTest()
{
    // A negative TimeSpan interval is invalid and must be rejected.
    using (PacketCommunicator device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodFirstAfterInterval(TimeSpan.FromSeconds(-1)));
    }
}
[TestMethod]
[ExpectedException(typeof(ArgumentOutOfRangeException), AllowDerivedTypes = false)]
public void SetSamplingMethodFirstAfterIntervalBigTimespanErrorTest()
{
    // A 25-day interval is out of the supported range and must be rejected.
    using (PacketCommunicator device = OpenLiveDevice())
    {
        device.SetSamplingMethod(new SamplingMethodFirstAfterInterval(TimeSpan.FromDays(25)));
    }
}
[TestMethod]
[ExpectedException(typeof(InvalidOperationException), AllowDerivedTypes = false)]
public void SetInvalidDataLink()
{
    using (PacketCommunicator device = OpenLiveDevice())
    {
        // Data link value 0 is presumably unsupported by this Ethernet device;
        // the setter is expected to throw before the assertion is reached.
        device.DataLink = new PcapDataLink(0);
        Assert.AreEqual(new PcapDataLink(0), device.DataLink);
    }
}
[TestMethod]
public void SendZeroPacket()
{
    // Sending an empty (zero-byte) packet must not throw.
    using (PacketCommunicator device = OpenLiveDevice())
    {
        Packet emptyPacket = new Packet(new byte[0], DateTime.Now, DataLinkKind.Ethernet);
        device.SendPacket(emptyPacket);
    }
}
// Helper: sends numPacketsToSend packets in statistics mode, gathers statistics
// samples on a worker thread (aborted after secondsToWait), and asserts the
// receive result, the number of samples, the accepted-packet total and the
// elapsed-time range.
private static void TestGetStatistics(string sourceMac, string destinationMac, int numPacketsToSend, int numStatisticsToGather, int numStatisticsToBreakLoop, double secondsToWait, int packetSize,
    PacketCommunicatorReceiveResult expectedResult, int expectedNumStatistics, int expectedNumPackets, double expectedMinSeconds, double expectedMaxSeconds)
{
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.Mode = PacketCommunicatorMode.Statistics;
        communicator.SetFilter("ether src " + sourceMac + " and ether dst " + destinationMac);
        Packet sentPacket = _random.NextEthernetPacket(packetSize, sourceMac, destinationMac);
        PacketCommunicatorReceiveResult result = PacketCommunicatorReceiveResult.None;
        int numStatisticsGot = 0;
        ulong totalPackets = 0;
        ulong totalBytes = 0;
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        // numStatisticsToBreakLoop == 0 exercises the Break-before-receive path.
        if (numStatisticsToBreakLoop == 0)
            communicator.Break();
        // The receive loop mutates the closed-over counters; those writes are
        // observed by the test thread only after Join/Abort below.
        Thread thread = new Thread(delegate()
        {
            result = communicator.ReceiveStatistics(numStatisticsToGather,
                delegate(PacketSampleStatistics statistics)
                {
                    Assert.IsNotNull(statistics.ToString());
                    totalPackets += statistics.AcceptedPackets;
                    totalBytes += statistics.AcceptedBytes;
                    ++numStatisticsGot;
                    // Break out of the receive loop once enough samples arrived.
                    if (numStatisticsGot >= numStatisticsToBreakLoop)
                        communicator.Break();
                });
        });
        DateTime startWaiting = DateTime.Now;
        thread.Start();
        // Abort the receive thread if it does not finish within the allotted time.
        if (!thread.Join(TimeSpan.FromSeconds(secondsToWait)))
            thread.Abort();
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedResult, result, "Result");
        Assert.AreEqual(expectedNumStatistics, numStatisticsGot, "NumStatistics");
        Assert.AreEqual((ulong)expectedNumPackets, totalPackets, "NumPackets");
        // Todo check byte statistics. See http://www.winpcap.org/pipermail/winpcap-users/2015-February/004931.html
        // Assert.AreEqual((ulong)(numPacketsToSend * sentPacket.Length), totalBytes, "NumBytes");
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds);
    }
}
// Helper: sends numPacketsToSend packets, calls ReceiveSomePackets once (optionally
// in non-blocking mode) and asserts the result, the number of packets handled and
// the elapsed-time range.
private static void TestReceiveSomePackets(int numPacketsToSend, int numPacketsToGet, int numPacketsToBreakLoop, int packetSize, bool nonBlocking,
    PacketCommunicatorReceiveResult expectedResult, int expectedNumPackets, double expectedMinSeconds, double expectedMaxSeconds)
{
    // Scenario description, appended to assertion failures for diagnosis.
    string testDescription = "NumPacketsToSend=" + numPacketsToSend + ". NumPacketsToGet=" + numPacketsToGet +
                             ". NumPacketsToBreakLoop=" + numPacketsToBreakLoop + ". PacketSize=" + packetSize +
                             ". NonBlocking=" + nonBlocking;
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    Packet packetToSend = _random.NextEthernetPacket(packetSize, SourceMac, DestinationMac);
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.NonBlocking = nonBlocking;
        Assert.AreEqual(nonBlocking, communicator.NonBlocking);
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        int numPacketsGot;
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(packetToSend);
        // numPacketsToBreakLoop == 0 exercises the Break-before-receive path.
        if (numPacketsToBreakLoop == 0)
            communicator.Break();
        // Handler presumably validates each packet against packetToSend and breaks
        // the loop after numPacketsToBreakLoop packets - see PacketHandler.
        PacketHandler handler = new PacketHandler(packetToSend, communicator, numPacketsToBreakLoop);
        DateTime startWaiting = DateTime.Now;
        PacketCommunicatorReceiveResult result = communicator.ReceiveSomePackets(out numPacketsGot, numPacketsToGet,
            handler.Handle);
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedResult, result);
        Assert.AreEqual(expectedNumPackets, numPacketsGot, "NumPacketsGot. Test: " + testDescription);
        Assert.AreEqual(expectedNumPackets, handler.NumPacketsHandled, "NumPacketsHandled. Test: " + testDescription);
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds, testDescription);
    }
}
// Helper: sends numPacketsToSend packets, then runs ReceivePackets on a worker
// thread (aborted after secondsToWait) and asserts the receive result, the number
// of packets handled and the elapsed-time range.
private static void TestReceivePackets(int numPacketsToSend, int numPacketsToWait, int numPacketsToBreakLoop, double secondsToWait, int packetSize,
    PacketCommunicatorReceiveResult expectedResult, int expectedNumPackets,
    double expectedMinSeconds, double expectedMaxSeconds)
{
    // Scenario description, appended to assertion failures for diagnosis.
    string testDescription = "NumPacketsToSend=" + numPacketsToSend + ". NumPacketsToWait=" + numPacketsToWait +
                             ". NumPacketsToBreakLoop=" + numPacketsToBreakLoop + ". SecondsToWait=" +
                             secondsToWait + ". PacketSize=" + packetSize;
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        Packet sentPacket = _random.NextEthernetPacket(packetSize, SourceMac, DestinationMac);
        PacketCommunicatorReceiveResult result = PacketCommunicatorReceiveResult.None;
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        // Handler presumably validates each packet against sentPacket and breaks
        // the loop after numPacketsToBreakLoop packets - see PacketHandler.
        PacketHandler handler = new PacketHandler(sentPacket, communicator, numPacketsToBreakLoop);
        Thread thread = new Thread(delegate()
        {
            // numPacketsToBreakLoop == 0 exercises the Break-before-receive path.
            if (numPacketsToBreakLoop == 0)
                communicator.Break();
            result = communicator.ReceivePackets(numPacketsToWait, handler.Handle);
        });
        DateTime startWaiting = DateTime.Now;
        thread.Start();
        // Abort the receive thread if it does not finish within the allotted time.
        if (!thread.Join(TimeSpan.FromSeconds(secondsToWait)))
            thread.Abort();
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedResult, result, testDescription);
        Assert.AreEqual(expectedNumPackets, handler.NumPacketsHandled);
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds);
    }
}
// Helper: like TestReceivePackets, but consumes the IEnumerable-based
// ReceivePackets overloads (numPacketsToWait == -1 selects the unbounded
// overload) and counts the packets enumerated before breaking/aborting.
private static void TestReceivePacketsEnumerable(int numPacketsToSend, int numPacketsToWait, int numPacketsToBreakLoop, double secondsToWait,
    int packetSize, int expectedNumPackets, double expectedMinSeconds, double expectedMaxSeconds)
{
    // Scenario description, appended to assertion failures for diagnosis.
    string testDescription = "NumPacketsToSend=" + numPacketsToSend + ". NumPacketsToWait=" + numPacketsToWait +
                             ". NumPacketsToBreakLoop=" + numPacketsToBreakLoop + ". SecondsToWait=" +
                             secondsToWait + ". PacketSize=" + packetSize;
    const string SourceMac = "11:22:33:44:55:66";
    const string DestinationMac = "77:88:99:AA:BB:CC";
    using (PacketCommunicator communicator = OpenLiveDevice())
    {
        communicator.SetFilter("ether src " + SourceMac + " and ether dst " + DestinationMac);
        Packet sentPacket = _random.NextEthernetPacket(packetSize, SourceMac, DestinationMac);
        for (int i = 0; i != numPacketsToSend; ++i)
            communicator.SendPacket(sentPacket);
        int actualPacketsReceived = 0;
        Thread thread = new Thread(delegate()
        {
            // numPacketsToBreakLoop == 0 exercises the Break-before-receive path.
            if (numPacketsToBreakLoop == 0)
                communicator.Break();
            IEnumerable<Packet> packets = numPacketsToWait == -1
                ? communicator.ReceivePackets()
                : communicator.ReceivePackets(numPacketsToWait);
            foreach (Packet packet in packets)
            {
                Assert.AreEqual(sentPacket, packet);
                ++actualPacketsReceived;
                // Stop enumerating early once the break threshold is reached.
                if (actualPacketsReceived == numPacketsToBreakLoop)
                    break;
            }
        });
        DateTime startWaiting = DateTime.Now;
        thread.Start();
        // Abort the enumerating thread if it does not finish within the allotted time.
        if (!thread.Join(TimeSpan.FromSeconds(secondsToWait)))
            thread.Abort();
        DateTime finishedWaiting = DateTime.Now;
        Assert.AreEqual(expectedNumPackets, actualPacketsReceived, testDescription);
        MoreAssert.IsInRange(expectedMinSeconds, expectedMaxSeconds, (finishedWaiting - startWaiting).TotalSeconds, testDescription);
    }
}
/// <summary>
/// Opens the first operational, non-receive-only Ethernet adapter for live
/// capture, sanity-checks the device metadata and the communicator's initial
/// state, and returns the opened communicator. The caller disposes it.
/// </summary>
public static PacketCommunicator OpenLiveDevice(int snapshotLength)
{
    NetworkInterface networkInterface =
        NetworkInterface.GetAllNetworkInterfaces().FirstOrDefault(
            ni => !ni.IsReceiveOnly && ni.NetworkInterfaceType == NetworkInterfaceType.Ethernet && ni.OperationalStatus == OperationalStatus.Up);
    // FirstOrDefault returns null when no adapter matches; fail with a clear
    // message instead of a NullReferenceException on the next line.
    Assert.IsNotNull(networkInterface, "No operational Ethernet network interface was found to run the test on.");
    LivePacketDevice device = networkInterface.GetLivePacketDevice();
    Assert.IsNotNull(device, "No live packet device matches the selected network interface.");
    MoreAssert.IsMatch(@"Network adapter '.*' on local host", device.Description);
    Assert.AreEqual(DeviceAttributes.None, device.Attributes);
    Assert.AreNotEqual(MacAddress.Zero, device.GetMacAddress());
    Assert.AreNotEqual(string.Empty, device.GetPnpDeviceId());
    MoreAssert.IsBiggerOrEqual(1, device.Addresses.Count);
    foreach (DeviceAddress address in device.Addresses)
    {
        if (address.Address.Family == SocketAddressFamily.Internet)
        {
            MoreAssert.IsMatch("Address: " + SocketAddressFamily.Internet + @" [0-9]+\.[0-9]+\.[0-9]+\.[0-9]+ " +
                "Netmask: " + SocketAddressFamily.Internet + @" 255\.[0-9]+\.[0-9]+\.[0-9]+ " +
                "Broadcast: " + SocketAddressFamily.Internet + @" 255.255.255.255",
                address.ToString());
        }
        else
        {
            // Anything that is not IPv4 must be IPv6 with an unspecified netmask/broadcast.
            Assert.AreEqual(SocketAddressFamily.Internet6, address.Address.Family);
            MoreAssert.IsMatch("Address: " + SocketAddressFamily.Internet6 + @" (?:[0-9A-F]{4}:){7}[0-9A-F]{4} " +
                "Netmask: " + SocketAddressFamily.Unspecified + @" " + IpV6Address.Zero + " " +
                "Broadcast: " + SocketAddressFamily.Unspecified + @" " + IpV6Address.Zero,
                address.ToString());
        }
    }
    PacketCommunicator communicator = device.Open(snapshotLength, PacketDeviceOpenAttributes.Promiscuous, 1000);
    try
    {
        MoreAssert.AreSequenceEqual(new[] {DataLinkKind.Ethernet, DataLinkKind.Docsis}.Select(kind => new PcapDataLink(kind)), communicator.SupportedDataLinks);
        // Exercise the equality contract of PacketTotalStatistics.
        PacketTotalStatistics totalStatistics = communicator.TotalStatistics;
        Assert.AreEqual<object>(totalStatistics, totalStatistics);
        Assert.AreNotEqual(null, totalStatistics);
        Assert.AreEqual(totalStatistics.GetHashCode(), totalStatistics.GetHashCode());
        Assert.IsTrue(totalStatistics.Equals(totalStatistics));
        Assert.IsFalse(totalStatistics.Equals(null));
        Assert.AreNotEqual(null, totalStatistics);
        Assert.AreNotEqual(totalStatistics, 2);
        MoreAssert.IsSmallerOrEqual<uint>(1, totalStatistics.PacketsCaptured, "PacketsCaptured");
        Assert.AreEqual<uint>(0, totalStatistics.PacketsDroppedByDriver, "PacketsDroppedByDriver");
        Assert.AreEqual<uint>(0, totalStatistics.PacketsDroppedByInterface, "PacketsDroppedByInterface");
        MoreAssert.IsSmallerOrEqual<uint>(1, totalStatistics.PacketsReceived, "PacketsReceived");
        Assert.IsNotNull(totalStatistics.ToString());
        communicator.SetKernelBufferSize(2 * 1024 * 1024); // 2 MB instead of 1
        communicator.SetKernelMinimumBytesToCopy(10); // 10 bytes minimum to copy
        communicator.SetSamplingMethod(new SamplingMethodNone());
        Assert.AreEqual(DataLinkKind.Ethernet, communicator.DataLink.Kind);
        communicator.DataLink = communicator.DataLink;
        Assert.AreEqual("EN10MB (Ethernet)", communicator.DataLink.ToString());
        Assert.AreEqual(communicator.DataLink, new PcapDataLink(communicator.DataLink.Name));
        Assert.IsTrue(communicator.IsFileSystemByteOrder);
        Assert.AreEqual(PacketCommunicatorMode.Capture, communicator.Mode);
        Assert.IsFalse(communicator.NonBlocking);
        Assert.AreEqual(snapshotLength, communicator.SnapshotLength);
        return communicator;
    }
    catch (Exception)
    {
        // Never leak the communicator if any sanity check above fails.
        communicator.Dispose();
        throw;
    }
}
// Opens the live device with the default snapshot length.
public static PacketCommunicator OpenLiveDevice()
{
    return OpenLiveDevice(PacketDevice.DefaultSnapshotLength);
}

// Shared randomizer used to build the test packets.
private static readonly Random _random = new Random();
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
** Purpose: This class will encapsulate a short and provide an
** Object representation of it.
**
**
===========================================================*/
using System.Globalization;
using System;
using System.Runtime.InteropServices;
using System.Diagnostics.Contracts;
namespace System
{
// Wrapper for unsigned 16 bit integers.
[Serializable]
[CLSCompliant(false), System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)]
public struct UInt16 : IComparable, IFormattable, IConvertible
    , IComparable<UInt16>, IEquatable<UInt16>
{
    // The wrapped value - the struct's only instance field.
    private ushort m_value;

    public const ushort MaxValue = (ushort)0xFFFF;
    public const ushort MinValue = 0;

    // Compares this object to another object, returning an integer that
    // indicates the relationship.
    // Returns a value less than zero if this object
    // null is considered to be less than any instance.
    // If object is not of type UInt16, this method throws an ArgumentException.
    //
    public int CompareTo(Object value)
    {
        if (value == null)
        {
            return 1;
        }
        if (value is UInt16)
        {
            // Both operands are in [0, 65535], so the int subtraction cannot
            // overflow and its sign encodes the ordering.
            return ((int)m_value - (int)(((UInt16)value).m_value));
        }
        throw new ArgumentException(Environment.GetResourceString("Arg_MustBeUInt16"));
    }

    // Typed comparison; same non-overflowing subtraction as above.
    public int CompareTo(UInt16 value)
    {
        return ((int)m_value - (int)value);
    }

    // True only for another boxed UInt16 holding the same value.
    public override bool Equals(Object obj)
    {
        if (!(obj is UInt16))
        {
            return false;
        }
        return m_value == ((UInt16)obj).m_value;
    }

    [System.Runtime.Versioning.NonVersionable]
    public bool Equals(UInt16 obj)
    {
        return m_value == obj;
    }

    // Returns a HashCode for the UInt16: the value itself (already distinct per value).
    public override int GetHashCode()
    {
        return (int)m_value;
    }

    // Converts the current value to a String in base-10 with no extra padding.
    // The value is widened losslessly to uint and formatted by the shared
    // unsigned-integer formatter.
    public override String ToString()
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatUInt32(m_value, null, NumberFormatInfo.CurrentInfo);
    }

    // Default formatting with the supplied culture/format provider.
    public String ToString(IFormatProvider provider)
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatUInt32(m_value, null, NumberFormatInfo.GetInstance(provider));
    }

    // Formatting with an explicit format string and the current culture.
    public String ToString(String format)
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatUInt32(m_value, format, NumberFormatInfo.CurrentInfo);
    }

    // Formatting with an explicit format string and format provider.
    public String ToString(String format, IFormatProvider provider)
    {
        Contract.Ensures(Contract.Result<String>() != null);
        return Number.FormatUInt32(m_value, format, NumberFormatInfo.GetInstance(provider));
    }

    // All public Parse overloads funnel into the private Parse below.
    [CLSCompliant(false)]
    public static ushort Parse(String s)
    {
        return Parse(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo);
    }

    [CLSCompliant(false)]
    public static ushort Parse(String s, NumberStyles style)
    {
        NumberFormatInfo.ValidateParseStyleInteger(style);
        return Parse(s, style, NumberFormatInfo.CurrentInfo);
    }

    [CLSCompliant(false)]
    public static ushort Parse(String s, IFormatProvider provider)
    {
        return Parse(s, NumberStyles.Integer, NumberFormatInfo.GetInstance(provider));
    }

    [CLSCompliant(false)]
    public static ushort Parse(String s, NumberStyles style, IFormatProvider provider)
    {
        NumberFormatInfo.ValidateParseStyleInteger(style);
        return Parse(s, style, NumberFormatInfo.GetInstance(provider));
    }

    // Parses via the shared 32-bit parser, then range-checks the result.
    // An overflow raised inside the parser is rewrapped so the exception
    // message names UInt16 rather than UInt32.
    private static ushort Parse(String s, NumberStyles style, NumberFormatInfo info)
    {
        uint i = 0;
        try
        {
            i = Number.ParseUInt32(s, style, info);
        }
        catch (OverflowException e)
        {
            throw new OverflowException(Environment.GetResourceString("Overflow_UInt16"), e);
        }
        if (i > MaxValue) throw new OverflowException(Environment.GetResourceString("Overflow_UInt16"));
        return (ushort)i;
    }

    // Non-throwing parse with the default integer style and current culture.
    [CLSCompliant(false)]
    public static bool TryParse(String s, out UInt16 result)
    {
        return TryParse(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result);
    }

    [CLSCompliant(false)]
    public static bool TryParse(String s, NumberStyles style, IFormatProvider provider, out UInt16 result)
    {
        NumberFormatInfo.ValidateParseStyleInteger(style);
        return TryParse(s, style, NumberFormatInfo.GetInstance(provider), out result);
    }

    // Parses as UInt32 and fails (returns false) when the value exceeds
    // MaxValue. result is 0 on every failure path.
    private static bool TryParse(String s, NumberStyles style, NumberFormatInfo info, out UInt16 result)
    {
        result = 0;
        UInt32 i;
        if (!Number.TryParseUInt32(s, style, info, out i))
        {
            return false;
        }
        if (i > MaxValue)
        {
            return false;
        }
        result = (UInt16)i;
        return true;
    }

    //
    // IConvertible implementation
    //

    public TypeCode GetTypeCode()
    {
        return TypeCode.UInt16;
    }

    /// <internalonly/>
    bool IConvertible.ToBoolean(IFormatProvider provider)
    {
        return Convert.ToBoolean(m_value);
    }

    /// <internalonly/>
    char IConvertible.ToChar(IFormatProvider provider)
    {
        return Convert.ToChar(m_value);
    }

    /// <internalonly/>
    sbyte IConvertible.ToSByte(IFormatProvider provider)
    {
        return Convert.ToSByte(m_value);
    }

    /// <internalonly/>
    byte IConvertible.ToByte(IFormatProvider provider)
    {
        return Convert.ToByte(m_value);
    }

    /// <internalonly/>
    short IConvertible.ToInt16(IFormatProvider provider)
    {
        return Convert.ToInt16(m_value);
    }

    /// <internalonly/>
    ushort IConvertible.ToUInt16(IFormatProvider provider)
    {
        // Identity conversion - no Convert call needed.
        return m_value;
    }

    /// <internalonly/>
    int IConvertible.ToInt32(IFormatProvider provider)
    {
        return Convert.ToInt32(m_value);
    }

    /// <internalonly/>
    uint IConvertible.ToUInt32(IFormatProvider provider)
    {
        return Convert.ToUInt32(m_value);
    }

    /// <internalonly/>
    long IConvertible.ToInt64(IFormatProvider provider)
    {
        return Convert.ToInt64(m_value);
    }

    /// <internalonly/>
    ulong IConvertible.ToUInt64(IFormatProvider provider)
    {
        return Convert.ToUInt64(m_value);
    }

    /// <internalonly/>
    float IConvertible.ToSingle(IFormatProvider provider)
    {
        return Convert.ToSingle(m_value);
    }

    /// <internalonly/>
    double IConvertible.ToDouble(IFormatProvider provider)
    {
        return Convert.ToDouble(m_value);
    }

    /// <internalonly/>
    Decimal IConvertible.ToDecimal(IFormatProvider provider)
    {
        return Convert.ToDecimal(m_value);
    }

    /// <internalonly/>
    // There is no meaningful UInt16-to-DateTime conversion; always throws.
    DateTime IConvertible.ToDateTime(IFormatProvider provider)
    {
        throw new InvalidCastException(Environment.GetResourceString("InvalidCast_FromTo", "UInt16", "DateTime"));
    }

    /// <internalonly/>
    Object IConvertible.ToType(Type type, IFormatProvider provider)
    {
        return Convert.DefaultToType((IConvertible)this, type, provider);
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using System.Runtime.CompilerServices;
namespace System.Linq.Expressions.Interpreter
{
internal abstract class NotEqualInstruction : Instruction
{
// Perf: EqualityComparer<T> but is 3/2 to 2 times slower.
// Cached singleton instances, created lazily (one per operand type).
private static Instruction s_reference,s_boolean,s_SByte,s_int16,s_char,s_int32,s_int64,s_byte,s_UInt16,s_UInt32,s_UInt64,s_single,s_double;
// Lifted-to-null variants: a null operand produces a null result instead of a bool.
private static Instruction s_referenceLiftedToNull,s_booleanLiftedToNull,s_SByteLiftedToNull,s_int16LiftedToNull,s_charLiftedToNull,s_int32LiftedToNull,s_int64LiftedToNull,s_byteLiftedToNull,s_UInt16LiftedToNull,s_UInt32LiftedToNull,s_UInt64LiftedToNull,s_singleLiftedToNull,s_doubleLiftedToNull;

// Every NotEqual instruction pops two operands and pushes a single result.
public override int ConsumedStack { get { return 2; } }
public override int ProducedStack { get { return 1; } }

public override string InstructionName
{
    get { return "NotEqual"; }
}

// Instantiated only through the nested type-specific subclasses.
private NotEqualInstruction()
{
}
internal sealed class NotEqualBoolean : NotEqualInstruction
{
    // Pops two boxed Booleans and pushes a boxed inequality result.
    // Two nulls are equal; null vs non-null is unequal.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Boolean)lhs != (Boolean)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualSByte : NotEqualInstruction
{
    // Pops two boxed SBytes and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((SByte)lhs != (SByte)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualInt16 : NotEqualInstruction
{
    // Pops two boxed Int16s and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Int16)lhs != (Int16)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualChar : NotEqualInstruction
{
    // Pops two boxed Chars and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Char)lhs != (Char)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualInt32 : NotEqualInstruction
{
    // Pops two boxed Int32s and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Int32)lhs != (Int32)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualInt64 : NotEqualInstruction
{
    // Pops two boxed Int64s and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Int64)lhs != (Int64)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualByte : NotEqualInstruction
{
    // Pops two boxed Bytes and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Byte)lhs != (Byte)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualUInt16 : NotEqualInstruction
{
    // Pops two boxed UInt16s and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((UInt16)lhs != (UInt16)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualUInt32 : NotEqualInstruction
{
    // Pops two boxed UInt32s and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((UInt32)lhs != (UInt32)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualUInt64 : NotEqualInstruction
{
    // Pops two boxed UInt64s and pushes a boxed inequality result.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((UInt64)lhs != (UInt64)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualSingle : NotEqualInstruction
{
    // Pops two boxed Singles and pushes a boxed inequality result.
    // Uses the C# != operator, so IEEE semantics apply (NaN != NaN is true).
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Single)lhs != (Single)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualDouble : NotEqualInstruction
{
    // Pops two boxed Doubles and pushes a boxed inequality result.
    // Uses the C# != operator, so IEEE semantics apply (NaN != NaN is true).
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        object outcome;
        if (lhs == null)
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject(rhs != null);
        }
        else if (rhs == null)
        {
            outcome = ScriptingRuntimeHelpers.True;
        }
        else
        {
            outcome = ScriptingRuntimeHelpers.BooleanToObject((Double)lhs != (Double)rhs);
        }
        frame.Push(outcome);
        return +1;
    }
}
internal sealed class NotEqualReference : NotEqualInstruction
{
    // Reference inequality: pops both operands and pushes a boxed bool.
    // Order of the pops is irrelevant because the comparison is symmetric.
    public override int Run(InterpretedFrame frame)
    {
        object first = frame.Pop();
        object second = frame.Pop();
        frame.Push(ScriptingRuntimeHelpers.BooleanToObject(first != second));
        return +1;
    }
}
internal sealed class NotEqualBooleanLiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((Boolean)lhs != (Boolean)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualSByteLiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((SByte)lhs != (SByte)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualInt16LiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((Int16)lhs != (Int16)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualCharLiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((Char)lhs != (Char)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualInt32LiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((Int32)lhs != (Int32)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualInt64LiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((Int64)lhs != (Int64)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualByteLiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((Byte)lhs != (Byte)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualUInt16LiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((UInt16)lhs != (UInt16)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
internal sealed class NotEqualUInt32LiftedToNull : NotEqualInstruction
{
    // Lifted semantics: a null operand yields a null result, otherwise a boxed bool.
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        if (lhs != null && rhs != null)
        {
            frame.Push(ScriptingRuntimeHelpers.BooleanToObject((UInt32)lhs != (UInt32)rhs));
        }
        else
        {
            frame.Push(null);
        }
        return +1;
    }
}
// Lifted-to-null '!=' over boxed UInt64 operands: null if either side is null.
internal sealed class NotEqualUInt64LiftedToNull : NotEqualInstruction
{
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        frame.Push(lhs == null || rhs == null
            ? null
            : ScriptingRuntimeHelpers.BooleanToObject((UInt64)lhs != (UInt64)rhs));
        return 1;
    }
}
// Lifted-to-null '!=' over boxed Single operands: null if either side is null.
// (IEEE semantics of '!=' are preserved: NaN != NaN is true.)
internal sealed class NotEqualSingleLiftedToNull : NotEqualInstruction
{
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        frame.Push(lhs == null || rhs == null
            ? null
            : ScriptingRuntimeHelpers.BooleanToObject((Single)lhs != (Single)rhs));
        return 1;
    }
}
// Lifted-to-null '!=' over boxed Double operands: null if either side is null.
// (IEEE semantics of '!=' are preserved: NaN != NaN is true.)
internal sealed class NotEqualDoubleLiftedToNull : NotEqualInstruction
{
    public override int Run(InterpretedFrame frame)
    {
        object rhs = frame.Pop();
        object lhs = frame.Pop();
        frame.Push(lhs == null || rhs == null
            ? null
            : ScriptingRuntimeHelpers.BooleanToObject((Double)lhs != (Double)rhs));
        return 1;
    }
}
// Reference '!=' for the lifted case: always pushes a boxed Boolean
// (reference identity, never null — nulls compare like any other reference).
internal sealed class NotEqualReferenceLiftedToNull : NotEqualInstruction
{
    public override int Run(InterpretedFrame frame)
    {
        object first = frame.Pop();
        object second = frame.Pop();
        frame.Push(ScriptingRuntimeHelpers.BooleanToObject(first != second));
        return 1;
    }
}
/// <summary>
/// Factory for NotEqual instructions. Returns a cached singleton instruction
/// for the operand type, choosing the lifted-to-null variant when requested.
/// Enum types are handled via their underlying primitive type; reference
/// types share a single reference-comparison instruction.
/// </summary>
/// <exception cref="Exception">Throws via <c>Error.ExpressionNotSupportedForNullableType</c> /
/// <c>Error.ExpressionNotSupportedForType</c> for unsupported operand types
/// (Nullable&lt;T&gt; value types are not yet implemented here).</exception>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
public static Instruction Create(Type type, bool liftedToNull)
{
    if (liftedToNull)
    {
        // Boxed enums can be unboxed as their underlying types:
        switch (System.Dynamic.Utils.TypeExtensions.GetTypeCode(type.GetTypeInfo().IsEnum ? Enum.GetUnderlyingType(type) : TypeUtils.GetNonNullableType(type)))
        {
            // Lazily create and cache one instruction per primitive type.
            case TypeCode.Boolean: return s_booleanLiftedToNull ?? (s_booleanLiftedToNull = new NotEqualBooleanLiftedToNull());
            case TypeCode.SByte: return s_SByteLiftedToNull ?? (s_SByteLiftedToNull = new NotEqualSByteLiftedToNull());
            case TypeCode.Byte: return s_byteLiftedToNull ?? (s_byteLiftedToNull = new NotEqualByteLiftedToNull());
            case TypeCode.Char: return s_charLiftedToNull ?? (s_charLiftedToNull = new NotEqualCharLiftedToNull());
            case TypeCode.Int16: return s_int16LiftedToNull ?? (s_int16LiftedToNull = new NotEqualInt16LiftedToNull());
            case TypeCode.Int32: return s_int32LiftedToNull ?? (s_int32LiftedToNull = new NotEqualInt32LiftedToNull());
            case TypeCode.Int64: return s_int64LiftedToNull ?? (s_int64LiftedToNull = new NotEqualInt64LiftedToNull());
            case TypeCode.UInt16: return s_UInt16LiftedToNull ?? (s_UInt16LiftedToNull = new NotEqualUInt16LiftedToNull());
            case TypeCode.UInt32: return s_UInt32LiftedToNull ?? (s_UInt32LiftedToNull = new NotEqualUInt32LiftedToNull());
            case TypeCode.UInt64: return s_UInt64LiftedToNull ?? (s_UInt64LiftedToNull = new NotEqualUInt64LiftedToNull());
            case TypeCode.Single: return s_singleLiftedToNull ?? (s_singleLiftedToNull = new NotEqualSingleLiftedToNull());
            case TypeCode.Double: return s_doubleLiftedToNull ?? (s_doubleLiftedToNull = new NotEqualDoubleLiftedToNull());
            case TypeCode.String:
            case TypeCode.Object:
                if (!type.GetTypeInfo().IsValueType)
                {
                    return s_referenceLiftedToNull ?? (s_referenceLiftedToNull = new NotEqualReferenceLiftedToNull());
                }
                // TODO: Nullable<T>
                throw Error.ExpressionNotSupportedForNullableType("NotEqual", type);
            default:
                throw Error.ExpressionNotSupportedForType("NotEqual", type);
        }
    }
    else
    {
        // Boxed enums can be unboxed as their underlying types:
        switch (System.Dynamic.Utils.TypeExtensions.GetTypeCode(type.GetTypeInfo().IsEnum ? Enum.GetUnderlyingType(type) : TypeUtils.GetNonNullableType(type)))
        {
            case TypeCode.Boolean: return s_boolean ?? (s_boolean = new NotEqualBoolean());
            case TypeCode.SByte: return s_SByte ?? (s_SByte = new NotEqualSByte());
            case TypeCode.Byte: return s_byte ?? (s_byte = new NotEqualByte());
            case TypeCode.Char: return s_char ?? (s_char = new NotEqualChar());
            case TypeCode.Int16: return s_int16 ?? (s_int16 = new NotEqualInt16());
            case TypeCode.Int32: return s_int32 ?? (s_int32 = new NotEqualInt32());
            case TypeCode.Int64: return s_int64 ?? (s_int64 = new NotEqualInt64());
            case TypeCode.UInt16: return s_UInt16 ?? (s_UInt16 = new NotEqualUInt16());
            case TypeCode.UInt32: return s_UInt32 ?? (s_UInt32 = new NotEqualUInt32());
            case TypeCode.UInt64: return s_UInt64 ?? (s_UInt64 = new NotEqualUInt64());
            case TypeCode.Single: return s_single ?? (s_single = new NotEqualSingle());
            case TypeCode.Double: return s_double ?? (s_double = new NotEqualDouble());
            case TypeCode.String:
            case TypeCode.Object:
                if (!type.GetTypeInfo().IsValueType)
                {
                    return s_reference ?? (s_reference = new NotEqualReference());
                }
                // TODO: Nullable<T>
                throw Error.ExpressionNotSupportedForNullableType("NotEqual", type);
            default:
                throw Error.ExpressionNotSupportedForType("NotEqual", type);
        }
    }
}
/// <summary>Returns a diagnostic name for this instruction.</summary>
public override string ToString() => "NotEqual()";
}
}
| |
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
// Author(s):
//
// Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
[assembly: Elmah.Scc("$Id: SQLiteErrorLog.cs 924 2011-12-23 22:41:47Z azizatif $")]
namespace Elmah
{
#region Imports
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SQLite;
using System.Globalization;
using System.IO;
using IDictionary = System.Collections.IDictionary;
#endregion
/// <summary>
/// An <see cref="ErrorLog"/> implementation that uses SQLite as its backing store.
/// </summary>
public class SQLiteErrorLog : ErrorLog
{
    // Resolved connection string; immutable after construction.
    private readonly string _connectionString;

    /// <summary>
    /// Initializes a new instance of the <see cref="SQLiteErrorLog"/> class
    /// using a dictionary of configured settings.
    /// </summary>
    public SQLiteErrorLog(IDictionary config)
    {
        if (config == null)
            throw new ArgumentNullException("config");

        string connectionString = ConnectionStringHelper.GetConnectionString(config, true);

        //
        // If there is no connection string to use then throw an
        // exception to abort construction.
        //
        if (connectionString.Length == 0)
            throw new ApplicationException("Connection string is missing for the SQLite error log.");

        _connectionString = connectionString;

        // Creates the database file and Error table on first use.
        InitializeDatabase();

        ApplicationName = config.Find("applicationName", string.Empty);
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="SQLiteErrorLog"/> class
    /// to use a specific connection string for connecting to the database.
    /// </summary>
    public SQLiteErrorLog(string connectionString)
    {
        if (connectionString == null)
            throw new ArgumentNullException("connectionString");

        if (connectionString.Length == 0)
            throw new ArgumentException(null, "connectionString");

        _connectionString = ConnectionStringHelper.GetResolvedConnectionString(connectionString);

        InitializeDatabase();
    }

    // Guards first-time database file creation across threads in this AppDomain.
    private static readonly object _lock = new object();

    // Creates the SQLite database file and Error table if the file does not
    // already exist. Uses double-checked locking around File.Exists so only
    // one thread performs the creation.
    private void InitializeDatabase()
    {
        string connectionString = ConnectionString;
        Debug.AssertStringNotEmpty(connectionString);

        string dbFilePath = ConnectionStringHelper.GetDataSourceFilePath(connectionString);

        if (File.Exists(dbFilePath))
            return;

        //
        // Make sure that we don't have multiple threads all trying to create the database
        //
        lock (_lock)
        {
            //
            // Just double check that no other thread has created the database while
            // we were waiting for the lock
            //
            if (File.Exists(dbFilePath))
                return;

            SQLiteConnection.CreateFile(dbFilePath);

            const string sql = @"
                CREATE TABLE Error (
                    ErrorId INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                    Application TEXT NOT NULL,
                    Host TEXT NOT NULL,
                    Type TEXT NOT NULL,
                    Source TEXT NOT NULL,
                    Message TEXT NOT NULL,
                    User TEXT NOT NULL,
                    StatusCode INTEGER NOT NULL,
                    TimeUtc TEXT NOT NULL,
                    AllXml TEXT NOT NULL
                )";

            using (SQLiteConnection connection = new SQLiteConnection(connectionString))
            using (SQLiteCommand command = new SQLiteCommand(sql, connection))
            {
                connection.Open();
                command.ExecuteNonQuery();
            }
        }
    }

    /// <summary>
    /// Gets the name of this error log implementation.
    /// </summary>
    public override string Name
    {
        get { return "SQLite Error Log"; }
    }

    /// <summary>
    /// Gets the connection string used by the log to connect to the database.
    /// </summary>
    public virtual string ConnectionString
    {
        get { return _connectionString; }
    }

    /// <summary>
    /// Logs an error to the database.
    /// </summary>
    /// <remarks>
    /// The error is stored indefinitely; this implementation has no
    /// built-in retention policy.
    /// </remarks>
    /// <returns>The newly inserted row id as an invariant-culture string.</returns>
    public override string Log(Error error)
    {
        if (error == null)
            throw new ArgumentNullException("error");

        string errorXml = ErrorXml.EncodeString(error);

        // last_insert_rowid() returns the id generated by the INSERT on this connection.
        const string query = @"
            INSERT INTO Error (
                Application, Host,
                Type, Source, Message, User, StatusCode,
                TimeUtc, AllXml)
            VALUES (
                @Application, @Host,
                @Type, @Source, @Message, @User, @StatusCode,
                @TimeUtc, @AllXml);

            SELECT last_insert_rowid();";

        using (SQLiteConnection connection = new SQLiteConnection(ConnectionString))
        using (SQLiteCommand command = new SQLiteCommand(query, connection))
        {
            SQLiteParameterCollection parameters = command.Parameters;

            parameters.Add("@Application", DbType.String, 60).Value = ApplicationName;
            parameters.Add("@Host", DbType.String, 30).Value = error.HostName;
            parameters.Add("@Type", DbType.String, 100).Value = error.Type;
            parameters.Add("@Source", DbType.String, 60).Value = error.Source;
            parameters.Add("@Message", DbType.String, 500).Value = error.Message;
            parameters.Add("@User", DbType.String, 50).Value = error.User;
            parameters.Add("@StatusCode", DbType.Int64).Value = error.StatusCode;
            parameters.Add("@TimeUtc", DbType.DateTime).Value = error.Time.ToUniversalTime();
            parameters.Add("@AllXml", DbType.String).Value = errorXml;

            connection.Open();
            return Convert.ToInt64(command.ExecuteScalar()).ToString(CultureInfo.InvariantCulture);
        }
    }

    /// <summary>
    /// Returns a page of errors from the database in descending order
    /// of logged time.
    /// </summary>
    /// <returns>The total number of errors in the log.</returns>
    public override int GetErrors(int pageIndex, int pageSize, ICollection<ErrorLogEntry> errorEntryList)
    {
        if (pageIndex < 0)
            throw new ArgumentOutOfRangeException("pageIndex", pageIndex, null);

        if (pageSize < 0)
            throw new ArgumentOutOfRangeException("pageSize", pageSize, null);

        // Two result sets: the requested page (LIMIT offset, count),
        // then the total row count.
        const string sql = @"
            SELECT
                ErrorId,
                Application,
                Host,
                Type,
                Source,
                Message,
                User,
                StatusCode,
                TimeUtc
            FROM
                Error
            ORDER BY
                ErrorId DESC
            LIMIT
                @PageIndex * @PageSize,
                @PageSize;

            SELECT COUNT(*) FROM Error";

        using (SQLiteConnection connection = new SQLiteConnection(ConnectionString))
        using (SQLiteCommand command = new SQLiteCommand(sql, connection))
        {
            SQLiteParameterCollection parameters = command.Parameters;

            // NOTE(review): DbType.Int16 caps page values at 32767 — presumably
            // sufficient for this log's paging UI; confirm against callers.
            parameters.Add("@PageIndex", DbType.Int16).Value = pageIndex;
            parameters.Add("@PageSize", DbType.Int16).Value = pageSize;

            connection.Open();

            using (SQLiteDataReader reader = command.ExecuteReader())
            {
                // errorEntryList may be null when the caller only wants the total count.
                if (errorEntryList != null)
                {
                    while (reader.Read())
                    {
                        var id = Convert.ToString(reader["ErrorId"], CultureInfo.InvariantCulture);

                        var error = new Error
                        {
                            ApplicationName = reader["Application"].ToString(),
                            HostName = reader["Host"].ToString(),
                            Type = reader["Type"].ToString(),
                            Source = reader["Source"].ToString(),
                            Message = reader["Message"].ToString(),
                            User = reader["User"].ToString(),
                            StatusCode = Convert.ToInt32(reader["StatusCode"]),
                            Time = Convert.ToDateTime(reader["TimeUtc"]).ToLocalTime()
                        };
                        errorEntryList.Add(new ErrorLogEntry(this, id, error));
                    }
                }

                //
                // Get the result of SELECT COUNT(*) FROM Error
                //
                reader.NextResult();
                reader.Read();
                return reader.GetInt32(0);
            }
        }
    }

    /// <summary>
    /// Returns the specified error from the database, or null
    /// if it does not exist.
    /// </summary>
    public override ErrorLogEntry GetError(string id)
    {
        if (id == null)
            throw new ArgumentNullException("id");

        if (id.Length == 0)
            throw new ArgumentException(null, "id");

        long key;

        try
        {
            key = long.Parse(id, CultureInfo.InvariantCulture);
        }
        catch (FormatException e)
        {
            // Surface a malformed id as an argument error rather than a parse error.
            throw new ArgumentException(e.Message, "id", e);
        }

        const string sql = @"
            SELECT
                AllXml
            FROM
                Error
            WHERE
                ErrorId = @ErrorId";

        using (SQLiteConnection connection = new SQLiteConnection(ConnectionString))
        using (SQLiteCommand command = new SQLiteCommand(sql, connection))
        {
            SQLiteParameterCollection parameters = command.Parameters;
            parameters.Add("@ErrorId", DbType.Int64).Value = key;

            connection.Open();

            // ExecuteScalar returns null when no row matched.
            string errorXml = (string) command.ExecuteScalar();

            if (errorXml == null)
                return null;

            Error error = ErrorXml.DecodeString(errorXml);
            return new ErrorLogEntry(this, id, error);
        }
    }
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Globalization;
using System.IO;
using System.Text;
using Newtonsoft.Json.Utilities;
namespace Newtonsoft.Json.Bson
{
/// <summary>
/// Writes a <c>BsonToken</c> tree to a <see cref="BinaryWriter"/> in BSON
/// wire format. Sizes are pre-computed via <c>CalculateSize</c> (which also
/// caches byte counts on the tokens) before <c>WriteTokenInternal</c> emits bytes.
/// </summary>
internal class BsonBinaryWriter
{
    // UTF-8 without a BOM, as required for BSON string payloads.
    private static readonly Encoding Encoding = new UTF8Encoding(false);

    private readonly BinaryWriter _writer;

    // Scratch buffer reused for encoding strings of up to 256 bytes.
    private byte[] _largeByteBuffer;

    // Controls whether DateTime values are normalized to UTC, local time,
    // or written as-is before conversion to JavaScript ticks.
    public DateTimeKind DateTimeKindHandling { get; set; }

    public BsonBinaryWriter(BinaryWriter writer)
    {
        DateTimeKindHandling = DateTimeKind.Utc;
        _writer = writer;
    }

    public void Flush()
    {
        _writer.Flush();
    }

    public void Close()
    {
#if !(NETFX_CORE || PORTABLE40 || PORTABLE)
        _writer.Close();
#else
        _writer.Dispose();
#endif
    }

    /// <summary>
    /// Writes a complete token tree: first computes (and caches) sizes,
    /// then serializes the tree to the underlying writer.
    /// </summary>
    public void WriteToken(BsonToken t)
    {
        CalculateSize(t);
        WriteTokenInternal(t);
    }

    // Emits the binary representation of a token whose CalculatedSize /
    // ByteCount fields have already been populated by CalculateSize.
    private void WriteTokenInternal(BsonToken t)
    {
        switch (t.Type)
        {
            case BsonType.Object:
            {
                // document: int32 size, then (type byte, cstring name, value)* , then 0x00
                BsonObject value = (BsonObject)t;
                _writer.Write(value.CalculatedSize);
                foreach (BsonProperty property in value)
                {
                    _writer.Write((sbyte)property.Value.Type);
                    WriteString((string)property.Name.Value, property.Name.ByteCount, null);
                    WriteTokenInternal(property.Value);
                }
                _writer.Write((byte)0);
            }
                break;
            case BsonType.Array:
            {
                // Arrays are documents keyed by the decimal string of each index.
                BsonArray value = (BsonArray)t;
                _writer.Write(value.CalculatedSize);
                ulong index = 0;
                foreach (BsonToken c in value)
                {
                    _writer.Write((sbyte)c.Type);
                    WriteString(index.ToString(CultureInfo.InvariantCulture), MathUtils.IntLength(index), null);
                    WriteTokenInternal(c);
                    index++;
                }
                _writer.Write((byte)0);
            }
                break;
            case BsonType.Integer:
            {
                BsonValue value = (BsonValue)t;
                _writer.Write(Convert.ToInt32(value.Value, CultureInfo.InvariantCulture));
            }
                break;
            case BsonType.Long:
            {
                BsonValue value = (BsonValue)t;
                _writer.Write(Convert.ToInt64(value.Value, CultureInfo.InvariantCulture));
            }
                break;
            case BsonType.Number:
            {
                BsonValue value = (BsonValue)t;
                _writer.Write(Convert.ToDouble(value.Value, CultureInfo.InvariantCulture));
            }
                break;
            case BsonType.String:
            {
                // Length-prefixed string; prefix excludes the 4 prefix bytes themselves.
                BsonString value = (BsonString)t;
                WriteString((string)value.Value, value.ByteCount, value.CalculatedSize - 4);
            }
                break;
            case BsonType.Boolean:
            {
                BsonValue value = (BsonValue)t;
                _writer.Write((bool)value.Value);
            }
                break;
            case BsonType.Null:
            case BsonType.Undefined:
                // No payload bytes for null/undefined.
                break;
            case BsonType.Date:
            {
                BsonValue value = (BsonValue)t;

                long ticks = 0;

                if (value.Value is DateTime)
                {
                    DateTime dateTime = (DateTime)value.Value;
                    if (DateTimeKindHandling == DateTimeKind.Utc)
                        dateTime = dateTime.ToUniversalTime();
                    else if (DateTimeKindHandling == DateTimeKind.Local)
                        dateTime = dateTime.ToLocalTime();

                    ticks = DateTimeUtils.ConvertDateTimeToJavaScriptTicks(dateTime, false);
                }
#if !NET20
                else
                {
                    DateTimeOffset dateTimeOffset = (DateTimeOffset)value.Value;
                    ticks = DateTimeUtils.ConvertDateTimeToJavaScriptTicks(dateTimeOffset.UtcDateTime, dateTimeOffset.Offset);
                }
#endif

                _writer.Write(ticks);
            }
                break;
            case BsonType.Binary:
            {
                // int32 length, subtype byte, raw bytes.
                BsonBinary value = (BsonBinary)t;

                byte[] data = (byte[])value.Value;
                _writer.Write(data.Length);
                _writer.Write((byte)value.BinaryType);
                _writer.Write(data);
            }
                break;
            case BsonType.Oid:
            {
                // 12-byte ObjectId written raw.
                BsonValue value = (BsonValue)t;

                byte[] data = (byte[])value.Value;
                _writer.Write(data);
            }
                break;
            case BsonType.Regex:
            {
                // Two consecutive cstrings: pattern then options.
                BsonRegex value = (BsonRegex)t;

                WriteString((string)value.Pattern.Value, value.Pattern.ByteCount, null);

                WriteString((string)value.Options.Value, value.Options.ByteCount, null);
            }
                break;
            default:
                throw new ArgumentOutOfRangeException("t", "Unexpected token when writing BSON: {0}".FormatWith(CultureInfo.InvariantCulture, t.Type));
        }
    }

    // Writes an optional int32 length prefix, the UTF-8 bytes of s, and a
    // terminating NUL. A null prefix yields a bare cstring (element names, regex).
    private void WriteString(string s, int byteCount, int? calculatedlengthPrefix)
    {
        if (calculatedlengthPrefix != null)
            _writer.Write(calculatedlengthPrefix.Value);

        WriteUtf8Bytes(s, byteCount);

        _writer.Write((byte)0);
    }

    /// <summary>
    /// Encodes <paramref name="s"/> as UTF-8 and writes exactly
    /// <paramref name="byteCount"/> bytes, reusing a scratch buffer for
    /// strings of up to 256 bytes. Writes nothing when s is null.
    /// </summary>
    public void WriteUtf8Bytes(string s, int byteCount)
    {
        if (s != null)
        {
            if (_largeByteBuffer == null)
            {
                _largeByteBuffer = new byte[256];
            }
            if (byteCount <= 256)
            {
                Encoding.GetBytes(s, 0, s.Length, _largeByteBuffer, 0);
                _writer.Write(_largeByteBuffer, 0, byteCount);
            }
            else
            {
                byte[] bytes = Encoding.GetBytes(s);
                _writer.Write(bytes);
            }
        }
    }

    // Size of a bare cstring: bytes plus NUL terminator.
    private int CalculateSize(int stringByteCount)
    {
        return stringByteCount + 1;
    }

    // Size of a string element: optional 4-byte length prefix + bytes + NUL.
    private int CalculateSizeWithLength(int stringByteCount, bool includeSize)
    {
        int baseSize = (includeSize)
            ? 5 // size bytes + terminator
            : 1; // terminator

        return baseSize + stringByteCount;
    }

    // Recursively computes the serialized size of a token, caching the result
    // (and string byte counts) on the token for use by WriteTokenInternal.
    private int CalculateSize(BsonToken t)
    {
        switch (t.Type)
        {
            case BsonType.Object:
            {
                BsonObject value = (BsonObject)t;

                int bases = 4; // int32 document size
                foreach (BsonProperty p in value)
                {
                    int size = 1; // element type byte
                    size += CalculateSize(p.Name);
                    size += CalculateSize(p.Value);

                    bases += size;
                }
                bases += 1; // trailing 0x00
                value.CalculatedSize = bases;
                return bases;
            }
            case BsonType.Array:
            {
                BsonArray value = (BsonArray)t;

                int size = 4; // int32 document size
                ulong index = 0;
                foreach (BsonToken c in value)
                {
                    size += 1; // element type byte
                    size += CalculateSize(MathUtils.IntLength(index)); // index as cstring key
                    size += CalculateSize(c);
                    index++;
                }
                size += 1; // trailing 0x00
                value.CalculatedSize = size;

                return value.CalculatedSize;
            }
            case BsonType.Integer:
                return 4;
            case BsonType.Long:
                return 8;
            case BsonType.Number:
                return 8;
            case BsonType.String:
            {
                BsonString value = (BsonString)t;
                string s = (string)value.Value;
                value.ByteCount = (s != null) ? Encoding.GetByteCount(s) : 0;
                value.CalculatedSize = CalculateSizeWithLength(value.ByteCount, value.IncludeLength);

                return value.CalculatedSize;
            }
            case BsonType.Boolean:
                return 1;
            case BsonType.Null:
            case BsonType.Undefined:
                return 0;
            case BsonType.Date:
                return 8;
            case BsonType.Binary:
            {
                BsonBinary value = (BsonBinary)t;

                byte[] data = (byte[])value.Value;
                value.CalculatedSize = 4 + 1 + data.Length; // length + subtype + payload

                return value.CalculatedSize;
            }
            case BsonType.Oid:
                return 12;
            case BsonType.Regex:
            {
                BsonRegex value = (BsonRegex)t;
                int size = 0;
                size += CalculateSize(value.Pattern);
                size += CalculateSize(value.Options);
                value.CalculatedSize = size;

                return value.CalculatedSize;
            }
            default:
                throw new ArgumentOutOfRangeException("t", "Unexpected token when writing BSON: {0}".FormatWith(CultureInfo.InvariantCulture, t.Type));
        }
    }
}
| |
using System;
using System.Collections.Immutable;
using System.Composition;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeFixes;
using Analyzer.Utilities;
using System.Threading;
using Analyzer.Utilities.Extensions;
using Microsoft.CodeAnalysis.Editing;
namespace Microsoft.ApiDesignGuidelines.Analyzers
{
/// <summary>
/// CA1066: Implement IEquatable when overriding Object.Equals
/// CA1067: Override Object.Equals(object) when implementing IEquatable{T}
/// </summary>
[ExportCodeFixProvider(LanguageNames.CSharp, LanguageNames.VisualBasic), Shared]
public sealed class EquatableFixer : CodeFixProvider
{
    public override ImmutableArray<string> FixableDiagnosticIds =>
        ImmutableArray.Create(EquatableAnalyzer.ImplementIEquatableRuleId, EquatableAnalyzer.OverrideObjectEqualsRuleId);

    public override FixAllProvider GetFixAllProvider()
    {
        // See https://github.com/dotnet/roslyn/blob/master/docs/analyzers/FixAllProvider.md for more information on Fix All Providers
        return WellKnownFixAllProviders.BatchFixer;
    }

    /// <summary>
    /// Registers up to two fixes for the reported type: implementing
    /// IEquatable&lt;T&gt; on a struct that lacks it, and overriding
    /// Object.Equals(object) when the type does not already do so.
    /// </summary>
    public override async Task RegisterCodeFixesAsync(CodeFixContext context)
    {
        SyntaxGenerator generator = SyntaxGenerator.GetGenerator(context.Document);
        SyntaxNode root = await context.Document.GetSyntaxRootAsync(context.CancellationToken).ConfigureAwait(false);
        SyntaxNode declaration = root.FindNode(context.Span);
        declaration = generator.GetDeclaration(declaration);
        if (declaration == null)
        {
            return;
        }

        SemanticModel model =
            await context.Document.GetSemanticModelAsync(context.CancellationToken).ConfigureAwait(false);
        INamedTypeSymbol type = model.GetDeclaredSymbol(declaration) as INamedTypeSymbol;
        // Only classes and structs are fixable targets.
        if (type == null || type.TypeKind != TypeKind.Class && type.TypeKind != TypeKind.Struct)
        {
            return;
        }

        INamedTypeSymbol equatableType = WellKnownTypes.GenericIEquatable(model.Compilation);
        if (equatableType == null)
        {
            return;
        }

        // We cannot have multiple overlapping diagnostics of this id.
        Diagnostic diagnostic = context.Diagnostics.Single();

        if (type.TypeKind == TypeKind.Struct && !TypeImplementsEquatable(type, equatableType))
        {
            context.RegisterCodeFix(new MyCodeAction(
                MicrosoftApiDesignGuidelinesAnalyzersResources.ImplementEquatable,
                async ct =>
                    await ImplementEquatableInStructAsync(context.Document, declaration, type, model.Compilation,
                        equatableType, ct).ConfigureAwait(false),
                equivalenceKey: MicrosoftApiDesignGuidelinesAnalyzersResources.ImplementEquatable), diagnostic);
        }

        if (!type.OverridesEquals())
        {
            context.RegisterCodeFix(new MyCodeAction(
                MicrosoftApiDesignGuidelinesAnalyzersResources.OverrideEqualsOnImplementingIEquatableCodeActionTitle,
                async ct =>
                    await OverrideObjectEqualsAsync(context.Document, declaration, type, equatableType,
                        ct).ConfigureAwait(false),
                equivalenceKey: MicrosoftApiDesignGuidelinesAnalyzersResources.OverrideEqualsOnImplementingIEquatableCodeActionTitle), diagnostic);
        }
    }

    /// <summary>
    /// Returns true when <paramref name="type"/>'s direct interface list
    /// contains IEquatable&lt;type&gt;.
    /// </summary>
    private bool TypeImplementsEquatable(INamedTypeSymbol type, INamedTypeSymbol equatableType)
    {
        INamedTypeSymbol constructedEquatable = equatableType.Construct(type);
        INamedTypeSymbol implementation = type
            .Interfaces
            .FirstOrDefault(x => x.Equals(constructedEquatable));
        return implementation != null;
    }

    /// <summary>
    /// Adds a public Equals(T) method (with a default body) to the struct and
    /// appends IEquatable&lt;T&gt; to its base list.
    /// </summary>
    private async Task<Document> ImplementEquatableInStructAsync(Document document, SyntaxNode declaration,
        INamedTypeSymbol typeSymbol, Compilation compilation, INamedTypeSymbol equatableType,
        CancellationToken cancellationToken)
    {
        var editor = await DocumentEditor.CreateAsync(document, cancellationToken).ConfigureAwait(false);
        var generator = editor.Generator;

        var equalsMethod = generator.MethodDeclaration(
            WellKnownMemberNames.ObjectEquals,
            new[]
            {
                generator.ParameterDeclaration("other", generator.TypeExpression(typeSymbol))
            },
            returnType: generator.TypeExpression(SpecialType.System_Boolean),
            accessibility: Accessibility.Public,
            statements: generator.DefaultMethodBody(compilation));

        editor.AddMember(declaration, equalsMethod);

        INamedTypeSymbol constructedType = equatableType.Construct(typeSymbol);
        editor.AddInterfaceType(declaration, generator.TypeExpression(constructedType));

        return editor.GetChangedDocument();
    }

    /// <summary>
    /// Adds a public override of Object.Equals(object) whose body delegates to
    /// the type's IEquatable&lt;T&gt;.Equals — via a cast when the interface is
    /// implemented explicitly, otherwise by direct call — with class vs. struct
    /// variants for the null/type check.
    /// </summary>
    private async Task<Document> OverrideObjectEqualsAsync(Document document, SyntaxNode declaration,
        INamedTypeSymbol typeSymbol, INamedTypeSymbol equatableType, CancellationToken cancellationToken)
    {
        var editor = await DocumentEditor.CreateAsync(document, cancellationToken).ConfigureAwait(false);
        var generator = editor.Generator;

        var argumentName = generator.IdentifierName("obj");

        SyntaxNode returnStatement;

        if (HasExplicitEqualsImplementation(typeSymbol, equatableType))
        {
            returnStatement = typeSymbol.TypeKind == TypeKind.Class
                ? GetReturnStatementForExplicitClass(generator, typeSymbol, argumentName, equatableType)
                : GetReturnStatementForExplicitStruct(generator, typeSymbol, argumentName, equatableType);
        }
        else
        {
            returnStatement = typeSymbol.TypeKind == TypeKind.Class
                ? GetReturnStatementForImplicitClass(generator, typeSymbol, argumentName)
                : GetReturnStatementForImplicitStruct(generator, typeSymbol, argumentName);
        }

        var equalsMethod = generator.MethodDeclaration(
            WellKnownMemberNames.ObjectEquals,
            new[]
            {
                generator.ParameterDeclaration(argumentName.ToString(),
                    generator.TypeExpression(SpecialType.System_Object))
            },
            returnType: generator.TypeExpression(SpecialType.System_Boolean),
            accessibility: Accessibility.Public,
            modifiers: DeclarationModifiers.Override,
            statements: new[] {returnStatement});

        editor.AddMember(declaration, equalsMethod);

        return editor.GetChangedDocument();
    }

    /// <summary>
    /// Returns true when the type implements IEquatable&lt;T&gt;.Equals
    /// explicitly (so it can only be invoked through the interface).
    /// </summary>
    private bool HasExplicitEqualsImplementation(INamedTypeSymbol typeSymbol, INamedTypeSymbol equatableType)
    {
        INamedTypeSymbol constructedType = equatableType.Construct(typeSymbol);
        // IEquatable<T> declares exactly one member: Equals(T).
        IMethodSymbol constructedEqualsMethod = constructedType.GetMembers().OfType<IMethodSymbol>().Single();

        foreach (IMethodSymbol method in typeSymbol.GetMembers().OfType<IMethodSymbol>())
        {
            foreach (IMethodSymbol explicitImplementation in method.ExplicitInterfaceImplementations)
            {
                if (explicitImplementation.Equals(constructedEqualsMethod))
                {
                    return true;
                }
            }
        }

        return false;
    }

    // return ((IEquatable<T>)this).Equals(obj as T);
    private static SyntaxNode GetReturnStatementForExplicitClass(SyntaxGenerator generator,
        INamedTypeSymbol typeSymbol, SyntaxNode argumentName, INamedTypeSymbol equatableType)
    {
        return generator.ReturnStatement(
            generator.InvocationExpression(
                generator.MemberAccessExpression(
                    generator.CastExpression(
                        equatableType.Construct(typeSymbol),
                        generator.ThisExpression()),
                    WellKnownMemberNames.ObjectEquals),
                generator.TryCastExpression(
                    argumentName,
                    typeSymbol)));
    }

    // return obj is T && ((IEquatable<T>)this).Equals((T)obj);
    private static SyntaxNode GetReturnStatementForExplicitStruct(SyntaxGenerator generator,
        INamedTypeSymbol typeSymbol, SyntaxNode argumentName, INamedTypeSymbol equatableType)
    {
        return generator.ReturnStatement(
            generator.LogicalAndExpression(
                generator.IsTypeExpression(
                    argumentName,
                    typeSymbol),
                generator.InvocationExpression(
                    generator.MemberAccessExpression(
                        generator.CastExpression(
                            equatableType.Construct(typeSymbol),
                            generator.ThisExpression()),
                        WellKnownMemberNames.ObjectEquals),
                    generator.CastExpression(
                        typeSymbol,
                        argumentName))));
    }

    // return Equals(obj as T);
    private static SyntaxNode GetReturnStatementForImplicitClass(SyntaxGenerator generator,
        INamedTypeSymbol typeSymbol, SyntaxNode argumentName)
    {
        return generator.ReturnStatement(
            generator.InvocationExpression(
                generator.IdentifierName(WellKnownMemberNames.ObjectEquals),
                generator.Argument(
                    generator.TryCastExpression(
                        argumentName,
                        typeSymbol))));
    }

    // return obj is T && Equals((T)obj);
    private static SyntaxNode GetReturnStatementForImplicitStruct(SyntaxGenerator generator,
        INamedTypeSymbol typeSymbol, SyntaxNode argumentName)
    {
        return generator.ReturnStatement(
            generator.LogicalAndExpression(
                generator.IsTypeExpression(
                    argumentName,
                    typeSymbol),
                generator.InvocationExpression(
                    generator.IdentifierName(WellKnownMemberNames.ObjectEquals),
                    generator.CastExpression(
                        typeSymbol,
                        argumentName))));
    }

    // Needed for Telemetry (https://github.com/dotnet/roslyn-analyzers/issues/192)
    private class MyCodeAction : DocumentChangeAction
    {
        public override string EquivalenceKey { get; }

        public MyCodeAction(string title, Func<CancellationToken, Task<Document>> createChangedDocument,
            string equivalenceKey)
            : base(title, createChangedDocument)
        {
            EquivalenceKey = equivalenceKey;
        }
    }
}
}
| |
//
// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).
// All rights reserved.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Xml;
using System.Xml.Schema;
using OpenADK.Library.Global;
using OpenADK.Library.Impl;
using OpenADK.Library.Impl.Surrogates;
namespace OpenADK.Library
{
/// <summary> Renders a <c>SifElement</c> to an XML stream in SIF format.
///
/// Agents do not typically use the SifWriter class directly, but may do so to
/// render a SIF Data Object or a SIF Message to an output stream. The following
/// code demonstrates how to Write a SifDataObject to System.out:
///
///
/// <c>
/// StudentPersonal sp = ...<br/>
/// SifWriter out = new SifWriter( System.out );<br/>
/// out.write( sp );<br/>
///
/// </summary>
/// <author> Eric Petersen
/// </author>
/// <version> 1.0
/// </version>
public class SifWriter
{
// Destination XML stream; created from a Stream, TextWriter, or XmlWriter.
private XmlWriter fWriter;
// Writer configuration shared by all constructors (fragment-level, indented).
private XmlWriterSettings fSettings;
// Version-specific value formatter obtained from the ADK DTD.
private SifFormatter fFormatter;
public const string XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace";
public const string XSI_PREFIX = "xsi";
public const string NIL = "nil";
// Whether xml:id values are read/written (see SerializeIds property).
private bool fSerializeIds = false;
/// <summary> Elements that should not be included in the output</summary>
protected internal IDictionary<string, IElementDef> fFilter = null;
/// <summary> The version of SIF to use when rendering XML</summary>
protected internal SifVersion fVersion;
// Tracks whether root-level attributes have already been emitted.
private bool fRootAttributesWritten;
/// <summary>
/// Shared initialization: configures the XmlWriterSettings used by all
/// public constructors and adopts the ADK's current SIF version.
/// </summary>
private SifWriter()
{
    fSettings = new XmlWriterSettings
    {
        OmitXmlDeclaration = true,
        // Allow multiple fragments (objects) to be written without a root node
        ConformanceLevel = ConformanceLevel.Fragment,
        CheckCharacters = true,
        Indent = true,
        IndentChars = " ",
        Encoding = SifIOFormatter.ENCODING
    };
    SetSifVersion( Adk.SifVersion );
}
// Records the SIF version to render and caches the matching formatter,
// keeping fVersion and fFormatter in sync.
private void SetSifVersion( SifVersion version )
{
    fVersion = version;
    fFormatter = Adk.Dtd.GetFormatter( version );
}
/// <summary> Constructor</summary>
/// <param name="outStream">The OutputStream to Write to. The XmlWriter is
/// created over this stream with the shared settings (including the
/// SIF encoding).</param>
public SifWriter( Stream outStream )
    : this()
{
    fWriter = XmlWriter.Create( outStream, fSettings );
}
/// <summary>
/// Creates an instance of a SifWriter using a TextWriter
/// </summary>
/// <param name="writer">The writer to write to. The writer needs to be using the proper encoding for the purpose in which it is used</param>
public SifWriter( TextWriter writer )
    : this()
{
    fWriter = XmlWriter.Create( writer, fSettings );
}
/// <summary>
/// Creates an instance of a SifWriter that wraps an existing XmlWriter.
/// </summary>
/// <param name="writer">The underlying writer; it is wrapped via
/// XmlWriter.Create so the shared settings apply.</param>
public SifWriter(XmlWriter writer) : this()
{
    fWriter = XmlWriter.Create(writer, fSettings);
}
/// <summary>
/// Gets or Sets whether SifWriter will get and set xml:id values, using the SifElement <see cref="SifElement.XmlId"/> property.
/// </summary>
public bool SerializeIds
{
    get { return fSerializeIds; }
    set { fSerializeIds = value; }
}
/// <summary> Places a filter on this SifWriter such that only elements (and their
/// children) identified in the array will be included in the output. Note
/// that attributes are always included even if not specified in the filter
/// list, as are top-level SIF Data Objects like StudentPersonal.
///
/// The filter remains in effect until the <c>clearFilter</c> method
/// is called or a null array is passed to this method.
///
/// </summary>
/// <value>An array of ElementDef constants from the SifDtd class
/// that identify elements to include in the output, or <c>null</c>
/// to clear the current filter</value>
public virtual IElementDef[] Filter
{
    set
    {
        // A null array means "remove the filter entirely".
        if ( value == null )
        {
            clearFilter();
            return;
        }

        // Reuse the existing map when present; otherwise allocate one.
        if ( fFilter == null )
        {
            fFilter = new Dictionary<string, IElementDef>();
        }
        else
        {
            fFilter.Clear();
        }

        // Index the non-null entries by element name; later duplicates win.
        foreach ( IElementDef def in value )
        {
            if ( def != null )
            {
                fFilter[def.Name] = def;
            }
        }
    }
}
/// <summary> Removes any element filter previously installed via the
/// <see cref="Filter"/> property; afterwards all elements are written.
/// </summary>
public virtual void clearFilter()
{
    if ( fFilter == null )
    {
        return;
    }
    fFilter.Clear();
    fFilter = null;
}
// Rendering modes for Write(SifElement, int, bool):
// EMPTY closes the element immediately after its attributes are written;
// OPEN leaves the element open so child content can be appended.
private const int EMPTY = 0;
private const int OPEN = 1;
//private const int CLOSE = 2;
/// <summary>
/// Determines whether the element will render any element content in the
/// given SIF version: either child elements, or at least one field that is
/// supported by the version and rendered as an element (not an attribute).
/// </summary>
/// <param name="o">The element to inspect</param>
/// <param name="version">The SIF version being rendered</param>
private Boolean HasContent( SifElement o,
    SifVersion version )
{
    if ( o.ChildCount > 0 )
    {
        return true;
    }

    foreach ( SimpleField field in o.GetFields() )
    {
        // TODO: This is a perfect place to optimize. Version-specific lookups
        // should be optimized
        IElementDef fieldDef = field.ElementDef;
        if ( fieldDef.IsSupported( version ) && !fieldDef.IsAttribute( version ) )
        {
            return true;
        }
    }
    return false;
}
/// <summary> Write a SIF Message in the version of SIF in effect for that object.
/// To change the version of SIF that is used, call the
/// <c>SifMessagePayload.setSIFVersion</c> method prior to calling
/// this function.
/// </summary>
/// <param name="o">The SIF Message to Write to the output stream</param>
[MethodImpl( MethodImplOptions.Synchronized )]
public virtual void Write( SifMessagePayload o )
{
    SetSifVersion( o.SifVersion );
    // SIF_Message is the document root; its Version attribute and namespace
    // declarations are emitted exactly once here.
    fWriter.WriteStartElement( "SIF_Message", o.GetXmlns() );
    writeRootAttributes( true );
    Write( (SifElement) o );
    fWriter.WriteEndElement();
}
/// <summary> Write a SIF Data Object in the version of SIF in effect for that object.
/// To change the version of SIF that is used, call the
/// <c>SifDataObject.setSIFVersion</c> method prior to calling
/// this function.
/// </summary>
/// <param name="o">The SifDataObject instance to Write to the output stream</param>
[MethodImpl( MethodImplOptions.Synchronized )]
public virtual void Write( SifDataObject o )
{
    SetSifVersion( o.SifVersion );
    if ( o is SifDataObjectXml )
    {
        // Objects backed by literal XML are emitted verbatim, bypassing
        // element-by-element rendering.
        fWriter.WriteRaw( o.ToXml() );
    }
    else
    {
        WriteElement( o );
    }
}
/// <summary> Write a SIF Data Object to the output stream using whatever XML content
/// is currently defined for that object.
/// </summary>
/// <param name="o">The SifDataObjectXml instance to Write to the output stream</param>
[MethodImpl( MethodImplOptions.Synchronized )]
public virtual void Write( SifDataObjectXml o )
{
    SetSifVersion( o.SifVersion );
    // The object's raw XML is written as-is; no escaping or re-rendering.
    fWriter.WriteRaw( o.ToXml() );
}
/// <summary> Write a SIF element in the version of SIF specified.
/// </summary>
/// <param name="o">The SIF Element instance to Write to the output stream</param>
/// <param name="version">The version of SIF to use when rendering the SIF element</param>
[MethodImpl( MethodImplOptions.Synchronized )]
public virtual void Write( SifElement o,
    SifVersion version )
{
    SetSifVersion( version );
    WriteElement( o );
}
/// <summary>
/// Write a SIF element in the version of SIF currently in effect for this
/// SIFWriter
/// </summary>
/// <param name="o">The SIF Element instance to write to the output stream</param>
public virtual void Write( SifElement o )
{
    WriteElement( o );
}
/// <summary> Write a SIF element in the version of SIF currently in effect for this
/// SifWriter.
/// </summary>
/// <param name="element">The SIF Element instance to Write to the output stream</param>
/// <param name="isLegacy">if true, this method assumes that it needs to do more work,
/// such as looking for rendering surrogates for the specific version of SIF</param>
private void WriteElement( SifElement element, bool isLegacy )
{
    IElementDef def = element.ElementDef;
    // Skip elements excluded by the current filter or not supported in the
    // target SIF version.
    if ( !(Include( element ) && def.IsSupported( fVersion ) ) )
    {
        return;
    }
    if (isLegacy)
    {
        // Pre-2.0 versions of SIF may need a render surrogate to produce
        // version-specific XML for this element.
        IRenderSurrogate surrogate = def.GetVersionInfo(fVersion).GetSurrogate();
        if (surrogate != null)
        {
            surrogate.RenderRaw(fWriter, fVersion, element, fFormatter);
            return;
        }
    }
    if ( element.IsEmpty() || !HasContent( element, fVersion ) )
    {
        if (element is XMLData)
        {
            // Raw XML payloads are written through the DOM, verbatim.
            XmlDocument doc = ((XMLData) element).Xml;
            doc.Save( fWriter );
        }
        else
        {
            Write( element, EMPTY, isLegacy );
        }
    }
    else
    {
        // Open the element, then recursively render its version-ordered
        // content (child elements and element-valued fields).
        Write( element, OPEN, isLegacy );
        ICollection<Element> elements = fFormatter.GetContent( element, fVersion );
        foreach ( Element childElement in elements )
        {
            if ( childElement is SifElement )
            {
                WriteElement( (SifElement) childElement, isLegacy );
            }
            else
            {
                Write( (SimpleField) childElement, isLegacy );
            }
        }
        fWriter.WriteEndElement();
    }
}
/// <summary>
/// Write a SIF element in the version of SIF currently in effect for this
/// SIFWriter.
/// </summary>
/// <param name="o">The SIF Element instance to write to the output stream</param>
private void WriteElement( SifElement o )
{
    // Versions prior to SIF 2.0 are "legacy" and may require render surrogates.
    WriteElement( o, fVersion.CompareTo( SifVersion.SIF20 ) < 0 );
}
/// <summary>
/// Write a single element-valued field as "&lt;tag&gt;value&lt;/tag&gt;". A null
/// value renders as xsi:nil="true" in SIF 2.x and later, or as an empty
/// element in legacy versions. Legacy versions may delegate to a render
/// surrogate instead.
/// </summary>
/// <param name="f">The field to write</param>
/// <param name="isLegacy">true if the target version predates SIF 2.0</param>
private void Write( SimpleField f, bool isLegacy )
{
    if ( !Include( f ) )
    {
        return;
    }
    if ( isLegacy )
    {
        IRenderSurrogate surrogate = f.ElementDef.GetVersionInfo( fVersion ).GetSurrogate();
        if ( surrogate != null )
        {
            surrogate.RenderRaw( fWriter, fVersion, f, fFormatter );
            return;
        }
    }
    // "<tag [attr...]>[text]" or "<tag [attr...]/>"
    String fieldValue = null;
    SifSimpleType simpleValue = f.SifValue;
    if ( simpleValue != null )
    {
        fieldValue = simpleValue.ToString( fFormatter );
    }
    if ( fieldValue == null )
    {
        if ( !isLegacy )
        {
            // Render the null value as an explicit xsi:nil element.
            fWriter.WriteStartElement( f.ElementDef.Tag( fVersion ) );
            fWriter.WriteAttributeString(NIL, XmlSchema.InstanceNamespace, "true");
            //fWriter.WriteElementString( f.ElementDef.Tag( fVersion ), null );
            fWriter.WriteFullEndElement();
        }
        else
        {
            // The specified version of SIF doesn't support
            // the xsi:nil attribute. Set the value to an empty
            // string
            fWriter.WriteStartElement( f.ElementDef.Tag( fVersion ) );
            fWriter.WriteFullEndElement();
        }
    }
    else
    {
        fWriter.WriteStartElement( f.ElementDef.Tag( fVersion ) );
        if ( f.DoNotEncode )
        {
            // Pre-encoded values are written raw to avoid double-escaping.
            fWriter.WriteRaw( fieldValue );
        }
        else
        {
            fWriter.WriteString( fieldValue );
        }
        fWriter.WriteEndElement();
    }
}
/// <summary>
/// Write the opening of a SifElement, including its attributes and any text
/// value. In EMPTY mode the element is closed immediately; in OPEN mode it
/// is left open so the caller can append child content.
/// </summary>
/// <param name="o">The element to write</param>
/// <param name="mode">EMPTY or OPEN</param>
/// <param name="isLegacy">true if the target version predates SIF 2.0</param>
private void Write( SifElement o, int mode, Boolean isLegacy )
{
    if ( !Include( o ) )
    {
        return;
    }
    // "<tag [attr...]>[text]" or "<tag [attr...]/>"
    string tag = o.ElementDef.Tag( fVersion );
    fWriter.WriteStartElement( tag );
    // Root attributes (xmlns:xsi) are emitted lazily on the first element
    // written, unless Write(SifMessagePayload) already wrote them.
    if (!fRootAttributesWritten)
    {
        writeRootAttributes(false);
    }
    WriteAttributes( o );
    if ( mode == EMPTY )
    {
        fWriter.WriteEndElement();
    }
    else
    {
        // Check for a text value (or an xs:nil value)
        SimpleField elementValue = o.GetField( o.ElementDef );
        if ( elementValue != null )
        {
            SifSimpleType sst = elementValue.SifValue;
            if ( sst == null || sst.RawValue == null )
            {
                // The value of this element has been set and it is
                // null. This should be rendered as 'xs:nil' in SIF 2.x and greater
                if ( !isLegacy )
                {
                    fWriter.WriteAttributeString(NIL, XmlSchema.InstanceNamespace, "true");
                }
            }
            else
            {
                if ( o.DoNotEncode )
                {
                    // Pre-encoded text is written raw to avoid double-escaping.
                    fWriter.WriteRaw( o.TextValue );
                }
                else
                {
                    String xmlValue = sst.ToString( fFormatter );
                    fWriter.WriteString( xmlValue );
                }
            }
        }
    }
}
/// <summary> Write the attributes of a SifElement to the output stream.</summary>
/// <param name="o">The SifElement whose attributes are to be written</param>
private void WriteAttributes( SifElement o )
{
    // TODO: We need to make sure the GetFields() API returns a usable collection
    ICollection<SimpleField> fields = fFormatter.GetFields( o, fVersion );
    foreach (SimpleField f in fields )
    {
        IElementVersionInfo evi = f.ElementDef.GetVersionInfo( fVersion );
        if ( evi != null && evi.IsAttribute )
        {
            // Null attribute values are not supported in SIF, unlike
            // element values, which can be represented with xs:nil.
            // BUGFIX: guard against a null SifValue before dereferencing
            // RawValue (the element-writing path in Write(SimpleField, bool)
            // performs the same null check on SifValue).
            SifSimpleType sst = f.SifValue;
            if (sst != null && sst.RawValue != null)
            {
                String tag = evi.Tag;
                Boolean handled = false;
                if ( tag.StartsWith( "x" ) )
                {
                    // Namespaced attributes need explicit prefix/namespace
                    // handling rather than a plain WriteStartAttribute call.
                    if ( evi.Tag.Equals( "xml:lang" ) )
                    {
                        fWriter.WriteAttributeString("xml", "lang", null, sst.ToString(fFormatter));
                    }
                    else if ( evi.Tag.Equals( "xsi:type" ) )
                    {
                        fWriter.WriteAttributeString("type", XmlSchema.InstanceNamespace, sst.ToString(fFormatter));
                    }
                    // NOTE(review): any other attribute whose tag starts with
                    // "x" is marked handled and therefore silently dropped.
                    // Presumably this suppresses unsupported xml:/xsi:
                    // attributes, but it would also drop e.g. "xStatus" —
                    // confirm this is intentional.
                    handled = true;
                }
                if ( !handled )
                {
                    fWriter.WriteStartAttribute( evi.Tag, string.Empty );
                    fWriter.WriteString( sst.ToString( fFormatter ) );
                    fWriter.WriteEndAttribute();
                }
            }
        }
    }
    if ( fSerializeIds && o.XmlId != null )
    {
        fWriter.WriteAttributeString( "id", XML_NAMESPACE, o.XmlId );
    }
}
/// <summary>
/// Determines whether an element or field should be written: it must be
/// supported in the target SIF version, be marked as changed, and — when a
/// filter is installed — be a top-level SIF object, appear in the filter
/// list, or be an ancestor/descendant of a filtered element.
/// </summary>
/// <param name="o">The element or field being considered</param>
/// <returns>true if the element should be included in the output</returns>
private bool Include( Element o )
{
    if ( o.ElementDef.IsSupported( fVersion ) && o.IsChanged() )
    {
        // No filter installed, or top-level SIF Data Objects are always kept.
        if ( fFilter == null || o.ElementDef.Object )
        {
            return true;
        }
        // If the element is in the filter list, include it
        if ( fFilter.ContainsKey( o.ElementDef.Name ) )
        {
            return true;
        }
        // If any of the element's parents are in the filter list, include it
        Element parent = o.Parent;
        Element cur = o;
        while ( parent != null )
        {
            IElementDef tst = Adk.Dtd.LookupElementDef( parent.ElementDef, cur.ElementDef.Name );
            if ( tst != null && fFilter.ContainsKey( tst.Name ) )
            {
                return true;
            }
            cur = parent;
            parent = parent.Parent;
        }
        // Also walk the metadata (ElementDef) parent chain, which covers
        // definitions whose runtime Parent links are not populated.
        IElementDef parentDef = o.ElementDef.Parent;
        while ( parentDef != null )
        {
            if ( fFilter.ContainsKey( parentDef.Name ) )
            {
                return true;
            }
            parentDef = parentDef.Parent;
        }
        // At this point the element should not be included *unless* it is
        // the parent of one of the elements in the filter list. In this
        // case it has to be included or else that child will not be.
        foreach ( IElementDef def in fFilter.Values )
        {
            parentDef = def.Parent;
            if ( parentDef != null && parentDef.Name.Equals( o.ElementDef.Name ) )
            {
                return true;
            }
        }
    }
    return false;
}
/// <summary>
/// Flushes whatever is in the buffer to the underlying stream and also flushes the underlying stream
/// </summary>
public void Flush()
{
    fWriter.Flush();
}
/// <summary>
/// Closes this stream and the underlying stream
/// </summary>
public void Close()
{
    fWriter.Close();
}
/// <summary>
/// By Default, SIFWriter writes an XML Namespace when it starts
/// writing to an element stream. If this is not desirable, it can
/// be suppressed with this call. However, if suppressed, the ADK may
/// not be able to parse the resulting XML, as it relies on XML Namespace
/// parsing for some features.
/// </summary>
/// <param name="suppress">true to suppress writing the xsi namespace declaration</param>
public void SuppressNamespace( bool suppress )
{
    if (suppress)
    {
        // Marking the root attributes as already written prevents
        // writeRootAttributes() from emitting the xmlns:xsi declaration.
        fRootAttributesWritten = suppress;
        // Trick the .NET XMLWriter by telling it that the XSI namespace is already
        // declared in the current scope (WARNING, EXTREME HACK. May fail on future builds of .NET)
        try
        {
            Type xmlWriterType = fWriter.GetType();
            if ( xmlWriterType.Name == "XmlWellFormedWriter" )
            {
                // PushNamespace is a private framework method; invoke it via
                // reflection so xsi-prefixed attributes don't trigger a new
                // namespace declaration.
                MethodInfo mi =
                    xmlWriterType.GetMethod( "PushNamespace", BindingFlags.Instance | BindingFlags.NonPublic );
                if ( mi != null )
                {
                    mi.Invoke(fWriter, new object[] { XSI_PREFIX, XmlSchema.InstanceNamespace, true });
                }
            }
        } catch( Exception ex )
        {
            // Best-effort: if the framework internals changed, log and continue.
            Adk.Log.Error( "Unable to suppress namespace support on XmlWellFormedWriter: " + ex.Message, ex );
        }
    }
}
/// <summary>
/// Writes the root-level attributes at most once per document: optionally
/// the SIF "Version" attribute, plus the xmlns:xsi declaration when the
/// current formatter supports namespaces.
/// </summary>
/// <param name="includeVersion">true to also emit the "Version" attribute
/// (used when writing the SIF_Message root)</param>
private void writeRootAttributes( bool includeVersion)
{
    if (!fRootAttributesWritten)
    {
        if(includeVersion)
        {
            fWriter.WriteAttributeString( "Version", fVersion.ToString() );
        }
        if (fFormatter.SupportsNamespaces)
        {
            fWriter.WriteAttributeString("xmlns", XSI_PREFIX, null, XmlSchema.InstanceNamespace);
        }
    }
    // Mark as written even when nothing was emitted, so we only try once.
    fRootAttributesWritten = true;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
namespace System.Globalization
{
/*=================================ThaiBuddhistCalendar==========================
**
** ThaiBuddhistCalendar is based on Gregorian calendar. Its year value has
** an offset to the Gregorian calendar.
**
** Calendar support range:
** Calendar Minimum Maximum
** ========== ========== ==========
** Gregorian 0001/01/01 9999/12/31
** Thai 0544/01/01 10542/12/31
============================================================================*/
[System.Runtime.InteropServices.ComVisible(true)]
public class ThaiBuddhistCalendar : Calendar
{
    // Initialize our era info.
    // FIX: modifier order was "static internal"; convention is access modifier first.
    internal static EraInfo[] thaiBuddhistEraInfo = new EraInfo[] {
        new EraInfo( 1, 1, 1, 1, -543, 544, GregorianCalendar.MaxYear + 543) // era #, start year/month/day, yearOffset, minEraYear
    };

    //
    // The era value for the current era.
    //
    public const int ThaiBuddhistEra = 1;

    //internal static Calendar m_defaultInstance;

    // Performs all date arithmetic as Gregorian, shifted by the Thai year offset.
    internal GregorianCalendarHelper helper;

    [System.Runtime.InteropServices.ComVisible(false)]
    public override DateTime MinSupportedDateTime
    {
        get
        {
            return (DateTime.MinValue);
        }
    }

    [System.Runtime.InteropServices.ComVisible(false)]
    public override DateTime MaxSupportedDateTime
    {
        get
        {
            return (DateTime.MaxValue);
        }
    }

    public ThaiBuddhistCalendar()
    {
        helper = new GregorianCalendarHelper(this, thaiBuddhistEraInfo);
    }

    internal override CalendarId ID
    {
        get
        {
            return (CalendarId.THAI);
        }
    }

    public override DateTime AddMonths(DateTime time, int months)
    {
        return (helper.AddMonths(time, months));
    }

    public override DateTime AddYears(DateTime time, int years)
    {
        return (helper.AddYears(time, years));
    }

    public override int GetDaysInMonth(int year, int month, int era)
    {
        return (helper.GetDaysInMonth(year, month, era));
    }

    public override int GetDaysInYear(int year, int era)
    {
        return (helper.GetDaysInYear(year, era));
    }

    public override int GetDayOfMonth(DateTime time)
    {
        return (helper.GetDayOfMonth(time));
    }

    public override DayOfWeek GetDayOfWeek(DateTime time)
    {
        return (helper.GetDayOfWeek(time));
    }

    public override int GetDayOfYear(DateTime time)
    {
        return (helper.GetDayOfYear(time));
    }

    public override int GetMonthsInYear(int year, int era)
    {
        return (helper.GetMonthsInYear(year, era));
    }

    [SuppressMessage("Microsoft.Contracts", "CC1055")]  // Skip extra error checking to avoid *potential* AppCompat problems.
    [System.Runtime.InteropServices.ComVisible(false)]
    public override int GetWeekOfYear(DateTime time, CalendarWeekRule rule, DayOfWeek firstDayOfWeek)
    {
        return (helper.GetWeekOfYear(time, rule, firstDayOfWeek));
    }

    public override int GetEra(DateTime time)
    {
        return (helper.GetEra(time));
    }

    public override int GetMonth(DateTime time)
    {
        return (helper.GetMonth(time));
    }

    public override int GetYear(DateTime time)
    {
        return (helper.GetYear(time));
    }

    public override bool IsLeapDay(int year, int month, int day, int era)
    {
        return (helper.IsLeapDay(year, month, day, era));
    }

    public override bool IsLeapYear(int year, int era)
    {
        return (helper.IsLeapYear(year, era));
    }

    // Returns the leap month in a calendar year of the specified era.  This method returns 0
    // if this calendar does not have leap month, or this year is not a leap year.
    //
    [System.Runtime.InteropServices.ComVisible(false)]
    public override int GetLeapMonth(int year, int era)
    {
        return (helper.GetLeapMonth(year, era));
    }

    public override bool IsLeapMonth(int year, int month, int era)
    {
        return (helper.IsLeapMonth(year, month, era));
    }

    public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
    {
        return (helper.ToDateTime(year, month, day, hour, minute, second, millisecond, era));
    }

    public override int[] Eras
    {
        get
        {
            return (helper.Eras);
        }
    }

    // Default two-digit-year pivot: 2572 in the Thai Buddhist era (2029 Gregorian).
    private const int DEFAULT_TWO_DIGIT_YEAR_MAX = 2572;

    public override int TwoDigitYearMax
    {
        get
        {
            if (twoDigitYearMax == -1)
            {
                twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DEFAULT_TWO_DIGIT_YEAR_MAX);
            }
            return (twoDigitYearMax);
        }

        set
        {
            VerifyWritable();
            if (value < 99 || value > helper.MaxYear)
            {
                // BUGFIX: ParamName previously reported "year", but the value
                // being validated is the property setter's implicit "value".
                throw new ArgumentOutOfRangeException(
                            nameof(value),
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Range,
                                99,
                                helper.MaxYear));
            }
            twoDigitYearMax = value;
        }
    }

    public override int ToFourDigitYear(int year)
    {
        if (year < 0)
        {
            // nameof keeps the reported parameter name refactor-safe.
            throw new ArgumentOutOfRangeException(nameof(year),
                SR.ArgumentOutOfRange_NeedNonNegNum);
        }
        Contract.EndContractBlock();

        return (helper.ToFourDigitYear(year, this.TwoDigitYearMax));
    }
}
}
| |
/*
Copyright (c) 2014, Lars Brubaker
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
*/
using System;
using MatterHackers.Agg.Image;
using MatterHackers.Agg.Platform;
using MatterHackers.Agg.RasterizerScanline;
using MatterHackers.Agg.UI;
using MatterHackers.Agg.UI.Examples;
using MatterHackers.Agg.VertexSource;
namespace MatterHackers.Agg
{
/// <summary>
/// Image proxy that derives each pixel's alpha from its brightness: the sum
/// R+G+B (0..765) indexes a 768-entry lookup table, mapping bright (white)
/// pixels to transparent and dark pixels to opaque.
/// </summary>
internal class pattern_src_brightness_to_alpha_RGBA_Bytes : ImageProxy
{
    // Lookup table of 768 alpha values indexed by R+G+B (each 0..255).
    private static byte[] brightness_to_alpha =
    {
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 254, 254, 254, 254, 254,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
        254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 253, 253,
        253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 252,
        252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 251, 251, 251, 251, 251,
        251, 251, 251, 251, 250, 250, 250, 250, 250, 250, 250, 250, 249, 249, 249, 249,
        249, 249, 249, 248, 248, 248, 248, 248, 248, 248, 247, 247, 247, 247, 247, 246,
        246, 246, 246, 246, 246, 245, 245, 245, 245, 245, 244, 244, 244, 244, 243, 243,
        243, 243, 243, 242, 242, 242, 242, 241, 241, 241, 241, 240, 240, 240, 239, 239,
        239, 239, 238, 238, 238, 238, 237, 237, 237, 236, 236, 236, 235, 235, 235, 234,
        234, 234, 233, 233, 233, 232, 232, 232, 231, 231, 230, 230, 230, 229, 229, 229,
        228, 228, 227, 227, 227, 226, 226, 225, 225, 224, 224, 224, 223, 223, 222, 222,
        221, 221, 220, 220, 219, 219, 219, 218, 218, 217, 217, 216, 216, 215, 214, 214,
        213, 213, 212, 212, 211, 211, 210, 210, 209, 209, 208, 207, 207, 206, 206, 205,
        204, 204, 203, 203, 202, 201, 201, 200, 200, 199, 198, 198, 197, 196, 196, 195,
        194, 194, 193, 192, 192, 191, 190, 190, 189, 188, 188, 187, 186, 186, 185, 184,
        183, 183, 182, 181, 180, 180, 179, 178, 177, 177, 176, 175, 174, 174, 173, 172,
        171, 171, 170, 169, 168, 167, 166, 166, 165, 164, 163, 162, 162, 161, 160, 159,
        158, 157, 156, 156, 155, 154, 153, 152, 151, 150, 149, 148, 148, 147, 146, 145,
        144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129,
        128, 128, 127, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113,
        112, 111, 110, 109, 108, 107, 106, 105, 104, 102, 101, 100, 99, 98, 97, 96,
        95, 94, 93, 91, 90, 89, 88, 87, 86, 85, 84, 82, 81, 80, 79, 78,
        77, 75, 74, 73, 72, 71, 70, 69, 67, 66, 65, 64, 63, 61, 60, 59,
        58, 57, 56, 54, 53, 52, 51, 50, 48, 47, 46, 45, 44, 42, 41, 40,
        39, 37, 36, 35, 34, 33, 31, 30, 29, 28, 27, 25, 24, 23, 22, 20,
        19, 18, 17, 15, 14, 13, 12, 11, 9, 8, 7, 6, 4, 3, 2, 1
    };

    /// <summary>
    /// Wraps the source image in a BGRA-blended ImageBuffer proxy.
    /// </summary>
    /// <param name="rb">The source pattern image</param>
    public pattern_src_brightness_to_alpha_RGBA_Bytes(ImageBuffer rb)
        : base(new ImageBuffer(rb, new BlenderBGRA()))
    {
    }

    /// <summary>
    /// Returns the source pixel with its alpha replaced by the
    /// brightness-derived value from the lookup table.
    /// </summary>
    public override Color GetPixel(int x, int y)
    {
        Color c = linkedImage.GetPixel(x, y);
        c.Alpha0To255 = brightness_to_alpha[c.Red0To255 + c.Green0To255 + c.Blue0To255];
        return c;
    }
};
/// <summary>
/// AGG demo: draws editable bezier curves stroked with image-based line
/// patterns (ported from the AGG "line_patterns" example). Pattern bitmaps
/// 1.bmp..9.bmp must be present in the working directory.
/// </summary>
public class line_patterns_application : GuiWidget, IDemoApp
{
    private Color m_ctrl_color;
    private CheckBox m_approximation_method;
    // Nine draggable bezier curve controls, one per pattern image.
    private bezier_ctrl m_curve1 = new bezier_ctrl();
    private bezier_ctrl m_curve2 = new bezier_ctrl();
    private bezier_ctrl m_curve3 = new bezier_ctrl();
    private bezier_ctrl m_curve4 = new bezier_ctrl();
    private bezier_ctrl m_curve5 = new bezier_ctrl();
    private bezier_ctrl m_curve6 = new bezier_ctrl();
    private bezier_ctrl m_curve7 = new bezier_ctrl();
    private bezier_ctrl m_curve8 = new bezier_ctrl();
    private bezier_ctrl m_curve9 = new bezier_ctrl();
    private Slider m_scale_x;
    private Slider m_start_x;

    // Pattern bitmaps, loaded once by the static constructor.
    public static ImageBuffer rbuf_img0 = new ImageBuffer();
    public static ImageBuffer rbuf_img1 = new ImageBuffer();
    public static ImageBuffer rbuf_img2 = new ImageBuffer();
    public static ImageBuffer rbuf_img3 = new ImageBuffer();
    public static ImageBuffer rbuf_img4 = new ImageBuffer();
    public static ImageBuffer rbuf_img5 = new ImageBuffer();
    public static ImageBuffer rbuf_img6 = new ImageBuffer();
    public static ImageBuffer rbuf_img7 = new ImageBuffer();
    public static ImageBuffer rbuf_img8 = new ImageBuffer();

    // Loads the nine pattern bitmaps; throws with download instructions when
    // any are missing.
    static line_patterns_application()
    {
        if (!ImageIO.LoadImageData("1.bmp", line_patterns_application.rbuf_img0)
            || !ImageIO.LoadImageData("2.bmp", line_patterns_application.rbuf_img1)
            || !ImageIO.LoadImageData("3.bmp", line_patterns_application.rbuf_img2)
            || !ImageIO.LoadImageData("4.bmp", line_patterns_application.rbuf_img3)
            || !ImageIO.LoadImageData("5.bmp", line_patterns_application.rbuf_img4)
            || !ImageIO.LoadImageData("6.bmp", line_patterns_application.rbuf_img5)
            || !ImageIO.LoadImageData("7.bmp", line_patterns_application.rbuf_img6)
            || !ImageIO.LoadImageData("8.bmp", line_patterns_application.rbuf_img7)
            || !ImageIO.LoadImageData("9.bmp", line_patterns_application.rbuf_img8))
        {
            // NOTE(review): the "%s" placeholders are leftovers from the C++
            // original's printf-style message and are never substituted here.
            String buf = "There must be files 1%s...9%s\n"
                + "Download and unzip:\n"
                + "http://www.antigrain.com/line_patterns.bmp.zip\n"
                + "or\n"
                + "http://www.antigrain.com/line_patterns.ppm.tar.gz\n";
            throw new System.Exception(buf);
        }
    }

    // Builds the UI: nine bezier controls, the approximation-method checkbox,
    // and the Scale X / Start X sliders.
    public line_patterns_application()
    {
        AnchorAll();
        m_ctrl_color = new Color(0, 0.3, 0.5, 0.3);
        m_scale_x = new Slider(5.0, 5.0, 240.0, 12.0);
        m_start_x = new Slider(250.0, 5.0, 495.0, 12.0);
        m_approximation_method = new CheckBox(10, 30, "Approximation Method = curve_div");
        m_curve1.line_color(m_ctrl_color);
        m_curve2.line_color(m_ctrl_color);
        m_curve3.line_color(m_ctrl_color);
        m_curve4.line_color(m_ctrl_color);
        m_curve5.line_color(m_ctrl_color);
        m_curve6.line_color(m_ctrl_color);
        m_curve7.line_color(m_ctrl_color);
        m_curve8.line_color(m_ctrl_color);
        m_curve9.line_color(m_ctrl_color);
        // Initial control-point positions for each curve (x1,y1 .. x4,y4).
        m_curve1.curve(64, 19, 14, 126, 118, 266, 19, 265);
        m_curve2.curve(112, 113, 178, 32, 200, 132, 125, 438);
        m_curve3.curve(401, 24, 326, 149, 285, 11, 177, 77);
        m_curve4.curve(188, 427, 129, 295, 19, 283, 25, 410);
        m_curve5.curve(451, 346, 302, 218, 265, 441, 459, 400);
        m_curve6.curve(454, 198, 14, 13, 220, 291, 483, 283);
        m_curve7.curve(301, 398, 355, 231, 209, 211, 170, 353);
        m_curve8.curve(484, 101, 222, 33, 486, 435, 487, 138);
        m_curve9.curve(143, 147, 11, 45, 83, 427, 132, 197);
        AddChild(m_curve1);
        AddChild(m_curve2);
        AddChild(m_curve3);
        AddChild(m_curve4);
        AddChild(m_curve5);
        AddChild(m_curve6);
        AddChild(m_curve7);
        AddChild(m_curve8);
        AddChild(m_curve9);
        AddChild(m_approximation_method);
        m_scale_x.Text = "Scale X=%.2f";
        m_scale_x.SetRange(0.2, 3.0);
        m_scale_x.Value = 1.0;
        AddChild(m_scale_x);
        m_start_x.Text = "Start X=%.2f";
        m_start_x.SetRange(0.0, 10.0);
        m_start_x.Value = 0.0;
        AddChild(m_start_x);
        m_approximation_method.CheckedStateChanged += m_approximation_method_CheckedStateChanged;
    }

    public string Title { get; } = "Line Patterns";

    public string DemoCategory { get; } = "Vector";

    public string DemoDescription { get; } = "AGG Example. Drawing Lines with Image Patterns";

    // Toggles every curve between curve_div and curve_inc approximation and
    // updates the checkbox caption to match.
    private void m_approximation_method_CheckedStateChanged(object sender, EventArgs e)
    {
        Curves.CurveApproximationMethod method = Curves.CurveApproximationMethod.curve_div;
        if (m_approximation_method.Checked)
        {
            method = Curves.CurveApproximationMethod.curve_inc;
            m_approximation_method.Text = "Approximation Method = curve_inc";
        }
        else
        {
            m_approximation_method.Text = "Approximation Method = curve_div";
        }
        m_curve1.curve().approximation_method(method);
        m_curve2.curve().approximation_method(method);
        m_curve3.curve().approximation_method(method);
        m_curve4.curve().approximation_method(method);
        m_curve5.curve().approximation_method(method);
        m_curve6.curve().approximation_method(method);
        m_curve7.curve().approximation_method(method);
        m_curve8.curve().approximation_method(method);
        m_curve9.curve().approximation_method(method);
    }

    // Loads the given pattern into the pattern container, applies the current
    // scale/start slider values, and rasterizes the curve's path.
    private void draw_curve(line_image_pattern patt, rasterizer_outline_aa ras, ImageLineRenderer ren,
        pattern_src_brightness_to_alpha_RGBA_Bytes src, IVertexSource vs)
    {
        patt.create(src);
        ren.scale_x(m_scale_x.Value);
        ren.start_x(m_start_x.Value);
        ras.add_path(vs);
    }

    public override void OnDraw(Graphics2D graphics2D)
    {
        ImageClippingProxy ren_base = new ImageClippingProxy(graphics2D.DestImage);
        ren_base.clear(new ColorF(1.0, 1.0, .95));

        // NOTE(review): ras and sl below are never used in this method
        // (leftovers from the C++ original) — candidates for removal.
        ScanlineRasterizer ras = new ScanlineRasterizer();
        ScanlineCachePacked8 sl = new ScanlineCachePacked8();

        // Pattern source. Must have an interface:
        // width() const
        // height() const
        // pixel(int x, int y) const
        // Any agg::renderer_base<> or derived
        // is good for the use as a source.
        //-----------------------------------
        pattern_src_brightness_to_alpha_RGBA_Bytes p1 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img0);
        pattern_src_brightness_to_alpha_RGBA_Bytes p2 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img1);
        pattern_src_brightness_to_alpha_RGBA_Bytes p3 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img2);
        pattern_src_brightness_to_alpha_RGBA_Bytes p4 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img3);
        pattern_src_brightness_to_alpha_RGBA_Bytes p5 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img4);
        pattern_src_brightness_to_alpha_RGBA_Bytes p6 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img5);
        pattern_src_brightness_to_alpha_RGBA_Bytes p7 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img6);
        pattern_src_brightness_to_alpha_RGBA_Bytes p8 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img7);
        pattern_src_brightness_to_alpha_RGBA_Bytes p9 = new pattern_src_brightness_to_alpha_RGBA_Bytes(rbuf_img8);

        //pattern_filter_bilinear_RGBA_Bytes fltr = new pattern_filter_bilinear_RGBA_Bytes(); // Filtering functor

        // agg::line_image_pattern is the main container for the patterns. It creates
        // a copy of the patterns extended according to the needs of the filter.
        // agg::line_image_pattern can operate with arbitrary image width, but if the
        // width of the pattern is power of 2, it's better to use the modified
        // version agg::line_image_pattern_pow2 because it works about 15-25 percent
        // faster than agg::line_image_pattern (because of using simple masking instead
        // of expensive '%' operation).

        //-- Create with specifying the source
        //-- Create uninitialized and set the source
        line_image_pattern patt = new line_image_pattern(new pattern_filter_bilinear_RGBA_Bytes());
        ImageLineRenderer ren_img = new ImageLineRenderer(ren_base, patt);
        rasterizer_outline_aa ras_img = new rasterizer_outline_aa(ren_img);

        // Only the first curve is drawn; the rest remain disabled below.
        draw_curve(patt, ras_img, ren_img, p1, m_curve1.curve());
        /*
        draw_curve(patt, ras_img, ren_img, p2, m_curve2.curve());
        draw_curve(patt, ras_img, ren_img, p3, m_curve3.curve());
        draw_curve(patt, ras_img, ren_img, p4, m_curve4.curve());
        draw_curve(patt, ras_img, ren_img, p5, m_curve5.curve());
        draw_curve(patt, ras_img, ren_img, p6, m_curve6.curve());
        draw_curve(patt, ras_img, ren_img, p7, m_curve7.curve());
        draw_curve(patt, ras_img, ren_img, p8, m_curve8.curve());
        draw_curve(patt, ras_img, ren_img, p9, m_curve9.curve());
        */

        base.OnDraw(graphics2D);
    }

    // Entry point: hosts the demo widget in a 500x450 system window.
    [STAThread]
    public static void Main(string[] args)
    {
        var demoWidget = new line_patterns_application();

        var systemWindow = new SystemWindow(500, 450);
        systemWindow.Title = demoWidget.Title;
        systemWindow.AddChild(demoWidget);
        systemWindow.ShowAsSystemWindow();
    }
}
}
| |
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Util;
using Spatial4n.Core.Context;
using Spatial4n.Core.Distance;
using Spatial4n.Core.Shapes;
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace Lucene.Net.Spatial.Prefix
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Finds docs where its indexed shape is
/// <see cref="Queries.SpatialOperation.IsWithin">WITHIN</see>
/// the query shape. It works by looking at cells outside of the query
/// shape to ensure documents there are excluded. By default, it will
/// examine all cells, and it's fairly slow. If you know that the indexed shapes
/// are never comprised of multiple disjoint parts (which also means it is not multi-valued),
/// then you can pass <c>SpatialPrefixTree.GetDistanceForLevel(maxLevels)</c> as
/// the <c>queryBuffer</c> constructor parameter to minimally look this distance
/// beyond the query shape's edge. Even if the indexed shapes are sometimes
/// comprised of multiple disjoint parts, you might want to use this option with
/// a large buffer as a faster approximation with minimal false-positives.
///
/// @lucene.experimental
/// </summary>
public class WithinPrefixTreeFilter : AbstractVisitingPrefixTreeFilter
{
    // TODO LUCENE-4869: implement faster algorithm based on filtering out false-positives of a
    // minimal query buffer by looking in a DocValues cache holding a representative
    // point of each disjoint component of a document's shape(s).

    // Query shape expanded outward by the constructor's queryBuffer distance;
    // cells are visited against this buffered shape so that nearby "outside"
    // documents are seen and can be excluded.
    private readonly IShape bufferedQueryShape;//if null then the whole world

    /// <summary>
    /// See <see cref="AbstractVisitingPrefixTreeFilter.AbstractVisitingPrefixTreeFilter(IShape, string, SpatialPrefixTree, int, int)"/>.
    /// <c>queryBuffer</c> is the (minimum) distance beyond the query shape edge
    /// where non-matching documents are looked for so they can be excluded. If
    /// -1 is used then the whole world is examined (a good default for correctness).
    /// </summary>
    public WithinPrefixTreeFilter(IShape queryShape, string fieldName, SpatialPrefixTree grid,
                                  int detailLevel, int prefixGridScanLevel, double queryBuffer)
        : base(queryShape, fieldName, grid, detailLevel, prefixGridScanLevel)
    {
        if (queryBuffer == -1)
        {
            // No buffer requested: examine the whole world (bufferedQueryShape
            // stays null, which GetSubCells() treats as "no filter").
            bufferedQueryShape = null;
        }
        else
        {
            bufferedQueryShape = BufferShape(queryShape, queryBuffer);
        }
    }

    /// <summary>
    /// Returns a new shape that is larger than <paramref name="shape"/> by
    /// <paramref name="distErr"/> in every direction.
    /// </summary>
    /// <param name="shape">The shape to buffer (point, circle, or anything with a bounding box).</param>
    /// <param name="distErr">The buffer distance; must be &gt; 0.</param>
    /// <exception cref="ArgumentException">If <paramref name="distErr"/> is not positive.</exception>
    protected virtual IShape BufferShape(IShape shape, double distErr)
    {
        //TODO move this generic code elsewhere? Spatial4j?
        if (distErr <= 0)
        {
            throw new ArgumentException("distErr must be > 0");
        }
        SpatialContext ctx = m_grid.SpatialContext;
        if (shape is IPoint)
        {
            // A buffered point is simply a circle of radius distErr.
            return ctx.MakeCircle((IPoint)shape, distErr);
        }
        else if (shape is ICircle)
        {
            var circle = (ICircle)shape;
            double newDist = circle.Radius + distErr;
            // On a geodesic context a circle's radius cannot exceed 180 degrees.
            if (ctx.IsGeo && newDist > 180)
            {
                newDist = 180;
            }
            return ctx.MakeCircle(circle.Center, newDist);
        }
        else
        {
            // General case: buffer the shape's bounding box on all four sides.
            IRectangle bbox = shape.BoundingBox;
            double newMinX = bbox.MinX - distErr;
            double newMaxX = bbox.MaxX + distErr;
            double newMinY = bbox.MinY - distErr;
            double newMaxY = bbox.MaxY + distErr;
            if (ctx.IsGeo)
            {
                // Clamp latitude to the poles.
                if (newMinY < -90)
                {
                    newMinY = -90;
                }
                if (newMaxY > 90)
                {
                    newMaxY = 90;
                }
                // If the box touches a pole or would wrap more than the full
                // longitude range, it must cover all longitudes.
                if (newMinY == -90 || newMaxY == 90 || bbox.Width + 2 * distErr > 360)
                {
                    newMinX = -180;
                    newMaxX = 180;
                }
                else
                {
                    // Normalize longitudes back into [-180, 180) (dateline wrap).
                    newMinX = DistanceUtils.NormLonDEG(newMinX);
                    newMaxX = DistanceUtils.NormLonDEG(newMaxX);
                }
            }
            else
            {
                //restrict to world bounds
                newMinX = Math.Max(newMinX, ctx.WorldBounds.MinX);
                newMaxX = Math.Min(newMaxX, ctx.WorldBounds.MaxX);
                newMinY = Math.Max(newMinY, ctx.WorldBounds.MinY);
                newMaxY = Math.Min(newMaxY, ctx.WorldBounds.MaxY);
            }
            return ctx.MakeRectangle(newMinX, newMaxX, newMinY, newMaxY);
        }
    }

    /// <summary>
    /// Builds the doc id set by walking the prefix tree with the nested visitor.
    /// </summary>
    /// <exception cref="System.IO.IOException"></exception>
    public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
    {
        return new VisitorTemplateAnonymousHelper(this, context, acceptDocs, true).GetDocIdSet();
    }

    #region Nested type: VisitorTemplateAnonymousHelper

    /// <summary>
    /// Visitor that accumulates two bitsets: docs with cells inside the query
    /// shape and docs with cells outside it. The final result is
    /// "inside AND NOT outside" — a doc is WITHIN only if none of its indexed
    /// cells fall outside the query shape.
    /// </summary>
    private sealed class VisitorTemplateAnonymousHelper : VisitorTemplate
    {
        private FixedBitSet inside;
        private FixedBitSet outside;
        // Relation of the last visited cell to the original query shape,
        // cached by Visit() so VisitLeaf() need not recompute it.
        private SpatialRelation visitRelation;

        public VisitorTemplateAnonymousHelper(WithinPrefixTreeFilter outerInstance, AtomicReaderContext context,
                                              IBits acceptDocs, bool hasIndexedLeaves)
            : base(outerInstance, context, acceptDocs, hasIndexedLeaves)
        {
        }

        protected internal override void Start()
        {
            inside = new FixedBitSet(m_maxDoc);
            outside = new FixedBitSet(m_maxDoc);
        }

        protected internal override DocIdSet Finish()
        {
            // Keep only docs that were never seen outside the query shape.
            inside.AndNot(outside);
            return inside;
        }

        protected internal override IEnumerator<Cell> FindSubCellsToVisit(Cell cell)
        {
            //use buffered query shape instead of orig. Works with null too.
            return cell.GetSubCells(((WithinPrefixTreeFilter)m_outerInstance).bufferedQueryShape).GetEnumerator();
        }

        protected internal override bool Visit(Cell cell)
        {
            //cell.relate is based on the bufferedQueryShape; we need to examine what
            // the relation is against the queryShape
            visitRelation = cell.Shape.Relate(m_outerInstance.m_queryShape);
            if (visitRelation == SpatialRelation.WITHIN)
            {
                // Entire cell is inside the query; no need to descend further.
                CollectDocs(inside);
                return false;
            }
            else if (visitRelation == SpatialRelation.DISJOINT)
            {
                // Entire cell is outside: mark all its docs as excluded.
                CollectDocs(outside);
                return false;
            }
            else if (cell.Level == m_outerInstance.m_detailLevel)
            {
                // At maximum resolution an intersecting cell is treated as
                // inside (best approximation available at this level).
                CollectDocs(inside);
                return false;
            }
            return true; // partial overlap above detail level: keep descending
        }

        /// <exception cref="System.IO.IOException"></exception>
        protected internal override void VisitLeaf(Cell cell)
        {
            //visitRelation is declared as a field, populated by visit() so we don't recompute it
            Debug.Assert(m_outerInstance.m_detailLevel != cell.Level);
            Debug.Assert(visitRelation == cell.Shape.Relate(m_outerInstance.m_queryShape));
            if (AllCellsIntersectQuery(cell, visitRelation))
            {
                CollectDocs(inside);
            }
            else
            {
                CollectDocs(outside);
            }
        }

        /// <summary>
        /// Returns true if the provided cell, and all its sub-cells down to
        /// detailLevel all intersect the queryShape.
        /// </summary>
        private bool AllCellsIntersectQuery(Cell cell, SpatialRelation relate/*cell to query*/)
        {
            if (relate == SpatialRelation.NOT_SET)
            {
                relate = cell.Shape.Relate(m_outerInstance.m_queryShape);
            }
            if (cell.Level == m_outerInstance.m_detailLevel)
            {
                return relate.Intersects();
            }
            if (relate == SpatialRelation.WITHIN)
            {
                return true;
            }
            if (relate == SpatialRelation.DISJOINT)
            {
                return false;
            }
            // Note: Generating all these cells just to determine intersection is not ideal.
            // It was easy to implement but could be optimized. For example if the docs
            // in question are already marked in the 'outside' bitset then it can be avoided.
            ICollection<Cell> subCells = cell.GetSubCells(null);
            foreach (Cell subCell in subCells)
            {
                if (!AllCellsIntersectQuery(subCell, SpatialRelation.NOT_SET))
                {
                    //recursion
                    return false;
                }
            }
            return true;
        }

        /// <exception cref="System.IO.IOException"></exception>
        protected internal override void VisitScanned(Cell cell)
        {
            // Scanned cells carry no cached relation, so compute it from scratch.
            if (AllCellsIntersectQuery(cell, SpatialRelation.NOT_SET))
            {
                CollectDocs(inside);
            }
            else
            {
                CollectDocs(outside);
            }
        }
    }

    #endregion
}
}
| |
//! \file KiriKiriCx.cs
//! \date Sun Sep 07 06:50:11 2014
//! \brief KiriKiri Cx encryption scheme implementation.
//
// Copyright (C) 2014-2015 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.Serialization;
namespace GameRes.Formats.KiriKiri
{
/// <summary>
/// Thrown when CxEncryption bytecode cannot be generated or executed
/// (e.g. oversized programs, truncated operands, imbalanced stack).
/// </summary>
public class CxProgramException : ApplicationException
{
    public CxProgramException (string message)
        : base (message)
    {
    }
}
/// <summary>
/// Serializable parameter set for a game-specific Cx encryption variant.
/// Field names and types are part of the serialization contract.
/// </summary>
[Serializable]
public class CxScheme
{
    // Mask applied to the entry hash and additive offset; together they define
    // the file position at which the decryption key changes (see CxEncryption.GetBaseOffset).
    public uint Mask;
    public uint Offset;
    // Permutation tables selecting which opcode sequence each random draw maps to
    // in CxEncryption.EmitProlog / EmitOddBranch / EmitEvenBranch.
    public byte[] PrologOrder;
    public byte[] OddBranchOrder;
    public byte[] EvenBranchOrder;
    // Lookup table read by the MOV_EAX_INDIRECT bytecode during program execution.
    public uint[] ControlBlock;
}
/// <summary>
/// KiriKiri "Cx" XP3 encryption. For each entry a small pseudo-random program
/// (bytecode emulating the engine's generated x86 code) is built from the
/// entry hash and executed to derive XOR keys for the data.
/// </summary>
[Serializable]
public class CxEncryption : ICrypt
{
    // Hash mask and additive offset defining where the key switches (see GetBaseOffset).
    private uint m_mask;
    private uint m_offset;
    // Scheme-specific permutation tables used when emitting program fragments.
    protected byte[] PrologOrder;
    protected byte[] OddBranchOrder;
    protected byte[] EvenBranchOrder;
    // Lookup table read by the MOV_EAX_INDIRECT bytecode.
    protected uint[] ControlBlock;
    // Cache of generated programs, one slot per 7-bit seed.
    // Not serialized; rebuilt on demand and after deserialization.
    [NonSerialized]
    CxProgram[] m_program_list = new CxProgram[0x80];

    // Recreate the (non-serialized) program cache after deserialization.
    [OnDeserialized()]
    void PostDeserialization (StreamingContext context)
    {
        m_program_list = new CxProgram[0x80];
    }

    public CxEncryption (CxScheme scheme)
    {
        m_mask = scheme.Mask;
        m_offset = scheme.Offset;
        PrologOrder = scheme.PrologOrder;
        OddBranchOrder = scheme.OddBranchOrder;
        EvenBranchOrder = scheme.EvenBranchOrder;
        ControlBlock = scheme.ControlBlock;
    }

    // File offset at which the decryption key changes for this entry hash.
    uint GetBaseOffset (uint hash)
    {
        return (hash & m_mask) + m_offset;
    }

    // Decrypt a single byte. Bytes at or past the base offset use the folded
    // key (hash >> 16) ^ hash, matching the buffer overload below.
    public override byte Decrypt (Xp3Entry entry, long offset, byte value)
    {
        uint key = entry.Hash;
        uint base_offset = GetBaseOffset (key);
        if (offset >= base_offset)
        {
            key = (key >> 16) ^ key;
        }
        var buffer = new byte[1] { value };
        Decode (key, offset, buffer, 0, 1);
        return buffer[0];
    }

    // Decrypt a byte range, splitting it at the base offset: the leading part
    // (before base_offset) uses the raw hash as key, the rest uses the folded key.
    public override void Decrypt (Xp3Entry entry, long offset, byte[] buffer, int pos, int count)
    {
        uint key = entry.Hash;
        uint base_offset = GetBaseOffset (key);
        if (offset < base_offset)
        {
            int base_length = Math.Min ((int)(base_offset - offset), count);
            Decode (key, offset, buffer, pos, base_length);
            offset += base_length;
            pos += base_length;
            count -= base_length;
        }
        if (count > 0)
        {
            key = (key >> 16) ^ key;
            Decode (key, offset, buffer, pos, count);
        }
    }

    // Core XOR pass. The generated program yields three keys:
    // key1/key2 are two absolute file positions whose bytes get an extra XOR,
    // key3 is XORed into every byte of the range.
    void Decode (uint key, long offset, byte[] buffer, int pos, int count)
    {
        Tuple<uint, uint> ret = ExecuteXCode (key);
        uint key1 = ret.Item2 >> 16;
        uint key2 = ret.Item2 & 0xffff;
        byte key3 = (byte)(ret.Item1);
        if (key1 == key2)
            key2 = (key2+1) & 0xffff;   // the two special positions must differ
        if (0 == key3)
            key3 = 1;                   // stream key must be non-zero
        // Trace.WriteLine (string.Format ("[offset:{3:x4}] [key1:{0:x4}] [key2:{1:x4}] [key3:{2:x6}]", key1, key2, key3, offset));
        if ((key2 >= offset) && (key2 < offset + count))
            buffer[pos + key2 - offset] ^= (byte)(ret.Item1 >> 16);
        if ((key1 >= offset) && (key1 < offset + count))
            buffer[pos + key1 - offset] ^= (byte)(ret.Item1 >> 8);
        for (int i = 0; i < count; ++i)
            buffer[pos + i] ^= key3;
    }

    // XOR cipher is symmetric, so encryption is the same operation as decryption.
    public override void Encrypt (Xp3Entry entry, long offset, byte[] values, int pos, int count)
    {
        Decrypt (entry, offset, values, pos, count);
    }

    // Select (and lazily generate) the program for the hash's low 7 bits,
    // then run it on the remaining hash bits and on their complement.
    Tuple<uint, uint> ExecuteXCode (uint hash)
    {
        uint seed = hash & 0x7f;
        if (null == m_program_list[seed])
        {
            m_program_list[seed] = GenerateProgram (seed);
        }
        hash >>= 7;
        uint ret1 = m_program_list[seed].Execute (hash);
        uint ret2 = m_program_list[seed].Execute (~hash);
        return new Tuple<uint, uint> (ret1, ret2);
    }

    // Build a program for the given seed, retrying with a smaller nesting depth
    // (stage) whenever the emitted code would exceed CxProgram.LengthLimit.
    CxProgram GenerateProgram (uint seed)
    {
        var program = new CxProgram (seed, ControlBlock);
        for (int stage = 5; stage > 0; --stage)
        {
            if (EmitCode (program, stage))
                return program;
            // Trace.WriteLine (string.Format ("stage {0} failed for seed {1}", stage, seed), "GenerateProgram");
            program.Clear();
        }
        throw new CxProgramException ("Overly large CxEncryption bytecode");
    }

    // Emit the full program: register-save prolog, recursive body, epilog, return.
    // Each Emit* returns false when the length limit would be exceeded.
    bool EmitCode (CxProgram program, int stage)
    {
        return program.EmitNop (5)                          // 0x57 0x56 0x53 0x51 0x52
               && program.Emit (CxByteCode.MOV_EDI_ARG, 4)  // 0x8b 0x7c 0x24 0x18
               && EmitBody (program, stage)
               && program.EmitNop (5)                       // 0x5a 0x59 0x5b 0x5e 0x5f
               && program.Emit (CxByteCode.RETN);           // 0xc3
    }

    // Recursive body: combines two sub-expressions through EBX and an "odd"
    // branch operation. At stage 1 it bottoms out in a prolog (value load).
    bool EmitBody (CxProgram program, int stage)
    {
        if (1 == stage)
            return EmitProlog (program);
        if (!program.Emit (CxByteCode.PUSH_EBX)) // 0x53
            return false;
        if (0 != (program.GetRandom() & 1))
        {
            if (!EmitBody (program, stage - 1))
                return false;
        }
        else if (!EmitBody2 (program, stage - 1))
            return false;
        if (!program.Emit (CxByteCode.MOV_EBX_EAX, 2)) // 0x89 0xc3
            return false;
        if (0 != (program.GetRandom() & 1))
        {
            if (!EmitBody (program, stage - 1))
                return false;
        }
        else if (!EmitBody2 (program, stage - 1))
            return false;
        return EmitOddBranch (program) && program.Emit (CxByteCode.POP_EBX); // 0x5b
    }

    // Alternative body: single sub-expression followed by an "even" branch
    // (a unary transformation of EAX).
    bool EmitBody2 (CxProgram program, int stage)
    {
        if (1 == stage)
            return EmitProlog (program);
        bool rc = true;
        if (0 != (program.GetRandom() & 1))
            rc = EmitBody (program, stage - 1);
        else
            rc = EmitBody2 (program, stage - 1);
        return rc && EmitEvenBranch (program);
    }

    // Leaf of the expression tree: load EAX from the control block, from the
    // hash argument (EDI), or with a random constant. PrologOrder permutes
    // which case each random draw selects.
    bool EmitProlog (CxProgram program)
    {
        bool rc = true;
        switch (PrologOrder[program.GetRandom() % 3])
        {
        case 2:
            // MOV EAX, (Random() & 0x3ff)
            // MOV EAX, EncryptionControlBlock[EAX]
            rc = program.EmitNop (5)                            // 0xbe
                 && program.Emit (CxByteCode.MOV_EAX_IMMED, 2)  // 0x8b 0x86
                 && program.EmitUInt32 (program.GetRandom() & 0x3ff)
                 && program.Emit (CxByteCode.MOV_EAX_INDIRECT, 0);
            break;
        case 1:
            rc = program.Emit (CxByteCode.MOV_EAX_EDI, 2); // 0x8b 0xc7
            break;
        case 0:
            // MOV EAX, Random()
            rc = program.Emit (CxByteCode.MOV_EAX_IMMED) // 0xb8
                 && program.EmitRandom();
            break;
        }
        return rc;
    }

    // Unary transformation of EAX; EvenBranchOrder permutes which of the
    // eight operations each random draw selects.
    bool EmitEvenBranch (CxProgram program)
    {
        bool rc = true;
        switch (EvenBranchOrder[program.GetRandom() & 7])
        {
        case 0:
            rc = program.Emit (CxByteCode.NOT_EAX, 2); // 0xf7 0xd0
            break;
        case 1:
            rc = program.Emit (CxByteCode.DEC_EAX); // 0x48
            break;
        case 2:
            rc = program.Emit (CxByteCode.NEG_EAX, 2); // 0xf7 0xd8
            break;
        case 3:
            rc = program.Emit (CxByteCode.INC_EAX); // 0x40
            break;
        case 4:
            // EAX = ~ControlBlock[EAX & 0x3ff]
            rc = program.EmitNop (5)                                // 0xbe
                 && program.Emit (CxByteCode.AND_EAX_IMMED)         // 0x25
                 && program.EmitUInt32 (0x3ff)
                 && program.Emit (CxByteCode.MOV_EAX_INDIRECT, 3);  // 0x8b 0x04 0x86
            break;
        case 5:
            // Swap odd and even bits of EAX.
            rc = program.Emit (CxByteCode.PUSH_EBX)             // 0x53
                 && program.Emit (CxByteCode.MOV_EBX_EAX, 2)    // 0x89 0xc3
                 && program.Emit (CxByteCode.AND_EBX_IMMED, 2)  // 0x81 0xe3
                 && program.EmitUInt32 (0xaaaaaaaa)
                 && program.Emit (CxByteCode.AND_EAX_IMMED)     // 0x25
                 && program.EmitUInt32 (0x55555555)
                 && program.Emit (CxByteCode.SHR_EBX_1, 2)      // 0xd1 0xeb
                 && program.Emit (CxByteCode.SHL_EAX_1, 2)      // 0xd1 0xe0
                 && program.Emit (CxByteCode.OR_EAX_EBX, 2)     // 0x09 0xd8
                 && program.Emit (CxByteCode.POP_EBX);          // 0x5b
            break;
        case 6:
            rc = program.Emit (CxByteCode.XOR_EAX_IMMED) // 0x35
                 && program.EmitRandom();
            break;
        case 7:
            if (0 != (program.GetRandom() & 1))
                rc = program.Emit (CxByteCode.ADD_EAX_IMMED); // 0x05
            else
                rc = program.Emit (CxByteCode.SUB_EAX_IMMED); // 0x2d
            rc = rc && program.EmitRandom();
            break;
        }
        return rc;
    }

    // Binary combination of EAX with EBX; OddBranchOrder permutes which of the
    // six operations each random draw selects.
    bool EmitOddBranch (CxProgram program)
    {
        bool rc = true;
        switch (OddBranchOrder[program.GetRandom() % 6])
        {
        case 0:
            // EAX >>= (EBX & 0x0f)
            rc = program.Emit (CxByteCode.PUSH_ECX)             // 0x51
                 && program.Emit (CxByteCode.MOV_ECX_EBX, 2)    // 0x89 0xd9
                 && program.Emit (CxByteCode.AND_ECX_0F, 3)     // 0x83 0xe1 0x0f
                 && program.Emit (CxByteCode.SHR_EAX_CL, 2)     // 0xd3 0xe8
                 && program.Emit (CxByteCode.POP_ECX);          // 0x59
            break;
        case 1:
            // EAX <<= (EBX & 0x0f)
            rc = program.Emit (CxByteCode.PUSH_ECX)             // 0x51
                 && program.Emit (CxByteCode.MOV_ECX_EBX, 2)    // 0x89 0xd9
                 && program.Emit (CxByteCode.AND_ECX_0F, 3)     // 0x83 0xe1 0x0f
                 && program.Emit (CxByteCode.SHL_EAX_CL, 2)     // 0xd3 0xe0
                 && program.Emit (CxByteCode.POP_ECX);          // 0x59
            break;
        case 2:
            rc = program.Emit (CxByteCode.ADD_EAX_EBX, 2); // 0x01 0xd8
            break;
        case 3:
            // EAX = EBX - EAX
            rc = program.Emit (CxByteCode.NEG_EAX, 2)           // 0xf7 0xd8
                 && program.Emit (CxByteCode.ADD_EAX_EBX, 2);   // 0x01 0xd8
            break;
        case 4:
            rc = program.Emit (CxByteCode.IMUL_EAX_EBX, 3); // 0x0f 0xaf 0xc3
            break;
        case 5:
            rc = program.Emit (CxByteCode.SUB_EAX_EBX, 2); // 0x29 0xd8
            break;
        }
        return rc;
    }
}
/// <summary>
/// Bytecode interpreted by <c>CxProgram.Execute</c>, each value modelling one
/// x86 instruction of the engine's generated code. Values at or above
/// <see cref="IMMED"/> (bit 0x100 set) are followed in the code stream by a
/// 32-bit inline operand.
/// </summary>
enum CxByteCode
{
    NOP,
    RETN,
    MOV_EDI_ARG,    // load the hash argument into EDI
    PUSH_EBX,
    POP_EBX,
    PUSH_ECX,
    POP_ECX,
    MOV_EAX_EBX,
    MOV_EBX_EAX,
    MOV_ECX_EBX,
    MOV_EAX_CONTROL_BLOCK,
    MOV_EAX_EDI,
    MOV_EAX_INDIRECT,   // EAX = ~ControlBlock[EAX] (bounds-checked at runtime)
    ADD_EAX_EBX,
    SUB_EAX_EBX,
    IMUL_EAX_EBX,
    AND_ECX_0F,
    SHR_EBX_1,
    SHL_EAX_1,
    SHR_EAX_CL,
    SHL_EAX_CL,
    OR_EAX_EBX,
    NOT_EAX,
    NEG_EAX,
    DEC_EAX,
    INC_EAX,
    IMMED = 0x100,  // marker bit: opcodes below carry a 32-bit operand word
    MOV_EAX_IMMED,
    AND_EBX_IMMED,
    AND_EAX_IMMED,
    XOR_EAX_IMMED,
    ADD_EAX_IMMED,
    SUB_EAX_IMMED,
}
/// <summary>
/// Emits and interprets CxByteCode programs. The program models a piece of
/// generated x86 code; <see cref="LengthLimit"/> bounds the size of the
/// *emulated machine code* in bytes, which is why each Emit method takes a
/// byte length rather than counting bytecode words.
/// </summary>
internal class CxProgram
{
    // Maximum size (in emulated x86 bytes) of a generated program.
    public const int LengthLimit = 0x80;
    private List<uint> m_code = new List<uint> (LengthLimit);
    private uint[] m_ControlBlock;
    private int m_length;   // emulated machine-code length in bytes
    private uint m_seed;    // PRNG state, advanced by GetRandom()

    // Emulated x86 register file and stack for one Execute() run.
    class Context
    {
        public uint eax;
        public uint ebx;
        public uint ecx;
        public uint edi;
        public Stack<uint> stack = new Stack<uint>();
    }

    public CxProgram (uint seed, uint[] control_block)
    {
        m_seed = seed;
        m_length = 0;
        m_ControlBlock = control_block;
    }

    /// <summary>
    /// Interpret the bytecode with <paramref name="hash"/> as the argument
    /// (loaded into EDI) and return the final EAX value at RETN.
    /// </summary>
    /// <exception cref="CxProgramException">
    /// On truncated operands, out-of-range control-block access, imbalanced
    /// stack, unknown bytecode, or a program without RETN.
    /// </exception>
    public uint Execute (uint hash)
    {
        var context = new Context();
        var iterator = m_code.GetEnumerator();
        uint immed = 0;
        while (iterator.MoveNext())
        {
            var bytecode = (CxByteCode)iterator.Current;
            // Opcodes with the IMMED bit set consume the next code word as
            // their 32-bit operand.
            if (CxByteCode.IMMED == (bytecode & CxByteCode.IMMED))
            {
                if (!iterator.MoveNext())
                    throw new CxProgramException ("Incomplete IMMED bytecode in CxEncryption program");
                immed = iterator.Current;
            }
            switch (bytecode)
            {
            case CxByteCode.NOP: break;
            case CxByteCode.IMMED: break;
            case CxByteCode.MOV_EDI_ARG: context.edi = hash; break;
            case CxByteCode.PUSH_EBX: context.stack.Push (context.ebx); break;
            case CxByteCode.POP_EBX: context.ebx = context.stack.Pop(); break;
            case CxByteCode.PUSH_ECX: context.stack.Push (context.ecx); break;
            case CxByteCode.POP_ECX: context.ecx = context.stack.Pop(); break;
            case CxByteCode.MOV_EBX_EAX: context.ebx = context.eax; break;
            case CxByteCode.MOV_EAX_EDI: context.eax = context.edi; break;
            case CxByteCode.MOV_ECX_EBX: context.ecx = context.ebx; break;
            case CxByteCode.MOV_EAX_EBX: context.eax = context.ebx; break;
            case CxByteCode.AND_ECX_0F: context.ecx &= 0x0f; break;
            case CxByteCode.SHR_EBX_1: context.ebx >>= 1; break;
            case CxByteCode.SHL_EAX_1: context.eax <<= 1; break;
            case CxByteCode.SHR_EAX_CL: context.eax >>= (int)context.ecx; break;
            case CxByteCode.SHL_EAX_CL: context.eax <<= (int)context.ecx; break;
            case CxByteCode.OR_EAX_EBX: context.eax |= context.ebx; break;
            case CxByteCode.NOT_EAX: context.eax = ~context.eax; break;
            case CxByteCode.NEG_EAX: context.eax = (uint)-context.eax; break;
            case CxByteCode.DEC_EAX: context.eax--; break;
            case CxByteCode.INC_EAX: context.eax++; break;
            case CxByteCode.ADD_EAX_EBX: context.eax += context.ebx; break;
            case CxByteCode.SUB_EAX_EBX: context.eax -= context.ebx; break;
            case CxByteCode.IMUL_EAX_EBX: context.eax *= context.ebx; break;
            case CxByteCode.ADD_EAX_IMMED: context.eax += immed; break;
            case CxByteCode.SUB_EAX_IMMED: context.eax -= immed; break;
            case CxByteCode.AND_EBX_IMMED: context.ebx &= immed; break;
            case CxByteCode.AND_EAX_IMMED: context.eax &= immed; break;
            case CxByteCode.XOR_EAX_IMMED: context.eax ^= immed; break;
            case CxByteCode.MOV_EAX_IMMED: context.eax = immed; break;
            case CxByteCode.MOV_EAX_INDIRECT:
                if (context.eax >= m_ControlBlock.Length)
                    throw new CxProgramException ("Index out of bounds in CxEncryption program");
                context.eax = ~m_ControlBlock[context.eax];
                break;
            case CxByteCode.RETN:
                if (context.stack.Count > 0)
                    throw new CxProgramException ("Imbalanced stack in CxEncryption program");
                return context.eax;
            default:
                throw new CxProgramException ("Invalid bytecode in CxEncryption program");
            }
        }
        throw new CxProgramException ("CxEncryption program without RETN bytecode");
    }

    // Reset the program so generation can be retried with a different stage.
    public void Clear ()
    {
        m_length = 0;
        m_code.Clear();
    }

    // Account for 'count' bytes of emulated machine code that have no effect
    // on the result (e.g. register save/restore framing); emits no bytecode.
    public bool EmitNop (int count)
    {
        if (m_length + count > LengthLimit)
            return false;
        m_length += count;
        return true;
    }

    // Append one bytecode; 'length' is the emulated instruction's size in bytes.
    public bool Emit (CxByteCode code, int length = 1)
    {
        if (m_length + length > LengthLimit)
            return false;
        m_length += length;
        m_code.Add ((uint)code);
        return true;
    }

    // Append a raw 32-bit operand word (consumed by the preceding IMMED opcode).
    public bool EmitUInt32 (uint x)
    {
        if (m_length + 4 > LengthLimit)
            return false;
        m_length += 4;
        m_code.Add (x);
        return true;
    }

    public bool EmitRandom ()
    {
        return EmitUInt32 (GetRandom());
    }

    // Deterministic PRNG (linear congruential step with bit mixing); presumably
    // mirrors the original engine's generator so emitted programs match it.
    public uint GetRandom ()
    {
        uint seed = m_seed;
        m_seed = 1103515245 * seed + 12345;
        return m_seed ^ (seed << 16) ^ (seed >> 16);
    }
}
/// <summary>
/// CxEncryption variant that additionally reports an obfuscated archive index.
/// </summary>
[Serializable]
public class ZecchouCrypt : CxEncryption
{
    public ZecchouCrypt (CxScheme scheme)
        : base (scheme)
    {
    }

    public override bool ObfuscatedIndex
    {
        get { return true; }
    }
}
/* CxEncryption base branch order
OddBranchOrder
{
case 0: SHR_EAX_CL
case 1: SHL_EAX_CL
case 2: ADD_EAX_EBX
case 3: NEG_EAX; ADD_EAX_EBX
case 4: IMUL_EAX_EBX
case 5: SUB_EAX_EBX
}
EvenBranchOrder
{
case 0: NOT_EAX
case 1: DEC_EAX
case 2: NEG_EAX
case 3: INC_EAX
case 4: MOV_EAX_INDIRECT
case 5: OR_EAX_EBX
case 6: XOR_EAX_IMMED
case 7: ADD_EAX_IMMED
}
*/
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.