context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using System; using System.CodeDom.Compiler; using System.Collections.Generic; using System.Data; using System.Data.SqlClient; using System.Globalization; using System.Linq; using System.Text;

namespace EduHub.Data.Entities
{
    /// <summary>
    /// SMS messages Data Set
    /// </summary>
    /// <remarks>
    /// Generated code (EduHub Data 0.9): do not hand-edit logic; regenerate instead.
    /// Provides CSV deserialization, delta merging, lazy lookup indexes and
    /// SQL Server integration for the SPSMS table.
    /// </remarks>
    [GeneratedCode("EduHub Data", "0.9")]
    public sealed partial class SPSMSDataSet : EduHubDataSet<SPSMS>
    {
        /// <inheritdoc />
        public override string Name { get { return "SPSMS"; } }

        /// <inheritdoc />
        public override bool SupportsEntityLastModified { get { return true; } }

        internal SPSMSDataSet(EduHubContext Context) : base(Context)
        {
            // Indexes are built lazily: the underlying data is only enumerated
            // on the first lookup through each index.
            Index_REPLY_CODE = new Lazy<NullDictionary<string, IReadOnlyList<SPSMS>>>(() => this.ToGroupedNullDictionary(i => i.REPLY_CODE));
            Index_SPSMSKEY = new Lazy<Dictionary<int, SPSMS>>(() => this.ToDictionary(i => i.SPSMSKEY));
        }

        /// <summary>
        /// Matches CSV file headers to actions, used to deserialize <see cref="SPSMS" />
        /// </summary>
        /// <param name="Headers">The CSV column headers</param>
        /// <returns>An array of actions which deserialize <see cref="SPSMS" /> fields for each CSV column header</returns>
        internal override Action<SPSMS, string>[] BuildMapper(IReadOnlyList<string> Headers)
        {
            var mapper = new Action<SPSMS, string>[Headers.Count];

            for (var i = 0; i < Headers.Count; i++)
            {
                switch (Headers[i])
                {
                    case "SPSMSKEY":
                        mapper[i] = (e, v) => e.SPSMSKEY = int.Parse(v);
                        break;
                    case "MESSAGE":
                        mapper[i] = (e, v) => e.MESSAGE = v;
                        break;
                    case "CREATED_DATE":
                        // Dates are exported in "d/MM/yyyy h:mm:ss tt" form; null stays null.
                        // NOTE(review): an empty (non-null) string would make ParseExact throw —
                        // presumably the CSV reader surfaces missing values as null; confirm.
                        mapper[i] = (e, v) => e.CREATED_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                        break;
                    case "CREATED_BY":
                        mapper[i] = (e, v) => e.CREATED_BY = v;
                        break;
                    case "NOTIFY_REPLIES":
                        mapper[i] = (e, v) => e.NOTIFY_REPLIES = v;
                        break;
                    case "AUTO_MESSAGE":
                        mapper[i] = (e, v) => e.AUTO_MESSAGE = v;
                        break;
                    case "EMERGENCY":
                        mapper[i] = (e, v) => e.EMERGENCY = v;
                        break;
                    case "REPLY_CODE":
                        mapper[i] = (e, v) => e.REPLY_CODE = v;
                        break;
                    case "LW_DATE":
                        mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                        break;
                    case "LW_TIME":
                        mapper[i] = (e, v) => e.LW_TIME = v == null ? (short?)null : short.Parse(v);
                        break;
                    case "LW_USER":
                        mapper[i] = (e, v) => e.LW_USER = v;
                        break;
                    default:
                        // Unknown columns are ignored rather than treated as errors.
                        mapper[i] = MapperNoOp;
                        break;
                }
            }

            return mapper;
        }

        /// <summary>
        /// Merges <see cref="SPSMS" /> delta entities
        /// </summary>
        /// <param name="Entities">Iterator for base <see cref="SPSMS" /> entities</param>
        /// <param name="DeltaEntities">List of delta <see cref="SPSMS" /> entities</param>
        /// <returns>A merged <see cref="IEnumerable{SPSMS}"/> of entities</returns>
        /// <remarks>
        /// Merge of two streams assumed sorted by the clustered key SPSMSKEY
        /// (TODO confirm: correctness depends on both inputs being key-ordered).
        /// A delta entity replaces a base entity with the same key; base entities
        /// whose key is overwritten are suppressed via the HashSet below.
        /// </remarks>
        internal override IEnumerable<SPSMS> ApplyDeltaEntities(IEnumerable<SPSMS> Entities, List<SPSMS> DeltaEntities)
        {
            // Keys of all delta entities; a successful Remove() later marks a
            // base entity as overwritten (each key is consumed at most once).
            HashSet<int> Index_SPSMSKEY = new HashSet<int>(DeltaEntities.Select(i => i.SPSMSKEY));

            using (var deltaIterator = DeltaEntities.GetEnumerator())
            {
                using (var entityIterator = Entities.GetEnumerator())
                {
                    while (deltaIterator.MoveNext())
                    {
                        var deltaClusteredKey = deltaIterator.Current.SPSMSKEY;
                        bool yieldEntity = false;

                        // Drain base entities up to (and including) the delta's key.
                        while (entityIterator.MoveNext())
                        {
                            var entity = entityIterator.Current;
                            bool overwritten = Index_SPSMSKEY.Remove(entity.SPSMSKEY);

                            if (entity.SPSMSKEY.CompareTo(deltaClusteredKey) <= 0)
                            {
                                if (!overwritten)
                                {
                                    yield return entity;
                                }
                            }
                            else
                            {
                                // Base entity sorts after the delta: remember to emit it
                                // (unless itself overwritten) after the delta entity below.
                                yieldEntity = !overwritten;
                                break;
                            }
                        }

                        yield return deltaIterator.Current;
                        if (yieldEntity)
                        {
                            yield return entityIterator.Current;
                        }
                    }

                    // No deltas remain: pass through the rest of the base stream.
                    while (entityIterator.MoveNext())
                    {
                        yield return entityIterator.Current;
                    }
                }
            }
        }

        #region Index Fields

        private Lazy<NullDictionary<string, IReadOnlyList<SPSMS>>> Index_REPLY_CODE;
        private Lazy<Dictionary<int, SPSMS>> Index_SPSMSKEY;

        #endregion

        #region Index Methods

        /// <summary>
        /// Find SPSMS by REPLY_CODE field
        /// </summary>
        /// <param name="REPLY_CODE">REPLY_CODE value used to find SPSMS</param>
        /// <returns>List of related SPSMS entities</returns>
        /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
        public IReadOnlyList<SPSMS> FindByREPLY_CODE(string REPLY_CODE)
        {
            return Index_REPLY_CODE.Value[REPLY_CODE];
        }

        /// <summary>
        /// Attempt to find SPSMS by REPLY_CODE field
        /// </summary>
        /// <param name="REPLY_CODE">REPLY_CODE value used to find SPSMS</param>
        /// <param name="Value">List of related SPSMS entities</param>
        /// <returns>True if the list of related SPSMS entities is found</returns>
        public bool TryFindByREPLY_CODE(string REPLY_CODE, out IReadOnlyList<SPSMS> Value)
        {
            return Index_REPLY_CODE.Value.TryGetValue(REPLY_CODE, out Value);
        }

        /// <summary>
        /// Attempt to find SPSMS by REPLY_CODE field
        /// </summary>
        /// <param name="REPLY_CODE">REPLY_CODE value used to find SPSMS</param>
        /// <returns>List of related SPSMS entities, or null if not found</returns>
        public IReadOnlyList<SPSMS> TryFindByREPLY_CODE(string REPLY_CODE)
        {
            IReadOnlyList<SPSMS> value;
            if (Index_REPLY_CODE.Value.TryGetValue(REPLY_CODE, out value))
            {
                return value;
            }
            else
            {
                return null;
            }
        }

        /// <summary>
        /// Find SPSMS by SPSMSKEY field
        /// </summary>
        /// <param name="SPSMSKEY">SPSMSKEY value used to find SPSMS</param>
        /// <returns>Related SPSMS entity</returns>
        /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
        public SPSMS FindBySPSMSKEY(int SPSMSKEY)
        {
            return Index_SPSMSKEY.Value[SPSMSKEY];
        }

        /// <summary>
        /// Attempt to find SPSMS by SPSMSKEY field
        /// </summary>
        /// <param name="SPSMSKEY">SPSMSKEY value used to find SPSMS</param>
        /// <param name="Value">Related SPSMS entity</param>
        /// <returns>True if the related SPSMS entity is found</returns>
        public bool TryFindBySPSMSKEY(int SPSMSKEY, out SPSMS Value)
        {
            return Index_SPSMSKEY.Value.TryGetValue(SPSMSKEY, out Value);
        }

        /// <summary>
        /// Attempt to find SPSMS by SPSMSKEY field
        /// </summary>
        /// <param name="SPSMSKEY">SPSMSKEY value used to find SPSMS</param>
        /// <returns>Related SPSMS entity, or null if not found</returns>
        public SPSMS TryFindBySPSMSKEY(int SPSMSKEY)
        {
            SPSMS value;
            if (Index_SPSMSKEY.Value.TryGetValue(SPSMSKEY, out value))
            {
                return value;
            }
            else
            {
                return null;
            }
        }

        #endregion

        #region SQL Integration

        /// <summary>
        /// Returns a <see cref="SqlCommand"/> which checks for the existence of a SPSMS table, and if not found, creates the table and associated indexes.
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection)
        {
            return new SqlCommand(
                connection: SqlConnection,
                cmdText: @"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[SPSMS]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1) BEGIN CREATE TABLE [dbo].[SPSMS]( [SPSMSKEY] int IDENTITY NOT NULL, [MESSAGE] varchar(160) NULL, [CREATED_DATE] datetime NULL, [CREATED_BY] varchar(128) NULL, [NOTIFY_REPLIES] varchar(1) NULL, [AUTO_MESSAGE] varchar(1) NULL, [EMERGENCY] varchar(1) NULL, [REPLY_CODE] varchar(15) NULL, [LW_DATE] datetime NULL, [LW_TIME] smallint NULL, [LW_USER] varchar(128) NULL, CONSTRAINT [SPSMS_Index_SPSMSKEY] PRIMARY KEY CLUSTERED ( [SPSMSKEY] ASC ) ); CREATE NONCLUSTERED INDEX [SPSMS_Index_REPLY_CODE] ON [dbo].[SPSMS] ( [REPLY_CODE] ASC ); END");
        }

        /// <summary>
        /// Returns a <see cref="SqlCommand"/> which disables all non-clustered table indexes.
        /// Typically called before <see cref="SqlBulkCopy"/> to improve performance.
        /// <see cref="GetSqlRebuildIndexesCommand(SqlConnection)"/> should be called to rebuild and enable indexes after performance sensitive work is completed.
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        /// <returns>A <see cref="SqlCommand"/> which (when executed) will disable all non-clustered table indexes</returns>
        public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection)
        {
            return new SqlCommand(
                connection: SqlConnection,
                cmdText: @"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SPSMS]') AND name = N'SPSMS_Index_REPLY_CODE') ALTER INDEX [SPSMS_Index_REPLY_CODE] ON [dbo].[SPSMS] DISABLE; ");
        }

        /// <summary>
        /// Returns a <see cref="SqlCommand"/> which rebuilds and enables all non-clustered table indexes.
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        /// <returns>A <see cref="SqlCommand"/> which (when executed) will rebuild and enable all non-clustered table indexes</returns>
        public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection)
        {
            return new SqlCommand(
                connection: SqlConnection,
                cmdText: @"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[SPSMS]') AND name = N'SPSMS_Index_REPLY_CODE') ALTER INDEX [SPSMS_Index_REPLY_CODE] ON [dbo].[SPSMS] REBUILD PARTITION = ALL; ");
        }

        /// <summary>
        /// Returns a <see cref="SqlCommand"/> which deletes the <see cref="SPSMS"/> entities passed
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        /// <param name="Entities">The <see cref="SPSMS"/> entities to be deleted</param>
        public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<SPSMS> Entities)
        {
            SqlCommand command = new SqlCommand();
            int parameterIndex = 0;
            StringBuilder builder = new StringBuilder();

            List<int> Index_SPSMSKEY = new List<int>();

            foreach (var entity in Entities)
            {
                Index_SPSMSKEY.Add(entity.SPSMSKEY);
            }

            // The delete statement is parameterized (@p0, @p1, ...) rather than
            // concatenating key values into the SQL text.
            builder.AppendLine("DELETE [dbo].[SPSMS] WHERE");

            // Index_SPSMSKEY
            builder.Append("[SPSMSKEY] IN (");
            for (int index = 0; index < Index_SPSMSKEY.Count; index++)
            {
                if (index != 0)
                    builder.Append(", ");

                // SPSMSKEY
                var parameterSPSMSKEY = $"@p{parameterIndex++}";
                builder.Append(parameterSPSMSKEY);
                command.Parameters.Add(parameterSPSMSKEY, SqlDbType.Int).Value = Index_SPSMSKEY[index];
            }
            builder.Append(");");

            command.Connection = SqlConnection;
            command.CommandText = builder.ToString();

            return command;
        }

        /// <summary>
        /// Provides a <see cref="IDataReader"/> for the SPSMS data set
        /// </summary>
        /// <returns>A <see cref="IDataReader"/> for the SPSMS data set</returns>
        public override EduHubDataSetDataReader<SPSMS> GetDataSetDataReader()
        {
            return new SPSMSDataReader(Load());
        }

        /// <summary>
        /// Provides a <see cref="IDataReader"/> for the SPSMS data set
        /// </summary>
        /// <returns>A <see cref="IDataReader"/> for the SPSMS data set</returns>
        public override EduHubDataSetDataReader<SPSMS> GetDataSetDataReader(List<SPSMS> Entities)
        {
            return new SPSMSDataReader(new EduHubDataSetLoadedReader<SPSMS>(this, Entities));
        }

        // Modest implementation to primarily support SqlBulkCopy.
        // Ordinals 0..10 correspond to the column order of the CREATE TABLE above.
        private class SPSMSDataReader : EduHubDataSetDataReader<SPSMS>
        {
            public SPSMSDataReader(IEduHubDataSetReader<SPSMS> Reader)
                : base (Reader)
            {
            }

            public override int FieldCount { get { return 11; } }

            public override object GetValue(int i)
            {
                switch (i)
                {
                    case 0: // SPSMSKEY
                        return Current.SPSMSKEY;
                    case 1: // MESSAGE
                        return Current.MESSAGE;
                    case 2: // CREATED_DATE
                        return Current.CREATED_DATE;
                    case 3: // CREATED_BY
                        return Current.CREATED_BY;
                    case 4: // NOTIFY_REPLIES
                        return Current.NOTIFY_REPLIES;
                    case 5: // AUTO_MESSAGE
                        return Current.AUTO_MESSAGE;
                    case 6: // EMERGENCY
                        return Current.EMERGENCY;
                    case 7: // REPLY_CODE
                        return Current.REPLY_CODE;
                    case 8: // LW_DATE
                        return Current.LW_DATE;
                    case 9: // LW_TIME
                        return Current.LW_TIME;
                    case 10: // LW_USER
                        return Current.LW_USER;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(i));
                }
            }

            public override bool IsDBNull(int i)
            {
                // Ordinal 0 (SPSMSKEY) is the non-nullable primary key, hence the
                // default branch returns false for it.
                switch (i)
                {
                    case 1: // MESSAGE
                        return Current.MESSAGE == null;
                    case 2: // CREATED_DATE
                        return Current.CREATED_DATE == null;
                    case 3: // CREATED_BY
                        return Current.CREATED_BY == null;
                    case 4: // NOTIFY_REPLIES
                        return Current.NOTIFY_REPLIES == null;
                    case 5: // AUTO_MESSAGE
                        return Current.AUTO_MESSAGE == null;
                    case 6: // EMERGENCY
                        return Current.EMERGENCY == null;
                    case 7: // REPLY_CODE
                        return Current.REPLY_CODE == null;
                    case 8: // LW_DATE
                        return Current.LW_DATE == null;
                    case 9: // LW_TIME
                        return Current.LW_TIME == null;
                    case 10: // LW_USER
                        return Current.LW_USER == null;
                    default:
                        return false;
                }
            }

            public override string GetName(int ordinal)
            {
                switch (ordinal)
                {
                    case 0: // SPSMSKEY
                        return "SPSMSKEY";
                    case 1: // MESSAGE
                        return "MESSAGE";
                    case 2: // CREATED_DATE
                        return "CREATED_DATE";
                    case 3: // CREATED_BY
                        return "CREATED_BY";
                    case 4: // NOTIFY_REPLIES
                        return "NOTIFY_REPLIES";
                    case 5: // AUTO_MESSAGE
                        return "AUTO_MESSAGE";
                    case 6: // EMERGENCY
                        return "EMERGENCY";
                    case 7: // REPLY_CODE
                        return "REPLY_CODE";
                    case 8: // LW_DATE
                        return "LW_DATE";
                    case 9: // LW_TIME
                        return "LW_TIME";
                    case 10: // LW_USER
                        return "LW_USER";
                    default:
                        throw new ArgumentOutOfRangeException(nameof(ordinal));
                }
            }

            public override int GetOrdinal(string name)
            {
                switch (name)
                {
                    case "SPSMSKEY":
                        return 0;
                    case "MESSAGE":
                        return 1;
                    case "CREATED_DATE":
                        return 2;
                    case "CREATED_BY":
                        return 3;
                    case "NOTIFY_REPLIES":
                        return 4;
                    case "AUTO_MESSAGE":
                        return 5;
                    case "EMERGENCY":
                        return 6;
                    case "REPLY_CODE":
                        return 7;
                    case "LW_DATE":
                        return 8;
                    case "LW_TIME":
                        return 9;
                    case "LW_USER":
                        return 10;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(name));
                }
            }
        }

        #endregion

    }
}
using System;
using NUnit.Framework;
using OpenQA.Selenium.Environment;

namespace OpenQA.Selenium
{
    /// <summary>
    /// Tests for frame and iframe switching behavior via <c>driver.SwitchTo()</c>.
    /// Fixes applied: removed duplicated [IgnoreBrowser(Browser.PhantomJS, ...)]
    /// attributes, and corrected Assert.AreEqual argument order (NUnit expects
    /// the expected value first, then the actual value).
    /// </summary>
    [TestFixture]
    public class FrameSwitchingTest : DriverTestFixture
    {
        // ----------------------------------------------------------------------------------------------
        //
        // Tests that WebDriver doesn't do anything fishy when it navigates to a page with frames.
        //
        // ----------------------------------------------------------------------------------------------

        [Test]
        public void ShouldAlwaysFocusOnTheTopMostFrameAfterANavigationEvent()
        {
            driver.Url = framesetPage;
            IWebElement element = driver.FindElement(By.TagName("frameset"));
            Assert.IsNotNull(element);
        }

        [Test]
        public void ShouldNotAutomaticallySwitchFocusToAnIFrameWhenAPageContainingThemIsLoaded()
        {
            driver.Url = iframePage;
            // Briefly enable an implicit wait so the heading has time to appear,
            // then restore it to zero so it does not leak into other tests.
            driver.Manage().Timeouts().ImplicitlyWait(TimeSpan.FromSeconds(1));
            IWebElement element = driver.FindElement(By.Id("iframe_page_heading"));
            driver.Manage().Timeouts().ImplicitlyWait(TimeSpan.FromSeconds(0));
            Assert.IsNotNull(element);
        }

        [Test]
        public void ShouldOpenPageWithBrokenFrameset()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("framesetPage3.html");

            IWebElement frame1 = driver.FindElement(By.Id("first"));
            driver.SwitchTo().Frame(frame1);

            driver.SwitchTo().DefaultContent();

            IWebElement frame2 = driver.FindElement(By.Id("second"));

            try
            {
                driver.SwitchTo().Frame(frame2);
            }
            catch (WebDriverException)
            {
                // IE9 can not switch to this broken frame - it has no window.
            }
        }

        // ----------------------------------------------------------------------------------------------
        //
        // Tests that WebDriver can switch to frames as expected.
        //
        // ----------------------------------------------------------------------------------------------

        [Test]
        public void ShouldBeAbleToSwitchToAFrameByItsIndex()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame(1);
            Assert.AreEqual("2", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        public void ShouldBeAbleToSwitchToAnIframeByItsIndex()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame(0);
            Assert.AreEqual("name", driver.FindElement(By.Name("id-name1")).GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToSwitchToAFrameByItsName()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fourth");
            Assert.AreEqual("child1", driver.FindElement(By.TagName("frame")).GetAttribute("name"));
        }

        [Test]
        public void ShouldBeAbleToSwitchToAnIframeByItsName()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame("iframe1-name");
            Assert.AreEqual("name", driver.FindElement(By.Name("id-name1")).GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToSwitchToAFrameByItsID()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fifth");
            Assert.AreEqual("Open new window", driver.FindElement(By.Name("windowOne")).Text);
        }

        [Test]
        public void ShouldBeAbleToSwitchToAnIframeByItsID()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame("iframe1");
            Assert.AreEqual("name", driver.FindElement(By.Name("id-name1")).GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToSwitchToFrameWithNameContainingDot()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("sixth.iframe1");
            Assert.IsTrue(driver.FindElement(By.TagName("body")).Text.Contains("Page number 3"));
        }

        [Test]
        public void ShouldBeAbleToSwitchToAFrameUsingAPreviouslyLocatedWebElement()
        {
            driver.Url = framesetPage;
            IWebElement frame = driver.FindElement(By.TagName("frame"));
            driver.SwitchTo().Frame(frame);
            Assert.AreEqual("1", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        public void ShouldBeAbleToSwitchToAnIFrameUsingAPreviouslyLocatedWebElement()
        {
            driver.Url = iframePage;
            IWebElement frame = driver.FindElement(By.TagName("iframe"));
            driver.SwitchTo().Frame(frame);
            Assert.AreEqual("name", driver.FindElement(By.Name("id-name1")).GetAttribute("value"));
        }

        [Test]
        public void ShouldEnsureElementIsAFrameBeforeSwitching()
        {
            driver.Url = framesetPage;
            IWebElement frame = driver.FindElement(By.TagName("frameset"));
            Assert.Throws<NoSuchFrameException>(() => driver.SwitchTo().Frame(frame));
        }

        [Test]
        public void FrameSearchesShouldBeRelativeToTheCurrentlySelectedFrame()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("second");
            Assert.AreEqual("2", driver.FindElement(By.Id("pageNumber")).Text);

            // "third" is a sibling, not a child, so it must not be reachable from here.
            try
            {
                driver.SwitchTo().Frame("third");
                Assert.Fail();
            }
            catch (NoSuchFrameException)
            {
                // Do nothing
            }

            driver.SwitchTo().DefaultContent();
            driver.SwitchTo().Frame("third");

            try
            {
                driver.SwitchTo().Frame("second");
                Assert.Fail();
            }
            catch (NoSuchFrameException)
            {
                // Do nothing
            }

            driver.SwitchTo().DefaultContent();
            driver.SwitchTo().Frame("second");
            Assert.AreEqual("2", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        public void ShouldSelectChildFramesByChainedCalls()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fourth").SwitchTo().Frame("child2");
            Assert.AreEqual("11", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        public void ShouldThrowFrameNotFoundExceptionLookingUpSubFramesWithSuperFrameNames()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fourth");
            Assert.Throws<NoSuchFrameException>(() => driver.SwitchTo().Frame("second"));
        }

        [Test]
        public void ShouldThrowAnExceptionWhenAFrameCannotBeFound()
        {
            driver.Url = xhtmlTestPage;
            Assert.Throws<NoSuchFrameException>(() => driver.SwitchTo().Frame("Nothing here"));
        }

        [Test]
        public void ShouldThrowAnExceptionWhenAFrameCannotBeFoundByIndex()
        {
            driver.Url = xhtmlTestPage;
            Assert.Throws<NoSuchFrameException>(() => driver.SwitchTo().Frame(27));
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.PhantomJS, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Safari, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Android, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Opera, "Browser does not support parent frame navigation")]
        public void ShouldBeAbleToSwitchToParentFrame()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fourth").SwitchTo().ParentFrame().SwitchTo().Frame("first");
            Assert.AreEqual("1", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.PhantomJS, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Safari, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Android, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Opera, "Browser does not support parent frame navigation")]
        public void ShouldBeAbleToSwitchToParentFrameFromASecondLevelFrame()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fourth").SwitchTo().Frame("child1").SwitchTo().ParentFrame().SwitchTo().Frame("child2");
            Assert.AreEqual("11", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.PhantomJS, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Safari, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Android, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Opera, "Browser does not support parent frame navigation")]
        public void SwitchingToParentFrameFromDefaultContextIsNoOp()
        {
            driver.Url = xhtmlTestPage;
            driver.SwitchTo().ParentFrame();
            Assert.AreEqual("XHTML Test Page", driver.Title);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.PhantomJS, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Safari, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Android, "Browser does not support parent frame navigation")]
        [IgnoreBrowser(Browser.Opera, "Browser does not support parent frame navigation")]
        public void ShouldBeAbleToSwitchToParentFromAnIframe()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame(0);

            driver.SwitchTo().ParentFrame();
            driver.FindElement(By.Id("iframe_page_heading"));
        }

        // ----------------------------------------------------------------------------------------------
        //
        // General frame handling behavior tests
        //
        // ----------------------------------------------------------------------------------------------

        [Test]
        public void ShouldContinueToReferToTheSameFrameOnceItHasBeenSelected()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame(2);
            IWebElement checkbox = driver.FindElement(By.XPath("//input[@name='checky']"));
            checkbox.Click();
            checkbox.Submit();
            Assert.AreEqual("Success!", driver.FindElement(By.XPath("//p")).Text);
        }

        [Test]
        public void ShouldFocusOnTheReplacementWhenAFrameFollowsALinkToA_TopTargettedPage()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame(0);
            driver.FindElement(By.LinkText("top")).Click();

            // TODO(simon): Avoid going too fast when native events are there.
            System.Threading.Thread.Sleep(1000);
            Assert.AreEqual("XHTML Test Page", driver.Title);
        }

        [Test]
        public void ShouldAllowAUserToSwitchFromAnIframeBackToTheMainContentOfThePage()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame(0);
            driver.SwitchTo().DefaultContent();
            driver.FindElement(By.Id("iframe_page_heading"));
        }

        [Test]
        public void ShouldAllowTheUserToSwitchToAnIFrameAndRemainFocusedOnIt()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame(0);
            driver.FindElement(By.Id("submitButton")).Click();

            string hello = GetTextOfGreetingElement();
            // Fixed: expected value comes first in Assert.AreEqual.
            Assert.AreEqual("Success!", hello);
        }

        [Test]
        public void ShouldBeAbleToClickInAFrame()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("third");

            // This should replace frame "third" ...
            driver.FindElement(By.Id("submitButton")).Click();
            // driver should still be focused on frame "third" ...
            Assert.AreEqual("Success!", GetTextOfGreetingElement());
            // Make sure it was really frame "third" which was replaced ...
            driver.SwitchTo().DefaultContent().SwitchTo().Frame("third");
            Assert.AreEqual("Success!", GetTextOfGreetingElement());
        }

        [Test]
        public void testShouldBeAbleToClickInAFrameThatRewritesTopWindowLocation()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("click_tests/issue5237.html");
            driver.SwitchTo().Frame("search");
            driver.FindElement(By.Id("submit")).Click();
            driver.SwitchTo().DefaultContent();
            WaitFor(() => { return driver.Title == "Target page for issue 5237"; }, "Browser title was not 'Target page for issue 5237'");
        }

        [Test]
        [IgnoreBrowser(Browser.HtmlUnit)]
        public void ShouldBeAbleToClickInASubFrame()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("sixth").SwitchTo().Frame("iframe1");

            // This should replace frame "iframe1" inside frame "sixth" ...
            driver.FindElement(By.Id("submitButton")).Click();
            // driver should still be focused on frame "iframe1" inside frame "sixth" ...
            Assert.AreEqual("Success!", GetTextOfGreetingElement());
            // Make sure it was really frame "iframe1" inside frame "sixth" which was replaced ...
            driver.SwitchTo().DefaultContent().SwitchTo().Frame("sixth").SwitchTo().Frame("iframe1");
            Assert.AreEqual("Success!", driver.FindElement(By.Id("greeting")).Text);
        }

        [Test]
        public void ShouldBeAbleToFindElementsInIframesByXPath()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame("iframe1");
            IWebElement element = driver.FindElement(By.XPath("//*[@id = 'changeme']"));
            Assert.IsNotNull(element);
        }

        [Test]
        public void GetCurrentUrlShouldReturnTopLevelBrowsingContextUrl()
        {
            driver.Url = framesetPage;
            Assert.AreEqual(framesetPage, driver.Url);

            driver.SwitchTo().Frame("second");
            Assert.AreEqual(framesetPage, driver.Url);
        }

        [Test]
        public void GetCurrentUrlShouldReturnTopLevelBrowsingContextUrlForIframes()
        {
            driver.Url = iframePage;
            Assert.AreEqual(iframePage, driver.Url);

            driver.SwitchTo().Frame("iframe1");
            Assert.AreEqual(iframePage, driver.Url);
        }

        [Test]
        [IgnoreBrowser(Browser.PhantomJS, "Causes browser to exit")]
        public void ShouldBeAbleToSwitchToTheTopIfTheFrameIsDeletedFromUnderUs()
        {
            driver.Url = deletingFrame;
            driver.SwitchTo().Frame("iframe1");

            IWebElement killIframe = driver.FindElement(By.Id("killIframe"));
            killIframe.Click();
            driver.SwitchTo().DefaultContent();

            // Poll (up to 4s) until the frame is really gone.
            bool frameExists = true;
            DateTime timeout = DateTime.Now.Add(TimeSpan.FromMilliseconds(4000));
            while (DateTime.Now < timeout)
            {
                try
                {
                    driver.SwitchTo().Frame("iframe1");
                }
                catch (NoSuchFrameException)
                {
                    frameExists = false;
                    break;
                }
            }

            Assert.IsFalse(frameExists);

            IWebElement addIFrame = driver.FindElement(By.Id("addBackFrame"));
            addIFrame.Click();

            // Poll (up to 4s) until the re-added frame is switchable again.
            timeout = DateTime.Now.Add(TimeSpan.FromMilliseconds(4000));
            while (DateTime.Now < timeout)
            {
                try
                {
                    driver.SwitchTo().Frame("iframe1");
                    break;
                }
                catch (NoSuchFrameException)
                {
                }
            }

            try
            {
                WaitFor(() =>
                {
                    IWebElement success = null;
                    try
                    {
                        success = driver.FindElement(By.Id("success"));
                    }
                    catch (NoSuchElementException)
                    {
                    }

                    return success != null;
                }, "Element with id 'success' still exists on page");
            }
            catch (WebDriverException)
            {
                Assert.Fail("Could not find element after switching frame");
            }
        }

        [Test]
        public void ShouldReturnWindowTitleInAFrameset()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("third");
            Assert.AreEqual("Unique title", driver.Title);
        }

        [Test]
        public void JavaScriptShouldExecuteInTheContextOfTheCurrentFrame()
        {
            IJavaScriptExecutor executor = driver as IJavaScriptExecutor;

            driver.Url = framesetPage;
            Assert.IsTrue((bool)executor.ExecuteScript("return window == window.top"));

            driver.SwitchTo().Frame("third");
            Assert.IsTrue((bool)executor.ExecuteScript("return window != window.top"));
        }

        // ----------------------------------------------------------------------------------------------
        //
        // Frame handling behavior tests not included in Java tests
        //
        // ----------------------------------------------------------------------------------------------

        [Test]
        [NeedsFreshDriver(IsCreatedAfterTest = true)]
        public void ClosingTheFinalBrowserWindowShouldNotCauseAnExceptionToBeThrown()
        {
            driver.Url = simpleTestPage;
            driver.Close();
        }

        [Test]
        public void ShouldBeAbleToFlipToAFrameIdentifiedByItsId()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("fifth");
            driver.FindElement(By.Id("username"));
        }

        [Test]
        public void ShouldBeAbleToSelectAFrameByName()
        {
            driver.Url = framesetPage;
            driver.SwitchTo().Frame("second");
            // Fixed: expected value comes first in Assert.AreEqual.
            Assert.AreEqual("2", driver.FindElement(By.Id("pageNumber")).Text);

            driver.SwitchTo().DefaultContent().SwitchTo().Frame("third");
            driver.FindElement(By.Id("changeme")).Click();

            driver.SwitchTo().DefaultContent().SwitchTo().Frame("second");
            Assert.AreEqual("2", driver.FindElement(By.Id("pageNumber")).Text);
        }

        [Test]
        public void ShouldBeAbleToFindElementsInIframesByName()
        {
            driver.Url = iframePage;
            driver.SwitchTo().Frame("iframe1");
            IWebElement element = driver.FindElement(By.Name("id-name1"));
            Assert.IsNotNull(element);
        }

        /// <summary>
        /// Polls (up to 3s) for the element with id "greeting" and returns its text,
        /// or the empty string if the element never appears.
        /// </summary>
        private string GetTextOfGreetingElement()
        {
            string text = string.Empty;
            DateTime end = DateTime.Now.Add(TimeSpan.FromMilliseconds(3000));
            while (DateTime.Now < end)
            {
                try
                {
                    IWebElement element = driver.FindElement(By.Id("greeting"));
                    text = element.Text;
                    break;
                }
                catch (NoSuchElementException)
                {
                }
            }

            return text;
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
using log4net;
using OpenMetaverse;
using OpenMetaverse.StructuredData;

namespace OpenSim.Framework
{
    /// <summary>
    /// This class stores and retrieves dynamic attributes.
    /// </summary>
    /// <remarks>
    /// Modules that want to use dynamic attributes need to do so in a private data store
    /// which is accessed using a unique name.  DAMap provides access to the data stores,
    /// each of which is an OSDMap.  Modules are free to store any type of data they want
    /// within their data store.  However, avoid storing large amounts of data because that
    /// would slow down database access.
    /// Thread-safety: all public members serialize on <c>lock (this)</c>, matching the
    /// original implementation (NOTE(review): locking on <c>this</c> is fragile but is
    /// relied upon by <see cref="CopyFrom"/>, which locks the other instance — preserved).
    /// </remarks>
    public class DAMap : IXmlSerializable
    {
//        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// Minimum number of characters allowed for a namespace key.
        /// </summary>
        private static readonly int MIN_NAMESPACE_LENGTH = 4;

        // Top level: namespace name -> OSDMap of (store name -> OSDMap store).
        private OSDMap m_map = new OSDMap();

        // WARNING: this is temporary for experimentation only, it will be removed!!!!
        public OSDMap TopLevelMap
        {
            get { return m_map; }
            set { m_map = value; }
        }

        // IXmlSerializable contract: returning null means "use WriteXml/ReadXml".
        public XmlSchema GetSchema() { return null; }

        /// <summary>
        /// Deserialize a DAMap from its LLSD-XML representation.
        /// </summary>
        public static DAMap FromXml(string rawXml)
        {
            DAMap map = new DAMap();
            map.ReadXml(rawXml);
            return map;
        }

        public void ReadXml(XmlReader reader)
        {
            ReadXml(reader.ReadInnerXml());
        }

        /// <summary>
        /// Replace the current contents with the LLSD-XML in <paramref name="rawXml"/>,
        /// then remove any entries that are not OSDMap (see <see cref="SanitiseMap"/>).
        /// </summary>
        public void ReadXml(string rawXml)
        {
//            System.Console.WriteLine("Trying to deserialize [{0}]", rawXml);

            lock (this)
            {
                m_map = (OSDMap)OSDParser.DeserializeLLSDXml(rawXml);
                SanitiseMap(this);
            }
        }

        public void WriteXml(XmlWriter writer)
        {
            writer.WriteRaw(ToXml());
        }

        /// <summary>
        /// Serialize the whole map to an LLSD-XML string.
        /// </summary>
        public string ToXml()
        {
            lock (this)
                return OSDParser.SerializeLLSDXmlString(m_map);
        }

        /// <summary>
        /// Deep-copy the contents of another DAMap into this one, by round-tripping
        /// through LLSD-XML so no OSD instances are shared between the two maps.
        /// </summary>
        public void CopyFrom(DAMap other)
        {
            // Deep copy

            string data = null;
            lock (other)
            {
                if (other.CountNamespaces > 0)
                {
                    data = OSDParser.SerializeLLSDXmlString(other.m_map);
                }
            }

            lock (this)
            {
                if (data == null)
                    Clear();
                else
                    m_map = (OSDMap)OSDParser.DeserializeLLSDXml(data);
            }
        }

        /// <summary>
        /// Sanitise the map to remove any namespaces or stores that are not OSDMap.
        /// </summary>
        /// <param name='daMap'>The map to sanitise in place.</param>
        public static void SanitiseMap(DAMap daMap)
        {
            List<string> keysToRemove = null;

            OSDMap namespacesMap = daMap.m_map;

            // Pass 1: drop top-level entries whose value is not an OSDMap (not a namespace).
            // Keys are collected first because the map cannot be mutated while enumerating it.
            foreach (string key in namespacesMap.Keys)
            {
//                Console.WriteLine("Processing ns {0}", key);
                if (!(namespacesMap[key] is OSDMap))
                {
                    if (keysToRemove == null)
                        keysToRemove = new List<string>();

                    keysToRemove.Add(key);
                }
            }

            if (keysToRemove != null)
            {
                foreach (string key in keysToRemove)
                {
//                    Console.WriteLine ("Removing bad ns {0}", key);
                    namespacesMap.Remove(key);
                }
            }

            // Pass 2: within each namespace, drop stores that are not OSDMap.
            foreach (OSD nsOsd in namespacesMap.Values)
            {
                OSDMap nsOsdMap = (OSDMap)nsOsd;
                keysToRemove = null;

                foreach (string key in nsOsdMap.Keys)
                {
                    if (!(nsOsdMap[key] is OSDMap))
                    {
                        if (keysToRemove == null)
                            keysToRemove = new List<string>();

                        keysToRemove.Add(key);
                    }
                }

                if (keysToRemove != null)
                    foreach (string key in keysToRemove)
                        nsOsdMap.Remove(key);
            }
        }

        /// <summary>
        /// Get the number of namespaces
        /// </summary>
        public int CountNamespaces
        {
            get
            {
                lock (this)
                {
                    return m_map.Count;
                }
            }
        }

        /// <summary>
        /// Get the number of stores.
        /// </summary>
        public int CountStores
        {
            get
            {
                int count = 0;

                lock (this)
                {
                    // FIX: enumerate the namespace values, not the dictionary itself.
                    // Enumerating the OSDMap directly yields dictionary entries, which
                    // cannot be cast to OSDMap (runtime InvalidCastException).
                    foreach (OSD osdNamespace in m_map.Values)
                    {
                        count += ((OSDMap)osdNamespace).Count;
                    }
                }

                return count;
            }
        }

        /// <summary>
        /// Retrieve a Dynamic Attribute store
        /// </summary>
        /// <param name="ns">namespace for the store - use "OpenSim" for in-core modules</param>
        /// <param name="storeName">name of the store within the namespace</param>
        /// <returns>an OSDMap representing the stored data, or null if not found</returns>
        public OSDMap GetStore(string ns, string storeName)
        {
            OSD namespaceOsd;

            lock (this)
            {
                if (m_map.TryGetValue(ns, out namespaceOsd))
                {
                    OSD store;

                    if (((OSDMap)namespaceOsd).TryGetValue(storeName, out store))
                        return (OSDMap)store;
                }
            }

            return null;
        }

        /// <summary>
        /// Saves a Dynamic attribute store
        /// </summary>
        /// <param name="ns">namespace for the store - use "OpenSim" for in-core modules</param>
        /// <param name="storeName">name of the store within the namespace</param>
        /// <param name="store">an OSDMap representing the data to store</param>
        public void SetStore(string ns, string storeName, OSDMap store)
        {
            ValidateNamespace(ns);

            lock (this)
            {
                // Create the namespace on first use; previously a throwaway OSDMap was
                // allocated even when the namespace already existed.
                if (!m_map.ContainsKey(ns))
                    m_map[ns] = new OSDMap();

                OSDMap nsMap = (OSDMap)m_map[ns];

//                m_log.DebugFormat("[DA MAP]: Setting store to {0}:{1}", ns, storeName);
                nsMap[storeName] = store;
            }
        }

        /// <summary>
        /// Validate the key used for storing separate data stores.
        /// </summary>
        /// <param name='ns'>The namespace key to check.</param>
        /// <exception cref="Exception">Thrown when the key is shorter than the minimum length.</exception>
        public static void ValidateNamespace(string ns)
        {
            if (ns.Length < MIN_NAMESPACE_LENGTH)
                throw new Exception("Minimum namespace length is " + MIN_NAMESPACE_LENGTH);
        }

        public bool ContainsStore(string ns, string storeName)
        {
            OSD namespaceOsd;

            lock (this)
            {
                if (m_map.TryGetValue(ns, out namespaceOsd))
                {
                    return ((OSDMap)namespaceOsd).ContainsKey(storeName);
                }
            }

            return false;
        }

        /// <summary>
        /// Try to retrieve a store; returns false (with a null store) when either the
        /// namespace or the store does not exist.
        /// </summary>
        public bool TryGetStore(string ns, string storeName, out OSDMap store)
        {
            OSD namespaceOsd;

            lock (this)
            {
                if (m_map.TryGetValue(ns, out namespaceOsd))
                {
                    OSD storeOsd;

                    bool result = ((OSDMap)namespaceOsd).TryGetValue(storeName, out storeOsd);
                    store = (OSDMap)storeOsd;

                    return result;
                }
            }

            store = null;
            return false;
        }

        public void Clear()
        {
            lock (this)
                m_map.Clear();
        }

        /// <summary>
        /// Remove a store from a namespace; the namespace itself is dropped once empty.
        /// </summary>
        /// <returns>true if the store existed and was removed, false otherwise.</returns>
        public bool RemoveStore(string ns, string storeName)
        {
            OSD namespaceOsd;

            lock (this)
            {
                if (m_map.TryGetValue(ns, out namespaceOsd))
                {
                    OSDMap namespaceOsdMap = (OSDMap)namespaceOsd;

                    // FIX: the result of Remove() was previously discarded and the
                    // method unconditionally returned false; propagate the outcome.
                    bool removed = namespaceOsdMap.Remove(storeName);

                    // Don't keep empty namespaces around
                    if (namespaceOsdMap.Count <= 0)
                        m_map.Remove(ns);

                    return removed;
                }
            }

            return false;
        }
    }
}
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Threading;
using System.Management.Automation;
using System.Management.Automation.Runspaces;
using System.Management.Automation.Remoting.Internal;

namespace Microsoft.PowerShell.Commands
{
    /// <summary>
    /// This cmdlet takes a Runspace object and checks to see if it is debuggable (i.e, if
    /// it is running a script or is currently stopped in the debugger.  If it
    /// is debuggable then it breaks into the Runspace debugger in step mode.
    /// </summary>
    [SuppressMessage("Microsoft.PowerShell", "PS1012:CallShouldProcessOnlyIfDeclaringSupport")]
    [Cmdlet(VerbsDiagnostic.Debug, "Runspace", SupportsShouldProcess = true, DefaultParameterSetName = DebugRunspaceCommand.RunspaceParameterSet,
        HelpUri = "https://go.microsoft.com/fwlink/?LinkId=403731")]
    public sealed class DebugRunspaceCommand : PSCmdlet
    {
        #region Strings

        // Parameter set names; each of the four parameters below selects the
        // runspace to debug in a different way.
        private const string RunspaceParameterSet = "RunspaceParameterSet";
        private const string NameParameterSet = "NameParameterSet";
        private const string IdParameterSet = "IdParameterSet";
        private const string InstanceIdParameterSet = "InstanceIdParameterSet";

        #endregion

        #region Private members

        // Target runspace being debugged and the host's debugger driving it.
        private Runspace _runspace;
        private System.Management.Automation.Debugger _debugger;

        // Collection the cmdlet blocks on while streaming target output to the host,
        // and an accumulator used while the debugger is stopped at a breakpoint.
        private PSDataCollection<PSStreamObject> _debugBlockingCollection;
        private PSDataCollection<PSStreamObject> _debugAccumulateCollection;

        // Whichever of these is non-null is the command currently running in the
        // target runspace whose output is being forwarded.
        private Pipeline _runningPipeline;
        private System.Management.Automation.PowerShell _runningPowerShell;

        // Debugging to persist until Ctrl+C or Debugger 'Exit' stops cmdlet.
        private bool _debugging;

        // Signaled when a new script starts running in the target runspace
        // (initially set so the first wait passes immediately).
        private ManualResetEventSlim _newRunningScriptEvent = new ManualResetEventSlim(true);
        private RunspaceAvailability _previousRunspaceAvailability = RunspaceAvailability.None;

        #endregion

        #region Parameters

        /// <summary>
        /// The Runspace to be debugged.
        /// </summary>
        [Parameter(Position = 0,
                   Mandatory = true,
                   ValueFromPipelineByPropertyName = true,
                   ValueFromPipeline = true,
                   ParameterSetName = DebugRunspaceCommand.RunspaceParameterSet)]
        public Runspace Runspace
        {
            get;
            set;
        }

        /// <summary>
        /// The name of a Runspace to be debugged.
        /// </summary>
        [Parameter(Position = 0,
                   Mandatory = true,
                   ParameterSetName = DebugRunspaceCommand.NameParameterSet)]
        public string Name
        {
            get;
            set;
        }

        /// <summary>
        /// The Id of a Runspace to be debugged.
        /// </summary>
        [Parameter(Position = 0,
                   Mandatory = true,
                   ParameterSetName = DebugRunspaceCommand.IdParameterSet)]
        public int Id
        {
            get;
            set;
        }

        /// <summary>
        /// The InstanceId of a Runspace to be debugged.
        /// </summary>
        [Parameter(Position = 0,
                   Mandatory = true,
                   ParameterSetName = DebugRunspaceCommand.InstanceIdParameterSet)]
        public Guid InstanceId
        {
            get;
            set;
        }

        #endregion

        #region Overrides

        /// <summary>
        /// End processing.  Do work.
        /// Resolves the target runspace from whichever parameter set was used,
        /// validates preconditions (a host runspace with a debugger exists, the
        /// target is not the host's own runspace, a host UI is available), and
        /// then blocks while streaming debugger output until the session ends.
        /// </summary>
        protected override void EndProcessing()
        {
            if (ParameterSetName == DebugRunspaceCommand.RunspaceParameterSet)
            {
                _runspace = Runspace;
            }
            else
            {
                IReadOnlyList<Runspace> runspaces = null;

                switch (ParameterSetName)
                {
                    case DebugRunspaceCommand.NameParameterSet:
                        runspaces = GetRunspaceUtils.GetRunspacesByName(new string[] { Name });
                        break;

                    case DebugRunspaceCommand.IdParameterSet:
                        runspaces = GetRunspaceUtils.GetRunspacesById(new int[] { Id });
                        break;

                    case DebugRunspaceCommand.InstanceIdParameterSet:
                        runspaces = GetRunspaceUtils.GetRunspacesByInstanceId(new Guid[] { InstanceId });
                        break;
                }

                // A single name/id/instance-id must resolve to exactly one runspace.
                if (runspaces.Count > 1)
                {
                    ThrowTerminatingError(
                        new ErrorRecord(
                            new PSArgumentException(Debugger.RunspaceDebuggingTooManyRunspacesFound),
                            "DebugRunspaceTooManyRunspaceFound",
                            ErrorCategory.InvalidOperation,
                            this)
                        );
                }

                if (runspaces.Count == 1)
                {
                    _runspace = runspaces[0];
                }
            }

            if (_runspace == null)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        new PSArgumentNullException(Debugger.RunspaceDebuggingNoRunspaceFound),
                        "DebugRunspaceNoRunspaceFound",
                        ErrorCategory.InvalidOperation,
                        this)
                    );
            }

            Runspace defaultRunspace = LocalRunspace.DefaultRunspace;
            if (defaultRunspace == null || defaultRunspace.Debugger == null)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        new PSInvalidOperationException(Debugger.RunspaceDebuggingNoHostRunspaceOrDebugger),
                        "DebugRunspaceNoHostDebugger",
                        ErrorCategory.InvalidOperation,
                        this)
                    );
            }

            // Debugging the host's own runspace would deadlock the host.
            if (_runspace == defaultRunspace)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        new PSInvalidOperationException(Debugger.RunspaceDebuggingCannotDebugDefaultRunspace),
                        "DebugRunspaceCannotDebugHostRunspace",
                        ErrorCategory.InvalidOperation,
                        this)
                    );
            }

            if (this.Host == null || this.Host.UI == null)
            {
                ThrowTerminatingError(
                    new ErrorRecord(
                        new PSInvalidOperationException(Debugger.RunspaceDebuggingNoHost),
                        "DebugRunspaceNoHostAvailable",
                        ErrorCategory.InvalidOperation,
                        this)
                    );
            }

            if (!ShouldProcess(_runspace.Name, VerbsDiagnostic.Debug))
            {
                return;
            }

            _debugger = defaultRunspace.Debugger;

            try
            {
                PrepareRunspace(_runspace);

                // Blocking call.  Send runspace/command output to host UI while debugging and wait for runspace/command completion.
                WaitAndReceiveRunspaceOutput();
            }
            finally
            {
                RestoreRunspace(_runspace);
            }
        }

        /// <summary>
        /// Stop processing.
        /// Ctrl+C handler: ends the debugging loop, cancels runspace debugging, and
        /// unblocks both the blocking output collection and the new-script wait.
        /// </summary>
        protected override void StopProcessing()
        {
            _debugging = false;

            // Cancel runspace debugging.
            System.Management.Automation.Debugger debugger = _debugger;
            if ((debugger != null) && (_runspace != null))
            {
                debugger.StopDebugRunspace(_runspace);
            }

            // Unblock the data collection.
            PSDataCollection<PSStreamObject> debugCollection = _debugBlockingCollection;
            if (debugCollection != null)
            {
                debugCollection.Complete();
            }

            // Unblock any new command wait.
            _newRunningScriptEvent.Set();
        }

        #endregion

        #region Private methods

        /// <summary>
        /// Main debugging loop.  Attaches the host debugger to the target runspace
        /// and, for each script that starts running there, blocks on
        /// <see cref="_debugBlockingCollection"/> writing each item to this cmdlet's
        /// output until the command completes or the session is terminated.
        /// </summary>
        private void WaitAndReceiveRunspaceOutput()
        {
            _debugging = true;

            try
            {
                HostWriteLine(string.Format(CultureInfo.InvariantCulture, Debugger.RunspaceDebuggingStarted, _runspace.Name));
                HostWriteLine(Debugger.RunspaceDebuggingEndSession);
                HostWriteLine(string.Empty);

                _runspace.AvailabilityChanged += HandleRunspaceAvailabilityChanged;
                _debugger.NestedDebuggingCancelledEvent += HandleDebuggerNestedDebuggingCancelledEvent;

                // Make sure host debugger has debugging turned on.
                _debugger.SetDebugMode(DebugModes.LocalScript | DebugModes.RemoteScript);

                // Set up host script debugger to debug the runspace.
                _debugger.DebugRunspace(_runspace);

                while (_debugging)
                {
                    // Wait for running script.
                    _newRunningScriptEvent.Wait();

                    if (!_debugging)
                    {
                        return;
                    }

                    AddDataEventHandlers();

                    try
                    {
                        // Block cmdlet during debugging until either the command finishes
                        // or the user terminates the debugging session.
                        foreach (var streamItem in _debugBlockingCollection)
                        {
                            streamItem.WriteStreamObject(this);
                        }
                    }
                    finally
                    {
                        RemoveDataEventHandlers();
                    }

                    if (_debugging && (!_runspace.InNestedPrompt))
                    {
                        HostWriteLine(string.Empty);
                        HostWriteLine(Debugger.RunspaceDebuggingScriptCompleted);
                        HostWriteLine(Debugger.RunspaceDebuggingEndSession);
                        HostWriteLine(string.Empty);
                    }

                    // Re-arm the wait; HandleRunspaceAvailabilityChanged sets it again
                    // when a new command starts in the target runspace.
                    _newRunningScriptEvent.Reset();
                }
            }
            finally
            {
                _runspace.AvailabilityChanged -= HandleRunspaceAvailabilityChanged;
                _debugger.NestedDebuggingCancelledEvent -= HandleDebuggerNestedDebuggingCancelledEvent;
                _debugger.StopDebugRunspace(_runspace);
                _newRunningScriptEvent.Dispose();
            }
        }

        /// <summary>
        /// Writes a status line to the host UI in yellow (when a RawUI is available),
        /// swallowing host exceptions so status output can never fail the cmdlet.
        /// </summary>
        private void HostWriteLine(string line)
        {
            if ((this.Host != null) && (this.Host.UI != null))
            {
                try
                {
                    if (this.Host.UI.RawUI != null)
                    {
                        this.Host.UI.WriteLine(ConsoleColor.Yellow, this.Host.UI.RawUI.BackgroundColor, line);
                    }
                    else
                    {
                        this.Host.UI.WriteLine(line);
                    }
                }
                catch (System.Management.Automation.Host.HostException) { }
            }
        }

        /// <summary>
        /// Creates fresh output collections and subscribes to the data events of
        /// whatever is currently running in the target runspace — a base PowerShell
        /// object if one exists, otherwise the currently running pipeline.
        /// </summary>
        private void AddDataEventHandlers()
        {
            // Create new collection objects.
            if (_debugBlockingCollection != null) { _debugBlockingCollection.Dispose(); }
            if (_debugAccumulateCollection != null) { _debugAccumulateCollection.Dispose(); }

            // BlockingEnumerator makes the foreach in WaitAndReceiveRunspaceOutput
            // block until items arrive or the collection is completed.
            _debugBlockingCollection = new PSDataCollection<PSStreamObject>();
            _debugBlockingCollection.BlockingEnumerator = true;
            _debugAccumulateCollection = new PSDataCollection<PSStreamObject>();

            _runningPowerShell = _runspace.GetCurrentBasePowerShell();
            if (_runningPowerShell != null)
            {
                if (_runningPowerShell.OutputBuffer != null)
                {
                    _runningPowerShell.OutputBuffer.DataAdding += HandlePowerShellOutputBufferDataAdding;
                }

                if (_runningPowerShell.ErrorBuffer != null)
                {
                    _runningPowerShell.ErrorBuffer.DataAdding += HandlePowerShellErrorBufferDataAdding;
                }
            }
            else
            {
                _runningPipeline = _runspace.GetCurrentlyRunningPipeline();
                if (_runningPipeline != null)
                {
                    if (_runningPipeline.Output != null)
                    {
                        _runningPipeline.Output.DataReady += HandlePipelineOutputDataReady;
                    }

                    if (_runningPipeline.Error != null)
                    {
                        _runningPipeline.Error.DataReady += HandlePipelineErrorDataReady;
                    }
                }
            }
        }

        /// <summary>
        /// Unsubscribes the event handlers added by <see cref="AddDataEventHandlers"/>
        /// and clears the running command references.
        /// </summary>
        private void RemoveDataEventHandlers()
        {
            if (_runningPowerShell != null)
            {
                if (_runningPowerShell.OutputBuffer != null)
                {
                    _runningPowerShell.OutputBuffer.DataAdding -= HandlePowerShellOutputBufferDataAdding;
                }

                if (_runningPowerShell.ErrorBuffer != null)
                {
                    _runningPowerShell.ErrorBuffer.DataAdding -= HandlePowerShellErrorBufferDataAdding;
                }

                _runningPowerShell = null;
            }
            else if (_runningPipeline != null)
            {
                if (_runningPipeline.Output != null)
                {
                    _runningPipeline.Output.DataReady -= HandlePipelineOutputDataReady;
                }

                if (_runningPipeline.Error != null)
                {
                    _runningPipeline.Error.DataReady -= HandlePipelineErrorDataReady;
                }

                _runningPipeline = null;
            }
        }

        /// <summary>
        /// Tracks the target runspace's availability: completes the blocking output
        /// collection when the runspace becomes idle, and signals the new-script
        /// event when it transitions from idle to busy.
        /// </summary>
        private void HandleRunspaceAvailabilityChanged(object sender, RunspaceAvailabilityEventArgs e)
        {
            // Ignore nested commands.
            LocalRunspace localRunspace = sender as LocalRunspace;
            if (localRunspace != null)
            {
                var basePowerShell = localRunspace.GetCurrentBasePowerShell();
                if ((basePowerShell != null) && (basePowerShell.IsNested))
                {
                    return;
                }
            }

            RunspaceAvailability prevAvailability = _previousRunspaceAvailability;
            _previousRunspaceAvailability = e.RunspaceAvailability;

            if ((e.RunspaceAvailability == RunspaceAvailability.Available) || (e.RunspaceAvailability == RunspaceAvailability.None))
            {
                // Command finished: unblock the foreach over _debugBlockingCollection.
                _debugBlockingCollection.Complete();
            }
            else if ((e.RunspaceAvailability == RunspaceAvailability.Busy) &&
                     ((prevAvailability == RunspaceAvailability.Available) || (prevAvailability == RunspaceAvailability.None)))
            {
                // Idle -> busy transition: a new command started running.
                _newRunningScriptEvent.Set();
            }
        }

        /// <summary>
        /// Treats a cancelled nested debugging session the same as Ctrl+C.
        /// </summary>
        private void HandleDebuggerNestedDebuggingCancelledEvent(object sender, EventArgs e)
        {
            StopProcessing();
        }

        private void HandlePipelineOutputDataReady(object sender, EventArgs e)
        {
            PipelineReader<PSObject> reader = sender as PipelineReader<PSObject>;
            if (reader != null && reader.IsOpen)
            {
                WritePipelineCollection(reader.NonBlockingRead(), PSStreamObjectType.Output);
            }
        }

        private void HandlePipelineErrorDataReady(object sender, EventArgs e)
        {
            PipelineReader<object> reader = sender as PipelineReader<object>;
            if (reader != null && reader.IsOpen)
            {
                WritePipelineCollection(reader.NonBlockingRead(), PSStreamObjectType.Error);
            }
        }

        /// <summary>
        /// Wraps each non-null pipeline item in a PSStreamObject of the given stream
        /// type and forwards it to the blocking collection.
        /// </summary>
        private void WritePipelineCollection<T>(Collection<T> collection, PSStreamObjectType psStreamType)
        {
            foreach (var item in collection)
            {
                if (item != null)
                {
                    AddToDebugBlockingCollection(new PSStreamObject(psStreamType, item));
                }
            }
        }

        private void HandlePowerShellOutputBufferDataAdding(object sender, DataAddingEventArgs e)
        {
            if (e.ItemAdded != null)
            {
                HandlePowerShellPStreamItem(new PSStreamObject(PSStreamObjectType.Output, e.ItemAdded));
            }
        }

        private void HandlePowerShellErrorBufferDataAdding(object sender, DataAddingEventArgs e)
        {
            if (e.ItemAdded != null)
            {
                HandlePowerShellPStreamItem(new PSStreamObject(PSStreamObjectType.Error, e.ItemAdded));
            }
        }

        /// <summary>
        /// Routes a stream item: while stopped at a breakpoint items are buffered in
        /// the accumulator; otherwise any buffered items are flushed first and the
        /// new item goes straight to the blocking collection.
        /// </summary>
        private void HandlePowerShellPStreamItem(PSStreamObject streamItem)
        {
            if (!_debugger.InBreakpoint)
            {
                // First write any accumulated items.
                foreach (var item in _debugAccumulateCollection.ReadAll())
                {
                    AddToDebugBlockingCollection(item);
                }

                // Handle new item.
                if ((_debugBlockingCollection != null) && (_debugBlockingCollection.IsOpen))
                {
                    AddToDebugBlockingCollection(streamItem);
                }
            }
            else if (_debugAccumulateCollection.IsOpen)
            {
                // Add to accumulator if debugger is stopped in breakpoint.
                _debugAccumulateCollection.Add(streamItem);
            }
        }

        /// <summary>
        /// Adds an item to the blocking collection, ignoring the race where the
        /// collection is completed between the IsOpen check and the Add.
        /// </summary>
        private void AddToDebugBlockingCollection(PSStreamObject streamItem)
        {
            if (!_debugBlockingCollection.IsOpen) { return; }

            if (streamItem != null)
            {
                try
                {
                    _debugBlockingCollection.Add(streamItem);
                }
                catch (PSInvalidOperationException) { }
            }
        }

        /// <summary>
        /// Puts the target runspace's debugger into local mode and disables the
        /// host debugger for it; undone by <see cref="RestoreRunspace"/>.
        /// </summary>
        private void PrepareRunspace(Runspace runspace)
        {
            SetLocalMode(runspace.Debugger, true);
            EnableHostDebugger(runspace, false);
        }

        private void RestoreRunspace(Runspace runspace)
        {
            SetLocalMode(runspace.Debugger, false);
            EnableHostDebugger(runspace, true);
        }

        private void EnableHostDebugger(Runspace runspace, bool enabled)
        {
            // Only enable and disable the host's runspace if we are in process attach mode.
            if (_debugger is ServerRemoteDebugger)
            {
                LocalRunspace localRunspace = runspace as LocalRunspace;
                if ((localRunspace != null) && (localRunspace.ExecutionContext != null) && (localRunspace.ExecutionContext.EngineHostInterface != null))
                {
                    try
                    {
                        localRunspace.ExecutionContext.EngineHostInterface.DebuggerEnabled = enabled;
                    }
                    catch (PSNotImplementedException) { }
                }
            }
        }

        private void SetLocalMode(System.Management.Automation.Debugger debugger, bool localMode)
        {
            ServerRemoteDebugger remoteDebugger = debugger as ServerRemoteDebugger;
            if (remoteDebugger != null)
            {
                remoteDebugger.LocalDebugMode = localMode;
            }
        }

        #endregion
    }
}
using System; using System.Collections.Generic; using System.Linq; using System.Text; using NetGore.IO; using NUnit.Framework; namespace NetGore.Tests.IO { [TestFixture] public class IValueReaderWriterTests { /// <summary> /// Handler for creating a value from a double. /// </summary> /// <typeparam name="T">The Type of value to create.</typeparam> /// <param name="value">The value to give the new type. Round is fine as long as it is consistent.</param> /// <returns>The new value as type <typeparamref name="T"/>.</returns> delegate T CreateValueTypeHandler<out T>(double value); /// <summary> /// Handler for reading a value. /// </summary> /// <typeparam name="T">The Type of value to write.</typeparam> /// <param name="r">IValueReader to read from.</param> /// <param name="name">Name to use for reading.</param> /// <returns>The read value.</returns> delegate T ReadTestValuesHandler<out T>(IValueReader r, string name); /// <summary> /// Handler for writing a value. /// </summary> /// <typeparam name="T">The Type of value to write.</typeparam> /// <param name="w">IValueWriter to write to.</param> /// <param name="name">Name to use for writing.</param> /// <param name="value">Value to write.</param> delegate void WriteTestValuesHandler<in T>(IValueWriter w, string name, T value); static readonly object[] _emptyObjArray = new object[0]; static void AssertArraysEqual<T>(IList<T> expected, IList<T> actual) { AssertArraysEqual(expected, actual, string.Empty, _emptyObjArray); } static void AssertArraysEqual<T>(IList<T> expected, IList<T> actual, string msg, params object[] objs) { var customMsg = string.Empty; if (!string.IsNullOrEmpty(msg)) { if (objs != null && objs.Length > 0) customMsg = string.Format(msg, objs); else customMsg = msg; } Assert.AreEqual(expected.Count, actual.Count, "Lengths not equal. Type: `{0}`. 
Message: {1}", typeof(T), customMsg); for (var i = 0; i < expected.Count; i++) { Assert.AreEqual(expected[i], actual[i], "Type: `{0}` Index: `{1}` Message: {2}", typeof(T), i, customMsg); } } /// <summary> /// Gets the key for a value. /// </summary> /// <param name="i">The index of the value.</param> /// <returns>The key for a value with index of <paramref name="i"/>.</returns> static string GetValueKey(int i) { return "V" + Parser.Invariant.ToString(i); } static string Implode(IEnumerable<string> src) { var sb = new StringBuilder(); foreach (var s in src) { sb.Append(s); } return sb.ToString(); } /// <summary> /// Gets a range of values. /// </summary> /// <typeparam name="T">The Type to get the value as.</typeparam> /// <param name="start">The start value.</param> /// <param name="count">The number of values to get.</param> /// <param name="step">The step between each value.</param> /// <param name="creator">Handler to convert a double to type <typeparamref name="T"/>.</param> /// <returns>The array of values.</returns> static T[] Range<T>(double start, int count, double step, CreateValueTypeHandler<T> creator) { var ret = new T[count]; var current = start; for (var i = 0; i < count; i++) { ret[i] = creator(current); current += step; } return ret; } /// <summary> /// Reads multiple test values. This is not like IValueReader.ReadValues as it does not use nodes /// nor does it track the number of items written. This is just to make it easy to read many /// values over a loop. 
/// </summary> /// <typeparam name="T">The Type of value to write.</typeparam> /// <param name="r">IValueReader to read from.</param> /// <param name="expected">The values expected to be read.</param> /// <param name="readHandler">The read handler.</param> static void ReadTestValues<T>(IValueReader r, IList<T> expected, ReadTestValuesHandler<T> readHandler) { var actual = new T[expected.Count]; for (var i = 0; i < expected.Count; i++) { actual[i] = readHandler(r, GetValueKey(i)); } const string errmsg = "Writer Type: `{0}`"; AssertArraysEqual(expected, actual, errmsg, r.GetType()); } /// <summary> /// Writes multiple test values. This is not like IValueWriter.WriteValues as it does not use nodes /// nor does it track the number of items written. This is just to make it easy to write many /// values over a loop. /// </summary> /// <typeparam name="T">The Type of value to write.</typeparam> /// <param name="w">IValueWriter to write to.</param> /// <param name="values">The values to write.</param> /// <param name="writeHandler">The write handler.</param> static void WriteTestValues<T>(IValueWriter w, IList<T> values, WriteTestValuesHandler<T> writeHandler) { for (var i = 0; i < values.Count; i++) { writeHandler(w, GetValueKey(i), values[i]); } } #region Unit tests [Test] public void TestBools() { foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { var v1 = Range(0, 100, 1, x => x % 3 == 0); using (var w = creator.GetWriter()) { WriteTestValues(w, v1, ((pwriter, pname, pvalue) => pwriter.Write(pname, pvalue))); } var r = creator.GetReader(); { ReadTestValues(r, v1, ((preader, pname) => preader.ReadBool(pname))); } } } } [Test] public void TestBytes() { foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { var v1 = Range(0, 100, 1, x => (byte)x); using (var w = creator.GetWriter()) { WriteTestValues(w, v1, ((pwriter, pname, pvalue) => 
pwriter.Write(pname, pvalue))); } var r = creator.GetReader(); { ReadTestValues(r, v1, ((preader, pname) => preader.ReadByte(pname))); } } } } [Test] public void TestDoubles() { foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { var v1 = Range(0, 100, 1, x => x); using (var w = creator.GetWriter()) { WriteTestValues(w, v1, ((pwriter, pname, pvalue) => pwriter.Write(pname, pvalue))); } var r = creator.GetReader(); { ReadTestValues(r, v1, ((preader, pname) => preader.ReadDouble(pname))); } } } } [Test] public void TestEnumNameWithNameLookup() { var values = new TestEnum[] { TestEnum.A, TestEnum.Dee, TestEnum.Eeie, TestEnum.Cee, TestEnum.Eeie, TestEnum.Ayche, TestEnum.B, TestEnum.B, TestEnum.Cee, TestEnum.G, TestEnum.Effffuh, TestEnum.A, TestEnum.B, TestEnum.Cee }; foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { if (!creator.SupportsNameLookup) continue; using (var w = creator.GetWriter()) { for (var i = 0; i < values.Length; i++) { w.WriteEnumName(GetValueKey(i), values[i]); } } var r = creator.GetReader(); { Assert.AreEqual(values[3], r.ReadEnumName<TestEnum>(GetValueKey(3))); Assert.AreEqual(values[5], r.ReadEnumName<TestEnum>(GetValueKey(5))); Assert.AreEqual(values[0], r.ReadEnumName<TestEnum>(GetValueKey(0))); Assert.AreEqual(values[1], r.ReadEnumName<TestEnum>(GetValueKey(1))); Assert.AreEqual(values[3], r.ReadEnumName<TestEnum>(GetValueKey(3))); Assert.AreEqual(values[5], r.ReadEnumName<TestEnum>(GetValueKey(5))); Assert.AreEqual(values[4], r.ReadEnumName<TestEnum>(GetValueKey(4))); Assert.AreEqual(values[4], r.ReadEnumName<TestEnum>(GetValueKey(4))); } } } } [Test] public void TestEnumNameWithoutNameLookup() { var values = new TestEnum[] { TestEnum.A, TestEnum.Dee, TestEnum.Eeie, TestEnum.Cee, TestEnum.Eeie, TestEnum.Ayche, TestEnum.B, TestEnum.B, TestEnum.Cee, TestEnum.G, TestEnum.Effffuh, TestEnum.A, TestEnum.B, TestEnum.Cee 
}; foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { using (var w = creator.GetWriter()) { for (var i = 0; i < values.Length; i++) { w.WriteEnumName(GetValueKey(i), values[i]); } } var r = creator.GetReader(); { for (var i = 0; i < values.Length; i++) { Assert.AreEqual(values[i], r.ReadEnumName<TestEnum>(GetValueKey(i))); } } } } } [Test] public void TestEnumValueWithoutNameLookup() { var values = new TestEnum[] { TestEnum.A, TestEnum.Dee, TestEnum.Eeie, TestEnum.Cee, TestEnum.Eeie, TestEnum.Ayche, TestEnum.B, TestEnum.B, TestEnum.Cee, TestEnum.G, TestEnum.Effffuh, TestEnum.A, TestEnum.B, TestEnum.Cee }; foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { using (var w = creator.GetWriter()) { for (var i = 0; i < values.Length; i++) { w.WriteEnumValue(GetValueKey(i), values[i]); } } var r = creator.GetReader(); { for (var i = 0; i < values.Length; i++) { Assert.AreEqual(values[i], r.ReadEnumValue<TestEnum>(GetValueKey(i))); } } } } } [Test] public void TestEnumValuesWithNameLookup() { var values = new TestEnum[] { TestEnum.A, TestEnum.Dee, TestEnum.Eeie, TestEnum.Cee, TestEnum.Eeie, TestEnum.Ayche, TestEnum.B, TestEnum.B, TestEnum.Cee, TestEnum.G, TestEnum.Effffuh, TestEnum.A, TestEnum.B, TestEnum.Cee }; foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { if (!creator.SupportsNameLookup) continue; using (var w = creator.GetWriter()) { for (var i = 0; i < values.Length; i++) { w.WriteEnumValue(GetValueKey(i), values[i]); } } var r = creator.GetReader(); { Assert.AreEqual(values[3], r.ReadEnumValue<TestEnum>(GetValueKey(3))); Assert.AreEqual(values[5], r.ReadEnumValue<TestEnum>(GetValueKey(5))); Assert.AreEqual(values[0], r.ReadEnumValue<TestEnum>(GetValueKey(0))); Assert.AreEqual(values[1], r.ReadEnumValue<TestEnum>(GetValueKey(1))); Assert.AreEqual(values[3], 
r.ReadEnumValue<TestEnum>(GetValueKey(3))); Assert.AreEqual(values[5], r.ReadEnumValue<TestEnum>(GetValueKey(5))); Assert.AreEqual(values[4], r.ReadEnumValue<TestEnum>(GetValueKey(4))); Assert.AreEqual(values[4], r.ReadEnumValue<TestEnum>(GetValueKey(4))); } } } } [Test] public void TestFloats() { foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { var v1 = Range(0, 100, 1, x => (float)x); using (var w = creator.GetWriter()) { WriteTestValues(w, v1, ((pwriter, pname, pvalue) => pwriter.Write(pname, pvalue))); } var r = creator.GetReader(); { ReadTestValues(r, v1, ((preader, pname) => preader.ReadFloat(pname))); } } } } [Test] public void TestIllegalXmlCharactersInStrings() { var illegalStrs = new string[] { "<", ">", "\\", "/", "&", "'", "\"", "?", Environment.NewLine }; var allStrings = Implode(illegalStrs); foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { using (var w = creator.GetWriter()) { for (var i = 0; i < illegalStrs.Length; i++) { w.Write(GetValueKey(i), illegalStrs[i]); } w.Write("All", allStrings); } var r = creator.GetReader(); for (var i = 0; i < illegalStrs.Length; i++) { Assert.AreEqual(illegalStrs[i], r.ReadString(GetValueKey(i))); } Assert.AreEqual(allStrings, r.ReadString("All")); } } } [Test] public void TestInts() { foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { var v1 = Range(0, 100, 1, x => (int)x); using (var w = creator.GetWriter()) { WriteTestValues(w, v1, ((pwriter, pname, pvalue) => pwriter.Write(pname, pvalue))); } var r = creator.GetReader(); { ReadTestValues(r, v1, ((preader, pname) => preader.ReadInt(pname))); } } } } [Test] public void TestLongs() { foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators) { using (var creator = createCreator()) { var v1 = Range(0, 100, 1, x => (long)x); using (var w = 
creator.GetWriter())
                {
                    WriteTestValues(w, v1, (wr, name, value) => wr.Write(name, value));
                }

                var r = creator.GetReader();
                ReadTestValues(r, v1, (rd, name) => rd.ReadLong(name));
            }
        }
    }

        /// <summary>
        /// Checks that named values can be read back in an arbitrary order, with repeats,
        /// when the creator supports name lookup.
        /// </summary>
        [Test]
        public void TestNameLookup()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNameLookup)
                        continue;

                    const bool a = true;
                    const int b = 10;
                    const float c = 133.2f;
                    const int d = 2051;
                    const bool e = false;
                    const string f = "asdf asdf lkjwreoiuwalj jk wark qoiuwer";
                    const int g = 2092142;

                    using (var w = creator.GetWriter())
                    {
                        w.Write("a", a);
                        w.Write("b", b);
                        w.Write("c", c);
                        w.Write("d", d);
                        w.Write("e", e);
                        w.Write("f", f);
                        w.Write("g", g);
                    }

                    var r = creator.GetReader();

                    // Read back out of order, and re-read several names, to exercise the lookup.
                    Assert.AreEqual(f, r.ReadString("f"));
                    Assert.AreEqual(c, r.ReadFloat("c"));
                    Assert.AreEqual(b, r.ReadInt("b"));
                    Assert.AreEqual(g, r.ReadInt("g"));
                    Assert.AreEqual(d, r.ReadInt("d"));
                    Assert.AreEqual(e, r.ReadBool("e"));
                    Assert.AreEqual(f, r.ReadString("f"));
                    Assert.AreEqual(d, r.ReadInt("d"));
                    Assert.AreEqual(e, r.ReadBool("e"));
                    Assert.AreEqual(f, r.ReadString("f"));
                }
            }
        }

        /// <summary>
        /// Checks that name lookups are case-insensitive: values are written with mixed-case
        /// names and read back with all-lowercase names.
        /// </summary>
        [Test]
        public void TestNameLookupCaseSensitivity()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNameLookup)
                        continue;

                    const bool a = true;
                    const int b = 10;
                    const float c = 133.2f;
                    const int d = 2051;
                    const bool e = false;
                    const string f = "asdf asdf lkjwreoiuwalj jk wark qoiuwer";
                    const int g = 2092142;

                    // Deliberately mixed-case names on the write side.
                    using (var w = creator.GetWriter())
                    {
                        w.Write("A", a);
                        w.Write("B", b);
                        w.Write("c", c);
                        w.Write("D", d);
                        w.Write("e", e);
                        w.Write("F", f);
                        w.Write("G", g);
                    }

                    var r = creator.GetReader();

                    // All-lowercase names on the read side, scrambled order with repeats.
                    Assert.AreEqual(f, r.ReadString("f"));
                    Assert.AreEqual(c, r.ReadFloat("c"));
                    Assert.AreEqual(b, r.ReadInt("b"));
                    Assert.AreEqual(g, r.ReadInt("g"));
                    Assert.AreEqual(d, r.ReadInt("d"));
                    Assert.AreEqual(e, r.ReadBool("e"));
                    Assert.AreEqual(f, r.ReadString("f"));
                    Assert.AreEqual(d, r.ReadInt("d"));
                    Assert.AreEqual(e, r.ReadBool("e"));
                    Assert.AreEqual(f, r.ReadString("f"));
                }
            }
        }

        /// <summary>
        /// Checks that name lookup works across nodes: three nodes each hold their own
        /// value set, nodes are read out of order, and each node's values are re-read
        /// in a scrambled, repeating order.
        /// </summary>
        [Test]
        public void TestNameLookupWithNodes()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNameLookup)
                        continue;

                    const bool a1 = true;
                    const int b1 = 10;
                    const float c1 = 133.2f;
                    const int d1 = 2051;
                    const bool e1 = false;
                    const string f1 = "asdf asdf lkjwreoiuwalj jk wark qoiuwer";
                    const int g1 = 2092142;

                    const bool a2 = true;
                    const int b2 = 578;
                    const float c2 = 17833.2f;
                    const int d2 = 204551;
                    const bool e2 = false;
                    const string f2 = "asdfaasdfasdfwerqwerasdvxcvasdfaewalj jk wark qoiuwer";
                    const int g2 = 2092142;

                    const bool a3 = false;
                    const int b3 = 1054;
                    const float c3 = 13993.2f;
                    const int d3 = 201151;
                    const bool e3 = false;
                    const string f3 = "asdf asdasfwerqwreadsahhewwqrqwreqref lkjwreoiuwalj jk wark qoiuwer";
                    const int g3 = 2342;

                    using (var w = creator.GetWriter())
                    {
                        w.WriteStartNode("NodeA");
                        w.Write("a", a1);
                        w.Write("b", b1);
                        w.Write("c", c1);
                        w.Write("d", d1);
                        w.Write("e", e1);
                        w.Write("f", f1);
                        w.Write("g", g1);
                        w.WriteEndNode("NodeA");

                        // NodeB and NodeC use a different write order than NodeA on purpose.
                        w.WriteStartNode("NodeB");
                        w.Write("d", d2);
                        w.Write("e", e2);
                        w.Write("f", f2);
                        w.Write("g", g2);
                        w.Write("a", a2);
                        w.Write("b", b2);
                        w.Write("c", c2);
                        w.WriteEndNode("NodeB");

                        w.WriteStartNode("NodeC");
                        w.Write("d", d3);
                        w.Write("e", e3);
                        w.Write("f", f3);
                        w.Write("g", g3);
                        w.Write("a", a3);
                        w.Write("b", b3);
                        w.Write("c", c3);
                        w.WriteEndNode("NodeC");
                    }

                    var r = creator.GetReader();

                    // Nodes acquired out of write order.
                    var nodeB = r.ReadNode("NodeB");
                    var nodeC = r.ReadNode("NodeC");
                    var nodeA = r.ReadNode("NodeA");

                    // One scrambled read sequence (f,c,b,g,d,e,f,d,e,f), replayed per node
                    // through a closure over the current node and its expected values.
                    var cur = nodeB;
                    var expB = b2;
                    var expC = c2;
                    var expD = d2;
                    var expE = e2;
                    var expF = f2;
                    var expG = g2;
                    System.Action checkNode = () =>
                    {
                        Assert.AreEqual(expF, cur.ReadString("f"));
                        Assert.AreEqual(expC, cur.ReadFloat("c"));
                        Assert.AreEqual(expB, cur.ReadInt("b"));
                        Assert.AreEqual(expG, cur.ReadInt("g"));
                        Assert.AreEqual(expD, cur.ReadInt("d"));
                        Assert.AreEqual(expE, cur.ReadBool("e"));
                        Assert.AreEqual(expF, cur.ReadString("f"));
                        Assert.AreEqual(expD, cur.ReadInt("d"));
                        Assert.AreEqual(expE, cur.ReadBool("e"));
                        Assert.AreEqual(expF, cur.ReadString("f"));
                    };

                    checkNode();

                    cur = nodeA;
                    expB = b1;
                    expC = c1;
                    expD = d1;
                    expE = e1;
                    expF = f1;
                    expG = g1;
                    checkNode();

                    cur = nodeC;
                    expB = b3;
                    expC = c3;
                    expD = d3;
                    expE = e3;
                    expF = f3;
                    expG = g3;
                    checkNode();
                }
            }
        }

        /// <summary>
        /// Same as <see cref="TestNameLookupWithNodes"/>, but the value names are written
        /// with mixed casing and read back lowercase - lookups should be case-insensitive.
        /// </summary>
        [Test]
        public void TestNameLookupWithNodesCaseSensitivity()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNameLookup)
                        continue;

                    const bool a1 = true;
                    const int b1 = 10;
                    const float c1 = 133.2f;
                    const int d1 = 2051;
                    const bool e1 = false;
                    const string f1 = "asdf asdf lkjwreoiuwalj jk wark qoiuwer";
                    const int g1 = 2092142;

                    const bool a2 = true;
                    const int b2 = 578;
                    const float c2 = 17833.2f;
                    const int d2 = 204551;
                    const bool e2 = false;
                    const string f2 = "asdfaasdfasdfwerqwerasdvxcvasdfaewalj jk wark qoiuwer";
                    const int g2 = 2092142;

                    const bool a3 = false;
                    const int b3 = 1054;
                    const float c3 = 13993.2f;
                    const int d3 = 201151;
                    const bool e3 = false;
                    const string f3 = "asdf asdasfwerqwreadsahhewwqrqwreqref lkjwreoiuwalj jk wark qoiuwer";
                    const int g3 = 2342;

                    // Mixed-case names on the write side.
                    using (var w = creator.GetWriter())
                    {
                        w.WriteStartNode("NodeA");
                        w.Write("a", a1);
                        w.Write("B", b1);
                        w.Write("c", c1);
                        w.Write("D", d1);
                        w.Write("E", e1);
                        w.Write("f", f1);
                        w.Write("g", g1);
                        w.WriteEndNode("NodeA");

                        w.WriteStartNode("NodeB");
                        w.Write("D", d2);
                        w.Write("E", e2);
                        w.Write("F", f2);
                        w.Write("G", g2);
                        w.Write("a", a2);
                        w.Write("b", b2);
                        w.Write("c", c2);
                        w.WriteEndNode("NodeB");

                        w.WriteStartNode("NodeC");
                        w.Write("d", d3);
                        w.Write("e", e3);
                        w.Write("f", f3);
                        w.Write("g", g3);
                        w.Write("A", a3);
                        w.Write("B", b3);
                        w.Write("c", c3);
                        w.WriteEndNode("NodeC");
                    }

                    var r = creator.GetReader();

                    var nodeB = r.ReadNode("NodeB");
                    var nodeC = r.ReadNode("NodeC");
                    var nodeA = r.ReadNode("NodeA");

                    // Lowercase names on the read side; same scrambled sequence per node.
                    var cur = nodeB;
                    var expB = b2;
                    var expC = c2;
                    var expD = d2;
                    var expE = e2;
                    var expF = f2;
                    var expG = g2;
                    System.Action checkNode = () =>
                    {
                        Assert.AreEqual(expF, cur.ReadString("f"));
                        Assert.AreEqual(expC, cur.ReadFloat("c"));
                        Assert.AreEqual(expB, cur.ReadInt("b"));
                        Assert.AreEqual(expG, cur.ReadInt("g"));
                        Assert.AreEqual(expD, cur.ReadInt("d"));
                        Assert.AreEqual(expE, cur.ReadBool("e"));
                        Assert.AreEqual(expF, cur.ReadString("f"));
                        Assert.AreEqual(expD, cur.ReadInt("d"));
                        Assert.AreEqual(expE, cur.ReadBool("e"));
                        Assert.AreEqual(expF, cur.ReadString("f"));
                    };

                    checkNode();

                    cur = nodeA;
                    expB = b1;
                    expC = c1;
                    expD = d1;
                    expE = e1;
                    expF = f1;
                    expG = g1;
                    checkNode();

                    cur = nodeC;
                    expB = b3;
                    expC = c3;
                    expD = d3;
                    expE = e3;
                    expF = f3;
                    expG = g3;
                    checkNode();
                }
            }
        }

        /// <summary>
        /// Checks that multiple value collections can be round-tripped through nodes.
        /// </summary>
        [Test]
        public void TestNodes()
        {
foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNodes)
                        continue;

                    var v1 = Range(0, 100, 1, x => x % 3 == 0);
                    var v2 = Range(0, 100, 1, x => (int)x);
                    var v3 = Range(0, 100, 1, x => (float)x);
                    var v4 = Range(0, 100, 1, x => (byte)x);
                    var v5 = Range(0, 100, 1, x => (ushort)x);
                    var v6 = Range(0, 100, 1, x => Parser.Invariant.ToString(x));

                    using (var w = creator.GetWriter())
                    {
                        w.WriteMany("v1", v1, w.Write);
                        w.WriteMany("v2", v2, w.Write);
                        w.WriteMany("v3", v3, w.Write);
                        w.WriteMany("v4", v4, w.Write);
                        w.WriteMany("v5", v5, w.Write);
                        w.WriteMany("v6", v6, w.Write);
                    }

                    var r = creator.GetReader();
                    var r1 = r.ReadMany("v1", (rd, name) => rd.ReadBool(name));
                    var r2 = r.ReadMany("v2", (rd, name) => rd.ReadInt(name));
                    var r3 = r.ReadMany("v3", (rd, name) => rd.ReadFloat(name));
                    var r4 = r.ReadMany("v4", (rd, name) => rd.ReadByte(name));
                    var r5 = r.ReadMany("v5", (rd, name) => rd.ReadUShort(name));
                    var r6 = r.ReadMany("v6", (rd, name) => rd.ReadString(name));

                    AssertArraysEqual(v1, r1);
                    AssertArraysEqual(v2, r2);
                    AssertArraysEqual(v3, r3);
                    AssertArraysEqual(v4, r4);
                    AssertArraysEqual(v5, r5);
                    AssertArraysEqual(v6, r6);
                }
            }
        }

        /// <summary>
        /// Checks node round-tripping of bool collections of every length from 1 to 39,
        /// which stresses bit-packing implementations.
        /// </summary>
        [Test]
        public void TestNodesBoolsOnly()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                // Probe once whether this creator supports nodes at all.
                using (var tmp = createCreator())
                {
                    if (!tmp.SupportsNodes)
                        continue;
                }

                for (var i = 1; i < 40; i++)
                {
                    using (var creator = createCreator())
                    {
                        var v1 = Range(0, i, 1, x => x % 3 == 0);

                        using (var w = creator.GetWriter())
                        {
                            w.WriteMany("v1", v1, w.Write);
                        }

                        var r = creator.GetReader();
                        var r1 = r.ReadMany("v1", (rd, name) => rd.ReadBool(name));
                        AssertArraysEqual(v1, r1);
                    }
                }
            }
        }

        /// <summary>
        /// Checks deeply nested nodes where the reads walk the node tree in the same
        /// order the writes produced it.
        /// </summary>
        [Test]
        public void TestNodesDeepLinear()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNodes)
                        continue;

                    var v1 = Range(0, 100, 1, x => x % 3 == 0);
                    var v2 = Range(0, 100, 1, x => (int)x);
                    var v3 = Range(0, 100, 1, x => (float)x);
                    var v4 = Range(0, 100, 1, x => (byte)x);
                    var v5 = Range(0, 100, 1, x => (ushort)x);
                    var v6 = Range(0, 100, 1, x => Parser.Invariant.ToString(x));

                    using (var w = creator.GetWriter())
                    {
                        // Every node gets an identical copy of the six collections.
                        System.Action writeSet = () =>
                        {
                            w.WriteMany("v1", v1, w.Write);
                            w.WriteMany("v2", v2, w.Write);
                            w.WriteMany("v3", v3, w.Write);
                            w.WriteMany("v4", v4, w.Write);
                            w.WriteMany("v5", v5, w.Write);
                            w.WriteMany("v6", v6, w.Write);
                        };

                        // Tree: a1 { b1 { c1, c2 { d1 } }, b2 { c1 } }, a2 { b1, b2 }
                        w.WriteStartNode("a1");
                        writeSet();
                        w.WriteStartNode("b1");
                        writeSet();
                        w.WriteStartNode("c1");
                        writeSet();
                        w.WriteEndNode("c1");
                        w.WriteStartNode("c2");
                        writeSet();
                        w.WriteStartNode("d1");
                        writeSet();
                        w.WriteEndNode("d1");
                        w.WriteEndNode("c2");
                        w.WriteEndNode("b1");
                        w.WriteStartNode("b2");
                        writeSet();
                        w.WriteStartNode("c1");
                        writeSet();
                        w.WriteEndNode("c1");
                        w.WriteEndNode("b2");
                        w.WriteEndNode("a1");
                        w.WriteStartNode("a2");
                        writeSet();
                        w.WriteStartNode("b1");
                        writeSet();
                        w.WriteEndNode("b1");
                        w.WriteStartNode("b2");
                        writeSet();
                        w.WriteEndNode("b2");
                        w.WriteEndNode("a2");
                    }

                    var r = creator.GetReader();

                    var a1 = r.ReadNode("a1");
                    var c = a1;

                    // Verifies the six collections on whatever node 'c' currently is.
                    System.Action checkSet = () =>
                    {
                        AssertArraysEqual(v1, c.ReadMany("v1", (rd, name) => rd.ReadBool(name)));
                        AssertArraysEqual(v2, c.ReadMany("v2", (rd, name) => rd.ReadInt(name)));
                        AssertArraysEqual(v3, c.ReadMany("v3", (rd, name) => rd.ReadFloat(name)));
                        AssertArraysEqual(v4, c.ReadMany("v4", (rd, name) => rd.ReadByte(name)));
                        AssertArraysEqual(v5, c.ReadMany("v5", (rd, name) => rd.ReadUShort(name)));
                        AssertArraysEqual(v6, c.ReadMany("v6", (rd, name) => rd.ReadString(name)));
                    };

                    // Nodes are acquired and checked strictly in write order.
                    checkSet();

                    var a1b1 = a1.ReadNode("b1");
                    c = a1b1;
                    checkSet();

                    var a1b1c1 = a1b1.ReadNode("c1");
                    c = a1b1c1;
                    checkSet();

                    var a1b1c2 = a1b1.ReadNode("c2");
                    c = a1b1c2;
                    checkSet();

                    var a1b1c2d1 = a1b1c2.ReadNode("d1");
                    c = a1b1c2d1;
                    checkSet();

                    var a1b2 = a1.ReadNode("b2");
                    c = a1b2;
                    checkSet();

                    var a1b2c1 = a1b2.ReadNode("c1");
                    c = a1b2c1;
                    checkSet();

                    var a2 = r.ReadNode("a2");
                    c = a2;
                    checkSet();

                    var a2b1 = a2.ReadNode("b1");
                    c = a2b1;
                    checkSet();

                    var a2b2 = a2.ReadNode("b2");
                    c = a2b2;
                    checkSet();
                }
            }
        }

        /// <summary>
        /// Checks deeply nested nodes where all nodes are acquired up front and then
        /// verified in a deliberately scrambled order (requires name lookup).
        /// </summary>
        [Test]
        public void TestNodesDeepRandom()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    if (!creator.SupportsNodes || !creator.SupportsNameLookup)
                        continue;

                    var v1 = Range(0, 100, 1, x => x % 3 == 0);
                    var v2 = Range(0, 100, 1, x => (int)x);
                    var v3 = Range(0, 100, 1, x => (float)x);
                    var v4 = Range(0, 100, 1, x => (byte)x);
                    var v5 = Range(0, 100, 1, x => (ushort)x);
                    var v6 = Range(0, 100, 1, x => Parser.Invariant.ToString(x));

                    using (var w = creator.GetWriter())
                    {
                        System.Action writeSet = () =>
                        {
                            w.WriteMany("v1", v1, w.Write);
                            w.WriteMany("v2", v2, w.Write);
                            w.WriteMany("v3", v3, w.Write);
                            w.WriteMany("v4", v4, w.Write);
                            w.WriteMany("v5", v5, w.Write);
                            w.WriteMany("v6", v6, w.Write);
                        };

                        // Same tree shape as TestNodesDeepLinear.
                        w.WriteStartNode("a1");
                        writeSet();
                        w.WriteStartNode("b1");
                        writeSet();
                        w.WriteStartNode("c1");
                        writeSet();
                        w.WriteEndNode("c1");
                        w.WriteStartNode("c2");
                        writeSet();
                        w.WriteStartNode("d1");
                        writeSet();
                        w.WriteEndNode("d1");
                        w.WriteEndNode("c2");
                        w.WriteEndNode("b1");
                        w.WriteStartNode("b2");
                        writeSet();
                        w.WriteStartNode("c1");
                        writeSet();
                        w.WriteEndNode("c1");
                        w.WriteEndNode("b2");
                        w.WriteEndNode("a1");
                        w.WriteStartNode("a2");
                        writeSet();
                        w.WriteStartNode("b1");
                        writeSet();
                        w.WriteEndNode("b1");
                        w.WriteStartNode("b2");
                        writeSet();
                        w.WriteEndNode("b2");
                        w.WriteEndNode("a2");
                    }

                    var r = creator.GetReader();

                    // Grab every node first...
                    var a1 = r.ReadNode("a1");
                    var a1b1 = a1.ReadNode("b1");
                    var a1b1c1 = a1b1.ReadNode("c1");
                    var a1b1c2 = a1b1.ReadNode("c2");
                    var a1b1c2d1 = a1b1c2.ReadNode("d1");
                    var a1b2 = a1.ReadNode("b2");
                    var a1b2c1 = a1b2.ReadNode("c1");
                    var a2 = r.ReadNode("a2");
                    var a2b1 = a2.ReadNode("b1");
                    var a2b2 = a2.ReadNode("b2");

                    var c = a1;
                    System.Action checkSet = () =>
                    {
                        AssertArraysEqual(v1, c.ReadMany("v1", (rd, name) => rd.ReadBool(name)));
                        AssertArraysEqual(v2, c.ReadMany("v2", (rd, name) => rd.ReadInt(name)));
                        AssertArraysEqual(v3, c.ReadMany("v3", (rd, name) => rd.ReadFloat(name)));
                        AssertArraysEqual(v4, c.ReadMany("v4", (rd, name) => rd.ReadByte(name)));
                        AssertArraysEqual(v5, c.ReadMany("v5", (rd, name) => rd.ReadUShort(name)));
                        AssertArraysEqual(v6, c.ReadMany("v6", (rd, name) => rd.ReadString(name)));
                    };

                    // ...then verify in a scrambled order, including one node twice.
                    checkSet();
                    c = a1b2c1;
                    checkSet();
                    c = a1b2;
                    checkSet();
                    c = a1b1c2d1;
                    checkSet();
                    c = a1b2;
                    checkSet();
                    c = a1b1c2;
                    checkSet();
                    c = a1b1c1;
                    checkSet();
                    c = a2b2;
                    checkSet();
                    c = a2b1;
                    checkSet();
                    c = a2;
                    checkSet();
                }
            }
        }

        /// <summary>
        /// Round-trips a range of sbyte values.
        /// </summary>
        [Test]
        public void TestSBytes()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    var values = Range(0, 100, 1, x => (sbyte)x);

                    using (var w = creator.GetWriter())
                    {
                        WriteTestValues(w, values, (wr, name, value) => wr.Write(name, value));
                    }

                    var r = creator.GetReader();
                    ReadTestValues(r, values, (rd, name) => rd.ReadSByte(name));
                }
            }
        }

        /// <summary>
        /// Round-trips a range of short values.
        /// </summary>
        [Test]
        public void TestShorts()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    var values = Range(0, 100, 1, x => (short)x);

                    using (var w = creator.GetWriter())
                    {
                        WriteTestValues(w, values, (wr, name, value) => wr.Write(name, value));
                    }

                    var r = creator.GetReader();
                    ReadTestValues(r, values, (rd, name) => rd.ReadShort(name));
                }
            }
        }

        /// <summary>
        /// Round-trips a range of string values.
        /// </summary>
        [Test]
        public void TestStrings()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    var values = Range(0, 100, 1, x => Parser.Invariant.ToString(x));

                    using (var w = creator.GetWriter())
                    {
                        WriteTestValues(w, values, (wr, name, value) => wr.Write(name, value));
                    }

                    var r = creator.GetReader();
                    ReadTestValues(r, values, (rd, name) => rd.ReadString(name));
                }
            }
        }

        /// <summary>
        /// Round-trips a range of uint values.
        /// </summary>
        [Test]
        public void TestUInts()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    var values = Range(0, 100, 1, x => (uint)x);

                    using (var w = creator.GetWriter())
                    {
                        WriteTestValues(w, values, (wr, name, value) => wr.Write(name, value));
                    }

                    var r = creator.GetReader();
                    ReadTestValues(r, values, (rd, name) => rd.ReadUInt(name));
                }
            }
        }

        /// <summary>
        /// Round-trips a range of ulong values.
        /// </summary>
        [Test]
        public void TestULongs()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    var values = Range(0, 100, 1, x => (ulong)x);

                    using (var w = creator.GetWriter())
                    {
                        WriteTestValues(w, values, (wr, name, value) => wr.Write(name, value));
                    }

                    var r = creator.GetReader();
                    ReadTestValues(r, values, (rd, name) => rd.ReadULong(name));
                }
            }
        }

        /// <summary>
        /// Round-trips a range of ushort values.
        /// </summary>
        [Test]
        public void TestUShorts()
        {
            foreach (var createCreator in IValueReaderWriterTestHelper.CreateCreators)
            {
                using (var creator = createCreator())
                {
                    var values = Range(0, 100, 1, x => (ushort)x);

                    using (var w = creator.GetWriter())
                    {
                        WriteTestValues(w, values, (wr, name, value) => wr.Write(name, value));
                    }

                    var r = creator.GetReader();
                    ReadTestValues(r, values, (rd, name) => rd.ReadUShort(name));
                }
            }
        }

        #endregion

        /// <summary>
        /// Enum used for enum I/O tests. The member values deliberately include a
        /// negative start, an explicit zero, implicit increments, and a gap.
        /// </summary>
        enum TestEnum
        {
            A = -100,
            B,
            Cee = 0,
            Dee,
            Eeie,
            Effffuh,
            G,
            Ayche = 100
        }
    }
}
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion

// .NET Compact Framework has no support for WindowsIdentity
#if !NETCF

using System;
using System.Runtime.InteropServices;
using System.Security.Principal;
using System.Security.Permissions;

using log4net.Core;

namespace log4net.Util
{
	/// <summary>
	/// Impersonate a Windows Account
	/// </summary>
	/// <remarks>
	/// <para>
	/// This <see cref="SecurityContext"/> impersonates a Windows account.
	/// </para>
	/// <para>
	/// How the impersonation is done depends on the value of <see cref="Impersonate"/>.
	/// This allows the context to either impersonate a set of user credentials specified
	/// using username, domain name and password or to revert to the process credentials.
	/// </para>
	/// </remarks>
	public class WindowsSecurityContext : SecurityContext, IOptionHandler
	{
		/// <summary>
		/// The impersonation modes for the <see cref="WindowsSecurityContext"/>
		/// </summary>
		/// <remarks>
		/// <para>
		/// See the <see cref="WindowsSecurityContext.Credentials"/> property for
		/// details.
		/// </para>
		/// </remarks>
		public enum ImpersonationMode
		{
			/// <summary>
			/// Impersonate a user using the credentials supplied
			/// </summary>
			User,

			/// <summary>
			/// Revert this the thread to the credentials of the process
			/// </summary>
			Process
		}

		#region Member Variables

		private ImpersonationMode m_impersonationMode = ImpersonationMode.User;
		private string m_userName;
		// Defaults to the local machine so only UserName and Password are mandatory.
		private string m_domainName = Environment.MachineName;
		private string m_password;
		// Primary token captured by ActivateOptions; null until activation succeeds.
		private WindowsIdentity m_identity;

		#endregion

		#region Constructor

		/// <summary>
		/// Default constructor
		/// </summary>
		/// <remarks>
		/// <para>
		/// Default constructor
		/// </para>
		/// </remarks>
		public WindowsSecurityContext()
		{
		}

		#endregion

		#region Public Properties

		/// <summary>
		/// Gets or sets the impersonation mode for this security context
		/// </summary>
		/// <value>
		/// The impersonation mode for this security context
		/// </value>
		/// <remarks>
		/// <para>
		/// Impersonate either a user with user credentials or
		/// revert this thread to the credentials of the process.
		/// The value is one of the <see cref="ImpersonationMode"/> enum.
		/// </para>
		/// <para>
		/// The default value is <see cref="ImpersonationMode.User"/>
		/// </para>
		/// <para>
		/// When the mode is set to <see cref="ImpersonationMode.User"/>
		/// the user's credentials are established using the
		/// <see cref="UserName"/>, <see cref="DomainName"/> and <see cref="Password"/>
		/// values.
		/// </para>
		/// <para>
		/// When the mode is set to <see cref="ImpersonationMode.Process"/>
		/// no other properties need to be set. If the calling thread is
		/// impersonating then it will be reverted back to the process credentials.
		/// </para>
		/// </remarks>
		public ImpersonationMode Credentials
		{
			get { return m_impersonationMode; }
			set { m_impersonationMode = value; }
		}

		/// <summary>
		/// Gets or sets the Windows username for this security context
		/// </summary>
		/// <value>
		/// The Windows username for this security context
		/// </value>
		/// <remarks>
		/// <para>
		/// This property must be set if <see cref="Credentials"/>
		/// is set to <see cref="ImpersonationMode.User"/> (the default setting).
		/// </para>
		/// </remarks>
		public string UserName
		{
			get { return m_userName; }
			set { m_userName = value; }
		}

		/// <summary>
		/// Gets or sets the Windows domain name for this security context
		/// </summary>
		/// <value>
		/// The Windows domain name for this security context
		/// </value>
		/// <remarks>
		/// <para>
		/// The default value for <see cref="DomainName"/> is the local machine name
		/// taken from the <see cref="Environment.MachineName"/> property.
		/// </para>
		/// <para>
		/// This property must be set if <see cref="Credentials"/>
		/// is set to <see cref="ImpersonationMode.User"/> (the default setting).
		/// </para>
		/// </remarks>
		public string DomainName
		{
			get { return m_domainName; }
			set { m_domainName = value; }
		}

		/// <summary>
		/// Sets the password for the Windows account specified by the <see cref="UserName"/> and <see cref="DomainName"/> properties.
		/// </summary>
		/// <value>
		/// The password for the Windows account specified by the <see cref="UserName"/> and <see cref="DomainName"/> properties.
		/// </value>
		/// <remarks>
		/// <para>
		/// This property must be set if <see cref="Credentials"/>
		/// is set to <see cref="ImpersonationMode.User"/> (the default setting).
		/// </para>
		/// </remarks>
		public string Password
		{
			set { m_password = value; }
		}

		#endregion

		#region IOptionHandler Members

		/// <summary>
		/// Initialize the SecurityContext based on the options set.
		/// </summary>
		/// <remarks>
		/// <para>
		/// This is part of the <see cref="IOptionHandler"/> delayed object
		/// activation scheme. The <see cref="ActivateOptions"/> method must
		/// be called on this object after the configuration properties have
		/// been set. Until <see cref="ActivateOptions"/> is called this
		/// object is in an undefined state and must not be used.
		/// </para>
		/// <para>
		/// If any of the configuration properties are modified then
		/// <see cref="ActivateOptions"/> must be called again.
		/// </para>
		/// <para>
		/// The security context will try to Logon the specified user account and
		/// capture a primary token for impersonation.
		/// </para>
		/// </remarks>
		/// <exception cref="ArgumentNullException">The required <see cref="UserName" />,
		/// <see cref="DomainName" /> or <see cref="Password" /> properties were not specified.</exception>
		public void ActivateOptions()
		{
			if (m_impersonationMode == ImpersonationMode.User)
			{
				// BUGFIX: report the public property names the user must configure,
				// not the private field names ("m_userName" etc.), so the
				// configuration error message is actionable.
				if (m_userName == null) throw new ArgumentNullException("UserName");
				if (m_domainName == null) throw new ArgumentNullException("DomainName");
				if (m_password == null) throw new ArgumentNullException("Password");

				m_identity = LogonUser(m_userName, m_domainName, m_password);
			}
		}

		#endregion

		/// <summary>
		/// Impersonate the Windows account specified by the <see cref="UserName"/> and <see cref="DomainName"/> properties.
		/// </summary>
		/// <param name="state">caller provided state</param>
		/// <returns>
		/// An <see cref="IDisposable"/> instance that will revoke the impersonation of this SecurityContext,
		/// or <c>null</c> if no impersonation took place (e.g. <see cref="ActivateOptions"/> was never called).
		/// </returns>
		/// <remarks>
		/// <para>
		/// Depending on the <see cref="Credentials"/> property either
		/// impersonate a user using credentials supplied or revert
		/// to the process credentials.
		/// </para>
		/// </remarks>
		public override IDisposable Impersonate(object state)
		{
			if (m_impersonationMode == ImpersonationMode.User)
			{
				if (m_identity != null)
				{
					return new DisposableImpersonationContext(m_identity.Impersonate());
				}
			}
			else if (m_impersonationMode == ImpersonationMode.Process)
			{
				// Impersonate(0) will revert to the process credentials
				return new DisposableImpersonationContext(WindowsIdentity.Impersonate(IntPtr.Zero));
			}
			return null;
		}

		/// <summary>
		/// Create a <see cref="WindowsIdentity"/> given the userName, domainName and password.
		/// </summary>
		/// <param name="userName">the user name</param>
		/// <param name="domainName">the domain name</param>
		/// <param name="password">the password</param>
		/// <returns>the <see cref="WindowsIdentity"/> for the account specified</returns>
		/// <remarks>
		/// <para>
		/// Uses the Windows API call LogonUser to get a principal token for the account. This
		/// token is used to initialize the WindowsIdentity.
		/// </para>
		/// </remarks>
#if NET_4_0
		[System.Security.SecuritySafeCritical]
#endif
		[System.Security.Permissions.SecurityPermission(System.Security.Permissions.SecurityAction.Demand, UnmanagedCode = true)]
		private static WindowsIdentity LogonUser(string userName, string domainName, string password)
		{
			const int LOGON32_PROVIDER_DEFAULT = 0;
			//This parameter causes LogonUser to create a primary token.
			const int LOGON32_LOGON_INTERACTIVE = 2;

			// Call LogonUser to obtain a handle to an access token.
			IntPtr tokenHandle = IntPtr.Zero;
			if (!LogonUser(userName, domainName, password, LOGON32_LOGON_INTERACTIVE, LOGON32_PROVIDER_DEFAULT, ref tokenHandle))
			{
				NativeError error = NativeError.GetLastError();
				throw new Exception("Failed to LogonUser [" + userName + "] in Domain [" + domainName + "]. Error: " + error.ToString());
			}

			const int SecurityImpersonation = 2;
			IntPtr dupeTokenHandle = IntPtr.Zero;
			try
			{
				if (!DuplicateToken(tokenHandle, SecurityImpersonation, ref dupeTokenHandle))
				{
					NativeError error = NativeError.GetLastError();
					throw new Exception("Failed to DuplicateToken after LogonUser. Error: " + error.ToString());
				}

				// WindowsIdentity duplicates the token internally, so both native
				// handles can (and must) be released afterwards.
				return new WindowsIdentity(dupeTokenHandle);
			}
			finally
			{
				// BUGFIX: release both OS handles on every path. The original code
				// leaked both tokens if the WindowsIdentity constructor threw.
				if (dupeTokenHandle != IntPtr.Zero)
				{
					CloseHandle(dupeTokenHandle);
				}
				if (tokenHandle != IntPtr.Zero)
				{
					CloseHandle(tokenHandle);
				}
			}
		}

		#region Native Method Stubs

		[DllImport("advapi32.dll", SetLastError=true)]
		private static extern bool LogonUser(String lpszUsername, String lpszDomain, String lpszPassword, int dwLogonType, int dwLogonProvider, ref IntPtr phToken);

		[DllImport("kernel32.dll", CharSet=CharSet.Auto)]
		private static extern bool CloseHandle(IntPtr handle);

		[DllImport("advapi32.dll", CharSet=CharSet.Auto, SetLastError=true)]
		private static extern bool DuplicateToken(IntPtr ExistingTokenHandle, int SECURITY_IMPERSONATION_LEVEL, ref IntPtr DuplicateTokenHandle);

		#endregion

		#region DisposableImpersonationContext class

		/// <summary>
		/// Adds <see cref="IDisposable"/> to <see cref="WindowsImpersonationContext"/>
		/// </summary>
		/// <remarks>
		/// <para>
		/// Helper class to expose the <see cref="WindowsImpersonationContext"/>
		/// through the <see cref="IDisposable"/> interface.
/// </para> /// </remarks> private sealed class DisposableImpersonationContext : IDisposable { private readonly WindowsImpersonationContext m_impersonationContext; /// <summary> /// Constructor /// </summary> /// <param name="impersonationContext">the impersonation context being wrapped</param> /// <remarks> /// <para> /// Constructor /// </para> /// </remarks> public DisposableImpersonationContext(WindowsImpersonationContext impersonationContext) { m_impersonationContext = impersonationContext; } /// <summary> /// Revert the impersonation /// </summary> /// <remarks> /// <para> /// Revert the impersonation /// </para> /// </remarks> public void Dispose() { m_impersonationContext.Undo(); } } #endregion } } #endif // !NETCF
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.IO;
using System.Reflection;
using System.Diagnostics;

namespace System.Runtime.Serialization.Formatters.Binary
{
    /// <summary>
    /// Drives deserialization of a BinaryFormatter stream. The BinaryParser calls back
    /// into <see cref="Parse"/> for each record found; this class instantiates objects,
    /// populates members, and records reference/value-type fixups via the ObjectManager.
    /// </summary>
    internal sealed class ObjectReader
    {
        // System.Serializer information
        internal Stream _stream;
        internal ISurrogateSelector _surrogates;
        internal StreamingContext _context;
        internal ObjectManager _objectManager;
        internal InternalFE _formatterEnums;
        internal SerializationBinder _binder;

        // Top object and headers
        internal long _topId;
        internal bool _isSimpleAssembly = false;
        internal object _topObject;
        internal SerObjectInfoInit _serObjectInfoInit;
        internal IFormatterConverter _formatterConverter;

        // Stack of Object ParseRecords
        internal SerStack _stack;

        // ValueType Fixup Stack
        private SerStack _valueFixupStack;

        // Cross AppDomain
        internal object[] _crossAppDomainArray; //Set by the BinaryFormatter

        //MethodCall and MethodReturn are handled special for perf reasons
        private bool _fullDeserialization;

        // Lazily created so simple payloads that never hit a value-type fixup pay no cost.
        private SerStack ValueFixupStack => _valueFixupStack ?? (_valueFixupStack = new SerStack("ValueType Fixup Stack"));

        // Older formatters generate ids for valuetypes using a different counter than ref types. Newer ones use
        // a single counter, only value types have a negative value. Need a way to handle older formats.
        private const int ThresholdForValueTypeIds = int.MaxValue;
        private bool _oldFormatDetected = false;
        private IntSizedArray _valTypeObjectIdTable;

        // Caches typeName -> resolved Type+assembly pairs; see FastBindToType.
        private readonly NameCache _typeCache = new NameCache();

        // The root object of the deserialized graph; also mirrored into the ObjectManager when present.
        internal object TopObject
        {
            get { return _topObject; }
            set
            {
                _topObject = value;
                if (_objectManager != null)
                {
                    _objectManager.TopObject = value;
                }
            }
        }

        // Captures the stream and formatter configuration; no reading happens until Deserialize.
        internal ObjectReader(Stream stream, ISurrogateSelector selector, StreamingContext context, InternalFE formatterEnums, SerializationBinder binder)
        {
            if (stream == null)
            {
                throw new ArgumentNullException(nameof(stream));
            }
            _stream = stream;
            _surrogates = selector;
            _context = context;
            _binder = binder;
            _formatterEnums = formatterEnums;
        }

        // Runs the parser over the stream and returns the fully fixed-up top object.
        // NOTE(review): the fCheck parameter is not used anywhere in this method.
        internal object Deserialize(BinaryParser serParser, bool fCheck)
        {
            if (serParser == null)
            {
                throw new ArgumentNullException(nameof(serParser));
            }
            _fullDeserialization = false;
            TopObject = null;
            _topId = 0;
            _isSimpleAssembly = (_formatterEnums._assemblyFormat == FormatterAssemblyStyle.Simple);
            // NOTE(review): _fullDeserialization was set to false immediately above, so this
            // reinitialization branch is unreachable here; kept as in the original source.
            if (_fullDeserialization)
            {
                // Reinitialize
                _objectManager = new ObjectManager(_surrogates, _context, false, false);
                _serObjectInfoInit = new SerObjectInfoInit();
            }
            // Will call back to ParseObject, ParseHeader for each object found
            serParser.Run();
            if (_fullDeserialization)
            {
                _objectManager.DoFixups();
            }
            if (TopObject == null)
            {
                throw new SerializationException(SR.Serialization_TopObject);
            }
            //if TopObject has a surrogate then the actual object may be changed during special fixup
            //So refresh it using topID.
            if (HasSurrogate(TopObject.GetType()) && _topId != 0)//Not yet resolved
            {
                TopObject = _objectManager.GetObject(_topId);
            }
            if (TopObject is IObjectReference)
            {
                TopObject = ((IObjectReference)TopObject).GetRealObject(_context);
            }
            if (_fullDeserialization)
            {
                _objectManager.RaiseDeserializationEvent(); // This will raise both IDeserialization and [OnDeserialized] events
            }
            return TopObject;
        }

        // True when the surrogate selector supplies a surrogate for the given type.
        private bool HasSurrogate(Type t)
        {
            ISurrogateSelector ignored;
            return _surrogates != null && _surrogates.GetSurrogate(t, _context, out ignored) != null;
        }

        // Rejects types that are neither [Serializable] nor covered by a surrogate.
        private void CheckSerializable(Type t)
        {
            if (!t.IsSerializable && !HasSurrogate(t))
            {
                throw new SerializationException(string.Format(CultureInfo.InvariantCulture, SR.Serialization_NonSerType, t.FullName, t.Assembly.FullName));
            }
        }

        // Switches into full-graph mode: allocates the parse stack and ObjectManager on first real object.
        private void InitFullDeserialization()
        {
            _fullDeserialization = true;
            _stack = new SerStack("ObjectReader Object Stack");
            _objectManager = new ObjectManager(_surrogates, _context, false, false);
            if (_formatterConverter == null)
            {
                _formatterConverter = new FormatterConverter();
            }
        }

        // Returns the pre-supplied object at the given index of the cross-AppDomain array.
        internal object CrossAppDomainArray(int index)
        {
            Debug.Assert(index < _crossAppDomainArray.Length, "[System.Runtime.Serialization.Formatters.BinaryObjectReader index out of range for CrossAppDomainArray]");
            return _crossAppDomainArray[index];
        }

        // Factory helpers forwarding this reader's configuration into ReadObjectInfo.
        internal ReadObjectInfo CreateReadObjectInfo(Type objectType)
        {
            return ReadObjectInfo.Create(objectType, _surrogates, _context, _objectManager, _serObjectInfoInit, _formatterConverter, _isSimpleAssembly);
        }

        internal ReadObjectInfo CreateReadObjectInfo(Type objectType, string[] memberNames, Type[] memberTypes)
        {
            return ReadObjectInfo.Create(objectType, memberNames, memberTypes, _surrogates, _context, _objectManager, _serObjectInfoInit, _formatterConverter, _isSimpleAssembly);
        }

        // Central dispatch: routes each parse record from the BinaryParser to its handler.
        internal void Parse(ParseRecord pr)
        {
            switch (pr._parseTypeEnum)
            {
                case InternalParseTypeE.SerializedStreamHeader:
                    ParseSerializedStreamHeader(pr);
                    break;
                case InternalParseTypeE.SerializedStreamHeaderEnd:
                    ParseSerializedStreamHeaderEnd(pr);
                    break;
                case InternalParseTypeE.Object:
                    ParseObject(pr);
                    break;
                case InternalParseTypeE.ObjectEnd:
                    ParseObjectEnd(pr);
                    break;
                case InternalParseTypeE.Member:
                    ParseMember(pr);
                    break;
                case InternalParseTypeE.MemberEnd:
                    ParseMemberEnd(pr);
                    break;
                case InternalParseTypeE.Body:
                case InternalParseTypeE.BodyEnd:
                case InternalParseTypeE.Envelope:
                case InternalParseTypeE.EnvelopeEnd:
                    // Structural wrappers carry no payload; nothing to do.
                    break;
                case InternalParseTypeE.Empty:
                default:
                    throw new SerializationException(SR.Format(SR.Serialization_XMLElement, pr._name));
            }
        }

        // Styled ParseError output
        private void ParseError(ParseRecord processing, ParseRecord onStack)
        {
            throw new SerializationException(SR.Format(SR.Serialization_ParseError, onStack._name + " " + onStack._parseTypeEnum + " " + processing._name + " " + processing._parseTypeEnum));
        }

        // Parse the SerializedStreamHeader element. This is the first element in the stream if present
        private void ParseSerializedStreamHeader(ParseRecord pr) => _stack.Push(pr);

        // Parse the SerializedStreamHeader end element. This is the last element in the stream if present
        private void ParseSerializedStreamHeaderEnd(ParseRecord pr) => _stack.Pop();

        // New object encountered in stream
        private void ParseObject(ParseRecord pr)
        {
            if (!_fullDeserialization)
            {
                InitFullDeserialization();
            }
            if (pr._objectPositionEnum == InternalObjectPositionE.Top)
            {
                _topId = pr._objectId;
            }
            if (pr._parseTypeEnum == InternalParseTypeE.Object)
            {
                _stack.Push(pr); // Nested objects member names are already on stack
            }
            if (pr._objectTypeEnum == InternalObjectTypeE.Array)
            {
                ParseArray(pr);
                return;
            }
            // If the Type is null, this means we have a typeload issue
            // mark the object with TypeLoadExceptionHolder
            if (pr._dtType == null)
            {
                pr._newObj = new TypeLoadExceptionHolder(pr._keyDt);
                return;
            }
            if (ReferenceEquals(pr._dtType, Converter.s_typeofString))
            {
                // String as a top level object
                if (pr._value != null)
                {
                    pr._newObj = pr._value;
                    if (pr._objectPositionEnum == InternalObjectPositionE.Top)
                    {
                        TopObject = pr._newObj;
                        return;
                    }
                    else
                    {
                        _stack.Pop();
                        RegisterObject(pr._newObj, pr, (ParseRecord)_stack.Peek());
                        return;
                    }
                }
                else
                {
                    // xml Doesn't have the value until later
                    return;
                }
            }
            else
            {
                CheckSerializable(pr._dtType);
                // Allocate without running any constructor; members are filled in later.
                pr._newObj = FormatterServices.GetUninitializedObject(pr._dtType);
                // Run the OnDeserializing methods
                _objectManager.RaiseOnDeserializingEvent(pr._newObj);
            }
            if (pr._newObj == null)
            {
                throw new SerializationException(SR.Format(SR.Serialization_TopObjectInstantiate, pr._dtType));
            }
            if (pr._objectPositionEnum == InternalObjectPositionE.Top)
            {
                TopObject = pr._newObj;
            }
            if (pr._objectInfo == null)
            {
                pr._objectInfo = ReadObjectInfo.Create(pr._dtType, _surrogates, _context, _objectManager, _serObjectInfoInit, _formatterConverter, _isSimpleAssembly);
            }
        }

        // End of object encountered in stream
        private void ParseObjectEnd(ParseRecord pr)
        {
            ParseRecord objectPr = (ParseRecord)_stack.Peek() ?? pr;
            if (objectPr._objectPositionEnum == InternalObjectPositionE.Top)
            {
                if (ReferenceEquals(objectPr._dtType, Converter.s_typeofString))
                {
                    objectPr._newObj = objectPr._value;
                    TopObject = objectPr._newObj;
                    return;
                }
            }
            _stack.Pop();
            ParseRecord parentPr = (ParseRecord)_stack.Peek();
            if (objectPr._newObj == null)
            {
                return;
            }
            if (objectPr._objectTypeEnum == InternalObjectTypeE.Array)
            {
                if (objectPr._objectPositionEnum == InternalObjectPositionE.Top)
                {
                    TopObject = objectPr._newObj;
                }
                RegisterObject(objectPr._newObj, objectPr, parentPr);
                return;
            }
            objectPr._objectInfo.PopulateObjectMembers(objectPr._newObj, objectPr._memberData);
            // Registration is after object is populated
            if ((!objectPr._isRegistered) && (objectPr._objectId > 0))
            {
                RegisterObject(objectPr._newObj, objectPr, parentPr);
            }
            if (objectPr._isValueTypeFixup)
            {
                ValueFixup fixup = (ValueFixup)ValueFixupStack.Pop(); //Value fixup
                fixup.Fixup(objectPr, parentPr); // Value fixup
            }
            if (objectPr._objectPositionEnum == InternalObjectPositionE.Top)
            {
                TopObject = objectPr._newObj;
            }
            objectPr._objectInfo.ObjectEnd();
        }

        // Array object encountered in stream
        private void ParseArray(ParseRecord pr)
        {
            // NOTE(review): genId is assigned but never used in this method.
            long genId = pr._objectId;
            if (pr._arrayTypeEnum == InternalArrayTypeE.Base64)
            {
                // ByteArray
                pr._newObj = pr._value.Length > 0 ?
                    Convert.FromBase64String(pr._value) :
                    Array.Empty<byte>();
                if (_stack.Peek() == pr)
                {
                    _stack.Pop();
                }
                if (pr._objectPositionEnum == InternalObjectPositionE.Top)
                {
                    TopObject = pr._newObj;
                }
                ParseRecord parentPr = (ParseRecord)_stack.Peek();
                // Base64 can be registered at this point because it is populated
                RegisterObject(pr._newObj, pr, parentPr);
            }
            else if ((pr._newObj != null) && Converter.IsWriteAsByteArray(pr._arrayElementTypeCode))
            {
                // Primtive typed Array has already been read
                if (pr._objectPositionEnum == InternalObjectPositionE.Top)
                {
                    TopObject = pr._newObj;
                }
                ParseRecord parentPr = (ParseRecord)_stack.Peek();
                // Primitive typed array can be registered at this point because it is populated
                RegisterObject(pr._newObj, pr, parentPr);
            }
            else if ((pr._arrayTypeEnum == InternalArrayTypeE.Jagged) || (pr._arrayTypeEnum == InternalArrayTypeE.Single))
            {
                // Multidimensional jagged array or single array
                bool couldBeValueType = true;
                if ((pr._lowerBoundA == null) || (pr._lowerBoundA[0] == 0))
                {
                    if (ReferenceEquals(pr._arrayElementType, Converter.s_typeofString))
                    {
                        pr._objectA = new string[pr._lengthA[0]];
                        pr._newObj = pr._objectA;
                        couldBeValueType = false;
                    }
                    else if (ReferenceEquals(pr._arrayElementType, Converter.s_typeofObject))
                    {
                        pr._objectA = new object[pr._lengthA[0]];
                        pr._newObj = pr._objectA;
                        couldBeValueType = false;
                    }
                    else if (pr._arrayElementType != null)
                    {
                        pr._newObj = Array.CreateInstance(pr._arrayElementType, pr._lengthA[0]);
                    }
                    pr._isLowerBound = false;
                }
                else
                {
                    if (pr._arrayElementType != null)
                    {
                        pr._newObj = Array.CreateInstance(pr._arrayElementType, pr._lengthA, pr._lowerBoundA);
                    }
                    pr._isLowerBound = true;
                }
                if (pr._arrayTypeEnum == InternalArrayTypeE.Single)
                {
                    if (!pr._isLowerBound && (Converter.IsWriteAsByteArray(pr._arrayElementTypeCode)))
                    {
                        pr._primitiveArray = new PrimitiveArray(pr._arrayElementTypeCode, (Array)pr._newObj);
                    }
                    else if (couldBeValueType && pr._arrayElementType != null)
                    {
                        if (!pr._arrayElementType.IsValueType && !pr._isLowerBound)
                        {
                            // Keep an object[] alias for faster element stores later.
                            pr._objectA = (object[])pr._newObj;
                        }
                    }
                }
                pr._indexMap = new int[1];
            }
            else if (pr._arrayTypeEnum == InternalArrayTypeE.Rectangular)
            {
                // Rectangle array
                pr._isLowerBound = false;
                if (pr._lowerBoundA != null)
                {
                    for (int i = 0; i < pr._rank; i++)
                    {
                        if (pr._lowerBoundA[i] != 0)
                        {
                            pr._isLowerBound = true;
                        }
                    }
                }
                if (pr._arrayElementType != null)
                {
                    pr._newObj = !pr._isLowerBound ?
                        Array.CreateInstance(pr._arrayElementType, pr._lengthA) :
                        Array.CreateInstance(pr._arrayElementType, pr._lengthA, pr._lowerBoundA);
                }
                // Calculate number of items
                int sum = 1;
                for (int i = 0; i < pr._rank; i++)
                {
                    sum = sum * pr._lengthA[i];
                }
                pr._indexMap = new int[pr._rank];
                pr._rectangularMap = new int[pr._rank];
                pr._linearlength = sum;
            }
            else
            {
                throw new SerializationException(SR.Format(SR.Serialization_ArrayType, pr._arrayTypeEnum));
            }
        }

        // Builds a map for each item in an incoming rectangle array. The map specifies where the item is placed in the output Array Object
        private void NextRectangleMap(ParseRecord pr)
        {
            // For each invocation, calculate the next rectangular array position
            // example
            // indexMap 0 [0,0,0]
            // indexMap 1 [0,0,1]
            // indexMap 2 [0,0,2]
            // indexMap 3 [0,0,3]
            // indexMap 4 [0,1,0]
            for (int irank = pr._rank - 1; irank > -1; irank--)
            {
                // Find the current or lower dimension which can be incremented.
                if (pr._rectangularMap[irank] < pr._lengthA[irank] - 1)
                {
                    // The current dimension is at maximum. Increase the next lower dimension by 1
                    pr._rectangularMap[irank]++;
                    if (irank < pr._rank - 1)
                    {
                        // The current dimension and higher dimensions are zeroed.
                        for (int i = irank + 1; i < pr._rank; i++)
                        {
                            pr._rectangularMap[i] = 0;
                        }
                    }
                    Array.Copy(pr._rectangularMap, 0, pr._indexMap, 0, pr._rank);
                    break;
                }
            }
        }

        // Array object item encountered in stream
        private void ParseArrayMember(ParseRecord pr)
        {
            ParseRecord objectPr = (ParseRecord)_stack.Peek();

            // Set up for inserting value into correct array position
            if (objectPr._arrayTypeEnum == InternalArrayTypeE.Rectangular)
            {
                if (objectPr._memberIndex > 0)
                {
                    NextRectangleMap(objectPr); // Rectangle array, calculate position in array
                }
                if (objectPr._isLowerBound)
                {
                    for (int i = 0; i < objectPr._rank; i++)
                    {
                        objectPr._indexMap[i] = objectPr._rectangularMap[i] + objectPr._lowerBoundA[i];
                    }
                }
            }
            else
            {
                objectPr._indexMap[0] = !objectPr._isLowerBound ?
                    objectPr._memberIndex : // Zero based array
                    objectPr._lowerBoundA[0] + objectPr._memberIndex; // Lower Bound based array
            }

            // Set Array element according to type of element
            if (pr._memberValueEnum == InternalMemberValueE.Reference)
            {
                // Object Reference
                // See if object has already been instantiated
                object refObj = _objectManager.GetObject(pr._idRef);
                if (refObj == null)
                {
                    // Object not instantiated
                    // Array fixup manager
                    int[] fixupIndex = new int[objectPr._rank];
                    Array.Copy(objectPr._indexMap, 0, fixupIndex, 0, objectPr._rank);
                    _objectManager.RecordArrayElementFixup(objectPr._objectId, fixupIndex, pr._idRef);
                }
                else
                {
                    if (objectPr._objectA != null)
                    {
                        objectPr._objectA[objectPr._indexMap[0]] = refObj;
                    }
                    else
                    {
                        ((Array)objectPr._newObj).SetValue(refObj, objectPr._indexMap); // Object has been instantiated
                    }
                }
            }
            else if (pr._memberValueEnum == InternalMemberValueE.Nested)
            {
                //Set up dtType for ParseObject
                if (pr._dtType == null)
                {
                    pr._dtType = objectPr._arrayElementType;
                }
                ParseObject(pr);
                _stack.Push(pr);
                if (objectPr._arrayElementType != null)
                {
                    if ((objectPr._arrayElementType.IsValueType) && (pr._arrayElementTypeCode == InternalPrimitiveTypeE.Invalid))
                    {
                        pr._isValueTypeFixup = true; //Valuefixup
                        ValueFixupStack.Push(new ValueFixup((Array)objectPr._newObj, objectPr._indexMap)); //valuefixup
                    }
                    else
                    {
                        if (objectPr._objectA != null)
                        {
                            objectPr._objectA[objectPr._indexMap[0]] = pr._newObj;
                        }
                        else
                        {
                            ((Array)objectPr._newObj).SetValue(pr._newObj, objectPr._indexMap);
                        }
                    }
                }
            }
            else if (pr._memberValueEnum == InternalMemberValueE.InlineValue)
            {
                if ((ReferenceEquals(objectPr._arrayElementType, Converter.s_typeofString)) || (ReferenceEquals(pr._dtType, Converter.s_typeofString)))
                {
                    // String in either a string array, or a string element of an object array
                    ParseString(pr, objectPr);
                    if (objectPr._objectA != null)
                    {
                        objectPr._objectA[objectPr._indexMap[0]] = pr._value;
                    }
                    else
                    {
                        ((Array)objectPr._newObj).SetValue(pr._value, objectPr._indexMap);
                    }
                }
                else if (objectPr._isArrayVariant)
                {
                    // Array of type object
                    if (pr._keyDt == null)
                    {
                        throw new SerializationException(SR.Serialization_ArrayTypeObject);
                    }
                    object var = null;
                    if (ReferenceEquals(pr._dtType, Converter.s_typeofString))
                    {
                        ParseString(pr, objectPr);
                        var = pr._value;
                    }
                    else if (ReferenceEquals(pr._dtTypeCode, InternalPrimitiveTypeE.Invalid))
                    {
                        CheckSerializable(pr._dtType);
                        // Not nested and invalid, so it is an empty object
                        var = FormatterServices.GetUninitializedObject(pr._dtType);
                    }
                    else
                    {
                        var = pr._varValue != null ?
                            pr._varValue :
                            Converter.FromString(pr._value, pr._dtTypeCode);
                    }
                    if (objectPr._objectA != null)
                    {
                        objectPr._objectA[objectPr._indexMap[0]] = var;
                    }
                    else
                    {
                        ((Array)objectPr._newObj).SetValue(var, objectPr._indexMap); // Primitive type
                    }
                }
                else
                {
                    // Primitive type
                    if (objectPr._primitiveArray != null)
                    {
                        // Fast path for Soap primitive arrays. Binary was handled in the BinaryParser
                        objectPr._primitiveArray.SetValue(pr._value, objectPr._indexMap[0]);
                    }
                    else
                    {
                        object var = pr._varValue != null ?
                            pr._varValue :
                            Converter.FromString(pr._value, objectPr._arrayElementTypeCode);
                        if (objectPr._objectA != null)
                        {
                            objectPr._objectA[objectPr._indexMap[0]] = var;
                        }
                        else
                        {
                            ((Array)objectPr._newObj).SetValue(var, objectPr._indexMap); // Primitive type
                        }
                    }
                }
            }
            else if (pr._memberValueEnum == InternalMemberValueE.Null)
            {
                objectPr._memberIndex += pr._consecutiveNullArrayEntryCount - 1; //also incremented again below
            }
            else
            {
                ParseError(pr, objectPr);
            }
            objectPr._memberIndex++;
        }

        private void ParseArrayMemberEnd(ParseRecord pr)
        {
            // If this is a nested array object, then pop the stack
            if (pr._memberValueEnum == InternalMemberValueE.Nested)
            {
                ParseObjectEnd(pr);
            }
        }

        // Object member encountered in stream
        private void ParseMember(ParseRecord pr)
        {
            ParseRecord objectPr = (ParseRecord)_stack.Peek();
            // NOTE(review): objName appears unused in the remainder of this method.
            string objName = objectPr?._name;
            switch (pr._memberTypeEnum)
            {
                case InternalMemberTypeE.Item:
                    ParseArrayMember(pr);
                    return;
                case InternalMemberTypeE.Field:
                    break;
            }
            //if ((pr.PRdtType == null) && !objectPr.PRobjectInfo.isSi)
            if (pr._dtType == null && objectPr._objectInfo._isTyped)
            {
                pr._dtType = objectPr._objectInfo.GetType(pr._name);
                if (pr._dtType != null)
                {
                    pr._dtTypeCode = Converter.ToCode(pr._dtType);
                }
            }
            if (pr._memberValueEnum == InternalMemberValueE.Null)
            {
                // Value is Null
                objectPr._objectInfo.AddValue(pr._name, null, ref objectPr._si, ref objectPr._memberData);
            }
            else if (pr._memberValueEnum == InternalMemberValueE.Nested)
            {
                ParseObject(pr);
                _stack.Push(pr);
                if ((pr._objectInfo != null) && pr._objectInfo._objectType != null && (pr._objectInfo._objectType.IsValueType))
                {
                    pr._isValueTypeFixup = true; //Valuefixup
                    ValueFixupStack.Push(new ValueFixup(objectPr._newObj, pr._name, objectPr._objectInfo));//valuefixup
                }
                else
                {
                    objectPr._objectInfo.AddValue(pr._name, pr._newObj, ref objectPr._si, ref objectPr._memberData);
                }
            }
            else if (pr._memberValueEnum == InternalMemberValueE.Reference)
            {
                // See if object has already been instantiated
                object refObj = _objectManager.GetObject(pr._idRef);
                if (refObj == null)
                {
                    objectPr._objectInfo.AddValue(pr._name, null, ref objectPr._si, ref objectPr._memberData);
                    objectPr._objectInfo.RecordFixup(objectPr._objectId, pr._name, pr._idRef); // Object not instantiated
                }
                else
                {
                    objectPr._objectInfo.AddValue(pr._name, refObj, ref objectPr._si, ref objectPr._memberData);
                }
            }
            else if (pr._memberValueEnum == InternalMemberValueE.InlineValue)
            {
                // Primitive type or String
                if (ReferenceEquals(pr._dtType, Converter.s_typeofString))
                {
                    ParseString(pr, objectPr);
                    objectPr._objectInfo.AddValue(pr._name, pr._value, ref objectPr._si, ref objectPr._memberData);
                }
                else if (pr._dtTypeCode == InternalPrimitiveTypeE.Invalid)
                {
                    // The member field was an object put the value is Inline either bin.Base64 or invalid
                    if (pr._arrayTypeEnum == InternalArrayTypeE.Base64)
                    {
                        objectPr._objectInfo.AddValue(pr._name, Convert.FromBase64String(pr._value), ref objectPr._si, ref objectPr._memberData);
                    }
                    else if (ReferenceEquals(pr._dtType, Converter.s_typeofObject))
                    {
                        throw new SerializationException(SR.Format(SR.Serialization_TypeMissing, pr._name));
                    }
                    else
                    {
                        ParseString(pr, objectPr); // Register the object if it has an objectId
                        // Object Class with no memberInfo data
                        // only special case where AddValue is needed?
                        if (ReferenceEquals(pr._dtType, Converter.s_typeofSystemVoid))
                        {
                            objectPr._objectInfo.AddValue(pr._name, pr._dtType, ref objectPr._si, ref objectPr._memberData);
                        }
                        else if (objectPr._objectInfo._isSi)
                        {
                            // ISerializable are added as strings, the conversion to type is done by the
                            // ISerializable object
                            objectPr._objectInfo.AddValue(pr._name, pr._value, ref objectPr._si, ref objectPr._memberData);
                        }
                    }
                }
                else
                {
                    object var = pr._varValue != null ?
                        pr._varValue :
                        Converter.FromString(pr._value, pr._dtTypeCode);
                    objectPr._objectInfo.AddValue(pr._name, var, ref objectPr._si, ref objectPr._memberData);
                }
            }
            else
            {
                ParseError(pr, objectPr);
            }
        }

        // Object member end encountered in stream
        private void ParseMemberEnd(ParseRecord pr)
        {
            switch (pr._memberTypeEnum)
            {
                case InternalMemberTypeE.Item:
                    ParseArrayMemberEnd(pr);
                    return;
                case InternalMemberTypeE.Field:
                    if (pr._memberValueEnum == InternalMemberValueE.Nested)
                    {
                        ParseObjectEnd(pr);
                    }
                    break;
                default:
                    ParseError(pr, (ParseRecord)_stack.Peek());
                    break;
            }
        }

        // Processes a string object by getting an internal ID for it and registering it with the objectManager
        private void ParseString(ParseRecord pr, ParseRecord parentPr)
        {
            // Process String class
            if ((!pr._isRegistered) && (pr._objectId > 0))
            {
                // String is treated as an object if it has an id
                //m_objectManager.RegisterObject(pr.PRvalue, pr.PRobjectId);
                RegisterObject(pr._value, pr, parentPr, true);
            }
        }

        private void RegisterObject(object obj, ParseRecord pr, ParseRecord objectPr)
        {
            RegisterObject(obj, pr, objectPr, false);
        }

        // Registers a deserialized object (or string) with the ObjectManager, recording its
        // parent/member position so reference fixups can be resolved later.
        private void RegisterObject(object obj, ParseRecord pr, ParseRecord objectPr, bool bIsString)
        {
            if (!pr._isRegistered)
            {
                pr._isRegistered = true;
                SerializationInfo si = null;
                long parentId = 0;
                MemberInfo memberInfo = null;
                int[] indexMap = null;
                if (objectPr != null)
                {
                    indexMap = objectPr._indexMap;
                    parentId = objectPr._objectId;
                    if (objectPr._objectInfo != null)
                    {
                        if (!objectPr._objectInfo._isSi)
                        {
                            // ParentId is only used if there is a memberInfo
                            memberInfo = objectPr._objectInfo.GetMemberInfo(pr._name);
                        }
                    }
                }
                // SerializationInfo is always needed for ISerialization
                si = pr._si;
                if (bIsString)
                {
                    _objectManager.RegisterString((string)obj, pr._objectId, si, parentId, memberInfo);
                }
                else
                {
                    _objectManager.RegisterObject(obj, pr._objectId, si, parentId, memberInfo, indexMap);
                }
            }
        }

        // Assigns an internal ID associated with the binary id number
        internal long GetId(long objectId)
        {
            if (!_fullDeserialization)
            {
                InitFullDeserialization();
            }
            if (objectId > 0)
            {
                return objectId;
            }
            if (_oldFormatDetected || objectId == -1)
            {
                // Alarm bells. This is an old format. Deal with it.
                _oldFormatDetected = true;
                if (_valTypeObjectIdTable == null)
                {
                    _valTypeObjectIdTable = new IntSizedArray();
                }
                long tempObjId = 0;
                if ((tempObjId = _valTypeObjectIdTable[(int)objectId]) == 0)
                {
                    // Remap old-format value-type ids above the reference-type range.
                    tempObjId = ThresholdForValueTypeIds + objectId;
                    _valTypeObjectIdTable[(int)objectId] = (int)tempObjId;
                }
                return tempObjId;
            }
            // New format: value types carry negative ids; normalize to positive.
            return -1 * objectId;
        }

        // Resolves a type name, consulting the user-supplied SerializationBinder first.
        internal Type Bind(string assemblyString, string typeString)
        {
            Type type = null;
            if (_binder != null)
            {
                type = _binder.BindToType(assemblyString, typeString);
            }
            if (type == null)
            {
                type = FastBindToType(assemblyString, typeString);
            }
            return type;
        }

        // Cache entry pairing a resolved Type with the assembly string it was resolved from.
        internal sealed class TypeNAssembly
        {
            public Type Type;
            public string AssemblyName;
        }

        // Resolves typeName from assemblyName with a per-typeName cache; returns null on any failure.
        internal Type FastBindToType(string assemblyName, string typeName)
        {
            Type type = null;
            TypeNAssembly entry = (TypeNAssembly)_typeCache.GetCachedValue(typeName);
            if (entry == null || entry.AssemblyName != assemblyName)
            {
                // Check early to avoid throwing unnecessary exceptions
                if (assemblyName == null)
                {
                    return null;
                }
                Assembly assm = null;
                AssemblyName assmName = null;
                try
                {
                    assmName = new AssemblyName(assemblyName);
                }
                catch
                {
                    return null;
                }
                if (_isSimpleAssembly)
                {
                    assm = ResolveSimpleAssemblyName(assmName);
                }
                else
                {
                    try
                    {
                        assm = Assembly.Load(assmName);
                    }
                    catch { }
                }
                if (assm == null)
                {
                    return null;
                }
                if (_isSimpleAssembly)
                {
                    GetSimplyNamedTypeFromAssembly(assm, typeName, ref type);
                }
                else
                {
                    type = FormatterServices.GetTypeFromAssembly(assm, typeName);
                }
                if (type == null)
                {
                    return null;
                }
                // before adding it to cache, let us do the security check
                CheckTypeForwardedTo(assm, type.Assembly, type);
                entry = new TypeNAssembly();
                entry.Type = type;
                entry.AssemblyName = assemblyName;
                _typeCache.SetCachedValue(entry);
            }
            return entry.Type;
        }

        // Loads an assembly by full name first, then falls back to the simple name; null on failure.
        private static Assembly ResolveSimpleAssemblyName(AssemblyName assemblyName)
        {
            try
            {
                return Assembly.Load(assemblyName);
            }
            catch { }
            if (assemblyName != null)
            {
                try
                {
                    return Assembly.Load(assemblyName.Name);
                }
                catch { }
            }
            return null;
        }

        private static void GetSimplyNamedTypeFromAssembly(Assembly assm, string typeName, ref Type type)
        {
            // Catching any exceptions that could be thrown from a failure on assembly load
            // This is necessary, for example, if there are generic parameters that are qualified with a version of the assembly that predates the one available
            try
            {
                type = FormatterServices.GetTypeFromAssembly(assm, typeName);
            }
            catch (TypeLoadException) { }
            catch (FileNotFoundException) { }
            catch (FileLoadException) { }
            catch (BadImageFormatException) { }
            if (type == null)
            {
                type = Type.GetType(typeName, ResolveSimpleAssemblyName, new TopLevelAssemblyTypeResolver(assm).ResolveType, throwOnError: false);
            }
        }

        // One-entry memoization of the last (assembly, name) -> Type lookup performed by GetType.
        private string _previousAssemblyString;
        private string _previousName;
        private Type _previousType;

        // Resolves a type for the given assembly record and name, using the one-entry cache above.
        internal Type GetType(BinaryAssemblyInfo assemblyInfo, string name)
        {
            Type objectType = null;
            if (((_previousName != null) && (_previousName.Length == name.Length) && (_previousName.Equals(name))) &&
                ((_previousAssemblyString != null) && (_previousAssemblyString.Length == assemblyInfo._assemblyString.Length) && (_previousAssemblyString.Equals(assemblyInfo._assemblyString))))
            {
                objectType = _previousType;
            }
            else
            {
                objectType = Bind(assemblyInfo._assemblyString, name);
                if (objectType == null)
                {
                    Assembly sourceAssembly = assemblyInfo.GetAssembly();
                    if (_isSimpleAssembly)
                    {
                        GetSimplyNamedTypeFromAssembly(sourceAssembly, name, ref objectType);
                    }
                    else
                    {
                        objectType = FormatterServices.GetTypeFromAssembly(sourceAssembly, name);
                    }
                    // here let us do the security check
                    if (objectType != null)
                    {
                        CheckTypeForwardedTo(sourceAssembly, objectType.Assembly, objectType);
                    }
                }
                _previousAssemblyString = assemblyInfo._assemblyString;
                _previousName = name;
                _previousType = objectType;
            }
            return objectType;
        }

        private static void CheckTypeForwardedTo(Assembly sourceAssembly, Assembly destAssembly, Type resolvedType)
        {
            // nop on core
        }

        // Type resolver that defaults unresolved lookups to a specific "top level" assembly.
        internal sealed class TopLevelAssemblyTypeResolver
        {
            private readonly Assembly _topLevelAssembly;

            public TopLevelAssemblyTypeResolver(Assembly topLevelAssembly)
            {
                _topLevelAssembly = topLevelAssembly;
            }

            public Type ResolveType(Assembly assembly, string simpleTypeName, bool ignoreCase)
            {
                if (assembly == null)
                {
                    assembly = _topLevelAssembly;
                }
                return assembly.GetType(simpleTypeName, throwOnError: false, ignoreCase: ignoreCase);
            }
        }
    }
}
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.MixedReality.Toolkit.Core.Extensions;
using Microsoft.MixedReality.Toolkit.Core.Utilities.Editor;
using UnityEditor;
using UnityEditor.Build.Reporting;
using UnityEngine;
using Debug = UnityEngine.Debug;

namespace Microsoft.MixedReality.Toolkit.Core.Utilities.Build
{
    /// <summary>
    /// Cross platform player build tools
    /// </summary>
    public static class UnityPlayerBuildTools
    {
        // Build configurations. Exactly one of these should be defined for any given build.
        public const string BuildSymbolDebug = "debug";
        public const string BuildSymbolRelease = "release";
        public const string BuildSymbolMaster = "master";

        /// <summary>
        /// Starts the build process
        /// </summary>
        /// <param name="buildInfo">Settings describing the player build (target, scenes, symbols, output directory, hooks).</param>
        /// <returns>
        /// The <see cref="BuildReport"/> from Unity's <see cref="BuildPipeline"/>,
        /// or null when <see cref="BuildPipeline.BuildPlayer(string[],string,BuildTarget,BuildOptions)"/> threw.
        /// </returns>
        public static BuildReport BuildUnityPlayer(IBuildInfo buildInfo)
        {
            EditorUtility.DisplayProgressBar("Build Pipeline", "Gathering Build Data...", 0.25f);

            // Call the pre-build action, if any
            buildInfo.PreBuildAction?.Invoke(buildInfo);

            BuildTargetGroup buildTargetGroup = buildInfo.BuildTarget.GetGroup();
            string playerBuildSymbols = PlayerSettings.GetScriptingDefineSymbolsForGroup(buildTargetGroup);

            // Merge the project's existing scripting define symbols into the build info.
            // If the build info already declares a configuration (debug/release/master),
            // strip any competing configuration symbols from the project set.
            if (!string.IsNullOrEmpty(playerBuildSymbols))
            {
                if (buildInfo.HasConfigurationSymbol())
                {
                    buildInfo.AppendWithoutConfigurationSymbols(playerBuildSymbols);
                }
                else
                {
                    buildInfo.AppendSymbols(playerBuildSymbols.Split(';'));
                }
            }

            if (!string.IsNullOrEmpty(buildInfo.BuildSymbols))
            {
                PlayerSettings.SetScriptingDefineSymbolsForGroup(buildTargetGroup, buildInfo.BuildSymbols);
            }

            // A development build with no explicit configuration defaults to debug.
            if ((buildInfo.BuildOptions & BuildOptions.Development) == BuildOptions.Development)
            {
                if (!buildInfo.HasConfigurationSymbol())
                {
                    buildInfo.AppendSymbols(BuildSymbolDebug);
                }
            }

            if (buildInfo.HasAnySymbols(BuildSymbolDebug))
            {
                buildInfo.BuildOptions |= BuildOptions.Development | BuildOptions.AllowDebugging;
            }

            if (buildInfo.HasAnySymbols(BuildSymbolRelease))
            {
                // Unity automatically adds the DEBUG symbol if the BuildOptions.Development flag is
                // specified. In order to have debug symbols and the RELEASE symbols we have to
                // inject the symbol Unity relies on to enable the /debug+ flag of csc.exe which is "DEVELOPMENT_BUILD"
                buildInfo.AppendSymbols("DEVELOPMENT_BUILD");
            }

            var oldColorSpace = PlayerSettings.colorSpace;

            if (buildInfo.ColorSpace.HasValue)
            {
                PlayerSettings.colorSpace = buildInfo.ColorSpace.Value;
            }

            BuildTarget oldBuildTarget = EditorUserBuildSettings.activeBuildTarget;
            BuildTargetGroup oldBuildTargetGroup = oldBuildTarget.GetGroup();
            EditorUserBuildSettings.SwitchActiveBuildTarget(buildTargetGroup, buildInfo.BuildTarget);

            Directory.CreateDirectory(buildInfo.OutputDirectory);

            BuildReport buildReport = default;

            try
            {
                buildReport = BuildPipeline.BuildPlayer(
                    buildInfo.Scenes.ToArray(),
                    buildInfo.OutputDirectory,
                    buildInfo.BuildTarget,
                    buildInfo.BuildOptions);
            }
            catch (Exception e)
            {
                Debug.LogError($"{e.Message}\n{e.StackTrace}");
            }

            // Restore the editor state we mutated above, even when the build failed.
            PlayerSettings.colorSpace = oldColorSpace;
            PlayerSettings.SetScriptingDefineSymbolsForGroup(buildTargetGroup, playerBuildSymbols);
            EditorUserBuildSettings.SwitchActiveBuildTarget(oldBuildTargetGroup, oldBuildTarget);

            // Call the post-build action, if any
            buildInfo.PostBuildAction?.Invoke(buildInfo, buildReport);

            return buildReport;
        }

        /// <summary>
        /// Start a build using Unity's command line.
        /// Exits the editor with code 0 on success and 1 on failure.
        /// </summary>
        public static async void StartCommandLineBuild()
        {
            // We don't need stack traces on all our logs. Makes things a lot easier to read.
            Application.SetStackTraceLogType(LogType.Log, StackTraceLogType.None);
            Debug.Log($"Starting command line build for {EditorUserBuildSettings.activeBuildTarget}...");
            EditorAssemblyReloadManager.LockReloadAssemblies = true;

            bool success;
            try
            {
                switch (EditorUserBuildSettings.activeBuildTarget)
                {
                    case BuildTarget.WSAPlayer:
                        success = await UwpPlayerBuildTools.BuildPlayer(new UwpBuildInfo(true) { BuildAppx = true });
                        break;
                    default:
                        var buildInfo = new BuildInfo(true) as IBuildInfo;
                        ParseBuildCommandLine(ref buildInfo);
                        var buildResult = BuildUnityPlayer(buildInfo);
                        // BUGFIX: BuildUnityPlayer returns null when BuildPipeline.BuildPlayer
                        // threw inside its try/catch. Guard the dereference so a failed build
                        // exits with code 1 instead of crashing on a NullReferenceException.
                        success = buildResult != null && buildResult.summary.result == BuildResult.Succeeded;
                        break;
                }
            }
            catch (Exception e)
            {
                Debug.LogError($"Build Failed!\n{e.Message}\n{e.StackTrace}");
                success = false;
            }

            Debug.Log($"Exiting command line build... Build success? {success}");
            EditorApplication.Exit(success ? 0 : 1);
        }

        /// <summary>
        /// Warns when no scenes are listed in the build settings.
        /// Returns true to proceed with the build ("Continue Anyway" or scenes present),
        /// false when the user chose "Cancel Build".
        /// </summary>
        internal static bool CheckBuildScenes()
        {
            if (EditorBuildSettings.scenes.Length == 0)
            {
                return EditorUtility.DisplayDialog("Attention!",
                    "No scenes are present in the build settings.\n" +
                    "The current scene will be the one built.\n\n" +
                    "Do you want to cancel and add one?",
                    "Continue Anyway", "Cancel Build");
            }

            return true;
        }

        /// <summary>
        /// Get the Unity Project Root Path.
        /// </summary>
        /// <returns>The full path to the project's root.</returns>
        public static string GetProjectPath()
        {
            // Application.dataPath points at <project>/Assets; the parent is the project root.
            return Path.GetDirectoryName(Path.GetFullPath(Application.dataPath));
        }

        /// <summary>
        /// Populates <paramref name="buildInfo"/> from the process command line arguments.
        /// Unrecognized arguments are ignored.
        /// </summary>
        public static void ParseBuildCommandLine(ref IBuildInfo buildInfo)
        {
            string[] arguments = Environment.GetCommandLineArgs();

            for (int i = 0; i < arguments.Length; ++i)
            {
                switch (arguments[i])
                {
                    case "-autoIncrement":
                        buildInfo.AutoIncrement = true;
                        break;
                    case "-scenes":
                        // TODO parse json scene list and set them.
                        break;
                    case "-buildOutput":
                        // Value arguments consume the next token.
                        buildInfo.OutputDirectory = arguments[++i];
                        break;
                    case "-colorSpace":
                        buildInfo.ColorSpace = (ColorSpace)Enum.Parse(typeof(ColorSpace), arguments[++i]);
                        break;
                    case "-x86":
                    case "-x64":
                        buildInfo.BuildPlatform = arguments[i].Substring(1);
                        break;
                    case "-debug":
                    case "-master":
                    case "-release":
                        buildInfo.Configuration = arguments[i].Substring(1).ToLower();
                        break;
                }
            }
        }

        /// <summary>
        /// Restores any nuget packages at the path specified.
        /// </summary>
        /// <param name="nugetPath">Full path to nuget.exe.</param>
        /// <param name="storePath">Directory containing the project.json to restore.</param>
        /// <returns>True, if the nuget packages were successfully restored.</returns>
        public static async Task<bool> RestoreNugetPackagesAsync(string nugetPath, string storePath)
        {
            Debug.Assert(File.Exists(nugetPath));
            Debug.Assert(Directory.Exists(storePath));

            await new Process().StartProcessAsync(nugetPath, $"restore \"{storePath}/project.json\"");

            // Success is inferred from the lock file nuget writes next to project.json.
            return File.Exists($"{storePath}\\project.lock.json");
        }
    }
}
//
// Copyright (c) 2008-2019 the Urho3D project.
// Copyright (c) 2017-2020 the rbfx project.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;

namespace Urho3DNet
{
    /// 3x4 matrix for scene node transform calculations.
    /// Stored row-major as rows M0x, M1x, M2x; an implicit fourth row (0,0,0,1) is assumed.
    public struct Matrix3x4
    {
        /// Copy-construct from a 3x3 matrix and set the extra elements to identity.
        public Matrix3x4(in Matrix3 matrix)
        {
            M00 = matrix.M00;
            M01 = matrix.M01;
            M02 = matrix.M02;
            M03 = 0.0f;
            M10 = matrix.M10;
            M11 = matrix.M11;
            M12 = matrix.M12;
            M13 = 0.0f;
            M20 = matrix.M20;
            M21 = matrix.M21;
            M22 = matrix.M22;
            M23 = 0.0f;
        }

        /// Copy-construct from a 4x4 matrix which is assumed to contain no projection.
        /// (The fourth row of the source is simply dropped.)
        public Matrix3x4(in Matrix4 matrix)
        {
            M00 = matrix.M00;
            M01 = matrix.M01;
            M02 = matrix.M02;
            M03 = matrix.M03;
            M10 = matrix.M10;
            M11 = matrix.M11;
            M12 = matrix.M12;
            M13 = matrix.M13;
            M20 = matrix.M20;
            M21 = matrix.M21;
            M22 = matrix.M22;
            M23 = matrix.M23;
        }

        /// Construct from values or identity matrix by default.
        public Matrix3x4(
            float v00 = 1, float v01 = 0, float v02 = 0, float v03 = 0,
            float v10 = 0, float v11 = 1, float v12 = 0, float v13 = 0,
            float v20 = 0, float v21 = 0, float v22 = 1, float v23 = 0)
        {
            M00 = v00;
            M01 = v01;
            M02 = v02;
            M03 = v03;
            M10 = v10;
            M11 = v11;
            M12 = v12;
            M13 = v13;
            M20 = v20;
            M21 = v21;
            M22 = v22;
            M23 = v23;
        }

        /// Construct from a float array (row-major, 12 elements; no length check is performed).
        public Matrix3x4(IReadOnlyList<float> data)
        {
            M00 = data[0];
            M01 = data[1];
            M02 = data[2];
            M03 = data[3];
            M10 = data[4];
            M11 = data[5];
            M12 = data[6];
            M13 = data[7];
            M20 = data[8];
            M21 = data[9];
            M22 = data[10];
            M23 = data[11];
        }

        /// Construct from translation, rotation and uniform scale.
        public Matrix3x4(in Vector3 translation, in Quaternion rotation, float scale)
        {
            // Definite assignment: zero all fields before delegating to the setters.
            M00 = M01 = M02 = M03 = M10 = M11 = M12 = M13 = M20 = M21 = M22 = M23 = 0;
            SetRotation(rotation.RotationMatrix * scale);
            SetTranslation(translation);
        }

        /// Construct from translation, rotation and nonuniform scale.
        public Matrix3x4(in Vector3 translation, in Quaternion rotation, in Vector3 scale)
        {
            // Definite assignment: zero all fields before delegating to the setters.
            M00 = M01 = M02 = M03 = M10 = M11 = M12 = M13 = M20 = M21 = M22 = M23 = 0;
            SetRotation(rotation.RotationMatrix.Scaled(scale));
            SetTranslation(translation);
        }

        /// Test for equality with another matrix without epsilon.
        public static bool operator ==(in Matrix3x4 lhs, in Matrix3x4 rhs)
        {
            return lhs.M00 == rhs.M00 &&
                   lhs.M01 == rhs.M01 &&
                   lhs.M02 == rhs.M02 &&
                   lhs.M03 == rhs.M03 &&
                   lhs.M10 == rhs.M10 &&
                   lhs.M11 == rhs.M11 &&
                   lhs.M12 == rhs.M12 &&
                   lhs.M13 == rhs.M13 &&
                   lhs.M20 == rhs.M20 &&
                   lhs.M21 == rhs.M21 &&
                   lhs.M22 == rhs.M22 &&
                   lhs.M23 == rhs.M23;
        }

        /// Test for inequality with another matrix without epsilon.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static bool operator !=(in Matrix3x4 lhs, in Matrix3x4 rhs)
        {
            return !(lhs == rhs);
        }

        /// Multiply a Vector3 which is assumed to represent position (w = 1, so translation applies).
        public static Vector3 operator *(in Matrix3x4 lhs, in Vector3 rhs)
        {
            return new Vector3(
                (lhs.M00 * rhs.X + lhs.M01 * rhs.Y + lhs.M02 * rhs.Z + lhs.M03),
                (lhs.M10 * rhs.X + lhs.M11 * rhs.Y + lhs.M12 * rhs.Z + lhs.M13),
                (lhs.M20 * rhs.X + lhs.M21 * rhs.Y + lhs.M22 * rhs.Z + lhs.M23)
            );
        }

        /// Multiply a Vector4 (translation is weighted by W; result collapses to a Vector3).
        public static Vector3 operator *(in Matrix3x4 lhs, in Vector4 rhs)
        {
            return new Vector3(
                (lhs.M00 * rhs.X + lhs.M01 * rhs.Y + lhs.M02 * rhs.Z + lhs.M03 * rhs.W),
                (lhs.M10 * rhs.X + lhs.M11 * rhs.Y + lhs.M12 * rhs.Z + lhs.M13 * rhs.W),
                (lhs.M20 * rhs.X + lhs.M21 * rhs.Y + lhs.M22 * rhs.Z + lhs.M23 * rhs.W)
            );
        }

        /// Add a matrix (element-wise).
        public static Matrix3x4 operator +(in Matrix3x4 lhs, in Matrix3x4 rhs)
        {
            return new Matrix3x4(
                lhs.M00 + rhs.M00, lhs.M01 + rhs.M01, lhs.M02 + rhs.M02, lhs.M03 + rhs.M03,
                lhs.M10 + rhs.M10, lhs.M11 + rhs.M11, lhs.M12 + rhs.M12, lhs.M13 + rhs.M13,
                lhs.M20 + rhs.M20, lhs.M21 + rhs.M21, lhs.M22 + rhs.M22, lhs.M23 + rhs.M23
            );
        }

        /// Subtract a matrix (element-wise).
        public static Matrix3x4 operator -(in Matrix3x4 lhs, in Matrix3x4 rhs)
        {
            return new Matrix3x4(
                lhs.M00 - rhs.M00, lhs.M01 - rhs.M01, lhs.M02 - rhs.M02, lhs.M03 - rhs.M03,
                lhs.M10 - rhs.M10, lhs.M11 - rhs.M11, lhs.M12 - rhs.M12, lhs.M13 - rhs.M13,
                lhs.M20 - rhs.M20, lhs.M21 - rhs.M21, lhs.M22 - rhs.M22, lhs.M23 - rhs.M23
            );
        }

        /// Multiply with a scalar (element-wise).
        public static Matrix3x4 operator *(in Matrix3x4 lhs, float rhs)
        {
            return new Matrix3x4(
                lhs.M00 * rhs, lhs.M01 * rhs, lhs.M02 * rhs, lhs.M03 * rhs,
                lhs.M10 * rhs, lhs.M11 * rhs, lhs.M12 * rhs, lhs.M13 * rhs,
                lhs.M20 * rhs, lhs.M21 * rhs, lhs.M22 * rhs, lhs.M23 * rhs
            );
        }

        /// Multiply a matrix (composition; both operands treated as having implicit (0,0,0,1) fourth row).
        public static Matrix3x4 operator *(in Matrix3x4 lhs, in Matrix3x4 rhs)
        {
            return new Matrix3x4(
                lhs.M00 * rhs.M00 + lhs.M01 * rhs.M10 + lhs.M02 * rhs.M20,
                lhs.M00 * rhs.M01 + lhs.M01 * rhs.M11 + lhs.M02 * rhs.M21,
                lhs.M00 * rhs.M02 + lhs.M01 * rhs.M12 + lhs.M02 * rhs.M22,
                lhs.M00 * rhs.M03 + lhs.M01 * rhs.M13 + lhs.M02 * rhs.M23 + lhs.M03,
                lhs.M10 * rhs.M00 + lhs.M11 * rhs.M10 + lhs.M12 * rhs.M20,
                lhs.M10 * rhs.M01 + lhs.M11 * rhs.M11 + lhs.M12 * rhs.M21,
                lhs.M10 * rhs.M02 + lhs.M11 * rhs.M12 + lhs.M12 * rhs.M22,
                lhs.M10 * rhs.M03 + lhs.M11 * rhs.M13 + lhs.M12 * rhs.M23 + lhs.M13,
                lhs.M20 * rhs.M00 + lhs.M21 * rhs.M10 + lhs.M22 * rhs.M20,
                lhs.M20 * rhs.M01 + lhs.M21 * rhs.M11 + lhs.M22 * rhs.M21,
                lhs.M20 * rhs.M02 + lhs.M21 * rhs.M12 + lhs.M22 * rhs.M22,
                lhs.M20 * rhs.M03 + lhs.M21 * rhs.M13 + lhs.M22 * rhs.M23 + lhs.M23
            );
        }

        /// Multiply a 4x4 matrix (the 4x4 result keeps rhs's fourth row verbatim).
        public static Matrix4 operator *(in Matrix3x4 lhs, in Matrix4 rhs)
        {
            return new Matrix4(
                lhs.M00 * rhs.M00 + lhs.M01 * rhs.M10 + lhs.M02 * rhs.M20 + lhs.M03 * rhs.M30,
                lhs.M00 * rhs.M01 + lhs.M01 * rhs.M11 + lhs.M02 * rhs.M21 + lhs.M03 * rhs.M31,
                lhs.M00 * rhs.M02 + lhs.M01 * rhs.M12 + lhs.M02 * rhs.M22 + lhs.M03 * rhs.M32,
                lhs.M00 * rhs.M03 + lhs.M01 * rhs.M13 + lhs.M02 * rhs.M23 + lhs.M03 * rhs.M33,
                lhs.M10 * rhs.M00 + lhs.M11 * rhs.M10 + lhs.M12 * rhs.M20 + lhs.M13 * rhs.M30,
                lhs.M10 * rhs.M01 + lhs.M11 * rhs.M11 + lhs.M12 * rhs.M21 + lhs.M13 * rhs.M31,
                lhs.M10 * rhs.M02 + lhs.M11 * rhs.M12 + lhs.M12 * rhs.M22 + lhs.M13 * rhs.M32,
                lhs.M10 * rhs.M03 + lhs.M11 * rhs.M13 + lhs.M12 * rhs.M23 + lhs.M13 * rhs.M33,
                lhs.M20 * rhs.M00 + lhs.M21 * rhs.M10 + lhs.M22 * rhs.M20 + lhs.M23 * rhs.M30,
                lhs.M20 * rhs.M01 + lhs.M21 * rhs.M11 + lhs.M22 * rhs.M21 + lhs.M23 * rhs.M31,
                lhs.M20 * rhs.M02 + lhs.M21 * rhs.M12 + lhs.M22 * rhs.M22 + lhs.M23 * rhs.M32,
                lhs.M20 * rhs.M03 + lhs.M21 * rhs.M13 + lhs.M22 * rhs.M23 + lhs.M23 * rhs.M33,
                rhs.M30,
                rhs.M31,
                rhs.M32,
                rhs.M33
            );
        }

        /// Multiply a 3x4 matrix with a scalar.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Matrix3x4 operator *(float lhs, in Matrix3x4 rhs)
        {
            return rhs * lhs;
        }

        /// Set translation elements.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void SetTranslation(in Vector3 translation)
        {
            M03 = translation.X;
            M13 = translation.Y;
            M23 = translation.Z;
        }

        /// Set rotation elements from a 3x3 matrix.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void SetRotation(in Matrix3 rotation)
        {
            M00 = rotation.M00;
            M01 = rotation.M01;
            M02 = rotation.M02;
            M10 = rotation.M10;
            M11 = rotation.M11;
            M12 = rotation.M12;
            M20 = rotation.M20;
            M21 = rotation.M21;
            M22 = rotation.M22;
        }

        /// Set scaling elements (diagonal only; off-diagonal elements are untouched).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void SetScale(in Vector3 scale)
        {
            M00 = scale.X;
            M11 = scale.Y;
            M22 = scale.Z;
        }

        /// Set uniform scaling elements (diagonal only; off-diagonal elements are untouched).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public void SetScale(float scale)
        {
            M00 = scale;
            M11 = scale;
            M22 = scale;
        }

        /// Return the combined rotation and scaling matrix.
        public Matrix3 Matrix3 => new Matrix3(M00, M01, M02, M10, M11, M12, M20, M21, M22);

        /// Convert to a 4x4 matrix by filling in an identity last row.
        public Matrix4 Matrix4 => new Matrix4(M00, M01, M02, M03, M10, M11, M12, M13, M20, M21, M22, M23, 0.0f, 0.0f, 0.0f, 1.0f);

        /// Return the rotation matrix with scaling removed.
        public Matrix3 RotationMatrix
        {
            get
            {
                // Column lengths are the per-axis scales; dividing them out leaves pure rotation.
                var invScale = new Vector3(
                    1.0f / (float)Math.Sqrt(M00 * M00 + M10 * M10 + M20 * M20),
                    1.0f / (float)Math.Sqrt(M01 * M01 + M11 * M11 + M21 * M21),
                    1.0f / (float)Math.Sqrt(M02 * M02 + M12 * M12 + M22 * M22)
                );

                return Matrix3.Scaled(invScale);
            }
        }

        /// Return the translation part.
        public Vector3 Translation => new Vector3(M03, M13, M23);

        /// Return the rotation part.
        public Quaternion Rotation => new Quaternion(RotationMatrix);

        /// Return the scaling part (column lengths; always non-negative).
        public Vector3 Scale => new Vector3(
            (float)Math.Sqrt(M00 * M00 + M10 * M10 + M20 * M20),
            (float)Math.Sqrt(M01 * M01 + M11 * M11 + M21 * M21),
            (float)Math.Sqrt(M02 * M02 + M12 * M12 + M22 * M22)
        );

        /// Return the scaling part with the sign. Reference rotation matrix is required to avoid ambiguity.
        public Vector3 SignedScale(in Matrix3 rotation)
        {
            return new Vector3(
                rotation.M00 * M00 + rotation.M10 * M10 + rotation.M20 * M20,
                rotation.M01 * M01 + rotation.M11 * M11 + rotation.M21 * M21,
                rotation.M02 * M02 + rotation.M12 * M12 + rotation.M22 * M22
            );
        }

        /// Test for equality with another matrix without epsilon (exact element comparison).
        public bool Equals(Matrix3x4 rhs)
        {
            return M00 == rhs.M00 &&
                   M01 == rhs.M01 &&
                   M02 == rhs.M02 &&
                   M03 == rhs.M03 &&
                   M10 == rhs.M10 &&
                   M11 == rhs.M11 &&
                   M12 == rhs.M12 &&
                   M13 == rhs.M13 &&
                   M20 == rhs.M20 &&
                   M21 == rhs.M21 &&
                   M22 == rhs.M22 &&
                   M23 == rhs.M23;
        }

        /// Return decomposition to translation, rotation and scale.
        public void Decompose(out Vector3 translation, out Quaternion rotation, out Vector3 scale)
        {
            translation.X = M03;
            translation.Y = M13;
            translation.Z = M23;

            // Per-axis scale is the length of each basis column.
            scale.X = (float)Math.Sqrt(M00 * M00 + M10 * M10 + M20 * M20);
            scale.Y = (float)Math.Sqrt(M01 * M01 + M11 * M11 + M21 * M21);
            scale.Z = (float)Math.Sqrt(M02 * M02 + M12 * M12 + M22 * M22);

            var invScale = new Vector3(1.0f / scale.X, 1.0f / scale.Y, 1.0f / scale.Z);
            rotation = new Quaternion(Matrix3.Scaled(invScale));
        }

        /// Return inverse (assumes the upper-left 3x3 is invertible; no singularity check).
        public Matrix3x4 Inverse()
        {
            float det = M00 * M11 * M22 +
                        M10 * M21 * M02 +
                        M20 * M01 * M12 -
                        M20 * M11 * M02 -
                        M10 * M01 * M22 -
                        M00 * M21 * M12;

            float invDet = 1.0f / det;
            Matrix3x4 ret;

            // Inverse of the 3x3 part via the adjugate...
            ret.M00 = (M11 * M22 - M21 * M12) * invDet;
            ret.M01 = -(M01 * M22 - M21 * M02) * invDet;
            ret.M02 = (M01 * M12 - M11 * M02) * invDet;
            // ...and the inverse translation is -R⁻¹·t.
            ret.M03 = -(M03 * ret.M00 + M13 * ret.M01 + M23 * ret.M02);
            ret.M10 = -(M10 * M22 - M20 * M12) * invDet;
            ret.M11 = (M00 * M22 - M20 * M02) * invDet;
            ret.M12 = -(M00 * M12 - M10 * M02) * invDet;
            ret.M13 = -(M03 * ret.M10 + M13 * ret.M11 + M23 * ret.M12);
            ret.M20 = (M10 * M21 - M20 * M11) * invDet;
            ret.M21 = -(M00 * M21 - M20 * M01) * invDet;
            ret.M22 = (M00 * M11 - M10 * M01) * invDet;
            ret.M23 = -(M03 * ret.M20 + M13 * ret.M21 + M23 * ret.M22);

            return ret;
        }

        /// Return float data (row-major copy; mutating the array does not affect the matrix).
        public float[] Data => new[] { M00, M01, M02, M03, M10, M11, M12, M13, M20, M21, M22, M23 };

        /// Return matrix element. Fields are laid out contiguously, so (i, j) maps to offset i*4+j.
        public float this[int i, int j]
        {
            get
            {
                if (i < 0 || i > 2 || j < 0 || j > 3)
                    throw new IndexOutOfRangeException();
                unsafe
                {
                    fixed (float* p = &M00)
                    {
                        return p[i * 4 + j];
                    }
                }
            }
            set
            {
                if (i < 0 || i > 2 || j < 0 || j > 3)
                    throw new IndexOutOfRangeException();
                unsafe
                {
                    fixed (float* p = &M00)
                    {
                        p[i * 4 + j] = value;
                    }
                }
            }
        }

        /// Return matrix row.
        public Vector4 Row(int i)
        {
            return new Vector4(this[i, 0], this[i, 1], this[i, 2], this[i, 3]);
        }

        /// Return matrix column.
        public Vector3 Column(int j)
        {
            return new Vector3(this[0, j], this[1, j], this[2, j]);
        }

        /// Return as string.
        public override string ToString()
        {
            return $"{M00} {M01} {M02} {M03} {M10} {M11} {M12} {M13} {M20} {M21} {M22} {M23}";
        }

        // Row-major element storage; the contiguous layout is relied upon by the unsafe indexer above.
        public float M00;
        public float M01;
        public float M02;
        public float M03;
        public float M10;
        public float M11;
        public float M12;
        public float M13;
        public float M20;
        public float M21;
        public float M22;
        public float M23;

        /// Zero matrix.
        public static readonly Matrix3x4 Zero = new Matrix3x4(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        /// Identity matrix.
        public static readonly Matrix3x4 Identity = new Matrix3x4(
            1, 0, 0, 0,
            0, 1, 0, 0,
            0, 0, 1, 0);
    };
}
//-----------------------------------------------------------------------
// <copyright file="BluetoothSocket.Win32.cs" company="In The Hand Ltd">
// Copyright (c) 2017-19 In The Hand Ltd, All rights reserved.
// This source code is licensed under the MIT License - see License.txt
// </copyright>
//-----------------------------------------------------------------------

using InTheHand.Devices.Bluetooth;
using System;
using System.Net.Sockets;
using System.Runtime.InteropServices;

namespace InTheHand.Net.Sockets
{
    /// <summary>
    /// Thin wrapper over a raw winsock RFCOMM socket handle (Win32 implementation).
    /// </summary>
    public class BluetoothSocket : IDisposable
    {
        // Native socket handle; 0 is treated as the closed/disposed sentinel.
        private int _socket = 0;

        /// <summary>
        /// Creates a new stream socket using the Bluetooth address family and RFCOMM protocol.
        /// </summary>
        public BluetoothSocket()
        {
            _socket = NativeMethods.socket(BluetoothSockets.BluetoothAddressFamily, SocketType.Stream, BluetoothSockets.RfcommProtocolType);
        }

        // Wraps an already-open native handle (used by Accept).
        internal BluetoothSocket(int socket)
        {
            _socket = socket;
        }

        private void ThrowIfSocketClosed()
        {
            if (_socket == 0)
                throw new ObjectDisposedException("BluetoothSocket");
        }

        /// <summary>
        /// Blocks until an incoming connection is accepted and returns a new socket for it.
        /// </summary>
        /// <exception cref="SocketException">The native accept call failed.</exception>
        public BluetoothSocket Accept()
        {
            ThrowIfSocketClosed();

            int newSocket = NativeMethods.accept(_socket, IntPtr.Zero, IntPtr.Zero);
            if (newSocket < 0)
                throw new SocketException(NativeMethods.WSAGetLastError());

            return new BluetoothSocket(newSocket);
        }

        /// <summary>
        /// Associates the socket with a local endpoint.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="localEP"/> is null.</exception>
        /// <exception cref="SocketException">The native bind call failed.</exception>
        public void Bind(System.Net.EndPoint localEP)
        {
            ThrowIfSocketClosed();

            if (localEP == null)
                throw new ArgumentNullException("localEP");

            var sockAddr = localEP.Serialize();
            int result = NativeMethods.bind(_socket, SocketAddressToArray(sockAddr), sockAddr.Size);

            if (result < 0)
                throw new SocketException(NativeMethods.WSAGetLastError());
        }

        /// <summary>
        /// Copies a managed <see cref="System.Net.SocketAddress"/> into the raw sockaddr byte
        /// layout expected by winsock.
        /// </summary>
        private static byte[] SocketAddressToArray(System.Net.SocketAddress socketAddress)
        {
            // BUGFIX: SocketAddress already stores the 2-byte address family at offsets 0-1,
            // so a straight copy of all Size bytes yields the correct native representation.
            // The previous implementation allocated Size+1 bytes and then indexed
            // socketAddress[Size] — one past the indexer's valid range (0..Size-1) —
            // so every Bind/Connect threw before ever reaching winsock.
            byte[] buffer = new byte[socketAddress.Size];

            for (int i = 0; i < buffer.Length; i++)
            {
                buffer[i] = socketAddress[i];
            }

            return buffer;
        }

        /// <summary>
        /// Closes the native socket handle. Safe to call multiple times.
        /// </summary>
        public void Close()
        {
            if (_socket != 0)
            {
                // The close result is intentionally ignored: there is no useful recovery here.
                int result = NativeMethods.closesocket(_socket);
                _socket = 0;
            }
        }

        /// <summary>
        /// Establishes a connection to a remote endpoint.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="remoteEP"/> is null.</exception>
        /// <exception cref="SocketException">The native connect call failed.</exception>
        public void Connect(global::System.Net.EndPoint remoteEP)
        {
            ThrowIfSocketClosed();

            if (remoteEP == null)
                throw new ArgumentNullException("remoteEP");

            var sockAddr = remoteEP.Serialize();
            int result = NativeMethods.connect(_socket, SocketAddressToArray(sockAddr), sockAddr.Size);

            if (result < 0)
                throw new SocketException(NativeMethods.WSAGetLastError());
        }

        /// <summary>
        /// Receives data into the whole buffer with no flags.
        /// </summary>
        public int Receive(byte[] buffer)
        {
            return Receive(buffer, buffer.Length, 0);
        }

        /// <summary>
        /// Receives data into the whole buffer with the given flags.
        /// </summary>
        public int Receive(byte[] buffer, SocketFlags socketFlags)
        {
            return Receive(buffer, buffer.Length, socketFlags);
        }

        /// <summary>
        /// Receives up to <paramref name="size"/> bytes into <paramref name="buffer"/>.
        /// </summary>
        /// <returns>The number of bytes actually received.</returns>
        /// <exception cref="SocketException">The native recv call failed.</exception>
        public int Receive(byte[] buffer, int size, SocketFlags socketFlags)
        {
            ThrowIfSocketClosed();

            if (buffer == null)
                throw new ArgumentNullException("buffer");

            if (size > buffer.Length)
                throw new ArgumentOutOfRangeException("size");

            int result = NativeMethods.recv(_socket, buffer, size, (int)socketFlags);
            if (result < 0)
                throw new SocketException(NativeMethods.WSAGetLastError());

            return result;
        }

        /// <summary>
        /// Sends the whole buffer with no flags.
        /// </summary>
        public int Send(byte[] buffer)
        {
            return Send(buffer, buffer.Length, 0);
        }

        /// <summary>
        /// Sends the whole buffer with the given flags.
        /// </summary>
        public int Send(byte[] buffer, SocketFlags socketFlags)
        {
            return Send(buffer, buffer.Length, socketFlags);
        }

        /// <summary>
        /// Sends up to <paramref name="size"/> bytes from <paramref name="buffer"/>.
        /// </summary>
        /// <returns>The number of bytes actually sent.</returns>
        /// <exception cref="SocketException">The native send call failed.</exception>
        public int Send(byte[] buffer, int size, SocketFlags socketFlags)
        {
            ThrowIfSocketClosed();

            if (buffer == null)
                throw new ArgumentNullException("buffer");

            if (size > buffer.Length)
                throw new ArgumentOutOfRangeException("size");

            int result = NativeMethods.send(_socket, buffer, size, (int)socketFlags);
            if (result < 0)
                throw new SocketException(NativeMethods.WSAGetLastError());

            return result;
        }

        /// <summary>
        /// Gets the number of bytes available to read (winsock FIONREAD ioctl).
        /// </summary>
        public int Available
        {
            get
            {
                byte[] outVal = new byte[4];
                NativeMethods.ioctlsocket(_socket, NativeMethods.FIONREAD, outVal);
                return BitConverter.ToInt32(outVal, 0);
            }
        }

        protected virtual void Dispose(bool disposing)
        {
            Close();
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        // Finalizer ensures the native handle is released even if Dispose was never called.
        ~BluetoothSocket()
        {
            Dispose(false);
        }

        /// <summary>
        /// P/Invoke declarations for the winsock functions this wrapper uses.
        /// </summary>
        private static class NativeMethods
        {
            private const string winsockDll = "ws2_32.dll";

            // ioctl command: get the number of bytes available to read.
            internal const int FIONREAD = 0x4004667F;

            [DllImport(winsockDll)]
            internal static extern int socket(AddressFamily af, SocketType type, ProtocolType protocol);

            [DllImport(winsockDll)]
            internal static extern int closesocket(int s);

            [DllImport(winsockDll)]
            internal static extern int connect(int s, byte[] name, int namelen);

            [DllImport(winsockDll)]
            internal static extern int recv(int s, byte[] buf, int len, int flags);

            [DllImport(winsockDll)]
            internal static extern int send(int s, byte[] buf, int len, int flags);

            [DllImport(winsockDll)]
            internal static extern int bind(int s, byte[] name, int namelen);

            [DllImport(winsockDll)]
            internal static extern int accept(int s, IntPtr addr, IntPtr addrlen);

            [DllImport(winsockDll)]
            internal static extern int WSAGetLastError();

            [DllImport(winsockDll)]
            internal static extern int ioctlsocket(int s, int cmd, byte[] argp);
        }
    }
}
/*
 * UltraCart Rest API V2
 *
 * UltraCart REST API Version 2
 *
 * OpenAPI spec version: 2.0.0
 * Contact: support@ultracart.com
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 */

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter;

namespace com.ultracart.admin.v2.Model
{
    /// <summary>
    /// AffiliateClickQuery — query filter for searching affiliate click records.
    /// All members are optional; unset members are omitted from the serialized query.
    /// </summary>
    [DataContract]
    public partial class AffiliateClickQuery : IEquatable<AffiliateClickQuery>, IValidatableObject
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="AffiliateClickQuery" /> class.
        /// </summary>
        /// <param name="affiliateLinkOid">Unique object identifier for the link that this click is associated with.</param>
        /// <param name="affiliateOid">Affiliate ID associated with the click.</param>
        /// <param name="clickDtsBegin">Minimum click date/time to return.</param>
        /// <param name="clickDtsEnd">Maximum click date/time to return.</param>
        /// <param name="ipAddress">IP address that generated the click.</param>
        /// <param name="subId">Sub ID value passed on the click.</param>
        public AffiliateClickQuery(int? affiliateLinkOid = default(int?), int? affiliateOid = default(int?), string clickDtsBegin = default(string), string clickDtsEnd = default(string), string ipAddress = default(string), string subId = default(string))
        {
            this.AffiliateLinkOid = affiliateLinkOid;
            this.AffiliateOid = affiliateOid;
            this.ClickDtsBegin = clickDtsBegin;
            this.ClickDtsEnd = clickDtsEnd;
            this.IpAddress = ipAddress;
            this.SubId = subId;
        }

        /// <summary>
        /// Unique object identifier for the link that this click is associated with
        /// </summary>
        /// <value>Unique object identifier for the link that this click is associated with</value>
        [DataMember(Name="affiliate_link_oid", EmitDefaultValue=false)]
        public int? AffiliateLinkOid { get; set; }

        /// <summary>
        /// Affiliate ID associated with the click
        /// </summary>
        /// <value>Affiliate ID associated with the click</value>
        [DataMember(Name="affiliate_oid", EmitDefaultValue=false)]
        public int? AffiliateOid { get; set; }

        /// <summary>
        /// Minimum click date/time to return
        /// </summary>
        /// <value>Minimum click date/time to return</value>
        [DataMember(Name="click_dts_begin", EmitDefaultValue=false)]
        public string ClickDtsBegin { get; set; }

        /// <summary>
        /// Maximum click date/time to return
        /// </summary>
        /// <value>Maximum click date/time to return</value>
        [DataMember(Name="click_dts_end", EmitDefaultValue=false)]
        public string ClickDtsEnd { get; set; }

        /// <summary>
        /// IP address that generated the click
        /// </summary>
        /// <value>IP address that generated the click</value>
        [DataMember(Name="ip_address", EmitDefaultValue=false)]
        public string IpAddress { get; set; }

        /// <summary>
        /// Sub ID value passed on the click
        /// </summary>
        /// <value>Sub ID value passed on the click</value>
        [DataMember(Name="sub_id", EmitDefaultValue=false)]
        public string SubId { get; set; }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class AffiliateClickQuery {\n");
            sb.Append("  AffiliateLinkOid: ").Append(AffiliateLinkOid).Append("\n");
            sb.Append("  AffiliateOid: ").Append(AffiliateOid).Append("\n");
            sb.Append("  ClickDtsBegin: ").Append(ClickDtsBegin).Append("\n");
            sb.Append("  ClickDtsEnd: ").Append(ClickDtsEnd).Append("\n");
            sb.Append("  IpAddress: ").Append(IpAddress).Append("\n");
            sb.Append("  SubId: ").Append(SubId).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public virtual string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="input">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object input)
        {
            return this.Equals(input as AffiliateClickQuery);
        }

        /// <summary>
        /// Returns true if AffiliateClickQuery instances are equal (member-wise comparison)
        /// </summary>
        /// <param name="input">Instance of AffiliateClickQuery to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(AffiliateClickQuery input)
        {
            if (input == null)
                return false;

            return
                (
                    this.AffiliateLinkOid == input.AffiliateLinkOid ||
                    (this.AffiliateLinkOid != null &&
                    this.AffiliateLinkOid.Equals(input.AffiliateLinkOid))
                ) &&
                (
                    this.AffiliateOid == input.AffiliateOid ||
                    (this.AffiliateOid != null &&
                    this.AffiliateOid.Equals(input.AffiliateOid))
                ) &&
                (
                    this.ClickDtsBegin == input.ClickDtsBegin ||
                    (this.ClickDtsBegin != null &&
                    this.ClickDtsBegin.Equals(input.ClickDtsBegin))
                ) &&
                (
                    this.ClickDtsEnd == input.ClickDtsEnd ||
                    (this.ClickDtsEnd != null &&
                    this.ClickDtsEnd.Equals(input.ClickDtsEnd))
                ) &&
                (
                    this.IpAddress == input.IpAddress ||
                    (this.IpAddress != null &&
                    this.IpAddress.Equals(input.IpAddress))
                ) &&
                (
                    this.SubId == input.SubId ||
                    (this.SubId != null &&
                    this.SubId.Equals(input.SubId))
                );
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            unchecked // Overflow is fine, just wrap
            {
                int hashCode = 41;
                if (this.AffiliateLinkOid != null)
                    hashCode = hashCode * 59 + this.AffiliateLinkOid.GetHashCode();
                if (this.AffiliateOid != null)
                    hashCode = hashCode * 59 + this.AffiliateOid.GetHashCode();
                if (this.ClickDtsBegin != null)
                    hashCode = hashCode * 59 + this.ClickDtsBegin.GetHashCode();
                if (this.ClickDtsEnd != null)
                    hashCode = hashCode * 59 + this.ClickDtsEnd.GetHashCode();
                if (this.IpAddress != null)
                    hashCode = hashCode * 59 + this.IpAddress.GetHashCode();
                if (this.SubId != null)
                    hashCode = hashCode * 59 + this.SubId.GetHashCode();
                return hashCode;
            }
        }

        /// <summary>
        /// To validate all properties of the instance
        /// </summary>
        /// <param name="validationContext">Validation context</param>
        /// <returns>Validation Result (always empty; this query object has no constraints)</returns>
        IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
        {
            yield break;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.AcceptanceTestsAzureBodyDurationAllSync
{
    using System.Linq;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// Test Infrastructure for AutoRest
    /// </summary>
    public partial class AutoRestDurationTestService : Microsoft.Rest.ServiceClient<AutoRestDurationTestService>, IAutoRestDurationTestService, IAzureClient
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public System.Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Credentials needed for the client to connect to Azure.
        /// </summary>
        public Microsoft.Rest.ServiceClientCredentials Credentials { get; private set; }

        /// <summary>
        /// Gets or sets the preferred language for the response.
        /// </summary>
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// Gets or sets the retry timeout in seconds for Long Running Operations.
        /// Default value is 30.
        /// </summary>
        public int? LongRunningOperationRetryTimeout { get; set; }

        /// <summary>
        /// When set to true a unique x-ms-client-request-id value is generated and
        /// included in each request. Default is true.
        /// </summary>
        public bool? GenerateClientRequestId { get; set; }

        /// <summary>
        /// Gets the IDurationOperations.
        /// </summary>
        public virtual IDurationOperations Duration { get; private set; }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestDurationTestService(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestDurationTestService(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestDurationTestService(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestDurationTestService(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestDurationTestService(Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestDurationTestService(Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestDurationTestService(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestDurationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestDurationTestService(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// An optional partial-method to perform custom initialization.
        /// </summary>
        partial void CustomInitialize();

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            this.Duration = new DurationOperations(this);
            this.BaseUri = new System.Uri("https://localhost");
            this.AcceptLanguage = "en-US";
            this.LongRunningOperationRetryTimeout = 30;
            this.GenerateClientRequestId = true;
            SerializationSettings = new Newtonsoft.Json.JsonSerializerSettings
            {
                Formatting = Newtonsoft.Json.Formatting.Indented,
                DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
                NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
                ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
                ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
                Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
                    {
                        new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
                    }
            };
            DeserializationSettings = new Newtonsoft.Json.JsonSerializerSettings
            {
                DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
                NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
                ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
                ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
                Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
                    {
                        new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
                    }
            };
            // CustomInitialize runs before the CloudError converter is appended so
            // user-provided partial code can adjust the serializer settings first.
            CustomInitialize();
            DeserializationSettings.Converters.Add(new Microsoft.Rest.Azure.CloudErrorJsonConverter());
        }
    }
}
using System.Collections.Generic;
using System.Linq;
using GitVersion;
using GitVersion.Extensions;
using GitVersion.Model.Configuration;
using GitVersion.VersionCalculation;
using GitVersionCore.Tests.Helpers;
using NSubstitute;
using NUnit.Framework;
using Shouldly;

namespace GitVersionCore.Tests.VersionCalculation.Strategies
{
    /// <summary>
    /// Tests for <see cref="MergeMessageVersionStrategy"/>: verifies which merge-commit
    /// messages yield a base version (release-branch merges) and which are ignored.
    /// </summary>
    [TestFixture]
    public class MergeMessageBaseVersionStrategyTests : TestBase
    {
        [Test]
        public void ShouldNotAllowIncrementOfVersion()
        {
            // When a branch is merged in you want to start building stable packages of that version
            // So we shouldn't bump the version
            var mockCommit = GitToolsTestingExtensions.CreateMockCommit();
            mockCommit.Message.Returns("Merge branch 'release-0.1.5'");
            mockCommit.Parents.Returns(GetParents(true));

            var mockBranch = GitToolsTestingExtensions.CreateMockBranch("master", mockCommit);
            var branches = Substitute.For<IBranchCollection>();
            // Return a fresh enumerator on every call so the collection can be iterated repeatedly.
            branches.GetEnumerator().Returns(_ => ((IEnumerable<IBranch>)new[] { mockBranch }).GetEnumerator());

            var mockRepository = Substitute.For<IGitRepository>();
            mockRepository.Head.Returns(mockBranch);
            mockRepository.Branches.Returns(branches);
            mockRepository.Commits.Returns(mockBranch.Commits);

            var contextBuilder = new GitVersionContextBuilder().WithRepository(mockRepository);
            contextBuilder.Build();
            var strategy = contextBuilder.ServicesProvider.GetServiceForType<IVersionStrategy, MergeMessageVersionStrategy>();

            var baseVersion = strategy.GetVersions().Single();

            baseVersion.ShouldIncrement.ShouldBe(false);
        }

        [TestCase("Merge branch 'release-10.10.50'", true, "10.10.50")]
        [TestCase("Merge branch 'release-0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'Release-0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'Release/0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'releases-0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'Releases-0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'Releases/0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'release-4.6.6' into support-4.6", true, "4.6.6")]
        [TestCase("Merge branch 'release-0.1.5'\n\nRelates to: TicketId", true, "0.1.5")]
        [TestCase("Finish Release-0.12.0", true, "0.12.0")] //Support Syntevo SmartGit/Hg's Gitflow merge commit messages for finishing a 'Release' branch
        [TestCase("Merge branch 'Release-v0.2.0'", true, "0.2.0")]
        [TestCase("Merge branch 'Release-v2.2'", true, "2.2.0")]
        [TestCase("Merge remote-tracking branch 'origin/release/0.8.0' into develop/master", true, "0.8.0")]
        [TestCase("Merge remote-tracking branch 'refs/remotes/origin/release/2.0.0'", true, "2.0.0")]
        public void TakesVersionFromMergeOfReleaseBranch(string message, bool isMergeCommit, string expectedVersion)
        {
            var parents = GetParents(isMergeCommit);
            // The same message must parse regardless of trailing whitespace / line endings.
            AssertMergeMessage(message, expectedVersion, parents);
            AssertMergeMessage(message + " ", expectedVersion, parents);
            AssertMergeMessage(message + "\r ", expectedVersion, parents);
            AssertMergeMessage(message + "\r", expectedVersion, parents);
            AssertMergeMessage(message + "\r\n", expectedVersion, parents);
            AssertMergeMessage(message + "\r\n ", expectedVersion, parents);
            AssertMergeMessage(message + "\n", expectedVersion, parents);
            AssertMergeMessage(message + "\n ", expectedVersion, parents);
        }

        [TestCase("Merge branch 'hotfix-0.1.5'", false)]
        [TestCase("Merge branch 'develop' of github.com:Particular/NServiceBus into develop", true)]
        [TestCase("Merge branch '4.0.3'", true)]
        [TestCase("Merge branch 's'", true)]
        [TestCase("Merge tag '10.10.50'", true)]
        [TestCase("Merge branch 'hotfix-4.6.6' into support-4.6", true)]
        [TestCase("Merge branch 'hotfix-10.10.50'", true)]
        [TestCase("Merge branch 'Hotfix-10.10.50'", true)]
        [TestCase("Merge branch 'Hotfix/10.10.50'", true)]
        [TestCase("Merge branch 'hotfix-0.1.5'", true)]
        [TestCase("Merge branch 'hotfix-4.2.2' into support-4.2", true)]
        [TestCase("Merge branch 'somebranch' into release-3.0.0", true)]
        [TestCase("Merge branch 'hotfix-0.1.5'\n\nRelates to: TicketId", true)]
        [TestCase("Merge branch 'alpha-0.1.5'", true)]
        [TestCase("Merge pull request #95 from Particular/issue-94", false)]
        [TestCase("Merge pull request #95 in Particular/issue-94", true)]
        [TestCase("Merge pull request #95 in Particular/issue-94", false)]
        [TestCase("Merge pull request #64 from arledesma/feature-VS2013_3rd_party_test_framework_support", true)]
        [TestCase("Merge pull request #500 in FOO/bar from Particular/release-1.0.0 to develop)", true)]
        [TestCase("Merge pull request #500 in FOO/bar from feature/new-service to develop)", true)]
        [TestCase("Finish 0.14.1", true)] // Don't support Syntevo SmartGit/Hg's Gitflow merge commit messages for finishing a 'Hotfix' branch
        public void ShouldNotTakeVersionFromMergeOfNonReleaseBranch(string message, bool isMergeCommit)
        {
            var parents = GetParents(isMergeCommit);
            // Expected version is null: none of these messages should produce a base version.
            AssertMergeMessage(message, null, parents);
            AssertMergeMessage(message + " ", null, parents);
            AssertMergeMessage(message + "\r ", null, parents);
            AssertMergeMessage(message + "\r", null, parents);
            AssertMergeMessage(message + "\r\n", null, parents);
            AssertMergeMessage(message + "\r\n ", null, parents);
            AssertMergeMessage(message + "\n", null, parents);
            AssertMergeMessage(message + "\n ", null, parents);
        }

        [TestCase("Merge pull request #165 from Particular/release-1.0.0", true)]
        [TestCase("Merge pull request #165 in Particular/release-1.0.0", true)]
        [TestCase("Merge pull request #500 in FOO/bar from Particular/release-1.0.0 to develop)", true)]
        public void ShouldNotTakeVersionFromMergeOfReleaseBranchWithRemoteOtherThanOrigin(string message, bool isMergeCommit)
        {
            var parents = GetParents(isMergeCommit);
            AssertMergeMessage(message, null, parents);
            AssertMergeMessage(message + " ", null, parents);
            AssertMergeMessage(message + "\r ", null, parents);
            AssertMergeMessage(message + "\r", null, parents);
            AssertMergeMessage(message + "\r\n", null, parents);
            AssertMergeMessage(message + "\r\n ", null, parents);
            AssertMergeMessage(message + "\n", null, parents);
            AssertMergeMessage(message + "\n ", null, parents);
        }

        // NOTE(review): these verbatim strings were reconstructed from a
        // whitespace-collapsed source; confirm exact line breaks against history.
        [TestCase(@"Merge pull request #1 in FOO/bar from feature/ISSUE-1 to develop

* commit '38560a7eed06e8d3f3f1aaf091befcdf8bf50fea':
  Updated jQuery to v2.1.3")]
        [TestCase(@"Merge pull request #45 in BRIKKS/brikks from feature/NOX-68 to develop

* commit '38560a7eed06e8d3f3f1aaf091befcdf8bf50fea':
  Another commit message
  Commit message including a IP-number https://10.50.1.1
  A commit message")]
        [TestCase(@"Merge branch 'release/Sprint_2.0_Holdings_Computed_Balances'")]
        [TestCase(@"Merge branch 'develop' of http://10.0.6.3/gitblit/r/... into develop")]
        [TestCase(@"Merge branch 'master' of http://172.16.3.10:8082/r/asu_tk/p_sd")]
        [TestCase(@"Merge branch 'master' of http://212.248.89.56:8082/r/asu_tk/p_sd")]
        [TestCase(@"Merge branch 'DEMO' of http://10.10.10.121/gitlab/mtolland/orcid into DEMO")]
        public void ShouldNotTakeVersionFromUnrelatedMerge(string commitMessage)
        {
            var parents = GetParents(true);

            AssertMergeMessage(commitMessage, null, parents);
        }

        [TestCase("Merge branch 'support/0.2.0'", "support", "0.2.0")]
        [TestCase("Merge branch 'support/0.2.0'", null, null)]
        [TestCase("Merge branch 'release/2.0.0'", null, "2.0.0")]
        public void TakesVersionFromMergeOfConfiguredReleaseBranch(string message, string releaseBranch, string expectedVersion)
        {
            // A branch only counts as a release branch when configuration marks it as one.
            var config = new Config();
            if (releaseBranch != null) config.Branches[releaseBranch] = new BranchConfig { IsReleaseBranch = true };
            var parents = GetParents(true);

            AssertMergeMessage(message, expectedVersion, parents, config);
        }

        /// <summary>
        /// Runs the strategy against a mocked repository whose head commit carries
        /// <paramref name="message"/> and asserts the extracted version (or its absence).
        /// </summary>
        private static void AssertMergeMessage(string message, string expectedVersion, IEnumerable<ICommit> parents, Config config = null)
        {
            var commit = GitToolsTestingExtensions.CreateMockCommit();
            commit.Message.Returns(message);
            commit.Parents.Returns(parents);

            var mockBranch = GitToolsTestingExtensions.CreateMockBranch("master", commit, GitToolsTestingExtensions.CreateMockCommit());
            var mockRepository = Substitute.For<IGitRepository>();
            mockRepository.Head.Returns(mockBranch);
            mockRepository.Commits.Returns(mockBranch.Commits);

            var contextBuilder = new GitVersionContextBuilder()
                .WithConfig(config ?? new Config())
                .WithRepository(mockRepository);
            contextBuilder.Build();
            var strategy = contextBuilder.ServicesProvider.GetServiceForType<IVersionStrategy, MergeMessageVersionStrategy>();

            var baseVersion = strategy.GetVersions().SingleOrDefault();

            if (expectedVersion == null)
            {
                baseVersion.ShouldBe(null);
            }
            else
            {
                baseVersion.ShouldNotBeNull();
                baseVersion.SemanticVersion.ToString().ShouldBe(expectedVersion);
            }
        }

        /// <summary>
        /// Two (null) parents model a merge commit; a single parent models an ordinary commit.
        /// The strategy only inspects the parent count, so null entries suffice.
        /// </summary>
        private static List<ICommit> GetParents(bool isMergeCommit)
        {
            if (isMergeCommit)
            {
                return new List<ICommit>
                {
                    null,
                    null
                };
            }
            return new List<ICommit>
            {
                null
            };
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace FujiSan.WebAPI.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// </summary>
    public class ModelDescriptionGenerator
    {
        // Modify this to support more data annotation attributes.
        // Maps a validation attribute type to a function producing its help-page text.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Modify this to add more default documentations.
        // Friendly names shown for primitive/simple types on the help page.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so the documentation provider is looked up only on first use.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            // Model names are compared case-insensitively to detect duplicates.
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        // Cache of descriptions already generated, keyed by model name.
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        /// <summary>
        /// Returns the cached description for <paramref name="modelType"/> or builds one,
        /// dispatching on the kind of type: simple, enum, collection, dictionary,
        /// key/value pair, array, or complex type.
        /// </summary>
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            // Describe Nullable<T> as its underlying type T.
            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                if (genericArguments.Length == 1)
                {
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }
                if (genericArguments.Length == 2)
                {
                    // IDictionary<,> is checked before KeyValuePair<,> since it is more specific.
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            // Non-generic collection interfaces fall back to object elements.
            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        // Change this to provide different name for the member.
        // Precedence: [JsonProperty] name, then [DataMember] name (when the type is a
        // data contract), then the CLR member name.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        // Built-in friendly name first; otherwise ask the documentation provider
        // (may return null when neither source has documentation).
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }
            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        // Collects help-page annotations (Required, Range, ...) for a member,
        // sorted with Required first and the rest alphabetically.
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        // Describes a POCO: public instance properties and fields that pass
        // ShouldDisplayMember, with documentation and annotations per member.
        // The description is registered in GeneratedModels BEFORE recursing into
        // member types so self-referential models terminate.
        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };

            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);

            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        // Describes an enum as its public static fields (the enum members),
        // filtered through ShouldDisplayMember.
        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);

            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System
{
    using System.Collections;
    using System.Collections.Generic;
    using System.Globalization;
    using System.Diagnostics.Contracts;
    using System.Runtime.Serialization;
    using System.Runtime.CompilerServices;

    /// <summary>
    /// Base class for pluggable string comparison. Implements the non-generic
    /// IComparer/IEqualityComparer interfaces by delegating to the abstract
    /// string overloads, and exposes the well-known comparers (ordinal,
    /// invariant culture, current culture) through static properties.
    /// </summary>
    [Serializable]
    [System.Runtime.InteropServices.ComVisible(true)]
    public abstract class StringComparer : IComparer, IEqualityComparer, IComparer<string>, IEqualityComparer<string>
    {
        // Cached singletons for the culture-invariant and ordinal comparers.
        // CurrentCulture / CurrentCultureIgnoreCase are NOT cached because the
        // thread's current culture may change between property accesses.
        private static readonly StringComparer _invariantCulture = new CultureAwareComparer(CultureInfo.InvariantCulture, false);
        private static readonly StringComparer _invariantCultureIgnoreCase = new CultureAwareComparer(CultureInfo.InvariantCulture, true);
        private static readonly StringComparer _ordinal = new OrdinalComparer(false);
        private static readonly StringComparer _ordinalIgnoreCase = new OrdinalComparer(true);

        /// <summary>Case-sensitive comparer using the invariant culture's linguistic rules.</summary>
        public static StringComparer InvariantCulture
        {
            get
            {
                Contract.Ensures(Contract.Result<StringComparer>() != null);
                return _invariantCulture;
            }
        }

        /// <summary>Case-insensitive comparer using the invariant culture's linguistic rules.</summary>
        public static StringComparer InvariantCultureIgnoreCase
        {
            get
            {
                Contract.Ensures(Contract.Result<StringComparer>() != null);
                return _invariantCultureIgnoreCase;
            }
        }

        /// <summary>Case-sensitive comparer bound to the culture current at the time of the call.</summary>
        public static StringComparer CurrentCulture
        {
            get
            {
                Contract.Ensures(Contract.Result<StringComparer>() != null);
                return new CultureAwareComparer(CultureInfo.CurrentCulture, false);
            }
        }

        /// <summary>Case-insensitive comparer bound to the culture current at the time of the call.</summary>
        public static StringComparer CurrentCultureIgnoreCase
        {
            get
            {
                Contract.Ensures(Contract.Result<StringComparer>() != null);
                return new CultureAwareComparer(CultureInfo.CurrentCulture, true);
            }
        }

        /// <summary>Case-sensitive ordinal (binary) comparer.</summary>
        public static StringComparer Ordinal
        {
            get
            {
                Contract.Ensures(Contract.Result<StringComparer>() != null);
                return _ordinal;
            }
        }

        /// <summary>Case-insensitive ordinal comparer.</summary>
        public static StringComparer OrdinalIgnoreCase
        {
            get
            {
                Contract.Ensures(Contract.Result<StringComparer>() != null);
                return _ordinalIgnoreCase;
            }
        }

        /// <summary>Creates a comparer for the given culture, optionally ignoring case.</summary>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="culture"/> is null.</exception>
        public static StringComparer Create(CultureInfo culture, bool ignoreCase)
        {
            if (culture == null)
            {
                throw new ArgumentNullException("culture");
            }
            Contract.Ensures(Contract.Result<StringComparer>() != null);
            Contract.EndContractBlock();

            return new CultureAwareComparer(culture, ignoreCase);
        }

        /// <summary>
        /// Non-generic comparison. Strings are routed through the abstract
        /// Compare(String, String); anything else must implement IComparable.
        /// Nulls sort first.
        /// </summary>
        public int Compare(object x, object y)
        {
            if (x == y) return 0;
            if (x == null) return -1;
            if (y == null) return 1;

            String sa = x as String;
            if (sa != null)
            {
                String sb = y as String;
                if (sb != null)
                {
                    return Compare(sa, sb);
                }
            }

            IComparable ia = x as IComparable;
            if (ia != null)
            {
                return ia.CompareTo(y);
            }

            throw new ArgumentException(Environment.GetResourceString("Argument_ImplementIComparable"));
        }

        /// <summary>
        /// Non-generic equality. Strings go through the abstract Equals(String,
        /// String); other objects fall back to Object.Equals. Declared 'new' to
        /// hide the static Object.Equals(Object, Object).
        /// </summary>
        public new bool Equals(Object x, Object y)
        {
            if (x == y) return true;
            if (x == null || y == null) return false;

            String sa = x as String;
            if (sa != null)
            {
                String sb = y as String;
                if (sb != null)
                {
                    return Equals(sa, sb);
                }
            }
            return x.Equals(y);
        }

        /// <summary>
        /// Non-generic hashing. Strings use the comparer's hash; other objects
        /// use their own GetHashCode.
        /// </summary>
        public int GetHashCode(object obj)
        {
            if (obj == null)
            {
                throw new ArgumentNullException("obj");
            }
            Contract.EndContractBlock();

            string s = obj as string;
            if (s != null)
            {
                return GetHashCode(s);
            }
            return obj.GetHashCode();
        }

        public abstract int Compare(String x, String y);
        public abstract bool Equals(String x, String y);
        public abstract int GetHashCode(string obj);
    }

    /// <summary>
    /// Comparer applying a specific culture's linguistic rules via CompareInfo.
    /// </summary>
    [Serializable]
    internal sealed class CultureAwareComparer : StringComparer
#if FEATURE_RANDOMIZED_STRING_HASHING
        , IWellKnownStringEqualityComparer
#endif
    {
        private CompareInfo _compareInfo;
        private bool _ignoreCase;

        internal CultureAwareComparer(CultureInfo culture, bool ignoreCase)
        {
            _compareInfo = culture.CompareInfo;
            _ignoreCase = ignoreCase;
        }

        internal CultureAwareComparer(CompareInfo compareInfo, bool ignoreCase)
        {
            _compareInfo = compareInfo;
            _ignoreCase = ignoreCase;
        }

        public override int Compare(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return 0;
            if (x == null) return -1;
            if (y == null) return 1;
            return _compareInfo.Compare(x, y, _ignoreCase ? CompareOptions.IgnoreCase : CompareOptions.None);
        }

        public override bool Equals(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return true;
            if (x == null || y == null) return false;

            return (_compareInfo.Compare(x, y, _ignoreCase ? CompareOptions.IgnoreCase : CompareOptions.None) == 0);
        }

        public override int GetHashCode(string obj)
        {
            if (obj == null)
            {
                throw new ArgumentNullException("obj");
            }
            Contract.EndContractBlock();

            CompareOptions options = CompareOptions.None;
            if (_ignoreCase)
            {
                options |= CompareOptions.IgnoreCase;
            }

            return _compareInfo.GetHashCodeOfString(obj, options);
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj)
        {
            CultureAwareComparer comparer = obj as CultureAwareComparer;
            if (comparer == null)
            {
                return false;
            }
            return (this._ignoreCase == comparer._ignoreCase) && (this._compareInfo.Equals(comparer._compareInfo));
        }

        public override int GetHashCode()
        {
            int hashCode = _compareInfo.GetHashCode();
            return _ignoreCase ? (~hashCode) : hashCode;
        }

#if FEATURE_RANDOMIZED_STRING_HASHING
        IEqualityComparer IWellKnownStringEqualityComparer.GetRandomizedEqualityComparer()
        {
            return new CultureAwareRandomizedComparer(_compareInfo, _ignoreCase);
        }

        IEqualityComparer IWellKnownStringEqualityComparer.GetEqualityComparerForSerialization()
        {
            return this;
        }
#endif
    }

#if FEATURE_RANDOMIZED_STRING_HASHING
    /// <summary>
    /// Culture-aware comparer whose hash codes mix in per-instance entropy
    /// (hash-flooding mitigation). Serializes as the non-randomized comparer.
    /// </summary>
    internal sealed class CultureAwareRandomizedComparer : StringComparer, IWellKnownStringEqualityComparer
    {
        private CompareInfo _compareInfo;
        private bool _ignoreCase;
        private long _entropy;

        internal CultureAwareRandomizedComparer(CompareInfo compareInfo, bool ignoreCase)
        {
            _compareInfo = compareInfo;
            _ignoreCase = ignoreCase;
            _entropy = HashHelpers.GetEntropy();
        }

        public override int Compare(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return 0;
            if (x == null) return -1;
            if (y == null) return 1;
            return _compareInfo.Compare(x, y, _ignoreCase ? CompareOptions.IgnoreCase : CompareOptions.None);
        }

        public override bool Equals(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return true;
            if (x == null || y == null) return false;

            return (_compareInfo.Compare(x, y, _ignoreCase ? CompareOptions.IgnoreCase : CompareOptions.None) == 0);
        }

        public override int GetHashCode(string obj)
        {
            if (obj == null)
            {
                throw new ArgumentNullException("obj");
            }
            Contract.EndContractBlock();

            CompareOptions options = CompareOptions.None;
            if (_ignoreCase)
            {
                options |= CompareOptions.IgnoreCase;
            }

#if FEATURE_COREFX_GLOBALIZATION
            return _compareInfo.GetHashCodeOfStringCore(obj, options, true, _entropy);
#else
            return _compareInfo.GetHashCodeOfString(obj, options, true, _entropy);
#endif
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj)
        {
            CultureAwareRandomizedComparer comparer = obj as CultureAwareRandomizedComparer;
            if (comparer == null)
            {
                return false;
            }
            return (this._ignoreCase == comparer._ignoreCase) && (this._compareInfo.Equals(comparer._compareInfo)) && (this._entropy == comparer._entropy);
        }

        public override int GetHashCode()
        {
            int hashCode = _compareInfo.GetHashCode();
            return ((_ignoreCase ? (~hashCode) : hashCode) ^ ((int) (_entropy & 0x7FFFFFFF)));
        }

        IEqualityComparer IWellKnownStringEqualityComparer.GetRandomizedEqualityComparer()
        {
            return new CultureAwareRandomizedComparer(_compareInfo, _ignoreCase);
        }

        // We want to serialize the old comparer.
        IEqualityComparer IWellKnownStringEqualityComparer.GetEqualityComparerForSerialization()
        {
            return new CultureAwareComparer(_compareInfo, _ignoreCase);
        }
    }
#endif

    // Provides a more optimal implementation of ordinal comparison.
    [Serializable]
    internal sealed class OrdinalComparer : StringComparer
#if FEATURE_RANDOMIZED_STRING_HASHING
        , IWellKnownStringEqualityComparer
#endif
    {
        private bool _ignoreCase;

        internal OrdinalComparer(bool ignoreCase)
        {
            _ignoreCase = ignoreCase;
        }

        public override int Compare(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return 0;
            if (x == null) return -1;
            if (y == null) return 1;

            if (_ignoreCase)
            {
                return String.Compare(x, y, StringComparison.OrdinalIgnoreCase);
            }

            return String.CompareOrdinal(x, y);
        }

        public override bool Equals(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return true;
            if (x == null || y == null) return false;

            if (_ignoreCase)
            {
                // Ordinal equality requires equal lengths, so this is a cheap early out.
                if (x.Length != y.Length)
                {
                    return false;
                }
                return (String.Compare(x, y, StringComparison.OrdinalIgnoreCase) == 0);
            }
            return x.Equals(y);
        }

        public override int GetHashCode(string obj)
        {
            if (obj == null)
            {
                throw new ArgumentNullException("obj");
            }
            Contract.EndContractBlock();

            if (_ignoreCase)
            {
                return TextInfo.GetHashCodeOrdinalIgnoreCase(obj);
            }

            return obj.GetHashCode();
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj)
        {
            OrdinalComparer comparer = obj as OrdinalComparer;
            if (comparer == null)
            {
                return false;
            }
            return (this._ignoreCase == comparer._ignoreCase);
        }

        public override int GetHashCode()
        {
            string name = "OrdinalComparer";
            int hashCode = name.GetHashCode();
            return _ignoreCase ? (~hashCode) : hashCode;
        }

#if FEATURE_RANDOMIZED_STRING_HASHING
        IEqualityComparer IWellKnownStringEqualityComparer.GetRandomizedEqualityComparer()
        {
            return new OrdinalRandomizedComparer(_ignoreCase);
        }

        IEqualityComparer IWellKnownStringEqualityComparer.GetEqualityComparerForSerialization()
        {
            return this;
        }
#endif
    }

#if FEATURE_RANDOMIZED_STRING_HASHING
    /// <summary>
    /// Ordinal comparer with per-instance randomized hashing (hash-flooding
    /// mitigation). Serializes as the non-randomized OrdinalComparer.
    /// </summary>
    internal sealed class OrdinalRandomizedComparer : StringComparer, IWellKnownStringEqualityComparer
    {
        private bool _ignoreCase;
        private long _entropy;

        internal OrdinalRandomizedComparer(bool ignoreCase)
        {
            _ignoreCase = ignoreCase;
            _entropy = HashHelpers.GetEntropy();
        }

        public override int Compare(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return 0;
            if (x == null) return -1;
            if (y == null) return 1;

            if (_ignoreCase)
            {
                return String.Compare(x, y, StringComparison.OrdinalIgnoreCase);
            }

            return String.CompareOrdinal(x, y);
        }

        public override bool Equals(string x, string y)
        {
            if (Object.ReferenceEquals(x, y)) return true;
            if (x == null || y == null) return false;

            if (_ignoreCase)
            {
                if (x.Length != y.Length)
                {
                    return false;
                }
                return (String.Compare(x, y, StringComparison.OrdinalIgnoreCase) == 0);
            }
            return x.Equals(y);
        }

        [System.Security.SecuritySafeCritical]
        public override int GetHashCode(string obj)
        {
            if (obj == null)
            {
                throw new ArgumentNullException("obj");
            }
            Contract.EndContractBlock();

            if (_ignoreCase)
            {
#if FEATURE_COREFX_GLOBALIZATION
                return CultureInfo.InvariantCulture.CompareInfo.GetHashCodeOfStringCore(obj, CompareOptions.IgnoreCase, true, _entropy);
#else
                return TextInfo.GetHashCodeOrdinalIgnoreCase(obj, true, _entropy);
#endif
            }

            return String.InternalMarvin32HashString(obj, obj.Length, _entropy);
        }

        // Equals method for the comparer itself.
        public override bool Equals(Object obj)
        {
            OrdinalRandomizedComparer comparer = obj as OrdinalRandomizedComparer;
            if (comparer == null)
            {
                return false;
            }
            return (this._ignoreCase == comparer._ignoreCase) && (this._entropy == comparer._entropy);
        }

        public override int GetHashCode()
        {
            string name = "OrdinalRandomizedComparer";
            int hashCode = name.GetHashCode();
            return ((_ignoreCase ? (~hashCode) : hashCode) ^ ((int) (_entropy & 0x7FFFFFFF)));
        }

        IEqualityComparer IWellKnownStringEqualityComparer.GetRandomizedEqualityComparer()
        {
            return new OrdinalRandomizedComparer(_ignoreCase);
        }

        // We want to serialize the old comparer.
        IEqualityComparer IWellKnownStringEqualityComparer.GetEqualityComparerForSerialization()
        {
            return new OrdinalComparer(_ignoreCase);
        }
    }

    // This interface is implemented by string comparers in the framework that can opt into
    // randomized hashing behaviors.
    internal interface IWellKnownStringEqualityComparer
    {
        // Get an IEqualityComparer that has the same equality comparison rules as "this" but uses randomized hashing.
        IEqualityComparer GetRandomizedEqualityComparer();
        // Get an IEqualityComparer that can be serialized (e.g., it exists in older versions).
        IEqualityComparer GetEqualityComparerForSerialization();
    }
#endif
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: Protos.proto
//
// NOTE(review): this file is protoc output. To change these messages, edit
// Protos.proto and regenerate; do not hand-edit this file.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;

namespace Proto.Cluster {

  /// <summary>Holder for reflection information generated from Protos.proto</summary>
  public static partial class ProtosReflection {

    #region Descriptor
    /// <summary>File descriptor for Protos.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static ProtosReflection() {
      // Base64-encoded serialized FileDescriptorProto for Protos.proto.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CgxQcm90b3MucHJvdG8SB2NsdXN0ZXIaGFByb3RvLkFjdG9yL3Byb3Rvcy5w",
            "cm90byI2Cg1UYWtlT3duZXJzaGlwEhcKA3BpZBgBIAEoCzIKLmFjdG9yLlBJ",
            "RBIMCgRuYW1lGAIgASgJIjQKDEdyYWluUmVxdWVzdBIOCgZtZXRob2QYASAB",
            "KAkSFAoMbWVzc2FnZV9kYXRhGAIgASgMIiUKDUdyYWluUmVzcG9uc2USFAoM",
            "bWVzc2FnZV9kYXRhGAEgASgMIiEKEkdyYWluRXJyb3JSZXNwb25zZRILCgNl",
            "cnIYASABKAlCEKoCDVByb3RvLkNsdXN0ZXJiBnByb3RvMw=="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::Proto.ProtosReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::Proto.Cluster.TakeOwnership), global::Proto.Cluster.TakeOwnership.Parser, new[]{ "Pid", "Name" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Proto.Cluster.GrainRequest), global::Proto.Cluster.GrainRequest.Parser, new[]{ "Method", "MessageData" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Proto.Cluster.GrainResponse), global::Proto.Cluster.GrainResponse.Parser, new[]{ "MessageData" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Proto.Cluster.GrainErrorResponse), global::Proto.Cluster.GrainErrorResponse.Parser, new[]{ "Err" }, null, null, null)
          }));
    }
    #endregion

  }
  #region Messages
  /// <summary>
  /// Message asking a node to take ownership of the named virtual actor (pid + name).
  /// </summary>
  public sealed partial class TakeOwnership : pb::IMessage<TakeOwnership> {
    private static readonly pb::MessageParser<TakeOwnership> _parser = new pb::MessageParser<TakeOwnership>(() => new TakeOwnership());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<TakeOwnership> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Proto.Cluster.ProtosReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public TakeOwnership() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public TakeOwnership(TakeOwnership other) : this() {
      Pid = other.pid_ != null ? other.Pid.Clone() : null;
      name_ = other.name_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public TakeOwnership Clone() {
      return new TakeOwnership(this);
    }

    /// <summary>Field number for the "pid" field.</summary>
    public const int PidFieldNumber = 1;
    private global::Proto.PID pid_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Proto.PID Pid {
      get { return pid_; }
      set {
        pid_ = value;
      }
    }

    /// <summary>Field number for the "name" field.</summary>
    public const int NameFieldNumber = 2;
    private string name_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as TakeOwnership);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(TakeOwnership other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (!object.Equals(Pid, other.Pid)) return false;
      if (Name != other.Name) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (pid_ != null) hash ^= Pid.GetHashCode();
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (pid_ != null) {
        output.WriteRawTag(10);
        output.WriteMessage(Pid);
      }
      if (Name.Length != 0) {
        output.WriteRawTag(18);
        output.WriteString(Name);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (pid_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Pid);
      }
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(TakeOwnership other) {
      if (other == null) {
        return;
      }
      if (other.pid_ != null) {
        if (pid_ == null) {
          pid_ = new global::Proto.PID();
        }
        Pid.MergeFrom(other.Pid);
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            if (pid_ == null) {
              pid_ = new global::Proto.PID();
            }
            input.ReadMessage(pid_);
            break;
          }
          case 18: {
            Name = input.ReadString();
            break;
          }
        }
      }
    }

  }

  /// <summary>
  /// Request invoking a grain method; the payload is the serialized argument bytes.
  /// </summary>
  public sealed partial class GrainRequest : pb::IMessage<GrainRequest> {
    private static readonly pb::MessageParser<GrainRequest> _parser = new pb::MessageParser<GrainRequest>(() => new GrainRequest());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<GrainRequest> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Proto.Cluster.ProtosReflection.Descriptor.MessageTypes[1]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainRequest() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainRequest(GrainRequest other) : this() {
      method_ = other.method_;
      messageData_ = other.messageData_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainRequest Clone() {
      return new GrainRequest(this);
    }

    /// <summary>Field number for the "method" field.</summary>
    public const int MethodFieldNumber = 1;
    private string method_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Method {
      get { return method_; }
      set {
        method_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "message_data" field.</summary>
    public const int MessageDataFieldNumber = 2;
    private pb::ByteString messageData_ = pb::ByteString.Empty;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pb::ByteString MessageData {
      get { return messageData_; }
      set {
        messageData_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as GrainRequest);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(GrainRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Method != other.Method) return false;
      if (MessageData != other.MessageData) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Method.Length != 0) hash ^= Method.GetHashCode();
      if (MessageData.Length != 0) hash ^= MessageData.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Method.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Method);
      }
      if (MessageData.Length != 0) {
        output.WriteRawTag(18);
        output.WriteBytes(MessageData);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Method.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Method);
      }
      if (MessageData.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(MessageData);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(GrainRequest other) {
      if (other == null) {
        return;
      }
      if (other.Method.Length != 0) {
        Method = other.Method;
      }
      if (other.MessageData.Length != 0) {
        MessageData = other.MessageData;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Method = input.ReadString();
            break;
          }
          case 18: {
            MessageData = input.ReadBytes();
            break;
          }
        }
      }
    }

  }

  /// <summary>
  /// Successful grain reply; the payload is the serialized result bytes.
  /// </summary>
  public sealed partial class GrainResponse : pb::IMessage<GrainResponse> {
    private static readonly pb::MessageParser<GrainResponse> _parser = new pb::MessageParser<GrainResponse>(() => new GrainResponse());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<GrainResponse> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Proto.Cluster.ProtosReflection.Descriptor.MessageTypes[2]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainResponse() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainResponse(GrainResponse other) : this() {
      messageData_ = other.messageData_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainResponse Clone() {
      return new GrainResponse(this);
    }

    /// <summary>Field number for the "message_data" field.</summary>
    public const int MessageDataFieldNumber = 1;
    private pb::ByteString messageData_ = pb::ByteString.Empty;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pb::ByteString MessageData {
      get { return messageData_; }
      set {
        messageData_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as GrainResponse);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(GrainResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (MessageData != other.MessageData) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (MessageData.Length != 0) hash ^= MessageData.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (MessageData.Length != 0) {
        output.WriteRawTag(10);
        output.WriteBytes(MessageData);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (MessageData.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(MessageData);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(GrainResponse other) {
      if (other == null) {
        return;
      }
      if (other.MessageData.Length != 0) {
        MessageData = other.MessageData;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            MessageData = input.ReadBytes();
            break;
          }
        }
      }
    }

  }

  /// <summary>
  /// Failed grain reply carrying an error description string.
  /// </summary>
  public sealed partial class GrainErrorResponse : pb::IMessage<GrainErrorResponse> {
    private static readonly pb::MessageParser<GrainErrorResponse> _parser = new pb::MessageParser<GrainErrorResponse>(() => new GrainErrorResponse());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<GrainErrorResponse> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Proto.Cluster.ProtosReflection.Descriptor.MessageTypes[3]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainErrorResponse() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainErrorResponse(GrainErrorResponse other) : this() {
      err_ = other.err_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GrainErrorResponse Clone() {
      return new GrainErrorResponse(this);
    }

    /// <summary>Field number for the "err" field.</summary>
    public const int ErrFieldNumber = 1;
    private string err_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Err {
      get { return err_; }
      set {
        err_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as GrainErrorResponse);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(GrainErrorResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Err != other.Err) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Err.Length != 0) hash ^= Err.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Err.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Err);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Err.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Err);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(GrainErrorResponse other) {
      if (other == null) {
        return;
      }
      if (other.Err.Length != 0) {
        Err = other.Err;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Err = input.ReadString();
            break;
          }
        }
      }
    }

  }

  #endregion

}

#endregion Designer generated code
/* THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
   EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
   WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.

   This is sample code and is freely distributable. */
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

namespace ImageManipulation
{
    /// <summary>
    /// Base class for colour quantizers. Converts an arbitrary image into an
    /// 8bpp indexed bitmap using a palette computed by the derived class.
    /// </summary>
    public unsafe abstract class Quantizer
    {
        /// <summary>
        /// Construct the quantizer
        /// </summary>
        /// <param name="singlePass">If true, the quantization only needs to loop through the source pixels once</param>
        /// <remarks>
        /// If you construct this class with a true value for singlePass, then the code will, when quantizing your image,
        /// only call the 'QuantizeImage' function. If two passes are required, the code will call 'InitialQuantizeImage'
        /// and then 'QuantizeImage'.
        /// </remarks>
        protected Quantizer(bool singlePass)
        {
            _singlePass = singlePass;
        }

        /// <summary>
        /// Quantize an image and return the resulting output bitmap
        /// </summary>
        /// <param name="source">The image to quantize</param>
        /// <returns>A quantized 8bpp indexed version of the image; the caller owns (and should dispose) it.</returns>
        public Bitmap Quantize(Image source)
        {
            // Get the size of the source image
            int height = source.Height;
            int width = source.Width;

            // And construct a rectangle from these dimensions
            var bounds = new Rectangle(0, 0, width, height);

            // Construct the 8bpp output bitmap up front; it is returned to the caller.
            var output = new Bitmap(width, height, PixelFormat.Format8bppIndexed);

            // BUGFIX: the 32bpp working copy was previously never disposed, leaking
            // a GDI+ bitmap handle on every call. The 'using' guarantees disposal.
            using (var copy = new Bitmap(width, height, PixelFormat.Format32bppArgb))
            {
                // Now lock the bitmap into memory
                using (Graphics g = Graphics.FromImage(copy))
                {
                    g.PageUnit = GraphicsUnit.Pixel;

                    // Draw the source image onto the copy bitmap,
                    // which will effect a widening as appropriate.
                    g.DrawImageUnscaled(source, bounds);
                }

                // Define a pointer to the bitmap data
                BitmapData sourceData = null;

                try
                {
                    // Get the source image bits and lock into memory
                    sourceData = copy.LockBits(bounds, ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);

                    // Call the FirstPass function if not a single pass algorithm.
                    // For something like an octree quantizer, this will run through
                    // all image pixels, build a data structure, and create a palette.
                    if (!_singlePass)
                        FirstPass(sourceData, width, height);

                    // Then set the color palette on the output bitmap. I'm passing in the current palette
                    // as there's no way to construct a new, empty palette.
                    output.Palette = GetPalette(output.Palette);

                    // Then call the second pass which actually does the conversion
                    SecondPass(sourceData, output, width, height, bounds);
                }
                finally
                {
                    // Ensure that the bits are unlocked
                    copy.UnlockBits(sourceData);
                }
            }

            // Last but not least, return the output bitmap
            return output;
        }

        /// <summary>
        /// Execute the first pass through the pixels in the image
        /// </summary>
        /// <param name="sourceData">The source data</param>
        /// <param name="width">The width in pixels of the image</param>
        /// <param name="height">The height in pixels of the image</param>
        protected virtual void FirstPass(BitmapData sourceData, int width, int height)
        {
            // Define the source data pointers. The source row is a byte to
            // keep addition of the stride value easier (as this is in bytes)
            var pSourceRow = (byte*)sourceData.Scan0.ToPointer();
            Int32* pSourcePixel;

            // Loop through each row
            for (int row = 0; row < height; row++)
            {
                // Set the source pixel to the first pixel in this row
                pSourcePixel = (Int32*)pSourceRow;

                // And loop through each column
                for (int col = 0; col < width; col++, pSourcePixel++)
                    // Now I have the pixel, call the FirstPassQuantize function...
                    InitialQuantizePixel((Color32*)pSourcePixel);

                // Add the stride to the source row
                pSourceRow += sourceData.Stride;
            }
        }

        /// <summary>
        /// Execute a second pass through the bitmap
        /// </summary>
        /// <param name="sourceData">The source bitmap, locked into memory</param>
        /// <param name="output">The output bitmap</param>
        /// <param name="width">The width in pixels of the image</param>
        /// <param name="height">The height in pixels of the image</param>
        /// <param name="bounds">The bounding rectangle</param>
        protected virtual void SecondPass(BitmapData sourceData, Bitmap output, int width, int height, Rectangle bounds)
        {
            BitmapData outputData = null;

            try
            {
                // Lock the output bitmap into memory
                outputData = output.LockBits(bounds, ImageLockMode.WriteOnly, PixelFormat.Format8bppIndexed);

                // Define the source data pointers. The source row is a byte to
                // keep addition of the stride value easier (as this is in bytes)
                var pSourceRow = (byte*)sourceData.Scan0.ToPointer();
                var pSourcePixel = (Int32*)pSourceRow;
                Int32* pPreviousPixel = pSourcePixel;

                // Now define the destination data pointers
                var pDestinationRow = (byte*)outputData.Scan0.ToPointer();
                byte* pDestinationPixel = pDestinationRow;

                // And convert the first pixel, so that I have values going into the loop
                byte pixelValue = QuantizePixel((Color32*)pSourcePixel);

                // Assign the value of the first pixel
                *pDestinationPixel = pixelValue;

                // Loop through each row
                for (int row = 0; row < height; row++)
                {
                    // Set the source pixel to the first pixel in this row
                    pSourcePixel = (Int32*)pSourceRow;

                    // And set the destination pixel pointer to the first pixel in the row
                    pDestinationPixel = pDestinationRow;

                    // Loop through each pixel on this scan line
                    for (int col = 0; col < width; col++, pSourcePixel++, pDestinationPixel++)
                    {
                        // Check if this is the same as the last pixel. If so use that value
                        // rather than calculating it again. This is an inexpensive optimisation.
                        if (*pPreviousPixel != *pSourcePixel)
                        {
                            // Quantize the pixel
                            pixelValue = QuantizePixel((Color32*)pSourcePixel);

                            // And setup the previous pointer
                            pPreviousPixel = pSourcePixel;
                        }

                        // And set the pixel in the output
                        *pDestinationPixel = pixelValue;
                    }

                    // Add the stride to the source row
                    pSourceRow += sourceData.Stride;

                    // And to the destination row
                    pDestinationRow += outputData.Stride;
                }
            }
            finally
            {
                // Ensure that I unlock the output bits
                output.UnlockBits(outputData);
            }
        }

        /// <summary>
        /// Override this to process the pixel in the first pass of the algorithm
        /// </summary>
        /// <param name="pixel">The pixel to quantize</param>
        /// <remarks>
        /// This function need only be overridden if your quantize algorithm needs two passes,
        /// such as an Octree quantizer.
        /// </remarks>
        protected virtual void InitialQuantizePixel(Color32* pixel)
        {
        }

        /// <summary>
        /// Override this to process the pixel in the second pass of the algorithm
        /// </summary>
        /// <param name="pixel">The pixel to quantize</param>
        /// <returns>The quantized value</returns>
        protected abstract byte QuantizePixel(Color32* pixel);

        /// <summary>
        /// Retrieve the palette for the quantized image
        /// </summary>
        /// <param name="original">Any old palette, this is overrwritten</param>
        /// <returns>The new color palette</returns>
        protected abstract ColorPalette GetPalette(ColorPalette original);

        /// <summary>
        /// Flag used to indicate whether a single pass or two passes are needed for quantization.
        /// </summary>
        readonly bool _singlePass;

        /// <summary>
        /// Struct that defines a 32 bpp colour
        /// </summary>
        /// <remarks>
        /// This struct is used to read data from a 32 bits per pixel image
        /// in memory, and is ordered in this manner as this is the way that
        /// the data is layed out in memory
        /// </remarks>
        [StructLayout(LayoutKind.Explicit)]
        public struct Color32
        {
            /// <summary>
            /// Holds the blue component of the colour
            /// </summary>
            [FieldOffset(0)]
            public byte Blue;

            /// <summary>
            /// Holds the green component of the colour
            /// </summary>
            [FieldOffset(1)]
            public byte Green;

            /// <summary>
            /// Holds the red component of the colour
            /// </summary>
            [FieldOffset(2)]
            public byte Red;

            /// <summary>
            /// Holds the alpha component of the colour
            /// </summary>
            [FieldOffset(3)]
            public byte Alpha;

            /// <summary>
            /// Permits the color32 to be treated as an int32
            /// </summary>
            [FieldOffset(0)]
            public int ARGB;

            /// <summary>
            /// Return the color for this Color32 object
            /// </summary>
            public Color Color
            {
                get { return Color.FromArgb(Alpha, Red, Green, Blue); }
            }
        }
    }
}
#region Apache License, Version 2.0
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
//
#endregion

using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using System.IO;
using NPanday.ProjectImporter.Digest;
using NPanday.ProjectImporter.Digest.Model;
using NPanday.Utils;
using NPanday.Model.Pom;
using NPanday.Artifact;
using System.Reflection;
using NPanday.ProjectImporter.Converter;
using NPanday.ProjectImporter.Validator;

/// Author: Leopoldo Lee Agdeppa III

namespace NPanday.ProjectImporter.Converter.Algorithms
{
    /// <summary>
    /// Converts an ordinary (non-web, non-test-specific) Visual Studio project
    /// digest into an NPanday Maven POM model.
    /// </summary>
    public class NormalPomConverter : AbstractPomConverter
    {
        /// <summary>
        /// Creates a converter for the given project digest.
        /// </summary>
        /// <param name="projectDigest">Digest of the parsed Visual Studio project.</param>
        /// <param name="mainPomFile">Path of the main (parent) pom.xml.</param>
        /// <param name="parent">Parent POM model, or null for a standalone POM.</param>
        /// <param name="groupId">Maven groupId for the generated artifact.</param>
        public NormalPomConverter(ProjectDigest projectDigest, string mainPomFile, NPanday.Model.Pom.Model parent, string groupId)
            : base(projectDigest, mainPomFile, parent, groupId)
        {
        }

        /// <summary>
        /// Builds the POM model for this project: packaging, SCM info, the NPanday
        /// compile plugin (with language/define/keyfile configuration), optional
        /// MSBuild plugin for XAML/XBAP projects, test plugin for unit-test
        /// projects, and all dependency lists.
        /// </summary>
        /// <param name="writePom">When true, the model is written to pom.xml in the project directory.</param>
        /// <param name="scmTag">Optional SVN URL used to populate the POM's SCM section.</param>
        public override void ConvertProjectToPomModel(bool writePom, string scmTag)
        {
            // Map the VS output type (library/exe/winexe/...) to an NPanday packaging;
            // fall back to dotnet-library when the type is absent or unknown.
            string packaging = "dotnet-library";
            if (!string.IsNullOrEmpty(projectDigest.OutputType))
            {
                string type = projectDigest.OutputType.ToLower();
                if (npandayTypeMap.ContainsKey(type))
                    packaging = npandayTypeMap[type];
            }

            GenerateHeader(packaging);

            Model.build.sourceDirectory = GetSourceDir();

            // Add SCM tag only on a standalone POM; a child POM inherits SCM from its parent.
            // FIX: use string.IsNullOrEmpty instead of the manual null/empty pair.
            if (!string.IsNullOrEmpty(scmTag) && Model.parent == null)
            {
                Scm scmHolder = new Scm();
                scmHolder.connection = string.Format("scm:svn:{0}", scmTag);
                scmHolder.developerConnection = string.Format("scm:svn:{0}", scmTag);
                scmHolder.url = scmTag;
                Model.scm = scmHolder;
            }

            // Add NPanday compile plugin
            Plugin compilePlugin = AddPlugin(
                "org.apache.npanday.plugins",
                "maven-compile-plugin",
                null,
                true
            );

            if (!string.IsNullOrEmpty(projectDigest.TargetFramework))
                AddPluginConfiguration(compilePlugin, "frameworkVersion", projectDigest.TargetFramework);

            // FIX: compare with the literal as the receiver so a null Language
            // (possible per the IsNullOrEmpty guard in FilterSdkReferences) no
            // longer throws NullReferenceException — it simply takes the C# branch.
            if ("vb".Equals(projectDigest.Language, StringComparison.OrdinalIgnoreCase))
            {
                AddPluginConfiguration(compilePlugin, "language", "VB");
                AddPluginConfiguration(compilePlugin, "rootNamespace", projectDigest.RootNamespace);
                string define = GetVBDefineConfigurationValue();
                if (!string.IsNullOrEmpty(define))
                {
                    AddPluginConfiguration(compilePlugin, "define", define);
                }
            }
            else
            {
                if (!string.IsNullOrEmpty(projectDigest.DefineConstants))
                    AddPluginConfiguration(compilePlugin, "define", projectDigest.DefineConstants);
            }

            // "Sub Main" is the VB default entry point and needs no explicit configuration.
            if (projectDigest.StartupObject != "Sub Main")
                AddPluginConfiguration(compilePlugin, "main", projectDigest.StartupObject);

            AddPluginConfiguration(compilePlugin, "doc", projectDigest.DocumentationFile);
            //AddPluginConfiguration(compilePlugin, "noconfig", "true");
            AddPluginConfiguration(compilePlugin, "imports", "import", projectDigest.GlobalNamespaceImports);

            // add include list for the compiling
            DirectoryInfo baseDir = new DirectoryInfo(Path.GetDirectoryName(projectDigest.FullFileName));
            List<string> compiles = new List<string>();
            Plugin msBuildPlugin = null;
            foreach (Compile compile in projectDigest.Compiles)
            {
                string compilesFile = PomHelperUtility.GetRelativePath(baseDir, new FileInfo(compile.IncludeFullPath));
                compiles.Add(compilesFile);

                // if it's a xaml file, include the auto-generated file in object\Debug\
                if (compilesFile.EndsWith(".xaml.cs") || compilesFile.EndsWith(".xaml.vb"))
                {
                    // add the MsBuild plugin to auto generate the .g.cs/g.vb files
                    string platform = null;

                    // ignore AnyCPU or unknown values
                    if (projectDigest.Platform == "x64" || projectDigest.Platform == "x86" || projectDigest.Platform == "Itanium")
                        platform = projectDigest.Platform;

                    string configuration = projectDigest.Configuration;

                    // the plugin only needs to be added once, on the first XAML file found
                    if (msBuildPlugin == null)
                    {
                        msBuildPlugin = AddPlugin("org.apache.npanday.plugins", "NPanday.Plugin.Msbuild.JavaBinding", null, false);
                        AddPluginExecution(msBuildPlugin, "compile", "validate");
                        if (platform != null)
                            AddPluginConfiguration(msBuildPlugin, "platform", platform);
                        if (configuration != null && configuration != "Debug")
                            AddPluginConfiguration(msBuildPlugin, "configuration", configuration);
                    }
                }
            }

            // Add XBAP artifacts if relevant
            if (projectDigest.HostInBrowser)
            {
                if (msBuildPlugin == null)
                    msBuildPlugin = AddPlugin("org.apache.npanday.plugins", "NPanday.Plugin.Msbuild.JavaBinding", null, false);
                AddPluginConfiguration(msBuildPlugin, "attachXBAP", "true");
            }

            AddPluginConfiguration(compilePlugin, "includeSources", "includeSource", compiles.ToArray());

            // Configure assembly signing; the keyfile path is always stored relative to the project.
            if ("true".Equals(projectDigest.SignAssembly, StringComparison.OrdinalIgnoreCase)
                && !string.IsNullOrEmpty(projectDigest.AssemblyOriginatorKeyFile))
            {
                if (Path.IsPathRooted(projectDigest.AssemblyOriginatorKeyFile))
                {
                    AddPluginConfiguration(compilePlugin, "keyfile", PomHelperUtility.GetRelativePath(baseDir, new FileInfo(projectDigest.AssemblyOriginatorKeyFile)));
                }
                else
                {
                    AddPluginConfiguration(compilePlugin, "keyfile", PomHelperUtility.GetRelativePath(baseDir, new FileInfo(baseDir.FullName + @"\" + projectDigest.AssemblyOriginatorKeyFile)));
                }
            }

            // add integration test plugin if project is a test
            if (projectDigest.UnitTest)
            {
                Plugin testPlugin = AddPlugin(
                    "org.apache.npanday.plugins",
                    "maven-test-plugin",
                    null,
                    false
                );
                AddPluginConfiguration(testPlugin, "integrationTest", "true");

                // for running .net framework 4.0+ unit tests add new parameter in order to tell NUnit which runtime to
                // use. If there is a way to get this parameter from maven-compile-plugin use it
                if (projectDigest.TargetFramework == "4.5.1" ||
                    projectDigest.TargetFramework == "4.5" ||
                    projectDigest.TargetFramework == "4.0")
                {
                    AddPluginConfiguration(testPlugin, "executionFrameworkVersion", projectDigest.TargetFramework);
                }
            }

            // Add Com Reference Dependencies
            if (projectDigest.ComReferenceList.Length > 0)
            {
                AddComReferenceDependency();
            }

            // Add Project WebReferences
            AddWebReferences();

            // Add EmbeddedResources maven-resgen-plugin
            AddEmbeddedResources();

            // Add Project Inter-dependencies
            AddInterProjectDependenciesToList();

            // filter the rsp included assemblies
            FilterReferences();

            // Add Project Reference Dependencies
            AddProjectReferenceDependenciesToList(true);

            if (writePom)
            {
                PomHelperUtility.WriteModelToPom(new FileInfo(Path.Combine(projectDigest.FullDirectoryName, "pom.xml")), Model);
            }
        }

        /// <summary>
        /// Removes from the digest's reference list every reference that is either
        /// named in <paramref name="sdkReferences"/> or already supplied by the
        /// compiler's default .rsp file for the project's language.
        /// </summary>
        /// <param name="sdkReferences">Reference names to exclude (e.g. SDK-provided assemblies).</param>
        protected void FilterSdkReferences(List<string> sdkReferences)
        {
            List<Reference> list = new List<Reference>();
            foreach (Reference reference in projectDigest.References)
            {
                if (!sdkReferences.Contains(reference.Name))
                {
                    if (!string.IsNullOrEmpty(projectDigest.Language))
                    {
                        // drop references already pulled in by the language's rsp file
                        if (!rspUtil.IsRspIncluded(reference.Name, projectDigest.Language))
                        {
                            list.Add(reference);
                        }
                    }
                    else
                    {
                        list.Add(reference);
                    }
                }
            }
            projectDigest.References = list.ToArray();
        }

        /// <summary>
        /// Filters only the rsp-included assemblies (no additional SDK exclusions).
        /// </summary>
        protected void FilterReferences()
        {
            FilterSdkReferences(new List<string>());
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): any manual change here will be lost on regeneration; fix the
// generator or the proto instead.

using gax = Google.Api.Gax;
using gcsv = Google.Cloud.SecurityCenter.V1;
using sys = System;

namespace Google.Cloud.SecurityCenter.V1
{
    /// <summary>Resource name for the <c>ExternalSystem</c> resource.</summary>
    public sealed partial class ExternalSystemName : gax::IResourceName, sys::IEquatable<ExternalSystemName>
    {
        /// <summary>The possible contents of <see cref="ExternalSystemName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>
            /// A resource name with pattern
            /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
            /// .
            /// </summary>
            OrganizationSourceFindingExternalsystem = 1,

            /// <summary>
            /// A resource name with pattern
            /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
            /// </summary>
            FolderSourceFindingExternalsystem = 2,

            /// <summary>
            /// A resource name with pattern
            /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
            /// </summary>
            ProjectSourceFindingExternalsystem = 3,
        }

        private static gax::PathTemplate s_organizationSourceFindingExternalsystem = new gax::PathTemplate("organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}");

        private static gax::PathTemplate s_folderSourceFindingExternalsystem = new gax::PathTemplate("folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}");

        private static gax::PathTemplate s_projectSourceFindingExternalsystem = new gax::PathTemplate("projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}");

        /// <summary>Creates a <see cref="ExternalSystemName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="ExternalSystemName"/> containing the provided
        /// <paramref name="unparsedResourceName"/>.
        /// </returns>
        public static ExternalSystemName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new ExternalSystemName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="ExternalSystemName"/> with the pattern
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="ExternalSystemName"/> constructed from the provided ids.</returns>
        public static ExternalSystemName FromOrganizationSourceFindingExternalsystem(string organizationId, string sourceId, string findingId, string externalsystemId) =>
            new ExternalSystemName(ResourceNameType.OrganizationSourceFindingExternalsystem, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), sourceId: gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), findingId: gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), externalsystemId: gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)));

        /// <summary>
        /// Creates a <see cref="ExternalSystemName"/> with the pattern
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="folderId">The <c>Folder</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="ExternalSystemName"/> constructed from the provided ids.</returns>
        public static ExternalSystemName FromFolderSourceFindingExternalsystem(string folderId, string sourceId, string findingId, string externalsystemId) =>
            new ExternalSystemName(ResourceNameType.FolderSourceFindingExternalsystem, folderId: gax::GaxPreconditions.CheckNotNullOrEmpty(folderId, nameof(folderId)), sourceId: gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), findingId: gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), externalsystemId: gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)));

        /// <summary>
        /// Creates a <see cref="ExternalSystemName"/> with the pattern
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="ExternalSystemName"/> constructed from the provided ids.</returns>
        public static ExternalSystemName FromProjectSourceFindingExternalsystem(string projectId, string sourceId, string findingId, string externalsystemId) =>
            new ExternalSystemName(ResourceNameType.ProjectSourceFindingExternalsystem, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), sourceId: gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), findingId: gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), externalsystemId: gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </returns>
        // The organization-rooted pattern is the default format for this resource.
        public static string Format(string organizationId, string sourceId, string findingId, string externalsystemId) =>
            FormatOrganizationSourceFindingExternalsystem(organizationId, sourceId, findingId, externalsystemId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </returns>
        public static string FormatOrganizationSourceFindingExternalsystem(string organizationId, string sourceId, string findingId, string externalsystemId) =>
            s_organizationSourceFindingExternalsystem.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="folderId">The <c>Folder</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </returns>
        public static string FormatFolderSourceFindingExternalsystem(string folderId, string sourceId, string findingId, string externalsystemId) =>
            s_folderSourceFindingExternalsystem.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(folderId, nameof(folderId)), gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="ExternalSystemName"/> with pattern
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>.
        /// </returns>
        public static string FormatProjectSourceFindingExternalsystem(string projectId, string sourceId, string findingId, string externalsystemId) =>
            s_projectSourceFindingExternalsystem.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)));

        /// <summary>
        /// Parses the given resource name string into a new <see cref="ExternalSystemName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// </list>
        /// </remarks>
        /// <param name="externalSystemName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <returns>The parsed <see cref="ExternalSystemName"/> if successful.</returns>
        public static ExternalSystemName Parse(string externalSystemName) => Parse(externalSystemName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="ExternalSystemName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="externalSystemName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="ExternalSystemName"/> if successful.</returns>
        public static ExternalSystemName Parse(string externalSystemName, bool allowUnparsed) =>
            TryParse(externalSystemName, allowUnparsed, out ExternalSystemName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="ExternalSystemName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// </list>
        /// </remarks>
        /// <param name="externalSystemName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="ExternalSystemName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string externalSystemName, out ExternalSystemName result) =>
            TryParse(externalSystemName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="ExternalSystemName"/> instance;
        /// optionally allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description>
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>folders/{folder}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// <item>
        /// <description>
        /// <c>projects/{project}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </description>
        /// </item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="externalSystemName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="ExternalSystemName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string externalSystemName, bool allowUnparsed, out ExternalSystemName result)
        {
            gax::GaxPreconditions.CheckNotNull(externalSystemName, nameof(externalSystemName));
            gax::TemplatedResourceName resourceName;
            // Patterns are tried in declaration order; the first template that matches wins.
            if (s_organizationSourceFindingExternalsystem.TryParseName(externalSystemName, out resourceName))
            {
                result = FromOrganizationSourceFindingExternalsystem(resourceName[0], resourceName[1], resourceName[2], resourceName[3]);
                return true;
            }
            if (s_folderSourceFindingExternalsystem.TryParseName(externalSystemName, out resourceName))
            {
                result = FromFolderSourceFindingExternalsystem(resourceName[0], resourceName[1], resourceName[2], resourceName[3]);
                return true;
            }
            if (s_projectSourceFindingExternalsystem.TryParseName(externalSystemName, out resourceName))
            {
                result = FromProjectSourceFindingExternalsystem(resourceName[0], resourceName[1], resourceName[2], resourceName[3]);
                return true;
            }
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(externalSystemName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        private ExternalSystemName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string externalsystemId = null, string findingId = null, string folderId = null, string organizationId = null, string projectId = null, string sourceId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            ExternalsystemId = externalsystemId;
            FindingId = findingId;
            FolderId = folderId;
            OrganizationId = organizationId;
            ProjectId = projectId;
            SourceId = sourceId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="ExternalSystemName"/> class from the component parts of pattern
        /// <c>organizations/{organization}/sources/{source}/findings/{finding}/externalSystems/{externalsystem}</c>
        /// </summary>
        /// <param name="organizationId">The <c>Organization</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="sourceId">The <c>Source</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="findingId">The <c>Finding</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="externalsystemId">The <c>Externalsystem</c> ID. Must not be <c>null</c> or empty.</param>
        public ExternalSystemName(string organizationId, string sourceId, string findingId, string externalsystemId) : this(ResourceNameType.OrganizationSourceFindingExternalsystem, organizationId: gax::GaxPreconditions.CheckNotNullOrEmpty(organizationId, nameof(organizationId)), sourceId: gax::GaxPreconditions.CheckNotNullOrEmpty(sourceId, nameof(sourceId)), findingId: gax::GaxPreconditions.CheckNotNullOrEmpty(findingId, nameof(findingId)), externalsystemId: gax::GaxPreconditions.CheckNotNullOrEmpty(externalsystemId, nameof(externalsystemId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Externalsystem</c> ID. May be <c>null</c>, depending on which resource name is contained by this
        /// instance.
        /// </summary>
        public string ExternalsystemId { get; }

        /// <summary>
        /// The <c>Finding</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
        /// </summary>
        public string FindingId { get; }

        /// <summary>
        /// The <c>Folder</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
        /// </summary>
        public string FolderId { get; }

        /// <summary>
        /// The <c>Organization</c> ID. May be <c>null</c>, depending on which resource name is contained by this
        /// instance.
        /// </summary>
        public string OrganizationId { get; }

        /// <summary>
        /// The <c>Project</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
        /// </summary>
        public string ProjectId { get; }

        /// <summary>
        /// The <c>Source</c> ID. May be <c>null</c>, depending on which resource name is contained by this instance.
        /// </summary>
        public string SourceId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.OrganizationSourceFindingExternalsystem: return s_organizationSourceFindingExternalsystem.Expand(OrganizationId, SourceId, FindingId, ExternalsystemId);
                case ResourceNameType.FolderSourceFindingExternalsystem: return s_folderSourceFindingExternalsystem.Expand(FolderId, SourceId, FindingId, ExternalsystemId);
                case ResourceNameType.ProjectSourceFindingExternalsystem: return s_projectSourceFindingExternalsystem.Expand(ProjectId, SourceId, FindingId, ExternalsystemId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        // Equality and hashing below are both delegated to the canonical string
        // form produced by ToString(), so two names are equal iff they render to
        // the same resource-name string.

        /// <summary>Returns a hash code for this resource name.</summary>
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as ExternalSystemName);

        /// <inheritdoc/>
        public bool Equals(ExternalSystemName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(ExternalSystemName a, ExternalSystemName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(ExternalSystemName a, ExternalSystemName b) => !(a == b);
    }

    public partial class ExternalSystem
    {
        /// <summary>
        /// <see cref="gcsv::ExternalSystemName"/>-typed view over the <see cref="Name"/> resource name property.
        /// </summary>
        public gcsv::ExternalSystemName ExternalSystemName
        {
            get => string.IsNullOrEmpty(Name) ? null : gcsv::ExternalSystemName.Parse(Name, allowUnparsed: true);
            set => Name = value?.ToString() ?? "";
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using RabbitMQ.Client;
using RabbitMQ.Client.Events;
using Tossit.Core;
using Xunit;

namespace Tossit.RabbitMQ.Tests
{
    /// <summary>
    /// Unit tests for <see cref="RabbitMQMessageQueue"/>. All RabbitMQ collaborators
    /// (connection wrapper, channel, consumer) are Moq fakes; no broker is contacted.
    /// </summary>
    public class RabbitMQMessageQueueTests
    {
        private readonly Mock<IChannelFactory> _channelFactory;
        private readonly Mock<EventingBasicConsumer> _eventingBasicConsumer;
        private readonly Mock<IEventingBasicConsumerImpl> _eventingBasicConsumerImpl;
        private readonly Mock<IConnectionWrapper> _connectionWrapper;
        private readonly Mock<IJsonConverter> _jsonConverter;
        private readonly Mock<IModel> _channel;
        private readonly Mock<IOptions<SendOptions>> _sendOptions;
        private readonly Mock<ILogger<RabbitMQMessageQueue>> _logger;

        // Shared arrange: wires a fake producer/consumer connection pair whose
        // CreateModel() hands back the mocked channel used by all verifications.
        public RabbitMQMessageQueueTests()
        {
            _connectionWrapper = new Mock<IConnectionWrapper>();
            _connectionWrapper.Setup(x => x.ProducerConnection).Returns(Mock.Of<IConnection>());
            _connectionWrapper.Setup(x => x.ConsumerConnection).Returns(Mock.Of<IConnection>());

            _channelFactory = new Mock<IChannelFactory>();

            _channel = new Mock<IModel>();
            _channel.Setup(x => x.CreateBasicProperties()).Returns(Mock.Of<IBasicProperties>());
            _connectionWrapper.Setup(x => x.ProducerConnection.CreateModel()).Returns(_channel.Object);

            _eventingBasicConsumerImpl = new Mock<IEventingBasicConsumerImpl>();
            _eventingBasicConsumer = new Mock<EventingBasicConsumer>(Mock.Of<IModel>());
            _eventingBasicConsumerImpl.Setup(x => x.GetEventingBasicConsumer(It.IsAny<IModel>())).Returns(_eventingBasicConsumer.Object);

            _jsonConverter = new Mock<IJsonConverter>();
            _jsonConverter.Setup(x => x.Serialize(It.IsAny<object>())).Returns("{id:1}");

            _sendOptions = new Mock<IOptions<SendOptions>>();
            _sendOptions.Setup(x => x.Value.ConfirmReceiptTimeoutSeconds).Returns(It.IsAny<int>());
            // Publisher confirms are off by default; individual tests opt in.
            _sendOptions.Setup(x => x.Value.ConfirmReceiptIsActive).Returns(false);

            _logger = new Mock<ILogger<RabbitMQMessageQueue>>();
        }

        [Fact]
        public void SendByValidArgumentsShouldReturnTrue()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act
            var result = rabbitMQMessageQueue.Send("queue.name", "Data here");

            // Assert
            Assert.True(result);
        }

        [Fact]
        public void SendByValidArgumentsWithConfirmsActiveShouldHitConfirmSelect()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            _sendOptions.Setup(x => x.Value.ConfirmReceiptIsActive).Returns(true);

            // Act
            var result = rabbitMQMessageQueue.Send("queue.name", "Data here");

            // Assert
            _channel.Verify(x => x.ConfirmSelect(), Times.Once);
        }

        [Fact]
        public void SendByValidArgumentsWithWorkerConfirmsShouldHitWaitForConfirmsOrDie()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            _sendOptions.Setup(x => x.Value.ConfirmReceiptIsActive).Returns(true);

            // Act
            var result = rabbitMQMessageQueue.Send("queue.name", "Data here");

            // Assert
            _channel.Verify(x => x.WaitForConfirmsOrDie(It.IsAny<TimeSpan>()), Times.Once);
        }

        [Fact]
        public void SendByNullOrEmptyNameShouldThrowArgumentNullException()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act & Assert
            Assert.Throws<ArgumentNullException>(() => rabbitMQMessageQueue.Send("", "Data here"));
        }

        [Fact]
        public void SendByNullOrEmptyMessageShouldThrowArgumentNullException()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act & Assert
            Assert.Throws<ArgumentNullException>(() => rabbitMQMessageQueue.Send("queue.name", ""));
        }

        [Fact]
        public void ReceiveByValidArgumentsShouldReturnTrue()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act
            var result = rabbitMQMessageQueue.Receive("queue.name", body => false);

            // Assert
            Assert.True(result);
        }

        [Fact]
        public void ReceiveByNullOrEmptyNameShouldThrowArgumentNullException()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act & Assert
            Assert.Throws<ArgumentNullException>(() => rabbitMQMessageQueue.Receive(null, body => true));
        }

        [Fact]
        public void ReceiveByNullOrEmptyFuncShouldThrowArgumentNullException()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act & Assert
            Assert.Throws<ArgumentNullException>(() => rabbitMQMessageQueue.Receive("queue.name", null));
        }

        [Fact]
        public void InvokeWithExceptionalResultShouldHitBasicPublish()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            var ea = new BasicDeliverEventArgs { Body = Encoding.UTF8.GetBytes("any message") };
            var queueName = "queue.name";

            // Act
            rabbitMQMessageQueue.Invoke(body => throw new Exception(), ea, _channel.Object, queueName);

            // Assert
            // BasicPublish is not virtual, so CreateBasicProperties (called on the publish path) is verified instead.
            _channel.Verify(x => x.CreateBasicProperties(), Times.Once);
        }

        [Fact]
        public void InvokeWithFalseResultShouldHitBasicPublish()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            var ea = new BasicDeliverEventArgs { Body = Encoding.UTF8.GetBytes("any message") };
            var queueName = "queue.name";

            // Act
            rabbitMQMessageQueue.Invoke(body => { return false; }, ea, _channel.Object, queueName);

            // Assert
            // BasicPublish is not virtual, so CreateBasicProperties (called on the publish path) is verified instead.
            _channel.Verify(x => x.CreateBasicProperties(), Times.Once);
        }

        [Fact]
        public void InvokeWithTrueResultShouldNotHitBasicPublish()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            var ea = new BasicDeliverEventArgs { Body = Encoding.UTF8.GetBytes("any message") };
            var queueName = "queue.name";

            // Act
            rabbitMQMessageQueue.Invoke(body => { return true; }, ea, _channel.Object, queueName);

            // Assert
            // BasicPublish is not virtual, so CreateBasicProperties (never called when handler succeeds) is verified instead.
            _channel.Verify(x => x.CreateBasicProperties(), Times.Never);
        }

        [Fact]
        public void InvokeWithTrueResultShouldAllwaysHitBasicAck()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();

            // Act
            rabbitMQMessageQueue.Invoke(null, new BasicDeliverEventArgs(), _channel.Object, null);

            // Assert
            _channel.Verify(x => x.BasicAck(It.IsAny<ulong>(), false), Times.Once);
        }

        [Fact]
        public void SendWithValidParamsThanQueueBindArgumentExceptionShouldIngore()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            _channel.Setup(x => x.QueueBind("queue.name", It.IsAny<string>(), "queue.name", null))
                .Throws(new ArgumentException());

            // Act
            var result = rabbitMQMessageQueue.Send("queue.name", "message");

            // Assert
            Assert.True(result);
        }

        [Fact]
        public void ReceiveWithValidParamsThanQueueBindArgumentExceptionShouldIngore()
        {
            // Arrange
            var rabbitMQMessageQueue = GetRabbitMQMessageQueue();
            _channel.Setup(x => x.QueueBind(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), null))
                .Throws(new ArgumentException());

            // Act
            var result = rabbitMQMessageQueue.Receive("queue.name", body => { return true; });

            // Assert
            Assert.True(result);
        }

        // Builds the system under test from the mocks configured in the constructor.
        private RabbitMQMessageQueue GetRabbitMQMessageQueue()
        {
            return new RabbitMQMessageQueue(
                _connectionWrapper.Object,
                _jsonConverter.Object,
                _channelFactory.Object,
                _eventingBasicConsumerImpl.Object,
                _sendOptions.Object,
                _logger.Object
            );
        }
    }
}
/* Copyright (c) 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
using System;
using System.Text;
using System.Globalization;
using System.Net;
using System.IO;
using System.Collections.Generic;

namespace Google.GData.Client
{
    /// <summary>
    /// Provides a means to generate an OAuth signature suitable for use
    /// with Google OAuth requests, plus helpers for the OAuth 1.0 and
    /// OAuth 2.0 token acquisition flows.
    /// </summary>
    public class OAuthUtil
    {
        // Google OAuth endpoints
        private static String requestTokenUrl = "https://www.google.com/accounts/OAuthGetRequestToken";
        private static String userAuthorizationUrl = "https://www.google.com/accounts/OAuthAuthorizeToken";
        private static String accessTokenUrl = "https://www.google.com/accounts/OAuthGetAccessToken";

        /// <summary>
        /// Generates an OAuth header.
        /// </summary>
        /// <param name="uri">The URI of the request</param>
        /// <param name="consumerKey">The consumer key</param>
        /// <param name="consumerSecret">The consumer secret</param>
        /// <param name="httpMethod">The http method</param>
        /// <returns>The OAuth authorization header</returns>
        public static string GenerateHeader(Uri uri, String consumerKey, String consumerSecret, String httpMethod)
        {
            // No token/token-secret yet (two-legged style call).
            return GenerateHeader(uri, consumerKey, consumerSecret, string.Empty, string.Empty, httpMethod);
        }

        /// <summary>
        /// Generates an OAuth header.
        /// </summary>
        /// <param name="uri">The URI of the request</param>
        /// <param name="consumerKey">The consumer key</param>
        /// <param name="consumerSecret">The consumer secret</param>
        /// <param name="token">The OAuth token</param>
        /// <param name="tokenSecret">The OAuth token secret</param>
        /// <param name="httpMethod">The http method</param>
        /// <returns>The OAuth authorization header</returns>
        public static string GenerateHeader(Uri uri, String consumerKey, String consumerSecret,
            String token, String tokenSecret, String httpMethod)
        {
            OAuthParameters parameters = new OAuthParameters()
            {
                ConsumerKey = consumerKey,
                ConsumerSecret = consumerSecret,
                Token = token,
                TokenSecret = tokenSecret,
                SignatureMethod = OAuthBase.HMACSHA1SignatureType
            };
            return GenerateHeader(uri, httpMethod, parameters);
        }

        /// <summary>
        /// Generates an OAuth header.
        /// </summary>
        /// <param name="uri">The URI of the request</param>
        /// <param name="httpMethod">The http method</param>
        /// <param name="parameters">The OAuth parameters</param>
        /// <returns>The OAuth authorization header, including the "Authorization:" name</returns>
        public static string GenerateHeader(Uri uri, string httpMethod, OAuthParameters parameters)
        {
            // A fresh timestamp/nonce pair is required for every signed request.
            parameters.Timestamp = OAuthBase.GenerateTimeStamp();
            parameters.Nonce = OAuthBase.GenerateNonce();

            string signature = OAuthBase.GenerateSignature(uri, httpMethod, parameters);

            StringBuilder sb = new StringBuilder();
            sb.AppendFormat("Authorization: OAuth {0}=\"{1}\",", OAuthBase.OAuthVersionKey, OAuthBase.OAuthVersion);
            sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthNonceKey, OAuthBase.EncodingPerRFC3986(parameters.Nonce));
            sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthTimestampKey, OAuthBase.EncodingPerRFC3986(parameters.Timestamp));
            sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthConsumerKeyKey, OAuthBase.EncodingPerRFC3986(parameters.ConsumerKey));
            if (parameters.BaseProperties.ContainsKey(OAuthBase.OAuthVerifierKey))
            {
                sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthVerifierKey,
                    OAuthBase.EncodingPerRFC3986(parameters.BaseProperties[OAuthBase.OAuthVerifierKey]));
            }
            if (!String.IsNullOrEmpty(parameters.Token))
            {
                sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthTokenKey,
                    OAuthBase.EncodingPerRFC3986(parameters.Token));
            }
            if (parameters.BaseProperties.ContainsKey(OAuthBase.OAuthCallbackKey))
            {
                sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthCallbackKey,
                    OAuthBase.EncodingPerRFC3986(parameters.BaseProperties[OAuthBase.OAuthCallbackKey]));
            }
            sb.AppendFormat("{0}=\"{1}\",", OAuthBase.OAuthSignatureMethodKey, OAuthBase.HMACSHA1SignatureType);
            // Last parameter: no trailing comma.
            sb.AppendFormat("{0}=\"{1}\"", OAuthBase.OAuthSignatureKey, OAuthBase.EncodingPerRFC3986(signature));
            return sb.ToString();
        }

        /// <summary>
        /// Contacts Google for a request token, first step of the OAuth authentication process.
        /// When successful, updates the OAuthParameter instance passed as parameter by setting
        /// Token and TokenSecret.
        /// </summary>
        /// <param name="parameters">The OAuth parameters</param>
        public static void GetUnauthorizedRequestToken(OAuthParameters parameters)
        {
            Uri requestUri = new Uri(string.Format("{0}?scope={1}", requestTokenUrl,
                OAuthBase.EncodingPerRFC3986(parameters.Scope)));

            // callback is only needed when getting the request token
            bool callbackExists = false;
            if (!string.IsNullOrEmpty(parameters.Callback))
            {
                parameters.BaseProperties.Add(OAuthBase.OAuthCallbackKey, parameters.Callback);
                callbackExists = true;
            }

            string result;
            try
            {
                result = ExecuteSignedGetRequest(requestUri, parameters);
            }
            finally
            {
                // Remove the callback even on failure so the shared parameters
                // object is not polluted for subsequent calls (the original code
                // only removed it on the success path).
                if (callbackExists)
                {
                    parameters.BaseProperties.Remove(OAuthBase.OAuthCallbackKey);
                }
            }

            // split results and update parameters
            SortedDictionary<string, string> responseValues = OAuthBase.GetQueryParameters(result);
            parameters.Token = responseValues[OAuthBase.OAuthTokenKey];
            parameters.TokenSecret = responseValues[OAuthBase.OAuthTokenSecretKey];
        }

        /// <summary>
        /// Generates the url which the user should visit in order to authenticate and
        /// authorize with the Service Provider.
        /// </summary>
        /// <param name="parameters">The OAuth parameters</param>
        /// <returns>The full authorization url the user should visit</returns>
        public static string CreateUserAuthorizationUrl(OAuthParameters parameters)
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(userAuthorizationUrl);
            sb.AppendFormat("?{0}={1}", OAuthBase.OAuthTokenKey,
                OAuthBase.EncodingPerRFC3986(parameters.Token));
            if (!string.IsNullOrEmpty(parameters.Callback))
            {
                sb.AppendFormat("&{0}={1}", OAuthBase.OAuthCallbackKey,
                    OAuthBase.EncodingPerRFC3986(parameters.Callback));
            }
            return sb.ToString();
        }

        /// <summary>
        /// Helper method which parses a querystring for the OAuth related parameters.
        /// It updates the OAuthParameter instance passed as parameter by setting
        /// Token, TokenSecret and Verifier (if present).
        /// </summary>
        /// <param name="queryString">The callback query string to parse</param>
        /// <param name="parameters">The OAuth parameters</param>
        public static void UpdateOAuthParametersFromCallback(string queryString, OAuthParameters parameters)
        {
            //split results and update parameters
            SortedDictionary<string, string> responseValues = OAuthBase.GetQueryParameters(queryString);
            parameters.Token = responseValues[OAuthBase.OAuthTokenKey];
            if (responseValues.ContainsKey(OAuthBase.OAuthTokenSecretKey))
            {
                parameters.TokenSecret = responseValues[OAuthBase.OAuthTokenSecretKey];
            }
            if (responseValues.ContainsKey(OAuthBase.OAuthVerifierKey))
            {
                parameters.Verifier = responseValues[OAuthBase.OAuthVerifierKey];
            }
        }

        /// <summary>
        /// Exchanges the user-authorized request token for an access token.
        /// When successful, updates the OAuthParameter instance passed as parameter by setting
        /// Token and TokenSecret.
        /// </summary>
        /// <param name="parameters">The OAuth parameters</param>
        public static void GetAccessToken(OAuthParameters parameters)
        {
            Uri requestUri = new Uri(accessTokenUrl);
            string result = ExecuteSignedGetRequest(requestUri, parameters);

            //split results and update parameters
            SortedDictionary<string, string> responseValues = OAuthBase.GetQueryParameters(result);
            parameters.Token = responseValues[OAuthBase.OAuthTokenKey];
            parameters.TokenSecret = responseValues[OAuthBase.OAuthTokenSecretKey];
        }

        /// <summary>
        /// Generates the url which the user should visit in order to authenticate and
        /// authorize with the Service Provider (OAuth 2.0).
        /// </summary>
        /// <param name="parameters">The OAuth 2.0 parameters</param>
        /// <returns>The full authorization url the user should visit</returns>
        public static string CreateOAuth2AuthorizationUrl(OAuth2Parameters parameters)
        {
            StringBuilder sb = new StringBuilder();
            sb.Append(parameters.AuthUri);
            sb.AppendFormat("?{0}={1}", OAuthBase.OAuth2ResponseType,
                OAuthBase.EncodingPerRFC3986(parameters.ResponseType));
            sb.AppendFormat("&{0}={1}", OAuthBase.OAuth2ClientId,
                OAuthBase.EncodingPerRFC3986(parameters.ClientId));
            sb.AppendFormat("&{0}={1}", OAuthBase.OAuth2RedirectUri,
                OAuthBase.EncodingPerRFC3986(parameters.RedirectUri));
            sb.AppendFormat("&{0}={1}", OAuthBase.OAuthScopeKey,
                OAuthBase.EncodingPerRFC3986(parameters.Scope));
            sb.AppendFormat("&{0}={1}", OAuthBase.OAuth2AccessType,
                OAuthBase.EncodingPerRFC3986(parameters.AccessType));
            sb.AppendFormat("&{0}={1}", OAuthBase.OAuth2ApprovalPrompt,
                OAuthBase.EncodingPerRFC3986(parameters.ApprovalPrompt));
            if (!string.IsNullOrEmpty(parameters.State))
            {
                sb.AppendFormat("&{0}={1}", OAuthBase.OAuth2State,
                    OAuthBase.EncodingPerRFC3986(parameters.State));
            }
            return sb.ToString();
        }

        /// <summary>
        /// Exchanges the user-authorized request token for an OAuth 2.0 access token.
        /// When successful, updates the OAuthParameter instance passed as parameter by setting
        /// AccessToken, RefreshToken and TokenExpiry.
        /// </summary>
        /// <param name="queryString">The query string containing the access code</param>
        /// <param name="parameters">The OAuth 2.0 parameters</param>
        public static void GetAccessToken(String queryString, OAuth2Parameters parameters)
        {
            //split results and update parameters
            SortedDictionary<string, string> responseValues = OAuthBase.GetQueryParameters(queryString);
            parameters.AccessCode = responseValues[OAuthBase.OAuth2AccessCode];
            GetAccessToken(parameters);
        }

        /// <summary>
        /// Exchanges the user-authorized request token for an OAuth 2.0 access token.
        /// When successful, updates the OAuthParameter instance passed as parameter by setting
        /// AccessToken, RefreshToken and TokenExpiry.
        /// </summary>
        /// <param name="parameters">The OAuth 2.0 parameters</param>
        public static void GetAccessToken(OAuth2Parameters parameters)
        {
            OAuthBase.GetOAuth2AccessToken(parameters, OAuthBase.GetExchangeAccessCodeRequestBody(parameters));
        }

        /// <summary>
        /// Refresh the OAuth 2.0 access token.
        /// When successful, updates the OAuthParameter instance passed as parameter by setting
        /// AccessToken, RefreshToken and TokenExpiry.
        /// </summary>
        /// <param name="parameters">The OAuth 2.0 parameters</param>
        public static void RefreshAccessToken(OAuth2Parameters parameters)
        {
            OAuthBase.GetOAuth2AccessToken(parameters, OAuthBase.GetRefreshAccessTokenRequestBody(parameters));
        }

        /// <summary>
        /// Sends a signed GET request to the given URI and returns the response body.
        /// Disposes the response, its stream and the reader (the original code leaked all three).
        /// </summary>
        /// <param name="requestUri">The URI to request</param>
        /// <param name="parameters">The OAuth parameters used to sign the request</param>
        /// <returns>The response body, or an empty string if there was no response</returns>
        private static string ExecuteSignedGetRequest(Uri requestUri, OAuthParameters parameters)
        {
            // GenerateHeader returns "Authorization: OAuth ..."; WebHeaderCollection.Add(string)
            // parses that "name: value" form into a header entry.
            string headers = GenerateHeader(requestUri, "GET", parameters);
            WebRequest request = WebRequest.Create(requestUri);
            request.Headers.Add(headers);

            string result = "";
            using (WebResponse response = request.GetResponse())
            {
                if (response != null)
                {
                    using (Stream responseStream = response.GetResponseStream())
                    using (StreamReader reader = new StreamReader(responseStream))
                    {
                        result = reader.ReadToEnd();
                    }
                }
            }
            return result;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Resources;
using System.Runtime.ExceptionServices;
using Xunit;

namespace System.Tests
{
    /// <summary>
    /// Tests for the <see cref="AppDomain"/> surface area. Many tests run in a child
    /// process via RemoteInvoke so process-wide state (events, Id, default-domain checks)
    /// does not leak between tests.
    /// </summary>
    public class AppDomainTests : RemoteExecutorTestBase
    {
        [Fact]
        public void CurrentDomain_Not_Null()
        {
            Assert.NotNull(AppDomain.CurrentDomain);
        }

        [Fact]
        public void CurrentDomain_Idempotent()
        {
            Assert.Equal(AppDomain.CurrentDomain, AppDomain.CurrentDomain);
        }

        [Fact]
        public void BaseDirectory_Same_As_AppContext()
        {
            Assert.Equal(AppDomain.CurrentDomain.BaseDirectory, AppContext.BaseDirectory);
        }

        [Fact]
        public void RelativeSearchPath_Is_Null()
        {
            Assert.Null(AppDomain.CurrentDomain.RelativeSearchPath);
        }

        [Fact]
        public void UnhandledException_Add_Remove()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(MyHandler);
                AppDomain.CurrentDomain.UnhandledException -= new UnhandledExceptionEventHandler(MyHandler);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void UnhandledException_NotCalled_When_Handled()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(NotExpectedToBeCalledHandler);
                try
                {
                    throw new Exception();
                }
                catch
                {
                }
                AppDomain.CurrentDomain.UnhandledException -= new UnhandledExceptionEventHandler(NotExpectedToBeCalledHandler);
                return SuccessExitCode;
            }).Dispose();
        }

        [ActiveIssue(12716)]
        [PlatformSpecific(~TestPlatforms.OSX)] // Unhandled exception on a separate process causes xunit to crash on osx
        [Fact]
        public void UnhandledException_Called()
        {
            // The handler signals success by creating "success.txt" in the child process.
            System.IO.File.Delete("success.txt");
            RemoteInvokeOptions options = new RemoteInvokeOptions();
            options.CheckExitCode = false;
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(MyHandler);
                throw new Exception("****This Unhandled Exception is Expected****");
#pragma warning disable 0162
                return SuccessExitCode;
#pragma warning restore 0162
            }, options).Dispose();
            Assert.True(System.IO.File.Exists("success.txt"));
        }

        static void NotExpectedToBeCalledHandler(object sender, UnhandledExceptionEventArgs args)
        {
            Assert.True(false, "UnhandledException handler not expected to be called");
        }

        static void MyHandler(object sender, UnhandledExceptionEventArgs args)
        {
            File.Create("success.txt");
        }

        [Fact]
        public void DynamicDirectory_Null()
        {
            Assert.Null(AppDomain.CurrentDomain.DynamicDirectory);
        }

        [Fact]
        public void FriendlyName()
        {
            string s = AppDomain.CurrentDomain.FriendlyName;
            Assert.NotNull(s);
            string expected = Assembly.GetEntryAssembly()?.GetName()?.Name;
            // GetEntryAssembly may be null (i.e. desktop)
            if (expected == null)
                expected = Assembly.GetExecutingAssembly().GetName().Name;
            Assert.Equal(expected, s);
        }

        [Fact]
        public void Id()
        {
            // if running directly on some platforms Xunit may be Id = 1
            RemoteInvoke(() =>
            {
                Assert.Equal(1, AppDomain.CurrentDomain.Id);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void IsFullyTrusted()
        {
            Assert.True(AppDomain.CurrentDomain.IsFullyTrusted);
        }

        [Fact]
        public void IsHomogenous()
        {
            Assert.True(AppDomain.CurrentDomain.IsHomogenous);
        }

        [Fact]
        public void FirstChanceException_Add_Remove()
        {
            RemoteInvoke(() =>
            {
                EventHandler<FirstChanceExceptionEventArgs> handler = (sender, e) =>
                {
                };
                AppDomain.CurrentDomain.FirstChanceException += handler;
                AppDomain.CurrentDomain.FirstChanceException -= handler;
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue(21680, TargetFrameworkMonikers.UapAot)]
        public void FirstChanceException_Called()
        {
            RemoteInvoke(() =>
            {
                bool flag = false;
                EventHandler<FirstChanceExceptionEventArgs> handler = (sender, e) =>
                {
                    Exception ex = e.Exception;
                    if (ex is FirstChanceTestException)
                    {
                        flag = !flag;
                    }
                };
                AppDomain.CurrentDomain.FirstChanceException += handler;
                try
                {
                    throw new FirstChanceTestException("testing");
                }
                catch
                {
                }
                AppDomain.CurrentDomain.FirstChanceException -= handler;
                Assert.True(flag, "FirstChanceHandler not called");
                return SuccessExitCode;
            }).Dispose();
        }

        // Exception type used only so FirstChanceException_Called can filter its own throw.
        class FirstChanceTestException : Exception
        {
            public FirstChanceTestException(string message) : base(message)
            {
            }
        }

        [Fact]
        public void ProcessExit_Add_Remove()
        {
            RemoteInvoke(() =>
            {
                EventHandler handler = (sender, e) =>
                {
                };
                AppDomain.CurrentDomain.ProcessExit += handler;
                AppDomain.CurrentDomain.ProcessExit -= handler;
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue("https://github.com/dotnet/corefx/issues/21410", TargetFrameworkMonikers.Uap)]
        public void ProcessExit_Called()
        {
            // The child process creates the file from its ProcessExit handler;
            // its existence after the child exits proves the handler ran.
            string path = GetTestFilePath();
            RemoteInvoke((pathToFile) =>
            {
                EventHandler handler = (sender, e) =>
                {
                    File.Create(pathToFile);
                };
                AppDomain.CurrentDomain.ProcessExit += handler;
                return SuccessExitCode;
            }, path).Dispose();
            Assert.True(File.Exists(path));
        }

        [Fact]
        public void ApplyPolicy()
        {
            AssertExtensions.Throws<ArgumentNullException>("assemblyName", () =>
            {
                AppDomain.CurrentDomain.ApplyPolicy(null);
            });
            AssertExtensions.Throws<ArgumentException>(null, () =>
            {
                AppDomain.CurrentDomain.ApplyPolicy("");
            });
            string entryAssembly = Assembly.GetEntryAssembly()?.FullName ?? Assembly.GetExecutingAssembly().FullName;
            Assert.Equal(AppDomain.CurrentDomain.ApplyPolicy(entryAssembly), entryAssembly);
        }

        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework)]
        public void CreateDomainNonNetfx()
        {
            AssertExtensions.Throws<ArgumentNullException>("friendlyName", () =>
            {
                AppDomain.CreateDomain(null);
            });
            Assert.Throws<PlatformNotSupportedException>(() =>
            {
                AppDomain.CreateDomain("test");
            });
        }

        [Fact]
        [SkipOnTargetFramework(~TargetFrameworkMonikers.NetFramework)]
        public void CreateDomainNetfx()
        {
            Assert.Throws<ArgumentNullException>(() =>
            {
                AppDomain.CreateDomain(null);
            });
            AppDomain.CreateDomain("test");
        }

        [Fact]
        [ActiveIssue(21680, TargetFrameworkMonikers.UapAot)]
        public void ExecuteAssemblyByName()
        {
            RemoteInvoke(() =>
            {
                string name = "TestApp";
                var assembly = Assembly.Load(name);
                Assert.Equal(5, AppDomain.CurrentDomain.ExecuteAssemblyByName(assembly.FullName));
                Assert.Equal(10, AppDomain.CurrentDomain.ExecuteAssemblyByName(assembly.FullName, new string[2] { "2", "3" }));
                Assert.Throws<FormatException>(() => AppDomain.CurrentDomain.ExecuteAssemblyByName(assembly.FullName, new string[1] { "a" }));
                AssemblyName assemblyName = assembly.GetName();
                assemblyName.CodeBase = null;
                Assert.Equal(105, AppDomain.CurrentDomain.ExecuteAssemblyByName(assemblyName, new string[3] { "50", "25", "25" }));
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue("https://github.com/dotnet/corefx/issues/18718", TargetFrameworkMonikers.Uap)] // Need to copy files out of execution directory
        public void ExecuteAssembly()
        {
            CopyTestAssemblies();

            string name = Path.Combine(Environment.CurrentDirectory, "TestAppOutsideOfTPA", "TestAppOutsideOfTPA.exe");
            AssertExtensions.Throws<ArgumentNullException>("assemblyFile", () => AppDomain.CurrentDomain.ExecuteAssembly(null));
            Assert.Throws<FileNotFoundException>(() => AppDomain.CurrentDomain.ExecuteAssembly("NonExistentFile.exe"));

            Func<int> executeAssembly = () => AppDomain.CurrentDomain.ExecuteAssembly(name, new string[2] { "2", "3" }, null, Configuration.Assemblies.AssemblyHashAlgorithm.SHA1);

            if (PlatformDetection.IsFullFramework)
                Assert.Equal(10, executeAssembly());
            else
                Assert.Throws<PlatformNotSupportedException>(() => executeAssembly());

            Assert.Equal(5, AppDomain.CurrentDomain.ExecuteAssembly(name));
            Assert.Equal(10, AppDomain.CurrentDomain.ExecuteAssembly(name, new string[2] { "2", "3" }));
        }

        [Fact]
        public void GetData_SetData()
        {
            RemoteInvoke(() =>
            {
                AssertExtensions.Throws<ArgumentNullException>("name", () =>
                {
                    AppDomain.CurrentDomain.SetData(null, null);
                });

                AppDomain.CurrentDomain.SetData("", null);
                Assert.Null(AppDomain.CurrentDomain.GetData(""));

                AppDomain.CurrentDomain.SetData("randomkey", 4);
                Assert.Equal(4, AppDomain.CurrentDomain.GetData("randomkey"));
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue(21680, TargetFrameworkMonikers.UapAot)]
        public void SetData_SameKeyMultipleTimes_ReplacesOldValue()
        {
            RemoteInvoke(() =>
            {
                string key = Guid.NewGuid().ToString("N");
                for (int i = 0; i < 3; i++)
                {
                    AppDomain.CurrentDomain.SetData(key, i.ToString());
                    Assert.Equal(i.ToString(), AppDomain.CurrentDomain.GetData(key));
                }
                AppDomain.CurrentDomain.SetData(key, null);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Netfx is more permissive and does not throw")]
        public void IsCompatibilitySwitchSet()
        {
            Assert.Throws<ArgumentNullException>(() =>
            {
                AppDomain.CurrentDomain.IsCompatibilitySwitchSet(null);
            });
            AssertExtensions.Throws<ArgumentException>("switchName", () =>
            {
                AppDomain.CurrentDomain.IsCompatibilitySwitchSet("");
            });
            Assert.Null(AppDomain.CurrentDomain.IsCompatibilitySwitchSet("randomSwitch"));
        }

        [Fact]
        public void IsDefaultAppDomain()
        {
            // Xunit may be default app domain if run directly
            RemoteInvoke(() =>
            {
                Assert.True(AppDomain.CurrentDomain.IsDefaultAppDomain());
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void IsFinalizingForUnload()
        {
            Assert.False(AppDomain.CurrentDomain.IsFinalizingForUnload());
        }

        [Fact]
        public void toString()
        {
            string actual = AppDomain.CurrentDomain.ToString();
            // NetFx has additional line endings
            if (PlatformDetection.IsFullFramework)
                actual = actual.Trim();
            string expected = "Name:" + AppDomain.CurrentDomain.FriendlyName + Environment.NewLine + "There are no context policies.";
            Assert.Equal(expected, actual);
        }

        [Fact]
        public void Unload()
        {
            RemoteInvoke(() =>
            {
                AssertExtensions.Throws<ArgumentNullException>("domain", () =>
                {
                    AppDomain.Unload(null);
                });
                Assert.Throws<CannotUnloadAppDomainException>(() =>
                {
                    AppDomain.Unload(AppDomain.CurrentDomain);
                });
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void Load()
        {
            AssemblyName assemblyName = typeof(AppDomainTests).Assembly.GetName();
            assemblyName.CodeBase = null;
            Assert.NotNull(AppDomain.CurrentDomain.Load(assemblyName));
            Assert.NotNull(AppDomain.CurrentDomain.Load(typeof(AppDomainTests).Assembly.FullName));
        }

        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Does not support Assembly.Load(byte[])")]
        public void LoadBytes()
        {
            Assembly assembly = typeof(AppDomainTests).Assembly;
            byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
            Assert.NotNull(AppDomain.CurrentDomain.Load(aBytes));
        }

        [Fact]
        public void ReflectionOnlyGetAssemblies()
        {
            Assert.Equal(0, AppDomain.CurrentDomain.ReflectionOnlyGetAssemblies().Length);
        }

        [Fact]
        public void MonitoringIsEnabled()
        {
            RemoteInvoke(() =>
            {
                Assert.False(AppDomain.MonitoringIsEnabled);
                Assert.Throws<ArgumentException>(() =>
                {
                    AppDomain.MonitoringIsEnabled = false;
                });

                if (PlatformDetection.IsFullFramework)
                {
                    AppDomain.MonitoringIsEnabled = true;
                    Assert.True(AppDomain.MonitoringIsEnabled);
                }
                else
                {
                    Assert.Throws<PlatformNotSupportedException>(() =>
                    {
                        AppDomain.MonitoringIsEnabled = true;
                    });
                }
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void MonitoringSurvivedMemorySize()
        {
            Assert.Throws<InvalidOperationException>(() =>
            {
                var t = AppDomain.CurrentDomain.MonitoringSurvivedMemorySize;
            });
        }

        [Fact]
        public void MonitoringSurvivedProcessMemorySize()
        {
            Assert.Throws<InvalidOperationException>(() =>
            {
                var t = AppDomain.MonitoringSurvivedProcessMemorySize;
            });
        }

        [Fact]
        public void MonitoringTotalAllocatedMemorySize()
        {
            Assert.Throws<InvalidOperationException>(() =>
            {
                var t = AppDomain.CurrentDomain.MonitoringTotalAllocatedMemorySize;
            });
        }

        [Fact]
        public void MonitoringTotalProcessorTime()
        {
            Assert.Throws<InvalidOperationException>(() =>
            {
                var t = AppDomain.CurrentDomain.MonitoringTotalProcessorTime;
            });
        }

#pragma warning disable 618
        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework)]
        public void GetCurrentThreadId()
        {
            Assert.Equal(AppDomain.GetCurrentThreadId(), Environment.CurrentManagedThreadId);
        }

        [Fact]
        public void ShadowCopyFiles()
        {
            Assert.False(AppDomain.CurrentDomain.ShadowCopyFiles);
        }

        [Fact]
        public void AppendPrivatePath()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.AppendPrivatePath("test");
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void ClearPrivatePath()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.ClearPrivatePath();
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void ClearShadowCopyPath()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.ClearShadowCopyPath();
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void SetCachePath()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.SetCachePath("test");
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void SetShadowCopyFiles()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.SetShadowCopyFiles();
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void SetShadowCopyPath()
        {
            RemoteInvoke(() =>
            {
                AppDomain.CurrentDomain.SetShadowCopyPath("test");
                return SuccessExitCode;
            }).Dispose();
        }
#pragma warning restore 618

        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Does not support Assembly.LoadFile")]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework)]
        public void GetAssemblies()
        {
            RemoteInvoke(() =>
            {
                Assembly[] assemblies = AppDomain.CurrentDomain.GetAssemblies();
                Assert.NotNull(assemblies);
                Assert.True(assemblies.Length > 0, "There must be assemblies already loaded in the process");
                AppDomain.CurrentDomain.Load(typeof(AppDomainTests).Assembly.GetName().FullName);
                Assembly[] assemblies1 = AppDomain.CurrentDomain.GetAssemblies();
                // Another thread could have loaded an assembly hence not checking for equality
                Assert.True(assemblies1.Length >= assemblies.Length, "Assembly.Load of an already loaded assembly should not cause another load");
                Assembly.LoadFile(typeof(AppDomain).Assembly.Location);
                Assembly[] assemblies2 = AppDomain.CurrentDomain.GetAssemblies();
                Assert.True(assemblies2.Length > assemblies.Length, "Assembly.LoadFile should cause an increase in GetAssemblies list");
                int ctr = 0;
                foreach (var a in assemblies2)
                {
                    // Dynamic assemblies do not support Location property.
                    if (!a.IsDynamic)
                    {
                        if (a.Location == typeof(AppDomain).Assembly.Location)
                            ctr++;
                    }
                }
                foreach (var a in assemblies)
                {
                    if (!a.IsDynamic)
                    {
                        if (a.Location == typeof(AppDomain).Assembly.Location)
                            ctr--;
                    }
                }
                Assert.True(ctr > 0, "Assembly.LoadFile should cause file to be loaded again");
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "Does not support Assembly.LoadFile")]
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework)]
        public void AssemblyLoad()
        {
            RemoteInvoke(() =>
            {
                bool AssemblyLoadFlag = false;
                AssemblyLoadEventHandler handler = (sender, args) =>
                {
                    if (args.LoadedAssembly.FullName.Equals(typeof(AppDomainTests).Assembly.FullName))
                    {
                        AssemblyLoadFlag = !AssemblyLoadFlag;
                    }
                };

                AppDomain.CurrentDomain.AssemblyLoad += handler;

                try
                {
                    Assembly.LoadFile(typeof(AppDomainTests).Assembly.Location);
                }
                finally
                {
                    AppDomain.CurrentDomain.AssemblyLoad -= handler;
                }
                Assert.True(AssemblyLoadFlag);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue("https://github.com/dotnet/corefx/issues/18718", TargetFrameworkMonikers.Uap)] // Need to copy files out of execution directory
        public void AssemblyResolve()
        {
            CopyTestAssemblies();

            RemoteInvoke(() =>
            {
                ResolveEventHandler handler = (sender, e) =>
                {
                    return Assembly.LoadFile(Path.Combine(Environment.CurrentDirectory, "AssemblyResolveTests", "AssemblyResolveTests.dll"));
                };

                AppDomain.CurrentDomain.AssemblyResolve += handler;

                Type t = Type.GetType("AssemblyResolveTests.Class1, AssemblyResolveTests", true);
                Assert.NotNull(t);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue("https://github.com/dotnet/corefx/issues/18718", TargetFrameworkMonikers.Uap)] // Need to copy files out of execution directory
        public void AssemblyResolve_RequestingAssembly()
        {
            CopyTestAssemblies();

            RemoteInvoke(() =>
            {
                Assembly a = Assembly.LoadFile(Path.Combine(Environment.CurrentDirectory, "TestAppOutsideOfTPA", "TestAppOutsideOfTPA.exe"));

                ResolveEventHandler handler = (sender, e) =>
                {
                    Assert.Equal(e.RequestingAssembly, a);
                    return Assembly.LoadFile(Path.Combine(Environment.CurrentDirectory, "AssemblyResolveTests", "AssemblyResolveTests.dll"));
                };

                AppDomain.CurrentDomain.AssemblyResolve += handler;
                Type ptype = a.GetType("Program");
                MethodInfo myMethodInfo = ptype.GetMethod("foo");
                object ret = myMethodInfo.Invoke(null, null);
                Assert.NotNull(ret);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue(21680, TargetFrameworkMonikers.UapAot)]
        public void TypeResolve()
        {
            RemoteInvoke(() =>
            {
                Assert.Throws<TypeLoadException>(() => Type.GetType("Program", true));

                ResolveEventHandler handler = (sender, args) =>
                {
                    return Assembly.Load("TestApp");
                };

                AppDomain.CurrentDomain.TypeResolve += handler;

                Type t;
                try
                {
                    t = Type.GetType("Program", true);
                }
                finally
                {
                    AppDomain.CurrentDomain.TypeResolve -= handler;
                }
                Assert.NotNull(t);
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        [ActiveIssue(21680, TargetFrameworkMonikers.UapAot)]
        public void ResourceResolve()
        {
            RemoteInvoke(() =>
            {
                ResourceManager res = new ResourceManager(typeof(FxResources.TestApp.SR));
                Assert.Throws<MissingManifestResourceException>(() => res.GetString("Message"));

                ResolveEventHandler handler = (sender, args) =>
                {
                    return Assembly.Load("TestApp");
                };

                AppDomain.CurrentDomain.ResourceResolve += handler;

                String s;
                try
                {
                    s = res.GetString("Message");
                }
                finally
                {
                    AppDomain.CurrentDomain.ResourceResolve -= handler;
                }
                Assert.Equal(s, "Happy Halloween");
                return SuccessExitCode;
            }).Dispose();
        }

        [Fact]
        public void SetThreadPrincipal()
        {
            RemoteInvoke(() =>
            {
                Assert.Throws<ArgumentNullException>(() =>
                {
                    AppDomain.CurrentDomain.SetThreadPrincipal(null);
                });
                var identity = new System.Security.Principal.GenericIdentity("NewUser");
                var principal = new System.Security.Principal.GenericPrincipal(identity, null);
                AppDomain.CurrentDomain.SetThreadPrincipal(principal);
                return SuccessExitCode;
            }).Dispose();
        }

        // Copies helper assemblies out of the execution directory into subfolders of the
        // current directory so the resolve tests can load them from outside the TPA.
        private void CopyTestAssemblies()
        {
            string destTestAssemblyPath = Path.Combine(Environment.CurrentDirectory, "AssemblyResolveTests", "AssemblyResolveTests.dll");
            if (!File.Exists(destTestAssemblyPath) && File.Exists("AssemblyResolveTests.dll"))
            {
                Directory.CreateDirectory(Path.GetDirectoryName(destTestAssemblyPath));
                File.Copy("AssemblyResolveTests.dll", destTestAssemblyPath, false);
            }

            destTestAssemblyPath = Path.Combine(Environment.CurrentDirectory, "TestAppOutsideOfTPA", "TestAppOutsideOfTPA.exe");
            if (!File.Exists(destTestAssemblyPath) && File.Exists("TestAppOutsideOfTPA.exe"))
            {
                Directory.CreateDirectory(Path.GetDirectoryName(destTestAssemblyPath));
                File.Copy("TestAppOutsideOfTPA.exe", destTestAssemblyPath, false);
            }
        }
    }
}

// Placeholder resource type used by the ResourceResolve test above.
namespace FxResources.TestApp
{
    class SR
    {
    }
}
using Epi.DataSets;

namespace Epi
{
    /// <summary>
    /// Encapsulates all global information that persists in the config file.
    /// </summary>
    public partial class Configuration
    {
        private string configFilePath;
        private DataSets.Config configDataSet;

        #region Constructors

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="configFilePath">Configuration file path.</param>
        /// <param name="configDataSet">Configuration data.</param>
        public Configuration(string configFilePath, DataSets.Config configDataSet)
        {
            this.configDataSet = configDataSet;
            this.configFilePath = configFilePath;
        }

        #endregion Constructors

        #region Public Constants

        /// <summary>
        /// Version of the current configuration schema. Used to spot schema differences.
        /// </summary>
        public const int CurrentSchemaVersion = 117;

        #endregion

        #region Public Properties

        /// <summary>
        /// Gets configuration file path.
        /// </summary>
        public string ConfigFilePath
        {
            get { return configFilePath; }
        }

        /// <summary>
        /// Gets configuration settings (first row of the Settings table).
        /// </summary>
        public Config.SettingsRow Settings
        {
            get { return configDataSet.Settings[0]; }
        }

        /// <summary>
        /// Gets the text encryption module, or null when none is configured.
        /// </summary>
        public Config.TextEncryptionModuleRow TextEncryptionModule
        {
            get
            {
                if (configDataSet.TextEncryptionModule == null || configDataSet.TextEncryptionModule.Count == 0)
                {
                    return null;
                }
                return configDataSet.TextEncryptionModule[0];
            }
        }

        /// <summary>
        /// Gets the file encryption module, or null when none is configured.
        /// </summary>
        public Config.FileEncryptionModuleRow FileEncryptionModule
        {
            get
            {
                if (configDataSet.FileEncryptionModule == null || configDataSet.FileEncryptionModule.Count == 0)
                {
                    return null;
                }
                return configDataSet.FileEncryptionModule[0];
            }
        }

        /// <summary>
        /// Return configuration version information.
        /// </summary>
        public Config.VersionRow Version
        {
            get { return configDataSet.Version[0]; }
        }

        /// <summary>
        /// Gets/sets the full path of the current project file.
        /// Never returns null; an unset location is reported as the empty string.
        /// </summary>
        public string CurrentProjectFilePath
        {
            get
            {
                return ParentRowRecentProjects.CurrentProjectLocation ?? string.Empty;
            }
            set
            {
                ParentRowRecentProjects.CurrentProjectLocation = value;
                //Save();
            }
        }

        /// <summary>
        /// Gets a datatable for recent projects.
        /// </summary>
        public Config.RecentProjectDataTable RecentProjects
        {
            get { return configDataSet.RecentProject; }
        }

        /// <summary>
        /// Gets a datatable for recent views.
        /// </summary>
        public Config.RecentViewDataTable RecentViews
        {
            get { return configDataSet.RecentView; }
        }

        /// <summary>
        /// Gets a datatable for recent data sources.
        /// </summary>
        public Config.RecentDataSourceDataTable RecentDataSources
        {
            get { return configDataSet.RecentDataSource; }
        }

        /// <summary>
        /// Gets a datatable for data drivers.
        /// </summary>
        public Config.DataDriverDataTable DataDrivers
        {
            get { return configDataSet.DataDriver; }
        }

        /// <summary>
        /// Gets a datatable for gadgets.
        /// </summary>
        public Config.GadgetsDataTable Gadgets
        {
            get { return configDataSet.Gadgets; }
        }

        /// <summary>
        /// Gets a datatable for a gadget.
        /// </summary>
        public Config.GadgetDataTable Gadget
        {
            get { return configDataSet.Gadget; }
        }

        /// <summary>
        /// Gets a datatable for file connections.
        /// </summary>
        public Config.FileDataTable FileConnections
        {
            get { return configDataSet.File; }
        }

        /// <summary>
        /// Gets a datatable for data connections.
        /// </summary>
        public Config.DatabaseDataTable DatabaseConnections
        {
            get { return configDataSet.Database; }
        }

        /// <summary>
        /// Gets a datatable for directories (first row of the Directories table).
        /// </summary>
        public Config.DirectoriesRow Directories
        {
            get { return configDataSet.Directories[0]; }
        }

        /// <summary>
        /// Gets the collection of permanent variables.
        /// If the in-memory collection is null, it loads them from persistence.
        /// </summary>
        public Config.PermanentVariableDataTable PermanentVariables
        {
            get { return ConfigDataSet.PermanentVariable; }
        }

        /// <summary>
        /// Gets the collection of installed Epi Info modules.
        /// </summary>
        public Config.ModuleDataTable Modules
        {
            get { return configDataSet.Module; }
        }

        /// <summary>
        /// Gets the internal config dataset.
        /// </summary>
        public DataSets.Config ConfigDataSet
        {
            get { return configDataSet; }
        }

        /// <summary>
        /// Gets the row from config typed dataset containing the parent row for module rows.
        /// Creates the row on first access so the indexer below never sees an empty table.
        /// </summary>
        public Config.ModulesRow ParentRowModules
        {
            get
            {
                if (configDataSet.Modules.Count < 1)
                {
                    Config.ModulesRow newrow = configDataSet.Modules.NewModulesRow();
                    ConfigDataSet.Modules.Rows.Add(newrow);
                }
                return configDataSet.Modules[0];
            }
        }

        /// <summary>
        /// Parent row for connection rows. Created lazily on first access.
        /// </summary>
        public Config.ConnectionsRow ParentRowConnections
        {
            get
            {
                // BUG FIX: this guard previously tested configDataSet.Projects.Count
                // (copy/paste from ParentRowRecentProjects). When a Projects parent row
                // already existed but no Connections row did, the row was never created
                // and configDataSet.Connections[0] below threw an index exception.
                if (configDataSet.Connections.Count < 1)
                {
                    Config.ConnectionsRow newrow = configDataSet.Connections.NewConnectionsRow();
                    ConfigDataSet.Connections.Rows.Add(newrow);
                }
                return configDataSet.Connections[0];
            }
        }

        /// <summary>
        /// Parent row for project rows. Created lazily on first access.
        /// </summary>
        public Config.ProjectsRow ParentRowRecentProjects
        {
            get
            {
                if (configDataSet.Projects.Count < 1)
                {
                    Config.ProjectsRow newrow = configDataSet.Projects.NewProjectsRow();
                    newrow.CurrentProjectLocation = string.Empty;
                    ConfigDataSet.Projects.Rows.Add(newrow);
                }
                return configDataSet.Projects[0];
            }
        }

        /// <summary>
        /// Parent row for gadgets. Created lazily on first access.
        /// </summary>
        public Config.GadgetsRow ParentRowGadgets
        {
            get
            {
                if (configDataSet.Gadgets.Count < 1)
                {
                    Config.GadgetsRow newrow = configDataSet.Gadgets.NewGadgetsRow();
                    ConfigDataSet.Gadgets.Rows.Add(newrow);
                }
                return configDataSet.Gadgets[0];
            }
        }

        /// <summary>
        /// Parent row for recent view rows. Created lazily on first access.
        /// </summary>
        public Config.ViewsRow ParentRowRecentViews
        {
            get
            {
                if (configDataSet.Views.Count < 1)
                {
                    Config.ViewsRow newrow = configDataSet.Views.NewViewsRow();
                    ConfigDataSet.Views.Rows.Add(newrow);
                }
                return configDataSet.Views[0];
            }
        }

        /// <summary>
        /// Parent row for recent data source rows. Created lazily on first access.
        /// </summary>
        public Config.DataSourcesRow ParentRowRecentDataSources
        {
            get
            {
                if (configDataSet.DataSources.Count < 1)
                {
                    Config.DataSourcesRow newrow = configDataSet.DataSources.NewDataSourcesRow();
                    ConfigDataSet.DataSources.Rows.Add(newrow);
                }
                return configDataSet.DataSources[0];
            }
        }

        /// <summary>
        /// Parent row for permanent variables. Created lazily on first access.
        /// </summary>
        public Config.VariablesRow ParentRowPermanentVariables
        {
            get
            {
                if (configDataSet.Variables.Count < 1)
                {
                    Config.VariablesRow newrow = configDataSet.Variables.NewVariablesRow();
                    ConfigDataSet.Variables.Rows.Add(newrow);
                }
                return configDataSet.Variables[0];
            }
        }

        #endregion Public Properties
    }//end class
}//end namespace
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace Apache.Ignite.Core.Impl.Binary
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Linq;
    using System.Runtime.Serialization;
    using Apache.Ignite.Core.Binary;
    using Apache.Ignite.Core.Cache.Affinity;
    using Apache.Ignite.Core.Common;
    using Apache.Ignite.Core.Impl.Binary.IO;
    using Apache.Ignite.Core.Impl.Binary.Metadata;
    using Apache.Ignite.Core.Impl.Cache;
    using Apache.Ignite.Core.Impl.Cache.Query.Continuous;
    using Apache.Ignite.Core.Impl.Common;
    using Apache.Ignite.Core.Impl.Compute;
    using Apache.Ignite.Core.Impl.Compute.Closure;
    using Apache.Ignite.Core.Impl.Datastream;
    using Apache.Ignite.Core.Impl.Deployment;
    using Apache.Ignite.Core.Impl.Messaging;
    using Apache.Ignite.Core.Log;

    /// <summary>
    /// Marshaller implementation: maintains the type-descriptor registry and
    /// provides marshal/unmarshal entry points over binary streams.
    /// </summary>
    internal class Marshaller
    {
        /** Binary configuration. */
        private readonly BinaryConfiguration _cfg;

        /** Type to descriptor map. */
        private readonly CopyOnWriteConcurrentDictionary<Type, BinaryFullTypeDescriptor> _typeToDesc =
            new CopyOnWriteConcurrentDictionary<Type, BinaryFullTypeDescriptor>();

        /** Type name to descriptor map. */
        private readonly CopyOnWriteConcurrentDictionary<string, BinaryFullTypeDescriptor> _typeNameToDesc =
            new CopyOnWriteConcurrentDictionary<string, BinaryFullTypeDescriptor>();

        /** ID to descriptor map. */
        private readonly CopyOnWriteConcurrentDictionary<long, BinaryFullTypeDescriptor> _idToDesc =
            new CopyOnWriteConcurrentDictionary<long, BinaryFullTypeDescriptor>();

        /** Cached binary types. Replaced wholesale (copy-on-write) under a lock; read without one. */
        private volatile IDictionary<int, BinaryTypeHolder> _metas = new Dictionary<int, BinaryTypeHolder>();

        /** Backing grid (set after construction). */
        private volatile IIgniteInternal _ignite;

        /** Logger (may be null). */
        private readonly ILogger _log;

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="cfg">Configuration.</param>
        /// <param name="log">Logger, optional.</param>
        public Marshaller(BinaryConfiguration cfg, ILogger log = null)
        {
            _cfg = cfg ?? new BinaryConfiguration();
            _log = log;

            CompactFooter = _cfg.CompactFooter;

            if (_cfg.TypeConfigurations == null)
                _cfg.TypeConfigurations = new List<BinaryTypeConfiguration>();

            // Validate configured type names up front.
            foreach (BinaryTypeConfiguration entry in _cfg.TypeConfigurations)
            {
                if (string.IsNullOrEmpty(entry.TypeName))
                    throw new BinaryObjectException("Type name cannot be null or empty: " + entry);
            }

            // System types first: they use internal reflective stuff, so configuration doesn't affect them.
            AddSystemTypes();

            // Then user types, from explicit configurations and from plain type names.
            var resolver = new TypeResolver();

            ICollection<BinaryTypeConfiguration> cfgEntries = _cfg.TypeConfigurations;

            if (cfgEntries != null)
                foreach (BinaryTypeConfiguration entry in cfgEntries)
                    AddUserType(entry, resolver);

            var names = _cfg.Types;

            if (names != null)
                foreach (string name in names)
                    AddUserType(new BinaryTypeConfiguration(name), resolver);
        }

        /// <summary>
        /// Gets or sets the backing grid.
        /// </summary>
        public IIgniteInternal Ignite
        {
            get { return _ignite; }
            set
            {
                Debug.Assert(value != null);

                _ignite = value;
            }
        }

        /// <summary>
        /// Gets the compact footer flag.
        /// </summary>
        public bool CompactFooter { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether type registration is disabled.
        /// This may be desirable for static system marshallers where everything is written in unregistered mode.
        /// </summary>
        public bool RegistrationDisabled { get; set; }

        /// <summary>
        /// Marshal object.
        /// </summary>
        /// <param name="val">Value.</param>
        /// <returns>Serialized data as byte array.</returns>
        public byte[] Marshal<T>(T val)
        {
            using (var stream = new BinaryHeapStream(128))
            {
                Marshal(val, stream);

                return stream.GetArrayCopy();
            }
        }

        /// <summary>
        /// Marshals an object into the given stream.
        /// </summary>
        /// <param name="val">Value.</param>
        /// <param name="stream">Output stream.</param>
        private void Marshal<T>(T val, IBinaryStream stream)
        {
            BinaryWriter writer = StartMarshal(stream);

            writer.Write(val);

            FinishMarshal(writer);
        }

        /// <summary>
        /// Start marshal session.
        /// </summary>
        /// <param name="stream">Stream.</param>
        /// <returns>Writer.</returns>
        public BinaryWriter StartMarshal(IBinaryStream stream)
        {
            return new BinaryWriter(this, stream);
        }

        /// <summary>
        /// Finish marshal session: pushes any binary types recorded by the writer to the server.
        /// </summary>
        /// <param name="writer">Writer.</param>
        public void FinishMarshal(BinaryWriter writer)
        {
            var metas = writer.GetBinaryTypes();

            var ignite = Ignite;

            if (ignite != null && metas != null && metas.Count > 0)
            {
                ignite.BinaryProcessor.PutBinaryTypes(metas);

                OnBinaryTypesSent(metas);
            }
        }

        /// <summary>
        /// Unmarshal object.
        /// </summary>
        /// <param name="data">Data array.</param>
        /// <param name="mode">The mode.</param>
        /// <returns>Object.</returns>
        public T Unmarshal<T>(byte[] data, BinaryMode mode = BinaryMode.Deserialize)
        {
            using (var stream = new BinaryHeapStream(data))
            {
                return Unmarshal<T>(stream, mode);
            }
        }

        /// <summary>
        /// Unmarshal object.
        /// </summary>
        /// <param name="stream">Stream over underlying byte array with correct position.</param>
        /// <param name="keepBinary">Whether to keep binary objects in binary form.</param>
        /// <returns>Object.</returns>
        public T Unmarshal<T>(IBinaryStream stream, bool keepBinary)
        {
            return Unmarshal<T>(stream, keepBinary ? BinaryMode.KeepBinary : BinaryMode.Deserialize, null);
        }

        /// <summary>
        /// Unmarshal object.
        /// </summary>
        /// <param name="stream">Stream over underlying byte array with correct position.</param>
        /// <param name="mode">The mode.</param>
        /// <returns>Object.</returns>
        public T Unmarshal<T>(IBinaryStream stream, BinaryMode mode = BinaryMode.Deserialize)
        {
            return Unmarshal<T>(stream, mode, null);
        }

        /// <summary>
        /// Unmarshal object.
        /// </summary>
        /// <param name="stream">Stream over underlying byte array with correct position.</param>
        /// <param name="mode">The mode.</param>
        /// <param name="builder">Builder.</param>
        /// <returns>Object.</returns>
        public T Unmarshal<T>(IBinaryStream stream, BinaryMode mode, BinaryObjectBuilder builder)
        {
            return new BinaryReader(this, stream, mode, builder).Deserialize<T>();
        }

        /// <summary>
        /// Start unmarshal session.
        /// </summary>
        /// <param name="stream">Stream.</param>
        /// <param name="keepBinary">Whether to keep binarizable as binary.</param>
        /// <returns>Reader.</returns>
        public BinaryReader StartUnmarshal(IBinaryStream stream, bool keepBinary)
        {
            return new BinaryReader(this, stream,
                keepBinary ? BinaryMode.KeepBinary : BinaryMode.Deserialize, null);
        }

        /// <summary>
        /// Start unmarshal session.
        /// </summary>
        /// <param name="stream">Stream.</param>
        /// <param name="mode">The mode.</param>
        /// <returns>Reader.</returns>
        public BinaryReader StartUnmarshal(IBinaryStream stream, BinaryMode mode = BinaryMode.Deserialize)
        {
            return new BinaryReader(this, stream, mode, null);
        }

        /// <summary>
        /// Gets metadata for the given type ID.
        /// </summary>
        /// <param name="typeId">Type ID.</param>
        /// <returns>Metadata, or <see cref="BinaryType.Empty"/> when unavailable.</returns>
        public BinaryType GetBinaryType(int typeId)
        {
            if (Ignite != null)
            {
                var meta = Ignite.BinaryProcessor.GetBinaryType(typeId);

                if (meta != null)
                {
                    return meta;
                }
            }

            return BinaryType.Empty;
        }

        /// <summary>
        /// Puts the binary type metadata to Ignite.
        /// </summary>
        /// <param name="desc">Descriptor.</param>
        public void PutBinaryType(IBinaryTypeDescriptor desc)
        {
            Debug.Assert(desc != null);

            GetBinaryTypeHandler(desc);  // ensure that handler exists

            if (Ignite != null)
            {
                var metas = new[] { new BinaryType(desc, this) };

                Ignite.BinaryProcessor.PutBinaryTypes(metas);

                OnBinaryTypesSent(metas);
            }
        }

        /// <summary>
        /// Gets binary type handler for the given type ID.
        /// </summary>
        /// <param name="desc">Type descriptor.</param>
        /// <returns>Binary type handler.</returns>
        public IBinaryTypeHandler GetBinaryTypeHandler(IBinaryTypeDescriptor desc)
        {
            BinaryTypeHolder holder;

            if (!_metas.TryGetValue(desc.TypeId, out holder))
            {
                // Double-checked: re-read under the lock, then swap in a fresh copy of the map.
                lock (this)
                {
                    if (!_metas.TryGetValue(desc.TypeId, out holder))
                    {
                        IDictionary<int, BinaryTypeHolder> snapshot =
                            new Dictionary<int, BinaryTypeHolder>(_metas);

                        holder = new BinaryTypeHolder(desc.TypeId, desc.TypeName,
                            desc.AffinityKeyFieldName, desc.IsEnum, this);

                        snapshot[desc.TypeId] = holder;

                        _metas = snapshot;
                    }
                }
            }

            if (holder != null)
            {
                ICollection<int> ids = holder.GetFieldIds();

                bool newType = ids.Count == 0 && !holder.Saved();

                return new BinaryTypeHashsetHandler(ids, newType);
            }

            return null;
        }

        /// <summary>
        /// Callback invoked when metadata has been sent to the server and acknowledged by it.
        /// </summary>
        /// <param name="newMetas">Binary types.</param>
        private void OnBinaryTypesSent(IEnumerable<BinaryType> newMetas)
        {
            foreach (var meta in newMetas)
            {
                _metas[meta.TypeId].Merge(meta);
            }
        }

        /// <summary>
        /// Gets descriptor for type.
        /// </summary>
        /// <param name="type">Type.</param>
        /// <returns>Descriptor.</returns>
        public IBinaryTypeDescriptor GetDescriptor(Type type)
        {
            BinaryFullTypeDescriptor desc;

            if (!_typeToDesc.TryGetValue(type, out desc) || !desc.IsRegistered)
            {
                desc = RegisterType(type, desc);
            }

            return desc;
        }

        /// <summary>
        /// Gets descriptor for type name.
        /// </summary>
        /// <param name="typeName">Type name.</param>
        /// <returns>Descriptor.</returns>
        public IBinaryTypeDescriptor GetDescriptor(string typeName)
        {
            BinaryFullTypeDescriptor desc;

            if (_typeNameToDesc.TryGetValue(typeName, out desc))
            {
                return desc;
            }

            var typeId = GetTypeId(typeName, _cfg.IdMapper);

            return GetDescriptor(true, typeId, typeName: typeName);
        }

        /// <summary>
        /// Gets descriptor for a type id.
        /// </summary>
        /// <param name="userType">User type flag.</param>
        /// <param name="typeId">Type id.</param>
        /// <param name="requiresType">If set to true, resulting descriptor must have Type property populated.
        /// <para />
        /// When working in binary mode, we don't need Type. And there is no Type at all in some cases.
        /// So we should not attempt to call BinaryProcessor right away.
        /// Only when we really deserialize the value, requiresType is set to true
        /// and we attempt to resolve the type by all means.</param>
        /// <param name="typeName">Known type name.</param>
        /// <param name="knownType">Optional known type.</param>
        /// <returns>Descriptor.</returns>
        public IBinaryTypeDescriptor GetDescriptor(bool userType, int typeId, bool requiresType = false,
            string typeName = null, Type knownType = null)
        {
            BinaryFullTypeDescriptor desc;

            var typeKey = BinaryUtils.TypeKey(userType, typeId);

            if (_idToDesc.TryGetValue(typeKey, out desc) && (!requiresType || desc.Type != null))
                return desc;

            if (!userType)
                return null;

            if (requiresType && _ignite != null)
            {
                // Check marshaller context for dynamically registered type.
                var type = knownType;

                if (type == null && _ignite != null)
                {
                    typeName = typeName ?? _ignite.BinaryProcessor.GetTypeName(typeId);

                    if (typeName != null)
                    {
                        type = ResolveType(typeName);

                        if (type == null)
                        {
                            // Type is registered, but assembly is not present.
                            return new BinarySurrogateTypeDescriptor(_cfg, typeId, typeName);
                        }
                    }
                }

                if (type != null)
                {
                    return AddUserType(type, typeId, GetTypeName(type), true, desc);
                }
            }

            var meta = GetBinaryType(typeId);

            if (meta != BinaryType.Empty)
            {
                var typeCfg = new BinaryTypeConfiguration(meta.TypeName)
                {
                    IsEnum = meta.IsEnum,
                    AffinityKeyFieldName = meta.AffinityKeyFieldName
                };

                return AddUserType(typeCfg, new TypeResolver());
            }

            return new BinarySurrogateTypeDescriptor(_cfg, typeId, typeName);
        }

        /// <summary>
        /// Registers the type.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <param name="desc">Existing descriptor.</param>
        private BinaryFullTypeDescriptor RegisterType(Type type, BinaryFullTypeDescriptor desc)
        {
            Debug.Assert(type != null);

            var typeName = GetTypeName(type);
            var typeId = GetTypeId(typeName, _cfg.IdMapper);

            var registered = _ignite != null && _ignite.BinaryProcessor.RegisterType(typeId, typeName);

            return AddUserType(type, typeId, typeName, registered, desc);
        }

        /// <summary>
        /// Add user type.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <param name="typeId">The type id.</param>
        /// <param name="typeName">Name of the type.</param>
        /// <param name="registered">Registered flag.</param>
        /// <param name="desc">Existing descriptor.</param>
        /// <returns>Descriptor.</returns>
        private BinaryFullTypeDescriptor AddUserType(Type type, int typeId, string typeName,
            bool registered, BinaryFullTypeDescriptor desc)
        {
            Debug.Assert(type != null);
            Debug.Assert(typeName != null);

            var ser = GetSerializer(_cfg, null, type, typeId, null, null, _log);

            desc = desc == null
                ? new BinaryFullTypeDescriptor(type, typeId, typeName, true, _cfg.NameMapper,
                    _cfg.IdMapper, ser, false, AffinityKeyMappedAttribute.GetFieldNameFromAttribute(type),
                    BinaryUtils.IsIgniteEnum(type), registered)
                : new BinaryFullTypeDescriptor(desc, type, ser, registered);

            if (RegistrationDisabled)
            {
                return desc;
            }

            var typeKey = BinaryUtils.TypeKey(true, typeId);

            var existing = _idToDesc.GetOrAdd(typeKey, x => desc);

            if (existing.Type != null && existing.TypeName != typeName)
            {
                ThrowConflictingTypeError(type, existing.Type, typeId);
            }

            existing = _typeNameToDesc.GetOrAdd(typeName, x => desc);

            if (existing.Type != null && existing.TypeName != typeName)
            {
                ThrowConflictingTypeError(type, existing.Type, typeId);
            }

            _typeToDesc.Set(type, desc);

            return desc;
        }

        /// <summary>
        /// Throws the conflicting type error.
        /// </summary>
        private static void ThrowConflictingTypeError(object type1, object type2, int typeId)
        {
            throw new BinaryObjectException(string.Format("Conflicting type IDs [type1='{0}', " +
                                                          "type2='{1}', typeId={2}]", type1, type2, typeId));
        }

        /// <summary>
        /// Add user type.
        /// </summary>
        /// <param name="typeCfg">Type configuration.</param>
        /// <param name="typeResolver">The type resolver.</param>
        /// <exception cref="BinaryObjectException"></exception>
        private BinaryFullTypeDescriptor AddUserType(BinaryTypeConfiguration typeCfg, TypeResolver typeResolver)
        {
            // Per-type settings fall back to the global configuration.
            IBinaryNameMapper nameMapper = typeCfg.NameMapper ?? _cfg.NameMapper ?? GetDefaultNameMapper();
            IBinaryIdMapper idMapper = typeCfg.IdMapper ?? _cfg.IdMapper;
            bool keepDeserialized = typeCfg.KeepDeserialized ?? _cfg.KeepDeserialized;

            // Try resolving type.
            Type type = typeResolver.ResolveType(typeCfg.TypeName);

            if (type != null)
            {
                ValidateUserType(type);

                if (typeCfg.IsEnum != BinaryUtils.IsIgniteEnum(type))
                {
                    throw new BinaryObjectException(
                        string.Format(
                            "Invalid IsEnum flag in binary type configuration. " +
                            "Configuration value: IsEnum={0}, actual type: IsEnum={1}, type={2}",
                            typeCfg.IsEnum, type.IsEnum, type));
                }

                // Type is found.
                var typeName = GetTypeName(type, nameMapper);
                int typeId = GetTypeId(typeName, idMapper);
                var affKeyFld = typeCfg.AffinityKeyFieldName
                                ?? AffinityKeyMappedAttribute.GetFieldNameFromAttribute(type);
                var serializer = GetSerializer(_cfg, typeCfg, type, typeId, nameMapper, idMapper, _log);

                return AddType(type, typeId, typeName, true, keepDeserialized, nameMapper, idMapper,
                    serializer, affKeyFld, BinaryUtils.IsIgniteEnum(type));
            }
            else
            {
                // Type is not found.
                string typeName = GetTypeName(typeCfg.TypeName, nameMapper);
                int typeId = GetTypeId(typeName, idMapper);

                return AddType(null, typeId, typeName, true, keepDeserialized, nameMapper, idMapper,
                    null, typeCfg.AffinityKeyFieldName, typeCfg.IsEnum);
            }
        }

        /// <summary>
        /// Gets the serializer for the given type, honoring per-type and global overrides.
        /// </summary>
        private static IBinarySerializerInternal GetSerializer(BinaryConfiguration cfg,
            BinaryTypeConfiguration typeCfg, Type type, int typeId, IBinaryNameMapper nameMapper,
            IBinaryIdMapper idMapper, ILogger log)
        {
            var serializer = (typeCfg != null ? typeCfg.Serializer : null)
                             ?? (cfg != null ? cfg.Serializer : null);

            if (serializer == null)
            {
                if (type.GetInterfaces().Contains(typeof(IBinarizable)))
                    return BinarizableSerializer.Instance;

                if (type.GetInterfaces().Contains(typeof(ISerializable)))
                {
                    LogSerializableWarning(type, log);

                    return new SerializableSerializer(type);
                }

                serializer = new BinaryReflectiveSerializer();
            }

            var refSerializer = serializer as BinaryReflectiveSerializer;

            return refSerializer != null
                ? refSerializer.Register(type, typeId, nameMapper, idMapper)
                : new UserSerializerProxy(serializer);
        }

        /// <summary>
        /// Add type.
        /// </summary>
        /// <param name="type">Type.</param>
        /// <param name="typeId">Type ID.</param>
        /// <param name="typeName">Type name.</param>
        /// <param name="userType">User type flag.</param>
        /// <param name="keepDeserialized">Whether to cache deserialized value in IBinaryObject</param>
        /// <param name="nameMapper">Name mapper.</param>
        /// <param name="idMapper">ID mapper.</param>
        /// <param name="serializer">Serializer.</param>
        /// <param name="affKeyFieldName">Affinity key field name.</param>
        /// <param name="isEnum">Enum flag.</param>
        private BinaryFullTypeDescriptor AddType(Type type, int typeId, string typeName, bool userType,
            bool keepDeserialized, IBinaryNameMapper nameMapper, IBinaryIdMapper idMapper,
            IBinarySerializerInternal serializer, string affKeyFieldName, bool isEnum)
        {
            Debug.Assert(!string.IsNullOrEmpty(typeName));

            long typeKey = BinaryUtils.TypeKey(userType, typeId);

            BinaryFullTypeDescriptor conflictingType;

            if (_idToDesc.TryGetValue(typeKey, out conflictingType) && conflictingType.TypeName != typeName)
            {
                ThrowConflictingTypeError(typeName, conflictingType.TypeName, typeId);
            }

            var descriptor = new BinaryFullTypeDescriptor(type, typeId, typeName, userType, nameMapper,
                idMapper, serializer, keepDeserialized, affKeyFieldName, isEnum);

            if (RegistrationDisabled)
            {
                return descriptor;
            }

            if (type != null)
            {
                _typeToDesc.Set(type, descriptor);
            }

            if (userType)
            {
                _typeNameToDesc.Set(typeName, descriptor);
            }

            _idToDesc.Set(typeKey, descriptor);

            return descriptor;
        }

        /// <summary>
        /// Adds a predefined system type.
        /// </summary>
        private void AddSystemType<T>(int typeId, Func<BinaryReader, T> ctor, string affKeyFldName = null,
            IBinarySerializerInternal serializer = null)
            where T : IBinaryWriteAware
        {
            var type = typeof(T);

            serializer = serializer ?? new BinarySystemTypeSerializer<T>(ctor);

            // System types always use simple name mapper.
            var typeName = type.Name;

            if (typeId == 0)
            {
                typeId = BinaryUtils.GetStringHashCodeLowerCase(typeName);
            }

            AddType(type, typeId, typeName, false, false, null, null, serializer, affKeyFldName, false);
        }

        /// <summary>
        /// Adds predefined system types.
        /// </summary>
        private void AddSystemTypes()
        {
            AddSystemType(BinaryTypeId.NativeJobHolder, r => new ComputeJobHolder(r));
            AddSystemType(BinaryTypeId.ComputeJobWrapper, r => new ComputeJobWrapper(r));
            AddSystemType(BinaryTypeId.ComputeOutFuncJob, r => new ComputeOutFuncJob(r));
            AddSystemType(BinaryTypeId.ComputeOutFuncWrapper, r => new ComputeOutFuncWrapper(r));
            AddSystemType(BinaryTypeId.ComputeFuncWrapper, r => new ComputeFuncWrapper(r));
            AddSystemType(BinaryTypeId.ComputeFuncJob, r => new ComputeFuncJob(r));
            AddSystemType(BinaryTypeId.ComputeActionJob, r => new ComputeActionJob(r));
            AddSystemType(BinaryTypeId.ContinuousQueryRemoteFilterHolder, r => new ContinuousQueryFilterHolder(r));
            AddSystemType(BinaryTypeId.CacheEntryProcessorHolder, r => new CacheEntryProcessorHolder(r));
            AddSystemType(BinaryTypeId.CacheEntryPredicateHolder, r => new CacheEntryFilterHolder(r));
            AddSystemType(BinaryTypeId.MessageListenerHolder, r => new MessageListenerHolder(r));
            AddSystemType(BinaryTypeId.StreamReceiverHolder, r => new StreamReceiverHolder(r));
            AddSystemType(0, r => new AffinityKey(r), "affKey");
            AddSystemType(BinaryTypeId.PlatformJavaObjectFactoryProxy, r => new PlatformJavaObjectFactoryProxy());
            AddSystemType(0, r => new ObjectInfoHolder(r));
            AddSystemType(BinaryTypeId.IgniteUuid, r => new IgniteGuid(r));
            AddSystemType(0, r => new GetAssemblyFunc());
            AddSystemType(0, r => new AssemblyRequest(r));
            AddSystemType(0, r => new AssemblyRequestResult(r));
            AddSystemType<PeerLoadingObjectHolder>(0, null, serializer: new PeerLoadingObjectHolderSerializer());
        }

        /// <summary>
        /// Logs the warning about ISerializable pitfalls.
        /// </summary>
        private static void LogSerializableWarning(Type type, ILogger log)
        {
            if (log == null)
                return;

            log.GetLogger(typeof(Marshaller).Name)
                .Warn("Type '{0}' implements '{1}'. It will be written in Ignite binary format, however, " +
                      "the following limitations apply: " +
                      "DateTime fields would not work in SQL; " +
                      "sbyte, ushort, uint, ulong fields would not work in DML.",
                      type, typeof(ISerializable));
        }

        /// <summary>
        /// Validates binary type.
        /// </summary>
        // ReSharper disable once UnusedParameter.Local
        private static void ValidateUserType(Type type)
        {
            Debug.Assert(type != null);

            if (type.IsGenericTypeDefinition)
            {
                throw new BinaryObjectException(
                    "Open generic types (Type.IsGenericTypeDefinition == true) are not allowed " +
                    "in BinaryConfiguration: " + type.AssemblyQualifiedName);
            }

            if (type.IsAbstract)
            {
                throw new BinaryObjectException(
                    "Abstract types and interfaces are not allowed in BinaryConfiguration: " +
                    type.AssemblyQualifiedName);
            }
        }

        /// <summary>
        /// Resolves the type (opposite of <see cref="GetTypeName(Type, IBinaryNameMapper)"/>).
        /// </summary>
        public Type ResolveType(string typeName)
        {
            return new TypeResolver().ResolveType(typeName,
                nameMapper: _cfg.NameMapper ?? GetDefaultNameMapper());
        }

        /// <summary>
        /// Gets the name of the type according to current name mapper.
        /// See also <see cref="ResolveType"/>.
        /// </summary>
        public string GetTypeName(Type type, IBinaryNameMapper mapper = null)
        {
            return GetTypeName(type.AssemblyQualifiedName, mapper);
        }

        /// <summary>
        /// Gets the name of the type.
        /// </summary>
        private string GetTypeName(string fullTypeName, IBinaryNameMapper mapper = null)
        {
            mapper = mapper ?? _cfg.NameMapper ?? GetDefaultNameMapper();

            var typeName = mapper.GetTypeName(fullTypeName);

            if (typeName == null)
            {
                throw new BinaryObjectException("IBinaryNameMapper returned null name for type [typeName=" +
                                                fullTypeName + ", mapper=" + mapper + "]");
            }

            return typeName;
        }

        /// <summary>
        /// Resolve type ID.
        /// </summary>
        /// <param name="typeName">Type name.</param>
        /// <param name="idMapper">ID mapper.</param>
        private static int GetTypeId(string typeName, IBinaryIdMapper idMapper)
        {
            Debug.Assert(typeName != null);

            int id = 0;

            if (idMapper != null)
            {
                try
                {
                    id = idMapper.GetTypeId(typeName);
                }
                catch (Exception e)
                {
                    throw new BinaryObjectException("Failed to resolve type ID due to ID mapper exception " +
                                                    "[typeName=" + typeName + ", idMapper=" + idMapper + ']', e);
                }
            }

            if (id == 0)
            {
                id = BinaryUtils.GetStringHashCodeLowerCase(typeName);
            }

            return id;
        }

        /// <summary>
        /// Gets the default name mapper.
        /// </summary>
        private static IBinaryNameMapper GetDefaultNameMapper()
        {
            return BinaryBasicNameMapper.FullNameInstance;
        }
    }
}
// TelnetSocket.cs // Copyright (c) 2016, 2017, 2019 Kenneth Gober // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. using System; using System.Collections.Generic; using System.Net.Sockets; using System.Text; using System.Threading; namespace Emulator { // The TelnetSocket class implements the 'bottom half' of the Telnet protocol. // It accepts tokens from the top half to be converted to bytes and sent to the // network (doubling IACs as needed), and converts bytes from the network into // tokens (undoubling IACs as needed) to be consumed by the top half. // Future Improvements / To Do // would copying bytes during tokenization simplify logic? // would allowing tokenizer to read/block on incomplete tokens simplify logic? 
// move send-buffer flush out of read loop (enables reads to block)

// TelnetSocket owns the Socket plus a background receive thread.  Outgoing
// Tokens are converted to bytes (doubling IAC=255 as RFC 854 requires) and
// buffered until Flush; incoming bytes are tokenized (undoubling IAC) and
// queued for the top half, which is notified via the Receive event and
// consumes tokens with Peek()/Read().
//
// NOTE(review): 'Debug' below is presumably the emulator's own logging helper,
// since System.Diagnostics is not among this file's usings -- confirm.
class TelnetSocket
{
    // event raised to indicate a received token is ready to be read by top half
    public delegate void ReceiveEventHandler(Object sender, EventArgs e);
    public event ReceiveEventHandler Receive;

    private Socket mSocket;                  // underlying network connection
    private Thread mThread;                  // background receive thread (ThreadProc)
    private volatile Boolean mStopFlag;      // set by Close() to stop ThreadProc
    private Byte[] mSendBuf = new Byte[512]; // outgoing bytes accumulated until Flush
    private Int32 mSendPtr;                  // number of valid bytes in mSendBuf
    private Byte[] mRecvBuf;                 // leftover bytes of an incomplete token, else null
    private Int32 mRecvPtr;                  // read offset into mRecvBuf
    private Queue<Token> mTokenQueue = new Queue<Token>(); // completed tokens awaiting Read()

    // The caller supplies a connected socket; the receive thread starts immediately.
    public TelnetSocket(Socket socket)
    {
        mSocket = socket;
        mThread = new Thread(ThreadProc);
        mThread.Start();
    }

    // True while the peer is still connected.  A socket that polls readable
    // with zero bytes available indicates an orderly shutdown by the peer.
    public Boolean Connected
    {
        get
        {
            if (!mSocket.Connected) return false;
            if ((mSocket.Poll(0, SelectMode.SelectRead)) && (mSocket.Available == 0)) return false;
            return true;
        }
    }

    // Request shutdown; ThreadProc notices the flag and closes the socket.
    public void Close()
    {
        mStopFlag = true;
    }

    // Return the next token without consuming it.  Token.None means "queue
    // empty but still connected"; Token.Closed means disconnected and drained.
    public Token Peek()
    {
        lock (mTokenQueue)
        {
            if (mTokenQueue.Count == 0) return (Connected) ? Token.None : Token.Closed;
            return mTokenQueue.Peek();
        }
    }

    // Consume and return the next token (same sentinel rules as Peek).
    public Token Read()
    {
        lock (mTokenQueue)
        {
            if (mTokenQueue.Count == 0) return (Connected) ? Token.None : Token.Closed;
            return mTokenQueue.Dequeue();
        }
    }

    // Convert an outbound token to protocol bytes.  Data and SB payloads go
    // through Send_Data so embedded 255 bytes get doubled.
    public void Write(Token token)
    {
        switch (token.Type)
        {
            case TokenType.Data: Send_Data(token.mBuf, token.mPtr, token.mLen); break;
            case TokenType.Command: Send_IAC(token[0]); break;
            case TokenType.DM: Send_IAC(IAC.DM); break;
            case TokenType.NOP: Send_IAC(token[0]); break;
            case TokenType.DO: Send_IAC(IAC.DO, token[0]); break;
            case TokenType.DONT: Send_IAC(IAC.DONT, token[0]); break;
            case TokenType.WILL: Send_IAC(IAC.WILL, token[0]); break;
            case TokenType.WONT: Send_IAC(IAC.WONT, token[0]); break;
            case TokenType.SB:
                // first payload byte is the option, the rest is the subnegotiation body
                Send_IAC(IAC.SB, token.mBuf[token.mPtr]);
                Send_Data(token.mBuf, token.mPtr + 1, token.mLen - 1);
                Send_IAC(IAC.SE);
                break;
            default: throw new ArgumentException();
        }
    }

    public void Flush()
    {
        Flush(SocketFlags.None);
    }

    // Push any buffered outgoing bytes to the network.  Send failures are
    // logged and swallowed; the receive thread will observe the disconnect.
    public void Flush(SocketFlags flags)
    {
        lock (mSendBuf)
        {
            if (mSendPtr != 0)
            {
                try
                {
                    mSocket.Send(mSendBuf, 0, mSendPtr, flags);
                    mSendPtr = 0;
                }
                catch (Exception ex)
                {
                    String buf = "Flush() Exception:\r\n";
                    while (ex != null)
                    {
                        buf = String.Concat(buf, "\r\n", ex.Message, " [", ex.Source, "]\r\n", ex.StackTrace);
                        ex = ex.InnerException;
                    }
                    Debug.WriteLine(buf);
                }
            }
        }
    }

    // send data bytes, doubling each IAC (255) byte.
    // The doubling works by emitting the run up to AND including an IAC, then
    // restarting the next run AT the IAC so it is emitted a second time.
    private void Send_Data(Byte[] buffer, Int32 offset, Int32 count)
    {
        Debug.WriteLine("Send data: {0} ({1:D0} bytes)", Encoding.ASCII.GetString(buffer, offset, count), count);
        Int32 n = offset + count;
        Int32 p = offset;
        Int32 q = p;
        while (p < n)
        {
            while ((q < n) && (buffer[q] != (Byte)IAC.IAC)) q++;
            if (q == n)
            {
                _Send(buffer, p, q - p);
                break;
            }
            _Send(buffer, p, q - p + 1);
            p = q;
            q = p + 1;
        }
    }

    private void Send_IAC(IAC command)
    {
        Send_IAC((Byte)command);
    }

    // Send a 2-byte <IAC cmd> sequence.  DM (Data Mark) is special: it is
    // flushed immediately and sent as TCP urgent (out-of-band) data for the
    // Telnet Synch mechanism.
    private void Send_IAC(Byte command)
    {
        Debug.WriteLine("Send IAC {0} (255 {1:D0})", (IAC)command, command);
        Byte[] buf = new Byte[2];
        buf[0] = (Byte)IAC.IAC;
        buf[1] = command;
        if (command != (Byte)IAC.DM)
        {
            _Send(buf, 0, 2);
            return;
        }
        lock (mSendBuf)
        {
            Int32 p = mSendPtr;
            // make sure DM lands in the buffer about to be flushed OOB
            if ((p + 2) >= mSendBuf.Length) Flush(SocketFlags.None);
            _Send(buf, 0, 2);
            Flush(SocketFlags.OutOfBand);
        }
    }

    // Send a 3-byte <IAC cmd opt> option-negotiation sequence.
    private void Send_IAC(IAC command, Byte option)
    {
        Debug.WriteLine("Send IAC {0} {1} (255 {2:D0} {3:D0})", command, (Telnet.Option)option, (Byte)command, option);
        Byte[] buf = new Byte[3];
        buf[0] = (Byte)IAC.IAC;
        buf[1] = (Byte)command;
        buf[2] = option;
        _Send(buf, 0, 3);
    }

    // Append bytes to the send buffer, flushing whenever the buffer fills.
    private void _Send(Byte[] buffer, Int32 offset, Int32 count)
    {
        lock (mSendBuf)
        {
            Int32 p = mSendPtr;
            while ((p + count) >= mSendBuf.Length)
            {
                // fill the buffer completely, then push it out
                while (p < mSendBuf.Length) mSendBuf[p++] = buffer[offset++];
                count -= (p - mSendPtr);
                try
                {
                    mSocket.Send(mSendBuf, 0, p, SocketFlags.None);
                    mSendPtr = p = 0;
                }
                catch (Exception ex)
                {
                    String buf = "_Send() Exception:\r\n";
                    while (ex != null)
                    {
                        buf = String.Concat(buf, "\r\n", ex.Message, " [", ex.Source, "]\r\n", ex.StackTrace);
                        ex = ex.InnerException;
                    }
                    Debug.WriteLine(buf);
                }
            }
            while (count-- > 0) mSendBuf[p++] = buffer[offset++];
            mSendPtr = p;
        }
    }

    // socket receive thread
    // Polls the socket, tokenizes whatever is available, raises Receive when
    // at least one complete token was queued, and flushes pending sends.
    private void ThreadProc()
    {
        // deliver urgent (Synch/DM) data inline with the normal stream
        mSocket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.OutOfBandInline, true);
        mSocket.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.Expedited, true);
        while (!mStopFlag)
        {
            // receive and process as much data as is available
            Boolean f = false;
            while ((mSocket.Connected) && (mSocket.Available != 0))
            {
                Int32 n = mSocket.Available;
                if (n > 512) n = 512; // if there's a lot of data, process it in 512-byte chunks
                Byte[] buf = new Byte[n]; // always create a new buffer for Tokenize
                SocketError e;
                n = mSocket.Receive(buf, 0, n, SocketFlags.None, out e);
                if (e != SocketError.Success) throw new ApplicationException(String.Concat("Error reading socket: ", e.ToString()));
                if (n != buf.Length) throw new ApplicationException("Short read from socket");
                f |= Tokenize(buf); // Tokenize returns true if a complete token was queued
            }
            if ((f) && (Receive != null)) Receive(this, new EventArgs());
            if (!Connected) break;
            Flush(); // don't sleep without first emptying application send buffer
            Thread.Sleep(1);
        }
        Debug.WriteLine("Disconnected");
        Flush();
        mSocket.Close();
        // enqueue the Closed sentinel so readers observe end-of-stream
        lock (mTokenQueue) mTokenQueue.Enqueue(Token.Closed);
        if (Receive != null) Receive(this, new EventArgs());
    }

    // Tokenize - split a buffer into tokens, return true if a token was completed
    //
    // buffers given to Tokenize should be considered to be 'owned' by Tokenize.
    // Tokenize does not copy, it *references* byte ranges within buffer.
    // this means: buffers should not be modified/reused/refilled
    //
    // IAC commands are individual tokens
    // note: <IAC SB opt ... IAC SE> is treated as one token
    // consecutive data bytes may be merged into a single token
    // received data may be broken into multiple tokens (eg at <CR> or <IAC>)
    private Boolean Tokenize(Byte[] buffer)
    {
        Int32 p = 0;
        Int32 q = 0;
        Boolean f = false;

        // if a previous buffer is pending, it must contain an incomplete token
        // check to see if the token can now be completed
        if (mRecvBuf != null)
        {
            p = mRecvPtr;
            Token t = GetToken(ref p, buffer, ref q);
            if (t.Type == TokenType.Invalid)
            {
                // token still not complete, combine previous and new buffer
                Byte[] buf = new Byte[mRecvBuf.Length - mRecvPtr + buffer.Length];
                q = 0;
                p = mRecvPtr;
                while (p < mRecvBuf.Length) buf[q++] = mRecvBuf[p++];
                p = 0;
                while (p < buffer.Length) buf[q++] = buffer[p++];
                mRecvBuf = buf;
                mRecvPtr = 0;
                return false;
            }
            mRecvBuf = null;
            mRecvPtr = 0;
            lock (mTokenQueue) mTokenQueue.Enqueue(t);
            f = true;
        }

        // process remaining tokens in buffer
        while (true)
        {
            Int32 oq = q;
            Token t = GetToken(ref p, buffer, ref q);
            if (t.Type == TokenType.Invalid)
            {
                // incomplete token: remember the buffer and resume here next time
                mRecvBuf = buffer;
                mRecvPtr = oq;
                return f;
            }
            if (t.Type == TokenType.None) return f;
            lock (mTokenQueue) mTokenQueue.Enqueue(t);
            f = true;
        }
    }

    // State machine extracting one token.  Bytes are consumed first from the
    // leftover mRecvBuf (index p) and then from buffer (index q); tokens that
    // straddle both are recopied into a fresh array.
    //
    // NOTE(review): the Command/DO/DONT/WILL/WONT returns below index 'buffer'
    // at q-1 even when the final byte could have been consumed from mRecvBuf
    // (the DM and NOP cases do distinguish q == oq) -- verify split-buffer
    // handling for those token types.
    private Token GetToken(ref Int32 p, Byte[] buffer, ref Int32 q)
    {
        Int32 op = p; // original value of index into mRecvBuf
        Int32 oq = q; // original value of index into buffer
        Int32 state = 0;
        Int32 n;
        while ((n = PeekTokenByte(p, buffer, q)) != -1)
        {
            IAC b = (IAC)n;
            switch (state)
            {
                case 0: // start
                    if (b == IAC.IAC) state = 3;
                    else if (n == '\r') state = 2;
                    else state = 1;
                    ReadTokenByte(ref p, buffer, ref q);
                    break;
                case 1: // data
                    if ((b == IAC.IAC) || (n == '\r')) return new Token(TokenType.Data, buffer, oq, q - oq);
                    ReadTokenByte(ref p, buffer, ref q);
                    break;
                case 2: // CR (telnet newline may be CR LF or CR NUL)
                    if ((n == '\n') || (n == 0))
                    {
                        ReadTokenByte(ref p, buffer, ref q);
                        if ((mRecvBuf != null) && (p == mRecvBuf.Length) && (p != op) && (q != oq))
                        {
                            // recopy if this token was split across 2 buffers
                            n = (p - op) + (q - oq);
                            Byte[] buf = new Byte[n];
                            n = 0;
                            for (Int32 i = op; i < p; i++) buf[n++] = mRecvBuf[i];
                            for (Int32 i = oq; i < q; i++) buf[n++] = buffer[i];
                            return new Token(TokenType.Data, buf, 0, n);
                        }
                        return new Token(TokenType.Data, buffer, oq, q - oq);
                    }
                    // bare CR followed by something else: emit the CR alone
                    if (q == oq) return new Token(TokenType.Data, mRecvBuf, op, p - op);
                    return new Token(TokenType.Data, buffer, oq, q - oq);
                case 3: // IAC
                    if (b == IAC.IAC)
                    {
                        // doubled IAC = literal data byte 255
                        Token t = (q == oq) ? new Token(TokenType.Data, mRecvBuf, op, 1) : new Token(TokenType.Data, buffer, oq, 1);
                        ReadTokenByte(ref p, buffer, ref q);
                        return t;
                    }
                    if ((b == IAC.GA) || (b == IAC.BRK) || (b == IAC.IP) || (b == IAC.AO) || (b == IAC.AYT) || (b == IAC.EC) || (b == IAC.EL) || (b == IAC.EOR) || (b == IAC.EOF) || (b == IAC.SUSP) || (b == IAC.ABORT))
                    {
                        ReadTokenByte(ref p, buffer, ref q);
                        return new Token(TokenType.Command, buffer, q - 1, 1);
                    }
                    if (b == IAC.DM)
                    {
                        ReadTokenByte(ref p, buffer, ref q);
                        if (q == oq) return new Token(TokenType.DM, mRecvBuf, p - 1, 1);
                        return new Token(TokenType.DM, buffer, q - 1, 1);
                    }
                    ReadTokenByte(ref p, buffer, ref q);
                    if (b == IAC.DO) state = 4;
                    else if (b == IAC.DONT) state = 5;
                    else if (b == IAC.WILL) state = 6;
                    else if (b == IAC.WONT) state = 7;
                    else if (b == IAC.SB) state = 8;
                    // unrecognized command: surface it as a NOP token
                    else if (q == oq) return new Token(TokenType.NOP, mRecvBuf, p - 1, 1);
                    else return new Token(TokenType.NOP, buffer, q - 1, 1);
                    break;
                case 4: // IAC DO
                    ReadTokenByte(ref p, buffer, ref q);
                    return new Token(TokenType.DO, buffer, q - 1, 1);
                case 5: // IAC DONT
                    ReadTokenByte(ref p, buffer, ref q);
                    return new Token(TokenType.DONT, buffer, q - 1, 1);
                case 6: // IAC WILL
                    ReadTokenByte(ref p, buffer, ref q);
                    return new Token(TokenType.WILL, buffer, q - 1, 1);
                case 7: // IAC WONT
                    ReadTokenByte(ref p, buffer, ref q);
                    return new Token(TokenType.WONT, buffer, q - 1, 1);
                case 8: // IAC SB (next byte is the option)
                    ReadTokenByte(ref p, buffer, ref q);
                    state = 9;
                    break;
                case 9: // IAC SB OPT ... (payload, no doubled IAC seen yet)
                    if (b == IAC.IAC) state = 10;
                    ReadTokenByte(ref p, buffer, ref q);
                    break;
                case 10: // IAC SB OPT ... IAC
                    ReadTokenByte(ref p, buffer, ref q);
                    if (b == IAC.SE)
                    {
                        if ((mRecvBuf != null) && (p == mRecvBuf.Length) && (p != op) && (q != oq))
                        {
                            // recopy if this token was split across 2 buffers
                            n = (p - op) + (q - oq);
                            Byte[] buf = new Byte[n];
                            n = 0;
                            for (Int32 i = op; i < p; i++) buf[n++] = mRecvBuf[i];
                            for (Int32 i = oq; i < q; i++) buf[n++] = buffer[i];
                            // offsets 2 / -4 strip <IAC SB> and <IAC SE>
                            return new Token(TokenType.SB, buf, 2, n - 4);
                        }
                        return new Token(TokenType.SB, buffer, oq + 2, q - oq - 4);
                    }
                    if (b == IAC.IAC) state = 11;
                    else state = 9;
                    break;
                case 11: // IAC SB OPT ... IAC IAC ... (payload contains doubled IACs)
                    if (b == IAC.IAC) state = 12;
                    ReadTokenByte(ref p, buffer, ref q);
                    break;
                case 12: // IAC SB OPT ... IAC IAC ... IAC
                    ReadTokenByte(ref p, buffer, ref q);
                    if (b == IAC.SE)
                    {
                        if ((mRecvBuf != null) && (p == mRecvBuf.Length) && (p != op) && (q != oq))
                        {
                            // recopy if this token was split across 2 buffers (undoubling IACs)
                            n = (p - op) + (q - oq);
                            Byte[] buf = new Byte[n];
                            n = 0;
                            for (Int32 i = op; i < p; i++) buf[n++] = mRecvBuf[i];
                            for (Int32 i = oq; i < q; i++) buf[n++] = buffer[i];
                            // NOTE(review): buf[i + 1] assumes a well-formed <... IAC SE>
                            // tail so an IAC is never the final byte -- confirm.
                            for (Int32 i = 0; i < n; i++)
                            {
                                if (buf[i] != (Byte)IAC.IAC) continue;
                                if (buf[i] == buf[i + 1])
                                {
                                    n--;
                                    for (Int32 j = i; j < n; j++) buf[j] = buf[j + 1];
                                }
                            }
                            return new Token(TokenType.SB, buf, 2, n - 4);
                        }
                        // undouble embedded IACs
                        n = q;
                        for (Int32 i = oq; i < n; i++)
                        {
                            if (buffer[i] != (Byte)IAC.IAC) continue;
                            if (buffer[i] == buffer[i + 1])
                            {
                                n--;
                                for (Int32 j = i; j < n; j++) buffer[j] = buffer[j + 1];
                            }
                        }
                        return new Token(TokenType.SB, buffer, oq + 2, n - oq - 4);
                    }
                    state = 11;
                    break;
            }
        }
        // input exhausted: nothing (None), a complete data run (Data), or a
        // token cut off mid-sequence (Invalid -- caller retries with more data)
        if (state == 0) return new Token(TokenType.None, null, 0, 0);
        if (state == 1) return new Token(TokenType.Data, buffer, oq, q - oq);
        return new Token(TokenType.Invalid, null, 0, 0);
    }

    // Next unconsumed byte (leftover mRecvBuf first, then buffer), -1 when exhausted.
    private Int32 PeekTokenByte(Int32 p, Byte[] buffer, Int32 q)
    {
        if ((mRecvBuf != null) && (p < mRecvBuf.Length)) return mRecvBuf[p];
        if (q < buffer.Length) return buffer[q];
        return -1;
    }

    // Consume and return the next byte, advancing whichever index applies.
    private Int32 ReadTokenByte(ref Int32 p, Byte[] buffer, ref Int32 q)
    {
        if ((mRecvBuf != null) && (p < mRecvBuf.Length)) return mRecvBuf[p++];
        if (q < buffer.Length) return buffer[q++];
        return -1;
    }
}

// Telnet protocol command bytes (RFC 854).
enum IAC : byte
{
    IAC = 255,
    DONT = 254,
    DO = 253,
    WONT = 252,
    WILL = 251,
    SB = 250,
    GA = 249,
    EL = 248,
    EC = 247,
    AYT = 246,
    AO = 245,
    IP = 244,
    BRK = 243,
    DM = 242,
    NOP = 241,
    SE = 240,
    EOR = 239,
    ABORT = 238,
    SUSP = 237,
    EOF = 236,
}

enum TokenType
{
    Closed = -2,  // connection closed (and buffer empty)
    Invalid = -1, // recv buffer incomplete (try again when more received)
    None = 0,     // recv buffer empty
    Data = 1,     // data bytes, buffer contains data
    Command = 2,  // IAC (cmd), buffer contains cmd byte
    NOP = 3,      // IAC (NOP), buffer contains cmd byte (NOP or unrecognized cmd)
    DO = 4,       // IAC DO (opt), buffer contains opt
    DONT = 5,     // IAC DONT (opt), buffer contains opt
    WILL = 6,     // IAC WILL (opt), buffer contains opt
    WONT = 7,     // IAC WONT (opt), buffer contains opt
    DM = 8,       // IAC (DM), buffer contains cmd byte (DM)
    SB = 9,       // IAC SB (opt ...) IAC SE, buffer contains opt ...
}

// A decoded protocol unit exchanged between the two halves: a type tag plus a
// (buffer, offset, length) slice.  Copies of a Token share both mBuf and the
// mLock object assigned at construction.
struct Token
{
    // sentinel/factory values
    public static Token Closed { get { return new Token(TokenType.Closed, null, 0, 0); } }
    public static Token Invalid { get { return new Token(TokenType.Invalid, null, 0, 0); } }
    public static Token None { get { return new Token(TokenType.None, null, 0, 0); } }
    public static Token DM { get { return new Token(TokenType.DM, new Byte[] { (Byte)IAC.DM }, 0, 1); } }

    // Data token referencing (not copying) the given range.
    public static Token Data(Byte[] buffer, Int32 offset, Int32 count)
    {
        return new Token(TokenType.Data, buffer, offset, count);
    }

    // Single-byte data token.
    public static Token Data(Byte data)
    {
        Byte[] buf = new Byte[1];
        buf[0] = data;
        return new Token(TokenType.Data, buf, 0, 1);
    }

    public static Token Command(IAC command)
    {
        Byte[] buf = new Byte[1];
        buf[0] = (Byte)command;
        return new Token(TokenType.Command, buf, 0, 1);
    }

    public static Token NOP(IAC command)
    {
        Byte[] buf = new Byte[1];
        buf[0] = (Byte)command;
        return new Token(TokenType.NOP, buf, 0, 1);
    }

    public static Token DO(Telnet.Option option)
    {
        return Token.DO((Byte)option);
    }

    public static Token DO(Byte option)
    {
        Byte[] buf = new Byte[1];
        buf[0] = option;
        return new Token(TokenType.DO, buf, 0, 1);
    }

    public static Token DONT(Telnet.Option option)
    {
        return Token.DONT((Byte)option);
    }

    public static Token DONT(Byte option)
    {
        Byte[] buf = new Byte[1];
        buf[0] = option;
        return new Token(TokenType.DONT, buf, 0, 1);
    }

    public static Token WILL(Telnet.Option option)
    {
        return Token.WILL((Byte)option);
    }

    public static Token WILL(Byte option)
    {
        Byte[] buf = new Byte[1];
        buf[0] = option;
        return new Token(TokenType.WILL, buf, 0, 1);
    }

    public static Token WONT(Telnet.Option option)
    {
        return Token.WONT((Byte)option);
    }

    public static Token WONT(Byte option)
    {
        Byte[] buf = new Byte[1];
        buf[0] = option;
        return new Token(TokenType.WONT, buf, 0, 1);
    }

    // Subnegotiation token: option byte followed by a copy of the payload.
    public static Token SB(Byte option, Byte[] data, Int32 offset, Int32 count)
    {
        Byte[] buf = new Byte[count + 1];
        Int32 n = 0;
        buf[n++] = option;
        for (Int32 i = 0; i < count; i++) buf[n++] = data[offset + i];
        return new Token(TokenType.SB, buf, 0, n);
    }

    // Subnegotiation token: option, command byte (doubled when it is IAC),
    // then an ASCII-encoded string payload.
    public static Token SB(Byte option, Byte command, String data)
    {
        Int32 n = 2;
        if (command == (Byte)IAC.IAC) n++;
        n += Encoding.ASCII.GetByteCount(data);
        Byte[] buf = new Byte[n];
        n = 0;
        buf[n++] = option;
        buf[n++] = command;
        if (command == (Byte)IAC.IAC) buf[n++] = command;
        n += Encoding.ASCII.GetBytes(data, 0, data.Length, buf, n);
        return new Token(TokenType.SB, buf, 0, n);
    }

    private TokenType mType; // token classification (TokenType)
    internal Byte[] mBuf;    // backing buffer (may be shared with the tokenizer)
    internal Int32 mPtr;     // offset of this token's bytes within mBuf
    internal Int32 mLen;     // number of bytes remaining in this token
    private Object mLock;    // guards mPtr/mLen mutation in ReadByte

    public Token(TokenType type, Byte[] buf, Int32 ptr, Int32 len)
    {
        mType = type;
        mBuf = buf;
        mPtr = ptr;
        mLen = len;
        mLock = new Object();
    }

    public Byte this[Int32 index]
    {
        get
        {
            lock (mLock)
            {
                if ((index < 0) || (index >= mLen)) throw new ArgumentOutOfRangeException("index");
                return mBuf[mPtr + index];
            }
        }
    }

    public TokenType Type
    {
        get { return mType; }
    }

    public Int32 Length
    {
        get { lock (mLock) return mLen; }
    }

    // Consume one byte from the front of the token, or -1 when empty.
    // NOTE(review): this mutates mPtr/mLen of THIS struct copy only.
    public Int32 ReadByte()
    {
        lock (mLock)
        {
            if (mLen == 0) return -1;
            mLen--;
            return mBuf[mPtr++];
        }
    }
}
}
// Copyright (c) 2012 DotNetAnywhere // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
#if LOCALTEST
using System.Collections;
using System.Collections.Generic;
namespace System_.Collections.Generic {
#else
namespace System.Collections.Generic {
#endif

    /// <summary>
    /// Resizable array-backed list (DotNetAnywhere corlib implementation).
    /// Live elements occupy items[0..size); slots beyond 'size' are unused
    /// capacity and must never be observed by searches or enumeration.
    /// </summary>
    public class List<T> : IList<T>, ICollection<T>, IEnumerable<T>, IEnumerable, IList, ICollection, IReadOnlyList<T>, IReadOnlyCollection<T> {

        // Forward-only cursor over the live range; starts before the first element.
        public struct Enumerator : IEnumerator<T>, IDisposable {

            private List<T> list;
            private int index;

            internal Enumerator(List<T> list) {
                this.list = list;
                this.index = -1;
            }

            public T Current {
                get {
                    return this.list[this.index];
                }
            }

            public void Dispose() {
            }

            object IEnumerator.Current {
                get {
                    return this.list[this.index];
                }
            }

            public bool MoveNext() {
                this.index++;
                return (this.index < this.list.Count);
            }

            public void Reset() {
                this.index = -1;
            }

        }

        private const int defaultCapacity = 4;

        private T[] items; // backing storage; length == Capacity
        private int size;  // number of live elements

        public List() : this(defaultCapacity) {
        }

        public List(int capacity) {
            if (capacity < 0) {
                throw new ArgumentOutOfRangeException("capacity");
            }
            this.items = new T[capacity];
            this.size = 0;
        }

        public List(IEnumerable<T> collection) {
            ICollection<T> iCol = collection as ICollection<T>;
            if (iCol != null) {
                // known size: copy in one shot
                this.size = iCol.Count;
                this.items = new T[this.size];
                iCol.CopyTo(this.items, 0);
            } else {
                this.items = new T[defaultCapacity];
                this.size = 0;
                foreach (T item in collection) {
                    this.Add(item);
                }
            }
        }

        public void TrimExcess() {
            // does nothing (capacity is never shrunk in this implementation)
        }

        // Grow the backing array so at least 'space' more elements fit.
        private void EnsureSpace(int space) {
            if (this.size + space > this.items.Length) {
                // BUG FIX: doubling once is not sufficient when 'space' exceeds
                // the current capacity (e.g. AddRange of a large collection
                // previously overflowed the resized array). Grow to whichever
                // is larger: double the capacity or the exact requirement.
                int newCapacity = Math.Max(this.items.Length << 1, defaultCapacity);
                if (newCapacity < this.size + space) {
                    newCapacity = this.size + space;
                }
                Array.Resize<T>(ref this.items, newCapacity);
            }
        }

        // Open (count > 0) or close (count < 0) a gap at 'index', updating size.
        private void Shift(int index, int count) {
            if (count > 0) {
                this.EnsureSpace(count);
                for (int i = this.size - 1; i >= index; i--) {
                    this.items[i + count] = this.items[i];
                }
            } else {
                for (int i = index; i < this.size + count; i++) {
                    this.items[i] = this.items[i - count];
                }
                // [Steve note] The above logic isn't enough to release the remaining
                // items for GC, e.g., when shifting the trailing items.
                // So, clear out the now-unused trailing slots.
                for (int i = this.size - 1; i >= this.size + count; i--) {
                    this.items[i] = default(T);
                }
            }
            this.size += count;
        }

        public void Add(T item) {
            this.EnsureSpace(1);
            this.items[this.size++] = item;
        }

        public void AddRange(IEnumerable<T> collection) {
            ICollection<T> iCol = collection as ICollection<T>;
            if (iCol != null) {
                this.EnsureSpace(iCol.Count);
                iCol.CopyTo(this.items, this.size);
                this.size += iCol.Count;
            } else {
                foreach (T t in collection) {
                    Add(t);
                }
            }
        }

        public int Count {
            get {
                return this.size;
            }
        }

        public int Capacity {
            get {
                return this.items.Length;
            }
            set {
                // setting capacity explicitly is not supported by this corlib
                throw new NotImplementedException();
            }
        }

        public T this[int index] {
            get {
                if (index >= this.size || index < 0) {
                    throw new ArgumentOutOfRangeException("index");
                }
                return this.items[index];
            }
            set {
                if (index >= this.size || index < 0) {
                    throw new ArgumentOutOfRangeException("index");
                }
                this.items[index] = value;
            }
        }

        // Returns a new list with every live element for which 'match' is true.
        public List<T> FindAll(Predicate<T> match) {
            List<T> results = new List<T>();
            for (int i = 0; i < this.size; i++) {
                if (match(this.items[i])) {
                    results.Add(this.items[i]);
                }
            }
            return results;
        }

        public Enumerator GetEnumerator() {
            return new Enumerator(this);
        }

        // NOTE: caller-supplied start/count are passed straight through; a
        // range reaching past Count would scan unused capacity slots.
        public int IndexOf(T item, int start, int count) {
            return Array.IndexOf<T>(this.items, item, start, count);
        }

        public int IndexOf(T item, int start) {
            return this.IndexOf(item, start, this.size - start);
        }

        public void InsertRange(int index, IEnumerable<T> collection) {
            if (collection == null) {
                throw new ArgumentNullException("collection");
            }
            if (index < 0 || index > this.size) {
                throw new ArgumentOutOfRangeException("index");
            }
            // materialize first: 'collection' may be lazy or be this very list
            List<T> toInsert = new List<T>(collection);
            this.Shift(index, toInsert.Count);
            for (int i = 0; i < toInsert.Count; i++) {
                this.items[index + i] = toInsert[i];
            }
        }

        // public void Sort() {
        //     Array.Sort(this.items, 0, this.size);
        // }

        public void Sort(Comparison<T> comparison) {
            Array.Sort(this.items, 0, this.size, comparison);
        }

        public void Sort(IComparer<T> comparer) {
            Array.Sort(this.items, 0, this.size, comparer);
        }

        public void Sort(int index, int count, IComparer<T> comparer) {
            Array.Sort(this.items, index, count, comparer);
        }

        public T[] ToArray() {
            T[] array = new T[this.size];
            Array.Copy(this.items, array, this.size);
            return array;
        }

        #region Interface Members

        public int IndexOf(T item) {
            return this.IndexOf(item, 0, size);
        }

        public void Insert(int index, T item) {
            if (index < 0 || index > this.size) {
                throw new ArgumentOutOfRangeException("index");
            }
            this.Shift(index, 1);
            this.items[index] = item;
        }

        public void RemoveAt(int index) {
            // BUG FIX: an out-of-range index previously corrupted 'size'
            // silently; validate like the BCL does.
            if (index < 0 || index >= this.size) {
                throw new ArgumentOutOfRangeException("index");
            }
            this.Shift(index, -1);
        }

        public bool IsReadOnly {
            get {
                return false;
            }
        }

        public void Clear() {
            // clear the whole array so no slot keeps an object alive
            Array.Clear(this.items, 0, this.items.Length);
            this.size = 0;
        }

        public bool Contains(T item) {
            // BUG FIX: search only the live range [0, size); scanning the whole
            // backing array made an empty List<int> report Contains(0) == true
            // because unused capacity slots hold default(T).
            return Array.IndexOf(this.items, item, 0, this.size) >= 0;
        }

        public void CopyTo(T[] array, int arrayIndex) {
            Array.Copy(this.items, 0, (Array)array, arrayIndex, this.size);
        }

        public bool Remove(T item) {
            // BUG FIX: limit the search to the live range (see Contains).
            int idx = Array.IndexOf(this.items, item, 0, this.size);
            if (idx >= 0) {
                this.RemoveAt(idx);
                return true;
            }
            return false;
        }

        IEnumerator<T> IEnumerable<T>.GetEnumerator() {
            return new Enumerator(this);
        }

        IEnumerator IEnumerable.GetEnumerator() {
            return new Enumerator(this);
        }

        public bool IsFixedSize {
            get {
                return false;
            }
        }

        object IList.this[int index] {
            get {
                return this[index];
            }
            set {
                this[index] = (T)value;
            }
        }

        public int Add(object value) {
            // BUG FIX: IList.Add must return the index of the added item;
            // this previously returned items.Length - 1 (capacity-based).
            this.Add((T)value);
            return this.size - 1;
        }

        public bool Contains(object value) {
            return this.Contains((T)value);
        }

        public int IndexOf(object value) {
            return this.IndexOf((T)value);
        }

        public void Insert(int index, object value) {
            this.Insert(index, (T)value);
        }

        public void Remove(object value) {
            this.Remove((T)value);
        }

        public bool IsSynchronized {
            get {
                return false;
            }
        }

        public object SyncRoot {
            get {
                return this;
            }
        }

        public void CopyTo(Array array, int index) {
            Array.Copy(this.items, 0, array, index, this.size);
        }

        #endregion

    }
}
using System;
using System.Collections.Generic;
using System.Text;

namespace Memoria.Assets
{
    /// <summary>
    /// Path helpers and human-readable display names for the game's audio
    /// resources (music tracks and sound effects).
    /// NOTE: the class continues past this chunk (SoundDisplayNames is
    /// completed further below).
    /// </summary>
    public static class AudioResources
    {
        public static class Embaded // (sic) kept for compatibility with existing callers
        {
            /// <summary>Path of an embedded AKB sound inside the game archive.</summary>
            public static String GetSoundPath(String relativePath)
            {
                return "Sounds/" + relativePath + ".akb";
            }
        }

        public static class Export
        {
            /// <summary>Path of a sound under the configured export directory.</summary>
            public static String GetSoundPath(String relativePath)
            {
                return BuildSoundPath(Configuration.Export.Path, relativePath);
            }
        }

        public static class Import
        {
            /// <summary>Path of a sound under the configured import directory.</summary>
            public static String GetSoundPath(String relativePath)
            {
                return BuildSoundPath(Configuration.Import.Path, relativePath);
            }
        }

        /// <summary>
        /// Builds "basePath/Sounds/relativePath", inserting a separator only
        /// when basePath does not already end with one.  (Shared by Export and
        /// Import, which previously duplicated this logic.)
        /// </summary>
        private static String BuildSoundPath(String basePath, String relativePath)
        {
            StringBuilder sb = new StringBuilder(basePath.Length + 32);
            sb.Append(basePath);
            if (sb.Length > 0 && sb[sb.Length - 1] != '/' && sb[sb.Length - 1] != '\\')
                sb.Append('/');
            sb.Append("Sounds/");
            sb.Append(relativePath);
            return sb.ToString();
        }

        /// <summary>
        /// Derives a human-readable name for a sound resource path by appending
        /// the known display name ("music000" -> "music000 - Victory Fanfare").
        /// </summary>
        /// <param name="relativePath">Original relative path of the resource.</param>
        /// <param name="directoryPath">Directory part (null when there is no '/', empty when the path starts with '/').</param>
        /// <param name="fileName">File-name part of the path.</param>
        /// <param name="newRelativePath">Path with " - DisplayName" appended, or the original path when no name is known.</param>
        /// <returns>True when a display name was appended.</returns>
        public static Boolean TryAppendDisplayName(String relativePath, out String directoryPath, out String fileName, out String newRelativePath)
        {
            newRelativePath = relativePath;
            if (String.IsNullOrEmpty(relativePath))
            {
                directoryPath = relativePath;
                fileName = relativePath;
                return false;
            }

            // Split the path
            Int32 index = relativePath.LastIndexOf('/');
            if (index == relativePath.Length - 1)
            {
                // trailing separator: this is a directory, nothing to rename
                directoryPath = relativePath;
                fileName = String.Empty;
                return false;
            }
            if (index == 0)
            {
                directoryPath = String.Empty;
                fileName = relativePath.Substring(1);
            }
            else if (index < 0)
            {
                directoryPath = null;
                fileName = relativePath;
            }
            else
            {
                directoryPath = relativePath.Substring(0, index);
                fileName = relativePath.Substring(index + 1);
            }

            // Find a display name
            String displayName;
            if (fileName.Length == 8 && fileName.StartsWith("music")) // music???
            {
                if (!MusicDisplayNames.TryGetValue(fileName, out displayName))
                    return false;
            }
            else if (fileName.Length == 8 && fileName.StartsWith("se")) // se??????
            {
                if (!SoundDisplayNames.TryGetValue(fileName, out displayName))
                    return false;
            }
            else if (fileName.StartsWith("va_"))
            {
                // Voice files keep their own name as the display name.
                // BUG FIX: removed an unused "directoryPath.Split(...)" here;
                // it was dead code and threw NullReferenceException for
                // root-level "va_*" files (directoryPath == null).  The unused
                // 'skipRename' local is gone for the same reason.
                displayName = fileName;
            }
            // TODO: FMV, SE, Songs
            else
            {
                return false;
            }

            // Join it to the new path
            if (directoryPath == null)
            {
                newRelativePath = fileName + " - " + displayName;
            }
            else
            {
                newRelativePath = directoryPath + '/' + fileName + " - " + displayName;
            }
            return true;
        }

        // Display names for music??? tracks.  Gaps in the numbering match the
        // original game data.  Text is kept verbatim (including spelling) since
        // it becomes part of exported file names.
        private static readonly Dictionary<String, String> MusicDisplayNames = new Dictionary<string, string>
        {
            {@"music000", "Victory Fanfare"},
            {@"music001", "Game Over"},
            {@"music002", "Dali Village"},
            {@"music003", "Evil Forest"},
            {@"music004", "Passive Sorrow (Music that plays at the beginning of Disc 4)"},
            {@"music005", "Amarant's Theme"},
            {@"music006", "Battle Theme"},
            {@"music007", "Steiner's Theme (Disc 1)"},
            {@"music008", "Vivi's Theme (Disc 1)"},
            {@"music009", "Quina's Theme (Frog Catching)"},
            {@"music010", "Garnet's Theme (Reminisce of the past, her childhood) - Disc 2"},
            {@"music011", "Freya's Theme (Gizamaluke's Grotto)"},
            {@"music012", "Aloha de chocobo (Chocobo's Forest)"},
            {@"music013", "Decisive Action (First meeting Steiner, Disc 1)"},
            {@"music014", "Stolen Eyes (Zidane and Dagger first talk on the Prima Vista Airship) Disc 1"},
            {@"music015", "Vamo' Alla Flamenco (Chocobo Hot & Cold)"},
            {@"music016", "King Leo (Prima Vista Theatre Stage - 3 Bell strike, under the axe you shall be) Disc 1"},
            {@"music017", "Sword of Fury (Prima Vista first battle on stage)"},
            {@"music018", "Strategy Conference (Beginning of Disc 1, planning kidnap Garnet)"},
            {@"music020", "Queen of the Abyss (Queen Branhe's Theme)"},
            {@"music021", "The Fated Hour - (Prima Vista, Dagger takes the stage with Ruby's role)"},
            {@"music022", "Mistaken Love (Marcus kills Cornelia instead)"},
            {@"music023", "Zidane's Theme (Provocative Zidane - boarding the Cargo Ship) Disc 1"},
            {@"music024", "Boss Battle Theme"},
            {@"music025", "Oeilvert"},
            {@"music026", "Tantalus Theme (...Disc 3, cutscene, after Mount Gulug, finding Dagger somewhere in Alexandria)"},
            {@"music027", "One Danger put behind us (Disc 1, the pub, meeting Freya first time)"},
            {@"music028", "You're Not Alone"},
            {@"music029", "Unforgettable Face (Sir Fratly and Freya's Theme)"},
            {@"music030", "Memories of that Day (Disc 1, Zidane Reminisces about meeting Garnet)"},
            {@"music031", "Ice Cavern"},
            {@"music032", "Qu's Marsh"},
            {@"music033", "Title Music"},
            {@"music036", "RUN!"},
            {@"music037", "Jesters of the Moon (Zorn and Thorn's theme)"},
            {@"music038", "Faerie Battle"},
            {@"music039", "Reckless Steiner (Steiner's Theme Pt. II)"},
            {@"music041", "Prima Vista, Music Room"},
            {@"music042", "Quad Mist (Tetra Master)"},
            {@"music043", "Far away in the village (Discover the underground production area of Dali, Disc 1)"},
            {@"music044", "Burmecia"},
            {@"music045", "Crossing those Hills (World Map Theme)"},
            {@"music046", "Mognet Central"},
            {@"music047", "Lindblum Theme (Town areas)"},
            {@"music048", "Fossil Roo"},
            {@"music049", "Cleyra Settlement"},
            {@"music050", "Eidolon Wall (Madian Sari)"},
            {@"music051", "Unfathomed Reminisce (Alexandria's Theme Disc 3-4)"},
            {@"music052", "Orchestra in the Forest (Disc 1, ATE, Evil Forest)"},
            {@"music053", "Vivi's Theme pt. II (Disc 1, Cargo Ship, other black mages refuse to talk with Vivi)"},
            {@"music054", "Black Mage Village"},
            {@"music055", "Eternal Harvest - (Ceremonial Dance, strengthen Cleyra's Sandstorm)"},
            {@"music056", "Pandemonium Theme (After 'You're not alone' montage fights)"},
            {@"music057", "Dark City Treno"},
            {@"music058", "Sneaky frog & the scoundrel (Cid's Red light, Green light game with Hedgehog pie)"},
            {@"music059", "Bran Bal"},
            {@"music060", "Eiko's Theme"},
            {@"music061", "Greive over the skies (After ceremonial dance, Cleyra) Disc 2 (Terrible Omen)"},
            {@"music062", "Conde Petie"},
            {@"music063", "Gargant Roo"},
            {@"music064", "Cleyra's Trunk"},
            {@"music066", "Kuja's Theme"},
            {@"music067", "Kuja's Theme Millennium (Desert Palace)"},
            {@"music068", "Immoral Melody (Kuja's Theme Pt. II)"},
            {@"music069", "Footsteps of Desire (Disc 3, fall into Kuja's Trap)"},
            {@"music070", "Ambush Attack (Attack at the Iifa Tree, Cleyra, rescuing Dagger, Disc 2)"},
            {@"music071", "Conde Petie Marriage Ceremony"},
            {@"music072", "Ukulele de chocobo (Chocobo's Theme)"},
            {@"music073", "The Four medallions (After Ipsen's Castle, talk about four mirrors) Disc 3"},
            {@"music075", "Ipsen's Heritage (Ipsen's Castle)"},
            {@"music077", "A transient Past (Oeilvert- Area with the faces, speaking of Terra)"},
            {@"music078", "Emiko's Vocals (Ending Theme)"},
            {@"music079", "South Border Crossing (South Gate)"},
            {@"music080", "Iifa Tree"},
            {@"music081", "Mount Gulug"},
            {@"music082", "Hunter's Chance (Festival of the hunt)"},
            {@"music083", "Hilda Garde 3 Airship Theme"},
            {@"music084", "Emiko's Vocals (Disc 2 Madian Sari)"},
            {@"music085", "Emiko's Vocals (Solo)"},
            {@"music087", "Crystal World"},
            {@"music088", "The chosen summoner (Dagger's guide to Alexandria's Altar) Eidolon Alexander"},
            {@"music089", "Protecting my devotion (Disc 3, Steiner & Beatrix montage fight)"},
            {@"music090", "Loss of me (Beatrix Theme)"},
            {@"music091", "Mystery sword (Battle with Beatrix)"},
            {@"music092", "Pandemonium Theme (Before 'You're not alone' montage fights)"},
            {@"music093", "Secret Library of Daguerreo"},
            {@"music094", "Madian Sari Theme"},
            {@"music095", "Terra"},
            {@"music096", "Place of memories (Memoria)"},
            {@"music097", "Cid's Theme (Lindblum Castle)"},
            {@"music098", "Dark Messenger (Trance Kuja battle)"},
            {@"music101", "The Final battle"},
            {@"music102", "Emiko's Vocals (Solo) Madian Sari, Eidolon Wall appears on fire."},
            {@"music105", "Eiko's Theme pt. II"},
            {@"music106", "We are theives! (Beginning of Disc 3)"},
            {@"music108", "Extraction"},
            {@"music109", "Black Waltz Theme"},
            {@"music110", "Ending Theme Pt. I (Prima Vista Theatre Stage)"},
            {@"music111", "Ending Theme Pt. II (Prima Vista Theatre Stage)"},
            {@"music112", "Ending Theme Pt. III (Prima Vista Theatre Stage)"},
            {@"music113", "Esto Gaza"},
            {@"music114", "Heart of Melting Magic (Cid & Hilda Garde theme)"},
            {@"music115", "Ending Theme Pt. II (Prima Vista Theatre Stage)"},
            {@"music116", "Slew of love letters"},
            {@"music117", "The Evil Mist's Rebirth (Disc 4 world map)"},
            {@"music118", "Successive battles"},
            {@"music120", "Final Fantasy IX - Prelude"},
            {@"music121", "Final Fantasy III Theme"},
            // BUG FIX: the closing "};" of the initializer had been pasted
            // inside this string ("...white dragons};"), corrupting the name.
            {@"music122", "Assault of the white dragons"}
        };

        // Display names for se?????? sound effects (definition continues past
        // this chunk).  Text kept verbatim, including original spellings.
        private static readonly Dictionary<String, String> SoundDisplayNames = new Dictionary<string, string>
        {
            {@"se000001", "Menu Select"},
            {@"se000002", "Menu Error"},
            {@"se000003", "Menu Cancel"},
            {@"se000004", "Recieve Items"},
            {@"se000005", "Equip Armor"},
            {@"se000006", "Menu Item Heal"},
            {@"se000007", "EXP recieveing (looped)"},
            {@"se000008", "Gil recieveing (looped)"},
            {@"se000009", "Random Encounter (Part 1)"},
            {@"se000010", "Random Encounter (Part 2)"},
            {@"se000011", "Unknown"},
            {@"se000012", "Open a door"},
            {@"se000013", "Treasures"},
            {@"se000014", "Treasure chest open (Begin)"},
            {@"se000015", "Moogle (Help)"},
            {@"se000016", "Level up"},
            {@"se000017", "Ability Learned"},
            {@"se000018", "Battle command window"},
            {@"se000019", "Purchase"},
            {@"se000020", "Info"},
            {@"se000021", "Menu (L1
and R1) switch between players"}, {@"se000022", "Tent (Part 1)"}, {@"se000023", "Tent (Part 2) Random interval, timing based on part 3"}, {@"se000024", "Tent (Part 3) Random interval, timing based on part 2"}, {@"se000025", "Tent (Part 4) Random interval, timing based on part 1"}, {@"se000026", "Save and Load game confirmed"}, {@"se000027", "Jump"}, {@"se000028", "Unknown"}, {@"se000029", "ATE"}, {@"se000030", "Ladder climb"}, {@"se000031", "Knight footland (from jump)"}, {@"se000032", "Moogle welcome"}, {@"se000033", "Moogle tent use"}, {@"se000034", "Knight footsteps"}, {@"se000035", "Jump (Part 2)"}, {@"se000036", "Land (from jump)"}, {@"se000037", "Moogle flip-land (from save)"}, {@"se000038", "Memoria save portal"}, {@"se000040", "Return from Memoria save portal"}, {@"se000042", "Moogle save book open"}, {@"se000043", "Locked Game renew"}, {@"se000047", "Unknown"}, {@"se000049", "Moogle Land"}, {@"se010001", "Enemy (possible Lamia) attack #1"}, {@"se010002", "Enemy attack #2"}, {@"se010003", "Enemy attack #3"}, {@"se010005", "Enemy attack #4"}, {@"se010006", "Enemy attack #5 Oink"}, {@"se010007", "Enemy attack #6"}, {@"se010009", "Enemy attack #7"}, {@"se010010", "Enemy attack #8"}, {@"se010011", "Enemy attack #9"}, {@"se010013", "Enemy attack #10"}, {@"se010014", "Enemy attack #11"}, {@"se010015", "Enemy attack #12"}, {@"se010017", "Weapon (Rod) attack"}, {@"se010018", "Weapon (Staff and Flute) attack"}, {@"se010019", "Weapon Attack"}, {@"se010021", "Weapon (Rod) attack 2"}, {@"se010022", "Weapon (Staff) attack 2"}, {@"se010025", "Enemy attack #13"}, {@"se010029", "Enemy attack #14"}, {@"se010032", "Weapon Swing (Miss)"}, {@"se010033", "Weapon Swing (Miss) 2"}, {@"se010034", "Weapon Swing (Miss) 3"}, {@"se010035", "Weapon Swing (Miss) 4"}, {@"se010036", "Weapon Swing (Miss) 5"}, {@"se010037", "Weapon Swing (Miss) 6"}, {@"se010038", "Weapon Swing (Miss) 7"}, {@"se010039", "Air racket swing"}, {@"se010040", "Jump (Spear)"}, {@"se010128", "Enemy attack #15"}, // 
"Battle Sound Knight Sword Slash" Thunder Slash;Stock Break;Slash;Attack;Climhazzard;Hack;Sword Quiver;Cleave;Helm Divide;Judgment Sword;Battlemes1;Battlemes2;Battlemes3;Rrrragh!;Gwahaha!;MESAttack0;MESAttack1;Get some!23;Taste steel! {@"se010132", "Enemy attack #16"}, // "Battle Sound Frontal Knock" Tail;HP Switching;StrikeBC;Devil's Kiss;Dive;Counter;Open and Close {@"se010136", "Enemy strike"}, // "Battle Sound Claw & Sting" Strike;Counter;Poison Claw;Claws;Scratch;Silent Claw;Claw;Dive;Attack;Battlemes1;Battlemes2;Battlemes3;Demon's Claw {@"se010138", "Enemy attack #17"}, {@"se010140", "Enemy attack #18"}, // "Battle Sound Metallic Slice" Trouble Knife;Knife;Rusty Knife;Claws;Slash;Clamp Pinch;Attack;Battlemes1;Battlemes2;Battlemes3;Chop {@"se010144", "Enemy attack #19"}, // "Battle Sound Spear Hit" Counter;Spear;Impale {@"se010148", "Enemy attack #20"}, // "Battle Sound Bite & Scratch" Bite;Fang {@"se010152", "Enemy attack #21"}, // "Battle Sound Steal" Steal;Mug;Hit {@"se010156", "Enemy attack #22"}, // "Battle Sound Rusted Slice" Charge;Claws;Hit;Slice;Blade;Crush;Chop {@"se010160", "Enemy attack #23"}, // "Battle Sound Stab & Suck" Tongue;Absorb even more;Stab;Absorb more even more;Rapid Fire;Poison Counter;Stinger {@"se010164", "Enemy attack #24"}, // "Battle Sound Heavy Slice" Axe;Trouble Knife;Attack;MEScounter;Dagger's first hit;MEShit1;MEShit2;Hatchet;Mask Jump {@"se010168", "Enemy attack #25"}, // "Battle Sound Tongue Knock" Tongue;Stomach {@"se010172", "Enemy attack #26"}, // "Battle Sound Wing Uppercut" Wings {@"se010176", "Enemy attack #27"}, // "Battle Sound Slam" Counter;Stretch;Antenna;Edge;Strike;Attack;Mug;Hit;Battlemes1;Battlemes2;Silent Kiss;Battlemes3;The Drop {@"se010180", "Enemy attack #28"}, // "Battle Sound Charge & Fist" Head Attack;Body Ram;Knock Down;Smash;Ram;Teleport;YEOWWW!;Fist;Open and Close;Battlemes3;Freaked out;Battlemes2;Oww!;Battlemes1;Attack {@"se010182", "Enemy attack #29"}, // "Battle Sound Baku Crash" 
ARGHHH!;Oww!;YEOWWW! {@"se010184", "Enemy attack #30"}, // "Battle Sound Beak" Beak {@"se010188", "Unknown - Botched"}, // "Battle Sound Rush" Rush;Charge;Fat Press;Crash;ARGHHH!;Hiphop;Ram {@"se010192", "Enemy attack - Oink 2"}, // "Battle Sound Spike Hit" Battlemes1;Battlemes2;Battlemes3;Knife;Sting;Spear;Attack;Rolling Attack {@"se010196", "Enemy attack - Oink 3"}, // "Battle Sound Heave" Heave;Charge;End3;Counter {@"se010197", "Enemy Heave"}, {@"se010200", "Unknown - Botched"}, // "Battle Sound Power Up" Power Thorn;Power Zorn {@"se010204", "Enemy head attack"}, // "Battle Sound Horn Gore" Head Attack;Charge;Horn;Stab {@"se010208", "Enemy Wing attack"}, // "Battle Sound Slap" Silent Slap;Trouble Tail;Tail;Hiphop;Strike;Fin;Tentacle;Wings;Wing {@"se010212", "Enemy Slap attack"}, // "Battle Sound Whip" Thorn Whip;Slap;Left Stem;Right Stem;Right Tentacle;Left Tentacle;Trouble Counter;Spin;Leg {@"se010216", "Unknown - Botched"}, // "Battle Sound Soft Tail" Virus Tentacles;Blind Tail {@"se010220", "Unknown - Botched"}, // "Battle Sound Nymph Happy" Happy {@"se010224", "Unknown - Botched (Pumpkin Head 1)"}, {@"se010225", "Unknown - Botched"}, // "Battle Sound Mimic Call" Call {@"se010226", "Unknown - Botched (Aqua Breath 1)"}, // "Battle Sound Teleport" Teleport {@"se010228", "Enemy Charge"}, // "Battle Sound Head Attack" Head Attack {@"se010229", "Enemy Saw"}, // "Battle Sound Saw" Saw {@"se010232", "Enemy Slice"}, // "Battle Sound Lich Cutter" Death Cutter;Double Slash {@"se010256", "Enemy Fade away (Die)"}, {@"se010257", "Taharka Ipsen cutscene death (downsampled)"}, {@"se010258", "Unknown - Botched (Flee) (Part 1)"}, {@"se010259", "Unknown - Botched (Flee) (Part 2)"}, {@"se010260", "Unknown - Botched (Flee) (Part 3)"}, {@"se030005", "Enemy attack #31"}, // "Battle Sound Bomb Grow" Grow {@"se030022", "Enemy attack #32"}, // "Battle Sound Vice Appearance" Appearance {@"se030048", "Enemy attack #33"}, // "Battle Sound Groan" Groan {@"se030109", "Enemy attack #34"}, 
// "Battle Sound Prison Cage Escape" Event of death {@"se030284", "Enemy attack #35"}, // "Battle Sound Sand Golem Death" Golem Death {@"se030303", "Enemy attack #36"}, // "Battle Sound Armodullahan Death" Death {@"se030309", "Enemy attack #37"}, // "Battle Sound Prison Cage Death" Death {@"se030314", "Enemy attack #38"}, // "Battle Sound Gizamaluke Death" Death {@"se030318", "Enemy attack #39"}, // "Battle Sound Antlion Death" Death {@"se030336", "Enemy attack #40"}, // "Battle Sound Silver Dragon Death" Death {@"se030338", "Enemy attack #41"}, // "Battle Sound Nova Dragon Death" Death {@"se030346", "Enemy attack #42"}, // "Battle Sound Hades Death" Death by idle2;Death {@"se030347", "Enemy attack #43"}, // "Battle Sound Deathguise Death" Death1;Death0 {@"se500432", "Enemy attack #44"}, // "Battle Sound Flee 1" Escape;Flee;HappyCALC;RefuseEVT {@"se500433", "Enemy attack #45"}, // "Battle Sound Flee 2" Escape;Refuse;Happy;FleeEVTCALC {@"se500434", "Enemy attack #46"}, // "Battle Sound Flee 3" Escape;Refuse;Happy;FleeEVTCALC }; } }
using System;
using System.Diagnostics;
using System.Linq;
using System.Text;
using Roton.Emulation.Data;
using Roton.Emulation.Data.Impl;
using Roton.Emulation.Infrastructure;
using Roton.Emulation.Items;
using Roton.Infrastructure.Impl;

namespace Roton.Emulation.Core.Impl
{
    /// <summary>
    /// Parses OOP script bytes stored in actor code, registered for both the
    /// Original and Super engine contexts. Reads go through the shared
    /// <c>Engine.State.OopByte</c> / <c>OopNumber</c> / <c>OopWord</c> registers,
    /// so callers observe the last byte/number/word read as engine state side effects.
    /// </summary>
    [Context(Context.Original)]
    [Context(Context.Super)]
    public sealed class Parser : IParser
    {
        private readonly Lazy<IEngine> _engine;

        public Parser(Lazy<IEngine> engine)
        {
            _engine = engine;
        }

        // Lazily resolved to break the construction cycle between parser and engine
        // (presumably — the Lazy indirection suggests circular DI; confirm with container setup).
        private IEngine Engine
        {
            [DebuggerStepThrough] get => _engine.Value;
        }

        /// <summary>
        /// Scans actor <paramref name="index"/>'s code from <paramref name="offset"/>
        /// for <paramref name="term"/>, case-insensitively. A match only counts when
        /// the byte following the term is NOT an uppercase letter (0x41-0x5A) or
        /// underscore (0x5F), i.e. the term must end at a word boundary.
        /// Returns the offset where the match begins, or -1 if not found.
        /// NOTE(review): assumes term is non-empty; an empty term would index
        /// termBytes[0] before checking length — confirm callers never pass "".
        /// </summary>
        public int Search(int index, int offset, string term)
        {
            var result = -1;
            var termBytes = term.ToBytes();
            var actor = Engine.Actors[index];
            var offs = new Executable {Instruction = offset};
            while (offs.Instruction < actor.Length)
            {
                // Remember where this candidate match starts so we can report it,
                // or restart scanning one byte past it on failure.
                var oldOffset = offs.Instruction;
                var termOffset = 0;
                bool success;
                while (true)
                {
                    ReadByte(index, offs);
                    if (termBytes[termOffset].ToUpperCase() != Engine.State.OopByte.ToUpperCase())
                    {
                        success = false;
                        break;
                    }

                    termOffset++;
                    if (termOffset >= termBytes.Length)
                    {
                        success = true;
                        break;
                    }
                }

                if (success)
                {
                    // Peek the byte after the term: reject matches that continue
                    // into a longer identifier (letter or underscore follows).
                    ReadByte(index, offs);
                    Engine.State.OopByte = Engine.State.OopByte.ToUpperCase();
                    if (!(Engine.State.OopByte >= 0x41 && Engine.State.OopByte <= 0x5A || Engine.State.OopByte == 0x5F))
                    {
                        result = oldOffset;
                        break;
                    }
                }

                // Resume scanning at the byte after the failed candidate's start.
                oldOffset++;
                offs.Instruction = oldOffset;
            }

            return result;
        }

        /// <summary>Reads a number from the context's own instruction stream.</summary>
        public int GetNumber(IOopContext context) => ReadNumber(context.Index, context);

        /// <summary>
        /// Reads one code byte from actor <paramref name="index"/> at the source's
        /// instruction pointer, advancing the pointer. Out-of-range reads return 0
        /// and leave the pointer unchanged. The byte is also latched into
        /// <c>Engine.State.OopByte</c>.
        /// </summary>
        public int ReadByte(int index, IExecutable instructionSource)
        {
            var actor = Engine.Actors[index];
            var value = 0;
            if (instructionSource.Instruction < 0 || instructionSource.Instruction >= actor.Length)
            {
                Engine.State.OopByte = 0;
            }
            else
            {
                value = actor.Code[instructionSource.Instruction];
                Engine.State.OopByte = value;
                instructionSource.Instruction++;
            }

            return value;
        }

        /// <summary>
        /// Reads characters until a NUL (0x00) or carriage return (0x0D) byte and
        /// returns them as a string. The terminator byte is consumed, not returned.
        /// </summary>
        public string ReadLine(int index, IExecutable instructionSource)
        {
            var result = new StringBuilder();
            ReadByte(index, instructionSource);
            while (Engine.State.OopByte != 0x00 && Engine.State.OopByte != 0x0D)
            {
                result.Append(Engine.State.OopByte.ToChar());
                ReadByte(index, instructionSource);
            }

            return result.ToString();
        }

        /// <summary>
        /// Skips spaces, then reads a run of ASCII digits (0x30-0x39) as a decimal
        /// number into <c>Engine.State.OopNumber</c> (-1 when no digits were found).
        /// The instruction pointer is backed up one byte so the non-digit terminator
        /// can be re-read by the caller.
        /// </summary>
        public int ReadNumber(int index, IExecutable instructionSource)
        {
            var result = new StringBuilder();
            var success = false;

            // Consume leading spaces (0x20).
            while (ReadByte(index, instructionSource) == 0x20)
            {
            }

            Engine.State.OopByte = Engine.State.OopByte.ToUpperCase();
            while (Engine.State.OopByte >= 0x30 && Engine.State.OopByte <= 0x39)
            {
                success = true;
                result.Append(Engine.State.OopByte.ToChar());
                ReadByte(index, instructionSource);
            }

            // Un-read the terminating byte (guarded so we never go negative).
            if (instructionSource.Instruction > 0)
            {
                instructionSource.Instruction--;
            }

            if (!success)
            {
                Engine.State.OopNumber = -1;
            }
            else
            {
                // TryParse result deliberately ignored; overflow yields 0 here.
                int.TryParse(result.ToString(), out var resultInt);
                Engine.State.OopNumber = resultInt;
            }

            return Engine.State.OopNumber;
        }

        /// <summary>
        /// Skips spaces, then reads an uppercased identifier word (letters, digits,
        /// ':' and '_') into <c>Engine.State.OopWord</c>. A word may not START with
        /// a digit — in that case an empty word is returned. The instruction pointer
        /// is backed up one byte past the word's terminator.
        /// </summary>
        public string ReadWord(int index, IExecutable instructionSource)
        {
            var result = new StringBuilder();

            while (true)
            {
                ReadByte(index, instructionSource);
                if (Engine.State.OopByte != 0x20)
                {
                    break;
                }
            }

            Engine.State.OopByte = Engine.State.OopByte.ToUpperCase();
            var oopByte = Engine.State.OopByte;

            // Words must not begin with a digit (0x30-0x39).
            if (!(oopByte >= 0x30 && oopByte <= 0x39))
            {
                while (oopByte >= 0x41 && oopByte <= 0x5A || oopByte >= 0x30 && oopByte <= 0x39 || oopByte == 0x3A || oopByte == 0x5F)
                {
                    result.Append(oopByte.ToChar());
                    ReadByte(index, instructionSource);
                    Engine.State.OopByte = Engine.State.OopByte.ToUpperCase();
                    oopByte = Engine.State.OopByte;
                }
            }

            if (instructionSource.Instruction > 0)
            {
                instructionSource.Instruction--;
            }

            Engine.State.OopWord = result.ToString();
            return Engine.State.OopWord;
        }

        /// <summary>
        /// Reads a word and evaluates it as a condition. Unknown condition names
        /// fall back to a world-flag lookup by the same name.
        /// </summary>
        public bool? GetCondition(IOopContext oopContext)
        {
            var name = ReadWord(oopContext.Index, oopContext);
            var condition = Engine.ConditionList.Get(name);
            return condition?.Execute(oopContext) ?? Engine.World.Flags.Contains(name);
        }

        /// <summary>
        /// Reads a word and resolves it to a direction vector; null when the word
        /// is not a known direction.
        /// </summary>
        public IXyPair GetDirection(IOopContext oopContext)
        {
            var name = ReadWord(oopContext.Index, oopContext);
            var direction = Engine.DirectionList.Get(name);
            return direction?.Execute(oopContext);
        }

        /// <summary>Reads a word and resolves it to an item; null when unknown.</summary>
        public IItem GetItem(IOopContext oopContext)
        {
            var name = ReadWord(oopContext.Index, oopContext);
            var item = Engine.ItemList.Get(name);
            return item;
        }

        /// <summary>
        /// Reads an optional color word followed by an element name, returning the
        /// matching tile (color offset by 8 when a color was given), or null when
        /// the element name is not recognized.
        /// </summary>
        public ITile GetKind(IOopContext oopContext)
        {
            var word = ReadWord(oopContext.Index, oopContext);
            var result = new Tile(0, 0);
            var success = false;

            // Colors 1-7 map to tile colors 9-15; a color consumes an extra word.
            for (var i = 1; i < 8; i++)
            {
                if (!Engine.Colors[i].CaseInsensitiveEqual(word)) continue;
                result.Color = i + 8;
                word = ReadWord(oopContext.Index, oopContext);
                break;
            }

            foreach (var element in Engine.ElementList.Where(e => e != null))
            {
                if (!element.Name.CaseInsensitiveCharacterEqual(word)) continue;
                success = true;
                result.Id = element.Id;
                break;
            }

            return success ? result : null;
        }

        /// <summary>
        /// Advances the search index and executes the named target, falling back to
        /// the default (empty-named) target when the name is unknown.
        /// </summary>
        public bool GetTarget(ISearchContext context)
        {
            context.SearchIndex++;
            var target = Engine.TargetList.Get(context.SearchTarget) ?? Engine.TargetList.Get(string.Empty);
            return target.Execute(context);
        }
    }
}
// // Copyright (c) 2008-2011, Kenneth Bell // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. 
//
namespace DiscUtils.Iscsi
{
    using System;
    using System.Collections.Generic;
    using System.IO;

    /// <summary>
    /// Presents a single iSCSI LUN as a seekable byte stream, translating the
    /// byte-granular Stream API into block-granular SCSI reads and writes on an
    /// established session.
    /// </summary>
    internal class DiskStream : SparseStream
    {
        private Session _session;       // transport used for all block I/O
        private long _lunId;            // logical unit this stream addresses
        private long _capacityBytes;    // total LUN size, in bytes
        private long _cursor;           // current stream position, in bytes
        private int _blockBytes;        // size of one logical block, in bytes
        private bool _writable;
        private bool _readable;

        public DiskStream(Session session, long lun, FileAccess access)
        {
            _session = session;
            _lunId = lun;

            // Query the device once up front for block size and total capacity.
            LunCapacity capacity = session.GetCapacity(lun);
            _blockBytes = capacity.BlockSize;
            _capacityBytes = capacity.LogicalBlockCount * capacity.BlockSize;

            _writable = access != FileAccess.Read;
            _readable = access != FileAccess.Write;
        }

        public override bool CanRead
        {
            get { return _readable; }
        }

        public override bool CanSeek
        {
            get { return true; }
        }

        public override bool CanWrite
        {
            get { return _writable; }
        }

        public override long Length
        {
            get { return _capacityBytes; }
        }

        public override long Position
        {
            get { return _cursor; }
            set { _cursor = value; }
        }

        /// <summary>
        /// The stream is fully allocated: a single extent covering the whole LUN.
        /// </summary>
        public override IEnumerable<StreamExtent> Extents
        {
            get { yield return new StreamExtent(0, _capacityBytes); }
        }

        public override void Flush()
        {
        }

        /// <summary>
        /// Reads up to <paramref name="count"/> bytes at the current position,
        /// staging whole blocks from the device and copying out the requested slice.
        /// </summary>
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (!CanRead)
            {
                throw new InvalidOperationException("Attempt to read from read-only stream");
            }

            // Never request bytes past the end of the LUN.
            int wanted = (int)Math.Min(_capacityBytes - _cursor, count);

            // Expand the byte range to whole-block boundaries for the device.
            long startBlock = _cursor / _blockBytes;
            long endBlock = Utilities.Ceil(_cursor + wanted, _blockBytes);

            byte[] staging = new byte[(endBlock - startBlock) * _blockBytes];
            int bytesRead = _session.Read(_lunId, startBlock, (short)(endBlock - startBlock), staging, 0);

            // Copy out only the caller's slice, bounded by what actually arrived.
            int copied = Math.Min(wanted, bytesRead);
            Array.Copy(staging, _cursor - (startBlock * _blockBytes), buffer, offset, copied);

            _cursor += copied;
            return copied;
        }

        /// <summary>
        /// Moves the stream position. Positions beyond the end are permitted;
        /// positions before the start throw.
        /// </summary>
        public override long Seek(long offset, SeekOrigin origin)
        {
            long newPosition = offset;
            if (origin == SeekOrigin.Current)
            {
                newPosition += _cursor;
            }
            else if (origin == SeekOrigin.End)
            {
                newPosition += _capacityBytes;
            }

            if (newPosition < 0)
            {
                throw new IOException("Attempt to move before beginning of disk");
            }

            _cursor = newPosition;
            return _cursor;
        }

        public override void SetLength(long value)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Writes <paramref name="count"/> bytes at the current position. Unaligned
        /// head/tail portions use read-modify-write on a single block; aligned runs
        /// are written directly as whole blocks.
        /// </summary>
        public override void Write(byte[] buffer, int offset, int count)
        {
            if (!CanWrite)
            {
                throw new IOException("Attempt to write to read-only stream");
            }

            if (_cursor + count > _capacityBytes)
            {
                throw new IOException("Attempt to write beyond end of stream");
            }

            int done = 0;
            while (done < count)
            {
                long block = _cursor / _blockBytes;
                uint blockOffset = (uint)(_cursor % _blockBytes);
                int chunk = count - done;

                if (blockOffset != 0 || chunk < _blockBytes)
                {
                    // Partial block: fetch it, overlay our bytes, write it back.
                    chunk = (int)Math.Min(chunk, _blockBytes - blockOffset);

                    byte[] scratch = new byte[_blockBytes];
                    int numRead = _session.Read(_lunId, block, 1, scratch, 0);
                    if (numRead != _blockBytes)
                    {
                        throw new IOException("Incomplete read, received " + numRead + " bytes from 1 block");
                    }

                    Array.Copy(buffer, offset + done, scratch, blockOffset, chunk);

                    _session.Write(_lunId, block, 1, _blockBytes, scratch, 0);
                }
                else
                {
                    // Aligned: write as many whole blocks as the remaining data fills,
                    // trimming any trailing partial block for the next iteration.
                    short wholeBlocks = (short)(chunk / _blockBytes);
                    chunk = wholeBlocks * _blockBytes;
                    _session.Write(_lunId, block, wholeBlocks, _blockBytes, buffer, offset + done);
                }

                done += chunk;
                _cursor += chunk;
            }
        }
    }
}
//------------------------------------------------------------------------------ // <copyright file="Roles.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ namespace System.Web.Security { using System.Web; using System.Web.Configuration; using System.Web.Management; using System.Security.Principal; using System.Security.Permissions; using System.Globalization; using System.Runtime.Serialization; using System.Collections; using System.Configuration.Provider; using System.Configuration; using System.Web.Hosting; using System.Threading; using System.Web.Util; using System.Collections.Specialized; using System.Web.Compilation; /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> // This has no hosting permission demands because of DevDiv Bugs 31461: ClientAppSvcs: ASP.net Provider support static public class Roles { static public RoleProvider Provider { get { EnsureEnabled(); if (s_Provider == null) { throw new InvalidOperationException(SR.GetString(SR.Def_role_provider_not_found)); } return s_Provider; } } static public RoleProviderCollection Providers { get { EnsureEnabled(); return s_Providers;} } static public string CookieName { get { Initialize(); return s_CookieName; }} static public bool CacheRolesInCookie { get { Initialize(); return s_CacheRolesInCookie; }} static public int CookieTimeout { get { Initialize(); return s_CookieTimeout; }} static public string CookiePath { get { Initialize(); return s_CookiePath; }} static public bool CookieRequireSSL { get { Initialize(); return s_CookieRequireSSL; }} static public bool CookieSlidingExpiration { get { Initialize(); return s_CookieSlidingExpiration; }} static public CookieProtection CookieProtectionValue { get { Initialize(); return s_CookieProtection; }} static public bool CreatePersistentCookie { get { Initialize(); return s_CreatePersistentCookie; } } static public 
string Domain { get { Initialize(); return s_Domain; } } static public int MaxCachedResults { get { Initialize(); return s_MaxCachedResults; } } static public bool Enabled { get { if (HostingEnvironment.IsHosted && !HttpRuntime.HasAspNetHostingPermission(AspNetHostingPermissionLevel.Low)) return false; if (!s_Initialized && !s_EnabledSet) { RoleManagerSection config = RuntimeConfig.GetAppConfig().RoleManager; s_Enabled = config.Enabled; s_EnabledSet = true; } return s_Enabled; } set { BuildManager.ThrowIfPreAppStartNotRunning(); s_Enabled = value; s_EnabledSet = true; } } static public string ApplicationName { get { return Provider.ApplicationName; } set { Provider.ApplicationName = value; } } // authorization static public bool IsUserInRole(string username, string roleName) { if (HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_ROLE_BEGIN, HttpContext.Current.WorkerRequest); EnsureEnabled(); bool isUserInRole = false; bool isRolePrincipal = false; try { SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName"); SecUtility.CheckParameter(ref username, true, false, true, 0, "username"); if (username.Length < 1) return false; IPrincipal user = GetCurrentUser(); if (user != null && user is RolePrincipal && ((RolePrincipal)user).ProviderName == Provider.Name && StringUtil.EqualsIgnoreCase(username, user.Identity.Name)) isUserInRole = user.IsInRole(roleName); else isUserInRole = Provider.IsUserInRole(username, roleName); return isUserInRole; } finally { if (HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) { if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Verbose, EtwTraceFlags.AppSvc)) { string status = SR.Resources.GetString(isUserInRole ? SR.Etw_Success : SR.Etw_Failure, CultureInfo.InstalledUICulture); EtwTrace.Trace(EtwTraceType.ETW_TYPE_ROLE_IS_USER_IN_ROLE, HttpContext.Current.WorkerRequest, isRolePrincipal ? 
"RolePrincipal" : Provider.GetType().FullName, username, roleName, status); } EtwTrace.Trace(EtwTraceType.ETW_TYPE_ROLE_END, HttpContext.Current.WorkerRequest, isRolePrincipal ? "RolePrincipal" : Provider.GetType().FullName, username); } } } static public bool IsUserInRole(string roleName) { return IsUserInRole(GetCurrentUserName(), roleName); } static public string[] GetRolesForUser (string username){ if (HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) EtwTrace.Trace(EtwTraceType.ETW_TYPE_ROLE_BEGIN, HttpContext.Current.WorkerRequest); EnsureEnabled(); string[] roles = null; bool isRolePrincipal = false; try { SecUtility.CheckParameter(ref username, true, false, true, 0, "username"); if (username.Length < 1) { roles = new string[0]; return roles; } IPrincipal user = GetCurrentUser(); if (user != null && user is RolePrincipal && ((RolePrincipal)user).ProviderName == Provider.Name && StringUtil.EqualsIgnoreCase(username, user.Identity.Name)) { roles = ((RolePrincipal)user).GetRoles(); isRolePrincipal = true; } else { roles = Provider.GetRolesForUser(username); } return roles; } finally { if (HostingEnvironment.IsHosted && EtwTrace.IsTraceEnabled(EtwTraceLevel.Information, EtwTraceFlags.AppSvc)) { if (EtwTrace.IsTraceEnabled(EtwTraceLevel.Verbose, EtwTraceFlags.AppSvc)) { string roleNames = null; if (roles != null && roles.Length > 0) roleNames = roles[0]; for (int i = 1; i < roles.Length; i++) roleNames += "," + roles[i]; EtwTrace.Trace(EtwTraceType.ETW_TYPE_ROLE_GET_USER_ROLES, HttpContext.Current.WorkerRequest, isRolePrincipal ? "RolePrincipal" : Provider.GetType().FullName, username, roleNames, null); } EtwTrace.Trace(EtwTraceType.ETW_TYPE_ROLE_END, HttpContext.Current.WorkerRequest, isRolePrincipal ? 
"RolePrincipal" : Provider.GetType().FullName, username); } } } static public string[] GetRolesForUser (){ return GetRolesForUser(GetCurrentUserName()); } // role administration // static public string[] GetUsersInRole(string roleName){ EnsureEnabled(); SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName"); return Provider.GetUsersInRole(roleName); } static public void CreateRole(string roleName){ EnsureEnabled(); SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName"); Provider.CreateRole(roleName); } static public bool DeleteRole(string roleName, bool throwOnPopulatedRole){ EnsureEnabled(); SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName"); bool roleDeleted = Provider.DeleteRole(roleName, throwOnPopulatedRole); try { RolePrincipal user = GetCurrentUser() as RolePrincipal; if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached && user.IsInRole(roleName)) user.SetDirty(); } catch { } return roleDeleted; } static public bool DeleteRole(string roleName) { return DeleteRole(roleName, true); } static public bool RoleExists(string roleName){ EnsureEnabled(); SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName"); return Provider.RoleExists(roleName); } static public void AddUserToRole(string username, string roleName){ EnsureEnabled(); SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName"); SecUtility.CheckParameter(ref username, true, true, true, 0, "username"); Provider.AddUsersToRoles(new string [] {username}, new string [] {roleName}); try { RolePrincipal user = GetCurrentUser() as RolePrincipal; if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached && StringUtil.EqualsIgnoreCase(user.Identity.Name, username)) user.SetDirty(); } catch { } } static public void AddUserToRoles(string username, string[] roleNames){ EnsureEnabled(); SecUtility.CheckParameter(ref username, true, true, true, 0, "username"); 
// ---------------------------------------------------------------------------
// NOTE(review): this chunk begins mid-method. The lines below are the tail of
// Roles.AddUserToRoles(string username, string[] roleNames) — the signature
// lies before the visible region. Pattern shared by all mutators here:
// validate arguments, delegate to the configured RoleProvider, then best-effort
// mark the current RolePrincipal's cached role list dirty if it belongs to an
// affected user.
// ---------------------------------------------------------------------------
SecUtility.CheckArrayParameter(ref roleNames, true, true, true, 0, "roleNames");
Provider.AddUsersToRoles(new string[] { username }, roleNames);
try
{
    RolePrincipal user = GetCurrentUser() as RolePrincipal;
    // Only dirty the cache when the principal came from this provider and
    // actually has a cached role list for the user that was just changed.
    if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached && StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
        user.SetDirty();
}
catch
{
    // Best-effort cache invalidation: a failure here must not surface after
    // the provider update already succeeded.
}
}

// Adds several users to one role; dirties the current principal's cached
// role list when that principal is among the affected users.
static public void AddUsersToRole(string[] usernames, string roleName)
{
    EnsureEnabled();
    SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName");
    SecUtility.CheckArrayParameter(ref usernames, true, true, true, 0, "usernames");
    Provider.AddUsersToRoles(usernames, new string[] { roleName });
    try
    {
        RolePrincipal user = GetCurrentUser() as RolePrincipal;
        if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached)
            foreach (string username in usernames)
                if (StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
                {
                    user.SetDirty();
                    break; // current principal can match at most once
                }
    }
    catch { } // best-effort cache invalidation only
}

// Adds several users to several roles in a single provider call.
static public void AddUsersToRoles(string[] usernames, string[] roleNames)
{
    EnsureEnabled();
    SecUtility.CheckArrayParameter(ref roleNames, true, true, true, 0, "roleNames");
    SecUtility.CheckArrayParameter(ref usernames, true, true, true, 0, "usernames");
    Provider.AddUsersToRoles(usernames, roleNames);
    try
    {
        RolePrincipal user = GetCurrentUser() as RolePrincipal;
        if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached)
            foreach (string username in usernames)
                if (StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
                {
                    user.SetDirty();
                    break;
                }
    }
    catch { } // best-effort cache invalidation only
}

// Removes one user from one role.
static public void RemoveUserFromRole(string username, string roleName)
{
    EnsureEnabled();
    SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName");
    SecUtility.CheckParameter(ref username, true, true, true, 0, "username");
    Provider.RemoveUsersFromRoles(new string[] { username }, new string[] { roleName });
    try
    {
        RolePrincipal user = GetCurrentUser() as RolePrincipal;
        if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached && StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
            user.SetDirty();
    }
    catch { } // best-effort cache invalidation only
}

// Removes one user from several roles.
static public void RemoveUserFromRoles(string username, string[] roleNames)
{
    EnsureEnabled();
    SecUtility.CheckParameter(ref username, true, true, true, 0, "username");
    SecUtility.CheckArrayParameter(ref roleNames, true, true, true, 0, "roleNames");
    Provider.RemoveUsersFromRoles(new string[] { username }, roleNames);
    try
    {
        RolePrincipal user = GetCurrentUser() as RolePrincipal;
        if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached && StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
            user.SetDirty();
    }
    catch { } // best-effort cache invalidation only
}

// Removes several users from one role.
static public void RemoveUsersFromRole(string[] usernames, string roleName)
{
    EnsureEnabled();
    SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName");
    SecUtility.CheckArrayParameter(ref usernames, true, true, true, 0, "usernames");
    Provider.RemoveUsersFromRoles(usernames, new string[] { roleName });
    try
    {
        RolePrincipal user = GetCurrentUser() as RolePrincipal;
        if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached)
            foreach (string username in usernames)
                if (StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
                {
                    user.SetDirty();
                    break;
                }
    }
    catch { } // best-effort cache invalidation only
}

// Removes several users from several roles in a single provider call.
static public void RemoveUsersFromRoles(string[] usernames, string[] roleNames)
{
    EnsureEnabled();
    SecUtility.CheckArrayParameter(ref roleNames, true, true, true, 0, "roleNames");
    SecUtility.CheckArrayParameter(ref usernames, true, true, true, 0, "usernames");
    Provider.RemoveUsersFromRoles(usernames, roleNames);
    try
    {
        RolePrincipal user = GetCurrentUser() as RolePrincipal;
        if (user != null && user.ProviderName == Provider.Name && user.IsRoleListCached)
            foreach (string username in usernames)
                if (StringUtil.EqualsIgnoreCase(user.Identity.Name, username))
                {
                    user.SetDirty();
                    break;
                }
    }
    catch { } // best-effort cache invalidation only
}

// Returns every role known to the default provider.
public static string[] GetAllRoles()
{
    EnsureEnabled();
    return Provider.GetAllRoles();
}

// Expires the roles cookie in the client's browser by re-issuing it with a
// date in the past. No-op when no cookie name is configured, outside a web
// request, or when the browser does not accept cookies.
public static void DeleteCookie()
{
    EnsureEnabled();
    if (CookieName == null || CookieName.Length < 1)
        return;
    HttpContext context = HttpContext.Current;
    if (context == null || !context.Request.Browser.Cookies)
        return;
    string cookieValue = String.Empty;
    // Some browsers drop cookies with empty values; substitute a sentinel.
    if (context.Request.Browser["supportsEmptyStringInCookieValue"] == "false")
        cookieValue = "NoCookie";
    HttpCookie cookie = new HttpCookie(CookieName, cookieValue);
    cookie.HttpOnly = true;
    cookie.Path = CookiePath;
    cookie.Domain = Domain;
    cookie.Expires = new System.DateTime(1999, 10, 12); // past date => browser deletes it
    cookie.Secure = CookieRequireSSL;
    // Replace any cookie already queued on the response for this name.
    context.Response.Cookies.RemoveCookie(CookieName);
    context.Response.Cookies.Add(cookie);
}

// Finds users in a role whose name matches usernameToMatch (provider-defined
// matching). usernameToMatch is checked non-empty but trailing-space trimming
// is not required (fourth argument false).
static public string[] FindUsersInRole(string roleName, string usernameToMatch)
{
    EnsureEnabled();
    SecUtility.CheckParameter(ref roleName, true, true, true, 0, "roleName");
    SecUtility.CheckParameter(ref usernameToMatch, true, true, false, 0, "usernameToMatch");
    return Provider.FindUsersInRole(roleName, usernameToMatch);
}

// Initializes the feature and throws when role management is disabled in config.
static private void EnsureEnabled()
{
    Initialize();
    if (!s_Enabled)
        throw new ProviderException(SR.GetString(SR.Roles_feature_not_enabled));
}

// One-time configuration load using double-checked locking on s_lock.
// A failure is cached in s_InitializeException and rethrown on every
// subsequent call; the default provider may be initialized later (after
// pre-start init), hence the separate s_InitializedDefaultProvider flag.
static private void Initialize()
{
    // Fast path: already initialized (successfully or not) and default
    // provider resolved — no lock needed.
    if (s_Initialized)
    {
        if (s_InitializeException != null)
        {
            throw s_InitializeException;
        }
        if (s_InitializedDefaultProvider)
        {
            return;
        }
    }
    lock (s_lock)
    {
        // Re-check under the lock (double-checked locking).
        if (s_Initialized)
        {
            if (s_InitializeException != null)
            {
                throw s_InitializeException;
            }
            if (s_InitializedDefaultProvider)
            {
                return;
            }
        }
        try
        {
            if (HostingEnvironment.IsHosted)
                HttpRuntime.CheckAspNetHostingPermission(AspNetHostingPermissionLevel.Low, SR.Feature_not_supported_at_this_level);
            RoleManagerSection settings = RuntimeConfig.GetAppConfig().RoleManager;
            //s_InitializeException = new ProviderException(SR.GetString(SR.Roles_feature_not_enabled));
            // Enabled may have been forced programmatically before init;
            // only take the config value when it was not.
            if (!s_EnabledSet)
            {
                s_Enabled = settings.Enabled;
            }
            s_CookieName = settings.CookieName;
            s_CacheRolesInCookie = settings.CacheRolesInCookie;
            s_CookieTimeout = (int)settings.CookieTimeout.TotalMinutes;
            s_CookiePath = settings.CookiePath;
            s_CookieRequireSSL = settings.CookieRequireSSL;
            s_CookieSlidingExpiration = settings.CookieSlidingExpiration;
            s_CookieProtection = settings.CookieProtection;
            s_Domain = settings.Domain;
            s_CreatePersistentCookie = settings.CreatePersistentCookie;
            s_MaxCachedResults = settings.MaxCachedResults;
            if (s_Enabled)
            {
                // Instantiate providers only if feature is enabled
                if (s_MaxCachedResults < 0)
                {
                    throw new ProviderException(SR.GetString(SR.Value_must_be_non_negative_integer, "maxCachedResults"));
                }
                InitializeSettings(settings);
                InitializeDefaultProvider(settings);
            }
        }
        catch (Exception e)
        {
            // Cache the failure; it is rethrown below and on every later call.
            s_InitializeException = e;
        }
        s_Initialized = true;
    }
    if (s_InitializeException != null)
        throw s_InitializeException;
}

// Instantiates all configured role providers into s_Providers. Hosted apps
// go through ProvidersHelper; non-hosted callers instantiate via reflection,
// cloning the parameter collection so Initialize cannot mutate config state.
private static void InitializeSettings(RoleManagerSection settings)
{
    if (!s_Initialized)
    {
        s_Providers = new RoleProviderCollection();
        if (HostingEnvironment.IsHosted)
        {
            ProvidersHelper.InstantiateProviders(settings.Providers, s_Providers, typeof(RoleProvider));
        }
        else
        {
            foreach (ProviderSettings ps in settings.Providers)
            {
                Type t = Type.GetType(ps.Type, true, true);
                if (!typeof(RoleProvider).IsAssignableFrom(t))
                    throw new ArgumentException(SR.GetString(SR.Provider_must_implement_type, typeof(RoleProvider).ToString()));
                RoleProvider provider = (RoleProvider)Activator.CreateInstance(t);
                NameValueCollection pars = ps.Parameters;
                NameValueCollection cloneParams = new NameValueCollection(pars.Count, StringComparer.Ordinal);
                foreach (string key in pars)
                    cloneParams[key] = pars[key];
                provider.Initialize(ps.Name, cloneParams);
                s_Providers.Add(provider);
            }
        }
    }
}

// Resolves the defaultProvider from config and freezes the provider
// collection. Deferred in hosted apps until after pre-start init so that
// providers registered during pre-start are visible.
private static void InitializeDefaultProvider(RoleManagerSection settings)
{
    bool canInitializeDefaultProvider = (!HostingEnvironment.IsHosted || BuildManager.PreStartInitStage == PreStartInitStage.AfterPreStartInit);
    if (!s_InitializedDefaultProvider && canInitializeDefaultProvider)
    {
        Debug.Assert(s_Providers != null);
        s_Providers.SetReadOnly();
        if (settings.DefaultProvider == null)
        {
            s_InitializeException = new ProviderException(SR.GetString(SR.Def_role_provider_not_specified));
        }
        else
        {
            try
            {
                s_Provider = s_Providers[settings.DefaultProvider];
            }
            catch { } // missing provider handled by the null check below
        }
        if (s_Provider == null)
        {
            // Report the config file/line of the bad defaultProvider attribute.
            s_InitializeException = new ConfigurationErrorsException(SR.GetString(SR.Def_role_provider_not_found), settings.ElementInformation.Properties["defaultProvider"].Source, settings.ElementInformation.Properties["defaultProvider"].LineNumber);
        }
        s_InitializedDefaultProvider = true;
    }
}

// Cached configuration state, populated once by Initialize() under s_lock.
static private RoleProvider s_Provider;
static private bool s_Enabled;
static private string s_CookieName;
static private bool s_CacheRolesInCookie;
static private int s_CookieTimeout;
static private string s_CookiePath;
static private bool s_CookieRequireSSL;
static private bool s_CookieSlidingExpiration;
static private CookieProtection s_CookieProtection;
static private string s_Domain;
static private bool s_Initialized;
static private bool s_InitializedDefaultProvider;
static private bool s_EnabledSet;
static private RoleProviderCollection s_Providers;
private static Exception s_InitializeException = null;
private static bool s_CreatePersistentCookie;
private static object s_lock = new object();
private static int s_MaxCachedResults = 25;

// Name of the current principal's identity, or empty when unavailable.
private static string GetCurrentUserName()
{
    IPrincipal user = GetCurrentUser();
    if (user == null || user.Identity == null)
        return String.Empty;
    else
        return user.Identity.Name;
}

// Current principal: the HTTP request's user when hosted, otherwise the
// thread principal.
private static IPrincipal GetCurrentUser()
{
    if (HostingEnvironment.IsHosted)
    {
        HttpContext cur = HttpContext.Current;
        if (cur != null)
            return cur.User;
    }
    return Thread.CurrentPrincipal;
}
}

////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////

// This has no hosting permission demands because of DevDiv Bugs 31461: ClientAppSvcs: ASP.net Provider support
// Strongly-typed provider collection: only RoleProvider instances may be added.
public sealed class RoleProviderCollection : ProviderCollection
{
    public override void Add(ProviderBase provider)
    {
        if (provider == null)
        {
            throw new ArgumentNullException("provider");
        }
        if (!(provider is RoleProvider))
        {
            throw new ArgumentException(SR.GetString(SR.Provider_must_implement_type, typeof(RoleProvider).ToString()), "provider");
        }
        base.Add(provider);
    }

    // Typed indexer hiding the base ProviderBase indexer.
    new public RoleProvider this[string name]
    {
        get { return (RoleProvider)base[name]; }
    }

    public void CopyTo(RoleProvider[] array, int index)
    {
        base.CopyTo(array, index);
    }
}
}
// ReSharper disable All
using System.Collections.Generic;
using System.Data;
using System.Dynamic;
using System.Linq;
using Frapid.Configuration;
using Frapid.DataAccess;
using Frapid.DataAccess.Models;
using Frapid.DbPolicy;
using Frapid.Framework.Extensions;
using Npgsql;
using Frapid.NPoco;
using Serilog;

namespace Frapid.Account.DataAccess
{
    /// <summary>
    /// Provides simplified data access features to perform SCRUD operation on the database table "account.access_tokens".
    /// NOTE(review): this class appears to be generated code; keep edits in sync with the generator template.
    /// </summary>
    public class AccessToken : DbAccess, IAccessTokenRepository
    {
        /// <summary>
        /// The schema of this table. Returns literal "account".
        /// </summary>
        public override string _ObjectNamespace => "account";

        /// <summary>
        /// The schema unqualified name of this table. Returns literal "access_tokens".
        /// </summary>
        public override string _ObjectName => "access_tokens";

        /// <summary>
        /// Login id of application user accessing this table.
        /// </summary>
        public long _LoginId { get; set; }

        /// <summary>
        /// User id of application user accessing this table.
        /// </summary>
        public int _UserId { get; set; }

        /// <summary>
        /// The name of the database on which queries are being executed.
        /// </summary>
        public string _Catalog { get; set; }

        /// <summary>
        /// Performs SQL count on the table "account.access_tokens".
/// </summary>
/// <returns>Returns the number of rows of the table "account.access_tokens".</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public long Count()
{
    // No catalog configured => nothing to count.
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return 0;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to count entity \"AccessToken\" was denied to the user with Login ID {LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "SELECT COUNT(*) FROM account.access_tokens;";
    return Factory.Scalar<long>(this._Catalog, sql);
}

/// <summary>
/// Executes a select query on the table "account.access_tokens" to return all instances of the "AccessToken" class.
/// </summary>
/// <returns>Returns non-live, non-mapped instances of the "AccessToken" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Account.Entities.AccessToken> GetAll()
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            // NOTE(review): GetAll validates with ExportData (not Read),
            // same as Export() below — confirm this is intended.
            this.Validate(AccessTypeEnum.ExportData, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the export entity \"AccessToken\" was denied to the user with Login ID {LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "SELECT * FROM account.access_tokens ORDER BY access_token_id;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql);
}

/// <summary>
/// Executes a select query on the table "account.access_tokens" to return all instances of the "AccessToken" class to export.
/// </summary>
/// <returns>Returns non-live, non-mapped instances of the "AccessToken" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<dynamic> Export()
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.ExportData, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the export entity \"AccessToken\" was denied to the user with Login ID {LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // Returned as dynamic rows (not mapped entities) for export consumers.
    const string sql = "SELECT * FROM account.access_tokens ORDER BY access_token_id;";
    return Factory.Get<dynamic>(this._Catalog, sql);
}

/// <summary>
/// Executes a select query on the table "account.access_tokens" with a where filter on the column "access_token_id" to return a single instance of the "AccessToken" class.
/// </summary>
/// <param name="accessTokenId">The column "access_token_id" parameter used on where filter.</param>
/// <returns>Returns a non-live, non-mapped instance of "AccessToken" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Account.Entities.AccessToken Get(System.Guid accessTokenId)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the get entity \"AccessToken\" filtered by \"AccessTokenId\" with value {AccessTokenId} was denied to the user with Login ID {_LoginId}", accessTokenId, this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "SELECT * FROM account.access_tokens WHERE access_token_id=@0;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql, accessTokenId).FirstOrDefault();
}

/// <summary>
/// Gets the first record of the table "account.access_tokens".
/// </summary>
/// <returns>Returns a non-live, non-mapped instance of "AccessToken" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Account.Entities.AccessToken GetFirst()
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the get the first record of entity \"AccessToken\" was denied to the user with Login ID {_LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "SELECT * FROM account.access_tokens ORDER BY access_token_id LIMIT 1;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql).FirstOrDefault();
}

/// <summary>
/// Gets the previous record of the table "account.access_tokens" sorted by accessTokenId.
/// </summary>
/// <param name="accessTokenId">The column "access_token_id" parameter used to find the previous record.</param>
/// <returns>Returns a non-live, non-mapped instance of "AccessToken" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Account.Entities.AccessToken GetPrevious(System.Guid accessTokenId)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the get the previous entity of \"AccessToken\" by \"AccessTokenId\" with value {AccessTokenId} was denied to the user with Login ID {_LoginId}", accessTokenId, this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // Largest id strictly below the given one.
    const string sql = "SELECT * FROM account.access_tokens WHERE access_token_id < @0 ORDER BY access_token_id DESC LIMIT 1;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql, accessTokenId).FirstOrDefault();
}

/// <summary>
/// Gets the next record of the table "account.access_tokens" sorted by accessTokenId.
/// </summary>
/// <param name="accessTokenId">The column "access_token_id" parameter used to find the next record.</param>
/// <returns>Returns a non-live, non-mapped instance of "AccessToken" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Account.Entities.AccessToken GetNext(System.Guid accessTokenId)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the get the next entity of \"AccessToken\" by \"AccessTokenId\" with value {AccessTokenId} was denied to the user with Login ID {_LoginId}", accessTokenId, this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // Smallest id strictly above the given one.
    const string sql = "SELECT * FROM account.access_tokens WHERE access_token_id > @0 ORDER BY access_token_id LIMIT 1;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql, accessTokenId).FirstOrDefault();
}

/// <summary>
/// Gets the last record of the table "account.access_tokens".
/// </summary>
/// <returns>Returns a non-live, non-mapped instance of "AccessToken" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Account.Entities.AccessToken GetLast()
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the get the last record of entity \"AccessToken\" was denied to the user with Login ID {_LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "SELECT * FROM account.access_tokens ORDER BY access_token_id DESC LIMIT 1;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql).FirstOrDefault();
}

/// <summary>
/// Executes a select query on the table "account.access_tokens" with a where filter on the column "access_token_id" to return multiple instances of the "AccessToken" class.
/// </summary>
/// <param name="accessTokenIds">Array of column "access_token_id" parameter used on where filter.</param>
/// <returns>Returns a non-live, non-mapped collection of "AccessToken" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Account.Entities.AccessToken> Get(System.Guid[] accessTokenIds)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to entity \"AccessToken\" was denied to the user with Login ID {LoginId}. accessTokenIds: {accessTokenIds}.", this._LoginId, accessTokenIds);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "SELECT * FROM account.access_tokens WHERE access_token_id IN (@0);";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql, accessTokenIds);
}

/// <summary>
/// Custom fields are user defined form elements for account.access_tokens.
/// </summary>
/// <param name="resourceId">Optional primary key value; when empty, returns all field definitions for the table.</param>
/// <returns>Returns an enumerable custom field collection for the table account.access_tokens</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.DataAccess.Models.CustomField> GetCustomFields(string resourceId)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to get custom fields for entity \"AccessToken\" was denied to the user with Login ID {LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    string sql;
    if (string.IsNullOrWhiteSpace(resourceId))
    {
        // No resource id: return the field definitions only.
        sql = "SELECT * FROM config.custom_field_definition_view WHERE table_name='account.access_tokens' ORDER BY field_order;";
        return Factory.Get<Frapid.DataAccess.Models.CustomField>(this._Catalog, sql);
    }

    // Resource id supplied: return definitions together with stored values.
    sql = "SELECT * from config.get_custom_field_definition('account.access_tokens'::text, @0::text) ORDER BY field_order;";
    return Factory.Get<Frapid.DataAccess.Models.CustomField>(this._Catalog, sql, resourceId);
}

/// <summary>
/// Displayfields provide a minimal name/value context for data binding the row collection of account.access_tokens.
/// </summary>
/// <returns>Returns an enumerable name and value collection for the table account.access_tokens</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.DataAccess.Models.DisplayField> GetDisplayFields()
{
    List<Frapid.DataAccess.Models.DisplayField> displayFields = new List<Frapid.DataAccess.Models.DisplayField>();

    // Missing catalog yields an empty list rather than null.
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return displayFields;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to get display field for entity \"AccessToken\" was denied to the user with Login ID {LoginId}", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // Key and value are both the primary key; the view has no display name column.
    const string sql = "SELECT access_token_id AS key, access_token_id as value FROM account.access_tokens;";
    using (NpgsqlCommand command = new NpgsqlCommand(sql))
    {
        using (DataTable table = DbOperation.GetDataTable(this._Catalog, command))
        {
            if (table?.Rows == null || table.Rows.Count == 0)
            {
                return displayFields;
            }

            foreach (DataRow row in table.Rows)
            {
                if (row != null)
                {
                    DisplayField displayField = new DisplayField
                    {
                        Key = row["key"].ToString(),
                        Value = row["value"].ToString()
                    };

                    displayFields.Add(displayField);
                }
            }
        }
    }

    return displayFields;
}

/// <summary>
/// Inserts or updates the instance of AccessToken class on the database table "account.access_tokens".
/// </summary>
/// <param name="accessToken">The instance of "AccessToken" class to insert or update.</param>
/// <param name="customFields">The custom field collection.</param>
/// <returns>The primary key value of the inserted or updated row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public object AddOrEdit(dynamic accessToken, List<Frapid.DataAccess.Models.CustomField> customFields)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    // A non-null primary key means update; otherwise insert and capture the new key.
    object primaryKeyValue = accessToken.access_token_id;

    if (accessToken.access_token_id != null)
    {
        this.Update(accessToken, accessToken.access_token_id);
    }
    else
    {
        primaryKeyValue = this.Add(accessToken);
    }

    // NOTE(review): this DELETE is scoped only by form_name, not by
    // resource_id — it removes custom-field values for EVERY row of the
    // form before re-inserting this row's values. Confirm intended.
    string sql = "DELETE FROM config.custom_fields WHERE custom_field_setup_id IN(" +
                 "SELECT custom_field_setup_id " +
                 "FROM config.custom_field_setup " +
                 "WHERE form_name=config.get_custom_field_form_name('account.access_tokens')" +
                 ");";

    Factory.NonQuery(this._Catalog, sql);

    if (customFields == null)
    {
        return primaryKeyValue;
    }

    foreach (var field in customFields)
    {
        sql = "INSERT INTO config.custom_fields(custom_field_setup_id, resource_id, value) " +
              "SELECT config.get_custom_field_setup_id_by_table_name('account.access_tokens', @0::character varying(100)), " +
              "@1, @2;";

        Factory.NonQuery(this._Catalog, sql, field.FieldName, primaryKeyValue, field.Value);
    }

    return primaryKeyValue;
}

/// <summary>
/// Inserts the instance of AccessToken class on the database table "account.access_tokens".
/// </summary>
/// <param name="accessToken">The instance of "AccessToken" class to insert.</param>
/// <returns>The primary key value of the inserted row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public object Add(dynamic accessToken)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Create, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to add entity \"AccessToken\" was denied to the user with Login ID {LoginId}. {AccessToken}", this._LoginId, accessToken);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    return Factory.Insert(this._Catalog, accessToken, "account.access_tokens", "access_token_id");
}

/// <summary>
/// Inserts or updates multiple instances of AccessToken class on the database table "account.access_tokens";
/// all rows are written inside a single transaction.
/// </summary>
/// <param name="accessTokens">List of "AccessToken" class to import.</param>
/// <returns>The primary key values of the imported rows, in input order.</returns>
public List<object> BulkImport(List<ExpandoObject> accessTokens)
{
    // NOTE(review): unlike the other members, there is no _Catalog guard here;
    // an empty catalog falls through to the connection-string lookup. Confirm.
    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.ImportData, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to import entity \"AccessToken\" was denied to the user with Login ID {LoginId}. {accessTokens}", this._LoginId, accessTokens);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    var result = new List<object>();
    int line = 0; // 1-based input position, reported in error messages
    try
    {
        using (Database db = new Database(ConnectionString.GetConnectionString(this._Catalog), Factory.ProviderName))
        {
            using (ITransaction transaction = db.GetTransaction())
            {
                foreach (dynamic accessToken in accessTokens)
                {
                    line++;

                    // NOTE(review): primaryKeyValue is assigned but never read
                    // afterwards — leftover from the generator template.
                    object primaryKeyValue = accessToken.access_token_id;

                    if (accessToken.access_token_id != null)
                    {
                        result.Add(accessToken.access_token_id);
                        db.Update("account.access_tokens", "access_token_id", accessToken, accessToken.access_token_id);
                    }
                    else
                    {
                        result.Add(db.Insert("account.access_tokens", "access_token_id", accessToken));
                    }
                }

                transaction.Complete();
            }

            return result;
        }
    }
    catch (NpgsqlException ex)
    {
        string errorMessage = $"Error on line {line} ";
        // PostgreSQL error codes beginning with "P" are mapped to localized
        // resource strings; everything else reports the raw driver message.
        if (ex.Code.StartsWith("P"))
        {
            errorMessage += Factory.GetDbErrorResource(ex);

            throw new DataAccessException(errorMessage, ex);
        }

        errorMessage += ex.Message;
        throw new DataAccessException(errorMessage, ex);
    }
    catch (System.Exception ex)
    {
        string errorMessage = $"Error on line {line} ";
        throw new DataAccessException(errorMessage, ex);
    }
}

/// <summary>
/// Updates the row of the table "account.access_tokens" with an instance of "AccessToken" class against the primary key value.
/// </summary>
/// <param name="accessToken">The instance of "AccessToken" class to update.</param>
/// <param name="accessTokenId">The value of the column "access_token_id" which will be updated.</param>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public void Update(dynamic accessToken, System.Guid accessTokenId)
{
    // Silently a no-op when no catalog is configured.
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Edit, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to edit entity \"AccessToken\" with Primary Key {PrimaryKey} was denied to the user with Login ID {LoginId}. {AccessToken}", accessTokenId, this._LoginId, accessToken);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    Factory.Update(this._Catalog, accessToken, accessTokenId, "account.access_tokens", "access_token_id");
}

/// <summary>
/// Deletes the row of the table "account.access_tokens" against the primary key value.
/// </summary>
/// <param name="accessTokenId">The value of the column "access_token_id" which will be deleted.</param>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public void Delete(System.Guid accessTokenId)
{
    // Silently a no-op when no catalog is configured.
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Delete, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to delete entity \"AccessToken\" with Primary Key {PrimaryKey} was denied to the user with Login ID {LoginId}.", accessTokenId, this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    const string sql = "DELETE FROM account.access_tokens WHERE access_token_id=@0;";
    Factory.NonQuery(this._Catalog, sql, accessTokenId);
}

/// <summary>
/// Performs a select statement on table "account.access_tokens" producing a paginated result of 10.
/// </summary>
/// <returns>Returns the first page of collection of "AccessToken" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Account.Entities.AccessToken> GetPaginatedResult()
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to the first page of the entity \"AccessToken\" was denied to the user with Login ID {LoginId}.", this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // First page: fixed page size of 10.
    const string sql = "SELECT * FROM account.access_tokens ORDER BY access_token_id LIMIT 10 OFFSET 0;";
    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql);
}

/// <summary>
/// Performs a select statement on table "account.access_tokens" producing a paginated result of 10.
/// </summary>
/// <param name="pageNumber">Enter the page number to produce the paginated result.</param>
/// <returns>Returns collection of "AccessToken" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Account.Entities.AccessToken> GetPaginatedResult(long pageNumber)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to Page #{Page} of the entity \"AccessToken\" was denied to the user with Login ID {LoginId}.", pageNumber, this._LoginId);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // pageNumber is 1-based; page size is fixed at 10.
    long offset = (pageNumber - 1) * 10;
    const string sql = "SELECT * FROM account.access_tokens ORDER BY access_token_id LIMIT 10 OFFSET @0;";

    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql, offset);
}

/// <summary>
/// Returns the stored filter definitions for this table matching the given
/// filter name (case-insensitive). No access validation is performed here.
/// </summary>
/// <param name="catalog">The name of the database to query.</param>
/// <param name="filterName">The named filter.</param>
public List<Frapid.DataAccess.Models.Filter> GetFilters(string catalog, string filterName)
{
    const string sql = "SELECT * FROM config.filters WHERE object_name='account.access_tokens' AND lower(filter_name)=lower(@0);";
    return Factory.Get<Frapid.DataAccess.Models.Filter>(catalog, sql, filterName).ToList();
}

/// <summary>
/// Performs a filtered count on table "account.access_tokens".
/// </summary>
/// <param name="filters">The list of filter conditions.</param>
/// <returns>Returns number of rows of "AccessToken" class using the filter.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public long CountWhere(List<Frapid.DataAccess.Models.Filter> filters)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return 0;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to count entity \"AccessToken\" was denied to the user with Login ID {LoginId}. Filters: {Filters}.", this._LoginId, filters);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // "WHERE 1 = 1" lets FilterManager append "AND ..." clauses uniformly.
    Sql sql = Sql.Builder.Append("SELECT COUNT(*) FROM account.access_tokens WHERE 1 = 1");
    Frapid.DataAccess.FilterManager.AddFilters(ref sql, new Frapid.Account.Entities.AccessToken(), filters);

    return Factory.Scalar<long>(this._Catalog, sql);
}

/// <summary>
/// Performs a filtered select statement on table "account.access_tokens" producing a paginated result of 10.
/// </summary>
/// <param name="pageNumber">Enter the page number to produce the paginated result. If you provide a negative number, the result will not be paginated.</param>
/// <param name="filters">The list of filter conditions.</param>
/// <returns>Returns collection of "AccessToken" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Account.Entities.AccessToken> GetWhere(long pageNumber, List<Frapid.DataAccess.Models.Filter> filters)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }
        if (!this.HasAccess)
        {
            Log.Information("Access to Page #{Page} of the filtered entity \"AccessToken\" was denied to the user with Login ID {LoginId}. Filters: {Filters}.", pageNumber, this._LoginId, filters);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    long offset = (pageNumber - 1) * 10;
    Sql sql = Sql.Builder.Append("SELECT * FROM account.access_tokens WHERE 1 = 1");

    Frapid.DataAccess.FilterManager.AddFilters(ref sql, new Frapid.Account.Entities.AccessToken(), filters);

    sql.OrderBy("access_token_id");

    // Non-positive page numbers skip pagination entirely.
    if (pageNumber > 0)
    {
        // NPoco renumbers @0 per Append call, so both arguments bind correctly.
        sql.Append("LIMIT @0", 10);
        sql.Append("OFFSET @0", offset);
    }

    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, sql);
}

/// <summary>
/// Performs a filtered count on table "account.access_tokens".
/// </summary>
/// <param name="filterName">The named filter.</param>
/// <returns>Returns number of rows of "AccessToken" class using the filter.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public long CountFiltered(string filterName)
{
    // Without a catalog there is no database context to count against.
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return 0;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }

        if (!this.HasAccess)
        {
            Log.Information("Access to count entity \"AccessToken\" was denied to the user with Login ID {LoginId}. Filter: {Filter}.", this._LoginId, filterName);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // Resolve the named filter into concrete filter conditions first.
    List<Frapid.DataAccess.Models.Filter> filters = this.GetFilters(this._Catalog, filterName);

    Sql countQuery = Sql.Builder.Append("SELECT COUNT(*) FROM account.access_tokens WHERE 1 = 1");
    Frapid.DataAccess.FilterManager.AddFilters(ref countQuery, new Frapid.Account.Entities.AccessToken(), filters);

    return Factory.Scalar<long>(this._Catalog, countQuery);
}

/// <summary>
/// Performs a filtered select statement on table "account.access_tokens" producing a paginated result of 10.
/// </summary>
/// <param name="pageNumber">Enter the page number to produce the paginated result. If you provide a negative number, the result will not be paginated.</param>
/// <param name="filterName">The named filter.</param>
/// <returns>Returns collection of "AccessToken" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Account.Entities.AccessToken> GetFiltered(long pageNumber, string filterName)
{
    if (string.IsNullOrWhiteSpace(this._Catalog))
    {
        return null;
    }

    if (!this.SkipValidation)
    {
        if (!this.Validated)
        {
            this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
        }

        if (!this.HasAccess)
        {
            Log.Information("Access to Page #{Page} of the filtered entity \"AccessToken\" was denied to the user with Login ID {LoginId}. Filter: {Filter}.", pageNumber, this._LoginId, filterName);
            throw new UnauthorizedException("Access is denied.");
        }
    }

    // Resolve the named filter into concrete filter conditions first.
    List<Frapid.DataAccess.Models.Filter> filters = this.GetFilters(this._Catalog, filterName);

    Sql selectQuery = Sql.Builder.Append("SELECT * FROM account.access_tokens WHERE 1 = 1");
    Frapid.DataAccess.FilterManager.AddFilters(ref selectQuery, new Frapid.Account.Entities.AccessToken(), filters);
    selectQuery.OrderBy("access_token_id");

    // A page number of zero or below disables pagination entirely.
    if (pageNumber > 0)
    {
        long offset = (pageNumber - 1) * 10;
        selectQuery.Append("LIMIT @0", 10);
        selectQuery.Append("OFFSET @0", offset);
    }

    return Factory.Get<Frapid.Account.Entities.AccessToken>(this._Catalog, selectQuery);
}
    }
}
//
// Copyright 2014-2015 Amazon.com,
// Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Amazon Software License (the "License").
// You may not use this file except in compliance with the
// License. A copy of the License is located at
//
//  http://aws.amazon.com/asl/
//
// or in the "license" file accompanying this file. This file is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, express or implied. See the License
// for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.IO;

namespace Amazon.Runtime.Internal.Util
{
    /// <summary>
    /// A wrapper stream that calculates a hash of the base stream as it
    /// is being read.
    /// The calculated hash is only available after the stream is closed or
    /// CalculateHash is called. After calling CalculateHash, any further reads
    /// on the streams will not change the CalculatedHash.
    /// If an ExpectedHash is specified and is not equal to the calculated hash,
    /// Close or CalculateHash methods will throw an AmazonClientException.
    /// CalculatedHash is calculated only for the portion of the stream that
    /// is read.
    /// </summary>
    /// <exception cref="Amazon.Runtime.AmazonClientException">
    /// Exception thrown during Close() or CalculateHash(), if ExpectedHash is set and
    /// is different from CalculateHash that the stream calculates, provided that
    /// CalculatedHash is not a zero-length byte array.
    /// </exception>
    public abstract class HashStream : WrapperStream
    {
        #region Properties

        /// <summary>
        /// Algorithm to use to calculate hash.
        /// Assigned by derived classes after construction.
        /// </summary>
        protected IHashingWrapper Algorithm { get; set; }

        /// <summary>
        /// True if hashing is finished and no more hashing should be done;
        /// otherwise false. Hashing is considered finished once CalculatedHash
        /// has been assigned (see CalculateHash / Reset).
        /// </summary>
        protected bool FinishedHashing { get { return CalculatedHash != null; } }

        /// <summary>
        /// Current position in the stream, i.e. the total number of bytes read
        /// through this wrapper since construction or the last Reset().
        /// </summary>
        protected long CurrentPosition { get; private set; }

        /// <summary>
        /// Calculated hash for the stream.
        /// This value is set only after the stream is closed or CalculateHash
        /// is invoked; until then it is null.
        /// </summary>
        public byte[] CalculatedHash { get; private set; }

        /// <summary>
        /// Expected hash value. Compared against CalculatedHash upon Close().
        /// If the hashes are different, an AmazonClientException is thrown.
        /// Null disables the comparison.
        /// </summary>
        public byte[] ExpectedHash { get; private set; }

        /// <summary>
        /// Expected length of stream. A negative value disables the
        /// length check performed in CalculateHash.
        /// </summary>
        public long ExpectedLength { get; protected set; }

        #endregion

        #region Constructors

        /// <summary>
        /// Initializes an HashStream with a hash algorithm and a base stream.
        /// </summary>
        /// <param name="baseStream">Stream to calculate hash for.</param>
        /// <param name="expectedHash">
        /// Expected hash. Will be compared against calculated hash on stream close.
        /// Pass in null to disable check.
        /// </param>
        /// <param name="expectedLength">
        /// Expected length of the stream. If the reading stops before reaching this
        /// position, CalculatedHash will be set to empty array.
        /// </param>
        protected HashStream(Stream baseStream, byte[] expectedHash, long expectedLength)
            : base(baseStream)
        {
            ExpectedHash = expectedHash;
            ExpectedLength = expectedLength;
            ValidateBaseStream();
            // Reset tolerates a null Algorithm, so it is safe to call before
            // the derived class has assigned the hashing wrapper.
            Reset();
        }

        #endregion

        #region Stream overrides

        /// <summary>
        /// Reads a sequence of bytes from the current stream and advances the position
        /// within the stream by the number of bytes read.
        /// Bytes actually returned are folded into the running hash unless
        /// hashing has already been finished.
        /// </summary>
        /// <param name="buffer">
        /// An array of bytes. When this method returns, the buffer contains the specified
        /// byte array with the values between offset and (offset + count - 1) replaced
        /// by the bytes read from the current source.
        /// </param>
        /// <param name="offset">
        /// The zero-based byte offset in buffer at which to begin storing the data read
        /// from the current stream.
        /// </param>
        /// <param name="count">
        /// The maximum number of bytes to be read from the current stream.
        /// </param>
        /// <returns>
        /// The total number of bytes read into the buffer. This can be less than the
        /// number of bytes requested if that many bytes are not currently available,
        /// or zero (0) if the end of the stream has been reached.
        /// </returns>
        public override int Read(byte[] buffer, int offset, int count)
        {
            int result = base.Read(buffer, offset, count);
            CurrentPosition += result;
            if (!FinishedHashing)
            {
                // Only hash the bytes actually read, not the requested count.
                Algorithm.AppendBlock(buffer, offset, result);
            }
            return result;
        }

        /// <summary>
        /// Closes the underlying stream and finishes calculating the hash.
        /// If an ExpectedHash is specified and is not equal to the calculated hash,
        /// this method will throw an AmazonClientException.
        /// </summary>
        /// <exception cref="Amazon.Runtime.AmazonClientException">
        /// If ExpectedHash is set and is different from CalculateHash that the stream calculates.
        /// </exception>
#if !WIN_RT
        public override void Close()
        {
            CalculateHash();
            base.Close();
        }
#else
        // WinRT streams have no Close(); finish hashing on dispose instead.
        protected override void Dispose(bool disposing)
        {
            CalculateHash();
            base.Dispose(disposing);
        }
#endif

        /// <summary>
        /// Gets a value indicating whether the current stream supports seeking.
        /// HashStream does not support seeking, this will always be false.
        /// </summary>
        public override bool CanSeek
        {
            get
            {
                // Restrict random access, as this will break hashing.
                return false;
            }
        }

        /// <summary>
        /// Gets or sets the position within the current stream.
        /// HashStream does not support seeking, attempting to set Position
        /// will throw NotSupportedException.
        /// </summary>
        public override long Position
        {
            get
            {
                throw new NotSupportedException("HashStream does not support seeking");
            }
            set
            {
                // Restrict random access, as this will break hashing.
                throw new NotSupportedException("HashStream does not support seeking");
            }
        }

        /// <summary>
        /// Sets the position within the current stream.
        /// HashStream does not support seeking, attempting to call Seek
        /// will throw NotSupportedException.
        /// </summary>
        /// <param name="offset">A byte offset relative to the origin parameter.</param>
        /// <param name="origin">
        /// A value of type System.IO.SeekOrigin indicating the reference point used
        /// to obtain the new position.</param>
        /// <returns>The new position within the current stream.</returns>
        public override long Seek(long offset, SeekOrigin origin)
        {
            // Restrict random access, as this will break hashing.
            throw new NotSupportedException("HashStream does not support seeking");
        }

        /// <summary>
        /// Gets the overridden length used to construct the HashStream,
        /// not the actual length of the underlying stream.
        /// </summary>
        public override long Length
        {
            get
            {
                return this.ExpectedLength;
            }
        }

        #endregion

        #region Public methods

        /// <summary>
        /// Calculates the hash for the stream so far and disables any further
        /// hashing.
        /// If the stream was not read to ExpectedLength, CalculatedHash is set
        /// to an empty array and no comparison against ExpectedHash is made.
        /// </summary>
        public void CalculateHash()
        {
            if (!FinishedHashing)
            {
                if (ExpectedLength < 0 || CurrentPosition == ExpectedLength)
                {
                    CalculatedHash = Algorithm.AppendLastBlock(new byte[0]);
                }
                else
                    // Partial read: mark the hash as unusable rather than wrong.
                    CalculatedHash = new byte[0];

                if (CalculatedHash.Length > 0 && ExpectedHash != null && ExpectedHash.Length > 0)
                {
                    if (!CompareHashes(ExpectedHash, CalculatedHash))
                        throw new AmazonClientException("Expected hash not equal to calculated hash");
                }
            }
        }

        /// <summary>
        /// Resets the hash stream to starting state.
        /// Use this if the underlying stream has been modified and needs
        /// to be rehashed without reconstructing the hierarchy.
        /// Also resets any nested HashStream wrappers.
        /// </summary>
        public void Reset()
        {
            CurrentPosition = 0;
            CalculatedHash = null;
            if (Algorithm != null)
                Algorithm.Clear();

            // Propagate the reset through stacked hash streams.
            var baseHashStream = BaseStream as HashStream;
            if (baseHashStream != null)
            {
                baseHashStream.Reset();
            }
        }

        #endregion

        #region Private methods

        /// <summary>
        /// Validates the underlying stream.
        /// </summary>
        private void ValidateBaseStream()
        {
            // Fast-fail on unusable streams
            if (!BaseStream.CanRead && !BaseStream.CanWrite)
                throw new InvalidDataException("HashStream does not support base streams that are not capable of reading or writing");
        }

        /// <summary>
        /// Compares two hashes (arrays of bytes).
        /// </summary>
        /// <param name="expected">Expected hash.</param>
        /// <param name="actual">Actual hash.</param>
        /// <returns>
        /// True if the hashes are identical; otherwise false.
        /// </returns>
        private static bool CompareHashes(byte[] expected, byte[] actual)
        {
            if (ReferenceEquals(expected, actual))
                return true;

            if (expected == null || actual == null)
                return (expected == actual);

            if (expected.Length != actual.Length)
                return false;

            // NOTE(review): not a constant-time comparison; used here for data
            // integrity checks, not secret comparison.
            for (int i = 0; i < expected.Length; i++)
            {
                if (expected[i] != actual[i])
                    return false;
            }
            return true;
        }

        #endregion
    }

    /// <summary>
    /// A wrapper stream that calculates a hash of the base stream as it
    /// is being read or written.
    /// The calculated hash is only available after the stream is closed or
    /// CalculateHash is called. After calling CalculateHash, any further reads
    /// on the streams will not change the CalculatedHash.
    /// If an ExpectedHash is specified and is not equal to the calculated hash,
    /// Close or CalculateHash methods will throw an AmazonClientException.
    /// If base stream's position is not 0 or HashOnReads is true and the entire stream is
    /// not read, the CalculatedHash will be set to an empty byte array and
    /// comparison to ExpectedHash will not be made.
    /// </summary>
    /// <exception cref="Amazon.Runtime.AmazonClientException">
    /// Exception thrown during Close() or CalculateHash(), if ExpectedHash is set and
    /// is different from CalculateHash that the stream calculates, provided that
    /// CalculatedHash is not a zero-length byte array.
/// </exception> public class HashStream<T> : HashStream where T : IHashingWrapper, new() { #region Constructors /// <summary> /// Initializes an HashStream with a hash algorithm and a base stream. /// </summary> /// <param name="baseStream">Stream to calculate hash for.</param> /// <param name="expectedHash"> /// Expected hash. Will be compared against calculated hash on stream close. /// Pass in null to disable check. /// </param> /// <param name="expectedLength"> /// Expected length of the stream. If the reading stops before reaching this /// position, CalculatedHash will be set to empty array. /// </param> public HashStream(Stream baseStream, byte[] expectedHash, long expectedLength) : base(baseStream, expectedHash, expectedLength) { Algorithm = new T(); } #endregion } /// <summary> /// A wrapper stream that calculates an MD5 hash of the base stream as it /// is being read or written. /// The calculated hash is only available after the stream is closed or /// CalculateHash is called. After calling CalculateHash, any further reads /// on the streams will not change the CalculatedHash. /// If an ExpectedHash is specified and is not equal to the calculated hash, /// Close or CalculateHash methods will throw an AmazonClientException. /// If base stream's position is not 0 or HashOnReads is true and the entire stream is /// not read, the CalculatedHash will be set to an empty byte array and /// comparison to ExpectedHash will not be made. /// </summary> /// <exception cref="Amazon.Runtime.AmazonClientException"> /// Exception thrown during Close() or CalculateHash(), if ExpectedHash is set and /// is different from CalculateHash that the stream calculates, provided that /// CalculatedHash is not a zero-length byte array. /// </exception> public class MD5Stream : HashStream<HashingWrapperMD5> { #region Constructors /// <summary> /// Initializes an MD5Stream with a base stream. 
/// </summary> /// <param name="baseStream">Stream to calculate hash for.</param> /// <param name="expectedHash"> /// Expected hash. Will be compared against calculated hash on stream close. /// Pass in null to disable check. /// </param> /// <param name="expectedLength"> /// Expected length of the stream. If the reading stops before reaching this /// position, CalculatedHash will be set to empty array. /// </param> public MD5Stream(Stream baseStream, byte[] expectedHash, long expectedLength) : base(baseStream, expectedHash, expectedLength) { } #endregion } }
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

namespace NPOI.DDF
{
    using System;
    using System.Text;
    using NPOI.Util;

    /// <summary>
    /// Together with the EscherOptRecord this record defines some of the basic
    /// properties of a shape.
    /// @author Glen Stampoultzis (glens at apache.org)
    /// </summary>
    public class EscherSpRecord : EscherRecord
    {
        public const short RECORD_ID = unchecked((short)0xF00A);
        public const String RECORD_DESCRIPTION = "MsofbtSp";

        // Shape flag bits stored in field_2_flags (see [MS-ODRAW]).
        public const int FLAG_GROUP = 0x0001;
        public const int FLAG_CHILD = 0x0002;
        public const int FLAG_PATRIARCH = 0x0004;
        public const int FLAG_DELETED = 0x0008;
        public const int FLAG_OLESHAPE = 0x0010;
        public const int FLAG_HAVEMASTER = 0x0020;
        public const int FLAG_FLIPHORIZ = 0x0040;
        public const int FLAG_FLIPVERT = 0x0080;
        public const int FLAG_CONNECTOR = 0x0100;
        public const int FLAG_HAVEANCHOR = 0x0200;
        public const int FLAG_BACKGROUND = 0x0400;
        public const int FLAG_HASSHAPETYPE = 0x0800;

        private int field_1_shapeId;
        private int field_2_flags;

        /// <summary>
        /// Deserializes this record from the given byte array. An Sp record is
        /// a leaf record: the body is exactly the shape id (4 bytes) followed
        /// by the shape flags (4 bytes), so no child records are created.
        /// </summary>
        /// <param name="data">The byte array containing the serialized escher
        /// records.</param>
        /// <param name="offset">The offset into the byte array.</param>
        /// <param name="recordFactory">A factory for creating new escher records
        /// (unused; this record has no children).</param>
        /// <returns>The number of bytes consumed from the array.</returns>
        public override int FillFields(byte[] data, int offset, IEscherRecordFactory recordFactory)
        {
            ReadHeader(data, offset);
            int pos = offset + 8;
            int size = 0;
            field_1_shapeId = LittleEndian.GetInt(data, pos + size); size += 4;
            field_2_flags = LittleEndian.GetInt(data, pos + size); size += 4;
            return RecordSize;
        }

        /// <summary>
        /// Serializes this record to an existing byte array, notifying the
        /// supplied listener before and after the record is written.
        /// </summary>
        /// <param name="offset">the offset within the data byte array.</param>
        /// <param name="data"> the data array to Serialize to.</param>
        /// <param name="listener">a listener for begin and end serialization events.</param>
        /// <returns>The number of bytes written.</returns>
        public override int Serialize(int offset, byte[] data, EscherSerializationListener listener)
        {
            listener.BeforeRecordSerialize(offset, RecordId, this);
            LittleEndian.PutShort(data, offset, Options);
            LittleEndian.PutShort(data, offset + 2, RecordId);
            int remainingBytes = 8; // body = shape id (4) + flags (4)
            LittleEndian.PutInt(data, offset + 4, remainingBytes);
            LittleEndian.PutInt(data, offset + 8, field_1_shapeId);
            LittleEndian.PutInt(data, offset + 12, field_2_flags);
            listener.AfterRecordSerialize(offset + RecordSize, RecordId, RecordSize, this);
            return RecordSize;
        }

        /// <summary>
        /// Returns the number of bytes that are required to Serialize this record:
        /// an 8-byte header plus an 8-byte body.
        /// </summary>
        /// <value>Number of bytes</value>
        public override int RecordSize
        {
            get { return 8 + 8; }
        }

        /// <summary>
        /// The 16 bit identifier for this record.
        /// </summary>
        /// <value></value>
        public override short RecordId
        {
            get { return RECORD_ID; }
        }

        /// <summary>
        /// The short name for this record
        /// </summary>
        /// <value></value>
        public override String RecordName
        {
            get { return "Sp"; }
        }

        /// <summary>
        /// Returns a <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
        /// </summary>
        /// <returns>
        /// A <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
        /// </returns>
        public override String ToString()
        {
            String nl = Environment.NewLine;
            return this.GetType().Name + ":" + nl +
                   "  RecordId: 0x" + HexDump.ToHex(RECORD_ID) + nl +
                   "  Version: 0x" + HexDump.ToHex(Version) + nl +
                   "  ShapeType: 0x" + HexDump.ToHex(ShapeType) + nl +
                   "  ShapeId: " + field_1_shapeId + nl +
                   "  Flags: " + DecodeFlags(field_2_flags) + " (0x" + HexDump.ToHex(field_2_flags) + ")" + nl;
        }

        public override String ToXml(String tab)
        {
            StringBuilder builder = new StringBuilder();
            builder.Append(tab).Append(FormatXmlRecordHeader(GetType().Name, HexDump.ToHex(RecordId), HexDump.ToHex(Version), HexDump.ToHex(Instance)))
                .Append(tab).Append("\t").Append("<ShapeType>0x").Append(HexDump.ToHex(ShapeType)).Append("</ShapeType>\n")
                .Append(tab).Append("\t").Append("<ShapeId>").Append(field_1_shapeId).Append("</ShapeId>\n")
                .Append(tab).Append("\t").Append("<Flags>").Append(DecodeFlags(field_2_flags) + " (0x" + HexDump.ToHex(field_2_flags) + ")").Append("</Flags>\n");
            builder.Append(tab).Append("</").Append(GetType().Name).Append(">\n");
            return builder.ToString();
        }

        /// <summary>
        /// Converts the shape flags into a more descriptive name.
        /// </summary>
        /// <param name="flags">The flags.</param>
        /// <returns>A "|"-separated list of the set flag names, e.g. "GROUP|CHILD".</returns>
        private String DecodeFlags(int flags)
        {
            StringBuilder result = new StringBuilder();
            result.Append((flags & FLAG_GROUP) != 0 ? "|GROUP" : "");
            result.Append((flags & FLAG_CHILD) != 0 ? "|CHILD" : "");
            result.Append((flags & FLAG_PATRIARCH) != 0 ? "|PATRIARCH" : "");
            result.Append((flags & FLAG_DELETED) != 0 ? "|DELETED" : "");
            result.Append((flags & FLAG_OLESHAPE) != 0 ? "|OLESHAPE" : "");
            result.Append((flags & FLAG_HAVEMASTER) != 0 ? "|HAVEMASTER" : "");
            result.Append((flags & FLAG_FLIPHORIZ) != 0 ? "|FLIPHORIZ" : "");
            result.Append((flags & FLAG_FLIPVERT) != 0 ? "|FLIPVERT" : "");
            result.Append((flags & FLAG_CONNECTOR) != 0 ? "|CONNECTOR" : "");
            result.Append((flags & FLAG_HAVEANCHOR) != 0 ? "|HAVEANCHOR" : "");
            result.Append((flags & FLAG_BACKGROUND) != 0 ? "|BACKGROUND" : "");
            result.Append((flags & FLAG_HASSHAPETYPE) != 0 ? "|HASSHAPETYPE" : "");

            // Drop the leading "|"; guard the empty case - bug 34435
            if (result.Length > 0)
            {
                result.Remove(0, 1);
            }
            return result.ToString();
        }

        /// <summary>
        /// Gets or sets A number that identifies this shape
        /// </summary>
        /// <value>The shape id.</value>
        public int ShapeId
        {
            get { return field_1_shapeId; }
            set { this.field_1_shapeId = value; }
        }

        /// <summary>
        /// The flags that apply to this shape.
        /// </summary>
        /// <value>The flags.</value>
        public int Flags
        {
            get { return field_2_flags; }
            set { this.field_2_flags = value; }
        }

        /// <summary>
        /// Get or set shape type. Must be one of MSOSPT values (see [MS-ODRAW] for details).
        /// Stored in the record header's Instance field.
        /// </summary>
        public short ShapeType
        {
            get { return Instance; }
            set { Instance = (value); }
        }
    }
}
#region License

/*
 * Copyright (C) 2002-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#endregion

using System;
using System.Threading;

namespace Java.Util.Concurrent
{
    /// <summary>
    /// Enumeration representing a task execution status.
    /// Declared as flags so that Complete and Cancelled can be tested
    /// together in a single mask (see FutureTask's terminal-state check).
    /// </summary>
    [Flags]
    internal enum TaskState : short // NET_ONLY
    {
        /// <summary>State value representing that task is ready to run </summary>
        Ready = 0,
        /// <summary>State value representing that task is running </summary>
        Running = 1,
        /// <summary>State value representing that task ran </summary>
        Complete = 2,
        /// <summary>State value representing that task was cancelled </summary>
        Cancelled = 4,
        /// <summary>State value representing that the task should be stopped.</summary>
        Stop = 8
    }

    /// <summary>
    /// A cancellable asynchronous computation.
    /// </summary>
    /// <remarks>
    /// <para>
    /// This class provides a base implementation of
    /// <see cref="IFuture{T}"/>, with methods to start and cancel
    /// a computation, query to see if the computation is complete, and
    /// retrieve the result of the computation. The result can only be
    /// retrieved when the computation has completed; the <see cref="GetResult()"/>
    /// method will block if the computation has not yet completed. Once
    /// the computation has completed, the computation cannot be restarted
    /// or cancelled.
    /// </para>
    /// <para>
    /// A <see cref="FutureTask{T}"/> can be used to wrap a <see cref="Action"/>
    /// delegate, <see cref="Func{T}"/> delegate, <see cref="IRunnable"/> object
    /// or <see cref="ICallable{T}"/> object. Because <see cref="FutureTask{T}"/>
    /// implements <see cref="IRunnable"/>, a <see cref="FutureTask{T}"/> can be
    /// submitted to an <see cref="IExecutor"/> for execution.
    /// </para>
    /// <para>
    /// In addition to serving as a standalone class, this class provides
    /// protected functionality that may be useful when creating
    /// customized task classes.
    /// </para>
    /// </remarks>
    /// <typeparam name="T">
    /// The result type returned by <see cref="GetResult()"/> method.
    /// </typeparam>
    /// <author>Doug Lea</author>
    /// <author>Griffin Caprio (.NET)</author>
    /// <author>Kenneth Xu</author>
    public class FutureTask<T> : IRunnableFuture<T>, IContextCopyingTask //BACKPORT_3_1
    {
        // The computation to run; fixed at construction time.
        private readonly ICallable<T> _callable;
        // Result and failure slots; valid only once the task reaches a terminal state.
        private T _result;
        private Exception _exception;
        // Guarded by lock(this); see methods below.
        private TaskState _taskState;

        /// <summary>
        /// The thread running task. When nulled after set/cancel, this
        /// indicates that the results are accessible. Must be
        /// volatile, to ensure visibility upon completion.
        /// </summary>
        private volatile Thread _runningThread;

        // Optional context to restore on the executing thread before the task runs.
        private IContextCarrier _contextCarrier;

        /// <summary>
        /// Creates a <see cref="FutureTask{T}"/> that will, upon running, execute the
        /// given <see cref="ICallable{T}"/>.
        /// </summary>
        /// <param name="callable">The callable task.</param>
        /// <exception cref="System.ArgumentNullException">
        /// If the <paramref name="callable"/> is <c>null</c>.
        /// </exception>
        public FutureTask(ICallable<T> callable)
        {
            if (callable == null) throw new ArgumentNullException("callable");
            _callable = callable;
        }

        /// <summary>
        /// Creates a <see cref="FutureTask{T}"/> that will, upon running, execute the
        /// given <see cref="Func{T}"/> delegate.
        /// </summary>
        /// <param name="call">The <see cref="Func{T}"/> delegate.</param>
        /// <exception cref="System.ArgumentNullException">
        /// If the <paramref name="call"/> is <c>null</c>.
        /// </exception>
        public FutureTask(Func<T> call)
            : this(Executors.CreateCallable(call))
        {
        }

        /// <summary>
        /// Creates a <see cref="FutureTask{T}"/> that will, upon running, execute the
        /// given <see cref="IRunnable"/>, and arrange that <see cref="GetResult()"/>
        /// will return the given <paramref name="result"/> upon successful completion.
        /// </summary>
        /// <param name="task">The runnable task.</param>
        /// <param name="result">
        /// The result to return on successful completion. If
        /// you don't need a particular result, consider using
        /// constructions of the form:
        /// <code language="c#">
        ///   Future f = new FutureTask(runnable, default(T))
        /// </code>
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// If the <paramref name="task"/> is <c>null</c>.
        /// </exception>
        public FutureTask(IRunnable task, T result)
            : this(Executors.CreateCallable(task, result))
        {
        }

        /// <summary>
        /// Creates a <see cref="FutureTask{T}"/> that will, upon running, execute the
        /// given <see cref="Action"/>, and arrange that <see cref="GetResult()"/>
        /// will return the given <paramref name="result"/> upon successful completion.
        /// </summary>
        /// <param name="action">The <see cref="Action"/> delegate.</param>
        /// <param name="result">
        /// The result to return on successful completion. If
        /// you don't need a particular result, consider using
        /// constructions of the form:
        /// <code language="c#">
        ///   Future f = new FutureTask(action, default(T))
        /// </code>
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If the <paramref name="action"/> is <c>null</c>.
        /// </exception>
        public FutureTask(Action action, T result)
            : this(Executors.CreateCallable(action, result))
        {
        }

        #region IFuture<T> Members

        /// <summary>
        /// Determines if this task was cancelled.
        /// </summary>
        /// <remarks>
        /// Returns <c>true</c> if this task was cancelled before it completed
        /// normally.
        /// </remarks>
        /// <returns> <c>true</c> if task was cancelled before it completed
        /// </returns>
        public virtual bool IsCancelled
        {
            get
            {
                // NOTE(review): this class synchronizes on "this" throughout
                // (lock/Monitor.Wait/PulseAll). External callers locking the task
                // instance could interfere; confirm before changing the scheme.
                lock (this)
                {
                    return _taskState == TaskState.Cancelled;
                }
            }
        }

        /// <summary>
        /// Returns <c>true</c> if this task completed.
        /// </summary>
        /// <remarks>
        /// Completion may be due to normal termination, an exception, or
        /// cancellation -- in all of these cases, this method will return
        /// <c>true</c> if this task completed.
        /// </remarks>
        /// <returns> <c>true</c> if this task completed.</returns>
        public virtual bool IsDone
        {
            get
            {
                lock (this)
                {
                    // Done means a terminal state was reached AND the running
                    // thread slot has been cleared (results are accessible).
                    return RanOrCancelled() && _runningThread == null;
                }
            }
        }

        /// <summary>
        /// Waits for computation to complete, then returns its result.
        /// </summary>
        /// <remarks>
        /// Waits if necessary for the computation to complete, and then
        /// retrieves its result.
        /// </remarks>
        /// <returns>The computed result</returns>
        /// <exception cref="Spring.Threading.Execution.CancellationException">if the computation was cancelled.</exception>
        /// <exception cref="Spring.Threading.Execution.ExecutionException">if the computation threw an exception.</exception>
        /// <exception cref="System.Threading.ThreadInterruptedException">if the current thread was interrupted while waiting.</exception>
        public virtual T Get()
        {
            lock (this)
            {
                WaitFor();
                return Result;
            }
        }

        /// <summary>
        /// Waits for the given time span, then returns its result.
        /// </summary>
        /// <remarks>
        /// Waits, if necessary, for at most the <paramref name="durationToWait"/> for the computation
        /// to complete, and then retrieves its result, if available.
        /// </remarks>
        /// <param name="durationToWait">the <see cref="System.TimeSpan"/> to wait.</param>
        /// <returns>the computed result</returns>
        /// <exception cref="Spring.Threading.Execution.CancellationException">if the computation was cancelled.</exception>
        /// <exception cref="Spring.Threading.Execution.ExecutionException">if the computation threw an exception.</exception>
        /// <exception cref="System.Threading.ThreadInterruptedException">if the current thread was interrupted while waiting.</exception>
        /// <exception cref="TimeoutException">if the wait timed out before the computation completed.</exception>
        public virtual T Get(TimeSpan durationToWait)
        {
            lock (this)
            {
                WaitFor(durationToWait);
                return Result;
            }
        }

        /// <summary>
        /// Attempts to cancel execution of this task.
        /// </summary>
        /// <remarks>
        /// This attempt will fail if the task has already completed, already been cancelled,
        /// or could not be cancelled for some other reason. If successful,
        /// and this task has not started when <see cref="ICancellable.Cancel()"/> is called,
        /// this task should never run. If the task has already started, the in-progress tasks are allowed
        /// to complete
        /// </remarks>
        /// <returns> <c>false</c> if the task could not be cancelled,
        /// typically because it has already completed normally;
        /// <c>true</c> otherwise
        /// </returns>
        public virtual bool Cancel()
        {
            // Delegates with mayInterruptIfRunning == false: never interrupts.
            return Cancel(false);
        }

        /// <summary>
        /// Attempts to cancel execution of this task.
        /// </summary>
        /// <remarks>
        /// This attempt will fail if the task has already completed, already been cancelled,
        /// or could not be cancelled for some other reason. If successful,
        /// and this task has not started when <see cref="ICancellable.Cancel()"/> is called,
        /// this task should never run. If the task has already started,
        /// then the <paramref name="mayInterruptIfRunning"/> parameter determines
        /// whether the thread executing this task should be interrupted in
        /// an attempt to stop the task.
/// </remarks> /// <param name="mayInterruptIfRunning"><c>true</c> if the thread executing this /// task should be interrupted; otherwise, in-progress tasks are allowed /// to complete /// </param> /// <returns> <c>false</c> if the task could not be cancelled, /// typically because it has already completed normally; /// <c>true</c> otherwise /// </returns> public virtual bool Cancel(bool mayInterruptIfRunning) { lock (this) { if (RanOrCancelled()) return false; _taskState = TaskState.Cancelled; if (mayInterruptIfRunning) { Thread r = _runningThread; if (r != null) r.Interrupt(); } _runningThread = null; Monitor.PulseAll(this); } Done(); return true; } /// <summary> /// The entry point /// </summary> public virtual void Run() { if (_contextCarrier != null) { _contextCarrier.Restore(); } lock (this) { if (_taskState != TaskState.Ready) return; _taskState = TaskState.Running; _runningThread = Thread.CurrentThread; } try { SetCompleted(_callable.Call()); } catch (Exception ex) { SetFailed(ex); } } #endregion #region Protected Methods /// <summary> /// Sets the result of this <see cref="IFuture{T}"/> to the given /// <paramref name="result"/> value unless /// this future has already been set or has been cancelled. /// </summary> /// <remarks> /// This method is invoked upon successful completion of the /// computation. /// </remarks> /// <param name="result"> /// The value to be retured by <see cref="GetResult()"/>. /// </param> protected virtual void SetResult(T result) { SetCompleted(result); } /// <summary> /// Protected method invoked when this task transitions to state /// <see cref="ICancellable.IsDone"/> (whether normally or via cancellation). /// </summary> /// <remarks> /// The default implementation does nothing. Subclasses may override /// this method to invoke completion callbacks or perform /// bookkeeping. Note that you can query status inside the /// implementation of this method to determine whether this task /// has been cancelled. 
        /// </remarks>
        protected internal virtual void Done()
        {
        }

        /// <summary>
        /// Causes this future to report an <see cref="Spring.Threading.Execution.ExecutionException"/>
        /// with the given <see cref="System.Exception"/> as its cause, unless this <see cref="IFuture{T}"/> has
        /// already been set or has been cancelled.
        /// </summary>
        /// <remarks>
        /// This method is invoked internally by the <see cref="Spring.Threading.IRunnable"/> method
        /// upon failure of the computation.
        /// </remarks>
        /// <param name="t">the cause of failure</param>
        protected virtual void SetException(Exception t)
        {
            SetFailed(t);
        }

        /// <summary>
        /// Executes the computation without setting its result, and then
        /// resets this Future to initial state, failing to do so if the
        /// computation encounters an exception or is cancelled.
        /// </summary>
        /// <remarks>
        /// This is designed for use with tasks that intrinsically execute more
        /// than once.
        /// </remarks>
        /// <returns> <c>true</c> if successfully run and reset</returns>
        protected virtual bool RunAndReset()
        {
            lock (this)
            {
                if (_taskState != TaskState.Ready) return false;
                _taskState = TaskState.Running;
                _runningThread = Thread.CurrentThread;
            }
            try
            {
                _callable.Call();
                lock (this)
                {
                    _runningThread = null;
                    // Only re-arm if nothing (e.g. Cancel) changed the state while
                    // the computation was running.
                    if (_taskState == TaskState.Running)
                    {
                        _taskState = TaskState.Ready;
                        return true;
                    }
                    else
                    {
                        return false;
                    }
                }
            }
            catch (Exception ex)
            {
                SetFailed(ex);
                return false;
            }
        }

        #endregion

        /// <summary>
        /// Sets the result of the task, and marks the task as completed
        /// </summary>
        private void SetCompleted(T value)
        {
            lock (this)
            {
                if (RanOrCancelled()) return;
                _taskState = TaskState.Complete;
                _result = value;
                _runningThread = null;
                Monitor.PulseAll(this);
            }

            // invoking callbacks *after* setting future as completed and
            // outside the synchronization block makes it safe to call
            // interrupt() from within callback code (in which case it will be
            // ignored rather than cause deadlock / illegal state exception)
            Done();
        }

        /// <summary>
        /// Sets the exception
        /// result of the task, and marks the tasks as completed.
        /// </summary>
        private void SetFailed(Exception value)
        {
            lock (this)
            {
                if (RanOrCancelled()) return;
                _taskState = TaskState.Complete;
                _exception = value;
                _runningThread = null;
                Monitor.PulseAll(this);
            }

            // invoking callbacks *after* setting future as completed and
            // outside the synchronization block makes it safe to call
            // interrupt() from within callback code (in which case it will be
            // ignored rather than cause deadlock / illegal state exception)
            Done();
        }

        /// <summary>
        /// Gets the result of the task.
        /// </summary>
        private T Result
        {
            get
            {
                // Cancellation takes precedence over any captured value/exception.
                if (_taskState == TaskState.Cancelled)
                {
                    throw new CancellationException();
                }
                if (_exception != null)
                {
                    throw new ExecutionException(_exception);
                }
                return _result;
            }
        }

        /// <summary> Waits for the task to complete.</summary>
        private void WaitFor()
        {
            // Must be called while holding the monitor; Monitor.Wait releases it
            // until SetCompleted/SetFailed/Cancel pulse.
            while (!IsDone)
            {
                Monitor.Wait(this);
            }
        }

        /// <summary>
        /// Waits for the task to complete for <paramref name="durationToWait"/> or throws a
        /// <see cref="TimeoutException"/>
        /// if still not completed after that
        /// </summary>
        private void WaitFor(TimeSpan durationToWait)
        {
            if (IsDone) return;
            // A negative duration is treated as "wait forever".
            if (durationToWait.Ticks < 0)
            {
                while (!IsDone)
                {
                    Monitor.Wait(this);
                }
            }
            else
            {
                // Re-arm the wait after each pulse until the absolute deadline
                // passes; guards against spurious/unrelated pulses.
                DateTime deadline = DateTime.UtcNow.Add(durationToWait);
                while (durationToWait.Ticks > 0)
                {
                    Monitor.Wait(this, durationToWait);
                    if (IsDone) return;
                    durationToWait = deadline.Subtract(DateTime.UtcNow);
                }
                throw new TimeoutException();
            }
        }

        // Combined mask: task has finished (normally or with failure) or was cancelled.
        private const TaskState CompleteOrCancelled = TaskState.Complete | TaskState.Cancelled;

        private bool RanOrCancelled()
        {
            return (_taskState & CompleteOrCancelled) != 0;
        }

        #region IContextCopyingTask Members

        // Explicit interface implementation: the carrier captured at creation,
        // restored on the executing thread at the start of Run().
        IContextCarrier IContextCopyingTask.ContextCarrier
        {
            get { return _contextCarrier; }
            set { _contextCarrier = value; }
        }

        #endregion
    }
}
//---------------------------------------------------------------------------
//
// <copyright file="ObjectRef.cs" company="Microsoft">
//    Copyright (C) Microsoft Corporation.  All rights reserved.
// </copyright>
//
// Description: ObjectRef is a general way to name objects used in data binding
//
// See spec at http://avalon/connecteddata/Specs/Data%20Binding.mht
//
//---------------------------------------------------------------------------

using System;
using System.Collections;
using System.Diagnostics;
using System.Globalization;
using System.Reflection;

using System.Windows;
using System.Windows.Media;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Markup;
using MS.Internal;
using MS.Internal.Utility;

namespace MS.Internal.Data
{
    #region ObjectRefArgs

    // args to GetObject and GetDataObject
    internal class ObjectRefArgs
    {
        // when true, resolution steps are reported via TraceData
        internal bool IsTracing { get; set; }

        // when true, ElementName lookup searches the container's template first
        internal bool ResolveNamesInTemplate { get; set; }

        // set by the resolver when the name was found in an outer name scope
        internal bool NameResolvedInOuterScope { get; set; }
    }

    #endregion ObjectRefArgs

    #region ObjectRef

    /// <summary> Abstract object reference. </summary>
    internal abstract class ObjectRef
    {
        //------------------------------------------------------
        //
        //  Constructors
        //
        //------------------------------------------------------

        /// <summary> Constructor is protected - you can only create subclasses. </summary>
        protected ObjectRef() {}

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        /// <summary> Returns the referenced object. </summary>
        /// <param name="d">Element defining context for the reference. </param>
        /// <param name="args">See ObjectRefArgs </param>
        internal virtual object GetObject(DependencyObject d, ObjectRefArgs args)
        {
            return null;
        }

        /// <summary> Returns the data object associated with the referenced object.
        /// Often this is the same as the referenced object.
        /// </summary>
        /// <param name="d">Element defining context for the reference. </param>
        /// <param name="args">See ObjectRefArgs </param>
        internal virtual object GetDataObject(DependencyObject d, ObjectRefArgs args)
        {
            return GetObject(d, args);
        }

        /// <summary> true if the ObjectRef really needs the tree context </summary>
        internal bool TreeContextIsRequired(DependencyObject target)
        {
            return ProtectedTreeContextIsRequired(target);
        }

        /// <summary> true if the ObjectRef really needs the tree context </summary>
        protected virtual bool ProtectedTreeContextIsRequired(DependencyObject target)
        {
            return false;
        }

        /// <summary>
        /// true if the ObjectRef uses the mentor of the target element,
        /// rather than the target element itself.
        /// </summary>
        internal bool UsesMentor
        {
            get { return ProtectedUsesMentor; }
        }

        /// <summary>
        /// true if the ObjectRef uses the mentor of the target element,
        /// rather than the target element itself.
        /// </summary>
        protected virtual bool ProtectedUsesMentor
        {
            get { return true; }
        }

        /// <summary>
        /// identify this ObjectRef to the user - used by extended tracing
        /// </summary>
        internal abstract string Identify();
    }

    #endregion ObjectRef

    #region ElementObjectRef

    /// <summary> Object reference to a DependencyObject via its Name. </summary>
    internal sealed class ElementObjectRef : ObjectRef
    {
        //------------------------------------------------------
        //
        //  Constructors
        //
        //------------------------------------------------------

        /// <summary> Constructor. </summary>
        /// <param name="name">Name of the referenced Element.</param>
        /// <exception cref="ArgumentNullException"> name is a null reference </exception>
        internal ElementObjectRef(string name)
        {
            if (name == null)
                throw new ArgumentNullException("name");

            _name = name.Trim();
        }

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        /// <summary> Returns the referenced object.
        /// </summary>
        /// <param name="d">Element defining context for the reference. </param>
        /// <param name="args">See ObjectRefArgs </param>
        internal override object GetObject(DependencyObject d, ObjectRefArgs args)
        {
            if (d == null)
                throw new ArgumentNullException("d");

            object o = null;
            if (args.ResolveNamesInTemplate)
            {
                // look in container's template (if any) first
                FrameworkElement fe = d as FrameworkElement;
                if (fe != null && fe.TemplateInternal != null)
                {
                    o = Helper.FindNameInTemplate(_name, d);

                    if (args.IsTracing)
                    {
                        TraceData.Trace(TraceEventType.Warning,
                                        TraceData.ElementNameQueryTemplate(_name, TraceData.Identify(d)));
                    }
                }

                if (o == null)
                {
                    args.NameResolvedInOuterScope = true;
                }
            }

            // walk outward through enclosing name scopes until the name resolves
            FrameworkObject fo = new FrameworkObject(d);
            while (o == null && fo.DO != null)
            {
                DependencyObject scopeOwner;
                o = fo.FindName(_name, out scopeOwner);

                // if the original element is a scope owner, supports IComponentConnector,
                // and has a parent, don't use the result of FindName.  The
                // element is probably an instance of a Xaml-subclassed control;
                // we want to resolve the name starting in the next outer scope.
                // (bug 1669408)
                // Also, if the element's NavigationService property is locally
                // set, the element is the root of a navigation and should use the
                // inner scope (bug 1765041)
                if (d == scopeOwner && d is IComponentConnector &&
                    d.ReadLocalValue(System.Windows.Navigation.NavigationService.NavigationServiceProperty) == DependencyProperty.UnsetValue)
                {
                    DependencyObject parent = LogicalTreeHelper.GetParent(d);
                    if (parent == null)
                    {
                        parent = Helper.FindMentor(d.InheritanceContext);
                    }

                    if (parent != null)
                    {
                        o = null;
                        fo.Reset(parent);
                        continue;
                    }
                }

                if (args.IsTracing)
                {
                    TraceData.Trace(TraceEventType.Warning,
                                    TraceData.ElementNameQuery(_name, TraceData.Identify(fo.DO)));
                }

                if (o == null)
                {
                    args.NameResolvedInOuterScope = true;

                    // move to the next outer namescope.
                    // First try TemplatedParent of the scope owner.
                    FrameworkObject foScopeOwner = new FrameworkObject(scopeOwner);
                    DependencyObject dd = foScopeOwner.TemplatedParent;

                    // if that doesn't work, we could be at the top of
                    // generated content for an ItemsControl.  If so, use
                    // the (visual) parent - a panel.
                    if (dd == null)
                    {
                        Panel panel = fo.FrameworkParent.DO as Panel;
                        if (panel != null && panel.IsItemsHost)
                        {
                            dd = panel;
                        }
                    }

                    // if the logical parent is a ContentControl whose content
                    // points right back, move to the ContentControl.  This is the
                    // moral equivalent of having the ContentControl as the TemplatedParent.
                    // (The InheritanceBehavior clause prevents this for cases where the
                    // parent ContentControl imposes a barrier, e.g. Frame)
                    if (dd == null && scopeOwner == null)
                    {
                        ContentControl cc = LogicalTreeHelper.GetParent(fo.DO) as ContentControl;
                        if (cc != null && cc.Content == fo.DO && cc.InheritanceBehavior == InheritanceBehavior.Default)
                        {
                            dd = cc;
                        }
                    }

                    // next, see if we're in a logical tree attached directly
                    // to a ContentPresenter.  This is the moral equivalent of
                    // having the ContentPresenter as the TemplatedParent.
                    if (dd == null && scopeOwner == null)
                    {
                        // go to the top of the logical subtree
                        DependencyObject parent;
                        for (dd = fo.DO; ; )
                        {
                            parent = LogicalTreeHelper.GetParent(dd);
                            if (parent == null)
                            {
                                parent = Helper.FindMentor(dd.InheritanceContext);
                            }

                            if (parent == null)
                                break;

                            dd = parent;
                        }

                        // if it's attached to a ContentPresenter, move to the CP
                        ContentPresenter cp = VisualTreeHelper.IsVisualType(dd) ? VisualTreeHelper.GetParent(dd) as ContentPresenter : null;
                        dd = (cp != null && cp.TemplateInternal.CanBuildVisualTree) ? cp : null;
                    }

                    fo.Reset(dd);
                }
            }

            if (o == null)
            {
                // name not found anywhere: report UnsetValue so the binding engine
                // can distinguish "unresolved" from a legitimate null source
                o = DependencyProperty.UnsetValue;
                args.NameResolvedInOuterScope = false;
            }

            return o;
        }

        public override string ToString()
        {
            return String.Format(CultureInfo.InvariantCulture, "ElementName={0}", _name);
        }

        internal override string Identify()
        {
            return "ElementName";
        }

        //------------------------------------------------------
        //
        //  Private Fields
        //
        //------------------------------------------------------

        string _name;
    }

    #endregion ElementObjectRef

    #region RelativeObjectRef

    /// <summary> Object reference relative to the target element.
    /// </summary>
    internal sealed class RelativeObjectRef : ObjectRef
    {
        //------------------------------------------------------
        //
        //  Constructors
        //
        //------------------------------------------------------

        /// <summary> Constructor. </summary>
        /// <param name="relativeSource">RelativeSource. </param>
        /// <exception cref="ArgumentNullException"> relativeSource is a null reference </exception>
        internal RelativeObjectRef(RelativeSource relativeSource)
        {
            if (relativeSource == null)
                throw new ArgumentNullException("relativeSource");

            _relativeSource = relativeSource;
        }

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        public override string ToString()
        {
            string s;
            switch (_relativeSource.Mode)
            {
                case RelativeSourceMode.FindAncestor:
                    s = String.Format(CultureInfo.InvariantCulture,
                        "RelativeSource {0}, AncestorType='{1}', AncestorLevel='{2}'",
                        _relativeSource.Mode,
                        _relativeSource.AncestorType,
                        _relativeSource.AncestorLevel);
                    break;
                default:
                    s = String.Format(CultureInfo.InvariantCulture,
                        "RelativeSource {0}", _relativeSource.Mode);
                    break;
            }

            return s;
        }

        /// <summary> Returns the referenced object. </summary>
        /// <param name="d">Element defining context for the reference.
</param> /// <param name="args">See ObjectRefArgs </param> /// <exception cref="ArgumentNullException"> d is a null reference </exception> internal override object GetObject(DependencyObject d, ObjectRefArgs args) { return GetDataObjectImpl(d, args); } /// <summary> Returns the data object associated with the referenced object. /// Often this is the same as the referenced object. /// </summary> /// <param name="d">Element defining context for the reference. </param> /// <param name="args">See ObjectRefArgs </param> /// <exception cref="ArgumentNullException"> d is a null reference </exception> internal override object GetDataObject(DependencyObject d, ObjectRefArgs args) { object o = GetDataObjectImpl(d, args); DependencyObject el = o as DependencyObject; if (el != null && ReturnsDataContext) { // for generated wrappers, use the ItemForContainer property instead // of DataContext, since it's always set by the generator o = el.GetValue(ItemContainerGenerator.ItemForItemContainerProperty); if (o == null) o = el.GetValue(FrameworkElement.DataContextProperty); } return o; } private object GetDataObjectImpl(DependencyObject d, ObjectRefArgs args) { if (d == null) return null; switch (_relativeSource.Mode) { case RelativeSourceMode.Self: break; // nothing to do case RelativeSourceMode.TemplatedParent: d = Helper.GetTemplatedParent(d); break; case RelativeSourceMode.PreviousData: return GetPreviousData(d); case RelativeSourceMode.FindAncestor: d = FindAncestorOfType(_relativeSource.AncestorType, _relativeSource.AncestorLevel, d, args.IsTracing); if (d == null) { return DependencyProperty.UnsetValue; // we fell off the tree } break; default: return null; } if (args.IsTracing) { TraceData.Trace(TraceEventType.Warning, TraceData.RelativeSource( _relativeSource.Mode, TraceData.Identify(d))); } return d; } internal bool ReturnsDataContext { get { return (_relativeSource.Mode == RelativeSourceMode.PreviousData); } } /// <summary> true if the ObjectRef really needs the tree 
        /// context </summary>
        protected override bool ProtectedTreeContextIsRequired(DependencyObject target)
        {
            // only ancestor walks and previous-sibling lookups traverse the tree
            return ((_relativeSource.Mode == RelativeSourceMode.FindAncestor ||
                    (_relativeSource.Mode == RelativeSourceMode.PreviousData)));
        }

        protected override bool ProtectedUsesMentor
        {
            get
            {
                switch (_relativeSource.Mode)
                {
                    case RelativeSourceMode.TemplatedParent:
                    case RelativeSourceMode.PreviousData:
                        return true;

                    default:
                        return false;
                }
            }
        }

        internal override string Identify()
        {
            return String.Format(System.Windows.Markup.TypeConverterHelper.InvariantEnglishUS,
                "RelativeSource ({0})", _relativeSource.Mode);
        }

        //------------------------------------------------------
        //
        //  Private Method
        //
        //------------------------------------------------------

        private object GetPreviousData(DependencyObject d)
        {
            // move up to the next containing DataContext scope
            for (; d != null; d = FrameworkElement.GetFrameworkParent(d))
            {
                if (BindingExpression.HasLocalDataContext(d))
                {
                    // special case:  if the element is a ContentPresenter
                    // whose templated parent is a ContentControl or
                    // HeaderedItemsControl, and both have the same
                    // DataContext, we'll use the parent instead of the
                    // ContentPresenter.  In this case, the DataContext
                    // of the CP is set by various forwarding rules, and
                    // shouldn't count as a new scope.
                    // Similarly, do the same for a FE whose parent
                    // is a GridViewRowPresenter;  this enables Previous bindings
                    // inside ListView.
                    FrameworkElement parent, child;
                    ContentPresenter cp;
                    if ((cp = d as ContentPresenter) != null)
                    {
                        child = cp;
                        parent = cp.TemplatedParent as FrameworkElement;
                        if (!(parent is ContentControl || parent is HeaderedItemsControl))
                        {
                            parent = cp.Parent as System.Windows.Controls.Primitives.GridViewRowPresenterBase;
                        }
                    }
                    else
                    {
                        child = d as FrameworkElement;
                        parent = ((child != null) ? child.Parent : null) as System.Windows.Controls.Primitives.GridViewRowPresenterBase;
                    }

                    if (child != null && parent != null && Object.Equals(child.DataContext, parent.DataContext))
                    {
                        d = parent;
                        if (!BindingExpression.HasLocalDataContext(parent))
                        {
                            continue;
                        }
                    }

                    break;
                }
            }

            if (d == null)
                return DependencyProperty.UnsetValue;   // we fell off the tree

            // this only makes sense within generated content.  If this
            // is the case, then d is now the wrapper element, its visual
            // parent is the layout element, and the layout's ItemsOwner
            // is the governing ItemsControl.
            Visual v = d as Visual;
            DependencyObject layout = (v != null) ? VisualTreeHelper.GetParent(v) : null;
            ItemsControl ic = ItemsControl.GetItemsOwner(layout);
            if (ic == null)
            {
                if (TraceData.IsEnabled)
                    TraceData.Trace(TraceEventType.Error, TraceData.RefPreviousNotInContext);
                return null;
            }

            // now look up the wrapper's previous sibling within the
            // layout's children collection
            Visual v2 = layout as Visual;
            int count = (v2 != null) ? v2.InternalVisualChildrenCount : 0;
            int j = -1;
            Visual prevChild = null;    // child at j-1th index
            if (count != 0)
            {
                j = IndexOf(v2, v, out prevChild);
            }
            if (j > 0)
            {
                d = prevChild;
            }
            else
            {
                d = null;
                if ((j < 0) && TraceData.IsEnabled)
                    TraceData.Trace(TraceEventType.Error, TraceData.RefNoWrapperInChildren);
            }

            return d;
        }

        // Walks parents (preferring the visual tree) until `level` ancestors of
        // `type` have been seen; returns null when the walk leaves the tree.
        private DependencyObject FindAncestorOfType(Type type, int level, DependencyObject d, bool isTracing)
        {
            if (type == null)
            {
                if (TraceData.IsEnabled)
                    TraceData.Trace(TraceEventType.Error, TraceData.RefAncestorTypeNotSpecified);
                return null;
            }
            if (level < 1)
            {
                if (TraceData.IsEnabled)
                    TraceData.Trace(TraceEventType.Error, TraceData.RefAncestorLevelInvalid);
                return null;
            }

            // initialize search to start at the parent of the given DO
            FrameworkObject fo = new FrameworkObject(d);
            fo.Reset(fo.GetPreferVisualParent(true).DO);

            while (fo.DO != null)
            {
                if (isTracing)
                {
                    TraceData.Trace(TraceEventType.Warning,
                                    TraceData.AncestorLookup(type.Name, TraceData.Identify(fo.DO)));
                }

                if (type.IsInstanceOfType(fo.DO))   // found it!
                {
                    if (--level <= 0)
                        break;
                }

                fo.Reset(fo.PreferVisualParent.DO);
            }

            return fo.DO;
        }

        // Returns the index of `child` among `parent`'s visual children (or -1),
        // and the child immediately preceding it via `prevChild`.
        private int IndexOf(Visual parent, Visual child, out Visual prevChild)
        {
            Visual temp;
            bool foundIndex = false;
            prevChild = null;
            int count = parent.InternalVisualChildrenCount;
            int i;
            for (i = 0; i < count; i++)
            {
                temp = parent.InternalGetVisualChild(i);
                if (child == temp)
                {
                    foundIndex = true;
                    break;
                }
                prevChild = temp;
            }

            if (foundIndex)
                return i;
            else
                return -1;
        }

        //------------------------------------------------------
        //
        //  Private Fields
        //
        //------------------------------------------------------

        RelativeSource _relativeSource;
    }

    #endregion RelativeObjectRef

    #region ExplicitObjectRef

    /// <summary> Explicit object reference. </summary>
    internal sealed class ExplicitObjectRef : ObjectRef
    {
        //------------------------------------------------------
        //
        //  Constructors
        //
        //------------------------------------------------------

        /// <summary> Constructor. </summary>
        internal ExplicitObjectRef(object o)
        {
            // hold DependencyObjects weakly so an explicit Source does not keep
            // the element tree alive; plain objects are held strongly
            if (o is DependencyObject)
                _element = new WeakReference(o);
            else
                _object = o;
        }

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        /// <summary> Returns the referenced object. </summary>
        /// <param name="d">Element defining context for the reference. </param>
        /// <param name="args">See ObjectRefArgs </param>
        internal override object GetObject(DependencyObject d, ObjectRefArgs args)
        {
            return (_element != null) ? _element.Target : _object;
        }

        /// <summary>
        /// true if the ObjectRef uses the mentor of the target element,
        /// rather than the target element itself.
        /// </summary>
        protected override bool ProtectedUsesMentor
        {
            get { return false; }
        }

        /// <summary>
        /// identify this ObjectRef to the user - used by extended tracing
        /// </summary>
        internal override string Identify()
        {
            return "Source";
        }

        //------------------------------------------------------
        //
        //  Private Fields
        //
        //------------------------------------------------------

        object _object;
        WeakReference _element; // to DependencyObject (bug 986435)
    }

    #endregion ExplicitObjectRef
}
using Signum.Entities.Reflection;
using Signum.Utilities.DataStructures;
using Signum.Utilities.Reflection;

namespace Signum.Engine.Cache;

/// <summary>
/// Rewrites an entity ToString lambda so it reads values directly from the
/// in-memory cached-table tuples (by PrimaryKey) instead of from entity instances.
/// </summary>
class ToStringExpressionVisitor : ExpressionVisitor
{
    // maps the lambda's entity parameter to the root CachedEntityExpression
    Dictionary<ParameterExpression, Expression> replacements = new Dictionary<ParameterExpression, Expression>();
    // the root entity expression the lambda parameter was replaced with
    CachedEntityExpression root;

    public ToStringExpressionVisitor(ParameterExpression param, CachedEntityExpression root)
    {
        this.root = root;
        this.replacements = new Dictionary<ParameterExpression, Expression>
        {
            { param, root }
        };
    }

    /// <summary>
    /// Entry point: converts a ToString lambda over an entity <typeparamref name="T"/>
    /// into a PrimaryKey -&gt; string lambda over the cached table.
    /// </summary>
    public static Expression<Func<PrimaryKey, string>> GetToString<T>(CachedTableConstructor constructor, Expression<Func<T, string>> lambda)
    {
        Table table = (Table)constructor.table;

        var param = lambda.Parameters.SingleEx();

        if (param.Type != table.Type)
            throw new InvalidOperationException("incorrect lambda paramer type");

        var pk = Expression.Parameter(typeof(PrimaryKey), "pk");

        var root = new CachedEntityExpression(pk, typeof(T), constructor, null, null);
        var visitor = new ToStringExpressionVisitor(param, root);
        var result = visitor.Visit(lambda.Body);

        return Expression.Lambda<Func<PrimaryKey, string>>(result, pk);
    }

    protected override Expression VisitMember(MemberExpression node)
    {
        var exp = this.Visit(node.Expression);

        if (exp is CachedEntityExpression cee)
        {
            // cached entities are, by definition, persisted
            if (node.Member.Name == "IsNew")
                return Expression.Constant(false);

            // resolve the member against the embedded/mixin/table field map
            Field field = cee.FieldEmbedded != null ? cee.FieldEmbedded.GetField(node.Member) :
                cee.FieldMixin != null ? cee.FieldMixin.GetField(node.Member) :
                ((Table)cee.Constructor.table).GetField(node.Member);

            return BindMember(cee, field, cee.PrimaryKey);
        }

        return node.Update(exp);
    }

    protected override Expression VisitConditional(ConditionalExpression c) // a.IsNew
    {
        Expression test = this.Visit(c.Test);
        if (test is ConstantExpression co)
        {
            // test folded to a constant (e.g. IsNew == false): keep only the
            // branch that will actually execute
            if ((bool)co.Value!)
                return this.Visit(c.IfTrue);
            else
                return this.Visit(c.IfFalse);
        }

        Expression ifTrue = this.Visit(c.IfTrue);
        Expression ifFalse = this.Visit(c.IfFalse);

        if (test != c.Test || ifTrue != c.IfTrue || ifFalse != c.IfFalse)
        {
            return Expression.Condition(test, ifTrue, ifFalse);
        }

        return c;
    }

    // Binds `field` of entity `n` to an expression over the cached row tuple,
    // wrapping in a null-check when the primary key is nullable.
    private Expression BindMember(CachedEntityExpression n, Field field, Expression? prevPrimaryKey)
    {
        Expression body = GetField(field, n.Constructor, prevPrimaryKey);

        ConstantExpression tab = Expression.Constant(n.Constructor.cachedTable, typeof(CachedTable<>).MakeGenericType(((Table)n.Constructor.table).Type));

        // row tuple: cachedTable.GetRows()[pk]
        Expression origin = Expression.Convert(Expression.Property(Expression.Call(tab, "GetRows", null), "Item", n.PrimaryKey.UnNullify()), n.Constructor.tupleType);

        var result = ExpressionReplacer.Replace(body, new Dictionary<ParameterExpression, Expression> { { n.Constructor.origin, origin } });

        if (!n.PrimaryKey.Type.IsNullable())
            return result;

        // nullable FK: a null key yields a null result instead of a lookup
        return Expression.Condition(
            Expression.Equal(n.PrimaryKey, Expression.Constant(null, n.PrimaryKey.Type)),
            Expression.Constant(null, result.Type.Nullify()),
            result.Nullify());
    }

    // Translates a table field into an expression over the row tuple.
    private Expression GetField(Field field, CachedTableConstructor constructor, Expression? previousPrimaryKey)
    {
        if (field is FieldValue)
        {
            var value = constructor.GetTupleProperty((IColumn)field);

            return value.Type == field.FieldType ?
value : Expression.Convert(value, field.FieldType); } if (field is FieldEnum) return Expression.Convert(constructor.GetTupleProperty((IColumn)field), field.FieldType); if (field is FieldPrimaryKey) return constructor.GetTupleProperty((IColumn)field); if (field is IFieldReference) { bool isLite = ((IFieldReference)field).IsLite; if (field is FieldReference) { IColumn column = (IColumn)field; return GetEntity(isLite, column, field.FieldType.CleanType(), constructor); } if (field is FieldImplementedBy ib) { var nullRef = Expression.Constant(null, field.FieldType); var call = ib.ImplementationColumns.Aggregate((Expression)nullRef, (acum, kvp) => { IColumn column = (IColumn)kvp.Value; var entity = GetEntity(isLite, column, kvp.Key, constructor); return Expression.Condition(Expression.NotEqual(constructor.GetTupleProperty(column), Expression.Constant(column.Type)), Expression.Convert(entity, field.FieldType), acum); }); return call; } if (field is FieldImplementedByAll) { throw new NotImplementedException("FieldImplementedByAll not supported in cached ToString"); } } if (field is FieldEmbedded fe) { return new CachedEntityExpression(previousPrimaryKey!, fe.FieldType, constructor, fe, null); } if (field is FieldMixin fm) { return new CachedEntityExpression(previousPrimaryKey!, fm.FieldType, constructor, null, fm); } if (field is FieldMList) { throw new NotImplementedException("FieldMList not supported in cached ToString"); } throw new InvalidOperationException("Unexpected {0}".FormatWith(field.GetType().Name)); } private Expression GetEntity(bool isLite, IColumn column, Type entityType, CachedTableConstructor constructor) { Expression id = constructor.GetTupleProperty(column); var pk = CachedTableConstructor.WrapPrimaryKey(id); CachedTableConstructor typeConstructor = CacheLogic.GetCacheType(entityType) == CacheType.Cached ? 
            CacheLogic.GetCachedTable(entityType).Constructor :
            constructor.cachedTable.SubTables!.SingleEx(a => a.ParentColumn == column).Constructor;

        return new CachedEntityExpression(pk, entityType, typeConstructor, null, null);
    }

    protected override Expression VisitUnary(UnaryExpression node)
    {
        var operand = Visit(node.Operand);

        if (operand != node.Operand && node.NodeType == ExpressionType.Convert)
        {
            return Expression.Convert(operand, node.Type);
        }

        return node.Update(operand);
    }

    // object.ToString() — used to stringify non-string operands
    static readonly MethodInfo miToString = ReflectionTools.GetMethodInfo((object o) => o.ToString());

    protected override Expression VisitMethodCall(MethodCallExpression node)
    {
        // string.Format / FormatWith: visit the format string and stringify each argument
        if (node.Method.DeclaringType == typeof(string) && node.Method.Name == nameof(string.Format) ||
            node.Method.DeclaringType == typeof(StringExtensions) && node.Method.Name == nameof(StringExtensions.FormatWith))
        {
            if (node.Arguments[0].Type == typeof(IFormatProvider))
                throw new NotSupportedException("string.Format with IFormatProvider");

            var formatStr = Visit(node.Arguments[0]);

            if (node.Arguments.Count == 2 && node.Arguments[1] is NewArrayExpression nae)
            {
                // params-array overload: rebuild the array with stringified elements
                var expressions = nae.Expressions.Select(a => Visit(ToString(a))).ToList();

                return node.Update(null!, new[] { formatStr, Expression.NewArrayInit(nae.Type.ElementType()!, expressions) });
            }
            else
            {
                var remainging = node.Arguments.Skip(1).Select(a => Visit(ToString(a))).ToList();

                return node.Update(null!, new Sequence<Expression> { formatStr, remainging });
            }
        }

        var obj = base.Visit(node.Object);
        var args = base.Visit(node.Arguments);

        // entity.ToString(): bind to the ToStr column when available, otherwise
        // delegate to the (sub-)cached table's GetToString
        if (node.Method.Name == "ToString" && node.Arguments.IsEmpty() && obj is CachedEntityExpression ce && ce.Type.IsEntity())
        {
            var table = (Table)ce.Constructor.table;

            if (table.ToStrColumn != null)
            {
                return BindMember(ce, (FieldValue)table.ToStrColumn, null);
            }
            else if (this.root != ce)
            {
                var cachedTableType = typeof(CachedTable<>).MakeGenericType(table.Type);

                ConstantExpression tab = Expression.Constant(ce.Constructor.cachedTable, cachedTableType);

                var mi = cachedTableType.GetMethod(nameof(CachedTable<Entity>.GetToString))!;

                return Expression.Call(tab, mi, ce.PrimaryKey.UnNullify());
            }
        }

        // entity.GetType(): the runtime type is statically known for cached entities
        if (node.Method.Name == "GetType" && obj is CachedEntityExpression ce2)
        {
            return Expression.Constant(ce2.Type);
        }

        // expand [ExpressionField]-style methods and re-visit the expansion
        LambdaExpression? lambda = ExpressionCleaner.GetFieldExpansion(obj?.Type, node.Method);

        if (lambda != null)
        {
            var replace = ExpressionReplacer.Replace(Expression.Invoke(lambda, obj == null ? args : args.PreAnd(obj)));

            return this.Visit(replace);
        }

        if (node.Method.Name == nameof(Entity.Mixin) && obj is CachedEntityExpression cee)
        {
            var mixin = ((Table)cee.Constructor.table).GetField(node.Method);

            return GetField(mixin, cee.Constructor, cee.PrimaryKey);
        }

        return node.Update(obj!, args);
    }

    protected override Expression VisitParameter(ParameterExpression node)
    {
        // the lambda's entity parameter becomes the root CachedEntityExpression
        return this.replacements.TryGetC(node) ?? node;
    }

    protected override Expression VisitBinary(BinaryExpression node)
    {
        var result = (BinaryExpression)base.VisitBinary(node);

        if (result.NodeType == ExpressionType.Equal || result.NodeType == ExpressionType.NotEqual)
        {
            // entity (non-embedded) comparisons become primary-key comparisons
            if (result.Left is CachedEntityExpression ceLeft && ceLeft.FieldEmbedded?.HasValue == null ||
                result.Right is CachedEntityExpression ceRight && ceRight.FieldEmbedded?.HasValue == null)
            {
                var left = GetPrimaryKey(result.Left);
                var right = GetPrimaryKey(result.Right);

                if (left.Type.IsNullable() || right.Type.IsNullable())
                    return Expression.MakeBinary(node.NodeType, left.Nullify(), right.Nullify());
                else
                    return Expression.MakeBinary(node.NodeType, left, right);
            }

            // nullable embedded comparisons become HasValue-column comparisons
            if (result.Left is CachedEntityExpression ceLeft2 && ceLeft2.FieldEmbedded?.HasValue != null ||
                result.Right is CachedEntityExpression ceRight2 && ceRight2.FieldEmbedded?.HasValue != null)
            {
                var left = GetHasValue(result.Left);
                var right = GetHasValue(result.Right);

                return Expression.MakeBinary(node.NodeType, left, right);
            }
        }

        // string concatenation: stringify both operands first
        if (result.NodeType == ExpressionType.Add && (result.Left.Type == typeof(string) || result.Right.Type == typeof(string)))
        {
            var lefto = this.Visit(ToString(result.Left));
            var righto = this.Visit(ToString(result.Right));

            return Expression.Add(lefto, righto, result.Method);
        }

        return result;
    }

    // Wraps `node` so it evaluates to node.ToString(), or null when node is null.
    private Expression ToString(Expression node)
    {
        if (node.Type == typeof(string))
            return node;

        return Expression.Condition(
            Expression.Equal(node.Nullify(), Expression.Constant(null, node.Type.Nullify())),
            Expression.Constant(null, typeof(string)),
            Expression.Call(node, miToString));
    }

    // Extracts the primary-key expression used for entity equality comparisons.
    private Expression GetPrimaryKey(Expression exp)
    {
        if (exp is ConstantExpression && ((ConstantExpression)exp).Value == null)
            return Expression.Constant(null, typeof(PrimaryKey?));

        if (exp is CachedEntityExpression cee && cee.FieldEmbedded?.HasValue == null)
            return cee.PrimaryKey;

        throw new InvalidOperationException("");
    }

    // Extracts the HasValue-column expression used for nullable-embedded comparisons.
    private Expression GetHasValue(Expression exp)
    {
        if (exp is ConstantExpression && ((ConstantExpression)exp).Value == null)
            return Expression.Constant(false, typeof(bool));

        if (exp is CachedEntityExpression n && n.FieldEmbedded?.HasValue != null)
        {
            var body = n.Constructor.GetTupleProperty(n.FieldEmbedded.HasValue);

            ConstantExpression tab = Expression.Constant(n.Constructor.cachedTable, typeof(CachedTable<>).MakeGenericType(((Table)n.Constructor.table).Type));

            // row tuple: cachedTable.GetRows()[pk]
            Expression origin = Expression.Convert(Expression.Property(Expression.Call(tab, "GetRows", null), "Item", n.PrimaryKey.UnNullify()), n.Constructor.tupleType);

            var result = ExpressionReplacer.Replace(body, new Dictionary<ParameterExpression, Expression> { { n.Constructor.origin, origin } });

            return result;
        }

        throw new InvalidOperationException("");
    }
}

/// <summary>
/// Custom expression node representing a cached entity (or embedded/mixin part)
/// identified by its PrimaryKey and the CachedTableConstructor that owns its rows.
/// </summary>
internal class CachedEntityExpression : Expression
{
    public override ExpressionType NodeType
    {
        get { return ExpressionType.Extension; }
    }

    public readonly CachedTableConstructor Constructor;
    // expression producing the entity's PrimaryKey (possibly nullable)
    public readonly Expression PrimaryKey;
    // set only when this node represents an embedded field
    public readonly FieldEmbedded? FieldEmbedded;
    // set only when this node represents a mixin
    public readonly FieldMixin? FieldMixin;
    public readonly Type type;

    public override Type Type { get { return type; } }

    public CachedEntityExpression(Expression primaryKey, Type type, CachedTableConstructor constructor, FieldEmbedded? embedded, FieldMixin? mixin)
    {
        if (primaryKey == null)
            throw new ArgumentNullException(nameof(primaryKey));

        if (primaryKey.Type.UnNullify() != typeof(PrimaryKey))
            throw new InvalidOperationException("primaryKey should be a PrimaryKey");

        if (type.IsEmbeddedEntity())
        {
            this.FieldEmbedded = embedded ?? throw new ArgumentNullException(nameof(embedded));
        }
        else if (type.IsMixinEntity())
        {
            this.FieldMixin = mixin ?? throw new ArgumentNullException(nameof(mixin));
        }
        else
        {
            if (((Table)constructor.table).Type != type.CleanType())
                throw new InvalidOperationException("Wrong type");
        }

        this.PrimaryKey = primaryKey;
        this.type = type;
        this.Constructor = constructor;
    }

    protected override Expression VisitChildren(ExpressionVisitor visitor)
    {
        if (this.PrimaryKey == null)
            return this;

        var pk = visitor.Visit(this.PrimaryKey);

        if (pk == this.PrimaryKey)
            return this;

        return new CachedEntityExpression(pk, type, Constructor, FieldEmbedded, FieldMixin);
    }

    public override string ToString()
    {
        return $"CachedEntityExpression({Type.TypeName()}, {PrimaryKey})";
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Threading;
using System.Threading.Tasks;
using Xunit;

namespace System.ComponentModel.EventBasedAsync.Tests
{
    // Tests for AsyncOperation / AsyncOperationManager: dispatch, completion,
    // cancellation and error propagation through the synchronization context.
    public class AsyncOperationTests
    {
        // Upper bound for every wait in these tests, so a regression hangs
        // for at most this long instead of forever.
        private const int SpinTimeoutSeconds = 30;

        [Fact]
        public static void Noop()
        {
            // Test that a simple AsyncOperation can be dispatched and completed via AsyncOperationManager
            Task.Run(() =>
            {
                var operation = new TestAsyncOperation(op => { });
                operation.Wait();

                Assert.True(operation.Completed);
                Assert.False(operation.Cancelled);
                Assert.Null(operation.Exception);
            }).GetAwaiter().GetResult();
        }

        [Fact]
        public static void ThrowAfterAsyncComplete()
        {
            Task.Run(() =>
            {
                var operation = new TestAsyncOperation(op => { });
                operation.Wait();

                // Once completed, any further use of the operation must throw.
                SendOrPostCallback noopCallback = state => { };
                Assert.Throws<InvalidOperationException>(() => operation.AsyncOperation.Post(noopCallback, null));
                Assert.Throws<InvalidOperationException>(() => operation.AsyncOperation.PostOperationCompleted(noopCallback, null));
                Assert.Throws<InvalidOperationException>(() => operation.AsyncOperation.OperationCompleted());
            }).GetAwaiter().GetResult();
        }

        [Fact]
        public static void ThrowAfterSynchronousComplete()
        {
            Task.Run(() =>
            {
                // Same contract as above, but the operation completes synchronously.
                var operation = AsyncOperationManager.CreateOperation(null);
                operation.OperationCompleted();

                SendOrPostCallback noopCallback = state => { };
                Assert.Throws<InvalidOperationException>(() => operation.Post(noopCallback, null));
                Assert.Throws<InvalidOperationException>(() => operation.PostOperationCompleted(noopCallback, null));
                Assert.Throws<InvalidOperationException>(() => operation.OperationCompleted());
            }).GetAwaiter().GetResult();
        }

        [Fact]
        public static void Cancel()
        {
            // Test that cancellation gets passed all the way through PostOperationCompleted(callback, AsyncCompletedEventArgs)
            Task.Run(() =>
            {
                var cancelEvent = new ManualResetEventSlim();
                var operation = new TestAsyncOperation(op =>
                {
                    // Block the worker until Cancel() signals the event.
                    Assert.True(cancelEvent.Wait(TimeSpan.FromSeconds(SpinTimeoutSeconds)));
                }, cancelEvent: cancelEvent);

                operation.Cancel();
                operation.Wait();

                Assert.True(operation.Completed);
                Assert.True(operation.Cancelled);
                Assert.Null(operation.Exception);
            }).GetAwaiter().GetResult();
        }

        [Fact]
        public static void Throw()
        {
            // Test that exceptions get passed all the way through PostOperationCompleted(callback, AsyncCompletedEventArgs)
            Task.Run(() =>
            {
                var operation = new TestAsyncOperation(op =>
                {
                    throw new TestException("Test throw");
                });

                Assert.Throws<TestException>(() => operation.Wait());
            }).GetAwaiter().GetResult();
        }

        [Fact]
        public static void PostNullDelegate()
        {
            // the xUnit SynchronizationContext - AsyncTestSyncContext interferes with the current SynchronizationContext
            // used by AsyncOperation when there is exception thrown -> the SC.OperationCompleted() is not called.
            // use new SC here to avoid this issue
            var orignal = SynchronizationContext.Current;
            try
            {
                SynchronizationContext.SetSynchronizationContext(null);

                // Pass a non-null state just to emphasize we're only testing passing a null delegate
                var state = new object();
                var operation = AsyncOperationManager.CreateOperation(state);

                Assert.Throws<ArgumentNullException>(() => operation.Post(null, state));
                Assert.Throws<ArgumentNullException>(() => operation.PostOperationCompleted(null, state));
            }
            finally
            {
                SynchronizationContext.SetSynchronizationContext(orignal);
            }
        }

        // A simple wrapper for AsyncOperation which executes the specified delegate and a completion handler asynchronously.
public class TestAsyncOperation
        {
            private readonly object _operationId;
            private readonly Action<TestAsyncOperation> _executeDelegate;
            private readonly ManualResetEventSlim _cancelEvent;
            private readonly ManualResetEventSlim _completeEvent;

            public AsyncOperation AsyncOperation { get; private set; }

            // True once the completion callback has run on the sync context.
            public bool Completed { get { return _completeEvent.IsSet; } }

            // True when the operation completed due to cancellation.
            public bool Cancelled { get { return _cancelEvent.IsSet; } }

            // Error captured from the execute delegate, if it threw.
            public Exception Exception { get; private set; }

            public TestAsyncOperation(Action<TestAsyncOperation> executeDelegate, ManualResetEventSlim cancelEvent = null)
            {
                // Create an async operation passing an object as the state so we can
                // verify that state is passed properly.
                _operationId = new object();
                AsyncOperation = AsyncOperationManager.CreateOperation(_operationId);
                Assert.Same(_operationId, AsyncOperation.UserSuppliedState);
                Assert.Same(AsyncOperationManager.SynchronizationContext, AsyncOperation.SynchronizationContext);

                _completeEvent = new ManualResetEventSlim(false);
                _cancelEvent = cancelEvent ?? new ManualResetEventSlim(false);

                // Post work to the wrapped synchronization context
                _executeDelegate = executeDelegate;
                AsyncOperation.Post((SendOrPostCallback)ExecuteWorker, _operationId);
            }

            // Blocks until completion; re-throws any exception captured from the worker.
            public void Wait()
            {
                Assert.True(_completeEvent.Wait(TimeSpan.FromSeconds(SpinTimeoutSeconds)));
                if (Exception != null)
                {
                    throw Exception;
                }
            }

            public void Cancel()
            {
                CompleteOperationAsync(cancelled: true);
            }

            private void ExecuteWorker(object operationId)
            {
                Assert.Same(_operationId, operationId);

                Exception exception = null;
                try
                {
                    _executeDelegate(this);
                }
                catch (Exception e)
                {
                    exception = e;
                }
                finally
                {
                    // Always post completion, even when the delegate threw.
                    CompleteOperationAsync(exception: exception);
                }
            }

            private void CompleteOperationAsync(Exception exception = null, bool cancelled = false)
            {
                // Only the first completion (normal, faulted or cancelled) is posted.
                if (!(Completed || Cancelled))
                {
                    AsyncOperation.PostOperationCompleted(
                        (SendOrPostCallback)OnOperationCompleted,
                        new AsyncCompletedEventArgs(
                            exception,
                            cancelled,
                            _operationId));
                }
            }

            private void OnOperationCompleted(object state)
            {
                AsyncCompletedEventArgs e = Assert.IsType<AsyncCompletedEventArgs>(state);
                Assert.Equal(_operationId, e.UserState);
                Exception = e.Error;

                // Make sure to set _cancelEvent before _completeEvent so that anyone waiting on
                // _completeEvent will not be at risk of reading Cancelled before it is set.
                if (e.Cancelled)
                    _cancelEvent.Set();
                _completeEvent.Set();
            }
        }
    }
}
//---------------------------------------------------------------------------
//
// <copyright file=Clipboard.cs company=Microsoft>
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
// Description: Clipboard implementation to provide methods to place/get data from/to the system
// clipboard.
//
// See spec at http://avalon/uis/Data%20Transfer%20clipboard%20dragdrop/Avalon%20Clipboard.htm
//
// History:
// 05/09/2002 : susiA Created
// 06/16/2003 : sangilj Moved to WCP
//
//---------------------------------------------------------------------------

using MS.Win32;
using MS.Internal;
using MS.Internal.PresentationCore; // SecurityHelper
using System.Collections.Specialized;
using System.IO;
using System.Security;
using System.Security.Permissions;
using System.ComponentModel;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Threading;
using System.Windows.Media.Imaging;
using System.Windows.Threading;

using SR = MS.Internal.PresentationCore.SR;
using SRID = MS.Internal.PresentationCore.SRID;
using IComDataObject = System.Runtime.InteropServices.ComTypes.IDataObject;

namespace System.Windows
{
    #region Clipboard class

    /// <summary>
    /// Provides methods to place data on and retrieve data from the system clipboard.
    /// This class cannot be inherited.
    /// </summary>
    public static class Clipboard
    {
        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        #region Public Methods

        /// <summary>
        /// Clear the system clipboard which the clipboard is emptied.
        /// SetDataObject.
        /// </summary>
        /// <SecurityNote>
        ///     Critical - access critical data (clipboard information)
        ///     PublicOk - Clearing the clipboard is not inherently unsafe.
        /// </SecurityNote>
        [SecurityCritical]
        public static void Clear()
        {
            // Retry OLE operations several times as mitigation for clipboard locking issues in TS sessions.
            // See Dev10 bug 616223 and VSWhidbey bug 476911.
            int i = OleRetryCount;

            while (true)
            {
                // Clear the system clipboard by calling OleSetClipboard with null parameter.
                int hr = OleServicesContext.CurrentOleServicesContext.OleSetClipboard(null);

                if (NativeMethods.Succeeded(hr))
                {
                    break;
                }

                // Out of retries: surface the OLE failure to the caller.
                if (--i == 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Thread.Sleep(OleRetryDelay);
            }
        }

        /// <summary>
        /// Return true if Clipboard contains the audio data. Otherwise, return false.
        /// </summary>
        public static bool ContainsAudio()
        {
            return ContainsDataInternal(DataFormats.WaveAudio);
        }

        /// <summary>
        /// Return true if Clipboard contains the specified data format. Otherwise, return false.
        /// </summary>
        public static bool ContainsData(string format)
        {
            if (format == null)
            {
                throw new ArgumentNullException("format");
            }

            if (format == string.Empty)
            {
                throw new ArgumentException(SR.Get(SRID.DataObject_EmptyFormatNotAllowed));
            }

            return ContainsDataInternal(format);
        }

        /// <summary>
        /// Return true if Clipboard contains the file drop list format. Otherwise, return false.
        /// </summary>
        public static bool ContainsFileDropList()
        {
            return ContainsDataInternal(DataFormats.FileDrop);
        }

        /// <summary>
        /// Return true if Clipboard contains the image format. Otherwise, return false.
        /// </summary>
        public static bool ContainsImage()
        {
            return ContainsDataInternal(DataFormats.Bitmap);
        }

        /// <summary>
        /// Return true if Clipboard contains the text data format which is unicode.
        /// Otherwise, return false.
        /// </summary>
        public static bool ContainsText()
        {
            return ContainsDataInternal(DataFormats.UnicodeText);
        }

        /// <summary>
        /// Return true if Clipboard contains the specified text data format which is unicode.
        /// Otherwise, return false.
/// </summary>
        public static bool ContainsText(TextDataFormat format)
        {
            if (!DataFormats.IsValidTextDataFormat(format))
            {
                throw new InvalidEnumArgumentException("format", (int)format, typeof(TextDataFormat));
            }

            return ContainsDataInternal(DataFormats.ConvertToDataFormats(format));
        }

        /// <summary>
        /// Permanently renders the contents of the last IDataObject that was set onto the clipboard.
        /// </summary>
        public static void Flush()
        {
            // Retry OLE operations several times as mitigation for clipboard locking issues in TS sessions.
            // See Dev10 bug 616223 and VSWhidbey bug 476911.
            int i = OleRetryCount;

            while (true)
            {
                int hr = OleServicesContext.CurrentOleServicesContext.OleFlushClipboard();

                if (NativeMethods.Succeeded(hr))
                {
                    break;
                }

                if (--i == 0)
                {
                    SecurityHelper.ThrowExceptionForHR(hr);
                }

                Thread.Sleep(OleRetryDelay);
            }
        }

        /// <summary>
        /// Get audio data as Stream from Clipboard.
        /// </summary>
        public static Stream GetAudioStream()
        {
            return GetDataInternal(DataFormats.WaveAudio) as Stream;
        }

        /// <summary>
        /// Get data for the specified data format from Clipboard.
        /// </summary>
        public static object GetData(string format)
        {
            if (format == null)
            {
                throw new ArgumentNullException("format");
            }

            if (format == string.Empty)
            {
                throw new ArgumentException(SR.Get(SRID.DataObject_EmptyFormatNotAllowed));
            }

            return GetDataInternal(format);
        }

        /// <summary>
        /// Get the file drop list as StringCollection from Clipboard.
        /// </summary>
        public static StringCollection GetFileDropList()
        {
            StringCollection fileDropListCollection;
            string[] fileDropList;

            fileDropListCollection = new StringCollection();

            fileDropList = GetDataInternal(DataFormats.FileDrop) as string[];
            if (fileDropList != null)
            {
                fileDropListCollection.AddRange(fileDropList);
            }

            return fileDropListCollection;
        }

        /// <summary>
        /// Get the image from Clipboard.
        /// </summary>
        public static BitmapSource GetImage()
        {
            return GetDataInternal(DataFormats.Bitmap) as BitmapSource;
        }

        /// <summary>
        /// Get text from Clipboard.
        /// </summary>
        public static string GetText()
        {
            return GetText(TextDataFormat.UnicodeText);
        }

        /// <summary>
        /// Get text from Clipboard.
        /// </summary>
        public static string GetText(TextDataFormat format)
        {
            if (!DataFormats.IsValidTextDataFormat(format))
            {
                throw new InvalidEnumArgumentException("format", (int)format, typeof(TextDataFormat));
            }

            string text;

            text = (string)GetDataInternal(DataFormats.ConvertToDataFormats(format));

            if (text != null)
            {
                return text;
            }

            // Callers never see null text; an empty clipboard yields the empty string.
            return string.Empty;
        }

        /// <summary>
        /// Set the audio data to Clipboard.
        /// </summary>
        public static void SetAudio(byte[] audioBytes)
        {
            if (audioBytes == null)
            {
                throw new ArgumentNullException("audioBytes");
            }

            SetAudio(new MemoryStream(audioBytes));
        }

        /// <summary>
        /// Set the audio data to Clipboard.
        /// </summary>
        public static void SetAudio(Stream audioStream)
        {
            if (audioStream == null)
            {
                throw new ArgumentNullException("audioStream");
            }

            SetDataInternal(DataFormats.WaveAudio, audioStream);
        }

        /// <summary>
        /// Set the specified data to Clipboard.
        /// </summary>
        public static void SetData(string format, object data)
        {
            if (format == null)
            {
                throw new ArgumentNullException("format");
            }

            if (format == string.Empty)
            {
                throw new ArgumentException(SR.Get(SRID.DataObject_EmptyFormatNotAllowed));
            }

            if (data == null)
            {
                throw new ArgumentNullException("data");
            }

            SetDataInternal(format, data);
        }

        /// <summary>
        /// Set the file drop list to Clipboard.
        /// </summary>
        public static void SetFileDropList(StringCollection fileDropList)
        {
            if (fileDropList == null)
            {
                throw new ArgumentNullException("fileDropList");
            }

            if (fileDropList.Count == 0)
            {
                throw new ArgumentException(SR.Get(SRID.DataObject_FileDropListIsEmpty, fileDropList));
            }

            foreach (string fileDrop in fileDropList)
            {
                // Path.GetFullPath throws for invalid paths; it is used here purely as validation.
                try
                {
                    string filePath = Path.GetFullPath(fileDrop);
                }
                catch (ArgumentException)
                {
                    throw new ArgumentException(SR.Get(SRID.DataObject_FileDropListHasInvalidFileDropPath, fileDropList));
                }
            }

            string[] fileDropListStrings;

            fileDropListStrings = new string[fileDropList.Count];
            fileDropList.CopyTo(fileDropListStrings, 0);

            SetDataInternal(DataFormats.FileDrop, fileDropListStrings);
        }

        /// <summary>
        /// Set the image data to Clipboard.
        /// </summary>
        public static void SetImage(BitmapSource image)
        {
            if (image == null)
            {
                throw new ArgumentNullException("image");
            }

            SetDataInternal(DataFormats.Bitmap, image);
        }

        /// <summary>
        /// Set the text data to Clipboard.
        /// </summary>
        public static void SetText(string text)
        {
            if (text == null)
            {
                throw new ArgumentNullException("text");
            }

            SetText(text, TextDataFormat.UnicodeText);
        }

        /// <summary>
        /// Set the text data to Clipboard.
        /// </summary>
        public static void SetText(string text, TextDataFormat format)
        {
            if (text == null)
            {
                throw new ArgumentNullException("text");
            }

            if (!DataFormats.IsValidTextDataFormat(format))
            {
                throw new InvalidEnumArgumentException("format", (int)format, typeof(TextDataFormat));
            }

            SetDataInternal(DataFormats.ConvertToDataFormats(format), text);
        }

        /// <summary>
        /// Retrieves the data object that is currently on the system clipboard.
        /// </summary>
        /// <remarks>
        /// Callers must have UIPermission(UIPermissionClipboard.AllClipboard) to call this API.
/// </remarks>
        /// <SecurityNote>
        ///     Critical - access critical data (clipboard information)
        ///     PublicOk - demands appropriate permission (AllClipboard)
        /// </SecurityNote>
        [SecurityCritical]
        public static IDataObject GetDataObject()
        {
            SecurityHelper.DemandAllClipboardPermission();

            return GetDataObjectInternal();
        }

        /// <summary>
        /// Determines whether the data object previously placed on the clipboard
        /// by the SetDataObject is still on the clipboard.
        /// </summary>
        /// <param name="data">
        /// Data object from the current containing clipboard which the caller
        /// previously placed on the clipboard.
        /// </param>
        public static bool IsCurrent(IDataObject data)
        {
            bool bReturn;

            if (data == null)
            {
                throw new ArgumentNullException("data");
            }

            bReturn = false;

            // Only a COM data object can be compared against the OLE clipboard contents;
            // for any other IDataObject the answer is always false.
            if (data is IComDataObject)
            {
                int hr;

                // Retry OLE operations several times as mitigation for clipboard locking issues in TS sessions.
                // See Dev10 bug 616223 and VSWhidbey bug 476911.
                int i = OleRetryCount;

                while (true)
                {
                    hr = OleServicesContext.CurrentOleServicesContext.OleIsCurrentClipboard((IComDataObject)data);

                    if (NativeMethods.Succeeded(hr) || (--i == 0))
                    {
                        break;
                    }

                    Thread.Sleep(OleRetryDelay);
                }

                if (hr == NativeMethods.S_OK)
                {
                    bReturn = true;
                }
                else if (!NativeMethods.Succeeded(hr))
                {
                    throw new ExternalException("OleIsCurrentClipboard()", hr);
                }
            }

            return bReturn;
        }

        /// <summary>
        /// Places nonpersistent data on the system clipboard.
        /// </summary>
        /// <param name="data">
        /// The specific data to be on clipboard.
        /// </param>
        /// <remarks>
        /// Callers must have UIPermission(UIPermissionClipboard.AllClipboard) to call this API.
        /// </remarks>
        /// <SecurityNote>
        ///     Critical - access critical data (clipboard information)
        ///     PublicOk - demands appropriate permission (AllClipboard)
        /// </SecurityNote>
        [SecurityCritical]
        public static void SetDataObject(object data)
        {
            SecurityHelper.DemandAllClipboardPermission();
            if (data == null)
            {
                throw new ArgumentNullException("data");
            }

            SetDataObject(data, false);
        }

        /// <summary>
        /// Places data on the system Clipboard and uses copy to specify whether the data
        /// should remain on the Clipboard after the application exits.
        /// </summary>
        /// <param name="data">
        /// The specific data to be on clipboard.
        /// </param>
        /// <param name="copy">
        /// Specify whether the data should remain on the clipboard after the application exits.
        /// </param>
        /// <remarks>
        /// Callers must have UIPermission(UIPermissionClipboard.AllClipboard) to call this API.
        /// </remarks>
        /// <SecurityNote>
        ///     Critical - calls critical code (set clipboard), and potentially deals
        ///                with unmanaged pointers
        ///     PublicOk - Demands All Clipboard permissions
        /// </SecurityNote>
        [SecurityCritical]
        public static void SetDataObject(object data, bool copy)
        {
            SecurityHelper.DemandAllClipboardPermission();
            CriticalSetDataObject(data,copy);
        }

        #endregion Public Methods

        #region Internal Methods

        //------------------------------------------------------
        //
        //  Internal Methods
        //
        //------------------------------------------------------

        /// <summary>
        /// Places data on the system Clipboard and uses copy to specify whether the data
        /// should remain on the Clipboard after the application exits.
        /// </summary>
        /// <param name="data">
        /// The specific data to be on clipboard.
        /// </param>
        /// <param name="copy">
        /// Specify whether the data should remain on the clipboard after the application exits.
        /// </param>
        /// <SecurityNote>
        ///     Critical - calls critical code (set clipboard), and potentially deals
        ///                with unmanaged pointers
        /// </SecurityNote>
        [SecurityCritical]
        [FriendAccessAllowed]
        internal static void CriticalSetDataObject(object data, bool copy)
        {
            if (data == null)
            {
                throw new ArgumentNullException("data");
            }

            IComDataObject dataObject;

            if (data is DataObject)
            {
                dataObject = (DataObject)data;
            }
            else if (data is IComDataObject)
            {
                // Raw COM data objects require unmanaged-code permission from the caller.
                SecurityHelper.DemandUnmanagedCode();
                dataObject = (IComDataObject)data;
            }
            else
            {
                // Arbitrary managed data is wrapped in a DataObject.
                dataObject = new DataObject(data);
            }

            // Retry OLE operations several times as mitigation for clipboard locking issues in TS sessions.
            // See Dev10 bug 616223 and VSWhidbey bug 476911.
            int i = OleRetryCount;

            while (true)
            {
                // Clear the system clipboard by calling OleSetClipboard with null parameter.
                int hr = OleServicesContext.CurrentOleServicesContext.OleSetClipboard(dataObject);

                if (NativeMethods.Succeeded(hr))
                {
                    break;
                }

                if (--i == 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Thread.Sleep(OleRetryDelay);
            }

            if (copy)
            {
                // Dev10 bug 835751 - OleSetClipboard and OleFlushClipboard both modify the clipboard
                // and cause notifications to be sent to clipboard listeners. We sleep a bit here to
                // mitigate issues with clipboard listeners (like TS) corrupting the clipboard contents
                // as a result of these two calls being back to back.
Thread.Sleep(OleFlushDelay);

                Flush();
            }
        }

        /// <SecurityNote>
        ///     Critical - access critical data (clipboard information)
        ///     TreatAsSafe: Returning a bool indicating whether there is data on the clipboard is ok
        /// </SecurityNote>
        [SecurityCritical, SecurityTreatAsSafe]
        [FriendAccessAllowed]
        internal static bool IsClipboardPopulated()
        {
            bool isPopulated = false;

            // Assert clipboard permission just long enough to peek at the data object.
            (new UIPermission(UIPermissionClipboard.AllClipboard)).Assert();//BlessedAssert
            try
            {
                isPopulated = (GetDataObjectInternal() != null);
            }
            finally
            {
                UIPermission.RevertAssert();
            }

            return isPopulated;
        }

        #endregion Internal Methods

        //------------------------------------------------------
        //
        //  Private Methods
        //
        //------------------------------------------------------

        #region Private Methods

        /// <SecurityNote>
        ///     Critical: This method calls into ExtractAppDomainPermissionSetMinusSiteOfOrigin this is used to make trust decision to
        ///     copy paste content and is hence important to be tracked. Also it asserts to get to data
        /// </SecurityNote>
        [SecurityCritical]
        private static bool IsDataObjectFromLessPriviligedApplicationDomain(IDataObject dataObjectToApply)
        {
            bool retVal = false;

            object applicationTrust = null;

            // Extract the permission set in case of xaml cut and paste
            // extract permission set if it exists if not data came from full trust app and we do not care
            bool isApplicationTrustFormatPresent = false;
            isApplicationTrustFormatPresent = dataObjectToApply.GetDataPresent(DataFormats.ApplicationTrust, /*autoConvert:*/false);
            if (isApplicationTrustFormatPresent)
            {
                applicationTrust = dataObjectToApply.GetData(DataFormats.ApplicationTrust, /*autoConvert:*/false);
            }

            if (applicationTrust != null)
            {
                string applicationTrustText = null;

                // convert to string
                applicationTrustText = applicationTrust.ToString();

                // Convert string to permission set for getting permission set of source
                PermissionSet permissionSetSource;

                try
                {
                    SecurityElement securityElement = SecurityElement.FromString(applicationTrustText);
                    permissionSetSource = new System.Security.PermissionSet(PermissionState.None);
                    permissionSetSource.FromXml(securityElement);
                }
                catch(XmlSyntaxException)
                {
                    // This is the condition where we have Malformed XML in the clipboard for application trust
                    // here we will fail silently since we do not want to break arbitrary applications
                    // but since we cannot establish the validity of the application trust content we will fall back to
                    // whatever is more secure
                    return true;
                }

                //extract permission set for the current appdomain which is target
                PermissionSet permissionSetDestination = SecurityHelper.ExtractAppDomainPermissionSetMinusSiteOfOrigin();

                //Compare permissions sets
                if (!permissionSetDestination.IsSubsetOf(permissionSetSource))
                {
                    retVal = true; // in case target is not subset of source revert to unicode or text
                }
            }

            return retVal;
        }

        /// <SecurityNote>
        ///     Critical: This code extracts the DataObject from the clipboard
        ///               which can be used to sniff clipboard
        /// </SecurityNote>
        [SecurityCritical]
        private static IDataObject GetDataObjectInternal()
        {
            IDataObject dataObject;
            IComDataObject oleDataObject;

            // Retry OLE operations several times as mitigation for clipboard locking issues in TS sessions.
            // See Dev10 bug 616223 and VSWhidbey bug 476911.
            int i = OleRetryCount;

            while (true)
            {
                oleDataObject = null;
                int hr = OleServicesContext.CurrentOleServicesContext.OleGetClipboard(ref oleDataObject);

                if (NativeMethods.Succeeded(hr))
                {
                    break;
                }

                if (--i == 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Thread.Sleep(OleRetryDelay);
            }

            if (oleDataObject is IDataObject)
            {
                dataObject = (IDataObject)oleDataObject;
            }
            else if (oleDataObject != null)
            {
                dataObject = new DataObject(oleDataObject);
            }
            else
            {
                dataObject = null;
            }

            // We make this check outside of the loop independant of whether the data is ole data object or IDataObject
            // Although one is unable to create an OleDataObject in partial trust we still need to ensure that if he did
            // we strip the formats we care about by wrapping in ConstrainedDataObject
            if (dataObject != null)
            {
                // this is the case we are concerend about where content comes from partial trust into full trust
                // in the case where data contained is in one of the two formats: XAML or ApplicationTrust we return a wrapper
                // that blocks access to these
                if (IsDataObjectFromLessPriviligedApplicationDomain(dataObject) &&
                    (dataObject.GetDataPresent(DataFormats.Xaml, /*autoConvert:*/false) ||
                     dataObject.GetDataPresent(DataFormats.ApplicationTrust, /*autoConvert:*/false)))
                {
                    // in this case we set the data object to be a wrapper data object that blocks off
                    // xaml or application trust formats if they exist
                    dataObject = new ConstrainedDataObject(dataObject);
                }
            }

            return dataObject;
        }

        /// <summary>
        /// Query the specified data format from Clipboard.
        /// </summary>
        /// <SecurityNote>
        ///     Critical - Accesses the clipboard.
        ///     TreatAsSafe - We demand clipboard permission.
        /// </SecurityNote>
[SecurityCritical, SecurityTreatAsSafe]
        private static bool ContainsDataInternal(string format)
        {
            SecurityHelper.DemandAllClipboardPermission();

            bool isFormatAvailable = false;

            if (IsDataFormatAutoConvert(format))
            {
                // Auto-convertible format: probe every native format it maps to.
                foreach (string mappedFormat in DataObject.GetMappedFormats(format))
                {
                    if (SafeNativeMethods.IsClipboardFormatAvailable(DataFormats.GetDataFormat(mappedFormat).Id))
                    {
                        isFormatAvailable = true;
                        break;
                    }
                }
            }
            else
            {
                // Plain format: a single availability check suffices.
                isFormatAvailable = SafeNativeMethods.IsClipboardFormatAvailable(DataFormats.GetDataFormat(format).Id);
            }

            return isFormatAvailable;
        }

        /// <summary>
        /// Get the specified format from Clipboard.
        /// </summary>
        private static object GetDataInternal(string format)
        {
            IDataObject dataObject = Clipboard.GetDataObject();

            if (dataObject == null)
            {
                return null;
            }

            bool autoConvert = IsDataFormatAutoConvert(format);

            return dataObject.GetData(format, autoConvert);
        }

        /// <summary>
        /// Set the specified data into Clipboard.
        /// </summary>
        private static void SetDataInternal(string format, object data)
        {
            bool autoConvert = IsDataFormatAutoConvert(format);

            IDataObject dataObject = new DataObject();
            dataObject.SetData(format, data, autoConvert);

            Clipboard.SetDataObject(dataObject, /*copy*/true);
        }

        /// <summary>
        /// Check the auto convert for the specified data format.
/// </summary>
        private static bool IsDataFormatAutoConvert(string format)
        {
            // FileDrop and Bitmap are the only formats that participate in
            // clipboard format auto-conversion.
            return String.CompareOrdinal(format, DataFormats.FileDrop) == 0
                || String.CompareOrdinal(format, DataFormats.Bitmap) == 0;
        }

        #endregion Private Methods

        //------------------------------------------------------
        //
        //  Private Constants
        //
        //------------------------------------------------------

        #region Private Constants

        /// <summary>
        /// The number of times to retry OLE clipboard operations.
        /// </summary>
        /// <remarks>
        /// This is mitigation for clipboard locking issues in TS sessions. See Dev10 bug 616223 and VSWhidbey bug 476911.
        /// </remarks>
        private const int OleRetryCount = 10;

        /// <summary>
        /// The amount of time in milliseconds to sleep between retrying OLE clipboard operations.
        /// </summary>
        /// <remarks>
        /// This is mitigation for clipboard locking issues in TS sessions. See Dev10 bug 616223 and VSWhidbey bug 476911.
        /// </remarks>
        private const int OleRetryDelay = 100;

        /// <summary>
        /// The amount of time in milliseconds to sleep before flushing the clipboard after a set.
        /// </summary>
        /// <remarks>
        /// This is mitigation for clipboard listener issues. See Dev10 bug 835751.
        /// </remarks>
        private const int OleFlushDelay = 10;

        #endregion Private Constants
    }

    #endregion Clipboard class
}
/*
 * Agent.cs
 * RVO2 Library C#
 *
 * Copyright 2008 University of North Carolina at Chapel Hill
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Please send all bug reports to <geom@cs.unc.edu>.
 *
 * The authors may be contacted via:
 *
 * Jur van den Berg, Stephen J. Guy, Jamie Snape, Ming C. Lin, Dinesh Manocha
 * Dept. of Computer Science
 * 201 S. Columbia St.
 * Frederick P. Brooks, Jr. Computer Science Bldg.
 * Chapel Hill, N.C. 27599-3175
 * United States of America
 *
 * <http://gamma.cs.unc.edu/RVO2/>
 */

using System;
using System.Collections.Generic;

namespace RVO
{
    /**
     * <summary>Defines an agent in the simulation.</summary>
     */
    internal class Agent
    {
        // Neighbors found by the last computeNeighbors() pass, paired with squared distance.
        internal IList<KeyValuePair<float, Agent>> agentNeighbors_ = new List<KeyValuePair<float, Agent>>();
        internal IList<KeyValuePair<float, Obstacle>> obstacleNeighbors_ = new List<KeyValuePair<float, Obstacle>>();
        // ORCA half-plane constraints built by computeNewVelocity().
        internal IList<Line> orcaLines_ = new List<Line>();
        internal Vector2 position_;
        internal Vector2 prefVelocity_;
        internal Vector2 velocity_;
        internal int id_ = 0;
        internal int maxNeighbors_ = 0;
        internal float maxSpeed_ = 0.0f;
        internal float neighborDist_ = 0.0f;
        internal float radius_ = 0.0f;
        internal float timeHorizon_ = 0.0f;
        internal float timeHorizonObst_ = 0.0f;
        private Vector2 newVelocity_;

        /**
         * <summary>Computes the neighbors of this agent.</summary>
         */
        internal void computeNeighbors()
        {
            obstacleNeighbors_.Clear();
            // Obstacles matter within the distance reachable over the obstacle
            // time horizon, padded by the agent radius.
            float rangeSq = RVOMath.sqr(timeHorizonObst_ * maxSpeed_ + radius_);
            Simulator.Instance.kdTree_.computeObstacleNeighbors(this, rangeSq);

            agentNeighbors_.Clear();

            if (maxNeighbors_ > 0)
            {
                // rangeSq is passed by ref: the kd-tree shrinks it as closer agents are found.
                rangeSq = RVOMath.sqr(neighborDist_);
                Simulator.Instance.kdTree_.computeAgentNeighbors(this, ref rangeSq);
            }
        }

        /**
         * <summary>Computes the new velocity of this agent.</summary>
         */
        internal void computeNewVelocity()
        {
            orcaLines_.Clear();

            float invTimeHorizonObst = 1.0f / timeHorizonObst_;

            /* Create obstacle ORCA lines. */
            for (int i = 0; i < obstacleNeighbors_.Count; ++i)
            {
                Obstacle obstacle1 = obstacleNeighbors_[i].Value;
                Obstacle obstacle2 = obstacle1.next_;

                Vector2 relativePosition1 = obstacle1.point_ - position_;
                Vector2 relativePosition2 = obstacle2.point_ - position_;

                /*
                 * Check if velocity obstacle of obstacle is already taken care
                 * of by previously constructed obstacle ORCA lines.
                 */
                bool alreadyCovered = false;

                for (int j = 0; j < orcaLines_.Count; ++j)
                {
                    if (RVOMath.det(invTimeHorizonObst * relativePosition1 - orcaLines_[j].point, orcaLines_[j].direction) - invTimeHorizonObst * radius_ >= -RVOMath.RVO_EPSILON && RVOMath.det(invTimeHorizonObst * relativePosition2 - orcaLines_[j].point, orcaLines_[j].direction) - invTimeHorizonObst * radius_ >= -RVOMath.RVO_EPSILON)
                    {
                        alreadyCovered = true;

                        break;
                    }
                }

                if (alreadyCovered)
                {
                    continue;
                }

                /* Not yet covered. Check for collisions. */
                float distSq1 = RVOMath.absSq(relativePosition1);
                float distSq2 = RVOMath.absSq(relativePosition2);

                float radiusSq = RVOMath.sqr(radius_);

                Vector2 obstacleVector = obstacle2.point_ - obstacle1.point_;
                // s is the parameter of the projection of the agent position onto the obstacle segment.
                float s = (-relativePosition1 * obstacleVector) / RVOMath.absSq(obstacleVector);
                float distSqLine = RVOMath.absSq(-relativePosition1 - s * obstacleVector);

                Line line;

                if (s < 0.0f && distSq1 <= radiusSq)
                {
                    /* Collision with left vertex. Ignore if non-convex.
*/ if (obstacle1.convex_) { line.point = new Vector2(0.0f, 0.0f); line.direction = RVOMath.normalize(new Vector2(-relativePosition1.y(), relativePosition1.x())); orcaLines_.Add(line); } continue; } else if (s > 1.0f && distSq2 <= radiusSq) { /* * Collision with right vertex. Ignore if non-convex or if * it will be taken care of by neighboring obstacle. */ if (obstacle2.convex_ && RVOMath.det(relativePosition2, obstacle2.direction_) >= 0.0f) { line.point = new Vector2(0.0f, 0.0f); line.direction = RVOMath.normalize(new Vector2(-relativePosition2.y(), relativePosition2.x())); orcaLines_.Add(line); } continue; } else if (s >= 0.0f && s <= 1.0f && distSqLine <= radiusSq) { /* Collision with obstacle segment. */ line.point = new Vector2(0.0f, 0.0f); line.direction = -obstacle1.direction_; orcaLines_.Add(line); continue; } /* * No collision. Compute legs. When obliquely viewed, both legs * can come from a single vertex. Legs extend cut-off line when * non-convex vertex. */ Vector2 leftLegDirection, rightLegDirection; if (s < 0.0f && distSqLine <= radiusSq) { /* * Obstacle viewed obliquely so that left vertex * defines velocity obstacle. */ if (!obstacle1.convex_) { /* Ignore obstacle. */ continue; } obstacle2 = obstacle1; float leg1 = RVOMath.sqrt(distSq1 - radiusSq); leftLegDirection = new Vector2(relativePosition1.x() * leg1 - relativePosition1.y() * radius_, relativePosition1.x() * radius_ + relativePosition1.y() * leg1) / distSq1; rightLegDirection = new Vector2(relativePosition1.x() * leg1 + relativePosition1.y() * radius_, -relativePosition1.x() * radius_ + relativePosition1.y() * leg1) / distSq1; } else if (s > 1.0f && distSqLine <= radiusSq) { /* * Obstacle viewed obliquely so that * right vertex defines velocity obstacle. */ if (!obstacle2.convex_) { /* Ignore obstacle. 
*/ continue; } obstacle1 = obstacle2; float leg2 = RVOMath.sqrt(distSq2 - radiusSq); leftLegDirection = new Vector2(relativePosition2.x() * leg2 - relativePosition2.y() * radius_, relativePosition2.x() * radius_ + relativePosition2.y() * leg2) / distSq2; rightLegDirection = new Vector2(relativePosition2.x() * leg2 + relativePosition2.y() * radius_, -relativePosition2.x() * radius_ + relativePosition2.y() * leg2) / distSq2; } else { /* Usual situation. */ if (obstacle1.convex_) { float leg1 = RVOMath.sqrt(distSq1 - radiusSq); leftLegDirection = new Vector2(relativePosition1.x() * leg1 - relativePosition1.y() * radius_, relativePosition1.x() * radius_ + relativePosition1.y() * leg1) / distSq1; } else { /* Left vertex non-convex; left leg extends cut-off line. */ leftLegDirection = -obstacle1.direction_; } if (obstacle2.convex_) { float leg2 = RVOMath.sqrt(distSq2 - radiusSq); rightLegDirection = new Vector2(relativePosition2.x() * leg2 + relativePosition2.y() * radius_, -relativePosition2.x() * radius_ + relativePosition2.y() * leg2) / distSq2; } else { /* Right vertex non-convex; right leg extends cut-off line. */ rightLegDirection = obstacle1.direction_; } } /* * Legs can never point into neighboring edge when convex * vertex, take cutoff-line of neighboring edge instead. If * velocity projected on "foreign" leg, no constraint is added. */ Obstacle leftNeighbor = obstacle1.previous_; bool isLeftLegForeign = false; bool isRightLegForeign = false; if (obstacle1.convex_ && RVOMath.det(leftLegDirection, -leftNeighbor.direction_) >= 0.0f) { /* Left leg points into obstacle. */ leftLegDirection = -leftNeighbor.direction_; isLeftLegForeign = true; } if (obstacle2.convex_ && RVOMath.det(rightLegDirection, obstacle2.direction_) <= 0.0f) { /* Right leg points into obstacle. */ rightLegDirection = obstacle2.direction_; isRightLegForeign = true; } /* Compute cut-off centers. 
*/ Vector2 leftCutOff = invTimeHorizonObst * (obstacle1.point_ - position_); Vector2 rightCutOff = invTimeHorizonObst * (obstacle2.point_ - position_); Vector2 cutOffVector = rightCutOff - leftCutOff; /* Project current velocity on velocity obstacle. */ /* Check if current velocity is projected on cutoff circles. */ float t = obstacle1 == obstacle2 ? 0.5f : ((velocity_ - leftCutOff) * cutOffVector) / RVOMath.absSq(cutOffVector); float tLeft = (velocity_ - leftCutOff) * leftLegDirection; float tRight = (velocity_ - rightCutOff) * rightLegDirection; if ((t < 0.0f && tLeft < 0.0f) || (obstacle1 == obstacle2 && tLeft < 0.0f && tRight < 0.0f)) { /* Project on left cut-off circle. */ Vector2 unitW = RVOMath.normalize(velocity_ - leftCutOff); line.direction = new Vector2(unitW.y(), -unitW.x()); line.point = leftCutOff + radius_ * invTimeHorizonObst * unitW; orcaLines_.Add(line); continue; } else if (t > 1.0f && tRight < 0.0f) { /* Project on right cut-off circle. */ Vector2 unitW = RVOMath.normalize(velocity_ - rightCutOff); line.direction = new Vector2(unitW.y(), -unitW.x()); line.point = rightCutOff + radius_ * invTimeHorizonObst * unitW; orcaLines_.Add(line); continue; } /* * Project on left leg, right leg, or cut-off line, whichever is * closest to velocity. */ float distSqCutoff = (t < 0.0f || t > 1.0f || obstacle1 == obstacle2) ? float.PositiveInfinity : RVOMath.absSq(velocity_ - (leftCutOff + t * cutOffVector)); float distSqLeft = tLeft < 0.0f ? float.PositiveInfinity : RVOMath.absSq(velocity_ - (leftCutOff + tLeft * leftLegDirection)); float distSqRight = tRight < 0.0f ? float.PositiveInfinity : RVOMath.absSq(velocity_ - (rightCutOff + tRight * rightLegDirection)); if (distSqCutoff <= distSqLeft && distSqCutoff <= distSqRight) { /* Project on cut-off line. 
*/ line.direction = -obstacle1.direction_; line.point = leftCutOff + radius_ * invTimeHorizonObst * new Vector2(-line.direction.y(), line.direction.x()); orcaLines_.Add(line); continue; } if (distSqLeft <= distSqRight) { /* Project on left leg. */ if (isLeftLegForeign) { continue; } line.direction = leftLegDirection; line.point = leftCutOff + radius_ * invTimeHorizonObst * new Vector2(-line.direction.y(), line.direction.x()); orcaLines_.Add(line); continue; } /* Project on right leg. */ if (isRightLegForeign) { continue; } line.direction = -rightLegDirection; line.point = rightCutOff + radius_ * invTimeHorizonObst * new Vector2(-line.direction.y(), line.direction.x()); orcaLines_.Add(line); } int numObstLines = orcaLines_.Count; float invTimeHorizon = 1.0f / timeHorizon_; /* Create agent ORCA lines. */ for (int i = 0; i < agentNeighbors_.Count; ++i) { Agent other = agentNeighbors_[i].Value; Vector2 relativePosition = other.position_ - position_; Vector2 relativeVelocity = velocity_ - other.velocity_; float distSq = RVOMath.absSq(relativePosition); float combinedRadius = radius_ + other.radius_; float combinedRadiusSq = RVOMath.sqr(combinedRadius); Line line; Vector2 u; if (distSq > combinedRadiusSq) { /* No collision. */ Vector2 w = relativeVelocity - invTimeHorizon * relativePosition; /* Vector from cutoff center to relative velocity. */ float wLengthSq = RVOMath.absSq(w); float dotProduct1 = w * relativePosition; if (dotProduct1 < 0.0f && RVOMath.sqr(dotProduct1) > combinedRadiusSq * wLengthSq) { /* Project on cut-off circle. */ float wLength = RVOMath.sqrt(wLengthSq); Vector2 unitW = w / wLength; line.direction = new Vector2(unitW.y(), -unitW.x()); u = (combinedRadius * invTimeHorizon - wLength) * unitW; } else { /* Project on legs. */ float leg = RVOMath.sqrt(distSq - combinedRadiusSq); if (RVOMath.det(relativePosition, w) > 0.0f) { /* Project on left leg. 
*/ line.direction = new Vector2(relativePosition.x() * leg - relativePosition.y() * combinedRadius, relativePosition.x() * combinedRadius + relativePosition.y() * leg) / distSq; } else { /* Project on right leg. */ line.direction = -new Vector2(relativePosition.x() * leg + relativePosition.y() * combinedRadius, -relativePosition.x() * combinedRadius + relativePosition.y() * leg) / distSq; } float dotProduct2 = relativeVelocity * line.direction; u = dotProduct2 * line.direction - relativeVelocity; } } else { /* Collision. Project on cut-off circle of time timeStep. */ float invTimeStep = 1.0f / Simulator.Instance.timeStep_; /* Vector from cutoff center to relative velocity. */ Vector2 w = relativeVelocity - invTimeStep * relativePosition; float wLength = RVOMath.abs(w); Vector2 unitW = w / wLength; line.direction = new Vector2(unitW.y(), -unitW.x()); u = (combinedRadius * invTimeStep - wLength) * unitW; } line.point = velocity_ + 0.5f * u; orcaLines_.Add(line); } int lineFail = linearProgram2(orcaLines_, maxSpeed_, prefVelocity_, false, ref newVelocity_); if (lineFail < orcaLines_.Count) { linearProgram3(orcaLines_, numObstLines, lineFail, maxSpeed_, ref newVelocity_); } } /** * <summary>Inserts an agent neighbor into the set of neighbors of this * agent.</summary> * * <param name="agent">A pointer to the agent to be inserted.</param> * <param name="rangeSq">The squared range around this agent.</param> */ internal void insertAgentNeighbor(Agent agent, ref float rangeSq) { if (this != agent) { float distSq = RVOMath.absSq(position_ - agent.position_); if (distSq < rangeSq) { if (agentNeighbors_.Count < maxNeighbors_) { agentNeighbors_.Add(new KeyValuePair<float, Agent>(distSq, agent)); } int i = agentNeighbors_.Count - 1; while (i != 0 && distSq < agentNeighbors_[i - 1].Key) { agentNeighbors_[i] = agentNeighbors_[i - 1]; --i; } agentNeighbors_[i] = new KeyValuePair<float, Agent>(distSq, agent); if (agentNeighbors_.Count == maxNeighbors_) { rangeSq = 
agentNeighbors_[agentNeighbors_.Count - 1].Key; } } } } /** * <summary>Inserts a static obstacle neighbor into the set of neighbors * of this agent.</summary> * * <param name="obstacle">The number of the static obstacle to be * inserted.</param> * <param name="rangeSq">The squared range around this agent.</param> */ internal void insertObstacleNeighbor(Obstacle obstacle, float rangeSq) { Obstacle nextObstacle = obstacle.next_; float distSq = RVOMath.distSqPointLineSegment(obstacle.point_, nextObstacle.point_, position_); if (distSq < rangeSq) { obstacleNeighbors_.Add(new KeyValuePair<float, Obstacle>(distSq, obstacle)); int i = obstacleNeighbors_.Count - 1; while (i != 0 && distSq < obstacleNeighbors_[i - 1].Key) { obstacleNeighbors_[i] = obstacleNeighbors_[i - 1]; --i; } obstacleNeighbors_[i] = new KeyValuePair<float, Obstacle>(distSq, obstacle); } } /** * <summary>Updates the two-dimensional position and two-dimensional * velocity of this agent.</summary> */ internal void update() { velocity_ = newVelocity_; position_ += velocity_ * Simulator.Instance.timeStep_; } /** * <summary>Solves a one-dimensional linear program on a specified line * subject to linear constraints defined by lines and a circular * constraint.</summary> * * <returns>True if successful.</returns> * * <param name="lines">Lines defining the linear constraints.</param> * <param name="lineNo">The specified line constraint.</param> * <param name="radius">The radius of the circular constraint.</param> * <param name="optVelocity">The optimization velocity.</param> * <param name="directionOpt">True if the direction should be optimized. * </param> * <param name="result">A reference to the result of the linear program. 
* </param> */ private bool linearProgram1(IList<Line> lines, int lineNo, float radius, Vector2 optVelocity, bool directionOpt, ref Vector2 result) { float dotProduct = lines[lineNo].point * lines[lineNo].direction; float discriminant = RVOMath.sqr(dotProduct) + RVOMath.sqr(radius) - RVOMath.absSq(lines[lineNo].point); if (discriminant < 0.0f) { /* Max speed circle fully invalidates line lineNo. */ return false; } float sqrtDiscriminant = RVOMath.sqrt(discriminant); float tLeft = -dotProduct - sqrtDiscriminant; float tRight = -dotProduct + sqrtDiscriminant; for (int i = 0; i < lineNo; ++i) { float denominator = RVOMath.det(lines[lineNo].direction, lines[i].direction); float numerator = RVOMath.det(lines[i].direction, lines[lineNo].point - lines[i].point); if (RVOMath.fabs(denominator) <= RVOMath.RVO_EPSILON) { /* Lines lineNo and i are (almost) parallel. */ if (numerator < 0.0f) { return false; } continue; } float t = numerator / denominator; if (denominator >= 0.0f) { /* Line i bounds line lineNo on the right. */ tRight = Math.Min(tRight, t); } else { /* Line i bounds line lineNo on the left. */ tLeft = Math.Max(tLeft, t); } if (tLeft > tRight) { return false; } } if (directionOpt) { /* Optimize direction. */ if (optVelocity * lines[lineNo].direction > 0.0f) { /* Take right extreme. */ result = lines[lineNo].point + tRight * lines[lineNo].direction; } else { /* Take left extreme. */ result = lines[lineNo].point + tLeft * lines[lineNo].direction; } } else { /* Optimize closest point. 
*/ float t = lines[lineNo].direction * (optVelocity - lines[lineNo].point); if (t < tLeft) { result = lines[lineNo].point + tLeft * lines[lineNo].direction; } else if (t > tRight) { result = lines[lineNo].point + tRight * lines[lineNo].direction; } else { result = lines[lineNo].point + t * lines[lineNo].direction; } } return true; } /** * <summary>Solves a two-dimensional linear program subject to linear * constraints defined by lines and a circular constraint.</summary> * * <returns>The number of the line it fails on, and the number of lines * if successful.</returns> * * <param name="lines">Lines defining the linear constraints.</param> * <param name="radius">The radius of the circular constraint.</param> * <param name="optVelocity">The optimization velocity.</param> * <param name="directionOpt">True if the direction should be optimized. * </param> * <param name="result">A reference to the result of the linear program. * </param> */ private int linearProgram2(IList<Line> lines, float radius, Vector2 optVelocity, bool directionOpt, ref Vector2 result) { if (directionOpt) { /* * Optimize direction. Note that the optimization velocity is of * unit length in this case. */ result = optVelocity * radius; } else if (RVOMath.absSq(optVelocity) > RVOMath.sqr(radius)) { /* Optimize closest point and outside circle. */ result = RVOMath.normalize(optVelocity) * radius; } else { /* Optimize closest point and inside circle. */ result = optVelocity; } for (int i = 0; i < lines.Count; ++i) { if (RVOMath.det(lines[i].direction, lines[i].point - result) > 0.0f) { /* Result does not satisfy constraint i. Compute new optimal result. 
*/ Vector2 tempResult = result; if (!linearProgram1(lines, i, radius, optVelocity, directionOpt, ref result)) { result = tempResult; return i; } } } return lines.Count; } /** * <summary>Solves a two-dimensional linear program subject to linear * constraints defined by lines and a circular constraint.</summary> * * <param name="lines">Lines defining the linear constraints.</param> * <param name="numObstLines">Count of obstacle lines.</param> * <param name="beginLine">The line on which the 2-d linear program * failed.</param> * <param name="radius">The radius of the circular constraint.</param> * <param name="result">A reference to the result of the linear program. * </param> */ private void linearProgram3(IList<Line> lines, int numObstLines, int beginLine, float radius, ref Vector2 result) { float distance = 0.0f; for (int i = beginLine; i < lines.Count; ++i) { if (RVOMath.det(lines[i].direction, lines[i].point - result) > distance) { /* Result does not satisfy constraint of line i. */ IList<Line> projLines = new List<Line>(); for (int ii = 0; ii < numObstLines; ++ii) { projLines.Add(lines[ii]); } for (int j = numObstLines; j < i; ++j) { Line line; float determinant = RVOMath.det(lines[i].direction, lines[j].direction); if (RVOMath.fabs(determinant) <= RVOMath.RVO_EPSILON) { /* Line i and line j are parallel. */ if (lines[i].direction * lines[j].direction > 0.0f) { /* Line i and line j point in the same direction. */ continue; } else { /* Line i and line j point in opposite direction. 
*/ line.point = 0.5f * (lines[i].point + lines[j].point); } } else { line.point = lines[i].point + (RVOMath.det(lines[j].direction, lines[i].point - lines[j].point) / determinant) * lines[i].direction; } line.direction = RVOMath.normalize(lines[j].direction - lines[i].direction); projLines.Add(line); } Vector2 tempResult = result; if (linearProgram2(projLines, radius, new Vector2(-lines[i].direction.y(), lines[i].direction.x()), true, ref result) < projLines.Count) { /* * This should in principle not happen. The result is by * definition already in the feasible region of this * linear program. If it fails, it is due to small * floating point error, and the current result is kept. */ result = tempResult; } distance = RVOMath.det(lines[i].direction, lines[i].point - result); } } } } }
using UnityEngine;
using System.Collections;
using System;
#if UNITY_EDITOR
using UnityEditor;
using System.IO;
#endif

/// <summary>
/// Static entry point for the GameAnalytics integration. Lazily loads the
/// GA_Settings asset (creating it in the editor if missing), exposes the
/// category sub-APIs via <see cref="API"/>, and dispatches coroutines either
/// through a scene GameObject (play mode) or the editor continuation manager.
/// </summary>
public class GA
{
    // Backing GameObject-based runner for coroutines in play mode.
    public static GA_GameObjectManager _GA_controller;

    // Cached settings asset; loaded on first access via InitAPI().
    private static GA_Settings _settings;

    /// <summary>
    /// Settings asset, lazily loaded from Resources on first access.
    /// NOTE(review): may still be null after InitAPI() if loading failed at
    /// runtime — callers appear to assume success; verify against callers.
    /// </summary>
    public static GA_Settings SettingsGA
    {
        get
        {
            if (_settings == null)
            {
                InitAPI();
            }
            return _settings;
        }
        private set
        {
            _settings = value;
        }
    }

    /// <summary>
    /// Scene-side coroutine runner; creates the "GA_Controller" GameObject
    /// on demand the first time it is needed.
    /// </summary>
    public static GA_GameObjectManager GA_controller
    {
        get
        {
            if (_GA_controller == null)
            {
                var ga = new GameObject("GA_Controller");
                _GA_controller = ga.AddComponent<GA_GameObjectManager>();
            }
            return _GA_controller;
        }
        private set
        {
            _GA_controller = value;
        }
    }

    /// <summary>
    /// Aggregates the per-category GameAnalytics sub-APIs. GA_Request is
    /// excluded on the Flash player target (unsupported there).
    /// </summary>
    public class GA_API
    {
        public GA_Quality Quality = new GA_Quality();
        public GA_Error Error = new GA_Error();
        public GA_Design Design = new GA_Design();
        public GA_Business Business = new GA_Business();
        public GA_GenericInfo GenericInfo = new GA_GenericInfo();
        public GA_Debug Debugging = new GA_Debug();
        public GA_Archive Archive = new GA_Archive();
        #if UNITY_EDITOR || !UNITY_FLASH
        public GA_Request Request = new GA_Request();
        #endif
        public GA_Submit Submit = new GA_Submit();
        public GA_User User = new GA_User();
    }

    private static GA_API api = new GA_API();

    /// <summary>
    /// Accessor for the sub-APIs; touching SettingsGA first forces lazy
    /// initialization of the settings asset before any API use.
    /// </summary>
    public static GA_API API
    {
        get
        {
            if (GA.SettingsGA == null)
            {
                InitAPI();
            }
            return api;
        }
        private set { }
    }

    /// <summary>
    /// Loads GA_Settings from Resources. In the editor only, creates the
    /// Resources/GameAnalytics folders and the settings asset if they do not
    /// exist yet. Any failure is logged and swallowed (best-effort init).
    /// </summary>
    private static void InitAPI()
    {
        try
        {
            _settings = (GA_Settings)Resources.Load("GameAnalytics/GA_Settings", typeof(GA_Settings));

            #if UNITY_EDITOR
            if (_settings == null)
            {
                //If the settings asset doesn't exist, then create it. We require a resources folder
                if (!Directory.Exists(Application.dataPath + "/Resources"))
                {
                    Directory.CreateDirectory(Application.dataPath + "/Resources");
                }
                if (!Directory.Exists(Application.dataPath + "/Resources/GameAnalytics"))
                {
                    Directory.CreateDirectory(Application.dataPath + "/Resources/GameAnalytics");
                    Debug.LogWarning("GameAnalytics: Resources/GameAnalytics folder is required to store settings. it was created ");
                }

                var asset = ScriptableObject.CreateInstance<GA_Settings>();

                //some hack to move the asset around: derive a folder from the
                //current editor selection, falling back to "Assets".
                string path = AssetDatabase.GetAssetPath(Selection.activeObject);
                if (path == "")
                {
                    path = "Assets";
                }
                else if (Path.GetExtension(path) != "")
                {
                    path = path.Replace(Path.GetFileName(AssetDatabase.GetAssetPath(Selection.activeObject)), "");
                }

                // NOTE(review): 'path' computed above is never used below; the
                // asset is always created at the fixed Resources path.
                string uniquePath = AssetDatabase.GenerateUniqueAssetPath("Assets/Resources/GameAnalytics/GA_Settings.asset");
                AssetDatabase.CreateAsset(asset, uniquePath);

                // GenerateUniqueAssetPath returns a different path if the
                // canonical one is taken — warn, since the asset would then
                // not be found by Resources.Load above.
                if (uniquePath != "Assets/Resources/GameAnalytics/GA_Settings.asset")
                    GA.Log("GameAnalytics: The path Assets/Resources/GameAnalytics/GA_Settings.asset used to save the settings file is not available.");

                AssetDatabase.SaveAssets();
                Debug.LogWarning("GameAnalytics: Settings file didn't exist and was created");
                Selection.activeObject = asset;

                //save reference
                _settings = asset;
            }
            #endif

            GA.InitializeQueue(); //will also start a coroutine sending messages to the server if needed
        }
        catch (Exception e)
        {
            Debug.Log("Error getting GA_Settings in InitAPI: " + e.Message);
        }
    }

    /// <summary>
    /// Setup involving other components
    /// </summary>
    private static void InitializeQueue()
    {
        GA.API.Submit.SetupKeys(GA.SettingsGA.GameKey, GA.SettingsGA.SecretKey);

        if (!Application.isPlaying)
            return; // no need to setup anything else, if we are in the editor and not playing

        if (GA.API.GenericInfo.UserID == "" && !GA.SettingsGA.CustomUserID)
        {
            Debug.LogWarning("GA UserID not set. No data will be sent.");
            return;
        }

        GA.RunCoroutine(GA.SettingsGA.CheckInternetConnectivity(true));
    }

    /// <summary>
    /// Starts a new coroutine for the specified method, using the StartCoroutine Unity function.
    /// This is used to run the submits to the GameAnalytics server in a seperate routine.
    /// </summary>
    /// <param name="routine">
    /// The method to start in the new coroutine <see cref="IEnumerator"/>
    /// </param>
    /// <returns>
    /// The new coroutine <see cref="Coroutine"/>
    /// </returns>
    public static void RunCoroutine(IEnumerator routine)
    {
        RunCoroutine(routine, () => true); //Default coroutine
    }

    /// <summary>
    /// Dispatches the routine to the editor continuation manager when not
    /// playing (edit mode), otherwise to the scene GA_Controller object.
    /// </summary>
    public static void RunCoroutine(IEnumerator routine, Func<bool> done)
    {
        if (!Application.isPlaying && Application.isEditor)
        {
            #if UNITY_EDITOR
            GA_ContinuationManager.StartCoroutine(routine, done);
            #endif
        }
        else
        {
            GA_controller.RunCoroutine(routine);
        }
    }

    // Logs only when debug mode is on, or when this is an event log and
    // DebugAddEvent is enabled.
    public static void Log(object msg, bool addEvent)
    {
        if (GA.SettingsGA.DebugMode || (addEvent && GA.SettingsGA.DebugAddEvent))
            Debug.Log(msg);
    }

    // Logs only when debug mode is enabled in the settings asset.
    public static void Log(object msg)
    {
        if (GA.SettingsGA.DebugMode)
            Debug.Log(msg);
    }

    public static void LogWarning(object msg)
    {
        Debug.LogWarning(msg);
    }

    public static void LogError(object msg)
    {
        Debug.LogError(msg);
    }

    #if UNITY_EDITOR
    /// <summary>
    /// Editor hierarchy-window callback: draws the GA logo next to any
    /// GameObject carrying a GA tracker component (offset left if a
    /// PlayMakerFSM icon is also present).
    /// </summary>
    public static void HierarchyWindowCallback(int instanceID, Rect selectionRect)
    {
        GameObject go = (GameObject)EditorUtility.InstanceIDToObject(instanceID);
        if (go != null && (go.GetComponent<GA_Tracker>() != null || go.GetComponent<GA_SystemTracker>() != null || go.GetComponent<GA_HeatMapDataFilter>() != null))
        {
            float addX = 0;
            if (go.GetComponent("PlayMakerFSM") != null)
                addX = selectionRect.height + 2;

            if (GA.SettingsGA.Logo == null)
            {
                GA.SettingsGA.Logo = (Texture2D)Resources.LoadAssetAtPath("Assets/GameAnalytics/Plugins/Examples/gaLogo.png", typeof(Texture2D));

                if (GA.SettingsGA.Logo == null)
                    GA.SettingsGA.Logo = (Texture2D)Resources.LoadAssetAtPath("Assets/Plugins/GameAnalytics/Examples/gaLogo.png", typeof(Texture2D));
            }

            Graphics.DrawTexture(new Rect(GUILayoutUtility.GetLastRect().width - selectionRect.height - 5 - addX, selectionRect.y, selectionRect.height, selectionRect.height), GA.SettingsGA.Logo);
        }
    }
    #endif
}
// TODO: Take this out once Microsoft.DotNet.Cli.Compiler.Common shows up as a NuGet package
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.CommandLine;
using System.Linq;
using Microsoft.DotNet.ProjectModel;

namespace Microsoft.DotNet.Cli.Compiler.Common
{
    /// <summary>
    /// Round-trips <see cref="CommonCompilerOptions"/> through a command-line
    /// representation: <see cref="Parse"/> reads the options out of an
    /// <see cref="ArgumentSyntax"/>, and <see cref="SerializeToArgs"/> renders
    /// a set of options back into the equivalent argument list.
    /// </summary>
    public static class CommonCompilerOptionsExtensions
    {
        // One OptionTemplate per compiler option; these define the long-form
        // argument names shared by Parse and SerializeToArgs.
        internal static readonly OptionTemplate s_definesTemplate = new OptionTemplate("define");

        internal static readonly OptionTemplate s_suppressWarningTemplate = new OptionTemplate("suppress-warning");

        internal static readonly OptionTemplate s_languageVersionTemplate = new OptionTemplate("language-version");

        internal static readonly OptionTemplate s_platformTemplate = new OptionTemplate("platform");

        internal static readonly OptionTemplate s_allowUnsafeTemplate = new OptionTemplate("allow-unsafe");

        internal static readonly OptionTemplate s_warningsAsErrorsTemplate = new OptionTemplate("warnings-as-errors");

        internal static readonly OptionTemplate s_optimizeTemplate = new OptionTemplate("optimize");

        internal static readonly OptionTemplate s_keyFileTemplate = new OptionTemplate("key-file");

        internal static readonly OptionTemplate s_delaySignTemplate = new OptionTemplate("delay-sign");

        internal static readonly OptionTemplate s_publicSignTemplate = new OptionTemplate("public-sign");

        internal static readonly OptionTemplate s_debugTypeTemplate = new OptionTemplate("debug-type");

        internal static readonly OptionTemplate s_emitEntryPointTemplate = new OptionTemplate("emit-entry-point");

        internal static readonly OptionTemplate s_generateXmlDocumentation = new OptionTemplate("generate-xml-documentation");

        internal static readonly OptionTemplate s_additionalArgumentsTemplate = new OptionTemplate("additional-argument");

        /// <summary>
        /// Registers every compiler option with <paramref name="syntax"/> and
        /// materializes the parsed values into a new
        /// <see cref="CommonCompilerOptions"/>. Options absent from the
        /// command line remain null.
        /// </summary>
        public static CommonCompilerOptions Parse(ArgumentSyntax syntax)
        {
            // Parsed values land in these locals via the ref parameters below.
            IReadOnlyList<string> defines = null;
            IReadOnlyList<string> suppressWarnings = null;
            IReadOnlyList<string> additionalArguments = null;
            string languageVersion = null;
            string platform = null;
            string debugType = null;
            string keyFile = null;
            bool? allowUnsafe = null;
            bool? warningsAsErrors = null;
            bool? optimize = null;
            bool? delaySign = null;
            bool? publicSign = null;
            bool? emitEntryPoint = null;
            bool? generateXmlDocumentation = null;

            // Tri-state flags: unspecified stays null, otherwise strict bool parse.
            Func<string, bool?> parseNullableBool = text => bool.Parse(text);

            // Registration order is preserved from the original tool so that
            // help output and parse behavior are unchanged.
            syntax.DefineOptionList(s_definesTemplate.LongName, ref defines, "Preprocessor definitions");

            syntax.DefineOptionList(s_suppressWarningTemplate.LongName, ref suppressWarnings, "Suppresses the specified warning");

            syntax.DefineOptionList(s_additionalArgumentsTemplate.LongName, ref additionalArguments, "Pass the additional argument directly to the compiler");

            syntax.DefineOption(s_debugTypeTemplate.LongName, ref debugType, "The type of PDB to emit: portable or full");

            syntax.DefineOption(s_languageVersionTemplate.LongName, ref languageVersion, "The version of the language used to compile");

            syntax.DefineOption(s_platformTemplate.LongName, ref platform, "The target platform");

            syntax.DefineOption(s_allowUnsafeTemplate.LongName, ref allowUnsafe, parseNullableBool, "Allow unsafe code");

            syntax.DefineOption(s_warningsAsErrorsTemplate.LongName, ref warningsAsErrors, parseNullableBool, "Turn all warnings into errors");

            syntax.DefineOption(s_optimizeTemplate.LongName, ref optimize, parseNullableBool, "Enable compiler optimizations");

            syntax.DefineOption(s_keyFileTemplate.LongName, ref keyFile, "Path to file containing the key to strong-name sign the output assembly");

            syntax.DefineOption(s_delaySignTemplate.LongName, ref delaySign, parseNullableBool, "Delay-sign the output assembly");

            syntax.DefineOption(s_publicSignTemplate.LongName, ref publicSign, parseNullableBool, "Public-sign the output assembly");

            syntax.DefineOption(s_emitEntryPointTemplate.LongName, ref emitEntryPoint, parseNullableBool, "Output an executable console program");

            syntax.DefineOption(s_generateXmlDocumentation.LongName, ref generateXmlDocumentation, parseNullableBool, "Generate XML documentation file");

            return new CommonCompilerOptions
            {
                Defines = defines,
                SuppressWarnings = suppressWarnings,
                LanguageVersion = languageVersion,
                Platform = platform,
                AllowUnsafe = allowUnsafe,
                WarningsAsErrors = warningsAsErrors,
                Optimize = optimize,
                KeyFile = keyFile,
                DelaySign = delaySign,
                PublicSign = publicSign,
                EmitEntryPoint = emitEntryPoint,
                GenerateXmlDocumentation = generateXmlDocumentation,
                AdditionalArguments = additionalArguments
            };
        }

        /// <summary>
        /// Renders <paramref name="options"/> as the command-line argument
        /// list that <see cref="Parse"/> would read back into an equivalent
        /// instance. Null options are omitted; list options emit one argument
        /// per element, and the output order matches the original tool.
        /// </summary>
        public static IEnumerable<string> SerializeToArgs(this CommonCompilerOptions options)
        {
            var serialized = new List<string>();

            if (options.Defines != null)
            {
                serialized.AddRange(options.Defines.Select(item => s_definesTemplate.ToLongArg(item)));
            }

            if (options.SuppressWarnings != null)
            {
                serialized.AddRange(options.SuppressWarnings.Select(item => s_suppressWarningTemplate.ToLongArg(item)));
            }

            if (options.AdditionalArguments != null)
            {
                serialized.AddRange(options.AdditionalArguments.Select(item => s_additionalArgumentsTemplate.ToLongArg(item)));
            }

            if (options.LanguageVersion != null)
            {
                serialized.Add(s_languageVersionTemplate.ToLongArg(options.LanguageVersion));
            }

            if (options.Platform != null)
            {
                serialized.Add(s_platformTemplate.ToLongArg(options.Platform));
            }

            if (options.AllowUnsafe != null)
            {
                serialized.Add(s_allowUnsafeTemplate.ToLongArg(options.AllowUnsafe));
            }

            if (options.WarningsAsErrors != null)
            {
                serialized.Add(s_warningsAsErrorsTemplate.ToLongArg(options.WarningsAsErrors));
            }

            if (options.Optimize != null)
            {
                serialized.Add(s_optimizeTemplate.ToLongArg(options.Optimize));
            }

            if (options.KeyFile != null)
            {
                serialized.Add(s_keyFileTemplate.ToLongArg(options.KeyFile));
            }

            if (options.DelaySign != null)
            {
                serialized.Add(s_delaySignTemplate.ToLongArg(options.DelaySign));
            }

            if (options.PublicSign != null)
            {
                serialized.Add(s_publicSignTemplate.ToLongArg(options.PublicSign));
            }

            if (options.EmitEntryPoint != null)
            {
                serialized.Add(s_emitEntryPointTemplate.ToLongArg(options.EmitEntryPoint));
            }

            if (options.GenerateXmlDocumentation != null)
            {
                serialized.Add(s_generateXmlDocumentation.ToLongArg(options.GenerateXmlDocumentation));
            }

            return serialized;
        }
    }
}
#define USE_OLD_CODE

using UnityEngine;
using System.Collections;
using System.Collections.Generic;

// Event hooks for external observers of this creature's internal state.
public delegate void OnAngerEvent(float anger);
public delegate void OnHungerEvent(float hunger);
public delegate void OnDangerEvent(float danger);

/// <summary>
/// Simple creature "brain": wanders, forages for Food objects of interest,
/// eats, sleeps, and takes damage from fast-moving rocks. Decision making is
/// coroutine-driven; movement is delegated to the Locomotor component.
/// </summary>
[RequireComponent(typeof(DamageIndicator), typeof(Stats))]
[RequireComponent(typeof(Rigidbody), typeof(Locomotor), typeof(Stats))]
public class Essence : MonoBehaviour
{
    public event OnAngerEvent HandleOnAnger;
    public event OnHungerEvent HandleOnHunger;
    public event OnDangerEvent HandleOnDanger;

    #if USE_OLD_CODE
    public float quickness = 5;
    private float gridLength = 30;
    private float gridHeight = 30;

    // Known points of interest in the world (pruned of destroyed/non-food entries).
    List<ObjectOfInterest> objectsOfInterest = new List<ObjectOfInterest>();

    //bool[] states = new bool[5];
    bool[] flags = new bool[6];

    float hungerLevel = 50; // refactor to store in stats
    float healthLevel = 50; // refactor to store in stats

    Transform ooiTarget, roamTarget = null; // replace with Acquirer
    #endif

    public float intellgence = 1;
    public int memoryLength = 64; //size of array, must be an int.

    private Animation animator;
    public AnimationClip animationClip;
    private AudioSource audioSource;
    private Locomotor locomotor;
    private Acquirer acquirer;
    private Stats stats;

    #region Overhead
    void Awake()
    {
        // Setup Dependancies
        animator = GetComponent<Animation>();
        audioSource = GetComponent<AudioSource>();
        locomotor = GetComponent<Locomotor>();
        acquirer = GetComponent<Acquirer>();
        stats = GetComponent<Stats>();

        // Setup Components
        if (animationClip != null)
        {
            animationClip.wrapMode = WrapMode.Loop;
            animator.clip = animationClip;
            animator.Play();
        }

        rigidbody.freezeRotation = true;
        //rigidbody.useGravity = false;

        Hungry = true;
    }

    void Start()
    {
        //transform.parent = GameObject.Find("Generated Enemies").transform;

        // Generate a list of all Objects of Interest with which to attract Badger to.
        // -- remove after successful integration of Acquirer.
        foreach (GameObject go in GameObject.FindGameObjectsWithTag("ObjectOfInterest"))
        {
            Component c = go.GetComponent<ObjectOfInterest>();
            if (c != null)
                objectsOfInterest.Add(c as ObjectOfInterest);
        }

        //bootstrap
        MakeDecision();
    }

    void OnTriggerExit(Collider trigger)
    {
        if (trigger.transform.Equals(Target))
            StartCoroutine(MoveTowardTarget(false));
    }

    void OnCollisionEnter(Collision collision)
    {
        // collision.rigidbody is null when the other collider has no Rigidbody;
        // guard against that before reading its velocity.
        if (collision.gameObject.tag == "Rock" && collision.rigidbody != null && collision.rigidbody.velocity.magnitude > 0)
            StartCoroutine(DoDamage());
    }

    /// <summary>Interrupt current behaviour; danger suppresses hunger first.</summary>
    public void MakeAlert(bool danger)
    {
        if (danger)
            Hungry = false;
        Active = false;
        MakeDecision();
    }
    #endregion

    #region Behaviour
    // Apply rock damage, flash the damage indicator, and die at 0 health.
    IEnumerator DoDamage()
    {
        healthLevel -= 10;
        //Debug.Log ("Doing damage to Badger. Health now at " +healthLevel+ ".");
        yield return StartCoroutine(GetComponent<DamageIndicator>().FlashDamage());
        GetComponent<DamageIndicator>().ResetMaterial();
        if (healthLevel <= 0)
            Destroy(gameObject);
    }

    // Walk to the current food target, then eat.
    IEnumerator Forage()
    {
        //Debug.Log (name +": foraging");
        yield return StartCoroutine(MoveTowardTarget(false));
        StartCoroutine(Eat());
    }

    // Wander to a random waypoint; optionally scan for food along the way.
    IEnumerator Roam(bool foraging)
    {
        //Debug.Log (name + (foraging?": looking for food":" looking around"));
        roamTarget = GetWaypoint();
        yield return StartCoroutine(MoveTowardTarget(foraging));
        if (ReachedTarget && foraging)
            StartCoroutine(Eat());
        else
            MakeDecision();
    }

    // Idle for a while, then reconsider.
    void Sleep()
    {
        //Debug.Log (name +": sleeping");
        Invoke("MakeDecision", (quickness * 0.75f) * 10);
    }

    // Turn toward the target, then advance until it is reached (or hunger
    // exceeds the hard cap). While foraging, divert to food when spotted.
    IEnumerator MoveTowardTarget(bool foraging)
    {
        yield return StartCoroutine(locomotor.TurnToFace(Target.position, 5)); // move turn speed to pub var deg/sec
        while (Target && !ReachedTarget && hungerLevel < 150)
        {
            hungerLevel += 0.1f; // walking makes us hungrier
            rigidbody.velocity = rigidbody.angularVelocity = Vector3.zero;
            if (foraging)
            {
                if (FindClosestFood())
                {
                    StopCoroutine("MoveTowardTarget");
                    // FIX: Forage() is a coroutine; calling it directly discarded
                    // the IEnumerator and never ran it. Start it properly.
                    StartCoroutine(Forage());
                }
            }
            // Move slower when very hungry.
            if (hungerLevel < 100)
                locomotor.GoForward(0.1f);
            else
                locomotor.GoForward(0.05f);
            yield return new WaitForSeconds(Time.deltaTime);
        }
    }

    // Consume the targeted food until no longer hungry or the food is gone.
    IEnumerator Eat()
    {
        Eating = true;
        Food food = null;
        // FIX: replaced try/catch around a possible null dereference with an
        // explicit null check; re-scan for food when the target is missing.
        if (ooiTarget != null)
            food = ooiTarget.gameObject.GetComponent<Food>();
        else
            FindClosestFood();
        while (Hungry && food)
        {
            food.Eat();
            hungerLevel--;
            // FIX: hungerLevel becomes fractional (+= 0.1f while moving), so an
            // exact == 0 comparison could never fire; use <= 0.
            if (hungerLevel <= 0)
                Hungry = false;
            //Debug.Log (name +": nomnoms -- hunger level = "+ hungerLevel);
            yield return new WaitForSeconds(quickness / 5);
        }
        Eating = false;
        MakeDecision();
    }
    #endregion

    #region AI
    /*
     * VERY simple desicion making...more to come later.
     */
    void MakeDecision()
    {
        //Debug.Log (name +" making decision");
        if (audioSource)
            audioSource.Play();
        Active = false;
        if (Hungry && FindClosestFood())
            StartCoroutine(Forage());
        else if (!Tired)
            StartCoroutine(Roam(true));
        else
        {
            if (Random.value > 0.75)
            {
                Debug.Log(name + ": thinking for " + (quickness * 0.75f) + "seconds");
                Invoke("MakeDecision", (quickness * 0.75f));
            }
            else
                Sleep();
        }
    }
    #endregion

    #region Helpers
    /*
     * Discover closest Object of Interest and set it as our target
     * so we can look at and move toward it.
     *
     * Returns (and records in KnowsWhereFoodIs) whether any visible Food was
     * found; prunes non-Food entries from the interest list as a side effect.
     */
    bool FindClosestFood()
    {
        bool returnValue = false;
        float lastDistance = Mathf.Infinity;
        CleanObjectsOfInterestList();

        // FIX: the original removed items from objectsOfInterest inside a
        // foreach over the same list, throwing InvalidOperationException
        // (swallowed by a bare catch that recursed). Iterate a snapshot and
        // mutate the live list safely.
        ObjectOfInterest[] snapshot = objectsOfInterest.ToArray();
        foreach (ObjectOfInterest ooi in snapshot)
        {
            if (ooi == null)
                continue;
            if (ooi.GetType().Equals(typeof(Food)))
            {
                if (!CanSee(ooi.transform))
                    continue;
                float distance = Vector3.Distance(transform.position, ooi.transform.position);
                if (distance < lastDistance)
                {
                    lastDistance = distance;
                    ooiTarget = ooi.transform;
                    returnValue = true;
                }
            }
            else
                objectsOfInterest.Remove(ooi); // not food: forget about it
        }
        return KnowsWhereFoodIs = returnValue;
    }

    /// <summary>True when the transform lies within a 60-degree forward view cone.</summary>
    public bool CanSee(Transform transformOfInterest)
    {
        const float halfFOV = 30f;
        Vector3 heading = (transformOfInterest.position - transform.position).normalized;
        return Vector3.Dot(transform.forward, heading) >= Mathf.Cos(Mathf.Deg2Rad * halfFOV);
    }

    // Drop destroyed (null) entries from the interest list.
    void CleanObjectsOfInterestList()
    {
        // FIX: iterate backwards — the original forward loop with RemoveAt(i)
        // and i++ skipped the element immediately after each removal.
        for (int i = objectsOfInterest.Count - 1; i >= 0; i--)
        {
            if (objectsOfInterest[i] == null)
                objectsOfInterest.RemoveAt(i);
        }
    }

    // Create a fresh random waypoint (destroying the previous one) lifted to
    // half our height so it sits on the ground plane.
    Transform GetWaypoint()
    {
        if (roamTarget)
            Destroy(roamTarget.gameObject);
        Transform t = GetRandomGameObjectOnGrid("target", "World", true).transform;
        Vector3 tpos = t.position;
        tpos.y += transform.lossyScale.y / 2;
        t.position = tpos;
        t.gameObject.tag = "Target";
        return t;
    }
    #endregion

    #region Getters and Setters
    public bool Hungry
    {
        get { return stats.memory.CurrentHunger > 25; } // 25 = magic hunger threshold value
        set { stats.memory.CurrentHunger = value ? 50 : 0; }
    }
    public bool Foraging
    {
        get { return flags[1]; }
        set { flags[1] = value; }
    }
    public bool Eating
    {
        get { return flags[2]; }
        set { flags[2] = value; }
    }
    public bool Roaming
    {
        get { return flags[3]; }
        set { flags[3] = value; }
    }
    public bool KnowsWhereFoodIs
    {
        get { return flags[4]; }
        set { flags[4] = value; }
    }
    public bool Tired
    {
        get { return flags[5]; }
        set { flags[5] = value; }
    }
    // Exactly one of Roaming/Foraging/Eating at a time counts as "active".
    // Setting false clears all three, zeroes motion, and stops coroutines.
    public bool Active
    {
        get { return Roaming ^ Foraging ^ Eating; }
        set
        {
            if (!value)
            {
                try
                {
                    Roaming = Foraging = Eating = value;
                    rigidbody.angularVelocity = rigidbody.velocity = Vector3.zero;
                    //transform.rotation = Quaternion.identity; -- what was this here for?
                    StopAllCoroutines();
                }
                catch
                {
                    Debug.LogError("Could not set Active flags (for " + name + ") to false!");
                }
            }
        }
    }
    // Within the target's own footprint (larger of its x/y extents).
    public bool ReachedTarget
    {
        get
        {
            if (Target && Vector3.Distance(transform.position, Target.transform.position) <= Mathf.Max(new float[2] { Target.lossyScale.x, Target.lossyScale.y }))
                return true;
            return false;
        }
    }
    // Food target takes priority over the roam waypoint.
    public Transform Target
    {
        get
        {
            if (ooiTarget != null)
                return ooiTarget;
            if (roamTarget != null)
                return roamTarget;
            return null;
        }
    }

    // Spawn an (optionally hidden) marker object at a random grid position,
    // parented under the named scene object.
    public GameObject GetRandomGameObjectOnGrid(string name, string parentGameObjectName, bool hideInHierarchy)
    {
        GameObject go = new GameObject();
        go.name = (name);
        go.transform.position = GetRandomVectorOnGrid();
        go.transform.parent = GameObject.Find(parentGameObjectName).transform;
        if (hideInHierarchy)
            go.hideFlags = HideFlags.HideInHierarchy;
        return go;
    }

    Vector3 GetRandomVectorOnGrid()
    {
        return new Vector3(Random.Range(-gridHeight / 2, gridHeight / 2), 0, Random.Range(-gridLength / 2, gridLength / 2));// + transform.position;
    }
    #endregion
}
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics;
using System.Runtime.Serialization;
using System.Windows.Forms;
using System.Drawing;
using JustGestures.Properties;
using JustGestures.GestureParts;

namespace JustGestures.TypeOfAction
{
    /// <summary>
    /// Gesture action that manipulates top-level windows via Win32:
    /// minimize (optionally to the notification tray), maximize/restore, close,
    /// fullscreen toggle, topmost toggle, transparency, and the shell's
    /// "arrange side by side / vertically" commands.
    /// </summary>
    [Serializable]
    class WindowOptions : BaseActionClass
    {
        // Action identifiers; these strings are persisted in user configuration,
        // so they must never change.
        public const string NAME = "window_name";
        public const string WND_MIN = "window_min";
        public const string WND_MIN_ALL = "window_min_all";
        public const string WND_MIN_TOTRAY = "window_min_totray";
        public const string WND_MAX = "window_max";
        public const string WND_CLOSE = "window_close";
        public const string WND_CLOSE_ALL = "window_close_all";
        public const string WND_FULL_SCREEN = "window_fullscreen";
        public const string WND_RETURN_TO_NORMAL = "Return Window To Normal State";
        public const string WND_TOP_MOST = "window_topmost";
        public const string WND_TRANSPARENT = "window_transparent";
        public const string WND_SHOW_SIDE_BY_SIDE = "window_show_side_by_side";
        public const string WND_SHOW_VERTICALLY = "window_show_vertically";
        public const string WND_REDRAWN = "REDRAWN";

        // Per-process state shared by all WindowOptions instances:
        // windows hidden to the tray, windows made fullscreen (with their
        // pre-fullscreen geometry/styles so they can be restored), windows
        // made topmost, and the set collected by the last "minimize all".
        static Dictionary<IntPtr, Win32.WINDOWINFO> m_wndTray;
        static Dictionary<IntPtr, Win32.WINDOWINFO> m_wndFullscreen;
        static List<IntPtr> m_wndTopMost;
        static List<IntPtr> m_openedWnds;

        // Fraction of the screen a window is resized to when "returned to normal"
        // without a saved geometry.
        const double WND_DEFAUL_RATIO = 0.66;

        // Transparency levels (percent) offered in the configuration UI.
        public static double[] TransparencyLvl = new double[] { 1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };

        /// <summary>Creates the action and registers the list of selectable sub-actions.</summary>
        public WindowOptions()
        {
            m_actions = new List<string>(new string[]
            {
                WND_MIN,
                WND_MIN_ALL,
                WND_MIN_TOTRAY,
                WND_MAX,
                WND_CLOSE,
                WND_CLOSE_ALL,
                WND_FULL_SCREEN,
                //WND_RETURN_TO_NORMAL,
                WND_TOP_MOST,
                WND_TRANSPARENT,
                WND_SHOW_SIDE_BY_SIDE,
                WND_SHOW_VERTICALLY
            });
        }

        public WindowOptions(string action)
            : base(action)
        { }

        public WindowOptions(WindowOptions action)
            : base(action)
        { }

        /// <summary>Deserialization constructor.</summary>
        public WindowOptions(SerializationInfo info, StreamingContext context)
            : base(info, context)
        { }

        public override object Clone()
        {
            return new WindowOptions(this);
        }

        /// <summary>
        /// Actions that operate on "all windows" / the shell are not sensitive to
        /// the window under the cursor; all others act on a specific window.
        /// </summary>
        public override bool IsSensitiveToMySystemWindows()
        {
            switch (m_name)
            {
                case WND_CLOSE_ALL:
                case WND_MIN_ALL:
                case WND_SHOW_SIDE_BY_SIDE:
                case WND_SHOW_VERTICALLY:
                    return false;
                    break; // NOTE(review): unreachable after return (CS0162); kept as-is
                default:
                    return true;
                    break; // NOTE(review): unreachable after return (CS0162); kept as-is
            }
        }

        // Parameterless delegate used to marshal tray-icon creation onto the UI thread.
        delegate void Emptydel();

        /// <summary>
        /// Executes the configured window action against <paramref name="activeWnd"/>.
        /// </summary>
        /// <param name="activeWnd">Handle of the window under the gesture.</param>
        /// <param name="location">Gesture location (unused by this action).</param>
        public override void ExecuteAction(IntPtr activeWnd, Point location)
        {
            // Lazily create the shared bookkeeping collections.
            if (m_wndFullscreen == null)
                m_wndFullscreen = new Dictionary<IntPtr, Win32.WINDOWINFO>();
            if (m_wndTray == null)
                m_wndTray = new Dictionary<IntPtr, Win32.WINDOWINFO>();
            if (m_wndTopMost == null)
                m_wndTopMost = new List<IntPtr>();
            if (m_openedWnds == null)
                m_openedWnds = new List<IntPtr>();

            StringBuilder buff = new StringBuilder(256);
            Win32.GetClassName(activeWnd, buff, 256);
            Debug.WriteLine(buff.ToString());
            string wndName = buff.ToString();//.ToUpper();

            // Ignore gestures over the desktop/taskbar unless the action targets
            // all windows (minimize/close all, arrange).
            if ((AppGroupOptions.IsDesktop(wndName) || AppGroupOptions.IsTaskbar(wndName))
                && (this.Name != WND_MIN_ALL && this.Name != WND_CLOSE_ALL
                    && this.Name != WND_SHOW_SIDE_BY_SIDE && this.Name != WND_SHOW_VERTICALLY))
                return;

            int x1, x2, y1, y2;
            Win32.WINDOWINFO info = new Win32.WINDOWINFO();
            Rectangle screen;
            info.cbSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(info);
            Win32.WINDOWPLACEMENT placement = new Win32.WINDOWPLACEMENT();
            placement.length = System.Runtime.InteropServices.Marshal.SizeOf(placement);
            IntPtr hwndTaskbar = Win32.FindWindow(AppGroupOptions.SYSTEM_TASKBAR, null);

            switch (this.Name)
            {
                case WND_MIN:
                    Win32.SendMessage(activeWnd, Win32.WM_SYSCOMMAND, Win32.SC_MINIMIZE, 0);
                    break;

                case WND_MIN_ALL:
                    // Toggle: if nothing is currently minimized-by-us, restore the
                    // previously collected set; otherwise minimize everything visible.
                    List<IntPtr> opendedWnds = new List<IntPtr>(m_openedWnds);
                    m_openedWnds = new List<IntPtr>();
                    Win32.EnumDelegate enumfunc = new Win32.EnumDelegate(EnumWindowsProc);
                    IntPtr hDesktop = IntPtr.Zero; // current desktop
                    bool success = Win32.EnumDesktopWindows(hDesktop, enumfunc, IntPtr.Zero);
                    if (success)
                    {
                        if (m_openedWnds.Count == 0)
                        {
                            // No visible windows left: restore the previous set in reverse
                            // order so the original z-order is approximated.
                            opendedWnds.Reverse();
                            foreach (IntPtr hwnd in opendedWnds)
                                Win32.SendMessage(hwnd, Win32.WM_SYSCOMMAND, Win32.SC_RESTORE, 0);
                        }
                        else
                        {
                            foreach (IntPtr hwnd in m_openedWnds)
                                Win32.SendMessage(hwnd, Win32.WM_SYSCOMMAND, Win32.SC_MINIMIZE, 0);
                        }
                    }
                    break;

                case WND_MIN_TOTRAY:
                    StringBuilder title = new StringBuilder(256);
                    Win32.GetWindowText(activeWnd, title, 256);
                    // NotifyIcon must be created on the UI thread, so the work is
                    // marshalled via Invoke on the engine form.
                    Emptydel del = delegate()
                    {
                        Win32.GetWindowInfo(activeWnd, ref info);
                        if (m_wndTray.ContainsKey(activeWnd))
                            m_wndTray[activeWnd] = info;
                        else
                        {
                            m_wndTray.Add(activeWnd, info);
                            NotifyIcon tray = new NotifyIcon();
                            tray.Visible = true;
                            tray.Tag = activeWnd;
                            tray.Icon = GetWindowIcon(activeWnd);
                            // NotifyIcon.Text is limited to 63 chars; truncate long titles.
                            tray.Text = title.Length >= 64 ? title.ToString().Substring(0, 60) + "..." : title.ToString();
                            tray.Click += new EventHandler(tray_Click);
                            Form_engine.TrayIcons.Add(tray);
                        }
                        Win32.ShowWindow(activeWnd, 0);//hide
                    };
                    Form_engine.Instance.Invoke(del);
                    break;

                case WND_MAX:
                    // Toggle between maximized and normal state.
                    Win32.GetWindowPlacement(activeWnd, ref placement);
                    if (placement.showCmd == Win32.SW_SHOWNORMAL)
                        Win32.SendMessage(activeWnd, Win32.WM_SYSCOMMAND, Win32.SC_MAXIMIZE, 0);
                    else if (placement.showCmd == Win32.SW_SHOWMAXIMIZED)
                        Win32.SendMessage(activeWnd, Win32.WM_SYSCOMMAND, Win32.SC_RESTORE, 0);
                    break;

                case WND_CLOSE:
                    // Post (not send) so a hung window cannot block the gesture engine.
                    Win32.PostMessage(activeWnd, Win32.WM_CLOSE, 0, 0);
                    break;

                case WND_CLOSE_ALL:
                    int jgProcessId = Form_engine.Instance.ProcessId;
                    foreach (Process p in Process.GetProcesses(System.Environment.MachineName))
                    {
                        if (p.Id != jgProcessId)
                        {
                            // some application which aren't system might have handle zero but still has to be closed! fix is required
                            if (p.MainWindowHandle != IntPtr.Zero)
                                p.CloseMainWindow();
                        }
                    }
                    break;

                case WND_FULL_SCREEN:
                    screen = Screen.FromHandle(activeWnd).Bounds;
                    Win32.GetWindowInfo(activeWnd, ref info);
                    int style = (int)info.dwStyle;
                    int fullscreen_style = (int)info.dwStyle & ~Win32.WS_CAPTION & ~Win32.WS_THICKFRAME;
                    //RETURN_TO_NORMAL: already borderless and covering the screen,
                    // or previously made fullscreen by us.
                    if ((info.rcWindow.left == screen.Left && info.rcWindow.top == screen.Top
                         && info.rcWindow.right == screen.Right && info.rcWindow.bottom == screen.Bottom
                         && style == fullscreen_style)
                        || m_wndFullscreen.ContainsKey(activeWnd))
                    {
                        goto case WND_RETURN_TO_NORMAL;
                    }
                    //MAKE_FULL_SCREEN: remember the original geometry/styles, strip the
                    // caption and sizing frame, then cover the whole screen topmost.
                    else
                    {
                        m_wndFullscreen.Add(activeWnd, info);
                        Win32.SetWindowLong(activeWnd, Win32.GWL_STYLE, ((int)info.dwStyle & ~Win32.WS_CAPTION & ~Win32.WS_THICKFRAME));
                        Win32.SetWindowPos(activeWnd, Win32.HWND_TOPMOST, screen.Left, screen.Top, screen.Width, screen.Height,
                            Win32.SWP_SHOWWINDOW | Win32.SWP_DRAWFRAME | Win32.SWP_FRAMECHANGED);
                    }
                    break;

                case WND_RETURN_TO_NORMAL:
                    // Only reachable via the goto in WND_FULL_SCREEN (the action is not
                    // offered directly in m_actions).
                    if (m_wndFullscreen.ContainsKey(activeWnd))
                    {
                        // Restore the saved pre-fullscreen geometry and styles.
                        info = m_wndFullscreen[activeWnd];
                        x1 = info.rcWindow.left;
                        y1 = info.rcWindow.top;
                        x2 = info.rcWindow.right - x1;
                        y2 = info.rcWindow.bottom - y1;
                        Win32.SetWindowLong(activeWnd, Win32.GWL_STYLE, (int)info.dwStyle);
                        Win32.SetWindowLong(activeWnd, Win32.GWL_EXSTYLE, (int)info.dwExStyle & ~Win32.WS_EX_LAYERED);
                        Win32.SetWindowPos(activeWnd, Win32.HWND_NOTOPMOST, x1, y1, x2, y2,
                            Win32.SWP_SHOWWINDOW | Win32.SWP_DRAWFRAME | Win32.SWP_FRAMECHANGED);
                        m_wndFullscreen.Remove(activeWnd);
                    }
                    else
                    {
                        // No saved geometry: center the window at WND_DEFAUL_RATIO of the screen.
                        // NOTE(review): 'info' here still holds the values read in
                        // WND_FULL_SCREEN (goto fallthrough) — verify if this case is
                        // ever enabled as a direct action.
                        screen = Screen.FromHandle(activeWnd).Bounds;
                        x1 = screen.Left + (int)(screen.Width * (1 - WND_DEFAUL_RATIO)) / 2;
                        y1 = screen.Top + (int)(screen.Height * (1 - WND_DEFAUL_RATIO)) / 2;
                        x2 = (int)Math.Floor(screen.Width * WND_DEFAUL_RATIO);
                        y2 = (int)Math.Floor(screen.Height * WND_DEFAUL_RATIO);
                        Win32.SetWindowLong(activeWnd, Win32.GWL_STYLE, (int)info.dwStyle | Win32.WS_CAPTION | Win32.WS_THICKFRAME);
                        Win32.SetWindowLong(activeWnd, Win32.GWL_EXSTYLE, (int)info.dwExStyle & ~Win32.WS_EX_LAYERED);
                        Win32.SetWindowPos(activeWnd, Win32.HWND_NOTOPMOST, x1, y1, x2, y2,
                            Win32.SWP_SHOWWINDOW | Win32.SWP_DRAWFRAME | Win32.SWP_FRAMECHANGED);
                    }
                    break;

                case WND_TOP_MOST:
                    // Toggle the topmost flag, keeping the current geometry.
                    Win32.GetWindowInfo(activeWnd, ref info);
                    x1 = info.rcWindow.left;
                    y1 = info.rcWindow.top;
                    x2 = info.rcWindow.right - x1;
                    y2 = info.rcWindow.bottom - y1;
                    if (!m_wndTopMost.Contains(activeWnd))
                    {
                        m_wndTopMost.Add(activeWnd);
                        Win32.SetWindowPos(activeWnd, Win32.HWND_TOPMOST, x1, y1, x2, y2, Win32.SWP_SHOWWINDOW);
                    }
                    else
                    {
                        m_wndTopMost.Remove(activeWnd);
                        Win32.SetWindowPos(activeWnd, Win32.HWND_NOTOPMOST, x1, y1, x2, y2, Win32.SWP_SHOWWINDOW);
                    }
                    break;

                case WND_TRANSPARENT:
                    // Details carries the configured transparency percentage.
                    SetWindowTrasparency(activeWnd, Int16.Parse(this.Details));
                    break;

                case WND_SHOW_VERTICALLY:
                    Win32.SendMessage(hwndTaskbar, Win32.WM_COMMAND, Win32.WINS_ARRANGE_VRT, 0);
                    break;

                case WND_SHOW_SIDE_BY_SIDE:
                    Win32.SendMessage(hwndTaskbar, Win32.WM_COMMAND, Win32.WINS_ARRANGE_HRZ, 0);
                    break;

                case WND_REDRAWN:
                    // Force the window to repaint. (A removed experiment with
                    // UpdateLayeredWindow used to live here.)
                    Win32.InvalidateRgn(activeWnd, IntPtr.Zero, true);
                    break;
            }
        }

        /// <summary>
        /// Check if window handle is visible opened window which should be minimized.
        /// Collects qualifying root windows into <see cref="m_openedWnds"/>.
        /// </summary>
        /// <param name="hWnd">Window handle supplied by EnumDesktopWindows</param>
        /// <param name="lParam">Unused callback parameter</param>
        /// <returns>Always true, to continue enumeration</returns>
        private static bool EnumWindowsProc(IntPtr hWnd, int lParam)
        {
            Win32.WINDOWPLACEMENT placement = new Win32.WINDOWPLACEMENT();
            placement.length = System.Runtime.InteropServices.Marshal.SizeOf(placement);
            Win32.GetWindowPlacement(hWnd, ref placement);
            StringBuilder buff = new StringBuilder(256);
            IntPtr mainWnd = Win32.GetAncestor(hWnd, Win32.GA_ROOT);
            int wndExStyle = Win32.GetWindowLong(hWnd, Win32.GWL_EXSTYLE);
            Win32.GetClassName(mainWnd, buff, 256);
            string className = buff.ToString();
            if (mainWnd != IntPtr.Zero && Win32.IsWindowVisible(mainWnd)
                && placement.showCmd != Win32.SW_SHOWMINIMIZED && placement.showCmd != Win32.SW_MINIMIZE
                && ((wndExStyle & ~Win32.WS_EX_TOOLWINDOW) == wndExStyle) // is not tool window
                && !AppGroupOptions.IsDesktop(className) && !AppGroupOptions.IsTaskbar(className)
                && !m_openedWnds.Contains(mainWnd)
                )
            {
                Win32.GetWindowText(mainWnd, buff, 256);
                string wndText = buff.ToString();
                Debug.WriteLine("ADD: " + wndText + " " + placement.showCmd);
                m_openedWnds.Add(hWnd);
            }
            return true;
        }

        /// <summary>Shows all windows which have been minimized to the system tray.</summary>
        public static void ShowAllTrayWindows()
        {
            // tray_Click removes the icon from TrayIcons, so the loop terminates.
            while (Form_engine.TrayIcons.Count > 0)
            {
                NotifyIcon tray = Form_engine.TrayIcons[0];
                tray_Click(tray, null);
            }
        }

        /// <summary>
        /// Restores a window hidden to the tray (icon click handler) and disposes the icon.
        /// </summary>
        private static void tray_Click(object sender, EventArgs e)
        {
            NotifyIcon tray = sender as NotifyIcon;
            IntPtr hWnd = (IntPtr)tray.Tag;
            Win32.WINDOWINFO info;
            if (m_wndTray.ContainsKey(hWnd))
            {
                // Restore using the geometry saved when the window was hidden.
                info = m_wndTray[hWnd];
                SetWindowFromInfo(hWnd, info);
                m_wndTray.Remove(hWnd);
            }
            else
            {
                Win32.ShowWindow(hWnd, 1);//show
                Win32.SetForegroundWindow(hWnd);
            }
            tray.Click -= new EventHandler(tray_Click);
            Form_engine.TrayIcons.Remove(tray);
            tray.Dispose();
        }

        /// <summary>Re-applies the position/size recorded in <paramref name="info"/>.</summary>
        private static void SetWindowFromInfo(IntPtr hwnd, Win32.WINDOWINFO info)
        {
            int x1 = info.rcWindow.left;
            int y1 = info.rcWindow.top;
            int x2 = info.rcWindow.right - x1;
            int y2 = info.rcWindow.bottom - y1;
            Win32.SetWindowPos(hwnd, Win32.HWND_NOTOPMOST, x1, y1, x2, y2, Win32.SWP_SHOWWINDOW);
        }

        /// <summary>Returns the toolbox/menu icon for the configured sub-action.</summary>
        public override Bitmap GetIcon(int size)
        {
            switch (this.Name)
            {
                case NAME:
                    return Resources.window;
                case WND_CLOSE:
                case WND_CLOSE_ALL:
                    return Resources.window_close;
                case WND_MAX:
                    return Resources.window_max;
                case WND_MIN:
                case WND_MIN_ALL:
                case WND_MIN_TOTRAY:
                    return Resources.window_min;
                case WND_FULL_SCREEN:
                    return Resources.window_fullscreen;
                case WND_TOP_MOST:
                    return Resources.window_topmost;
                case WND_TRANSPARENT:
                    return Resources.window_transparent;
                case WND_SHOW_VERTICALLY:
                    return Resources.window_vertical;
                case WND_SHOW_SIDE_BY_SIDE:
                    return Resources.window_side_by_side;
                default:
                    return Resources.window;
            }
        }

        /// <summary>
        /// Best-effort icon for a window: WM_GETICON first, then the icon of the
        /// owning executable, then the generic window resource.
        /// </summary>
        Icon GetWindowIcon(IntPtr hWnd)
        {
            IntPtr mainWindow = hWnd; //Win32.GetAncestor(hWnd, Win32.GA_ROOTOWNER);
            IntPtr hIcon = Win32.SendMessage(mainWindow, Win32.WM_GETICON, Win32.ICON_BIG, 0);
            if (hIcon == IntPtr.Zero)
            {
                string path = AppGroupOptions.GetPathFromHwnd(hWnd);
                if (path != string.Empty)
                {
                    try
                    {
                        return Icon.ExtractAssociatedIcon(path);
                    }
                    catch
                    {
                        // fall back to the bundled generic icon
                        hIcon = Resources.window.GetHicon();
                    }
                }
                else
                    hIcon = Resources.window.GetHicon();
            }
            return Icon.FromHandle(hIcon);
        }

        #region Transparency Methods

        /// <summary>
        /// Returns the window's alpha as a percentage (0-100), or -1 when the
        /// window is not layered (i.e. fully opaque, untouched by us).
        /// </summary>
        public static int GetWindowTransparency(IntPtr hWnd)
        {
            uint crKey = 0;
            byte bAlpha;
            uint dwFlags;
            int num = Win32.GetWindowLong(hWnd, Win32.GWL_EXSTYLE);
            // WS_EX_LAYERED not set -> attributes are meaningless.
            if (num == (num & ~Win32.WS_EX_LAYERED))
                return -1;
            Win32.GetLayeredWindowAttributes(hWnd, out crKey, out bAlpha, out dwFlags);
            return (bAlpha * 100) / 255;
        }

        /// <summary>
        /// Sets window alpha; 100 removes the layered style entirely,
        /// anything else maps the percentage onto 0-255.
        /// </summary>
        public static void SetWindowTrasparency(IntPtr hWnd, int visibility)
        {
            if (visibility == 100)
            {
                Win32.WINDOWINFO info = new Win32.WINDOWINFO();
                Win32.GetWindowInfo(hWnd, ref info);
                Win32.SetWindowLong(hWnd, Win32.GWL_EXSTYLE, (int)info.dwExStyle & ~Win32.WS_EX_LAYERED);
            }
            else
            {
                int num = Win32.GetWindowLong(hWnd, Win32.GWL_EXSTYLE);
                num |= Win32.WS_EX_LAYERED;
                Win32.SetWindowLong(hWnd, Win32.GWL_EXSTYLE, num);
                Win32.SetLayeredWindowAttributes(hWnd, 0, (byte)Math.Round((double)((((double)visibility) / 100) * 255)), Win32.LWA_ALPHA);
            }
        }

        /// <summary>
        /// Steps the transparency up or down in 10% increments,
        /// clamped to the 1..100 range.
        /// </summary>
        public static void ChangeWndTransparency(IntPtr hWnd, bool increase)
        {
            int original = GetWindowTransparency(hWnd);
            int newTransparency;
            if (increase)
            {
                if (original == -1)
                    return; // already fully opaque
                if (original == 1)
                    newTransparency = 10;
                else
                    newTransparency = original < 90 ? original + 10 : 100;
            }
            else
            {
                if (original == -1)
                    newTransparency = 90;
                else
                    newTransparency = original > 10 ? original - 10 : 1;
            }
            SetWindowTrasparency(hWnd, newTransparency);
        }

        #endregion Transparency Methods
    }
}
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // File System.Byte.cs // Automatically generated contract file. using System.Collections.Generic; using System.IO; using System.Text; using System.Diagnostics.Contracts; using System; // Disable the "this variable is not used" warning as every field would imply it. #pragma warning disable 0414 // Disable the "this variable is never assigned to". #pragma warning disable 0067 // Disable the "this event is never assigned to". #pragma warning disable 0649 // Disable the "this variable is never used". #pragma warning disable 0169 // Disable the "new keyword not required" warning. #pragma warning disable 0109 // Disable the "extern without DllImport" warning. #pragma warning disable 0626 // Disable the "could hide other member" warning, can happen on certain properties. 
#pragma warning disable 0108

namespace System
{
    // Contract-reference stub for System.Byte. Bodies are placeholders: every
    // method returns its type's default value; only the Contract.Ensures calls
    // carry information for the static checker.
    public partial struct Byte : IComparable, IFormattable, IConvertible, IComparable<byte>, IEquatable<byte>
    {
        #region Methods and constructors

        // Comparison and equality stubs.
        public int CompareTo(byte value)
        {
            return 0;
        }

        public int CompareTo(Object value)
        {
            return 0;
        }

        public bool Equals(byte obj)
        {
            return false;
        }

        public override bool Equals(Object obj)
        {
            return false;
        }

        public override int GetHashCode()
        {
            return 0;
        }

        public TypeCode GetTypeCode()
        {
            return default(TypeCode);
        }

        // Parse overloads: the Ensures clauses are the contract payload.
        public static byte Parse(string s, IFormatProvider provider)
        {
            Contract.Ensures(System.Runtime.CompilerServices.RuntimeHelpers.OffsetToStringData == 8);
            return 0;
        }

        public static byte Parse(string s, System.Globalization.NumberStyles style)
        {
            Contract.Ensures(System.Runtime.CompilerServices.RuntimeHelpers.OffsetToStringData == 8);
            return 0;
        }

        public static byte Parse(string s, System.Globalization.NumberStyles style, IFormatProvider provider)
        {
            Contract.Ensures(System.Runtime.CompilerServices.RuntimeHelpers.OffsetToStringData == 8);
            return 0;
        }

        public static byte Parse(string s)
        {
            Contract.Ensures(System.Runtime.CompilerServices.RuntimeHelpers.OffsetToStringData == 8);
            return 0;
        }

        // Explicit IConvertible stubs.
        bool System.IConvertible.ToBoolean(IFormatProvider provider)
        {
            return false;
        }

        byte System.IConvertible.ToByte(IFormatProvider provider)
        {
            return 0;
        }

        char System.IConvertible.ToChar(IFormatProvider provider)
        {
            return '\0';
        }

        DateTime System.IConvertible.ToDateTime(IFormatProvider provider)
        {
            return default(DateTime);
        }

        Decimal System.IConvertible.ToDecimal(IFormatProvider provider)
        {
            return 0m;
        }

        double System.IConvertible.ToDouble(IFormatProvider provider)
        {
            return 0.0;
        }

        short System.IConvertible.ToInt16(IFormatProvider provider)
        {
            return 0;
        }

        int System.IConvertible.ToInt32(IFormatProvider provider)
        {
            return 0;
        }

        long System.IConvertible.ToInt64(IFormatProvider provider)
        {
            return 0L;
        }

        sbyte System.IConvertible.ToSByte(IFormatProvider provider)
        {
            return 0;
        }

        float System.IConvertible.ToSingle(IFormatProvider provider)
        {
            return 0f;
        }

        Object System.IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return null;
        }

        ushort System.IConvertible.ToUInt16(IFormatProvider provider)
        {
            return 0;
        }

        uint System.IConvertible.ToUInt32(IFormatProvider provider)
        {
            return 0;
        }

        ulong System.IConvertible.ToUInt64(IFormatProvider provider)
        {
            return 0;
        }

        // Formatting stubs.
        public string ToString(string format, IFormatProvider provider)
        {
            return null;
        }

        public string ToString(string format)
        {
            return null;
        }

        public string ToString(IFormatProvider provider)
        {
            return null;
        }

        public override string ToString()
        {
            return null;
        }

        // TryParse stubs: out parameter is assigned the default value.
        public static bool TryParse(string s, System.Globalization.NumberStyles style, IFormatProvider provider, out byte result)
        {
            result = 0;
            return false;
        }

        public static bool TryParse(string s, out byte result)
        {
            result = 0;
            return false;
        }

        #endregion

        #region Fields
        public static byte MaxValue;
        public static byte MinValue;
        #endregion
    }
}
using System;
using System.Configuration;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Reflection;

namespace Paket.Bootstrapper
{
    /// <summary>
    /// Download strategy that fetches paket / paket.bootstrapper packages either
    /// from a NuGet v2 feed or from a local folder of .nupkg files.
    /// </summary>
    internal class NugetDownloadStrategy : IDownloadStrategy
    {
        /// <summary>Builds the NuGet v2 URLs used to query and download a package.</summary>
        internal class NugetApiHelper
        {
            private readonly string packageName;
            private readonly string nugetSource;

            const string NugetSourceAppSettingsKey = "NugetSource";
            const string DefaultNugetSource = "https://www.nuget.org/api/v2";
            const string GetPackageVersionTemplate = "{0}/package-versions/{1}";
            const string GetLatestFromNugetUrlTemplate = "{0}/package/{1}";
            const string GetSpecificFromNugetUrlTemplate = "{0}/package/{1}/{2}";

            /// <summary>
            /// Source precedence: explicit argument, then app.config override,
            /// then the public nuget.org v2 feed.
            /// </summary>
            public NugetApiHelper(string packageName, string nugetSource)
            {
                this.packageName = packageName;
                this.nugetSource = nugetSource
                    ?? ConfigurationManager.AppSettings[NugetSourceAppSettingsKey]
                    ?? DefaultNugetSource;
            }

            /// <summary>URL listing all versions, optionally including prereleases.</summary>
            internal string GetAllPackageVersions(bool includePrerelease)
            {
                var request = String.Format(GetPackageVersionTemplate, nugetSource, packageName);
                const string withPrereleases = "?includePrerelease=true";
                if (includePrerelease)
                    request += withPrereleases;
                return request;
            }

            /// <summary>Download URL for the latest stable version.</summary>
            internal string GetLatestPackage()
            {
                return String.Format(GetLatestFromNugetUrlTemplate, nugetSource, packageName);
            }

            /// <summary>Download URL for a specific version.</summary>
            internal string GetSpecificPackageVersion(string version)
            {
                return String.Format(GetSpecificFromNugetUrlTemplate, nugetSource, packageName, version);
            }
        }

        private PrepareWebClientDelegate PrepareWebClient { get; set; }
        private GetDefaultWebProxyForDelegate GetDefaultWebProxyFor { get; set; }
        private string Folder { get; set; }
        private string NugetSource { get; set; }
        private const string PaketNugetPackageName = "Paket";
        private const string PaketBootstrapperNugetPackageName = "Paket.Bootstrapper";

        public NugetDownloadStrategy(PrepareWebClientDelegate prepareWebClient, GetDefaultWebProxyForDelegate getDefaultWebProxyFor, string folder, string nugetSource)
        {
            PrepareWebClient = prepareWebClient;
            GetDefaultWebProxyFor = getDefaultWebProxyFor;
            Folder = folder;
            NugetSource = nugetSource;
        }

        public string Name
        {
            get { return "Nuget"; }
        }

        public IDownloadStrategy FallbackStrategy { get; set; }

        /// <summary>
        /// Returns the latest available paket version from the configured source
        /// (local folder or remote feed), or an empty string when none is found.
        /// </summary>
        /// <param name="ignorePrerelease">When true, prerelease versions are skipped.</param>
        public string GetLatestVersion(bool ignorePrerelease)
        {
            if (Directory.Exists(NugetSource))
            {
                var paketPrefix = "paket.";
                var latestLocalVersion =
                    Directory.EnumerateFiles(NugetSource, "paket.*.nupkg", SearchOption.TopDirectoryOnly).
                    Select(x => Path.GetFileNameWithoutExtension(x)).
                    // If the specified character isn't a digit, then the file
                    // likely contains the bootstrapper or paket.core
                    Where(x => x.Length > paketPrefix.Length && Char.IsDigit(x[paketPrefix.Length])).
                    Select(x => x.Substring(paketPrefix.Length)).
                    Select(SemVer.Create).
                    Where(x => !ignorePrerelease || (x.PreRelease == null)).
                    OrderBy(x => x).
                    LastOrDefault(x => !String.IsNullOrWhiteSpace(x.Original));
                return latestLocalVersion != null ? latestLocalVersion.Original : String.Empty;
            }
            else
            {
                var apiHelper = new NugetApiHelper(PaketNugetPackageName, NugetSource);
                using (var client = new WebClient())
                {
                    var versionRequestUrl = apiHelper.GetAllPackageVersions(!ignorePrerelease);
                    PrepareWebClient(client, versionRequestUrl);
                    // The endpoint returns a JSON array of version strings, e.g. ["1.0","1.1"].
                    var versions = client.DownloadString(versionRequestUrl);
                    var latestVersion = versions.
                        Trim('[', ']').
                        Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).
                        Select(x => x.Trim('"')).
                        Select(SemVer.Create).
                        OrderBy(x => x).
                        LastOrDefault(x => !String.IsNullOrWhiteSpace(x.Original));
                    return latestVersion != null ? latestVersion.Original : String.Empty;
                }
            }
        }

        /// <summary>
        /// Downloads (or copies from a local folder) the requested package version
        /// and extracts it into a fresh random sub-directory of <see cref="Folder"/>.
        /// Shared by <see cref="DownloadVersion"/> and <see cref="SelfUpdate"/>.
        /// </summary>
        /// <param name="apiHelper">URL builder for the package being fetched.</param>
        /// <param name="latestVersion">Requested version; empty means "latest".</param>
        /// <param name="nupkgFileTemplate">File-name template with a {0} version placeholder.</param>
        /// <param name="latestNupkgFileName">File name used when no version was specified.</param>
        /// <param name="silent">Suppresses console output when true.</param>
        /// <returns>The directory the package was extracted into; caller must delete it.</returns>
        private string DownloadAndExtractPackage(NugetApiHelper apiHelper, string latestVersion, string nupkgFileTemplate, string latestNupkgFileName, bool silent)
        {
            var paketDownloadUrl = apiHelper.GetLatestPackage();
            var paketFile = latestNupkgFileName;
            if (!String.IsNullOrWhiteSpace(latestVersion))
            {
                paketDownloadUrl = apiHelper.GetSpecificPackageVersion(latestVersion);
                paketFile = String.Format(nupkgFileTemplate, latestVersion);
            }

            var randomFullPath = Path.Combine(Folder, Path.GetRandomFileName());
            Directory.CreateDirectory(randomFullPath);
            var paketPackageFile = Path.Combine(randomFullPath, paketFile);

            if (Directory.Exists(NugetSource))
            {
                // Local folder source: copy the .nupkg directly.
                if (String.IsNullOrWhiteSpace(latestVersion))
                    latestVersion = GetLatestVersion(false);
                var sourcePath = Path.Combine(NugetSource, String.Format(nupkgFileTemplate, latestVersion));
                if (!silent)
                    Console.WriteLine("Starting download from {0}", sourcePath);
                File.Copy(sourcePath, paketPackageFile);
            }
            else
            {
                if (!silent)
                    Console.WriteLine("Starting download from {0}", paketDownloadUrl);
                using (var client = new WebClient())
                {
                    PrepareWebClient(client, paketDownloadUrl);
                    client.DownloadFile(paketDownloadUrl, paketPackageFile);
                }
            }

            ZipFile.ExtractToDirectory(paketPackageFile, randomFullPath);
            return randomFullPath;
        }

        /// <summary>
        /// Downloads the requested paket version and copies tools/paket.exe to
        /// <paramref name="target"/>, overwriting any existing file.
        /// </summary>
        public void DownloadVersion(string latestVersion, string target, bool silent)
        {
            var apiHelper = new NugetApiHelper(PaketNugetPackageName, NugetSource);
            var extractedDir = DownloadAndExtractPackage(apiHelper, latestVersion, "paket.{0}.nupkg", "paket.latest.nupkg", silent);
            try
            {
                var paketSourceFile = Path.Combine(extractedDir, "tools", "paket.exe");
                File.Copy(paketSourceFile, target, true);
            }
            finally
            {
                // Always remove the temporary extraction directory, even on failure
                // (the original leaked it when the copy threw).
                Directory.Delete(extractedDir, true);
            }
        }

        /// <summary>
        /// Replaces the running bootstrapper executable with the requested version,
        /// rolling back to the original file if the swap fails.
        /// </summary>
        public void SelfUpdate(string latestVersion, bool silent)
        {
            var executingAssembly = Assembly.GetExecutingAssembly();
            string target = executingAssembly.Location;
            var localVersion = BootstrapperHelper.GetLocalFileVersion(target);
            // Ordinal comparison: version strings are machine data, not UI text (CA1310).
            if (localVersion.StartsWith(latestVersion, StringComparison.Ordinal))
            {
                if (!silent)
                    Console.WriteLine("Bootstrapper is up to date. Nothing to do.");
                return;
            }

            var apiHelper = new NugetApiHelper(PaketBootstrapperNugetPackageName, NugetSource);
            var extractedDir = DownloadAndExtractPackage(apiHelper, latestVersion, "paket.bootstrapper.{0}.nupkg", "paket.bootstrapper.latest.nupkg", silent);
            try
            {
                var paketSourceFile = Path.Combine(extractedDir, "tools", "paket.bootstrapper.exe");
                var renamedPath = BootstrapperHelper.GetTempFile("oldBootstrapper");
                try
                {
                    // Move the running exe aside, then move the new one into place.
                    BootstrapperHelper.FileMove(target, renamedPath);
                    BootstrapperHelper.FileMove(paketSourceFile, target);
                    if (!silent)
                        Console.WriteLine("Self update of bootstrapper was successful.");
                }
                catch (Exception)
                {
                    // Roll back: put the original bootstrapper back before rethrowing.
                    if (!silent)
                        Console.WriteLine("Self update failed. Resetting bootstrapper.");
                    BootstrapperHelper.FileMove(renamedPath, target);
                    throw;
                }
            }
            finally
            {
                Directory.Delete(extractedDir, true);
            }
        }
    }
}
// Copyright (c) 2014-2015 Robert Rouhani <robert.rouhani@gmail.com> and other contributors (see CONTRIBUTORS file).
// Licensed under the MIT License - https://raw.github.com/Robmaister/SharpNav/master/LICENSE

using System;
using System.Collections.Generic;

using SharpNav.Collections.Generic;
using SharpNav.Geometry;
using SharpNav.Pathfinding;

#if MONOGAME
using Vector3 = Microsoft.Xna.Framework.Vector3;
#elif OPENTK
using Vector3 = OpenTK.Vector3;
#elif SHARPDX
using Vector3 = SharpDX.Vector3;
#endif

namespace SharpNav.Crowds
{
	/// <summary>
	/// A fixed-size ring of asynchronous path-find requests, advanced a few
	/// pathfinder iterations at a time by <see cref="Update"/>.
	/// </summary>
	public class PathQueue
	{
		#region Fields

		// Handle value meaning "empty slot" / "request could not be queued".
		public const byte Invalid = 0;
		private const int MaxQueue = 8;

		private PathQuery[] queue; //size = MAX_QUEUE
		private int nextHandle = 1;
		private int maxPathSize;
		private int queueHead;
		private NavMeshQuery navquery;

		#endregion

		#region Constructors

		/// <summary>
		/// Initializes the queue with empty slots and preallocated path buffers.
		/// </summary>
		/// <param name="maxPathSize">Capacity of each request's result path.</param>
		/// <param name="maxSearchNodeCount">Node budget for the internal NavMeshQuery.</param>
		/// <param name="nav">The navigation mesh queried by all requests.</param>
		public PathQueue(int maxPathSize, int maxSearchNodeCount, ref TiledNavMesh nav)
		{
			this.navquery = new NavMeshQuery(nav, maxSearchNodeCount);
			this.maxPathSize = maxPathSize;

			this.queue = new PathQuery[MaxQueue];
			for (int i = 0; i < MaxQueue; i++)
			{
				queue[i].Reference = Invalid;
				queue[i].Path = new int[maxPathSize];
			}

			this.queueHead = 0;
		}

		#endregion

		#region Methods

		/// <summary>
		/// Advances pending requests, spending at most <paramref name="maxIters"/>
		/// pathfinder iterations across the queue, and retires completed requests
		/// that were not collected within MAX_KEEP_ALIVE ticks.
		/// </summary>
		/// <param name="maxIters">Iteration budget for this update tick.</param>
		public void Update(int maxIters)
		{
			const int MAX_KEEP_ALIVE = 2; //in update ticks

			//update path request until there is nothing left to update
			//or up to maxIters pathfinder iterations have been consumed
			int iterCount = maxIters;

			for (int i = 0; i < MaxQueue; i++)
			{
				int slot = queueHead % MaxQueue;

				// PathQuery is a struct: 'q' is a copy, so every mutation below must
				// be written back to queue[slot]. (The original never wrote it back,
				// which left status/KeepAlive/PathCount changes lost and requests
				// stuck forever.)
				PathQuery q = queue[slot];

				//skip inactive requests
				if (q.Reference == Invalid)
				{
					queueHead++;
					continue;
				}

				//handle completed request
				if (q.status == Status.Success || q.status == Status.Failure)
				{
					q.KeepAlive++;
					if (q.KeepAlive > MAX_KEEP_ALIVE)
					{
						//not collected in time: free the slot
						q.Reference = Invalid;
						q.status = 0;
					}

					queue[slot] = q;
					queueHead++;
					continue;
				}

				//handle query start
				if (q.status == 0)
				{
					q.status = navquery.InitSlicedFindPath(new NavPoint(q.StartRef, q.StartPos), new NavPoint(q.EndRef, q.EndPos)).ToStatus();
				}

				//handle query in progress
				if (q.status == Status.InProgress)
				{
					int iters = 0;
					q.status = navquery.UpdateSlicedFindPath(iterCount, ref iters).ToStatus();
					iterCount -= iters;
				}

				if (q.status == Status.Success)
				{
					q.status = navquery.FinalizeSlicedFindPath(q.Path, ref q.PathCount, maxPathSize).ToStatus();
				}

				queue[slot] = q;

				if (iterCount <= 0)
					break;

				queueHead++;
			}
		}

		/// <summary>
		/// Request an empty slot in the path queue
		/// </summary>
		/// <param name="startRef">Start polygon reference</param>
		/// <param name="endRef">End polygon reference</param>
		/// <param name="startPos">Start position</param>
		/// <param name="endPos">End position</param>
		/// <returns>A request handle, or <see cref="Invalid"/> when the queue is full.</returns>
		public int Request(int startRef, int endRef, Vector3 startPos, Vector3 endPos)
		{
			//find empty slot
			int slot = -1;
			for (int i = 0; i < MaxQueue; i++)
			{
				if (queue[i].Reference == Invalid)
				{
					slot = i;
					break;
				}
			}

			//could not find slot
			if (slot == -1)
				return Invalid;

			int reference = nextHandle++;
			//skip the reserved Invalid handle on wrap-around
			if (nextHandle == Invalid)
				nextHandle++;

			PathQuery q = queue[slot];
			q.Reference = reference;
			q.StartPos = startPos;
			q.StartRef = startRef;
			q.EndPos = endPos;
			q.EndRef = endRef;
			q.status = 0;
			q.PathCount = 0;
			q.KeepAlive = 0;
			queue[slot] = q;

			return reference;
		}

		/// <summary>
		/// Get the status of the polygon in the path queue
		/// </summary>
		/// <param name="reference">The polygon reference</param>
		/// <returns>The status in the queue, or Failure when the handle is unknown.</returns>
		public Status GetRequestStatus(int reference)
		{
			for (int i = 0; i < MaxQueue; i++)
			{
				if (queue[i].Reference == reference)
					return queue[i].status;
			}

			return Status.Failure;
		}

		/// <summary>
		/// Copies the finished path for <paramref name="reference"/> into
		/// <paramref name="path"/> and frees the slot for reuse.
		/// </summary>
		/// <param name="reference">The request handle.</param>
		/// <param name="path">Destination buffer for the path.</param>
		/// <param name="pathSize">Receives the number of elements copied.</param>
		/// <param name="maxPath">Maximum number of elements to copy.</param>
		/// <returns>True when the handle was found.</returns>
		public bool GetPathResult(int reference, int[] path, ref int pathSize, int maxPath)
		{
			for (int i = 0; i < MaxQueue; i++)
			{
				if (queue[i].Reference == reference)
				{
					PathQuery q = queue[i];

					//free request for reuse
					q.Reference = Invalid;
					q.status = 0;

					//copy only the valid portion of the path
					//(the original called q.Path.CopyTo(path, 0), which copied the
					//whole maxPathSize buffer and threw when 'path' was smaller)
					int n = Math.Min(q.PathCount, maxPath);
					Array.Copy(q.Path, path, n);

					pathSize = n;

					queue[i] = q;
					return true;
				}
			}

			return false;
		}

		#endregion

		// Per-slot request state. Struct with copy semantics: always write a
		// modified copy back into 'queue'. The Path array itself is shared by
		// reference across copies.
		private struct PathQuery
		{
			public int Reference;

			//path find start and end location
			public Vector3 StartPos, EndPos;
			public int StartRef, EndRef;

			//result
			public int[] Path;
			public int PathCount;

			//state
			public Status status;
			public int KeepAlive;
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.ComponentModel;
using System.Configuration;
using System.DirectoryServices.Protocols;
using System.DirectoryServices;
using System.Net;
using System.Text;
using System.Threading;
using System.Collections;
using System.Security.Permissions;

namespace System.DirectoryServices.AccountManagement
{
    // Snapshot of the target server's identity and capabilities, populated from
    // its rootDSE by PrincipalContext.ReadServerConfig.
    internal struct ServerProperties
    {
        public string dnsHostName;
        public DomainControllerMode OsVersion;
        public ContextType contextType;
        public string[] SupportCapabilities;
        public int portSSL;
        public int portLDAP;
    };

    // Domain-controller functional level, read from the rootDSE
    // "domainControllerFunctionality" attribute (absent => Win2k).
    internal enum DomainControllerMode
    {
        Win2k = 0,
        Win2k3 = 2,
        WinLH = 3
    };

    // Well-known LDAP capability OIDs advertised in the rootDSE
    // "supportedCapabilities" attribute; used to classify the server
    // as AD (Domain) vs ADAM/AD-LDS (ApplicationDirectory).
    static internal class CapabilityMap
    {
        public const string LDAP_CAP_ACTIVE_DIRECTORY_OID = "1.2.840.113556.1.4.800";
        public const string LDAP_CAP_ACTIVE_DIRECTORY_V51_OID = "1.2.840.113556.1.4.1670";
        public const string LDAP_CAP_ACTIVE_DIRECTORY_LDAP_INTEG_OID = "1.2.840.113556.1.4.1791";
        public const string LDAP_CAP_ACTIVE_DIRECTORY_ADAM_OID = "1.2.840.113556.1.4.1851";
        public const string LDAP_CAP_ACTIVE_DIRECTORY_PARTIAL_SECRETS_OID = "1.2.840.113556.1.4.1920";
        public const string LDAP_CAP_ACTIVE_DIRECTORY_V61_OID = "1.2.840.113556.1.4.1935";
    }

    // Validates user credentials against either the local SAM store (Machine
    // context, via ADSI's WinNT provider) or an LDAP directory (Domain /
    // ApplicationDirectory contexts), caching LdapConnection instances so
    // repeated validations are cheap.
    internal sealed class CredentialValidator
    {
        private enum AuthMethod
        {
            Simple = 1,
            Negotiate = 2
        }

        private bool _fastConcurrentSupported = true;
        // Connection cache keyed by an int derived from (fastConcurrent, useSSL).
        // NOTE(review): cached LdapConnections are kept for the validator's
        // lifetime; no Dispose path is visible in this class — confirm lifetime.
        private Hashtable _connCache = new Hashtable(4);
        private LdapDirectoryIdentifier _directoryIdent;
        private object _cacheLock = new object();

        // Remembers which bind flavor last succeeded so the next Validate call
        // tries that one first.
        private AuthMethod _lastBindMethod = AuthMethod.Simple;
        private string _serverName;
        private ContextType _contextType;
        private ServerProperties _serverProperties;

        private const ContextOptions defaultContextOptionsNegotiate = ContextOptions.Signing | ContextOptions.Sealing | ContextOptions.Negotiate;
        private const ContextOptions defaultContextOptionsSimple = ContextOptions.SecureSocketLayer | ContextOptions.SimpleBind;

        public CredentialValidator(ContextType contextType, string serverName, ServerProperties serverProperties)
        {
            // Fast concurrent bind is not available against Win2k DCs.
            _fastConcurrentSupported = !(serverProperties.OsVersion == DomainControllerMode.Win2k);

            if (contextType == ContextType.Machine && serverName == null)
            {
                _serverName = Environment.MachineName;
            }
            else
            {
                _serverName = serverName;
            }

            _contextType = contextType;
            _serverProperties = serverProperties;
        }

        // Validates credentials against the SAM store by binding to
        // "WinNT://<server>,computer" through ADsOpenObject.
        // Returns false on logon failure; any other error is rethrown.
        private bool BindSam(string target, string userName, string password)
        {
            StringBuilder adsPath = new StringBuilder();
            adsPath.Append("WinNT://");
            adsPath.Append(_serverName);
            adsPath.Append(",computer");
            Guid g = new Guid("fd8256d0-fd15-11ce-abc4-02608c9e7553"); // IID_IUnknown
            object value = null;
            // always attempt secure auth..
            int authenticationType = 1;
            object unmanagedResult = null;

            try
            {
                if (Thread.CurrentThread.GetApartmentState() == ApartmentState.Unknown)
                    Thread.CurrentThread.SetApartmentState(ApartmentState.MTA);

                // We need the credentials to be in the form <machine>\\<user>
                // if they just passed user then append the machine name here.
                if (null != userName)
                {
                    int index = userName.IndexOf("\\", StringComparison.Ordinal);
                    if (index == -1)
                    {
                        userName = _serverName + "\\" + userName;
                    }
                }

                int hr = UnsafeNativeMethods.ADsOpenObject(adsPath.ToString(), userName, password, (int)authenticationType, ref g, out value);

                if (hr != 0)
                {
                    if (hr == unchecked((int)(ExceptionHelper.ERROR_HRESULT_LOGON_FAILURE)))
                    {
                        // This is the invalid credentials case. We want to return false
                        // instead of throwing an exception.
                        return false;
                    }
                    else
                    {
                        throw ExceptionHelper.GetExceptionFromErrorCode(hr);
                    }
                }

                // Touch a property to force the bind to actually be exercised.
                unmanagedResult = ((UnsafeNativeMethods.IADs)value).Get("name");
            }
            catch (System.Runtime.InteropServices.COMException e)
            {
                if (e.ErrorCode == unchecked((int)(ExceptionHelper.ERROR_HRESULT_LOGON_FAILURE)))
                {
                    return false;
                }
                else
                {
                    throw ExceptionHelper.GetExceptionFromCOMException(e);
                }
            }
            finally
            {
                if (value != null)
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(value);
            }

            return true;
        }

        // Validates credentials with an LDAP bind. Connections are cached by a
        // key derived from (fastConcurrent, useSSL); a failed bind throws
        // LdapException, which the callers interpret.
        private bool BindLdap(NetworkCredential creds, ContextOptions contextOptions)
        {
            LdapConnection current = null;
            bool useSSL = (ContextOptions.SecureSocketLayer & contextOptions) > 0;

            if (_contextType == ContextType.ApplicationDirectory)
            {
                _directoryIdent = new LdapDirectoryIdentifier(_serverProperties.dnsHostName, useSSL ? _serverProperties.portSSL : _serverProperties.portLDAP);
            }
            else
            {
                _directoryIdent = new LdapDirectoryIdentifier(_serverName, useSSL ? LdapConstants.LDAP_SSL_PORT : LdapConstants.LDAP_PORT);
            }

            bool attemptFastConcurrent = useSSL && _fastConcurrentSupported;
            int index = Convert.ToInt32(attemptFastConcurrent) * 2 + Convert.ToInt32(useSSL);

            // Double-checked cache population guarded by _cacheLock.
            if (!_connCache.Contains(index))
            {
                lock (_cacheLock)
                {
                    if (!_connCache.Contains(index))
                    {
                        current = new LdapConnection(_directoryIdent);
                        // First attempt to turn on SSL
                        current.SessionOptions.SecureSocketLayer = useSSL;

                        if (attemptFastConcurrent)
                        {
                            try
                            {
                                current.SessionOptions.FastConcurrentBind();
                            }
                            catch (PlatformNotSupportedException)
                            {
                                current.Dispose();
                                current = null;
                                _fastConcurrentSupported = false;
                                // Re-key the cache entry now that FSB is off.
                                index = Convert.ToInt32(useSSL);
                                current = new LdapConnection(_directoryIdent);
                                // We have fallen back to another connection so we need to set SSL again.
                                current.SessionOptions.SecureSocketLayer = useSSL;
                            }
                        }

                        _connCache.Add(index, current);
                    }
                    else
                    {
                        current = (LdapConnection)_connCache[index];
                    }
                }
            }
            else
            {
                current = (LdapConnection)_connCache[index];
            }

            // If we are performing fastConcurrentBind there is no need to prevent multithreadaccess. FSB is thread safe and multi cred safe
            // FSB also always has the same contextoptions so there is no need to lock the code that is modifying the current connection
            if (attemptFastConcurrent && _fastConcurrentSupported)
            {
                lockedLdapBind(current, creds, contextOptions);
            }
            else
            {
                lock (_cacheLock)
                {
                    lockedLdapBind(current, creds, contextOptions);
                }
            }
            return true;
        }

        // Applies the auth type / signing / sealing options from contextOptions
        // to the connection and binds. Null username AND password means "bind
        // as the current security context".
        private void lockedLdapBind(LdapConnection current, NetworkCredential creds, ContextOptions contextOptions)
        {
            current.AuthType = ((ContextOptions.SimpleBind & contextOptions) > 0 ? AuthType.Basic : AuthType.Negotiate);
            current.SessionOptions.Signing = ((ContextOptions.Signing & contextOptions) > 0 ? true : false);
            current.SessionOptions.Sealing = ((ContextOptions.Sealing & contextOptions) > 0 ? true : false);

            if ((null == creds.UserName) && (null == creds.Password))
            {
                current.Bind();
            }
            else
            {
                current.Bind(creds);
            }
        }

        // Validates using the "best" method: tries the bind flavor that last
        // succeeded first (simple/SSL vs negotiate), falling back to the other
        // before declaring failure.
        public bool Validate(string userName, string password)
        {
            NetworkCredential networkCredential = new NetworkCredential(userName, password);

            // empty username and password on the local box
            // causes authentication to succeed. If the username is empty we should just fail it
            // here.
            if (userName != null && userName.Length == 0)
                return false;

            if (_contextType == ContextType.Domain || _contextType == ContextType.ApplicationDirectory)
            {
                try
                {
                    if (_lastBindMethod == AuthMethod.Simple && (_fastConcurrentSupported || _contextType == ContextType.ApplicationDirectory))
                    {
                        try
                        {
                            BindLdap(networkCredential, defaultContextOptionsSimple);
                            _lastBindMethod = AuthMethod.Simple;
                            return true;
                        }
                        catch (LdapException)
                        {
                            // we don't return false here even if we failed with ERROR_LOGON_FAILURE. We must check Negotiate
                            // because there might be cases in which SSL fails and Negotiate succeeds
                        }

                        BindLdap(networkCredential, defaultContextOptionsNegotiate);
                        _lastBindMethod = AuthMethod.Negotiate;
                        return true;
                    }
                    else
                    {
                        try
                        {
                            BindLdap(networkCredential, defaultContextOptionsNegotiate);
                            _lastBindMethod = AuthMethod.Negotiate;
                            return true;
                        }
                        catch (LdapException)
                        {
                            // we don't return false here even if we failed with ERROR_LOGON_FAILURE. We must check SSL
                            // because there might be cases in which Negotiate fails and SSL succeeds
                        }

                        BindLdap(networkCredential, defaultContextOptionsSimple);
                        _lastBindMethod = AuthMethod.Simple;
                        return true;
                    }
                }
                catch (LdapException ldapex)
                {
                    // If we got here it means that both SSL and Negotiate failed. Tough luck.
                    if (ldapex.ErrorCode == ExceptionHelper.ERROR_LOGON_FAILURE)
                    {
                        return false;
                    }

                    throw;
                }
            }
            else
            {
                Debug.Assert(_contextType == ContextType.Machine);
                return (BindSam(_serverName, userName, password));
            }
        }

        // Validates using exactly the bind options the caller supplied, with no
        // fallback between simple and negotiate.
        public bool Validate(string userName, string password, ContextOptions connectionMethod)
        {
            // empty username and password on the local box
            // causes authentication to succeed. If the username is empty we should just fail it
            // here.
            if (userName != null && userName.Length == 0)
                return false;

            if (_contextType == ContextType.Domain || _contextType == ContextType.ApplicationDirectory)
            {
                try
                {
                    NetworkCredential networkCredential = new NetworkCredential(userName, password);
                    BindLdap(networkCredential, connectionMethod);
                    return true;
                }
                catch (LdapException ldapex)
                {
                    if (ldapex.ErrorCode == ExceptionHelper.ERROR_LOGON_FAILURE)
                    {
                        return false;
                    }

                    throw;
                }
            }
            else
            {
                return (BindSam(_serverName, userName, password));
            }
        }
    }
    // ********************************************

    public class PrincipalContext : IDisposable
    {
        //
        // Public Constructors
        //

        public PrincipalContext(ContextType contextType) : this(contextType, null, null, PrincipalContext.GetDefaultOptionForStore(contextType), null, null)
        { }

        public PrincipalContext(ContextType contextType, string name) : this(contextType, name, null, PrincipalContext.GetDefaultOptionForStore(contextType), null, null)
        { }

        public PrincipalContext(ContextType contextType, string name, string container) : this(contextType, name, container, PrincipalContext.GetDefaultOptionForStore(contextType), null, null)
        { }

        public PrincipalContext(ContextType contextType, string name, string container, ContextOptions options) : this(contextType, name, container, options, null, null)
        { }

        public PrincipalContext(ContextType contextType, string name, string userName, string password) : this(contextType, name, null, PrincipalContext.GetDefaultOptionForStore(contextType), userName, password)
        { }

        public PrincipalContext(ContextType contextType, string name, string container, string userName, string password) : this(contextType, name, container, PrincipalContext.GetDefaultOptionForStore(contextType), userName, password)
        { }

        // Designated constructor: validates the argument combination, stores
        // the settings, probes the server (DoServerVerifyAndPropRetrieval) and
        // builds the credential validator. All other ctors chain here.
        public PrincipalContext(
            ContextType contextType, string name, string container, ContextOptions options, string userName, string password)
        {
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering ctor");

            // Username and password must be supplied together (or not at all).
            if ((userName == null && password != null) ||
(userName != null && password == null))
                throw new ArgumentException(SR.ContextBadUserPwdCombo);

            // Reject any option bits outside the known set.
            if ((options & ~(ContextOptions.Signing | ContextOptions.Negotiate | ContextOptions.Sealing | ContextOptions.SecureSocketLayer | ContextOptions.SimpleBind | ContextOptions.ServerBind)) != 0)
                throw new InvalidEnumArgumentException(nameof(options), (int)options, typeof(ContextOptions));

            if (contextType == ContextType.Machine && ((options & ~ContextOptions.Negotiate) != 0))
            {
                throw new ArgumentException(SR.InvalidContextOptionsForMachine);
            }

            // Domain/AD contexts must specify exactly one of Negotiate or SimpleBind.
            if ((contextType == ContextType.Domain || contextType == ContextType.ApplicationDirectory) &&
                (((options & (ContextOptions.Negotiate | ContextOptions.SimpleBind)) == 0) ||
                (((options & (ContextOptions.Negotiate | ContextOptions.SimpleBind)) == ((ContextOptions.Negotiate | ContextOptions.SimpleBind))))))
            {
                throw new ArgumentException(SR.InvalidContextOptionsForAD);
            }

            if ((contextType != ContextType.Machine) &&
                (contextType != ContextType.Domain) &&
                (contextType != ContextType.ApplicationDirectory)
#if TESTHOOK
                && (contextType != ContextType.Test)
#endif
                )
            {
                throw new InvalidEnumArgumentException(nameof(contextType), (int)contextType, typeof(ContextType));
            }

            if ((contextType == ContextType.Machine) && (container != null))
                throw new ArgumentException(SR.ContextNoContainerForMachineCtx);

            if ((contextType == ContextType.ApplicationDirectory) && ((string.IsNullOrEmpty(container)) || (string.IsNullOrEmpty(name))))
                throw new ArgumentException(SR.ContextNoContainerForApplicationDirectoryCtx);

            _contextType = contextType;
            _name = name;
            _container = container;
            _options = options;

            _username = userName;
            _password = password;

            // Contact the server now to read its rootDSE properties and verify
            // the detected store type matches the requested contextType.
            DoServerVerifyAndPropRetrieval();

            _credValidate = new CredentialValidator(contextType, name, _serverProperties);
        }

        //
        // Public Properties
        //

        public ContextType ContextType
        {
            get
            {
                CheckDisposed();

                return _contextType;
            }
        }

        public string Name
        {
            get
            {
                CheckDisposed();

                return _name;
            }
        }

        public string Container
        {
            get
            {
                CheckDisposed();

                return _container;
            }
        }

        public string UserName
        {
            get
            {
                CheckDisposed();

                return _username;
            }
        }

        public ContextOptions Options
        {
            get
            {
                CheckDisposed();

                return _options;
            }
        }

        public string ConnectedServer
        {
            get
            {
                CheckDisposed();

                // Lazy-connects on first access.
                Initialize();

                // Unless we're not initialized, connectedServer should not be null
                Debug.Assert(_connectedServer != null || _initialized == false);

                // connectedServer should never be an empty string
                Debug.Assert(_connectedServer == null || _connectedServer.Length != 0);

                return _connectedServer;
            }
        }

        /// <summary>
        /// Validate the passed credentials against the directory supplied.
        /// This function will use the best determined method to do the evaluation.
        /// </summary>
        public bool ValidateCredentials(string userName, string password)
        {
            CheckDisposed();

            if ((userName == null && password != null) || (userName != null && password == null))
                throw new ArgumentException(SR.ContextBadUserPwdCombo);

#if TESTHOOK
            if ( contextType == ContextType.Test )
            {
                return true;
            }
#endif

            return (_credValidate.Validate(userName, password));
        }

        /// <summary>
        /// Validate the passed credentials against the directory supplied.
        /// The supplied options will determine the directory method for credential validation.
        /// </summary>
        public bool ValidateCredentials(string userName, string password, ContextOptions options)
        {
            // Perform credential validation using fast concurrent bind...
            CheckDisposed();

            if ((userName == null && password != null) || (userName != null && password == null))
                throw new ArgumentException(SR.ContextBadUserPwdCombo);

            if (options != ContextOptions.Negotiate && _contextType == ContextType.Machine)
                throw new ArgumentException(SR.ContextOptionsNotValidForMachineStore);

#if TESTHOOK
            if ( contextType == ContextType.Test )
            {
                return true;
            }
#endif

            return (_credValidate.Validate(userName, password, options));
        }

        //
        // Private methods for initialization
        //

        // One-time, lock-protected setup of the store contexts for this
        // context type (double-checked under _initializationLock).
        private void Initialize()
        {
            if (!_initialized)
            {
                lock (_initializationLock)
                {
                    if (_initialized)
                        return;

                    GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Initializing Context");

                    switch (_contextType)
                    {
                        case ContextType.Domain:
                            DoDomainInit();
                            break;

                        case ContextType.Machine:
                            DoMachineInit();
                            break;

                        case ContextType.ApplicationDirectory:
                            DoApplicationDirectoryInit();
                            break;
#if TESTHOOK
                        case ContextType.Test:
                            // do nothing
                            break;
#endif
                        default:
                            // Internal error
                            Debug.Fail("PrincipalContext.Initialize: fell off end looking for " + _contextType.ToString());
                            break;
                    }

                    _initialized = true;
                }
            }
        }

        private void DoApplicationDirectoryInit()
        {
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoApplicationDirecotryInit");

            Debug.Assert(_contextType == ContextType.ApplicationDirectory);

            if (_container == null)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoApplicationDirecotryInit: using no-container path");
                DoLDAPDirectoryInitNoContainer();
            }
            else
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoApplicationDirecotryInit: using container path");
                DoLDAPDirectoryInit();
            }
        }

        // Builds a single SAM store context over "WinNT://<host>,computer" and
        // uses it for queries, users, groups, and computers alike.
        private void DoMachineInit()
        {
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoMachineInit");

            Debug.Assert(_contextType == ContextType.Machine);
            Debug.Assert(_container == null);

            DirectoryEntry de = null;

            try
            {
                string hostname = _name;

                if (hostname == null)
                    hostname = Utils.GetComputerFlatName();

                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoMachineInit: hostname is " + hostname);

                // use the options they specified
                AuthenticationTypes authTypes = SDSUtils.MapOptionsToAuthTypes(_options);
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoMachineInit: authTypes is " + authTypes.ToString());

                de = new DirectoryEntry("WinNT://" + hostname + ",computer", _username, _password, authTypes);

                // Force ADSI to connect so we detect if the server is down or if the servername is invalid
                de.RefreshCache();

                StoreCtx storeCtx = CreateContextFromDirectoryEntry(de);

                _queryCtx = storeCtx;
                _userCtx = storeCtx;
                _groupCtx = storeCtx;
                _computerCtx = storeCtx;

                _connectedServer = hostname;
                de = null;
            }
            catch (Exception e)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Error, "PrincipalContext",
                                        "DoMachineInit: caught exception of type " + e.GetType().ToString() + " and message " + e.Message);

                // Cleanup the DE on failure
                if (de != null)
                    de.Dispose();

                throw;
            }
        }

        private void DoDomainInit()
        {
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoDomainInit");

            Debug.Assert(_contextType == ContextType.Domain);

            if (_container == null)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoDomainInit: using no-container path");
                DoLDAPDirectoryInitNoContainer();
                return;
            }
            else
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoDomainInit: using container path");
                DoLDAPDirectoryInit();
                return;
            }
        }

        // Reads the server's rootDSE properties (for LDAP stores) and throws if
        // the detected store type does not match the requested _contextType.
        private void DoServerVerifyAndPropRetrieval()
        {
            _serverProperties = new ServerProperties();
            if (_contextType == ContextType.ApplicationDirectory || _contextType == ContextType.Domain)
            {
                ReadServerConfig(_name, ref _serverProperties);

                if (_serverProperties.contextType != _contextType)
                {
                    throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, SR.PassedContextTypeDoesNotMatchDetectedType, _serverProperties.contextType.ToString()));
                }
            }
        }

        // Builds a single LDAP store context rooted at the configured container
        // and shares it across query/user/group/computer roles.
        private void DoLDAPDirectoryInit()
        {
            // use the servername if they gave us one, else let ADSI figure it out
            string serverName = "";

            if (_name != null)
            {
                if (_contextType == ContextType.ApplicationDirectory)
                {
                    serverName = _serverProperties.dnsHostName + ":" +
                        ((ContextOptions.SecureSocketLayer & _options) > 0 ? _serverProperties.portSSL : _serverProperties.portLDAP);
                }
                else
                {
                    serverName = _name;
                }

                serverName += "/";
            }

            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInit: serverName is " + serverName);

            // use the options they specified
            AuthenticationTypes authTypes = SDSUtils.MapOptionsToAuthTypes(_options);
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInit: authTypes is " + authTypes.ToString());

            DirectoryEntry de = new DirectoryEntry("LDAP://" + serverName + _container, _username, _password, authTypes);

            try
            {
                // Set the password port to the ssl port read off of the rootDSE. Without this
                // password change/set won't work when we connect without SSL and ADAM is running
                // on non-standard port numbers. We have already verified directory connectivity at this point
                // so this should always succeed.
                if (_serverProperties.portSSL > 0)
                {
                    de.Options.PasswordPort = _serverProperties.portSSL;
                }

                StoreCtx storeCtx = CreateContextFromDirectoryEntry(de);

                _queryCtx = storeCtx;
                _userCtx = storeCtx;
                _groupCtx = storeCtx;
                _computerCtx = storeCtx;

                _connectedServer = ADUtils.GetServerName(de);
                de = null;
            }
            catch (System.Runtime.InteropServices.COMException e)
            {
                throw ExceptionHelper.GetExceptionFromCOMException(e);
            }
            catch (Exception e)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Error, "PrincipalContext",
                                        "DoLDAPDirectoryInit: caught exception of type " + e.GetType().ToString() + " and message " + e.Message);

                throw;
            }
            finally
            {
                // Cleanup the DE on failure
                if (de != null)
                    de.Dispose();
            }
        }

        // No container supplied: discover the domain's default Users and
        // Computers containers via the rootDSE + wellKnownObjects attribute and
        // build separate store contexts for creation vs querying.
        private void DoLDAPDirectoryInitNoContainer()
        {
            // Well-known GUIDs identifying the default Users and Computers
            // containers inside the "wellKnownObjects" attribute.
            byte[] USERS_CONTAINER_GUID = new byte[] { 0xa9, 0xd1, 0xca, 0x15, 0x76, 0x88, 0x11, 0xd1, 0xad, 0xed, 0x00, 0xc0, 0x4f, 0xd8, 0xd5, 0xcd };
            byte[] COMPUTERS_CONTAINER_GUID = new byte[] { 0xaa, 0x31, 0x28, 0x25, 0x76, 0x88, 0x11, 0xd1, 0xad, 0xed, 0x00, 0xc0, 0x4f, 0xd8, 0xd5, 0xcd };

            // The StoreCtxs that will be used in the PrincipalContext, and their associated DirectoryEntry objects.
            DirectoryEntry deUserGroupOrg = null;
            DirectoryEntry deComputer = null;
            DirectoryEntry deBase = null;

            ADStoreCtx storeCtxUserGroupOrg = null;
            ADStoreCtx storeCtxComputer = null;
            ADStoreCtx storeCtxBase = null;

            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Entering DoLDAPDirectoryInitNoContainer");

            //
            // Build a DirectoryEntry that represents the root of the domain.
            //

            // Use the RootDSE to find the default naming context
            DirectoryEntry deRootDse = null;
            string adsPathBase;

            // use the servername if they gave us one, else let ADSI figure it out
            string serverName = "";
            if (_name != null)
            {
                serverName = _name + "/";
            }

            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: serverName is " + serverName);

            // use the options they specified
            AuthenticationTypes authTypes = SDSUtils.MapOptionsToAuthTypes(_options);
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: authTypes is " + authTypes.ToString());

            try
            {
                deRootDse = new DirectoryEntry("LDAP://" + serverName + "rootDse", _username, _password, authTypes);

                // This will also detect if the server is down or nonexistent
                string domainNC = (string)deRootDse.Properties["defaultNamingContext"][0];
                adsPathBase = "LDAP://" + serverName + domainNC;

                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: domainNC is " + domainNC);
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "DoLDAPDirectoryInitNoContainer: adsPathBase is " + adsPathBase);
            }
            finally
            {
                // Don't allow the DE to leak
                if (deRootDse != null)
                    deRootDse.Dispose();
            }

            try
            {
                // Build a DE for the root of the domain using the retrieved naming context
                deBase = new DirectoryEntry(adsPathBase, _username, _password, authTypes);

                // Set the password port to the ssl port read off of the rootDSE. Without this
                // password change/set won't work when we connect without SSL and ADAM is running
                // on non-standard port numbers. We have already verified directory connectivity at this point
                // so this should always succeed.
                if (_serverProperties.portSSL > 0)
                {
                    deBase.Options.PasswordPort = _serverProperties.portSSL;
                }

                //
                // Use the wellKnownObjects attribute to determine the default location
                // for users and computers.
                //
                string adsPathUserGroupOrg = null;
                string adsPathComputer = null;

                PropertyValueCollection wellKnownObjectValues = deBase.Properties["wellKnownObjects"];

                foreach (UnsafeNativeMethods.IADsDNWithBinary value in wellKnownObjectValues)
                {
                    if (Utils.AreBytesEqual(USERS_CONTAINER_GUID, (byte[])value.BinaryValue))
                    {
                        Debug.Assert(adsPathUserGroupOrg == null);
                        adsPathUserGroupOrg = "LDAP://" + serverName + value.DNString;

                        GlobalDebug.WriteLineIf(
                            GlobalDebug.Info,
                            "PrincipalContext",
                            "DoLDAPDirectoryInitNoContainer: found USER, adsPathUserGroupOrg is " + adsPathUserGroupOrg);
                    }

                    // Is it the computer container?
                    if (Utils.AreBytesEqual(COMPUTERS_CONTAINER_GUID, (byte[])value.BinaryValue))
                    {
                        Debug.Assert(adsPathComputer == null);
                        adsPathComputer = "LDAP://" + serverName + value.DNString;

                        GlobalDebug.WriteLineIf(
                            GlobalDebug.Info,
                            "PrincipalContext",
                            "DoLDAPDirectoryInitNoContainer: found COMPUTER, adsPathComputer is " + adsPathComputer);
                    }
                }

                if ((adsPathUserGroupOrg == null) || (adsPathComputer == null))
                {
                    // Something's wrong with the domain, it's not exposing the proper
                    // well-known object fields.
                    throw new PrincipalOperationException(SR.ContextNoWellKnownObjects);
                }

                //
                // Build DEs for the Users and Computers containers.
                // The Users container will also be used as the default for Groups.
                // The reason there are different contexts for groups, users and computers is so that
                // when a principal is created it will go into the appropriate default container. This is so users don't
                // by default create principals in the root of their directory. When a search happens the base context is used so that
                // the whole directory will be covered.
                //
                deUserGroupOrg = new DirectoryEntry(adsPathUserGroupOrg, _username, _password, authTypes);
                deComputer = new DirectoryEntry(adsPathComputer, _username, _password, authTypes);

                StoreCtx userStore = CreateContextFromDirectoryEntry(deUserGroupOrg);

                _userCtx = userStore;
                _groupCtx = userStore;
                deUserGroupOrg = null; // since we handed off ownership to the StoreCtx

                _computerCtx = CreateContextFromDirectoryEntry(deComputer);

                deComputer = null;

                _queryCtx = CreateContextFromDirectoryEntry(deBase);

                _connectedServer = ADUtils.GetServerName(deBase);

                deBase = null;
            }
            catch (Exception e)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Error, "PrincipalContext",
                                        "DoLDAPDirectoryInitNoContainer: caught exception of type " + e.GetType().ToString() + " and message " + e.Message);

                // Cleanup on failure. Once a DE has been successfully handed off to a ADStoreCtx,
                // that ADStoreCtx will handle Dispose()'ing it
                if (deUserGroupOrg != null)
                    deUserGroupOrg.Dispose();

                if (deComputer != null)
                    deComputer.Dispose();

                if (deBase != null)
                    deBase.Dispose();

                if (storeCtxUserGroupOrg != null)
                    storeCtxUserGroupOrg.Dispose();

                if (storeCtxComputer != null)
                    storeCtxComputer.Dispose();

                if (storeCtxBase != null)
                    storeCtxBase.Dispose();

                throw;
            }
        }

#if TESTHOOK

        static public PrincipalContext Test
        {
            get
            {
                StoreCtx storeCtx = new TestStoreCtx(true);
                PrincipalContext ctx = new PrincipalContext(ContextType.Test);
                ctx.SetupContext(storeCtx);
                ctx.initialized = true;

                storeCtx.OwningContext = ctx;
                return ctx;
            }
        }

        static public PrincipalContext TestAltValidation
        {
            get
            {
                TestStoreCtx storeCtx = new TestStoreCtx(true);
                storeCtx.SwitchValidationMode = true;
                PrincipalContext ctx = new PrincipalContext(ContextType.Test);
                ctx.SetupContext(storeCtx);
                ctx.initialized = true;

                storeCtx.OwningContext = ctx;
                return ctx;
            }
        }

        static public PrincipalContext TestNoTimeLimited
        {
            get
            {
                TestStoreCtx storeCtx = new TestStoreCtx(true);
                storeCtx.SupportTimeLimited = false;
                PrincipalContext ctx = new PrincipalContext(ContextType.Test);
                ctx.SetupContext(storeCtx);
                ctx.initialized = true;

                storeCtx.OwningContext = ctx;
                return ctx;
            }
        }

#endif // TESTHOOK

        //
        // Public Methods
        //

        public void Dispose()
        {
            if (!_disposed)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "Dispose: disposing");

                // Note that we may end up calling Dispose multiple times on the same
                // StoreCtx (since, for example, it might be that userCtx == groupCtx).
                // This is okay, since StoreCtxs allow multiple Dispose() calls, and ignore
                // all but the first call.

                if (_userCtx != null)
                    _userCtx.Dispose();

                if (_groupCtx != null)
                    _groupCtx.Dispose();

                if (_computerCtx != null)
                    _computerCtx.Dispose();

                if (_queryCtx != null)
                    _queryCtx.Dispose();

                _disposed = true;
                GC.SuppressFinalize(this);
            }
        }

        //
        // Private Implementation
        //

        // Are we initialized?
        private bool _initialized = false;
        private object _initializationLock = new object();

        // Have we been disposed?
        private bool _disposed = false;
        internal bool Disposed { get { return _disposed; } }

        // Our constructor parameters.
        // NOTE(review): the password is held as a plain string — there is no
        // encryption nor zeroing out the string when you're done with it.
        private string _username;
        private string _password;

        // Cached connections to the server for fast credential validation
        private CredentialValidator _credValidate;
        private ServerProperties _serverProperties;

        internal ServerProperties ServerInformation
        {
            get
            {
                return _serverProperties;
            }
        }

        private string _name;
        private string _container;
        private ContextOptions _options;
        private ContextType _contextType;

        // The server we're connected to
        private string _connectedServer = null;

        // The reason there are different contexts for groups, users and computers is so that
        // when a principal is created it will go into the appropriate default container. This is so users don't
        // by default create principals in the root of their directory. When a search happens the base context is used so that
        // the whole directory will be covered. User and Computers default are the same ( USERS container ), Computers are
        // put under COMPUTERS container. If a container is specified then all the contexts will point to the same place.
        // The StoreCtx to be used when inserting a new User/Computer/Group Principal into this
        // PrincipalContext.
        private StoreCtx _userCtx = null;
        private StoreCtx _computerCtx = null;
        private StoreCtx _groupCtx = null;

        // The StoreCtx to be used when querying against this PrincipalContext for Principals
        private StoreCtx _queryCtx = null;

        internal StoreCtx QueryCtx
        {
            get
            {
                Initialize();
                return _queryCtx;
            }

            set
            {
                _queryCtx = value;
            }
        }

        // Queries the server's rootDSE over System.DirectoryServices.Protocols
        // and fills in 'properties' (host name, store type, OS level, ports).
        // Throws PrincipalServerDownException if the search fails.
        internal void ReadServerConfig(string serverName, ref ServerProperties properties)
        {
            string[] proplist = new string[] { "msDS-PortSSL", "msDS-PortLDAP", "domainControllerFunctionality", "dnsHostName", "supportedCapabilities" };
            LdapConnection ldapConnection = null;

            try
            {
                bool useSSL = (_options & ContextOptions.SecureSocketLayer) > 0;

                if (useSSL && _contextType == ContextType.Domain)
                {
                    LdapDirectoryIdentifier directoryid = new LdapDirectoryIdentifier(serverName, LdapConstants.LDAP_SSL_PORT);
                    ldapConnection = new LdapConnection(directoryid);
                }
                else
                {
                    ldapConnection = new LdapConnection(serverName);
                }

                ldapConnection.AutoBind = false;
                // If SSL was enabled on the initial connection then turn it on for the search.
                // This is required bc the appended port number will be SSL and we don't know what port LDAP is running on.
                ldapConnection.SessionOptions.SecureSocketLayer = useSSL;

                string baseDN = null; // specify base as null for RootDSE search
                string ldapSearchFilter = "(objectClass=*)";
                SearchResponse searchResponse = null;

                SearchRequest searchRequest = new SearchRequest(baseDN, ldapSearchFilter, System.DirectoryServices.Protocols
                    .SearchScope.Base, proplist);

                try
                {
                    searchResponse = (SearchResponse)ldapConnection.SendRequest(searchRequest);
                }
                catch (LdapException ex)
                {
                    throw new PrincipalServerDownException(SR.ServerDown, ex);
                }

                // Fill in the struct with the casted properties from the search results.
                // there will always be only 1 item on the rootDSE so all entry indexes are 0
                properties.dnsHostName = (string)searchResponse.Entries[0].Attributes["dnsHostName"][0];
                properties.SupportCapabilities = new string[searchResponse.Entries[0].Attributes["supportedCapabilities"].Count];
                for (int i = 0; i < searchResponse.Entries[0].Attributes["supportedCapabilities"].Count; i++)
                {
                    properties.SupportCapabilities[i] = (string)searchResponse.Entries[0].Attributes["supportedCapabilities"][i];
                }

                // Classify the store type from the advertised capability OIDs.
                foreach (string capability in properties.SupportCapabilities)
                {
                    if (CapabilityMap.LDAP_CAP_ACTIVE_DIRECTORY_ADAM_OID == capability)
                    {
                        properties.contextType = ContextType.ApplicationDirectory;
                    }
                    else if (CapabilityMap.LDAP_CAP_ACTIVE_DIRECTORY_OID == capability)
                    {
                        properties.contextType = ContextType.Domain;
                    }
                }

                // If we can't determine the OS version we must fall back to lowest level of functionality
                if (searchResponse.Entries[0].Attributes.Contains("domainControllerFunctionality"))
                {
                    properties.OsVersion = (DomainControllerMode)Convert.ToInt32(searchResponse.Entries[0].Attributes["domainControllerFunctionality"][0], CultureInfo.InvariantCulture);
                }
                else
                {
                    properties.OsVersion = DomainControllerMode.Win2k;
                }

                if (properties.contextType == ContextType.ApplicationDirectory)
                {
                    if (searchResponse.Entries[0].Attributes.Contains("msDS-PortSSL"))
                    {
                        properties.portSSL = Convert.ToInt32(searchResponse.Entries[0].Attributes["msDS-PortSSL"][0]);
                    }
                    if (searchResponse.Entries[0].Attributes.Contains("msDS-PortLDAP"))
                    {
                        properties.portLDAP = Convert.ToInt32(searchResponse.Entries[0].Attributes["msDS-PortLDAP"][0]);
                    }
                }

                GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "OsVersion : " + properties.OsVersion.ToString());
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "dnsHostName : " + properties.dnsHostName);
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "contextType : " + properties.contextType.ToString());
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "portSSL : " + properties.portSSL.ToString(CultureInfo.InvariantCulture));
                GlobalDebug.WriteLineIf(GlobalDebug.Info, "ReadServerConfig", "portLDAP :" + properties.portLDAP.ToString(CultureInfo.InvariantCulture));
            }
            finally
            {
                if (ldapConnection != null)
                {
                    ldapConnection.Dispose();
                }
            }
        }

        // Picks the concrete StoreCtx type from the entry's provider prefix:
        // LDAP: => ADAMStoreCtx or ADStoreCtx; otherwise WinNT: => SAMStoreCtx.
        // Ownership of 'entry' passes to the created StoreCtx.
        private StoreCtx CreateContextFromDirectoryEntry(DirectoryEntry entry)
        {
            StoreCtx storeCtx;

            Debug.Assert(entry != null);

            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "CreateContextFromDirectoryEntry: path is " + entry.Path);

            if (entry.Path.StartsWith("LDAP:", StringComparison.Ordinal))
            {
                if (this.ContextType == ContextType.ApplicationDirectory)
                {
                    storeCtx = new ADAMStoreCtx(entry, true, _username, _password, _name, _options);
                }
                else
                {
                    storeCtx = new ADStoreCtx(entry, true, _username, _password, _options);
                }
            }
            else
            {
                Debug.Assert(entry.Path.StartsWith("WinNT:", StringComparison.Ordinal));
                storeCtx = new SAMStoreCtx(entry, true, _username, _password, _options);
            }

            storeCtx.OwningContext = this;
            return storeCtx;
        }

        // Checks if we're already been disposed, and throws an appropriate
        // exception if so.
        internal void CheckDisposed()
        {
            if (_disposed)
            {
                GlobalDebug.WriteLineIf(GlobalDebug.Warn, "PrincipalContext", "CheckDisposed: accessing disposed object");

                throw new ObjectDisposedException("PrincipalContext");
            }
        }

        // Match the default context options to the store type.
        private static ContextOptions GetDefaultOptionForStore(ContextType storeType)
        {
            if (storeType == ContextType.Machine)
            {
                return DefaultContextOptions.MachineDefaultContextOption;
            }
            else
            {
                return DefaultContextOptions.ADDefaultContextOption;
            }
        }

        // Helper method: given a typeof(User/Computer/etc.), returns the userCtx/computerCtx/etc.
        internal StoreCtx ContextForType(Type t)
        {
            GlobalDebug.WriteLineIf(GlobalDebug.Info, "PrincipalContext", "ContextForType: type is " + t.ToString());

            Initialize();

            if (t == typeof(System.DirectoryServices.AccountManagement.UserPrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.UserPrincipal)))
            {
                return _userCtx;
            }
            else if (t == typeof(System.DirectoryServices.AccountManagement.ComputerPrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.ComputerPrincipal)))
            {
                return _computerCtx;
            }
            else if (t == typeof(System.DirectoryServices.AccountManagement.AuthenticablePrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.AuthenticablePrincipal)))
            {
                return _userCtx;
            }
            else
            {
                Debug.Assert(t == typeof(System.DirectoryServices.AccountManagement.GroupPrincipal) || t.IsSubclassOf(typeof(System.DirectoryServices.AccountManagement.GroupPrincipal)));
                return _groupCtx;
            }
        }
    }
}
//
// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.OperationalInsights;
using Microsoft.Azure.Management.OperationalInsights.Models;

namespace Microsoft.Azure.Management.OperationalInsights
{
    /// <summary>
    /// .Net client wrapper for the REST API for Azure Operational Insights
    /// </summary>
    public static partial class DataSourceOperationsExtensions
    {
        /// <summary>
        /// Create or update a data source.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data source.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The name of the parent workspace that will contain the
        /// data source.
        /// </param>
        /// <param name='parameters'>
        /// Required. The parameters required to create or update a data source.
        /// </param>
        /// <returns>
        /// The create or update data source operation response.
        /// </returns>
        public static DataSourceCreateOrUpdateResponse CreateOrUpdate(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, DataSourceCreateOrUpdateParameters parameters)
        {
            // Offload the async call to the thread pool and block on the result
            // (sync-over-async wrapper pattern used by the code generator).
            Task<Task<DataSourceCreateOrUpdateResponse>> wrapped = Task.Factory.StartNew(
                (object state) => ((IDataSourceOperations)state).CreateOrUpdateAsync(resourceGroupName, workspaceName, parameters),
                operations,
                CancellationToken.None,
                TaskCreationOptions.None,
                TaskScheduler.Default);
            return wrapped.Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Create or update a data source.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data source.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The name of the parent workspace that will contain the
        /// data source.
        /// </param>
        /// <param name='parameters'>
        /// Required. The parameters required to create or update a data source.
        /// </param>
        /// <returns>
        /// The create or update data source operation response.
        /// </returns>
        public static Task<DataSourceCreateOrUpdateResponse> CreateOrUpdateAsync(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, DataSourceCreateOrUpdateParameters parameters)
        {
            // Delegate to the cancellable overload with no cancellation.
            return operations.CreateOrUpdateAsync(resourceGroupName, workspaceName, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Deletes a data source instance.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data source.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The name of the workspace that contains the data source.
        /// </param>
        /// <param name='datasourceName'>
        /// Required. Name of the data source.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static AzureOperationResponse Delete(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, string datasourceName)
        {
            // Offload the async call to the thread pool and block on the result.
            Task<Task<AzureOperationResponse>> wrapped = Task.Factory.StartNew(
                (object state) => ((IDataSourceOperations)state).DeleteAsync(resourceGroupName, workspaceName, datasourceName),
                operations,
                CancellationToken.None,
                TaskCreationOptions.None,
                TaskScheduler.Default);
            return wrapped.Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Deletes a data source instance.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data source.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The name of the workspace that contains the data source.
        /// </param>
        /// <param name='datasourceName'>
        /// Required. Name of the data source.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public static Task<AzureOperationResponse> DeleteAsync(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, string datasourceName)
        {
            // Delegate to the cancellable overload with no cancellation.
            return operations.DeleteAsync(resourceGroupName, workspaceName, datasourceName, CancellationToken.None);
        }

        /// <summary>
        /// Gets a data source instance.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data source.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The name of the workspace that contains the data source.
        /// </param>
        /// <param name='dataSourceName'>
        /// Required. Name of the data source.
        /// </param>
        /// <returns>
        /// The get data source operation response.
        /// </returns>
        public static DataSourceGetResponse Get(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, string dataSourceName)
        {
            // Offload the async call to the thread pool and block on the result.
            Task<Task<DataSourceGetResponse>> wrapped = Task.Factory.StartNew(
                (object state) => ((IDataSourceOperations)state).GetAsync(resourceGroupName, workspaceName, dataSourceName),
                operations,
                CancellationToken.None,
                TaskCreationOptions.None,
                TaskScheduler.Default);
            return wrapped.Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets a data source instance.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data source.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The name of the workspace that contains the data source.
        /// </param>
        /// <param name='dataSourceName'>
        /// Required. Name of the data source.
        /// </param>
        /// <returns>
        /// The get data source operation response.
        /// </returns>
        public static Task<DataSourceGetResponse> GetAsync(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, string dataSourceName)
        {
            // Delegate to the cancellable overload with no cancellation.
            return operations.GetAsync(resourceGroupName, workspaceName, dataSourceName, CancellationToken.None);
        }

        /// <summary>
        /// Gets the first page of data source instances in a workspace with
        /// the link to the next page.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data sources.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The workspace that contains the data sources.
        /// </param>
        /// <param name='kind'>
        /// Required. Filter data sources by Kind.
        /// </param>
        /// <param name='skiptoken'>
        /// Optional. Token for paging support.
        /// </param>
        /// <returns>
        /// The list data source operation response.
        /// </returns>
        public static DataSourceListResponse ListInWorkspace(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, string kind, string skiptoken)
        {
            // Offload the async call to the thread pool and block on the result.
            Task<Task<DataSourceListResponse>> wrapped = Task.Factory.StartNew(
                (object state) => ((IDataSourceOperations)state).ListInWorkspaceAsync(resourceGroupName, workspaceName, kind, skiptoken),
                operations,
                CancellationToken.None,
                TaskCreationOptions.None,
                TaskScheduler.Default);
            return wrapped.Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets the first page of data source instances in a workspace with
        /// the link to the next page.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Required. The resource group name of the data sources.
        /// </param>
        /// <param name='workspaceName'>
        /// Required. The workspace that contains the data sources.
        /// </param>
        /// <param name='kind'>
        /// Required. Filter data sources by Kind.
        /// </param>
        /// <param name='skiptoken'>
        /// Optional. Token for paging support.
        /// </param>
        /// <returns>
        /// The list data source operation response.
        /// </returns>
        public static Task<DataSourceListResponse> ListInWorkspaceAsync(this IDataSourceOperations operations, string resourceGroupName, string workspaceName, string kind, string skiptoken)
        {
            // Delegate to the cancellable overload with no cancellation.
            return operations.ListInWorkspaceAsync(resourceGroupName, workspaceName, kind, skiptoken, CancellationToken.None);
        }

        /// <summary>
        /// Gets the next page of data source instances with the link to the
        /// next page.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='nextLink'>
        /// Required. The url to the next data source page.
        /// </param>
        /// <returns>
        /// The list data source operation response.
        /// </returns>
        public static DataSourceListResponse ListNext(this IDataSourceOperations operations, string nextLink)
        {
            // Offload the async call to the thread pool and block on the result.
            Task<Task<DataSourceListResponse>> wrapped = Task.Factory.StartNew(
                (object state) => ((IDataSourceOperations)state).ListNextAsync(nextLink),
                operations,
                CancellationToken.None,
                TaskCreationOptions.None,
                TaskScheduler.Default);
            return wrapped.Unwrap().GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets the next page of data source instances with the link to the
        /// next page.
        /// </summary>
        /// <param name='operations'>
        /// Reference to the
        /// Microsoft.Azure.Management.OperationalInsights.IDataSourceOperations.
        /// </param>
        /// <param name='nextLink'>
        /// Required. The url to the next data source page.
        /// </param>
        /// <returns>
        /// The list data source operation response.
        /// </returns>
        public static Task<DataSourceListResponse> ListNextAsync(this IDataSourceOperations operations, string nextLink)
        {
            // Delegate to the cancellable overload with no cancellation.
            return operations.ListNextAsync(nextLink, CancellationToken.None);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Web;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Umbraco.Core.Cache;
using Umbraco.Core.IO;
using Umbraco.Core.Logging;
using Umbraco.Core.Models.Rdbms;
using Umbraco.Core.Persistence;
using umbraco.interfaces;
using Umbraco.Core.Persistence.SqlSyntax;

namespace Umbraco.Core.Sync
{
    /// <summary>
    /// An <see cref="IServerMessenger"/> that works by storing messages in the database.
    /// </summary>
    //
    // this messenger writes ALL instructions to the database,
    // but only processes instructions coming from remote servers,
    // thus ensuring that instructions run only once
    //
    public class DatabaseServerMessenger : ServerMessengerBase
    {
        private readonly ApplicationContext _appContext;
        private readonly ManualResetEvent _syncIdle; // signalled when no sync is in progress
        private readonly object _locko = new object(); // guards _syncing/_released/_initialized transitions
        private readonly ILogger _logger;
        private int _lastId = -1; // id of the last processed instruction; -1 means never synced
        private DateTime _lastSync;
        private DateTime _lastPruned;
        private bool _initialized;
        private bool _syncing;
        private bool _released; // set when MainDom is released; no further syncs after this
        private readonly ProfilingLogger _profilingLogger;

        protected DatabaseServerMessengerOptions Options { get; private set; }

        protected ApplicationContext ApplicationContext { get { return _appContext; } }

        public DatabaseServerMessenger(ApplicationContext appContext, bool distributedEnabled, DatabaseServerMessengerOptions options)
            : base(distributedEnabled)
        {
            if (appContext == null) throw new ArgumentNullException("appContext");
            if (options == null) throw new ArgumentNullException("options");

            _appContext = appContext;
            Options = options;
            _lastPruned = _lastSync = DateTime.UtcNow;
            _syncIdle = new ManualResetEvent(true); // initially signalled: nothing is syncing
            _profilingLogger = appContext.ProfilingLogger;
            _logger = appContext.ProfilingLogger.Logger;
        }

        #region Messenger

        protected override bool RequiresDistributed(IEnumerable<IServerAddress> servers, ICacheRefresher refresher, MessageType dispatchType)
        {
            // we don't care if there's servers listed or not,
            // if distributed call is enabled we will make the call
            return _initialized && DistributedEnabled;
        }

        protected override void DeliverRemote(
            IEnumerable<IServerAddress> servers,
            ICacheRefresher refresher,
            MessageType messageType,
            IEnumerable<object> ids = null,
            string json = null)
        {
            var idsA = ids == null ? null : ids.ToArray();

            Type idType;
            if (GetArrayType(idsA, out idType) == false)
                throw new ArgumentException("All items must be of the same type, either int or Guid.", "ids");

            // serialize the instructions and persist them, stamped with this AppDomain's identity
            // so that we can later skip our own rows when processing
            var instructions = RefreshInstruction.GetInstructions(refresher, messageType, idsA, idType, json);

            var dto = new CacheInstructionDto
            {
                UtcStamp = DateTime.UtcNow,
                Instructions = JsonConvert.SerializeObject(instructions, Formatting.None),
                OriginIdentity = LocalIdentity
            };

            ApplicationContext.DatabaseContext.Database.Insert(dto);
        }

        #endregion

        #region Sync

        /// <summary>
        /// Boots the messenger.
        /// </summary>
        /// <remarks>
        /// Thread safety: this is NOT thread safe. Because it is NOT meant to run multi-threaded.
        /// Callers MUST ensure thread-safety.
        /// </remarks>
        protected void Boot()
        {
            // weight:10, must release *before* the facade service, because once released
            // the service will *not* be able to properly handle our notifications anymore
            const int weight = 10;

            var registered = ApplicationContext.MainDom.Register(
                () =>
                {
                    lock (_locko)
                    {
                        _released = true; // no more syncs
                    }

                    // wait a max of 5 seconds and then return, so that we don't block
                    // the entire MainDom callbacks chain and prevent the AppDomain from
                    // properly releasing MainDom - a timeout here means that one refresher
                    // is taking too much time processing, however when it's done we will
                    // not update lastId and stop everything
                    var idle = _syncIdle.WaitOne(5000);
                    if (idle == false)
                    {
                        _logger.Warn<DatabaseServerMessenger>("The wait lock timed out, application is shutting down. The current instruction batch will be re-processed.");
                    }
                },
                weight);

            if (registered == false)
                return;

            ReadLastSynced(); // get _lastId
            EnsureInstructions(); // reset _lastId if instrs are missing
            Initialize(); // boot
        }

        /// <summary>
        /// Initializes a server that has never synchronized before.
        /// </summary>
        /// <remarks>
        /// Thread safety: this is NOT thread safe. Because it is NOT meant to run multi-threaded.
        /// Callers MUST ensure thread-safety.
        /// </remarks>
        private void Initialize()
        {
            lock (_locko)
            {
                if (_released) return;

                var coldboot = false;
                if (_lastId < 0) // never synced before
                {
                    // we haven't synced - in this case we aren't going to sync the whole thing, we will assume this is a new
                    // server and it will need to rebuild it's own caches, eg Lucene or the xml cache file.
                    _logger.Warn<DatabaseServerMessenger>("No last synced Id found, this generally means this is a new server/install."
                        + " The server will build its caches and indexes, and then adjust its last synced Id to the latest found in"
                        + " the database and maintain cache updates based on that Id.");

                    coldboot = true;
                }
                else
                {
                    //check for how many instructions there are to process
                    //TODO: In 7.6 we need to store the count of instructions per row since this is not effective because there can be far more than one (if not thousands)
                    // of instructions in a single row.
                    var count = _appContext.DatabaseContext.Database.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoCacheInstruction WHERE id > @lastId", new {lastId = _lastId});
                    if (count > Options.MaxProcessingInstructionCount)
                    {
                        //too many instructions, proceed to cold boot
                        _logger.Warn<DatabaseServerMessenger>("The instruction count ({0}) exceeds the specified MaxProcessingInstructionCount ({1})."
                            + " The server will skip existing instructions, rebuild its caches and indexes entirely, adjust its last synced Id"
                            + " to the latest found in the database and maintain cache updates based on that Id.",
                            () => count, () => Options.MaxProcessingInstructionCount);

                        coldboot = true;
                    }
                }

                if (coldboot)
                {
                    // go get the last id in the db and store it
                    // note: do it BEFORE initializing otherwise some instructions might get lost
                    // when doing it before, some instructions might run twice - not an issue
                    var maxId = _appContext.DatabaseContext.Database.ExecuteScalar<int>("SELECT MAX(id) FROM umbracoCacheInstruction");

                    //if there is a max currently, or if we've never synced
                    if (maxId > 0 || _lastId < 0)
                        SaveLastSynced(maxId);

                    // execute initializing callbacks
                    if (Options.InitializingCallbacks != null)
                        foreach (var callback in Options.InitializingCallbacks)
                            callback();
                }

                _initialized = true;
            }
        }

        /// <summary>
        /// Synchronize the server (throttled).
        /// </summary>
        protected void Sync()
        {
            lock (_locko)
            {
                if (_syncing)
                    return;

                //Don't continue if we are released
                if (_released)
                    return;

                if ((DateTime.UtcNow - _lastSync).TotalSeconds <= Options.ThrottleSeconds)
                    return;

                //Set our flag and the lock to be in it's original state (i.e. it can be awaited)
                _syncing = true;
                _syncIdle.Reset();
                _lastSync = DateTime.UtcNow;
            }

            try
            {
                using (_profilingLogger.DebugDuration<DatabaseServerMessenger>("Syncing from database..."))
                {
                    ProcessDatabaseInstructions();

                    //Check for pruning throttling
                    if ((_released || (DateTime.UtcNow - _lastPruned).TotalSeconds <= Options.PruneThrottleSeconds))
                        return;

                    _lastPruned = _lastSync;

                    // only the single/master server prunes, so that pruning happens exactly once
                    switch (_appContext.GetCurrentServerRole())
                    {
                        case ServerRole.Single:
                        case ServerRole.Master:
                            PruneOldInstructions();
                            break;
                    }
                }
            }
            finally
            {
                lock (_locko)
                {
                    //We must reset our flag and signal any waiting locks
                    _syncing = false;
                }
                _syncIdle.Set();
            }
        }

        /// <summary>
        /// Process instructions from the database.
        /// </summary>
        /// <remarks>
        /// Thread safety: this is NOT thread safe. Because it is NOT meant to run multi-threaded.
        /// Processes pending instructions and saves the last processed id; returns nothing.
        /// </remarks>
        private void ProcessDatabaseInstructions()
        {
            // NOTE
            // we 'could' recurse to ensure that no remaining instructions are pending in the table before proceeding but I don't think that
            // would be a good idea since instructions could keep getting added and then all other threads will probably get stuck from serving requests
            // (depending on what the cache refreshers are doing). I think it's best we do the one time check, process them and continue, if there are
            // pending requests after being processed, they'll just be processed on the next poll.
            //
            // FIXME not true if we're running on a background thread, assuming we can?

            var sql = new Sql().Select("*")
                .From<CacheInstructionDto>(_appContext.DatabaseContext.SqlSyntax)
                .Where<CacheInstructionDto>(dto => dto.Id > _lastId)
                .OrderBy<CacheInstructionDto>(dto => dto.Id, _appContext.DatabaseContext.SqlSyntax);

            //only retrieve the top 100 (just in case there's tons)
            // even though MaxProcessingInstructionCount is by default 1000 we still don't want to process that many
            // rows in one request thread since each row can contain a ton of instructions (until 7.5.5 in which case
            // a row can only contain MaxProcessingInstructionCount)
            var topSql = _appContext.DatabaseContext.SqlSyntax.SelectTop(sql, 100);

            // only process instructions coming from a remote server, and ignore instructions coming from
            // the local server as they've already been processed. We should NOT assume that the sequence of
            // instructions in the database makes any sense whatsoever, because it's all async.
            var localIdentity = LocalIdentity;

            var lastId = 0;

            //tracks which ones have already been processed to avoid duplicates
            var processed = new HashSet<RefreshInstruction>();

            //It would have been nice to do this in a Query instead of Fetch using a data reader to save
            // some memory however we cannot do that because inside of this loop the cache refreshers are also
            // performing some lookups which cannot be done with an active reader open
            foreach (var dto in _appContext.DatabaseContext.Database.Fetch<CacheInstructionDto>(topSql))
            {
                //If this flag gets set it means we're shutting down! In this case, we need to exit asap and cannot
                // continue processing anything otherwise we'll hold up the app domain shutdown
                if (_released)
                {
                    break;
                }

                if (dto.OriginIdentity == localIdentity)
                {
                    // just skip that local one but update lastId nevertheless
                    lastId = dto.Id;
                    continue;
                }

                // deserialize remote instructions & skip if it fails
                JArray jsonA;
                try
                {
                    jsonA = JsonConvert.DeserializeObject<JArray>(dto.Instructions);
                }
                catch (JsonException ex)
                {
                    _logger.Error<DatabaseServerMessenger>(string.Format("Failed to deserialize instructions ({0}: \"{1}\").", dto.Id, dto.Instructions), ex);
                    lastId = dto.Id; // skip
                    continue;
                }

                var instructionBatch = GetAllInstructions(jsonA);

                //process as per-normal
                var success = ProcessDatabaseInstructions(instructionBatch, dto, processed, ref lastId);

                //if they couldn't be all processed (i.e. we're shutting down) then exit
                if (success == false)
                {
                    _logger.Info<DatabaseServerMessenger>("The current batch of instructions was not processed, app is shutting down");
                    break;
                }
            }

            if (lastId > 0)
                SaveLastSynced(lastId);
        }

        /// <summary>
        /// Processes the instruction batch and checks for errors
        /// </summary>
        /// <param name="instructionBatch">The instructions contained in a single db row</param>
        /// <param name="dto">The db row being processed; its Id becomes the new lastId on success</param>
        /// <param name="processed">
        /// Tracks which instructions have already been processed to avoid duplicates
        /// </param>
        /// <param name="lastId">Updated to the row id when the batch (or a failed batch) is consumed</param>
        /// <returns>
        /// returns true if all instructions in the batch were processed, otherwise false if they could not be due to the app being shut down
        /// </returns>
        private bool ProcessDatabaseInstructions(IReadOnlyCollection<RefreshInstruction> instructionBatch, CacheInstructionDto dto, HashSet<RefreshInstruction> processed, ref int lastId)
        {
            // execute remote instructions & update lastId
            try
            {
                var result = NotifyRefreshers(instructionBatch, processed);
                if (result)
                {
                    //if all instructions were processed, set the last id
                    lastId = dto.Id;
                }
                return result;
            }
            //catch (ThreadAbortException ex)
            //{
            //    //This will occur if the instructions processing is taking too long since this is occuring on a request thread.
            //    // Or possibly if IIS terminates the appdomain. In any case, we should deal with this differently perhaps...
            //}
            catch (Exception ex)
            {
                _logger.Error<DatabaseServerMessenger>(
                    string.Format("DISTRIBUTED CACHE IS NOT UPDATED. Failed to execute instructions (id: {0}, instruction count: {1}). Instruction is being skipped/ignored", dto.Id, instructionBatch.Count), ex);

                //we cannot throw here because this invalid instruction will just keep getting processed over and over and errors
                // will be thrown over and over. The only thing we can do is ignore and move on.
                lastId = dto.Id;
                return false;
            }

            ////if this is returned it will not be saved
            //return -1;
        }

        /// <summary>
        /// Remove old instructions from the database
        /// </summary>
        /// <remarks>
        /// Always leave the last (most recent) record in the db table, this is so that not all instructions are removed which would cause
        /// the site to cold boot if there's been no instruction activity for more than DaysToRetainInstructions.
        /// See: http://issues.umbraco.org/issue/U4-7643#comment=67-25085
        /// </remarks>
        private void PruneOldInstructions()
        {
            var pruneDate = DateTime.UtcNow.AddDays(-Options.DaysToRetainInstructions);

            // using 2 queries is faster than convoluted joins
            var maxId = _appContext.DatabaseContext.Database.ExecuteScalar<int>("SELECT MAX(id) FROM umbracoCacheInstruction;");

            var delete = new Sql().Append(@"DELETE FROM umbracoCacheInstruction WHERE utcStamp < @pruneDate AND id < @maxId", new { pruneDate, maxId });

            _appContext.DatabaseContext.Database.Execute(delete);
        }

        /// <summary>
        /// Ensure that the last instruction that was processed is still in the database.
        /// </summary>
        /// <remarks>
        /// If the last instruction is not in the database anymore, then the messenger
        /// should not try to process any instructions, because some instructions might be lost,
        /// and it should instead cold-boot.
        /// However, if the last synced instruction id is '0' and there are '0' records, then this indicates
        /// that it's a fresh site and no user actions have taken place, in this circumstance we do not want to cold
        /// boot. See: http://issues.umbraco.org/issue/U4-8627
        /// </remarks>
        private void EnsureInstructions()
        {
            if (_lastId == 0)
            {
                var sql = new Sql().Select("COUNT(*)")
                    .From<CacheInstructionDto>(_appContext.DatabaseContext.SqlSyntax);

                var count = _appContext.DatabaseContext.Database.ExecuteScalar<int>(sql);

                //if there are instructions but we haven't synced, then a cold boot is necessary
                if (count > 0)
                    _lastId = -1;
            }
            else
            {
                var sql = new Sql().Select("*")
                    .From<CacheInstructionDto>(_appContext.DatabaseContext.SqlSyntax)
                    .Where<CacheInstructionDto>(dto => dto.Id == _lastId);

                var dtos = _appContext.DatabaseContext.Database.Fetch<CacheInstructionDto>(sql);

                //if the last synced instruction is not found in the db, then a cold boot is necessary
                if (dtos.Count == 0)
                    _lastId = -1;
            }
        }

        /// <summary>
        /// Reads the last-synced id from file into memory.
        /// </summary>
        /// <remarks>
        /// Thread safety: this is NOT thread safe. Because it is NOT meant to run multi-threaded.
        /// </remarks>
        private void ReadLastSynced()
        {
            var path = SyncFilePath;
            if (File.Exists(path) == false) return;

            var content = File.ReadAllText(path);
            int last;
            if (int.TryParse(content, out last))
                _lastId = last;
        }

        /// <summary>
        /// Updates the in-memory last-synced id and persists it to file.
        /// </summary>
        /// <param name="id">The id.</param>
        /// <remarks>
        /// Thread safety: this is NOT thread safe. Because it is NOT meant to run multi-threaded.
        /// </remarks>
        private void SaveLastSynced(int id)
        {
            File.WriteAllText(SyncFilePath, id.ToString(CultureInfo.InvariantCulture));
            _lastId = id;
        }

        /// <summary>
        /// Gets the unique local identity of the executing AppDomain.
        /// </summary>
        /// <remarks>
        /// <para>It is not only about the "server" (machine name and appDomainappId), but also about
        /// an AppDomain, within a Process, on that server - because two AppDomains running at the same
        /// time on the same server (eg during a restart) are, practically, a LB setup.</para>
        /// <para>Practically, all we really need is the guid, the other infos are here for information
        /// and debugging purposes.</para>
        /// </remarks>
        protected static readonly string LocalIdentity = NetworkHelper.MachineName // eg DOMAIN\SERVER
            + "/" + HttpRuntime.AppDomainAppId // eg /LM/S3SVC/11/ROOT
            + " [P" + Process.GetCurrentProcess().Id // eg 1234
            + "/D" + AppDomain.CurrentDomain.Id // eg 22
            + "] " + Guid.NewGuid().ToString("N").ToUpper(); // make it truly unique

        /// <summary>
        /// Gets the sync file path for the local server.
        /// </summary>
        /// <returns>The sync file path for the local server.</returns>
        private static string SyncFilePath
        {
            get
            {
                var tempFolder = IOHelper.MapPath("~/App_Data/TEMP/DistCache/" + NetworkHelper.FileSafeMachineName);
                if (Directory.Exists(tempFolder) == false)
                    Directory.CreateDirectory(tempFolder);

                return Path.Combine(tempFolder, HttpRuntime.AppDomainAppId.ReplaceNonAlphanumericChars(string.Empty) + "-lastsynced.txt");
            }
        }

        #endregion

        #region Notify refreshers

        // Resolves a cache refresher by id; throws if it is not registered.
        private static ICacheRefresher GetRefresher(Guid id)
        {
            var refresher = CacheRefreshersResolver.Current.GetById(id);
            if (refresher == null)
                throw new InvalidOperationException("Cache refresher with ID \"" + id + "\" does not exist.");
            return refresher;
        }

        // Resolves a cache refresher by id and casts it to a json refresher; throws if not possible.
        private static IJsonCacheRefresher GetJsonRefresher(Guid id)
        {
            return GetJsonRefresher(GetRefresher(id));
        }

        // Casts a cache refresher to a json refresher; throws if not possible.
        private static IJsonCacheRefresher GetJsonRefresher(ICacheRefresher refresher)
        {
            var jsonRefresher = refresher as IJsonCacheRefresher;
            if (jsonRefresher == null)
                throw new InvalidOperationException("Cache refresher with ID \"" + refresher.UniqueIdentifier + "\" does not implement " + typeof(IJsonCacheRefresher) + ".");
            return jsonRefresher;
        }

        /// <summary>
        /// Parses out the individual instructions to be processed
        /// </summary>
        /// <param name="jsonArray">A json array of instruction objects and/or nested instruction arrays</param>
        /// <returns>The flattened list of instructions</returns>
        private static List<RefreshInstruction> GetAllInstructions(IEnumerable<JToken> jsonArray)
        {
            var result = new List<RefreshInstruction>();
            foreach (var jsonItem in jsonArray)
            {
                // could be a JObject in which case we can convert to a RefreshInstruction,
                // otherwise it could be another JArray - in which case we'll iterate that.
                var jsonObj = jsonItem as JObject;
                if (jsonObj != null)
                {
                    var instruction = jsonObj.ToObject<RefreshInstruction>();
                    result.Add(instruction);
                }
                else
                {
                    var jsonInnerArray = (JArray)jsonItem;
                    result.AddRange(GetAllInstructions(jsonInnerArray)); // recurse
                }
            }
            return result;
        }

        /// <summary>
        /// executes the instructions against the cache refresher instances
        /// </summary>
        /// <param name="instructions">The instructions to execute</param>
        /// <param name="processed">Instructions already executed, used to skip duplicates</param>
        /// <returns>
        /// Returns true if all instructions were processed, otherwise false if the processing was interrupted (i.e. app shutdown)
        /// </returns>
        private bool NotifyRefreshers(IEnumerable<RefreshInstruction> instructions, HashSet<RefreshInstruction> processed)
        {
            foreach (var instruction in instructions)
            {
                //Check if the app is shutting down, we need to exit if this happens.
                if (_released)
                {
                    return false;
                }

                //this has already been processed
                if (processed.Contains(instruction))
                    continue;

                // dispatch to the appropriate refresher method based on the instruction type
                switch (instruction.RefreshType)
                {
                    case RefreshMethodType.RefreshAll:
                        RefreshAll(instruction.RefresherId);
                        break;
                    case RefreshMethodType.RefreshByGuid:
                        RefreshByGuid(instruction.RefresherId, instruction.GuidId);
                        break;
                    case RefreshMethodType.RefreshById:
                        RefreshById(instruction.RefresherId, instruction.IntId);
                        break;
                    case RefreshMethodType.RefreshByIds:
                        RefreshByIds(instruction.RefresherId, instruction.JsonIds);
                        break;
                    case RefreshMethodType.RefreshByJson:
                        RefreshByJson(instruction.RefresherId, instruction.JsonPayload);
                        break;
                    case RefreshMethodType.RemoveById:
                        RemoveById(instruction.RefresherId, instruction.IntId);
                        break;
                }

                processed.Add(instruction);
            }
            return true;
        }

        // Refreshes everything for the given refresher.
        private static void RefreshAll(Guid uniqueIdentifier)
        {
            var refresher = GetRefresher(uniqueIdentifier);
            refresher.RefreshAll();
        }

        // Refreshes a single item identified by Guid.
        private static void RefreshByGuid(Guid uniqueIdentifier, Guid id)
        {
            var refresher = GetRefresher(uniqueIdentifier);
            refresher.Refresh(id);
        }

        // Refreshes a single item identified by int id.
        private static void RefreshById(Guid uniqueIdentifier, int id)
        {
            var refresher = GetRefresher(uniqueIdentifier);
            refresher.Refresh(id);
        }

        // Refreshes multiple items from a json-serialized array of int ids.
        private static void RefreshByIds(Guid uniqueIdentifier, string jsonIds)
        {
            var refresher = GetRefresher(uniqueIdentifier);
            foreach (var id in JsonConvert.DeserializeObject<int[]>(jsonIds))
                refresher.Refresh(id);
        }

        // Refreshes via an arbitrary json payload (requires an IJsonCacheRefresher).
        private static void RefreshByJson(Guid uniqueIdentifier, string jsonPayload)
        {
            var refresher = GetJsonRefresher(uniqueIdentifier);
            refresher.Refresh(jsonPayload);
        }

        // Removes a single item identified by int id.
        private static void RemoveById(Guid uniqueIdentifier, int id)
        {
            var refresher = GetRefresher(uniqueIdentifier);
            refresher.Remove(id);
        }

        #endregion
    }
}
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.IO;
using System.Net;
using System.Threading.Tasks;
using VistaQFT = Vista.QFT;

namespace Spyder.Client.Net
{
    /// <summary>
    /// Integration tests for <see cref="QFTClient"/>, exercised against an in-process
    /// QFT server rooted at c:\ and listening on the loopback interface.
    /// </summary>
    [TestClass]
    public class QFTTests
    {
        private static QFTClient client;
        private static VistaQFT.QFTServer server;
        protected static string watcherDirectory = @"temp\watcher"; //root relative watcher directory

        public QFTTests()
        {
        }

        /// <summary>
        /// Starts the QFT server and connects a client once for the whole test class.
        /// </summary>
        [ClassInitialize()]
        public static void StartupQFT(TestContext context)
        {
            server = new VistaQFT.QFTServer(@"c:\", IPAddress.Loopback, QFTClient.SERVER_PORT, watcherDirectory);
            server.Startup();

            // NOTE(review): the client connects to hard-coded port 7280; confirm this matches
            // the QFTClient.SERVER_PORT the server above was started with.
            client = new QFTClient("127.0.0.1", 7280);
            Assert.IsTrue(client.StartupAsync().Result, "Failed to startup client connection");
        }

        /// <summary>
        /// Shuts down the shared client and server after all tests have run.
        /// </summary>
        [ClassCleanup()]
        public static void Shutdown()
        {
            if (client != null)
            {
                client.ShutdownAsync().Wait();
                client = null;
            }
            if (server != null)
            {
                server.Shutdown();
                server = null;
            }
        }

        /// <summary>
        /// Ensures the client connection is alive before each test, reconnecting if needed.
        /// </summary>
        [TestInitialize()]
        public void SetUp()
        {
            var tcs = new TaskCompletionSource<bool>();
            Task.Run(async () =>
                {
                    try
                    {
                        if (!await client.Ping())
                        {
                            Assert.IsTrue(await client.StartupAsync(), "Failed to start client for test");
                        }
                        tcs.TrySetResult(true);
                    }
                    catch (Exception ex)
                    {
                        tcs.TrySetException(ex);
                    }
                });
            tcs.Task.Wait();
        }

        /// <summary>
        /// Sends a generated dummy file of the requested size to the server and verifies
        /// the received copy byte-for-byte.
        /// </summary>
        /// <param name="size">Size in bytes of the dummy file to send.</param>
        protected async Task sendFile(long size)
        {
            string clientFile = createDummyFile(Path.GetRandomFileName(), size);
            string serverFile = Path.GetRandomFileName();
            try
            {
                using (Stream stream = File.OpenRead(clientFile))
                {
                    Assert.IsTrue(await client.SendFile(stream, client.ConvertAbsolutePathToRelative(serverFile), null), "SendFile returned false -- send failed");
                }

                Assert.IsTrue(File.Exists(serverFile), "SendFile returned true, but file does not exist.");
                compareFiles(clientFile, serverFile);
            }
            finally
            {
                TryDeleteFile(serverFile);
                TryDeleteFile(clientFile);
            }
        }

        [TestMethod()]
        public async Task SendSmallFile()
        {
            await sendFile(65535);
        }

        [TestMethod()]
        public async Task SendLargeFile()
        {
            await sendFile(331667986);
        }

        /// <summary>
        /// Receives a generated dummy file of the requested size from the server and verifies
        /// the received copy byte-for-byte.
        /// </summary>
        /// <param name="size">Size in bytes of the dummy file to receive.</param>
        public async Task receiveFile(long size)
        {
            string serverFile = createDummyFile(Path.GetRandomFileName(), size);
            string clientFile = Path.GetRandomFileName();
            try
            {
                TryDeleteFile(clientFile);
                using (Stream clientFileStream = File.Create(clientFile))
                {
                    Assert.IsTrue(await client.ReceiveFile(client.ConvertAbsolutePathToRelative(serverFile), clientFileStream, null), "ReceiveFile returned false -- receive failed");
                }

                // Bug fix: verify the file that was received (clientFile); serverFile is the
                // source we created above and trivially exists.
                Assert.IsTrue(File.Exists(clientFile), "ReceiveFile returned true, but file does not exist.");
                compareFiles(serverFile, clientFile);
            }
            finally
            {
                TryDeleteFile(serverFile);
                TryDeleteFile(clientFile);
            }
        }

        [TestMethod()]
        public async Task ReceiveSmallFile()
        {
            await receiveFile(65535);
        }

        [TestMethod()]
        public async Task ReceiveLargeFile()
        {
            //Receive a ~316MB file
            await receiveFile(331667986);
        }

        [TestMethod()]
        public async Task CreateDirectory()
        {
            string tempDir = @"c:\temp\nunittestfolder";
            try
            {
                // Bug fix: tempDir is a directory, so remove it with TryDeleteDirectory
                // (TryDeleteFile cannot delete a directory).
                TryDeleteDirectory(tempDir);

                //Make sure the directory doesn't exist
                Assert.IsFalse(Directory.Exists(tempDir));

                Assert.IsTrue(await client.CreateDirectory(client.ConvertAbsolutePathToRelative(tempDir)), "Failed to create specified directory");
                Assert.IsTrue(Directory.Exists(tempDir), "QFT Client returned true from CreateDirectory, but directory does not exist");
            }
            finally
            {
                TryDeleteDirectory(tempDir);
            }
        }

        [TestMethod()]
        public async Task DirectoryExists()
        {
            string tempDir = @"c:\temp\nunittestfolder";
            try
            {
                //Test directory not exists first
                TryDeleteDirectory(tempDir);
                Assert.IsFalse(await client.DirectoryExists(client.ConvertAbsolutePathToRelative(tempDir)), "Client reported directory existed when it did not.");

                //Test directory does exist
                if (!Directory.Exists(tempDir))
                    Directory.CreateDirectory(tempDir);

                Assert.IsTrue(await client.DirectoryExists(client.ConvertAbsolutePathToRelative(tempDir)), "Client reported directory did not exist when it did.");
            }
            finally
            {
                TryDeleteDirectory(tempDir);
            }
        }

        [TestMethod()]
        public async Task DeleteDirectory()
        {
            string tempDir = @"c:\temp\nunittestfolder";
            try
            {
                if (!Directory.Exists(tempDir))
                    Directory.CreateDirectory(tempDir);

                //Put a file in the directory to make sure it removes it at well
                createDummyFile(Path.Combine(tempDir, "dummyfile.txt"), 1024);

                //Make sure the directory exists
                Assert.IsTrue(Directory.Exists(tempDir));

                // Bug fix: message previously said "create"; this asserts deletion.
                Assert.IsTrue(await client.DeleteDirectory(client.ConvertAbsolutePathToRelative(tempDir)), "Failed to delete specified directory");
                Assert.IsFalse(Directory.Exists(tempDir), "QFT Client returned true from DeleteDirectory, but directory exists");
            }
            finally
            {
                TryDeleteDirectory(tempDir);
            }
        }

        [TestMethod()]
        public async Task GetDirectories()
        {
            string tempDir = @"c:\temp\nunittestfolder";
            try
            {
                if (!Directory.Exists(tempDir))
                    Directory.CreateDirectory(tempDir);

                for (int i = 0; i < 100; i++)
                    Directory.CreateDirectory(Path.Combine(tempDir, "Directory " + i.ToString()));

                string[] expected = Directory.GetDirectories(tempDir);
                for (int i = 0; i < expected.Length; i++)
                    expected[i] = new DirectoryInfo(expected[i]).Name;

                // NOTE(review): this comparison assumes the server enumerates directories in
                // the same order as Directory.GetDirectories locally.
                string[] actual = await client.GetDirectories(client.ConvertAbsolutePathToRelative(tempDir));
                Assert.AreEqual(expected.Length, actual.Length, "Directory count was unexpected");

                for (int i = 0; i < actual.Length; i++)
                    Assert.AreEqual(expected[i], actual[i], "Directory at index {0} did not match", i);
            }
            finally
            {
                TryDeleteDirectory(tempDir);
            }
        }

        [TestMethod()]
        public async Task GetFiles()
        {
            string tempDir = @"c:\temp\nunittestfolder";
            try
            {
                if (!Directory.Exists(tempDir))
                    Directory.CreateDirectory(tempDir);

                for (int i = 0; i < 100; i++)
                    createDummyFile(Path.Combine(tempDir, "dummyFile." + i.ToString()), 64);

                string[] expected = Directory.GetFiles(tempDir);
                for (int i = 0; i < expected.Length; i++)
                    expected[i] = Path.GetFileName(expected[i]);

                // NOTE(review): assumes the server enumerates files in the same order as
                // Directory.GetFiles locally.
                string[] actual = await client.GetFiles(client.ConvertAbsolutePathToRelative(tempDir));
                Assert.AreEqual(expected.Length, actual.Length, "File count was unexpected");

                for (int i = 0; i < actual.Length; i++)
                    Assert.AreEqual(expected[i], actual[i], "File at index {0} did not match", i);
            }
            finally
            {
                TryDeleteDirectory(tempDir);
            }
        }

        [TestMethod()]
        public async Task GetFileSize()
        {
            string tempFile = createDummyFile();
            try
            {
                long expected = new FileInfo(tempFile).Length;
                long actual = await client.GetFileSize(client.ConvertAbsolutePathToRelative(tempFile));
                Assert.AreEqual(expected, actual);
            }
            finally
            {
                TryDeleteFile(tempFile);
            }
        }

        [TestMethod()]
        public async Task FileExists()
        {
            string dummyFile = Path.GetRandomFileName();
            try
            {
                TryDeleteFile(dummyFile);

                //Make sure file doesn't exist
                Assert.IsFalse(await client.FileExists(client.ConvertAbsolutePathToRelative(dummyFile)), "Client reported file existed when it did not.");

                //Make sure it correctly identifies the file if it exists
                createDummyFile(dummyFile, 1024);
                Assert.IsTrue(await client.FileExists(client.ConvertAbsolutePathToRelative(dummyFile)), "Client reported file did not exist when it did.");
            }
            finally
            {
                TryDeleteFile(dummyFile);
            }
        }

        [TestMethod()]
        public async Task DeleteFile()
        {
            string dummyFile = createDummyFile();
            try
            {
                Assert.IsTrue(File.Exists(dummyFile), "Dummy file does not exist to be deleted.");
                Assert.IsTrue(await client.DeleteFile(client.ConvertAbsolutePathToRelative(dummyFile)), "DeleteFile method failed");
                Assert.IsFalse(File.Exists(dummyFile), "Dummy file was not successfully deleted.");
            }
            finally
            {
                TryDeleteFile(dummyFile);
            }
        }

        [TestMethod()]
        public async Task Ping()
        {
            Assert.IsTrue(await client.Ping(), "Ping failed");
        }

        [TestMethod()]
        public async Task GetRemoteTimeOffset()
        {
            const int maxSkewInMs = 250;
            TimeSpan offset = await client.GetRemoteTimeOffset();
            Assert.IsTrue(Math.Abs(offset.TotalMilliseconds) < maxSkewInMs, "Time offset was too large.  Offset was {0}", offset);
        }

        [TestMethod()]
        public async Task GetCreationTime()
        {
            string dummy = createDummyFile();
            try
            {
                DateTime expected = File.GetCreationTime(dummy);
                DateTime actual = await client.GetCreationTime(client.ConvertAbsolutePathToRelative(dummy));
                Assert.AreEqual(expected, actual);
            }
            finally
            {
                TryDeleteFile(dummy);
            }
        }

        [TestMethod()]
        public async Task SetModifiedFileTime()
        {
            string dummy = createDummyFile();
            try
            {
                DateTime setTime = DateTime.Now.Subtract(new TimeSpan(2, 3, 4, 5, 6));
                Assert.IsTrue(await client.SetModifiedTime(client.ConvertAbsolutePathToRelative(dummy), setTime), "Failed to set modified time");

                //Make sure the returned time is the same as the local file itself
                DateTime expected = File.GetLastWriteTime(dummy);
                Assert.AreEqual(expected, setTime);
            }
            finally
            {
                TryDeleteFile(dummy);
            }
        }

        [TestMethod()]
        public async Task GetModifiedTime()
        {
            string dummy = createDummyFile();
            try
            {
                DateTime expected = File.GetLastWriteTime(dummy);
                DateTime actual = await client.GetModifiedTime(client.ConvertAbsolutePathToRelative(dummy));
                Assert.AreEqual(expected, actual);
            }
            finally
            {
                TryDeleteFile(dummy);
            }
        }

        /// <summary>
        /// Sanity test for the folder generation / comparison helpers used by other tests.
        /// </summary>
        [TestMethod()]
        public void FolderCreateCompareTest()
        {
            string tempDir = @"c:\temp\nunittestfolder";
            string tempDir2 = @"c:\temp\nunittestfolder2";
            try
            {
                this.createRecursiveFolder(tempDir);
                this.createRecursiveFolder(tempDir2);
                Assert.IsTrue(compareTwoFolders(tempDir, tempDir2));
            }
            finally
            {
                TryDeleteDirectory(tempDir);
                TryDeleteDirectory(tempDir2);
            }
        }

        [TestMethod()]
        public async Task ReceiveFileFiftyTimes()
        {
            string serverFile = createDummyFile(Path.GetRandomFileName(), 65536);
            string clientFile = Path.GetRandomFileName();
            try
            {
                for (int i = 0; i < 50; i++)
                {
                    TryDeleteFile(clientFile);
                    using (var stream = File.Create(clientFile))
                    {
                        Assert.IsTrue(await client.ReceiveFile(client.ConvertAbsolutePathToRelative(serverFile), stream, null), "ReceiveFile returned false -- receive failed");
                    }

                    // Bug fix: verify the received file (clientFile), not the source file.
                    Assert.IsTrue(File.Exists(clientFile), "ReceiveFile returned true, but file does not exist.");
                    compareFiles(serverFile, clientFile);
                }
            }
            finally
            {
                TryDeleteFile(serverFile);
                TryDeleteFile(clientFile);
            }
        }

        [TestMethod()]
        public async Task SendAndOverwriteOneHundredTimes()
        {
            char fillChar = 'a';
            string serverFile = Path.GetRandomFileName();
            string clientFile = Path.GetRandomFileName();
            try
            {
                for (int i = 0; i < 100; i++)
                {
                    // Change the fill character each pass so an overwrite failure is detectable.
                    createDummyFile(clientFile, 65535, fillChar++);
                    using (Stream stream = File.OpenRead(clientFile))
                    {
                        Assert.IsTrue(await client.SendFile(stream, client.ConvertAbsolutePathToRelative(serverFile), null));
                    }
                    compareFiles(clientFile, serverFile);
                }
            }
            finally
            {
                TryDeleteFile(serverFile);
                TryDeleteFile(clientFile);
            }
        }

        [TestMethod()]
        public async Task FileSendTime()
        {
            const int megabytes = 10;
            long fileSize = megabytes * 1024 * 1024;
            string tempFile = Path.GetRandomFileName();
            createDummyFile(tempFile, fileSize);
            string destFile = Path.GetRandomFileName();
            try
            {
                DateTime startTime = DateTime.Now;
                using (Stream stream = File.OpenRead(tempFile))
                {
                    await client.SendFile(stream, client.ConvertAbsolutePathToRelative(destFile), null);
                }
                DateTime endTime = DateTime.Now;

                TimeSpan span = endTime.Subtract(startTime);
                Console.WriteLine("Sending {0}MB file completed in {1} ({2}k/s)", megabytes, span, (fileSize / 1024f) / (float)span.TotalSeconds);

                //Verify the file copied properly
                compareFiles(tempFile, destFile);

                //Todo:  Enforce time constraints and fail if outside constraint
            }
            finally
            {
                TryDeleteFile(tempFile);
                TryDeleteFile(destFile);
            }
        }

        [TestMethod()]
        public async Task FileReceiveTime()
        {
            const int megabytes = 10;
            long fileSize = megabytes * 1024 * 1024;
            string tempFile = Path.GetRandomFileName();
            createDummyFile(tempFile, fileSize);
            string destFile = Path.GetRandomFileName();
            try
            {
                DateTime startTime = DateTime.Now;
                using (Stream stream = File.Create(destFile))
                {
                    await client.ReceiveFile(client.ConvertAbsolutePathToRelative(tempFile), stream, null);
                }
                DateTime endTime = DateTime.Now;

                TimeSpan span = endTime.Subtract(startTime);
                Console.WriteLine("Receiving {0}MB file completed in {1} ({2}k/s)", megabytes, span, (fileSize / 1024f) / (float)span.TotalSeconds);

                //Verify files sent were sent properly
                compareFiles(tempFile, destFile);

                //Todo:  Enforce time constraints and fail if outside constraint
            }
            finally
            {
                TryDeleteFile(tempFile);
                TryDeleteFile(destFile);
            }
        }

        /// <summary>
        /// Progress callback used by transfer tests; writes progress to the console.
        /// </summary>
        protected void progressHandler(long bytes, long total, string fileName)
        {
            Console.WriteLine("{0}  {1} / {2}", fileName, bytes, total);
        }

        /// <summary>
        /// Best-effort file delete; swallows all errors (cleanup helper).
        /// </summary>
        private void TryDeleteFile(string fileName)
        {
            try
            {
                if (File.Exists(fileName))
                    File.Delete(fileName);
            }
            catch
            {
            }
        }

        /// <summary>
        /// Best-effort recursive directory delete; swallows all errors (cleanup helper).
        /// </summary>
        private void TryDeleteDirectory(string directory)
        {
            try
            {
                if (Directory.Exists(directory))
                    Directory.Delete(directory, true);
            }
            catch
            {
            }
        }

        #region Generic File Generation / Compare methods

        /// <summary>
        /// Asserts that two files exist and have identical length and contents.
        /// </summary>
        protected void compareFiles(string origonalFile, string compareFile)
        {
            if (!File.Exists(origonalFile))
                throw new FileNotFoundException("Failed to find original file to perform compare.");
            if (!File.Exists(compareFile))
                throw new FileNotFoundException("Failed to find compare file to perform compare on.");

            using (FileStream origonalStream = File.OpenRead(origonalFile))
            using (FileStream compareStream = File.OpenRead(compareFile))
            {
                if (origonalStream.Length != compareStream.Length)
                    throw new Exception(string.Format("Original file and compare file are different sizes.  Original: {0}  Compare: {1}", origonalStream.Length, compareStream.Length));

                const int bufferSize = 8096;
                byte[] origBuffer = new byte[bufferSize];
                byte[] compBuffer = new byte[bufferSize];
                long fileSize = origonalStream.Length;
                long index = 0;
                while (index < fileSize)
                {
                    int origRead = origonalStream.Read(origBuffer, 0, bufferSize);
                    int compareRead = compareStream.Read(compBuffer, 0, bufferSize);
                    Assert.AreEqual(origRead, compareRead, "Read sized from streams did not match");

                    for (int i = 0; i < origRead; i++)
                    {
                        if (origBuffer[i] != compBuffer[i])
                            Assert.Fail("Buffers did not match at index " + i);
                    }
                    index += origRead;
                }
            }
        }

        /// <summary>
        /// Creates a dummy text file of the specified size filled with a single character.
        /// </summary>
        /// <param name="tempFile">Path of the file to (re)create.</param>
        /// <param name="size">Number of fill characters to write (ASCII fill chars => bytes).</param>
        /// <param name="fillChar">Character used to fill the file.</param>
        /// <returns>The path of the created file.</returns>
        protected string createDummyFile(string tempFile, long size, char fillChar)
        {
            TryDeleteFile(tempFile);
            using (StreamWriter writer = File.CreateText(tempFile))
            {
                for (long i = 0; i < size; i++)
                    writer.Write(fillChar);

                writer.Flush();
            }
            return tempFile;
        }

        /// <summary>
        /// Creates a dummy file of a specified byte size.
        /// </summary>
        /// <param name="size">Size in bytes of file to create</param>
        /// <returns></returns>
        protected string createDummyFile(string tempFile, long size)
        {
            return createDummyFile(tempFile, size, 'H');
        }

        /// <summary>
        /// Creates a 1KB dummy file with a random name in the working directory.
        /// </summary>
        protected string createDummyFile()
        {
            return createDummyFile(Path.GetRandomFileName(), 1024);
        }

        /// <summary>
        /// Builds a deterministic folder tree: 10 subdirectories of 10 x 1KB files each.
        /// </summary>
        protected void createRecursiveFolder(string tempDir)
        {
            if (!Directory.Exists(tempDir))
                Directory.CreateDirectory(tempDir);

            for (int i = 0; i < 10; i++)
            {
                string subDir = Path.Combine(tempDir, "Directory " + i.ToString());
                Directory.CreateDirectory(subDir);

                for (int j = 0; j < 10; j++)
                    createDummyFile(Path.Combine(subDir, "File." + j.ToString()), 1024);
            }
        }

        /// <summary>
        /// Recursively compares two folder trees by directory and file names
        /// (names only -- file contents are not compared).
        /// </summary>
        protected bool compareTwoFolders(string origonal, string compare)
        {
            string[] origonalSubs = Directory.GetDirectories(origonal);
            string[] compareSubs = Directory.GetDirectories(compare);
            if (compareSubs.Length != origonalSubs.Length)
                return false;

            //check subdirs
            foreach (string subDir in origonalSubs)
            {
                bool exists = false;
                foreach (string subCompare in compareSubs)
                {
                    string strCompare = new DirectoryInfo(subCompare).Name;
                    string strDir = new DirectoryInfo(subDir).Name;
                    if (strCompare == strDir)
                    {
                        exists = true;

                        //recursively search folders
                        if (!compareTwoFolders(subDir, subCompare))
                            return false;

                        break;
                    }
                }
                if (!exists)
                    return false;
            }

            //check files
            string[] origonalFiles = Directory.GetFiles(origonal);
            string[] compareFiles = Directory.GetFiles(compare);
            if (origonalFiles.Length != compareFiles.Length)
                return false;

            foreach (string subFile in origonalFiles)
            {
                bool exists = false;
                foreach (string subCompare in compareFiles)
                {
                    if (Path.GetFileName(subCompare) == Path.GetFileName(subFile))
                    {
                        exists = true;
                        break;
                    }
                }
                if (!exists)
                    return false;
            }

            return true;
        }

        #endregion
    }
}
//// Copyright (c) Microsoft. All rights reserved. //// Licensed under the MIT license. See LICENSE file in the project root for full license information. ////#define PERFORMANCE_TESTING //using System; //using Xunit; //using System.Drawing.Graphics; //using System.IO; //using System.Diagnostics; //using System.Reflection; //using System.Threading.Tasks; //public partial class GraphicsUnitTests //{ // /* Functionality test Constants */ // static string SquareCatLogicalName = "SquareCatJpeg"; // static string BlackCatLogicalName = "BlackCatPng"; // static string SoccerCatLogicalName = "SoccerCatJpeg"; // static string CuteCatLogicalName = "CuteCatPng"; // static string JpegCatLogicalName = "JpegCat"; // static string PngCatLogicalName = "PngCat"; // /* Performance Tests Constants */ //#if PERFORMANCE_TESTING // static StreamWriter streamwriter; // static Stopwatch stopwatchSingleThread = new Stopwatch(); // static Stopwatch stopwatchMultiThread = new Stopwatch(); // /* Performance Tests Variables */ // static string jpegCatPath = ""; // static string jpegDogPath = ""; // static string pngCatPath = ""; // static string pngDogPath = ""; //#endif // /*----------------------Functionality Unit Tests------------------------------------*/ // private static void ValidateImagePng(Image img, string embeddedLogicalName) // { // Stream toCompare = typeof(GraphicsUnitTests).GetTypeInfo().Assembly.GetManifestResourceStream(embeddedLogicalName); // Image comparison = Png.Load(toCompare); // Assert.Equal(comparison.HeightInPixels, img.HeightInPixels); // Assert.Equal(comparison.WidthInPixels, img.WidthInPixels); // Assert.Equal(comparison.TrueColor, img.TrueColor); // } // private static void ValidateImageJpeg(Image img, string embeddedLogicalName) // { // Stream toCompare = typeof(GraphicsUnitTests).GetTypeInfo().Assembly.GetManifestResourceStream(embeddedLogicalName); // Image comparison = Jpg.Load(toCompare); // Assert.Equal(comparison.HeightInPixels, img.HeightInPixels); // 
Assert.Equal(comparison.WidthInPixels, img.WidthInPixels); // Assert.Equal(comparison.TrueColor, img.TrueColor); // } // private static void ValidateCreatedImage(Image img, int widthToCompare, int heightToCompare) // { // Assert.Equal(widthToCompare, img.WidthInPixels); // Assert.Equal(heightToCompare, img.HeightInPixels); // } // private static string ChooseExtension(string filepath) // { // if (filepath.Contains("Jpeg")) // return ".jpg"; // else // return ".png"; // } // private static string SaveEmbeddedResourceToFile(string logicalName) // { // //get a temp file path // string toReturn = Path.GetTempFileName(); // toReturn = Path.ChangeExtension(toReturn, ChooseExtension(logicalName)); // //get stream of embedded resoruce // Stream embeddedResourceStream = typeof(GraphicsUnitTests).GetTypeInfo().Assembly.GetManifestResourceStream(logicalName); // //write stream to temp file path // using (FileStream fileStream = new FileStream(toReturn, FileMode.OpenOrCreate)) // { // embeddedResourceStream.Seek(0, SeekOrigin.Begin); // embeddedResourceStream.CopyTo(fileStream); // } // //return where the resource is saved // return toReturn; // } // /* Tests Create Method */ // [Fact] // public static void WhenCreatingAnEmptyImageThenValidateAnImage() // { // Image emptyTenSquare = Image.Create(10, 10); // ValidateCreatedImage(emptyTenSquare, 10, 10); // } // [Fact] // public void WhenCreatingABlankImageWithNegativeHeightThenThrowException() // { // Assert.Throws<InvalidOperationException>(() => Image.Create(1, -1)); // } // [Fact] // public void WhenCreatingABlankImageWithNegativeWidthThenThrowException() // { // Assert.Throws<InvalidOperationException>(() => Image.Create(-1, 1)); // } // [Fact] // public void WhenCreatingABlankImageWithNegativeSizesThenThrowException() // { // Assert.Throws<InvalidOperationException>(() => Image.Create(-1, -1)); // } // [Fact] // public void WhenCreatingABlankImageWithZeroHeightThenThrowException() // { // 
Assert.Throws<InvalidOperationException>(() => Image.Create(1, 0)); // } // [Fact] // public void WhenCreatingABlankImageWithZeroWidthThenThrowException() // { // Assert.Throws<InvalidOperationException>(() => Image.Create(0, 1)); // } // [Fact] // public void WhenCreatingABlankImageWithZeroParametersThenThrowException() // { // Assert.Throws<InvalidOperationException>(() => Image.Create(0, 0)); // } // /* Tests Load(filepath) method */ // [Fact] // public void WhenCreatingAJpegFromAValidFileGiveAValidImage() // { // //save embedded resource to a file // string filepath = SaveEmbeddedResourceToFile(SquareCatLogicalName); // //read it // Image newJpeg = Jpg.Load(filepath); // File.Delete(filepath); // //validate it // ValidateImageJpeg(newJpeg, SquareCatLogicalName); // } // [Fact] // public void WhenCreatingAPngFromAValidFileGiveAValidImage() // { // //save embedded resource to a file // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // //read it // Image newJpeg = Png.Load(filepath); // File.Delete(filepath); // //validate it // ValidateImagePng(newJpeg, BlackCatLogicalName); // } // [Fact] // public void WhenCreatingAJpegFromAMalformedPathThenThrowException() // { // //place holder string to demonstrate what would be the error case // string temporaryPath = Path.GetTempPath(); // string invalidFilepath = temporaryPath + "\\Hi.jpg"; // Assert.Throws<FileNotFoundException>(() => Jpg.Load(invalidFilepath)); // } // [Fact] // public void WhenCreatingAPngFromAMalformedPathThenThrowException() // { // string temporaryPath = Path.GetTempPath(); // string invalidFilepath = temporaryPath + "\\Hi.png"; // Assert.Throws<FileNotFoundException>(() => Png.Load(invalidFilepath)); // } // [Fact] // public void WhenCreatingAnImageFromAnUnfoundPathThenThrowException() // { // string temporaryPath = Path.GetTempPath(); // string invalidFilepath = temporaryPath + "\\Hi.jpg"; // Assert.Throws<FileNotFoundException>(() => Jpg.Load(invalidFilepath)); // } // [Fact] 
// public void WhenCreatingAnImageFromAFileTypeThatIsNotAnImageThenThrowException() // { // string temporaryPath = Path.GetTempPath(); // string invalidFilepath = temporaryPath + "text.txt"; // Assert.Throws<FileNotFoundException>(() => Jpg.Load(invalidFilepath)); // } // /* Tests Load(stream) mehtod*/ // [Fact] // public void WhenCreatingAJpegFromAValidStreamThenWriteAValidImageToFile() // { // string filepath = SaveEmbeddedResourceToFile(SoccerCatLogicalName); // using (FileStream filestream = new FileStream(filepath, FileMode.Open)) // { // Image fromStream = Jpg.Load(filestream); // ValidateImageJpeg(fromStream, SoccerCatLogicalName); // } // File.Delete(filepath); // } // [Fact] // public void WhenCreatingAPngFromAValidStreamThenWriteAValidImageToFile() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // using (FileStream filestream = new FileStream(filepath, FileMode.Open)) // { // Image fromStream = Png.Load(filestream); // ValidateImagePng(fromStream, CuteCatLogicalName); // } // File.Delete(filepath); // } // [Fact] // public void WhenCreatingAnImageFromAnInvalidStreamThenThrowException() // { // Stream stream = null; // Assert.Throws<InvalidOperationException>(() => Png.Load(stream)); // } // /* Test Resize */ // [Fact] // public void WhenResizingEmptyImageDownThenGiveAValidatedResizedImage() // { // Image emptyResizeSquare = Image.Create(100, 100); // emptyResizeSquare = emptyResizeSquare.Resize(10, 10); // ValidateCreatedImage(emptyResizeSquare, 10, 10); // } // [Fact] // public void WhenResizingEmptyImageUpThenGiveAValidatedResizedImage() // { // Image emptyResizeSquare = Image.Create(100, 100); // emptyResizeSquare = emptyResizeSquare.Resize(200, 200); // ValidateCreatedImage(emptyResizeSquare, 200, 200); // } // [Fact] // public void WhenResizingJpegLoadedFromFileThenGiveAValidatedResizedImage() // { // //what to do? Have embedded resource stream of expected result? 
// string filepath = SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image fromFileResizeSquare = Jpg.Load(filepath); // fromFileResizeSquare = fromFileResizeSquare.Resize(200, 200); // ValidateCreatedImage(fromFileResizeSquare, 200, 200); // File.Delete(filepath); // } // [Fact] // public void WhenResizingPngLoadedFromFileThenGiveAValidatedResizedImage() // { // //what to do? Have embedded resource stream of expected result? // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image fromFileResizeSquare = Png.Load(filepath); // fromFileResizeSquare = fromFileResizeSquare.Resize(400, 400); // ValidateCreatedImage(fromFileResizeSquare, 400, 400); // File.Delete(filepath); // } // [Fact] // public void WhenResizingJpegLoadedFromStreamThenGiveAValidatedResizedImage() // { // string filepath = SaveEmbeddedResourceToFile(SoccerCatLogicalName); // using (FileStream filestream = new FileStream(filepath, FileMode.Open)) // { // Image fromStream = Jpg.Load(filestream); // fromStream = fromStream.Resize(400, 400); // ValidateCreatedImage(fromStream, 400, 400); // } // File.Delete(filepath); // } // [Fact] // public void WhenResizingPngLoadedFromStreamThenGiveAValidatedResizedImage() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // using (FileStream filestream = new FileStream(filepath, FileMode.Open)) // { // Image fromStream = Png.Load(filestream); // fromStream = fromStream.Resize(400, 400); // ValidateCreatedImage(fromStream, 400, 400); // } // File.Delete(filepath); // } // /* Testing Resize parameters */ // [Fact] // public void WhenResizingImageGivenNegativeHeightThenThrowException() // { // Image img = Image.Create(1, 1); // Assert.Throws<InvalidOperationException>(() => img.Resize(-1, 1)); // } // [Fact] // public void WhenResizingImageGivenNegativeWidthThenThrowException() // { // Image img = Image.Create(1, 1); // Assert.Throws<InvalidOperationException>(() => img.Resize(1, -1)); // } // [Fact] // public void 
WhenResizingImageGivenNegativeSizesThenThrowException() // { // Image img = Image.Create(1, 1); // Assert.Throws<InvalidOperationException>(() => img.Resize(-1, -1)); // } // [Fact] // public void WhenResizingImageGivenZeroHeightThenThrowException() // { // Image img = Image.Create(1, 1); // Assert.Throws<InvalidOperationException>(() => img.Resize(0, 1)); // } // [Fact] // public void WhenResizingImageGivenZeroWidthThenThrowException() // { // Image img = Image.Create(1, 1); // Assert.Throws<InvalidOperationException>(() => img.Resize(1, 0)); // } // [Fact] // public void WhenResizingImageGivenZeroSizesThenThrowException() // { // Image img = Image.Create(1, 1); // Assert.Throws<InvalidOperationException>(() => img.Resize(0, 0)); // } // /* Test Write */ // [Fact] // public void WhenWritingABlankCreatedJpegToAValidFileWriteAValidFile() // { // Image emptyImage = Image.Create(10, 10); // ValidateCreatedImage(emptyImage, 10, 10); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".jpg"); // Jpg.Write(emptyImage, tempFilePath); // File.Delete(tempFilePath); // } // [Fact] // public void WhenWritingABlankCreatedPngToAValidFileWriteAValidFile() // { // Image emptyImage = Image.Create(10, 10); // ValidateCreatedImage(emptyImage, 10, 10); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); // Png.Write(emptyImage, tempFilePath); // File.Delete(tempFilePath); // } // [Fact] // public void WhenWritingAJpegCreatedFromFileToAValidFileWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image fromFile = Jpg.Load(filepath); // ValidateImageJpeg(fromFile, SquareCatLogicalName); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".jpg"); // Png.Write(fromFile, tempFilePath); // File.Delete(filepath); // File.Delete(tempFilePath); // } // [Fact] // public void WhenWritingAPngCreatedFromFileToAValidFileWriteAValidImage() // { // string filepath = 
SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image fromFile = Png.Load(filepath); // ValidateImagePng(fromFile, BlackCatLogicalName); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); // Png.Write(fromFile, tempFilePath); // File.Delete(filepath); // File.Delete(tempFilePath); // } // [Fact] // public void WhenWritingAPngMadeTransparentToAValidFileWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image img = Png.Load(filepath); // ValidateImagePng(img, BlackCatLogicalName); // img.SetAlphaPercentage(.2); // ValidateImagePng(img, BlackCatLogicalName); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); // Png.Write(img, tempFilePath); // File.Delete(filepath); // File.Delete(tempFilePath); // } // [Fact] // public void WhenWritingATransparentResizedPngToAValidFileWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image img = Png.Load(filepath); // ValidateImagePng(img, BlackCatLogicalName); // img.SetAlphaPercentage(.2); // img = img.Resize(400, 400); // ValidateCreatedImage(img, 400, 400); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); // Png.Write(img, tempFilePath); // File.Delete(filepath); // File.Delete(tempFilePath); // } // [Fact] // public void WhenWritingAResizedTransparentPngToAValidFileWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image img = Png.Load(filepath); // ValidateImagePng(img, BlackCatLogicalName); // img = img.Resize(400, 400); // ValidateCreatedImage(img, 400, 400); // img.SetAlphaPercentage(.2); // string tempFilePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); // Png.Write(img, tempFilePath); // File.Delete(filepath); // File.Delete(tempFilePath); // } // /* Tests Writing to a Stream*/ // [Fact] // public void WhenWritingABlankCreatedJpegToAValidStreamWriteAValidStream() // { // Image img = 
Image.Create(100, 100); // using (MemoryStream stream = new MemoryStream()) // { // Jpg.Write(img, stream); // stream.Position = 0; // Image img2 = Jpg.Load(stream); // ValidateCreatedImage(img2, 100, 100); // } // } // [Fact] // public void WhenWritingABlankCreatedPngToAValidStreamWriteAValidStream() // { // Image img = Image.Create(100, 100); // using (MemoryStream stream = new MemoryStream()) // { // Png.Write(img, stream); // stream.Position = 0; // Image img2 = Png.Load(stream); // ValidateCreatedImage(img2, 100, 100); // } // } // [Fact] // public void WhenWritingAJpegFromFileToAValidStreamWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(SoccerCatLogicalName); // Image img = Jpg.Load(filepath); // using (MemoryStream stream = new MemoryStream()) // { // Jpg.Write(img, stream); // stream.Position = 0; // Image img2 = Jpg.Load(stream); // ValidateImageJpeg(img2, SoccerCatLogicalName); // } // File.Delete(filepath); // } // [Fact] // public void WhenWritingAPngCreatedFromFileToAValidStreamWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // Image img = Png.Load(filepath); // using (MemoryStream stream = new MemoryStream()) // { // Png.Write(img, stream); // stream.Position = 0; // Image img2 = Png.Load(stream); // ValidateImagePng(img2, CuteCatLogicalName); // } // File.Delete(filepath); // } // [Fact] // public void WhenWritingAResizedJpegToAValidStreamWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(SoccerCatLogicalName); // Image img = Jpg.Load(filepath); // using (MemoryStream stream = new MemoryStream()) // { // img = img.Resize(40, 40); // ValidateCreatedImage(img, 40, 40); // Jpg.Write(img, stream); // stream.Position = 0; // Image img2 = Jpg.Load(stream); // ValidateCreatedImage(img, 40, 40); // } // File.Delete(filepath); // } // [Fact] // public void WhenWritingAResizedPngToAValidStreamWriteAValidImage() // { // string filepath = 
SaveEmbeddedResourceToFile(CuteCatLogicalName); // Image img = Png.Load(filepath); // using (MemoryStream stream = new MemoryStream()) // { // img = img.Resize(40, 40); // ValidateCreatedImage(img, 40, 40); // Png.Write(img, stream); // stream.Position = 0; // Image img2 = Png.Load(stream); // ValidateCreatedImage(img, 40, 40); // } // File.Delete(filepath); // } // [Fact] // public void WhenWritingAPngMadeTransparentToAValidStreamWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // Image img = Png.Load(filepath); // using (MemoryStream stream = new MemoryStream()) // { // img.SetAlphaPercentage(.2); // ValidateImagePng(img, CuteCatLogicalName); // Png.Write(img, stream); // stream.Position = 0; // Image img2 = Png.Load(stream); // ValidateImagePng(img2, CuteCatLogicalName); // } // File.Delete(filepath); // } // [Fact] // public void WhenWritingATransparentResizedPngToAValidStreamWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // Image img = Png.Load(filepath); // using (MemoryStream stream = new MemoryStream()) // { // img.SetAlphaPercentage(.2); // img = img.Resize(400, 400); // ValidateCreatedImage(img, 400, 400); // Png.Write(img, stream); // stream.Position = 0; // Image img2 = Png.Load(stream); // ValidateCreatedImage(img2, 400, 400); // } // File.Delete(filepath); // } // [Fact] // public void WhenWritingAResizedTransparentPngToAValidStreamWriteAValidImage() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // Image img = Png.Load(filepath); // ValidateImagePng(img, CuteCatLogicalName); // img = img.Resize(400, 400); // ValidateCreatedImage(img, 400, 400); // img.SetAlphaPercentage(.2); // File.Delete(filepath); // } // /* Test Draw */ // [Fact] // public void WhenDrawingTwoImagesWriteACorrectResult() // { // //open yellow cat image // string filepath = SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image yellowCat = Jpg.Load(filepath); // 
ValidateImageJpeg(yellowCat, SquareCatLogicalName); // //open black cat image // string filepath2 = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image blackCat = Jpg.Load(filepath2); // ValidateImagePng(blackCat, BlackCatLogicalName); // //draw // yellowCat.Draw(blackCat, 0, 0); // ValidateImageJpeg(yellowCat, SquareCatLogicalName); // File.Delete(filepath); // File.Delete(filepath2); // } // /* Test SetTransparency */ // [Fact] // public void WhenSettingTheTransparencyOfAnImageWriteAnImageWithChangedTransparency() // { // //open black cat image // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image blackCat0 = Jpg.Load(filepath); // ValidateImagePng(blackCat0, BlackCatLogicalName); // blackCat0.SetAlphaPercentage(0); // ValidateImagePng(blackCat0, BlackCatLogicalName); // Image blackCat1 = Jpg.Load(filepath); // ValidateImagePng(blackCat1, BlackCatLogicalName); // blackCat0.SetAlphaPercentage(0.5); // ValidateImagePng(blackCat1, BlackCatLogicalName); // Image blackCat2 = Jpg.Load(filepath); // ValidateImagePng(blackCat2, BlackCatLogicalName); // blackCat0.SetAlphaPercentage(1); // ValidateImagePng(blackCat2, BlackCatLogicalName); // File.Delete(filepath); // } // /* Test Draw and Set Transparency */ // [Fact] // public void WhenDrawingAnImageWithTransparencyChangedGiveACorrectWrittenFile() // { // //black cat load // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image blackCat = Jpg.Load(filepath); // ValidateImagePng(blackCat, BlackCatLogicalName); // blackCat.SetAlphaPercentage(0.5); // //yellow cat load // string filepath2 = SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image yellowCat = Jpg.Load(filepath2); // ValidateImageJpeg(yellowCat, SquareCatLogicalName); // yellowCat.Draw(blackCat, 0, 0); // ValidateImageJpeg(yellowCat, SquareCatLogicalName); // } // [Fact] // public static void WhenAddingAGreyScaleFilterToAJpegGiveAValidGreyScaledImage() // { // string filepath = 
SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image img1 = Jpg.Load(filepath); // img1.ApplyMatrixMultiplier(ImageExtensions.GreyScaleMatrix); // ValidateImageJpeg(img1, SquareCatLogicalName); // Jpg.Write(img1, Path.GetTempPath() + "GreyscaleCat.jpg"); // } // [Fact] // public static void WhenAddingAGreyScaleFilterToAPngGiveAValidGreyScaledImage() // { // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image img1 = Png.Load(filepath); // img1.ApplyMatrixMultiplier(ImageExtensions.GreyScaleMatrix); // ValidateImagePng(img1, BlackCatLogicalName); // Png.Write(img1, Path.GetTempPath() + "GreyscaleCat.png"); // } // [Fact] // public static void WhenAddingASepiaFilterToAJpegGiveAValidGreyScaledImage() // { // string filepath = SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image img1 = Jpg.Load(filepath); // img1.ApplyMatrixMultiplier(ImageExtensions.SepiaMatrix); // ValidateImageJpeg(img1, SquareCatLogicalName); // Jpg.Write(img1, Path.GetTempPath() + "SepiaCat.jpg"); // } // [Fact] // public static void WhenAddingASepiaFilterToAPngGiveAValidGreyScaledImage() // { // string filepath = SaveEmbeddedResourceToFile(CuteCatLogicalName); // Image img1 = Png.Load(filepath); // img1.ApplyMatrixMultiplier(ImageExtensions.SepiaMatrix); // ValidateImagePng(img1, CuteCatLogicalName); // Png.Write(img1, Path.GetTempPath() + "SepiaCat.png"); // } // [Fact] // public static void WhenAddingANegativeFilterToAJpegGiveAValidGreyScaledImage() // { // string filepath = SaveEmbeddedResourceToFile(SquareCatLogicalName); // Image img1 = Jpg.Load(filepath); // img1.ApplyMatrixMultiplier(ImageExtensions.NegativeMatrix); // ValidateImageJpeg(img1, SquareCatLogicalName); // Jpg.Write(img1, Path.GetTempPath() + "NegativeCat.jpg"); // } // [Fact] // public static void WhenAddingANegativeFilterToAPngGiveAValidGreyScaledImage() // { // string filepath = SaveEmbeddedResourceToFile(BlackCatLogicalName); // Image img1 = Png.Load(filepath); // 
img1.ApplyMatrixMultiplier(ImageExtensions.NegativeMatrix); // ValidateImagePng(img1, BlackCatLogicalName); // Png.Write(img1, Path.GetTempPath() + "NegativeCat.png"); // } // /*Tests CircleCrop*/ // //Tests filpath // //Tests jpg // [Fact] // public void WhenCropingAnJpgImageFromFileGiveACorrectCroppedImage() // { // //checking with cat image // string filepath = SaveEmbeddedResourceToFile(JpegCatLogicalName); // Image avatarImage = Jpg.Load(filepath); // Image newImage = avatarImage.CircleCrop(0, 0); // } // //Tests png // [Fact] // public void WhenCropingAnPngImageFromFileGiveACorrectCroppedImage() // { // //checking with cat image // string filepath = SaveEmbeddedResourceToFile(PngCatLogicalName); // Image avatarImage = Png.Load(filepath); // Image newImage = avatarImage.CircleCrop(0, 0); // } // //Tests stream // //Tests jpg // [Fact] // public void WhenCropingAnJpgImageFromFileStreamACorrectCroppedImage() // { // string filepath = SaveEmbeddedResourceToFile(JpegCatLogicalName); // using (FileStream filestream = new FileStream(filepath, FileMode.Open)) // { // Image avatarImage = Jpg.Load(filestream); // Image newImage = avatarImage.CircleCrop(0, 0); // } // } // //Tests png // [Fact] // public void WhenCropingAnPngImageFromFileStreamACorrectCroppedImage() // { // string filepath = SaveEmbeddedResourceToFile(PngCatLogicalName); // using (FileStream filestream = new FileStream(filepath, FileMode.Open)) // { // Image avatarImage = Png.Load(filestream); // Image newImage = avatarImage.CircleCrop(0, 0); // } // } // /* ------------------Performance Tests-------------------------*/ //#if PERFORMANCE_TESTING // [Fact] // public static void RunAllPerfTests() // { // string filepath = Path.GetTempPath() + "Trial1Results.txt"; // if (File.Exists(filepath)) File.Delete(filepath); // FileStream fstream = new FileStream(filepath, FileMode.OpenOrCreate); // streamwriter = new StreamWriter(fstream); // //set temppaths of files perf test images // 
SetTempPathsOfPerfTestFiles(); // runTests(1); // runTests(10); // runTests(100); // runTests(1000); // runTests(5000); // streamwriter.Dispose(); // fstream.Dispose(); // //delete perf test images files // DeletePerfTestFileConstants(); // } // public static void runTests(int numRuns) // { // WriteTestHeader(numRuns); // //LoadFileJpg // WriteCurrentTest("LoadFileJpeg", numRuns); // LoadFileJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "LoadFileJpeg"); // //LoadFilePng // WriteCurrentTest("LoadFilePng", numRuns); // LoadFilePngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "LoadFilePng"); // //WriteJpg // WriteCurrentTest("WriteJpeg", numRuns); // WriteFileJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "WriteFileJpeg"); // //WritePng // WriteCurrentTest("WritePng", numRuns); // WriteFilePngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "WriteFilePng"); // //ResizeJpg // WriteCurrentTest("ResizeJpeg", numRuns); // ResizeJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "ResizeJpeg"); // //resize png // WriteCurrentTest("ResizePng", numRuns); // ResizePngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "ResizePng"); // //ChangeAlphaJpg // WriteCurrentTest("ChangeAlphaJpeg", numRuns); // ChangeAlphaJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "ChangeAlphaJpeg"); // //ChangeAlphaPng // WriteCurrentTest("ChangeAlphaPng", numRuns); // ChangeAlphaPngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "ChangeAlphaPng"); // //DrawJpgOverJpg // WriteCurrentTest("DrawJpegOverJpeg", numRuns); // DrawJpegOverJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "DrawJpegOverJpeg"); // //DrawPngOverPng // WriteCurrentTest("DrawPngOverPng", numRuns); // DrawPngOverPngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "DrawPngOverPng"); // //DrawJpgOverPng // WriteCurrentTest("DrawJpegOverPng", numRuns); // DrawJpegOverPngPerfTest(numRuns); // 
WriteStopWatch(stopwatchSingleThread, "DrawJpegOverPng"); // //DrawPngOverJpg // WriteCurrentTest("DrawPngOverJpeg", numRuns); // DrawPngOverJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "DrawPngOverJpeg"); // //LoadStreamJpg // WriteCurrentTest("LoadStreamJpeg", numRuns); // LoadStreamJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "LoadStreamJpeg"); // //LoadStreamPng // WriteCurrentTest("LoadStreamPng", numRuns); // LoadStreamPngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "LoadStreamPng"); // //WriteJpg // WriteCurrentTest("WriteJpeg", numRuns); // WriteStreamJpegPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "WriteStreamJpeg"); // //WritePng // WriteCurrentTest("WritePng", numRuns); // WriteStreamPngPerfTest(numRuns); // WriteStopWatch(stopwatchSingleThread, "WriteStreamPng"); // } // [Fact] // public static void SetUpAllPerfTestsWithThreads() // { // int numOfTasks = 4; // string filepath = Path.GetTempPath() + "Trial2Results.txt"; // if (File.Exists(filepath)) File.Delete(filepath); // //set temp paths of files perf test images // SetTempPathsOfPerfTestFiles(); // FileStream fstream = new FileStream(filepath, FileMode.OpenOrCreate); // streamwriter = new StreamWriter(fstream); // WriteTestHeader(1); // RunAllPerfTestsWithThreads(numOfTasks, 1); // WriteTestHeader(10); // RunAllPerfTestsWithThreads(numOfTasks, 10); // WriteTestHeader(100); // RunAllPerfTestsWithThreads(numOfTasks, 100); // WriteTestHeader(1000); // RunAllPerfTestsWithThreads(numOfTasks, 1000); // WriteTestHeader(5000); // RunAllPerfTestsWithThreads(numOfTasks, 5000); // streamwriter.Dispose(); // fstream.Dispose(); // //delete temp perf tests images files // DeletePerfTestFileConstants(); // } // private static void RunAllPerfTestsWithThreads(int numOfTasks, int numRuns) // { // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "LoadFileJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "LoadFilePngPerfTest"); 
// RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "WriteJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "WritePngPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "ResizeJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "ResizePngPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "ChangeAlphaJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "ChangeAlphaPngPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "DrawJpegOverJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "DrawPngOverPngPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "DrawJpegOverPngPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "DrawPngOverJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "LoadStreamJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "LoadStreamPngPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "WriteJpegPerfTest"); // RunOneFuntionWithMultipleTasks(numOfTasks, numRuns, "WritePngPerfTest"); // } // private static void RunOneFuntionWithMultipleTasks(int numOfTasks, int numRuns, string functionToRun) // { // WriteCurrentTest(functionToRun, numRuns); // Task[] tasks = new Task[numOfTasks]; // stopwatchMultiThread.Start(); // for (int i = 0; i < numOfTasks; i++) // { // switch (functionToRun) // { // case "LoadFileJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => LoadFileJpegPerfTest(numRuns / numOfTasks)); // break; // case "LoadFilePngPerfTest": // tasks[i] = Task.Factory.StartNew(() => LoadFilePngPerfTest(numRuns / numOfTasks)); // break; // case "WriteFileJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => WriteFileJpegPerfTest(numRuns / numOfTasks)); // break; // case "WriteFilePngPerfTest": // tasks[i] = Task.Factory.StartNew(() => WriteFilePngPerfTest(numRuns / numOfTasks)); // break; // case "ResizeJpegPerfTest": // tasks[i] = 
Task.Factory.StartNew(() => ResizeJpegPerfTest(numRuns / numOfTasks)); // break; // case "ResizePngPerfTest": // tasks[i] = Task.Factory.StartNew(() => ResizePngPerfTest(numRuns / numOfTasks)); // break; // case "ChangeAlphaJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => ChangeAlphaJpegPerfTest(numRuns / numOfTasks)); // break; // case "ChangeAlphaPngPerfTest": // tasks[i] = Task.Factory.StartNew(() => ChangeAlphaPngPerfTest(numRuns / numOfTasks)); // break; // case "DrawJpegOverJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => DrawJpegOverJpegPerfTest(numRuns / numOfTasks)); // break; // case "DrawPngOverPngPerfTest": // tasks[i] = Task.Factory.StartNew(() => DrawPngOverPngPerfTest(numRuns / numOfTasks)); // break; // case "DrawJpegOverPngPerfTest": // tasks[i] = Task.Factory.StartNew(() => DrawJpegOverPngPerfTest(numRuns / numOfTasks)); // break; // case "DrawPngOverJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => DrawPngOverJpegPerfTest(numRuns / numOfTasks)); // break; // case "LoadStreamJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => LoadStreamJpegPerfTest(numRuns / numOfTasks)); // break; // case "LoadStreamPngPerfTest": // tasks[i] = Task.Factory.StartNew(() => LoadStreamPngPerfTest(numRuns / numOfTasks)); // break; // case "WriteStreamJpegPerfTest": // tasks[i] = Task.Factory.StartNew(() => WriteStreamJpegPerfTest(numRuns / numOfTasks)); // break; // case "WriteStreamPngPerfTest": // tasks[i] = Task.Factory.StartNew(() => WriteStreamPngPerfTest(numRuns / numOfTasks)); // break; // default: // throw new NotSupportedException("A task was created but not given a proper task. 
Check the code/swithc statement."); // } // } // Task.WaitAll(tasks); // stopwatchMultiThread.Stop(); // WriteStopWatch(stopwatchMultiThread, functionToRun); // //delete dump dir // } // private static void SetTempPathsOfPerfTestFiles() // { // jpegDogPath = SaveEmbeddedResourceToFile("JpegDog"); // jpegCatPath = SaveEmbeddedResourceToFile("JpegCat"); // pngDogPath = SaveEmbeddedResourceToFile("PngDog"); // pngCatPath = SaveEmbeddedResourceToFile("PngCat"); // } // private static void DeletePerfTestFileConstants() // { // File.Delete(jpegDogPath); // File.Delete(jpegCatPath); // File.Delete(pngDogPath); // File.Delete(pngCatPath); // } // private static void WriteTestHeader(int numRuns) // { // Console.WriteLine(""); // Console.WriteLine("~~~~~~~~~~~ {0} Runs ~~~~~~~~~~~", numRuns); // Console.WriteLine(""); // streamwriter.WriteLine(""); // streamwriter.WriteLine("~~~~~~~~~~~ {0} Runs ~~~~~~~~~~~", numRuns); // streamwriter.WriteLine(""); // } // private static void WriteCurrentTest(string currentTest, int numRuns) // { // Console.WriteLine(currentTest + "{0}", numRuns); // streamwriter.WriteLine(currentTest + "{0}", numRuns); // } // private static void WriteStopWatch(Stopwatch sw, string currentTest) // { // TimeSpan elapsedSecs = (sw.Elapsed); // Console.WriteLine(elapsedSecs); // Console.WriteLine(""); // streamwriter.WriteLine("Elapsed time for " + currentTest + ": " + elapsedSecs); // streamwriter.WriteLine(""); // sw.Reset(); // } // private static void LoadFileJpegPerfTest(int numRuns) // { // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("LoadFileJpegTest :" + i); // stopwatchSingleThread.Start(); // Image img = Jpg.Load(jpegCatPath); // stopwatchSingleThread.Stop(); // img.ReleaseStruct(); // } // } // private static void LoadFilePngPerfTest(int numRuns) // { // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // { // Console.WriteLine("LoadFilePngTest :" + 
i); // } // stopwatchSingleThread.Start(); // Image img = Png.Load(pngCatPath); // stopwatchSingleThread.Stop(); // img.ReleaseStruct(); // } // } // //FIX Write // private static void WriteFileJpegPerfTest(int numRuns) // { // //string dir = Path.GetTempPath(); // Image _thisjpgdog = Jpg.Load(jpegDogPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // { // Console.WriteLine("WriteJpegTest :" + i); // } // stopwatchSingleThread.Start(); // Jpg.Write(_thisjpgdog, Path.ChangeExtension(Path.GetTempFileName(), ".jpg")); // stopwatchSingleThread.Stop(); // } // _thisjpgdog.ReleaseStruct(); // } // //fix write // private static void WriteFilePngPerfTest(int numRuns) // { // Image _thispngdog = Png.Load(pngDogPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("WritePngTest :" + i); // stopwatchSingleThread.Start(); // Png.Write(_thispngdog, Path.ChangeExtension(Path.GetTempFileName(), ".png")); // stopwatchSingleThread.Stop(); // } // _thispngdog.ReleaseStruct(); // } // private static void ResizeJpegPerfTest(int numRuns) // { // Image _thisjpgcat = Jpg.Load(jpegCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("ResizeJpegTest :" + i); // stopwatchSingleThread.Start(); // Image img = _thisjpgcat.Resize(100, 100); // stopwatchSingleThread.Stop(); // img.ReleaseStruct(); // } // _thisjpgcat.ReleaseStruct(); // } // private static void ResizePngPerfTest(int numRuns) // { // Image _thispngcat = Png.Load(pngCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("ResizePngTest :" + i); // stopwatchSingleThread.Start(); // Image img = _thispngcat.Resize(100, 100); // stopwatchSingleThread.Stop(); // img.ReleaseStruct(); // } // _thispngcat.ReleaseStruct(); // } // private static void ChangeAlphaJpegPerfTest(int numRuns) // { // Image 
_thisjpgcat = Jpg.Load(jpegCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("ChangeAlphaJpegTest :" + i); // stopwatchSingleThread.Start(); // _thisjpgcat.SetAlphaPercentage(0.5); // stopwatchSingleThread.Stop(); // } // _thisjpgcat.ReleaseStruct(); // } // private static void ChangeAlphaPngPerfTest(int numRuns) // { // Image _thispngcat = Png.Load(pngCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("ChangeAlphaPngTest :" + i); // stopwatchSingleThread.Start(); // _thispngcat.SetAlphaPercentage(0.5); // stopwatchSingleThread.Stop(); // } // _thispngcat.ReleaseStruct(); // } // private static void DrawJpegOverJpegPerfTest(int numRuns) // { // Image _thisjpgcat = Jpg.Load(jpegCatPath); // Image _thisjpgdog = Jpg.Load(jpegDogPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("DrawJpegOverJpegTest :" + i); // stopwatchSingleThread.Start(); // _thisjpgdog.Draw(_thisjpgcat, 10, 10); // stopwatchSingleThread.Stop(); // } // _thisjpgcat.ReleaseStruct(); // _thisjpgdog.ReleaseStruct(); // } // private static void DrawPngOverPngPerfTest(int numRuns) // { // Image _thispngcat = Png.Load(pngCatPath); // Image _thispngdog = Png.Load(pngDogPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("DrawPngOverPngTest :" + i); // stopwatchSingleThread.Start(); // _thispngdog.Draw(_thispngcat, 10, 10); // stopwatchSingleThread.Stop(); // } // _thispngcat.ReleaseStruct(); // _thispngdog.ReleaseStruct(); // } // private static void DrawJpegOverPngPerfTest(int numRuns) // { // Image _thisjpgcat = Jpg.Load(jpegCatPath); // Image _thispngdog = Png.Load(pngDogPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("DrawJpegOverPngTest :" + i); // 
stopwatchSingleThread.Start(); // _thispngdog.Draw(_thisjpgcat, 10, 10); // stopwatchSingleThread.Stop(); // } // _thisjpgcat.ReleaseStruct(); // _thispngdog.ReleaseStruct(); // } // private static void DrawPngOverJpegPerfTest(int numRuns) // { // Image _thisjpgdog = Jpg.Load(jpegDogPath); // Image _thispngcat = Png.Load(pngCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("DrawPngOverJpegTest :" + i); // stopwatchSingleThread.Start(); // _thisjpgdog.Draw(_thispngcat, 10, 10); // stopwatchSingleThread.Stop(); // } // _thisjpgdog.ReleaseStruct(); // _thispngcat.ReleaseStruct(); // } // private static void LoadStreamJpegPerfTest(int numRuns) // { // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("LoadStreamJpegTest :" + i); // using (FileStream filestream = new FileStream(jpegCatPath, FileMode.Open, FileAccess.Read, FileShare.Read)) // { // stopwatchSingleThread.Start(); // Image img = Jpg.Load(filestream); // stopwatchSingleThread.Stop(); // img.ReleaseStruct(); // //filestream.Dispose(); // } // } // } // private static void LoadStreamPngPerfTest(int numRuns) // { // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("LoadStreamPngTest :" + i); // //fixed stream by giving acces to multiple threads? 
// using (FileStream filestream = new FileStream(pngCatPath, FileMode.Open, FileAccess.Read, FileShare.Read)) // { // stopwatchSingleThread.Start(); // Image img = Png.Load(filestream); // stopwatchSingleThread.Stop(); // img.ReleaseStruct(); // //filestream.Dispose(); // } // } // } // private static void WriteStreamJpegPerfTest(int numRuns) // { // Image _thisjpgcat = Jpg.Load(jpegCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("WriteJpegTest :" + i); // using (MemoryStream stream = new MemoryStream()) // { // stopwatchSingleThread.Start(); // Jpg.Write(_thisjpgcat, stream); // stopwatchSingleThread.Stop(); // } // } // _thisjpgcat.ReleaseStruct(); // } // private static void WriteStreamPngPerfTest(int numRuns) // { // Image _thispngcat = Jpg.Load(pngCatPath); // for (int i = 0; i < numRuns; i++) // { // //make sure it's going // if (i % 100 == 0) // Console.WriteLine("WritePngTest :" + i); // using (MemoryStream stream = new MemoryStream()) // { // stopwatchSingleThread.Start(); // Png.Write(_thispngcat, stream); // stopwatchSingleThread.Stop(); // } // } // _thispngcat.ReleaseStruct(); // } //#endif //}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using GitTfs.Commands;
using GitTfs.Core.TfsInterop;
using GitTfs.Util;

namespace GitTfs.Core
{
    /// <summary>
    /// One git-tfs remote: the binding between a TFS server-side path and the
    /// corresponding git ref, with the fetch/merge machinery that imports TFS
    /// changesets as git commits.
    /// </summary>
    public class GitTfsRemote : IGitTfsRemote
    {
        // Matches any path that contains a ".git" segment.
        private static readonly Regex isInDotGit = new Regex("(?:^|/)\\.git(?:/|$)", RegexOptions.Compiled);

        private readonly Globals _globals;
        private readonly RemoteOptions _remoteOptions;
        private readonly ConfigProperties _properties;
        // Lazily filled by InitHistory(); null means "history not read yet".
        private int? maxChangesetId;
        private string maxCommitHash;
        private bool isTfsAuthenticated;

        public RemoteInfo RemoteInfo { get; private set; }

        public GitTfsRemote(RemoteInfo info, IGitRepository repository, RemoteOptions remoteOptions, Globals globals, ITfsHelper tfsHelper, ConfigProperties properties)
        {
            _remoteOptions = remoteOptions;
            _globals = globals;
            _properties = properties;
            Tfs = tfsHelper;
            Repository = repository;

            RemoteInfo = info;
            Id = info.Id;
            TfsUrl = info.Url;
            TfsRepositoryPath = info.Repository;
            TfsUsername = info.Username;
            TfsPassword = info.Password;
            Aliases = (info.Aliases ?? Enumerable.Empty<string>()).ToArray();
            IgnoreRegexExpression = info.IgnoreRegex;
            IgnoreExceptRegexExpression = info.IgnoreExceptRegex;
            Autotag = info.Autotag;
            IsSubtree = CheckSubtree();
        }

        // Detects whether this remote's id matches the subtree naming convention
        // (RemoteSubtreeRegex); if so, records the owning remote id and prefix.
        private bool CheckSubtree()
        {
            var m = GitTfsConstants.RemoteSubtreeRegex.Match(Id);
            if (m.Success)
            {
                OwningRemoteId = m.Groups["owner"].Value;
                Prefix = m.Groups["prefix"].Value;
                return true;
            }
            return false;
        }

        /// <summary>Authenticates against TFS once; subsequent calls are no-ops.</summary>
        public void EnsureTfsAuthenticated()
        {
            if (isTfsAuthenticated)
                return;
            Tfs.EnsureAuthenticated();
            isTfsAuthenticated = true;
        }

        public bool IsDerived
        {
            get { return false; }
        }

        public int? GetInitialChangeset()
        {
            return _properties.InitialChangeset;
        }

        public void SetInitialChangeset(int? changesetId)
        {
            _properties.InitialChangeset = changesetId;
        }

        public bool IsSubtree { get; private set; }

        /// <summary>True when this remote owns subtrees instead of mapping a TFS path itself.</summary>
        public bool IsSubtreeOwner
        {
            get { return TfsRepositoryPath == null; }
        }

        public string Id { get; set; }

        // TfsUrl/TfsUsername/TfsPassword delegate to the underlying ITfsHelper.
        public string TfsUrl
        {
            get { return Tfs.Url; }
            set { Tfs.Url = value; }
        }

        private string[] Aliases { get; set; }

        public bool Autotag { get; set; }

        public string TfsUsername
        {
            get { return Tfs.Username; }
            set { Tfs.Username = value; }
        }

        public string TfsPassword
        {
            get { return Tfs.Password; }
            set { Tfs.Password = value; }
        }

        public string TfsRepositoryPath { get; set; }

        /// <summary>
        /// Gets the TFS server-side paths of all subtrees of this remote.
        /// Valid if the remote has subtrees, which occurs when <see cref="TfsRepositoryPath"/> is null.
        /// </summary>
        public string[] TfsSubtreePaths
        {
            get
            {
                // Cached after the first lookup.
                if (tfsSubtreePaths == null)
                    tfsSubtreePaths = Repository.GetSubtrees(this).Select(x => x.TfsRepositoryPath).ToArray();
                return tfsSubtreePaths;
            }
        }
        private string[] tfsSubtreePaths = null;

        public string IgnoreRegexExpression { get; set; }
        public string IgnoreExceptRegexExpression { get; set; }
        public IGitRepository Repository { get; set; }
        public ITfsHelper Tfs { get; set; }
        public string OwningRemoteId { get; private set; }
        public string Prefix { get; private set; }
        public bool ExportMetadatas { get; set; }
        public Dictionary<string, IExportWorkItem> ExportWorkitemsMapping { get; set; }

        // Highest TFS changeset id already imported; triggers lazy history init.
        public int MaxChangesetId
        {
            get { InitHistory(); return maxChangesetId.Value; }
            set { maxChangesetId = value; }
        }

        // Git commit sha corresponding to MaxChangesetId; triggers lazy history init.
        public string MaxCommitHash
        {
            get { InitHistory(); return maxCommitHash; }
            set { maxCommitHash = value; }
        }

        private TfsChangesetInfo GetTfsChangesetById(int id)
        {
            return Repository.GetTfsChangesetById(RemoteRef, id);
        }

        // Reads the most recent TFS changeset already fetched into the git
        // repository and caches it in maxChangesetId/maxCommitHash.
        private void InitHistory()
        {
            if (maxChangesetId == null)
            {
                var mostRecentUpdate = Repository.GetLastParentTfsCommits(RemoteRef).FirstOrDefault();
                if (mostRecentUpdate != null)
                {
                    MaxCommitHash = mostRecentUpdate.GitCommit;
                    MaxChangesetId = mostRecentUpdate.ChangesetId;
                }
                else
                {
                    MaxChangesetId = 0;
                    //Manage the special case where a .gitignore has been committed
                    try
                    {
                        var gitCommit = Repository.GetCommit(RemoteRef);
                        if (gitCommit != null)
                        {
                            MaxCommitHash = gitCommit.Sha;
                        }
                    }
                    catch (Exception)
                    {
                        // Best effort: the remote ref may not exist yet; that is not an error here.
                    }
                }
            }
        }

        private const string WorkspaceDirectory = "~w";

        // Resolves the on-disk TFS workspace directory, honoring the configured
        // override and the subtree layout (subtrees live under the owner's workspace).
        private string WorkingDirectory
        {
            get
            {
                var dir = Repository.GetConfig(GitTfsConstants.WorkspaceConfigKey);
                if (IsSubtree)
                {
                    if (dir != null)
                    {
                        return Path.Combine(dir, Prefix);
                    }
                    //find the relative path to the owning remote
                    return Ext.CombinePaths(_globals.GitDir, WorkspaceDirectory, OwningRemoteId, Prefix);
                }
                return dir ?? DefaultWorkingDirectory;
            }
        }

        private string DefaultWorkingDirectory
        {
            get { return Path.Combine(_globals.GitDir, WorkspaceDirectory); }
        }

        public void CleanupWorkspace()
        {
            Tfs.CleanupWorkspaces(WorkingDirectory);
        }

        /// <summary>
        /// Deletes the on-disk workspace directory, clearing read-only attributes
        /// first so the recursive delete cannot fail on TFS-marked files.
        /// Failures are logged, not thrown.
        /// </summary>
        public void CleanupWorkspaceDirectory()
        {
            try
            {
                if (Directory.Exists(WorkingDirectory))
                {
                    var allFiles = Directory.EnumerateFiles(WorkingDirectory, "*", SearchOption.AllDirectories);
                    foreach (var file in allFiles)
                        File.SetAttributes(file, File.GetAttributes(file) & ~FileAttributes.ReadOnly);
                    Directory.Delete(WorkingDirectory, true);
                }
            }
            catch (Exception ex)
            {
                Trace.WriteLine("CleanupWorkspaceDirectory: " + ex.Message);
            }
        }

        /// <summary>True when <paramref name="path"/> must not be imported (inside .git, or ignored).</summary>
        public bool ShouldSkip(string path)
        {
            return IsInDotGit(path) || IsIgnored(path);
        }

        private bool IsIgnored(string path)
        {
            return Ignorance.IsIncluded(path) || Repository.IsPathIgnored(path);
        }

        private Bouncer _ignorance;
        // Lazily builds the ignore/except matcher from both the remote-level and
        // command-line (RemoteOptions) regex patterns.
        private Bouncer Ignorance
        {
            get
            {
                if (_ignorance == null)
                {
                    _ignorance = new Bouncer();
                    _ignorance.Include(IgnoreRegexExpression);
                    _ignorance.Include(_remoteOptions.IgnoreRegex);
                    _ignorance.Exclude(IgnoreExceptRegexExpression);
                    _ignorance.Exclude(_remoteOptions.ExceptRegex);
                }
                return _ignorance;
            }
        }

        private bool IsInDotGit(string path)
        {
            return isInDotGit.IsMatch(path);
        }

        /// <summary>
        /// Translates a TFS server-side path into the relative path inside the git
        /// repository, or returns null when the path does not belong to this remote.
        /// For subtree owners the lookup is delegated to the matching subtree remote
        /// and its prefix is prepended.
        /// </summary>
        public string GetPathInGitRepo(string tfsPath)
        {
            if (tfsPath == null) return null;
            if (!IsSubtreeOwner)
            {
                if (!tfsPath.StartsWith(TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase)) return null;
                if (TfsRepositoryPath == GitTfsConstants.TfsRoot)
                {
                    tfsPath = tfsPath.Substring(TfsRepositoryPath.Length);
                }
                else
                {
                    // Reject prefix matches that are not on a path-segment boundary.
                    if (tfsPath.Length > TfsRepositoryPath.Length && tfsPath[TfsRepositoryPath.Length] != '/') return null;
                    tfsPath = tfsPath.Substring(TfsRepositoryPath.Length);
                }
            }
            else
            {
                //look through the subtrees
                var p = _globals.Repository.GetSubtrees(this)
                    .Where(x => x.IsSubtree)
                    .FirstOrDefault(x => tfsPath.StartsWith(x.TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase)
                        && (tfsPath.Length == x.TfsRepositoryPath.Length || tfsPath[x.TfsRepositoryPath.Length] == '/'));
                if (p == null) return null;
                tfsPath = p.GetPathInGitRepo(tfsPath);
                //we must prepend the prefix in order to get the correct directory
                if (tfsPath.StartsWith("/"))
                    tfsPath = p.Prefix + tfsPath;
                else
                    tfsPath = p.Prefix + "/" + tfsPath;
            }
            // Normalize to a relative path (no leading slashes).
            while (tfsPath.StartsWith("/"))
                tfsPath = tfsPath.Substring(1);
            return tfsPath;
        }

        /// <summary>Outcome of a fetch: success flag, progress counters, and rename-handling state.</summary>
        public class FetchResult : IFetchResult
        {
            public bool IsSuccess { get; set; }
            public int LastFetchedChangesetId { get; set; }
            public int NewChangesetCount { get; set; }
            public string ParentBranchTfsPath { get; set; }
            public bool IsProcessingRenameChangeset { get; set; }
            public string LastParentCommitBeforeRename { get; set; }
        }

        public IFetchResult Fetch(bool stopOnFailMergeCommit = false, int lastChangesetIdToFetch = -1, IRenameResult renameResult = null)
        {
            // -1 merge changeset id means "no merge commit to stitch in".
            return FetchWithMerge(-1, stopOnFailMergeCommit, lastChangesetIdToFetch, renameResult);
        }

        public IFetchResult FetchWithMerge(int mergeChangesetId, bool stopOnFailMergeCommit = false, IRenameResult renameResult = null, params string[] parentCommitsHashes)
        {
            return FetchWithMerge(mergeChangesetId, stopOnFailMergeCommit, -1, renameResult, parentCommitsHashes);
        }

        /// <summary>
        /// Fetches TFS changesets newer than <see cref="MaxChangesetId"/> and commits
        /// them to the remote ref. When <paramref name="mergeChangesetId"/> is reached,
        /// <paramref name="parentCommitsHashes"/> are added as extra commit parents.
        /// Returns early (with state recorded in the result) when a rename changeset
        /// is encountered, or on merge failure when <paramref name="stopOnFailMergeCommit"/> is set.
        /// </summary>
        public IFetchResult FetchWithMerge(int mergeChangesetId, bool stopOnFailMergeCommit = false, int lastChangesetIdToFetch = -1, IRenameResult renameResult = null, params string[] parentCommitsHashes)
        {
            var fetchResult = new FetchResult { IsSuccess = true, NewChangesetCount = 0 };
            var latestChangesetId = GetLatestChangesetId();
            if (lastChangesetIdToFetch != -1)
                latestChangesetId = Math.Min(latestChangesetId, lastChangesetIdToFetch);
            // TFS 2010 doesn't like when we ask for history past its last changeset.
            if (MaxChangesetId >= latestChangesetId)
                return fetchResult;
            bool fetchRetrievedChangesets;
            do
            {
                var fetchedChangesets = FetchChangesets(true, lastChangesetIdToFetch);
                var objects = BuildEntryDictionary();
                fetchRetrievedChangesets = false;
                foreach (var changeset in fetchedChangesets)
                {
                    fetchRetrievedChangesets = true;
                    fetchResult.NewChangesetCount++;
                    if (lastChangesetIdToFetch > 0 && changeset.Summary.ChangesetId > lastChangesetIdToFetch)
                        return fetchResult;
                    string parentCommitSha = null;
                    if (changeset.IsMergeChangeset && !ProcessMergeChangeset(changeset, stopOnFailMergeCommit, ref parentCommitSha))
                    {
                        fetchResult.NewChangesetCount--; // Merge wasn't successful - so don't count the changeset we found
                        fetchResult.IsSuccess = false;
                        return fetchResult;
                    }
                    // While replaying a rename, parent onto the pre-rename commit instead of HEAD.
                    var parentSha = (renameResult != null && renameResult.IsProcessingRenameChangeset) ? renameResult.LastParentCommitBeforeRename : MaxCommitHash;
                    var isFirstCommitInRepository = (parentSha == null);
                    var log = Apply(parentSha, changeset, objects);
                    if (changeset.IsRenameChangeset && !isFirstCommitInRepository)
                    {
                        if (renameResult == null || !renameResult.IsProcessingRenameChangeset)
                        {
                            // Hand control back to the caller so it can restart processing in rename mode.
                            fetchResult.IsProcessingRenameChangeset = true;
                            fetchResult.LastParentCommitBeforeRename = MaxCommitHash;
                            return fetchResult;
                        }
                        renameResult.IsProcessingRenameChangeset = false;
                        renameResult.LastParentCommitBeforeRename = null;
                    }
                    if (parentCommitSha != null)
                        log.CommitParents.Add(parentCommitSha);
                    if (changeset.Summary.ChangesetId == mergeChangesetId)
                    {
                        foreach (var parent in parentCommitsHashes)
                            log.CommitParents.Add(parent);
                    }
                    var commitSha = ProcessChangeset(changeset, log);
                    fetchResult.LastFetchedChangesetId = changeset.Summary.ChangesetId;
                    // set commit sha for added git objects
                    foreach (var commit in objects)
                    {
                        if (commit.Value.Commit == null)
                            commit.Value.Commit = commitSha;
                    }
                    DoGcIfNeeded();
                }
            } while (fetchRetrievedChangesets && latestChangesetId > fetchResult.LastFetchedChangesetId);
            return fetchResult;
        }

        // TFS item paths are case-insensitive, hence the case-insensitive key comparer.
        private Dictionary<string, GitObject> BuildEntryDictionary()
        {
            return new Dictionary<string, GitObject>(StringComparer.InvariantCultureIgnoreCase);
        }

        // Resolves the git parent commit for a TFS merge changeset, fetching the
        // source branch if needed. Returns false only when the parent cannot be
        // determined AND stopOnFailMergeCommit is set; otherwise degrades to a
        // plain (non-merge) commit with a warning.
        private bool ProcessMergeChangeset(ITfsChangeset changeset, bool stopOnFailMergeCommit, ref string parentCommit)
        {
            if (!Tfs.CanGetBranchInformation)
            {
                Trace.TraceInformation("info: this changeset " + changeset.Summary.ChangesetId
                    + " is a merge changeset. But was not treated as is because this version of TFS can't manage branches...");
            }
            else if (!IsIgnoringBranches())
            {
                var parentChangesetId = Tfs.FindMergeChangesetParent(TfsRepositoryPath, changeset.Summary.ChangesetId, this);
                if (parentChangesetId < 1) // Handle missing merge parent info
                {
                    if (stopOnFailMergeCommit)
                    {
                        return false;
                    }
                    Trace.TraceInformation("warning: this changeset " + changeset.Summary.ChangesetId
                        + " is a merge changeset. But git-tfs is unable to determine the parent changeset.");
                    return true;
                }
                var shaParent = Repository.FindCommitHashByChangesetId(parentChangesetId);
                if (shaParent == null)
                {
                    // Parent not in this repository yet: locate its remote and fetch it.
                    string omittedParentBranch;
                    shaParent = FindMergedRemoteAndFetch(parentChangesetId, stopOnFailMergeCommit, out omittedParentBranch);
                    changeset.OmittedParentBranch = omittedParentBranch;
                }
                if (shaParent != null)
                {
                    parentCommit = shaParent;
                }
                else
                {
                    if (stopOnFailMergeCommit)
                        return false;
                    Trace.TraceInformation("warning: this changeset " + changeset.Summary.ChangesetId
                        + " is a merge changeset. But git-tfs failed to find and fetch the parent changeset "
                        + parentChangesetId + ". Parent changeset will be ignored...");
                }
            }
            else
            {
                Trace.TraceInformation("info: this changeset " + changeset.Summary.ChangesetId
                    + " is a merge changeset. But was not treated as is because of your git setting...");
                changeset.OmittedParentBranch = ";C" + changeset.Summary.ChangesetId;
            }
            return true;
        }

        /// <summary>
        /// Reads the git-tfs "ignore branches" config value; when unset, auto-detects
        /// it (fewer than two TFS remotes means branch support is disabled), persists
        /// the detected value, and returns it.
        /// </summary>
        public bool IsIgnoringBranches()
        {
            var value = Repository.GetConfig<string>(GitTfsConstants.IgnoreBranches, null);
            bool isIgnoringBranches;
            if (value != null && bool.TryParse(value, out isIgnoringBranches))
                return isIgnoringBranches;
            Trace.TraceInformation("warning: no value found for branch management setting '" + GitTfsConstants.IgnoreBranches + "'...");
            var isIgnoringBranchesDetected = Repository.ReadAllTfsRemotes().Count() < 2;
            Trace.TraceInformation("=> Branch support " + (isIgnoringBranchesDetected ? "disabled!" : "enabled!"));
            if (isIgnoringBranchesDetected)
                Trace.TraceInformation(" if you want to enable branch support, use the command:" + Environment.NewLine
                    + " git config --local " + GitTfsConstants.IgnoreBranches + " false");
            _globals.Repository.SetConfig(GitTfsConstants.IgnoreBranches, isIgnoringBranchesDetected);
            return isIgnoringBranchesDetected;
        }

        // Commits the changeset (optionally enriching the log message with exported
        // metadata: work items, policy-override comment, check-in notes), updates the
        // TFS head ref, then builds the git-notes metadata text.
        // NOTE(review): method is truncated at the end of this chunk; the remainder
        // continues past the visible source.
        private string ProcessChangeset(ITfsChangeset changeset, LogEntry log)
        {
            if (ExportMetadatas)
            {
                if (changeset.Summary.Workitems.Any())
                {
                    // Work item ids may be remapped (ExportWorkitemsMapping) before export.
                    var workItems = TranslateWorkItems(changeset.Summary.Workitems.Select(wi => new ExportWorkItem(wi)));
                    if (workItems != null)
                    {
                        log.Log += "\nWorkitems:";
                        foreach (var workItem in workItems)
                        {
                            log.Log += "\n#" + workItem.Id + " " + workItem.Title;
                        }
                    }
                }
                if (!string.IsNullOrWhiteSpace(changeset.Summary.PolicyOverrideComment))
                    log.Log += "\n" + GitTfsConstants.GitTfsPolicyOverrideCommentPrefix + " " + changeset.Summary.PolicyOverrideComment;
                foreach (var checkinNote in changeset.Summary.CheckinNotes)
                {
                    if (!string.IsNullOrWhiteSpace(checkinNote.Name) && !string.IsNullOrWhiteSpace(checkinNote.Value))
                        log.Log += "\n" + GitTfsConstants.GitTfsPrefix + "-" + CamelCaseToDelimitedStringConverter.Convert(checkinNote.Name, "-") + ": " + checkinNote.Value;
                }
            }
            var commitSha = Commit(log);
            UpdateTfsHead(commitSha, changeset.Summary.ChangesetId);
            StringBuilder metadatas = new StringBuilder();
            if (changeset.Summary.Workitems.Any())
            {
                string workitemNote = "Workitems:\n";
                foreach (var workitem in changeset.Summary.Workitems)
                {
                    var workitemId = workitem.Id.ToString();
                    var workitemUrl = workitem.Url;
                    if (ExportMetadatas && ExportWorkitemsMapping.Count != 0)
                    {
                        if (ExportWorkitemsMapping.ContainsKey(workitemId))
                        {
                            var oldWorkitemId = workitemId;
                            workitemId = ExportWorkitemsMapping[workitemId].Id;
                            workitemUrl = workitemUrl.Replace(oldWorkitemId, workitemId);
                        }
                    }
                    workitemNote += string.Format("[{0}] {1}\n {2}\n", workitemId, workitem.Title, workitemUrl);
                }
                metadatas.Append(workitemNote);
            }
            if
(!string.IsNullOrWhiteSpace(changeset.Summary.PolicyOverrideComment)) metadatas.Append("\nPolicy Override Comment: " + changeset.Summary.PolicyOverrideComment); foreach (var checkinNote in changeset.Summary.CheckinNotes) { if (!string.IsNullOrWhiteSpace(checkinNote.Name) && !string.IsNullOrWhiteSpace(checkinNote.Value)) metadatas.Append("\n" + checkinNote.Name + ": " + checkinNote.Value); } if (!string.IsNullOrWhiteSpace(changeset.OmittedParentBranch)) metadatas.Append("\nOmitted parent branch: " + changeset.OmittedParentBranch); if (metadatas.Length != 0) Repository.CreateNote(commitSha, metadatas.ToString(), log.AuthorName, log.AuthorEmail, log.Date); return commitSha; } private IEnumerable<IExportWorkItem> TranslateWorkItems(IEnumerable<IExportWorkItem> workItemsOriginal) { if (ExportWorkitemsMapping.Count == 0) return workItemsOriginal; List<IExportWorkItem> workItemsTranslated = new List<IExportWorkItem>(); if (workItemsOriginal == null) return workItemsTranslated; foreach (var oldWorkItemId in workItemsOriginal) { IExportWorkItem translatedWorkItem = null; if (oldWorkItemId != null && !ExportWorkitemsMapping.TryGetValue(oldWorkItemId.Id, out translatedWorkItem)) translatedWorkItem = oldWorkItemId; if (translatedWorkItem != null) workItemsTranslated.Add(translatedWorkItem); } return workItemsTranslated; } private string FindRootRemoteAndFetch(int parentChangesetId, IRenameResult renameResult = null) { string omittedParentBranch; return FindRemoteAndFetch(parentChangesetId, false, false, renameResult, out omittedParentBranch); } private string FindMergedRemoteAndFetch(int parentChangesetId, bool stopOnFailMergeCommit, out string omittedParentBranch) { return FindRemoteAndFetch(parentChangesetId, false, true, null, out omittedParentBranch); } private string FindRemoteAndFetch(int parentChangesetId, bool stopOnFailMergeCommit, bool mergeChangeset, IRenameResult renameResult, out string omittedParentBranch) { var tfsRemote = 
FindOrInitTfsRemoteOfChangeset(parentChangesetId, mergeChangeset, renameResult, out omittedParentBranch); if (tfsRemote != null && string.Compare(tfsRemote.TfsRepositoryPath, TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase) != 0) { Trace.TraceInformation("\tFetching from dependent TFS remote '{0}'...", tfsRemote.Id); try { var fetchResult = ((GitTfsRemote)tfsRemote).FetchWithMerge(-1, stopOnFailMergeCommit, parentChangesetId, renameResult); } finally { Trace.WriteLine("Cleaning..."); tfsRemote.CleanupWorkspaceDirectory(); if (tfsRemote.Repository.IsBare) tfsRemote.Repository.UpdateRef(GitRepository.ShortToLocalName(tfsRemote.Id), tfsRemote.MaxCommitHash); } return Repository.FindCommitHashByChangesetId(parentChangesetId); } return null; } private IGitTfsRemote FindOrInitTfsRemoteOfChangeset(int parentChangesetId, bool mergeChangeset, IRenameResult renameResult, out string omittedParentBranch) { omittedParentBranch = null; IGitTfsRemote tfsRemote; IChangeset parentChangeset = Tfs.GetChangeset(parentChangesetId); //I think you want something that uses GetPathInGitRepo and ShouldSkip. See TfsChangeset.Apply. //Don't know if there is a way to extract remote tfs repository path from changeset datas! Should be better!!! var remote = Repository.ReadAllTfsRemotes().FirstOrDefault(r => parentChangeset.Changes.Any(c => r.GetPathInGitRepo(c.Item.ServerItem) != null)); if (remote != null) tfsRemote = remote; else { // If the changeset has created multiple folders, the expected branch folder will not always be the first // so we scan all the changes of type folder to try to detect the first one which is a branch. // In most cases it will change nothing: the first folder is the good one IBranchObject tfsBranch = null; string tfsPath = null; var allBranches = Tfs.GetBranches(true); foreach (var change in parentChangeset.Changes) { tfsPath = change.Item.ServerItem; tfsPath = tfsPath.EndsWith("/") ? 
tfsPath : tfsPath + "/"; tfsBranch = allBranches.SingleOrDefault(b => tfsPath.StartsWith(b.Path.EndsWith("/") ? b.Path : b.Path + "/")); if (tfsBranch != null) { // we found a branch, we stop here break; } } var filterRegex = Repository.GetConfig(GitTfsConstants.IgnoreBranchesRegex); if (mergeChangeset && tfsBranch != null && !string.IsNullOrEmpty(filterRegex) && Regex.IsMatch(tfsBranch.Path, filterRegex, RegexOptions.IgnoreCase)) { Trace.TraceInformation("warning: skip filtered branch for path " + tfsBranch.Path + " (regex:" + filterRegex + ")"); tfsRemote = null; omittedParentBranch = tfsBranch.Path + ";C" + parentChangesetId; } else if (mergeChangeset && tfsBranch != null && string.Equals(Repository.GetConfig(GitTfsConstants.IgnoreNotInitBranches), true.ToString(), StringComparison.InvariantCultureIgnoreCase)) { Trace.TraceInformation("warning: skip not initialized branch for path " + tfsBranch.Path); tfsRemote = null; omittedParentBranch = tfsBranch.Path + ";C" + parentChangesetId; } else if (tfsBranch == null) { Trace.TraceInformation("error: branch not found. 
Verify that all the folders have been converted to branches (or something else :().\n\tpath {0}", tfsPath); tfsRemote = null; omittedParentBranch = ";C" + parentChangesetId; } else { tfsRemote = InitTfsRemoteOfChangeset(tfsBranch, parentChangeset.ChangesetId, renameResult); if (tfsRemote == null) omittedParentBranch = tfsBranch.Path + ";C" + parentChangesetId; } } return tfsRemote; } private IGitTfsRemote InitTfsRemoteOfChangeset(IBranchObject tfsBranch, int parentChangesetId, IRenameResult renameResult = null) { if (tfsBranch.IsRoot) { return InitTfsBranch(_remoteOptions, tfsBranch.Path); } var branchesDatas = Tfs.GetRootChangesetForBranch(tfsBranch.Path, parentChangesetId); IGitTfsRemote remote = null; foreach (var branch in branchesDatas) { var rootChangesetId = branch.SourceBranchChangesetId; remote = InitBranch(_remoteOptions, tfsBranch.Path, rootChangesetId, true); if (remote == null) { Trace.TraceInformation("warning: root commit not found corresponding to changeset " + rootChangesetId); Trace.TraceInformation("=> continuing anyway by creating a branch without parent..."); return InitTfsBranch(_remoteOptions, tfsBranch.Path); } if (branch.IsRenamedBranch) { try { remote.Fetch(renameResult: renameResult); } finally { Trace.WriteLine("Cleaning..."); remote.CleanupWorkspaceDirectory(); if (remote.Repository.IsBare) remote.Repository.UpdateRef(GitRepository.ShortToLocalName(remote.Id), remote.MaxCommitHash); } } } return remote; } public void QuickFetch(int changesetId, bool ignoreRestricted, bool printRestrictionHint) { try { ITfsChangeset changeset; if (changesetId < 0) changeset = GetLatestChangeset(); else changeset = Tfs.GetChangeset(changesetId, this); quickFetch(changeset); } catch (Exception ex) { Trace.WriteLine("Quick fetch failed: " + ex.Message); if (!IgnoreException(ex.Message, ignoreRestricted, printRestrictionHint)) throw; } } private void quickFetch(ITfsChangeset changeset) { var log = CopyTree(MaxCommitHash, changeset); 
UpdateTfsHead(Commit(log), changeset.Summary.ChangesetId); DoGcIfNeeded(); } private IEnumerable<ITfsChangeset> FetchChangesets(bool byLots, int lastVersion = -1) { int lowerBoundChangesetId; // If we're starting at the Root side of a branch commit (e.g. C1), but there ar // invalid commits between C1 and the actual branch side of the commit operation // (e.g. a Folder with the branch name was created [C2] and then deleted [C3], // then the root-side was branched [C4; C1 --branch--> C4]), this will detecte // only the folder creation and deletion operations due to the lowerBound being // detected as the root-side of the commit +1 (C1+1=C2) instead of referencing // the branch-side of the branching operation [C4]. if (_properties.InitialChangeset.HasValue) lowerBoundChangesetId = Math.Max(MaxChangesetId + 1, _properties.InitialChangeset.Value); else lowerBoundChangesetId = MaxChangesetId + 1; Trace.WriteLine(RemoteRef + ": Getting changesets from " + lowerBoundChangesetId + " to " + lastVersion + " ...", "info"); if (!IsSubtreeOwner) return Tfs.GetChangesets(TfsRepositoryPath, lowerBoundChangesetId, this, lastVersion, byLots); return _globals.Repository.GetSubtrees(this) .SelectMany(x => Tfs.GetChangesets(x.TfsRepositoryPath, lowerBoundChangesetId, x, lastVersion, byLots)) .OrderBy(x => x.Summary.ChangesetId); } public ITfsChangeset GetChangeset(int changesetId) { return Tfs.GetChangeset(changesetId, this); } private ITfsChangeset GetLatestChangeset() { if (!string.IsNullOrEmpty(TfsRepositoryPath)) return Tfs.GetLatestChangeset(this); var changesetId = _globals.Repository.GetSubtrees(this).Select(x => Tfs.GetLatestChangeset(x)).Max(x => x.Summary.ChangesetId); return GetChangeset(changesetId); } private int GetLatestChangesetId() { if (!string.IsNullOrEmpty(TfsRepositoryPath)) return Tfs.GetLatestChangesetId(this); return _globals.Repository.GetSubtrees(this).Select(x => Tfs.GetLatestChangesetId(x)).Max(); } public void UpdateTfsHead(string commitHash, int 
changesetId) { MaxCommitHash = commitHash; MaxChangesetId = changesetId; Repository.UpdateRef(RemoteRef, MaxCommitHash, "C" + MaxChangesetId); if (Autotag) Repository.UpdateRef(TagPrefix + "C" + MaxChangesetId, MaxCommitHash); LogCurrentMapping(); } private void LogCurrentMapping() { Trace.TraceInformation("C" + MaxChangesetId + " = " + MaxCommitHash); } private string TagPrefix { get { return "refs/tags/tfs/" + Id + "/"; } } public string RemoteRef { get { return "refs/remotes/tfs/" + Id; } } private void DoGcIfNeeded() { Trace.WriteLine("GC Countdown: " + _globals.GcCountdown); if (--_globals.GcCountdown < 0) { _globals.GcCountdown = _globals.GcPeriod; Repository.GarbageCollect(true, "Try running it after git-tfs is finished."); } } private LogEntry Apply(string parent, ITfsChangeset changeset, IDictionary<string, GitObject> entries) { return Apply(parent, changeset, entries, null); } private LogEntry Apply(string parent, ITfsChangeset changeset, Action<Exception> ignorableErrorHandler) { return Apply(parent, changeset, BuildEntryDictionary(), ignorableErrorHandler); } private LogEntry Apply(string parent, ITfsChangeset changeset, IDictionary<string, GitObject> entries, Action<Exception> ignorableErrorHandler) { LogEntry result = null; WithWorkspace(changeset.Summary, workspace => { var treeBuilder = workspace.Remote.Repository.GetTreeBuilder(parent); result = changeset.Apply(parent, treeBuilder, workspace, entries, ignorableErrorHandler); result.Tree = treeBuilder.GetTree(); }); if (!string.IsNullOrEmpty(parent)) result.CommitParents.Add(parent); return result; } private LogEntry CopyTree(string lastCommit, ITfsChangeset changeset) { LogEntry result = null; WithWorkspace(changeset.Summary, workspace => { var treeBuilder = workspace.Remote.Repository.GetTreeBuilder(null); result = changeset.CopyTree(treeBuilder, workspace); result.Tree = treeBuilder.GetTree(); }); if (!string.IsNullOrEmpty(lastCommit)) result.CommitParents.Add(lastCommit); return result; } 
/// <summary>
/// Commits the log entry after appending the git-tfs changeset trailer to its message.
/// </summary>
/// <returns>The sha of the new commit.</returns>
private string Commit(LogEntry logEntry)
{
    logEntry.Log = BuildCommitMessage(logEntry.Log, logEntry.ChangesetId);
    return Repository.Commit(logEntry).Sha;
}

/// <summary>
/// Builds the final commit message: the TFS checkin comment followed by the
/// git-tfs-id trailer line (TfsCommitInfoFormat with url, repository path and changeset id).
/// </summary>
private string BuildCommitMessage(string tfsCheckinComment, int changesetId)
{
    var builder = new StringWriter();
    builder.WriteLine(tfsCheckinComment);
    builder.WriteLine(GitTfsConstants.TfsCommitInfoFormat, TfsUrl, TfsRepositoryPath, changesetId);
    return builder.ToString();
}

/// <summary>
/// Applies a TFS shelveset on top of its base changeset's commit (or the current commit
/// with <paramref name="force"/> when the base is unknown) and points a new local branch at the result.
/// </summary>
/// <exception cref="GitTfsException">Destination branch exists, or base changeset not fetched and force not set.</exception>
public void Unshelve(string shelvesetOwner, string shelvesetName, string destinationBranch, Action<Exception> ignorableErrorHandler, bool force)
{
    var destinationRef = GitRepository.ShortToLocalName(destinationBranch);
    if (Repository.HasRef(destinationRef))
        throw new GitTfsException("ERROR: Destination branch (" + destinationBranch + ") already exists!");

    var shelvesetChangeset = Tfs.GetShelvesetData(this, shelvesetOwner, shelvesetName);

    var parentId = shelvesetChangeset.BaseChangesetId;
    var ch = GetTfsChangesetById(parentId);
    string rootCommit;
    if (ch == null)
    {
        if (!force)
            throw new GitTfsException("ERROR: Parent changeset C" + parentId + " not found.",
                new[] { "Try fetching the latest changes from TFS", "Try applying the shelveset on the currently checkouted commit using the '--force' option" });
        Trace.TraceInformation("warning: Parent changeset C" + parentId + " not found." + " Trying to apply the shelveset on the current commit...");
        rootCommit = Repository.GetCurrentCommit();
    }
    else
    {
        rootCommit = ch.GitCommit;
    }
    var log = Apply(rootCommit, shelvesetChangeset, ignorableErrorHandler);
    var commit = Commit(log);
    Repository.UpdateRef(destinationRef, commit, "Shelveset " + shelvesetName + " from " + shelvesetOwner);
}

/// <summary>Shelves the commits between the parent changeset's commit and <paramref name="head"/> into a TFS shelveset.</summary>
public void Shelve(string shelvesetName, string head, TfsChangesetInfo parentChangeset, CheckinOptions options, bool evaluateCheckinPolicies)
{
    WithWorkspace(parentChangeset, workspace => Shelve(shelvesetName, head, parentChangeset, options, evaluateCheckinPolicies, workspace));
}

/// <summary>Returns true if a shelveset with the given name already exists on the server.</summary>
public bool HasShelveset(string shelvesetName)
{
    return Tfs.HasShelveset(shelvesetName);
}

// Workspace-scoped half of Shelve: pends the git changes then shelves them.
private void Shelve(string shelvesetName, string head, TfsChangesetInfo parentChangeset, CheckinOptions options, bool evaluateCheckinPolicies, ITfsWorkspace workspace)
{
    PendChangesToWorkspace(head, parentChangeset.GitCommit, workspace);
    workspace.Shelve(shelvesetName, evaluateCheckinPolicies, options, () => Repository.GetCommitMessage(head, parentChangeset.GitCommit));
}

/// <summary>Checks in via the interactive TFS checkin dialog.</summary>
/// <returns>The id of the created changeset.</returns>
public int CheckinTool(string head, TfsChangesetInfo parentChangeset)
{
    var changeset = 0;
    WithWorkspace(parentChangeset, workspace => changeset = CheckinTool(head, parentChangeset, workspace));
    return changeset;
}

// Workspace-scoped half of CheckinTool.
private int CheckinTool(string head, TfsChangesetInfo parentChangeset, ITfsWorkspace workspace)
{
    PendChangesToWorkspace(head, parentChangeset.GitCommit, workspace);
    return workspace.CheckinTool(() => Repository.GetCommitMessage(head, parentChangeset.GitCommit));
}

/// <summary>
/// Pends every git file change between <paramref name="parent"/> and <paramref name="head"/>
/// into the workspace; DirectoryTidier removes directories emptied by the changes on dispose.
/// </summary>
private void PendChangesToWorkspace(string head, string parent, ITfsWorkspaceModifier workspace)
{
    using (var tidyWorkspace = new DirectoryTidier(workspace, () => GetLatestChangeset().GetFullTree()))
    {
        foreach (var change in Repository.GetChangedFiles(parent, head))
        {
            change.Apply(tidyWorkspace);
        }
    }
}

/// <summary>Checks in <paramref name="head"/> against the parent changeset's commit.</summary>
/// <returns>The id of the created changeset.</returns>
public int Checkin(string head, TfsChangesetInfo parentChangeset, CheckinOptions options, string sourceTfsPath = null)
{
    var changeset = 0;
    WithWorkspace(parentChangeset, workspace => changeset = Checkin(head, parentChangeset.GitCommit, workspace, options, sourceTfsPath));
    return changeset;
}

/// <summary>Checks in <paramref name="head"/> against an explicit parent commit.</summary>
/// <returns>The id of the created changeset.</returns>
public int Checkin(string head, string parent, TfsChangesetInfo parentChangeset, CheckinOptions options, string sourceTfsPath = null)
{
    var changeset = 0;
    WithWorkspace(parentChangeset, workspace => changeset = Checkin(head, parent, workspace, options, sourceTfsPath));
    return changeset;
}

/// <summary>
/// Runs <paramref name="action"/> inside a TFS workspace, using the multi-subtree
/// overload when this remote has subtrees.
/// </summary>
private void WithWorkspace(TfsChangesetInfo parentChangeset, Action<ITfsWorkspace> action)
{
    //are there any subtrees?
    var subtrees = _globals.Repository.GetSubtrees(this);
    if (subtrees.Any())
    {
        Tfs.WithWorkspace(WorkingDirectory, this, subtrees.Select(x => new Tuple<string, string>(x.TfsRepositoryPath, x.Prefix)), parentChangeset, action);
    }
    else
    {
        Tfs.WithWorkspace(WorkingDirectory, this, parentChangeset, action);
    }
}

// Workspace-scoped half of Checkin: pends changes, optionally pends a TFS merge
// from sourceTfsPath, then checks in.
private int Checkin(string head, string parent, ITfsWorkspace workspace, CheckinOptions options, string sourceTfsPath)
{
    PendChangesToWorkspace(head, parent, workspace);
    if (!string.IsNullOrWhiteSpace(sourceTfsPath))
        workspace.Merge(sourceTfsPath, TfsRepositoryPath);
    return workspace.Checkin(options, () => Repository.GetCommitMessage(head, parent));
}

/// <summary>True when this remote points at the given TFS url (or an alias) and repository path.</summary>
public bool MatchesUrlAndRepositoryPath(string tfsUrl, string tfsRepositoryPath)
{
    if (!MatchesTfsUrl(tfsUrl))
        return false;

    if (TfsRepositoryPath == null)
        return tfsRepositoryPath == null;

    return TfsRepositoryPath.Equals(tfsRepositoryPath, StringComparison.OrdinalIgnoreCase);
}

/// <summary>Deletes a shelveset from the TFS server.</summary>
public void DeleteShelveset(string shelvesetName)
{
    WithWorkspace(null, workspace => workspace.DeleteShelveset(shelvesetName));
}

// Case-insensitive match against the primary url or any configured alias.
private bool MatchesTfsUrl(string tfsUrl)
{
    return TfsUrl.Equals(tfsUrl, StringComparison.OrdinalIgnoreCase) || Aliases.Contains(tfsUrl, StringComparison.OrdinalIgnoreCase);
}

/// <summary>
/// Derives and validates the local git branch name from a TFS repository path,
/// prefixing the team project name only when the path is in a different team project.
/// </summary>
private string ExtractGitBranchNameFromTfsRepositoryPath(string tfsRepositoryPath)
{
    var includeTeamProjectName = !Repository.IsInSameTeamProjectAsDefaultRepository(tfsRepositoryPath);
    var gitBranchName = tfsRepositoryPath.ToGitBranchNameFromTfsRepositoryPath(includeTeamProjectName);
    gitBranchName = Repository.AssertValidBranchName(gitBranchName);
    Trace.TraceInformation("The name of the local branch will be : " + gitBranchName);
    return gitBranchName;
}

/// <summary>Public entry point for initializing a remote for a TFS branch. See <see cref="InitTfsBranch"/>.</summary>
public IGitTfsRemote InitBranch(RemoteOptions remoteOptions, string tfsRepositoryPath, int rootChangesetId, bool fetchParentBranch, string gitBranchNameExpected = null, IRenameResult renameResult = null)
{
    return InitTfsBranch(remoteOptions, tfsRepositoryPath, rootChangesetId, fetchParentBranch, gitBranchNameExpected, renameResult);
}

/// <summary>
/// Decides whether a TFS access-denied error should be swallowed.
/// </summary>
/// <returns>true only for TF14098 errors when <paramref name="ignoreRestricted"/> is set.</returns>
private bool IgnoreException(string message, bool ignoreRestricted, bool printHint = true)
{
    // Detect exception "TF14098: Access Denied: User ??? needs
    // Read permission(s) for at least one item in changeset ???."
    if (message.Contains("TF14098"))
    {
        if (ignoreRestricted)
            return true;
        if (printHint)
            Trace.TraceWarning("\nAccess to changeset denied. Try the '--ignore-restricted-changesets' option!\n");
    }
    return false;
}

/// <summary>
/// Creates (or reuses) the git-tfs remote for a TFS branch: derives the branch name,
/// resolves/fetches the root commit when requested, creates the remote config and the
/// remote ref. Returns null when a required root commit cannot be found.
/// </summary>
private IGitTfsRemote InitTfsBranch(RemoteOptions remoteOptions, string tfsRepositoryPath, int rootChangesetId = -1, bool fetchParentBranch = false, string gitBranchNameExpected = null, IRenameResult renameResult = null, bool ignoreRestricted = false)
{
    Trace.WriteLine("Begin process of creating branch for remote :" + tfsRepositoryPath);
    // TFS string representations of repository paths do not end in trailing slashes
    tfsRepositoryPath = (tfsRepositoryPath ?? string.Empty).TrimEnd('/');

    string gitBranchName = ExtractGitBranchNameFromTfsRepositoryPath(string.IsNullOrWhiteSpace(gitBranchNameExpected) ? tfsRepositoryPath : gitBranchNameExpected);
    if (string.IsNullOrWhiteSpace(gitBranchName))
        throw new GitTfsException("error: The Git branch name '" + gitBranchName + "' is not valid...\n");
    Trace.WriteLine("Git local branch will be :" + gitBranchName);

    string sha1RootCommit = null;
    if (rootChangesetId != -1)
    {
        sha1RootCommit = Repository.FindCommitHashByChangesetId(rootChangesetId);
        if (fetchParentBranch && string.IsNullOrWhiteSpace(sha1RootCommit))
        {
            try
            {
                sha1RootCommit = FindRootRemoteAndFetch(rootChangesetId, renameResult);
            }
            catch (Exception ex)
            {
                Trace.WriteLine("Getting changeset fetch failed: " + ex.Message);
                if (!IgnoreException(ex.Message, ignoreRestricted))
                    throw;
            }
        }
        if (string.IsNullOrWhiteSpace(sha1RootCommit))
            return null;

        Trace.WriteLine("Found commit " + sha1RootCommit + " for changeset :" + rootChangesetId);
    }

    IGitTfsRemote tfsRemote;
    if (Repository.HasRemote(gitBranchName))
    {
        Trace.WriteLine("Remote already exist");
        tfsRemote = Repository.ReadTfsRemote(gitBranchName);
        if (tfsRemote.TfsUrl != TfsUrl)
            Trace.WriteLine("warning: Url is different");
        if (tfsRemote.TfsRepositoryPath != tfsRepositoryPath)
            Trace.WriteLine("warning: TFS repository path is different");
    }
    else
    {
        Trace.WriteLine("Try creating remote...");
        tfsRemote = Repository.CreateTfsRemote(new RemoteInfo
        {
            Id = gitBranchName,
            Url = TfsUrl,
            Repository = tfsRepositoryPath,
            RemoteOptions = remoteOptions
        }, string.Empty);
        tfsRemote.ExportMetadatas = ExportMetadatas;
        tfsRemote.ExportWorkitemsMapping = ExportWorkitemsMapping;
    }
    if (sha1RootCommit != null && !Repository.HasRef(tfsRemote.RemoteRef))
    {
        if (!Repository.CreateBranch(tfsRemote.RemoteRef, sha1RootCommit))
            throw new GitTfsException("error: Fail to create remote branch ref file!");
    }
    Trace.WriteLine("Remote created!");
    return tfsRemote;
}
}
}
using System;
using System.Data;
using Csla;
using Csla.Data;
using ParentLoad.DataAccess;
using ParentLoad.DataAccess.ERCLevel;

namespace ParentLoad.Business.ERCLevel
{
    /// <summary>
    /// B06_Country (editable child object).<br/>
    /// This is a generated base class of <see cref="B06_Country"/> business object.
    /// </summary>
    /// <remarks>
    /// This class contains one child collection:<br/>
    /// - <see cref="B07_RegionObjects"/> of type <see cref="B07_RegionColl"/> (1:M relation to <see cref="B08_Region"/>)<br/>
    /// This class is an item of <see cref="B05_CountryColl"/> collection.
    /// </remarks>
    [Serializable]
    public partial class B06_Country : BusinessBase<B06_Country>
    {

        #region Static Fields

        // Source of temporary negative ids handed to new, not-yet-inserted objects
        // (decremented in Child_Create; replaced by the database id in Child_Insert).
        private static int _lastID;

        #endregion

        #region State Fields

        // Foreign key to the parent B04_SubContinent row; populated from the
        // "Parent_SubContinent_ID" column in Fetch. Not undoable, not serialized.
        [NotUndoable]
        [NonSerialized]
        internal int parent_SubContinent_ID = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Country_ID"/> property.
        /// </summary>
        public static readonly PropertyInfo<int> Country_IDProperty = RegisterProperty<int>(p => p.Country_ID, "Country ID");
        /// <summary>
        /// Gets the Country ID.
        /// </summary>
        /// <value>The Country ID.</value>
        public int Country_ID
        {
            get { return GetProperty(Country_IDProperty); }
        }

        /// <summary>
        /// Maintains metadata about <see cref="Country_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Country_NameProperty = RegisterProperty<string>(p => p.Country_Name, "Country Name");
        /// <summary>
        /// Gets or sets the Country Name.
        /// </summary>
        /// <value>The Country Name.</value>
        public string Country_Name
        {
            get { return GetProperty(Country_NameProperty); }
            set { SetProperty(Country_NameProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="B07_Country_SingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<B07_Country_Child> B07_Country_SingleObjectProperty = RegisterProperty<B07_Country_Child>(p => p.B07_Country_SingleObject, "B07 Country Single Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the B07 Country Single Object ("parent load" child property).
        /// </summary>
        /// <value>The B07 Country Single Object.</value>
        public B07_Country_Child B07_Country_SingleObject
        {
            get { return GetProperty(B07_Country_SingleObjectProperty); }
            private set { LoadProperty(B07_Country_SingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="B07_Country_ASingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<B07_Country_ReChild> B07_Country_ASingleObjectProperty = RegisterProperty<B07_Country_ReChild>(p => p.B07_Country_ASingleObject, "B07 Country ASingle Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the B07 Country ASingle Object ("parent load" child property).
        /// </summary>
        /// <value>The B07 Country ASingle Object.</value>
        public B07_Country_ReChild B07_Country_ASingleObject
        {
            get { return GetProperty(B07_Country_ASingleObjectProperty); }
            private set { LoadProperty(B07_Country_ASingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="B07_RegionObjects"/> property.
        /// </summary>
        public static readonly PropertyInfo<B07_RegionColl> B07_RegionObjectsProperty = RegisterProperty<B07_RegionColl>(p => p.B07_RegionObjects, "B07 Region Objects", RelationshipTypes.Child);
        /// <summary>
        /// Gets the B07 Region Objects ("parent load" child property).
        /// </summary>
        /// <value>The B07 Region Objects.</value>
        public B07_RegionColl B07_RegionObjects
        {
            get { return GetProperty(B07_RegionObjectsProperty); }
            private set { LoadProperty(B07_RegionObjectsProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="B06_Country"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="B06_Country"/> object.</returns>
        internal static B06_Country NewB06_Country()
        {
            return DataPortal.CreateChild<B06_Country>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="B06_Country"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="B06_Country"/> object.</returns>
        internal static B06_Country GetB06_Country(SafeDataReader dr)
        {
            B06_Country obj = new B06_Country();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(dr);
            // Regions are "parent loaded" later; start with an empty collection.
            obj.LoadProperty(B07_RegionObjectsProperty, B07_RegionColl.NewB07_RegionColl());
            obj.MarkOld();
            return obj;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="B06_Country"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public B06_Country()
        {
            // Use factory methods and do not use direct creation.

            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="B06_Country"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // Temporary negative id until the object is inserted.
            LoadProperty(Country_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
            LoadProperty(B07_Country_SingleObjectProperty, DataPortal.CreateChild<B07_Country_Child>());
            LoadProperty(B07_Country_ASingleObjectProperty, DataPortal.CreateChild<B07_Country_ReChild>());
            LoadProperty(B07_RegionObjectsProperty, DataPortal.CreateChild<B07_RegionColl>());
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="B06_Country"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(Country_IDProperty, dr.GetInt32("Country_ID"));
            LoadProperty(Country_NameProperty, dr.GetString("Country_Name"));
            // parent properties
            parent_SubContinent_ID = dr.GetInt32("Parent_SubContinent_ID");
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Loads child <see cref="B07_Country_Child"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(B07_Country_Child child)
        {
            LoadProperty(B07_Country_SingleObjectProperty, child);
        }

        /// <summary>
        /// Loads child <see cref="B07_Country_ReChild"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(B07_Country_ReChild child)
        {
            LoadProperty(B07_Country_ASingleObjectProperty, child);
        }

        /// <summary>
        /// Inserts a new <see cref="B06_Country"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(B04_SubContinent parent)
        {
            using (var dalManager = DalFactoryParentLoad.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IB06_CountryDal>();
                using (BypassPropertyChecks)
                {
                    int country_ID = -1;
                    dal.Insert(
                        parent.SubContinent_ID,
                        out country_ID,
                        Country_Name
                        );
                    // Replace the temporary id with the database-generated one.
                    LoadProperty(Country_IDProperty, country_ID);
                }
                OnInsertPost(args);
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="B06_Country"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            if (!IsDirty)
                return;

            using (var dalManager = DalFactoryParentLoad.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IB06_CountryDal>();
                using (BypassPropertyChecks)
                {
                    dal.Update(
                        Country_ID,
                        Country_Name
                        );
                }
                OnUpdatePost(args);
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="B06_Country"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var dalManager = DalFactoryParentLoad.GetManager())
            {
                var args = new DataPortalHookArgs();
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IB06_CountryDal>();
                using (BypassPropertyChecks)
                {
                    dal.Delete(ReadProperty(Country_IDProperty));
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Text;

namespace System
{
    // A Version object contains four hierarchical numeric components: major, minor,
    // build and revision. Build and revision may be unspecified, which is represented
    // internally as a -1. By definition, an unspecified component matches anything
    // (both unspecified and specified), and an unspecified component is "less than" any
    // specified component.
    [Serializable]
    [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
    public sealed class Version : ICloneable, IComparable, IComparable<Version>, IEquatable<Version>
    {
        // AssemblyName depends on the order staying the same
        private readonly int _Major; // Do not rename (binary serialization)
        private readonly int _Minor; // Do not rename (binary serialization)
        private readonly int _Build = -1; // Do not rename (binary serialization)
        private readonly int _Revision = -1; // Do not rename (binary serialization)

        /// <summary>Initializes a version with all four components; each must be non-negative.</summary>
        public Version(int major, int minor, int build, int revision)
        {
            if (major < 0)
                throw new ArgumentOutOfRangeException(nameof(major), SR.ArgumentOutOfRange_Version);

            if (minor < 0)
                throw new ArgumentOutOfRangeException(nameof(minor), SR.ArgumentOutOfRange_Version);

            if (build < 0)
                throw new ArgumentOutOfRangeException(nameof(build), SR.ArgumentOutOfRange_Version);

            if (revision < 0)
                throw new ArgumentOutOfRangeException(nameof(revision), SR.ArgumentOutOfRange_Version);
            Contract.EndContractBlock();

            _Major = major;
            _Minor = minor;
            _Build = build;
            _Revision = revision;
        }

        /// <summary>Initializes a version with major.minor.build; revision stays unspecified (-1).</summary>
        public Version(int major, int minor, int build)
        {
            if (major < 0)
                throw new ArgumentOutOfRangeException(nameof(major), SR.ArgumentOutOfRange_Version);

            if (minor < 0)
                throw new ArgumentOutOfRangeException(nameof(minor), SR.ArgumentOutOfRange_Version);

            if (build < 0)
                throw new ArgumentOutOfRangeException(nameof(build), SR.ArgumentOutOfRange_Version);

            Contract.EndContractBlock();

            _Major = major;
            _Minor = minor;
            _Build = build;
        }

        /// <summary>Initializes a version with major.minor; build and revision stay unspecified (-1).</summary>
        public Version(int major, int minor)
        {
            if (major < 0)
                throw new ArgumentOutOfRangeException(nameof(major), SR.ArgumentOutOfRange_Version);

            if (minor < 0)
                throw new ArgumentOutOfRangeException(nameof(minor), SR.ArgumentOutOfRange_Version);

            Contract.EndContractBlock();

            _Major = major;
            _Minor = minor;
        }

        /// <summary>Parses a version string ("major.minor[.build[.revision]]"); throws on invalid input.</summary>
        public Version(String version)
        {
            Version v = Version.Parse(version);
            _Major = v.Major;
            _Minor = v.Minor;
            _Build = v.Build;
            _Revision = v.Revision;
        }

        /// <summary>Initializes version 0.0 (build/revision unspecified).</summary>
        public Version()
        {
            _Major = 0;
            _Minor = 0;
        }

        // Copy constructor used only by Clone().
        private Version(Version version)
        {
            Debug.Assert(version != null);

            _Major = version._Major;
            _Minor = version._Minor;
            _Build = version._Build;
            _Revision = version._Revision;
        }

        /// <summary>Returns a new Version with identical component values.</summary>
        public object Clone()
        {
            return new Version(this);
        }

        // Properties for setting and getting version numbers
        public int Major
        {
            get { return _Major; }
        }

        public int Minor
        {
            get { return _Minor; }
        }

        public int Build
        {
            get { return _Build; }
        }

        public int Revision
        {
            get { return _Revision; }
        }

        // High 16 bits of the revision component.
        public short MajorRevision
        {
            get { return (short)(_Revision >> 16); }
        }

        // Low 16 bits of the revision component.
        public short MinorRevision
        {
            get { return (short)(_Revision & 0xFFFF); }
        }

        /// <summary>Compares against a boxed Version; null sorts first, non-Version throws.</summary>
        public int CompareTo(Object version)
        {
            if (version == null)
            {
                return 1;
            }

            Version v = version as Version;
            if (v == null)
            {
                throw new ArgumentException(SR.Arg_MustBeVersion);
            }

            return CompareTo(v);
        }

        /// <summary>
        /// Lexicographic comparison over (Major, Minor, Build, Revision).
        /// Unspecified components (-1) naturally compare less than any specified one.
        /// </summary>
        public int CompareTo(Version value)
        {
            return
                object.ReferenceEquals(value, this) ? 0 :
                object.ReferenceEquals(value, null) ? 1 :
                _Major != value._Major ? (_Major > value._Major ? 1 : -1) :
                _Minor != value._Minor ? (_Minor > value._Minor ? 1 : -1) :
                _Build != value._Build ? (_Build > value._Build ? 1 : -1) :
                _Revision != value._Revision ? (_Revision > value._Revision ? 1 : -1) :
                0;
        }

        public override bool Equals(Object obj)
        {
            return Equals(obj as Version);
        }

        /// <summary>Value equality over all four components (null-safe).</summary>
        public bool Equals(Version obj)
        {
            return object.ReferenceEquals(obj, this) ||
                (!object.ReferenceEquals(obj, null) &&
                _Major == obj._Major &&
                _Minor == obj._Minor &&
                _Build == obj._Build &&
                _Revision == obj._Revision);
        }

        public override int GetHashCode()
        {
            // Let's assume that most version numbers will be pretty small and just
            // OR some lower order bits together.
            // Layout: Major -> bits 28-31, Minor -> 20-27, Build -> 12-19, Revision -> 0-11.
            int accumulator = 0;

            accumulator |= (_Major & 0x0000000F) << 28;
            accumulator |= (_Minor & 0x000000FF) << 20;
            accumulator |= (_Build & 0x000000FF) << 12;
            accumulator |= (_Revision & 0x00000FFF);

            return accumulator;
        }

        /// <summary>Formats as many components as are specified (2, 3 or 4).</summary>
        public override String ToString()
        {
            if (_Build == -1) return (ToString(2));
            if (_Revision == -1) return (ToString(3));
            return (ToString(4));
        }

        /// <summary>
        /// Formats the first <paramref name="fieldCount"/> components, dot-separated.
        /// Throws if a requested component is unspecified or the count exceeds 4.
        /// </summary>
        public String ToString(int fieldCount)
        {
            StringBuilder sb;
            switch (fieldCount)
            {
                case 0:
                    return (String.Empty);
                case 1:
                    return (_Major.ToString());
                case 2:
                    sb = StringBuilderCache.Acquire();
                    AppendPositiveNumber(_Major, sb);
                    sb.Append('.');
                    AppendPositiveNumber(_Minor, sb);
                    return StringBuilderCache.GetStringAndRelease(sb);
                default:
                    if (_Build == -1)
                        throw new ArgumentException(SR.Format(SR.ArgumentOutOfRange_Bounds_Lower_Upper, "0", "2"), nameof(fieldCount));

                    if (fieldCount == 3)
                    {
                        sb = StringBuilderCache.Acquire();
                        AppendPositiveNumber(_Major, sb);
                        sb.Append('.');
                        AppendPositiveNumber(_Minor, sb);
                        sb.Append('.');
                        AppendPositiveNumber(_Build, sb);
                        return StringBuilderCache.GetStringAndRelease(sb);
                    }

                    if (_Revision == -1)
                        throw new ArgumentException(SR.Format(SR.ArgumentOutOfRange_Bounds_Lower_Upper, "0", "3"), nameof(fieldCount));

                    if (fieldCount == 4)
                    {
                        sb = StringBuilderCache.Acquire();
                        AppendPositiveNumber(_Major, sb);
                        sb.Append('.');
                        AppendPositiveNumber(_Minor, sb);
                        sb.Append('.');
                        AppendPositiveNumber(_Build, sb);
                        sb.Append('.');
                        AppendPositiveNumber(_Revision, sb);
                        return StringBuilderCache.GetStringAndRelease(sb);
                    }

                    throw new ArgumentException(SR.Format(SR.ArgumentOutOfRange_Bounds_Lower_Upper, "0", "4"), nameof(fieldCount));
            }
        }

        //
        // AppendPositiveNumber is an optimization to append a number to a StringBuilder object without
        // doing any boxing and not even creating intermediate string.
        // Note: as we always have positive numbers then it is safe to convert the number to string
        // regardless of the current culture as we'll not have any punctuation marks in the number
        //
        private const int ZERO_CHAR_VALUE = (int)'0';

        private static void AppendPositiveNumber(int num, StringBuilder sb)
        {
            Debug.Assert(num >= 0, "AppendPositiveNumber expect positive numbers");

            int index = sb.Length;
            int reminder;

            // Digits are produced least-significant first and inserted at a fixed
            // index, which yields them in the correct order without a reverse pass.
            do
            {
                reminder = num % 10;
                num = num / 10;
                sb.Insert(index, (char)(ZERO_CHAR_VALUE + reminder));
            } while (num > 0);
        }

        /// <summary>Parses a version string; throws the specific failure exception on invalid input.</summary>
        public static Version Parse(string input)
        {
            if (input == null)
            {
                throw new ArgumentNullException(nameof(input));
            }
            Contract.EndContractBlock();

            VersionResult r = new VersionResult();
            r.Init(nameof(input), true);
            if (!TryParseVersion(input, ref r))
            {
                throw r.GetVersionParseException();
            }
            return r.m_parsedVersion;
        }

        /// <summary>Non-throwing parse; <paramref name="result"/> is null on failure.</summary>
        public static bool TryParse(string input, out Version result)
        {
            VersionResult r = new VersionResult();
            r.Init(nameof(input), false);
            bool b = TryParseVersion(input, ref r);
            result = r.m_parsedVersion;
            return b;
        }

        // Shared parse core: records failure details in `result`; in throwing mode
        // (Parse) SetFailure throws immediately, in non-throwing mode (TryParse) it
        // just records the failure for the caller.
        private static bool TryParseVersion(string version, ref VersionResult result)
        {
            int major, minor, build, revision;

            if ((Object)version == null)
            {
                result.SetFailure(ParseFailureKind.ArgumentNullException);
                return false;
            }

            // A valid version string has 2 to 4 dot-separated components.
            String[] parsedComponents = version.Split('.');
            int parsedComponentsLength = parsedComponents.Length;
            if ((parsedComponentsLength < 2) || (parsedComponentsLength > 4))
            {
                result.SetFailure(ParseFailureKind.ArgumentException);
                return false;
            }

            if (!TryParseComponent(parsedComponents[0], nameof(version), ref result, out major))
            {
                return false;
            }

            if (!TryParseComponent(parsedComponents[1], nameof(version), ref result, out minor))
            {
                return false;
            }

            parsedComponentsLength -= 2;

            if (parsedComponentsLength > 0)
            {
                if (!TryParseComponent(parsedComponents[2], "build", ref result, out build))
                {
                    return false;
                }

                parsedComponentsLength--;

                if (parsedComponentsLength > 0)
                {
                    if (!TryParseComponent(parsedComponents[3], "revision", ref result, out revision))
                    {
                        return false;
                    }
                    else
                    {
                        result.m_parsedVersion = new Version(major, minor, build, revision);
                    }
                }
                else
                {
                    result.m_parsedVersion = new Version(major, minor, build);
                }
            }
            else
            {
                result.m_parsedVersion = new Version(major, minor);
            }

            return true;
        }

        // Parses a single component as an invariant-culture, non-negative integer.
        private static bool TryParseComponent(string component, string componentName, ref VersionResult result, out int parsedComponent)
        {
            if (!Int32.TryParse(component, NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedComponent))
            {
                result.SetFailure(ParseFailureKind.FormatException, component);
                return false;
            }

            if (parsedComponent < 0)
            {
                result.SetFailure(ParseFailureKind.ArgumentOutOfRangeException, componentName);
                return false;
            }

            return true;
        }

        public static bool operator ==(Version v1, Version v2)
        {
            // Reference-equality check makes null == null true without dereferencing.
            if (Object.ReferenceEquals(v1, null))
            {
                return Object.ReferenceEquals(v2, null);
            }

            return v1.Equals(v2);
        }

        public static bool operator !=(Version v1, Version v2)
        {
            return !(v1 == v2);
        }

        public static bool operator <(Version v1, Version v2)
        {
            if ((Object)v1 == null)
                throw new ArgumentNullException(nameof(v1));
            Contract.EndContractBlock();
            return (v1.CompareTo(v2) < 0);
        }

        public static bool operator <=(Version v1, Version v2)
        {
            if ((Object)v1 == null)
                throw new ArgumentNullException(nameof(v1));
            Contract.EndContractBlock();
            return (v1.CompareTo(v2) <= 0);
        }

        // > and >= are defined in terms of < and <= with swapped operands.
        public static bool operator >(Version v1, Version v2)
        {
            return (v2 < v1);
        }

        public static bool operator >=(Version v1, Version v2)
        {
            return (v2 <= v1);
        }

        // Classifies which exception GetVersionParseException should build.
        internal enum ParseFailureKind
        {
            ArgumentNullException,
            ArgumentException,
            ArgumentOutOfRangeException,
            FormatException
        }

        // Mutable carrier for parse state: holds the parsed value on success, or the
        // failure kind and exception arguments on failure. When m_canThrow is set the
        // failure is thrown immediately instead of being recorded.
        internal struct VersionResult
        {
            internal Version m_parsedVersion;
            internal ParseFailureKind m_failure;
            internal string m_exceptionArgument;
            internal string m_argumentName;
            internal bool m_canThrow;

            internal void Init(string argumentName, bool canThrow)
            {
                m_canThrow = canThrow;
                m_argumentName = argumentName;
            }

            internal void SetFailure(ParseFailureKind failure)
            {
                SetFailure(failure, String.Empty);
            }

            internal void SetFailure(ParseFailureKind failure, string argument)
            {
                m_failure = failure;
                m_exceptionArgument = argument;
                if (m_canThrow)
                {
                    throw GetVersionParseException();
                }
            }

            internal Exception GetVersionParseException()
            {
                switch (m_failure)
                {
                    case ParseFailureKind.ArgumentNullException:
                        return new ArgumentNullException(m_argumentName);
                    case ParseFailureKind.ArgumentException:
                        return new ArgumentException(SR.Arg_VersionString);
                    case ParseFailureKind.ArgumentOutOfRangeException:
                        return new ArgumentOutOfRangeException(m_exceptionArgument, SR.ArgumentOutOfRange_Version);
                    case ParseFailureKind.FormatException:
                        // Regenerate the FormatException as would be thrown by Int32.Parse()
                        try
                        {
                            Int32.Parse(m_exceptionArgument, CultureInfo.InvariantCulture);
                        }
                        catch (FormatException e)
                        {
                            return e;
                        }
                        catch (OverflowException e)
                        {
                            return e;
                        }
                        Debug.Assert(false, "Int32.Parse() did not throw exception but TryParse failed: " + m_exceptionArgument);
                        return new FormatException(SR.Format_InvalidString);
                    default:
                        Debug.Assert(false, "Unmatched case in Version.GetVersionParseException() for value: " + m_failure);
                        return new ArgumentException(SR.Arg_VersionString);
                }
            }
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gax = Google.Api.Gax;
using sys = System;

namespace Google.Ads.GoogleAds.V9.Resources
{
    /// <summary>Resource name for the <c>Campaign</c> resource.</summary>
    public sealed partial class CampaignName : gax::IResourceName, sys::IEquatable<CampaignName>
    {
        /// <summary>The possible contents of <see cref="CampaignName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>A resource name with pattern <c>customers/{customer_id}/campaigns/{campaign_id}</c>.</summary>
            CustomerCampaign = 1,
        }

        // Template shared by Format/Parse/ToString for the single known pattern.
        private static gax::PathTemplate s_customerCampaign = new gax::PathTemplate("customers/{customer_id}/campaigns/{campaign_id}");

        /// <summary>Creates a <see cref="CampaignName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="CampaignName"/> containing the provided <paramref name="unparsedResourceName"/>.
        /// </returns>
        public static CampaignName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new CampaignName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="CampaignName"/> with the pattern <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="CampaignName"/> constructed from the provided ids.</returns>
        public static CampaignName FromCustomerCampaign(string customerId, string campaignId) =>
            new CampaignName(ResourceNameType.CustomerCampaign, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), campaignId: gax::GaxPreconditions.CheckNotNullOrEmpty(campaignId, nameof(campaignId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </returns>
        public static string Format(string customerId, string campaignId) =>
            FormatCustomerCampaign(customerId, campaignId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </returns>
        public static string FormatCustomerCampaign(string customerId, string campaignId) =>
            s_customerCampaign.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), gax::GaxPreconditions.CheckNotNullOrEmpty(campaignId, nameof(campaignId)));

        /// <summary>Parses the given resource name string into a new <see cref="CampaignName"/> instance.</summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <returns>The parsed <see cref="CampaignName"/> if successful.</returns>
        public static CampaignName Parse(string campaignName) => Parse(campaignName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="CampaignName"/> instance; optionally allowing an
        /// unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="CampaignName"/> if successful.</returns>
        public static CampaignName Parse(string campaignName, bool allowUnparsed) =>
            TryParse(campaignName, allowUnparsed, out CampaignName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="CampaignName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="CampaignName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string campaignName, out CampaignName result) =>
            TryParse(campaignName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="CampaignName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="CampaignName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string campaignName, bool allowUnparsed, out CampaignName result)
        {
            gax::GaxPreconditions.CheckNotNull(campaignName, nameof(campaignName));
            gax::TemplatedResourceName resourceName;
            // First try the known pattern; only fall back to an unparsed name if allowed.
            if (s_customerCampaign.TryParseName(campaignName, out resourceName))
            {
                result = FromCustomerCampaign(resourceName[0], resourceName[1]);
                return true;
            }
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(campaignName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        // All construction funnels through here; ID parameters are null for the
        // Unparsed type, and UnparsedResource is null for parsed names.
        private CampaignName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string campaignId = null, string customerId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            CampaignId = campaignId;
            CustomerId = customerId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="CampaignName"/> class from the component parts of pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        public CampaignName(string customerId, string campaignId) : this(ResourceNameType.CustomerCampaign, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), campaignId: gax::GaxPreconditions.CheckNotNullOrEmpty(campaignId, nameof(campaignId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Campaign</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CampaignId { get; }

        /// <summary>
        /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CustomerId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.CustomerCampaign: return s_customerCampaign.Expand(CustomerId, CampaignId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        // Equality and hashing are both delegated to the string form, so two names
        // with the same textual representation compare equal regardless of how they
        // were constructed.
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as CampaignName);

        /// <inheritdoc/>
        public bool Equals(CampaignName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(CampaignName a, CampaignName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(CampaignName a, CampaignName b) => !(a == b);
    }

    public partial class Campaign
    {
        /// <summary>
        /// <see cref="gagvr::CampaignName"/>-typed view over the <see cref="ResourceName"/> resource name property.
        /// </summary>
        internal CampaignName ResourceNameAsCampaignName
        {
            get => string.IsNullOrEmpty(ResourceName) ? null : gagvr::CampaignName.Parse(ResourceName, allowUnparsed: true);
            set => ResourceName = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="gagvr::CampaignName"/>-typed view over the <see cref="BaseCampaign"/> resource name property.
        /// </summary>
        internal CampaignName BaseCampaignAsCampaignName
        {
            get => string.IsNullOrEmpty(BaseCampaign) ? null : gagvr::CampaignName.Parse(BaseCampaign, allowUnparsed: true);
            set => BaseCampaign = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="gagvr::CampaignName"/>-typed view over the <see cref="Name"/> resource name property.
        /// </summary>
        internal CampaignName CampaignName
        {
            get => string.IsNullOrEmpty(Name) ? null : gagvr::CampaignName.Parse(Name, allowUnparsed: true);
            set => Name = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="CampaignLabelName"/>-typed view over the <see cref="Labels"/> resource name property.
        /// </summary>
        internal gax::ResourceNameList<CampaignLabelName> LabelsAsCampaignLabelNames
        {
            get => new gax::ResourceNameList<CampaignLabelName>(Labels, s => string.IsNullOrEmpty(s) ? null : CampaignLabelName.Parse(s, allowUnparsed: true));
        }

        /// <summary>
        /// <see cref="CampaignBudgetName"/>-typed view over the <see cref="CampaignBudget"/> resource name property.
        /// </summary>
        internal CampaignBudgetName CampaignBudgetAsCampaignBudgetName
        {
            get => string.IsNullOrEmpty(CampaignBudget) ? null : CampaignBudgetName.Parse(CampaignBudget, allowUnparsed: true);
            set => CampaignBudget = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="BiddingStrategyName"/>-typed view over the <see cref="BiddingStrategy"/> resource name property.
        /// </summary>
        internal BiddingStrategyName BiddingStrategyAsBiddingStrategyName
        {
            get => string.IsNullOrEmpty(BiddingStrategy) ? null : BiddingStrategyName.Parse(BiddingStrategy, allowUnparsed: true);
            set => BiddingStrategy = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="AccessibleBiddingStrategyName"/>-typed view over the <see cref="AccessibleBiddingStrategy"/>
        /// resource name property.
        /// </summary>
        internal AccessibleBiddingStrategyName AccessibleBiddingStrategyAsAccessibleBiddingStrategyName
        {
            get => string.IsNullOrEmpty(AccessibleBiddingStrategy) ? null : AccessibleBiddingStrategyName.Parse(AccessibleBiddingStrategy, allowUnparsed: true);
            set => AccessibleBiddingStrategy = value?.ToString() ?? "";
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using System;
using System.Linq;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using Moq;
using Xunit;

namespace Avalonia.Base.UnitTests
{
    /// <summary>
    /// Unit tests for <c>PriorityValue</c>: direct values vs. bindings, priority
    /// ordering (lower priority number wins), binding completion/disposal fallback,
    /// UnsetValue handling, owner notification, and value coercion.
    /// </summary>
    public class PriorityValueTests
    {
        // Shared dummy property; the tests only need a valid AvaloniaProperty instance.
        private static readonly AvaloniaProperty TestProperty = new StyledProperty<string>(
            "Test",
            typeof(PriorityValueTests),
            new StyledPropertyMetadata<string>());

        [Fact]
        public void Initial_Value_Should_Be_UnsetValue()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            Assert.Same(AvaloniaProperty.UnsetValue, target.Value);
        }

        [Fact]
        public void First_Binding_Sets_Value()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            target.Add(Single("foo"), 0);

            Assert.Equal("foo", target.Value);
        }

        [Fact]
        public void Changing_Binding_Should_Set_Value()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var subject = new BehaviorSubject<string>("foo");

            target.Add(subject, 0);
            Assert.Equal("foo", target.Value);
            subject.OnNext("bar");
            Assert.Equal("bar", target.Value);
        }

        [Fact]
        public void Setting_Direct_Value_Should_Override_Binding()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            target.Add(Single("foo"), 0);
            target.SetValue("bar", 0);

            Assert.Equal("bar", target.Value);
        }

        [Fact]
        public void Binding_Firing_Should_Override_Direct_Value()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var source = new BehaviorSubject<object>("initial");

            target.Add(source, 0);
            Assert.Equal("initial", target.Value);
            target.SetValue("first", 0);
            Assert.Equal("first", target.Value);
            // A new notification from the active binding replaces the direct value.
            source.OnNext("second");
            Assert.Equal("second", target.Value);
        }

        [Fact]
        public void Earlier_Binding_Firing_Should_Not_Override_Later()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var nonActive = new BehaviorSubject<object>("na");
            var source = new BehaviorSubject<object>("initial");

            // `source` was added after `nonActive` at the same priority, so `source`
            // is the active binding; notifications from `nonActive` must be ignored.
            target.Add(nonActive, 1);
            target.Add(source, 1);
            Assert.Equal("initial", target.Value);
            target.SetValue("first", 1);
            Assert.Equal("first", target.Value);
            nonActive.OnNext("second");
            Assert.Equal("first", target.Value);
        }

        [Fact]
        public void Binding_Completing_Should_Revert_To_Direct_Value()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var source = new BehaviorSubject<object>("initial");

            target.Add(source, 0);
            Assert.Equal("initial", target.Value);
            target.SetValue("first", 0);
            Assert.Equal("first", target.Value);
            source.OnNext("second");
            Assert.Equal("second", target.Value);
            source.OnCompleted();
            Assert.Equal("first", target.Value);
        }

        [Fact]
        public void Binding_With_Lower_Priority_Has_Precedence()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            // Lower priority number = higher precedence.
            target.Add(Single("foo"), 1);
            target.Add(Single("bar"), 0);
            target.Add(Single("baz"), 1);

            Assert.Equal("bar", target.Value);
        }

        [Fact]
        public void Later_Binding_With_Same_Priority_Should_Take_Precedence()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            target.Add(Single("foo"), 1);
            target.Add(Single("bar"), 0);
            target.Add(Single("baz"), 0);
            target.Add(Single("qux"), 1);

            Assert.Equal("baz", target.Value);
        }

        // NOTE(review): name reads oddly — presumably intended as
        // "Changing_Binding_With_Lower_Priority_Should_Not_Set_Value"; the body
        // verifies that a lower-precedence (higher number) binding firing does not
        // change the value. Left unchanged to avoid renaming a test.
        [Fact]
        public void Changing_Binding_With_Lower_Priority_Should_Set_Not_Value()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var subject = new BehaviorSubject<string>("bar");

            target.Add(Single("foo"), 0);
            target.Add(subject, 1);
            Assert.Equal("foo", target.Value);
            subject.OnNext("baz");
            Assert.Equal("foo", target.Value);
        }

        [Fact]
        public void UnsetValue_Should_Fall_Back_To_Next_Binding()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var subject = new BehaviorSubject<object>("bar");

            target.Add(subject, 0);
            target.Add(Single("foo"), 1);
            Assert.Equal("bar", target.Value);
            subject.OnNext(AvaloniaProperty.UnsetValue);
            Assert.Equal("foo", target.Value);
        }

        [Fact]
        public void Adding_Value_Should_Call_OnNext()
        {
            var owner = new Mock<IPriorityValueOwner>();
            var target = new PriorityValue(owner.Object, TestProperty, typeof(string));

            target.Add(Single("foo"), 0);

            owner.Verify(x => x.Changed(target, AvaloniaProperty.UnsetValue, "foo"));
        }

        [Fact]
        public void Changing_Value_Should_Call_OnNext()
        {
            var owner = new Mock<IPriorityValueOwner>();
            var target = new PriorityValue(owner.Object, TestProperty, typeof(string));
            var subject = new BehaviorSubject<object>("foo");

            target.Add(subject, 0);
            subject.OnNext("bar");

            owner.Verify(x => x.Changed(target, "foo", "bar"));
        }

        [Fact]
        public void Disposing_A_Binding_Should_Revert_To_Next_Value()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            target.Add(Single("foo"), 0);
            var disposable = target.Add(Single("bar"), 0);

            Assert.Equal("bar", target.Value);
            disposable.Dispose();
            Assert.Equal("foo", target.Value);
        }

        [Fact]
        public void Disposing_A_Binding_Should_Remove_BindingEntry()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));

            target.Add(Single("foo"), 0);
            var disposable = target.Add(Single("bar"), 0);

            Assert.Equal(2, target.GetBindings().Count());
            disposable.Dispose();
            Assert.Single(target.GetBindings());
        }

        [Fact]
        public void Completing_A_Binding_Should_Revert_To_Previous_Binding()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var source = new BehaviorSubject<object>("bar");

            target.Add(Single("foo"), 0);
            target.Add(source, 0);

            Assert.Equal("bar", target.Value);
            source.OnCompleted();
            Assert.Equal("foo", target.Value);
        }

        [Fact]
        public void Completing_A_Binding_Should_Revert_To_Lower_Priority()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var source = new BehaviorSubject<object>("bar");

            target.Add(Single("foo"), 1);
            target.Add(source, 0);

            Assert.Equal("bar", target.Value);
            source.OnCompleted();
            Assert.Equal("foo", target.Value);
        }

        [Fact]
        public void Completing_A_Binding_Should_Remove_BindingEntry()
        {
            var target = new PriorityValue(null, TestProperty, typeof(string));
            var subject = new BehaviorSubject<object>("bar");

            target.Add(Single("foo"), 0);
            target.Add(subject, 0);

            Assert.Equal(2, target.GetBindings().Count());
            subject.OnCompleted();
            Assert.Single(target.GetBindings());
        }

        [Fact]
        public void Direct_Value_Should_Be_Coerced()
        {
            // Coercion function caps the value at 10.
            var target = new PriorityValue(null, TestProperty, typeof(int), x => Math.Min((int)x, 10));

            target.SetValue(5, 0);
            Assert.Equal(5, target.Value);
            target.SetValue(15, 0);
            Assert.Equal(10, target.Value);
        }

        [Fact]
        public void Bound_Value_Should_Be_Coerced()
        {
            var target = new PriorityValue(null, TestProperty, typeof(int), x => Math.Min((int)x, 10));
            var source = new Subject<object>();

            target.Add(source, 0);
            source.OnNext(5);
            Assert.Equal(5, target.Value);
            source.OnNext(15);
            Assert.Equal(10, target.Value);
        }

        [Fact]
        public void Revalidate_Should_ReCoerce_Value()
        {
            // The coercion closure captures `max`, so changing it and calling
            // Revalidate() should re-run coercion against the new cap.
            var max = 10;
            var target = new PriorityValue(null, TestProperty, typeof(int), x => Math.Min((int)x, max));
            var source = new Subject<object>();

            target.Add(source, 0);
            source.OnNext(5);
            Assert.Equal(5, target.Value);
            source.OnNext(15);
            Assert.Equal(10, target.Value);
            max = 12;
            target.Revalidate();
            Assert.Equal(12, target.Value);
        }

        /// <summary>
        /// Returns an observable that returns a single value but does not complete.
        /// </summary>
        /// <typeparam name="T">The type of the observable.</typeparam>
        /// <param name="value">The value.</param>
        /// <returns>The observable.</returns>
        private IObservable<T> Single<T>(T value)
        {
            return Observable.Never<T>().StartWith(value);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // ------------------------------------------------------------------------------ // Changes to this file must follow the http://aka.ms/api-review process. // ------------------------------------------------------------------------------ namespace System.Net.Http { public partial class ByteArrayContent : System.Net.Http.HttpContent { public ByteArrayContent(byte[] content) { } public ByteArrayContent(byte[] content, int offset, int count) { } protected override System.Threading.Tasks.Task<System.IO.Stream> CreateContentReadStreamAsync() { return default(System.Threading.Tasks.Task<System.IO.Stream>); } protected override System.Threading.Tasks.Task SerializeToStreamAsync(System.IO.Stream stream, System.Net.TransportContext context) { return default(System.Threading.Tasks.Task); } protected internal override bool TryComputeLength(out long length) { length = default(long); return default(bool); } } public enum ClientCertificateOption { Automatic = 1, Manual = 0, } public abstract partial class DelegatingHandler : System.Net.Http.HttpMessageHandler { protected DelegatingHandler() { } protected DelegatingHandler(System.Net.Http.HttpMessageHandler innerHandler) { } public System.Net.Http.HttpMessageHandler InnerHandler { get { return default(System.Net.Http.HttpMessageHandler); } set { } } protected override void Dispose(bool disposing) { } protected internal override System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } } public partial class FormUrlEncodedContent : System.Net.Http.ByteArrayContent { public 
FormUrlEncodedContent(System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, string>> nameValueCollection) : base(default(byte[])) { } } public partial class HttpClient : System.Net.Http.HttpMessageInvoker { public HttpClient() : base(default(System.Net.Http.HttpMessageHandler)) { } public HttpClient(System.Net.Http.HttpMessageHandler handler) : base(default(System.Net.Http.HttpMessageHandler)) { } public HttpClient(System.Net.Http.HttpMessageHandler handler, bool disposeHandler) : base(default(System.Net.Http.HttpMessageHandler)) { } public System.Uri BaseAddress { get { return default(System.Uri); } set { } } public System.Net.Http.Headers.HttpRequestHeaders DefaultRequestHeaders { get { return default(System.Net.Http.Headers.HttpRequestHeaders); } } public long MaxResponseContentBufferSize { get { return default(long); } set { } } public System.TimeSpan Timeout { get { return default(System.TimeSpan); } set { } } public void CancelPendingRequests() { } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> DeleteAsync(string requestUri) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> DeleteAsync(string requestUri, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> DeleteAsync(System.Uri requestUri) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> DeleteAsync(System.Uri requestUri, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } protected override void Dispose(bool disposing) { } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> 
GetAsync(string requestUri) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(string requestUri, System.Net.Http.HttpCompletionOption completionOption) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(string requestUri, System.Net.Http.HttpCompletionOption completionOption, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(string requestUri, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(System.Uri requestUri) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(System.Uri requestUri, System.Net.Http.HttpCompletionOption completionOption) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(System.Uri requestUri, System.Net.Http.HttpCompletionOption completionOption, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> GetAsync(System.Uri requestUri, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<byte[]> GetByteArrayAsync(string requestUri) { return 
default(System.Threading.Tasks.Task<byte[]>); } public System.Threading.Tasks.Task<byte[]> GetByteArrayAsync(System.Uri requestUri) { return default(System.Threading.Tasks.Task<byte[]>); } public System.Threading.Tasks.Task<System.IO.Stream> GetStreamAsync(string requestUri) { return default(System.Threading.Tasks.Task<System.IO.Stream>); } public System.Threading.Tasks.Task<System.IO.Stream> GetStreamAsync(System.Uri requestUri) { return default(System.Threading.Tasks.Task<System.IO.Stream>); } public System.Threading.Tasks.Task<string> GetStringAsync(string requestUri) { return default(System.Threading.Tasks.Task<string>); } public System.Threading.Tasks.Task<string> GetStringAsync(System.Uri requestUri) { return default(System.Threading.Tasks.Task<string>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PostAsync(string requestUri, System.Net.Http.HttpContent content) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PostAsync(string requestUri, System.Net.Http.HttpContent content, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PostAsync(System.Uri requestUri, System.Net.Http.HttpContent content) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PostAsync(System.Uri requestUri, System.Net.Http.HttpContent content, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PutAsync(string requestUri, System.Net.Http.HttpContent content) { return 
default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PutAsync(string requestUri, System.Net.Http.HttpContent content, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PutAsync(System.Uri requestUri, System.Net.Http.HttpContent content) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> PutAsync(System.Uri requestUri, System.Net.Http.HttpContent content, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Net.Http.HttpCompletionOption completionOption) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Net.Http.HttpCompletionOption completionOption, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } public override System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } } public partial class HttpClientHandler : 
System.Net.Http.HttpMessageHandler { public HttpClientHandler() { } public bool AllowAutoRedirect { get { return default(bool); } set { } } public System.Net.DecompressionMethods AutomaticDecompression { get { return default(System.Net.DecompressionMethods); } set { } } public bool CheckCertificateRevocationList { get { return default(bool); } set { } } public System.Net.Http.ClientCertificateOption ClientCertificateOptions { get { return default(System.Net.Http.ClientCertificateOption); } set { } } public System.Security.Cryptography.X509Certificates.X509CertificateCollection ClientCertificates { get { return default(System.Security.Cryptography.X509Certificates.X509CertificateCollection); } } public System.Net.CookieContainer CookieContainer { get { return default(System.Net.CookieContainer); } set { } } public System.Net.ICredentials Credentials { get { return default(System.Net.ICredentials); } set { } } public System.Net.ICredentials DefaultProxyCredentials { get { return default(System.Net.ICredentials); } set { } } public int MaxAutomaticRedirections { get { return default(int); } set { } } public int MaxConnectionsPerServer { get { return default(int); } set { } } public long MaxRequestContentBufferSize { get { return default(long); } set { } } public int MaxResponseHeadersLength { get { return default(int); } set { } } public bool PreAuthenticate { get { return default(bool); } set { } } public System.Collections.Generic.IDictionary<string, object> Properties { get { return default(System.Collections.Generic.IDictionary<string, object>); } } public System.Net.IWebProxy Proxy { get { return default(System.Net.IWebProxy); } set { } } public System.Func<System.Net.Http.HttpRequestMessage, System.Security.Cryptography.X509Certificates.X509Certificate2, System.Security.Cryptography.X509Certificates.X509Chain, System.Net.Security.SslPolicyErrors, bool> ServerCertificateCustomValidationCallback { get { return 
default(System.Func<System.Net.Http.HttpRequestMessage, System.Security.Cryptography.X509Certificates.X509Certificate2, System.Security.Cryptography.X509Certificates.X509Chain, System.Net.Security.SslPolicyErrors, bool>); } set { } } public System.Security.Authentication.SslProtocols SslProtocols { get { return default(System.Security.Authentication.SslProtocols); } set { } } public virtual bool SupportsAutomaticDecompression { get { return default(bool); } } public virtual bool SupportsProxy { get { return default(bool); } } public virtual bool SupportsRedirectConfiguration { get { return default(bool); } } public bool UseCookies { get { return default(bool); } set { } } public bool UseDefaultCredentials { get { return default(bool); } set { } } public bool UseProxy { get { return default(bool); } set { } } protected override void Dispose(bool disposing) { } protected internal override System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } } public enum HttpCompletionOption { ResponseContentRead = 0, ResponseHeadersRead = 1, } public abstract partial class HttpContent : System.IDisposable { protected HttpContent() { } public System.Net.Http.Headers.HttpContentHeaders Headers { get { return default(System.Net.Http.Headers.HttpContentHeaders); } } public System.Threading.Tasks.Task CopyToAsync(System.IO.Stream stream) { return default(System.Threading.Tasks.Task); } public System.Threading.Tasks.Task CopyToAsync(System.IO.Stream stream, System.Net.TransportContext context) { return default(System.Threading.Tasks.Task); } protected virtual System.Threading.Tasks.Task<System.IO.Stream> CreateContentReadStreamAsync() { return default(System.Threading.Tasks.Task<System.IO.Stream>); } public void Dispose() { } protected virtual void Dispose(bool disposing) { } public 
System.Threading.Tasks.Task LoadIntoBufferAsync() { return default(System.Threading.Tasks.Task); } public System.Threading.Tasks.Task LoadIntoBufferAsync(long maxBufferSize) { return default(System.Threading.Tasks.Task); } public System.Threading.Tasks.Task<byte[]> ReadAsByteArrayAsync() { return default(System.Threading.Tasks.Task<byte[]>); } public System.Threading.Tasks.Task<System.IO.Stream> ReadAsStreamAsync() { return default(System.Threading.Tasks.Task<System.IO.Stream>); } public System.Threading.Tasks.Task<string> ReadAsStringAsync() { return default(System.Threading.Tasks.Task<string>); } protected abstract System.Threading.Tasks.Task SerializeToStreamAsync(System.IO.Stream stream, System.Net.TransportContext context); protected internal abstract bool TryComputeLength(out long length); } public abstract partial class HttpMessageHandler : System.IDisposable { protected HttpMessageHandler() { } public void Dispose() { } protected virtual void Dispose(bool disposing) { } protected internal abstract System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Threading.CancellationToken cancellationToken); } public partial class HttpMessageInvoker : System.IDisposable { public HttpMessageInvoker(System.Net.Http.HttpMessageHandler handler) { } public HttpMessageInvoker(System.Net.Http.HttpMessageHandler handler, bool disposeHandler) { } public void Dispose() { } protected virtual void Dispose(bool disposing) { } public virtual System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } } public partial class HttpMethod : System.IEquatable<System.Net.Http.HttpMethod> { public HttpMethod(string method) { } public static System.Net.Http.HttpMethod Delete { get { return 
default(System.Net.Http.HttpMethod); } } public static System.Net.Http.HttpMethod Get { get { return default(System.Net.Http.HttpMethod); } } public static System.Net.Http.HttpMethod Head { get { return default(System.Net.Http.HttpMethod); } } public string Method { get { return default(string); } } public static System.Net.Http.HttpMethod Options { get { return default(System.Net.Http.HttpMethod); } } public static System.Net.Http.HttpMethod Post { get { return default(System.Net.Http.HttpMethod); } } public static System.Net.Http.HttpMethod Put { get { return default(System.Net.Http.HttpMethod); } } public static System.Net.Http.HttpMethod Trace { get { return default(System.Net.Http.HttpMethod); } } public bool Equals(System.Net.Http.HttpMethod other) { return default(bool); } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static bool operator ==(System.Net.Http.HttpMethod left, System.Net.Http.HttpMethod right) { return default(bool); } public static bool operator !=(System.Net.Http.HttpMethod left, System.Net.Http.HttpMethod right) { return default(bool); } public override string ToString() { return default(string); } } public partial class HttpRequestException : System.Exception { public HttpRequestException() { } public HttpRequestException(string message) { } public HttpRequestException(string message, System.Exception inner) { } } public partial class HttpRequestMessage : System.IDisposable { public HttpRequestMessage() { } public HttpRequestMessage(System.Net.Http.HttpMethod method, string requestUri) { } public HttpRequestMessage(System.Net.Http.HttpMethod method, System.Uri requestUri) { } public System.Net.Http.HttpContent Content { get { return default(System.Net.Http.HttpContent); } set { } } public System.Net.Http.Headers.HttpRequestHeaders Headers { get { return default(System.Net.Http.Headers.HttpRequestHeaders); } } public System.Net.Http.HttpMethod Method { get 
{ return default(System.Net.Http.HttpMethod); } set { } } public System.Collections.Generic.IDictionary<string, object> Properties { get { return default(System.Collections.Generic.IDictionary<string, object>); } } public System.Uri RequestUri { get { return default(System.Uri); } set { } } public System.Version Version { get { return default(System.Version); } set { } } public void Dispose() { } protected virtual void Dispose(bool disposing) { } public override string ToString() { return default(string); } } public partial class HttpResponseMessage : System.IDisposable { public HttpResponseMessage() { } public HttpResponseMessage(System.Net.HttpStatusCode statusCode) { } public System.Net.Http.HttpContent Content { get { return default(System.Net.Http.HttpContent); } set { } } public System.Net.Http.Headers.HttpResponseHeaders Headers { get { return default(System.Net.Http.Headers.HttpResponseHeaders); } } public bool IsSuccessStatusCode { get { return default(bool); } } public string ReasonPhrase { get { return default(string); } set { } } public System.Net.Http.HttpRequestMessage RequestMessage { get { return default(System.Net.Http.HttpRequestMessage); } set { } } public System.Net.HttpStatusCode StatusCode { get { return default(System.Net.HttpStatusCode); } set { } } public System.Version Version { get { return default(System.Version); } set { } } public void Dispose() { } protected virtual void Dispose(bool disposing) { } public System.Net.Http.HttpResponseMessage EnsureSuccessStatusCode() { return default(System.Net.Http.HttpResponseMessage); } public override string ToString() { return default(string); } } public abstract partial class MessageProcessingHandler : System.Net.Http.DelegatingHandler { protected MessageProcessingHandler() { } protected MessageProcessingHandler(System.Net.Http.HttpMessageHandler innerHandler) { } protected abstract System.Net.Http.HttpRequestMessage ProcessRequest(System.Net.Http.HttpRequestMessage request, 
System.Threading.CancellationToken cancellationToken); protected abstract System.Net.Http.HttpResponseMessage ProcessResponse(System.Net.Http.HttpResponseMessage response, System.Threading.CancellationToken cancellationToken); protected internal sealed override System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage> SendAsync(System.Net.Http.HttpRequestMessage request, System.Threading.CancellationToken cancellationToken) { return default(System.Threading.Tasks.Task<System.Net.Http.HttpResponseMessage>); } } public partial class MultipartContent : System.Net.Http.HttpContent, System.Collections.Generic.IEnumerable<System.Net.Http.HttpContent>, System.Collections.IEnumerable { public MultipartContent() { } public MultipartContent(string subtype) { } public MultipartContent(string subtype, string boundary) { } public virtual void Add(System.Net.Http.HttpContent content) { } protected override void Dispose(bool disposing) { } public System.Collections.Generic.IEnumerator<System.Net.Http.HttpContent> GetEnumerator() { return default(System.Collections.Generic.IEnumerator<System.Net.Http.HttpContent>); } protected override System.Threading.Tasks.Task SerializeToStreamAsync(System.IO.Stream stream, System.Net.TransportContext context) { return default(System.Threading.Tasks.Task); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return default(System.Collections.IEnumerator); } protected internal override bool TryComputeLength(out long length) { length = default(long); return default(bool); } } public partial class MultipartFormDataContent : System.Net.Http.MultipartContent { public MultipartFormDataContent() { } public MultipartFormDataContent(string boundary) { } public override void Add(System.Net.Http.HttpContent content) { } public void Add(System.Net.Http.HttpContent content, string name) { } public void Add(System.Net.Http.HttpContent content, string name, string fileName) { } } public partial class StreamContent : 
System.Net.Http.HttpContent { public StreamContent(System.IO.Stream content) { } public StreamContent(System.IO.Stream content, int bufferSize) { } protected override System.Threading.Tasks.Task<System.IO.Stream> CreateContentReadStreamAsync() { return default(System.Threading.Tasks.Task<System.IO.Stream>); } protected override void Dispose(bool disposing) { } protected override System.Threading.Tasks.Task SerializeToStreamAsync(System.IO.Stream stream, System.Net.TransportContext context) { return default(System.Threading.Tasks.Task); } protected internal override bool TryComputeLength(out long length) { length = default(long); return default(bool); } } public partial class StringContent : System.Net.Http.ByteArrayContent { public StringContent(string content) : base(default(byte[])) { } public StringContent(string content, System.Text.Encoding encoding) : base(default(byte[])) { } public StringContent(string content, System.Text.Encoding encoding, string mediaType) : base(default(byte[])) { } } } namespace System.Net.Http.Headers { public partial class AuthenticationHeaderValue { public AuthenticationHeaderValue(string scheme) { } public AuthenticationHeaderValue(string scheme, string parameter) { } public string Parameter { get { return default(string); } } public string Scheme { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.AuthenticationHeaderValue Parse(string input) { return default(System.Net.Http.Headers.AuthenticationHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.AuthenticationHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.AuthenticationHeaderValue); return default(bool); } } public partial class CacheControlHeaderValue { public CacheControlHeaderValue() { } public 
System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue> Extensions { get { return default(System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public System.Nullable<System.TimeSpan> MaxAge { get { return default(System.Nullable<System.TimeSpan>); } set { } } public bool MaxStale { get { return default(bool); } set { } } public System.Nullable<System.TimeSpan> MaxStaleLimit { get { return default(System.Nullable<System.TimeSpan>); } set { } } public System.Nullable<System.TimeSpan> MinFresh { get { return default(System.Nullable<System.TimeSpan>); } set { } } public bool MustRevalidate { get { return default(bool); } set { } } public bool NoCache { get { return default(bool); } set { } } public System.Collections.Generic.ICollection<string> NoCacheHeaders { get { return default(System.Collections.Generic.ICollection<string>); } } public bool NoStore { get { return default(bool); } set { } } public bool NoTransform { get { return default(bool); } set { } } public bool OnlyIfCached { get { return default(bool); } set { } } public bool Private { get { return default(bool); } set { } } public System.Collections.Generic.ICollection<string> PrivateHeaders { get { return default(System.Collections.Generic.ICollection<string>); } } public bool ProxyRevalidate { get { return default(bool); } set { } } public bool Public { get { return default(bool); } set { } } public System.Nullable<System.TimeSpan> SharedMaxAge { get { return default(System.Nullable<System.TimeSpan>); } set { } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.CacheControlHeaderValue Parse(string input) { return default(System.Net.Http.Headers.CacheControlHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.CacheControlHeaderValue 
parsedValue) { parsedValue = default(System.Net.Http.Headers.CacheControlHeaderValue); return default(bool); } } public partial class ContentDispositionHeaderValue { protected ContentDispositionHeaderValue(System.Net.Http.Headers.ContentDispositionHeaderValue source) { } public ContentDispositionHeaderValue(string dispositionType) { } public System.Nullable<System.DateTimeOffset> CreationDate { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public string DispositionType { get { return default(string); } set { } } public string FileName { get { return default(string); } set { } } public string FileNameStar { get { return default(string); } set { } } public System.Nullable<System.DateTimeOffset> ModificationDate { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public string Name { get { return default(string); } set { } } public System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue> Parameters { get { return default(System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public System.Nullable<System.DateTimeOffset> ReadDate { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public System.Nullable<long> Size { get { return default(System.Nullable<long>); } set { } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.ContentDispositionHeaderValue Parse(string input) { return default(System.Net.Http.Headers.ContentDispositionHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.ContentDispositionHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.ContentDispositionHeaderValue); return default(bool); } } public partial class ContentRangeHeaderValue { public ContentRangeHeaderValue(long length) { } public 
ContentRangeHeaderValue(long from, long to) { } public ContentRangeHeaderValue(long from, long to, long length) { } public System.Nullable<long> From { get { return default(System.Nullable<long>); } } public bool HasLength { get { return default(bool); } } public bool HasRange { get { return default(bool); } } public System.Nullable<long> Length { get { return default(System.Nullable<long>); } } public System.Nullable<long> To { get { return default(System.Nullable<long>); } } public string Unit { get { return default(string); } set { } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.ContentRangeHeaderValue Parse(string input) { return default(System.Net.Http.Headers.ContentRangeHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.ContentRangeHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.ContentRangeHeaderValue); return default(bool); } } public partial class EntityTagHeaderValue { public EntityTagHeaderValue(string tag) { } public EntityTagHeaderValue(string tag, bool isWeak) { } public static System.Net.Http.Headers.EntityTagHeaderValue Any { get { return default(System.Net.Http.Headers.EntityTagHeaderValue); } } public bool IsWeak { get { return default(bool); } } public string Tag { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.EntityTagHeaderValue Parse(string input) { return default(System.Net.Http.Headers.EntityTagHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.EntityTagHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.EntityTagHeaderValue); return 
default(bool); } }
// NOTE(review): the fragment above is the tail of a type whose declaration precedes this chunk.
// Everything below (through the closing brace of the enclosing namespace) is auto-generated
// reference-assembly stub code for the System.Net.Http.Headers API surface: constructors are
// internal/empty and every member body just returns default(...). These declarations exist only
// so code can compile against the API shape — they are not functional implementations.
// Do not hand-edit; regenerate from the reference assembly instead.
public sealed partial class HttpContentHeaders : System.Net.Http.Headers.HttpHeaders { internal HttpContentHeaders() { } public System.Collections.Generic.ICollection<string> Allow { get { return default(System.Collections.Generic.ICollection<string>); } } public System.Net.Http.Headers.ContentDispositionHeaderValue ContentDisposition { get { return default(System.Net.Http.Headers.ContentDispositionHeaderValue); } set { } } public System.Collections.Generic.ICollection<string> ContentEncoding { get { return default(System.Collections.Generic.ICollection<string>); } } public System.Collections.Generic.ICollection<string> ContentLanguage { get { return default(System.Collections.Generic.ICollection<string>); } } public System.Nullable<long> ContentLength { get { return default(System.Nullable<long>); } set { } } public System.Uri ContentLocation { get { return default(System.Uri); } set { } } public byte[] ContentMD5 { get { return default(byte[]); } set { } } public System.Net.Http.Headers.ContentRangeHeaderValue ContentRange { get { return default(System.Net.Http.Headers.ContentRangeHeaderValue); } set { } } public System.Net.Http.Headers.MediaTypeHeaderValue ContentType { get { return default(System.Net.Http.Headers.MediaTypeHeaderValue); } set { } } public System.Nullable<System.DateTimeOffset> Expires { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public System.Nullable<System.DateTimeOffset> LastModified { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } } public abstract partial class HttpHeaders : System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, System.Collections.Generic.IEnumerable<string>>>, System.Collections.IEnumerable { protected HttpHeaders() { } public void Add(string name, System.Collections.Generic.IEnumerable<string> values) { } public void Add(string name, string value) { } public void Clear() { } public bool Contains(string name) {
return default(bool); } public System.Collections.Generic.IEnumerator<System.Collections.Generic.KeyValuePair<string, System.Collections.Generic.IEnumerable<string>>> GetEnumerator() { return default(System.Collections.Generic.IEnumerator<System.Collections.Generic.KeyValuePair<string, System.Collections.Generic.IEnumerable<string>>>); } public System.Collections.Generic.IEnumerable<string> GetValues(string name) { return default(System.Collections.Generic.IEnumerable<string>); } public bool Remove(string name) { return default(bool); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return default(System.Collections.IEnumerator); } public override string ToString() { return default(string); } public bool TryAddWithoutValidation(string name, System.Collections.Generic.IEnumerable<string> values) { return default(bool); } public bool TryAddWithoutValidation(string name, string value) { return default(bool); } public bool TryGetValues(string name, out System.Collections.Generic.IEnumerable<string> values) { values = default(System.Collections.Generic.IEnumerable<string>); return default(bool); } } public sealed partial class HttpHeaderValueCollection<T> : System.Collections.Generic.ICollection<T>, System.Collections.Generic.IEnumerable<T>, System.Collections.IEnumerable where T : class { internal HttpHeaderValueCollection() { } public int Count { get { return default(int); } } public bool IsReadOnly { get { return default(bool); } } public void Add(T item) { } public void Clear() { } public bool Contains(T item) { return default(bool); } public void CopyTo(T[] array, int arrayIndex) { } public System.Collections.Generic.IEnumerator<T> GetEnumerator() { return default(System.Collections.Generic.IEnumerator<T>); } public void ParseAdd(string input) { } public bool Remove(T item) { return default(bool); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return default(System.Collections.IEnumerator); }
public override string ToString() { return default(string); } public bool TryParseAdd(string input) { return default(bool); } } public sealed partial class HttpRequestHeaders : System.Net.Http.Headers.HttpHeaders { internal HttpRequestHeaders() { } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.MediaTypeWithQualityHeaderValue> Accept { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.MediaTypeWithQualityHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.StringWithQualityHeaderValue> AcceptCharset { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.StringWithQualityHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.StringWithQualityHeaderValue> AcceptEncoding { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.StringWithQualityHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.StringWithQualityHeaderValue> AcceptLanguage { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.StringWithQualityHeaderValue>); } } public System.Net.Http.Headers.AuthenticationHeaderValue Authorization { get { return default(System.Net.Http.Headers.AuthenticationHeaderValue); } set { } } public System.Net.Http.Headers.CacheControlHeaderValue CacheControl { get { return default(System.Net.Http.Headers.CacheControlHeaderValue); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<string> Connection { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<string>); } } public System.Nullable<bool> ConnectionClose { get { return default(System.Nullable<bool>); } set { } } public System.Nullable<System.DateTimeOffset> Date { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public
System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.NameValueWithParametersHeaderValue> Expect { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.NameValueWithParametersHeaderValue>); } } public System.Nullable<bool> ExpectContinue { get { return default(System.Nullable<bool>); } set { } } public string From { get { return default(string); } set { } } public string Host { get { return default(string); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.EntityTagHeaderValue> IfMatch { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.EntityTagHeaderValue>); } } public System.Nullable<System.DateTimeOffset> IfModifiedSince { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.EntityTagHeaderValue> IfNoneMatch { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.EntityTagHeaderValue>); } } public System.Net.Http.Headers.RangeConditionHeaderValue IfRange { get { return default(System.Net.Http.Headers.RangeConditionHeaderValue); } set { } } public System.Nullable<System.DateTimeOffset> IfUnmodifiedSince { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public System.Nullable<int> MaxForwards { get { return default(System.Nullable<int>); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.NameValueHeaderValue> Pragma { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public System.Net.Http.Headers.AuthenticationHeaderValue ProxyAuthorization { get { return default(System.Net.Http.Headers.AuthenticationHeaderValue); } set { } } public System.Net.Http.Headers.RangeHeaderValue Range { get { return
default(System.Net.Http.Headers.RangeHeaderValue); } set { } } public System.Uri Referrer { get { return default(System.Uri); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.TransferCodingWithQualityHeaderValue> TE { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.TransferCodingWithQualityHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<string> Trailer { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<string>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.TransferCodingHeaderValue> TransferEncoding { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.TransferCodingHeaderValue>); } } public System.Nullable<bool> TransferEncodingChunked { get { return default(System.Nullable<bool>); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductHeaderValue> Upgrade { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductInfoHeaderValue> UserAgent { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductInfoHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ViaHeaderValue> Via { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ViaHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.WarningHeaderValue> Warning { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.WarningHeaderValue>); } } } public sealed partial class HttpResponseHeaders : System.Net.Http.Headers.HttpHeaders { internal HttpResponseHeaders() { } public
System.Net.Http.Headers.HttpHeaderValueCollection<string> AcceptRanges { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<string>); } } public System.Nullable<System.TimeSpan> Age { get { return default(System.Nullable<System.TimeSpan>); } set { } } public System.Net.Http.Headers.CacheControlHeaderValue CacheControl { get { return default(System.Net.Http.Headers.CacheControlHeaderValue); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<string> Connection { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<string>); } } public System.Nullable<bool> ConnectionClose { get { return default(System.Nullable<bool>); } set { } } public System.Nullable<System.DateTimeOffset> Date { get { return default(System.Nullable<System.DateTimeOffset>); } set { } } public System.Net.Http.Headers.EntityTagHeaderValue ETag { get { return default(System.Net.Http.Headers.EntityTagHeaderValue); } set { } } public System.Uri Location { get { return default(System.Uri); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.NameValueHeaderValue> Pragma { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.AuthenticationHeaderValue> ProxyAuthenticate { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.AuthenticationHeaderValue>); } } public System.Net.Http.Headers.RetryConditionHeaderValue RetryAfter { get { return default(System.Net.Http.Headers.RetryConditionHeaderValue); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductInfoHeaderValue> Server { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductInfoHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<string> Trailer { get {
return default(System.Net.Http.Headers.HttpHeaderValueCollection<string>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.TransferCodingHeaderValue> TransferEncoding { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.TransferCodingHeaderValue>); } } public System.Nullable<bool> TransferEncodingChunked { get { return default(System.Nullable<bool>); } set { } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductHeaderValue> Upgrade { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ProductHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<string> Vary { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<string>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ViaHeaderValue> Via { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.ViaHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.WarningHeaderValue> Warning { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.WarningHeaderValue>); } } public System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.AuthenticationHeaderValue> WwwAuthenticate { get { return default(System.Net.Http.Headers.HttpHeaderValueCollection<System.Net.Http.Headers.AuthenticationHeaderValue>); } } } public partial class MediaTypeHeaderValue { protected MediaTypeHeaderValue(System.Net.Http.Headers.MediaTypeHeaderValue source) { } public MediaTypeHeaderValue(string mediaType) { } public string CharSet { get { return default(string); } set { } } public string MediaType { get { return default(string); } set { } } public System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue> Parameters { get { return
default(System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.MediaTypeHeaderValue Parse(string input) { return default(System.Net.Http.Headers.MediaTypeHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.MediaTypeHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.MediaTypeHeaderValue); return default(bool); } } public sealed partial class MediaTypeWithQualityHeaderValue : System.Net.Http.Headers.MediaTypeHeaderValue { public MediaTypeWithQualityHeaderValue(string mediaType) : base(default(System.Net.Http.Headers.MediaTypeHeaderValue)) { } public MediaTypeWithQualityHeaderValue(string mediaType, double quality) : base(default(System.Net.Http.Headers.MediaTypeHeaderValue)) { } public System.Nullable<double> Quality { get { return default(System.Nullable<double>); } set { } } public static new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue Parse(string input) { return default(System.Net.Http.Headers.MediaTypeWithQualityHeaderValue); } public static bool TryParse(string input, out System.Net.Http.Headers.MediaTypeWithQualityHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.MediaTypeWithQualityHeaderValue); return default(bool); } } public partial class NameValueHeaderValue { protected NameValueHeaderValue(System.Net.Http.Headers.NameValueHeaderValue source) { } public NameValueHeaderValue(string name) { } public NameValueHeaderValue(string name, string value) { } public string Name { get { return default(string); } } public string Value { get { return default(string); } set { } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static
System.Net.Http.Headers.NameValueHeaderValue Parse(string input) { return default(System.Net.Http.Headers.NameValueHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.NameValueHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.NameValueHeaderValue); return default(bool); } } public partial class NameValueWithParametersHeaderValue : System.Net.Http.Headers.NameValueHeaderValue { protected NameValueWithParametersHeaderValue(System.Net.Http.Headers.NameValueWithParametersHeaderValue source) : base(default(string)) { } public NameValueWithParametersHeaderValue(string name) : base(default(string)) { } public NameValueWithParametersHeaderValue(string name, string value) : base(default(string)) { } public System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue> Parameters { get { return default(System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static new System.Net.Http.Headers.NameValueWithParametersHeaderValue Parse(string input) { return default(System.Net.Http.Headers.NameValueWithParametersHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.NameValueWithParametersHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.NameValueWithParametersHeaderValue); return default(bool); } } public partial class ProductHeaderValue { public ProductHeaderValue(string name) { } public ProductHeaderValue(string name, string version) { } public string Name { get { return default(string); } } public string Version { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return
default(int); } public static System.Net.Http.Headers.ProductHeaderValue Parse(string input) { return default(System.Net.Http.Headers.ProductHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.ProductHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.ProductHeaderValue); return default(bool); } } public partial class ProductInfoHeaderValue { public ProductInfoHeaderValue(System.Net.Http.Headers.ProductHeaderValue product) { } public ProductInfoHeaderValue(string comment) { } public ProductInfoHeaderValue(string productName, string productVersion) { } public string Comment { get { return default(string); } } public System.Net.Http.Headers.ProductHeaderValue Product { get { return default(System.Net.Http.Headers.ProductHeaderValue); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.ProductInfoHeaderValue Parse(string input) { return default(System.Net.Http.Headers.ProductInfoHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.ProductInfoHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.ProductInfoHeaderValue); return default(bool); } } public partial class RangeConditionHeaderValue { public RangeConditionHeaderValue(System.DateTimeOffset date) { } public RangeConditionHeaderValue(System.Net.Http.Headers.EntityTagHeaderValue entityTag) { } public RangeConditionHeaderValue(string entityTag) { } public System.Nullable<System.DateTimeOffset> Date { get { return default(System.Nullable<System.DateTimeOffset>); } } public System.Net.Http.Headers.EntityTagHeaderValue EntityTag { get { return default(System.Net.Http.Headers.EntityTagHeaderValue); } } public override bool Equals(object obj) { return default(bool); } public
override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.RangeConditionHeaderValue Parse(string input) { return default(System.Net.Http.Headers.RangeConditionHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.RangeConditionHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.RangeConditionHeaderValue); return default(bool); } } public partial class RangeHeaderValue { public RangeHeaderValue() { } public RangeHeaderValue(System.Nullable<long> from, System.Nullable<long> to) { } public System.Collections.Generic.ICollection<System.Net.Http.Headers.RangeItemHeaderValue> Ranges { get { return default(System.Collections.Generic.ICollection<System.Net.Http.Headers.RangeItemHeaderValue>); } } public string Unit { get { return default(string); } set { } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.RangeHeaderValue Parse(string input) { return default(System.Net.Http.Headers.RangeHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.RangeHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.RangeHeaderValue); return default(bool); } } public partial class RangeItemHeaderValue { public RangeItemHeaderValue(System.Nullable<long> from, System.Nullable<long> to) { } public System.Nullable<long> From { get { return default(System.Nullable<long>); } } public System.Nullable<long> To { get { return default(System.Nullable<long>); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public override string ToString() { return default(string); } } public partial class RetryConditionHeaderValue { public
RetryConditionHeaderValue(System.DateTimeOffset date) { } public RetryConditionHeaderValue(System.TimeSpan delta) { } public System.Nullable<System.DateTimeOffset> Date { get { return default(System.Nullable<System.DateTimeOffset>); } } public System.Nullable<System.TimeSpan> Delta { get { return default(System.Nullable<System.TimeSpan>); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.RetryConditionHeaderValue Parse(string input) { return default(System.Net.Http.Headers.RetryConditionHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.RetryConditionHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.RetryConditionHeaderValue); return default(bool); } } public partial class StringWithQualityHeaderValue { public StringWithQualityHeaderValue(string value) { } public StringWithQualityHeaderValue(string value, double quality) { } public System.Nullable<double> Quality { get { return default(System.Nullable<double>); } } public string Value { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.StringWithQualityHeaderValue Parse(string input) { return default(System.Net.Http.Headers.StringWithQualityHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.StringWithQualityHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.StringWithQualityHeaderValue); return default(bool); } } public partial class TransferCodingHeaderValue { protected TransferCodingHeaderValue(System.Net.Http.Headers.TransferCodingHeaderValue source) { } public TransferCodingHeaderValue(string value) { } public
System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue> Parameters { get { return default(System.Collections.Generic.ICollection<System.Net.Http.Headers.NameValueHeaderValue>); } } public string Value { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.TransferCodingHeaderValue Parse(string input) { return default(System.Net.Http.Headers.TransferCodingHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.TransferCodingHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.TransferCodingHeaderValue); return default(bool); } } public sealed partial class TransferCodingWithQualityHeaderValue : System.Net.Http.Headers.TransferCodingHeaderValue { public TransferCodingWithQualityHeaderValue(string value) : base(default(System.Net.Http.Headers.TransferCodingHeaderValue)) { } public TransferCodingWithQualityHeaderValue(string value, double quality) : base(default(System.Net.Http.Headers.TransferCodingHeaderValue)) { } public System.Nullable<double> Quality { get { return default(System.Nullable<double>); } set { } } public static new System.Net.Http.Headers.TransferCodingWithQualityHeaderValue Parse(string input) { return default(System.Net.Http.Headers.TransferCodingWithQualityHeaderValue); } public static bool TryParse(string input, out System.Net.Http.Headers.TransferCodingWithQualityHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.TransferCodingWithQualityHeaderValue); return default(bool); } } public partial class ViaHeaderValue { public ViaHeaderValue(string protocolVersion, string receivedBy) { } public ViaHeaderValue(string protocolVersion, string receivedBy, string protocolName) { } public ViaHeaderValue(string protocolVersion, string receivedBy, string
protocolName, string comment) { } public string Comment { get { return default(string); } } public string ProtocolName { get { return default(string); } } public string ProtocolVersion { get { return default(string); } } public string ReceivedBy { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.ViaHeaderValue Parse(string input) { return default(System.Net.Http.Headers.ViaHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.ViaHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.ViaHeaderValue); return default(bool); } } public partial class WarningHeaderValue { public WarningHeaderValue(int code, string agent, string text) { } public WarningHeaderValue(int code, string agent, string text, System.DateTimeOffset date) { } public string Agent { get { return default(string); } } public int Code { get { return default(int); } } public System.Nullable<System.DateTimeOffset> Date { get { return default(System.Nullable<System.DateTimeOffset>); } } public string Text { get { return default(string); } } public override bool Equals(object obj) { return default(bool); } public override int GetHashCode() { return default(int); } public static System.Net.Http.Headers.WarningHeaderValue Parse(string input) { return default(System.Net.Http.Headers.WarningHeaderValue); } public override string ToString() { return default(string); } public static bool TryParse(string input, out System.Net.Http.Headers.WarningHeaderValue parsedValue) { parsedValue = default(System.Net.Http.Headers.WarningHeaderValue); return default(bool); } } }
// // Authors: // Atsushi Enomoto // // Copyright 2007 Novell (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
// NOTE(review): the section below is vendored Mono source for System.Xml.Linq.XElement
// (license header immediately above). The XElement class continues beyond this chunk
// (it is cut off mid-declaration at "public void Save (Stream"), so only comments are
// added here; no code tokens are changed. Line breaks are restored around '//' comments
// and '#if'/'#else'/'#endif' directives, which must each occupy their own line.
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;

namespace System.Xml.Linq
{
	[XmlSchemaProvider (null, IsAny = true)]
	public class XElement : XContainer, IXmlSerializable
	{
		// Shared empty result; EmptySequence always returns this same instance.
		static IEnumerable <XElement> emptySequence = new List <XElement> ();

		public static IEnumerable <XElement> EmptySequence {
			get { return emptySequence; }
		}

		XName name;
		// Attributes are kept as a linked list via XAttribute.NextAttribute.
		XAttribute attr_first, attr_last;
		// Tracks whether the element was written/read as an empty tag (<e/>).
		bool explicit_is_empty = true;

		public XElement (XName name, object value)
		{
			this.name = name;
			Add (value);
		}

		// Copy constructor: clones attributes and child nodes of source.
		public XElement (XElement source)
		{
			name = source.name;
			Add (source.Attributes ());
			Add (source.Nodes ());
		}

		public XElement (XName name)
		{
			this.name = name;
		}

		public XElement (XName name, params object [] contents)
		{
			this.name = name;
			Add (contents);
		}

		public XElement (XStreamingElement source)
		{
			this.name = source.Name;
			Add (source.Contents);
		}

		// Explicit conversion operators: non-nullable forms throw ArgumentNullException
		// on a null element; nullable forms return null instead.
		[CLSCompliant (false)]
		public static explicit operator bool (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XUtil.ConvertToBoolean (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator bool? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (bool?) null : XUtil.ConvertToBoolean (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator DateTime (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XUtil.ToDateTime (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator DateTime? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (DateTime?) null : XUtil.ToDateTime (element.Value);
		}

#if !TARGET_JVM // Same as for System.Xml.XmlConvert.ToDateTimeOffset
		[CLSCompliant (false)]
		public static explicit operator DateTimeOffset (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToDateTimeOffset (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator DateTimeOffset? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (DateTimeOffset?) null : XmlConvert.ToDateTimeOffset (element.Value);
		}
#endif

		[CLSCompliant (false)]
		public static explicit operator decimal (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToDecimal (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator decimal? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (decimal?) null : XmlConvert.ToDecimal (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator double (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToDouble (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator double? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (double?) null : XmlConvert.ToDouble (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator float (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToSingle (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator float? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (float?) null : XmlConvert.ToSingle (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator Guid (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToGuid (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator Guid? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (Guid?) null : XmlConvert.ToGuid (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator int (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToInt32 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator int? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (int?) null : XmlConvert.ToInt32 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator long (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToInt64 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator long? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (long?) null : XmlConvert.ToInt64 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator uint (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToUInt32 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator uint? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (uint?) null : XmlConvert.ToUInt32 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator ulong (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToUInt64 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator ulong? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (ulong?) null : XmlConvert.ToUInt64 (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator TimeSpan (XElement element)
		{
			if (element == null)
				throw new ArgumentNullException ("element");
			return XmlConvert.ToTimeSpan (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator TimeSpan? (XElement element)
		{
			if (element == null)
				return null;
			return element.Value == null ? (TimeSpan?) null : XmlConvert.ToTimeSpan (element.Value);
		}

		[CLSCompliant (false)]
		public static explicit operator string (XElement element)
		{
			if (element == null)
				return null;
			return element.Value;
		}

		public XAttribute FirstAttribute {
			get { return attr_first; }
			internal set { attr_first = value; }
		}

		public XAttribute LastAttribute {
			get { return attr_last; }
			internal set { attr_last = value; }
		}

		public bool HasAttributes {
			get { return attr_first != null; }
		}

		public bool HasElements {
			get {
				foreach (object o in Nodes ())
					if (o is XElement)
						return true;
				return false;
			}
		}

		// Empty means: no child nodes AND the element was declared as an empty tag.
		public bool IsEmpty {
			get { return !Nodes ().GetEnumerator ().MoveNext () && explicit_is_empty; }
			internal set { explicit_is_empty = value; }
		}

		public XName Name {
			get { return name; }
			set {
				if (value == null)
					throw new ArgumentNullException ("Name");
				name = value;
			}
		}

		public override XmlNodeType NodeType {
			get { return XmlNodeType.Element; }
		}

		// Concatenated text content of this element's XText descendants (via nested
		// XElement.Value recursion); setter replaces all child nodes with the string.
		public string Value {
			get {
				StringBuilder sb = null;
				foreach (XNode n in Nodes ()) {
					if (sb == null)
						sb = new StringBuilder ();
					if (n is XText)
						sb.Append (((XText) n).Value);
					else if (n is XElement)
						sb.Append (((XElement) n).Value);
				}
				return sb == null ? String.Empty : sb.ToString ();
			}
			set {
				RemoveNodes ();
				Add (value);
			}
		}

		// Walks Parent links upward, collecting XElement ancestors that match name
		// (or all of them when name is null); optionally includes this element first.
		IEnumerable <XElement> GetAncestorList (XName name, bool getMeIn)
		{
			List <XElement> list = new List <XElement> ();
			if (getMeIn)
				list.Add (this);
			for (XElement el = Parent as XElement; el != null; el = el.Parent as XElement)
				if (name == null || el.Name == name)
					list.Add (el);
			return list;
		}

		// First attribute with the given name, or null.
		public XAttribute Attribute (XName name)
		{
			foreach (XAttribute a in Attributes ())
				if (a.Name == name)
					return a;
			return null;
		}

		// Iterates the attribute linked list; 'next' is captured before yielding so
		// removal of the current attribute during iteration does not break traversal.
		public IEnumerable <XAttribute> Attributes ()
		{
			XAttribute next;
			for (XAttribute a = attr_first; a != null; a = next) {
				next = a.NextAttribute;
				yield return a;
			}
		}

		// huh?
		public IEnumerable <XAttribute> Attributes (XName name)
		{
			foreach (XAttribute a in Attributes ())
				if (a.Name == name)
					yield return a;
		}

		// Applies common reader settings: allow DTDs, and honor PreserveWhitespace.
		static void DefineDefaultSettings (XmlReaderSettings settings, LoadOptions options)
		{
#if MOONLIGHT
			// 2.1 has a DtdProcessing property which defaults to DtdProcessing.Prohibit
			settings.DtdProcessing = DtdProcessing.Parse;
#else
			settings.ProhibitDtd = false;
#endif
			settings.IgnoreWhitespace = (options & LoadOptions.PreserveWhitespace) == 0;
		}

		static XmlReaderSettings CreateDefaultSettings (LoadOptions options)
		{
			var settings = new XmlReaderSettings ();
			DefineDefaultSettings (settings, options);
			return settings;
		}

		public static XElement Load (string uri)
		{
			return Load (uri, LoadOptions.None);
		}

		public static XElement Load (string uri, LoadOptions options)
		{
			XmlReaderSettings s = CreateDefaultSettings (options);
			using (XmlReader r = XmlReader.Create (uri, s)) {
				return LoadCore (r, options);
			}
		}

		public static XElement Load (TextReader tr)
		{
			return Load (tr, LoadOptions.None);
		}

		public static XElement Load (TextReader tr, LoadOptions options)
		{
			XmlReaderSettings s = CreateDefaultSettings (options);
			using (XmlReader r = XmlReader.Create (tr, s)) {
				return LoadCore (r, options);
			}
		}

		public static XElement Load (XmlReader reader)
		{
			return Load (reader, LoadOptions.None);
		}

		public static XElement Load (XmlReader reader, LoadOptions options)
		{
			// Clone the caller's settings when present so the defaults below do not
			// mutate the caller's reader configuration.
			XmlReaderSettings s = reader.Settings != null ? reader.Settings.Clone () : new XmlReaderSettings ();
			DefineDefaultSettings (s, options);
			using (XmlReader r = XmlReader.Create (reader, s)) {
				return LoadCore (r, options);
			}
		}

#if MOONLIGHT || MOBILE || NET_4_0
		public static XElement Load (Stream stream)
		{
			return Load (stream, LoadOptions.None);
		}

		public static XElement Load (Stream stream, LoadOptions options)
		{
			XmlReaderSettings s = new XmlReaderSettings ();
			DefineDefaultSettings (s, options);
			using (XmlReader r = XmlReader.Create (stream, s)) {
				return LoadCore (r, options);
			}
		}
#endif

		// Builds an XElement from the reader's current element: name, attributes
		// (xmlns handled specially), then content unless the element is empty.
		internal static XElement LoadCore (XmlReader r, LoadOptions options)
		{
			r.MoveToContent ();
			if (r.NodeType != XmlNodeType.Element)
				throw new InvalidOperationException ("The XmlReader must be positioned at an element");
			XName name = XName.Get (r.LocalName, r.NamespaceURI);
			XElement e = new XElement (name);
			e.FillLineInfoAndBaseUri (r, options);
			if (r.MoveToFirstAttribute ()) {
				do {
					// not sure how current Orcas behavior makes sense here though ...
					if (r.LocalName == "xmlns" && r.NamespaceURI == XNamespace.Xmlns.NamespaceName)
						e.SetAttributeValue (XNamespace.None.GetName ("xmlns"), r.Value);
					else
						e.SetAttributeValue (XName.Get (r.LocalName, r.NamespaceURI), r.Value);
					e.LastAttribute.FillLineInfoAndBaseUri (r, options);
				} while (r.MoveToNextAttribute ());
				r.MoveToElement ();
			}
			if (!r.IsEmptyElement) {
				r.Read ();
				e.ReadContentFrom (r, options);
				r.ReadEndElement ();
				e.explicit_is_empty = false;
			} else {
				e.explicit_is_empty = true;
				r.Read ();
			}
			return e;
		}

		public static XElement Parse (string s)
		{
			return Parse (s, LoadOptions.None);
		}

		public static XElement Parse (string s, LoadOptions options)
		{
			return Load (new StringReader (s), options);
		}

		public void RemoveAll ()
		{
			RemoveAttributes ();
			RemoveNodes ();
		}

		// Removing attr_last repeatedly; XAttribute.Remove updates the linked list,
		// so the loop terminates when attr_first becomes null.
		public void RemoveAttributes ()
		{
			while (attr_first != null)
				attr_last.Remove ();
		}

		public void Save (string filename)
		{
			Save (filename, SaveOptions.None);
		}

		public void Save (string filename, SaveOptions options)
		{
			XmlWriterSettings s = new XmlWriterSettings ();
			if ((options & SaveOptions.DisableFormatting) == SaveOptions.None)
				s.Indent = true;
#if NET_4_0 || MOONLIGHT || MOBILE
			if ((options & SaveOptions.OmitDuplicateNamespaces) == SaveOptions.OmitDuplicateNamespaces)
				s.NamespaceHandling |= NamespaceHandling.OmitDuplicates;
#endif
			using (XmlWriter w = XmlWriter.Create (filename, s)) {
				Save (w);
			}
		}

		public void Save (TextWriter tw)
		{
			Save (tw, SaveOptions.None);
		}

		public void Save (TextWriter tw, SaveOptions options)
		{
			XmlWriterSettings s = new XmlWriterSettings ();
			if ((options & SaveOptions.DisableFormatting) == SaveOptions.None)
				s.Indent = true;
#if NET_4_0 || MOONLIGHT || MOBILE
			if ((options & SaveOptions.OmitDuplicateNamespaces) == SaveOptions.OmitDuplicateNamespaces)
				s.NamespaceHandling |= NamespaceHandling.OmitDuplicates;
#endif
			using (XmlWriter w = XmlWriter.Create (tw, s)) {
				Save (w);
			}
		}

		public void Save (XmlWriter w)
		{
			WriteTo (w);
		}

#if NET_4_0 || MOONLIGHT || MOBILE
		public void Save (Stream
stream) { Save (stream, SaveOptions.None); } public void Save (Stream stream, SaveOptions options) { XmlWriterSettings s = new XmlWriterSettings (); if ((options & SaveOptions.DisableFormatting) == SaveOptions.None) s.Indent = true; if ((options & SaveOptions.OmitDuplicateNamespaces) == SaveOptions.OmitDuplicateNamespaces) s.NamespaceHandling |= NamespaceHandling.OmitDuplicates; using (var writer = XmlWriter.Create (stream, s)){ Save (writer); } } #endif public IEnumerable <XElement> AncestorsAndSelf () { return GetAncestorList (null, true); } public IEnumerable <XElement> AncestorsAndSelf (XName name) { return GetAncestorList (name, true); } public IEnumerable <XElement> DescendantsAndSelf () { List <XElement> list = new List <XElement> (); list.Add (this); list.AddRange (Descendants ()); return list; } public IEnumerable <XElement> DescendantsAndSelf (XName name) { List <XElement> list = new List <XElement> (); if (name == this.name) list.Add (this); list.AddRange (Descendants (name)); return list; } public IEnumerable <XNode> DescendantNodesAndSelf () { yield return this; foreach (XNode node in DescendantNodes ()) yield return node; } public void SetAttributeValue (XName name, object value) { XAttribute a = Attribute (name); if (value == null) { if (a != null) a.Remove (); } else { if (a == null) { SetAttributeObject (new XAttribute (name, value)); } else a.Value = XUtil.ToString (value); } } void SetAttributeObject (XAttribute a) { a = (XAttribute) XUtil.GetDetachedObject (a); a.SetOwner (this); if (attr_first == null) { attr_first = a; attr_last = a; } else { attr_last.NextAttribute = a; a.PreviousAttribute = attr_last; attr_last = a; } } string LookupPrefix (string ns, XmlWriter w) { string prefix = ns.Length > 0 ? w.LookupPrefix (ns) : String.Empty; foreach (XAttribute a in Attributes ()) { if (a.IsNamespaceDeclaration && a.Value == ns) { if (a.Name.Namespace == XNamespace.Xmlns) prefix = a.Name.LocalName; // otherwise xmlns="..." 
break; } } return prefix; } public override void WriteTo (XmlWriter w) { // some people expect the same prefix output as in input, // in the loss of performance... see bug #466423. int createdNS = 0; string prefix = LookupPrefix (name.NamespaceName, w); Func<string> nsCreator = () => { string p = null; do { p = "p" + (++createdNS); // check conflict if (Attributes ().All (a => a.Name.LocalName != p)) break; } while (true); return p; }; if (prefix == null) prefix = nsCreator (); w.WriteStartElement (prefix, name.LocalName, name.Namespace.NamespaceName); foreach (XAttribute a in Attributes ()) { if (a.IsNamespaceDeclaration) { if (a.Name.Namespace == XNamespace.Xmlns) w.WriteAttributeString ("xmlns", a.Name.LocalName, XNamespace.Xmlns.NamespaceName, a.Value); else w.WriteAttributeString ("xmlns", a.Value); } else { string apfix = LookupPrefix (a.Name.NamespaceName, w); if (apfix == null) apfix = nsCreator (); w.WriteAttributeString (apfix, a.Name.LocalName, a.Name.Namespace.NamespaceName, a.Value); } } foreach (XNode node in Nodes ()) node.WriteTo (w); if (explicit_is_empty) w.WriteEndElement (); else w.WriteFullEndElement (); } public XNamespace GetDefaultNamespace () { for (XElement el = this; el != null; el = el.Parent) foreach (XAttribute a in el.Attributes ()) if (a.IsNamespaceDeclaration && a.Name.Namespace == XNamespace.None) return XNamespace.Get (a.Value); return XNamespace.None; // nothing is declared. } public XNamespace GetNamespaceOfPrefix (string prefix) { for (XElement el = this; el != null; el = el.Parent) foreach (XAttribute a in el.Attributes ()) if (a.IsNamespaceDeclaration && (prefix.Length == 0 && a.Name.LocalName == "xmlns" || a.Name.LocalName == prefix)) return XNamespace.Get (a.Value); return XNamespace.None; // nothing is declared. 
} public string GetPrefixOfNamespace (XNamespace ns) { foreach (string prefix in GetPrefixOfNamespaceCore (ns)) if (GetNamespaceOfPrefix (prefix) == ns) return prefix; return null; // nothing is declared } IEnumerable<string> GetPrefixOfNamespaceCore (XNamespace ns) { for (XElement el = this; el != null; el = el.Parent) foreach (XAttribute a in el.Attributes ()) if (a.IsNamespaceDeclaration && a.Value == ns.NamespaceName) yield return a.Name.Namespace == XNamespace.None ? String.Empty : a.Name.LocalName; } public void ReplaceAll (object item) { RemoveNodes (); Add (item); } public void ReplaceAll (params object [] items) { RemoveNodes (); Add (items); } public void ReplaceAttributes (object item) { RemoveAttributes (); Add (item); } public void ReplaceAttributes (params object [] items) { RemoveAttributes (); Add (items); } public void SetElementValue (XName name, object value) { var element = Element (name); if (element == null) { element = new XElement (name, value); Add (element); } else element.SetValue (value); } public void SetValue (object value) { if (value == null) throw new ArgumentNullException ("value"); if (value is XAttribute || value is XDocument || value is XDeclaration || value is XDocumentType) throw new ArgumentException (String.Format ("Node type {0} is not allowed as element value", value.GetType ())); RemoveNodes (); foreach (object o in XUtil.ExpandArray (value)) Add (o); } internal override bool OnAddingObject (object o, bool rejectAttribute, XNode refNode, bool addFirst) { if (o is XDocument || o is XDocumentType || o is XDeclaration || (rejectAttribute && o is XAttribute)) throw new ArgumentException (String.Format ("A node of type {0} cannot be added as a content", o.GetType ())); XAttribute a = o as XAttribute; if (a != null) { foreach (XAttribute ia in Attributes ()) if (a.Name == ia.Name) throw new InvalidOperationException (String.Format ("Duplicate attribute: {0}", a.Name)); SetAttributeObject (a); return true; } else if (o is string 
&& refNode is XText) { ((XText) refNode).Value += o as string; return true; } else return false; } void IXmlSerializable.WriteXml (XmlWriter writer) { Save (writer); } void IXmlSerializable.ReadXml (XmlReader reader) { ReadContentFrom (reader, LoadOptions.None); } XmlSchema IXmlSerializable.GetSchema () { return null; } } }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Runtime.InteropServices; using Xunit; using Xunit.Abstractions; namespace System.Slices.Tests { public class UsageScenarioTests { private readonly ITestOutputHelper output; public UsageScenarioTests(ITestOutputHelper output) { this.output = output; } private struct MyByte { public MyByte(byte value) { Value = value; } public byte Value { get; private set; } } [Theory] [InlineData(new byte[] { })] [InlineData(new byte[] { 0 })] [InlineData(new byte[] { 0, 1 })] [InlineData(new byte[] { 0, 1, 2 })] [InlineData(new byte[] { 0, 1, 2, 3 })] [InlineData(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 })] public void CtorSpanOverByteArrayValidCasesWithPropertiesAndBasicOperationsChecks(byte[] array) { Span<byte> span = new Span<byte>(array); Assert.Equal(array.Length, span.Length); Assert.NotSame(array, span.CreateArray()); Assert.False(span.Equals(array)); ReadOnlySpan<byte>.Enumerator it = span.GetEnumerator(); for (int i = 0; i < span.Length; i++) { Assert.True(it.MoveNext()); Assert.Equal(array[i], it.Current); Assert.Equal(array[i], span.Slice(i).Read<byte>()); Assert.Equal(array[i], span.Slice(i).Read<MyByte>().Value); array[i] = unchecked((byte)(array[i] + 1)); Assert.Equal(array[i], it.Current); Assert.Equal(array[i], span.Slice(i).Read<byte>()); Assert.Equal(array[i], span.Slice(i).Read<MyByte>().Value); span.Slice(i).Write<byte>(unchecked((byte)(array[i] + 1))); Assert.Equal(array[i], it.Current); Assert.Equal(array[i], span.Slice(i).Read<byte>()); Assert.Equal(array[i], span.Slice(i).Read<MyByte>().Value); span.Slice(i).Write<MyByte>(unchecked(new MyByte((byte)(array[i] + 1)))); Assert.Equal(array[i], it.Current); Assert.Equal(array[i], span.Slice(i).Read<byte>()); Assert.Equal(array[i], span.Slice(i).Read<MyByte>().Value); } 
Assert.False(it.MoveNext()); it.Reset(); for (int i = 0; i < span.Length; i++) { Assert.True(it.MoveNext()); Assert.Equal(array[i], it.Current); } Assert.False(it.MoveNext()); } [Theory] [InlineData(new byte[] { })] [InlineData(new byte[] { 0 })] [InlineData(new byte[] { 0, 1 })] [InlineData(new byte[] { 0, 1, 2 })] [InlineData(new byte[] { 0, 1, 2, 3 })] [InlineData(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 })] public void CtorReadOnlySpanOverByteArrayValidCasesWithPropertiesAndBasicOperationsChecks(byte[] array) { ReadOnlySpan<byte> span = new ReadOnlySpan<byte>(array); Assert.Equal(array.Length, span.Length); Assert.NotSame(array, span.CreateArray()); Assert.False(span.Equals(array)); ReadOnlySpan<byte>.Enumerator it = span.GetEnumerator(); for (int i = 0; i < span.Length; i++) { Assert.True(it.MoveNext()); Assert.Equal(array[i], it.Current); Assert.Equal(array[i], span.Slice(i).Read<byte>()); Assert.Equal(array[i], span.Slice(i).Read<MyByte>().Value); array[i] = unchecked((byte)(array[i] + 1)); Assert.Equal(array[i], it.Current); Assert.Equal(array[i], span.Slice(i).Read<byte>()); Assert.Equal(array[i], span.Slice(i).Read<MyByte>().Value); } Assert.False(it.MoveNext()); it.Reset(); for (int i = 0; i < span.Length; i++) { Assert.True(it.MoveNext()); Assert.Equal(array[i], it.Current); } Assert.False(it.MoveNext()); } [Theory] // copy whole buffer [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 6)] // copy first half to first half (length match) [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 0, 3, new byte[] { 7, 7, 7, 4, 5, 6 }, 0, 3)] // copy second half to second half (length match) [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 4, 5, 6 }, 3, 3, new byte[] { 1, 2, 3, 7, 7, 7 }, 3, 3)] // copy first half to first half [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 0, 
3, new byte[] { 7, 7, 7, 4, 5, 6 }, 0, 6)] // copy no bytes starting from index 0 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 0, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6)] // copy no bytes starting from index 3 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 3, 0, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6)] // copy no bytes starting at the end [InlineData( new byte[] { 7, 7, 7, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 6, 0, new byte[] { 7, 7, 7, 4, 5, 6 }, 0, 6)] // copy first byte of 1 element array to last position [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 6 }, 0, 1, new byte[] { 1, 2, 3, 4, 5, 7 }, 5, 1)] // copy first two bytes of 2 element array to last two positions [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 5, 6 }, 0, 2, new byte[] { 1, 2, 3, 4, 7, 7 }, 4, 2)] // copy first two bytes of 3 element array to last two positions [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 5, 6, 7 }, 0, 2, new byte[] { 1, 2, 3, 4, 7, 7 }, 4, 2)] // copy last two bytes of 3 element array to last two positions [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 5, 6 }, 1, 2, new byte[] { 1, 2, 3, 4, 7, 7 }, 4, 2)] // copy first two bytes of 2 element array to the middle of other array [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 3, 4 }, 0, 2, new byte[] { 1, 2, 7, 7, 5, 6 }, 2, 3)] // copy one byte from the beginning at the end of other array [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 1, new byte[] { 7, 7, 7, 7, 7, 7 }, 6, 0)] // copy two bytes from the beginning at 5th element [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 2, new byte[] { 7, 7, 7, 7, 7, 7 }, 5, 1)] // copy one byte from the beginning at the end of other array [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 5, 1, new byte[] { 7, 7, 7, 7, 7, 7 }, 6, 0)] // copy two bytes from the beginning at 5th element [InlineData( (byte[])null, new 
byte[] { 7, 7, 7, 7, 7, 7 }, 4, 2, new byte[] { 7, 7, 7, 7, 7, 7 }, 5, 1)] public void SpanOfByteCopyToAnotherSpanOfByteTwoDifferentBuffersValidCases(byte[] expected, byte[] a, int aidx, int acount, byte[] b, int bidx, int bcount) { if (expected != null) { Span<byte> spanA = new Span<byte>(a, aidx, acount); Span<byte> spanB = new Span<byte>(b, bidx, bcount); Assert.True(spanA.TryCopyTo(spanB)); Assert.Equal(expected, b); Span<byte> spanExpected = new Span<byte>(expected); Span<byte> spanBAll = new Span<byte>(b); Assert.True(spanExpected.SequenceEqual(spanBAll)); } else { Span<byte> spanA = new Span<byte>(a, aidx, acount); Span<byte> spanB = new Span<byte>(b, bidx, bcount); Assert.False(spanA.TryCopyTo(spanB)); } } [Theory] // copy whole buffer [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 6)] // copy first half to first half (length match) [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 0, 3, new byte[] { 7, 7, 7, 4, 5, 6 }, 0, 3)] // copy second half to second half (length match) [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 4, 5, 6 }, 3, 3, new byte[] { 1, 2, 3, 7, 7, 7 }, 3, 3)] // copy first half to first half [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 0, 3, new byte[] { 7, 7, 7, 4, 5, 6 }, 0, 6)] // copy no bytes starting from index 0 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 0, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6)] // copy no bytes starting from index 3 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 3, 0, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6)] // copy no bytes starting at the end [InlineData( new byte[] { 7, 7, 7, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 6, 0, new byte[] { 7, 7, 7, 4, 5, 6 }, 0, 6)] // copy first byte of 1 element array to last position [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 6 }, 0, 1, 
new byte[] { 1, 2, 3, 4, 5, 7 }, 5, 1)] // copy first two bytes of 2 element array to last two positions [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 5, 6 }, 0, 2, new byte[] { 1, 2, 3, 4, 7, 7 }, 4, 2)] // copy first two bytes of 3 element array to last two positions [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 5, 6, 7 }, 0, 2, new byte[] { 1, 2, 3, 4, 7, 7 }, 4, 2)] // copy last two bytes of 3 element array to last two positions [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 5, 6 }, 1, 2, new byte[] { 1, 2, 3, 4, 7, 7 }, 4, 2)] // copy first two bytes of 2 element array to the middle of other array [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 3, 4 }, 0, 2, new byte[] { 1, 2, 7, 7, 5, 6 }, 2, 3)] // copy one byte from the beginning at the end of other array [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 1, new byte[] { 7, 7, 7, 7, 7, 7 }, 6, 0)] // copy two bytes from the beginning at 5th element [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 2, new byte[] { 7, 7, 7, 7, 7, 7 }, 5, 1)] // copy one byte from the beginning at the end of other array [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 5, 1, new byte[] { 7, 7, 7, 7, 7, 7 }, 6, 0)] // copy two bytes from the beginning at 5th element [InlineData( (byte[])null, new byte[] { 7, 7, 7, 7, 7, 7 }, 4, 2, new byte[] { 7, 7, 7, 7, 7, 7 }, 5, 1)] public void ReadOnlySpanOfByteCopyToAnotherSpanOfByteTwoDifferentBuffersValidCases(byte[] expected, byte[] a, int aidx, int acount, byte[] b, int bidx, int bcount) { if (expected != null) { ReadOnlySpan<byte> spanA = new ReadOnlySpan<byte>(a, aidx, acount); Span<byte> spanB = new Span<byte>(b, bidx, bcount); Assert.True(spanA.TryCopyTo(spanB)); Assert.Equal(expected, b); ReadOnlySpan<byte> spanExpected = new ReadOnlySpan<byte>(expected); ReadOnlySpan<byte> spanBAll = new ReadOnlySpan<byte>(b); Assert.True(spanExpected.SequenceEqual(spanBAll)); } else { ReadOnlySpan<byte> spanA 
= new ReadOnlySpan<byte>(a, aidx, acount); Span<byte> spanB = new Span<byte>(b, bidx, bcount); Assert.False(spanA.TryCopyTo(spanB)); } ReadOnlySpanOfByteCopyToAnotherSpanOfByteTwoDifferentBuffersValidCasesNative(expected, a, aidx, acount, b, bidx, bcount); } public unsafe void ReadOnlySpanOfByteCopyToAnotherSpanOfByteTwoDifferentBuffersValidCasesNative(byte[] expected, byte[] a, int aidx, int acount, byte[] b, int bidx, int bcount) { IntPtr pa = Marshal.AllocHGlobal(a.Length); Span<byte> na = new Span<byte>(pa.ToPointer(), a.Length); na.Set(a); IntPtr pb = Marshal.AllocHGlobal(b.Length); Span<byte> nb = new Span<byte>(pb.ToPointer(), b.Length); nb.Set(b); ReadOnlySpan<byte> spanA = na.Slice(aidx, acount); Span<byte> spanB = nb.Slice(bidx, bcount); if (expected != null) { Assert.True(spanA.TryCopyTo(spanB)); Assert.Equal(expected, b); ReadOnlySpan<byte> spanExpected = new ReadOnlySpan<byte>(expected); ReadOnlySpan<byte> spanBAll = new ReadOnlySpan<byte>(b); Assert.True(spanExpected.SequenceEqual(spanBAll)); } else { Assert.False(spanA.TryCopyTo(spanB)); } Marshal.FreeHGlobal(pa); Marshal.FreeHGlobal(pb); } [Theory] // copy whole buffer [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6, new byte[] { 7, 7, 7, 7, 7, 7 })] // copy first half [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 0, 3, new byte[] { 7, 7, 7, 4, 5, 6 })] // copy second half [InlineData( new byte[] { 4, 5, 6, 7, 7, 7 }, new byte[] { 7, 7, 7, 4, 5, 6 }, 3, 3, new byte[] { 7, 7, 7, 7, 7, 7 })] // copy no bytes starting from index 0 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 0, new byte[] { 1, 2, 3, 4, 5, 6 })] // copy no bytes starting from index 3 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 3, 0, new byte[] { 1, 2, 3, 4, 5, 6 })] // copy no bytes starting at the end [InlineData( new byte[] { 7, 7, 7, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 6, 0, new byte[] { 
7, 7, 7, 4, 5, 6 })] // copy first byte of 1 element array [InlineData( new byte[] { 6, 2, 3, 4, 5, 6 }, new byte[] { 6 }, 0, 1, new byte[] { 1, 2, 3, 4, 5, 6 })] public void SpanCopyToArrayTwoDifferentBuffersValidCases(byte[] expected, byte[] a, int aidx, int acount, byte[] b) { if (expected != null) { Span<byte> spanA = new Span<byte>(a, aidx, acount); Assert.True(spanA.TryCopyTo(b)); Assert.Equal(expected, b); Span<byte> spanExpected = new Span<byte>(expected); Span<byte> spanBAll = new Span<byte>(b); Assert.True(spanExpected.SequenceEqual(spanBAll)); } else { Span<byte> spanA = new Span<byte>(a, aidx, acount); Assert.False(spanA.TryCopyTo(b)); } } [Theory] // copy whole buffer [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 4, 5, 6 }, 0, 6, new byte[] { 7, 7, 7, 7, 7, 7 })] // copy first half [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 0, 3, new byte[] { 7, 7, 7, 4, 5, 6 })] // copy second half [InlineData( new byte[] { 4, 5, 6, 7, 7, 7 }, new byte[] { 7, 7, 7, 4, 5, 6 }, 3, 3, new byte[] { 7, 7, 7, 7, 7, 7 })] // copy no bytes starting from index 0 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 0, 0, new byte[] { 1, 2, 3, 4, 5, 6 })] // copy no bytes starting from index 3 [InlineData( new byte[] { 1, 2, 3, 4, 5, 6 }, new byte[] { 7, 7, 7, 7, 7, 7 }, 3, 0, new byte[] { 1, 2, 3, 4, 5, 6 })] // copy no bytes starting at the end [InlineData( new byte[] { 7, 7, 7, 4, 5, 6 }, new byte[] { 1, 2, 3, 7, 7, 7 }, 6, 0, new byte[] { 7, 7, 7, 4, 5, 6 })] // copy first byte of 1 element array [InlineData( new byte[] { 6, 2, 3, 4, 5, 6 }, new byte[] { 6 }, 0, 1, new byte[] { 1, 2, 3, 4, 5, 6 })] public void ROSpanCopyToArrayTwoDifferentBuffersValidCases(byte[] expected, byte[] a, int aidx, int acount, byte[] b) { if (expected != null) { ReadOnlySpan<byte> spanA = new ReadOnlySpan<byte>(a, aidx, acount); Assert.True(spanA.TryCopyTo(b)); Assert.Equal(expected, b); ReadOnlySpan<byte> 
spanExpected = new ReadOnlySpan<byte>(expected); ReadOnlySpan<byte> spanBAll = new ReadOnlySpan<byte>(b); Assert.True(spanExpected.SequenceEqual(spanBAll)); } else { ReadOnlySpan<byte> spanA = new ReadOnlySpan<byte>(a, aidx, acount); Assert.False(spanA.TryCopyTo(b)); } } } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Funq;
using NServiceKit.Html;
using NServiceKit.IO;
using NServiceKit.VirtualPath;
using NServiceKit.ServiceHost;
using NServiceKit.WebHost.Endpoints;
using NServiceKit.WebHost.Endpoints.Extensions;

namespace NServiceKit.ServiceInterface.Testing
{
    /// <summary>
    /// A minimal, self-contained <see cref="IAppHost"/> implementation intended for unit testing.
    /// It wires up an IoC container and empty filter collections but performs no real hosting.
    /// </summary>
    public class BasicAppHost : IAppHost, IHasContainer, IDisposable
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="BasicAppHost"/> class with a fresh
        /// container, empty filter/plugin collections, and a file-system virtual path provider
        /// rooted at the mapped server path of "~".
        /// </summary>
        public BasicAppHost()
        {
            this.Container = new Container();
            this.PreRequestFilters = new List<Action<IHttpRequest, IHttpResponse>>();
            this.RequestFilters = new List<Action<IHttpRequest, IHttpResponse, object>>();
            this.ResponseFilters = new List<Action<IHttpRequest, IHttpResponse, object>>();
            this.ViewEngines = new List<IViewEngine>();
            this.CatchAllHandlers = new List<HttpHandlerResolverDelegate>();
            // Fix: Plugins was previously never initialized, so reading it (or calling
            // LoadPlugin) before assignment threw a NullReferenceException.
            this.Plugins = new List<IPlugin>();
            VirtualPathProvider = new FileSystemVirtualPathProvider(this, "~".MapServerPath());
        }

        /// <summary>Registers type <typeparamref name="T"/> in the container as <typeparamref name="TAs"/>.</summary>
        /// <typeparam name="T">Concrete type to register.</typeparam>
        /// <typeparam name="TAs">Service type it is registered as.</typeparam>
        public void RegisterAs<T, TAs>() where T : TAs
        {
            this.Container.RegisterAs<T, TAs>();
        }

        /// <summary>
        /// Allows clean-up of executed autowired services and filters.
        /// Called directly after services and filters are executed. No-op in this test host.
        /// </summary>
        /// <param name="instance">The service or filter instance to release.</param>
        public virtual void Release(object instance) { }

        /// <summary>Called at the end of each request. Enables Request Scope. No-op in this test host.</summary>
        public void OnEndRequest() { }

        /// <summary>Register user-defined custom routes.</summary>
        /// <value>
        /// The routes. NOTE(review): never assigned by this class — presumably callers
        /// tolerate null in test scenarios; confirm before relying on it.
        /// </value>
        public IServiceRoutes Routes { get; private set; }

        /// <summary>Registers an instance in the container.</summary>
        /// <typeparam name="T">Type the instance is registered as.</typeparam>
        /// <param name="instance">The instance to register.</param>
        public void Register<T>(T instance)
        {
            this.Container.Register(instance);
        }

        /// <summary>Attempts to resolve <typeparamref name="T"/> from the container.</summary>
        /// <typeparam name="T">Service type to resolve.</typeparam>
        /// <returns>The resolved instance, or the type's default if unregistered.</returns>
        public T TryResolve<T>()
        {
            return this.Container.TryResolve<T>();
        }

        /// <summary>Gets or sets the IoC container backing this host.</summary>
        public Container Container { get; set; }

        /// <summary>Register custom ContentType serializers.</summary>
        public IContentTypeFilter ContentTypeFilters { get; set; }

        /// <summary>Request filters applied before the DTO is deserialized.</summary>
        public List<Action<IHttpRequest, IHttpResponse>> PreRequestFilters { get; set; }

        /// <summary>Request filters applied after the DTO is deserialized.</summary>
        public List<Action<IHttpRequest, IHttpResponse, object>> RequestFilters { get; set; }

        /// <summary>Response filters applied before the response is written.</summary>
        public List<Action<IHttpRequest, IHttpResponse, object>> ResponseFilters { get; set; }

        /// <summary>Alternative HTML view engines.</summary>
        public List<IViewEngine> ViewEngines { get; set; }

        /// <summary>Handler for un-caught exceptions.</summary>
        public HandleUncaughtExceptionDelegate ExceptionHandler { get; set; }

        /// <summary>Handler for unhandled service exceptions.</summary>
        public HandleServiceExceptionDelegate ServiceExceptionHandler { get; set; }

        /// <summary>Catch-all handlers for requests that don't match any routes.</summary>
        public List<HttpHandlerResolverDelegate> CatchAllHandlers { get; set; }

        /// <summary>Custom model binders per Request DTO type.</summary>
        /// <exception cref="NotImplementedException">Always thrown; not supported by this test host.</exception>
        public Dictionary<Type, Func<IHttpRequest, object>> RequestBinders
        {
            get { throw new NotImplementedException(); }
        }

        private EndpointHostConfig config;

        /// <summary>The AppHost config, created lazily on first read.</summary>
        /// <value>The configuration.</value>
        public EndpointHostConfig Config
        {
            get
            {
                // Fix: the original evaluated `config ?? (new EndpointHostConfig(...))`
                // WITHOUT assigning the result, so every read built a brand-new
                // EndpointHostConfig and ServiceManager — and RegisterService() then
                // registered services into a throwaway instance. Cache it instead.
                return config ?? (config = new EndpointHostConfig("BasicAppHost",
                    new ServiceManager(Container, Assembly.GetExecutingAssembly())));
            }
            set { config = value; }
        }

        /// <summary>Registers an ad-hoc web service on startup.</summary>
        /// <param name="serviceType">The service type to register.</param>
        /// <param name="atRestPaths">Ignored by this test host (no REST routing is performed).</param>
        public void RegisterService(Type serviceType, params string[] atRestPaths)
        {
            Config.ServiceManager.RegisterService(serviceType);
        }

        /// <summary>Pre-registered and user-defined plugins enabled in this AppHost.</summary>
        public List<IPlugin> Plugins { get; private set; }

        /// <summary>Applies the given plugins to this AppHost.</summary>
        /// <param name="plugins">Plugins to register against this host.</param>
        public void LoadPlugin(params IPlugin[] plugins)
        {
            plugins.ToList().ForEach(x => x.Register(this));
        }

        /// <summary>Virtual access to file resources.</summary>
        public IVirtualPathProvider VirtualPathProvider { get; set; }

        /// <summary>Creates a service runner.</summary>
        /// <typeparam name="TRequest">Type of the request.</typeparam>
        /// <param name="actionContext">Context for the action.</param>
        /// <exception cref="NotImplementedException">Always thrown; not supported by this test host.</exception>
        /// <returns>Never returns.</returns>
        public IServiceRunner<TRequest> CreateServiceRunner<TRequest>(ActionContext actionContext)
        {
            throw new NotImplementedException();
        }

        /// <summary>Resolves the absolute url for this request.</summary>
        /// <param name="virtualPath">Virtual path to resolve.</param>
        /// <param name="httpReq">The HTTP request providing the base url.</param>
        /// <returns>The absolute url.</returns>
        public virtual string ResolveAbsoluteUrl(string virtualPath, IHttpRequest httpReq)
        {
            return httpReq.GetAbsoluteUrl(virtualPath);
        }

        /// <summary>Configures the static EndpointHost with this host's config and returns this instance.</summary>
        /// <returns>This <see cref="BasicAppHost"/> for fluent chaining.</returns>
        public BasicAppHost Init()
        {
            EndpointHost.ConfigureHost(this, GetType().Name, Config.ServiceManager);
            return this;
        }

        private bool disposed;

        // Fix: dedicated gate instead of `lock (this)` (CA2002) — locking on the
        // publicly visible instance risks deadlock if external code also locks it.
        private readonly object disposeLock = new object();

        /// <summary>Releases resources held by the static EndpointHost.</summary>
        /// <param name="disposing">true to release managed resources as well as unmanaged ones.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposed) return;
            lock (disposeLock)
            {
                if (disposed) return;
                if (disposing)
                {
                    if (EndpointHost.Config != null && EndpointHost.Config.ServiceManager != null)
                    {
                        EndpointHost.Config.ServiceManager.Dispose();
                    }
                    EndpointHost.Dispose();
                }
                //release unmanaged resources here...
                disposed = true;
            }
        }

        /// <summary>Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.</summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }
    }
}
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Microsoft.Dnx.Runtime.Common.CommandLine { internal class CommandLineApplication { // Indicates whether the parser should throw an exception when it runs into an unexpected argument. // If this field is set to false, the parser will stop parsing when it sees an unexpected argument, and all // remaining arguments, including the first unexpected argument, will be stored in RemainingArguments property. private readonly bool _throwOnUnexpectedArg; public CommandLineApplication(bool throwOnUnexpectedArg = true) { _throwOnUnexpectedArg = throwOnUnexpectedArg; Options = new List<CommandOption>(); Arguments = new List<CommandArgument>(); Commands = new List<CommandLineApplication>(); RemainingArguments = new List<string>(); Invoke = () => 0; } public CommandLineApplication Parent { get; set; } public string Name { get; set; } public string FullName { get; set; } public string Syntax { get; set; } public string Description { get; set; } public List<CommandOption> Options { get; private set; } public CommandOption OptionHelp { get; private set; } public CommandOption OptionVersion { get; private set; } public List<CommandArgument> Arguments { get; private set; } public List<string> RemainingArguments { get; private set; } public bool IsShowingInformation { get; protected set; } // Is showing help or version? 
        // Callback run by Execute after parsing succeeds; returns the process exit code.
        public Func<int> Invoke { get; set; }
        public Func<string> LongVersionGetter { get; set; }
        public Func<string> ShortVersionGetter { get; set; }
        public List<CommandLineApplication> Commands { get; private set; }

        // Registers a named subcommand and lets the caller configure it via the delegate.
        public CommandLineApplication Command(string name, Action<CommandLineApplication> configuration, bool throwOnUnexpectedArg = true)
        {
            var command = new CommandLineApplication(throwOnUnexpectedArg) { Name = name, Parent = this };
            Commands.Add(command);
            configuration(command);
            return command;
        }

        public CommandOption Option(string template, string description, CommandOptionType optionType)
        {
            return Option(template, description, optionType, _ => { });
        }

        // Registers an option parsed from a template such as "-n|--name <NAME>".
        public CommandOption Option(string template, string description, CommandOptionType optionType, Action<CommandOption> configuration)
        {
            var option = new CommandOption(template, optionType) { Description = description };
            Options.Add(option);
            configuration(option);
            return option;
        }

        public CommandArgument Argument(string name, string description, bool multipleValues = false)
        {
            return Argument(name, description, _ => { }, multipleValues);
        }

        // Registers a positional argument. A multiple-values argument swallows all remaining
        // positionals, so nothing may be registered after it.
        public CommandArgument Argument(string name, string description, Action<CommandArgument> configuration, bool multipleValues = false)
        {
            var lastArg = Arguments.LastOrDefault();
            if (lastArg != null && lastArg.MultipleValues)
            {
                var message = string.Format("The last argument '{0}' accepts multiple values. No more argument can be added.", lastArg.Name);
                throw new InvalidOperationException(message);
            }

            var argument = new CommandArgument { Name = name, Description = description, MultipleValues = multipleValues };
            Arguments.Add(argument);
            configuration(argument);
            return argument;
        }

        public void OnExecute(Func<int> invoke)
        {
            Invoke = invoke;
        }

        // NOTE(review): blocks on the task via .Result; exceptions surface as AggregateException.
        public void OnExecute(Func<Task<int>> invoke)
        {
            Invoke = () => invoke().Result;
        }

        // Parses args (options, subcommands, positional arguments) and then runs the
        // innermost matched command's Invoke callback. Returns its exit code, or 0 when
        // a help/version option ends parsing early.
        public int Execute(params string[] args)
        {
            CommandLineApplication command = this;
            CommandOption option = null;       // option currently waiting for its value
            IEnumerator<CommandArgument> arguments = null;

            for (var index = 0; index < args.Length; index++)
            {
                var arg = args[index];
                var processed = false;
                if (!processed && option == null)
                {
                    string[] longOption = null;
                    string[] shortOption = null;

                    // "--name[:=]value" vs "-n[:=]value"; split at most once on ':' or '='.
                    if (arg.StartsWith("--"))
                    {
                        longOption = arg.Substring(2).Split(new[] { ':', '=' }, 2);
                    }
                    else if (arg.StartsWith("-"))
                    {
                        shortOption = arg.Substring(1).Split(new[] { ':', '=' }, 2);
                    }
                    if (longOption != null)
                    {
                        processed = true;
                        option = command.Options.SingleOrDefault(opt => string.Equals(opt.LongName, longOption[0], StringComparison.Ordinal));

                        if (option == null)
                        {
                            HandleUnexpectedArg(command, args, index, argTypeName: "option");
                            break;
                        }

                        // If we find a help/version option, show information and stop parsing
                        if (command.OptionHelp == option)
                        {
                            command.ShowHelp();
                            return 0;
                        }
                        else if (command.OptionVersion == option)
                        {
                            command.ShowVersion();
                            return 0;
                        }

                        if (longOption.Length == 2)
                        {
                            // Value supplied inline ("--name=value").
                            if (!option.TryParse(longOption[1]))
                            {
                                command.ShowHint();
                                throw new CommandParsingException(command, $"Unexpected value '{longOption[1]}' for option '{option.LongName}'");
                            }
                            option = null;
                        }
                        else if (option.OptionType == CommandOptionType.NoValue)
                        {
                            // No value is needed for this option
                            option.TryParse(null);
                            option = null;
                        }
                        // Otherwise leave 'option' set: the next token is its value.
                    }
                    if (shortOption != null)
                    {
                        processed = true;
                        option = command.Options.SingleOrDefault(opt => string.Equals(opt.ShortName, shortOption[0], StringComparison.Ordinal));

                        // If not a short option, try symbol option
                        if (option == null)
                        {
                            option = command.Options.SingleOrDefault(opt => string.Equals(opt.SymbolName, shortOption[0], StringComparison.Ordinal));
                        }

                        if (option == null)
                        {
                            HandleUnexpectedArg(command, args, index, argTypeName: "option");
                            break;
                        }

                        // If we find a help/version option, show information and stop parsing
                        if (command.OptionHelp == option)
                        {
                            command.ShowHelp();
                            return 0;
                        }
                        else if (command.OptionVersion == option)
                        {
                            command.ShowVersion();
                            return 0;
                        }

                        if (shortOption.Length == 2)
                        {
                            if (!option.TryParse(shortOption[1]))
                            {
                                command.ShowHint();
                                throw new CommandParsingException(command, $"Unexpected value '{shortOption[1]}' for option '{option.LongName}'");
                            }
                            option = null;
                        }
                        else if (option.OptionType == CommandOptionType.NoValue)
                        {
                            // No value is needed for this option
                            option.TryParse(null);
                            option = null;
                        }
                    }
                }

                // A pending option from the previous token consumes this token as its value.
                if (!processed && option != null)
                {
                    processed = true;
                    if (!option.TryParse(arg))
                    {
                        command.ShowHint();
                        throw new CommandParsingException(command, $"Unexpected value '{arg}' for option '{option.LongName}'");
                    }
                    option = null;
                }

                // Subcommand matching only happens before any positional argument was consumed.
                if (!processed && arguments == null)
                {
                    var currentCommand = command;
                    foreach (var subcommand in command.Commands)
                    {
                        if (string.Equals(subcommand.Name, arg, StringComparison.OrdinalIgnoreCase))
                        {
                            processed = true;
                            command = subcommand;
                            break;
                        }
                    }

                    // If we detect a subcommand
                    if (command != currentCommand)
                    {
                        processed = true;
                    }
                }
                if (!processed)
                {
                    if (arguments == null)
                    {
                        arguments = new CommandArgumentEnumerator(command.Arguments.GetEnumerator());
                    }
                    if (arguments.MoveNext())
                    {
                        processed = true;
                        arguments.Current.Values.Add(arg);
                    }
                }
                if (!processed)
                {
                    HandleUnexpectedArg(command, args, index, argTypeName: "command or argument");
                    break;
                }
            }

            // An option at the end of args never received its value.
            if (option != null)
            {
                command.ShowHint();
                throw new CommandParsingException(command, $"Missing value for option '{option.LongName}'");
            }

            return command.Invoke();
        }

        // Helper method that adds a help option
        public CommandOption HelpOption(string template)
        {
            // Help option is special because we stop parsing once we see it
            // So we store it separately for further use
            OptionHelp = Option(template, "Show help information", CommandOptionType.NoValue);

            return OptionHelp;
        }

        public CommandOption VersionOption(string template, string shortFormVersion, string longFormVersion = null)
        {
            if (longFormVersion == null)
            {
                return VersionOption(template, () => shortFormVersion);
            }
            else
            {
                return VersionOption(template, () => shortFormVersion, () => longFormVersion);
            }
        }

        // Helper method that adds a version option
        public CommandOption VersionOption(string template, Func<string> shortFormVersionGetter, Func<string> longFormVersionGetter = null)
        {
            // Version option is special because we stop parsing once we see it
            // So we store it separately for further use
            OptionVersion = Option(template, "Show version information", CommandOptionType.NoValue);
            ShortVersionGetter = shortFormVersionGetter;
            LongVersionGetter = longFormVersionGetter ?? shortFormVersionGetter;

            return OptionVersion;
        }

        // Show short hint that reminds users to use help option
        public void ShowHint()
        {
            if (OptionHelp != null)
            {
                Console.WriteLine(string.Format("Specify --{0} for a list of available options and commands.", OptionHelp.LongName));
            }
        }

        // Show full help
        public void ShowHelp(string commandName = null)
        {
            // Build "Usage: root sub ..." by walking up the parent chain.
            var headerBuilder = new StringBuilder("Usage:");
            for (var cmd = this; cmd != null; cmd = cmd.Parent)
            {
                cmd.IsShowingInformation = true;
                headerBuilder.Insert(6, string.Format(" {0}", cmd.Name));
            }

            CommandLineApplication target;

            if (commandName == null || string.Equals(Name, commandName, StringComparison.OrdinalIgnoreCase))
            {
                target = this;
            }
            else
            {
                target = Commands.SingleOrDefault(cmd => string.Equals(cmd.Name, commandName, StringComparison.OrdinalIgnoreCase));

                if (target != null)
                {
                    headerBuilder.AppendFormat(" {0}", commandName);
                }
                else
                {
                    // The command name is invalid so don't try to show help for something that doesn't exist
                    target = this;
                }
            }

            var optionsBuilder = new StringBuilder();
            var commandsBuilder = new StringBuilder();
            var argumentsBuilder = new StringBuilder();

            if (target.Arguments.Any())
            {
                headerBuilder.Append(" [arguments]");

                argumentsBuilder.AppendLine();
                argumentsBuilder.AppendLine("Arguments:");
                var maxArgLen = MaxArgumentLength(target.Arguments);
                // Left-align names in a column two wider than the longest name.
                var outputFormat = string.Format("  {{0, -{0}}}{{1}}", maxArgLen + 2);
                foreach (var arg in target.Arguments)
                {
                    argumentsBuilder.AppendFormat(outputFormat, arg.Name, arg.Description);
                    argumentsBuilder.AppendLine();
                }
            }

            if (target.Options.Any())
            {
                headerBuilder.Append(" [options]");

                optionsBuilder.AppendLine();
                optionsBuilder.AppendLine("Options:");
                var maxOptLen = MaxOptionTemplateLength(target.Options);
                var outputFormat = string.Format("  {{0, -{0}}}{{1}}", maxOptLen + 2);
                foreach (var opt in target.Options)
                {
                    optionsBuilder.AppendFormat(outputFormat, opt.Template, opt.Description);
                    optionsBuilder.AppendLine();
                }
            }

            if (target.Commands.Any())
            {
                headerBuilder.Append(" [command]");

                commandsBuilder.AppendLine();
                commandsBuilder.AppendLine("Commands:");
                var maxCmdLen = MaxCommandLength(target.Commands);
                var outputFormat = string.Format("  {{0, -{0}}}{{1}}", maxCmdLen + 2);
                foreach (var cmd in target.Commands.OrderBy(c => c.Name))
                {
                    commandsBuilder.AppendFormat(outputFormat, cmd.Name, cmd.Description);
                    commandsBuilder.AppendLine();
                }

                if (OptionHelp != null)
                {
                    commandsBuilder.AppendLine();
                    commandsBuilder.AppendFormat("Use \"{0} [command] --help\" for more information about a command.", Name);
                    commandsBuilder.AppendLine();
                }
            }

            headerBuilder.AppendLine();

            var nameAndVersion = new StringBuilder();
            nameAndVersion.AppendLine(GetFullNameAndVersion());
            nameAndVersion.AppendLine();

            Console.Write("{0}{1}{2}{3}{4}", nameAndVersion, headerBuilder, argumentsBuilder, optionsBuilder, commandsBuilder);
        }

        public void ShowVersion()
        {
            for (var cmd = this; cmd != null; cmd = cmd.Parent)
            {
                cmd.IsShowingInformation = true;
            }

            Console.WriteLine(FullName);
            Console.WriteLine(LongVersionGetter());
        }

        public string GetFullNameAndVersion()
        {
            return ShortVersionGetter == null ? FullName : string.Format("{0} {1}", FullName, ShortVersionGetter());
        }

        public void ShowRootCommandFullNameAndVersion()
        {
            var rootCmd = this;
            while (rootCmd.Parent != null)
            {
                rootCmd = rootCmd.Parent;
            }

            Console.WriteLine(rootCmd.GetFullNameAndVersion());
            Console.WriteLine();
        }

        // Length of the longest option template, used for column alignment in ShowHelp.
        private int MaxOptionTemplateLength(IEnumerable<CommandOption> options)
        {
            var maxLen = 0;
            foreach (var opt in options)
            {
                maxLen = opt.Template.Length > maxLen ? opt.Template.Length : maxLen;
            }
            return maxLen;
        }

        private int MaxCommandLength(IEnumerable<CommandLineApplication> commands)
        {
            var maxLen = 0;
            foreach (var cmd in commands)
            {
                maxLen = cmd.Name.Length > maxLen ? cmd.Name.Length : maxLen;
            }
            return maxLen;
        }

        private int MaxArgumentLength(IEnumerable<CommandArgument> arguments)
        {
            var maxLen = 0;
            foreach (var arg in arguments)
            {
                maxLen = arg.Name.Length > maxLen ? arg.Name.Length : maxLen;
            }
            return maxLen;
        }

        // Either throws (default) or stashes everything from 'index' on in RemainingArguments.
        private void HandleUnexpectedArg(CommandLineApplication command, string[] args, int index, string argTypeName)
        {
            if (command._throwOnUnexpectedArg)
            {
                command.ShowHint();
                throw new CommandParsingException(command, $"Unrecognized {argTypeName} '{args[index]}'");
            }
            else
            {
                // All remaining arguments are stored for further use
                command.RemainingArguments.AddRange(new ArraySegment<string>(args, index, args.Length - index));
            }
        }

        // Enumerator over positional arguments that stays on a MultipleValues argument
        // forever, so all remaining tokens accumulate into its Values list.
        private class CommandArgumentEnumerator : IEnumerator<CommandArgument>
        {
            private readonly IEnumerator<CommandArgument> _enumerator;

            public CommandArgumentEnumerator(IEnumerator<CommandArgument> enumerator)
            {
                _enumerator = enumerator;
            }

            public CommandArgument Current
            {
                get { return _enumerator.Current; }
            }

            object IEnumerator.Current
            {
                get { return Current; }
            }

            public void Dispose()
            {
                _enumerator.Dispose();
            }

            public bool MoveNext()
            {
                if (Current == null || !Current.MultipleValues)
                {
                    return _enumerator.MoveNext();
                }

                // If current argument allows multiple values, we don't move forward and
                // all later values will be added to current CommandArgument.Values
                return true;
            }

            public void Reset()
            {
                _enumerator.Reset();
            }
        }
    }
}
/* Copyright (c) 2006-2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* Change history
 * Oct 13 2008  Joe Feser       joseph.feser@gmail.com
 * Converted ArrayLists and other .NET 1.1 collections to use Generics
 * Combined IExtensionElement and IExtensionElementFactory interfaces
 *
 */
#region Using directives
using System;
using System.Collections;
using System.Text;
using System.Xml;
using Google.GData.Client;
using System.Collections.Generic;

#endregion

namespace Google.GData.GoogleBase
{
    ///////////////////////////////////////////////////////////////////////
    /// <summary>Typesafe convenience methods for getting and setting
    /// google base attributes (Level 2)
    ///
    /// This class adds convenience methods for accessing attributes
    /// according to their type.
    /// </summary>
    /// <seealso cref="GBaseAttributes"/>
    /// <seealso cref="GBaseAttributeCollection"/>
    /// <seealso cref="GBaseEntry"/>
    ///////////////////////////////////////////////////////////////////////
    public class GBaseAttributeCollectionWithTypeConversion : GBaseAttributeCollection
    {
        // Sentinel for "no date" — DateTime with 0 ticks.
        private static readonly DateTime NoDateTime = new DateTime(0);

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Creates a GBaseAttributeCollectionWithTypeConversion
        /// object that will access and modify the given extension list.
        /// </summary>
        /// <param name="baseList">a list that contains GBaseAttribute object,
        /// among others</param>
        ///////////////////////////////////////////////////////////////////////
        public GBaseAttributeCollectionWithTypeConversion(ExtensionList baseList)
            : base(baseList)
        {
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Gets the content of the first attribute found with
        /// a specific name, as a string, whatever its type.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>the value of the first attribute, if found, or null
        /// </returns>
        ///////////////////////////////////////////////////////////////////////
        public String GetAttributeAsString(string name)
        {
            return ExtractContent(GetAttribute(name));
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Gets the content of the first attribute found with
        /// a specific name and type, as a string.</summary>
        /// <param name="name">attribute name</param>
        /// <param name="type">type of the attribute</param>
        /// <returns>the value of the first attribute, if found, or null
        /// </returns>
        ///////////////////////////////////////////////////////////////////////
        public String GetAttributeAsString(string name, GBaseAttributeType type)
        {
            return ExtractContent(GetAttribute(name, type));
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Gets all the content of all the attributes found
        /// with a specific name, whatever their type might be.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>the values of all the attributes found with this name
        /// as an array of strings, never null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String[] GetAttributesAsString(string name)
        {
            return ExtractContent(GetAttributes(name));
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Gets all the content of all the attributes found
        /// with a specific name and type..</summary>
        /// <param name="name">attribute name</param>
        /// <param name="type">attribute type</param>
        /// <returns>the values of all the attributes found with this name
        /// and type as an array of strings, never null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String[] GetAttributesAsString(string name, GBaseAttributeType type)
        {
            return ExtractContent(GetAttributes(name, type));
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Returns the value of the first attribute of type
        /// text with this name.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>the value of the first attribute, or null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String GetTextAttribute(string name)
        {
            return GetAttributeAsString(name, GBaseAttributeType.Text);
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Returns the values of all the attribute of type
        /// text with this name.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>all the values found, never null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String[] GetTextAttributes(string name)
        {
            return GetAttributesAsString(name, GBaseAttributeType.Text);
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Adds a new attribute of type text.</summary>
        /// <param name="name">attribute name</param>
        /// <param name="value">value</param>
        /// <returns>the newly-created GBaseAttribute object</returns>
        ///////////////////////////////////////////////////////////////////////
        public GBaseAttribute AddTextAttribute(string name, string value)
        {
            return Add(new GBaseAttribute(name, GBaseAttributeType.Text, value));
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Returns the value of the first attribute of type
        /// url with this name.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>the value of the first attribute, or null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String GetUrlAttribute(string name)
        {
            return GetAttributeAsString(name, GBaseAttributeType.Url);
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Returns the values of all the attribute of type
        /// url with this name.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>all the values found, never null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String[] GetUrlAttributes(string name)
        {
            return GetAttributesAsString(name, GBaseAttributeType.Url);
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Adds a new attribute of type url.</summary>
        /// <param name="name">attribute name</param>
        /// <param name="value">value</param>
        /// <returns>the newly-created GBaseAttribute object</returns>
        ///////////////////////////////////////////////////////////////////////
        public GBaseAttribute AddUrlAttribute(string name, string value)
        {
            return Add(new GBaseAttribute(name, GBaseAttributeType.Url, value));
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Returns the address of the first attribute of type
        /// location with this name.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>the value of the first attribute, or null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String GetLocationAttribute(string name)
        {
            return GetAttributeAsString(name, GBaseAttributeType.Location);
        }

        ///////////////////////////////////////////////////////////////////////
        /// <summary>Returns the address of all the attribute of type
        /// location with this name.</summary>
        /// <param name="name">attribute name</param>
        /// <returns>all the values found, never null</returns>
        ///////////////////////////////////////////////////////////////////////
        public String[] GetLocationAttributes(string name)
        {
            return GetAttributesAsString(name, GBaseAttributeType.Location);
        }

        //////////////////////////////////////////////////////////////////////
        /// <summary>Returns the object representation of an attribute
        /// of type location with this name.</summary>
        //////////////////////////////////////////////////////////////////////
        public Location GetLocationAttributeAsObject(string name)
        {
            GBaseAttribute attribute = GetAttribute(name, GBaseAttributeType.Location);
            if (attribute == null)
            {
                return null;
            }
            return new Location(attribute);
        }

        //////////////////////////////////////////////////////////////////////
        /// <summary>Returns all the object representations for attributes
        /// of type location with this name.</summary>
        //////////////////////////////////////////////////////////////////////
        public List<Location> GetLocationAttributesAsObjects(string name)
        {
            List<GBaseAttribute> attributes =
                GetAttributes(name, GBaseAttributeType.Location);
            // attributes.Count is used only as the initial capacity; the list
            // starts empty. (The previous code looped on retval.Count, which is
            // 0 at that point, so this method always returned an empty list.)
            List<Location> retval = new List<Location>(attributes.Count);
            foreach (GBaseAttribute attribute in attributes)
            {
                retval.Add(new Location(attribute));
            }
            return retval;
        }

        //////////////////////////////////////////////////////////////////////
        /// <summary>Returns the object representation for an attribute
        /// of type shipping.</summary>
        //////////////////////////////////////////////////////////////////////
        public Shipping GetShippingAttribute(string name)
        {
            GBaseAttribute attribute = GetAttribute(name, GBaseAttributeType.Shipping);
            if (attribute == null)
            {
                return null;
            }
            return new Shipping(attribute);
        }

        //////////////////////////////////////////////////////////////////////
        /// <summary>Adds a new attribute of type shipping.</summary>
        /// <param name="name">attribute name</param>
        /// <param name="value">attribute value</param>
        /// <returns>the newly-created GBaseAttribute object</returns>
////////////////////////////////////////////////////////////////////// public GBaseAttribute AddShippingAttribute(string name, Shipping value) { return Add(value.CreateGBaseAttribute(name)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type location.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddLocationAttribute(string name, string value) { return Add(new GBaseAttribute(name, GBaseAttributeType.Location, value)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type location.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddLocationAttribute(string name, Location value) { return Add(value.CreateGBaseAttribute(name)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type boolean.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddBooleanAttribute(string name, bool value) { return Add(new GBaseAttribute(name, GBaseAttributeType.Boolean, value ? 
Utilities.XSDTrue : Utilities.XSDFalse)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type int.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddIntAttribute(string name, int value) { return Add(new GBaseAttribute(name, GBaseAttributeType.Int, NumberFormat.ToString(value))); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type float.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddFloatAttribute(string name, float value) { return Add(new GBaseAttribute(name, GBaseAttributeType.Float, NumberFormat.ToString(value))); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type number.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddNumberAttribute(string name, float value) { return Add(new GBaseAttribute(name, GBaseAttributeType.Number, NumberFormat.ToString(value))); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type number.</summary> /// <param name="name">attribute name</param> /// <param name="value">value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddNumberAttribute(string name, int value) { 
return Add(new GBaseAttribute(name, GBaseAttributeType.Number, NumberFormat.ToString(value))); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type intUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">integer value</param> /// <param name="unit">unit</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddIntUnitAttribute(string name, int value, string unit) { return AddIntUnitAttribute(name, new IntUnit(value, unit)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type intUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">attribute value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddIntUnitAttribute(string name, IntUnit value) { return Add(new GBaseAttribute(name, GBaseAttributeType.IntUnit, value.ToString())); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type floatUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">float value</param> /// <param name="unit">unit</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddFloatUnitAttribute(string name, float value, string unit) { return AddFloatUnitAttribute(name, new FloatUnit(value, unit)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type floatUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">attribute value</param> /// <returns>the newly-created GBaseAttribute object</returns> 
/////////////////////////////////////////////////////////////////////// public GBaseAttribute AddFloatUnitAttribute(string name, FloatUnit value) { return Add(new GBaseAttribute(name, GBaseAttributeType.FloatUnit, value.ToString())); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type numberUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">integer value</param> /// <param name="unit">unit</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddNumberUnitAttribute(string name, int value, string unit) { return AddNumberUnitAttribute(name, new IntUnit(value, unit)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type numberUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">float value</param> /// <param name="unit">unit</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddNumberUnitAttribute(string name, float value, string unit) { return AddNumberUnitAttribute(name, new FloatUnit(value, unit)); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type numberUnit.</summary> /// <param name="name">attribute name</param> /// <param name="value">attribute value</param> /// <returns>the newly-created GBaseAttribute object</returns> /////////////////////////////////////////////////////////////////////// public GBaseAttribute AddNumberUnitAttribute(string name, NumberUnit value) { return Add(new GBaseAttribute(name, GBaseAttributeType.NumberUnit, value.ToString())); } /////////////////////////////////////////////////////////////////////// /// <summary>Adds a new attribute of type date.</summary> /// 
/// <summary>Adds a new attribute of type date.</summary>
/// <param name="name">attribute name</param>
/// <param name="value">attribute value. Only the date will be
/// used; the value is passed through Utilities.LocalDateInUTC before
/// being stored.</param>
/// <returns>the newly-created GBaseAttribute object</returns>
public GBaseAttribute AddDateAttribute(string name, DateTime value)
{
    GBaseAttribute created = new GBaseAttribute(
        name, GBaseAttributeType.Date, Utilities.LocalDateInUTC(value));
    return Add(created);
}

/// <summary>Adds a new attribute of type date/time.</summary>
/// <param name="name">attribute name</param>
/// <param name="value">attribute value, passed through
/// Utilities.LocalDateTimeInUTC before being stored</param>
/// <returns>the newly-created GBaseAttribute object</returns>
public GBaseAttribute AddDateTimeAttribute(string name, DateTime value)
{
    GBaseAttribute created = new GBaseAttribute(
        name, GBaseAttributeType.DateTime, Utilities.LocalDateTimeInUTC(value));
    return Add(created);
}

/// <summary>Adds a new attribute of type dateTimeRange.</summary>
/// <param name="name">attribute name</param>
/// <param name="value">value</param>
/// <exception cref="ArgumentException">Thrown when the range is
/// empty, in which case you should add a DateTime attribute instead.
/// </exception>
/// <returns>the newly-created GBaseAttribute object</returns>
public GBaseAttribute AddDateTimeRangeAttribute(string name, DateTimeRange value)
{
    // The server rejects empty ranges, so refuse them up front.
    if (value.IsDateTimeOnly())
    {
        throw new ArgumentException("value should not be an empty range. "
            + "You probably want to convert it into a "
            + "DateTime and call AddDateTimeAttribute().");
    }
    GBaseAttribute created = new GBaseAttribute(
        name, GBaseAttributeType.DateTimeRange, value.ToString());
    return Add(created);
}

/// <summary>Returns the value of the first attribute of type
/// boolean with this name.</summary>
/// <param name="name">attribute name</param>
/// <param name="defaultValue">value to return if no attribute
/// was found</param>
/// <returns>the value of the first attribute, or the default</returns>
public bool GetBooleanAttribute(string name, bool defaultValue)
{
    bool found;
    return ExtractBooleanAttribute(name, out found) ? found : defaultValue;
}

/// <summary>Looks for an attribute of type boolean with this name.
/// </summary>
/// <param name="name">attribute name</param>
/// <param name="value">value to set if the attribute is found</param>
/// <returns>true if an attribute was found, in which case
/// the value will have been set.</returns>
public bool ExtractBooleanAttribute(string name, out bool value)
{
    String raw = GetAttributeAsString(name, GBaseAttributeType.Boolean);
    if (raw == null)
    {
        value = false;
        return false;
    }
    // Anything other than the literal "true" is treated as false.
    value = raw == "true";
    return true;
}

/// <summary>Returns the value of the first attribute of type
/// int with this name.</summary>
/// <param name="name">attribute name</param>
/// <param name="defaultValue">value to return if no attribute
/// was found</param>
/// <returns>the value of the first attribute, or the default</returns>
public int GetIntAttribute(string name, int defaultValue)
{
    int found;
    return ExtractIntAttribute(name, out found) ? found : defaultValue;
}

/// <summary>Looks for an attribute of type int with this name.
/// </summary>
/// <param name="name">attribute name</param>
/// <param name="value">value to set if the attribute is found</param>
/// <returns>true if an attribute was found, in which case
/// the value will have been set.</returns>
public bool ExtractIntAttribute(string name, out int value)
{
    String raw = GetAttributeAsString(name, GBaseAttributeType.Int);
    if (raw == null)
    {
        value = 0;
        return false;
    }
    value = NumberFormat.ToInt(raw);
    return true;
}

/// <summary>Returns the values of all the attributes of type
/// int with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<int> GetIntAttributes(string name)
{
    List<int> values = new List<int>();
    foreach (GBaseAttribute candidate in GetAttributes(name, GBaseAttributeType.Int))
    {
        String text = candidate.Content;
        if (text != null)
        {
            values.Add(NumberFormat.ToInt(text));
        }
    }
    return values;
}

/// <summary>Returns the value of the first attribute of type
/// float with this name.</summary>
/// <param name="name">attribute name</param>
/// <param name="defaultValue">value to return if no attribute
/// was found</param>
/// <returns>the value of the first attribute, or the default</returns>
public float GetFloatAttribute(string name, float defaultValue)
{
    float found;
    return ExtractFloatAttribute(name, out found) ? found : defaultValue;
}

/// <summary>Returns the value of the first attribute of type
/// number with this name.</summary>
/// <param name="name">attribute name</param>
/// <param name="defaultValue">value to return if no attribute
/// was found</param>
/// <returns>the value of the first attribute, or the default</returns>
public float GetNumberAttribute(string name, float defaultValue)
{
    float found;
    return ExtractNumberAttribute(name, out found) ? found : defaultValue;
}

/// <summary>Looks for an attribute of type float with this name.
/// </summary>
/// <param name="name">attribute name</param>
/// <param name="value">value to set if the attribute is found</param>
/// <returns>true if an attribute was found, in which case
/// the value will have been set.</returns>
public bool ExtractFloatAttribute(string name, out float value)
{
    return ExtractAttributeAsFloat(name, out value, GBaseAttributeType.Float);
}

// Shared implementation for ExtractFloatAttribute/ExtractNumberAttribute:
// parses the first matching attribute's string content, or reports false
// (with value set to 0) when no attribute of the given type exists.
private bool ExtractAttributeAsFloat(string name, out float value, GBaseAttributeType type)
{
    String raw = GetAttributeAsString(name, type);
    if (raw == null)
    {
        value = 0;
        return false;
    }
    value = NumberFormat.ToFloat(raw);
    return true;
}

/// <summary>Returns the values of all the attributes of type
/// float with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<float> GetFloatAttributes(string name)
{
    return GetAttributesAsFloat(name, GBaseAttributeType.Float);
}
/// <summary>Returns the values of all the attributes of type
/// number with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<float> GetNumberAttributes(string name)
{
    return GetAttributesAsFloat(name, GBaseAttributeType.Number);
}

// Shared implementation for GetFloatAttributes/GetNumberAttributes:
// parses the content of every matching attribute, skipping attributes
// that have no content.
private List<float> GetAttributesAsFloat(string name, GBaseAttributeType type)
{
    List<float> values = new List<float>();
    foreach (GBaseAttribute candidate in GetAttributes(name, type))
    {
        String text = candidate.Content;
        if (text != null)
        {
            values.Add(NumberFormat.ToFloat(text));
        }
    }
    return values;
}

/// <summary>Looks for an attribute of type number with this name.
/// </summary>
/// <param name="name">attribute name</param>
/// <param name="value">value to set if the attribute is found</param>
/// <returns>true if an attribute was found, in which case
/// the value will have been set.</returns>
public bool ExtractNumberAttribute(string name, out float value)
{
    return ExtractAttributeAsFloat(name, out value, GBaseAttributeType.Number);
}

/// <summary>Returns the value of the first attribute of type
/// intUnit, floatUnit or numberUnit with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>the value of the first attribute, or null</returns>
public NumberUnit GetNumberUnitAttribute(string name)
{
    return toNumberUnit(GetAttribute(name, GBaseAttributeType.NumberUnit));
}

/// <summary>Returns the values of all the attributes of type
/// intUnit, floatUnit or numberUnit with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<NumberUnit> GetNumberUnitAttributes(string name)
{
    List<NumberUnit> units = new List<NumberUnit>();
    foreach (GBaseAttribute candidate in GetAttributes(name, GBaseAttributeType.NumberUnit))
    {
        NumberUnit unit = toNumberUnit(candidate);
        if (unit != null)
        {
            units.Add(unit);
        }
    }
    return units;
}

/// <summary>Returns the value of the first attribute of type
/// floatUnit with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>the value of the first attribute, or null</returns>
public FloatUnit GetFloatUnitAttribute(string name)
{
    String raw = GetAttributeAsString(name, GBaseAttributeType.FloatUnit);
    return raw == null ? null : new FloatUnit(raw);
}

/// <summary>Returns the values of all the attributes of type
/// floatUnit with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<FloatUnit> GetFloatUnitAttributes(string name)
{
    List<FloatUnit> units = new List<FloatUnit>();
    foreach (String raw in GetAttributesAsString(name, GBaseAttributeType.FloatUnit))
    {
        if (raw != null)
        {
            units.Add(new FloatUnit(raw));
        }
    }
    return units;
}

/// <summary>Returns the value of the first attribute of type
/// intUnit with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>the value of the first attribute, or null</returns>
public IntUnit GetIntUnitAttribute(string name)
{
    String raw = GetAttributeAsString(name, GBaseAttributeType.IntUnit);
    return raw == null ? null : new IntUnit(raw);
}

/// <summary>Returns the values of all the attributes of type
/// intUnit with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<IntUnit> GetIntUnitAttributes(string name)
{
    List<IntUnit> units = new List<IntUnit>();
    foreach (String raw in GetAttributesAsString(name, GBaseAttributeType.IntUnit))
    {
        if (raw != null)
        {
            units.Add(new IntUnit(raw));
        }
    }
    return units;
}

// Converts a raw attribute into the matching NumberUnit subclass:
// IntUnit attributes become IntUnit, everything else becomes FloatUnit.
// Returns null when the attribute itself is null.
private NumberUnit toNumberUnit(GBaseAttribute attribute)
{
    if (attribute == null)
    {
        return null;
    }
    if (attribute.Type == GBaseAttributeType.IntUnit)
    {
        return new IntUnit(attribute.Content);
    }
    return new FloatUnit(attribute.Content);
}

/// <summary>Returns the value of the first attribute of type
/// date with this name.</summary>
/// <param name="name">attribute name</param>
/// <param name="defaultValue">value returned if no attribute
/// could be found.</param>
/// <returns>the value of the first attribute, or the default</returns>
public DateTime GetDateAttribute(string name, DateTime defaultValue)
{
    DateTime found;
    return ExtractDateAttribute(name, out found) ? found : defaultValue;
}

/// <summary>Looks for an attribute of type date with this name.
/// </summary>
/// <param name="name">attribute name</param>
/// <param name="value">value to set if the attribute is found</param>
/// <returns>true if an attribute was found, in which case
/// the value will have been set.</returns>
public bool ExtractDateAttribute(string name, out DateTime value)
{
    return ExtractAttributeAsDateTime(name, GBaseAttributeType.Date, out value);
}

/// <summary>Returns the value of the first attribute of type
/// date or dateTime with this name.</summary>
/// <param name="name">attribute name</param>
/// <param name="defaultValue">value returned if no attribute
/// could be found.</param>
/// <returns>the value of the first attribute, or the default</returns>
public DateTime GetDateTimeAttribute(string name, DateTime defaultValue)
{
    DateTime found;
    return ExtractDateTimeAttribute(name, out found) ? found : defaultValue;
}

/// <summary>Looks for an attribute of type date or dateTime with this
/// name</summary>
/// <param name="name">attribute name</param>
/// <param name="value">value to set if the attribute is found</param>
/// <returns>true if an attribute was found, in which case
/// the value will have been set.</returns>
public bool ExtractDateTimeAttribute(string name, out DateTime value)
{
    return ExtractAttributeAsDateTime(name, GBaseAttributeType.DateTime, out value);
}

// Shared implementation for ExtractDateAttribute/ExtractDateTimeAttribute.
// On a missing attribute, sets value to NoDateTime and returns false.
// On unparseable content, rethrows FormatException with the offending
// text appended so the caller can see which value failed to parse.
private bool ExtractAttributeAsDateTime(string name, GBaseAttributeType type, out DateTime value)
{
    String raw = GetAttributeAsString(name, type);
    if (raw == null)
    {
        value = NoDateTime;
        return false;
    }
    try
    {
        value = DateTime.Parse(raw);
        return true;
    }
    catch (FormatException e)
    {
        throw new FormatException(e.Message + " (" + raw + ")", e);
    }
}

/// <summary>Returns the value of the first attribute of type
/// date, dateTime or dateTimeRange with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>the value of the first attribute, or null</returns>
public DateTimeRange GetDateTimeRangeAttribute(string name)
{
    String raw = GetAttributeAsString(name, GBaseAttributeType.DateTimeRange);
    return raw == null ? null : new DateTimeRange(raw);
}

/// <summary>Returns the values of all the attributes of type
/// date with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<DateTime> GetDateAttributes(string name)
{
    return GetAttributesAsDateTime(name, GBaseAttributeType.Date);
}

/// <summary>Returns the values of all the attributes of type
/// date or dateTime with this name.</summary>
/// <param name="name">attribute name</param>
/// <returns>all the values found, never null</returns>
public List<DateTime> GetDateTimeAttributes(string name)
{
    return GetAttributesAsDateTime(name, GBaseAttributeType.DateTime);
}

// Shared implementation for GetDateAttributes/GetDateTimeAttributes:
// parses the content of every matching attribute, skipping attributes
// that have no content.
private List<DateTime> GetAttributesAsDateTime(string name, GBaseAttributeType type)
{
    List<DateTime> values = new List<DateTime>();
    foreach (GBaseAttribute candidate in GetAttributes(name, type))
    {
        if (candidate.Content != null)
        {
            values.Add(DateTime.Parse(candidate.Content));
        }
    }
    return values;
}
/////////////////////////////////////////////////////////////////////// /// <summary>Returns the values of all the attribute of type /// date, dateTime or dateTimeRange with this name.</summary> /// <param name="name">attribute name</param> /// <returns>all the values found, never nul</returns> /////////////////////////////////////////////////////////////////////// public List<DateTimeRange> GetDateTimeRangeAttributes(string name) { List<DateTimeRange> retval = new List<DateTimeRange>(); foreach (GBaseAttribute attribute in GetAttributes(name, GBaseAttributeType.DateTimeRange)) { if (attribute.Content != null) { retval.Add(new DateTimeRange(attribute.Content)); } } return retval; } private String ExtractContent(GBaseAttribute attribute) { if (attribute == null) { return null; } return attribute.Content; } private String[] ExtractContent(List<GBaseAttribute> attributes) { String[] retval = new String[attributes.Count]; for (int i = 0; i < retval.Length; i++) { retval[i] = ExtractContent(attributes[i]); } return retval; } } }
/*
 * Farseer Physics Engine:
 * Copyright (c) 2012 Ian Qvist
 *
 * Original source Box2D:
 * Copyright (c) 2006-2011 Erin Catto http://www.box2d.org
 *
 * This software is provided 'as-is', without any express or implied
 * warranty. In no event will the authors be held liable for any damages
 * arising from the use of this software.
 * Permission is granted to anyone to use this software for any purpose,
 * including commercial applications, and to alter it and redistribute it
 * freely, subject to the following restrictions:
 * 1. The origin of this software must not be misrepresented; you must not
 * claim that you wrote the original software. If you use this software
 * in a product, an acknowledgment in the product documentation would be
 * appreciated but is not required.
 * 2. Altered source versions must be plainly marked as such, and must not be
 * misrepresented as being the original software.
 * 3. This notice may not be removed or altered from any source distribution.
 */
namespace TrueSync.Physics2D
{
    /// <summary>
    /// A line segment (edge) shape. These can be connected in chains or loops
    /// to other edge shapes.
    /// The connectivity information is used to ensure correct contact normals.
    /// </summary>
    public class EdgeShape : Shape
    {
        /// <summary>
        /// Edge start vertex
        /// </summary>
        internal TSVector2 _vertex1;

        /// <summary>
        /// Edge end vertex
        /// </summary>
        internal TSVector2 _vertex2;

        // Parameterless constructor used by Clone(); leaves the vertices at
        // their default values. Density is 0 (edges have no area/mass).
        internal EdgeShape()
            : base(0)
        {
            ShapeType = ShapeType.Edge;
            _radius = Settings.PolygonRadius;
        }

        /// <summary>
        /// Create a new EdgeShape with the specified start and end.
        /// </summary>
        /// <param name="start">The start of the edge.</param>
        /// <param name="end">The end of the edge.</param>
        public EdgeShape(TSVector2 start, TSVector2 end)
            : base(0)
        {
            ShapeType = ShapeType.Edge;
            _radius = Settings.PolygonRadius;
            Set(start, end);
        }

        // An edge is always a single child shape.
        public override int ChildCount
        {
            get { return 1; }
        }

        /// <summary>
        /// Is true if the edge is connected to an adjacent vertex before vertex 1.
        /// </summary>
        public bool HasVertex0 { get; set; }

        /// <summary>
        /// Is true if the edge is connected to an adjacent vertex after vertex2.
        /// </summary>
        public bool HasVertex3 { get; set; }

        /// <summary>
        /// Optional adjacent vertices. These are used for smooth collision.
        /// </summary>
        public TSVector2 Vertex0 { get; set; }

        /// <summary>
        /// Optional adjacent vertices. These are used for smooth collision.
        /// </summary>
        public TSVector2 Vertex3 { get; set; }

        /// <summary>
        /// These are the edge vertices. Setting recomputes mass properties.
        /// </summary>
        public TSVector2 Vertex1
        {
            get { return _vertex1; }
            set
            {
                _vertex1 = value;
                ComputeProperties();
            }
        }

        /// <summary>
        /// These are the edge vertices. Setting recomputes mass properties.
        /// </summary>
        public TSVector2 Vertex2
        {
            get { return _vertex2; }
            set
            {
                _vertex2 = value;
                ComputeProperties();
            }
        }

        /// <summary>
        /// Set this as an isolated edge. Clears the adjacent-vertex flags and
        /// recomputes mass properties.
        /// </summary>
        /// <param name="start">The start.</param>
        /// <param name="end">The end.</param>
        public void Set(TSVector2 start, TSVector2 end)
        {
            _vertex1 = start;
            _vertex2 = end;
            HasVertex0 = false;
            HasVertex3 = false;
            ComputeProperties();
        }

        // A line segment has no interior, so no point is ever "inside" it.
        public override bool TestPoint(ref Transform transform, ref TSVector2 point)
        {
            return false;
        }

        // Ray-vs-segment intersection. Returns true and fills `output` when
        // the ray (in world space) hits the segment within input.MaxFraction.
        public override bool RayCast(out RayCastOutput output, ref RayCastInput input, ref Transform transform, int childIndex)
        {
            // p = p1 + t * d
            // v = v1 + s * e
            // p1 + t * d = v1 + s * e
            // s * e - t * d = p1 - v1
            output = new RayCastOutput();

            // Put the ray into the edge's frame of reference.
            TSVector2 p1 = MathUtils.MulT(transform.q, input.Point1 - transform.p);
            TSVector2 p2 = MathUtils.MulT(transform.q, input.Point2 - transform.p);
            TSVector2 d = p2 - p1;

            TSVector2 v1 = _vertex1;
            TSVector2 v2 = _vertex2;
            TSVector2 e = v2 - v1;
            TSVector2 normal = new TSVector2(e.y, -e.x); //TODO: Could possibly cache the normal.
            normal.Normalize();

            // q = p1 + t * d
            // dot(normal, q - v1) = 0
            // dot(normal, p1 - v1) + t * dot(normal, d) = 0
            FP numerator = TSVector2.Dot(normal, v1 - p1);
            FP denominator = TSVector2.Dot(normal, d);

            // Ray parallel to the edge: no intersection.
            if (denominator == 0.0f)
            {
                return false;
            }

            FP t = numerator / denominator;
            // Intersection behind the ray origin or beyond the allowed fraction.
            if (t < 0.0f || input.MaxFraction < t)
            {
                return false;
            }

            TSVector2 q = p1 + t * d;

            // q = v1 + s * r
            // s = dot(q - v1, r) / dot(r, r)
            TSVector2 r = v2 - v1;
            FP rr = TSVector2.Dot(r, r);
            // Degenerate (zero-length) edge.
            if (rr == 0.0f)
            {
                return false;
            }

            FP s = TSVector2.Dot(q - v1, r) / rr;
            // Hit point falls outside the segment.
            if (s < 0.0f || 1.0f < s)
            {
                return false;
            }

            output.Fraction = t;
            // Orient the reported normal against the ray direction.
            if (numerator > 0.0f)
            {
                output.Normal = -normal;
            }
            else
            {
                output.Normal = normal;
            }
            return true;
        }

        // Axis-aligned bounding box of the transformed segment, padded by the
        // shape radius on every side.
        public override void ComputeAABB(out AABB aabb, ref Transform transform, int childIndex)
        {
            TSVector2 v1 = MathUtils.Mul(ref transform, _vertex1);
            TSVector2 v2 = MathUtils.Mul(ref transform, _vertex2);

            TSVector2 lower = TSVector2.Min(v1, v2);
            TSVector2 upper = TSVector2.Max(v1, v2);

            TSVector2 r = new TSVector2(Radius, Radius);
            aabb.LowerBound = lower - r;
            aabb.UpperBound = upper + r;
        }

        // Edges carry no mass; only the centroid (segment midpoint) is set.
        protected override void ComputeProperties()
        {
            MassData.Centroid = 0.5f * (_vertex1 + _vertex2);
        }

        // A segment has zero area, so its submerged area is always zero.
        public override FP ComputeSubmergedArea(ref TSVector2 normal, FP offset, ref Transform xf, out TSVector2 sc)
        {
            sc = TSVector2.zero;
            return 0;
        }

        // Structural equality check against another edge (not IComparable
        // ordering despite the name): compares all four vertices and the
        // adjacency flags.
        public bool CompareTo(EdgeShape shape)
        {
            return (HasVertex0 == shape.HasVertex0 &&
                    HasVertex3 == shape.HasVertex3 &&
                    Vertex0 == shape.Vertex0 &&
                    Vertex1 == shape.Vertex1 &&
                    Vertex2 == shape.Vertex2 &&
                    Vertex3 == shape.Vertex3);
        }

        // Deep copy. Note the backing fields _vertex1/_vertex2 are assigned
        // directly so the Vertex1/Vertex2 setters do not re-run
        // ComputeProperties; MassData is copied verbatim instead.
        public override Shape Clone()
        {
            EdgeShape clone = new EdgeShape();
            clone.ShapeType = ShapeType;
            clone._radius = _radius;
            clone._density = _density;
            clone.HasVertex0 = HasVertex0;
            clone.HasVertex3 = HasVertex3;
            clone.Vertex0 = Vertex0;
            clone._vertex1 = _vertex1;
            clone._vertex2 = _vertex2;
            clone.Vertex3 = Vertex3;
            clone.MassData = MassData;
            return clone;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Runtime;
using System.Runtime.Serialization;
using System.ServiceModel.Channels;
using System.ServiceModel.Dispatcher;

namespace System.ServiceModel
{
    /// <summary>
    /// Exception representing a SOAP fault. Carries a FaultCode, a
    /// FaultReason, an optional action, and optionally the raw MessageFault
    /// it was created from. Serializable: the code/reason graphs are
    /// flattened into the nested FaultCodeData/FaultReasonData DTOs below.
    /// </summary>
    [Serializable]
    [KnownType(typeof(FaultCodeData))]
    [KnownType(typeof(FaultCodeData[]))]
    [KnownType(typeof(FaultReasonData))]
    [KnownType(typeof(FaultReasonData[]))]
    public class FaultException : CommunicationException
    {
        internal const string Namespace = "http://schemas.xmlsoap.org/Microsoft/WindowsCommunicationFoundation/2005/08/Faults/";

        // Default: generic "Sender" code with the stock fault reason text.
        public FaultException()
            : base(SR.SFxFaultReason)
        {
            Code = DefaultCode;
            Reason = DefaultReason;
        }

        public FaultException(string reason)
            : base(reason)
        {
            Code = DefaultCode;
            Reason = CreateReason(reason);
        }

        public FaultException(FaultReason reason)
            : base(GetSafeReasonText(reason))
        {
            Code = DefaultCode;
            Reason = EnsureReason(reason);
        }

        public FaultException(string reason, FaultCode code)
            : base(reason)
        {
            Code = EnsureCode(code);
            Reason = CreateReason(reason);
        }

        public FaultException(FaultReason reason, FaultCode code)
            : base(GetSafeReasonText(reason))
        {
            Code = EnsureCode(code);
            Reason = EnsureReason(reason);
        }

        public FaultException(string reason, FaultCode code, string action)
            : base(reason)
        {
            Code = EnsureCode(code);
            Reason = CreateReason(reason);
            Action = action;
        }

        internal FaultException(string reason, FaultCode code, string action, Exception innerException)
            : base(reason, innerException)
        {
            Code = EnsureCode(code);
            Reason = CreateReason(reason);
            Action = action;
        }

        public FaultException(FaultReason reason, FaultCode code, string action)
            : base(GetSafeReasonText(reason))
        {
            Code = EnsureCode(code);
            Reason = EnsureReason(reason);
            Action = action;
        }

        internal FaultException(FaultReason reason, FaultCode code, string action, Exception innerException)
            : base(GetSafeReasonText(reason), innerException)
        {
            Code = EnsureCode(code);
            Reason = EnsureReason(reason);
            Action = action;
        }

        // Wraps an existing MessageFault; null code/reason fall back to the
        // defaults via EnsureCode/EnsureReason.
        public FaultException(MessageFault fault)
            : base(GetSafeReasonText(GetReason(fault)))
        {
            if (fault == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(nameof(fault));
            }

            Code = EnsureCode(fault.Code);
            Reason = EnsureReason(fault.Reason);
            Fault = fault;
        }

        // NOTE(review): unlike the constructor above, this overload takes
        // fault.Code/fault.Reason verbatim (no Ensure* fallback) — confirm
        // this asymmetry is intentional before changing it.
        public FaultException(MessageFault fault, string action)
            : base(GetSafeReasonText(GetReason(fault)))
        {
            if (fault == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(nameof(fault));
            }

            Code = fault.Code;
            Reason = fault.Reason;
            Fault = fault;
            Action = action;
        }

        // Deserialization constructor; keys must match GetObjectData below.
        protected FaultException(SerializationInfo info, StreamingContext context)
            : base(info, context)
        {
            Code = ReconstructFaultCode(info, "code");
            Reason = ReconstructFaultReason(info, "reason");
            Fault = (MessageFault)info.GetValue("messageFault", typeof(MessageFault));
            Action = info.GetString("action");
        }

        public string Action { get; }

        public FaultCode Code { get; }

        private static FaultReason DefaultReason
        {
            get { return new FaultReason(SR.SFxFaultReason); }
        }

        private static FaultCode DefaultCode
        {
            get { return new FaultCode("Sender"); }
        }

        // Message always reflects the Reason, rendered null-safely.
        public override string Message
        {
            get { return GetSafeReasonText(Reason); }
        }

        public FaultReason Reason { get; }

        internal MessageFault Fault { get; }

        internal void AddFaultCodeObjectData(SerializationInfo info, string key, FaultCode code)
        {
            info.AddValue(key, FaultCodeData.GetObjectData(code));
        }

        internal void AddFaultReasonObjectData(SerializationInfo info, string key, FaultReason reason)
        {
            info.AddValue(key, FaultReasonData.GetObjectData(reason));
        }

        public static FaultException CreateFault(MessageFault messageFault, params Type[] faultDetailTypes)
        {
            return CreateFault(messageFault, null, faultDetailTypes);
        }

        // Deserializes the fault detail against the candidate detail types
        // and returns the strongly-typed FaultException<TDetail> produced by
        // the formatter.
        public static FaultException CreateFault(MessageFault messageFault, string action, params Type[] faultDetailTypes)
        {
            if (messageFault == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(nameof(messageFault));
            }

            if (faultDetailTypes == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(nameof(faultDetailTypes));
            }
            DataContractSerializerFaultFormatter faultFormatter = new DataContractSerializerFaultFormatter(faultDetailTypes);
            return faultFormatter.Deserialize(messageFault, action);
        }

        // Returns the original MessageFault when one was captured, otherwise
        // synthesizes one from Code/Reason.
        public virtual MessageFault CreateMessageFault()
        {
            if (Fault != null)
            {
                return Fault;
            }
            else
            {
                return MessageFault.CreateFault(Code, Reason);
            }
        }

        private static FaultReason CreateReason(string reason)
        {
            return (reason != null) ? new FaultReason(reason) : DefaultReason;
        }

        // Serialization; keys must match the deserialization constructor.
        public override void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            base.GetObjectData(info, context);
            AddFaultCodeObjectData(info, "code", Code);
            AddFaultReasonObjectData(info, "reason", Reason);
            info.AddValue("messageFault", Fault);
            info.AddValue("action", Action);
        }

        private static FaultReason GetReason(MessageFault fault)
        {
            if (fault == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(nameof(fault));
            }
            return fault.Reason;
        }

        internal static string GetSafeReasonText(MessageFault messageFault)
        {
            if (messageFault == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(nameof(messageFault));
            }

            return GetSafeReasonText(messageFault.Reason);
        }

        // Best-effort rendering of a FaultReason: current-culture translation
        // when available, otherwise a descriptive fallback string. Never
        // throws for a missing translation.
        internal static string GetSafeReasonText(FaultReason reason)
        {
            if (reason == null)
            {
                return SR.SFxUnknownFaultNullReason0;
            }

            try
            {
                return reason.GetMatchingTranslation(Globalization.CultureInfo.CurrentCulture).Text;
            }
            catch (ArgumentException)
            {
                if (reason.Translations.Count == 0)
                {
                    return SR.SFxUnknownFaultZeroReasons0;
                }
                else
                {
                    return SR.Format(SR.SFxUnknownFaultNoMatchingTranslation1, reason.Translations[0].Text);
                }
            }
        }

        private static FaultCode EnsureCode(FaultCode code)
        {
            return (code != null) ? code : DefaultCode;
        }

        private static FaultReason EnsureReason(FaultReason reason)
        {
            return (reason != null) ? reason : DefaultReason;
        }

        internal FaultCode ReconstructFaultCode(SerializationInfo info, string key)
        {
            FaultCodeData[] data = (FaultCodeData[])info.GetValue(key, typeof(FaultCodeData[]));
            return FaultCodeData.Construct(data);
        }

        internal FaultReason ReconstructFaultReason(SerializationInfo info, string key)
        {
            FaultReasonData[] data = (FaultReasonData[])info.GetValue(key, typeof(FaultReasonData[]));
            return FaultReasonData.Construct(data);
        }

        // Serializable flattening of the FaultCode -> SubCode linked chain
        // into an array (index 0 = outermost code).
        [Serializable]
        internal class FaultCodeData
        {
            private string name;
            private string ns;

            // Rebuilds the chain from the array, innermost-first so each new
            // FaultCode wraps the previously built subcode.
            internal static FaultCode Construct(FaultCodeData[] nodes)
            {
                FaultCode code = null;
                for (int i = nodes.Length - 1; i >= 0; i--)
                {
                    code = new FaultCode(nodes[i].name, nodes[i].ns, code);
                }
                return code;
            }

            internal static FaultCodeData[] GetObjectData(FaultCode code)
            {
                FaultCodeData[] array = new FaultCodeData[GetDepth(code)];

                for (int i = 0; i < array.Length; i++)
                {
                    array[i] = new FaultCodeData();
                    array[i].name = code.Name;
                    array[i].ns = code.Namespace;
                    code = code.SubCode;
                }

                Fx.Assert(code == null, "FaultException.FaultCodeData.GetObjectData: (code != null)");
                return array;
            }

            private static int GetDepth(FaultCode code)
            {
                int depth = 0;

                while (code != null)
                {
                    depth++;
                    code = code.SubCode;
                }

                return depth;
            }
        }

        // Serializable flattening of a FaultReason's translations.
        [Serializable]
        internal class FaultReasonData
        {
            private string xmlLang;
            private string text;

            internal static FaultReason Construct(FaultReasonData[] nodes)
            {
                FaultReasonText[] reasons = new FaultReasonText[nodes.Length];

                for (int i = 0; i < nodes.Length; i++)
                {
                    reasons[i] = new FaultReasonText(nodes[i].text, nodes[i].xmlLang);
                }

                return new FaultReason(reasons);
            }

            internal static FaultReasonData[] GetObjectData(FaultReason reason)
            {
                SynchronizedReadOnlyCollection<FaultReasonText> translations = reason.Translations;
                FaultReasonData[] array = new FaultReasonData[translations.Count];

                for (int i = 0; i < translations.Count; i++)
                {
                    array[i] = new FaultReasonData();
                    array[i].xmlLang = translations[i].XmlLang;
                    array[i].text = translations[i].Text;
                }

                return array;
            }
        }
    }

    /// <summary>
    /// Strongly-typed fault exception carrying a detail payload of type
    /// TDetail alongside the base code/reason/action.
    /// </summary>
    [Serializable]
    [KnownType("GetKnownTypes")]
    public class FaultException<TDetail> : FaultException
    {
        public FaultException(TDetail detail)
            : base()
        {
            Detail = detail;
        }

        public FaultException(TDetail detail, string reason)
            : base(reason)
        {
            Detail = detail;
        }

        public FaultException(TDetail detail, FaultReason reason)
            : base(reason)
        {
            Detail = detail;
        }

        public FaultException(TDetail detail, string reason, FaultCode code)
            : base(reason, code)
        {
            Detail = detail;
        }

        public FaultException(TDetail detail, FaultReason reason, FaultCode code)
            : base(reason, code)
        {
            Detail = detail;
        }

        public FaultException(TDetail detail, string reason, FaultCode code, string action)
            : base(reason, code, action)
        {
            Detail = detail;
        }

        public FaultException(TDetail detail, FaultReason reason, FaultCode code, string action)
            : base(reason, code, action)
        {
            Detail = detail;
        }

        // Deserialization constructor; the "detail" key must match
        // GetObjectData below.
        protected FaultException(SerializationInfo info, StreamingContext context)
            : base(info, context)
        {
            Detail = (TDetail)info.GetValue("detail", typeof(TDetail));
        }

        public TDetail Detail { get; }

        // Unlike the base class, always builds the fault from
        // Code/Reason/Detail (the captured Fault is not consulted here).
        public override MessageFault CreateMessageFault()
        {
            return MessageFault.CreateFault(Code, Reason, Detail);
        }

        public override void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            base.GetObjectData(info, context);
            info.AddValue("detail", Detail);
        }

        public override string ToString()
        {
            return SR.Format(SR.SFxFaultExceptionToString3, GetType(), Message, Detail != null ? Detail.ToString() : string.Empty);
        }

        private static Type[] s_knownTypes = new Type[] { typeof(TDetail) };

        // Referenced by the [KnownType("GetKnownTypes")] attribute above.
        internal static IEnumerable<Type> GetKnownTypes()
        {
            return s_knownTypes;
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeGeneration;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.GenerateType
{
    internal abstract partial class AbstractGenerateTypeService<TService, TSimpleNameSyntax, TObjectCreationExpressionSyntax, TExpressionSyntax, TTypeDeclarationSyntax, TArgumentSyntax>
    {
        /// <summary>
        /// Immutable-after-initialization bag of everything the "Generate Type" fix needs to
        /// know about the unbound name it was invoked on: the name itself, where to generate
        /// the new type, any inferable base type, and a set of flags describing which kinds of
        /// type (class/interface/struct/delegate/enum) are legal at this location.
        /// Built via <see cref="Generate"/>, which returns null when the fix is not applicable.
        /// </summary>
        protected class State
        {
            public string Name { get; private set; }
            public bool NameIsVerbatim { get; private set; }

            // The name node that we're on.  Will be used to the name the type if it's
            // generated.
            public TSimpleNameSyntax SimpleName { get; private set; }

            // The entire expression containing the name, not including the creation.  i.e. "X.Foo"
            // in "new X.Foo()".
            public TExpressionSyntax NameOrMemberAccessExpression { get; private set; }

            // The object creation node if we have one.  i.e. if we're on the 'Foo' in "new X.Foo()".
            public TObjectCreationExpressionSyntax ObjectCreationExpressionOpt { get; private set; }

            // One of these will be non null.  It's also possible for both to be non null.  For
            // example, if you have "class C { Foo f; }", then "Foo" can be generated inside C or
            // inside the global namespace.  The namespace can be null or the type can be null if the
            // user has something like "ExistingType.NewType" or "ExistingNamespace.NewType".  In
            // that case they're being explicit about what they want to generate into.
            public INamedTypeSymbol TypeToGenerateInOpt { get; private set; }
            public string NamespaceToGenerateInOpt { get; private set; }

            // If we can infer a base type or interface for this type.
            //
            // i.e.: "IList<int> foo = new MyList();"
            public INamedTypeSymbol BaseTypeOrInterfaceOpt { get; private set; }
            public bool IsInterface { get; private set; }
            public bool IsStruct { get; private set; }
            public bool IsAttribute { get; private set; }
            public bool IsException { get; private set; }
            public bool IsMembersWithModule { get; private set; }
            public bool IsTypeGeneratedIntoNamespaceFromMemberAccess { get; private set; }
            public bool IsSimpleNameGeneric { get; private set; }
            public bool IsPublicAccessibilityForTypeGeneration { get; private set; }
            public bool IsInterfaceOrEnumNotAllowedInTypeContext { get; private set; }
            public IMethodSymbol DelegateMethodSymbol { get; private set; }
            public bool IsDelegateAllowed { get; private set; }
            public bool IsEnumNotAllowed { get; private set; }
            public Compilation Compilation { get; private set; }
            public bool IsDelegateOnly { get; private set; }
            public bool IsClassInterfaceTypes { get; private set; }
            public List<TSimpleNameSyntax> PropertiesToGenerate { get; private set; }

            private State(Compilation compilation)
            {
                Compilation = compilation;
            }

            /// <summary>
            /// Factory entry point.  Returns a fully initialized state, or null when generate-type
            /// does not apply at <paramref name="node"/>.
            /// </summary>
            public static State Generate(
                TService service,
                SemanticDocument document,
                SyntaxNode node,
                CancellationToken cancellationToken)
            {
                var state = new State(document.SemanticModel.Compilation);
                if (!state.TryInitialize(service, document, node, cancellationToken))
                {
                    return null;
                }

                return state;
            }

            // Performs all applicability checks and fills in every property; returns false to
            // abort (name already binds, not a creatable/visible candidate, illegal context, or
            // no place to generate into).
            private bool TryInitialize(
                TService service,
                SemanticDocument document,
                SyntaxNode node,
                CancellationToken cancellationToken)
            {
                if (!(node is TSimpleNameSyntax))
                {
                    return false;
                }

                this.SimpleName = (TSimpleNameSyntax)node;
                string name;
                int arity;
                var syntaxFacts = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
                syntaxFacts.GetNameAndArityOfSimpleName(this.SimpleName, out name, out arity);

                this.Name = name;
                this.NameIsVerbatim = syntaxFacts.IsVerbatimIdentifier(this.SimpleName.GetFirstToken());
                if (string.IsNullOrWhiteSpace(this.Name))
                {
                    return false;
                }

                // We only support simple names or dotted names.  i.e. "(some + expr).Foo" is not a
                // valid place to generate a type for Foo.
                GenerateTypeServiceStateOptions generateTypeServiceStateOptions;
                if (!service.TryInitializeState(document, this.SimpleName, cancellationToken, out generateTypeServiceStateOptions))
                {
                    return false;
                }

                this.NameOrMemberAccessExpression = generateTypeServiceStateOptions.NameOrMemberAccessExpression;
                this.ObjectCreationExpressionOpt = generateTypeServiceStateOptions.ObjectCreationExpressionOpt;

                var semanticModel = document.SemanticModel;
                var info = semanticModel.GetSymbolInfo(this.SimpleName, cancellationToken);
                if (info.Symbol != null)
                {
                    // This bound, so no need to generate anything.
                    return false;
                }

                var semanticFacts = document.Project.LanguageServices.GetService<ISemanticFactsService>();
                if (!semanticFacts.IsTypeContext(semanticModel, this.NameOrMemberAccessExpression.SpanStart, cancellationToken) &&
                    !semanticFacts.IsExpressionContext(semanticModel, this.NameOrMemberAccessExpression.SpanStart, cancellationToken) &&
                    !semanticFacts.IsStatementContext(semanticModel, this.NameOrMemberAccessExpression.SpanStart, cancellationToken) &&
                    !semanticFacts.IsNameOfContext(semanticModel, this.NameOrMemberAccessExpression.SpanStart, cancellationToken) &&
                    !semanticFacts.IsNamespaceContext(semanticModel, this.NameOrMemberAccessExpression.SpanStart, cancellationToken))
                {
                    return false;
                }

                // If this isn't something that can be created, then don't bother offering to create
                // it.
                if (info.CandidateReason == CandidateReason.NotCreatable)
                {
                    return false;
                }

                if (info.CandidateReason == CandidateReason.Inaccessible ||
                    info.CandidateReason == CandidateReason.NotReferencable ||
                    info.CandidateReason == CandidateReason.OverloadResolutionFailure)
                {
                    // We bound to something inaccessible, or overload resolution on a
                    // constructor call failed.  Don't want to offer GenerateType here.
                    return false;
                }

                if (this.ObjectCreationExpressionOpt != null)
                {
                    // If we're new'ing up something illegal, then don't offer generate type.
                    var typeInfo = semanticModel.GetTypeInfo(this.ObjectCreationExpressionOpt, cancellationToken);
                    if (typeInfo.Type.IsModuleType())
                    {
                        return false;
                    }
                }

                DetermineNamespaceOrTypeToGenerateIn(service, document, cancellationToken);

                // Now, try to infer a possible base type for this new class/interface.
                this.InferBaseType(service, document, cancellationToken);
                this.IsInterface = GenerateInterface(service, cancellationToken);
                this.IsStruct = GenerateStruct(service, semanticModel, cancellationToken);
                this.IsAttribute = this.BaseTypeOrInterfaceOpt != null && this.BaseTypeOrInterfaceOpt.Equals(semanticModel.Compilation.AttributeType());
                this.IsException = this.BaseTypeOrInterfaceOpt != null && this.BaseTypeOrInterfaceOpt.Equals(semanticModel.Compilation.ExceptionType());
                this.IsMembersWithModule = generateTypeServiceStateOptions.IsMembersWithModule;
                this.IsTypeGeneratedIntoNamespaceFromMemberAccess = generateTypeServiceStateOptions.IsTypeGeneratedIntoNamespaceFromMemberAccess;
                this.IsInterfaceOrEnumNotAllowedInTypeContext = generateTypeServiceStateOptions.IsInterfaceOrEnumNotAllowedInTypeContext;
                this.IsDelegateAllowed = generateTypeServiceStateOptions.IsDelegateAllowed;
                this.IsDelegateOnly = generateTypeServiceStateOptions.IsDelegateOnly;
                this.IsEnumNotAllowed = generateTypeServiceStateOptions.IsEnumNotAllowed;
                this.DelegateMethodSymbol = generateTypeServiceStateOptions.DelegateCreationMethodSymbol;
                this.IsClassInterfaceTypes = generateTypeServiceStateOptions.IsClassInterfaceTypes;
                this.IsSimpleNameGeneric = service.IsGenericName(this.SimpleName);
                this.PropertiesToGenerate = generateTypeServiceStateOptions.PropertiesToGenerate;

                // Attributes with type parameters are not legal, so bail out of generating into
                // a generic containing type in that case.
                if (this.IsAttribute && this.TypeToGenerateInOpt.GetAllTypeParameters().Any())
                {
                    this.TypeToGenerateInOpt = null;
                }

                return this.TypeToGenerateInOpt != null || this.NamespaceToGenerateInOpt != null;
            }

            private void InferBaseType(
                TService service,
                SemanticDocument document,
                CancellationToken cancellationToken)
            {
                // See if we can find a possible base type for the type being generated.
                // NOTE(cyrusn): I currently limit this to when we have an object creation node.
                // That's because that's when we would have an expression that could be converted to
                // something else.  i.e. if the user writes "IList<int> list = new Foo()" then we can
                // infer a base interface for 'Foo'.  However, if they write "IList<int> list = Foo"
                // then we don't really want to infer a base type for 'Foo'.

                // However, there are a few other cases where we can infer a base type.
                var syntaxFacts = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
                if (service.IsInCatchDeclaration(this.NameOrMemberAccessExpression))
                {
                    this.BaseTypeOrInterfaceOpt = document.SemanticModel.Compilation.ExceptionType();
                }
                else if (syntaxFacts.IsAttributeName(this.NameOrMemberAccessExpression))
                {
                    this.BaseTypeOrInterfaceOpt = document.SemanticModel.Compilation.AttributeType();
                }
                else if (
                    service.IsArrayElementType(this.NameOrMemberAccessExpression) ||
                    service.IsInVariableTypeContext(this.NameOrMemberAccessExpression) ||
                    this.ObjectCreationExpressionOpt != null)
                {
                    var expr = this.ObjectCreationExpressionOpt ?? this.NameOrMemberAccessExpression;
                    var typeInference = document.Project.LanguageServices.GetService<ITypeInferenceService>();
                    var baseType = typeInference.InferType(document.SemanticModel, expr, objectAsDefault: true, cancellationToken: cancellationToken) as INamedTypeSymbol;
                    SetBaseType(baseType);
                }
            }

            // Records the inferred base type only when it is actually inheritable/implementable.
            private void SetBaseType(INamedTypeSymbol baseType)
            {
                if (baseType == null)
                {
                    return;
                }

                // A base type needs to be a non-sealed, non-static class or an interface type.
                // Also, being 'object' is redundant as the base type.
                if (baseType.IsSealed || baseType.IsStatic || baseType.SpecialType == SpecialType.System_Object)
                {
                    return;
                }

                if (baseType.TypeKind != TypeKind.Class && baseType.TypeKind != TypeKind.Interface)
                {
                    return;
                }

                this.BaseTypeOrInterfaceOpt = baseType;
            }

            // True when the name appears where a value-type constraint forces a struct.
            private bool GenerateStruct(TService service, SemanticModel semanticModel, CancellationToken cancellationToken)
            {
                return service.IsInValueTypeConstraintContext(semanticModel, this.NameOrMemberAccessExpression, cancellationToken);
            }

            // Heuristic: "IFoo"-style names outside 'new' expressions (with no class base type
            // inferred) generate an interface, as does appearing in an interface list.
            private bool GenerateInterface(
                TService service,
                CancellationToken cancellationToken)
            {
                if (!this.IsAttribute &&
                    !this.IsException &&
                    this.Name.LooksLikeInterfaceName() &&
                    this.ObjectCreationExpressionOpt == null &&
                    (this.BaseTypeOrInterfaceOpt == null || this.BaseTypeOrInterfaceOpt.TypeKind == TypeKind.Interface))
                {
                    return true;
                }

                return service.IsInInterfaceList(this.NameOrMemberAccessExpression);
            }

            private void DetermineNamespaceOrTypeToGenerateIn(
                TService service,
                SemanticDocument document,
                CancellationToken cancellationToken)
            {
                DetermineNamespaceOrTypeToGenerateInWorker(service, document.SemanticModel, cancellationToken);

                // Can only generate into a type if it's a class and it's from source.
                if (this.TypeToGenerateInOpt != null)
                {
                    if (this.TypeToGenerateInOpt.TypeKind != TypeKind.Class &&
                        this.TypeToGenerateInOpt.TypeKind != TypeKind.Module)
                    {
                        this.TypeToGenerateInOpt = null;
                    }
                    else
                    {
                        var symbol = SymbolFinder.FindSourceDefinitionAsync(this.TypeToGenerateInOpt, document.Project.Solution, cancellationToken).WaitAndGetResult(cancellationToken);
                        if (symbol == null ||
                            !symbol.IsKind(SymbolKind.NamedType) ||
                            !symbol.Locations.Any(loc => loc.IsInSource))
                        {
                            this.TypeToGenerateInOpt = null;
                            return;
                        }

                        var sourceTreeToBeGeneratedIn = symbol.Locations.First(loc => loc.IsInSource).SourceTree;
                        var documentToBeGeneratedIn = document.Project.Solution.GetDocument(sourceTreeToBeGeneratedIn);

                        if (documentToBeGeneratedIn == null)
                        {
                            this.TypeToGenerateInOpt = null;
                            return;
                        }

                        // If the 2 documents are in different project then we must have Public Accessibility.
                        // If we are generating in a website project, we also want the type to be public so the
                        // designer files can access the type.
                        if (documentToBeGeneratedIn.Project != document.Project ||
                            service.GeneratedTypesMustBePublic(documentToBeGeneratedIn.Project))
                        {
                            this.IsPublicAccessibilityForTypeGeneration = true;
                        }

                        this.TypeToGenerateInOpt = (INamedTypeSymbol)symbol;
                    }
                }

                if (this.TypeToGenerateInOpt != null)
                {
                    if (!CodeGenerator.CanAdd(document.Project.Solution, this.TypeToGenerateInOpt, cancellationToken))
                    {
                        this.TypeToGenerateInOpt = null;
                    }
                }
            }

            private bool DetermineNamespaceOrTypeToGenerateInWorker(
                TService service,
                SemanticModel semanticModel,
                CancellationToken cancellationToken)
            {
                // If we're on the right of a dot, see if we can figure out what's on the left.  If
                // it doesn't bind to a type or a namespace, then we can't proceed.
                if (this.SimpleName != this.NameOrMemberAccessExpression)
                {
                    return DetermineNamespaceOrTypeToGenerateIn(
                        service, semanticModel,
                        service.GetLeftSideOfDot(this.SimpleName), cancellationToken);
                }
                else
                {
                    // The name is standing alone.  We can either generate the type into our
                    // containing type, or into our containing namespace.
                    //
                    // TODO(cyrusn): We need to make this logic work if the type is in the
                    // base/interface list of a type.
                    var format = SymbolDisplayFormat.FullyQualifiedFormat.WithGlobalNamespaceStyle(SymbolDisplayGlobalNamespaceStyle.Omitted);
                    this.TypeToGenerateInOpt = service.DetermineTypeToGenerateIn(semanticModel, this.SimpleName, cancellationToken);
                    if (this.TypeToGenerateInOpt != null)
                    {
                        this.NamespaceToGenerateInOpt = this.TypeToGenerateInOpt.ContainingNamespace.ToDisplayString(format);
                    }
                    else
                    {
                        var namespaceSymbol = semanticModel.GetEnclosingNamespace(this.SimpleName.SpanStart, cancellationToken);
                        if (namespaceSymbol != null)
                        {
                            this.NamespaceToGenerateInOpt = namespaceSymbol.ToDisplayString(format);
                        }
                    }
                }

                return true;
            }

            // Binds the expression on the left of the dot and decides whether to generate into
            // that namespace, that named type, or (for unbound dotted names) a new namespace.
            private bool DetermineNamespaceOrTypeToGenerateIn(
                TService service,
                SemanticModel semanticModel,
                TExpressionSyntax leftSide,
                CancellationToken cancellationToken)
            {
                var leftSideInfo = semanticModel.GetSymbolInfo(leftSide, cancellationToken);

                if (leftSideInfo.Symbol != null)
                {
                    var symbol = leftSideInfo.Symbol;
                    if (symbol is INamespaceSymbol)
                    {
                        this.NamespaceToGenerateInOpt = symbol.ToNameDisplayString();
                        return true;
                    }
                    else if (symbol is INamedTypeSymbol)
                    {
                        // TODO: Code coverage
                        this.TypeToGenerateInOpt = (INamedTypeSymbol)symbol.OriginalDefinition;
                        return true;
                    }

                    // We bound to something other than a namespace or named type.  Can't generate a
                    // type inside this.
                    return false;
                }
                else
                {
                    // If it's a dotted name, then perhaps it's a namespace.  i.e. the user wrote
                    // "new Foo.Bar.Baz()".  In this case we want to generate a namespace for
                    // "Foo.Bar".
                    IList<string> nameParts;
                    if (service.TryGetNameParts(leftSide, out nameParts))
                    {
                        this.NamespaceToGenerateInOpt = string.Join(".", nameParts);
                        return true;
                    }
                }

                return false;
            }
        }

        /// <summary>
        /// Options bag populated by the language-specific <c>TryInitializeState</c> and copied
        /// into <see cref="State"/> during initialization.
        /// </summary>
        protected class GenerateTypeServiceStateOptions
        {
            public TExpressionSyntax NameOrMemberAccessExpression { get; set; }
            public TObjectCreationExpressionSyntax ObjectCreationExpressionOpt { get; set; }
            public IMethodSymbol DelegateCreationMethodSymbol { get; set; }
            public List<TSimpleNameSyntax> PropertiesToGenerate { get; private set; }
            public bool IsMembersWithModule { get; set; }
            public bool IsTypeGeneratedIntoNamespaceFromMemberAccess { get; set; }
            public bool IsInterfaceOrEnumNotAllowedInTypeContext { get; set; }
            public bool IsDelegateAllowed { get; set; }
            public bool IsEnumNotAllowed { get; set; }
            public bool IsDelegateOnly { get; internal set; }
            public bool IsClassInterfaceTypes { get; internal set; }

            public GenerateTypeServiceStateOptions()
            {
                NameOrMemberAccessExpression = null;
                ObjectCreationExpressionOpt = null;
                DelegateCreationMethodSymbol = null;
                IsMembersWithModule = false;
                PropertiesToGenerate = new List<TSimpleNameSyntax>();
                IsTypeGeneratedIntoNamespaceFromMemberAccess = false;
                IsInterfaceOrEnumNotAllowedInTypeContext = false;
                IsDelegateAllowed = true;
                IsEnumNotAllowed = false;
                IsDelegateOnly = false;
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;

using Document = Lucene.Net.Documents.Document;
using FieldSelector = Lucene.Net.Documents.FieldSelector;
using MultiTermDocs = Lucene.Net.Index.MultiSegmentReader.MultiTermDocs;
using MultiTermEnum = Lucene.Net.Index.MultiSegmentReader.MultiTermEnum;
using MultiTermPositions = Lucene.Net.Index.MultiSegmentReader.MultiTermPositions;

namespace Lucene.Net.Index
{
    /// <summary>An IndexReader which reads multiple indexes, appending their content.
    /// Document numbers of sub-readers are remapped into one contiguous range using
    /// the <c>starts</c> offsets; most per-document calls dispatch to the owning
    /// sub-reader via <see cref="ReaderIndex"/>.
    /// </summary>
    public class MultiReader : IndexReader
    {
        protected internal IndexReader[] subReaders;
        private int[] starts; // 1st docno for each segment
        private bool[] decrefOnClose; // remember which subreaders to decRef on close
        // Per-field norms, rebuilt lazily; synchronized wrapper for thread safety.
        private System.Collections.Hashtable normsCache = System.Collections.Hashtable.Synchronized(new System.Collections.Hashtable());
        private int maxDoc = 0;
        private int numDocs = - 1; // -1 means "not yet computed" (cache sentinel)
        private bool hasDeletions = false;

        /// <summary> <p>Construct a MultiReader aggregating the named set of (sub)readers.
        /// Directory locking for delete, undeleteAll, and setNorm operations is
        /// left to the subreaders. </p>
        /// <p>Note that all subreaders are closed if this Multireader is closed.</p>
        /// </summary>
        /// <param name="subReaders">set of (sub)readers
        /// </param>
        /// <throws>  IOException </throws>
        public MultiReader(IndexReader[] subReaders)
        {
            Initialize(subReaders, true);
        }

        /// <summary> <p>Construct a MultiReader aggregating the named set of (sub)readers.
        /// Directory locking for delete, undeleteAll, and setNorm operations is
        /// left to the subreaders. </p>
        /// </summary>
        /// <param name="closeSubReaders">indicates whether the subreaders should be closed
        /// when this MultiReader is closed
        /// </param>
        /// <param name="subReaders">set of (sub)readers
        /// </param>
        /// <throws>  IOException </throws>
        public MultiReader(IndexReader[] subReaders, bool closeSubReaders)
        {
            Initialize(subReaders, closeSubReaders);
        }

        // Copies the sub-reader array, builds the doc-number offset table, and either
        // takes a reference (IncRef + decRef-on-close) or adopts ownership (close on close)
        // of each sub-reader depending on closeSubReaders.
        private void Initialize(IndexReader[] subReaders, bool closeSubReaders)
        {
            this.subReaders = (IndexReader[]) subReaders.Clone();
            starts = new int[subReaders.Length + 1]; // build starts array
            decrefOnClose = new bool[subReaders.Length];

            for (int i = 0; i < subReaders.Length; i++)
            {
                starts[i] = maxDoc;
                maxDoc += subReaders[i].MaxDoc(); // compute maxDocs

                if (!closeSubReaders)
                {
                    subReaders[i].IncRef();
                    decrefOnClose[i] = true;
                }
                else
                {
                    decrefOnClose[i] = false;
                }

                if (subReaders[i].HasDeletions())
                    hasDeletions = true;
            }
            starts[subReaders.Length] = maxDoc;
        }

        /// <summary> Tries to reopen the subreaders.
        /// <br>
        /// If one or more subreaders could be re-opened (i. e. subReader.reopen()
        /// returned a new instance != subReader), then a new MultiReader instance
        /// is returned, otherwise this instance is returned.
        /// <p>
        /// A re-opened instance might share one or more subreaders with the old
        /// instance. Index modification operations result in undefined behavior
        /// when performed before the old instance is closed.
        /// (see {@link IndexReader#Reopen()}).
        /// <p>
        /// If subreaders are shared, then the reference count of those
        /// readers is increased to ensure that the subreaders remain open
        /// until the last referring reader is closed.
        /// </summary>
        /// <throws>  CorruptIndexException if the index is corrupt </throws>
        /// <throws>  IOException if there is a low-level IO error </throws>
        public override IndexReader Reopen()
        {
            EnsureOpen();

            bool reopened = false;
            IndexReader[] newSubReaders = new IndexReader[subReaders.Length];
            bool[] newDecrefOnClose = new bool[subReaders.Length];

            bool success = false;
            try
            {
                for (int i = 0; i < subReaders.Length; i++)
                {
                    newSubReaders[i] = subReaders[i].Reopen();

                    // if at least one of the subreaders was updated we remember that
                    // and return a new MultiReader
                    if (newSubReaders[i] != subReaders[i])
                    {
                        reopened = true;
                        // this is a new subreader instance, so on close() we don't
                        // decRef but close it
                        newDecrefOnClose[i] = false;
                    }
                }

                if (reopened)
                {
                    for (int i = 0; i < subReaders.Length; i++)
                    {
                        if (newSubReaders[i] == subReaders[i])
                        {
                            newSubReaders[i].IncRef();
                            newDecrefOnClose[i] = true;
                        }
                    }

                    MultiReader mr = new MultiReader(newSubReaders);
                    mr.decrefOnClose = newDecrefOnClose;
                    success = true;
                    return mr;
                }
                else
                {
                    success = true;
                    return this;
                }
            }
            finally
            {
                // On failure, release whatever sub-readers were already re-opened.
                if (!success && reopened)
                {
                    for (int i = 0; i < newSubReaders.Length; i++)
                    {
                        if (newSubReaders[i] != null)
                        {
                            try
                            {
                                if (newDecrefOnClose[i])
                                {
                                    newSubReaders[i].DecRef();
                                }
                                else
                                {
                                    newSubReaders[i].Close();
                                }
                            }
                            catch (System.IO.IOException)
                            {
                                // keep going - we want to clean up as much as possible
                            }
                        }
                    }
                }
            }
        }

        public override TermFreqVector[] GetTermFreqVectors(int n)
        {
            EnsureOpen();
            int i = ReaderIndex(n); // find segment num
            return subReaders[i].GetTermFreqVectors(n - starts[i]); // dispatch to segment
        }

        public override TermFreqVector GetTermFreqVector(int n, System.String field)
        {
            EnsureOpen();
            int i = ReaderIndex(n); // find segment num
            return subReaders[i].GetTermFreqVector(n - starts[i], field);
        }

        public override void GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
        {
            EnsureOpen();
            int i = ReaderIndex(docNumber); // find segment num
            subReaders[i].GetTermFreqVector(docNumber - starts[i], field, mapper);
        }

        public override void GetTermFreqVector(int docNumber, TermVectorMapper mapper)
        {
            EnsureOpen();
            int i = ReaderIndex(docNumber); // find segment num
            subReaders[i].GetTermFreqVector(docNumber - starts[i], mapper);
        }

        // A multi-reader never represents a single optimized segment.
        public override bool IsOptimized()
        {
            return false;
        }

        public override int NumDocs()
        {
            lock (this)
            {
                // Don't call ensureOpen() here (it could affect performance)
                if (numDocs == - 1)
                {
                    // check cache
                    int n = 0; // cache miss--recompute
                    for (int i = 0; i < subReaders.Length; i++)
                        n += subReaders[i].NumDocs(); // sum from readers
                    numDocs = n;
                }
                return numDocs;
            }
        }

        public override int MaxDoc()
        {
            // Don't call ensureOpen() here (it could affect performance)
            return maxDoc;
        }

        // inherit javadoc
        public override Document Document(int n, FieldSelector fieldSelector)
        {
            EnsureOpen();
            int i = ReaderIndex(n); // find segment num
            return subReaders[i].Document(n - starts[i], fieldSelector); // dispatch to segment reader
        }

        public override bool IsDeleted(int n)
        {
            // Don't call ensureOpen() here (it could affect performance)
            int i = ReaderIndex(n); // find segment num
            return subReaders[i].IsDeleted(n - starts[i]); // dispatch to segment reader
        }

        public override bool HasDeletions()
        {
            // Don't call ensureOpen() here (it could affect performance)
            return hasDeletions;
        }

        protected internal override void DoDelete(int n)
        {
            numDocs = - 1; // invalidate cache
            int i = ReaderIndex(n); // find segment num
            subReaders[i].DeleteDocument(n - starts[i]); // dispatch to segment reader
            hasDeletions = true;
        }

        protected internal override void DoUndeleteAll()
        {
            for (int i = 0; i < subReaders.Length; i++)
                subReaders[i].UndeleteAll();

            hasDeletions = false;
            numDocs = - 1; // invalidate cache
        }

        private int ReaderIndex(int n)
        {
            // find reader for doc n:
            return MultiSegmentReader.ReaderIndex(n, this.starts, this.subReaders.Length);
        }

        public override bool HasNorms(System.String field)
        {
            EnsureOpen();
            for (int i = 0; i < subReaders.Length; i++)
            {
                if (subReaders[i].HasNorms(field))
                    return true;
            }
            return false;
        }

        // Lazily-built all-ones norms array, used when no sub-reader has norms for a field.
        private byte[] ones;
        private byte[] FakeNorms()
        {
            if (ones == null)
                ones = SegmentReader.CreateFakeNorms(MaxDoc());
            return ones;
        }

        public override byte[] Norms(System.String field)
        {
            lock (this)
            {
                EnsureOpen();
                byte[] bytes = (byte[]) normsCache[field];
                if (bytes != null)
                    return bytes; // cache hit
                if (!HasNorms(field))
                    return FakeNorms();

                bytes = new byte[MaxDoc()];
                for (int i = 0; i < subReaders.Length; i++)
                    subReaders[i].Norms(field, bytes, starts[i]);
                normsCache[field] = bytes; // update cache
                return bytes;
            }
        }

        public override void Norms(System.String field, byte[] result, int offset)
        {
            lock (this)
            {
                EnsureOpen();
                byte[] bytes = (byte[]) normsCache[field];
                if (bytes == null && !HasNorms(field))
                    bytes = FakeNorms();
                if (bytes != null)
                    // cache hit
                    Array.Copy(bytes, 0, result, offset, MaxDoc());

                for (int i = 0; i < subReaders.Length; i++)
                    // read from segments
                    subReaders[i].Norms(field, result, offset + starts[i]);
            }
        }

        protected internal override void DoSetNorm(int n, System.String field, byte value_Renamed)
        {
            lock (normsCache)
            {
                normsCache.Remove(field); // clear cache
            }
            int i = ReaderIndex(n); // find segment num
            subReaders[i].SetNorm(n - starts[i], field, value_Renamed); // dispatch
        }

        public override TermEnum Terms()
        {
            EnsureOpen();
            return new MultiTermEnum(subReaders, starts, null);
        }

        public override TermEnum Terms(Term term)
        {
            EnsureOpen();
            return new MultiTermEnum(subReaders, starts, term);
        }

        public override int DocFreq(Term t)
        {
            EnsureOpen();
            int total = 0; // sum freqs in segments
            for (int i = 0; i < subReaders.Length; i++)
                total += subReaders[i].DocFreq(t);
            return total;
        }

        public override TermDocs TermDocs()
        {
            EnsureOpen();
            return new MultiTermDocs(subReaders, starts);
        }

        public override TermPositions TermPositions()
        {
            EnsureOpen();
            return new MultiTermPositions(subReaders, starts);
        }

        protected internal override void DoCommit()
        {
            for (int i = 0; i < subReaders.Length; i++)
                subReaders[i].Commit();
        }

        protected internal override void DoClose()
        {
            lock (this)
            {
                // Release each sub-reader according to the ownership recorded in Initialize/Reopen.
                for (int i = 0; i < subReaders.Length; i++)
                {
                    if (decrefOnClose[i])
                    {
                        subReaders[i].DecRef();
                    }
                    else
                    {
                        subReaders[i].Close();
                    }
                }
            }
        }

        public override System.Collections.Generic.ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames)
        {
            EnsureOpen();
            return MultiSegmentReader.GetFieldNames(fieldNames, this.subReaders);
        }

        /// <summary> Checks recursively if all subreaders are up to date. </summary>
        public override bool IsCurrent()
        {
            for (int i = 0; i < subReaders.Length; i++)
            {
                if (!subReaders[i].IsCurrent())
                {
                    return false;
                }
            }

            // all subreaders are up to date
            return true;
        }

        /// <summary>Not implemented.</summary>
        /// <throws>  UnsupportedOperationException </throws>
        public override long GetVersion()
        {
            throw new System.NotSupportedException("MultiReader does not support this method.");
        }

        // for testing
        public /*internal*/ virtual IndexReader[] GetSubReaders()
        {
            return subReaders;
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="BinaryNode.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation.  All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
// <owner current="true" primary="false">[....]</owner>
// <owner current="false" primary="false">[....]</owner>
//------------------------------------------------------------------------------

namespace System.Data {
    using System;
    using System.Diagnostics;
    using System.Collections.Generic;
    using System.Globalization;
    using System.ComponentModel;
    using System.Data.SqlTypes;
    using System.Data.Common;

    /// <summary>
    /// Expression-tree node for a binary operator (see <c>Operators</c>) applied to a
    /// left and right sub-expression; used when evaluating DataColumn expressions.
    /// </summary>
    internal class BinaryNode : ExpressionNode {
        internal int op;            // Operators.* code for this binary operator
        internal ExpressionNode left;
        internal ExpressionNode right;

        internal BinaryNode(DataTable table, int op, ExpressionNode left, ExpressionNode right) : base(table) {
            this.op = op;
            this.left = left;
            this.right = right;
        }

        // Binds this node and both operands against the given table/column list.
        internal override void Bind(DataTable table, List<DataColumn> list) {
            BindTable(table);
            left.Bind(table, list);
            right.Bind(table, list);
        }

        internal override object Eval() {
            return Eval(null, DataRowVersion.Default);
        }

        internal override object Eval(DataRow row, DataRowVersion version) {
            return EvalBinaryOp(op, left, right, row, version, null);
        }

        internal override object Eval(int[] recordNos) {
            return EvalBinaryOp(op, left, right, null, DataRowVersion.Default, recordNos);
        }

        // A binary expression is constant only when both operands are constant.
        // BUGFIX: the return statement had been swallowed into a line comment
        // ("{ // return(...); }"), leaving a non-void method without a return and
        // disabling constant folding in Optimize(); restored the original statement.
        internal override bool IsConstant() {
            return (left.IsConstant() && right.IsConstant());
        }

        internal override bool IsTableConstant() {
            return (left.IsTableConstant() && right.IsTableConstant());
        }

        internal override bool HasLocalAggregate() {
            return (left.HasLocalAggregate() || right.HasLocalAggregate());
        }

        internal override bool HasRemoteAggregate() {
            return (left.HasRemoteAggregate() || right.HasRemoteAggregate());
        }

        // True when either operand depends on the given column.
        internal override bool DependsOn(DataColumn column) {
            if (left.DependsOn(column))
                return true;
            return right.DependsOn(column);
        }

        internal override
ExpressionNode Optimize() {
            left = left.Optimize();

            if (op == Operators.Is) {
                // only 'Is Null' or 'Is Not Null' are valid
                if (right is UnaryNode) {
                    UnaryNode un = (UnaryNode)right;
                    if (un.op != Operators.Not) {
                        throw ExprException.InvalidIsSyntax();
                    }
                    op = Operators.IsNot;
                    right = un.right;
                }
                if (right is ZeroOpNode) {
                    if (((ZeroOpNode)right).op != Operators.Null) {
                        throw ExprException.InvalidIsSyntax();
                    }
                }
                else {
                    throw ExprException.InvalidIsSyntax();
                }
            }
            else {
                right = right.Optimize();
            }

            // Constant-fold: a fully constant subtree collapses to a literal node.
            if (this.IsConstant()) {
                object val = this.Eval();

                if (val == DBNull.Value) {
                    return new ZeroOpNode(Operators.Null);
                }

                if (val is bool) {
                    if ((bool)val)
                        return new ZeroOpNode(Operators.True);
                    else
                        return new ZeroOpNode(Operators.False);
                }
                return new ConstNode(table, ValueType.Object, val, false);
            }
            else
                return this;
        }

        // Raises the standard "cannot perform op on these types" evaluate exception.
        internal void SetTypeMismatchError(int op, Type left, Type right) {
            throw ExprException.TypeMismatchInBinop(op, left, right);
        }

        // Evaluates an operand either against a row/version or against explicit record numbers.
        private static object Eval(ExpressionNode expr, DataRow row, DataRowVersion version, int[] recordNos) {
            if (recordNos == null) {
                return expr.Eval(row, version);
            }
            else {
                return expr.Eval(recordNos);
            }
        }

        internal int BinaryCompare(object vLeft, object vRight, StorageType resultType, int op) {
            return BinaryCompare(vLeft, vRight, resultType, op, null);
        }

        // Compares two operand values after coercing both to the common resultType.
        // Conversion failures are traced (not rethrown) and fall through to a
        // type-mismatch error below the try block.
        internal int BinaryCompare(object vLeft, object vRight, StorageType resultType, int op, CompareInfo comparer) {
            int result = 0;
            try {
                if (!DataStorage.IsSqlType(resultType)) {
                    switch(resultType) {
                        case StorageType.SByte:
                        case StorageType.Int16:
                        case StorageType.Int32:
                        case StorageType.Byte:
                        case StorageType.UInt16:
                            return Convert.ToInt32(vLeft, FormatProvider).CompareTo(Convert.ToInt32(vRight, FormatProvider));
                        case StorageType.Int64:
                        case StorageType.UInt32:
                        case StorageType.UInt64:
                        case StorageType.Decimal:
                            return Decimal.Compare(Convert.ToDecimal(vLeft, FormatProvider), Convert.ToDecimal(vRight, FormatProvider));
                        case StorageType.Char:
                            return Convert.ToInt32(vLeft, FormatProvider).CompareTo(Convert.ToInt32(vRight, FormatProvider));
                        case StorageType.Double:
                            return Convert.ToDouble(vLeft, FormatProvider).CompareTo(Convert.ToDouble(vRight, FormatProvider));
                        case StorageType.Single:
                            return Convert.ToSingle(vLeft, FormatProvider).CompareTo(Convert.ToSingle(vRight, FormatProvider));
                        case StorageType.DateTime:
                            return DateTime.Compare(Convert.ToDateTime(vLeft, FormatProvider), Convert.ToDateTime(vRight, FormatProvider));
                        case StorageType.DateTimeOffset:
                            // DTO can only be compared to DTO, other cases: cast Exception
                            return DateTimeOffset.Compare((DateTimeOffset)vLeft, (DateTimeOffset)vRight);
                        case StorageType.String:
                            return table.Compare(Convert.ToString(vLeft, FormatProvider), Convert.ToString(vRight, FormatProvider), comparer);
                        case StorageType.Guid:
                            return ((Guid)vLeft).CompareTo((Guid) vRight);
                        case StorageType.Boolean:
                            if (op == Operators.EqualTo || op == Operators.NotEqual) {
                                return Convert.ToInt32(DataExpression.ToBoolean(vLeft), FormatProvider) - Convert.ToInt32(DataExpression.ToBoolean(vRight), FormatProvider);
                            }
                            break;
                    }
                }
                else{
                    switch(resultType) {
                        case StorageType.SByte:
                        case StorageType.Int16:
                        case StorageType.Int32:
                        case StorageType.Byte:
                        case StorageType.UInt16:
                        case StorageType.SqlByte:
                        case StorageType.SqlInt16:
                        case StorageType.SqlInt32:
                            return SqlConvert.ConvertToSqlInt32(vLeft).CompareTo(SqlConvert.ConvertToSqlInt32(vRight));
                        case StorageType.Int64:
                        case StorageType.UInt32:
                        case StorageType.SqlInt64:
                            return SqlConvert.ConvertToSqlInt64(vLeft).CompareTo(SqlConvert.ConvertToSqlInt64(vRight));
                        case StorageType.UInt64:
                        case StorageType.SqlDecimal:
                            return SqlConvert.ConvertToSqlDecimal(vLeft).CompareTo(SqlConvert.ConvertToSqlDecimal(vRight));
                        case StorageType.SqlDouble:
                            return SqlConvert.ConvertToSqlDouble(vLeft).CompareTo(SqlConvert.ConvertToSqlDouble(vRight));
                        case StorageType.SqlSingle:
                            return SqlConvert.ConvertToSqlSingle(vLeft).CompareTo(SqlConvert.ConvertToSqlSingle(vRight));
                        case StorageType.SqlString:
                            return table.Compare(vLeft.ToString(), vRight.ToString());
                        case StorageType.SqlGuid:
                            return ((SqlGuid)vLeft).CompareTo(vRight);
                        case StorageType.SqlBoolean:
                            if (op == Operators.EqualTo || op == Operators.NotEqual) {
                                result = 1;
                                // Only SqlBoolean/Boolean pairs are comparable here.
                                if (((vLeft.GetType() == typeof(SqlBoolean)) && ((vRight.GetType() == typeof(SqlBoolean))|| (vRight.GetType() == typeof(Boolean))))||
                                    ((vRight.GetType() == typeof(SqlBoolean)) && ((vLeft.GetType() == typeof(SqlBoolean))|| (vLeft.GetType() == typeof(Boolean))))){
                                    return SqlConvert.ConvertToSqlBoolean(vLeft).CompareTo(SqlConvert.ConvertToSqlBoolean(vRight));
                                }
                            }
                            break;
                        case StorageType.SqlBinary:
                            return SqlConvert.ConvertToSqlBinary(vLeft).CompareTo(SqlConvert.ConvertToSqlBinary(vRight));
                        case StorageType.SqlDateTime:
                            return SqlConvert.ConvertToSqlDateTime(vLeft).CompareTo(SqlConvert.ConvertToSqlDateTime(vRight));
                        case StorageType.SqlMoney:
                            return SqlConvert.ConvertToSqlMoney(vLeft).CompareTo(SqlConvert.ConvertToSqlMoney(vRight));
                    }
                }
            }
            catch (System.ArgumentException e) {
                ExceptionBuilder.TraceExceptionWithoutRethrow(e);
            }
            catch (System.FormatException e) {
                ExceptionBuilder.TraceExceptionWithoutRethrow(e);
            }
            catch (System.InvalidCastException e) {
                ExceptionBuilder.TraceExceptionWithoutRethrow(e);
            }
            catch (System.OverflowException e) {
                ExceptionBuilder.TraceExceptionWithoutRethrow(e);
            }
            catch (System.Data.EvaluateException e) {
                ExceptionBuilder.TraceExceptionWithoutRethrow(e);
            }
            SetTypeMismatchError(op, vLeft.GetType(), vRight.GetType());
            return result;
        }

        // Evaluates a binary operation over the two operand subtrees.
        private object EvalBinaryOp(int op, ExpressionNode left, ExpressionNode right, DataRow row, DataRowVersion version, int[] recordNos) {
            object vLeft;
            object vRight;
            StorageType resultType;

            /*
            special case for OR and AND operators: we don't want to evaluate
            both right and left operands, because we can short-circuit:
                for the OR operator, if one of the operands is true the result is true
                for the AND operator, if one of the operands is false the result is false
            */
            if (op != Operators.Or && op != Operators.And && op !=
Operators.In && op != Operators.Is && op != Operators.IsNot) { vLeft = BinaryNode.Eval(left, row, version, recordNos); vRight = BinaryNode.Eval(right, row, version, recordNos); Type typeofLeft = vLeft.GetType(); Type typeofRight = vRight.GetType(); StorageType leftStorage = DataStorage.GetStorageType(typeofLeft); StorageType rightStorage = DataStorage.GetStorageType(typeofRight); bool leftIsSqlType = DataStorage.IsSqlType(leftStorage); bool rightIsSqlType = DataStorage.IsSqlType(rightStorage); // special case of handling NULLS, currently only OR operator can work with NULLS if (leftIsSqlType && DataStorage.IsObjectSqlNull(vLeft)) { return vLeft; } else if (rightIsSqlType && DataStorage.IsObjectSqlNull(vRight)) { return vRight; } else if ((vLeft == DBNull.Value)||(vRight == DBNull.Value)) { return DBNull.Value; } if (leftIsSqlType || rightIsSqlType) { resultType = ResultSqlType(leftStorage, rightStorage, (left is ConstNode), (right is ConstNode), op); } else { resultType = ResultType(leftStorage, rightStorage, (left is ConstNode), (right is ConstNode), op); } if (StorageType.Empty == resultType) { SetTypeMismatchError(op, typeofLeft, typeofRight); } } else { vLeft = vRight = DBNull.Value; resultType = StorageType.Empty; // shouldnt we make it boolean? 
} object value = DBNull.Value; bool typeMismatch = false; try { switch (op) { case Operators.Plus: switch(resultType) { case StorageType.Byte:{ value = Convert.ToByte((Convert.ToByte(vLeft, FormatProvider) + Convert.ToByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.SByte:{ value = Convert.ToSByte((Convert.ToSByte(vLeft, FormatProvider) + Convert.ToSByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int16:{ value = Convert.ToInt16((Convert.ToInt16(vLeft, FormatProvider) + Convert.ToInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.UInt16:{ value = Convert.ToUInt16((Convert.ToUInt16(vLeft, FormatProvider) + Convert.ToUInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int32: { checked {value = Convert.ToInt32(vLeft, FormatProvider) + Convert.ToInt32(vRight, FormatProvider);} break;} case StorageType.UInt32: { checked {value = Convert.ToUInt32(vLeft, FormatProvider) + Convert.ToUInt32(vRight, FormatProvider);} break;} case StorageType.UInt64: { checked {value = Convert.ToUInt64(vLeft, FormatProvider) + Convert.ToUInt64(vRight, FormatProvider);} break;} case StorageType.Int64:{ checked {value = Convert.ToInt64(vLeft, FormatProvider) + Convert.ToInt64(vRight, FormatProvider);} break;} case StorageType.Decimal:{ checked {value = Convert.ToDecimal(vLeft, FormatProvider) + Convert.ToDecimal(vRight, FormatProvider);} break;} case StorageType.Single:{ checked {value = Convert.ToSingle(vLeft, FormatProvider) + Convert.ToSingle(vRight, FormatProvider);} break;} case StorageType.Double:{ checked {value = Convert.ToDouble(vLeft, FormatProvider) + Convert.ToDouble(vRight, FormatProvider);} break;} case StorageType.String: case StorageType.Char:{ value = Convert.ToString(vLeft, FormatProvider) + Convert.ToString(vRight, FormatProvider); break;} case StorageType.DateTime:{ // one of the operands should be a DateTime, and an other a TimeSpan if (vLeft is TimeSpan && vRight is DateTime) { 
value = (DateTime)vRight + (TimeSpan)vLeft; } else if (vLeft is DateTime && vRight is TimeSpan) { value = (DateTime)vLeft + (TimeSpan)vRight; } else { typeMismatch = true; } break;} case StorageType.TimeSpan:{ value = (TimeSpan)vLeft + (TimeSpan)vRight; break;} case StorageType.SqlInt16:{ value = (SqlConvert.ConvertToSqlInt16(vLeft) + SqlConvert.ConvertToSqlInt16(vRight)); break;} case StorageType.SqlInt32:{ value = ( SqlConvert.ConvertToSqlInt32(vLeft) + SqlConvert.ConvertToSqlInt32(vRight)); break;} case StorageType.SqlInt64:{ value = (SqlConvert.ConvertToSqlInt64(vLeft) + SqlConvert.ConvertToSqlInt64(vRight)); break;} case StorageType.SqlDouble:{ value = (SqlConvert.ConvertToSqlDouble(vLeft) + SqlConvert.ConvertToSqlDouble(vRight)); break;} case StorageType.SqlSingle:{ value = (SqlConvert.ConvertToSqlSingle(vLeft)+ SqlConvert.ConvertToSqlSingle(vRight)); break;} case StorageType.SqlDecimal:{ value = (SqlConvert.ConvertToSqlDecimal(vLeft) + SqlConvert.ConvertToSqlDecimal(vRight)); break;} case StorageType.SqlMoney:{ value = (SqlConvert.ConvertToSqlMoney(vLeft) + SqlConvert.ConvertToSqlMoney(vRight)); break;} case StorageType.SqlByte:{ value = (SqlConvert.ConvertToSqlByte(vLeft) + SqlConvert.ConvertToSqlByte(vRight)); break;} case StorageType.SqlString:{ value = (SqlConvert.ConvertToSqlString(vLeft) + SqlConvert.ConvertToSqlString(vRight)); break;} case StorageType.SqlDateTime:{ if (vLeft is TimeSpan && vRight is SqlDateTime) { SqlDateTime rValue = (SqlDateTime)SqlConvert.ConvertToSqlDateTime(vRight); value = (SqlDateTime)SqlConvert.ConvertToSqlDateTime((DateTime)rValue.Value + (TimeSpan)vLeft); } else if (vLeft is SqlDateTime && vRight is TimeSpan) { SqlDateTime lValue = (SqlDateTime)SqlConvert.ConvertToSqlDateTime(vLeft); value = (SqlDateTime)SqlConvert.ConvertToSqlDateTime((DateTime)lValue.Value + (TimeSpan)vRight); } else { typeMismatch = true; } break;} default:{ typeMismatch = true; break;} } break; // Operators.Plus case Operators.Minus: switch(resultType) 
{ case StorageType.Byte: { value = Convert.ToByte((Convert.ToByte(vLeft, FormatProvider) - Convert.ToByte(vRight, FormatProvider)), FormatProvider); break; } case StorageType.SqlByte: { value = (SqlConvert.ConvertToSqlByte(vLeft) - SqlConvert.ConvertToSqlByte(vRight)); break;} case StorageType.SByte:{ value = Convert.ToSByte((Convert.ToSByte(vLeft, FormatProvider) - Convert.ToSByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int16:{ value = Convert.ToInt16((Convert.ToInt16(vLeft, FormatProvider) - Convert.ToInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.SqlInt16:{ value = (SqlConvert.ConvertToSqlInt16(vLeft) - SqlConvert.ConvertToSqlInt16(vRight)); break;} case StorageType.UInt16:{ value = Convert.ToUInt16((Convert.ToUInt16(vLeft, FormatProvider) - Convert.ToUInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int32:{ checked {value = Convert.ToInt32(vLeft, FormatProvider) - Convert.ToInt32(vRight, FormatProvider);} break;} case StorageType.SqlInt32:{ value = (SqlConvert.ConvertToSqlInt32(vLeft) - SqlConvert.ConvertToSqlInt32(vRight)); break;} case StorageType.UInt32:{ checked {value = Convert.ToUInt32(vLeft, FormatProvider) - Convert.ToUInt32(vRight, FormatProvider);} break;} case StorageType.Int64:{ checked {value = Convert.ToInt64(vLeft, FormatProvider) - Convert.ToInt64(vRight, FormatProvider);} break;} case StorageType.SqlInt64:{ value = (SqlConvert.ConvertToSqlInt64(vLeft) - SqlConvert.ConvertToSqlInt64(vRight)); break;} case StorageType.UInt64:{ checked {value = Convert.ToUInt64(vLeft, FormatProvider) - Convert.ToUInt64(vRight, FormatProvider);} break;} case StorageType.Decimal:{ checked {value = Convert.ToDecimal(vLeft, FormatProvider) - Convert.ToDecimal(vRight, FormatProvider);} break;} case StorageType.SqlDecimal:{ value = (SqlConvert.ConvertToSqlDecimal(vLeft) - SqlConvert.ConvertToSqlDecimal(vRight)); break;} case StorageType.Single:{ checked {value = Convert.ToSingle(vLeft, 
FormatProvider) - Convert.ToSingle(vRight, FormatProvider);} break;} case StorageType.SqlSingle:{ value = (SqlConvert.ConvertToSqlSingle(vLeft) - SqlConvert.ConvertToSqlSingle(vRight)); break;} case StorageType.Double:{ checked {value = Convert.ToDouble(vLeft, FormatProvider) - Convert.ToDouble(vRight, FormatProvider);} break;} case StorageType.SqlDouble:{ value = (SqlConvert.ConvertToSqlDouble(vLeft) - SqlConvert.ConvertToSqlDouble(vRight)); break;} case StorageType.SqlMoney:{ value = (SqlConvert.ConvertToSqlMoney(vLeft) - SqlConvert.ConvertToSqlMoney(vRight)); break;} case StorageType.DateTime:{ value = (DateTime)vLeft - (TimeSpan)vRight; break;} case StorageType.TimeSpan:{ if (vLeft is DateTime) { value = (DateTime)vLeft - (DateTime)vRight; } else value = (TimeSpan)vLeft - (TimeSpan)vRight; break;} case StorageType.SqlDateTime:{ if (vLeft is TimeSpan && vRight is SqlDateTime) { SqlDateTime rValue = (SqlDateTime)SqlConvert.ConvertToSqlDateTime(vRight); value = (SqlDateTime)SqlConvert.ConvertToSqlDateTime((DateTime)rValue.Value - (TimeSpan)vLeft); } else if (vLeft is SqlDateTime && vRight is TimeSpan) { SqlDateTime lValue = (SqlDateTime)SqlConvert.ConvertToSqlDateTime(vLeft); value = (SqlDateTime)SqlConvert.ConvertToSqlDateTime((DateTime)lValue.Value - (TimeSpan)vRight); } else { typeMismatch = true; } break;} default:{ typeMismatch = true; break;} } break; // Operators.Minus case Operators.Multiply: switch(resultType) { case StorageType.Byte:{ value = Convert.ToByte((Convert.ToByte(vLeft, FormatProvider) * Convert.ToByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.SqlByte:{ value = (SqlConvert.ConvertToSqlByte(vLeft) * SqlConvert.ConvertToSqlByte(vRight)); break;} case StorageType.SByte:{ value = Convert.ToSByte((Convert.ToSByte(vLeft, FormatProvider) * Convert.ToSByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int16:{ value = Convert.ToInt16((Convert.ToInt16(vLeft, FormatProvider) * Convert.ToInt16(vRight, 
FormatProvider)), FormatProvider); break;} case StorageType.SqlInt16:{ value = (SqlConvert.ConvertToSqlInt16(vLeft) * SqlConvert.ConvertToSqlInt16(vRight)); break;} case StorageType.UInt16:{ value = Convert.ToUInt16((Convert.ToUInt16(vLeft, FormatProvider) * Convert.ToUInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int32:{ checked {value = Convert.ToInt32(vLeft, FormatProvider) * Convert.ToInt32(vRight, FormatProvider);} break;} case StorageType.SqlInt32:{ value = (SqlConvert.ConvertToSqlInt32(vLeft) * SqlConvert.ConvertToSqlInt32(vRight)); break;} case StorageType.UInt32:{ checked {value = Convert.ToUInt32(vLeft, FormatProvider) * Convert.ToUInt32(vRight, FormatProvider);} break;} case StorageType.Int64:{ checked {value = Convert.ToInt64(vLeft, FormatProvider) * Convert.ToInt64(vRight, FormatProvider);} break;} case StorageType.SqlInt64:{ value = (SqlConvert.ConvertToSqlInt64(vLeft) * SqlConvert.ConvertToSqlInt64(vRight)); break;} case StorageType.UInt64:{ checked {value = Convert.ToUInt64(vLeft, FormatProvider) * Convert.ToUInt64(vRight, FormatProvider);} break;} case StorageType.Decimal:{ checked {value = Convert.ToDecimal(vLeft, FormatProvider) * Convert.ToDecimal(vRight, FormatProvider);} break;} case StorageType.SqlDecimal:{ value = (SqlConvert.ConvertToSqlDecimal(vLeft) * SqlConvert.ConvertToSqlDecimal(vRight)); break;} case StorageType.Single:{ checked {value = Convert.ToSingle(vLeft, FormatProvider) * Convert.ToSingle(vRight, FormatProvider);} break;} case StorageType.SqlSingle:{ value = ( SqlConvert.ConvertToSqlSingle(vLeft) * SqlConvert.ConvertToSqlSingle(vRight)); break;} case StorageType.SqlMoney:{ value = (SqlConvert.ConvertToSqlMoney(vLeft) * SqlConvert.ConvertToSqlMoney(vRight)); break;} case StorageType.Double:{ checked {value = Convert.ToDouble(vLeft, FormatProvider) * Convert.ToDouble(vRight, FormatProvider);} break;} case StorageType.SqlDouble:{ value = (SqlConvert.ConvertToSqlDouble(vLeft) * 
SqlConvert.ConvertToSqlDouble(vRight)); break;} default:{ typeMismatch = true; break;} } break; // Operators.Multiply case Operators.Divide: switch(resultType) { case StorageType.Byte:{ value = Convert.ToByte((Convert.ToByte(vLeft, FormatProvider) / Convert.ToByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.SqlByte:{ value = (SqlConvert.ConvertToSqlByte(vLeft) / SqlConvert.ConvertToSqlByte(vRight)); break;} case StorageType.SByte:{ value = Convert.ToSByte((Convert.ToSByte(vLeft, FormatProvider) / Convert.ToSByte(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int16:{ value = Convert.ToInt16((Convert.ToInt16(vLeft, FormatProvider) / Convert.ToInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.SqlInt16:{ value = (SqlConvert.ConvertToSqlInt16(vLeft) / SqlConvert.ConvertToSqlInt16(vRight)); break;} case StorageType.UInt16:{ value = Convert.ToUInt16((Convert.ToUInt16(vLeft, FormatProvider) / Convert.ToUInt16(vRight, FormatProvider)), FormatProvider); break;} case StorageType.Int32:{ checked {value = Convert.ToInt32(vLeft, FormatProvider) / Convert.ToInt32(vRight, FormatProvider);} break;} case StorageType.SqlInt32:{ value = (SqlConvert.ConvertToSqlInt32(vLeft) / SqlConvert.ConvertToSqlInt32(vRight)); break;} case StorageType.UInt32:{ checked {value = Convert.ToUInt32(vLeft, FormatProvider) / Convert.ToUInt32(vRight, FormatProvider);} break;} case StorageType.UInt64:{ checked {value = Convert.ToUInt64(vLeft, FormatProvider) / Convert.ToUInt64(vRight, FormatProvider);} break;} case StorageType.Int64:{ checked {value = Convert.ToInt64(vLeft, FormatProvider) / Convert.ToInt64(vRight, FormatProvider);} break;} case StorageType.SqlInt64:{ value = (SqlConvert.ConvertToSqlInt64(vLeft) / SqlConvert.ConvertToSqlInt64(vRight)); break;} case StorageType.Decimal:{ checked {value = Convert.ToDecimal(vLeft, FormatProvider) / Convert.ToDecimal(vRight, FormatProvider);} break;} case StorageType.SqlDecimal:{ value = 
(SqlConvert.ConvertToSqlDecimal(vLeft) / SqlConvert.ConvertToSqlDecimal(vRight)); break;} case StorageType.Single:{ checked {value = Convert.ToSingle(vLeft, FormatProvider) / Convert.ToSingle(vRight, FormatProvider);} break;} case StorageType.SqlSingle:{ value = ( SqlConvert.ConvertToSqlSingle(vLeft) / SqlConvert.ConvertToSqlSingle(vRight)); break;} case StorageType.SqlMoney:{ value = (SqlConvert.ConvertToSqlMoney(vLeft) / SqlConvert.ConvertToSqlMoney(vRight)); break;} case StorageType.Double:{ Double b = Convert.ToDouble(vRight, FormatProvider); checked {value = Convert.ToDouble(vLeft, FormatProvider) / b;} break;} case StorageType.SqlDouble:{ value = (SqlConvert.ConvertToSqlDouble(vLeft) / SqlConvert.ConvertToSqlDouble(vRight)); break;} default:{ typeMismatch = true; break;} } break; // Operators.Divide case Operators.EqualTo: if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft)) || (vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight))) return DBNull.Value; return(0 == BinaryCompare (vLeft, vRight, resultType, Operators.EqualTo)); case Operators.GreaterThen: if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft)) || (vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight))) return DBNull.Value; return(0 < BinaryCompare (vLeft, vRight, resultType, op)); case Operators.LessThen: if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft)) || (vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight))) return DBNull.Value; return(0 > BinaryCompare (vLeft, vRight, resultType, op)); case Operators.GreaterOrEqual: if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft)) || (vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight))) return DBNull.Value; return(0 <= BinaryCompare (vLeft, vRight, resultType, op)); case Operators.LessOrEqual: if 
(((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))) || ((vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight)))) return DBNull.Value; return(0 >= BinaryCompare (vLeft, vRight, resultType, op)); case Operators.NotEqual: if (((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))) || ((vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight)))) return DBNull.Value; return(0 != BinaryCompare (vLeft, vRight, resultType, op)); case Operators.Is: vLeft = BinaryNode.Eval(left, row, version, recordNos); if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))){ return true; } return false; case Operators.IsNot: vLeft = BinaryNode.Eval(left, row, version, recordNos); if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))){ return false; } return true; case Operators.And: /* special case evaluating of the AND operator: we don't want to evaluate both right and left operands, because we can shortcut : If one of the operands is flase the result is false */ vLeft = BinaryNode.Eval(left, row, version, recordNos); if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))) return DBNull.Value; if ((!(vLeft is bool)) && (!(vLeft is SqlBoolean))){ vRight = BinaryNode.Eval(right, row, version, recordNos); typeMismatch = true; break; } if (vLeft is bool){ if ((bool)vLeft == false){ value = false; break; } } else{ if (((SqlBoolean) vLeft).IsFalse){ value = false; break; } } vRight = BinaryNode.Eval(right, row, version, recordNos); if ((vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight ))) return DBNull.Value; if ((!(vRight is bool)) && (!(vRight is SqlBoolean))){ typeMismatch = true; break; } if (vRight is bool){ value = (bool)vRight; break; } else{ value = ((SqlBoolean) vRight).IsTrue; } break; case Operators.Or: /* special case evaluating the OR 
operator: we don't want to evaluate both right and left operands, because we can shortcut : If one of the operands is true the result is true */ vLeft = BinaryNode.Eval(left, row, version, recordNos); if ((vLeft != DBNull.Value) && (!DataStorage.IsObjectSqlNull(vLeft))) { if ((!(vLeft is bool)) && (!(vLeft is SqlBoolean))) { vRight = BinaryNode.Eval(right, row, version, recordNos); typeMismatch = true; break; } if ((bool)vLeft == true) { value = true; break; } } vRight = BinaryNode.Eval(right, row, version, recordNos); if ((vRight == DBNull.Value)||(DataStorage.IsObjectSqlNull(vRight))) return vLeft; if ((vLeft == DBNull.Value)||(DataStorage.IsObjectSqlNull(vLeft))) return vRight; if ((!(vRight is bool)) && (!(vRight is SqlBoolean))) { typeMismatch = true; break; } value = (vRight is bool) ? ((bool)vRight) : (((SqlBoolean)vRight).IsTrue); break; /* for M3, use original code , in below, and make sure to have two different code path; increases perf vLeft = BinaryNode.Eval(left, row, version, recordNos); if (vLeft != DBNull.Value) { if (!(vLeft is bool)) { vRight = BinaryNode.Eval(right, row, version, recordNos); typeMismatch = true; break; } if ((bool)vLeft == true) { value = true; break; } } vRight = BinaryNode.Eval(right, row, version, recordNos); if (vRight == DBNull.Value) return vLeft; if (vLeft == DBNull.Value) return vRight; if (!(vRight is bool)) { typeMismatch = true; break; } value = (bool)vRight; break; */ case Operators.Modulo: if (ExpressionNode.IsIntegerSql(resultType)) { if (resultType == StorageType.UInt64) { value = Convert.ToUInt64(vLeft, FormatProvider) % Convert.ToUInt64(vRight, FormatProvider); } else if (DataStorage.IsSqlType(resultType)) { SqlInt64 res = (SqlConvert.ConvertToSqlInt64(vLeft) % SqlConvert.ConvertToSqlInt64(vRight)); if (resultType == StorageType.SqlInt32){ value = (SqlInt32) res.ToSqlInt32(); } else if (resultType == StorageType.SqlInt16){ value = (SqlInt16) res.ToSqlInt16(); } else if (resultType == StorageType.SqlByte){ value = 
(SqlByte) res.ToSqlByte(); } else{ value = (SqlInt64) res; } } else { value = Convert.ToInt64(vLeft, FormatProvider) % Convert.ToInt64(vRight, FormatProvider); value = Convert.ChangeType(value, DataStorage.GetTypeStorage(resultType), FormatProvider); } } else { typeMismatch = true; } break; case Operators.In: /* special case evaluating of the IN operator: the right have to be IN function node */ if (!(right is FunctionNode)) { // this is more like an Assert: should never happens, so we do not care about "nice" Exseptions throw ExprException.InWithoutParentheses(); } vLeft = BinaryNode.Eval(left, row, version, recordNos); if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))) return DBNull.Value; /* validate IN parameters : must all be constant expressions */ value = false; FunctionNode into = (FunctionNode)right; for (int i = 0; i < into.argumentCount; i++) { vRight = into.arguments[i].Eval(); if ((vRight == DBNull.Value)||(right.IsSqlColumn && DataStorage.IsObjectSqlNull(vRight))) continue; Debug.Assert((!DataStorage.IsObjectNull(vLeft))&& (!DataStorage.IsObjectNull(vRight)), "Imposible.."); resultType = DataStorage.GetStorageType(vLeft.GetType()); if (0 == BinaryCompare(vLeft, vRight, resultType, Operators.EqualTo)) { value = true; break; } } break; default: throw ExprException.UnsupportedOperator(op); } } catch (OverflowException) { throw ExprException.Overflow(DataStorage.GetTypeStorage(resultType)); } if (typeMismatch) { SetTypeMismatchError(op, vLeft.GetType(), vRight.GetType()); } return value; } // Data type precedence rules specify which data type is converted to the other. // The data type with the lower precedence is converted to the data type with the higher precedence. // If the conversion is not a supported implicit conversion, an error is returned. // When both operand expressions have the same data type, the result of the operation has that data type. 
// NOTE(review): the precedence values below drive binary result-type resolution —
// ResultType/ResultSqlType pick the operand type with the larger precedence (via Math.Max).
// GetPrecedence/GetPrecedenceType map StorageType <-> DataTypePrecedence in both directions,
// so the two switches must stay in sync with the enum.
// This is the precedence order for the DataSet numeric data types: private enum DataTypePrecedence { SqlDateTime = 25, DateTimeOffset = 24, DateTime = 23, TimeSpan = 20, SqlDouble = 19, Double = 18, SqlSingle = 17, Single = 16, SqlDecimal = 15, Decimal = 14, SqlMoney = 13, UInt64 = 12, SqlInt64 = 11, Int64 = 10, UInt32 = 9, SqlInt32 = 8, Int32 = 7, UInt16 = 6, SqlInt16 = 5, Int16 = 4, Byte = 3, SqlByte = 2, SByte = 1, Error = 0, SqlBoolean = -1, Boolean = -2, SqlGuid = -3, SqlString = -4, String = -5, SqlXml = -6, SqlChars = -7, Char = -8, SqlBytes = -9, SqlBinary = -10, } private DataTypePrecedence GetPrecedence(StorageType storageType) { switch(storageType) { case StorageType.Boolean: return DataTypePrecedence.Boolean; case StorageType.Char: return DataTypePrecedence.Char; case StorageType.SByte: return DataTypePrecedence.SByte; case StorageType.Byte: return DataTypePrecedence.Byte; case StorageType.Int16: return DataTypePrecedence.Int16; case StorageType.UInt16: return DataTypePrecedence.UInt16; case StorageType.Int32: return DataTypePrecedence.Int32; case StorageType.UInt32: return DataTypePrecedence.UInt32; case StorageType.Int64: return DataTypePrecedence.Int64; case StorageType.UInt64: return DataTypePrecedence.UInt64; case StorageType.Single: return DataTypePrecedence.Single; case StorageType.Double: return DataTypePrecedence.Double; case StorageType.Decimal: return DataTypePrecedence.Decimal; case StorageType.DateTime: return DataTypePrecedence.DateTime; case StorageType.DateTimeOffset: return DataTypePrecedence.DateTimeOffset; case StorageType.TimeSpan: return DataTypePrecedence.TimeSpan; case StorageType.String: return DataTypePrecedence.String; case StorageType.SqlBinary: return DataTypePrecedence.SqlBinary; case StorageType.SqlBoolean: return DataTypePrecedence.SqlBoolean; case StorageType.SqlByte: return DataTypePrecedence.SqlByte; case StorageType.SqlBytes: return DataTypePrecedence.SqlBytes; case StorageType.SqlChars: return
DataTypePrecedence.SqlChars; case StorageType.SqlDateTime: return DataTypePrecedence.SqlDateTime; case StorageType.SqlDecimal: return DataTypePrecedence.SqlDecimal; case StorageType.SqlDouble: return DataTypePrecedence.SqlDouble; case StorageType.SqlGuid: return DataTypePrecedence.SqlGuid; case StorageType.SqlInt16: return DataTypePrecedence.SqlInt16; case StorageType.SqlInt32: return DataTypePrecedence.SqlInt32; case StorageType.SqlInt64: return DataTypePrecedence.SqlInt64; case StorageType.SqlMoney: return DataTypePrecedence.SqlMoney; case StorageType.SqlSingle: return DataTypePrecedence.SqlSingle; case StorageType.SqlString: return DataTypePrecedence.SqlString; // case StorageType.SqlXml: return DataTypePrecedence.SqlXml; case StorageType.Empty: case StorageType.Object: case StorageType.DBNull: default: return DataTypePrecedence.Error; } } private static StorageType GetPrecedenceType(DataTypePrecedence code) { switch (code) { case DataTypePrecedence.Error: return StorageType.Empty; case DataTypePrecedence.SByte: return StorageType.SByte; case DataTypePrecedence.Byte: return StorageType.Byte; case DataTypePrecedence.Int16: return StorageType.Int16; case DataTypePrecedence.UInt16: return StorageType.UInt16; case DataTypePrecedence.Int32: return StorageType.Int32; case DataTypePrecedence.UInt32: return StorageType.UInt32; case DataTypePrecedence.Int64: return StorageType.Int64; case DataTypePrecedence.UInt64: return StorageType.UInt64; case DataTypePrecedence.Decimal: return StorageType.Decimal; case DataTypePrecedence.Single: return StorageType.Single; case DataTypePrecedence.Double: return StorageType.Double; case DataTypePrecedence.Boolean: return StorageType.Boolean; case DataTypePrecedence.String: return StorageType.String; case DataTypePrecedence.Char: return StorageType.Char; case DataTypePrecedence.DateTimeOffset: return StorageType.DateTimeOffset; case DataTypePrecedence.DateTime: return StorageType.DateTime; case DataTypePrecedence.TimeSpan: return
StorageType.TimeSpan; case DataTypePrecedence.SqlDateTime: return StorageType.SqlDateTime; case DataTypePrecedence.SqlDouble: return StorageType.SqlDouble; case DataTypePrecedence.SqlSingle: return StorageType.SqlSingle; case DataTypePrecedence.SqlDecimal: return StorageType.SqlDecimal; case DataTypePrecedence.SqlInt64: return StorageType.SqlInt64; case DataTypePrecedence.SqlInt32: return StorageType.SqlInt32; case DataTypePrecedence.SqlInt16: return StorageType.SqlInt16; case DataTypePrecedence.SqlByte: return StorageType.SqlByte; case DataTypePrecedence.SqlBoolean: return StorageType.SqlBoolean; case DataTypePrecedence.SqlString: return StorageType.SqlString; case DataTypePrecedence.SqlGuid: return StorageType.SqlGuid; case DataTypePrecedence.SqlBinary: return StorageType.SqlBinary; case DataTypePrecedence.SqlMoney: return StorageType.SqlMoney; default: Debug.Assert(false, "Invalid (unmapped) precedence " + code.ToString()); goto case DataTypePrecedence.Error; } } private bool IsMixed(StorageType left, StorageType right) { return ((IsSigned(left) && IsUnsigned(right)) || (IsUnsigned(left) && IsSigned(right))); } private bool IsMixedSql(StorageType left, StorageType right) { return ((IsSignedSql(left) && IsUnsignedSql(right)) || (IsUnsignedSql(left) && IsSignedSql(right))); } internal StorageType ResultType(StorageType left, StorageType right, bool lc, bool rc, int op) { if ((left == StorageType.Guid) && (right == StorageType.Guid) && Operators.IsRelational(op)) return left; if ((left == StorageType.String) && (right == StorageType.Guid) && Operators.IsRelational(op)) return left; if ((left == StorageType.Guid) && (right == StorageType.String) && Operators.IsRelational(op)) return right; int leftPrecedence = (int)GetPrecedence(left); if (leftPrecedence == (int)DataTypePrecedence.Error) { return StorageType.Empty; } int rightPrecedence = (int)GetPrecedence(right); if (rightPrecedence == (int)DataTypePrecedence.Error) { return StorageType.Empty; } if
// Logical operators below require Boolean operands; DateTimeOffset is only allowed
// in relational DTO-vs-DTO comparisons; '+' with a String operand concatenates.
(Operators.IsLogical(op)){ if (left == StorageType.Boolean && right == StorageType.Boolean) return StorageType.Boolean; else return StorageType.Empty; } if ((left == StorageType.DateTimeOffset) ||(right == StorageType.DateTimeOffset)) { // Rules to handle DateTimeOffset: // we only allow Relational operations to operate only on DTO vs DTO // all other operations: "exception" if (Operators.IsRelational(op) && left == StorageType.DateTimeOffset && right == StorageType.DateTimeOffset) return StorageType.DateTimeOffset; return StorageType.Empty; } if ((op == Operators.Plus) && ((left == StorageType.String) || (right == StorageType.String))) return StorageType.String; DataTypePrecedence higherPrec = (DataTypePrecedence)Math.Max(leftPrecedence, rightPrecedence); StorageType result = GetPrecedenceType(higherPrec); if (Operators.IsArithmetical(op)) { if (result != StorageType.String && result != StorageType.Char) { if (!IsNumeric(left)) return StorageType.Empty; if (!IsNumeric(right)) return StorageType.Empty; } } // if the operation is a division the result should be at least a double if ((op == Operators.Divide) && IsInteger(result)) { return StorageType.Double; } if (IsMixed(left, right)) { // we are dealing with one signed and one unsigned type so // try to see if one of them is a ConstNode if (lc && (!rc)) { return right; } else if ((!lc) && rc) { return left; } if (IsUnsigned(result)) { if (higherPrec < DataTypePrecedence.UInt64) // left and right are mixed integers but with the same length // so promote to the next signed type result = GetPrecedenceType(higherPrec+1); else throw ExprException.AmbiguousBinop(op, DataStorage.GetTypeStorage(left), DataStorage.GetTypeStorage(right)); } } return result; } internal StorageType ResultSqlType(StorageType left, StorageType right, bool lc, bool rc, int op) { int leftPrecedence = (int)GetPrecedence(left); if (leftPrecedence == (int)DataTypePrecedence.Error) { return StorageType.Empty; } int rightPrecedence =
(int)GetPrecedence(right); if (rightPrecedence == (int)DataTypePrecedence.Error) { return StorageType.Empty; } if (Operators.IsLogical(op)){ if ((left != StorageType.Boolean && left != StorageType.SqlBoolean) || (right != StorageType.Boolean && right != StorageType.SqlBoolean)) return StorageType.Empty; if (left == StorageType.Boolean && right == StorageType.Boolean) return StorageType.Boolean; return StorageType.SqlBoolean; } if (op == Operators.Plus){ if((left == StorageType.SqlString) ||(right == StorageType.SqlString)) return StorageType.SqlString; if ((left == StorageType.String) || (right == StorageType.String)) return StorageType.String; } //SqlBinary is operable just with SqlBinary if ((left == StorageType.SqlBinary && right != StorageType.SqlBinary) ||(left != StorageType.SqlBinary && right == StorageType.SqlBinary)) return StorageType.Empty; //SqlGuid is operable just with SqlGuid if((left == StorageType.SqlGuid && right != StorageType.SqlGuid) ||(left != StorageType.SqlGuid && right == StorageType.SqlGuid)) return StorageType.Empty; if ((leftPrecedence > (int)DataTypePrecedence.SqlDouble && rightPrecedence <(int) DataTypePrecedence.TimeSpan)){ return StorageType.Empty; } if ((leftPrecedence < (int)DataTypePrecedence.TimeSpan && rightPrecedence >(int) DataTypePrecedence.SqlDouble)){ return StorageType.Empty; } if (leftPrecedence > (int) DataTypePrecedence.SqlDouble){ if (op == Operators.Plus || op == Operators.Minus){ if (left == StorageType.TimeSpan) return right; if (right == StorageType.TimeSpan) return left; return StorageType.Empty; // for plus or minus operations for time types, one of them MUST be time span } if (!Operators.IsRelational(op)) return StorageType.Empty; // we just have relational operations amoung time types return left; } // time types finished // continue with numerical types, numbers DataTypePrecedence higherPrec = (DataTypePrecedence)Math.Max(leftPrecedence, rightPrecedence); StorageType result = GetPrecedenceType(higherPrec); //
if we have at least one Sql type, the intermediate result should be Sql type result = GetPrecedenceType((DataTypePrecedence)SqlResultType((int)higherPrec)); if (Operators.IsArithmetical(op)) { if (result != StorageType.String && result != StorageType.Char && result != StorageType.SqlString) { if (!IsNumericSql(left)) return StorageType.Empty; if (!IsNumericSql(right)) return StorageType.Empty; } } // if the operation is a division the result should be at least a double if ((op == Operators.Divide) && IsIntegerSql(result)) { return StorageType.SqlDouble; } if (result == StorageType.SqlMoney){ if ((left != StorageType.SqlMoney) && (right != StorageType.SqlMoney)) result = StorageType.SqlDecimal; } if (IsMixedSql(left, right)) { // we are dealing with one signed and one unsigned type so // try to see if one of them is a ConstNode if (IsUnsignedSql(result)) { if (higherPrec < DataTypePrecedence.UInt64) // left and right are mixed integers but with the same length // so promote to the next signed type result = GetPrecedenceType(higherPrec+1); else throw ExprException.AmbiguousBinop(op, DataStorage.GetTypeStorage(left), DataStorage.GetTypeStorage(right)); } } return result; } private int SqlResultType(int typeCode){ switch (typeCode){ case 23: return 24; case 20: return 21; case 18: return 19; case 16: return 17; case 14: return 15; case 12: return 13; case 9 : case 10: return 11; case 6 : case 7 : return 8; case 3 : case 4 : return 5; case 1 : return 2; case -2: return -1; case -5: return -4; case -8: return -7; default : return typeCode; } } } internal sealed class LikeNode : BinaryNode { // like kinds internal const int match_left = 1; // <STR>* internal const int match_right = 2; // *<STR> internal const int match_middle = 3; // *<STR>* internal const int match_exact = 4; // <STR> internal const int match_all = 5; // * int kind; string pattern = null; internal LikeNode(DataTable table, int op, ExpressionNode left, ExpressionNode right) : base (table, op, left, right)
// LikeNode evaluates SQL LIKE: AnalyzePattern reduces the pattern to a constant
// substring plus one of the five match kinds above; 'pattern' caches the substring
// when the right operand is constant, so the pattern is analyzed only once.
{ } internal override object Eval(DataRow row, DataRowVersion version) { object vRight; // object vLeft = left.Eval(row, version); string substring; if ((vLeft == DBNull.Value)||(left.IsSqlColumn && DataStorage.IsObjectSqlNull(vLeft))) return DBNull.Value; if (pattern == null) { vRight = right.Eval(row, version); if (!(vRight is string) && !(vRight is SqlString)) { SetTypeMismatchError(op, vLeft.GetType(), vRight.GetType()); } if (vRight == DBNull.Value || DataStorage.IsObjectSqlNull(vRight)) return DBNull.Value; string rightStr = (string) SqlConvert.ChangeType2(vRight, StorageType.String, typeof(string), FormatProvider); // need to convert like pattern to a string // Parce the original pattern, and get the constant part of it.. substring = AnalyzePattern(rightStr); if (right.IsConstant()) pattern = substring; } else { substring = pattern; } if (!(vLeft is string) && !(vLeft is SqlString)) { SetTypeMismatchError(op, vLeft.GetType(), typeof(string)); } // WhiteSpace Chars Include : 0x9, 0xA, 0xB, 0xC, 0xD, 0x20, 0xA0, 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, 0x200B, 0x3000, and 0xFEFF.
// Only plain space (0x20) and ideographic space (0x3000) are actually trimmed below
// before the comparison is delegated to the table's comparison helpers.
char[] trimChars = new char[2] {(char)0x20, (char)0x3000}; string tempStr; if (vLeft is SqlString) tempStr = ((SqlString)vLeft).Value; else tempStr = (string)vLeft; string s1 = (tempStr).TrimEnd(trimChars); switch (kind) { case match_all: return true; case match_exact: return(0 == table.Compare(s1, substring)); case match_middle: return(0 <= table.IndexOf(s1, substring)); case match_left: return(0 == table.IndexOf(s1, substring)); case match_right: string s2 = substring.TrimEnd(trimChars); return table.IsSuffix(s1, s2); default: Debug.Assert(false, "Unexpected LIKE kind"); return DBNull.Value; } } internal string AnalyzePattern(string pat) { int length = pat.Length; char[] patchars = new char[length+1]; pat.CopyTo(0, patchars, 0, length); patchars[length] = (char)0; string substring = null; char[] constchars = new char[length+1]; int newLength = 0; int stars = 0; int i = 0; while (i < length) { if (patchars[i] == '*' || patchars[i] == '%') { // replace conseq. * or % with one.. while ((patchars[i] == '*' || patchars[i] == '%') && i < length) i++; // we allowing only *str* pattern if ((i < length && newLength > 0) || stars >= 2) { // we have a star inside string constant.. throw ExprException.InvalidPattern(pat); } stars++; } else if (patchars[i] == '[') { i++; if (i >= length) { throw ExprException.InvalidPattern(pat); } constchars[newLength++] = patchars[i++]; if (i >= length) { throw ExprException.InvalidPattern(pat); } if (patchars[i] != ']') { throw ExprException.InvalidPattern(pat); } i++; } else { constchars[newLength++] = patchars[i]; i++; } } substring = new string(constchars, 0, newLength); if (stars == 0) { kind = match_exact; } else { if (newLength > 0) { if (patchars[0] == '*' || patchars[0] == '%') { if (patchars[length-1] == '*' || patchars[length-1] == '%') { kind = match_middle; } else { kind = match_right; } } else { Debug.Assert(patchars[length-1] == '*' || patchars[length-1] == '%', "Invalid LIKE pattern formed..
"); kind = match_left; } } else { kind = match_all; } } return substring; } } }
using YAF.Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;

namespace YAF.Lucene.Net.Util
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// A <see cref="PriorityQueue{T}"/> maintains a partial ordering of its elements such that the
    /// element with least priority can always be found in constant time. Put()'s and Pop()'s
    /// require log(size) time.
    ///
    /// <para/><b>NOTE</b>: this class will pre-allocate a full array of
    /// length <c>maxSize+1</c> if instantiated via the
    /// <see cref="PriorityQueue(int, bool)"/> constructor with
    /// <c>prepopulate</c> set to <c>true</c>. That maximum
    /// size can grow as we insert elements over the time.
    /// <para/>
    /// @lucene.internal
    /// </summary>
#if FEATURE_SERIALIZABLE
    [Serializable]
#endif
    public abstract class PriorityQueue<T>
    {
        private int size = 0;   // number of elements currently in the queue
        private int maxSize;    // capacity requested at construction
        private T[] heap;       // 1-based binary heap storage; heap[0] is unused

        public PriorityQueue(int maxSize)
            : this(maxSize, true)
        {
        }

        public PriorityQueue(int maxSize, bool prepopulate)
        {
            int heapSize;
            if (0 == maxSize)
            {
                // We allocate 1 extra to avoid if statement in top()
                heapSize = 2;
            }
            else
            {
                if (maxSize > ArrayUtil.MAX_ARRAY_LENGTH)
                {
                    // Don't wrap heapSize to -1, in this case, which
                    // causes a confusing NegativeArraySizeException.
                    // Note that very likely this will simply then hit
                    // an OOME, but at least that's more indicative to
                    // caller that this values is too big. We don't +1
                    // in this case, but it's very unlikely in practice
                    // one will actually insert this many objects into
                    // the PQ:
                    // Throw exception to prevent confusing OOME:
                    throw new ArgumentException("maxSize must be <= " + ArrayUtil.MAX_ARRAY_LENGTH + "; got: " + maxSize);
                }
                else
                {
                    // NOTE: we add +1 because all access to heap is
                    // 1-based not 0-based.  heap[0] is unused.
                    heapSize = maxSize + 1;
                }
            }
            // T is unbounded type, so this unchecked cast works always:
            T[] h = new T[heapSize];
            this.heap = h;
            this.maxSize = maxSize;

            if (prepopulate)
            {
                // If sentinel objects are supported, populate the queue with them.
                // A sentinel equal to default(T) signals "no sentinel support".
                T sentinel = GetSentinelObject();
                if (!EqualityComparer<T>.Default.Equals(sentinel, default(T)))
                {
                    heap[1] = sentinel;
                    for (int i = 2; i < heap.Length; i++)
                    {
                        heap[i] = GetSentinelObject();
                    }
                    size = maxSize;
                }
            }
        }

        /// <summary>
        /// Determines the ordering of objects in this priority queue. Subclasses
        /// must define this one method.
        /// </summary>
        /// <returns><c>true</c> if parameter <paramref name="a"/> is less than parameter <paramref name="b"/>.</returns>
        protected internal abstract bool LessThan(T a, T b); // LUCENENET: Internal for testing

        /// <summary>
        /// This method can be overridden by extending classes to return a sentinel
        /// object which will be used by the <see cref="PriorityQueue(int, bool)"/>
        /// constructor to fill the queue, so that the code which uses that queue can always
        /// assume it's full and only change the top without attempting to insert any new
        /// object.
        /// <para/>
        /// Those sentinel values should always compare worse than any non-sentinel
        /// value (i.e., <see cref="LessThan(T, T)"/> should always favor the
        /// non-sentinel values).
        /// <para/>
        /// By default, this method returns <c>default(T)</c> (<c>null</c> for reference
        /// types), which means the queue will not be filled with sentinel values.
        /// Otherwise, the value returned will be used to pre-populate the queue.
        /// <para/>
        /// If this method is extended to return a non-null value, then the following
        /// usage pattern is recommended:
        ///
        /// <code>
        /// // extends GetSentinelObject() to return a non-null value.
        /// PriorityQueue&lt;MyObject&gt; pq = new MyQueue&lt;MyObject&gt;(numHits);
        /// // save the 'top' element, which is guaranteed to not be null.
        /// MyObject pqTop = pq.Top;
        /// &lt;...&gt;
        /// // now in order to add a new element, which is 'better' than top (after
        /// // you've verified it is better), it is as simple as:
        /// pqTop.Change().
        /// pqTop = pq.UpdateTop();
        /// </code>
        /// <para/>
        /// <b>NOTE:</b> if this method returns a non-<c>null</c> value, it will be called by
        /// the <see cref="PriorityQueue(int, bool)"/> constructor
        /// <see cref="Count"/> times, relying on a new object to be returned and will not
        /// check if it's <c>null</c> again. Therefore you should ensure any call to this
        /// method creates a new instance and behaves consistently, e.g., it cannot
        /// return <c>null</c> if it previously returned non-<c>null</c>.
        /// </summary>
        /// <returns>The sentinel object to use to pre-populate the queue, or <c>null</c> if
        /// sentinel objects are not supported.</returns>
        protected virtual T GetSentinelObject()
        {
            return default(T);
        }

        /// <summary>
        /// Adds an Object to a <see cref="PriorityQueue{T}"/> in log(size) time. If one tries to add
        /// more objects than <see cref="maxSize"/> from initialize and it is not possible to resize
        /// the heap, an <see cref="IndexOutOfRangeException"/> is thrown.
        /// </summary>
        /// <returns>The new 'top' element in the queue.</returns>
        public T Add(T element)
        {
            size++;
            heap[size] = element;
            UpHeap();
            return heap[1];
        }

        /// <summary>
        /// Adds an Object to a <see cref="PriorityQueue{T}"/> in log(size) time.
        /// It returns the object (if any) that was
        /// dropped off the heap because it was full. This can be
        /// the given parameter (in case it is smaller than the
        /// full heap's minimum, and couldn't be added), or another
        /// object that was previously the smallest value in the
        /// heap and now has been replaced by a larger one, or <c>null</c>
        /// if the queue wasn't yet full with <see cref="maxSize"/> elements.
        /// </summary>
        public virtual T InsertWithOverflow(T element)
        {
            if (size < maxSize)
            {
                Add(element);
                return default(T);
            }
            else if (size > 0 && !LessThan(element, heap[1]))
            {
                // New element beats the current minimum: swap it in and re-heapify.
                T ret = heap[1];
                heap[1] = element;
                UpdateTop();
                return ret;
            }
            else
            {
                // Queue is full and the element is no better than the minimum.
                return element;
            }
        }

        /// <summary>
        /// Returns the least element of the <see cref="PriorityQueue{T}"/> in constant time.
        /// Returns <c>null</c> if the queue is empty.
        /// </summary>
        public T Top
        {
            get
            {
                // We don't need to check size here: if maxSize is 0,
                // then heap is length 2 array with both entries null.
                // If size is 0 then heap[1] is already null.
                return heap[1];
            }
        }

        /// <summary>
        /// Removes and returns the least element of the <see cref="PriorityQueue{T}"/> in log(size)
        /// time.
        /// </summary>
        public T Pop()
        {
            if (size > 0)
            {
                T result = heap[1]; // save first value
                heap[1] = heap[size]; // move last to first
                heap[size] = default(T); // permit GC of objects
                size--;
                DownHeap(); // adjust heap
                return result;
            }
            else
            {
                return default(T);
            }
        }

        /// <summary>
        /// Should be called when the Object at top changes values. Still log(n) worst
        /// case, but it's at least twice as fast to
        ///
        /// <code>
        /// pq.Top.Change();
        /// pq.UpdateTop();
        /// </code>
        ///
        /// instead of
        ///
        /// <code>
        /// o = pq.Pop();
        /// o.Change();
        /// pq.Push(o);
        /// </code>
        /// </summary>
        /// <returns>The new 'top' element.</returns>
        public T UpdateTop()
        {
            DownHeap();
            return heap[1];
        }

        /// <summary>
        /// Returns the number of elements currently stored in the <see cref="PriorityQueue{T}"/>.
        /// NOTE: This was size() in Lucene.
        /// </summary>
        public int Count
        {
            get { return size; }
        }

        /// <summary>
        /// Removes all entries from the <see cref="PriorityQueue{T}"/>.
        /// </summary>
        public void Clear()
        {
            // Null out slots 0..size so the previously-held objects become collectable.
            for (int i = 0; i <= size; i++)
            {
                heap[i] = default(T);
            }
            size = 0;
        }

        // Restores the heap invariant after an insertion at heap[size]:
        // walks the new node up toward the root while it is less than its parent.
        private void UpHeap()
        {
            int i = size;
            T node = heap[i]; // save bottom node
            int j = (int)((uint)i >> 1);
            while (j > 0 && LessThan(node, heap[j]))
            {
                heap[i] = heap[j]; // shift parents down
                i = j;
                j = (int)((uint)j >> 1);
            }
            heap[i] = node; // install saved node
        }

        // Restores the heap invariant after the root changed:
        // walks the root node down, swapping with the smaller child each level.
        private void DownHeap()
        {
            int i = 1;
            T node = heap[i]; // save top node
            int j = i << 1; // find smaller child
            int k = j + 1;
            if (k <= size && LessThan(heap[k], heap[j]))
            {
                j = k;
            }
            while (j <= size && LessThan(heap[j], node))
            {
                heap[i] = heap[j]; // shift up child
                i = j;
                j = i << 1;
                k = j + 1;
                if (k <= size && LessThan(heap[k], heap[j]))
                {
                    j = k;
                }
            }
            heap[i] = node; // install saved node
        }

        /// <summary>
        /// This method returns the internal heap array as T[].
        /// <para/>
        /// @lucene.internal
        /// </summary>
        [WritableArray]
        [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")]
        protected T[] HeapArray
        {
            get
            {
                return heap;
            }
        }
    }
}
using System;
using System.Linq;
using System.Collections.Generic;
using Microsoft.EntityFrameworkCore;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Authorization;
using hihapi.Models;
using hihapi.Utilities;
using hihapi.Exceptions;
using Microsoft.AspNetCore.OData.Routing.Controllers;
using Microsoft.AspNetCore.OData.Query;
using Microsoft.AspNetCore.OData.Formatter;

namespace hihapi.Controllers
{
    /// <summary>
    /// OData controller exposing CRUD operations for <see cref="FinanceDocumentType"/>.
    /// System-defined types carry a <c>null</c> HomeID and are visible to everyone;
    /// home-defined types are only visible to (and editable by) members of that home.
    /// </summary>
    [Authorize]
    public class FinanceDocumentTypesController : ODataController
    {
        private readonly hihDataContext _context;

        public FinanceDocumentTypesController(hihDataContext context)
        {
            _context = context;
        }

        /// <summary>
        /// Resolves the current user name, returning an empty string when no identity
        /// can be resolved. Used by the read endpoints, which fall back to
        /// system-defined data for anonymous callers.
        /// </summary>
        private String GetUserNameOrEmpty()
        {
            try
            {
                return HIHAPIUtility.GetUserID(this);
            }
            catch
            {
                // Deliberate best-effort: an unresolvable identity is not an error here.
                return String.Empty;
            }
        }

        /// <summary>
        /// Resolves the current user name for the write endpoints.
        /// </summary>
        /// <exception cref="UnauthorizedAccessException">No user identity could be resolved.</exception>
        private String GetUserNameOrThrow()
        {
            String usrName = GetUserNameOrEmpty();
            if (String.IsNullOrEmpty(usrName))
            {
                throw new UnauthorizedAccessException();
            }
            return usrName;
        }

        /// <summary>
        /// Ensures the user is assigned to the specified home.
        /// </summary>
        /// <exception cref="UnauthorizedAccessException">User is not a member of the home.</exception>
        private void EnsureHomeMembership(int? homeId, String usrName)
        {
            // CA1827: Any() instead of Count() > 0 — translates to EXISTS instead of COUNT.
            if (!_context.HomeMembers.Any(p => p.HomeID == homeId && p.User == usrName))
            {
                throw new UnauthorizedAccessException();
            }
        }

        /// <summary>
        /// GET: /FinanceDocumentTypes.
        /// Returns the system-defined types plus, for an authenticated user,
        /// the types of every home the user belongs to.
        /// </summary>
        [EnableQuery]
        [HttpGet]
        public IActionResult Get()
        {
            String usrName = GetUserNameOrEmpty();
            if (String.IsNullOrEmpty(usrName))
            {
                return Ok(_context.FinDocumentTypes.Where(p => p.HomeID == null));
            }

            // System-defined document types (no home).
            var rst0 = from acntctgy in _context.FinDocumentTypes
                       where acntctgy.HomeID == null
                       select acntctgy;
            // Types of every home the user is a member of.
            var rst1 = from hmem in _context.HomeMembers
                       where hmem.User == usrName
                       select new { HomeID = hmem.HomeID } into hids
                       join acntctgy in _context.FinDocumentTypes
                       on hids.HomeID equals acntctgy.HomeID
                       select acntctgy;

            return Ok(rst0.Union(rst1));
        }

        /// <summary>
        /// GET: /FinanceDocumentTypes(key).
        /// Returns the requested type when it is system-defined or belongs to one
        /// of the caller's homes; <c>null</c> otherwise.
        /// </summary>
        [EnableQuery]
        [HttpGet]
        public FinanceDocumentType Get([FromODataUri] int key)
        {
            String usrName = GetUserNameOrEmpty();
            if (String.IsNullOrEmpty(usrName))
            {
                return _context.FinDocumentTypes.Where(p => p.ID == key && p.HomeID == null).SingleOrDefault();
            }

            // Left-join to HomeMembers: system types have no member rows (nhmem == null),
            // home types must match the caller's membership.
            return (from ctgy in _context.FinDocumentTypes
                    join hmem in _context.HomeMembers
                    on ctgy.HomeID equals hmem.HomeID into hmem2
                    from nhmem in hmem2.DefaultIfEmpty()
                    where ctgy.ID == key && (nhmem == null || nhmem.User == usrName)
                    select ctgy).SingleOrDefault();
        }

        /// <summary>
        /// POST: /FinanceDocumentTypes. Creates a home-defined document type.
        /// </summary>
        /// <exception cref="BadRequestException">Payload invalid or HomeID missing.</exception>
        /// <exception cref="UnauthorizedAccessException">Caller unresolved or not a home member.</exception>
        [HttpPost]
        public async Task<IActionResult> Post([FromBody] FinanceDocumentType ctgy)
        {
            if (!ModelState.IsValid)
            {
                HIHAPIUtility.HandleModalStateError(ModelState);
            }

            // Check: user-defined types must be attached to a home.
            if (!ctgy.IsValid(this._context) || !ctgy.HomeID.HasValue)
            {
                throw new BadRequestException("Inputted object IsValid failed");
            }

            String usrName = GetUserNameOrThrow();
            EnsureHomeMembership(ctgy.HomeID.Value, usrName);

            // NOTE(review): local server time, matching the rest of this controller;
            // consider DateTime.UtcNow if the schema convention allows — confirm.
            ctgy.Createdby = usrName;
            ctgy.CreatedAt = DateTime.Now;
            _context.FinDocumentTypes.Add(ctgy);
            await _context.SaveChangesAsync();

            return Created(ctgy);
        }

        /// <summary>
        /// PUT: /FinanceDocumentTypes(key). Updates an existing document type.
        /// </summary>
        /// <exception cref="BadRequestException">Key/ID mismatch or payload invalid.</exception>
        /// <exception cref="UnauthorizedAccessException">Caller unresolved or not a home member.</exception>
        /// <exception cref="NotFoundException">Concurrency failure and the ID no longer exists.</exception>
        /// <exception cref="DBOperationException">Other concurrency failure.</exception>
        [HttpPut]
        public async Task<IActionResult> Put([FromODataUri] int key, [FromBody] FinanceDocumentType update)
        {
            if (!ModelState.IsValid)
            {
                HIHAPIUtility.HandleModalStateError(ModelState);
            }
            if (key != update.ID)
            {
                throw new BadRequestException("Inputted ID mismatched");
            }

            String usrName = GetUserNameOrThrow();
            EnsureHomeMembership(update.HomeID, usrName);

            if (!update.IsValid(this._context))
            {
                throw new BadRequestException("Inputted Object IsValid failed");
            }

            update.UpdatedAt = DateTime.Now;
            update.Updatedby = usrName;
            _context.Entry(update).State = EntityState.Modified;

            try
            {
                await _context.SaveChangesAsync();
            }
            catch (DbUpdateConcurrencyException exp)
            {
                // Distinguish "row deleted" from other concurrency conflicts.
                if (!_context.FinDocumentTypes.Any(p => p.ID == key))
                {
                    throw new NotFoundException("Object ID not found");
                }
                else
                {
                    throw new DBOperationException(exp.Message);
                }
            }

            return Updated(update);
        }

        /// <summary>
        /// DELETE: /FinanceDocumentTypes(key). Deletes a document type after
        /// membership and deletability checks.
        /// </summary>
        /// <exception cref="NotFoundException">ID not found.</exception>
        /// <exception cref="UnauthorizedAccessException">Caller unresolved or not a home member.</exception>
        /// <exception cref="BadRequestException">Type is referenced and cannot be deleted.</exception>
        [HttpDelete]
        public async Task<IActionResult> Delete([FromODataUri] short key)
        {
            // NOTE(review): key is 'short' here while Get/Put use 'int'. FindAsync requires
            // the entity's exact key type, so this presumably reflects the model — confirm
            // and align the sibling actions if the key really is Int32.
            var cc = await _context.FinDocumentTypes.FindAsync(key);
            if (cc == null)
            {
                throw new NotFoundException("Inputted ID not found");
            }

            String usrName = GetUserNameOrThrow();
            EnsureHomeMembership(cc.HomeID, usrName);

            if (!cc.IsDeleteAllowed(this._context))
            {
                throw new BadRequestException("Object IsDeletedAllowed Failed");
            }

            _context.FinDocumentTypes.Remove(cc);
            await _context.SaveChangesAsync();

            return StatusCode(204); // HttpStatusCode.NoContent
        }
    }
}
//***************************************************
//* This file was generated by tool
//* SharpKit
//* At: 29/08/2012 03:59:39 p.m.
//***************************************************
using SharpKit.JavaScript;

namespace Ext.data
{
    #region JsonP
    /// <inheritdocs />
    /// <summary>
    /// <p>This class is used to create JSONP requests. JSONP is a mechanism that allows for making
    /// requests for data cross domain. More information is available <i>here</i>.</p>
    /// </summary>
    [JsType(JsMode.Prototype, Export=false, OmitOptionalParameters=true)]
    public partial class JsonP : Ext.Base
    {
        /// <summary>
        /// Defaults to: <c>&quot;Ext.Base&quot;</c>
        /// </summary>
        [JsProperty(Name="$className")]
        private static JsString @className{get;set;}

        /// <summary>
        /// Specifies the GET parameter that will be sent to the server containing the function name to be executed when
        /// the request completes. Defaults to callback. Thus, a common request will be in the form of
        /// url?callback=Ext.data.JsonP.callback1
        /// Defaults to: <c>&quot;callback&quot;</c>
        /// </summary>
        public static JsString callbackKey{get;set;}

        /// <summary>
        /// Defaults to: <c>{}</c>
        /// </summary>
        private static JsObject configMap{get;set;}

        /// <summary>
        /// True to add a unique cache-buster param to requests. Defaults to true.
        /// Defaults to: <c>true</c>
        /// </summary>
        public static bool disableCaching{get;set;}

        /// <summary>
        /// Change the parameter which is sent when disabling caching through a cache buster. Defaults to '_dc'.
        /// Defaults to: <c>&quot;_dc&quot;</c>
        /// </summary>
        public static JsString disableCachingParam{get;set;}

        /// <summary>
        /// Defaults to: <c>[]</c>
        /// </summary>
        private static JsArray initConfigList{get;set;}

        /// <summary>
        /// Defaults to: <c>{}</c>
        /// </summary>
        private static JsObject initConfigMap{get;set;}

        /// <summary>
        /// Defaults to: <c>true</c>
        /// </summary>
        private static bool isInstance{get;set;}

        /// <summary>
        /// Number of requests done so far.
/// Defaults to: <c>0</c> /// </summary> private static JsNumber requestCount{get;set;} /// <summary> /// Hash of pending requests. /// Defaults to: <c>{}</c> /// </summary> private static JsObject requests{get;set;} /// <summary> /// Get the reference to the current class from which this object was instantiated. Unlike statics, /// this.self is scope-dependent and it's meant to be used for dynamic inheritance. See statics /// for a detailed comparison /// <code><see cref="Ext.ExtContext.define">Ext.define</see>('My.Cat', { /// statics: { /// speciesName: 'Cat' // My.Cat.speciesName = 'Cat' /// }, /// constructor: function() { /// alert(this.self.speciesName); // dependent on 'this' /// }, /// clone: function() { /// return new this.self(); /// } /// }); /// <see cref="Ext.ExtContext.define">Ext.define</see>('My.SnowLeopard', { /// extend: 'My.Cat', /// statics: { /// speciesName: 'Snow Leopard' // My.SnowLeopard.speciesName = 'Snow Leopard' /// } /// }); /// var cat = new My.Cat(); // alerts 'Cat' /// var snowLeopard = new My.SnowLeopard(); // alerts 'Snow Leopard' /// var clone = snowLeopard.clone(); /// alert(<see cref="Ext.ExtContext.getClassName">Ext.getClassName</see>(clone)); // alerts 'My.SnowLeopard' /// </code> /// </summary> protected static Class self{get;set;} /// <summary> /// A default timeout for any JsonP requests. If the request has not completed in this time the /// failure callback will be fired. The timeout is in ms. Defaults to 30000. /// Defaults to: <c>30000</c> /// </summary> public static JsNumber timeout{get;set;} /// <summary> /// Abort a request. If the request parameter is not specified all open requests will /// be aborted. 
/// </summary> /// <param name="request"><p>The request to abort</p> /// </param> public static void abort(object request=null){} /// <summary> /// Call the original method that was previously overridden with override /// <code><see cref="Ext.ExtContext.define">Ext.define</see>('My.Cat', { /// constructor: function() { /// alert("I'm a cat!"); /// } /// }); /// My.Cat.override({ /// constructor: function() { /// alert("I'm going to be a cat!"); /// this.callOverridden(); /// alert("Meeeeoooowwww"); /// } /// }); /// var kitty = new My.Cat(); // alerts "I'm going to be a cat!" /// // alerts "I'm a cat!" /// // alerts "Meeeeoooowwww" /// </code> /// <p>This method has been <strong>deprecated</strong> </p> /// <p>as of 4.1. Use <see cref="Ext.Base.callParent">callParent</see> instead.</p> /// </summary> /// <param name="args"><p>The arguments, either an array or the <c>arguments</c> object /// from the current method, for example: <c>this.callOverridden(arguments)</c></p> /// </param> /// <returns> /// <span><see cref="Object">Object</see></span><div><p>Returns the result of calling the overridden method</p> /// </div> /// </returns> protected static object callOverridden(object args=null){return null;} /// <summary> /// Call the "parent" method of the current method. That is the method previously /// overridden by derivation or by an override (see Ext.define). 
/// <code> <see cref="Ext.ExtContext.define">Ext.define</see>('My.Base', { /// constructor: function (x) { /// this.x = x; /// }, /// statics: { /// method: function (x) { /// return x; /// } /// } /// }); /// <see cref="Ext.ExtContext.define">Ext.define</see>('My.Derived', { /// extend: 'My.Base', /// constructor: function () { /// this.callParent([21]); /// } /// }); /// var obj = new My.Derived(); /// alert(obj.x); // alerts 21 /// </code> /// This can be used with an override as follows: /// <code> <see cref="Ext.ExtContext.define">Ext.define</see>('My.DerivedOverride', { /// override: 'My.Derived', /// constructor: function (x) { /// this.callParent([x*2]); // calls original My.Derived constructor /// } /// }); /// var obj = new My.Derived(); /// alert(obj.x); // now alerts 42 /// </code> /// This also works with static methods. /// <code> <see cref="Ext.ExtContext.define">Ext.define</see>('My.Derived2', { /// extend: 'My.Base', /// statics: { /// method: function (x) { /// return this.callParent([x*2]); // calls My.Base.method /// } /// } /// }); /// alert(My.Base.method(10); // alerts 10 /// alert(My.Derived2.method(10); // alerts 20 /// </code> /// Lastly, it also works with overridden static methods. 
        /// <code> <see cref="Ext.ExtContext.define">Ext.define</see>('My.Derived2Override', {
        ///     override: 'My.Derived2',
        ///     statics: {
        ///         method: function (x) {
        ///             return this.callParent([x*2]); // calls My.Derived2.method
        ///         }
        ///     }
        /// });
        /// alert(My.Derived2.method(10); // now alerts 40
        /// </code>
        /// </summary>
        /// <param name="args"><p>The arguments, either an array or the <c>arguments</c> object
        /// from the current method, for example: <c>this.callParent(arguments)</c></p>
        /// </param>
        /// <returns>
        /// <span><see cref="Object">Object</see></span><div><p>Returns the result of calling the parent method</p>
        /// </div>
        /// </returns>
        protected static object callParent(object args=null){return null;}

        /// <summary>
        /// Cleans up any script handling errors
        /// </summary>
        /// <param name="request"><p>The request</p>
        /// </param>
        private static void cleanupErrorHandling(object request){}

        /// <summary>
        /// </summary>
        private static void configClass(){}

        /// <summary>
        /// Create the script tag given the specified url, params and options. The options
        /// parameter is passed to allow an override to access it.
/// </summary> /// <param name="url"><p>The url of the request</p> /// </param> /// <param name="params"><p>Any extra params to be sent</p> /// </param> /// <param name="options"><p>The object passed to <see cref="Ext.data.JsonP.request">request</see>.</p> /// </param> private static void createScript(JsString url, object @params, object options){} /// <summary> /// Overrides: <see cref="Ext.AbstractComponent.destroy">Ext.AbstractComponent.destroy</see>, <see cref="Ext.AbstractPlugin.destroy">Ext.AbstractPlugin.destroy</see>, <see cref="Ext.layout.Layout.destroy">Ext.layout.Layout.destroy</see> /// </summary> private static void destroy(){} /// <summary> /// Parameters<li><span>name</span> : <see cref="Object">Object</see><div> /// </div></li> /// </summary> /// <param name="name"> /// </param> private static void getConfig(object name){} /// <summary> /// Returns the initial configuration passed to constructor when instantiating /// this class. /// </summary> /// <param name="name"><p>Name of the config option to return.</p> /// </param> /// <returns> /// <span><see cref="Object">Object</see>/Mixed</span><div><p>The full config object or a single config value /// when <c>name</c> parameter specified.</p> /// </div> /// </returns> public static object getInitialConfig(object name=null){return null;} /// <summary> /// Handles any aborts when loading the script /// </summary> /// <param name="request"><p>The request</p> /// </param> private static void handleAbort(object request){} /// <summary> /// Handles any script errors when loading the script /// </summary> /// <param name="request"><p>The request</p> /// </param> private static void handleError(object request){} /// <summary> /// Handle a successful response /// </summary> /// <param name="result"><p>The result from the request</p> /// </param> /// <param name="request"><p>The request</p> /// </param> private static void handleResponse(object result, object request){} /// <summary> /// Handle any script 
timeouts /// </summary> /// <param name="request"><p>The request</p> /// </param> private static void handleTimeout(object request){} /// <summary> /// Parameters<li><span>config</span> : <see cref="Object">Object</see><div> /// </div></li> /// </summary> /// <param name="config"> /// </param> private static void hasConfig(object config){} /// <summary> /// Initialize configuration for this class. a typical example: /// <code><see cref="Ext.ExtContext.define">Ext.define</see>('My.awesome.Class', { /// // The default config /// config: { /// name: 'Awesome', /// isAwesome: true /// }, /// constructor: function(config) { /// this.initConfig(config); /// } /// }); /// var awesome = new My.awesome.Class({ /// name: 'Super Awesome' /// }); /// alert(awesome.getName()); // 'Super Awesome' /// </code> /// </summary> /// <param name="config"> /// </param> /// <returns> /// <span><see cref="Ext.Base">Ext.Base</see></span><div><p>this</p> /// </div> /// </returns> protected static Ext.Base initConfig(object config){return null;} /// <summary> /// Loads the script for the given request by appending it to the HEAD element. This is /// its own method so that users can override it (as well as createScript). /// </summary> /// <param name="request"><p>The request object.</p> /// </param> private static void loadScript(object request){} /// <summary> /// Parameters<li><span>names</span> : <see cref="Object">Object</see><div> /// </div></li><li><span>callback</span> : <see cref="Object">Object</see><div> /// </div></li><li><span>scope</span> : <see cref="Object">Object</see><div> /// </div></li> /// </summary> /// <param name="names"> /// </param> /// <param name="callback"> /// </param> /// <param name="scope"> /// </param> private static void onConfigUpdate(object names, object callback, object scope){} /// <summary> /// Makes a JSONP request. /// </summary> /// <param name="options"><p>An object which may contain the following properties. 
Note that options will /// take priority over any defaults that are specified in the class.</p> /// <ul> /// <li><b>url</b> : String <div>The URL to request.</div></li> /// <li><b>params</b> : Object (Optional)<div>An object containing a series of /// key value pairs that will be sent along with the request.</div></li> /// <li><b>timeout</b> : Number (Optional) <div>See <see cref="Ext.data.JsonP.timeout">timeout</see></div></li> /// <li><b>callbackKey</b> : String (Optional) <div>See <see cref="Ext.data.JsonP.callbackKey">callbackKey</see></div></li> /// <li><b>callbackName</b> : String (Optional) <div>The function name to use for this request. /// By default this name will be auto-generated: Ext.data.JsonP.callback1, Ext.data.JsonP.callback2, etc. /// Setting this option to "my_name" will force the function name to be <see cref="Ext.data.JsonP">Ext.data.JsonP</see>.my_name. /// Use this if you want deterministic behavior, but be careful - the callbackName should be different /// in each JsonP request that you make.</div></li> /// <li><b>disableCaching</b> : Boolean (Optional) <div>See <see cref="Ext.data.JsonP.disableCaching">disableCaching</see></div></li> /// <li><b>disableCachingParam</b> : String (Optional) <div>See <see cref="Ext.data.JsonP.disableCachingParam">disableCachingParam</see></div></li> /// <li><b>success</b> : Function (Optional) <div>A function to execute if the request succeeds.</div></li> /// <li><b>failure</b> : Function (Optional) <div>A function to execute if the request fails.</div></li> /// <li><b>callback</b> : Function (Optional) <div>A function to execute when the request /// completes, whether it is a success or failure.</div></li> /// <li><b>scope</b> : Object (Optional)<div>The scope in /// which to execute the callbacks: The "this" object for the callback function. 
Defaults to the browser window.</div></li> /// </ul> /// </param> /// <returns> /// <span><see cref="Object">Object</see></span><div><p>request An object containing the request details.</p> /// </div> /// </returns> public static object request(object options=null){return null;} /// <summary> /// Parameters<li><span>config</span> : <see cref="Object">Object</see><div> /// </div></li><li><span>applyIfNotSet</span> : <see cref="Object">Object</see><div> /// </div></li> /// </summary> /// <param name="config"> /// </param> /// <param name="applyIfNotSet"> /// </param> private static void setConfig(object config, object applyIfNotSet){} /// <summary> /// Sets up error handling for the script /// </summary> /// <param name="request"><p>The request</p> /// </param> private static void setupErrorHandling(object request){} /// <summary> /// Get the reference to the class from which this object was instantiated. Note that unlike self, /// this.statics() is scope-independent and it always returns the class from which it was called, regardless of what /// this points to during run-time /// <code><see cref="Ext.ExtContext.define">Ext.define</see>('My.Cat', { /// statics: { /// totalCreated: 0, /// speciesName: 'Cat' // My.Cat.speciesName = 'Cat' /// }, /// constructor: function() { /// var statics = this.statics(); /// alert(statics.speciesName); // always equals to 'Cat' no matter what 'this' refers to /// // equivalent to: My.Cat.speciesName /// alert(this.self.speciesName); // dependent on 'this' /// statics.totalCreated++; /// }, /// clone: function() { /// var cloned = new this.self; // dependent on 'this' /// cloned.groupName = this.statics().speciesName; // equivalent to: My.Cat.speciesName /// return cloned; /// } /// }); /// <see cref="Ext.ExtContext.define">Ext.define</see>('My.SnowLeopard', { /// extend: 'My.Cat', /// statics: { /// speciesName: 'Snow Leopard' // My.SnowLeopard.speciesName = 'Snow Leopard' /// }, /// constructor: function() { /// this.callParent(); 
/// } /// }); /// var cat = new My.Cat(); // alerts 'Cat', then alerts 'Cat' /// var snowLeopard = new My.SnowLeopard(); // alerts 'Cat', then alerts 'Snow Leopard' /// var clone = snowLeopard.clone(); /// alert(<see cref="Ext.ExtContext.getClassName">Ext.getClassName</see>(clone)); // alerts 'My.SnowLeopard' /// alert(clone.groupName); // alerts 'Cat' /// alert(My.Cat.totalCreated); // alerts 3 /// </code> /// </summary> /// <returns> /// <span><see cref="Ext.Class">Ext.Class</see></span><div> /// </div> /// </returns> protected static Class statics(){return null;} public JsonP(Ext.data.JsonPConfig config){} public JsonP(){} public JsonP(params object[] args){} } #endregion #region JsonPConfig /// <inheritdocs /> [JsType(JsMode.Json, Export=false, OmitOptionalParameters=true)] public partial class JsonPConfig : Ext.BaseConfig { public JsonPConfig(params object[] args){} } #endregion #region JsonPEvents /// <inheritdocs /> [JsType(JsMode.Json, Export=false, OmitOptionalParameters=true)] public partial class JsonPEvents : Ext.BaseEvents { public JsonPEvents(params object[] args){} } #endregion }
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using Hooker.WebApi.Areas.HelpPage.ModelDescriptions;
using Hooker.WebApi.Areas.HelpPage.Models;
using Hooker.WebApi.Areas.HelpPage.SampleGeneration;

namespace Hooker.WebApi.Areas.HelpPage
{
    /// <summary>
    /// Extension methods on <see cref="HttpConfiguration"/> used to configure the auto-generated
    /// help page: registering documentation providers, supplying sample requests/responses, and
    /// lazily building (and caching) the per-API models the help page renders.
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Prefix used to namespace cached HelpPageApiModel entries inside HttpConfiguration.Properties,
        // so they cannot collide with keys stored by other components.
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" is the wildcard parameter list: the sample applies to any overload of the action.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily created once and cached in the configuration's property bag.
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }

            // NOTE: if no ApiDescription matches the id, model remains null and null is returned;
            // callers are expected to handle a null result.
            return (HelpPageApiModel)model;
        }

        /// <summary>
        /// Builds the complete help-page model for one API: URI parameters, request/response
        /// descriptions and the sample payloads.
        /// </summary>
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };

            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);

            return apiModel;
        }

        /// <summary>
        /// Populates <c>apiModel.UriParameters</c> from the action's FromUri parameters, flattening
        /// complex types into their individual properties unless the type is bindable via a TypeConverter.
        /// </summary>
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }

                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType))
                    {
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription);
                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }

                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);

                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }

        /// <summary>
        /// Returns true when the type can be model-bound from a string via its TypeConverter.
        /// </summary>
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }

            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }

        /// <summary>
        /// Creates a <see cref="ParameterDescription"/> for the given parameter, appends it to the
        /// model's UriParameters and returns it so callers can attach annotations.
        /// </summary>
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };

            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }

        /// <summary>
        /// Fills in the request body description, either from the FromBody parameter or, for raw
        /// <see cref="HttpRequestMessage"/> parameters, from the registered actual request type.
        /// </summary>
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);

                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }

        /// <summary>
        /// Describes the action's response resource type (declared or documented), skipping void actions.
        /// </summary>
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }

        /// <summary>
        /// Generates the sample requests/responses for the API, recording any generation failure as
        /// an error message on the model instead of letting it escape.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception message: {0}", HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }

        /// <summary>
        /// Finds the parameter that represents the request body (FromBody or a raw HttpRequestMessage)
        /// and resolves its concrete resource type. Returns false when the action has no such parameter.
        /// </summary>
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }

            resourceType = parameterDescription.ParameterDescriptor.ParameterType;

            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }

            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }

            return true;
        }

        /// <summary>
        /// Creates the shared <see cref="ModelDescriptionGenerator"/> and pre-registers a model
        /// description for every API's request resource type.
        /// </summary>
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }

        /// <summary>
        /// If the sample failed to generate (<see cref="InvalidSample"/>), surfaces its error message on the model.
        /// </summary>
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

namespace TestCases.HSSF.UserModel
{
    using System;
    using NPOI.HSSF.UserModel;
    using NPOI.SS.UserModel;
    using NUnit.Framework;
    using TestCases.HSSF;
    using NPOI.SS.Formula;
    using TestCases.SS.Formula;
    using NPOI.SS.Formula.Eval;
    using NPOI.HSSF.Record;
    using TestCases.SS.UserModel;

    /**
     * Tests for HSSFFormulaEvaluator: named ranges, defined-name flags,
     * short-circuit IF() evaluation and cross-workbook (XRef) formulas.
     *
     * @author Josh Micich
     */
    [TestFixture]
    public class TestHSSFFormulaEvaluator : BaseTestFormulaEvaluator
    {
        /// <summary>
        /// Some of the tests are depending on the american culture.
        /// </summary>
        [SetUp]
        public void InitializeCultere()
        {
            // NOTE(review): method name has a typo ("Cultere"); kept because NUnit invokes it
            // via the [SetUp] attribute, not by name, and renaming would change the public surface.
            System.Threading.Thread.CurrentThread.CurrentCulture =
                System.Globalization.CultureInfo.CreateSpecificCulture("en-US");
        }

        public TestHSSFFormulaEvaluator()
            : base(HSSFITestDataProvider.Instance)
        {
        }

        /**
         * Test that the HSSFFormulaEvaluator can Evaluate simple named ranges
         * (single cells and rectangular areas)
         */
        [Test]
        public void TestEvaluateSimple()
        {
            HSSFWorkbook wb = HSSFTestDataSamples.OpenSampleWorkbook("TestNames.xls");
            NPOI.SS.UserModel.ISheet sheet = wb.GetSheetAt(0);
            ICell cell = sheet.GetRow(8).GetCell(0);
            HSSFFormulaEvaluator fe = new HSSFFormulaEvaluator(wb);
            NPOI.SS.UserModel.CellValue cv = fe.Evaluate(cell);
            Assert.AreEqual(NPOI.SS.UserModel.CellType.Numeric, cv.CellType);
            Assert.AreEqual(3.72, cv.NumberValue, 0.0);
        }

        // Helper: writes a numeric value at (rowIndex, colIndex), creating the row if needed.
        private static void setValue(NPOI.SS.UserModel.ISheet sheet, int rowIndex, int colIndex, double value)
        {
            IRow row = sheet.GetRow(rowIndex);
            if (row == null)
            {
                row = sheet.CreateRow(rowIndex);
            }
            row.CreateCell(colIndex).SetCellValue(value);
        }

        /**
         * When evaluating defined names, POI has to decide whether it is capable. Currently
         * (May2009) POI only supports simple cell and area refs.<br/>
         * The sample spreadsheet (bugzilla attachment 23508) had a name flagged as 'complex'
         * which contained a simple area ref. It is not clear what the 'complex' flag is used
         * for but POI should look elsewhere to decide whether it can evaluate the name.
         */
        [Test]
        public void TestDefinedNameWithComplexFlag_bug47048()
        {
            // Mock up a spreadsheet to match the critical details of the sample
            HSSFWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet("Input");
            IName definedName = wb.CreateName();
            definedName.NameName = ("Is_Multicar_Vehicle");
            definedName.RefersToFormula = ("Input!$B$17:$G$17");

            // Set up some data and the formula
            IRow row17 = sheet.CreateRow(16);
            row17.CreateCell(0).SetCellValue(25.0);
            row17.CreateCell(1).SetCellValue(1.33);
            row17.CreateCell(2).SetCellValue(4.0);

            IRow row = sheet.CreateRow(0);
            ICell cellA1 = row.CreateCell(0);
            cellA1.CellFormula = ("SUM(Is_Multicar_Vehicle)");

            // Set the complex flag - POI doesn't usually manipulate this flag
            NameRecord nameRec = TestHSSFName.GetNameRecord(definedName);
            nameRec.OptionFlag = (short)0x10; // 0x10 -> complex

            HSSFFormulaEvaluator hsf = new HSSFFormulaEvaluator(wb);
            CellValue value;
            try
            {
                value = hsf.Evaluate(cellA1);
            }
            catch (Exception e)
            {
                // The message string below (including its misspellings) must match the
                // evaluator's actual error text for the bug to be identified.
                if (e.Message.Equals("Don't now how to evalate name 'Is_Multicar_Vehicle'"))
                {
                    throw new AssertionException("Identified bug 47048a");
                }
                // Rethrow preserving the original stack trace (was "throw e;", which resets it).
                throw;
            }
            Assert.AreEqual(CellType.Numeric, value.CellType);
            Assert.AreEqual(5.33, value.NumberValue, 0.0);
        }

        // Counts how many cells the evaluator actually starts evaluating.
        private class EvalCountListener : EvaluationListener
        {
            private int _evalCount;

            public EvalCountListener()
            {
                _evalCount = 0;
            }

            public override void OnStartEvaluate(IEvaluationCell cell, ICacheEntry entry)
            {
                _evalCount++;
            }

            public int EvalCount
            {
                get
                {
                    return _evalCount;
                }
            }
        }

        /**
         * The HSSFFormula evaluator performance benefits greatly from caching of intermediate cell values
         */
        [Test]
        public void TestShortCircuitIfEvaluation()
        {
            // Set up a simple IF() formula that has measurable evaluation cost for its operands.
            HSSFWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet("Sheet1");
            IRow row = sheet.CreateRow(0);
            ICell cellA1 = row.CreateCell(0);
            cellA1.CellFormula = "if(B1,C1,D1+E1+F1)";

            // populate cells B1..F1 with simple formulas instead of plain values so we can use
            // EvaluationListener to check which parts of the first formula get evaluated
            for (int i = 1; i < 6; i++)
            {
                // formulas are just literal constants "1".."5"
                row.CreateCell(i).CellFormula = i.ToString();
            }

            EvalCountListener evalListener = new EvalCountListener();
            WorkbookEvaluator evaluator = WorkbookEvaluatorTestHelper.CreateEvaluator(wb, evalListener);
            ValueEval ve = evaluator.Evaluate(HSSFEvaluationTestHelper.WrapCell(cellA1));
            int evalCount = evalListener.EvalCount;
            if (evalCount == 6)
            {
                // Without short-circuit-if evaluation, evaluating cell 'A1' takes 3 extra evaluations (for D1,E1,F1)
                throw new AssertionException("Identifed bug 48195 - Formula evaluator should short-circuit IF() calculations.");
            }
            Assert.AreEqual(3, evalCount);
            Assert.AreEqual(2.0, ((NumberEval)ve).NumberValue, 0D);
        }

        /**
         * Ensures that we can handle NameXPtgs in the formulas
         * we Parse.
         */
        [Test]
        public void TestXRefs()
        {
            IWorkbook wb = HSSFTestDataSamples.OpenSampleWorkbook("XRefCalc.xls");
            IWorkbook wbData = HSSFTestDataSamples.OpenSampleWorkbook("XRefCalcData.xls");
            ICell cell;

            // VLookup on a name in another file
            cell = wb.GetSheetAt(0).GetRow(1).GetCell(2);
            Assert.AreEqual(CellType.Formula, cell.CellType);
            Assert.AreEqual(CellType.Numeric, cell.CachedFormulaResultType);
            Assert.AreEqual(12.30, cell.NumericCellValue, 0.0001);
            // WARNING - this is wrong!
            // The file name should be Showing, but bug #45970 is fixed
            // we seem to loose it
            Assert.AreEqual("VLOOKUP(PART,COSTS,2,FALSE)", cell.CellFormula);

            // Simple reference to a name in another file
            cell = wb.GetSheetAt(0).GetRow(1).GetCell(4);
            Assert.AreEqual(CellType.Formula, cell.CellType);
            Assert.AreEqual(CellType.Numeric, cell.CachedFormulaResultType);
            Assert.AreEqual(36.90, cell.NumericCellValue, 0.0001);
            // WARNING - this is wrong!
            // The file name should be Showing, but bug #45970 is fixed
            // we seem to loose it
            Assert.AreEqual("Cost*Markup_Cost", cell.CellFormula);

            // Evaluate the cells
            HSSFFormulaEvaluator eval = new HSSFFormulaEvaluator(wb);
            HSSFFormulaEvaluator.SetupEnvironment(
                new String[] { "XRefCalc.xls", "XRefCalcData.xls" },
                new HSSFFormulaEvaluator[] {
                    eval,
                    new HSSFFormulaEvaluator(wbData)
                }
            );
            eval.EvaluateFormulaCell(
                wb.GetSheetAt(0).GetRow(1).GetCell(2)
            );
            eval.EvaluateFormulaCell(
                wb.GetSheetAt(0).GetRow(1).GetCell(4)
            );

            // Re-check VLOOKUP one
            cell = wb.GetSheetAt(0).GetRow(1).GetCell(2);
            Assert.AreEqual(CellType.Formula, cell.CellType);
            Assert.AreEqual(CellType.Numeric, cell.CachedFormulaResultType);
            Assert.AreEqual(12.30, cell.NumericCellValue, 0.0001);

            // Re-check ref one
            cell = wb.GetSheetAt(0).GetRow(1).GetCell(4);
            Assert.AreEqual(CellType.Formula, cell.CellType);
            Assert.AreEqual(CellType.Numeric, cell.CachedFormulaResultType);
            Assert.AreEqual(36.90, cell.NumericCellValue, 0.0001);

            // Add a formula that refers to one of the existing external workbooks
            cell = wb.GetSheetAt(0).GetRow(1).CreateCell(40);
            cell.CellFormula = (/*setter*/"Cost*[XRefCalcData.xls]MarkupSheet!$B$1");

            // Check is was stored correctly
            Assert.AreEqual("Cost*[XRefCalcData.xls]MarkupSheet!$B$1", cell.CellFormula);

            // Check it Evaluates correctly
            eval.EvaluateFormulaCell(cell);
            // FIX: use a tolerance for the computed floating-point result, consistent with
            // the other numeric asserts in this test (exact FP equality is fragile).
            Assert.AreEqual(24.60 * 1.8, cell.NumericCellValue, 0.0001);

            // Try to add a formula for a new external workbook, won't be allowed to start
            try
            {
                cell = wb.GetSheetAt(0).GetRow(1).CreateCell(42);
                cell.CellFormula = (/*setter*/"[alt.xls]Sheet0!$A$1");
                Assert.Fail("New workbook not linked, shouldn't be able to Add");
            }
            // FIX: Assert.Fail throws AssertionException; the blanket catch below used to
            // swallow it, so this negative check could never fail. Rethrow it explicitly.
            catch (AssertionException)
            {
                throw;
            }
            catch (Exception)
            {
                // expected: the workbook is not linked yet
            }

            // Link our new workbook
            HSSFWorkbook alt = new HSSFWorkbook();
            alt.CreateSheet().CreateRow(0).CreateCell(0).SetCellValue("In another workbook");
            wb.LinkExternalWorkbook("alt.xls", alt);

            // Now add a formula that refers to our new workbook
            cell.CellFormula = (/*setter*/"[alt.xls]Sheet0!$A$1");
            Assert.AreEqual("[alt.xls]Sheet0!$A$1", cell.CellFormula);

            // Evaluate it, without a link to that workbook
            try
            {
                eval.Evaluate(cell);
                Assert.Fail("No cached value and no link to workbook, shouldn't Evaluate");
            }
            // FIX: same as above - don't let the blanket catch swallow the Assert.Fail.
            catch (AssertionException)
            {
                throw;
            }
            catch (Exception)
            {
                // expected: evaluator has no environment entry for alt.xls yet
            }

            // Add a link, check it does
            HSSFFormulaEvaluator.SetupEnvironment(
                new String[] { "XRefCalc.xls", "XRefCalcData.xls", "alt.xls" },
                new HSSFFormulaEvaluator[] {
                    eval,
                    new HSSFFormulaEvaluator(wbData),
                    new HSSFFormulaEvaluator(alt)
                }
            );
            eval.EvaluateFormulaCell(cell);
            Assert.AreEqual("In another workbook", cell.StringCellValue);

            // Save and re-load
            wb = HSSFTestDataSamples.WriteOutAndReadBack(wb as HSSFWorkbook);
            eval = new HSSFFormulaEvaluator(wb);
            HSSFFormulaEvaluator.SetupEnvironment(
                new String[] { "XRefCalc.xls", "XRefCalcData.xls", "alt.xls" },
                new HSSFFormulaEvaluator[] {
                    eval,
                    new HSSFFormulaEvaluator(wbData),
                    new HSSFFormulaEvaluator(alt)
                }
            );

            // Check the one referring to the previously existing workbook behaves
            cell = wb.GetSheetAt(0).GetRow(1).GetCell(40);
            Assert.AreEqual("Cost*[XRefCalcData.xls]MarkupSheet!$B$1", cell.CellFormula);
            eval.EvaluateFormulaCell(cell);
            Assert.AreEqual(24.60 * 1.8, cell.NumericCellValue, 0.0001);

            // Now check the newly Added reference
            cell = wb.GetSheetAt(0).GetRow(1).GetCell(42);
            Assert.AreEqual("[alt.xls]Sheet0!$A$1", cell.CellFormula);
            eval.EvaluateFormulaCell(cell);
            Assert.AreEqual("In another workbook", cell.StringCellValue);
        }

        [Test]
        public void TestSharedFormulas()
        {
            BaseTestSharedFormulas("shared_formulas.xls");
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.ServiceProcess;
using System.Threading;

namespace ServiceConsoleControl
{
    /// <summary>
    /// Main entry class
    /// </summary>
    class Program
    {
        /// <summary>Short program name printed in the usage text.</summary>
        private static String NAME = "scc";

        /// <summary>
        /// Entry point. Expects exactly two arguments: a command
        /// (start/stop/restart) and a service name.
        /// </summary>
        /// <param name="args">command line arguments</param>
        static void Main(string[] args)
        {
            if (args.Length != 2)
            {
                ShowUsageAndExit(null);
            }
            try
            {
                Command cmd = Command.Factory(args[0]);
                cmd.SetServiceName(args[1]);
                cmd.Execute();
            }
            catch (Exception e)
            {
                // Any failure (unknown command, unknown service, wait timeout, ...)
                // is reported together with the usage text.
                ShowUsageAndExit("Error: " + e.Message);
            }
        }

        /// <summary>
        /// Shows the usage of this program and exits.
        /// Exits with code 1 when an error line was supplied (previously the
        /// process always exited with 0, so scripts could not detect failures),
        /// otherwise exits with code 0.
        /// </summary>
        /// <param name="_line">additional line to print before usage is shown; may be null</param>
        public static void ShowUsageAndExit(String _line)
        {
            int exitCode = 0;
            if (_line != null)
            {
                Program.LogLine(_line);
                exitCode = 1; // an error message was printed -> signal failure to the caller
            }
            Program.LogLine(Program.NAME + " starts, stops and restarts services with all dependencies to other services");
            Program.LogLine("Usage: " + Program.NAME + " [" + Command.CMD_START + ", " + Command.CMD_STOP + ", " + Command.CMD_RESTART + "] [Service]");
            Environment.Exit(exitCode);
        }

        /// <summary>
        /// Write string to console
        /// </summary>
        /// <param name="_msg">string to write</param>
        public static void LogLine(String _msg)
        {
            Console.WriteLine(_msg);
        }
    }

    /// <summary>
    /// Interface for executable service commands
    /// </summary>
    interface IServiceCommand
    {
        /// <summary>
        /// Execute the command
        /// </summary>
        void Execute();

        /// <summary>
        /// Sets name of service
        /// </summary>
        /// <param name="_name">name of service</param>
        void SetServiceName(String _name);

        /// <summary>
        /// Retrieves name of service
        /// </summary>
        /// <returns>name of service</returns>
        string GetServiceName();
    }

    /// <summary>
    /// Abstract class for commands, implements IServiceCommand
    /// </summary>
    abstract class Command : IServiceCommand
    {
        private String _name;

        /// <summary>Controller bound to the target service via SetServiceName.</summary>
        protected ServiceController sc = new ServiceController();

        public static String CMD_STOP = "stop";
        public static String CMD_START = "start";
        public static String CMD_RESTART = "restart";

        /// <summary>
        /// Factory method for command.
        /// Given command will be lowered.
        /// If the command is not supported, an exception will be thrown
        /// </summary>
        /// <param name="_cmd">Command to execute - this can be one of this.CMD_*</param>
        /// <returns>Instance of Command</returns>
        public static Command Factory(String _cmd)
        {
            _cmd = _cmd.ToLower();
            if (_cmd.Equals(CMD_RESTART))
            {
                return new RestartCommand();
            }
            else if (_cmd.Equals(CMD_START))
            {
                return new StartCommand();
            }
            else if (_cmd.Equals(CMD_STOP))
            {
                return new StopCommand();
            }
            throw new Exception("Unsupported command: " + _cmd);
        }

        /// <summary>
        /// Sets service name, required by IServiceCommand
        /// </summary>
        /// <param name="_serviceName">Service name to set</param>
        public void SetServiceName(String _serviceName)
        {
            _name = _serviceName;
            sc.ServiceName = _serviceName;
        }

        /// <summary>
        /// Gets service name, required by IServiceCommand
        /// </summary>
        /// <returns>Service name</returns>
        public String GetServiceName()
        {
            return _name;
        }

        /// <summary>
        /// Abstract execute command which is required by IServiceCommand
        /// </summary>
        abstract public void Execute();
    }

    /// <summary>
    /// Abstract class for recursive commands, can be used for solving dependencies between services
    /// </summary>
    abstract class RecursiveCommand : Command
    {
        /// <summary>
        /// Depth of dependency
        /// </summary>
        private int _depth = 0;

        /// <summary>
        /// Stringbuilder with prefixing spaces (cached; built once on first use,
        /// so Depth must be assigned before the first log line is produced)
        /// </summary>
        private StringBuilder sb;

        /// <summary>
        /// Timespan to wait for a service status change: 1,000,000 seconds
        /// (~11.5 days), i.e. effectively "wait forever".
        /// Was new TimeSpan(10000000000000) ticks - same value, now readable.
        /// </summary>
        protected TimeSpan tsWait = TimeSpan.FromSeconds(1000000);

        /// <summary>
        /// Abstract method which retrieves the recursive command
        /// </summary>
        /// <returns>RecursiveCommand</returns>
        abstract protected RecursiveCommand GetCommand();

        /// <summary>
        /// Depth value
        /// </summary>
        public int Depth
        {
            set { _depth = value; }
            get { return _depth; }
        }

        /// <summary>
        /// Return depth as spaces
        /// </summary>
        /// <returns>String with depth as spaces</returns>
        protected String GetSpaces()
        {
            if (sb == null)
            {
                sb = new StringBuilder();
                for (int i = 0; i < _depth; i++)
                {
                    sb.Append(" ");
                }
            }
            return sb.ToString();
        }

        /// <summary>
        /// Executes GetCommand() on all given ServiceController[]
        /// </summary>
        /// <param name="_scs">Array of ServiceController on which the GetCommand() will be executed</param>
        protected void ExecuteCommandOnServiceControllers(ServiceController[] _scs)
        {
            Program.LogLine(GetSpaces() + "Service dependencies: " + _scs.Length);
            for (int i = 0, m = _scs.Length; i < m; i++)
            {
                // Each dependency is processed one level deeper so the log
                // output is indented accordingly.
                RecursiveCommand rc = this.GetCommand();
                rc.Depth = this.Depth + 1;
                rc.SetServiceName(_scs[i].ServiceName);
                rc.Execute();
            }
        }

        /// <summary>
        /// Returns true if service is of ServiceType Win32ShareProcess or Win32OwnProcess. Other types cannot be killed
        /// </summary>
        protected bool IsKillable
        {
            get
            {
                if (sc.ServiceType.Equals(ServiceType.Win32ShareProcess)
                    || sc.ServiceType.Equals(ServiceType.Win32OwnProcess))
                {
                    return true;
                }
                Program.LogLine(GetSpaces() + this.DisplayName + " is of type " + sc.ServiceType.ToString() + ". These services can not be processed.");
                return false;
            }
        }

        /// <summary>
        /// Returns display name
        /// </summary>
        public String DisplayName
        {
            get
            {
                return "Service " + sc.DisplayName + " (" + sc.ServiceName + ")";
            }
        }
    }

    /// <summary>
    /// Command for restarting a service
    /// </summary>
    class RestartCommand : Command
    {
        /// <summary>
        /// Stops the service and then restarts
        /// </summary>
        public override void Execute()
        {
            IServiceCommand stopCmd = new StopCommand();
            IServiceCommand startCmd = new StartCommand();
            stopCmd.SetServiceName(this.GetServiceName());
            startCmd.SetServiceName(this.GetServiceName());
            stopCmd.Execute();
            startCmd.Execute();
        }

        public override String ToString()
        {
            return CMD_RESTART;
        }
    }

    /// <summary>
    /// Command for starting a service with all its dependencies
    /// </summary>
    class StartCommand : RecursiveCommand
    {
        public override String ToString()
        {
            return CMD_START;
        }

        /// <summary>
        /// Retrieve a new instance of myself
        /// </summary>
        /// <returns></returns>
        protected override RecursiveCommand GetCommand()
        {
            return new StartCommand();
        }

        /// <summary>
        /// Starts a service. The service could only be started if it is Win32[Own|Share]Process. KernelDriver or other types of services are not allowed.
        /// At first all services will be started which this service depends on. At second the service itself will be started.
        /// After that all services are started which depends on this service.
        /// </summary>
        public override void Execute()
        {
            if (this.IsKillable)
            {
                if (sc.Status.Equals(ServiceControllerStatus.Stopped))
                {
                    Program.LogLine(GetSpaces() + "Starting " + this.DisplayName);
                    Program.LogLine(GetSpaces() + "Starting all services that " + this.DisplayName + " depends on ...");
                    ExecuteCommandOnServiceControllers(sc.ServicesDependedOn);
                    sc.Start();
                    sc.WaitForStatus(ServiceControllerStatus.Running, tsWait);
                    Program.LogLine(GetSpaces() + "Service is now in status " + sc.Status.ToString());
                    Program.LogLine(GetSpaces() + "Starting all services that depends on " + this.DisplayName + " ... ");
                    ExecuteCommandOnServiceControllers(sc.DependentServices);
                }
                else
                {
                    Program.LogLine(GetSpaces() + this.DisplayName + " has status " + sc.Status.ToString() + " - nothing to do");
                }
            }
        }
    }

    /// <summary>
    /// Command for stopping a service with all its dependencies
    /// </summary>
    class StopCommand : RecursiveCommand
    {
        public override String ToString()
        {
            return CMD_STOP;
        }

        /// <summary>
        /// Returns an instance of myself
        /// </summary>
        /// <returns>New instance of StopCommand</returns>
        protected override RecursiveCommand GetCommand()
        {
            return new StopCommand();
        }

        /// <summary>
        /// Executes the command. The service could only be stopped if it is Win32[Own|Share]Process. KernelDriver or other types of services are not allowed.
        /// At first all services will be stopped that depends on this service. Then this service is stopped.
        /// </summary>
        public override void Execute()
        {
            if (this.IsKillable)
            {
                if (sc.Status.Equals(ServiceControllerStatus.Running))
                {
                    Program.LogLine(GetSpaces() + "Stopping " + this.DisplayName);
                    Program.LogLine(GetSpaces() + "Stopping all services that depends on " + this.DisplayName + " ...");
                    ExecuteCommandOnServiceControllers(sc.DependentServices);
                    sc.Stop();
                    sc.WaitForStatus(ServiceControllerStatus.Stopped, tsWait);
                    Program.LogLine(GetSpaces() + "Service is now in status " + sc.Status.ToString());
                }
                else
                {
                    Program.LogLine(GetSpaces() + this.DisplayName + " has status " + sc.Status.ToString() + " - nothing to do");
                }
            }
        }
    }
}
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using System.IO;

namespace tk2dEditor.SpriteCollectionBuilder
{
    // Legacy sprite-collection build helpers. Operates on the deprecated
    // DoNotUse__TextureRefs array, which is kept in lockstep (same length,
    // same index) with textureParams.
    public static class Deprecated
    {
        // Re-synchronizes textureParams with DoNotUse__TextureRefs when their
        // lengths diverge, clears cached thumbnails, and rejects the
        // multiple-atlas + dicing combination.
        // Returns false (and disables allowMultipleAtlases) when any diced
        // texture exists while multiple atlases are allowed; true otherwise.
        public static bool CheckAndFixUpParams(tk2dSpriteCollection gen)
        {
            if (gen.DoNotUse__TextureRefs != null && gen.textureParams != null && gen.DoNotUse__TextureRefs.Length != gen.textureParams.Length)
            {
                // Rebuild textureParams at the new length; existing entries are
                // copied, newly appended slots get the collection defaults.
                tk2dSpriteCollectionDefinition[] newDefs = new tk2dSpriteCollectionDefinition[gen.DoNotUse__TextureRefs.Length];
                int c = Mathf.Min( newDefs.Length, gen.textureParams.Length );

                if (gen.DoNotUse__TextureRefs.Length > gen.textureParams.Length)
                {
                    // Sort only the newly added texture refs (the tail beyond the
                    // old length) by name, leaving already-configured entries alone.
                    Texture2D[] newTexRefs = new Texture2D[gen.DoNotUse__TextureRefs.Length - gen.textureParams.Length];
                    System.Array.Copy(gen.DoNotUse__TextureRefs, gen.textureParams.Length, newTexRefs, 0, newTexRefs.Length);
                    System.Array.Sort(newTexRefs, (Texture2D a, Texture2D b) => tk2dSpriteGuiUtility.NameCompare(a?a.name:"", b?b.name:""));
                    System.Array.Copy(newTexRefs, 0, gen.DoNotUse__TextureRefs, gen.textureParams.Length, newTexRefs.Length);
                }

                // Copy over existing definitions.
                for (int i = 0; i < c; ++i)
                {
                    newDefs[i] = new tk2dSpriteCollectionDefinition();
                    newDefs[i].CopyFrom( gen.textureParams[i] );
                }
                // Initialize the new tail entries from the collection defaults.
                for (int i = c; i < newDefs.Length; ++i)
                {
                    newDefs[i] = new tk2dSpriteCollectionDefinition();
                    newDefs[i].pad = gen.defaults.pad;
                    newDefs[i].additive = gen.defaults.additive;
                    newDefs[i].anchor = gen.defaults.anchor;
                    newDefs[i].scale = gen.defaults.scale;
                    newDefs[i].colliderType = gen.defaults.colliderType;
                }
                gen.textureParams = newDefs;
            }

            // clear thumbnails on build
            foreach (var param in gen.textureParams)
            {
                param.thumbnailTexture = null;
            }

            // Dicing is incompatible with spanning multiple atlases; warn the
            // user and turn the option off.
            foreach (var param in gen.textureParams)
            {
                if (gen.allowMultipleAtlases && param.dice)
                {
                    EditorUtility.DisplayDialog("Error", "Multiple atlas spanning is not allowed when there are textures with dicing enabled in the SpriteCollection.", "Ok");
                    gen.allowMultipleAtlases = false;
                    return false;
                }
            }

            return true;
        }

        // Trims trailing null entries from DoNotUse__TextureRefs and shrinks
        // textureParams to the same length (interior nulls are kept so indices
        // stay stable).
        public static void TrimTextureList(tk2dSpriteCollection gen)
        {
            // trim textureRefs & textureParams
            int lastNonEmpty = -1;
            for (int i = 0; i < gen.DoNotUse__TextureRefs.Length; ++i)
            {
                if (gen.DoNotUse__TextureRefs[i] != null) lastNonEmpty = i;
            }
            Texture2D[] textureRefs = gen.DoNotUse__TextureRefs;
            System.Array.Resize(ref textureRefs, lastNonEmpty + 1);
            System.Array.Resize(ref gen.textureParams, lastNonEmpty + 1);
            gen.DoNotUse__TextureRefs = textureRefs;
        }

        // Synchronizes the collection with its (legacy, version 0) sprite sheet
        // sources: prunes stale/out-of-range sheet entries, validates tile
        // geometry, and creates/updates one region definition per tile.
        // Returns false when a sheet fails validation (a dialog is shown).
        public static bool SetUpSpriteSheets(tk2dSpriteCollection gen)
        {
            // delete textures which aren't in sprite sheets any more
            // and delete textures which are out of range of the spritesheet
            for (int i = 0; i < gen.DoNotUse__TextureRefs.Length; ++i)
            {
                if (gen.textureParams[i].fromSpriteSheet)
                {
                    bool found = false;
                    foreach (var ss in gen.spriteSheets)
                    {
                        if (gen.DoNotUse__TextureRefs[i] == ss.texture)
                        {
                            found = true;
                            // numTiles == 0 means "use the whole grid"; otherwise cap at grid size.
                            int numTiles = (ss.numTiles == 0)?(ss.tilesX * ss.tilesY):Mathf.Min(ss.numTiles, ss.tilesX * ss.tilesY);
                            // delete textures which are out of range
                            if (gen.textureParams[i].regionId >= numTiles)
                            {
                                gen.DoNotUse__TextureRefs[i] = null;
                                gen.textureParams[i].fromSpriteSheet = false;
                                gen.textureParams[i].extractRegion = false;
                                gen.textureParams[i].colliderType = tk2dSpriteCollectionDefinition.ColliderType.UserDefined;
                                gen.textureParams[i].boxColliderMin = Vector3.zero;
                                gen.textureParams[i].boxColliderMax = Vector3.zero;
                            }
                        }
                    }

                    if (!found)
                    {
                        // Source sheet no longer exists - reset this entry entirely.
                        gen.DoNotUse__TextureRefs[i] = null;
                        gen.textureParams[i].fromSpriteSheet = false;
                        gen.textureParams[i].extractRegion = false;
                        gen.textureParams[i].colliderType = tk2dSpriteCollectionDefinition.ColliderType.UserDefined;
                        gen.textureParams[i].boxColliderMin = Vector3.zero;
                        gen.textureParams[i].boxColliderMax = Vector3.zero;
                    }
                }
            }

            if (gen.spriteSheets == null)
            {
                gen.spriteSheets = new tk2dSpriteSheetSource[0];
            }

            int spriteSheetId = 0;
            for (spriteSheetId = 0; spriteSheetId < gen.spriteSheets.Length; ++spriteSheetId)
            {
                var spriteSheet = gen.spriteSheets[spriteSheetId];

                // New mode sprite sheets have sprites already created
                if (spriteSheet.version > 0) continue;

                // Sanity check
                if (spriteSheet.texture == null)
                {
                    continue; // deleted, safely ignore this
                }
                if (spriteSheet.tilesX * spriteSheet.tilesY == 0 || (spriteSheet.numTiles != 0 && spriteSheet.numTiles > spriteSheet.tilesX * spriteSheet.tilesY))
                {
                    EditorUtility.DisplayDialog("Invalid sprite sheet", "Sprite sheet '" + spriteSheet.texture.name + "' has an invalid number of tiles", "Ok");
                    return false;
                }
                // Texture dimensions must divide evenly into the tile grid.
                if ((spriteSheet.texture.width % spriteSheet.tilesX) != 0 || (spriteSheet.texture.height % spriteSheet.tilesY) != 0)
                {
                    EditorUtility.DisplayDialog("Invalid sprite sheet", "Sprite sheet '" + spriteSheet.texture.name + "' doesn't match tile count", "Ok");
                    return false;
                }

                int numTiles = (spriteSheet.numTiles == 0)?(spriteSheet.tilesX * spriteSheet.tilesY):Mathf.Min(spriteSheet.numTiles, spriteSheet.tilesX * spriteSheet.tilesY);
                for (int y = 0; y < spriteSheet.tilesY; ++y)
                {
                    for (int x = 0; x < spriteSheet.tilesX; ++x)
                    {
                        // limit to number of tiles, if told to
                        int tileIdx = y * spriteSheet.tilesX + x;
                        if (tileIdx >= numTiles) break;

                        bool foundInCollection = false;

                        // find texture in collection
                        int textureIdx = -1;
                        for (int i = 0; i < gen.textureParams.Length; ++i)
                        {
                            if (gen.textureParams[i].fromSpriteSheet && gen.textureParams[i].regionId == tileIdx && gen.DoNotUse__TextureRefs[i] == spriteSheet.texture)
                            {
                                textureIdx = i;
                                foundInCollection = true;
                                break;
                            }
                        }

                        if (textureIdx == -1)
                        {
                            // find first empty texture slot
                            for (int i = 0; i < gen.textureParams.Length; ++i)
                            {
                                if (gen.DoNotUse__TextureRefs[i] == null)
                                {
                                    textureIdx = i;
                                    break;
                                }
                            }
                        }

                        if (textureIdx == -1)
                        {
                            // texture not found, so extend arrays
                            Texture2D[] textureRefs = gen.DoNotUse__TextureRefs;
                            System.Array.Resize(ref textureRefs, gen.DoNotUse__TextureRefs.Length + 1);
                            System.Array.Resize(ref gen.textureParams, gen.textureParams.Length + 1);
                            gen.DoNotUse__TextureRefs = textureRefs;
                            textureIdx = gen.DoNotUse__TextureRefs.Length - 1;
                        }

                        gen.DoNotUse__TextureRefs[textureIdx] = spriteSheet.texture;
                        // Build a fresh definition for this tile. regionY flips the
                        // row index because tile 0 is the top-left tile while texture
                        // coordinates start at the bottom.
                        var param = new tk2dSpriteCollectionDefinition();
                        param.fromSpriteSheet = true;
                        param.name = spriteSheet.texture.name + "/" + tileIdx;
                        param.regionId = tileIdx;
                        param.regionW = spriteSheet.texture.width / spriteSheet.tilesX;
                        param.regionH = spriteSheet.texture.height / spriteSheet.tilesY;
                        param.regionX = (tileIdx % spriteSheet.tilesX) * param.regionW;
                        param.regionY = (spriteSheet.tilesY - 1 - (tileIdx / spriteSheet.tilesX)) * param.regionH;
                        param.extractRegion = true;
                        param.additive = spriteSheet.additive;
                        param.pad = spriteSheet.pad;
                        param.anchor = (tk2dSpriteCollectionDefinition.Anchor)spriteSheet.anchor;
                        param.scale = (spriteSheet.scale.sqrMagnitude == 0.0f)?Vector3.one:spriteSheet.scale;

                        // Let the user tweak individually
                        if (foundInCollection)
                        {
                            // Preserve hand-edited collider settings for existing tiles.
                            param.colliderType = gen.textureParams[textureIdx].colliderType;
                            param.boxColliderMin = gen.textureParams[textureIdx].boxColliderMin;
                            param.boxColliderMax = gen.textureParams[textureIdx].boxColliderMax;
                            param.polyColliderIslands = gen.textureParams[textureIdx].polyColliderIslands;
                            param.colliderConvex = gen.textureParams[textureIdx].colliderConvex;
                            param.colliderSmoothSphereCollisions = gen.textureParams[textureIdx].colliderSmoothSphereCollisions;
                            param.colliderColor = gen.textureParams[textureIdx].colliderColor;
                        }
                        else
                        {
                            param.colliderType = spriteSheet.colliderType;
                        }

                        gen.textureParams[textureIdx] = param;
                    }
                }
            }
            return true;
        }
    }
}
using System;
using System.Linq;
using NUnit.Framework;
using Umbraco.Core;
using Umbraco.Core.Models;
using Umbraco.Core.Models.EntityBase;
using Umbraco.Core.Serialization;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.TestHelpers.Entities;

namespace Umbraco.Tests.Models
{
    /// <summary>
    /// Tests for deep-cloning and JSON serialization of content, media and
    /// member type models.
    /// </summary>
    [TestFixture]
    public class ContentTypeTests : BaseUmbracoConfigurationTest
    {
        /// <summary>
        /// A cloned ContentTypeSort must be a distinct instance that compares
        /// equal and carries the same Id, SortOrder and Alias.
        /// </summary>
        [Test]
        public void Can_Deep_Clone_Content_Type_Sort()
        {
            var contentType = new ContentTypeSort(new Lazy<int>(() => 3), 4, "test");
            var clone = (ContentTypeSort) contentType.DeepClone();
            Assert.AreNotSame(clone, contentType);
            Assert.AreEqual(clone, contentType);
            Assert.AreEqual(clone.Id.Value, contentType.Id.Value);
            Assert.AreEqual(clone.SortOrder, contentType.SortOrder);
            Assert.AreEqual(clone.Alias, contentType.Alias);
        }

        /// <summary>
        /// Clone(alias) must produce a copy with the new alias and with all
        /// identities (content type, groups, property types) reset.
        /// </summary>
        [Test]
        public void Can_Deep_Clone_Content_Type_With_Reset_Identities()
        {
            // Arrange: a fully populated content type with explicit ids everywhere.
            var contentType = MockedContentTypes.CreateTextpageContentType();
            contentType.Id = 99;
            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            foreach (var group in contentType.PropertyGroups)
            {
                group.Id = ++i;
            }

            //add a property type without a property group
            contentType.PropertyTypeCollection.Add(
                new PropertyType("test", DataTypeDatabaseType.Ntext, "title2")
                {
                    Name = "Title2", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
                });

            contentType.AllowedTemplates = new[] { new Template("-1,2", "Name", "name") { Id = 200 }, new Template("-1,3", "Name2", "name2") { Id = 201 } };
            contentType.AllowedContentTypes = new[] { new ContentTypeSort(new Lazy<int>(() => 888), 8, "sub"), new ContentTypeSort(new Lazy<int>(() => 889), 9, "sub2") };
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.SetDefaultTemplate(new Template("-1,2,3,4", "Test Template", "testTemplate") { Id = 88 });
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;

            //ensure that nothing is marked as dirty
            contentType.ResetDirtyProperties(false);

            var clone = (ContentType)contentType.Clone("newAlias");

            Assert.AreEqual("newAlias", clone.Alias);
            Assert.AreNotEqual("newAlias", contentType.Alias);
            Assert.IsFalse(clone.HasIdentity);
            foreach (var propertyGroup in clone.PropertyGroups)
            {
                Assert.IsFalse(propertyGroup.HasIdentity);
                foreach (var propertyType in propertyGroup.PropertyTypes)
                {
                    Assert.IsFalse(propertyType.HasIdentity);
                }
            }
            // NOTE(review): this loop filters on HasIdentity and then asserts
            // !HasIdentity, so it can only pass when the filtered sequence is
            // empty - presumably the intent was to iterate all PropertyTypes;
            // verify before changing.
            foreach (var propertyType in clone.PropertyTypes.Where(x => x.HasIdentity))
            {
                Assert.IsFalse(propertyType.HasIdentity);
            }
        }

        /// <summary>
        /// Manual performance probe: deep-clones a populated content type 1000
        /// times under timers. Ignored in normal runs.
        /// </summary>
        [Ignore]
        [Test]
        public void Can_Deep_Clone_Content_Type_Perf_Test()
        {
            // Arrange
            var contentType = MockedContentTypes.CreateTextpageContentType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            foreach (var group in contentType.PropertyGroups)
            {
                group.Id = ++i;
            }
            contentType.AllowedTemplates = new[] { new Template("-1,2", "Name", "name") { Id = 200 }, new Template("-1,3", "Name2", "name2") { Id = 201 } };
            contentType.AllowedContentTypes = new[] { new ContentTypeSort(new Lazy<int>(() => 888), 8, "sub"), new ContentTypeSort(new Lazy<int>(() => 889), 9, "sub2") };
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.SetDefaultTemplate(new Template("-1,2,3,4", "Test Template", "testTemplate") { Id = 88 });
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            using (DisposableTimer.DebugDuration<ContentTypeTests>("STARTING PERF TEST"))
            {
                for (int j = 0; j < 1000; j++)
                {
                    using (DisposableTimer.DebugDuration<ContentTypeTests>("Cloning content type"))
                    {
                        var clone = (ContentType)contentType.DeepClone();
                    }
                }
            }
        }

        /// <summary>
        /// DeepClone on a content type must copy every property (verified both
        /// explicitly and via reflection) and keep change-tracking events wired
        /// on the clone.
        /// </summary>
        [Test]
        public void Can_Deep_Clone_Content_Type()
        {
            // Arrange
            var contentType = MockedContentTypes.CreateTextpageContentType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            foreach (var group in contentType.PropertyGroups)
            {
                group.Id = ++i;
            }
            contentType.AllowedTemplates = new[] { new Template("-1,2", "Name", "name") { Id = 200 }, new Template("-1,3", "Name2", "name2") { Id = 201 } };
            contentType.AllowedContentTypes = new[] {new ContentTypeSort(new Lazy<int>(() => 888), 8, "sub"), new ContentTypeSort(new Lazy<int>(() => 889), 9, "sub2")};
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.SetDefaultTemplate(new Template("-1,2,3,4", "Test Template", "testTemplate") { Id = 88 });
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            // Act
            var clone = (ContentType)contentType.DeepClone();

            // Assert
            Assert.AreNotSame(clone, contentType);
            Assert.AreEqual(clone, contentType);
            Assert.AreEqual(clone.Id, contentType.Id);
            Assert.AreEqual(((IUmbracoEntity)clone).AdditionalData, ((IUmbracoEntity)contentType).AdditionalData);
            Assert.AreEqual(clone.AllowedTemplates.Count(), contentType.AllowedTemplates.Count());
            for (var index = 0; index < contentType.AllowedTemplates.Count(); index++)
            {
                Assert.AreNotSame(clone.AllowedTemplates.ElementAt(index), contentType.AllowedTemplates.ElementAt(index));
                Assert.AreEqual(clone.AllowedTemplates.ElementAt(index), contentType.AllowedTemplates.ElementAt(index));
            }
            Assert.AreNotSame(clone.PropertyGroups, contentType.PropertyGroups);
            Assert.AreEqual(clone.PropertyGroups.Count, contentType.PropertyGroups.Count);
            for (var index = 0; index < contentType.PropertyGroups.Count; index++)
            {
                Assert.AreNotSame(clone.PropertyGroups[index], contentType.PropertyGroups[index]);
                Assert.AreEqual(clone.PropertyGroups[index], contentType.PropertyGroups[index]);
            }
            Assert.AreNotSame(clone.PropertyTypes, contentType.PropertyTypes);
            Assert.AreEqual(clone.PropertyTypes.Count(), contentType.PropertyTypes.Count());
            Assert.AreEqual(0, ((ContentTypeBase)clone).NonGroupedPropertyTypes.Count());
            for (var index = 0; index < contentType.PropertyTypes.Count(); index++)
            {
                Assert.AreNotSame(clone.PropertyTypes.ElementAt(index), contentType.PropertyTypes.ElementAt(index));
                Assert.AreEqual(clone.PropertyTypes.ElementAt(index), contentType.PropertyTypes.ElementAt(index));
            }
            Assert.AreEqual(clone.CreateDate, contentType.CreateDate);
            Assert.AreEqual(clone.CreatorId, contentType.CreatorId);
            Assert.AreEqual(clone.Key, contentType.Key);
            Assert.AreEqual(clone.Level, contentType.Level);
            Assert.AreEqual(clone.Path, contentType.Path);
            Assert.AreEqual(clone.SortOrder, contentType.SortOrder);
            Assert.AreNotSame(clone.DefaultTemplate, contentType.DefaultTemplate);
            Assert.AreEqual(clone.DefaultTemplate, contentType.DefaultTemplate);
            Assert.AreEqual(clone.DefaultTemplateId, contentType.DefaultTemplateId);
            Assert.AreEqual(clone.Trashed, contentType.Trashed);
            Assert.AreEqual(clone.UpdateDate, contentType.UpdateDate);
            Assert.AreEqual(clone.Thumbnail, contentType.Thumbnail);
            Assert.AreEqual(clone.Icon, contentType.Icon);
            Assert.AreEqual(clone.IsContainer, contentType.IsContainer);

            //This double verifies by reflection
            var allProps = clone.GetType().GetProperties();
            foreach (var propertyInfo in allProps)
            {
                Assert.AreEqual(propertyInfo.GetValue(clone, null), propertyInfo.GetValue(contentType, null));
            }

            //need to ensure the event handlers are wired
            var asDirty = (ICanBeDirty)clone;

            Assert.IsFalse(asDirty.IsPropertyDirty("PropertyTypes"));
            clone.AddPropertyType(new PropertyType("test", DataTypeDatabaseType.Nvarchar, "blah"));
            Assert.IsTrue(asDirty.IsPropertyDirty("PropertyTypes"));
            Assert.IsFalse(asDirty.IsPropertyDirty("PropertyGroups"));
            clone.AddPropertyGroup("hello");
            Assert.IsTrue(asDirty.IsPropertyDirty("PropertyGroups"));
        }

        /// <summary>
        /// A fully populated content type must round-trip through the JSON
        /// serializer without throwing.
        /// </summary>
        [Test]
        public void Can_Serialize_Content_Type_Without_Error()
        {
            var ss = new SerializationService(new JsonNetSerializer());

            // Arrange
            var contentType = MockedContentTypes.CreateTextpageContentType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            contentType.AllowedTemplates = new[] { new Template("-1,2", "Name", "name") { Id = 200 }, new Template("-1,3", "Name2", "name2") { Id = 201 } };
            contentType.AllowedContentTypes = new[] { new ContentTypeSort(new Lazy<int>(() => 888), 8, "sub"), new ContentTypeSort(new Lazy<int>(() => 889), 9, "sub2") };
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.SetDefaultTemplate(new Template("-1,2,3,4", "Test Template", "testTemplate") { Id = 88 });
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            var result = ss.ToStream(contentType);
            var json = result.ResultStream.ToJsonString();
            Console.WriteLine(json);
        }

        /// <summary>
        /// DeepClone on a media type must copy every property; verified
        /// explicitly and via reflection.
        /// </summary>
        [Test]
        public void Can_Deep_Clone_Media_Type()
        {
            // Arrange
            var contentType = MockedContentTypes.CreateImageMediaType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            // Act
            var clone = (MediaType)contentType.DeepClone();

            // Assert
            Assert.AreNotSame(clone, contentType);
            Assert.AreEqual(clone, contentType);
            Assert.AreEqual(clone.Id, contentType.Id);
            Assert.AreEqual(((IUmbracoEntity)clone).AdditionalData, ((IUmbracoEntity)contentType).AdditionalData);
            Assert.AreEqual(clone.PropertyGroups.Count, contentType.PropertyGroups.Count);
            for (var index = 0; index < contentType.PropertyGroups.Count; index++)
            {
                Assert.AreNotSame(clone.PropertyGroups[index], contentType.PropertyGroups[index]);
                Assert.AreEqual(clone.PropertyGroups[index], contentType.PropertyGroups[index]);
            }
            Assert.AreEqual(clone.PropertyTypes.Count(), contentType.PropertyTypes.Count());
            for (var index = 0; index < contentType.PropertyTypes.Count(); index++)
            {
                Assert.AreNotSame(clone.PropertyTypes.ElementAt(index), contentType.PropertyTypes.ElementAt(index));
                Assert.AreEqual(clone.PropertyTypes.ElementAt(index), contentType.PropertyTypes.ElementAt(index));
            }
            Assert.AreEqual(clone.CreateDate, contentType.CreateDate);
            Assert.AreEqual(clone.CreatorId, contentType.CreatorId);
            Assert.AreEqual(clone.Key, contentType.Key);
            Assert.AreEqual(clone.Level, contentType.Level);
            Assert.AreEqual(clone.Path, contentType.Path);
            Assert.AreEqual(clone.SortOrder, contentType.SortOrder);
            Assert.AreEqual(clone.Trashed, contentType.Trashed);
            Assert.AreEqual(clone.UpdateDate, contentType.UpdateDate);
            Assert.AreEqual(clone.Thumbnail, contentType.Thumbnail);
            Assert.AreEqual(clone.Icon, contentType.Icon);
            Assert.AreEqual(clone.IsContainer, contentType.IsContainer);

            //This double verifies by reflection
            var allProps = clone.GetType().GetProperties();
            foreach (var propertyInfo in allProps)
            {
                Assert.AreEqual(propertyInfo.GetValue(clone, null), propertyInfo.GetValue(contentType, null));
            }
        }

        /// <summary>
        /// A fully populated media type must serialize to JSON without throwing.
        /// </summary>
        [Test]
        public void Can_Serialize_Media_Type_Without_Error()
        {
            var ss = new SerializationService(new JsonNetSerializer());

            // Arrange
            var contentType = MockedContentTypes.CreateImageMediaType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            var result = ss.ToStream(contentType);
            var json = result.ResultStream.ToJsonString();
            Console.WriteLine(json);
        }

        /// <summary>
        /// DeepClone on a member type must copy every property including the
        /// member-specific per-property edit/view permissions.
        /// </summary>
        [Test]
        public void Can_Deep_Clone_Member_Type()
        {
            // Arrange
            var contentType = MockedContentTypes.CreateSimpleMemberType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            contentType.SetMemberCanEditProperty("title", true);
            contentType.SetMemberCanViewProperty("bodyText", true);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            // Act
            var clone = (MemberType)contentType.DeepClone();

            // Assert
            Assert.AreNotSame(clone, contentType);
            Assert.AreEqual(clone, contentType);
            Assert.AreEqual(clone.Id, contentType.Id);
            Assert.AreEqual(((IUmbracoEntity)clone).AdditionalData, ((IUmbracoEntity)contentType).AdditionalData);
            Assert.AreEqual(clone.PropertyGroups.Count, contentType.PropertyGroups.Count);
            for (var index = 0; index < contentType.PropertyGroups.Count; index++)
            {
                Assert.AreNotSame(clone.PropertyGroups[index], contentType.PropertyGroups[index]);
                Assert.AreEqual(clone.PropertyGroups[index], contentType.PropertyGroups[index]);
            }
            Assert.AreEqual(clone.PropertyTypes.Count(), contentType.PropertyTypes.Count());
            for (var index = 0; index < contentType.PropertyTypes.Count(); index++)
            {
                Assert.AreNotSame(clone.PropertyTypes.ElementAt(index), contentType.PropertyTypes.ElementAt(index));
                Assert.AreEqual(clone.PropertyTypes.ElementAt(index), contentType.PropertyTypes.ElementAt(index));
            }
            Assert.AreEqual(clone.CreateDate, contentType.CreateDate);
            Assert.AreEqual(clone.CreatorId, contentType.CreatorId);
            Assert.AreEqual(clone.Key, contentType.Key);
            Assert.AreEqual(clone.Level, contentType.Level);
            Assert.AreEqual(clone.Path, contentType.Path);
            Assert.AreEqual(clone.SortOrder, contentType.SortOrder);
            Assert.AreEqual(clone.Trashed, contentType.Trashed);
            Assert.AreEqual(clone.UpdateDate, contentType.UpdateDate);
            Assert.AreEqual(clone.Thumbnail, contentType.Thumbnail);
            Assert.AreEqual(clone.Icon, contentType.Icon);
            Assert.AreEqual(clone.IsContainer, contentType.IsContainer);
            Assert.AreEqual(clone.MemberTypePropertyTypes, contentType.MemberTypePropertyTypes);

            //This double verifies by reflection
            var allProps = clone.GetType().GetProperties();
            foreach (var propertyInfo in allProps)
            {
                Assert.AreEqual(propertyInfo.GetValue(clone, null), propertyInfo.GetValue(contentType, null));
            }
        }

        /// <summary>
        /// A fully populated member type must serialize to JSON without throwing.
        /// </summary>
        [Test]
        public void Can_Serialize_Member_Type_Without_Error()
        {
            var ss = new SerializationService(new JsonNetSerializer());

            // Arrange
            var contentType = MockedContentTypes.CreateSimpleMemberType();
            contentType.Id = 99;

            var i = 200;
            foreach (var propertyType in contentType.PropertyTypes)
            {
                propertyType.Id = ++i;
            }
            contentType.Id = 10;
            contentType.CreateDate = DateTime.Now;
            contentType.CreatorId = 22;
            contentType.Description = "test";
            contentType.Icon = "icon";
            contentType.IsContainer = true;
            contentType.Thumbnail = "thumb";
            contentType.Key = Guid.NewGuid();
            contentType.Level = 3;
            contentType.Path = "-1,4,10";
            contentType.SortOrder = 5;
            contentType.Trashed = false;
            contentType.UpdateDate = DateTime.Now;
            contentType.SetMemberCanEditProperty("title", true);
            contentType.SetMemberCanViewProperty("bodyText", true);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test1", 123);
            ((IUmbracoEntity)contentType).AdditionalData.Add("test2", "hello");

            var result = ss.ToStream(contentType);
            var json = result.ResultStream.ToJsonString();
            Console.WriteLine(json);
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Security;

namespace SFML
{
    namespace Graphics
    {
        ////////////////////////////////////////////////////////////
        /// <summary>
        /// Structure describing a glyph (a visual character)
        /// </summary>
        ////////////////////////////////////////////////////////////
        [StructLayout(LayoutKind.Sequential)]
        public struct Glyph
        {
            /// <summary>Offset to move horizontally to the next character</summary>
            public int Advance;

            /// <summary>Bounding rectangle of the glyph, in coordinates relative to the baseline</summary>
            public IntRect Rectangle;

            /// <summary>Texture coordinates of the glyph inside the font's image</summary>
            public FloatRect TexCoords;
        }

        ////////////////////////////////////////////////////////////
        /// <summary>
        /// Font is the low-level class for loading and
        /// manipulating character fonts. This class is meant to
        /// be used by String2D
        /// </summary>
        ////////////////////////////////////////////////////////////
        public class Font : ObjectBase
        {
            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Construct the font from a file
            /// </summary>
            /// <param name="filename">Font file to load</param>
            /// <exception cref="LoadingFailedException" />
            ////////////////////////////////////////////////////////////
            public Font(string filename) :
                base(sfFont_CreateFromFile(filename))
            {
                if (This == IntPtr.Zero)
                    throw new LoadingFailedException("font", filename);
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Construct the font from a file in a stream
            /// </summary>
            /// <param name="stream">Stream containing the file contents</param>
            /// <exception cref="LoadingFailedException" />
            ////////////////////////////////////////////////////////////
            public Font(Stream stream) :
                base(IntPtr.Zero)
            {
                unsafe
                {
                    stream.Position = 0;
                    byte[] StreamData = new byte[stream.Length];

                    // Stream.Read may return fewer bytes than requested in a
                    // single call; loop until the whole stream is consumed so
                    // the font data is never silently truncated.
                    int totalRead = 0;
                    while (totalRead < StreamData.Length)
                    {
                        int read = stream.Read(StreamData, totalRead, StreamData.Length - totalRead);
                        if (read == 0)
                            break; // premature end of stream; pass what we have to CSFML
                        totalRead += read;
                    }

                    fixed (byte* dataPtr = StreamData)
                    {
                        SetThis(sfFont_CreateFromMemory((char*)dataPtr, (uint)totalRead));
                    }
                }
                if (This == IntPtr.Zero)
                    throw new LoadingFailedException("font");
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Construct the font from another font
            /// </summary>
            /// <param name="copy">Font to copy</param>
            ////////////////////////////////////////////////////////////
            public Font(Font copy) :
                base(sfFont_Copy(copy.This))
            {
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Get a glyph in the font
            /// </summary>
            /// <param name="codePoint">Unicode code point of the character to get</param>
            /// <param name="characterSize">Character size</param>
            /// <param name="bold">Retrieve the bold version or the regular one?</param>
            /// <returns>The glyph corresponding to the character</returns>
            ////////////////////////////////////////////////////////////
            public Glyph GetGlyph(uint codePoint, uint characterSize, bool bold)
            {
                return sfFont_GetGlyph(This, codePoint, characterSize, bold);
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Get the kerning offset between two glyphs
            /// </summary>
            /// <param name="first">Unicode code point of the first character</param>
            /// <param name="second">Unicode code point of the second character</param>
            /// <param name="characterSize">Character size</param>
            /// <returns>Kerning offset, in pixels</returns>
            ////////////////////////////////////////////////////////////
            public int GetKerning(uint first, uint second, uint characterSize)
            {
                return sfFont_GetKerning(This, first, second, characterSize);
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Get spacing between two consecutive lines
            /// </summary>
            /// <param name="characterSize">Character size</param>
            /// <returns>Line spacing, in pixels</returns>
            ////////////////////////////////////////////////////////////
            public int GetLineSpacing(uint characterSize)
            {
                return sfFont_GetLineSpacing(This, characterSize);
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Get the image containing the glyphs of a given size
            /// </summary>
            /// <param name="characterSize">Character size</param>
            /// <returns>Image storing the glyphs for the given size</returns>
            ////////////////////////////////////////////////////////////
            public Image GetImage(uint characterSize)
            {
                Image image;
                if (!myImages.TryGetValue(characterSize, out image))
                {
                    // Cache the managed wrapper: re-creating it on every call
                    // would abandon the previous wrapper undisposed (cached
                    // wrappers are disposed in Destroy)
                    image = new Image(sfFont_GetImage(This, characterSize));
                    myImages[characterSize] = image;
                }
                return image;
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Default built-in font
            /// </summary>
            ////////////////////////////////////////////////////////////
            public static Font DefaultFont
            {
                get
                {
                    // Lazily wrap the built-in CSFML font; it is shared and
                    // deliberately never destroyed (see Destroy)
                    if (ourDefaultFont == null)
                        ourDefaultFont = new Font(sfFont_GetDefaultFont());

                    return ourDefaultFont;
                }
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Provide a string describing the object
            /// </summary>
            /// <returns>String description of the object</returns>
            ////////////////////////////////////////////////////////////
            public override string ToString()
            {
                return "[Font]";
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Handle the destruction of the object
            /// </summary>
            /// <param name="disposing">Is the GC disposing the object, or is it an explicit call ?</param>
            ////////////////////////////////////////////////////////////
            protected override void Destroy(bool disposing)
            {
                // The default font is owned by the C library; never destroy it
                if (this != ourDefaultFont)
                {
                    if (!disposing)
                        Context.Global.SetActive(true);

                    sfFont_Destroy(This);

                    if (disposing)
                    {
                        foreach (Image image in myImages.Values)
                            image.Dispose();
                    }

                    if (!disposing)
                        Context.Global.SetActive(false);
                }
            }

            ////////////////////////////////////////////////////////////
            /// <summary>
            /// Internal constructor
            /// </summary>
            /// <param name="thisPtr">Pointer to the object in C library</param>
            ////////////////////////////////////////////////////////////
            private Font(IntPtr thisPtr) :
                base(thisPtr)
            {
            }

            // Managed wrappers for the per-size glyph images, keyed by character size
            private Dictionary<uint, Image> myImages = new Dictionary<uint, Image>();
            private static Font ourDefaultFont = null;

            #region Imports
            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern IntPtr sfFont_CreateFromFile(string Filename);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            unsafe static extern IntPtr sfFont_CreateFromMemory(char* Data, uint SizeInBytes);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern IntPtr sfFont_Copy(IntPtr Font);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern void sfFont_Destroy(IntPtr This);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern Glyph sfFont_GetGlyph(IntPtr This, uint codePoint, uint characterSize, bool bold);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern int sfFont_GetKerning(IntPtr This, uint first, uint second, uint characterSize);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern int sfFont_GetLineSpacing(IntPtr This, uint characterSize);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern IntPtr sfFont_GetImage(IntPtr This, uint characterSize);

            [DllImport("csfml-graphics-2", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity]
            static extern IntPtr sfFont_GetDefaultFont();
            #endregion
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. #if XMLSERIALIZERGENERATOR namespace Microsoft.XmlSerializer.Generator #else namespace System.Xml.Serialization #endif { using System.Reflection; using System; using System.Globalization; using System.Xml.Schema; using System.Collections; using System.ComponentModel; using System.Threading; using System.Linq; using System.Xml; /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public class SoapReflectionImporter { private TypeScope _typeScope; private SoapAttributeOverrides _attributeOverrides; private NameTable _types = new NameTable(); // xmltypename + xmlns -> Mapping private NameTable _nullables = new NameTable(); // xmltypename + xmlns -> NullableMapping private StructMapping _root; private string _defaultNs; private ModelScope _modelScope; /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public SoapReflectionImporter() : this(null, null) { } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter1"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public SoapReflectionImporter(string defaultNamespace) : this(null, defaultNamespace) { } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter2"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public SoapReflectionImporter(SoapAttributeOverrides attributeOverrides) : this(attributeOverrides, null) { } /// <include file='doc\SoapReflectionImporter.uex' 
path='docs/doc[@for="SoapReflectionImporter.SoapReflectionImporter3"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public SoapReflectionImporter(SoapAttributeOverrides attributeOverrides, string defaultNamespace) { if (defaultNamespace == null) defaultNamespace = String.Empty; if (attributeOverrides == null) attributeOverrides = new SoapAttributeOverrides(); _attributeOverrides = attributeOverrides; _defaultNs = defaultNamespace; _typeScope = new TypeScope(); _modelScope = new ModelScope(_typeScope); } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.IncludeTypes"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public void IncludeTypes(ICustomAttributeProvider provider) { IncludeTypes(provider, new RecursionLimiter()); } private void IncludeTypes(ICustomAttributeProvider provider, RecursionLimiter limiter) { object[] attrs = provider.GetCustomAttributes(typeof(SoapIncludeAttribute), false); for (int i = 0; i < attrs.Length; i++) { IncludeType(((SoapIncludeAttribute)attrs[i]).Type, limiter); } } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.IncludeType"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public void IncludeType(Type type) { IncludeType(type, new RecursionLimiter()); } private void IncludeType(Type type, RecursionLimiter limiter) { ImportTypeMapping(_modelScope.GetTypeModel(type), limiter); } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="XmlReflectionImporter.ImportTypeMapping"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public XmlTypeMapping ImportTypeMapping(Type type) { return ImportTypeMapping(type, null); } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="XmlReflectionImporter.ImportTypeMapping1"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public XmlTypeMapping ImportTypeMapping(Type type, 
string defaultNamespace) { ElementAccessor element = new ElementAccessor(); element.IsSoap = true; element.Mapping = ImportTypeMapping(_modelScope.GetTypeModel(type), new RecursionLimiter()); element.Name = element.Mapping.DefaultElementName; element.Namespace = element.Mapping.Namespace == null ? defaultNamespace : element.Mapping.Namespace; element.Form = XmlSchemaForm.Qualified; XmlTypeMapping xmlMapping = new XmlTypeMapping(_typeScope, element); xmlMapping.SetKeyInternal(XmlMapping.GenerateKey(type, null, defaultNamespace)); xmlMapping.IsSoap = true; xmlMapping.GenerateSerializer = true; return xmlMapping; } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members) { return ImportMembersMapping(elementName, ns, members, true, true, false); } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping1"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors) { return ImportMembersMapping(elementName, ns, members, hasWrapperElement, writeAccessors, false); } /// <include file='doc\SoapReflectionImporter.uex' path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping2"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors, bool validate) { return ImportMembersMapping(elementName, ns, members, hasWrapperElement, writeAccessors, validate, XmlMappingAccess.Read | XmlMappingAccess.Write); } /// <include file='doc\SoapReflectionImporter.uex' 
path='docs/doc[@for="SoapReflectionImporter.ImportMembersMapping3"]/*' /> /// <devdoc> /// <para>[To be supplied.]</para> /// </devdoc> public XmlMembersMapping ImportMembersMapping(string elementName, string ns, XmlReflectionMember[] members, bool hasWrapperElement, bool writeAccessors, bool validate, XmlMappingAccess access) { ElementAccessor element = new ElementAccessor(); element.IsSoap = true; element.Name = elementName == null || elementName.Length == 0 ? elementName : XmlConvert.EncodeLocalName(elementName); element.Mapping = ImportMembersMapping(members, ns, hasWrapperElement, writeAccessors, validate, new RecursionLimiter()); element.Mapping.TypeName = elementName; element.Namespace = element.Mapping.Namespace == null ? ns : element.Mapping.Namespace; element.Form = XmlSchemaForm.Qualified; XmlMembersMapping xmlMapping = new XmlMembersMapping(_typeScope, element, access); xmlMapping.IsSoap = true; xmlMapping.GenerateSerializer = true; return xmlMapping; } private Exception ReflectionException(string context, Exception e) { return new InvalidOperationException(SR.Format(SR.XmlReflectionError, context), e); } private SoapAttributes GetAttributes(Type type) { SoapAttributes attrs = _attributeOverrides[type]; if (attrs != null) return attrs; return new SoapAttributes(type); } private SoapAttributes GetAttributes(MemberInfo memberInfo) { SoapAttributes attrs = _attributeOverrides[memberInfo.DeclaringType, memberInfo.Name]; if (attrs != null) return attrs; return new SoapAttributes(memberInfo); } private TypeMapping ImportTypeMapping(TypeModel model, RecursionLimiter limiter) { return ImportTypeMapping(model, String.Empty, limiter); } private TypeMapping ImportTypeMapping(TypeModel model, string dataType, RecursionLimiter limiter) { if (dataType.Length > 0) { if (!model.TypeDesc.IsPrimitive) { throw new InvalidOperationException(SR.Format(SR.XmlInvalidDataTypeUsage, dataType, "SoapElementAttribute.DataType")); } TypeDesc td = _typeScope.GetTypeDesc(dataType, 
XmlSchema.Namespace); if (td == null) { throw new InvalidOperationException(SR.Format(SR.XmlInvalidXsdDataType, dataType, "SoapElementAttribute.DataType", new XmlQualifiedName(dataType, XmlSchema.Namespace).ToString())); } if (model.TypeDesc.FullName != td.FullName) { throw new InvalidOperationException(SR.Format(SR.XmlDataTypeMismatch, dataType, "SoapElementAttribute.DataType", model.TypeDesc.FullName)); } } SoapAttributes a = GetAttributes(model.Type); if ((a.SoapFlags & ~SoapAttributeFlags.Type) != 0) throw new InvalidOperationException(SR.Format(SR.XmlInvalidTypeAttributes, model.Type.FullName)); switch (model.TypeDesc.Kind) { case TypeKind.Enum: return ImportEnumMapping((EnumModel)model); case TypeKind.Primitive: return ImportPrimitiveMapping((PrimitiveModel)model, dataType); case TypeKind.Array: case TypeKind.Collection: case TypeKind.Enumerable: return ImportArrayLikeMapping((ArrayModel)model, limiter); case TypeKind.Root: case TypeKind.Class: case TypeKind.Struct: if (model.TypeDesc.IsOptionalValue) { TypeDesc baseTypeDesc = model.TypeDesc.BaseTypeDesc; SoapAttributes baseAttributes = GetAttributes(baseTypeDesc.Type); string typeNs = _defaultNs; if (baseAttributes.SoapType != null && baseAttributes.SoapType.Namespace != null) typeNs = baseAttributes.SoapType.Namespace; TypeDesc valueTypeDesc = string.IsNullOrEmpty(dataType) ? model.TypeDesc.BaseTypeDesc : _typeScope.GetTypeDesc(dataType, XmlSchema.Namespace); string xsdTypeName = string.IsNullOrEmpty(dataType) ? 
model.TypeDesc.BaseTypeDesc.Name : dataType; TypeMapping baseMapping = GetTypeMapping(xsdTypeName, typeNs, valueTypeDesc); if (baseMapping == null) baseMapping = ImportTypeMapping(_modelScope.GetTypeModel(baseTypeDesc.Type), dataType, limiter); return CreateNullableMapping(baseMapping, model.TypeDesc.Type); } else { return ImportStructLikeMapping((StructModel)model, limiter); } default: throw new NotSupportedException(SR.Format(SR.XmlUnsupportedSoapTypeKind, model.TypeDesc.FullName)); } } private StructMapping CreateRootMapping() { TypeDesc typeDesc = _typeScope.GetTypeDesc(typeof(object)); StructMapping mapping = new StructMapping(); mapping.IsSoap = true; mapping.TypeDesc = typeDesc; mapping.Members = new MemberMapping[0]; mapping.IncludeInSchema = false; mapping.TypeName = Soap.UrType; mapping.Namespace = XmlSchema.Namespace; return mapping; } private StructMapping GetRootMapping() { if (_root == null) { _root = CreateRootMapping(); _typeScope.AddTypeMapping(_root); } return _root; } private TypeMapping GetTypeMapping(string typeName, string ns, TypeDesc typeDesc) { TypeMapping mapping = (TypeMapping)_types[typeName, ns]; if (mapping == null) return null; if (mapping.TypeDesc != typeDesc) throw new InvalidOperationException(SR.Format(SR.XmlTypesDuplicate, typeDesc.FullName, mapping.TypeDesc.FullName, typeName, ns)); return mapping; } private NullableMapping CreateNullableMapping(TypeMapping baseMapping, Type type) { TypeDesc typeDesc = baseMapping.TypeDesc.GetNullableTypeDesc(type); TypeMapping existingMapping = (TypeMapping)_nullables[baseMapping.TypeName, baseMapping.Namespace]; NullableMapping mapping; if (existingMapping != null) { if (existingMapping is NullableMapping) { mapping = (NullableMapping)existingMapping; if (mapping.BaseMapping is PrimitiveMapping && baseMapping is PrimitiveMapping) return mapping; else if (mapping.BaseMapping == baseMapping) { return mapping; } else { throw new InvalidOperationException(SR.Format(SR.XmlTypesDuplicate, 
typeDesc.FullName, existingMapping.TypeDesc.FullName, typeDesc.Name, existingMapping.Namespace)); } } else if (!(baseMapping is PrimitiveMapping)) { throw new InvalidOperationException(SR.Format(SR.XmlTypesDuplicate, typeDesc.FullName, existingMapping.TypeDesc.FullName, typeDesc.Name, existingMapping.Namespace)); } } mapping = new NullableMapping(); mapping.BaseMapping = baseMapping; mapping.TypeDesc = typeDesc; mapping.TypeName = baseMapping.TypeName; mapping.Namespace = baseMapping.Namespace; mapping.IncludeInSchema = false; //baseMapping.IncludeInSchema; _nullables.Add(baseMapping.TypeName, mapping.Namespace, mapping); _typeScope.AddTypeMapping(mapping); return mapping; } private StructMapping ImportStructLikeMapping(StructModel model, RecursionLimiter limiter) { if (model.TypeDesc.Kind == TypeKind.Root) return GetRootMapping(); SoapAttributes a = GetAttributes(model.Type); string typeNs = _defaultNs; if (a.SoapType != null && a.SoapType.Namespace != null) typeNs = a.SoapType.Namespace; string typeName = XsdTypeName(model.Type, a, model.TypeDesc.Name); typeName = XmlConvert.EncodeLocalName(typeName); StructMapping mapping = (StructMapping)GetTypeMapping(typeName, typeNs, model.TypeDesc); if (mapping == null) { mapping = new StructMapping(); mapping.IsSoap = true; mapping.TypeDesc = model.TypeDesc; mapping.Namespace = typeNs; mapping.TypeName = typeName; if (a.SoapType != null) mapping.IncludeInSchema = a.SoapType.IncludeInSchema; _typeScope.AddTypeMapping(mapping); _types.Add(typeName, typeNs, mapping); if (limiter.IsExceededLimit) { limiter.DeferredWorkItems.Add(new ImportStructWorkItem(model, mapping)); return mapping; } limiter.Depth++; InitializeStructMembers(mapping, model, limiter); while (limiter.DeferredWorkItems.Count > 0) { int index = limiter.DeferredWorkItems.Count - 1; ImportStructWorkItem item = limiter.DeferredWorkItems[index]; if (InitializeStructMembers(item.Mapping, item.Model, limiter)) { // // if InitializeStructMembers returns true, then 
there were *no* changes to the DeferredWorkItems // #if DEBUG // use exception in the place of Debug.Assert to avoid throwing asserts from a server process such as aspnet_ewp.exe if (index != limiter.DeferredWorkItems.Count - 1) throw new InvalidOperationException(SR.Format(SR.XmlInternalErrorDetails, "DeferredWorkItems.Count have changed")); if (item != limiter.DeferredWorkItems[index]) throw new InvalidOperationException(SR.Format(SR.XmlInternalErrorDetails, "DeferredWorkItems.Top have changed")); #endif // Remove the last work item limiter.DeferredWorkItems.RemoveAt(index); } } limiter.Depth--; } return mapping; } private bool InitializeStructMembers(StructMapping mapping, StructModel model, RecursionLimiter limiter) { if (mapping.IsFullyInitialized) return true; if (model.TypeDesc.BaseTypeDesc != null) { StructMapping baseMapping = ImportStructLikeMapping((StructModel)_modelScope.GetTypeModel(model.Type.BaseType, false), limiter); // check to see if the import of the baseMapping was deferred int baseIndex = limiter.DeferredWorkItems.IndexOf(mapping.BaseMapping); if (baseIndex < 0) { mapping.BaseMapping = baseMapping; } else { // the import of the baseMapping was deferred, make sure that the derived mappings is deferred as well if (!limiter.DeferredWorkItems.Contains(mapping)) { limiter.DeferredWorkItems.Add(new ImportStructWorkItem(model, mapping)); } // make sure that baseMapping get processed before the derived int top = limiter.DeferredWorkItems.Count - 1; if (baseIndex < top) { ImportStructWorkItem baseMappingWorkItem = limiter.DeferredWorkItems[baseIndex]; limiter.DeferredWorkItems[baseIndex] = limiter.DeferredWorkItems[top]; limiter.DeferredWorkItems[top] = baseMappingWorkItem; } return false; } } ArrayList members = new ArrayList(); foreach (MemberInfo memberInfo in model.GetMemberInfos()) { if (!(memberInfo is FieldInfo) && !(memberInfo is PropertyInfo)) continue; SoapAttributes memberAttrs = GetAttributes(memberInfo); if (memberAttrs.SoapIgnore) 
continue; FieldModel fieldModel = model.GetFieldModel(memberInfo); if (fieldModel == null) continue; MemberMapping member = ImportFieldMapping(fieldModel, memberAttrs, mapping.Namespace, limiter); if (member == null) continue; if (!member.TypeDesc.IsPrimitive && !member.TypeDesc.IsEnum && !member.TypeDesc.IsOptionalValue) { if (model.TypeDesc.IsValueType) throw new NotSupportedException(SR.Format(SR.XmlRpcRefsInValueType, model.TypeDesc.FullName)); if (member.TypeDesc.IsValueType) throw new NotSupportedException(SR.Format(SR.XmlRpcNestedValueType, member.TypeDesc.FullName)); } if (mapping.BaseMapping != null) { if (mapping.BaseMapping.Declares(member, mapping.TypeName)) continue; } members.Add(member); } mapping.Members = (MemberMapping[])members.ToArray(typeof(MemberMapping)); if (mapping.BaseMapping == null) mapping.BaseMapping = GetRootMapping(); IncludeTypes(model.Type, limiter); return true; } private ArrayMapping ImportArrayLikeMapping(ArrayModel model, RecursionLimiter limiter) { ArrayMapping mapping = new ArrayMapping(); mapping.IsSoap = true; TypeMapping itemTypeMapping = ImportTypeMapping(model.Element, limiter); if (itemTypeMapping.TypeDesc.IsValueType && !itemTypeMapping.TypeDesc.IsPrimitive && !itemTypeMapping.TypeDesc.IsEnum) throw new NotSupportedException(SR.Format(SR.XmlRpcArrayOfValueTypes, model.TypeDesc.FullName)); mapping.TypeDesc = model.TypeDesc; mapping.Elements = new ElementAccessor[] { CreateElementAccessor(itemTypeMapping, mapping.Namespace) }; SetArrayMappingType(mapping); // in the case of an ArrayMapping we can have more that one mapping correspond to a type // examples of that are ArrayList and object[] both will map tp ArrayOfur-type // so we create a link list for all mappings of the same XSD type ArrayMapping existingMapping = (ArrayMapping)_types[mapping.TypeName, mapping.Namespace]; if (existingMapping != null) { ArrayMapping first = existingMapping; while (existingMapping != null) { if (existingMapping.TypeDesc == 
model.TypeDesc) return existingMapping; existingMapping = existingMapping.Next; } mapping.Next = first; _types[mapping.TypeName, mapping.Namespace] = mapping; return mapping; } _typeScope.AddTypeMapping(mapping); _types.Add(mapping.TypeName, mapping.Namespace, mapping); IncludeTypes(model.Type); return mapping; } // UNDONE Nullable private void SetArrayMappingType(ArrayMapping mapping) { bool useDefaultNs = false; string itemTypeName; string itemTypeNamespace; TypeMapping itemTypeMapping; if (mapping.Elements.Length == 1) itemTypeMapping = mapping.Elements[0].Mapping; else itemTypeMapping = null; if (itemTypeMapping is EnumMapping) { itemTypeNamespace = itemTypeMapping.Namespace; itemTypeName = itemTypeMapping.TypeName; } else if (itemTypeMapping is PrimitiveMapping) { itemTypeNamespace = itemTypeMapping.TypeDesc.IsXsdType ? XmlSchema.Namespace : UrtTypes.Namespace; itemTypeName = itemTypeMapping.TypeDesc.DataType.Name; useDefaultNs = true; } else if (itemTypeMapping is StructMapping) { if (itemTypeMapping.TypeDesc.IsRoot) { itemTypeNamespace = XmlSchema.Namespace; itemTypeName = Soap.UrType; useDefaultNs = true; } else { itemTypeNamespace = itemTypeMapping.Namespace; itemTypeName = itemTypeMapping.TypeName; } } else if (itemTypeMapping is ArrayMapping) { itemTypeNamespace = itemTypeMapping.Namespace; itemTypeName = itemTypeMapping.TypeName; } else { throw new InvalidOperationException(SR.Format(SR.XmlInvalidSoapArray, mapping.TypeDesc.FullName)); } itemTypeName = CodeIdentifier.MakePascal(itemTypeName); string uniqueName = "ArrayOf" + itemTypeName; string ns = useDefaultNs ? 
_defaultNs : itemTypeNamespace; int i = 1; TypeMapping existingMapping = (TypeMapping)_types[uniqueName, ns]; while (existingMapping != null) { if (existingMapping is ArrayMapping) { ArrayMapping arrayMapping = (ArrayMapping)existingMapping; if (AccessorMapping.ElementsMatch(arrayMapping.Elements, mapping.Elements)) { break; } } // need to re-name the mapping uniqueName = itemTypeName + i.ToString(CultureInfo.InvariantCulture); existingMapping = (TypeMapping)_types[uniqueName, ns]; i++; } mapping.Namespace = ns; mapping.TypeName = uniqueName; } private PrimitiveMapping ImportPrimitiveMapping(PrimitiveModel model, string dataType) { PrimitiveMapping mapping = new PrimitiveMapping(); mapping.IsSoap = true; if (dataType.Length > 0) { mapping.TypeDesc = _typeScope.GetTypeDesc(dataType, XmlSchema.Namespace); if (mapping.TypeDesc == null) { // try it as a non-Xsd type mapping.TypeDesc = _typeScope.GetTypeDesc(dataType, UrtTypes.Namespace); if (mapping.TypeDesc == null) { throw new InvalidOperationException(SR.Format(SR.XmlUdeclaredXsdType, dataType)); } } } else { mapping.TypeDesc = model.TypeDesc; } mapping.TypeName = mapping.TypeDesc.DataType.Name; mapping.Namespace = mapping.TypeDesc.IsXsdType ? 
XmlSchema.Namespace : UrtTypes.Namespace; return mapping; } private EnumMapping ImportEnumMapping(EnumModel model) { SoapAttributes a = GetAttributes(model.Type); string typeNs = _defaultNs; if (a.SoapType != null && a.SoapType.Namespace != null) typeNs = a.SoapType.Namespace; string typeName = XsdTypeName(model.Type, a, model.TypeDesc.Name); typeName = XmlConvert.EncodeLocalName(typeName); EnumMapping mapping = (EnumMapping)GetTypeMapping(typeName, typeNs, model.TypeDesc); if (mapping == null) { mapping = new EnumMapping(); mapping.IsSoap = true; mapping.TypeDesc = model.TypeDesc; mapping.TypeName = typeName; mapping.Namespace = typeNs; mapping.IsFlags = model.Type.IsDefined(typeof(FlagsAttribute), false); _typeScope.AddTypeMapping(mapping); _types.Add(typeName, typeNs, mapping); ArrayList constants = new ArrayList(); for (int i = 0; i < model.Constants.Length; i++) { ConstantMapping constant = ImportConstantMapping(model.Constants[i]); if (constant != null) constants.Add(constant); } if (constants.Count == 0) { throw new InvalidOperationException(SR.Format(SR.XmlNoSerializableMembers, model.TypeDesc.FullName)); } mapping.Constants = (ConstantMapping[])constants.ToArray(typeof(ConstantMapping)); } return mapping; } private ConstantMapping ImportConstantMapping(ConstantModel model) { SoapAttributes a = GetAttributes(model.FieldInfo); if (a.SoapIgnore) return null; if ((a.SoapFlags & ~SoapAttributeFlags.Enum) != 0) throw new InvalidOperationException(SR.XmlInvalidEnumAttribute); if (a.SoapEnum == null) a.SoapEnum = new SoapEnumAttribute(); ConstantMapping constant = new ConstantMapping(); constant.XmlName = a.SoapEnum.Name.Length == 0 ? 
model.Name : a.SoapEnum.Name;
            constant.Name = model.Name;
            constant.Value = model.Value;
            return constant;
        }

        /// <summary>
        /// Imports an encoded-SOAP members mapping for a set of reflected members
        /// (typically the parameters of a web method).
        /// </summary>
        /// <param name="xmlReflectionMembers">The reflected members to import.</param>
        /// <param name="ns">Default namespace applied to the member elements.</param>
        /// <param name="hasWrapperElement">True when the members are wrapped in an outer element (rpc style).</param>
        /// <param name="writeAccessors">True when accessors are written (affects return-value handling and namespace).</param>
        /// <param name="validateWrapperElement">Whether the rpc wrapper element should be validated on read.</param>
        /// <param name="limiter">Recursion limiter shared across the import.</param>
        /// <exception cref="InvalidOperationException">A return-value member is not the first member.</exception>
        private MembersMapping ImportMembersMapping(XmlReflectionMember[] xmlReflectionMembers, string ns, bool hasWrapperElement, bool writeAccessors, bool validateWrapperElement, RecursionLimiter limiter)
        {
            MembersMapping members = new MembersMapping();
            members.TypeDesc = _typeScope.GetTypeDesc(typeof(object[]));
            MemberMapping[] mappings = new MemberMapping[xmlReflectionMembers.Length];
            for (int i = 0; i < mappings.Length; i++)
            {
                try
                {
                    XmlReflectionMember member = xmlReflectionMembers[i];
                    // With a wrapper element the members are local (unqualified); without one they are global (qualified).
                    MemberMapping mapping = ImportMemberMapping(member, ns, xmlReflectionMembers, hasWrapperElement ? XmlSchemaForm.Unqualified : XmlSchemaForm.Qualified, limiter);
                    if (member.IsReturnValue && writeAccessors)
                    { // no special treatment for return values with doc/enc
                        if (i > 0) throw new InvalidOperationException(SR.XmlInvalidReturnPosition);
                        mapping.IsReturnValue = true;
                    }
                    mappings[i] = mapping;
                }
                catch (Exception e)
                {
                    // OOM is rethrown untouched; anything else is wrapped with the offending member name.
                    if (e is OutOfMemoryException)
                    {
                        throw;
                    }
                    throw ReflectionException(xmlReflectionMembers[i].MemberName, e);
                }
            }
            members.Members = mappings;
            members.HasWrapperElement = hasWrapperElement;
            if (hasWrapperElement)
            {
                members.ValidateRpcWrapperElement = validateWrapperElement;
            }
            members.WriteAccessors = writeAccessors;
            members.IsSoap = true;
            // Namespace only matters for wrapped, read-only (no accessors) member sets.
            if (hasWrapperElement && !writeAccessors)
                members.Namespace = ns;
            return members;
        }

        /// <summary>
        /// Imports the mapping for one reflected member. Returns null when the member
        /// is marked with [SoapIgnore].
        /// </summary>
        private MemberMapping ImportMemberMapping(XmlReflectionMember xmlReflectionMember, string ns, XmlReflectionMember[] xmlReflectionMembers, XmlSchemaForm form, RecursionLimiter limiter)
        {
            SoapAttributes a = xmlReflectionMember.SoapAttributes;
            if (a.SoapIgnore) return null;
            MemberMapping member = new MemberMapping();
            member.IsSoap = true;
            member.Name = xmlReflectionMember.MemberName;
            // A sibling "<Name>Specified" member, if present, controls whether this member is serialized.
            bool checkSpecified = XmlReflectionImporter.FindSpecifiedMember(xmlReflectionMember.MemberName, xmlReflectionMembers) != null;
            FieldModel model = new FieldModel(xmlReflectionMember.MemberName, xmlReflectionMember.MemberType, _typeScope.GetTypeDesc(xmlReflectionMember.MemberType), checkSpecified, false);
            member.CheckShouldPersist = model.CheckShouldPersist;
            member.CheckSpecified = model.CheckSpecified;
            member.ReadOnly = model.ReadOnly; // || !model.FieldTypeDesc.HasDefaultConstructor;
            ImportAccessorMapping(member, model, a, ns, form, limiter);
            if (xmlReflectionMember.OverrideIsNullable)
                member.Elements[0].IsNullable = false;
            return member;
        }

        /// <summary>
        /// Imports the mapping for a reflected field/property of a type.
        /// Returns null when the field is marked with [SoapIgnore].
        /// Fields are always imported with an unqualified form.
        /// </summary>
        private MemberMapping ImportFieldMapping(FieldModel model, SoapAttributes a, string ns, RecursionLimiter limiter)
        {
            if (a.SoapIgnore) return null;
            MemberMapping member = new MemberMapping();
            member.IsSoap = true;
            member.Name = model.Name;
            member.CheckShouldPersist = model.CheckShouldPersist;
            member.CheckSpecified = model.CheckSpecified;
            member.MemberInfo = model.MemberInfo;
            member.CheckSpecifiedMemberInfo = model.CheckSpecifiedMemberInfo;
            member.CheckShouldPersistMethodInfo = model.CheckShouldPersistMethodInfo;
            member.ReadOnly = model.ReadOnly; // || !model.FieldTypeDesc.HasDefaultConstructor;
            ImportAccessorMapping(member, model, a, ns, XmlSchemaForm.Unqualified, limiter);
            return member;
        }

        /// <summary>
        /// Fills in the attribute- or element-accessor for a member, depending on its
        /// SoapAttributeFlags. [SoapAttribute] is only legal on primitives/enums.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// The member type is void, a non-primitive is marked as a SOAP attribute,
        /// or the flags combine attribute and element semantics.
        /// </exception>
        private void ImportAccessorMapping(MemberMapping accessor, FieldModel model, SoapAttributes a, string ns, XmlSchemaForm form, RecursionLimiter limiter)
        {
            Type accessorType = model.FieldType;
            string accessorName = model.Name;
            accessor.TypeDesc = _typeScope.GetTypeDesc(accessorType);
            if (accessor.TypeDesc.IsVoid)
            {
                throw new InvalidOperationException(SR.XmlInvalidVoid);
            }
            SoapAttributeFlags flags = a.SoapFlags;
            if ((flags & SoapAttributeFlags.Attribute) == SoapAttributeFlags.Attribute)
            {
                if (!accessor.TypeDesc.IsPrimitive && !accessor.TypeDesc.IsEnum)
                    throw new InvalidOperationException(SR.Format(SR.XmlIllegalSoapAttribute, accessorName, accessor.TypeDesc.FullName));
                if ((flags & SoapAttributeFlags.Attribute) != flags)
                    throw new InvalidOperationException(SR.XmlInvalidElementAttribute);
                AttributeAccessor attribute = new AttributeAccessor();
                // Explicit [SoapAttribute] name/namespace win over the member name and default namespace.
                attribute.Name = Accessor.EscapeQName(a.SoapAttribute == null || a.SoapAttribute.AttributeName.Length == 0 ? accessorName : a.SoapAttribute.AttributeName);
                attribute.Namespace = a.SoapAttribute == null || a.SoapAttribute.Namespace == null ? ns : a.SoapAttribute.Namespace;
                attribute.Form = XmlSchemaForm.Qualified; // attributes are always qualified since they're only used for encoded soap headers
                attribute.Mapping = ImportTypeMapping(_modelScope.GetTypeModel(accessorType), (a.SoapAttribute == null ? String.Empty : a.SoapAttribute.DataType), limiter);
                attribute.Default = GetDefaultValue(model.FieldTypeDesc, a);
                accessor.Attribute = attribute;
                accessor.Elements = new ElementAccessor[0];
            }
            else
            {
                if ((flags & SoapAttributeFlags.Element) != flags)
                    throw new InvalidOperationException(SR.XmlInvalidElementAttribute);
                ElementAccessor element = new ElementAccessor();
                element.IsSoap = true;
                element.Name = XmlConvert.EncodeLocalName(a.SoapElement == null || a.SoapElement.ElementName.Length == 0 ? accessorName : a.SoapElement.ElementName);
                element.Namespace = ns;
                element.Form = form;
                element.Mapping = ImportTypeMapping(_modelScope.GetTypeModel(accessorType), (a.SoapElement == null ? String.Empty : a.SoapElement.DataType), limiter);
                if (a.SoapElement != null)
                    element.IsNullable = a.SoapElement.IsNullable;
                accessor.Elements = new ElementAccessor[] { element };
            }
        }

        /// <summary>
        /// Creates a SOAP element accessor named after the mapping's type name.
        /// </summary>
        private static ElementAccessor CreateElementAccessor(TypeMapping mapping, string ns)
        {
            ElementAccessor element = new ElementAccessor();
            element.IsSoap = true;
            element.Name = mapping.TypeName; //XmlConvert.EncodeLocalName(name == null || name.Length == 0 ? mapping.TypeName : name);
            element.Namespace = ns;
            element.Mapping = mapping;
            return element;
        }

        /// <summary>
        /// Resolves the [SoapDefaultValue] for a field. Returns null when no default
        /// is set; clears and returns null for non-primitive, non-enum fields.
        /// For enums the value is validated against the field type and returned as a
        /// space-separated string representation.
        /// </summary>
        /// <exception cref="InvalidOperationException">The enum default is of the wrong type or not a defined value.</exception>
        private object GetDefaultValue(TypeDesc fieldTypeDesc, SoapAttributes a)
        {
            if (a.SoapDefaultValue == null || a.SoapDefaultValue == DBNull.Value) return null;
            if (!(fieldTypeDesc.Kind == TypeKind.Primitive || fieldTypeDesc.Kind == TypeKind.Enum))
            {
                // NOTE(review): side effect — the attribute's stored default is cleared here; presumably intentional, confirm before restructuring.
                a.SoapDefaultValue = null;
                return a.SoapDefaultValue;
            }
            // for enums validate and return a string representation
            if (fieldTypeDesc.Kind == TypeKind.Enum)
            {
                if (fieldTypeDesc != _typeScope.GetTypeDesc(a.SoapDefaultValue.GetType()))
                    throw new InvalidOperationException(SR.Format(SR.XmlInvalidDefaultEnumValue, a.SoapDefaultValue.GetType().FullName, fieldTypeDesc.FullName));
                // "G" yields names (comma-separated for flags, turned into spaces); "D" yields the raw number.
                string strValue = Enum.Format(a.SoapDefaultValue.GetType(), a.SoapDefaultValue, "G").Replace(",", " ");
                string numValue = Enum.Format(a.SoapDefaultValue.GetType(), a.SoapDefaultValue, "D");
                if (strValue == numValue) // means enum value wasn't recognized
                    throw new InvalidOperationException(SR.Format(SR.XmlInvalidDefaultValue, strValue, a.SoapDefaultValue.GetType().FullName));
                return strValue;
            }
            return a.SoapDefaultValue;
        }

        /// <summary>
        /// Returns the XSD type name for a CLR type: anyType for object, the
        /// primitive's data-type name when available, otherwise the attributed name.
        /// </summary>
        internal string XsdTypeName(Type type)
        {
            if (type == typeof(object)) return Soap.UrType;
            TypeDesc typeDesc = _typeScope.GetTypeDesc(type);
            if (typeDesc.IsPrimitive && typeDesc.DataType != null && typeDesc.DataType.Name != null && typeDesc.DataType.Name.Length > 0)
                return typeDesc.DataType.Name;
            return XsdTypeName(type, GetAttributes(type), typeDesc.Name);
        }

        /// <summary>
        /// Returns the XSD type name, honoring [SoapType(TypeName=...)] and expanding
        /// "{T}" generic-parameter placeholders with the arguments' XSD type names.
        /// </summary>
        internal string XsdTypeName(Type type, SoapAttributes a, string name)
        {
            string typeName = name;
            if (a.SoapType != null && a.SoapType.TypeName.Length > 0) typeName = a.SoapType.TypeName;
            if (type.IsGenericType && typeName.IndexOf('{') >= 0)
            {
                Type genType = type.GetGenericTypeDefinition();
                Type[] names = genType.GetGenericArguments();
                Type[] types = type.GetGenericArguments();
                for (int i = 0; i < names.Length; i++)
                {
                    string argument = "{" + names[i] + "}";
                    if (typeName.Contains(argument))
                    {
                        typeName = typeName.Replace(argument, XsdTypeName(types[i]));
                        // Stop early once no placeholders remain.
                        if (typeName.IndexOf('{') < 0)
                        {
                            break;
                        }
                    }
                }
            }
            // CONSIDER: throw if not all parameters were filled
            return typeName;
        }
    }
}
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.

using System.Collections.Generic;
using System.Linq;
using Microsoft.Build.Construction;

namespace Microsoft.PythonTools.Project.ImportWizard {
    /// <summary>
    /// Base class for the project-flavor customizations offered by the import
    /// wizard. Each customization writes the MSBuild properties and imports that
    /// turn a plain project into a project of the selected flavor.
    /// </summary>
    abstract class ProjectCustomization {
        /// <summary>
        /// Name shown to the user in the wizard.
        /// </summary>
        public abstract string DisplayName {
            get;
        }

        public override string ToString() {
            return DisplayName;
        }

        /// <summary>
        /// Applies this customization to <paramref name="project"/>.
        /// </summary>
        /// <param name="project">The project being generated.</param>
        /// <param name="groups">Known property groups, keyed by label.</param>
        public abstract void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        );

        /// <summary>
        /// Sets the value of every existing property named <paramref name="name"/>
        /// in the project, or adds the property if none exists.
        /// </summary>
        protected static void AddOrSetProperty(ProjectRootElement project, string name, string value) {
            bool updatedExisting = false;
            foreach (var property in project.Properties) {
                if (property.Name == name) {
                    property.Value = value;
                    updatedExisting = true;
                }
            }
            if (!updatedExisting) {
                project.AddProperty(name, value);
            }
        }

        /// <summary>
        /// Sets the value of every existing property named <paramref name="name"/>
        /// in the group, or adds the property if none exists.
        /// </summary>
        protected static void AddOrSetProperty(ProjectPropertyGroupElement group, string name, string value) {
            bool updatedExisting = false;
            foreach (var property in group.Properties) {
                if (property.Name == name) {
                    property.Value = value;
                    updatedExisting = true;
                }
            }
            if (!updatedExisting) {
                group.AddProperty(name, value);
            }
        }

        /// <summary>
        /// Returns the property group registered under <paramref name="label"/>,
        /// or adds a fresh group to the project when none is registered.
        /// </summary>
        protected static ProjectPropertyGroupElement GetOrCreateGroup(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups,
            string label
        ) {
            ProjectPropertyGroupElement group;
            if (!groups.TryGetValue(label, out group)) {
                group = project.AddPropertyGroup();
            }
            return group;
        }
    }

    /// <summary>
    /// Plain Python project: wires up the PTVS targets with a fallback to the
    /// common targets when PTVS is not installed.
    /// </summary>
    class DefaultProjectCustomization : ProjectCustomization {
        public static readonly ProjectCustomization Instance = new DefaultProjectCustomization();

        private DefaultProjectCustomization() { }

        public override string DisplayName {
            get {
                return SR.GetString(SR.ImportWizardDefaultProjectCustomization);
            }
        }

        public override void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        ) {
            var importGroup = GetOrCreateGroup(project, groups, "Imports");
            AddOrSetProperty(importGroup, "PtvsTargetsFile", @"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets");
            project.AddImport("$(PtvsTargetsFile)").Condition = "Exists($(PtvsTargetsFile))";
            project.AddImport(@"$(MSBuildToolsPath)\Microsoft.Common.targets").Condition = "!Exists($(PtvsTargetsFile))";
        }
    }

    /// <summary>
    /// Bottle web project customization.
    /// </summary>
    class BottleProjectCustomization : ProjectCustomization {
        public static readonly ProjectCustomization Instance = new BottleProjectCustomization();

        private BottleProjectCustomization() { }

        public override string DisplayName {
            get {
                return SR.GetString(SR.ImportWizardBottleProjectCustomization);
            }
        }

        public override void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        ) {
            var globalsGroup = GetOrCreateGroup(project, groups, "Globals");
            AddOrSetProperty(globalsGroup, "ProjectTypeGuids", "{e614c764-6d9e-4607-9337-b7073809a0bd};{1b580a1a-fdb3-4b32-83e1-6407eb2722e6};{349c5851-65df-11da-9384-00065b846f21};{888888a0-9f3d-457c-b088-3a5042f75d52}");
            AddOrSetProperty(globalsGroup, "LaunchProvider", PythonConstants.WebLauncherName);
            AddOrSetProperty(globalsGroup, "PythonDebugWebServerCommandArguments", "--debug $(CommandLineArguments)");
            AddOrSetProperty(globalsGroup, "PythonWsgiHandler", "{StartupModule}.wsgi_app()");
            project.AddImport(@"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.Web.targets");
        }
    }

    /// <summary>
    /// Django web project customization.
    /// </summary>
    class DjangoProjectCustomization : ProjectCustomization {
        public static readonly ProjectCustomization Instance = new DjangoProjectCustomization();

        private DjangoProjectCustomization() { }

        public override string DisplayName {
            get {
                return SR.GetString(SR.ImportWizardDjangoProjectCustomization);
            }
        }

        public override void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        ) {
            var globalsGroup = GetOrCreateGroup(project, groups, "Globals");
            AddOrSetProperty(globalsGroup, "StartupFile", "manage.py");
            AddOrSetProperty(globalsGroup, "ProjectTypeGuids", "{5F0BE9CA-D677-4A4D-8806-6076C0FAAD37};{349c5851-65df-11da-9384-00065b846f21};{888888a0-9f3d-457c-b088-3a5042f75d52}");
            AddOrSetProperty(globalsGroup, "LaunchProvider", "Django launcher");
            project.AddImport(@"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.Django.targets");
        }
    }

    /// <summary>
    /// Flask web project customization.
    /// </summary>
    class FlaskProjectCustomization : ProjectCustomization {
        public static readonly ProjectCustomization Instance = new FlaskProjectCustomization();

        private FlaskProjectCustomization() { }

        public override string DisplayName {
            get {
                return SR.GetString(SR.ImportWizardFlaskProjectCustomization);
            }
        }

        public override void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        ) {
            var globalsGroup = GetOrCreateGroup(project, groups, "Globals");
            AddOrSetProperty(globalsGroup, "ProjectTypeGuids", "{789894c7-04a9-4a11-a6b5-3f4435165112};{1b580a1a-fdb3-4b32-83e1-6407eb2722e6};{349c5851-65df-11da-9384-00065b846f21};{888888a0-9f3d-457c-b088-3a5042f75d52}");
            AddOrSetProperty(globalsGroup, "LaunchProvider", PythonConstants.WebLauncherName);
            AddOrSetProperty(globalsGroup, "PythonWsgiHandler", "{StartupModule}.wsgi_app");
            project.AddImport(@"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.Web.targets");
        }
    }

    /// <summary>
    /// Generic (framework-agnostic) web project customization.
    /// </summary>
    class GenericWebProjectCustomization : ProjectCustomization {
        public static readonly ProjectCustomization Instance = new GenericWebProjectCustomization();

        private GenericWebProjectCustomization() { }

        public override string DisplayName {
            get {
                return SR.GetString(SR.ImportWizardGenericWebProjectCustomization);
            }
        }

        public override void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        ) {
            var globalsGroup = GetOrCreateGroup(project, groups, "Globals");
            AddOrSetProperty(globalsGroup, "ProjectTypeGuids", "{1b580a1a-fdb3-4b32-83e1-6407eb2722e6};{349c5851-65df-11da-9384-00065b846f21};{888888a0-9f3d-457c-b088-3a5042f75d52}");
            AddOrSetProperty(globalsGroup, "LaunchProvider", PythonConstants.WebLauncherName);
            project.AddImport(@"$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.Web.targets");
        }
    }

    /// <summary>
    /// Universal Windows Platform project customization.
    /// </summary>
    class UwpProjectCustomization : ProjectCustomization {
        public static readonly ProjectCustomization Instance = new UwpProjectCustomization();

        private UwpProjectCustomization() { }

        public override string DisplayName {
            get {
                return SR.GetString(SR.ImportWizardUwpProjectCustomization);
            }
        }

        public override void Process(
            ProjectRootElement project,
            Dictionary<string, ProjectPropertyGroupElement> groups
        ) {
            var globalsGroup = GetOrCreateGroup(project, groups, "Globals");
            AddOrSetProperty(globalsGroup, "ProjectTypeGuids", "{2b557614-1a2b-4903-b9df-ed20d7b63f3a};{888888A0-9F3D-457C-B088-3A5042F75D52}");
        }
    }
}
#region Header

/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 * Copyright (c) 2007-2008 James Nies and NArrange contributors.
 * All rights reserved.
 *
 * This program and the accompanying materials are made available under
 * the terms of the Common Public License v1.0 which accompanies this
 * distribution.
 *
 * Redistribution and use in source and binary forms, with or
 * without modification, are permitted provided that the following
 * conditions are met:
 *
 * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in
 * the documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
 * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 *<author>James Nies</author>
 *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/

#endregion Header

namespace NArrange.Gui.Configuration
{
    using System;
    using System.Collections;
    using System.ComponentModel;
    using System.Windows.Forms;

    /// <summary>
    /// Tree node for a list item. Keeps the visual node position in sync with the
    /// item's position in the underlying IList, and offers Move Up / Move Down /
    /// Remove via its context menu.
    /// </summary>
    public sealed class ListItemTreeNode : TreeNode
    {
        #region Fields

        /// <summary>
        /// Component being edited (the object that owns the list property).
        /// </summary>
        private object _component;

        /// <summary>
        /// Context menu.
        /// </summary>
        private ContextMenuStrip _contextMenu;

        /// <summary>
        /// Item in the list associated with the node.
        /// </summary>
        private object _listItem;

        /// <summary>
        /// Parent list property (resolved against _component to get the IList).
        /// </summary>
        private PropertyDescriptor _listProperty;

        /// <summary>
        /// Move down menu item.
        /// </summary>
        private ToolStripMenuItem _moveDownMenuItem;

        /// <summary>
        /// Move up menu item.
        /// </summary>
        private ToolStripMenuItem _moveUpMenuItem;

        /// <summary>
        /// Remove menu item.
        /// </summary>
        private ToolStripMenuItem _removeMenuItem;

        #endregion Fields

        #region Constructors

        /// <summary>
        /// Creates a new ListItemTreeNode.
        /// </summary>
        /// <param name="listProperty">The list property.</param>
        /// <param name="component">The component.</param>
        /// <param name="listItem">The list item.</param>
        public ListItemTreeNode(PropertyDescriptor listProperty, object component, object listItem)
        {
            _listProperty = listProperty;
            _component = component;
            _listItem = listItem;

            // Tag mirrors the list item so generic tree code can find it.
            this.Tag = _listItem;

            Initialize();
        }

        #endregion Constructors

        #region Properties

        /// <summary>
        /// Gets the list item associated with this node.
        /// </summary>
        public object ListItem
        {
            get
            {
                return _listItem;
            }
        }

        #endregion Properties

        #region Methods

        /// <summary>
        /// Moves this list item node down in the collection.
        /// No-op when the item is already last or the property is not an IList.
        /// </summary>
        public void MoveDown()
        {
            IList list = this._listProperty.GetValue(_component) as IList;
            if (list != null && list.Contains(_listItem))
            {
                int index = list.IndexOf(_listItem);
                if (index < list.Count - 1)
                {
                    // Detach the node first so the re-insert lands at the new index.
                    TreeNode parent = this.Parent;
                    if (parent != null)
                    {
                        parent.Nodes.Remove(this);
                    }
                    list.Remove(_listItem);

                    int newIndex = ++index;

                    if (parent != null)
                    {
                        parent.Nodes.Insert(newIndex, this);
                    }
                    list.Insert(newIndex, _listItem);
                }

                this.Select();
                this.UpdateMenu();
            }
        }

        /// <summary>
        /// Moves this list item node up in the collection.
        /// No-op when the item is already first or the property is not an IList.
        /// </summary>
        public void MoveUp()
        {
            IList list = this._listProperty.GetValue(_component) as IList;
            if (list != null && list.Contains(_listItem))
            {
                int index = list.IndexOf(_listItem);
                if (index > 0)
                {
                    // Detach the node first so the re-insert lands at the new index.
                    TreeNode parent = this.Parent;
                    if (parent != null)
                    {
                        parent.Nodes.Remove(this);
                    }
                    list.Remove(_listItem);

                    int newIndex = --index;

                    if (parent != null)
                    {
                        parent.Nodes.Insert(newIndex, this);
                    }
                    list.Insert(newIndex, _listItem);
                }

                this.Select();
                this.UpdateMenu();
            }
        }

        /// <summary>
        /// Removes the item from the collection.
        /// NOTE(review): only the list item is removed here, not this tree node
        /// itself — presumably the tree is refreshed elsewhere when the list
        /// changes; confirm before relying on this in isolation.
        /// </summary>
        public void RemoveItem()
        {
            IList list = this._listProperty.GetValue(_component) as IList;
            if (list != null && list.Contains(_listItem))
            {
                // Move the selection off this node before its item disappears.
                if (this.Parent != null)
                {
                    this.Parent.TreeView.SelectedNode = this.Parent;
                }

                list.Remove(_listItem);
            }
        }

        /// <summary>
        /// Updates the context menu for the tree node: Move Up/Down are only
        /// enabled when the item can actually move in that direction.
        /// </summary>
        public void UpdateMenu()
        {
            IList list = this._listProperty.GetValue(_component) as IList;
            if (list != null && list.Contains(_listItem))
            {
                int index = list.IndexOf(_listItem);

                this._moveUpMenuItem.Enabled = index > 0;
                this._moveDownMenuItem.Enabled = index < list.Count - 1;
            }
        }

        /// <summary>
        /// Updates the display text from the list item's ToString().
        /// </summary>
        public void UpdateText()
        {
            this.Text = _listItem.ToString();
        }

        /// <summary>
        /// Event handler for the Move Down menu item click event.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
        private void HandleMoveDownMenuItemClick(object sender, EventArgs e)
        {
            MoveDown();
        }

        /// <summary>
        /// Event handler for the Move Up menu item click event.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
        private void HandleMoveUpMenuItemClick(object sender, EventArgs e)
        {
            MoveUp();
        }

        /// <summary>
        /// Event handler for the Remove menu item click event.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
        private void HandleRemoveMenuItemClick(object sender, EventArgs e)
        {
            this.RemoveItem();
        }

        /// <summary>
        /// Initializes this tree node: sets the text and builds the
        /// Remove / Move Up / Move Down context menu with keyboard shortcuts.
        /// </summary>
        private void Initialize()
        {
            this.UpdateText();

            _contextMenu = new ContextMenuStrip();

            _removeMenuItem = new ToolStripMenuItem("&Remove");
            _removeMenuItem.Click += new EventHandler(HandleRemoveMenuItemClick);
            _removeMenuItem.ShortcutKeys = Keys.Delete;
            _contextMenu.Items.Add(_removeMenuItem);

            _moveUpMenuItem = new ToolStripMenuItem("Move &Up");
            _moveUpMenuItem.Click += new EventHandler(HandleMoveUpMenuItemClick);
            _moveUpMenuItem.ShortcutKeys = Keys.Control | Keys.Up;
            _contextMenu.Items.Add(_moveUpMenuItem);

            _moveDownMenuItem = new ToolStripMenuItem("Move &Down");
            _moveDownMenuItem.Click += new EventHandler(HandleMoveDownMenuItemClick);
            _moveDownMenuItem.ShortcutKeys = Keys.Control | Keys.Down;
            _contextMenu.Items.Add(_moveDownMenuItem);

            this.UpdateMenu();

            this.ContextMenuStrip = _contextMenu;
        }

        /// <summary>
        /// Sets this node as the selected node in the tree view
        /// (no-op when the node is not attached to a tree view).
        /// </summary>
        private void Select()
        {
            if (this.TreeView != null)
            {
                this.TreeView.SelectedNode = this;
            }
        }

        #endregion Methods
    }
}
namespace Gu.Wpf.Adorners
{
    using System.Diagnostics;
    using System.Windows;
    using System.Windows.Controls;
    using System.Windows.Documents;

    /// <summary>
    /// For showing adorners similar to validation errors.
    /// Attach a <see cref="TemplateProperty"/> to an element; an adorner is
    /// created/removed as the computed <see cref="IsVisibleProperty"/> changes.
    /// </summary>
    public static class Info
    {
#pragma warning disable SA1202 // Elements must be ordered by access
        /// <summary>
        /// Template used to generate info feedback on the AdornerLayer.
        /// </summary>
        public static readonly DependencyProperty TemplateProperty = DependencyProperty.RegisterAttached(
            "Template",
            typeof(ControlTemplate),
            typeof(Info),
            new PropertyMetadata(
                default(ControlTemplate),
                OnTemplateChanged));

        /// <summary>
        /// Gets or sets visibility of the adorner.
        /// Note that setting it to visible does not need to mean it will be rendered. This can happen if the adorned element is collapsed for example.
        /// </summary>
        public static readonly DependencyProperty VisibilityProperty = DependencyProperty.RegisterAttached(
            "Visibility",
            typeof(Visibility),
            typeof(Info),
            new PropertyMetadata(
                Visibility.Visible,
                (d, e) => OnAdornedElementChanged(d, e)));

        // Read-only key: IsVisible is computed by UpdateIsVisible, never set by consumers.
        private static readonly DependencyPropertyKey IsVisiblePropertyKey = DependencyProperty.RegisterAttachedReadOnly(
            "IsVisible",
            typeof(bool),
            typeof(Info),
            new PropertyMetadata(
                default(bool),
                OnIsVisibleChanged));

        /// <summary>
        /// Gets whether the adorner is currently visible (read-only, computed).
        /// </summary>
        public static readonly DependencyProperty IsVisibleProperty = IsVisiblePropertyKey.DependencyProperty;

        // Holds the adorner instance currently shown for an element; the
        // change callback tears down the old adorner's templated child.
        private static readonly DependencyProperty AdornerProperty = DependencyProperty.RegisterAttached(
            "Adorner",
            typeof(Adorner),
            typeof(Info),
            new PropertyMetadata(
                default(Adorner),
                (d, e) => ((Adorner)e.OldValue)?.ClearTemplatedAdornerChild()));

        /// <summary>Helper for setting <see cref="TemplateProperty"/> on <paramref name="element"/>.</summary>
        /// <param name="element"><see cref="DependencyObject"/> to set <see cref="TemplateProperty"/> on.</param>
        /// <param name="value">Template property value.</param>
        public static void SetTemplate(DependencyObject element, ControlTemplate? value)
        {
            if (element is null)
            {
                throw new System.ArgumentNullException(nameof(element));
            }

            element.SetValue(TemplateProperty, value);
        }

        /// <summary>Helper for getting <see cref="TemplateProperty"/> from <paramref name="element"/>.</summary>
        /// <param name="element"><see cref="DependencyObject"/> to read <see cref="TemplateProperty"/> from.</param>
        /// <returns>Template property value.</returns>
        public static ControlTemplate GetTemplate(DependencyObject element)
        {
            if (element is null)
            {
                throw new System.ArgumentNullException(nameof(element));
            }

            return (ControlTemplate)element.GetValue(TemplateProperty);
        }

        /// <summary>Helper for setting <see cref="VisibilityProperty"/> on <paramref name="element"/>.</summary>
        /// <param name="element"><see cref="DependencyObject"/> to set <see cref="VisibilityProperty"/> on.</param>
        /// <param name="value">Visibility property value.</param>
        public static void SetVisibility(DependencyObject element, Visibility value)
        {
            if (element is null)
            {
                throw new System.ArgumentNullException(nameof(element));
            }

            element.SetValue(VisibilityProperty, value);
        }

        /// <summary>Helper for getting <see cref="VisibilityProperty"/> from <paramref name="element"/>.</summary>
        /// <param name="element"><see cref="DependencyObject"/> to read <see cref="VisibilityProperty"/> from.</param>
        /// <returns>Visibility property value.</returns>
        public static Visibility GetVisibility(DependencyObject element)
        {
            if (element is null)
            {
                throw new System.ArgumentNullException(nameof(element));
            }

            return (Visibility)element.GetValue(VisibilityProperty);
        }

        // Internal setter for the read-only IsVisible property.
        private static void SetIsVisible(this DependencyObject element, bool value)
        {
            element.SetValue(IsVisiblePropertyKey, value);
        }

        /// <summary>Helper for getting <see cref="IsVisibleProperty"/> from <paramref name="element"/>.</summary>
        /// <param name="element"><see cref="DependencyObject"/> to read <see cref="IsVisibleProperty"/> from.</param>
        /// <returns>IsVisible property value.</returns>
        [AttachedPropertyBrowsableForChildren(IncludeDescendants = false)]
        [AttachedPropertyBrowsableForType(typeof(UIElement))]
        public static bool GetIsVisible(this DependencyObject element)
        {
            if (element is null)
            {
                throw new System.ArgumentNullException(nameof(element));
            }

            return (bool)element.GetValue(IsVisibleProperty);
        }
#pragma warning restore SA1202 // Elements must be ordered by access

        // Re-measures the current adorner and recomputes visibility when the
        // adorned element changes size.
        private static void OnSizeChanged(object? sender, RoutedEventArgs e)
        {
            if (sender is UIElement adornedElement)
            {
                (adornedElement.GetValue(AdornerProperty) as Adorner)?.InvalidateMeasure();
                UpdateIsVisible(adornedElement);
            }
        }

#pragma warning disable SA1313 // Parameter names should begin with lower-case letter
        // Shared handler for Loaded/Unloaded/IsVisibleChanged and Visibility changes.
        private static void OnAdornedElementChanged(object? sender, object _)
#pragma warning restore SA1313 // Parameter names should begin with lower-case letter
        {
            if (sender is UIElement adornedElement)
            {
                UpdateIsVisible(adornedElement);
            }
        }

        // Wires the lifetime events that can affect adorner visibility, then
        // recomputes visibility once for the current state.
        private static void OnTemplateChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            if (d is FrameworkElement adornedElement)
            {
                IsVisibleChangedEventManager.UpdateHandler(adornedElement, OnAdornedElementChanged);
                LoadedEventManager.UpdateHandler(adornedElement, OnAdornedElementChanged);
                UnloadedEventManager.UpdateHandler(adornedElement, OnAdornedElementChanged);
                SizeChangedEventManager.UpdateHandler(adornedElement, OnSizeChanged);
                UpdateIsVisible(adornedElement);
            }
        }

        // Creates and shows the templated adorner when IsVisible flips to true,
        // removes and clears it when it flips to false.
        private static void OnIsVisibleChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            if (Equals(e.NewValue, true) &&
                d is UIElement element &&
                d.GetValue(TemplateProperty) is ControlTemplate template)
            {
                if (d.GetValue(AdornerProperty) is null)
                {
                    var adorner = TemplatedAdorner.Create(element, template);
                    d.SetCurrentValue(AdornerProperty, adorner);
                    AdornerService.Show(adorner);
                }
                else
                {
                    Debug.Assert(condition: false, message: $"Element {d} already has an info adorner.");
                }
            }
            else if (d.GetValue(AdornerProperty) is Adorner adorner)
            {
                AdornerService.Remove(adorner);
                d.ClearValue(AdornerProperty);
            }
        }

        // IsVisible is true only when the element is visible, loaded, has a
        // template, and the attached Visibility is Visible.
        private static void UpdateIsVisible(UIElement element)
        {
            if (element.IsVisible &&
                element.IsLoaded() &&
                GetTemplate(element) is { } &&
                GetVisibility(element) == Visibility.Visible)
            {
                element.SetIsVisible(true);
                return;
            }
            else
            {
                element.SetIsVisible(false);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Reflection;

using JetBrains.Annotations;

namespace LinqToDB.DataProvider.SqlServer
{
    using Configuration;
    using Data;

    /// <summary>
    /// Registration and helper entry points for the SQL Server data providers
    /// (2000/2005/2008/2012 dialects). Also auto-detects the server version from
    /// the connection when possible.
    /// </summary>
    public static class SqlServerTools
    {
        #region Init

        static readonly SqlServerDataProvider _sqlServerDataProvider2000 = new SqlServerDataProvider(ProviderName.SqlServer2000, SqlServerVersion.v2000);
        static readonly SqlServerDataProvider _sqlServerDataProvider2005 = new SqlServerDataProvider(ProviderName.SqlServer2005, SqlServerVersion.v2005);
        static readonly SqlServerDataProvider _sqlServerDataProvider2008 = new SqlServerDataProvider(ProviderName.SqlServer2008, SqlServerVersion.v2008);
        static readonly SqlServerDataProvider _sqlServerDataProvider2012 = new SqlServerDataProvider(ProviderName.SqlServer2012, SqlServerVersion.v2012);

        /// <summary>
        /// When true (the default), the detector may open a connection to read
        /// the server version and pick the matching provider.
        /// </summary>
        public static bool AutoDetectProvider { get; set; }

        static SqlServerTools()
        {
            AutoDetectProvider = true;

            // "SqlServer" defaults to the 2008 dialect; 2014 maps onto the 2012
            // dialect (no separate 2014 provider exists here).
            DataConnection.AddDataProvider(ProviderName.SqlServer, _sqlServerDataProvider2008);
            DataConnection.AddDataProvider(ProviderName.SqlServer2014, _sqlServerDataProvider2012);
            DataConnection.AddDataProvider(_sqlServerDataProvider2012);
            DataConnection.AddDataProvider(_sqlServerDataProvider2008);
            DataConnection.AddDataProvider(_sqlServerDataProvider2005);
            DataConnection.AddDataProvider(_sqlServerDataProvider2000);

            DataConnection.AddProviderDetector(ProviderDetector);
        }

        /// <summary>
        /// Picks a provider from the configuration's provider name or connection
        /// name; as a last resort (when <see cref="AutoDetectProvider"/> is set)
        /// opens the connection and maps the server's major version:
        /// 8→2000, 9→2005, 10→2008, 11+→2012. Returns null when undecided.
        /// </summary>
        static IDataProvider ProviderDetector(IConnectionStringSettings css, string connectionString)
        {
            //if (css.IsGlobal /* DataConnection.IsMachineConfig(css)*/)
            //	return null;

            switch (css.ProviderName)
            {
                case ""                      :
                case null                    :

                    if (css.Name == "SqlServer")
                        goto case "SqlServer";
                    break;

                case "SqlServer2000"         :
                case "SqlServer.2000"        : return _sqlServerDataProvider2000;
                case "SqlServer2005"         :
                case "SqlServer.2005"        : return _sqlServerDataProvider2005;
                case "SqlServer2008"         :
                case "SqlServer.2008"        : return _sqlServerDataProvider2008;
                case "SqlServer2012"         :
                case "SqlServer.2012"        : return _sqlServerDataProvider2012;
                case "SqlServer2014"         :
                case "SqlServer.2014"        : return _sqlServerDataProvider2012;

                case "SqlServer"             :
                case "System.Data.SqlClient" :

                    if (css.Name.Contains("2000")) return _sqlServerDataProvider2000;
                    if (css.Name.Contains("2005")) return _sqlServerDataProvider2005;
                    if (css.Name.Contains("2008")) return _sqlServerDataProvider2008;
                    if (css.Name.Contains("2012")) return _sqlServerDataProvider2012;
                    if (css.Name.Contains("2014")) return _sqlServerDataProvider2012;

                    if (AutoDetectProvider)
                    {
                        try
                        {
                            var cs = string.IsNullOrWhiteSpace(connectionString) ? css.ConnectionString : connectionString;

                            using (var conn = new SqlConnection(cs))
                            {
                                conn.Open();

                                int version;

                                if (int.TryParse(conn.ServerVersion.Split('.')[0], out version))
                                {
                                    switch (version)
                                    {
                                        case  8 : return _sqlServerDataProvider2000;
                                        case  9 : return _sqlServerDataProvider2005;
                                        case 10 : return _sqlServerDataProvider2008;
                                        case 11 : return _sqlServerDataProvider2012;
                                        case 12 : return _sqlServerDataProvider2012;
                                        default :
                                            if (version > 12)
                                                return _sqlServerDataProvider2012;
                                            break;
                                    }
                                }
                            }
                        }
                        catch (Exception)
                        {
                            // Deliberate best-effort: if the probe connection fails we
                            // fall through and let the caller pick a default provider.
                        }
                    }

                    break;
            }

            return null;
        }

        #endregion

        #region Public Members

        /// <summary>
        /// Returns the provider for the given dialect version (2008 by default).
        /// </summary>
        public static IDataProvider GetDataProvider(SqlServerVersion version = SqlServerVersion.v2008)
        {
            switch (version)
            {
                case SqlServerVersion.v2000 : return _sqlServerDataProvider2000;
                case SqlServerVersion.v2005 : return _sqlServerDataProvider2005;
                case SqlServerVersion.v2012 : return _sqlServerDataProvider2012;
            }

            return _sqlServerDataProvider2008;
        }

        /// <summary>
        /// Registers a UDT type mapping with every provider version.
        /// </summary>
        public static void AddUdtType(Type type, string udtName)
        {
            _sqlServerDataProvider2000.AddUdtType(type, udtName);
            _sqlServerDataProvider2005.AddUdtType(type, udtName);
            _sqlServerDataProvider2008.AddUdtType(type, udtName);
            _sqlServerDataProvider2012.AddUdtType(type, udtName);
        }

        /// <summary>
        /// Registers a UDT type mapping with a null value with every provider version.
        /// </summary>
        public static void AddUdtType<T>(string udtName, T nullValue, DataType dataType = DataType.Undefined)
        {
            _sqlServerDataProvider2000.AddUdtType(udtName, nullValue, dataType);
            _sqlServerDataProvider2005.AddUdtType(udtName, nullValue, dataType);
            _sqlServerDataProvider2008.AddUdtType(udtName, nullValue, dataType);
            _sqlServerDataProvider2012.AddUdtType(udtName, nullValue, dataType);
        }

        /// <summary>
        /// Sets up an assembly resolver for Microsoft.SqlServer.Types from the given path.
        /// </summary>
        public static void ResolveSqlTypes([NotNull] string path)
        {
            if (path == null) throw new ArgumentNullException("path");
            new AssemblyResolver(path, "Microsoft.SqlServer.Types");
        }

        /// <summary>
        /// Captures the SqlHierarchyId/SqlGeography/SqlGeometry types from the
        /// given Microsoft.SqlServer.Types assembly.
        /// </summary>
        public static void ResolveSqlTypes([NotNull] Assembly assembly)
        {
            var types = assembly.GetTypes();

            SqlHierarchyIdType = types.First(t => t.Name == "SqlHierarchyId");
            SqlGeographyType   = types.First(t => t.Name == "SqlGeography");
            SqlGeometryType    = types.First(t => t.Name == "SqlGeometry");
        }

        internal static Type SqlHierarchyIdType;
        internal static Type SqlGeographyType;
        internal static Type SqlGeometryType;

        /// <summary>
        /// Explicitly sets the SQL CLR spatial/hierarchy types.
        /// </summary>
        public static void SetSqlTypes(Type sqlHierarchyIdType, Type sqlGeographyType, Type sqlGeometryType)
        {
            SqlHierarchyIdType = sqlHierarchyIdType;
            SqlGeographyType   = sqlGeographyType;
            SqlGeometryType    = sqlGeometryType;
        }

        #endregion

        #region CreateDataConnection

        /// <summary>
        /// Creates a DataConnection from a connection string for the given dialect (2008 by default).
        /// </summary>
        public static DataConnection CreateDataConnection(string connectionString, SqlServerVersion version = SqlServerVersion.v2008)
        {
            switch (version)
            {
                case SqlServerVersion.v2000 : return new DataConnection(_sqlServerDataProvider2000, connectionString);
                case SqlServerVersion.v2005 : return new DataConnection(_sqlServerDataProvider2005, connectionString);
                case SqlServerVersion.v2012 : return new DataConnection(_sqlServerDataProvider2012, connectionString);
            }

            return new DataConnection(_sqlServerDataProvider2008, connectionString);
        }

        /// <summary>
        /// Creates a DataConnection over an existing IDbConnection for the given dialect (2008 by default).
        /// </summary>
        public static DataConnection CreateDataConnection(IDbConnection connection, SqlServerVersion version = SqlServerVersion.v2008)
        {
            switch (version)
            {
                case SqlServerVersion.v2000 : return new DataConnection(_sqlServerDataProvider2000, connection);
                case SqlServerVersion.v2005 : return new DataConnection(_sqlServerDataProvider2005, connection);
                case SqlServerVersion.v2012 : return new DataConnection(_sqlServerDataProvider2012, connection);
            }

            return new DataConnection(_sqlServerDataProvider2008, connection);
        }

        /// <summary>
        /// Creates a DataConnection over an existing IDbTransaction for the given dialect (2008 by default).
        /// </summary>
        public static DataConnection CreateDataConnection(IDbTransaction transaction, SqlServerVersion version = SqlServerVersion.v2008)
        {
            switch (version)
            {
                case SqlServerVersion.v2000 : return new DataConnection(_sqlServerDataProvider2000, transaction);
                case SqlServerVersion.v2005 : return new DataConnection(_sqlServerDataProvider2005, transaction);
                case SqlServerVersion.v2012 : return new DataConnection(_sqlServerDataProvider2012, transaction);
            }

            return new DataConnection(_sqlServerDataProvider2008, transaction);
        }

        #endregion

        #region BulkCopy

        private static BulkCopyType _defaultBulkCopyType = BulkCopyType.ProviderSpecific;

        /// <summary>
        /// Default bulk-copy strategy (provider-specific, i.e. SqlBulkCopy).
        /// </summary>
        public  static BulkCopyType  DefaultBulkCopyType
        {
            get { return _defaultBulkCopyType;  }
            set { _defaultBulkCopyType = value; }
        }

//		public static int MultipleRowsCopy<T>(DataConnection dataConnection, IEnumerable<T> source, int maxBatchSize = 1000)
//		{
//			return dataConnection.BulkCopy(
//				new BulkCopyOptions
//				{
//					BulkCopyType = BulkCopyType.MultipleRows,
//					MaxBatchSize = maxBatchSize,
//				}, source);
//		}

        /// <summary>
        /// Runs a provider-specific (SqlBulkCopy) bulk insert with the given options.
        /// </summary>
        public static BulkCopyRowsCopied ProviderSpecificBulkCopy<T>(
            DataConnection             dataConnection,
            IEnumerable<T>             source,
            int?                       maxBatchSize       = null,
            int?                       bulkCopyTimeout    = null,
            bool                       keepIdentity       = false,
            bool                       checkConstraints   = false,
            int                        notifyAfter        = 0,
            Action<BulkCopyRowsCopied> rowsCopiedCallback = null)
        {
            return dataConnection.BulkCopy(
                new BulkCopyOptions
                {
                    BulkCopyType       = BulkCopyType.ProviderSpecific,
                    MaxBatchSize       = maxBatchSize,
                    BulkCopyTimeout    = bulkCopyTimeout,
                    KeepIdentity       = keepIdentity,
                    CheckConstraints   = checkConstraints,
                    NotifyAfter        = notifyAfter,
                    RowsCopiedCallback = rowsCopiedCallback,
                }, source);
        }

        #endregion

        #region Extensions

        /// <summary>
        /// Toggles IDENTITY_INSERT for the given table.
        /// NOTE(review): BUG — the executed statement is truncated: it is just
        /// "SET IDENTITY_INSERT " with no table name and no ON/OFF, and both the
        /// <paramref name="table"/> and <paramref name="isOn"/> parameters are
        /// ignored, so this will fail server-side. It should build
        /// "SET IDENTITY_INSERT &lt;table&gt; ON|OFF" from the table's qualified name.
        /// </summary>
        public static void SetIdentityInsert<T>(this DataConnection dataConnection, ITable<T> table, bool isOn)
        {
            dataConnection.Execute("SET IDENTITY_INSERT ");
        }

        #endregion

        public static class Sql
        {
            public const string OptionRecompile = "OPTION(RECOMPILE)";
        }

        // Reader accessors used by the provider for money/decimal columns.
        public static Func<IDataReader,int,decimal> DataReaderGetMoney   = (dr, i) => dr.GetDecimal(i);
        public static Func<IDataReader,int,decimal> DataReaderGetDecimal = (dr, i) => dr.GetDecimal(i);
    }
}
/*
Copyright 2006 - 2010 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

using System;
using System.Text;
using System.Reflection;
using System.Collections;
using OpenSource.UPnP;
using OpenSource.UPnP.AV;

namespace UPnPValidator
{
    /// <summary>
    /// Provides state object and other information specific to a subtest when
    /// calling the <see cref="ISubTest.Run"/> method.
    /// </summary>
    public interface ISubTestArgument
    {
        /// <summary>
        /// Device under test.
        /// </summary>
        UPnPDevice Device { get; }

        /// <summary>
        /// Implementation-specific information about the state
        /// of a current test group. Often used as a means to
        /// provide references to detailed results of
        /// subtests, so that subtests can obtain the
        /// results.
        /// </summary>
        object TestGroupState { get; }

        /// <summary>
        /// Provides information to the test group, which gives access to
        /// CountDown methods.
        /// </summary>
        BasicTests.BasicTestGroup TestGroup { get; }

        /// <summary>
        /// Listing of tests that the argument will be used with.
        /// </summary>
        TestQueue ActiveTests { get; set; }
    }

    /// <summary>
    /// Contract for a single subtest in a validation test group.
    /// </summary>
    public interface ISubTest
    {
        /// <summary>
        /// If true, then the subtest is enabled.
        /// </summary>
        bool Enabled { get; set; }

        /// <summary>
        /// Unique name for the test within a test group.
        /// </summary>
        string Name { get; }

        /// <summary>
        /// A description of the subtest.
        /// </summary>
        string Description { get; }

        /// <summary>
        /// Tells the test to recalculate its expected run-time,
        /// useful for calculating expected time when the results
        /// of a prerequisite affect the running time of a test.
        /// </summary>
        /// <param name="otherSubTests"></param>
        /// <param name="arg"></param>
        void CalculateExpectedTestingTime(ICollection otherSubTests, ISubTestArgument arg);

        /// <summary>
        /// The expected time in seconds that the subtest should take.
        /// This value may change.
        /// </summary>
        int ExpectedTestingTime { get; }

        /// <summary>
        /// Test-defined object for describing the results of the tests
        /// in terms of object variables.
        /// </summary>
        object Details { get; }

        /// <summary>
        /// A list of prerequisite subtests.
        /// </summary>
        SubTest[] Prerequisites { get; }

        /// <summary>
        /// Current test state of the subtest.
        /// </summary>
        UPnPTestStates TestState { get; }

        /// <summary>
        /// Run the subtest.
        /// </summary>
        /// <param name="otherSubTests">
        /// Collection of <see cref="ISubTest"/> objects that have run or will run
        /// as part of a sequence of subtests along with this test. Provided so that
        /// a subtest can have information about what has run before and after it.
        /// </param>
        /// <param name="arg">
        /// This <see cref="ISubTestArgument"/> object can be used to pass state information
        /// from one subtest to another.
/// </param> /// <returns>indicates the result of the test</returns> UPnPTestStates Run(ICollection otherSubTests, ISubTestArgument arg); } public abstract class SubTest : ISubTest, IComparable { protected string _Name; public string Name { get { return _Name; } } protected string _Description; public string Description { get { return _Description; } } protected int _ExpectedTestingTime = 0; public int ExpectedTestingTime { get { return _ExpectedTestingTime; } } internal void SetExpectedTestingTime (int expectedTime) { this._ExpectedTestingTime = expectedTime; } public abstract object Details { get; } public abstract void CalculateExpectedTestingTime(ICollection otherSubTests, ISubTestArgument arg); protected ArrayList _Prerequisites = new ArrayList(); public SubTest[] Prerequisites { get { return (SubTest[]) _Prerequisites.ToArray(typeof(SubTest)); } } protected UPnPTestStates _TestState = UPnPTestStates.Ready; public UPnPTestStates TestState { get { return _TestState; } } public bool _Enabled = true; public bool Enabled { get { return _Enabled; } set { _Enabled = value; } } /// <summary> /// /// </summary> /// <param name="otherSubTests"> /// Collection of <see cref="ISubTest"/> objects that have run or will run /// as part of a sequence of subtests along with this test. Provided so that /// a subtest can have information about what has run before and after it. /// </param> /// <param name="arg"> /// This <see cref="ISubTestArgument"/> object can be used to pass state information /// from one subtest to another. /// </param> /// <returns>indicates the result of the test</returns> public abstract UPnPTestStates Run (ICollection otherSubTests, ISubTestArgument arg); /// <summary> /// Allows SubTest objects to be sorted according to their prerequisites. 
/// </summary> /// <param name="subTest">other <see cref="SubTest"/> object to compare against</param> /// <returns>0=if no dependencies; 1=if this test has dependency on other test; -1=if this test is dependency of other test</returns> public int CompareTo(object subTest) { SubTest other = (SubTest) subTest; int result = 0; foreach (SubTest prereq in this._Prerequisites) { if (other.Name == prereq.Name) { // other subtest is a prerequisite of this test, // so this test has a greater value result = 1; } } foreach (SubTest otherPrereq in other._Prerequisites) { if (this.Name == otherPrereq.Name) { if (result == 0) { // this test is a prereq of the other test // so this test has a lower value result = -1; } else { throw new ApplicationException(this.Name + " and " + other.Name + " are prerequisites of each other."); } } } return result; } /// <summary> /// Base class for storing input parameters to a upnp action. /// Derived classes should declare public fields if they /// desire those fields to get printed in the LogInvokeError method. /// The order of the fields should match the order of the input parameters. /// </summary> public abstract class InputParams : ICloneable { public virtual object Clone() { return this.MemberwiseClone(); } } /// <summary> /// /// </summary> /// <param name="testGroup"></param> /// <param name="test"></param> /// <param name="input"></param> /// <param name="methodName"></param> /// <param name="invokeError"></param> /// <param name="otherErrors"></param> public static void LogErrors(AdvancedTestGroup testGroup, CdsSubTest test, InputParams input, string methodName, UPnPInvokeException invokeError, IList otherErrors) { if (invokeError != null) { StringBuilder msg = new StringBuilder(); msg.Append("\r\n"); msg.AppendFormat("[{0}]({1}) <Invoke Error>. 
", testGroup.GroupName, test.Name, LogImportance.Critical.ToString()); FieldInfo[] fi = input.GetType().GetFields(); msg.AppendFormat("Method={0}\r\n \tInput=(", methodName); for (int i=0; i < fi.Length; i++) { msg.Append("\r\n\t\t"); object val = fi[i].GetValue(input); string valString = ""; if (val != null) { valString = val.ToString(); } if (i > 0) { msg.Append(", "); } msg.AppendFormat("[{0}={1}]", fi[i].Name, valString); } msg.Append("\r\n\t)."); msg.AppendFormat("\r\n\tInvokeErrorMessage=<{0}>.", PrintStackTraceRecursively(invokeError, "\t\t")); foreach (Exception e in otherErrors) { if (e != null) { msg.AppendFormat("\r\n\tAdditionalErrorInfo=<{0}>.", PrintStackTraceRecursively(e, "\t\t")); } } testGroup.AddEvent(LogImportance.Critical, test.Name, msg.ToString()); } } public static string PrintStackTraceRecursively(Exception e, string tabs) { string msg = "\r\n"+ e.Message.Replace("\r\n", "\r\n"+tabs); string msg2 = ""; if (e.InnerException != null) { msg2 = PrintStackTraceRecursively(e.InnerException, tabs+"\t"); } return msg + msg2; } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

namespace Microsoft.DocAsCode.Metadata.ManagedReference
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Security.Cryptography;
    using System.Reflection;

    using Microsoft.DocAsCode.Common;

    /// <summary>
    /// Base class for an on-disk build cache: maps a normalized key (derived from the
    /// set of input projects) to a <see cref="BuildInfo"/> describing a previous build,
    /// validated on read via an MD5 checksum over the recorded output files.
    /// </summary>
    internal abstract class CacheBase
    {
        private static readonly int CleanupIntervalInDays = 5; // entries older than this are purged
        private static readonly int CleanupMaxCount = 100;     // max entries before forced cleanup
        private static readonly int CleanupTo = 10;            // keep only the latest N on cleanup

        private Dictionary<string, BuildInfo> _configs;
        private readonly string _path;
        public static readonly string AssemblyName;

        static CacheBase()
        {
            AssemblyName = Assembly.GetExecutingAssembly().GetName().ToString();
        }

        public CacheBase(string path)
        {
            _path = path;
            _configs = ReadCacheFile(path);
        }

        /// <summary>
        /// Returns the cached build info for the given input projects,
        /// or null if absent or corrupted.
        /// </summary>
        public BuildInfo GetValidConfig(IEnumerable<string> inputProjects)
        {
            var key = StringExtension.GetNormalizedFullPathKey(inputProjects);
            return GetConfig(key);
        }

        /// <summary>
        /// Records a completed build (stamped with the current UTC time) in the cache
        /// and persists the cache file.
        /// </summary>
        public void SaveToCache(IEnumerable<string> inputProjects, IDictionary<string, List<string>> containedFiles, DateTime triggeredTime, string outputFolder, IList<string> fileRelativePaths, bool shouldSkipMarkup)
        {
            var key = StringExtension.GetNormalizedFullPathKey(inputProjects);
            DateTime completeTime = DateTime.UtcNow;
            BuildInfo info = new BuildInfo
            {
                InputFilesKey = key,
                ContainedFiles = containedFiles,
                TriggeredUtcTime = triggeredTime,
                CompleteUtcTime = completeTime,
                OutputFolder = StringExtension.ToNormalizedFullPath(outputFolder),
                RelatvieOutputFiles = StringExtension.GetNormalizedPathList(fileRelativePaths),
                BuildAssembly = AssemblyName,
                ShouldSkipMarkup = shouldSkipMarkup
            };
            this.SaveConfig(key, info);
        }

        #region Virtual Methods

        /// <summary>
        /// Reads the cached entry for <paramref name="key"/> and re-validates its stored
        /// checksum against the output files currently on disk. Returns null (after logging
        /// a warning) when the entry is missing, corrupted, or unreadable.
        /// </summary>
        protected virtual BuildInfo GetConfig(string key)
        {
            BuildInfo buildInfo = this.ReadConfig(key);
            if (buildInfo != null)
            {
                var checksum = buildInfo.CheckSum;
                try
                {
                    var resultCorrupted = GetMd5(buildInfo.OutputFolder, buildInfo.RelatvieOutputFiles) != checksum;

                    if (!resultCorrupted && checksum != null)
                    {
                        return buildInfo;
                    }
                    else
                    {
                        Logger.Log(LogLevel.Warning, $"Cache for {key} in {_path} is corrupted");
                    }
                }
                catch (Exception e)
                {
                    Logger.Log(LogLevel.Warning, $"Cache for {key} in {_path} is not valid: {e.Message}");
                }
            }

            return null;
        }

        protected virtual BuildInfo ReadConfig(string key)
        {
            // Single dictionary lookup; null when the key is not cached.
            BuildInfo info;
            if (_configs.TryGetValue(key, out info)) return info;
            return null;
        }

        /// <summary>
        /// Stores the entry (stamping its checksum), prunes stale entries,
        /// and persists the whole cache to <c>_path</c>.
        /// </summary>
        protected virtual void SaveConfig(string key, BuildInfo config)
        {
            config.CheckSum = GetMd5(config.OutputFolder, config.RelatvieOutputFiles);
            _configs[key] = config;
            CleanupConfig();
            JsonUtility.Serialize(_path, _configs);
        }

        protected virtual void CleanupConfig()
        {
            // Copy old keys to a new list first: removing while enumerating would throw.
            var oldKeys = _configs.Where(s => s.Value.TriggeredUtcTime.CompareTo(DateTime.UtcNow.AddDays(-CleanupIntervalInDays)) < 1).ToList();
            foreach (var key in oldKeys)
            {
                _configs.Remove(key.Key);
            }

            if (_configs.Count > CleanupMaxCount)
            {
                var cleanUpTo = Math.Min(CleanupMaxCount, CleanupTo);

                // Keep only the most recently triggered entries.
                _configs = _configs.OrderByDescending(s => s.Value.TriggeredUtcTime).Take(cleanUpTo).ToDictionary(s => s.Key, s => s.Value);
            }
        }

        #endregion

        #region Private Methodes

        private static Dictionary<string, BuildInfo> ReadCacheFile(string path)
        {
            try
            {
                if (File.Exists(path))
                {
                    return JsonUtility.Deserialize<Dictionary<string, BuildInfo>>(path);
                }
            }
            catch
            {
                // Best effort: a corrupted or unreadable cache file is treated as empty.
            }

            return new Dictionary<string, BuildInfo>();
        }

        /// <summary>
        /// MD5 over the concatenated content of the given files (relative to
        /// <paramref name="rootFolder"/>), or null when no file list was recorded.
        /// </summary>
        private static string GetMd5(string rootFolder, IEnumerable<string> relativeFilePath)
        {
            if (relativeFilePath == null) return null;
            var files = (from p in relativeFilePath select Path.Combine(rootFolder, p)).ToList();

            // FIX: MD5 is IDisposable and was previously never disposed (handle leak).
            using (MD5 md5 = MD5.Create())
            using (FileCollectionStream reader = new FileCollectionStream(files))
            {
                var hash = md5.ComputeHash(reader);
                return BitConverter.ToString(hash).Replace("-", "");
            }
        }

        /// <summary>
        /// Forward-only read stream that concatenates the content of a sequence of files,
        /// opening each lazily and disposing it once exhausted.
        /// </summary>
        class FileCollectionStream : Stream
        {
            private IEnumerator<string> _fileEnumerator;
            private FileStream _stream;

            public FileCollectionStream(IEnumerable<string> files)
            {
                if (files == null) _fileEnumerator = null;
                else _fileEnumerator = files.GetEnumerator();
            }

            public override bool CanRead { get { return true; } }
            public override bool CanSeek { get { return false; } }
            public override bool CanWrite { get { return false; } }
            public override long Length { get { throw new NotSupportedException(); } }
            public override long Position
            {
                get { throw new NotSupportedException(); }
                set { throw new NotSupportedException(); }
            }

            public override void Flush()
            {
                throw new NotSupportedException();
            }

            public override int Read(byte[] buffer, int offset, int count)
            {
                if (_fileEnumerator == null) return 0;

                if (_stream == null)
                {
                    if (!TryGetNextFileStream(out _stream)) return 0;
                }

                int bytesRead;
                while (true)
                {
                    bytesRead = _stream.Read(buffer, offset, count);
                    if (bytesRead == 0)
                    {
                        // Current file exhausted: dispose it and advance to the next one.
                        _stream.Dispose();
                        if (!TryGetNextFileStream(out _stream)) return 0;
                    }
                    else
                    {
                        return bytesRead;
                    }
                }
            }

            public override long Seek(long offset, SeekOrigin origin)
            {
                throw new NotSupportedException();
            }

            public override void SetLength(long value)
            {
                throw new NotSupportedException();
            }

            public override void Write(byte[] buffer, int offset, int count)
            {
                throw new NotSupportedException();
            }

            protected override void Dispose(bool disposing)
            {
                if (disposing)
                {
                    if (_fileEnumerator != null) _fileEnumerator.Dispose();
                    if (_stream != null) _stream.Dispose();
                }

                base.Dispose(disposing);
            }

            private bool TryGetNextFileStream(out FileStream stream)
            {
                var next = _fileEnumerator.MoveNext();
                if (!next)
                {
                    stream = null;
                    return false;
                }

                stream = new FileStream(_fileEnumerator.Current, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                return true;
            }
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Completion.Providers;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Implementation.IntelliSense.Completion;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.CSharp.Completion.CompletionProviders
{
    /// <summary>
    /// C# completion provider for "override" member completion: parses the partially
    /// typed modifier/return-type prefix and offers overridable base members.
    /// </summary>
    [ExportCompletionProvider("OverrideCompletionProvider", LanguageNames.CSharp)]
    internal partial class OverrideCompletionProvider : AbstractOverrideCompletionProvider, ICustomCommitCompletionProvider
    {
        [ImportingConstructor]
        public OverrideCompletionProvider(
            IWaitIndicator waitIndicator)
            : base(waitIndicator)
        {
        }

        // Walks up from the token to the nearest member declaration that an
        // override completion can target (event/property/indexer/method).
        protected override SyntaxNode GetSyntax(SyntaxToken token)
        {
            return (SyntaxNode)token.GetAncestor<EventFieldDeclarationSyntax>()
                ?? (SyntaxNode)token.GetAncestor<EventDeclarationSyntax>()
                ?? (SyntaxNode)token.GetAncestor<PropertyDeclarationSyntax>()
                ?? (SyntaxNode)token.GetAncestor<IndexerDeclarationSyntax>()
                ?? (SyntaxNode)token.GetAncestor<MethodDeclarationSyntax>();
        }

        protected override TextSpan GetTextChangeSpan(SourceText text, int position)
        {
            return CompletionUtilities.GetTextChangeSpan(text, position);
        }

        public override bool IsTriggerCharacter(SourceText text, int characterPosition, OptionSet options)
        {
            return CompletionUtilities.IsTriggerAfterSpaceOrStartOfWordCharacter(text, characterPosition, options);
        }

        // Re-finds the (possibly stale) item token in the current tree by position.
        protected override SyntaxToken GetToken(MemberInsertionCompletionItem completionItem, SyntaxTree tree, CancellationToken cancellationToken)
        {
            var token = completionItem.Token;
            return tree.FindTokenOnLeftOfPosition(token.Span.End, cancellationToken);
        }

        /// <summary>
        /// If <paramref name="startToken"/> sits inside a type syntax, binds it as the
        /// declared return type and moves <paramref name="nextToken"/> before it.
        /// Returns false only when the "type" is actually the 'partial' keyword.
        /// </summary>
        public override bool TryDetermineReturnType(SyntaxToken startToken, SemanticModel semanticModel, CancellationToken cancellationToken, out ITypeSymbol returnType, out SyntaxToken nextToken)
        {
            nextToken = startToken;
            returnType = null;
            if (startToken.Parent is TypeSyntax)
            {
                var typeSyntax = (TypeSyntax)startToken.Parent;

                // 'partial' is actually an identifier.  If we see it just bail.  This does mean
                // we won't handle overrides that actually return a type called 'partial'.  And
                // not a single tear was shed.
                if (typeSyntax is IdentifierNameSyntax &&
                    ((IdentifierNameSyntax)typeSyntax).Identifier.IsKindOrHasMatchingText(SyntaxKind.PartialKeyword))
                {
                    return false;
                }

                returnType = semanticModel.GetTypeInfo(typeSyntax, cancellationToken).Type;
                nextToken = typeSyntax.GetFirstToken().GetPreviousToken();
            }

            return true;
        }

        /// <summary>
        /// Scans backwards over the modifier tokens typed on the current line,
        /// accumulating accessibility and unsafe/sealed/abstract flags.
        /// Returns true only when an 'override' keyword was found on that line.
        /// </summary>
        public override bool TryDetermineModifiers(SyntaxToken startToken, SourceText text, int startLine, out Accessibility seenAccessibility, out DeclarationModifiers modifiers)
        {
            var token = startToken;
            modifiers = new DeclarationModifiers();
            seenAccessibility = Accessibility.NotApplicable;
            var overrideToken = default(SyntaxToken);
            bool isUnsafe = false;
            bool isSealed = false;
            bool isAbstract = false;

            // Walk left, token by token, while still on the line where typing started.
            while (IsOnStartLine(token.SpanStart, text, startLine) && !token.IsKind(SyntaxKind.None))
            {
                switch (token.Kind())
                {
                    case SyntaxKind.UnsafeKeyword:
                        isUnsafe = true;
                        break;
                    case SyntaxKind.OverrideKeyword:
                        overrideToken = token;
                        break;
                    case SyntaxKind.SealedKeyword:
                        isSealed = true;
                        break;
                    case SyntaxKind.AbstractKeyword:
                        isAbstract = true;
                        break;
                    case SyntaxKind.ExternKeyword:
                        break;

                    // Filter on the most recently typed accessibility; keep the first one we see

                    case SyntaxKind.PublicKeyword:
                        if (seenAccessibility == Accessibility.NotApplicable)
                        {
                            seenAccessibility = Accessibility.Public;
                        }

                        break;
                    case SyntaxKind.InternalKeyword:
                        if (seenAccessibility == Accessibility.NotApplicable)
                        {
                            seenAccessibility = Accessibility.Internal;
                        }

                        // If we see internal AND protected, filter for protected internal
                        if (seenAccessibility == Accessibility.Protected)
                        {
                            seenAccessibility = Accessibility.ProtectedOrInternal;
                        }

                        break;
                    case SyntaxKind.ProtectedKeyword:
                        if (seenAccessibility == Accessibility.NotApplicable)
                        {
                            seenAccessibility = Accessibility.Protected;
                        }

                        // If we see protected AND internal, filter for protected internal
                        if (seenAccessibility == Accessibility.Internal)
                        {
                            seenAccessibility = Accessibility.ProtectedOrInternal;
                        }

                        break;
                    default:
                        // Anything else and we bail.
                        return false;
                }

                var previousToken = token.GetPreviousToken();

                // We only want to consume modifiers on the start line.
                if (previousToken.IsKind(SyntaxKind.None) || !IsOnStartLine(previousToken.SpanStart, text, startLine))
                {
                    break;
                }

                token = previousToken;
            }

            startToken = token;
            modifiers = new DeclarationModifiers(isUnsafe: isUnsafe, isAbstract: isAbstract, isOverride: true, isSealed: isSealed);
            return overrideToken.IsKind(SyntaxKind.OverrideKeyword) && IsOnStartLine(overrideToken.Parent.SpanStart, text, startLine);
        }

        public override SyntaxToken FindStartingToken(SyntaxTree tree, int position, CancellationToken cancellationToken)
        {
            var token = tree.FindTokenOnLeftOfPosition(position, cancellationToken);
            return token.GetPreviousTokenIfTouchingWord(position);
        }

        public override ISet<ISymbol> FilterOverrides(ISet<ISymbol> members, ITypeSymbol returnType)
        {
            var filteredMembers = new HashSet<ISymbol>(
                from m in members
                where SymbolEquivalenceComparer.Instance.Equals(GetReturnType(m), returnType)
                select m);

            // Don't filter by return type if we would then have nothing to show.
            // This way, the user gets completion even if they speculatively typed the wrong return type
            if (filteredMembers.Count > 0)
            {
                members = filteredMembers;
            }

            return members;
        }

        /// <summary>
        /// Computes where the caret should land after the generated member is inserted.
        /// </summary>
        protected override int GetTargetCaretPosition(SyntaxNode caretTarget)
        {
            // Inserted Event declarations are a single line, so move to the end of the line.
            if (caretTarget is EventFieldDeclarationSyntax)
            {
                return caretTarget.GetLocation().SourceSpan.End;
            }
            else if (caretTarget is MethodDeclarationSyntax)
            {
                var methodDeclaration = (MethodDeclarationSyntax)caretTarget;

                // abstract override blah(); : move to the end of the line
                if (methodDeclaration.Body == null)
                {
                    return methodDeclaration.GetLocation().SourceSpan.End;
                }
                else
                {
                    // move to the end of the last statement in the method
                    var lastStatement = methodDeclaration.Body.Statements.Last();
                    return lastStatement.GetLocation().SourceSpan.End;
                }
            }
            else if (caretTarget is BasePropertyDeclarationSyntax)
            {
                // property: no accessors; move to the end of the declaration
                var propertyDeclaration = (BasePropertyDeclarationSyntax)caretTarget;
                if (propertyDeclaration.AccessorList != null && propertyDeclaration.AccessorList.Accessors.Any())
                {
                    // move to the end of the last statement of the first accessor
                    // NOTE(review): assumes the first accessor has a non-null Body with at
                    // least one statement — would throw for generated auto/abstract accessors.
                    var firstAccessorStatement = propertyDeclaration.AccessorList.Accessors.First().Body.Statements.Last();
                    return firstAccessorStatement.GetLocation().SourceSpan.End;
                }
                else
                {
                    return propertyDeclaration.GetLocation().SourceSpan.End;
                }
            }
            else
            {
                throw ExceptionUtilities.Unreachable;
            }
        }
    }
}
/*******************************************************************************
 *  Copyright 2008-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *  Licensed under the Apache License, Version 2.0 (the "License"). You may not use
 *  this file except in compliance with the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 *  or in the "license" file accompanying this file.
 *  This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 *  CONDITIONS OF ANY KIND, either express or implied. See the License for the
 *  specific language governing permissions and limitations under the License.
 * *****************************************************************************
 *    __  _    _  ___
 *   (  )( \/\/ )/ __)
 *   /__\ \    / \__ \
 *  (_)(_) \/\/  (___/
 *
 *  AWS SDK for .NET
 *  API Version: 2006-03-01
 *
 */

using System;
using System.Collections.Generic;
using System.Text;

using Amazon.Util;

namespace Amazon.S3.Model
{
    /// <summary>
    /// The parameters to request upload of a part in a multipart upload operation.
    /// </summary>
    /// <remarks>
    /// <para>
    /// If PartSize is not specified then the rest of the content from the file
    /// or stream will be sent to Amazon S3.
    /// </para>
    /// <para>
    /// You must set either the FilePath or InputStream. If FilePath is set then the FilePosition
    /// property must be set.
    /// </para>
    /// </remarks>
    public class UploadPartRequest : S3Request
    {
        // Nullable backing fields distinguish "not set" from default values
        // (see the IsSetXxx helpers used by the request marshaller).
        private string bucketName;
        private string key;
        private string uploadId;
        private int? partNumber;
        private long? partSize;
        private string md5Digest;
        private bool fGenerateMD5Digest;
        private string filePath;
        private long? filePosition;

        #region BucketName
        /// <summary>
        /// The name of the bucket containing the object to receive the part.
        /// </summary>
        public string BucketName
        {
            get { return this.bucketName; }
            set { this.bucketName = value; }
        }

        /// <summary>
        /// Sets the name of the bucket containing the object to receive the part.
        /// </summary>
        /// <param name="bucketName">The bucket name</param>
        /// <returns>the request with the BucketName set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithBucketName(string bucketName)
        {
            this.bucketName = bucketName;
            return this;
        }

        /// <summary>
        /// Checks if BucketName property is set.
        /// </summary>
        /// <returns>true if BucketName property is set.</returns>
        internal bool IsSetBucketName()
        {
            return !System.String.IsNullOrEmpty(this.bucketName);
        }

        #endregion

        #region Key
        /// <summary>
        /// The key of the object.
        /// </summary>
        public string Key
        {
            get { return this.key; }
            set { this.key = value; }
        }

        /// <summary>
        /// Sets the key of the object.
        /// </summary>
        /// <param name="key">Object key</param>
        /// <returns>the request with the Key set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithKey(string key)
        {
            this.key = key;
            return this;
        }

        /// <summary>
        /// Checks if the Key property is set.
        /// </summary>
        /// <returns>true if Key property is set.</returns>
        internal bool IsSetKey()
        {
            return !System.String.IsNullOrEmpty(this.key);
        }

        #endregion

        #region UploadId
        /// <summary>
        /// The upload id for the multipart upload in progress.
        /// </summary>
        public string UploadId
        {
            get { return this.uploadId; }
            set { this.uploadId = value; }
        }

        /// <summary>
        /// Sets the upload id for the multipart upload in progress.
        /// </summary>
        /// <param name="uploadId">The ID of the in-progress upload</param>
        /// <returns>the request with the UploadId set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithUploadId(string uploadId)
        {
            this.uploadId = uploadId;
            return this;
        }

        /// <summary>
        /// Checks if the UploadId property is set.
        /// </summary>
        /// <returns>true if UploadId property is set.</returns>
        internal bool IsSetUploadId()
        {
            return !System.String.IsNullOrEmpty(this.uploadId);
        }

        #endregion

        #region PartNumber
        /// <summary>
        /// The identifier of the part to be uploaded.
        /// </summary>
        public int PartNumber
        {
            get { return this.partNumber.GetValueOrDefault(); }
            set { this.partNumber = value; }
        }

        /// <summary>
        /// Sets the identifier of the part to be uploaded.
        /// </summary>
        /// <param name="partNumber">Part identifier</param>
        /// <returns>the request with the PartNumber set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithPartNumber(int partNumber)
        {
            this.partNumber = partNumber;
            return this;
        }

        /// <summary>
        /// Checks if PartNumber property is set.
        /// </summary>
        /// <returns>true if PartNumber property is set.</returns>
        internal bool IsSetPartNumber()
        {
            return this.partNumber.HasValue;
        }

        #endregion

        #region PartSize
        /// <summary>
        /// The size of the part to be uploaded.
        /// </summary>
        public long PartSize
        {
            get { return this.partSize.GetValueOrDefault(); }
            set { this.partSize = value; }
        }

        /// <summary>
        /// Sets the size of the part to be uploaded.
        /// </summary>
        /// <param name="partSize">The size of the part</param>
        /// <returns>the request with the PartSize set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithPartSize(long partSize)
        {
            this.partSize = partSize;
            return this;
        }

        /// <summary>
        /// Checks if PartSize property is set.
        /// </summary>
        /// <returns>true if PartSize property is set.</returns>
        internal bool IsSetPartSize()
        {
            return this.partSize.HasValue;
        }

        #endregion

        #region MD5Digest
        /// <summary>
        /// An MD5 digest for the part.
        /// </summary>
        public string MD5Digest
        {
            get { return this.md5Digest; }
            set { this.md5Digest = value; }
        }

        /// <summary>
        /// Sets an MD5 digest for the part.
        /// </summary>
        /// <param name="md5Digest">Digest value</param>
        /// <returns>the request with the Md5Digest set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithMD5Digest(string md5Digest)
        {
            this.md5Digest = md5Digest;
            return this;
        }

        /// <summary>
        /// Checks if the MD5Digest property is set.
        /// </summary>
        /// <returns>true if Md5Digest property is set.</returns>
        internal bool IsSetMD5Digest()
        {
            return !string.IsNullOrEmpty(this.md5Digest);
        }

        #endregion

        #region GenerateMD5Digest
        /// <summary>
        /// If set true, an MD5 digest is automatically computed for the part.
        /// </summary>
        /// <remarks>
        /// This is a computationally expensive operation,
        /// and will add to the total time it will take to upload
        /// data to S3. Please use this option judiciously.
        /// </remarks>
        public bool GenerateMD5Digest
        {
            get { return this.fGenerateMD5Digest; }
            set { this.fGenerateMD5Digest = value; }
        }

        /// <summary>
        /// If set true, an MD5 digest is automatically computed for the part.
        /// </summary>
        /// <remarks>
        /// This is a computationally expensive operation,
        /// and will add to the total time it will take to upload
        /// data to S3. Please use this option judiciously.
        /// </remarks>
        /// <param name="fGenerateMD5Digest">True to automatically compute an MD5 digest for the part</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithGenerateChecksum(bool fGenerateMD5Digest)
        {
            this.fGenerateMD5Digest = fGenerateMD5Digest;
            return this;
        }

        #endregion

        #region FilePath
        /// <summary>
        /// Full path and name of a file from which the content for the part is retrieved.
        /// </summary>
        public string FilePath
        {
            get { return this.filePath; }
            set { this.filePath = value; }
        }

        /// <summary>
        /// Sets the full path and name of a file from which the content for the part is retrieved.
        /// </summary>
        /// <param name="filePath">Full path and name of the file</param>
        /// <returns>the request with the FilePath set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithFilePath(string filePath)
        {
            this.filePath = filePath;
            return this;
        }

        /// <summary>
        /// Checks if the FilePath property is set.
        /// </summary>
        /// <returns>true if FilePath property is set.</returns>
        internal bool IsSetFilePath()
        {
            return !string.IsNullOrEmpty(this.filePath);
        }

        #endregion

        #region FilePosition
        /// <summary>
        /// Position in the file specified by FilePath from which to retrieve the content of the part.
        /// This field is required when a file path is specified. It is ignored when using the InputStream property.
        /// </summary>
        public long FilePosition
        {
            get { return this.filePosition.GetValueOrDefault(); }
            set { this.filePosition = value; }
        }

        /// <summary>
        /// Sets the position in the file specified by FilePath from which to retrieve the content of the part.
        /// This field is required when a file path is specified. It is ignored when using the InputStream property.
        /// </summary>
        /// <param name="filePosition">The value that FilePosition is set to</param>
        /// <returns>the request with the FilePosition set</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithFilePosition(long filePosition)
        {
            this.filePosition = filePosition;
            return this;
        }

        /// <summary>
        /// Checks if the FilePosition property is set.
        /// </summary>
        /// <returns>true if FilePosition property is set.</returns>
        internal bool IsSetFilePosition()
        {
            return this.filePosition.HasValue;
        }

        #endregion

        #region Timeout
        /// <summary>
        /// Custom Timeout property (in milliseconds).
        /// </summary>
        /// <remarks>
        /// <para>
        /// The value of this property is assigned to the
        /// Timeout property of the HTTPWebRequest object used
        /// for S3 PUT Object requests.
        /// </para>
        /// <para>
        /// Please set the timeout only if you are certain that
        /// the file will not be transferred within the default intervals
        /// for an HttpWebRequest.
        /// </para>
        /// <para>
        /// A value less than or equal to 0 will be silently ignored
        /// </para>
        /// </remarks>
        /// <seealso cref="P:System.Net.HttpWebRequest.ReadWriteTimeout"/>
        /// <seealso cref="P:System.Net.HttpWebRequest.Timeout"/>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        new public UploadPartRequest WithTimeout(int timeout)
        {
            Timeout = timeout;
            return this;
        }

        #endregion

        #region ReadWriteTimeout
        /// <summary>
        /// Custom ReadWriteTimeout property (in milliseconds).
        /// </summary>
        /// <remarks>
        /// <para>
        /// The value of this property is assigned to the
        /// ReadWriteTimeout property of the HTTPWebRequest object
        /// used for S3 PUT Object requests.
        /// </para>
        /// <para>
        /// A value less than or equal to 0 will be silently ignored
        /// </para>
        /// </remarks>
        /// <seealso cref="P:System.Net.HttpWebRequest.ReadWriteTimeout"/>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        new public UploadPartRequest WithReadWriteTimeout(int readWriteTimeout)
        {
            ReadWriteTimeout = readWriteTimeout;
            return this;
        }

        #endregion

        /// <summary>
        /// The event for Put Object progress notifications. All
        /// subscribers will be notified when a new progress
        /// event is raised.
        /// </summary>
        /// <remarks>
        /// Subscribe to this event if you want to receive
        /// put object progress notifications. Here is how:<br />
        /// 1. Define a method with a signature similar to this one:
        /// <code>
        /// private void displayProgress(object sender, PutObjectProgressArgs args)
        /// {
        ///     Console.WriteLine(args);
        /// }
        /// </code>
        /// 2. Add this method to the Put Object Progress Event delegate's invocation list
        /// <code>
        /// PutObjectRequest request = new PutObjectRequest();
        /// request.PutObjectProgressEvent += displayProgress;
        /// </code>
        /// </remarks>
        public event EventHandler<UploadPartProgressArgs> UploadPartProgressEvent;

        /// <summary>
        /// The "handler" will be notified every time a put
        /// object progress event is raised.
        /// </summary>
        /// <param name="handler">A method that consumes the put object progress notification</param>
        /// <returns>this instance of the PutObjectRequest</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public UploadPartRequest WithSubscriber(EventHandler<UploadPartProgressArgs> handler)
        {
            this.UploadPartProgressEvent += handler;
            return this;
        }

        /// <summary>
        /// This method is called by a producer of put object progress
        /// notifications. When called, all the subscribers in the
        /// invocation list will be called sequentially.
        /// </summary>
        /// <param name="incrementTransferred">The number of bytes transferred since last event</param>
        /// <param name="transferred">The number of bytes transferred</param>
        /// <param name="total">The total number of bytes to be transferred</param>
        internal override void OnRaiseProgressEvent(long incrementTransferred, long transferred, long total)
        {
            // Raised off-thread via the SDK utility so the upload loop is not blocked.
            AWSSDKUtils.InvokeInBackground(UploadPartProgressEvent, new UploadPartProgressArgs(incrementTransferred, transferred, total), this);
        }

        // Part uploads always send Expect: 100-continue so S3 can reject early.
        internal override bool Expect100Continue
        {
            get { return true; }
        }
    }

    /// <summary>
    /// Encapsulates the information needed to provide
    /// transfer progress to subscribers of the Put Object
    /// Event.
    /// </summary>
    public class UploadPartProgressArgs : TransferProgressArgs
    {
        /// <summary>
        /// The constructor takes the number of
        /// currently transferred bytes and the
        /// total number of bytes to be transferred
        /// </summary>
        /// <param name="incrementTransferred">The number of bytes transferred since last event</param>
        /// <param name="transferred">The number of bytes transferred</param>
        /// <param name="total">The total number of bytes to be transferred</param>
        public UploadPartProgressArgs(long incrementTransferred, long transferred, long total)
            : base(incrementTransferred, transferred, total)
        {
        }
    }
}
// 
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
// 
// All rights reserved.
// 
// Redistribution and use in source and binary forms, with or without 
// modification, are permitted provided that the following conditions 
// are met:
// 
// * Redistributions of source code must retain the above copyright notice, 
//   this list of conditions and the following disclaimer. 
// 
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution. 
// 
// * Neither the name of Jaroslaw Kowalski nor the names of its 
//   contributors may be used to endorse or promote products derived from this
//   software without specific prior written permission. 
// 
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 
// THE POSSIBILITY OF SUCH DAMAGE.
// 

using System.Security;

namespace NLog.Internal.FileAppenders
{
    using System;
    using System.IO;
    using System.Runtime.InteropServices;

    using NLog.Common;
    using NLog.Config;
    using NLog.Internal;
    using NLog.Time;

    /// <summary>
    /// Base class for optimized file appenders.
    /// Owns the target file name, tracks open/last-write timestamps (UTC), and provides
    /// the shared file-opening machinery (retry with exponential back-off, optional
    /// native Win32 open path) used by concrete appender subclasses.
    /// </summary>
    [SecuritySafeCritical]
    internal abstract class BaseFileAppender : IDisposable
    {
        // Per-appender RNG used only to randomize the retry back-off delay in
        // CreateFileStream(), so concurrent writers don't retry in lock-step.
        private readonly Random random = new Random();

        /// <summary>
        /// Initializes a new instance of the <see cref="BaseFileAppender" /> class.
        /// </summary>
        /// <param name="fileName">Name of the file.</param>
        /// <param name="createParameters">The create parameters.</param>
        public BaseFileAppender(string fileName, ICreateFileParameters createParameters)
        {
            this.CreateFileParameters = createParameters;
            this.FileName = fileName;
            this.OpenTime = DateTime.UtcNow; // to be consistent with timeToKill in FileTarget.AutoClosingTimerCallback
            // Sentinel: no write has happened yet. FileTouched() moves this to UtcNow.
            this.LastWriteTime = DateTime.MinValue;
        }

        /// <summary>
        /// Gets the name of the file.
        /// </summary>
        /// <value>The name of the file.</value>
        public string FileName { get; private set; }

        /// <summary>
        /// Gets the last write time.
        /// </summary>
        /// <value>The last write time. DateTime value must be of UTC kind.</value>
        public DateTime LastWriteTime { get; private set; }

        /// <summary>
        /// Gets the open time of the file.
        /// </summary>
        /// <value>The open time. DateTime value must be of UTC kind.</value>
        public DateTime OpenTime { get; private set; }

        /// <summary>
        /// Gets the file creation parameters.
        /// </summary>
        /// <value>The file creation parameters.</value>
        public ICreateFileParameters CreateFileParameters { get; private set; }

        /// <summary>
        /// Writes the specified bytes.
        /// </summary>
        /// <param name="bytes">The bytes.</param>
        public abstract void Write(byte[] bytes);

        /// <summary>
        /// Flushes this instance.
        /// </summary>
        public abstract void Flush();

        /// <summary>
        /// Closes this instance.
        /// </summary>
        public abstract void Close();

        /// <summary>
        /// Gets the file info.
        /// </summary>
        /// <param name="lastWriteTime">The last file write time. The value must be of UTC kind.</param>
        /// <param name="fileLength">Length of the file.</param>
        /// <returns>True if the operation succeeded, false otherwise.</returns>
        public abstract bool GetFileInfo(out DateTime lastWriteTime, out long fileLength);

        /// <summary>
        /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
        /// </summary>
        public void Dispose()
        {
            this.Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Releases unmanaged and - optionally - managed resources.
        /// </summary>
        /// <param name="disposing">True to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Subclasses release their file handle/stream in Close().
                this.Close();
            }
        }

        /// <summary>
        /// Records the last write time for a file.
        /// </summary>
        protected void FileTouched()
        {
            // always use system time in UTC to be consistent with FileInfo.LastWriteTimeUtc
            this.LastWriteTime = DateTime.UtcNow;
        }

        /// <summary>
        /// Records the last write time for a file to be specific date.
        /// </summary>
        /// <param name="dateTime">Date and time when the last write occurred. The value must be of UTC kind.</param>
        protected void FileTouched(DateTime dateTime)
        {
            this.LastWriteTime = dateTime;
        }

        /// <summary>
        /// Creates the file stream, retrying on IOException with a randomized,
        /// exponentially growing delay when concurrent writes are enabled.
        /// </summary>
        /// <param name="allowFileSharedWriting">If set to <c>true</c> sets the file stream to allow shared writing.</param>
        /// <returns>A <see cref="FileStream"/> object which can be used to write to the file.</returns>
        protected FileStream CreateFileStream(bool allowFileSharedWriting)
        {
            int currentDelay = this.CreateFileParameters.ConcurrentWriteAttemptDelay;

            InternalLogger.Trace("Opening {0} with allowFileSharedWriting={1}", this.FileName, allowFileSharedWriting);
            for (int i = 0; i < this.CreateFileParameters.ConcurrentWriteAttempts; ++i)
            {
                try
                {
                    try
                    {
                        return this.TryCreateFileStream(allowFileSharedWriting);
                    }
                    catch (DirectoryNotFoundException)
                    {
                        // Optionally create the missing directory and retry once immediately.
                        if (!this.CreateFileParameters.CreateDirs)
                        {
                            throw;
                        }

                        Directory.CreateDirectory(Path.GetDirectoryName(this.FileName));
                        return this.TryCreateFileStream(allowFileSharedWriting);
                    }
                }
                catch (IOException)
                {
                    // Give up if concurrent writes are disabled or this was the last attempt.
                    if (!this.CreateFileParameters.ConcurrentWrites || i + 1 == this.CreateFileParameters.ConcurrentWriteAttempts)
                    {
                        throw; // rethrow
                    }

                    // Random delay in [0, currentDelay) de-synchronizes competing writers;
                    // the window doubles after each failed attempt.
                    int actualDelay = this.random.Next(currentDelay);
                    InternalLogger.Warn("Attempt #{0} to open {1} failed. Sleeping for {2}ms", i, this.FileName, actualDelay);
                    currentDelay *= 2;
                    System.Threading.Thread.Sleep(actualDelay);
                }
            }

            // Unreachable: the loop either returns a stream or rethrows on the last attempt.
            throw new InvalidOperationException("Should not be reached.");
        }

#if !SILVERLIGHT && !MONO
        /// <summary>
        /// Opens the file via the Win32 CreateFile API (OPEN_ALWAYS) and wraps the
        /// resulting handle in an append-positioned <see cref="FileStream"/>.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "Objects are disposed elsewhere")]
        private FileStream WindowsCreateFile(string fileName, bool allowFileSharedWriting)
        {
            int fileShare = Win32FileNativeMethods.FILE_SHARE_READ;

            if (allowFileSharedWriting)
            {
                fileShare |= Win32FileNativeMethods.FILE_SHARE_WRITE;
            }

            // NOTE(review): this method is only invoked from TryCreateFileStream when
            // PlatformDetector.IsDesktopWin32 is true, so the "CurrentOS != Windows"
            // condition below seemingly never adds FILE_SHARE_DELETE here — confirm intended.
            if (this.CreateFileParameters.EnableFileDelete && PlatformDetector.CurrentOS != RuntimeOS.Windows)
            {
                fileShare |= Win32FileNativeMethods.FILE_SHARE_DELETE;
            }

            Microsoft.Win32.SafeHandles.SafeFileHandle handle = null;
            FileStream fileStream = null;

            try
            {
                handle = Win32FileNativeMethods.CreateFile(
                    fileName,
                    Win32FileNativeMethods.FileAccess.GenericWrite,
                    fileShare,
                    IntPtr.Zero,
                    Win32FileNativeMethods.CreationDisposition.OpenAlways,
                    this.CreateFileParameters.FileAttributes,
                    IntPtr.Zero);

                if (handle.IsInvalid)
                {
                    Marshal.ThrowExceptionForHR(Marshal.GetHRForLastWin32Error());
                }

                // On success the FileStream takes ownership of the handle; seek to the
                // end because OPEN_ALWAYS positions at the start of an existing file.
                fileStream = new FileStream(handle, FileAccess.Write, this.CreateFileParameters.BufferSize);
                fileStream.Seek(0, SeekOrigin.End);
                return fileStream;
            }
            catch
            {
                // Don't leak the native handle if stream construction/seek failed.
                if (fileStream != null)
                    fileStream.Dispose();

                if ((handle != null) && (!handle.IsClosed))
                    handle.Close();

                throw;
            }
        }
#endif

        /// <summary>
        /// Single open attempt: prefers the native Win32 path on desktop Windows
        /// (unless ForceManaged is set), falling back to a managed append-mode FileStream.
        /// </summary>
        private FileStream TryCreateFileStream(bool allowFileSharedWriting)
        {
            FileShare fileShare = FileShare.Read;

            if (allowFileSharedWriting)
            {
                fileShare = FileShare.ReadWrite;
            }

            if (this.CreateFileParameters.EnableFileDelete && PlatformDetector.CurrentOS != RuntimeOS.Windows)
            {
                fileShare |= FileShare.Delete;
            }

#if !SILVERLIGHT && !MONO
            try
            {
                if (!this.CreateFileParameters.ForceManaged && PlatformDetector.IsDesktopWin32)
                {
                    return this.WindowsCreateFile(this.FileName, allowFileSharedWriting);
                }
            }
            catch (SecurityException)
            {
                InternalLogger.Debug("Could not use native Windows create file, falling back to managed filestream");
            }
#endif

            return new FileStream(
                this.FileName,
                FileMode.Append,
                FileAccess.Write,
                fileShare,
                this.CreateFileParameters.BufferSize);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Linq;
using System.Numerics;
using Xunit;

namespace System.Security.Cryptography.Rsa.Tests
{
    // Round-trip tests for RSA.ImportParameters / RSA.ExportParameters across
    // the providers produced by RSAFactory (CSP, CNG, OpenSSL, ...).
    public partial class ImportExport
    {
        // On full .NET Framework < 4.6.2 ephemeral CNG keys cannot be exported,
        // so tests that export an auto-generated key are conditioned on this.
        private static bool EphemeralKeysAreExportable =>
            !PlatformDetection.IsFullFramework || PlatformDetection.IsNetfx462OrNewer();

        // A brand-new RSA object must lazily generate a key on first export,
        // without changing its reported KeySize.
        [ConditionalFact(nameof(EphemeralKeysAreExportable))]
        public static void ExportAutoKey()
        {
            RSAParameters privateParams;
            RSAParameters publicParams;
            int keySize;

            using (RSA rsa = RSAFactory.Create())
            {
                keySize = rsa.KeySize;

                // We've not done anything with this instance yet, but it should automatically
                // create the key, because we'll now asked about it.
                privateParams = rsa.ExportParameters(true);
                publicParams = rsa.ExportParameters(false);

                // It shouldn't be changing things when it generated the key.
                Assert.Equal(keySize, rsa.KeySize);
            }

            // Public export must omit D; private export must include it.
            Assert.Null(publicParams.D);
            Assert.NotNull(privateParams.D);

            ValidateParameters(ref publicParams);
            ValidateParameters(ref privateParams);

            Assert.Equal(privateParams.Modulus, publicParams.Modulus);
            Assert.Equal(privateParams.Exponent, publicParams.Exponent);
        }

        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #19238")]
        [Fact]
        public static void PaddedExport()
        {
            // OpenSSL's numeric type for the storage of RSA key parts disregards zero-valued
            // prefix bytes.
            //
            // The .NET 4.5 RSACryptoServiceProvider type verifies that all of the D breakdown
            // values (P, DP, Q, DQ, InverseQ) are exactly half the size of D (which is itself
            // the same size as Modulus).
            //
            // These two things, in combination, suggest that we ensure that all .NET
            // implementations of RSA export their keys to the fixed array size suggested by their
            // KeySize property.
            RSAParameters diminishedDPParameters = TestData.DiminishedDPParameters;
            RSAParameters exported;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(diminishedDPParameters);
                exported = rsa.ExportParameters(true);
            }

            // DP is the most likely to fail, the rest just otherwise ensure that Export
            // isn't losing data.
            AssertKeyEquals(ref diminishedDPParameters, ref exported);
        }

        // A 16384-bit key should either be accepted and round-trip intact,
        // or be rejected up front with CryptographicException.
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #19238")]
        [Fact]
        public static void LargeKeyImportExport()
        {
            RSAParameters imported = TestData.RSA16384Params;

            using (RSA rsa = RSAFactory.Create())
            {
                try
                {
                    rsa.ImportParameters(imported);
                }
                catch (CryptographicException)
                {
                    // The key is pretty big, perhaps it was refused.
                    return;
                }

                RSAParameters exported = rsa.ExportParameters(false);

                Assert.Equal(exported.Modulus, imported.Modulus);
                Assert.Equal(exported.Exponent, imported.Exponent);
                Assert.Null(exported.D);

                exported = rsa.ExportParameters(true);

                AssertKeyEquals(ref imported, ref exported);
            }
        }

        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #19238")]
        [Fact]
        public static void UnusualExponentImportExport()
        {
            // Most choices for the Exponent value in an RSA key use a Fermat prime.
            // Since a Fermat prime is 2^(2^m) + 1, it always only has two bits set, and
            // frequently has the form { 0x01, [some number of 0x00s], 0x01 }, which has the same
            // representation in both big- and little-endian.
            //
            // The only real requirement for an Exponent value is that it be coprime to (p-1)(q-1).
            // So here we'll use the (non-Fermat) prime value 433 (0x01B1) to ensure big-endian export.
            RSAParameters unusualExponentParameters = TestData.UnusualExponentParameters;
            RSAParameters exported;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(unusualExponentParameters);
                exported = rsa.ExportParameters(true);
            }

            // Exponent is the most likely to fail, the rest just otherwise ensure that Export
            // isn't losing data.
            AssertKeyEquals(ref unusualExponentParameters, ref exported);
        }

        // Non-power-of-two key sizes (1032-bit) must also round-trip.
        [ConditionalFact(nameof(EphemeralKeysAreExportable))]
        public static void ImportExport1032()
        {
            RSAParameters imported = TestData.RSA1032Parameters;
            RSAParameters exported;
            RSAParameters exportedPublic;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(imported);
                exported = rsa.ExportParameters(true);
                exportedPublic = rsa.ExportParameters(false);
            }

            AssertKeyEquals(ref imported, ref exported);

            Assert.Equal(exportedPublic.Modulus, imported.Modulus);
            Assert.Equal(exportedPublic.Exponent, imported.Exponent);
            Assert.Null(exportedPublic.D);
        }

        // Importing a key of a different size must replace the previous key
        // and update KeySize accordingly.
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #19238")]
        [Fact]
        public static void ImportReset()
        {
            using (RSA rsa = RSAFactory.Create())
            {
                RSAParameters exported = rsa.ExportParameters(true);
                RSAParameters imported;

                // Ensure that we cause the KeySize value to change.
                if (rsa.KeySize == 1024)
                {
                    imported = TestData.RSA2048Params;
                }
                else
                {
                    imported = TestData.RSA1024Params;
                }

                Assert.NotEqual(imported.Modulus.Length * 8, rsa.KeySize);
                Assert.NotEqual(imported.Modulus, exported.Modulus);

                rsa.ImportParameters(imported);

                Assert.Equal(imported.Modulus.Length * 8, rsa.KeySize);

                exported = rsa.ExportParameters(true);

                AssertKeyEquals(ref imported, ref exported);
            }
        }

        // Importing a private key must still allow a public-only export (no D).
        [Fact]
        public static void ImportPrivateExportPublic()
        {
            RSAParameters imported = TestData.RSA1024Params;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(imported);

                RSAParameters exportedPublic = rsa.ExportParameters(false);

                Assert.Equal(imported.Modulus, exportedPublic.Modulus);
                Assert.Equal(imported.Exponent, exportedPublic.Exponent);
                Assert.Null(exportedPublic.D);
                ValidateParameters(ref exportedPublic);
            }
        }

        // Repeated exports (interleaved private/public) must be stable.
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "dotnet/corefx #19238")]
        [Fact]
        public static void MultiExport()
        {
            RSAParameters imported = TestData.RSA1024Params;

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(imported);

                RSAParameters exportedPrivate = rsa.ExportParameters(true);
                RSAParameters exportedPrivate2 = rsa.ExportParameters(true);
                RSAParameters exportedPublic = rsa.ExportParameters(false);
                RSAParameters exportedPublic2 = rsa.ExportParameters(false);
                RSAParameters exportedPrivate3 = rsa.ExportParameters(true);
                RSAParameters exportedPublic3 = rsa.ExportParameters(false);

                AssertKeyEquals(ref imported, ref exportedPrivate);

                Assert.Equal(imported.Modulus, exportedPublic.Modulus);
                Assert.Equal(imported.Exponent, exportedPublic.Exponent);
                Assert.Null(exportedPublic.D);

                ValidateParameters(ref exportedPublic);

                AssertKeyEquals(ref exportedPrivate, ref exportedPrivate2);
                AssertKeyEquals(ref exportedPrivate, ref exportedPrivate3);

                AssertKeyEquals(ref exportedPublic, ref exportedPublic2);
                AssertKeyEquals(ref exportedPublic, ref exportedPublic3);
            }
        }

        // Asking a public-only key for its private parameters must throw.
        [Fact]
        public static void PublicOnlyPrivateExport()
        {
            RSAParameters imported = new RSAParameters
            {
                Modulus = TestData.RSA1024Params.Modulus,
                Exponent = TestData.RSA1024Params.Exponent,
            };

            using (RSA rsa = RSAFactory.Create())
            {
                rsa.ImportParameters(imported);
                Assert.ThrowsAny<CryptographicException>(() => rsa.ExportParameters(true));
            }
        }

        // A modulus without an exponent is not a valid key.
        // (netfx RSACng historically threw ArgumentException instead.)
        [Fact]
        public static void ImportNoExponent()
        {
            RSAParameters imported = new RSAParameters
            {
                Modulus = TestData.RSA1024Params.Modulus,
            };

            using (RSA rsa = RSAFactory.Create())
            {
                if (rsa is RSACng && PlatformDetection.IsFullFramework)
                    Assert.Throws<ArgumentException>(() => rsa.ImportParameters(imported));
                else
                    Assert.ThrowsAny<CryptographicException>(() => rsa.ImportParameters(imported));
            }
        }

        // An exponent without a modulus is not a valid key.
        [Fact]
        public static void ImportNoModulus()
        {
            RSAParameters imported = new RSAParameters
            {
                Exponent = TestData.RSA1024Params.Exponent,
            };

            using (RSA rsa = RSAFactory.Create())
            {
                if (rsa is RSACng && PlatformDetection.IsFullFramework)
                    Assert.Throws<ArgumentException>(() => rsa.ImportParameters(imported));
                else
                    Assert.ThrowsAny<CryptographicException>(() => rsa.ImportParameters(imported));
            }
        }

        // A private key missing one CRT component (DP) must be rejected.
        [Fact]
#if TESTING_CNG_IMPLEMENTATION
        [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "https://github.com/dotnet/corefx/issues/18882")]
#endif
        public static void ImportNoDP()
        {
            // Because RSAParameters is a struct, this is a copy,
            // so assigning DP is not destructive to other tests.
            RSAParameters imported = TestData.RSA1024Params;
            imported.DP = null;

            using (RSA rsa = RSAFactory.Create())
            {
                Assert.ThrowsAny<CryptographicException>(() => rsa.ImportParameters(imported));
            }
        }

        // Field-by-field structural equality of two RSA key parameter sets.
        // D is allowed to differ from the expected bytes as long as it is
        // mathematically valid for the key (see VerifyDValue).
        internal static void AssertKeyEquals(ref RSAParameters expected, ref RSAParameters actual)
        {
            Assert.Equal(expected.Modulus, actual.Modulus);
            Assert.Equal(expected.Exponent, actual.Exponent);

            Assert.Equal(expected.P, actual.P);
            Assert.Equal(expected.DP, actual.DP);
            Assert.Equal(expected.Q, actual.Q);
            Assert.Equal(expected.DQ, actual.DQ);
            Assert.Equal(expected.InverseQ, actual.InverseQ);

            if (expected.D == null)
            {
                Assert.Null(actual.D);
            }
            else
            {
                Assert.NotNull(actual.D);

                // If the value matched expected, take that as valid and shortcut the math.
                // If it didn't, we'll test that the value is at least legal.
                if (!expected.D.SequenceEqual(actual.D))
                {
                    VerifyDValue(ref actual);
                }
            }
        }

        // Asserts the public members are present and the private CRT members
        // are all-present (private key) or all-absent (public key).
        internal static void ValidateParameters(ref RSAParameters rsaParams)
        {
            Assert.NotNull(rsaParams.Modulus);
            Assert.NotNull(rsaParams.Exponent);

            // Key compatibility: RSA as an algorithm is achievable using just N (Modulus),
            // E (public Exponent) and D (private exponent). Having all of the breakdowns
            // of D make the algorithm faster, and shipped versions of RSACryptoServiceProvider
            // have thrown if D is provided and the rest of the private key values are not.
            // So, here we're going to assert that none of them were null for private keys.
            if (rsaParams.D == null)
            {
                Assert.Null(rsaParams.P);
                Assert.Null(rsaParams.DP);
                Assert.Null(rsaParams.Q);
                Assert.Null(rsaParams.DQ);
                Assert.Null(rsaParams.InverseQ);
            }
            else
            {
                Assert.NotNull(rsaParams.P);
                Assert.NotNull(rsaParams.DP);
                Assert.NotNull(rsaParams.Q);
                Assert.NotNull(rsaParams.DQ);
                Assert.NotNull(rsaParams.InverseQ);
            }
        }

        // Checks that D is a mathematically legal private exponent for the key.
        private static void VerifyDValue(ref RSAParameters rsaParams)
        {
            if (rsaParams.P == null)
            {
                return;
            }

            // Verify that the formula (D * E) % LCM(p - 1, q - 1) == 1
            // is true.
            //
            // This is NOT the same as saying D = ModInv(E, LCM(p - 1, q - 1)),
            // because D = ModInv(E, (p - 1) * (q - 1)) is a valid choice, but will
            // still work through this formula.
            BigInteger p = PositiveBigInteger(rsaParams.P);
            BigInteger q = PositiveBigInteger(rsaParams.Q);
            BigInteger e = PositiveBigInteger(rsaParams.Exponent);
            BigInteger d = PositiveBigInteger(rsaParams.D);
            BigInteger lambda = LeastCommonMultiple(p - 1, q - 1);

            BigInteger modProduct = (d * e) % lambda;
            Assert.Equal(BigInteger.One, modProduct);
        }

        // LCM(a, b) = |a| / gcd(a, b) * |b|  (divide first to avoid overflow growth).
        private static BigInteger LeastCommonMultiple(BigInteger a, BigInteger b)
        {
            BigInteger gcd = BigInteger.GreatestCommonDivisor(a, b);
            return BigInteger.Abs(a) / gcd * BigInteger.Abs(b);
        }

        // Interprets big-endian crypto bytes as a non-negative BigInteger
        // (BigInteger's byte constructor is little-endian and sign-aware).
        private static BigInteger PositiveBigInteger(byte[] bigEndianBytes)
        {
            byte[] littleEndianBytes;

            if (bigEndianBytes[0] >= 0x80)
            {
                // Insert a padding 00 byte so the number is treated as positive.
                littleEndianBytes = new byte[bigEndianBytes.Length + 1];
                Buffer.BlockCopy(bigEndianBytes, 0, littleEndianBytes, 1, bigEndianBytes.Length);
            }
            else
            {
                littleEndianBytes = (byte[])bigEndianBytes.Clone();
            }

            Array.Reverse(littleEndianBytes);
            return new BigInteger(littleEndianBytes);
        }
    }
}
// ----------------------------------------------------------------------------------
// 
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Management.Automation;
using Microsoft.Azure.Commands.RecoveryServices.SiteRecovery.Properties;
using Microsoft.Azure.Management.RecoveryServices.SiteRecovery.Models;

namespace Microsoft.Azure.Commands.RecoveryServices.SiteRecovery
{
    /// <summary>
    /// Updates Azure Site Recovery Policy.
    /// For each optional parameter, the value used in the update request is the bound
    /// parameter when supplied, otherwise the value already stored on the policy.
    /// </summary>
    [Cmdlet(
        VerbsData.Update,
        "AzureRmRecoveryServicesAsrPolicy",
        SupportsShouldProcess = true)]
    [Alias("Update-ASRPolicy")]
    [OutputType(typeof(ASRJob))]
    public class UpdateAzureRmRecoveryServicesAsrPolicy : SiteRecoveryCmdletBase
    {
        /// <summary>
        /// Gets or sets the ASR Policy object to update.
        /// </summary>
        [Parameter(
            Mandatory = true,
            ValueFromPipeline = true)]
        [Alias("Policy")]
        public ASRPolicy InputObject { get; set; }

        /// <summary>
        /// Gets or sets a value for Replication Method of the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [ValidateSet(
            Constants.OnlineReplicationMethod,
            Constants.OfflineReplicationMethod)]
        public string ReplicationMethod { get; set; }

        /// <summary>
        /// Gets or sets Replication Frequency of the Policy in seconds.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [ValidateSet(
            Constants.Thirty,
            Constants.ThreeHundred,
            Constants.NineHundred)]
        public string ReplicationFrequencyInSeconds { get; set; }

        /// <summary>
        /// Gets or sets Recovery Points of the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [Alias("RecoveryPoints")]
        public int NumberOfRecoveryPointsToRetain { get; set; }

        /// <summary>
        /// Gets or sets Application Consistent Snapshot Frequency of the Policy in hours.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        public int ApplicationConsistentSnapshotFrequencyInHours { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether Compression needs to be Enabled on the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [ValidateSet(
            Constants.Enable,
            Constants.Disable)]
        public string Compression { get; set; }

        /// <summary>
        /// Gets or sets the Replication Port of the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        public ushort ReplicationPort { get; set; }

        /// <summary>
        /// Gets or sets the Authentication Type of the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [ValidateSet(
            Constants.AuthenticationTypeCertificate,
            Constants.AuthenticationTypeKerberos)]
        public string Authentication { get; set; }

        /// <summary>
        /// Gets or sets Replication Start time of the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        public TimeSpan? ReplicationStartTime { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether Replica should be Deleted on
        /// disabling protection of a protection entity protected by the Policy.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [ValidateSet(
            Constants.Required,
            Constants.NotRequired)]
        public string ReplicaDeletion { get; set; }

        /// <summary>
        /// Gets or sets Recovery Azure Storage Account Name of the Policy for E2A scenarios.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        public string RecoveryAzureStorageAccountId { get; set; }

        /// <summary>
        /// Gets or sets Encrypt parameter. On passing, data will be encrypted.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [ValidateSet(
            Constants.Enable,
            Constants.Disable)]
        public string Encryption { get; set; }

        /// <summary>
        /// ProcessRecord of the command. Dispatches to the E2E or E2A update path
        /// based on the policy's replication provider.
        /// </summary>
        public override void ExecuteSiteRecoveryCmdlet()
        {
            base.ExecuteSiteRecoveryCmdlet();

            if (this.ShouldProcess(
                this.InputObject.FriendlyName,
                VerbsData.Update))
            {
                if ((string.Compare(
                         this.InputObject.ReplicationProvider,
                         Constants.HyperVReplica2012,
                         StringComparison.OrdinalIgnoreCase) == 0)
                    || (string.Compare(
                            this.InputObject.ReplicationProvider,
                            Constants.HyperVReplica2012R2,
                            StringComparison.OrdinalIgnoreCase) == 0))
                {
                    this.EnterpriseToEnterprisePolicyObject();
                }
                else if (string.Compare(
                             this.InputObject.ReplicationProvider,
                             Constants.HyperVReplicaAzure,
                             StringComparison.OrdinalIgnoreCase) == 0)
                {
                    this.EnterpriseToAzurePolicyObject();
                }
            }
        }

        /// <summary>
        /// Creates an E2A Policy Object and submits the update.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the policy's replication provider is not HyperVReplicaAzure.
        /// </exception>
        private void EnterpriseToAzurePolicyObject()
        {
            if (string.Compare(
                    this.InputObject.ReplicationProvider,
                    Constants.HyperVReplicaAzure,
                    StringComparison.OrdinalIgnoreCase) != 0)
            {
                throw new InvalidOperationException(
                    string.Format(
                        Resources.IncorrectReplicationProvider,
                        this.InputObject.ReplicationProvider));
            }

            var replicationProviderSettings =
                this.InputObject.ReplicationProviderSettings as ASRHyperVReplicaAzurePolicyDetails;

            // For every parameter: use the bound value if supplied, else keep the
            // value from the existing policy.
            this.replicationFrequencyInSeconds =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicationFrequencyInSeconds))
                    ? PSRecoveryServicesClient.ConvertReplicationFrequencyToUshort(
                        this.ReplicationFrequencyInSeconds)
                    : (ushort)replicationProviderSettings.ReplicationFrequencyInSeconds;

            this.recoveryPoints =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.NumberOfRecoveryPointsToRetain))
                    ? this.NumberOfRecoveryPointsToRetain
                    : replicationProviderSettings.RecoveryPoints;

            this.applicationConsistentSnapshotFrequencyInHours =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(
                        () => this.ApplicationConsistentSnapshotFrequencyInHours))
                    ? this.ApplicationConsistentSnapshotFrequencyInHours
                    : replicationProviderSettings.ApplicationConsistentSnapshotFrequencyInHours;

            // Fix: previously read the uninitialized field 'this.replicationStartTime'
            // when the parameter was bound, silently discarding -ReplicationStartTime.
            this.replicationStartTime =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicationStartTime))
                    ? this.ReplicationStartTime
                    : replicationProviderSettings.OnlineReplicationStartTime;

            this.recoveryAzureStorageAccountId =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.RecoveryAzureStorageAccountId))
                    ? this.RecoveryAzureStorageAccountId
                    : replicationProviderSettings.ActiveStorageAccountId;

            this.encryption =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.Encryption))
                    ? this.Encryption
                    : (string.Compare(
                           replicationProviderSettings.Encryption,
                           "Disabled",
                           StringComparison.OrdinalIgnoreCase) == 0
                        ? Constants.Disable
                        : Constants.Enable);

            var hyperVReplicaAzurePolicyInput = new HyperVReplicaAzurePolicyInput
            {
                ApplicationConsistentSnapshotFrequencyInHours =
                    this.applicationConsistentSnapshotFrequencyInHours,
                Encryption = this.encryption,
                OnlineReplicationStartTime = this.replicationStartTime.ToString(),
                RecoveryPointHistoryDuration = this.recoveryPoints,
                ReplicationInterval = this.replicationFrequencyInSeconds
            };

            hyperVReplicaAzurePolicyInput.StorageAccounts = new List<string>();

            // NOTE(review): the guard checks the bound parameter, not the resolved
            // 'recoveryAzureStorageAccountId' field, so the existing policy's active
            // storage account is not re-sent when the parameter is omitted — confirm intended.
            if (this.RecoveryAzureStorageAccountId != null)
            {
                var storageAccount = this.recoveryAzureStorageAccountId;
                hyperVReplicaAzurePolicyInput.StorageAccounts.Add(storageAccount);
            }

            var updatePolicyProperties = new UpdatePolicyInputProperties
            {
                ReplicationProviderSettings = hyperVReplicaAzurePolicyInput
            };

            var updatePolicyInput = new UpdatePolicyInput
            {
                Properties = updatePolicyProperties
            };

            var response = this.RecoveryServicesClient.UpdatePolicy(
                this.InputObject.Name,
                updatePolicyInput);

            var jobResponse = this.RecoveryServicesClient.GetAzureSiteRecoveryJobDetails(
                PSRecoveryServicesClient.GetJobIdFromReponseLocation(response.Location));

            this.WriteObject(new ASRJob(jobResponse));
        }

        /// <summary>
        /// Creates an E2E Policy object and submits the update.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the policy's replication provider is neither HyperVReplica2012
        /// nor HyperVReplica2012R2.
        /// </exception>
        private void EnterpriseToEnterprisePolicyObject()
        {
            if ((string.Compare(
                     this.InputObject.ReplicationProvider,
                     Constants.HyperVReplica2012,
                     StringComparison.OrdinalIgnoreCase) != 0)
                && (string.Compare(
                        this.InputObject.ReplicationProvider,
                        Constants.HyperVReplica2012R2,
                        StringComparison.OrdinalIgnoreCase) != 0))
            {
                throw new InvalidOperationException(
                    string.Format(
                        Resources.IncorrectReplicationProvider,
                        this.InputObject.ReplicationProvider));
            }

            var replicationProviderSettings =
                this.InputObject.ReplicationProviderSettings as ASRHyperVReplicaPolicyDetails;

            // Service contract expects "OverNetwork" or "Offline" rather than the
            // user-facing Online/Offline constants.
            this.replicationMethod =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicationMethod))
                    ? (string.Compare(
                           this.ReplicationMethod,
                           Constants.OnlineReplicationMethod,
                           StringComparison.OrdinalIgnoreCase) == 0
                        ? "OverNetwork"
                        : "Offline")
                    : (string.Compare(
                           replicationProviderSettings.InitialReplicationMethod,
                           Constants.OnlineReplicationMethod,
                           StringComparison.OrdinalIgnoreCase) == 0
                        ? "OverNetwork"
                        : "Offline");

            this.replicationFrequencyInSeconds =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicationFrequencyInSeconds))
                    ? PSRecoveryServicesClient.ConvertReplicationFrequencyToUshort(
                        this.ReplicationFrequencyInSeconds)
                    : replicationProviderSettings.ReplicationFrequencyInSeconds;

            this.recoveryPoints =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.NumberOfRecoveryPointsToRetain))
                    ? this.NumberOfRecoveryPointsToRetain
                    : replicationProviderSettings.RecoveryPoints;

            this.applicationConsistentSnapshotFrequencyInHours =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(
                        () => this.ApplicationConsistentSnapshotFrequencyInHours))
                    ? this.ApplicationConsistentSnapshotFrequencyInHours
                    : replicationProviderSettings.ApplicationConsistentSnapshotFrequencyInHours;

            this.compression =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.Compression))
                    ? this.Compression
                    : replicationProviderSettings.Compression;

            this.replicationPort =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicationPort))
                    ? this.ReplicationPort
                    : replicationProviderSettings.ReplicationPort;

            // Service contract encodes authentication as 1 = Kerberos, 2 = Certificate.
            this.authentication =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.Authentication))
                    ? (ushort)(string.Compare(
                                   this.Authentication,
                                   Constants.AuthenticationTypeKerberos,
                                   StringComparison.OrdinalIgnoreCase) == 0
                        ? 1
                        : 2)
                    : (ushort)(string.Compare(
                                   replicationProviderSettings.AllowedAuthenticationType,
                                   Constants.AuthenticationTypeKerberos,
                                   StringComparison.OrdinalIgnoreCase) == 0
                        ? 1
                        : 2);

            // Fix: previously read the uninitialized field 'this.replicationStartTime'
            // when the parameter was bound, silently discarding -ReplicationStartTime.
            this.replicationStartTime =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicationStartTime))
                    ? this.ReplicationStartTime
                    : replicationProviderSettings.OnlineReplicationStartTime;

            this.replicaDeletion =
                this.MyInvocation.BoundParameters.ContainsKey(
                    Utilities.GetMemberName(() => this.ReplicaDeletion))
                    ? this.ReplicaDeletion
                    : replicationProviderSettings.ReplicaDeletionOption;

            var updatePolicyProperties = new UpdatePolicyInputProperties();

            if (string.Compare(
                    this.InputObject.ReplicationProvider,
                    Constants.HyperVReplica2012,
                    StringComparison.OrdinalIgnoreCase) == 0)
            {
                updatePolicyProperties.ReplicationProviderSettings = new HyperVReplicaPolicyInput
                {
                    AllowedAuthenticationType = this.authentication,
                    ApplicationConsistentSnapshotFrequencyInHours =
                        this.applicationConsistentSnapshotFrequencyInHours,
                    Compression = this.compression,
                    InitialReplicationMethod = this.replicationMethod,
                    OnlineReplicationStartTime = this.replicationStartTime.ToString(),
                    RecoveryPoints = this.recoveryPoints,
                    ReplicaDeletion = this.replicaDeletion,
                    ReplicationPort = this.replicationPort
                };
            }
            else
            {
                // HyperVReplica2012R2 ("Blue") additionally carries the replication frequency.
                updatePolicyProperties.ReplicationProviderSettings = new HyperVReplicaBluePolicyInput
                {
                    AllowedAuthenticationType = this.authentication,
                    ApplicationConsistentSnapshotFrequencyInHours =
                        this.applicationConsistentSnapshotFrequencyInHours,
                    Compression = this.compression,
                    InitialReplicationMethod = this.replicationMethod,
                    OnlineReplicationStartTime = this.replicationStartTime.ToString(),
                    RecoveryPoints = this.recoveryPoints,
                    ReplicaDeletion = this.replicaDeletion,
                    ReplicationPort = this.replicationPort,
                    ReplicationFrequencyInSeconds = this.replicationFrequencyInSeconds
                };
            }

            var updatePolicyInput = new UpdatePolicyInput
            {
                Properties = updatePolicyProperties
            };

            var responseBlue = this.RecoveryServicesClient.UpdatePolicy(
                this.InputObject.Name,
                updatePolicyInput);

            var jobResponseBlue = this.RecoveryServicesClient.GetAzureSiteRecoveryJobDetails(
                PSRecoveryServicesClient.GetJobIdFromReponseLocation(responseBlue.Location));

            this.WriteObject(new ASRJob(jobResponseBlue));
        }

        #region Private

        // Resolved (bound-parameter-or-existing-policy) values assembled into the
        // update request by the methods above.
        private string replicationMethod;

        private ushort replicationFrequencyInSeconds;

        private int recoveryPoints;

        private int applicationConsistentSnapshotFrequencyInHours;

        private string compression;

        private ushort replicationPort { get; set; }

        private ushort authentication { get; set; }

        private TimeSpan? replicationStartTime { get; set; }

        private string replicaDeletion { get; set; }

        private string recoveryAzureStorageAccountId { get; set; }

        private string encryption { get; set; }

        #endregion Private
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.IO;
using System.Reflection;
using System.Security;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Runtime.Versioning;
using Microsoft.Win32.SafeHandles;

namespace System
{
    // SafeHandle wrapper around the native type-name parser object. Owns the
    // handle (base(true)) and releases it via a QCall into the runtime.
    internal class SafeTypeNameParserHandle : SafeHandleZeroOrMinusOneIsInvalid
    {
        #region QCalls
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void _ReleaseTypeNameParser(IntPtr pTypeNameParser);
        #endregion

        public SafeTypeNameParserHandle()
            : base(true)
        {
        }

        // Frees the native parser and zeroes the handle; always reports success.
        protected override bool ReleaseHandle()
        {
            _ReleaseTypeNameParser(handle);
            handle = IntPtr.Zero;
            return true;
        }
    }

    // Managed front-end to the native type-name parser. Parses a fully-qualified
    // type name (assembly, nesting chain, generic arguments, array/pointer/byref
    // modifiers) and resolves it to a Type, honoring optional custom assembly and
    // type resolvers.
    internal sealed class TypeNameParser : IDisposable
    {
        #region QCalls
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void _CreateTypeNameParser(string typeName, ObjectHandleOnStack retHandle, bool throwOnError);

        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void _GetNames(SafeTypeNameParserHandle pTypeNameParser, ObjectHandleOnStack retArray);

        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void _GetTypeArguments(SafeTypeNameParserHandle pTypeNameParser, ObjectHandleOnStack retArray);

        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void _GetModifiers(SafeTypeNameParserHandle pTypeNameParser, ObjectHandleOnStack retArray);

        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [SuppressUnmanagedCodeSecurity]
        private static extern void _GetAssemblyName(SafeTypeNameParserHandle pTypeNameParser, StringHandleOnStack retString);
        #endregion

        #region Static Members

        /// <summary>
        /// Parses <paramref name="typeName"/> and resolves it to a Type.
        /// Returns null on failure when <paramref name="throwOnError"/> is false;
        /// otherwise the parser/resolver will have thrown.
        /// </summary>
        internal static Type GetType(
            string typeName,
            Func<AssemblyName, Assembly> assemblyResolver,
            Func<Assembly, string, bool, Type> typeResolver,
            bool throwOnError,
            bool ignoreCase,
            ref StackCrawlMark stackMark)
        {
            if (typeName == null)
                throw new ArgumentNullException(nameof(typeName));
            if (typeName.Length > 0 && typeName[0] == '\0')
                throw new ArgumentException(SR.Format_StringZeroLength);
            Contract.EndContractBlock();

            Type ret = null;

            SafeTypeNameParserHandle handle = CreateTypeNameParser(typeName, throwOnError);

            if (handle != null)
            {
                // If we get here the typeName must have been successfully parsed.
                // Let's construct the Type object.
                using (TypeNameParser parser = new TypeNameParser(handle))
                {
                    ret = parser.ConstructType(assemblyResolver, typeResolver, throwOnError, ignoreCase, ref stackMark);
                }
            }

            return ret;
        }
        #endregion

        #region Private Data Members
        private SafeTypeNameParserHandle m_NativeParser;
        // Characters that must be backslash-escaped inside a type name.
        private static readonly char[] SPECIAL_CHARS = { ',', '[', ']', '&', '*', '+', '\\' }; /* see typeparse.h */
        #endregion

        #region Constructor and Disposer
        private TypeNameParser(SafeTypeNameParserHandle handle)
        {
            m_NativeParser = handle;
        }

        public void Dispose()
        {
            m_NativeParser.Dispose();
        }
        #endregion

        #region private Members
        // Resolves the parsed name to a Type: assembly, then the outer type plus
        // nested-type chain, then generic arguments (recursively), and finally the
        // array/pointer/byref modifiers via RuntimeTypeHandle.GetTypeHelper.
        // Returns null (or throws, per throwOnError) at the first unresolvable step.
        private unsafe Type ConstructType(
            Func<AssemblyName, Assembly> assemblyResolver,
            Func<Assembly, string, bool, Type> typeResolver,
            bool throwOnError,
            bool ignoreCase,
            ref StackCrawlMark stackMark)
        {
            // assembly name
            Assembly assembly = null;
            string asmName = GetAssemblyName();

            // GetAssemblyName never returns null
            Debug.Assert(asmName != null);

            if (asmName.Length > 0)
            {
                assembly = ResolveAssembly(asmName, assemblyResolver, throwOnError, ref stackMark);

                if (assembly == null)
                {
                    // Cannot resolve the assembly. If throwOnError is true we should have already thrown.
                    return null;
                }
            }

            string[] names = GetNames();
            if (names == null)
            {
                // This can only happen if the type name is an empty string or if the first char is '\0'
                if (throwOnError)
                    throw new TypeLoadException(SR.Arg_TypeLoadNullStr);

                return null;
            }

            Type baseType = ResolveType(assembly, names, typeResolver, throwOnError, ignoreCase, ref stackMark);

            if (baseType == null)
            {
                // Cannot resolve the type. If throwOnError is true we should have already thrown.
                Debug.Assert(throwOnError == false);
                return null;
            }

            SafeTypeNameParserHandle[] typeArguments = GetTypeArguments();

            Type[] types = null;
            if (typeArguments != null)
            {
                types = new Type[typeArguments.Length];
                for (int i = 0; i < typeArguments.Length; i++)
                {
                    Debug.Assert(typeArguments[i] != null);

                    // Each generic argument is itself a full parsed type name;
                    // recurse with the same resolvers and flags.
                    using (TypeNameParser argParser = new TypeNameParser(typeArguments[i]))
                    {
                        types[i] = argParser.ConstructType(assemblyResolver, typeResolver, throwOnError, ignoreCase, ref stackMark);
                    }

                    if (types[i] == null)
                    {
                        // If throwOnError is true argParser.ConstructType should have already thrown.
                        Debug.Assert(throwOnError == false);
                        return null;
                    }
                }
            }

            int[] modifiers = GetModifiers();

            // Pin the modifier array and hand its address to the runtime helper
            // that applies array/pointer/byref decorations and instantiation.
            fixed (int* ptr = modifiers)
            {
                IntPtr intPtr = new IntPtr(ptr);
                return RuntimeTypeHandle.GetTypeHelper(baseType, types, intPtr, modifiers == null ? 0 : modifiers.Length);
            }
        }

        // Loads the assembly either through the caller-supplied resolver or the
        // default loader. With throwOnError == false only FileNotFoundException is
        // swallowed; other load failures still propagate.
        private static Assembly ResolveAssembly(string asmName, Func<AssemblyName, Assembly> assemblyResolver, bool throwOnError, ref StackCrawlMark stackMark)
        {
            Contract.Requires(asmName != null && asmName.Length > 0);

            Assembly assembly = null;
            if (assemblyResolver == null)
            {
                if (throwOnError)
                {
                    assembly = RuntimeAssembly.InternalLoad(asmName, null, ref stackMark, false /*forIntrospection*/);
                }
                else
                {
                    // When throwOnError is false we should only catch FileNotFoundException.
                    // Other exceptions like BadImangeFormatException should still fly.
                    try
                    {
                        assembly = RuntimeAssembly.InternalLoad(asmName, null, ref stackMark, false /*forIntrospection*/);
                    }
                    catch (FileNotFoundException)
                    {
                        return null;
                    }
                }
            }
            else
            {
                assembly = assemblyResolver(new AssemblyName(asmName));
                if (assembly == null && throwOnError)
                {
                    throw new FileNotFoundException(SR.Format(SR.FileNotFound_ResolveAssembly, asmName));
                }
            }

            return assembly;
        }

        // Resolves names[0] as the outermost type (via the custom resolver or the
        // default GetType path), then walks names[1..] as nested types.
        private static Type ResolveType(Assembly assembly, string[] names, Func<Assembly, string, bool, Type> typeResolver, bool throwOnError, bool ignoreCase, ref StackCrawlMark stackMark)
        {
            Contract.Requires(names != null && names.Length > 0);

            Type type = null;

            // both the customer provided and the default type resolvers accept escaped type names
            string OuterMostTypeName = EscapeTypeName(names[0]);

            // Resolve the top level type.
            if (typeResolver != null)
            {
                type = typeResolver(assembly, OuterMostTypeName, ignoreCase);

                if (type == null && throwOnError)
                {
                    string errorString = assembly == null ?
                        SR.Format(SR.TypeLoad_ResolveType, OuterMostTypeName):
                        SR.Format(SR.TypeLoad_ResolveTypeFromAssembly, OuterMostTypeName, assembly.FullName);

                    throw new TypeLoadException(errorString);
                }
            }
            else
            {
                if (assembly == null)
                {
                    type = RuntimeType.GetType(OuterMostTypeName, throwOnError, ignoreCase, false, ref stackMark);
                }
                else
                {
                    type = assembly.GetType(OuterMostTypeName, throwOnError, ignoreCase);
                }
            }

            // Resolve nested types.
            if (type != null)
            {
                BindingFlags bindingFlags = BindingFlags.NonPublic | BindingFlags.Public;
                if (ignoreCase)
                    bindingFlags |= BindingFlags.IgnoreCase;

                for (int i = 1; i < names.Length; i++)
                {
                    type = type.GetNestedType(names[i], bindingFlags);

                    if (type == null)
                    {
                        if (throwOnError)
                            throw new TypeLoadException(SR.Format(SR.TypeLoad_ResolveNestedType, names[i], names[i - 1]));
                        else
                            break;
                    }
                }
            }

            return type;
        }

        // Backslash-escapes any special type-name characters; returns the input
        // unchanged (no allocation) when nothing needs escaping.
        private static string EscapeTypeName(string name)
        {
            if (name.IndexOfAny(SPECIAL_CHARS) < 0)
                return name;

            StringBuilder sb = StringBuilderCache.Acquire();
            foreach (char c in name)
            {
                if (Array.IndexOf<char>(SPECIAL_CHARS, c) >= 0)
                    sb.Append('\\');

                sb.Append(c);
            }

            return StringBuilderCache.GetStringAndRelease(sb);
        }

        // Creates the native parser; returns null when parsing fails and
        // throwOnError is false (otherwise the QCall throws).
        private static SafeTypeNameParserHandle CreateTypeNameParser(string typeName, bool throwOnError)
        {
            SafeTypeNameParserHandle retHandle = null;
            _CreateTypeNameParser(typeName, JitHelpers.GetObjectHandleOnStack(ref retHandle), throwOnError);

            return retHandle;
        }

        // Outermost type name followed by the nested-type chain, in order.
        private string[] GetNames()
        {
            string[] names = null;
            _GetNames(m_NativeParser, JitHelpers.GetObjectHandleOnStack(ref names));

            return names;
        }

        // Native parser handles for each generic type argument, or null when the
        // type is not generic.
        private SafeTypeNameParserHandle[] GetTypeArguments()
        {
            SafeTypeNameParserHandle[] arguments = null;
            _GetTypeArguments(m_NativeParser, JitHelpers.GetObjectHandleOnStack(ref arguments));

            return arguments;
        }

        // Array/pointer/byref modifier codes consumed by RuntimeTypeHandle.GetTypeHelper.
        private int[] GetModifiers()
        {
            int[] modifiers = null;
            _GetModifiers(m_NativeParser, JitHelpers.GetObjectHandleOnStack(ref modifiers));

            return modifiers;
        }

        // Assembly-qualified portion of the parsed name; empty (never null) when
        // the name carried no assembly.
        private string GetAssemblyName()
        {
            string assemblyName = null;
            _GetAssemblyName(m_NativeParser, JitHelpers.GetStringHandleOnStack(ref assemblyName));

            return assemblyName;
        }
        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Net;
using System.Text;
using System.Web;
using System.Web.Routing;
using Nop.Core;
using Nop.Core.Domain.Directory;
using Nop.Core.Domain.Orders;
using Nop.Core.Domain.Payments;
using Nop.Core.Domain.Shipping;
using Nop.Core.Plugins;
using Nop.Plugin.Payments.PayPalStandard.Controllers;
using Nop.Services.Configuration;
using Nop.Services.Directory;
using Nop.Services.Localization;
using Nop.Services.Orders;
using Nop.Services.Payments;
using Nop.Services.Tax;

namespace Nop.Plugin.Payments.PayPalStandard
{
    /// <summary>
    /// PayPalStandard payment processor. Redirection-based: the shopper is sent
    /// to PayPal with the order encoded in the query string; confirmation comes
    /// back via PDT and/or IPN.
    /// </summary>
    public class PayPalStandardPaymentProcessor : BasePlugin, IPaymentMethod
    {
        #region Fields

        private readonly PayPalStandardPaymentSettings _paypalStandardPaymentSettings;
        private readonly ISettingService _settingService;
        private readonly ICurrencyService _currencyService;
        private readonly CurrencySettings _currencySettings;
        private readonly IWebHelper _webHelper;
        private readonly ICheckoutAttributeParser _checkoutAttributeParser;
        private readonly ITaxService _taxService;
        private readonly IOrderTotalCalculationService _orderTotalCalculationService;
        private readonly HttpContextBase _httpContext;

        #endregion

        #region Ctor

        // All dependencies are constructor-injected by the plugin framework.
        public PayPalStandardPaymentProcessor(PayPalStandardPaymentSettings paypalStandardPaymentSettings,
            ISettingService settingService, ICurrencyService currencyService,
            CurrencySettings currencySettings, IWebHelper webHelper,
            ICheckoutAttributeParser checkoutAttributeParser, ITaxService taxService, 
            IOrderTotalCalculationService orderTotalCalculationService, HttpContextBase httpContext)
        {
            this._paypalStandardPaymentSettings = paypalStandardPaymentSettings;
            this._settingService = settingService;
            this._currencyService = currencyService;
            this._currencySettings = currencySettings;
            this._webHelper = webHelper;
            this._checkoutAttributeParser = checkoutAttributeParser;
            this._taxService = taxService;
            this._orderTotalCalculationService = orderTotalCalculationService;
            this._httpContext = httpContext;
        }

        #endregion

        #region Utilities

        /// <summary>
        /// Gets Paypal URL (sandbox or live, per settings)
        /// </summary>
        /// <returns></returns>
        private string GetPaypalUrl()
        {
            return _paypalStandardPaymentSettings.UseSandbox ?
                "https://www.sandbox.paypal.com/us/cgi-bin/webscr" :
                "https://www.paypal.com/us/cgi-bin/webscr";
        }

        /// <summary>
        /// Gets PDT details: posts the transaction id plus the PDT token back to
        /// PayPal and parses the key=value response lines.
        /// </summary>
        /// <param name="tx">TX (transaction id returned by PayPal)</param>
        /// <param name="values">Values (parsed response fields)</param>
        /// <param name="response">Response (raw, URL-decoded)</param>
        /// <returns>Result (true when the first response line is SUCCESS)</returns>
        public bool GetPdtDetails(string tx, out Dictionary<string, string> values, out string response)
        {
            var req = (HttpWebRequest)WebRequest.Create(GetPaypalUrl());
            req.Method = "POST";
            req.ContentType = "application/x-www-form-urlencoded";
            //now PayPal requires user-agent. otherwise, we can get 403 error
            // NOTE(review): reads HttpContext.Current although _httpContext is
            // injected — verify this method is only invoked inside a request.
            req.UserAgent = HttpContext.Current.Request.UserAgent;

            string formContent = string.Format("cmd=_notify-synch&at={0}&tx={1}", _paypalStandardPaymentSettings.PdtToken, tx);
            // NOTE(review): ContentLength set to char count; correct only while
            // the body is pure ASCII (it is here — token and tx are URL-safe).
            req.ContentLength = formContent.Length;

            using (var sw = new StreamWriter(req.GetRequestStream(), Encoding.ASCII))
                sw.Write(formContent);

            using (var sr = new StreamReader(req.GetResponse().GetResponseStream()))
                response = HttpUtility.UrlDecode(sr.ReadToEnd());

            values = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            // First line carries SUCCESS/FAIL; the remaining lines are key=value pairs.
            bool firstLine = true, success = false;
            foreach (string l in response.Split('\n'))
            {
                string line = l.Trim();
                if (firstLine)
                {
                    success = line.Equals("SUCCESS", StringComparison.OrdinalIgnoreCase);
                    firstLine = false;
                }
                else
                {
                    int equalPox = line.IndexOf('=');
                    if (equalPox >= 0)
                        values.Add(line.Substring(0, equalPox), line.Substring(equalPox + 1));
                }
            }

            return success;
        }

        /// <summary>
        /// Verifies IPN: echoes the received form back to PayPal with
        /// cmd=_notify-validate and checks for a VERIFIED response.
        /// </summary>
        /// <param name="formString">Form string (raw IPN post body)</param>
        /// <param name="values">Values (parsed from the original form string)</param>
        /// <returns>Result</returns>
        public bool VerifyIpn(string formString, out Dictionary<string, string> values)
        {
            var req = (HttpWebRequest)WebRequest.Create(GetPaypalUrl());
            req.Method = "POST";
            req.ContentType = "application/x-www-form-urlencoded";
            //now PayPal requires user-agent. otherwise, we can get 403 error
            req.UserAgent = HttpContext.Current.Request.UserAgent;
            string formContent = string.Format("{0}&cmd=_notify-validate", formString);
            req.ContentLength = formContent.Length;

            using (var sw = new StreamWriter(req.GetRequestStream(), Encoding.ASCII))
            {
                sw.Write(formContent);
            }

            string response;
            using (var sr = new StreamReader(req.GetResponse().GetResponseStream()))
            {
                response = HttpUtility.UrlDecode(sr.ReadToEnd());
            }
            bool success = response.Trim().Equals("VERIFIED", StringComparison.OrdinalIgnoreCase);

            values = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            foreach (string l in formString.Split('&'))
            {
                string line = l.Trim();
                int equalPox = line.IndexOf('=');
                if (equalPox >= 0)
                    values.Add(line.Substring(0, equalPox), line.Substring(equalPox + 1));
            }

            return success;
        }

        #endregion

        #region Methods

        /// <summary>
        /// Process a payment. Nothing to charge yet for a redirection method, so
        /// the order is simply marked Pending until PDT/IPN confirms it.
        /// </summary>
        /// <param name="processPaymentRequest">Payment info required for an order processing</param>
        /// <returns>Process payment result</returns>
        public ProcessPaymentResult ProcessPayment(ProcessPaymentRequest processPaymentRequest)
        {
            var result = new ProcessPaymentResult();
            result.NewPaymentStatus = PaymentStatus.Pending;
            return result;
        }

        /// <summary>
        /// Post process payment (used by payment gateways that require redirecting to a third-party URL).
        /// Builds the PayPal Standard query string (cart items or a single order
        /// total, shipping/fee/tax lines, return/cancel/IPN URLs, billing address)
        /// and redirects the browser to PayPal.
        /// </summary>
        /// <param name="postProcessPaymentRequest">Payment info required for an order processing</param>
        public void PostProcessPayment(PostProcessPaymentRequest postProcessPaymentRequest)
        {
            var builder = new StringBuilder();
            builder.Append(GetPaypalUrl());
            // "_cart" sends itemized lines; "_xclick" sends one aggregate amount.
            var cmd = _paypalStandardPaymentSettings.PassProductNamesAndTotals ? "_cart" : "_xclick";
            builder.AppendFormat("?cmd={0}&business={1}", cmd, HttpUtility.UrlEncode(_paypalStandardPaymentSettings.BusinessEmail));
            if (_paypalStandardPaymentSettings.PassProductNamesAndTotals)
            {
                builder.AppendFormat("&upload=1");

                //get the items in the cart
                decimal cartTotal = decimal.Zero;
                var cartItems = postProcessPaymentRequest.Order.OrderItems;
                int x = 1;
                foreach (var item in cartItems)
                {
                    var unitPriceExclTax = item.UnitPriceExclTax;
                    var priceExclTax = item.PriceExclTax;
                    //round
                    var unitPriceExclTaxRounded = Math.Round(unitPriceExclTax, 2);
                    builder.AppendFormat("&item_name_" + x + "={0}", HttpUtility.UrlEncode(item.Product.Name));
                    builder.AppendFormat("&amount_" + x + "={0}", unitPriceExclTaxRounded.ToString("0.00", CultureInfo.InvariantCulture));
                    builder.AppendFormat("&quantity_" + x + "={0}", item.Quantity);
                    x++;
                    cartTotal += priceExclTax;
                }

                //the checkout attributes that have a dollar value and send them to Paypal as items to be paid for
                var attributeValues = _checkoutAttributeParser.ParseCheckoutAttributeValues(postProcessPaymentRequest.Order.CheckoutAttributesXml);
                foreach (var val in attributeValues)
                {
                    var attPrice = _taxService.GetCheckoutAttributePrice(val, false, postProcessPaymentRequest.Order.Customer);
                    //round
                    var attPriceRounded = Math.Round(attPrice, 2);
                    if (attPrice > decimal.Zero) //if it has a price
                    {
                        var attribute = val.CheckoutAttribute;
                        if (attribute != null)
                        {
                            var attName = attribute.Name; //set the name
                            builder.AppendFormat("&item_name_" + x + "={0}", HttpUtility.UrlEncode(attName)); //name
                            builder.AppendFormat("&amount_" + x + "={0}", attPriceRounded.ToString("0.00", CultureInfo.InvariantCulture)); //amount
                            builder.AppendFormat("&quantity_" + x + "={0}", 1); //quantity
                            x++;
                            cartTotal += attPrice;
                        }
                    }
                }

                //order totals

                //shipping
                var orderShippingExclTax = postProcessPaymentRequest.Order.OrderShippingExclTax;
                var orderShippingExclTaxRounded = Math.Round(orderShippingExclTax, 2);
                if (orderShippingExclTax > decimal.Zero)
                {
                    builder.AppendFormat("&item_name_" + x + "={0}", "Shipping fee");
                    builder.AppendFormat("&amount_" + x + "={0}", orderShippingExclTaxRounded.ToString("0.00", CultureInfo.InvariantCulture));
                    builder.AppendFormat("&quantity_" + x + "={0}", 1);
                    x++;
                    cartTotal += orderShippingExclTax;
                }

                //payment method additional fee
                var paymentMethodAdditionalFeeExclTax = postProcessPaymentRequest.Order.PaymentMethodAdditionalFeeExclTax;
                var paymentMethodAdditionalFeeExclTaxRounded = Math.Round(paymentMethodAdditionalFeeExclTax, 2);
                if (paymentMethodAdditionalFeeExclTax > decimal.Zero)
                {
                    builder.AppendFormat("&item_name_" + x + "={0}", "Payment method fee");
                    builder.AppendFormat("&amount_" + x + "={0}", paymentMethodAdditionalFeeExclTaxRounded.ToString("0.00", CultureInfo.InvariantCulture));
                    builder.AppendFormat("&quantity_" + x + "={0}", 1);
                    x++;
                    cartTotal += paymentMethodAdditionalFeeExclTax;
                }

                //tax
                var orderTax = postProcessPaymentRequest.Order.OrderTax;
                var orderTaxRounded = Math.Round(orderTax, 2);
                if (orderTax > decimal.Zero)
                {
                    //builder.AppendFormat("&tax_1={0}", orderTax.ToString("0.00", CultureInfo.InvariantCulture));

                    //add tax as item
                    builder.AppendFormat("&item_name_" + x + "={0}", HttpUtility.UrlEncode("Sales Tax")); //name
                    builder.AppendFormat("&amount_" + x + "={0}", orderTaxRounded.ToString("0.00", CultureInfo.InvariantCulture)); //amount
                    builder.AppendFormat("&quantity_" + x + "={0}", 1); //quantity

                    cartTotal += orderTax;
                    x++;
                }

                if (cartTotal > postProcessPaymentRequest.Order.OrderTotal)
                {
                    /* Take the difference between what the order total is and what it should be and use that as the "discount".
                     * The difference equals the amount of the gift card and/or reward points used. 
                     */
                    decimal discountTotal = cartTotal - postProcessPaymentRequest.Order.OrderTotal;
                    discountTotal = Math.Round(discountTotal, 2);
                    //gift card or rewared point amount applied to cart in nopCommerce - shows in Paypal as "discount"
                    builder.AppendFormat("&discount_amount_cart={0}", discountTotal.ToString("0.00", CultureInfo.InvariantCulture));
                }
            }
            else
            {
                //pass order total
                builder.AppendFormat("&item_name=Order Number {0}", postProcessPaymentRequest.Order.Id);
                var orderTotal = Math.Round(postProcessPaymentRequest.Order.OrderTotal, 2);
                builder.AppendFormat("&amount={0}", orderTotal.ToString("0.00", CultureInfo.InvariantCulture));
            }

            // "custom" round-trips the order GUID so PDT/IPN handlers can find the order.
            builder.AppendFormat("&custom={0}", postProcessPaymentRequest.Order.OrderGuid);
            builder.AppendFormat("&charset={0}", "utf-8");
            builder.Append(string.Format("&no_note=1&currency_code={0}", HttpUtility.UrlEncode(_currencyService.GetCurrencyById(_currencySettings.PrimaryStoreCurrencyId).CurrencyCode)));
            builder.AppendFormat("&invoice={0}", postProcessPaymentRequest.Order.Id);
            builder.AppendFormat("&rm=2", new object[0]);
            if (postProcessPaymentRequest.Order.ShippingStatus != ShippingStatus.ShippingNotRequired)
                builder.AppendFormat("&no_shipping=2", new object[0]);
            else
                builder.AppendFormat("&no_shipping=1", new object[0]);

            string returnUrl = _webHelper.GetStoreLocation(false) + "Plugins/PaymentPayPalStandard/PDTHandler";
            string cancelReturnUrl = _webHelper.GetStoreLocation(false) + "Plugins/PaymentPayPalStandard/CancelOrder";
            builder.AppendFormat("&return={0}&cancel_return={1}", HttpUtility.UrlEncode(returnUrl), HttpUtility.UrlEncode(cancelReturnUrl));

            //Instant Payment Notification (server to server message)
            if (_paypalStandardPaymentSettings.EnableIpn)
            {
                string ipnUrl;
                if (String.IsNullOrWhiteSpace(_paypalStandardPaymentSettings.IpnUrl))
                    ipnUrl = _webHelper.GetStoreLocation(false) + "Plugins/PaymentPayPalStandard/IPNHandler";
                else
                    ipnUrl = _paypalStandardPaymentSettings.IpnUrl;
                builder.AppendFormat("&notify_url={0}", ipnUrl);
            }

            //address
            builder.AppendFormat("&address_override={0}", _paypalStandardPaymentSettings.AddressOverride ? "1" : "0");
            builder.AppendFormat("&first_name={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.FirstName));
            builder.AppendFormat("&last_name={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.LastName));
            builder.AppendFormat("&address1={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.Address1));
            builder.AppendFormat("&address2={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.Address2));
            builder.AppendFormat("&city={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.City));
            //if (!String.IsNullOrEmpty(postProcessPaymentRequest.Order.BillingAddress.PhoneNumber))
            //{
            //    //strip out all non-digit characters from phone number;
            //    string billingPhoneNumber = System.Text.RegularExpressions.Regex.Replace(postProcessPaymentRequest.Order.BillingAddress.PhoneNumber, @"\D", string.Empty);
            //    if (billingPhoneNumber.Length >= 10)
            //    {
            //        builder.AppendFormat("&night_phone_a={0}", HttpUtility.UrlEncode(billingPhoneNumber.Substring(0, 3)));
            //        builder.AppendFormat("&night_phone_b={0}", HttpUtility.UrlEncode(billingPhoneNumber.Substring(3, 3)));
            //        builder.AppendFormat("&night_phone_c={0}", HttpUtility.UrlEncode(billingPhoneNumber.Substring(6, 4)));
            //    }
            //}
            if (postProcessPaymentRequest.Order.BillingAddress.StateProvince != null)
                builder.AppendFormat("&state={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.StateProvince.Abbreviation));
            else
                builder.AppendFormat("&state={0}", "");
            if (postProcessPaymentRequest.Order.BillingAddress.Country != null)
                builder.AppendFormat("&country={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.Country.TwoLetterIsoCode));
            else
                builder.AppendFormat("&country={0}", "");
            builder.AppendFormat("&zip={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.ZipPostalCode));
            builder.AppendFormat("&email={0}", HttpUtility.UrlEncode(postProcessPaymentRequest.Order.BillingAddress.Email));
            _httpContext.Response.Redirect(builder.ToString());
        }

        /// <summary>
        /// Returns a value indicating whether payment method should be hidden during checkout
        /// </summary>
        /// <param name="cart">Shoping cart</param>
        /// <returns>true - hide; false - display.</returns>
        public bool HidePaymentMethod(IList<ShoppingCartItem> cart)
        {
            //you can put any logic here
            //for example, hide this payment method if all products in the cart are downloadable
            //or hide this payment method if current customer is from certain country
            return false;
        }

        /// <summary>
        /// Gets additional handling fee (fixed amount or percentage, per settings)
        /// </summary>
        /// <param name="cart">Shoping cart</param>
        /// <returns>Additional handling fee</returns>
        public decimal GetAdditionalHandlingFee(IList<ShoppingCartItem> cart)
        {
            var result = this.CalculateAdditionalFee(_orderTotalCalculationService, cart,
                _paypalStandardPaymentSettings.AdditionalFee, _paypalStandardPaymentSettings.AdditionalFeePercentage);
            return result;
        }

        /// <summary>
        /// Captures payment (not supported by PayPal Standard)
        /// </summary>
        /// <param name="capturePaymentRequest">Capture payment request</param>
        /// <returns>Capture payment result</returns>
        public CapturePaymentResult Capture(CapturePaymentRequest capturePaymentRequest)
        {
            var result = new CapturePaymentResult();
            result.AddError("Capture method not supported");
            return result;
        }

        /// <summary>
        /// Refunds a payment (not supported by PayPal Standard)
        /// </summary>
        /// <param name="refundPaymentRequest">Request</param>
        /// <returns>Result</returns>
        public RefundPaymentResult Refund(RefundPaymentRequest refundPaymentRequest)
        {
            var result = new RefundPaymentResult();
            result.AddError("Refund method not supported");
            return result;
        }

        /// <summary>
        /// Voids a payment (not supported by PayPal Standard)
        /// </summary>
        /// <param name="voidPaymentRequest">Request</param>
        /// <returns>Result</returns>
        public VoidPaymentResult Void(VoidPaymentRequest voidPaymentRequest)
        {
            var result = new VoidPaymentResult();
            result.AddError("Void method not supported");
            return result;
        }

        /// <summary>
        /// Process recurring payment (not supported by PayPal Standard)
        /// </summary>
        /// <param name="processPaymentRequest">Payment info required for an order processing</param>
        /// <returns>Process payment result</returns>
        public ProcessPaymentResult ProcessRecurringPayment(ProcessPaymentRequest processPaymentRequest)
        {
            var result = new ProcessPaymentResult();
            result.AddError("Recurring payment not supported");
            return result;
        }

        /// <summary>
        /// Cancels a recurring payment (not supported by PayPal Standard)
        /// </summary>
        /// <param name="cancelPaymentRequest">Request</param>
        /// <returns>Result</returns>
        public CancelRecurringPaymentResult CancelRecurringPayment(CancelRecurringPaymentRequest cancelPaymentRequest)
        {
            var result = new CancelRecurringPaymentResult();
            result.AddError("Recurring payment not supported");
            return result;
        }

        /// <summary>
        /// Gets a value indicating whether customers can complete a payment after order is placed but not completed (for redirection payment methods)
        /// </summary>
        /// <param name="order">Order</param>
        /// <returns>Result</returns>
        public bool CanRePostProcessPayment(Order order)
        {
            if (order == null)
                throw new ArgumentNullException("order");

            //let's ensure that at least 5 seconds passed after order is placed
            //P.S. there's no any particular reason for that. we just do it
            if ((DateTime.UtcNow - order.CreatedOnUtc).TotalSeconds < 5)
                return false;

            return true;
        }

        /// <summary>
        /// Gets a route for provider configuration
        /// </summary>
        /// <param name="actionName">Action name</param>
        /// <param name="controllerName">Controller name</param>
        /// <param name="routeValues">Route values</param>
        public void GetConfigurationRoute(out string actionName, out string controllerName, out RouteValueDictionary routeValues)
        {
            actionName = "Configure";
            controllerName = "PaymentPayPalStandard";
            routeValues = new RouteValueDictionary { { "Namespaces", "Nop.Plugin.Payments.PayPalStandard.Controllers" }, { "area", null } };
        }

        /// <summary>
        /// Gets a route for payment info
        /// </summary>
        /// <param name="actionName">Action name</param>
        /// <param name="controllerName">Controller name</param>
        /// <param name="routeValues">Route values</param>
        public void GetPaymentInfoRoute(out string actionName, out string controllerName, out RouteValueDictionary routeValues)
        {
            actionName = "PaymentInfo";
            controllerName = "PaymentPayPalStandard";
            routeValues = new RouteValueDictionary { { "Namespaces", "Nop.Plugin.Payments.PayPalStandard.Controllers" }, { "area", null } };
        }

        // MVC controller backing the routes above.
        public Type GetControllerType()
        {
            return typeof(PaymentPayPalStandardController);
        }

        // Installs default settings and all locale resources for the plugin UI.
        public override void Install()
        {
            //settings
            var settings = new PayPalStandardPaymentSettings
            {
                UseSandbox = true,
                BusinessEmail = "test@test.com",
                PdtToken= "Your PDT token here...",
                PdtValidateOrderTotal = true,
                EnableIpn = true,
                AddressOverride = true,
            };
            _settingService.SaveSetting(settings);

            //locales
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.RedirectionTip", "You will be redirected to PayPal site to complete the order.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.UseSandbox", "Use Sandbox");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.UseSandbox.Hint", "Check to enable Sandbox (testing environment).");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.BusinessEmail", "Business Email");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.BusinessEmail.Hint", "Specify your PayPal business email.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTToken", "PDT Identity Token");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTToken.Hint", "Specify PDT identity token");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTValidateOrderTotal", "PDT. Validate order total");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTValidateOrderTotal.Hint", "Check if PDT handler should validate order totals.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFee", "Additional fee");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFee.Hint", "Enter additional fee to charge your customers.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFeePercentage", "Additional fee. Use percentage");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFeePercentage.Hint", "Determines whether to apply a percentage additional fee to the order total. If not enabled, a fixed value is used.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PassProductNamesAndTotals", "Pass product names and order totals to PayPal");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PassProductNamesAndTotals.Hint", "Check if product names and order totals should be passed to PayPal.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.EnableIpn", "Enable IPN (Instant Payment Notification)");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.EnableIpn.Hint", "Check if IPN is enabled.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.EnableIpn.Hint2", "Leave blank to use the default IPN handler URL.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.IpnUrl", "IPN Handler");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.IpnUrl.Hint", "Specify IPN Handler.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AddressOverride", "Address override");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AddressOverride.Hint", "For people who already have PayPal accounts and whom you already prompted for a shipping address before they choose to pay with PayPal, you can use the entered address instead of the address the person has stored with PayPal.");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.ReturnFromPayPalWithoutPaymentRedirectsToOrderDetailsPage", "Return to order details page");
            this.AddOrUpdatePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.ReturnFromPayPalWithoutPaymentRedirectsToOrderDetailsPage.Hint", "Enable if a customer should be redirected to the order details page when he clicks \"return to store\" link on PayPal site WITHOUT completing a payment");

            base.Install();
        }

        // Removes the settings and every locale resource added by Install().
        public override void Uninstall()
        {
            //settings
            _settingService.DeleteSetting<PayPalStandardPaymentSettings>();

            //locales
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.RedirectionTip");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.UseSandbox");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.UseSandbox.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.BusinessEmail");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.BusinessEmail.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTToken");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTToken.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTValidateOrderTotal");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PDTValidateOrderTotal.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFee");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFee.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFeePercentage");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AdditionalFeePercentage.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PassProductNamesAndTotals");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.PassProductNamesAndTotals.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.EnableIpn");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.EnableIpn.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.EnableIpn.Hint2");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.IpnUrl");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.IpnUrl.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AddressOverride");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.AddressOverride.Hint");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.ReturnFromPayPalWithoutPaymentRedirectsToOrderDetailsPage");
            this.DeletePluginLocaleResource("Plugins.Payments.PayPalStandard.Fields.ReturnFromPayPalWithoutPaymentRedirectsToOrderDetailsPage.Hint");

            base.Uninstall();
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets a value indicating whether capture is supported
        /// </summary>
        public bool SupportCapture
        {
            get
            {
                return false;
            }
        }

        /// <summary>
        /// Gets a value indicating whether partial refund is supported
        /// </summary>
        public bool SupportPartiallyRefund
        {
            get
            {
                return false;
            }
        }

        /// <summary>
        /// Gets a value indicating whether refund is supported
        /// </summary>
        public bool SupportRefund
        {
            get
            {
                return false;
            }
        }

        /// <summary>
        /// Gets a value indicating whether void is supported
        /// </summary>
        public bool SupportVoid
        {
            get
            {
                return false;
            }
        }

        /// <summary>
        /// Gets a recurring payment type of payment method
        /// </summary>
        public RecurringPaymentType RecurringPaymentType
        {
            get
            {
                return RecurringPaymentType.NotSupported;
            }
        }

        /// <summary>
        /// Gets a payment method type
        /// </summary>
        public PaymentMethodType PaymentMethodType
        {
            get
            {
                return PaymentMethodType.Redirection;
            }
        }

        /// <summary>
        /// Gets a value indicating whether we should display a payment information page for this plugin
        /// </summary>
        public bool SkipPaymentInfo
        {
            get
            {
                return false;
            }
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using AzureBrowserApp.Areas.HelpPage.ModelDescriptions;
using AzureBrowserApp.Areas.HelpPage.Models;

namespace AzureBrowserApp.Areas.HelpPage
{
    /// <summary>
    /// Extension methods on <see cref="HttpConfiguration"/> used by the generated Web API help page:
    /// registering documentation providers, configuring sample requests/responses, and building
    /// (and caching) the per-API <see cref="HelpPageApiModel"/> instances.
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Key prefix for caching generated HelpPageApiModel instances in HttpConfiguration.Properties.
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" is the wildcard parameter list: the sample applies to the action regardless of parameters.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily create a single shared generator, stored in the configuration's property bag.
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }

            // NOTE(review): returns null when no matching ApiDescription is found (model stays null).
            return (HelpPageApiModel)model;
        }

        // Builds the full help model for one API: URI parameters, request/resource descriptions and samples.
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };

            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);

            return apiModel;
        }

        // Populates apiModel.UriParameters from the action's FromUri parameters.
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }

                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType))
                    {
                        // Flatten a complex type into one URI parameter per public property.
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription);

                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }

                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);

                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }

        // True when the type can be bound from a route/query string via a string TypeConverter.
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }

            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }

        // Adds one URI parameter entry to the model and returns it so callers can attach annotations.
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };

            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }

        // Derives the request body description from the FromBody parameter, or from an
        // HttpRequestMessage parameter whose actual payload type was registered via SetActualRequestType.
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);

                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }

        // Describes the response resource type (ResponseType attribute wins over the declared return type).
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                // Sample generation failures are surfaced on the page instead of failing the request.
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
                    "An exception has occurred while generating the sample. Exception message: {0}",
                    HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }

        // Finds the parameter that carries the request body (FromBody, or a resolvable HttpRequestMessage).
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }

            resourceType = parameterDescription.ParameterDescriptor.ParameterType;

            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }

            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }

            return true;
        }

        // Warms up the model description generator by visiting every API's body parameter type once.
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }

            return modelGenerator;
        }

        // Records an InvalidSample's error message on the model; no-op for valid samples.
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
//-----------------------------------------------------------------------
// <copyright file="TestPublisher.cs" company="Akka.NET Project">
//     Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
//     Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using Akka.Actor;
using Akka.Event;
using Akka.Streams.Implementation;
using Akka.TestKit;
using Reactive.Streams;

namespace Akka.Streams.TestKit
{
    /// <summary>
    /// Provides factory methods for various Publishers.
    /// </summary>
    public static class TestPublisher
    {
        #region messages

        // Marker interface for events forwarded to the probe's TestProbe actor.
        public interface IPublisherEvent : INoSerializationVerificationNeeded, IDeadLetterSuppression { }

        // Sent when a subscriber subscribes to the probe.
        public struct Subscribe : IPublisherEvent
        {
            public readonly ISubscription Subscription;

            public Subscribe(ISubscription subscription)
            {
                Subscription = subscription;
            }
        }

        // Sent when a subscription is cancelled.
        public struct CancelSubscription : IPublisherEvent
        {
            public readonly ISubscription Subscription;

            public CancelSubscription(ISubscription subscription)
            {
                Subscription = subscription;
            }
        }

        // Sent when a subscriber requests more elements.
        public struct RequestMore : IPublisherEvent
        {
            public readonly ISubscription Subscription;
            public readonly long NrOfElements;

            public RequestMore(ISubscription subscription, long nrOfElements)
            {
                Subscription = subscription;
                NrOfElements = nrOfElements;
            }
        }

        #endregion

        /// <summary>
        /// Implementation of <see cref="IPublisher{T}"/> that allows various assertions.
        /// This probe does not track demand. Therefore you need to expect demand before sending
        /// elements downstream.
        /// </summary>
        public class ManualProbe<T> : IPublisher<T>
        {
            private readonly TestProbe _probe;

            internal ManualProbe(TestKitBase system, bool autoOnSubscribe = true)
            {
                _probe = system.CreateTestProbe();
                AutoOnSubscribe = autoOnSubscribe;
            }

            // When true, OnSubscribe is invoked on the subscriber immediately upon Subscribe.
            public bool AutoOnSubscribe { get; }

            public IPublisher<T> Publisher => this;

            /// <summary>
            /// Subscribes a given <paramref name="subscriber"/> to this probe.
            /// </summary>
            public void Subscribe(ISubscriber<T> subscriber)
            {
                var subscription = new StreamTestKit.PublisherProbeSubscription<T>(subscriber, _probe);
                _probe.Ref.Tell(new Subscribe(subscription));
                if (AutoOnSubscribe) subscriber.OnSubscribe(subscription);
            }

            /// <summary>
            /// Expect a subscription.
            /// </summary>
            public StreamTestKit.PublisherProbeSubscription<T> ExpectSubscription() =>
                (StreamTestKit.PublisherProbeSubscription<T>)_probe.ExpectMsg<Subscribe>().Subscription;

            /// <summary>
            /// Expect demand from the given subscription.
            /// </summary>
            public ManualProbe<T> ExpectRequest(ISubscription subscription, int n)
            {
                _probe.ExpectMsg<RequestMore>(x => x.NrOfElements == n && x.Subscription == subscription);
                return this;
            }

            /// <summary>
            /// Expect no messages.
            /// </summary>
            public ManualProbe<T> ExpectNoMsg()
            {
                _probe.ExpectNoMsg();
                return this;
            }

            /// <summary>
            /// Expect no messages for given duration.
            /// </summary>
            public ManualProbe<T> ExpectNoMsg(TimeSpan duration)
            {
                _probe.ExpectNoMsg(duration);
                return this;
            }

            /// <summary>
            /// Receive messages for a given duration or until one does not match a given partial function.
            /// </summary>
            public IEnumerable<TOther> ReceiveWhile<TOther>(TimeSpan? max = null, TimeSpan? idle = null, Func<object, TOther> filter = null, int msgs = int.MaxValue) where TOther : class
            {
                return _probe.ReceiveWhile(max, idle, filter, msgs);
            }

            public IPublisherEvent ExpectEvent() => _probe.ExpectMsg<IPublisherEvent>();

            /// <summary>
            /// Execute code block while bounding its execution time between <paramref name="min"/> and
            /// <paramref name="max"/>. <see cref="Within{TOther}(TimeSpan,TimeSpan,Func{TOther})"/> blocks may be nested.
            /// All methods in this class which take maximum wait times are available in a version which implicitly uses
            /// the remaining time governed by the innermost enclosing <see cref="Within{TOther}(TimeSpan,TimeSpan,Func{TOther})"/> block.
            ///
            /// <para />
            ///
            /// Note that the timeout is scaled using <see cref="TestKitBase.Dilated"/>, which uses the
            /// configuration entry "akka.test.timefactor", while the min Duration is not.
            ///
            /// <![CDATA[
            /// var ret = probe.Within(Timespan.FromMilliseconds(50), () =>
            /// {
            ///     test.Tell("ping");
            ///     return ExpectMsg<string>();
            /// });
            /// ]]>
            /// </summary>
            /// <param name="min"></param>
            /// <param name="max"></param>
            /// <param name="execute"></param>
            /// <returns></returns>
            public TOther Within<TOther>(TimeSpan min, TimeSpan max, Func<TOther> execute) => _probe.Within(min, max, execute);

            /// <summary>
            /// Same as calling Within(TimeSpan.Zero, max, function).
            /// </summary>
            public TOther Within<TOther>(TimeSpan max, Func<TOther> execute) => _probe.Within(max, execute);
        }

        /// <summary>
        /// Single subscription and demand tracking for <see cref="ManualProbe{T}"/>.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        public class Probe<T> : ManualProbe<T>
        {
            private readonly long _initialPendingRequests;
            // Lazily resolved so the subscription is only expected on first use.
            private readonly Lazy<StreamTestKit.PublisherProbeSubscription<T>> _subscription;

            internal Probe(TestKitBase system, long initialPendingRequests) : base(system)
            {
                _initialPendingRequests = Pending = initialPendingRequests;
                _subscription = new Lazy<StreamTestKit.PublisherProbeSubscription<T>>(ExpectSubscription);
            }

            /// <summary>
            /// Current pending requests.
            /// </summary>
            public long Pending { get; private set; }

            /// <summary>
            /// Asserts that a subscription has been received or will be received
            /// </summary>
            public void EnsureSubscription()
            {
                // Forces the lazy subscription, which performs ExpectSubscription.
                var _ = _subscription.Value;
            }

            public Probe<T> SendNext(T element)
            {
                var sub = _subscription.Value;
                // Block until demand is available before emitting.
                if (Pending == 0) Pending = sub.ExpectRequest();
                Pending--;
                sub.SendNext(element);
                return this;
            }

            public Probe<T> UnsafeSendNext(T element)
            {
                // Emits without checking demand; may violate the Reactive Streams contract on purpose.
                _subscription.Value.SendNext(element);
                return this;
            }

            public Probe<T> SendComplete()
            {
                _subscription.Value.SendComplete();
                return this;
            }

            public Probe<T> SendError(Exception e)
            {
                _subscription.Value.SendError(e);
                return this;
            }

            public long ExpectRequest()
            {
                var requests = _subscription.Value.ExpectRequest();
                Pending += requests;
                return requests;
            }

            public Probe<T> ExpectCancellation()
            {
                _subscription.Value.ExpectCancellation();
                return this;
            }
        }

        // Publisher that completes the subscriber after the first request (not at subscribe time).
        internal sealed class LazyEmptyPublisher<T> : IPublisher<T>
        {
            public static readonly IPublisher<T> Instance = new LazyEmptyPublisher<T>();
            private LazyEmptyPublisher() { }

            public void Subscribe(ISubscriber<T> subscriber)
                => subscriber.OnSubscribe(new StreamTestKit.CompletedSubscription<T>(subscriber));

            public override string ToString() => "soon-to-complete-publisher";
        }

        // Publisher that fails the subscriber with Cause after the first request.
        internal sealed class LazyErrorPublisher<T> : IPublisher<T>
        {
            public readonly string Name;
            public readonly Exception Cause;

            public LazyErrorPublisher(Exception cause, string name)
            {
                Name = name;
                Cause = cause;
            }

            public void Subscribe(ISubscriber<T> subscriber)
                => subscriber.OnSubscribe(new StreamTestKit.FailedSubscription<T>(subscriber, Cause));

            public override string ToString() => Name;
        }

        /// <summary>
        /// Publisher that signals complete to subscribers, after handing out a subscription.
        /// </summary>
        public static IPublisher<T> Empty<T>() => EmptyPublisher<T>.Instance;

        /// <summary>
        /// Publisher that subscribes the subscriber and completes after the first request.
        /// </summary>
        public static IPublisher<T> LazyEmpty<T>() => LazyEmptyPublisher<T>.Instance;

        /// <summary>
        /// Publisher that signals error to subscribers immediately after handing out subscription.
        /// </summary>
        public static IPublisher<T> Error<T>(Exception exception) => new ErrorPublisher<T>(exception, "error");

        /// <summary>
        /// Publisher that subscribes the subscriber and signals error after the first request.
        /// </summary>
        public static IPublisher<T> LazyError<T>(Exception exception) => new LazyErrorPublisher<T>(exception, "error");

        /// <summary>
        /// Probe that implements <see cref="IPublisher{T}"/> interface.
        /// </summary>
        public static ManualProbe<T> CreateManualPublisherProbe<T>(this TestKitBase testKit, bool autoOnSubscribe = true)
            => new ManualProbe<T>(testKit, autoOnSubscribe);

        /// <summary>
        /// Probe that implements <see cref="IPublisher{T}"/> and tracks demand for a single subscription.
        /// </summary>
        public static Probe<T> CreatePublisherProbe<T>(this TestKitBase testKit, long initialPendingRequests = 0L)
            => new Probe<T>(testKit, initialPendingRequests);
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Management.Automation;
using System.Threading;

using Dbg = System.Management.Automation.Diagnostics;

namespace Microsoft.PowerShell.Commands
{
    /// <summary>
    /// This cmdlet waits for job to complete.
    /// </summary>
    [Cmdlet(VerbsLifecycle.Wait, "Job", DefaultParameterSetName = JobCmdletBase.SessionIdParameterSet, HelpUri = "https://go.microsoft.com/fwlink/?LinkID=2096902")]
    [OutputType(typeof(Job))]
    public class WaitJobCommand : JobCmdletBase, IDisposable
    {
        #region Parameters

        /// <summary>
        /// Specifies the Job objects to wait on.
        /// </summary>
        [Parameter(Mandatory = true, Position = 0, ValueFromPipeline = true, ValueFromPipelineByPropertyName = true, ParameterSetName = RemoveJobCommand.JobParameterSet)]
        [ValidateNotNullOrEmpty]
        [SuppressMessage("Microsoft.Performance", "CA1819:PropertiesShouldNotReturnArrays")]
        public Job[] Job { get; set; }

        /// <summary>
        /// Complete the cmdlet when any of the job is completed, instead of waiting for all of them to be completed.
        /// </summary>
        [Parameter]
        public SwitchParameter Any { get; set; }

        /// <summary>
        /// If timeout is specified, the cmdlet will only wait for this number of seconds.
        /// Value of -1 means never timeout.
        /// </summary>
        [Parameter]
        [Alias("TimeoutSec")]
        [ValidateRangeAttribute(-1, Int32.MaxValue)]
        public int Timeout
        {
            get
            {
                return _timeoutInSeconds;
            }

            set
            {
                _timeoutInSeconds = value;
            }
        }

        private int _timeoutInSeconds = -1; // -1: infinite, this default is to wait for as long as it takes.

        /// <summary>
        /// Forces the cmdlet to wait for Finished states (Completed, Failed, Stopped) instead of
        /// persistent states, which also include Suspended and Disconnected.
        /// </summary>
        [Parameter]
        public SwitchParameter Force { get; set; }

        /// <summary>
        /// Not exposed as a cmdlet parameter here (no [Parameter] attribute);
        /// overridden only to satisfy the base class contract.
        /// </summary>
        public override string[] Command { get; set; }

        #endregion Parameters

        #region Coordinating how different events (timeout, stopprocessing, job finished, job blocked) affect what happens in EndProcessing

        // Guards _endProcessingAction; the first action set wins and all later
        // candidates (from competing triggers) are silently ignored.
        private readonly object _endProcessingActionLock = new object();
        private Action _endProcessingAction;
        // Signaled once an end-processing action has been chosen; EndProcessing blocks on this.
        private readonly ManualResetEventSlim _endProcessingActionIsReady = new ManualResetEventSlim(false);

        // Records the action EndProcessing should take. Only the first call has any
        // effect; the event is set exactly once.
        private void SetEndProcessingAction(Action endProcessingAction)
        {
            Dbg.Assert(endProcessingAction != null, "Caller should verify endProcessingAction != null");
            lock (_endProcessingActionLock)
            {
                if (_endProcessingAction == null)
                {
                    Dbg.Assert(!_endProcessingActionIsReady.IsSet, "This line should execute only once");
                    _endProcessingAction = endProcessingAction;
                    _endProcessingActionIsReady.Set();
                }
            }
        }

        // Blocks until some trigger (job state change, timeout, StopProcessing)
        // has chosen an action, then runs it.
        private void InvokeEndProcessingAction()
        {
            _endProcessingActionIsReady.Wait();

            Action endProcessingAction;
            lock (_endProcessingActionLock)
            {
                endProcessingAction = _endProcessingAction;
            }

            // Invoke action outside lock.
            endProcessingAction?.Invoke();
        }

        private void CleanUpEndProcessing()
        {
            _endProcessingActionIsReady.Dispose();
        }

        #endregion

        #region Support for triggering EndProcessing when jobs are finished or blocked

        private readonly HashSet<Job> _finishedJobs = new HashSet<Job>();
        private readonly HashSet<Job> _blockedJobs = new HashSet<Job>();
        private readonly List<Job> _jobsToWaitFor = new List<Job>();
        // Guards the three collections above.
        private readonly object _jobTrackingLock = new object();

        // StateChanged handler: reclassifies the job as blocked/finished and decides
        // whether the wait can complete (honoring -Any vs. all-jobs semantics).
        private void HandleJobStateChangedEvent(object source, JobStateEventArgs eventArgs)
        {
            Dbg.Assert(source is Job, "Caller should verify source is Job");
            Dbg.Assert(eventArgs != null, "Caller should verify eventArgs != null");

            var job = (Job)source;
            lock (_jobTrackingLock)
            {
                Dbg.Assert(_blockedJobs.All(j => !_finishedJobs.Contains(j)), "Job cannot be in *both* _blockedJobs and _finishedJobs");

                if (eventArgs.JobStateInfo.State == JobState.Blocked)
                {
                    _blockedJobs.Add(job);
                }
                else
                {
                    _blockedJobs.Remove(job);
                }

                // Treat jobs in Disconnected state as finished jobs since the user
                // will have to reconnect the job before more information can be
                // obtained.
                // Suspended jobs require a Resume-Job call. Both of these states are persistent
                // without user interaction.
                // Wait should wait until a job is in a persistent state, OR if the force parameter
                // is specified, until the job is in a finished state, which is a subset of
                // persistent states.
                if (!Force && job.IsPersistentState(eventArgs.JobStateInfo.State) || (Force && job.IsFinishedState(eventArgs.JobStateInfo.State)))
                {
                    // Persistent-but-not-finished (Suspended/Disconnected) ends the wait
                    // with a warning emitted later by EndProcessing.
                    if (!job.IsFinishedState(eventArgs.JobStateInfo.State))
                    {
                        _warnNotTerminal = true;
                    }

                    _finishedJobs.Add(job);
                }
                else
                {
                    _finishedJobs.Remove(job);
                }

                Dbg.Assert(_blockedJobs.All(j => !_finishedJobs.Contains(j)), "Job cannot be in *both* _blockedJobs and _finishedJobs");

                if (this.Any.IsPresent)
                {
                    // -Any: one finished job is enough; all-blocked means deadlock.
                    if (_finishedJobs.Count > 0)
                    {
                        this.SetEndProcessingAction(this.EndProcessingOutputSingleFinishedJob);
                    }
                    else if (_blockedJobs.Count == _jobsToWaitFor.Count)
                    {
                        this.SetEndProcessingAction(this.EndProcessingBlockedJobsError);
                    }
                }
                else
                {
                    // Default: every job must finish; any single blocked job means deadlock.
                    if (_finishedJobs.Count == _jobsToWaitFor.Count)
                    {
                        this.SetEndProcessingAction(this.EndProcessingOutputAllFinishedJobs);
                    }
                    else if (_blockedJobs.Count > 0)
                    {
                        this.SetEndProcessingAction(this.EndProcessingBlockedJobsError);
                    }
                }
            }
        }

        private void AddJobsThatNeedJobChangesTracking(IEnumerable<Job> jobsToAdd)
        {
            Dbg.Assert(jobsToAdd != null, "Caller should verify jobs != null");

            lock (_jobTrackingLock)
            {
                _jobsToWaitFor.AddRange(jobsToAdd);
            }
        }

        // Subscribes to state changes and immediately replays each job's current
        // state, so jobs already finished/blocked before EndProcessing are counted.
        private void StartJobChangesTracking()
        {
            lock (_jobTrackingLock)
            {
                if (_jobsToWaitFor.Count == 0)
                {
                    this.SetEndProcessingAction(this.EndProcessingDoNothing);
                    return;
                }

                foreach (Job job in _jobsToWaitFor)
                {
                    job.StateChanged += this.HandleJobStateChangedEvent;
                    this.HandleJobStateChangedEvent(job, new JobStateEventArgs(job.JobStateInfo));
                }
            }
        }

        // Unsubscribes the handler from every tracked job (avoids leaking this
        // cmdlet instance via the jobs' StateChanged event).
        private void CleanUpJobChangesTracking()
        {
            lock (_jobTrackingLock)
            {
                foreach (Job job in _jobsToWaitFor)
                {
                    job.StateChanged -= this.HandleJobStateChangedEvent;
                }
            }
        }

        // Snapshot of jobs currently in a wait-satisfying state (persistent, or
        // finished when -Force), mirroring the condition in HandleJobStateChangedEvent.
        private List<Job> GetFinishedJobs()
        {
            List<Job> jobsToOutput;
            lock (_jobTrackingLock)
            {
                jobsToOutput = _jobsToWaitFor.Where(j => ((!Force && j.IsPersistentState(j.JobStateInfo.State)) || (Force && j.IsFinishedState(j.JobStateInfo.State)))).ToList();
            }

            return jobsToOutput;
        }

        private Job GetOneBlockedJob()
        {
            lock (_jobTrackingLock)
            {
                return _jobsToWaitFor.Find(static j => j.JobStateInfo.State == JobState.Blocked);
            }
        }

        #endregion

        #region Support for triggering EndProcessing when timing out

        private Timer _timer;
        private readonly object _timerLock = new object();

        // Timeout of 0 ends the wait immediately; a positive timeout arms a one-shot
        // timer; -1 (the default) never times out.
        private void StartTimeoutTracking(int timeoutInSeconds)
        {
            if (timeoutInSeconds == 0)
            {
                this.SetEndProcessingAction(this.EndProcessingDoNothing);
            }
            else if (timeoutInSeconds > 0)
            {
                lock (_timerLock)
                {
                    _timer = new Timer((_) => this.SetEndProcessingAction(this.EndProcessingDoNothing), null, timeoutInSeconds * 1000, System.Threading.Timeout.Infinite);
                }
            }
        }

        private void CleanUpTimeoutTracking()
        {
            lock (_timerLock)
            {
                if (_timer != null)
                {
                    _timer.Dispose();
                    _timer = null;
                }
            }
        }

        #endregion

        #region Overrides

        /// <summary>
        /// Cancel the Wait-Job cmdlet.
        /// </summary>
        protected override void StopProcessing()
        {
            this.SetEndProcessingAction(this.EndProcessingDoNothing);
        }

        /// <summary>
        /// In this method, we initialize the timer if timeout parameter is specified.
        /// </summary>
        protected override void BeginProcessing()
        {
            this.StartTimeoutTracking(_timeoutInSeconds);
        }

        /// <summary>
        /// This method just collects the Jobs which will be waited on in the EndProcessing method.
        /// </summary>
        protected override void ProcessRecord()
        {
            // List of jobs to wait
            List<Job> matches;

            switch (ParameterSetName)
            {
                case NameParameterSet:
                    matches = FindJobsMatchingByName(true, false, true, false);
                    break;

                case InstanceIdParameterSet:
                    matches = FindJobsMatchingByInstanceId(true, false, true, false);
                    break;

                case SessionIdParameterSet:
                    matches = FindJobsMatchingBySessionId(true, false, true, false);
                    break;

                case StateParameterSet:
                    matches = FindJobsMatchingByState(false);
                    break;

                case FilterParameterSet:
                    matches = FindJobsMatchingByFilter(false);
                    break;

                default:
                    matches = CopyJobsToList(this.Job, false, false);
                    break;
            }

            this.AddJobsThatNeedJobChangesTracking(matches);
        }

        /// <summary>
        /// Wait on the collected Jobs.
        /// </summary>
        protected override void EndProcessing()
        {
            this.StartJobChangesTracking();
            this.InvokeEndProcessingAction();
            if (_warnNotTerminal)
            {
                WriteWarning(RemotingErrorIdStrings.JobSuspendedDisconnectedWaitWithForce);
            }
        }

        // -Any completion path: emit the first finished job, if there is one.
        private void EndProcessingOutputSingleFinishedJob()
        {
            Job finishedJob = this.GetFinishedJobs().FirstOrDefault();
            if (finishedJob != null)
            {
                this.WriteObject(finishedJob);
            }
        }

        // Default completion path: emit every finished job.
        private void EndProcessingOutputAllFinishedJobs()
        {
            IEnumerable<Job> finishedJobs = this.GetFinishedJobs();
            foreach (Job finishedJob in finishedJobs)
            {
                this.WriteObject(finishedJob);
            }
        }

        // Deadlock path: a blocked job can never finish without user input, so
        // terminate the wait with an error naming one blocked job.
        private void EndProcessingBlockedJobsError()
        {
            string message = RemotingErrorIdStrings.JobBlockedSoWaitJobCannotContinue;
            Exception exception = new ArgumentException(message);
            ErrorRecord errorRecord = new ErrorRecord(
                exception,
                "BlockedJobsDeadlockWithWaitJob",
                ErrorCategory.DeadlockDetected,
                this.GetOneBlockedJob());
            this.ThrowTerminatingError(errorRecord);
        }

        // Used for timeout / StopProcessing / nothing-to-wait-for: end silently.
        private void EndProcessingDoNothing()
        {
            // do nothing
        }

        #endregion Overrides

        #region IDisposable Members

        /// <summary>
        /// Dispose all managed resources. This will suppress finalizer on the object from getting called by
        /// calling System.GC.SuppressFinalize(this).
        /// </summary>
        public void Dispose()
        {
            Dispose(true);

            // To prevent derived types with finalizers from having to re-implement System.IDisposable to call it,
            // unsealed types without finalizers should still call SuppressFinalize.
            System.GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Release all the resources.
        /// </summary>
        /// <param name="disposing">
        /// if true, release all the managed objects.
        /// </param>
        private void Dispose(bool disposing)
        {
            if (disposing)
            {
                lock (_disposableLock)
                {
                    if (!_isDisposed)
                    {
                        _isDisposed = true;

                        this.CleanUpTimeoutTracking();
                        this.CleanUpJobChangesTracking();
                        this.CleanUpEndProcessing(); // <- has to be last
                    }
                }
            }
        }

        private bool _isDisposed;
        private readonly object _disposableLock = new object();
        // Set when the wait ended on a Suspended/Disconnected (persistent but not
        // finished) job; EndProcessing surfaces it as a warning.
        private bool _warnNotTerminal = false;

        #endregion IDisposable Members
    }
}
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Encog.Util;

namespace Encog.ML.Data.Basic
{
    /// <summary>
    /// A basic implementation of the MLSequenceSet. Holds an ordered list of
    /// sequences (each an IMLDataSet); new pairs are appended to the current
    /// (last) sequence until StartNewSequence is called.
    /// </summary>
    [Serializable]
    public class BasicMLSequenceSet : IMLSequenceSet
    {
        /// <summary>
        /// The data held by this object.
        /// </summary>
        private readonly IList<IMLDataSet> _sequences = new List<IMLDataSet>();

        // The sequence that Add(...) currently appends to (always the last entry
        // of _sequences).
        private IMLDataSetAddable _currentSequence;

        /// <summary>
        /// Default constructor. Starts with a single empty sequence.
        /// </summary>
        public BasicMLSequenceSet()
        {
            _currentSequence = new BasicMLDataSet();
            _sequences.Add(_currentSequence);
        }

        /// <summary>
        /// Construct a sequence set that shares the underlying sequences of
        /// another set (shallow copy, used by OpenAdditional).
        /// </summary>
        /// <param name="other">The set to share data with.</param>
        public BasicMLSequenceSet(BasicMLSequenceSet other)
        {
            _sequences = other._sequences;
            _currentSequence = other._currentSequence;
        }

        /// <summary>
        /// Construct a data set from an input and ideal array.
        /// </summary>
        /// <param name="input">The input into the machine learning method for training.</param>
        /// <param name="ideal">The ideal output for training.</param>
        public BasicMLSequenceSet(double[][] input, double[][] ideal)
        {
            _currentSequence = new BasicMLDataSet(input, ideal);
            _sequences.Add(_currentSequence);
        }

        /// <summary>
        /// Construct a data set from an already created list. Mostly used to
        /// duplicate this class.
        /// </summary>
        /// <param name="theData">The data to use.</param>
        public BasicMLSequenceSet(IList<IMLDataPair> theData)
        {
            _currentSequence = new BasicMLDataSet(theData);
            _sequences.Add(_currentSequence);
        }

        /// <summary>
        /// Copy whatever dataset type is specified into a memory dataset.
        /// </summary>
        /// <param name="set">The dataset to copy.</param>
        public BasicMLSequenceSet(IMLDataSet set)
        {
            _currentSequence = new BasicMLDataSet();
            _sequences.Add(_currentSequence);
            int inputCount = set.InputSize;
            int idealCount = set.IdealSize;

            foreach (IMLDataPair pair in set)
            {
                BasicMLData input = null;
                BasicMLData ideal = null;

                if (inputCount > 0)
                {
                    input = new BasicMLData(inputCount);
                    pair.Input.CopyTo(input.Data, 0, pair.Input.Count);
                }

                if (idealCount > 0)
                {
                    ideal = new BasicMLData(idealCount);
                    pair.Ideal.CopyTo(ideal.Data, 0, pair.Ideal.Count);
                }

                _currentSequence.Add(new BasicMLDataPair(input, ideal));
            }
        }

        #region IMLSequenceSet Members

        /// <inheritdoc/>
        public void Add(IMLData theData)
        {
            _currentSequence.Add(theData);
        }

        /// <inheritdoc/>
        public void Add(IMLData inputData, IMLData idealData)
        {
            IMLDataPair pair = new BasicMLDataPair(inputData, idealData);
            _currentSequence.Add(pair);
        }

        /// <inheritdoc/>
        public void Add(IMLDataPair inputData)
        {
            _currentSequence.Add(inputData);
        }

        /// <inheritdoc/>
        public void Close()
        {
            // nothing to close
        }

        /// <inheritdoc/>
        public int IdealSize
        {
            get
            {
                if (_sequences[0].Count == 0)
                {
                    return 0;
                }

                return _sequences[0].IdealSize;
            }
        }

        /// <inheritdoc/>
        public int InputSize
        {
            get
            {
                if (_sequences[0].Count == 0)
                {
                    return 0;
                }

                // BUGFIX: previously returned _sequences[0].IdealSize (copy-paste
                // error), which misreported the input size for supervised data.
                return _sequences[0].InputSize;
            }
        }

        /// <inheritdoc/>
        public int Count
        {
            get { return _sequences.Sum(ds => ds.Count); }
        }

        /// <inheritdoc/>
        public bool Supervised
        {
            get
            {
                if (_sequences[0].Count == 0)
                {
                    return false;
                }

                return _sequences[0].Supervised;
            }
        }

        /// <inheritdoc/>
        public IMLDataSet OpenAdditional()
        {
            return new BasicMLSequenceSet(this);
        }

        /// <summary>
        /// Begin a new sequence. If the current sequence is still empty it is
        /// reused rather than creating another empty one.
        /// </summary>
        public void StartNewSequence()
        {
            if (_currentSequence.Count > 0)
            {
                _currentSequence = new BasicMLDataSet();
                _sequences.Add(_currentSequence);
            }
        }

        /// <inheritdoc/>
        public int SequenceCount
        {
            get { return _sequences.Count; }
        }

        /// <inheritdoc/>
        public IMLDataSet GetSequence(int i)
        {
            return this._sequences[i];
        }

        /// <inheritdoc/>
        public ICollection<IMLDataSet> Sequences
        {
            get { return this._sequences; }
        }

        /// <inheritdoc/>
        public void Add(IMLDataSet sequence)
        {
            // Note: flattens the incoming set's pairs into the current sequence;
            // it does not preserve the argument as a separate sequence.
            foreach (IMLDataPair pair in sequence)
            {
                this.Add(pair);
            }
        }

        /// <summary>
        /// Get an enumerator to access the data with.
        /// </summary>
        /// <returns>An enumerator.</returns>
        public IEnumerator<IMLDataPair> GetEnumerator()
        {
            return new BasicMLSequenceSetEnumerator(this);
        }

        /// <summary>
        /// Access a pair by its global (cross-sequence) record index.
        /// </summary>
        /// <param name="x">The zero-based record index over all sequences.</param>
        /// <exception cref="MLDataError">If x is out of range.</exception>
        public IMLDataPair this[int x]
        {
            get
            {
                int sequenceIndex = 0;
                int recordIndex = x;

                // BUGFIX: the bounds check used to read _sequences[sequenceIndex]
                // before verifying sequenceIndex (and compared with '>' instead of
                // '>='), so an out-of-range x threw ArgumentOutOfRangeException
                // from the list instead of the intended MLDataError.
                while (sequenceIndex < _sequences.Count && _sequences[sequenceIndex].Count <= recordIndex)
                {
                    recordIndex -= _sequences[sequenceIndex].Count;
                    sequenceIndex++;
                }

                if (sequenceIndex >= _sequences.Count)
                {
                    throw new MLDataError("Record out of range: " + x);
                }

                return _sequences[sequenceIndex][recordIndex];
            }
        }

        #endregion

        /// <inheritdoc/>
        public Object Clone()
        {
            return ObjectCloner.DeepCopy(this);
        }

        #region Nested type: BasicMLSequenceSetEnumerator

        /// <summary>
        /// Enumerates all pairs of the owning sequence set, in sequence order.
        /// </summary>
        public class BasicMLSequenceSetEnumerator : IEnumerator<IMLDataPair>
        {
            /// <summary>
            /// The owner.
            /// </summary>
            private readonly BasicMLSequenceSet _owner;

            /// <summary>
            /// The index that the iterator is currently at (within the current sequence).
            /// </summary>
            private int _currentIndex;

            /// <summary>
            /// The sequence index.
            /// </summary>
            private int _currentSequenceIndex;

            /// <summary>
            /// Construct an enumerator.
            /// </summary>
            /// <param name="owner">The owner of the enumerator.</param>
            public BasicMLSequenceSetEnumerator(BasicMLSequenceSet owner)
            {
                Reset();
                _owner = owner;
            }

            #region IEnumerator<IMLDataPair> Members

            /// <summary>
            /// The current data item.
            /// </summary>
            public IMLDataPair Current
            {
                get
                {
                    if (_currentSequenceIndex >= _owner.SequenceCount)
                    {
                        throw new InvalidOperationException("Trying to read past the end of the dataset.");
                    }

                    if (_currentIndex < 0)
                    {
                        throw new InvalidOperationException("Must call MoveNext before reading Current.");
                    }

                    return _owner.GetSequence(_currentSequenceIndex)[_currentIndex];
                }
            }

            /// <summary>
            /// Dispose of this object.
            /// </summary>
            public void Dispose()
            {
                // nothing needed
            }

            /// <summary>
            /// The current item.
            /// </summary>
            object IEnumerator.Current
            {
                get
                {
                    if (_currentSequenceIndex >= _owner.SequenceCount)
                    {
                        throw new InvalidOperationException("Trying to read past the end of the dataset.");
                    }

                    if (_currentIndex < 0)
                    {
                        throw new InvalidOperationException("Must call MoveNext before reading Current.");
                    }

                    return _owner.GetSequence(_currentSequenceIndex)[_currentIndex];
                }
            }

            /// <summary>
            /// Move to the next item.
            /// </summary>
            /// <returns>True if there is a next item.</returns>
            public bool MoveNext()
            {
                // BUGFIX: the old implementation advanced at most one sequence per
                // call, so an empty interior sequence made it return true while
                // positioned on a sequence with no items (Current then threw).
                // Skip over empty sequences until an item is found or we run out.
                while (_currentSequenceIndex < _owner.SequenceCount)
                {
                    IMLDataSet current = _owner.GetSequence(_currentSequenceIndex);
                    if (_currentIndex + 1 < current.Count)
                    {
                        _currentIndex++;
                        return true;
                    }

                    _currentIndex = -1;
                    _currentSequenceIndex++;
                }

                return false;
            }

            /// <summary>
            /// Reset to the beginning.
            /// </summary>
            public void Reset()
            {
                _currentIndex = -1;
                _currentSequenceIndex = 0;
            }

            #endregion
        }

        #endregion
    }
}
/* * NPlot - A charting library for .NET * * PlotSurface2D.cs * Copyright (C) 2003-2006 Matt Howlett and others. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ // #define DEBUG_BOUNDING_BOXES using System; using System.Collections; using System.Drawing; using System.Drawing.Drawing2D; namespace NPlot { /// <summary> /// Implements the surface on which IDrawables are drawn. Is extended /// by Bitmap.PlotSurface2D, Windows.PlotSurface2D etc. TODO: better explanation. /// </summary> public class PlotSurface2D : IPlotSurface2D { /// <summary> /// Possible positions of the X axis. /// </summary> public enum XAxisPosition { /// <summary> /// X axis is on the top. /// </summary> Top = 1, //Center = 2, /// <summary> /// X axis is on the bottom. 
/// </summary> Bottom = 3, } /// <summary> /// Possible positions of the Y axis. /// </summary> public enum YAxisPosition { /// <summary> /// Y axis on the left. /// </summary> Left = 1, // Center /// <summary> /// Y axis on the right. /// </summary> Right = 3, } private readonly StringFormat titleDrawFormat_; private bool autoScaleAutoGeneratedAxes_; private bool autoScaleTitle_; private ArrayList axesConstraints_; private object bbTitleCache_; private object bbXAxis1Cache_; private object bbXAxis2Cache_; private object bbYAxis1Cache_; private object bbYAxis2Cache_; private ArrayList drawables_; private int legendZOrder_ = -1; private Legend legend_; private SortedList ordering_; private PhysicalAxis pXAxis1Cache_; private PhysicalAxis pXAxis2Cache_; private PhysicalAxis pYAxis1Cache_; private PhysicalAxis pYAxis2Cache_; private int padding_; private object plotAreaBoundingBoxCache_; private IRectangleBrush plotBackBrush_; private object plotBackColor_; private System.Drawing.Bitmap plotBackImage_; private SmoothingMode smoothingMode_; private Brush titleBrush_; private Font titleFont_; private string title_; private int uniqueCounter_; private Axis xAxis1_; private Axis xAxis2_; private ArrayList xAxisPositions_; private Axis yAxis1_; private Axis yAxis2_; private ArrayList yAxisPositions_; private ArrayList zPositions_; /// <summary> /// Default constructor. /// </summary> public PlotSurface2D() { // only create this once. titleDrawFormat_ = new StringFormat(); titleDrawFormat_.Alignment = StringAlignment.Center; Init(); } /// <summary> /// The physical bounding box of the last drawn plot surface area is available here. /// </summary> public Rectangle PlotAreaBoundingBoxCache { get { if (plotAreaBoundingBoxCache_ == null) { return Rectangle.Empty; } else { return (Rectangle) plotAreaBoundingBoxCache_; } } } /// <summary> /// The physical XAxis1 that was last drawn. 
/// </summary> public PhysicalAxis PhysicalXAxis1Cache { get { return pXAxis1Cache_; } } /// <summary> /// The physical YAxis1 that was last drawn. /// </summary> public PhysicalAxis PhysicalYAxis1Cache { get { return pYAxis1Cache_; } } /// <summary> /// The physical XAxis2 that was last drawn. /// </summary> public PhysicalAxis PhysicalXAxis2Cache { get { return pXAxis2Cache_; } } /// <summary> /// The physical YAxis2 that was last drawn. /// </summary> public PhysicalAxis PhysicalYAxis2Cache { get { return pYAxis2Cache_; } } /// <summary> /// The bottom abscissa axis. /// </summary> public Axis XAxis1 { get { return xAxis1_; } set { xAxis1_ = value; } } /// <summary> /// The left ordinate axis. /// </summary> public Axis YAxis1 { get { return yAxis1_; } set { yAxis1_ = value; } } /// <summary> /// The top abscissa axis. /// </summary> public Axis XAxis2 { get { return xAxis2_; } set { xAxis2_ = value; } } /// <summary> /// The right ordinate axis. /// </summary> public Axis YAxis2 { get { return yAxis2_; } set { yAxis2_ = value; } } /// <summary> /// The chart title. /// </summary> public string Title { get { return title_; } set { title_ = value; } } /// <summary> /// The plot title font. /// </summary> public Font TitleFont { get { return titleFont_; } set { titleFont_ = value; } } /// <summary> /// The distance in pixels to leave between of the edge of the bounding rectangle /// supplied to the Draw method, and the markings that make up the plot. /// </summary> public int Padding { get { return padding_; } set { padding_ = value; } } /// <summary> /// Sets the title to be drawn using a solid brush of this color. /// </summary> public Color TitleColor { set { titleBrush_ = new SolidBrush(value); } } /// <summary> /// The brush used for drawing the title. /// </summary> public Brush TitleBrush { get { return titleBrush_; } set { titleBrush_ = value; } } /// <summary> /// A color used to paint the plot background. 
Mutually exclusive with PlotBackImage and PlotBackBrush /// </summary> public Color PlotBackColor { set { plotBackColor_ = value; plotBackBrush_ = null; plotBackImage_ = null; } } /// <summary> /// An imaged used to paint the plot background. Mutually exclusive with PlotBackColor and PlotBackBrush /// </summary> public System.Drawing.Bitmap PlotBackImage { set { plotBackImage_ = value; plotBackColor_ = null; plotBackBrush_ = null; } } /// <summary> /// A Rectangle brush used to paint the plot background. Mutually exclusive with PlotBackColor and PlotBackBrush /// </summary> public IRectangleBrush PlotBackBrush { set { plotBackBrush_ = value; plotBackColor_ = null; plotBackImage_ = null; } } /// <summary> /// Smoothing mode to use when drawing plots. /// </summary> public SmoothingMode SmoothingMode { get { return smoothingMode_; } set { smoothingMode_ = value; } } /// <summary> /// Adds a drawable object to the plot surface with z-order 0. If the object is an IPlot, /// the PlotSurface2D axes will also be updated. /// </summary> /// <param name="p">The IDrawable object to add to the plot surface.</param> public void Add(IDrawable p) { Add(p, 0); } /// <summary> /// Adds a drawable object to the plot surface. If the object is an IPlot, /// the PlotSurface2D axes will also be updated. /// </summary> /// <param name="p">The IDrawable object to add to the plot surface.</param> /// <param name="zOrder">The z-ordering when drawing (objects with lower numbers are drawn first)</param> public void Add(IDrawable p, int zOrder) { Add(p, XAxisPosition.Bottom, YAxisPosition.Left, zOrder); } /// <summary> /// Adds a drawable object to the plot surface against the specified axes with /// z-order of 0. If the object is an IPlot, the PlotSurface2D axes will also /// be updated. 
/// </summary> /// <param name="p">the IDrawable object to add to the plot surface</param> /// <param name="xp">the x-axis to add the plot against.</param> /// <param name="yp">the y-axis to add the plot against.</param> public void Add(IDrawable p, XAxisPosition xp, YAxisPosition yp) { Add(p, xp, yp, 0); } /// <summary> /// Adds a drawable object to the plot surface against the specified axes. If /// the object is an IPlot, the PlotSurface2D axes will also be updated. /// </summary> /// <param name="p">the IDrawable object to add to the plot surface</param> /// <param name="xp">the x-axis to add the plot against.</param> /// <param name="yp">the y-axis to add the plot against.</param> /// <param name="zOrder">The z-ordering when drawing (objects with lower numbers are drawn first)</param> public void Add(IDrawable p, XAxisPosition xp, YAxisPosition yp, int zOrder) { drawables_.Add(p); xAxisPositions_.Add(xp); yAxisPositions_.Add(yp); zPositions_.Add((double) zOrder); // fraction is to make key unique. With 10 million plots at same z, this buggers up.. double fraction = (double) (++uniqueCounter_)/10000000.0f; ordering_.Add(zOrder + fraction, drawables_.Count - 1); // if p is just an IDrawable, then it can't affect the axes. if (p is IPlot) { UpdateAxes(false); } } /// <summary> /// Clears the plot and resets all state to the default. /// </summary> public void Clear() { Init(); } /// <summary> /// Legend to use. If this property is null [default], then the plot /// surface will have no corresponding legend. /// </summary> public Legend Legend { get { return legend_; } set { legend_ = value; } } /// <summary> /// Add an axis constraint to the plot surface. Axes constraints give you /// control over where NPlot positions each axes, and the world - pixel /// ratio. 
/// </summary> /// <param name="constraint">The axis constraint to add.</param> public void AddAxesConstraint(AxesConstraint constraint) { axesConstraints_.Add(constraint); } /// <summary> /// Whether or not the title will be scaled according to size of the plot surface. /// </summary> public bool AutoScaleTitle { get { return autoScaleTitle_; } set { autoScaleTitle_ = value; } } /// <summary> /// When plots are added to the plot surface, the axes they are attached to /// are immediately modified to reflect data of the plot. If /// AutoScaleAutoGeneratedAxes is true when a plot is added, the axes will /// be turned in to auto scaling ones if they are not already [tick marks, /// tick text and label size scaled to size of plot surface]. If false, /// axes will not be autoscaling. /// </summary> public bool AutoScaleAutoGeneratedAxes { get { return autoScaleAutoGeneratedAxes_; } set { autoScaleAutoGeneratedAxes_ = value; } } /// <summary> /// Remove a drawable object. /// Note that axes are not updated. /// </summary> /// <param name="p">Drawable to remove.</param> /// <param name="updateAxes">if true, the axes are updated.</param> public void Remove(IDrawable p, bool updateAxes) { int index = drawables_.IndexOf(p); if (index < 0) return; drawables_.RemoveAt(index); xAxisPositions_.RemoveAt(index); yAxisPositions_.RemoveAt(index); zPositions_.RemoveAt(index); if (updateAxes) { UpdateAxes(true); } RefreshZOrdering(); } /// <summary> /// Gets an array list containing all drawables currently added to the PlotSurface2D. /// </summary> public ArrayList Drawables { get { return drawables_; } } /// <summary> /// Setting this value determines the order (relative to IDrawables added to the plot surface) /// that the legend is drawn. /// </summary> public int LegendZOrder { get { return legendZOrder_; } set { legendZOrder_ = value; } } /// <summary> /// Performs a hit test with the given point and returns information /// about the object being hit. 
/// </summary> /// <param name="p">The point to test.</param> /// <returns></returns> public ArrayList HitTest(Point p) { ArrayList a = new ArrayList(); // this is the case if PlotSurface has been cleared. if (bbXAxis1Cache_ == null) { return a; } else if (bbXAxis1Cache_ != null && ((Rectangle) bbXAxis1Cache_).Contains(p)) { a.Add(xAxis1_); return a; } else if (bbYAxis1Cache_ != null && ((Rectangle) bbYAxis1Cache_).Contains(p)) { a.Add(yAxis1_); return a; } else if (bbXAxis2Cache_ != null && ((Rectangle) bbXAxis2Cache_).Contains(p)) { a.Add(xAxis2_); return a; } else if (bbXAxis2Cache_ != null && ((Rectangle) bbYAxis2Cache_).Contains(p)) { a.Add(yAxis2_); return a; } else if (bbTitleCache_ != null && ((Rectangle) bbTitleCache_).Contains(p)) { a.Add(this); return a; } else if (plotAreaBoundingBoxCache_ != null && ((Rectangle) plotAreaBoundingBoxCache_).Contains(p)) { a.Add(this); return a; } return a; } private void Init() { drawables_ = new ArrayList(); xAxisPositions_ = new ArrayList(); yAxisPositions_ = new ArrayList(); zPositions_ = new ArrayList(); ordering_ = new SortedList(); FontFamily fontFamily = new FontFamily("Arial"); TitleFont = new Font(fontFamily, 14, FontStyle.Regular, GraphicsUnit.Pixel); padding_ = 10; title_ = ""; autoScaleTitle_ = false; autoScaleAutoGeneratedAxes_ = false; xAxis1_ = null; xAxis2_ = null; yAxis1_ = null; yAxis2_ = null; pXAxis1Cache_ = null; pYAxis1Cache_ = null; pXAxis2Cache_ = null; pYAxis2Cache_ = null; titleBrush_ = new SolidBrush(Color.Black); plotBackColor_ = Color.White; legend_ = null; smoothingMode_ = SmoothingMode.None; axesConstraints_ = new ArrayList(); } private float DetermineScaleFactor(int w, int h) { float diag = (float) Math.Sqrt(w*w + h*h); float scaleFactor = (diag/1400.0f)*2.4f; if (scaleFactor > 1.0f) { return scaleFactor; } else { return 1.0f; } } private void UpdateAxes(bool recalculateAll) { if (drawables_.Count != xAxisPositions_.Count || drawables_.Count != yAxisPositions_.Count) { throw new 
NPlotException("plots and axis position arrays our of sync"); } int position = 0; // if we're not recalculating axes using all iplots then set // position to last one in list. if (!recalculateAll) { position = drawables_.Count - 1; if (position < 0) position = 0; } if (recalculateAll) { xAxis1_ = null; yAxis1_ = null; xAxis2_ = null; yAxis2_ = null; } for (int i = position; i < drawables_.Count; ++i) { // only update axes if this drawable is an IPlot. if (!(drawables_[position] is IPlot)) continue; IPlot p = (IPlot) drawables_[position]; XAxisPosition xap = (XAxisPosition) xAxisPositions_[position]; YAxisPosition yap = (YAxisPosition) yAxisPositions_[position]; if (xap == XAxisPosition.Bottom) { if (xAxis1_ == null) { xAxis1_ = p.SuggestXAxis(); if (xAxis1_ != null) { xAxis1_.TicksAngle = -(float) Math.PI/2.0f; } } else { xAxis1_.LUB(p.SuggestXAxis()); } if (xAxis1_ != null) { xAxis1_.MinPhysicalLargeTickStep = 50; if (AutoScaleAutoGeneratedAxes) { xAxis1_.AutoScaleText = true; xAxis1_.AutoScaleTicks = true; xAxis1_.TicksIndependentOfPhysicalExtent = true; } else { xAxis1_.AutoScaleText = false; xAxis1_.AutoScaleTicks = false; xAxis1_.TicksIndependentOfPhysicalExtent = false; } } } if (xap == XAxisPosition.Top) { if (xAxis2_ == null) { xAxis2_ = p.SuggestXAxis(); if (xAxis2_ != null) { xAxis2_.TicksAngle = (float) Math.PI/2.0f; } } else { xAxis2_.LUB(p.SuggestXAxis()); } if (xAxis2_ != null) { xAxis2_.MinPhysicalLargeTickStep = 50; if (AutoScaleAutoGeneratedAxes) { xAxis2_.AutoScaleText = true; xAxis2_.AutoScaleTicks = true; xAxis2_.TicksIndependentOfPhysicalExtent = true; } else { xAxis2_.AutoScaleText = false; xAxis2_.AutoScaleTicks = false; xAxis2_.TicksIndependentOfPhysicalExtent = false; } } } if (yap == YAxisPosition.Left) { if (yAxis1_ == null) { yAxis1_ = p.SuggestYAxis(); if (yAxis1_ != null) { yAxis1_.TicksAngle = (float) Math.PI/2.0f; } } else { yAxis1_.LUB(p.SuggestYAxis()); } if (yAxis1_ != null) { if (AutoScaleAutoGeneratedAxes) { 
// NOTE(review): this chunk picks up mid-method — the opening of the enclosing
// axis-update routine is above this view.  The tail below applies the
// surface-wide auto-scale setting to the (auto-generated) left Y axis, then
// repeats the same handling for the right Y axis.
                        yAxis1_.AutoScaleText = true;
                        yAxis1_.AutoScaleTicks = true;
                        yAxis1_.TicksIndependentOfPhysicalExtent = true;
                    }
                    else
                    {
                        yAxis1_.AutoScaleText = false;
                        yAxis1_.AutoScaleTicks = false;
                        yAxis1_.TicksIndependentOfPhysicalExtent = false;
                    }
                }
            }

            if (yap == YAxisPosition.Right)
            {
                // No right axis yet: adopt the plot's suggested axis (ticks rotated
                // for the right-hand side); otherwise merge ranges via LUB.
                if (yAxis2_ == null)
                {
                    yAxis2_ = p.SuggestYAxis();
                    if (yAxis2_ != null)
                    {
                        yAxis2_.TicksAngle = -(float) Math.PI/2.0f;
                    }
                }
                else
                {
                    yAxis2_.LUB(p.SuggestYAxis());
                }

                if (yAxis2_ != null)
                {
                    if (AutoScaleAutoGeneratedAxes)
                    {
                        yAxis2_.AutoScaleText = true;
                        yAxis2_.AutoScaleTicks = true;
                        yAxis2_.TicksIndependentOfPhysicalExtent = true;
                    }
                    else
                    {
                        yAxis2_.AutoScaleText = false;
                        yAxis2_.AutoScaleTicks = false;
                        yAxis2_.TicksIndependentOfPhysicalExtent = false;
                    }
                }
            }
        }
    }

    /// <summary>
    /// Determines the logical axes to use when drawing.  When only one axis of a
    /// pair (bottom/top or left/right) is set, the missing one is a clone of the
    /// other with hidden tick text and mirrored tick angle.
    /// Throws <see cref="NPlotException"/> if neither axis of a pair is set.
    /// </summary>
    private void DetermineAxesToDraw(out Axis xAxis1, out Axis xAxis2, out Axis yAxis1, out Axis yAxis2)
    {
        xAxis1 = xAxis1_;
        xAxis2 = xAxis2_;
        yAxis1 = yAxis1_;
        yAxis2 = yAxis2_;

        if (xAxis1_ == null)
        {
            if (xAxis2_ == null)
            {
                throw new NPlotException("Error: No X-Axis specified");
            }
            xAxis1 = (Axis) xAxis2_.Clone();
            xAxis1.HideTickText = true;
            xAxis1.TicksAngle = -(float) Math.PI/2.0f;
        }

        if (xAxis2_ == null)
        {
            // don't need to check if xAxis1_ == null, as case already handled above.
            xAxis2 = (Axis) xAxis1_.Clone();
            xAxis2.HideTickText = true;
            xAxis2.TicksAngle = (float) Math.PI/2.0f;
        }

        if (yAxis1_ == null)
        {
            if (yAxis2_ == null)
            {
                throw new NPlotException("Error: No Y-Axis specified");
            }
            yAxis1 = (Axis) yAxis2_.Clone();
            yAxis1.HideTickText = true;
            yAxis1.TicksAngle = (float) Math.PI/2.0f;
        }

        if (yAxis2_ == null)
        {
            // don't need to check if yAxis1_ == null, as case already handled above.
            yAxis2 = (Axis) yAxis1_.Clone();
            yAxis2.HideTickText = true;
            yAxis2.TicksAngle = -(float) Math.PI/2.0f;
        }
    }

    /// <summary>
    /// Computes the physical (pixel) placement of the four axes inside
    /// <paramref name="bounds"/>: each axis is first laid out along an edge of
    /// the bounding rectangle, then indented by its own bounding box, the
    /// surface padding, and (for the top) the title height.
    /// </summary>
    private void DeterminePhysicalAxesToDraw(Rectangle bounds,
        Axis xAxis1, Axis xAxis2, Axis yAxis1, Axis yAxis2,
        out PhysicalAxis pXAxis1, out PhysicalAxis pXAxis2,
        out PhysicalAxis pYAxis1, out PhysicalAxis pYAxis2)
    {
        Rectangle cb = bounds;

        // Start with the axes flush against the edges of the bounding rectangle.
        pXAxis1 = new PhysicalAxis(xAxis1, new Point(cb.Left, cb.Bottom), new Point(cb.Right, cb.Bottom));
        pYAxis1 = new PhysicalAxis(yAxis1, new Point(cb.Left, cb.Bottom), new Point(cb.Left, cb.Top));
        pXAxis2 = new PhysicalAxis(xAxis2, new Point(cb.Left, cb.Top), new Point(cb.Right, cb.Top));
        pYAxis2 = new PhysicalAxis(yAxis2, new Point(cb.Right, cb.Bottom), new Point(cb.Right, cb.Top));

        int bottomIndent = padding_;
        if (!pXAxis1.Axis.Hidden)
        {
            // evaluate its bounding box
            Rectangle bb = pXAxis1.GetBoundingBox();
            // finally determine its indentation from the bottom
            bottomIndent = bottomIndent + bb.Bottom - cb.Bottom;
        }

        int leftIndent = padding_;
        if (!pYAxis1.Axis.Hidden)
        {
            // evaluate its bounding box
            Rectangle bb = pYAxis1.GetBoundingBox();
            // finally determine its indentation from the left
            leftIndent = leftIndent - bb.Left + cb.Left;
        }

        int topIndent = padding_;
        float scale = DetermineScaleFactor(bounds.Width, bounds.Height);
        int titleHeight;
        if (AutoScaleTitle)
        {
            titleHeight = Utils.ScaleFont(titleFont_, scale).Height;
        }
        else
        {
            titleHeight = titleFont_.Height;
        }

        //count number of new lines in title.
        int nlCount = 0;
        for (int i = 0; i < title_.Length; ++i)
        {
            if (title_[i] == '\n') nlCount += 1;
        }
        titleHeight = (int) ((nlCount*0.75 + 1.0f)*titleHeight);

        if (!pXAxis2.Axis.Hidden)
        {
            // evaluate its bounding box
            Rectangle bb = pXAxis2.GetBoundingBox();
            topIndent = topIndent - bb.Top + cb.Top;
            // finally determine its indentation from the top
            // correct top indentation to take into account plot title
            if (title_ != "")
            {
                topIndent += (int) (titleHeight*1.3f);
            }
        }

        int rightIndent = padding_;
        if (!pYAxis2.Axis.Hidden)
        {
            // evaluate its bounding box
            Rectangle bb = pYAxis2.GetBoundingBox();
            // finally determine its indentation from the right
            rightIndent = (rightIndent + bb.Right - cb.Right);
        }

        // now we have all the default calculated positions and we can proceed to
        // "move" the axes to their right places

        // primary axes (bottom, left)
        pXAxis1.PhysicalMin = new Point(cb.Left + leftIndent, cb.Bottom - bottomIndent);
        pXAxis1.PhysicalMax = new Point(cb.Right - rightIndent, cb.Bottom - bottomIndent);
        pYAxis1.PhysicalMin = new Point(cb.Left + leftIndent, cb.Bottom - bottomIndent);
        pYAxis1.PhysicalMax = new Point(cb.Left + leftIndent, cb.Top + topIndent);

        // secondary axes (top, right)
        pXAxis2.PhysicalMin = new Point(cb.Left + leftIndent, cb.Top + topIndent);
        pXAxis2.PhysicalMax = new Point(cb.Right - rightIndent, cb.Top + topIndent);
        pYAxis2.PhysicalMin = new Point(cb.Right - rightIndent, cb.Bottom - bottomIndent);
        pYAxis2.PhysicalMax = new Point(cb.Right - rightIndent, cb.Top + topIndent);
    }

    /// <summary>
    /// Draw the PlotSurface2D and all contents [axes, drawables, and legend] on the
    /// supplied graphics surface.
    /// </summary>
    /// <param name="g">The graphics surface on which to draw.</param>
    /// <param name="bounds">
    /// A bounding box on this surface that denotes the area on the
    /// surface to confine drawing to.
    /// </param>
    public void Draw(Graphics g, Rectangle bounds)
    {
        // determine font sizes and tick scale factor.
        float scale = DetermineScaleFactor(bounds.Width, bounds.Height);

        // if there is nothing to plot, return.
        if (drawables_.Count == 0)
        {
            // draw title centred in the bounds, then bail out.
            float x_center = (bounds.Left + bounds.Right)/2.0f;
            float y_center = (bounds.Top + bounds.Bottom)/2.0f;
            Font scaled_font;
            if (AutoScaleTitle)
            {
                scaled_font = Utils.ScaleFont(titleFont_, scale);
            }
            else
            {
                scaled_font = titleFont_;
            }
            g.DrawString(title_, scaled_font, titleBrush_, new PointF(x_center, y_center), titleDrawFormat_);
            return;
        }

        // determine the [non physical] axes to draw based on the axis properties set.
        Axis xAxis1 = null;
        Axis xAxis2 = null;
        Axis yAxis1 = null;
        Axis yAxis2 = null;
        DetermineAxesToDraw(out xAxis1, out xAxis2, out yAxis1, out yAxis2);

        // apply scale factor to axes as desired.
        if (xAxis1.AutoScaleTicks) xAxis1.TickScale = scale;
        if (xAxis1.AutoScaleText) xAxis1.FontScale = scale;
        if (yAxis1.AutoScaleTicks) yAxis1.TickScale = scale;
        if (yAxis1.AutoScaleText) yAxis1.FontScale = scale;
        if (xAxis2.AutoScaleTicks) xAxis2.TickScale = scale;
        if (xAxis2.AutoScaleText) xAxis2.FontScale = scale;
        if (yAxis2.AutoScaleTicks) yAxis2.TickScale = scale;
        if (yAxis2.AutoScaleText) yAxis2.FontScale = scale;

        // determine the default physical positioning of those axes.
        PhysicalAxis pXAxis1 = null;
        PhysicalAxis pYAxis1 = null;
        PhysicalAxis pXAxis2 = null;
        PhysicalAxis pYAxis2 = null;
        DeterminePhysicalAxesToDraw(
            bounds, xAxis1, xAxis2, yAxis1, yAxis2,
            out pXAxis1, out pXAxis2, out pYAxis1, out pYAxis2);

        // remember the top axis height before constraints/legend move it,
        // so the title can be offset by the same amount later.
        float oldXAxis2Height = pXAxis2.PhysicalMin.Y;

        // Apply axes constraints
        for (int i = 0; i < axesConstraints_.Count; ++i)
        {
            ((AxesConstraint) axesConstraints_[i]).ApplyConstraint(
                pXAxis1, pYAxis1, pXAxis2, pYAxis2);
        }

        /////////////////////////////////////////////////////////////////////////
        // draw legend if have one.
        // Note: this will update axes if necessary.
        Point legendPosition = new Point(0, 0);
        if (legend_ != null)
        {
            legend_.UpdateAxesPositions(
                pXAxis1, pYAxis1, pXAxis2, pYAxis2,
                drawables_, scale, padding_, bounds,
                out legendPosition);
        }

        float newXAxis2Height = pXAxis2.PhysicalMin.Y;
        float titleExtraOffset = oldXAxis2Height - newXAxis2Height;

        // now we are ready to define the bounding box for the plot area (to use in clipping
        // operations.
        plotAreaBoundingBoxCache_ = new Rectangle(
            Math.Min(pXAxis1.PhysicalMin.X, pXAxis1.PhysicalMax.X),
            Math.Min(pYAxis1.PhysicalMax.Y, pYAxis1.PhysicalMin.Y),
            Math.Abs(pXAxis1.PhysicalMax.X - pXAxis1.PhysicalMin.X + 1),
            Math.Abs(pYAxis1.PhysicalMin.Y - pYAxis1.PhysicalMax.Y + 1)
            );
        bbXAxis1Cache_ = pXAxis1.GetBoundingBox();
        bbXAxis2Cache_ = pXAxis2.GetBoundingBox();
        bbYAxis1Cache_ = pYAxis1.GetBoundingBox();
        bbYAxis2Cache_ = pYAxis2.GetBoundingBox();

        // Fill in the background.  Precedence: solid colour, then brush, then tiled image.
        if (plotBackColor_ != null)
        {
            g.FillRectangle(
                new SolidBrush((Color) plotBackColor_),
                (Rectangle) plotAreaBoundingBoxCache_);
        }
        else if (plotBackBrush_ != null)
        {
            g.FillRectangle(
                plotBackBrush_.Get((Rectangle) plotAreaBoundingBoxCache_),
                (Rectangle) plotAreaBoundingBoxCache_);
        }
        else if (plotBackImage_ != null)
        {
            g.DrawImage(
                Utils.TiledImage(plotBackImage_, new Size(
                    ((Rectangle) plotAreaBoundingBoxCache_).Width,
                    ((Rectangle) plotAreaBoundingBoxCache_).Height)),
                (Rectangle) plotAreaBoundingBoxCache_);
        }

        // draw title
        float xt = (pXAxis2.PhysicalMax.X + pXAxis2.PhysicalMin.X)/2.0f;
        float yt = bounds.Top + padding_ - titleExtraOffset;
        Font scaledFont;
        if (AutoScaleTitle)
        {
            scaledFont = Utils.ScaleFont(titleFont_, scale);
        }
        else
        {
            scaledFont = titleFont_;
        }
        g.DrawString(title_, scaledFont, titleBrush_, new PointF(xt, yt), titleDrawFormat_);

        //count number of new lines in title.
        int nlCount = 0;
        for (int i = 0; i < title_.Length; ++i)
        {
            if (title_[i] == '\n') nlCount += 1;
        }

        SizeF s = g.MeasureString(title_, scaledFont);
        bbTitleCache_ = new Rectangle((int) (xt - s.Width/2), (int) (yt), (int) (s.Width), (int) (s.Height)*(nlCount + 1));

        // draw drawables..
        SmoothingMode smoothSave = g.SmoothingMode;
        g.SmoothingMode = smoothingMode_;

        bool legendDrawn = false;

        // iterate in z-order; the legend is drawn as soon as a drawable with a
        // higher z-order is reached (or after the loop if none is).
        for (int i_o = 0; i_o < ordering_.Count; ++i_o)
        {
            int i = (int) ordering_.GetByIndex(i_o);
            double zOrder = (double) ordering_.GetKey(i_o);
            if (zOrder > legendZOrder_)
            {
                // draw legend.
                if (!legendDrawn && legend_ != null)
                {
                    legend_.Draw(g, legendPosition, drawables_, scale);
                    legendDrawn = true;
                }
            }

            IDrawable drawable = (IDrawable) drawables_[i];
            XAxisPosition xap = (XAxisPosition) xAxisPositions_[i];
            YAxisPosition yap = (YAxisPosition) yAxisPositions_[i];

            PhysicalAxis drawXAxis;
            PhysicalAxis drawYAxis;

            if (xap == XAxisPosition.Bottom)
            {
                drawXAxis = pXAxis1;
            }
            else
            {
                drawXAxis = pXAxis2;
            }

            if (yap == YAxisPosition.Left)
            {
                drawYAxis = pYAxis1;
            }
            else
            {
                drawYAxis = pYAxis2;
            }

            // set the clipping region.. (necessary for zoom)
            g.Clip = new Region((Rectangle) plotAreaBoundingBoxCache_);
            // plot.
            drawable.Draw(g, drawXAxis, drawYAxis);
            // reset it..
            g.ResetClip();
        }

        if (!legendDrawn && legend_ != null)
        {
            legend_.Draw(g, legendPosition, drawables_, scale);
        }

        // cache the physical axes we used on this draw;
        pXAxis1Cache_ = pXAxis1;
        pYAxis1Cache_ = pYAxis1;
        pXAxis2Cache_ = pXAxis2;
        pYAxis2Cache_ = pYAxis2;

        g.SmoothingMode = smoothSave;

        // now draw axes.
        Rectangle axisBounds;
        pXAxis1.Draw(g, out axisBounds);
        pXAxis2.Draw(g, out axisBounds);
        pYAxis1.Draw(g, out axisBounds);
        pYAxis2.Draw(g, out axisBounds);

#if DEBUG_BOUNDING_BOXES
        g.DrawRectangle( new Pen(Color.Orange), (Rectangle) bbXAxis1Cache_ );
        g.DrawRectangle( new Pen(Color.Orange), (Rectangle) bbXAxis2Cache_ );
        g.DrawRectangle( new Pen(Color.Orange), (Rectangle) bbYAxis1Cache_ );
        g.DrawRectangle( new Pen(Color.Orange), (Rectangle) bbYAxis2Cache_ );
        g.DrawRectangle( new Pen(Color.Red,5.0F),(Rectangle) plotAreaBoundingBoxCache_);
        //if(this.ShowLegend)g.DrawRectangle( new Pen(Color.Chocolate, 3.0F), (Rectangle) bbLegendCache_);
        g.DrawRectangle( new Pen(Color.DeepPink,2.0F), (Rectangle) bbTitleCache_);
#endif
    }

    /// <summary>
    /// If a plot is removed, then the ordering_ list needs to be
    /// recalculated.
    /// </summary>
    private void RefreshZOrdering()
    {
        uniqueCounter_ = 0;
        ordering_ = new SortedList();
        for (int i = 0; i < zPositions_.Count; ++i)
        {
            double zpos = Convert.ToDouble(zPositions_[i]);
            // tiny unique fraction keeps SortedList keys distinct for equal z positions.
            double fraction = (double) (++uniqueCounter_)/10000000.0f;
            double d = zpos + fraction;
            ordering_.Add(d, i);
        }
    }

    /// <summary>
    /// Returns the x-axis associated with a given plot.
    /// </summary>
    /// <param name="plot">the plot to get associated x-axis.</param>
    /// <returns>the axis associated with the plot.</returns>
    public Axis WhichXAxis(IPlot plot)
    {
        int index = drawables_.IndexOf(plot);
        XAxisPosition p = (XAxisPosition) xAxisPositions_[index];
        if (p == XAxisPosition.Bottom)
            return xAxis1_;
        else
            return xAxis2_;
    }

    /// <summary>
    /// Returns the y-axis associated with a given plot.
    /// </summary>
    /// <param name="plot">the plot to get associated y-axis.</param>
    /// <returns>the axis associated with the plot.</returns>
    public Axis WhichYAxis(IPlot plot)
    {
        int index = drawables_.IndexOf(plot);
        YAxisPosition p = (YAxisPosition) yAxisPositions_[index];
        if (p == YAxisPosition.Left)
            return yAxis1_;
        else
            return yAxis2_;
    }
}
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace SwaggerDemoApi.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            // Empty lookup tables by default; callers populate them via HelpPageConfig.
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters",
            Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <see cref="TextSample"/> on success, an <see cref="InvalidSample"/> describing the failure otherwise.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // Serialize synchronously into the memory stream, then read it back as text.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Unwraps AggregateException (e.g. from the .Wait() above) to its inner exception.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter supports a direction if it can read (request) or write (response) the type.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Enumerates registered action samples matching controller/action/direction;
        // a sample registered with the wildcard parameter set { "*" } matches any parameters.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw string samples are wrapped in TextSample so the help page renders them as text.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
using System; using System.Collections.Generic; using System.IdentityModel.Claims; using System.Security.Cryptography.X509Certificates; using System.ServiceModel.Channels; using vm.Aspects.Wcf; using vm.Aspects.Wcf.Clients; namespace vm.Aspects.Model.PerCallContextRepositoryCallHandlerTests { /// <summary> /// WCF channel factory based client (proxy) for services implementing the contract ITestService. /// </summary> /// <seealso cref="LightClient{ITestService}" /> /// <seealso cref="ITestService" /> public class TestServiceClient : LightClient<ITestService>, ITestService { #region Constructors /// <summary> /// Initializes a new instance of the <see cref="TestServiceClient" /> class (creates the channel factory) /// from an endpoint configuration section given by the <paramref name="endpointConfigurationName" /> and service address. /// </summary> /// <param name="endpointConfigurationName">Name of the endpoint configuration.</param> /// <param name="remoteAddress">The remote address. If the remote address is <see langword="null" /> or empty /// the constructor will try to use the address in the endpoint configuration.</param> /// <param name="messagingPattern"> /// The messaging pattern defining the configuration of the connection. If <see langword="null"/>, empty or whitespace characters only, /// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present, /// otherwise will apply the default messaging pattern fro the transport. 
/// </param> public TestServiceClient( string endpointConfigurationName, string remoteAddress, string messagingPattern = null) : base(endpointConfigurationName, remoteAddress, messagingPattern) { if (endpointConfigurationName.IsNullOrWhiteSpace() && remoteAddress.IsNullOrWhiteSpace()) throw new ArgumentException("At least one of the parameters must be not null, not empty and not consist of whitespace characters only."); } /// <summary> /// Initializes a new instance of the <see cref="TestServiceClient" /> class (creates the channel factory). /// </summary> /// <param name="remoteAddress">The remote address of the service.</param> /// <param name="identityType"> /// Type of the identity: can be <see cref="ServiceIdentity.Dns" />, <see cref="ServiceIdentity.Spn" />, <see cref="ServiceIdentity.Upn" />, or /// <see cref="ServiceIdentity.Rsa" />. /// </param> /// <param name="identity"> /// The identifier in the case of <see cref="ServiceIdentity.Dns" /> should be the DNS name of specified by the service's certificate or machine. /// If the identity type is <see cref="ServiceIdentity.Upn" /> - use the UPN of the service identity; if <see cref="ServiceIdentity.Spn" /> - use the SPN and if /// <see cref="ServiceIdentity.Rsa" /> - use the RSA key. /// </param> /// <param name="messagingPattern"> /// The messaging pattern defining the configuration of the connection. If <see langword="null"/>, empty or whitespace characters only, /// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present, /// otherwise will apply the default messaging pattern fro the transport. 
/// </param> public TestServiceClient( string remoteAddress, ServiceIdentity identityType, string identity, string messagingPattern = null) : base(remoteAddress, identityType, identity, messagingPattern) { if (remoteAddress.IsNullOrWhiteSpace()) throw new ArgumentException("The argument cannot be null, empty string or consist of whitespace characters only.", nameof(remoteAddress)); if (identityType != ServiceIdentity.None && identityType != ServiceIdentity.Certificate && identity.IsNullOrWhiteSpace()) throw new ArgumentException("Invalid combination of identity parameters."); } /// <summary> /// Initializes a new instance of the <see cref="TestServiceClient" /> class. /// </summary> /// <param name="remoteAddress">The remote address of the service.</param> /// <param name="identityType"> /// Type of the identity: can be <see cref="ServiceIdentity.Certificate" /> or <see cref="ServiceIdentity.Rsa" />. /// </param> /// <param name="certificate">The identifying certificate.</param> /// <param name="messagingPattern"> /// The messaging pattern defining the configuration of the connection. If <see langword="null"/>, empty or whitespace characters only, /// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present, /// otherwise will apply the default messaging pattern fro the transport. 
/// </param> public TestServiceClient( string remoteAddress, ServiceIdentity identityType, X509Certificate2 certificate, string messagingPattern = null) : base(remoteAddress, identityType, certificate, messagingPattern) { if (remoteAddress.IsNullOrWhiteSpace()) throw new ArgumentException("The argument cannot be null, empty string or consist of whitespace characters only.", nameof(remoteAddress)); if (identityType != ServiceIdentity.None && !((identityType == ServiceIdentity.Dns || identityType == ServiceIdentity.Rsa || identityType == ServiceIdentity.Certificate) && certificate!=null)) throw new ArgumentException("Invalid combination of identity parameters."); } /// <summary> /// Initializes a new instance of the <see cref="TestServiceClient" /> class. /// </summary> /// <param name="remoteAddress">The remote address of the service.</param> /// <param name="identityClaim">The identity claim.</param> /// <param name="messagingPattern"> /// The messaging pattern defining the configuration of the connection. If <see langword="null"/>, empty or whitespace characters only, /// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present, /// otherwise will apply the default messaging pattern fro the transport. /// </param> public TestServiceClient( string remoteAddress, Claim identityClaim, string messagingPattern = null) : base(remoteAddress, identityClaim, messagingPattern) { if (remoteAddress.IsNullOrWhiteSpace()) throw new ArgumentException("The argument cannot be null, empty string or consist of whitespace characters only.", nameof(remoteAddress)); } /// <summary> /// Initializes a new instance of the <see cref="TestServiceClient" /> class (creates the channel factory). 
/// </summary> /// <param name="binding">A binding instance.</param> /// <param name="remoteAddress">The remote address of the service.</param> /// <param name="identityType"> /// Type of the identity: can be <see cref="ServiceIdentity.Dns" />, <see cref="ServiceIdentity.Spn" />, <see cref="ServiceIdentity.Upn" />, or /// <see cref="ServiceIdentity.Rsa" />. /// </param> /// <param name="identity"> /// The identifier in the case of <see cref="ServiceIdentity.Dns" /> should be the DNS name of specified by the service's certificate or machine. /// If the identity type is <see cref="ServiceIdentity.Upn" /> - use the UPN of the service identity; if <see cref="ServiceIdentity.Spn" /> - use the SPN and if /// <see cref="ServiceIdentity.Rsa" /> - use the RSA key. /// </param> /// <param name="messagingPattern"> /// The messaging pattern defining the configuration of the connection. If <see langword="null"/>, empty or whitespace characters only, /// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present, /// otherwise will apply the default messaging pattern fro the transport. /// </param> public TestServiceClient( Binding binding, string remoteAddress, ServiceIdentity identityType, string identity, string messagingPattern = null) : base(binding, remoteAddress, identityType, identity, messagingPattern) { if (binding == null) throw new ArgumentNullException(nameof(binding)); if (remoteAddress.IsNullOrWhiteSpace()) throw new ArgumentException("The argument cannot be null, empty string or consist of whitespace characters only.", nameof(remoteAddress)); if (identityType != ServiceIdentity.None && identityType != ServiceIdentity.Certificate && identity.IsNullOrWhiteSpace()) throw new ArgumentException("Invalid combination of identity parameters."); } /// <summary> /// Initializes a new instance of the <see cref="T:TestServiceClient{TContract}" /> class. 
/// </summary>
/// <param name="binding">A binding instance.</param>
/// <param name="remoteAddress">The remote address of the service.</param>
/// <param name="identityType">
/// Type of the identity: can be <see cref="ServiceIdentity.Certificate" /> or <see cref="ServiceIdentity.Rsa" />.
/// </param>
/// <param name="certificate">The identifying certificate.</param>
/// <param name="messagingPattern">
/// The messaging pattern defining the configuration of the connection. If <see langword="null"/>, empty or whitespace characters only,
/// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present,
/// otherwise will apply the default messaging pattern for the transport.
/// </param>
public TestServiceClient(
    Binding binding,
    string remoteAddress,
    ServiceIdentity identityType,
    X509Certificate2 certificate,
    string messagingPattern = null)
    : base(binding, remoteAddress, identityType, certificate, messagingPattern)
{
    if (binding == null)
        throw new ArgumentNullException(nameof(binding));

    if (remoteAddress.IsNullOrWhiteSpace())
        throw new ArgumentException("The argument cannot be null, empty string or consist of whitespace characters only.", nameof(remoteAddress));

    // Certificate-based identities (DNS, RSA or Certificate) require a non-null certificate.
    var certificateIdentityIsValid =
        (identityType == ServiceIdentity.Dns ||
         identityType == ServiceIdentity.Rsa ||
         identityType == ServiceIdentity.Certificate) && certificate != null;

    if (identityType != ServiceIdentity.None && !certificateIdentityIsValid)
        throw new ArgumentException("Invalid combination of identity parameters.");
}

/// <summary>
/// Initializes a new instance of the <see cref="TestServiceClient" /> class.
/// </summary>
/// <param name="binding">A binding instance.</param>
/// <param name="remoteAddress">The remote address of the service.</param>
/// <param name="identityClaim">The identity claim.</param>
/// <param name="messagingPattern">
/// The messaging pattern defining the configuration of the connection.
If <see langword="null"/>, empty or whitespace characters only,
/// the constructor will try to resolve the pattern from the interface's attribute <see cref="MessagingPatternAttribute"/> if present,
/// otherwise will apply the default messaging pattern for the transport.
/// </param>
public TestServiceClient(
    Binding binding,
    string remoteAddress,
    Claim identityClaim,
    string messagingPattern = null)
    : base(binding, remoteAddress, identityClaim, messagingPattern)
{
    if (binding == null)
        throw new ArgumentNullException(nameof(binding));

    if (remoteAddress.IsNullOrWhiteSpace())
        throw new ArgumentException("The argument cannot be null, empty string or consist of whitespace characters only.", nameof(remoteAddress));
}
#endregion

#region ITestService implementation

// Thin pass-through wrappers: each call is forwarded verbatim to the service proxy.
public void AddNewEntity() => Proxy.AddNewEntity();

public void UpdateEntities() => Proxy.UpdateEntities();

public int CountOfEntities() => Proxy.CountOfEntities();

public int CountOfValues() => Proxy.CountOfValues();

public ICollection<Entity> GetEntities(
    int skip,
    int take) => Proxy.GetEntities(skip, take);

public EntitiesAndValuesCountsDto GetCounts() => Proxy.GetCounts();
#endregion
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.CSharp.Utilities;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Simplification;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.CSharp.CodeRefactorings.InlineTemporary
{
    // Code refactoring that inlines a local ("temporary") variable: every reference to the
    // local is replaced with its initializer expression and the declaration is removed.
    [ExportCodeRefactoringProvider(LanguageNames.CSharp, Name = PredefinedCodeRefactoringProviderNames.InlineTemporary), Shared]
    internal partial class InlineTemporaryCodeRefactoringProvider : CodeRefactoringProvider
    {
        // Annotations used to locate the declarator / references / initializer again
        // after each document rewrite (node identity is lost across rewrites).
        internal static readonly SyntaxAnnotation DefinitionAnnotation = new SyntaxAnnotation();
        internal static readonly SyntaxAnnotation ReferenceAnnotation = new SyntaxAnnotation();
        internal static readonly SyntaxAnnotation InitializerAnnotation = new SyntaxAnnotation();
        internal static readonly SyntaxAnnotation ExpressionToInlineAnnotation = new SyntaxAnnotation();

        // Offers the "Inline temporary variable" refactoring when the selection is on a local
        // variable declarator that has a usable initializer and at least one visible reference.
        public override async Task ComputeRefactoringsAsync(CodeRefactoringContext context)
        {
            var document = context.Document;
            var textSpan = context.Span;
            var cancellationToken = context.CancellationToken;
            if (document.Project.Solution.Workspace.Kind == WorkspaceKind.MiscellaneousFiles)
            {
                return;
            }
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var token = root.FindToken(textSpan.Start);
            if (!token.Span.Contains(textSpan))
            {
                return;
            }
            var node = token.Parent;
            // Must be a declarator inside a local declaration statement.
            if (!node.IsKind(SyntaxKind.VariableDeclarator) ||
                !node.IsParentKind(SyntaxKind.VariableDeclaration) ||
                !node.Parent.IsParentKind(SyntaxKind.LocalDeclarationStatement))
            {
                return;
            }
            var variableDeclarator = (VariableDeclaratorSyntax)node;
            var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent;
            var localDeclarationStatement = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;
            // Require a well-formed initializer; stackalloc values cannot be inlined.
            if (variableDeclarator.Identifier != token ||
                variableDeclarator.Initializer == null ||
                variableDeclarator.Initializer.Value.IsMissing ||
                variableDeclarator.Initializer.Value.IsKind(SyntaxKind.StackAllocArrayCreationExpression))
            {
                return;
            }
            if (localDeclarationStatement.ContainsDiagnostics)
            {
                return;
            }
            var references = await GetReferencesAsync(document, variableDeclarator, cancellationToken).ConfigureAwait(false);
            if (!references.Any())
            {
                return;
            }
            context.RegisterRefactoring(
                new MyCodeAction(
                    CSharpFeaturesResources.InlineTemporaryVariable,
                    (c) => this.InlineTemporaryAsync(document, variableDeclarator, c)));
        }

        // Returns all reference locations of the declared local, or empty when the local cannot
        // be resolved or any reference overlaps a hidden (#line hidden) region.
        private async Task<IEnumerable<ReferenceLocation>> GetReferencesAsync(
            Document document,
            VariableDeclaratorSyntax variableDeclarator,
            CancellationToken cancellationToken)
        {
            var semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            var local = semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken);
            if (local != null)
            {
                var findReferencesResult = await SymbolFinder.FindReferencesAsync(local, document.Project.Solution, cancellationToken).ConfigureAwait(false);
                var locations = findReferencesResult.Single(r => r.Definition == local).Locations;
                if (!locations.Any(loc => semanticModel.SyntaxTree.OverlapsHiddenPosition(loc.Location.SourceSpan, cancellationToken)))
                {
                    return locations;
                }
            }
            return SpecializedCollections.EmptyEnumerable<ReferenceLocation>();
        }

        // Returns true when inlining into this reference would change semantics
        // (e.g. ref/out argument, increment/decrement, address-of, assignment target).
        private static bool HasConflict(IdentifierNameSyntax identifier, VariableDeclaratorSyntax variableDeclarator)
        {
            // TODO: Check for more conflict types.
if (identifier.SpanStart < variableDeclarator.SpanStart)
            {
                // A reference before the declaration is always a conflict.
                return true;
            }
            // Walk up through any wrapping parentheses/casts to the node that actually
            // participates in the parent construct.
            var identifierNode = identifier
                .Ancestors()
                .TakeWhile(n => n.Kind() == SyntaxKind.ParenthesizedExpression || n.Kind() == SyntaxKind.CastExpression)
                .LastOrDefault();
            if (identifierNode == null)
            {
                identifierNode = identifier;
            }
            if (identifierNode.IsParentKind(SyntaxKind.Argument))
            {
                // ref/out arguments write through the variable, so it cannot be inlined.
                var argument = (ArgumentSyntax)identifierNode.Parent;
                if (argument.RefOrOutKeyword.Kind() != SyntaxKind.None)
                {
                    return true;
                }
            }
            else if (identifierNode.Parent.IsKind(
                SyntaxKind.PreDecrementExpression,
                SyntaxKind.PreIncrementExpression,
                SyntaxKind.PostDecrementExpression,
                SyntaxKind.PostIncrementExpression,
                SyntaxKind.AddressOfExpression))
            {
                return true;
            }
            else if (identifierNode.Parent is AssignmentExpressionSyntax)
            {
                // Being the target of an assignment is a write, not a read.
                var binaryExpression = (AssignmentExpressionSyntax)identifierNode.Parent;
                if (binaryExpression.Left == identifierNode)
                {
                    return true;
                }
            }
            return false;
        }

        private static SyntaxAnnotation CreateConflictAnnotation()
        {
            return ConflictAnnotation.Create(CSharpFeaturesResources.ConflictsDetected);
        }

        // Performs the actual inline: annotates the declarator, expands references,
        // substitutes the initializer expression and removes the declaration when safe.
        private async Task<Document> InlineTemporaryAsync(Document document, VariableDeclaratorSyntax declarator, CancellationToken cancellationToken)
        {
            var workspace = document.Project.Solution.Workspace;
            // Annotate the variable declarator so that we can get back to it later.
            var updatedDocument = await document.ReplaceNodeAsync(declarator, declarator.WithAdditionalAnnotations(DefinitionAnnotation), cancellationToken).ConfigureAwait(false);
            var semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            var variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            // Create the expression that we're actually going to inline.
            var expressionToInline = await CreateExpressionToInlineAsync(variableDeclarator, updatedDocument, cancellationToken).ConfigureAwait(false);
            // Collect the identifier names for each reference.
var local = (ILocalSymbol)semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken);
            var symbolRefs = await SymbolFinder.FindReferencesAsync(local, updatedDocument.Project.Solution, cancellationToken).ConfigureAwait(false);
            var references = symbolRefs.Single(r => r.Definition == local).Locations;
            var syntaxRoot = await updatedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            // Collect the topmost parenting expression for each reference.
            var nonConflictingIdentifierNodes = references
                .Select(loc => (IdentifierNameSyntax)syntaxRoot.FindToken(loc.Location.SourceSpan.Start).Parent)
                .Where(ident => !HasConflict(ident, variableDeclarator));
            // Add referenceAnnotations to identifier nodes being replaced.
            updatedDocument = await updatedDocument.ReplaceNodesAsync(
                nonConflictingIdentifierNodes,
                (o, n) => n.WithAdditionalAnnotations(ReferenceAnnotation),
                cancellationToken).ConfigureAwait(false);
            // Re-acquire model and declarator: the document was just rewritten.
            semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            // Get the annotated reference nodes.
            nonConflictingIdentifierNodes = await FindReferenceAnnotatedNodesAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            var topmostParentingExpressions = nonConflictingIdentifierNodes
                .Select(ident => GetTopMostParentingExpression(ident))
                .Distinct();
            // Captured now so semantic drift can be detected after inlining.
            var originalInitializerSymbolInfo = semanticModel.GetSymbolInfo(variableDeclarator.Initializer.Value, cancellationToken);
            // Make each topmost parenting statement or Equals Clause Expressions semantically explicit.
            updatedDocument = await updatedDocument.ReplaceNodesAsync(topmostParentingExpressions, (o, n) => Simplifier.Expand(n, semanticModel, workspace, cancellationToken: cancellationToken), cancellationToken).ConfigureAwait(false);
            semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            var semanticModelBeforeInline = semanticModel;
            variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            var scope = GetScope(variableDeclarator);
            // ReferenceRewriter substitutes the inlined expression for each annotated reference.
            var newScope = ReferenceRewriter.Visit(semanticModel, scope, variableDeclarator, expressionToInline, cancellationToken);
            updatedDocument = await updatedDocument.ReplaceNodeAsync(scope, newScope, cancellationToken).ConfigureAwait(false);
            semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            newScope = GetScope(variableDeclarator);
            var conflicts = newScope.GetAnnotatedNodesAndTokens(ConflictAnnotation.Kind);
            var declaratorConflicts = variableDeclarator.GetAnnotatedNodesAndTokens(ConflictAnnotation.Kind);
            // Note that we only remove the local declaration if there weren't any conflicts,
            // unless those conflicts are inside the local declaration.
            if (conflicts.Count() == declaratorConflicts.Count())
            {
                // Certain semantic conflicts can be detected only after the reference rewriter has inlined the expression
                var newDocument = await DetectSemanticConflicts(updatedDocument, semanticModel, semanticModelBeforeInline, originalInitializerSymbolInfo, cancellationToken).ConfigureAwait(false);
                if (updatedDocument == newDocument)
                {
                    // No semantic conflicts, we can remove the definition.
                    updatedDocument = await updatedDocument.ReplaceNodeAsync(newScope, RemoveDeclaratorFromScope(variableDeclarator, newScope), cancellationToken).ConfigureAwait(false);
                }
                else
                {
                    // There were some semantic conflicts, don't remove the definition.
                    updatedDocument = newDocument;
                }
            }
            return updatedDocument;
        }

        private static async Task<VariableDeclaratorSyntax> FindDeclaratorAsync(Document document, CancellationToken cancellationToken)
        {
            return await FindNodeWithAnnotationAsync<VariableDeclaratorSyntax>(document, DefinitionAnnotation, cancellationToken).ConfigureAwait(false);
        }

        private static async Task<ExpressionSyntax> FindInitializerAsync(Document document, CancellationToken cancellationToken)
        {
            return await FindNodeWithAnnotationAsync<ExpressionSyntax>(document, InitializerAnnotation, cancellationToken).ConfigureAwait(false);
        }

        // Finds the single node carrying the given annotation in the document's root.
        private static async Task<T> FindNodeWithAnnotationAsync<T>(Document document, SyntaxAnnotation annotation, CancellationToken cancellationToken)
            where T : SyntaxNode
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            return root
                .GetAnnotatedNodesAndTokens(annotation)
                .Single()
                .AsNode() as T;
        }

        private static async Task<IEnumerable<IdentifierNameSyntax>> FindReferenceAnnotatedNodesAsync(Document document, CancellationToken cancellationToken)
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            return FindReferenceAnnotatedNodes(root);
        }

        private static IEnumerable<IdentifierNameSyntax> FindReferenceAnnotatedNodes(SyntaxNode root)
        {
            var annotatedNodesAndTokens = root.GetAnnotatedNodesAndTokens(ReferenceAnnotation);
            foreach (var nodeOrToken in annotatedNodesAndTokens)
            {
                if (nodeOrToken.IsNode && nodeOrToken.AsNode().IsKind(SyntaxKind.IdentifierName))
                {
                    yield return (IdentifierNameSyntax)nodeOrToken.AsNode();
                }
            }
        }

        // Returns the enclosing scope node within which the inlining rewrite is applied.
        private SyntaxNode GetScope(VariableDeclaratorSyntax variableDeclarator)
        {
            var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent;
            var
localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;
            var scope = localDeclaration.Parent;
            // Skip past any labels that wrap the declaration statement.
            while (scope.IsKind(SyntaxKind.LabeledStatement))
            {
                scope = scope.Parent;
            }
            var parentExpressions = scope.AncestorsAndSelf().OfType<ExpressionSyntax>();
            if (parentExpressions.Any())
            {
                scope = parentExpressions.LastOrDefault().Parent;
            }
            return scope;
        }

        // Finds the declarator carrying DefinitionAnnotation within an arbitrary node.
        private VariableDeclaratorSyntax FindDeclarator(SyntaxNode node)
        {
            var annotatedNodesOrTokens = node.GetAnnotatedNodesAndTokens(DefinitionAnnotation).ToList();
            Contract.Requires(annotatedNodesOrTokens.Count == 1, "Only a single variable declarator should have been annotated.");
            return (VariableDeclaratorSyntax)annotatedNodesOrTokens.First().AsNode();
        }

        private SyntaxTriviaList GetTriviaToPreserve(SyntaxTriviaList syntaxTriviaList)
        {
            return ShouldPreserve(syntaxTriviaList) ? syntaxTriviaList : default(SyntaxTriviaList);
        }

        // Comments and preprocessor directives must survive the removal of the declaration.
        private static bool ShouldPreserve(SyntaxTriviaList trivia)
        {
            return trivia.Any(t => t.IsRegularComment() || t.IsDirective);
        }

        // Removes one declarator from a multi-variable declaration, keeping the statement.
        private SyntaxNode RemoveDeclaratorFromVariableList(VariableDeclaratorSyntax variableDeclarator, VariableDeclarationSyntax variableDeclaration)
        {
            Debug.Assert(variableDeclaration.Variables.Count > 1);
            Debug.Assert(variableDeclaration.Variables.Contains(variableDeclarator));
            var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;
            var scope = GetScope(variableDeclarator);
            var newLocalDeclaration = localDeclaration.RemoveNode(variableDeclarator, SyntaxRemoveOptions.KeepNoTrivia)
                .WithAdditionalAnnotations(Formatter.Annotation);
            return scope.ReplaceNode(localDeclaration, newLocalDeclaration);
        }

        // Removes the declarator (or the whole declaration statement) from the given scope.
        private SyntaxNode RemoveDeclaratorFromScope(VariableDeclaratorSyntax variableDeclarator, SyntaxNode scope)
        {
            var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent;
            // If there is more than one variable declarator, remove this one from the variable declaration.
if (variableDeclaration.Variables.Count > 1)
            {
                return RemoveDeclaratorFromVariableList(variableDeclarator, variableDeclaration);
            }
            var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;
            // There's only one variable declarator, so we'll remove the local declaration
            // statement entirely. This means that we'll concatenate the leading and trailing
            // trivia of this declaration and move it to the next statement.
            var leadingTrivia = localDeclaration
                .GetLeadingTrivia()
                .Reverse()
                .SkipWhile(t => t.MatchesKind(SyntaxKind.WhitespaceTrivia))
                .Reverse()
                .ToSyntaxTriviaList();
            var trailingTrivia = localDeclaration
                .GetTrailingTrivia()
                .SkipWhile(t => t.MatchesKind(SyntaxKind.WhitespaceTrivia, SyntaxKind.EndOfLineTrivia))
                .ToSyntaxTriviaList();
            var newLeadingTrivia = leadingTrivia.Concat(trailingTrivia);
            var nextToken = localDeclaration.GetLastToken().GetNextTokenOrEndOfFile();
            var newNextToken = nextToken.WithPrependedLeadingTrivia(newLeadingTrivia)
                .WithAdditionalAnnotations(Formatter.Annotation);
            var newScope = scope.ReplaceToken(nextToken, newNextToken);
            var newLocalDeclaration = (LocalDeclarationStatementSyntax)FindDeclarator(newScope).Parent.Parent;
            // If the local is parented by a label statement, we can't remove this statement. Instead,
            // we'll replace the local declaration with an empty expression statement.
            if (newLocalDeclaration.IsParentKind(SyntaxKind.LabeledStatement))
            {
                var labeledStatement = (LabeledStatementSyntax)newLocalDeclaration.Parent;
                var newLabeledStatement = labeledStatement.ReplaceNode(newLocalDeclaration, SyntaxFactory.ParseStatement(""));
                return newScope.ReplaceNode(labeledStatement, newLabeledStatement);
            }
            return newScope.RemoveNode(newLocalDeclaration, SyntaxRemoveOptions.KeepNoTrivia);
        }

        // Unwraps parentheses that wrap another parenthesized expression or a bare identifier.
        private ExpressionSyntax SkipRedundantExteriorParentheses(ExpressionSyntax expression)
        {
            while (expression.IsKind(SyntaxKind.ParenthesizedExpression))
            {
                var parenthesized = (ParenthesizedExpressionSyntax)expression;
                if (parenthesized.Expression == null || parenthesized.Expression.IsMissing)
                {
                    break;
                }
                if (parenthesized.Expression.IsKind(SyntaxKind.ParenthesizedExpression) ||
                    parenthesized.Expression.IsKind(SyntaxKind.IdentifierName))
                {
                    expression = parenthesized.Expression;
                }
                else
                {
                    break;
                }
            }
            return expression;
        }

        // Builds the (annotated, semantically expanded) expression that will replace the references.
        private async Task<ExpressionSyntax> CreateExpressionToInlineAsync(
            VariableDeclaratorSyntax variableDeclarator,
            Document document,
            CancellationToken cancellationToken)
        {
            var updatedDocument = document;
            var expression = SkipRedundantExteriorParentheses(variableDeclarator.Initializer.Value);
            var semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            var localSymbol = (ILocalSymbol)semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken);
            var newExpression = InitializerRewriter.Visit(expression, localSymbol, semanticModel);
            // If this is an array initializer, we need to transform it into an array creation
            // expression for inlining.
            if (newExpression.Kind() == SyntaxKind.ArrayInitializerExpression)
            {
                var arrayType = (ArrayTypeSyntax)localSymbol.Type.GenerateTypeSyntax();
                var arrayInitializer = (InitializerExpressionSyntax)newExpression;
                // Add any non-whitespace trailing trivia from the equals clause to the type.
var equalsToken = variableDeclarator.Initializer.EqualsToken;
                if (equalsToken.HasTrailingTrivia)
                {
                    var trailingTrivia = equalsToken.TrailingTrivia.SkipInitialWhitespace();
                    if (trailingTrivia.Any())
                    {
                        arrayType = arrayType.WithTrailingTrivia(trailingTrivia);
                    }
                }
                newExpression = SyntaxFactory.ArrayCreationExpression(arrayType, arrayInitializer);
            }
            // Tag the initializer so it can be found again after subsequent rewrites.
            newExpression = newExpression.WithAdditionalAnnotations(InitializerAnnotation);
            updatedDocument = await updatedDocument.ReplaceNodeAsync(variableDeclarator.Initializer.Value, newExpression, cancellationToken).ConfigureAwait(false);
            semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
            newExpression = await FindInitializerAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            var newVariableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            localSymbol = (ILocalSymbol)semanticModel.GetDeclaredSymbol(newVariableDeclarator, cancellationToken);
            // Add an explicit cast where inlining would otherwise change the expression's type.
            bool wasCastAdded;
            var explicitCastExpression = newExpression.CastIfPossible(localSymbol.Type, newVariableDeclarator.SpanStart, semanticModel, out wasCastAdded);
            if (wasCastAdded)
            {
                updatedDocument = await updatedDocument.ReplaceNodeAsync(newExpression, explicitCastExpression, cancellationToken).ConfigureAwait(false);
                semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
                newVariableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
            }
            // Now that the variable declarator is normalized, make its initializer
            // value semantically explicit.
            newExpression = await Simplifier.ExpandAsync(newVariableDeclarator.Initializer.Value, updatedDocument, cancellationToken: cancellationToken).ConfigureAwait(false);
            return newExpression.WithAdditionalAnnotations(ExpressionToInlineAnnotation);
        }

        private static SyntaxNode GetTopMostParentingExpression(ExpressionSyntax expression)
        {
            return expression.AncestorsAndSelf().OfType<ExpressionSyntax>().Last();
        }

        private static async Task<Document> DetectSemanticConflicts(
            Document inlinedDocument,
            SemanticModel newSemanticModelForInlinedDocument,
            SemanticModel semanticModelBeforeInline,
            SymbolInfo originalInitializerSymbolInfo,
            CancellationToken cancellationToken)
        {
            // In this method we detect if inlining the expression introduced the following semantic change:
            // The symbol info associated with any of the inlined expressions does not match the symbol info for original initializer expression prior to inline.
            // If any semantic changes were introduced by inlining, we update the document with conflict annotations.
            // Otherwise we return the given inlined document without any changes.
            var syntaxRootBeforeInline = await semanticModelBeforeInline.SyntaxTree.GetRootAsync(cancellationToken).ConfigureAwait(false);
            // Get all the identifier nodes which were replaced with inlined expression.
            var originalIdentifierNodes = FindReferenceAnnotatedNodes(syntaxRootBeforeInline);
            if (originalIdentifierNodes.IsEmpty())
            {
                // No conflicts
                return inlinedDocument;
            }
            // Get all the inlined expression nodes.
var syntaxRootAfterInline = await inlinedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var inlinedExprNodes = syntaxRootAfterInline.GetAnnotatedNodesAndTokens(ExpressionToInlineAnnotation);
            Debug.Assert(originalIdentifierNodes.Count() == inlinedExprNodes.Count());
            Dictionary<SyntaxNode, SyntaxNode> replacementNodesWithChangedSemantics = null;
            // Walk the original references and the inlined expressions in lock step.
            using (var originalNodesEnum = originalIdentifierNodes.GetEnumerator())
            {
                using (var inlinedNodesOrTokensEnum = inlinedExprNodes.GetEnumerator())
                {
                    while (originalNodesEnum.MoveNext())
                    {
                        inlinedNodesOrTokensEnum.MoveNext();
                        var originalNode = originalNodesEnum.Current;
                        // expressionToInline is Parenthesized prior to replacement, so get the parenting parenthesized expression.
                        var inlinedNode = (ExpressionSyntax)inlinedNodesOrTokensEnum.Current.Parent;
                        Debug.Assert(inlinedNode.IsKind(SyntaxKind.ParenthesizedExpression));
                        // inlinedNode is the expanded form of the actual initializer expression in the original document.
                        // We have annotated the inner initializer with a special syntax annotation "InitializerAnnotation".
                        // Get this annotated node and compute the symbol info for this node in the inlined document.
                        var innerInitializerInInlineNodeOrToken = inlinedNode.GetAnnotatedNodesAndTokens(InitializerAnnotation).First();
                        ExpressionSyntax innerInitializerInInlineNode = (ExpressionSyntax)(innerInitializerInInlineNodeOrToken.IsNode ?
                            innerInitializerInInlineNodeOrToken.AsNode() :
                            innerInitializerInInlineNodeOrToken.AsToken().Parent);
                        var newInitializerSymbolInfo = newSemanticModelForInlinedDocument.GetSymbolInfo(innerInitializerInInlineNode, cancellationToken);
                        // Verification: The symbol info associated with any of the inlined expressions does not match the symbol info for original initializer expression prior to inline.
                        if (!SpeculationAnalyzer.SymbolInfosAreCompatible(originalInitializerSymbolInfo, newInitializerSymbolInfo, performEquivalenceCheck: true))
                        {
                            // Fall back to the whole parenthesized expression before declaring a conflict.
                            newInitializerSymbolInfo = newSemanticModelForInlinedDocument.GetSymbolInfo(inlinedNode, cancellationToken);
                            if (!SpeculationAnalyzer.SymbolInfosAreCompatible(originalInitializerSymbolInfo, newInitializerSymbolInfo, performEquivalenceCheck: true))
                            {
                                if (replacementNodesWithChangedSemantics == null)
                                {
                                    replacementNodesWithChangedSemantics = new Dictionary<SyntaxNode, SyntaxNode>();
                                }
                                replacementNodesWithChangedSemantics.Add(inlinedNode, originalNode);
                            }
                        }
                    }
                }
            }
            if (replacementNodesWithChangedSemantics == null)
            {
                // No conflicts.
                return inlinedDocument;
            }
            // Replace the conflicting inlined nodes with the original nodes annotated with conflict annotation.
            Func<SyntaxNode, SyntaxNode, SyntaxNode> conflictAnnotationAdder =
                (SyntaxNode oldNode, SyntaxNode newNode) =>
                    newNode.WithAdditionalAnnotations(ConflictAnnotation.Create(CSharpFeaturesResources.ConflictsDetected));
            return await inlinedDocument.ReplaceNodesAsync(replacementNodesWithChangedSemantics.Keys, conflictAnnotationAdder, cancellationToken).ConfigureAwait(false);
        }

        // Simple document-change code action wrapper for this refactoring.
        private class MyCodeAction : CodeAction.DocumentChangeAction
        {
            public MyCodeAction(string title, Func<CancellationToken, Task<Document>> createChangedDocument)
                : base(title, createChangedDocument)
            {
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;

namespace NotJavaScript.Tree
{
    /// <summary>
    /// AST node for an anonymous (inline) function expression. Typing produces a
    /// <c>Type_Functional</c>; PHP emission generates a dedicated CLOSURE_* class that
    /// captures the function's external variables.
    /// </summary>
    public class InlineFunction : Expression
    {
        gType ReturnType;         // declared return type of the anonymous function
        Arguments args;           // formal parameters
        Block code;               // function body
        DebugInformation Debug;   // source position for error reporting
        Arguments requires;       // extra required (session-provided) fields; may be null

        public InlineFunction(gType rt, Arguments a, Block b, Arguments c, DebugInformation di)
        {
            ReturnType = rt;
            args = a;
            code = b;
            requires = c;
            Debug = di;
        }

        public void _Map(Walk W)
        {
            W.Step(code);
        }

        /// <summary>
        /// Types the anonymous function: builds the functional type from the argument types
        /// and return type, then well-types the body in a fresh environment that declares
        /// the parameters (and any required fields).
        /// </summary>
        public gType _TypeOf(Environment E)
        {
            Type_Functional tf;
            try
            {
                tf = new Type_Functional(args._TypeOf(), ReturnType);
                if (requires != null)
                {
                    tf.Requirements = requires._TypeOf();
                }
            }
            catch (Exception)
            {
                Console.Error.WriteLine(Debug.ErrorPrefix(1018) + "Anonymous Function can not type arguments");
                // FIX: bare rethrow preserves the original stack trace (CA2200);
                // the previous "throw err;" reset it.
                throw;
            }
            try
            {
                NotJavaScript.Tree.Environment NX = new NotJavaScript.Tree.Environment(true, E.Segments, E.Enumerations, E.Contracts, E.Functions, ReturnType, E.CurrentNameSpace, E.DatabaseGroupings, E.ReducedFunctionNames);
                foreach (Field F in args.Args)
                {
                    NX.Declare(F.Name, F.Typ);
                }
                if (requires != null)
                {
                    foreach (Field F in requires.Args)
                    {
                        NX.Declare(F.Name, F.Typ);
                    }
                }
                code._WellType(NX);
            }
            catch (Exception)
            {
                Console.Error.WriteLine(Debug.ErrorPrefix(1021) + "Anonymous Function can not well type code body");
                // FIX: bare rethrow preserves the original stack trace (CA2200).
                throw;
            }
            return tf;
        }

        /// <summary>
        /// Emits PHP for the closure. A first pass into a scratch buffer lets the
        /// environment discover which external variables the body references.
        /// </summary>
        public void _PhpOf(Environment E, StringWriter O)
        {
            if (E.InsideDatabaseConnection)
            {
                Console.Error.WriteLine(Debug.ErrorPrefix(3001) + "Can not construct a closure whilst inside a database connection");
                throw new Exception("can't while inside a database connection");
            }
            StringWriter BK = new StringWriter();
            NotJavaScript.Tree.Environment NX = new NotJavaScript.Tree.Environment(true, E.Segments, E.Enumerations, E.Contracts, E.Functions, ReturnType, E.CurrentNameSpace, E.DatabaseGroupings, E.ReducedFunctionNames);
            foreach (Field F in args.Args)
            {
                NX.Declare(F.Name, F.Typ);
            }
            if (requires != null)
            {
                foreach (Field F in requires.Args)
                {
                    NX.Declare(F.Name, F.Typ);
                }
            }
            // First pass: emit body into a scratch buffer so NX records external requests.
            code._PhpOf(NX, BK);
List<string> externals = NX.GetExternalRequests();
            // Unique name for the emitted PHP closure class.
            string fName = "" + E.CurrentNameSpace + "_" + E.AnonyClassCounter;
            // ParMap0: "$a,$b" (no leading comma) - parameter list for Ev().
            // ParMap:  ",$a,$b" (leading comma) - retained for the legacy call shape below.
            string ParMap0 = "";
            string ParMap = "";
            int k = 0;
            foreach (Field F in args.Args)
            {
                if (k > 0)
                {
                    ParMap0 += ",";
                }
                ParMap0 += "$" + F.Name;
                ParMap += ",";
                ParMap += "$" + F.Name;
                //E.WriteLine("$" + F.Name + "=$Par["+k+"];");
                k++;
            }
            /* E.WriteLine("function " + fName + "($Hid" + ParMap + ")"); E.WriteLine("{"); foreach (string e in externals) { E.WriteLine("$" + e + "=$Hid[\"" + e + "\"];"); } */
            // Building New Environment
            NX = new NotJavaScript.Tree.Environment(false, E.Segments, E.Enumerations, E.Contracts, E.Functions, ReturnType, E.CurrentNameSpace, E.DatabaseGroupings, E.ReducedFunctionNames);
            foreach (string e in externals)
            {
                // Resolve named types to their contract/enumeration definitions before declaring.
                gType TT = E.FindType(e);
                if (TT is Type_NamedType)
                {
                    if (E.Contracts.ContainsKey((TT as Type_NamedType).Name))
                    {
                        TT = E.Contracts[(TT as Type_NamedType).Name];
                    }
                    else if (E.Enumerations.ContainsKey((TT as Type_NamedType).Name))
                    {
                        TT = E.Enumerations[(TT as Type_NamedType).Name];
                    }
                }
                NX.Declare(e, TT);
            }
            foreach (Field F in args.Args)
            {
                NX.Declare(F.Name, F.Typ);
            }
            // Second pass: emit the body for real, now that externals are declared.
            BK = new StringWriter();
            code._PhpOf2(NX, BK);
            /* E.WriteLine(BK.ToString().Trim()); E.WriteLine("}"); */
            // Emit the closure class: one private field per captured external.
            E.WriteLine("class CLOSURE_" + fName);
            E.WriteLine("{");
            // E.WriteLine("\tpublic $_Hidden;");
            //string Hid = "";
            string ParHidden = "";
            foreach (string e in externals)
            {
                if (ParHidden.Length > 0) ParHidden += ",";
                ParHidden += "$" + e;
                E.WriteLine("\tprivate $_" + e + ";");
            }
            E.WriteLine("\tpublic function Ev(" + ParMap0 + ")");
            E.WriteLine("\t{");
            if (externals.Count > 0)
            {
                //E.WriteLine("\t\treturn " + fName + "($this->_Hidden" + ParMap + ");");
                // Copy each captured field into a local so the body sees plain variables.
                foreach (string e in externals)
                {
                    E.WriteLine("\t\t$" + e + "=$this->_" + e + ";");
                }
            }
            if (requires != null)
            {
                // Required fields are pulled from the PHP session at call time.
                foreach (Field F in requires.Args)
                {
                    E.WriteLine("\t\t$" + F.Name + "=$_SESSION['" + F.Name + "'];");
                }
            }
            // Re-indent the buffered body under the Ev() method.
            E.WriteLine("\t\t" + BK.ToString().Trim().Replace("" + (char)10,"" + (char)10 + "\t\t"));
E.WriteLine("\t}"); E.WriteLine("\tpublic function __construct("+ParHidden+")"); E.WriteLine("\t{"); foreach (string e in externals) { E.WriteLine("\t$this->_" + e + " = $"+e+";"); } //E.WriteLine("\t\t$this->_Hidden = $h;"); E.WriteLine("\t}"); E.WriteLine("}"); O.Write("new CLOSURE_" + fName + "(" + ParHidden + ")"); } } public class Apply : Expression { Expression expr; Tuple param; public Apply(Expression e, Tuple p) { expr = e; param = p; } public void _Map(Walk W) { expr._Map(W); param._Map(W); } public gType _TypeOf(Environment E) { DebugRange Debug = param.Debug; gType T = expr._TypeOf(E); try { if (T is Type_Functional) { if ((T as Type_Functional).Inputs.Suitable(E, param._TypeOf(E))) { return (T as Type_Functional).Output; } } } catch (Exception err) { Console.Error.WriteLine(Debug.ErrorPrefix(1019) + "Functional Application Failure"); throw err; } Console.Error.WriteLine(Debug.ErrorPrefix(1020) + "Expression is not a function"); throw new Exception("Typing Failure (Application)"); } public void _PhpOf(Environment E, StringWriter O) { _TypeOf(E); if (expr is Variable) { string x = (expr as Variable).Var; if (E.isGlobalFunction(x)) { string y = x; if (y.IndexOf("_NS_") < 0) { y = E.CurrentNameSpace + "_NS_" + x; } if (E.ReducedFunctionNames.ContainsKey(x)) { y = E.ReducedFunctionNames[x]; } O.Write(y); param._PhpOf(E, O); return; } } O.Write("_I_("); expr._PhpOf(E, O); O.Write(")->Ev"); param._PhpOf(E, O); } } }
////////////////////////////////////////////////////////////////////////////
//
// Copyright 2016 Realm Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Nito.AsyncEx;
using NUnit.Framework;
using Realms;

namespace Tests.Database
{
    // Tests for INotifyPropertyChanged behavior on Realm objects, covering both
    // unmanaged (plain in-memory) and managed (persisted) instances, change
    // notifications across instances/threads, transaction commit/rollback, and
    // computed-property notifications raised from OnPropertyChanged.
    [TestFixture, Preserve(AllMembers = true)]
    public class PropertyChangedTests
    {
        private string _databasePath;          // temp file backing the test Realm
        private Lazy<Realm> _lazyRealm;

        private Realm _realm => _lazyRealm.Value;

        // We capture the current SynchronizationContext when opening a Realm.
        // However, NUnit replaces the SynchronizationContext after the SetUp method and before the async test method.
        // That's why we make sure we open the Realm in the test method by accessing it lazily.
        [SetUp]
        public void SetUp()
        {
            _databasePath = Path.GetTempFileName();
            _lazyRealm = new Lazy<Realm>(() => Realm.GetInstance(_databasePath));
        }

        [TearDown]
        public void TearDown()
        {
            // Only clean up if a test actually opened the Realm.
            if (_lazyRealm.IsValueCreated)
            {
                _realm.Dispose();
                Realm.DeleteRealm(_realm.Config);
            }
        }

        // Unmanaged object: PropertyChanged fires synchronously on set, and stops
        // after unsubscribing.
        [Test]
        public void UnmanagedObject()
        {
            AsyncContext.Run(async delegate
            {
                string notifiedPropertyName = null;
                var person = new Person();
                var handler = new PropertyChangedEventHandler((sender, e) =>
                {
                    notifiedPropertyName = e.PropertyName;
                });
                person.PropertyChanged += handler;

                // Subscribed - should trigger
                person.FirstName = "Peter";
                await Task.Yield();
                Assert.That(notifiedPropertyName, Is.EqualTo(nameof(Person.FirstName)));

                notifiedPropertyName = null;
                person.PropertyChanged -= handler;

                // Unsubscribed - should not trigger
                person.FirstName = "George";
                await Task.Yield();
                Assert.That(notifiedPropertyName, Is.Null);
            });
        }

        // A subscription made while the object was unmanaged keeps working after the
        // object is added to the Realm (the Add itself raises no notification).
        [Test]
        public void UnmanagedObject_AfterAdd_ShouldContinueTriggering()
        {
            var notifications = 0;
            var person = new Person();
            person.PropertyChanged += (sender, e) =>
            {
                if (e.PropertyName == nameof(Person.FirstName))
                {
                    notifications++;
                }
            };

            person.FirstName = "Peter";
            _realm.Refresh();
            Assert.That(notifications, Is.EqualTo(1));

            // Managing the object should not raise a notification by itself.
            _realm.Write(() => { _realm.Add(person); });
            _realm.Refresh();
            Assert.That(notifications, Is.EqualTo(1));

            _realm.Write(() => { person.FirstName = "George"; });
            _realm.Refresh();
            Assert.That(notifications, Is.EqualTo(2));
        }

        [Test]
        public void ManagedObject_WhenSameInstanceChanged()
        {
            AsyncContext.Run(delegate
            {
                return TestManaged((person, name) =>
                {
                    _realm.Write(() => { person.FirstName = name; });
                    return Task.CompletedTask;
                });
            });
        }

        // Changing the row through a different accessor instance on the same thread
        // still notifies the subscribed instance.
        [Test]
        public void ManagedObject_WhenAnotherInstanceChanged()
        {
            AsyncContext.Run(delegate
            {
                return TestManaged((_, name) =>
                {
                    _realm.Write(() =>
                    {
                        var otherPersonInstance = _realm.All<Person>().First();
                        otherPersonInstance.FirstName = name;
                    });
                    return Task.CompletedTask;
                });
            });
        }

        // Changing the row from another thread (WriteAsync) also notifies.
        [Test]
        public void ManagedObject_WhenAnotherThreadInstanceChanged()
        {
            AsyncContext.Run(delegate
            {
                return TestManaged(async (_, name) =>
                {
                    await _realm.WriteAsync(otherRealm =>
                    {
                        var otherPersonInstance = otherRealm.All<Person>().First();
                        otherPersonInstance.FirstName = name;
                    });

                    // Grace period for the cross-thread notification to arrive;
                    // presumably needed because delivery is asynchronous — see TestManaged.
                    await Task.Delay(50);
                });
            });
        }

        [Test]
        public void ManagedObject_WhenSameInstanceTransactionRollback()
        {
            TestManagedRollback((person, name) => { person.FirstName = name; }, _realm.BeginWrite);
        }

        // NOTE(review): "Instace" in this test name is a typo for "Instance";
        // left unchanged here because renaming is a code change.
        [Test]
        public void ManagedObject_WhenAnotherInstaceTransactionRollback()
        {
            TestManagedRollback((_, name) =>
            {
                var otherInstance = _realm.All<Person>().First();
                otherInstance.FirstName = name;
            }, _realm.BeginWrite);
        }

        // A rolled-back write on another thread must produce no notifications here.
        [Test]
        public void ManagedObject_WhenAnotherThreadInstanceTransactionRollback()
        {
            AsyncContext.Run(async delegate
            {
                var notifiedPropertyNames = new List<string>();
                var person = new Person();
                _realm.Write(() => { _realm.Add(person); });
                person.PropertyChanged += (sender, e) =>
                {
                    notifiedPropertyNames.Add(e.PropertyName);
                };

                await Task.Run(() =>
                {
                    using (var otherRealm = Realm.GetInstance(_databasePath))
                    using (var transaction = otherRealm.BeginWrite())
                    {
                        var otherInstance = otherRealm.All<Person>().First();
                        otherInstance.FirstName = "Peter";

                        // Nothing is committed yet, so nothing should have been delivered.
                        Assert.That(notifiedPropertyNames, Is.Empty);
                        transaction.Rollback();
                    }
                });

                _realm.Refresh();
                Assert.That(notifiedPropertyNames, Is.Empty);
            });
        }

        // Each committed write notifies only the property it changed.
        [Test]
        public void ManagedObject_MultipleProperties()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });
            person.PropertyChanged += (sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            };

            _realm.Write(() => { person.FirstName = "Peter"; });
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName) }));

            _realm.Write(() => { person.LastName = "Smith"; });
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName), nameof(Person.LastName) }));

            _realm.Write(() => { person.Score = 3.5f; });
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName), nameof(Person.LastName), nameof(Person.Score) }));
        }

        // Notifications are per-object: changing one object must not notify another.
        [Test]
#if WINDOWS
        // NOTE(review): "ExtenrnalCommitHelper" below is a typo for "ExternalCommitHelper";
        // left unchanged because [Ignore] text is runtime-visible.
        [Ignore("ExtenrnalCommitHelper hangs on Windows in this test. Reenable when we have proper condvar.")]
#endif
        public void MultipleManagedObjects()
        {
            var firstNotifiedPropertyNames = new List<string>();
            var secondNotifiedPropertyNames = new List<string>();
            var first = new Person();
            var second = new Person();
            _realm.Write(() =>
            {
                _realm.Add(first);
                _realm.Add(second);
            });

            first.PropertyChanged += (sender, e) =>
            {
                firstNotifiedPropertyNames.Add(e.PropertyName);
            };

            second.PropertyChanged += (sender, e) =>
            {
                secondNotifiedPropertyNames.Add(e.PropertyName);
            };

            _realm.Write(() => { first.IsAmbivalent = true; });
            _realm.Refresh();
            Assert.That(firstNotifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.IsAmbivalent) }));
            Assert.That(secondNotifiedPropertyNames, Is.Empty);

            _realm.Write(() =>
            {
                second.Latitude = 4.6;
                second.Longitude = 5.6;
            });
            _realm.Refresh();
            Assert.That(firstNotifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.IsAmbivalent) }));
            Assert.That(secondNotifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.Latitude), nameof(Person.Longitude) }));
        }

        // Removing an object that has a PropertyChanged subscriber must not throw.
        [Test]
        public void ManagedObject_AfterSubscribe_CanRemove()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });
            person.PropertyChanged += (sender, e) =>
            {
                Assert.That(sender, Is.EqualTo(person));
                notifiedPropertyNames.Add(e.PropertyName);
            };

            _realm.Write(() => { person.FirstName = "Peter"; });
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName) }));

            _realm.Write(() => { _realm.Remove(person); });
            Assert.That(_realm.All<Person>().Count(), Is.EqualTo(0));
        }

        // Two subscribers both receive notifications; removing one leaves the other active.
        [Test]
        public void ManagedObject_MultipleSubscribers()
        {
            var subscriber1Properties = new List<string>();
            var subscriber2Properties = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });

            var handler1 = new PropertyChangedEventHandler((sender, e) =>
            {
                Assert.That(sender, Is.EqualTo(person));
                subscriber1Properties.Add(e.PropertyName);
            });
            person.PropertyChanged += handler1;

            person.PropertyChanged += (sender, e) =>
            {
                Assert.That(sender, Is.EqualTo(person));
                subscriber2Properties.Add(e.PropertyName);
            };

            _realm.Write(() => { person.Birthday = new DateTimeOffset(1985, 1, 5, 8, 2, 3, TimeSpan.FromHours(3)); });
            _realm.Refresh();
            Assert.That(subscriber1Properties, Is.EquivalentTo(new[] { nameof(Person.Birthday) }));
            Assert.That(subscriber2Properties, Is.EquivalentTo(new[] { nameof(Person.Birthday) }));

            person.PropertyChanged -= handler1;

            _realm.Write(() => { person.IsInteresting = true; });
            _realm.Refresh();
            Assert.That(subscriber1Properties, Is.EquivalentTo(new[] { nameof(Person.Birthday) }));
            Assert.That(subscriber2Properties, Is.EquivalentTo(new[] { nameof(Person.Birthday), nameof(Person.IsInteresting) }));
        }

        // The event reports "Email_" rather than "Email" — presumably Person.Email is
        // declared with a [MapTo]-style mapping; confirm against the Person model.
        [Test]
        public void ManagedObject_WhenMappedTo_ShouldUsePropertyName()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });
            person.PropertyChanged += (sender, e) =>
            {
                Assert.That(sender, Is.EqualTo(person));
                notifiedPropertyNames.Add(e.PropertyName);
            };

            _realm.Write(() => { person.Email = "peter@gmail.com"; });
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { "Email_" }));
        }

        // Same mapped-name expectation for the unmanaged (synchronous) path.
        [Test]
        public void UnmanagedObject_WhenMappedTo_ShouldUsePropertyName()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            person.PropertyChanged += (sender, e) =>
            {
                Assert.That(sender, Is.EqualTo(person));
                notifiedPropertyNames.Add(e.PropertyName);
            };

            person.Email = "peter@gmail.com";
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { "Email_" }));
        }

        // Once the subscriber object is garbage collected, further changes to the row
        // must neither notify nor throw.
        [Test]
#if WINDOWS
        [Ignore("GC blocks on Windows")]
#endif
        public void ManagedObject_WhenHandleIsReleased_ShouldNotReceiveNotifications()
        {
            AsyncContext.Run(async delegate
            {
                var notifiedPropertyNames = new List<string>();
                WeakReference personReference = null;

                // Wrap in an immediately-invoked Action so 'person' goes out of scope
                // and becomes eligible for collection.
                new Action(() =>
                {
                    var person = new Person();
                    _realm.Write(() => _realm.Add(person));
                    person.PropertyChanged += (sender, e) => { notifiedPropertyNames.Add(e.PropertyName); };
                    personReference = new WeakReference(person);
                    _realm.Write(() => person.FirstName = "Peter");

                    // Sanity check
                    _realm.Refresh();
                    Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName) }));
                })();

                notifiedPropertyNames.Clear();

                // Keep collecting until the weak reference is actually dead.
                while (personReference.IsAlive)
                {
                    await Task.Yield();
                    GC.Collect();
                    GC.WaitForPendingFinalizers();
                }

                Assert.That(personReference.IsAlive, Is.False);

                _realm.Write(() =>
                {
                    var peter = _realm.All<Person>().Single();
                    Assert.That(peter.FirstName, Is.EqualTo("Peter"));
                    peter.FirstName = "George";
                });

                // person was garbage collected, so we should not be notified and no exception should be thrown.
                _realm.Refresh();
                Assert.That(notifiedPropertyNames, Is.Empty);
            });
        }

        // AgedObject.OnPropertyChanged re-raises for the computed Age property
        // whenever Birthday changes — verify both names are delivered.
        [Test]
        public void ManagedObject_WhenChanged_CallsOnPropertyChanged()
        {
            AsyncContext.Run(async delegate
            {
                var item = new AgedObject
                {
                    Birthday = DateTimeOffset.UtcNow.AddYears(-5)
                };

                _realm.Write(() => _realm.Add(item));

                var notifiedPropertyNames = new List<string>();
                item.PropertyChanged += (sender, e) =>
                {
                    notifiedPropertyNames.Add(e.PropertyName);
                };

                _realm.Write(() => { item.Birthday = DateTimeOffset.UtcNow.AddYears(-6); });

                await Task.Yield();

                Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(AgedObject.Birthday), nameof(AgedObject.Age) }));
            });
        }

        // Same computed-property behavior when the write happens on another thread.
        [Test]
        public void ManagedObject_WhenChangedOnAnotherThread_CallsOnPropertyChanged()
        {
            AsyncContext.Run(async delegate
            {
                var item = new AgedObject
                {
                    Birthday = DateTimeOffset.UtcNow.AddYears(-5)
                };

                _realm.Write(() => _realm.Add(item));

                var notifiedPropertyNames = new List<string>();
                item.PropertyChanged += (sender, e) =>
                {
                    notifiedPropertyNames.Add(e.PropertyName);
                };

                await _realm.WriteAsync(r =>
                {
                    var otherThreadInstance = r.All<AgedObject>().Single();
                    otherThreadInstance.Birthday = DateTimeOffset.UtcNow.AddYears(-6);
                });

                await Task.Yield();

                Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(AgedObject.Birthday), nameof(AgedObject.Age) }));
            });
        }

        // And when the object is unmanaged, OnPropertyChanged fires synchronously.
        [Test]
        public void UnmanagedObject_WhenChanged_CallsOnPropertyChanged()
        {
            var item = new AgedObject
            {
                Birthday = DateTimeOffset.UtcNow.AddYears(-5)
            };

            var notifiedPropertyNames = new List<string>();
            item.PropertyChanged += (sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            };

            item.Birthday = DateTimeOffset.UtcNow.AddYears(-6);

            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(AgedObject.Birthday), nameof(AgedObject.Age) }));
        }

        [Test]
        public void ManagedObject_WhenSubscribedDuringTransaction_AfterCommit_ShouldGetNotifications()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });

            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });

            _realm.Write(() =>
            {
                person.PropertyChanged += handler;
                person.FirstName = "Peter";
            });

            _realm.Refresh();

            // We miss notifications from this transaction because we're subscribing after the
            // transaction has been committed.
            Assert.That(notifiedPropertyNames, Is.Empty);

            _realm.Write(() => { person.FirstName = "John"; });
            _realm.Refresh();

            // We should get subsequent notifications.
            Assert.That(notifiedPropertyNames, Is.EqualTo(new[] { nameof(Person.FirstName) }));

            person.PropertyChanged -= handler;
        }

        [Test]
        public void ManagedObject_WhenSubscribedDuringTransaction_AfterRollback_ShouldGetNotifications()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });

            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });

            using (var transaction = _realm.BeginWrite())
            {
                person.PropertyChanged += handler;
                person.FirstName = "Peter";
                transaction.Rollback();
            }

            _realm.Refresh();
            // The rolled-back change must not be delivered.
            Assert.That(notifiedPropertyNames, Is.Empty);

            _realm.Write(() => { person.FirstName = "John"; });
            _realm.Refresh();

            // We should get subsequent notifications.
            Assert.That(notifiedPropertyNames, Is.EqualTo(new[] { nameof(Person.FirstName) }));

            person.PropertyChanged -= handler;
        }

        [Test]
        public void ManagedObject_WhenSubscribedDuringCreation_AfterCommit_ShouldReceiveNotifications()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });

            _realm.Write(() =>
            {
                _realm.Add(person);
                person.PropertyChanged += handler;
            });

            _realm.Refresh();
            // No notification for the creating transaction itself.
            Assert.That(notifiedPropertyNames, Is.Empty);

            _realm.Write(() => { person.FirstName = "John"; });
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EqualTo(new[] { nameof(Person.FirstName) }));

            person.PropertyChanged -= handler;
        }

        // Subscribing inside a creating transaction that is then rolled back must not
        // throw during the Refresh that follows.
        [Test]
        public void ManagedObject_WhenSubscribedDuringCreation_AfterRollback_ShouldNotThrow()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });

            using (var transaction = _realm.BeginWrite())
            {
                _realm.Add(person);
                person.FirstName = "John";
                person.PropertyChanged += handler;
                transaction.Rollback();
            }

            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.Empty);

            person.PropertyChanged -= handler;
        }

        // Subscribing inside the same transaction that deletes the object: nothing is
        // delivered after commit, and nothing throws.
        [Test]
        public void ManagedObject_WhenSubscribedDuringDeletion_AfterCommit_ShouldNotThrow()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });

            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });

            _realm.Write(() =>
            {
                person.PropertyChanged += handler;
                person.FirstName = "John";
                _realm.Remove(person);
            });

            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.Empty);

            person.PropertyChanged -= handler;
        }

        [Test, NUnit.Framework.Explicit("After remove + rollback, the object handle is invalid - https://github.com/realm/realm-dotnet/issues/1332")]
        public void ManagedObject_WhenSubscribedDuringDeletion_AfterRollback_ShouldReceiveNotifications()
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });

            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });

            using (var transaction = _realm.BeginWrite())
            {
                person.PropertyChanged += handler;
                person.FirstName = "John";
                _realm.Remove(person);
                transaction.Rollback();
            }

            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.Empty);

            // After the rollback the object should be alive again and notifying.
            _realm.Write(() => person.FirstName = "John");
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EqualTo(new[] { nameof(Person.FirstName) }));

            person.PropertyChanged -= handler;
        }

        // Shared scenario driver: subscribe, write twice (second write sets the same
        // value), unsubscribe, write again — asserting the notification pattern.
        // 'writeFirstNameAction' performs the actual write (same/other instance/thread).
        private async Task TestManaged(Func<Person, string, Task> writeFirstNameAction)
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });
            var handler = new PropertyChangedEventHandler((sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            });
            person.PropertyChanged += handler;

            // Subscribed - regular set should trigger
            await writeFirstNameAction(person, "Peter");
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName) }));

            // Subscribed - setting the same value for the property should trigger again
            // This is different from .NET's usual behavior, but is a limitation due to the fact that we don't
            // check the previous value of the property before setting it.
            await writeFirstNameAction(person, "Peter");
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.EquivalentTo(new[] { nameof(Person.FirstName), nameof(Person.FirstName) }));

            notifiedPropertyNames.Clear();
            person.PropertyChanged -= handler;

            // Unsubscribed - should not trigger
            await writeFirstNameAction(person, "George");
            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.Empty);
        }

        // Shared rollback driver: a write performed inside a transaction that is rolled
        // back must never surface a PropertyChanged notification.
        private void TestManagedRollback(Action<Person, string> writeFirstNameAction, Func<Transaction> transactionFactory)
        {
            var notifiedPropertyNames = new List<string>();
            var person = new Person();
            _realm.Write(() => { _realm.Add(person); });
            person.PropertyChanged += (sender, e) =>
            {
                notifiedPropertyNames.Add(e.PropertyName);
            };

            using (var transaction = transactionFactory())
            {
                writeFirstNameAction(person, "Peter");

                // Not committed yet — nothing should have been delivered.
                Assert.That(notifiedPropertyNames, Is.Empty);
                transaction.Rollback();
            }

            _realm.Refresh();
            Assert.That(notifiedPropertyNames, Is.Empty);
        }

        // Test model: a persisted Birthday plus a computed Age; OnPropertyChanged
        // forwards Birthday changes as an additional Age notification.
        private class AgedObject : RealmObject
        {
            public DateTimeOffset Birthday { get; set; }

            // Age in whole years, derived from Birthday at read time (not persisted).
            public int Age
            {
                get
                {
                    var now = DateTimeOffset.UtcNow;
                    var age = now.Year - Birthday.Year;
                    // Adjust down if the birthday hasn't occurred yet this year.
                    if (Birthday.AddYears(age) > now)
                    {
                        age--;
                    }

                    return age;
                }
            }

            protected override void OnPropertyChanged(string propertyName)
            {
                base.OnPropertyChanged(propertyName);

                // Age depends on Birthday, so raise a synthetic notification for it.
                if (propertyName == nameof(Birthday))
                {
                    RaisePropertyChanged(nameof(Age));
                }
            }
        }
    }
}